diff --git a/.env b/.env new file mode 100644 index 0000000..0ae2de1 --- /dev/null +++ b/.env @@ -0,0 +1,3 @@ +FALKORDB_HOST=localhost +FALKORDB_PORT=6379 + diff --git a/README.md b/README.md index e69de29..f3ef11e 100644 --- a/README.md +++ b/README.md @@ -0,0 +1,11 @@ +flask --app code_graph run --debug + +Process local git repository, ignoring specific folder(s) + +curl -X POST http://127.0.0.1:5000/process_local_repo -H "Content-Type: application/json" -d '{"repo": "/Users/roilipman/Dev/FalkorDB", "ignore": ["./.github", "./sbin", "./.git","./deps", "./bin", "./build"]}' + +Process code coverage +curl -X POST http://127.0.0.1:5000/process_code_coverage -H "Content-Type: application/json" -d '{"lcov": "/Users/roilipman/Dev/code_graph/code_graph/code_coverage/lcov/falkordb.lcov", "repo": "FalkorDB"}' + +Process git information +curl -X POST http://127.0.0.1:5000/process_git_history -H "Content-Type: application/json" -d '{"repo": "/Users/roilipman/Dev/falkorDB"}' diff --git a/code_graph/__init__.py b/code_graph/__init__.py index 9806da1..5ff1d11 100644 --- a/code_graph/__init__.py +++ b/code_graph/__init__.py @@ -1,3 +1,10 @@ -from .analyzers.source_analyzer import * +from .info import * +from .llm import ask +from .graph import * +from .project import * from .entities import * -from .graph import Graph +from .git_utils import * +from .app import create_app +from .code_coverage import * +from .analyzers.source_analyzer import * +from .auto_complete import prefix_search diff --git a/code_graph/analyzers/c/analyzer.py b/code_graph/analyzers/c/analyzer.py index 3e5d446..f5639cf 100644 --- a/code_graph/analyzers/c/analyzer.py +++ b/code_graph/analyzers/c/analyzer.py @@ -1,9 +1,10 @@ import io -from typing import Union, Optional -from pathlib import Path +import os from ..utils import * +from pathlib import Path from ...entities import * from ...graph import Graph +from typing import Union, Optional from ..analyzer import AbstractAnalyzer import tree_sitter_c as tsc 
@@ -347,13 +348,17 @@ def first_pass(self, path: Path, f: io.TextIOWrapper, graph:Graph) -> None: logger.info(f"Processing {path}") # Create file entity - file = File(str(path.parent), path.name, path.suffix) + file = File(os.path.dirname(path), path.name, path.suffix) graph.add_file(file) # Parse file source_code = f.read() tree = self.parser.parse(source_code) - source_code = source_code.decode('utf-8') + try: + source_code = source_code.decode('utf-8') + except Exception as e: + logger.error(f"Failed decoding source code: {e}") + source_code = '' # Process function definitions query = C_LANGUAGE.query("(function_definition) @function") @@ -412,7 +417,7 @@ def second_pass(self, path: Path, f: io.TextIOWrapper, graph: Graph) -> None: logger.info(f"Processing {path}") # Get file entity - file = graph.get_file(str(path.parent), path.name, path.suffix) + file = graph.get_file(os.path.dirname(path), path.name, path.suffix) if file is None: logger.error(f"File entity not found for: {path}") return diff --git a/code_graph/analyzers/python/analyzer.py b/code_graph/analyzers/python/analyzer.py index 02e2d5e..5376139 100644 --- a/code_graph/analyzers/python/analyzer.py +++ b/code_graph/analyzers/python/analyzer.py @@ -1,9 +1,10 @@ import io -from typing import Union, Optional -from pathlib import Path +import os from ..utils import * +from pathlib import Path from ...entities import * from ...graph import Graph +from typing import Union, Optional from ..analyzer import AbstractAnalyzer import tree_sitter_python as tspython @@ -74,7 +75,7 @@ def process_class_definition(self, node: Node, path: Path) -> tuple[Class, list[ return (c, inherited_classes) - def process_function_definition(self, node: Node, path: Path) -> Function: + def process_function_definition(self, node: Node, path: Path, source_code: str) -> Function: """ Processes a function definition node from the syntax tree and extracts relevant information. 
@@ -153,7 +154,8 @@ def process_function_definition(self, node: Node, path: Path) -> Function: ret_type = return_type.text.decode('utf-8') if return_type else None # Create Function object - f = Function(str(path), function_name, docstring, ret_type, '', start_line, end_line) + src = source_code[node.start_byte:node.end_byte] + f = Function(str(path), function_name, docstring, ret_type, src, start_line, end_line) # Add arguments to Function object for arg in args: @@ -162,7 +164,7 @@ def process_function_definition(self, node: Node, path: Path) -> Function: return f def first_pass_traverse(self, parent: Union[File,Class,Function], node: Node, - path: Path, graph: Graph) -> None: + path: Path, graph: Graph, source_code: str) -> None: """ Recursively traverses a syntax tree node, processes class and function definitions, and connects them in a graph representation. @@ -197,7 +199,7 @@ def first_pass_traverse(self, parent: Union[File,Class,Function], node: Node, graph.add_class(entity) elif node.type == "function_definition": - entity = self.process_function_definition(node, path) + entity = self.process_function_definition(node, path, source_code) # Add Function object to the graph graph.add_function(entity) @@ -208,7 +210,7 @@ def first_pass_traverse(self, parent: Union[File,Class,Function], node: Node, # Recursivly visit child nodes for child in node.children: - self.first_pass_traverse(parent, child, path, graph) + self.first_pass_traverse(parent, child, path, graph, source_code) def first_pass(self, path: Path, f: io.TextIOWrapper, graph:Graph) -> None: """ @@ -226,15 +228,20 @@ def first_pass(self, path: Path, f: io.TextIOWrapper, graph:Graph) -> None: logger.info(f"Python Processing {path}") # Create file entity - file = File(str(path.parent), path.name, path.suffix) + file = File(os.path.dirname(path), path.name, path.suffix) graph.add_file(file) # Parse file - content = f.read() - tree = self.parser.parse(content) + source_code = f.read() + tree = 
self.parser.parse(source_code) + try: + source_code = source_code.decode('utf-8') + except Exception as e: + logger.error(f"Failed decoding source code: {e}") + source_code = '' # Walk thought the AST - self.first_pass_traverse(file, tree.root_node, path, graph) + self.first_pass_traverse(file, tree.root_node, path, graph, source_code) def process_function_call(self, node) -> Optional[str]: """ @@ -323,7 +330,7 @@ def process_inheritance(self, cls: Class, super_classes: list[str], graph.connect_entities('INHERITS', cls.id, _super_class.id) def second_pass_traverse(self, parent: Union[File, Class, Function], - node: Node, path: Path, graph: Graph) -> None: + node: Node, path: Path, graph: Graph, source_code: str) -> None: """ Traverse the AST nodes during the second pass and process each node accordingly. @@ -340,7 +347,9 @@ def second_pass_traverse(self, parent: Union[File, Class, Function], parent = cls elif node.type == "function_definition": - func = self.process_function_definition(node, path) + # TODO: simply extract function name, no need to parse entire function + # see C analyzer + func = self.process_function_definition(node, path, source_code) parent = graph.get_function_by_name(func.name) elif node.type == "call": callee = self.process_function_call(node) @@ -349,7 +358,7 @@ def second_pass_traverse(self, parent: Union[File, Class, Function], # Recursivly visit child nodes for child in node.children: - self.second_pass_traverse(parent, child, path, graph) + self.second_pass_traverse(parent, child, path, graph, source_code) def second_pass(self, path: Path, f: io.TextIOWrapper, graph: Graph) -> None: """ @@ -367,17 +376,17 @@ def second_pass(self, path: Path, f: io.TextIOWrapper, graph: Graph) -> None: logger.info(f"Processing {path}") # Get file entity - file = graph.get_file(str(path.parent), path.name, path.suffix) + file = graph.get_file(os.path.dirname(path), path.name, path.suffix) if file is None: logger.error(f"File entity not found for: {path}") 
return try: # Parse file - content = f.read() - tree = self.parser.parse(content) + source_code = f.read() + tree = self.parser.parse(source_code) # Walk thought the AST - self.second_pass_traverse(file, tree.root_node, path, graph) + self.second_pass_traverse(file, tree.root_node, path, graph, source_code) except Exception as e: logger.error(f"Failed to process file {path}: {e}") diff --git a/code_graph/analyzers/source_analyzer.py b/code_graph/analyzers/source_analyzer.py index 82298fc..dc9c0a5 100644 --- a/code_graph/analyzers/source_analyzer.py +++ b/code_graph/analyzers/source_analyzer.py @@ -1,17 +1,18 @@ import os -import tempfile +import shutil import concurrent.futures from git import Repo from pathlib import Path -from typing import Optional +from typing import Optional, List from ..graph import Graph from .c.analyzer import CAnalyzer from .python.analyzer import PythonAnalyzer import logging -logger = logging.getLogger('code_graph') +# Configure logging +logging.basicConfig(level=logging.DEBUG, format='%(filename)s - %(asctime)s - %(levelname)s - %(message)s') # List of available analyzers analyzers = {'.c': CAnalyzer(), @@ -19,29 +20,33 @@ '.py': PythonAnalyzer()} class SourceAnalyzer(): - def __init__(self, host: str = 'localhost', port: int = 6379, - username: Optional[str] = None, password: Optional[str] = None) -> None: - self.host = host - self.port = port - self.username = username - self.password = password + def supported_types(self) -> List[str]: + """ + """ + return list(analyzers.keys()) - def first_pass(self, base: str, root: str, - executor: concurrent.futures.Executor) -> None: + def first_pass(self, ignore: List[str], executor: concurrent.futures.Executor) -> None: """ Perform the first pass analysis on source files in the given directory tree. Args: - base (str): The base directory path to be used for relative paths. - root (str): The root directory path to start the analysis from. 
+ ignore (list(str)): List of paths to ignore executor (concurrent.futures.Executor): The executor to run tasks concurrently. """ - print(f'root: {root}') tasks = [] - for dirpath, dirnames, filenames in os.walk(root): - logger.info(f'Processing directory: {dirpath}') + for dirpath, dirnames, filenames in os.walk("."): + + # skip current directory if it is within the ignore list + if dirpath in ignore: + # in-place clear dirnames to prevent os.walk from recursing into + # any of the nested directories + logging.info(f'ignoring directory: {dirpath}') + dirnames[:] = [] + continue + + logging.info(f'Processing directory: {dirpath}') # Process each file in the current directory for filename in filenames: @@ -50,16 +55,15 @@ def first_pass(self, base: str, root: str, # Skip none supported files ext = file_path.suffix if ext not in analyzers: - logger.info(f"Skipping none supported file {file_path}") + logging.info(f"Skipping none supported file {file_path}") continue - logger.info(f'Processing file: {file_path}') + logging.info(f'Processing file: {file_path}') def process_file(path: Path) -> None: with open(path, 'rb') as f: - relative_path = str(path).replace(base, '') ext = path.suffix - analyzers[ext].first_pass(Path(relative_path), f, self.graph) + analyzers[ext].first_pass(path, f, self.graph) process_file(file_path) #task = executor.submit(process_file, file_path) @@ -68,8 +72,7 @@ def process_file(path: Path) -> None: # Wait for all tasks to complete #concurrent.futures.wait(tasks) - def second_pass(self, base: str, root: str, - executor: concurrent.futures.Executor) -> None: + def second_pass(self, ignore: List[str], executor: concurrent.futures.Executor) -> None: """ Recursively analyze the contents of a directory. 
@@ -80,8 +83,17 @@ def second_pass(self, base: str, root: str, """ tasks = [] - for dirpath, dirnames, filenames in os.walk(root): - logger.info(f'Processing directory: {dirpath}') + for dirpath, dirnames, filenames in os.walk("."): + + # skip current directory if it is within the ignore list + if dirpath in ignore: + # in-place clear dirnames to prevent os.walk from recursing into + # any of the nested directories + logging.info(f'ignoring directory: {dirpath}') + dirnames[:] = [] + continue + + logging.info(f'Processing directory: {dirpath}') # Process each file in the current directory for filename in filenames: @@ -92,13 +104,12 @@ def second_pass(self, base: str, root: str, if ext not in analyzers: continue - logger.info(f'Processing file: {file_path}') + logging.info(f'Processing file: {file_path}') def process_file(path: Path) -> None: with open(path, 'rb') as f: - relative_path = str(path).replace(base, '') ext = path.suffix - analyzers[ext].second_pass(Path(relative_path), f, self.graph) + analyzers[ext].second_pass(path, f, self.graph) task = executor.submit(process_file, file_path) tasks.append(task) @@ -106,39 +117,66 @@ def process_file(path: Path) -> None: # Wait for all tasks to complete concurrent.futures.wait(tasks) - def analyze_sources(self, path: Path) -> None: + def analyze_file(self, path: Path, graph: Graph) -> None: + ext = path.suffix + logging.info(f"analyze_file: path: {path}") + logging.info(f"analyze_file: ext: {ext}") + if ext not in analyzers: + return + + with open(path, 'rb') as f: + analyzers[ext].first_pass(path, f, graph) + analyzers[ext].second_pass(path, f, graph) + + def analyze_sources(self, ignore: List[str]) -> None: with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: # First pass analysis of the source code - self.first_pass(path, path, executor) + self.first_pass(ignore, executor) # Second pass analysis of the source code - self.second_pass(path, path, executor) + self.second_pass(ignore, executor) - 
def analyze_repository(self, url: str) -> None: + def analyze(self, path: str, g: Graph, ignore: Optional[List[str]] = []) -> None: """ - Analyze a Git repository given its URL. + Analyze path. Args: - url (str): The URL of the Git repository to analyze. + path (str): Path to a local folder containing source files to process + ignore (List(str)): List of paths to skip """ - # Extract repository name from the URL - components = url[:url.rfind('.')].split('/') - n = len(components) - repo_name = f'{components[n-2]}/{components[-1]}' - logger.debug(f'repo_name: {repo_name}') - #repo_name = url[url.rfind('/')+1:url.rfind('.')] + # Save original working directory for later restore + original_dir = Path.cwd() + + # change working directory to path + os.chdir(path) # Initialize the graph and analyzer - self.graph = Graph(repo_name, self.host, self.port, self.username, - self.password) + self.graph = g + + # Analyze source files + self.analyze_sources(ignore) + + logging.info("Done analyzing path") + + # Restore original working dir + os.chdir(original_dir) + + def analyze_local_repository(self, path: str, ignore: Optional[List[str]] = []) -> Graph: + """ + Analyze a local Git repository. 
+ + Args: + path (str): Path to a local git repository + ignore (List(str)): List of paths to skip + """ + + self.analyze_local_folder(path, ignore) - # Create a temporary directory for cloning the repository - with tempfile.TemporaryDirectory() as temp_dir: - logger.info(f"Cloning repository {url} to {temp_dir}") - repo = Repo.clone_from(url, temp_dir) + # Save processed commit hash to the DB + repo = Repo(path) + head = repo.commit("HEAD") + self.graph.set_graph_commit(head.hexsha) - # Analyze source files - self.analyze_sources(temp_dir) + return self.graph - logger.info("Done processing repository") diff --git a/code_graph/app.py b/code_graph/app.py new file mode 100644 index 0000000..2331a88 --- /dev/null +++ b/code_graph/app.py @@ -0,0 +1,508 @@ +import os +import datetime +from code_graph import * +from typing import Optional +from falkordb import FalkorDB +from dotenv import load_dotenv +from urllib.parse import urlparse +from .auto_complete import prefix_search +from flask import Flask, request, jsonify, abort + +# Load environment variables from .env file +load_dotenv() + +# Configure the logger +import logging +logging.basicConfig(level=logging.DEBUG, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + +def create_app(): + app = Flask(__name__) + + @app.route('/graph_entities', methods=['GET']) + def graph_entities(): + """ + Endpoint to fetch sub-graph entities from a given repository. + The repository is specified via the 'repo' query parameter. + + Returns: + - 200: Successfully returns the sub-graph. + - 400: Missing or invalid 'repo' parameter. + - 500: Internal server error or database connection issue. 
+ """ + + # Access the 'repo' parameter from the GET request + repo = request.args.get('repo') + + if not repo: + logging.error("Missing 'repo' parameter in request.") + return jsonify({"status": "Missing 'repo' parameter"}), 400 + + if not graph_exists(repo): + logging.error(f"Missing project {repo}") + return jsonify({"status": f"Missing project {repo}"}), 400 + + try: + # Initialize the graph with the provided repo and credentials + g = Graph(repo) + + # Retrieve a sub-graph of up to 100 entities + sub_graph = g.get_sub_graph(100) + + logging.info(f"Successfully retrieved sub-graph for repo: {repo}") + response = { + 'status': 'success', + 'entities': sub_graph + } + + return jsonify(response), 200 + + except Exception as e: + logging.error(f"Error retrieving sub-graph for repo '{repo}': {e}") + return jsonify({"status": "Internal server error"}), 500 + + + @app.route('/get_neighbors', methods=['GET']) + def get_neighbors(): + """ + Endpoint to get neighbors of a specific node in the graph. + Expects 'repo' and 'node_id' as query parameters. + + Returns: + JSON response containing neighbors or error messages. + """ + + # Get query parameters + repo = request.args.get('repo') + node_id = request.args.get('node_id') + + # Validate 'repo' parameter + if not repo: + logging.error("Repository name is missing in the request.") + return jsonify({"status": "Repository name is required."}), 400 + + # Validate 'node_id' parameter + if not node_id: + logging.error("Node ID is missing in the request.") + return jsonify({"status": "Node ID is required."}), 400 + + # Validate repo exists + if not graph_exists(repo): + logging.error(f"Missing project {repo}") + return jsonify({"status": f"Missing project {repo}"}), 400 + + # Try converting node_id to an integer + try: + node_id = int(node_id) + except ValueError: + logging.error(f"Invalid node ID: {node_id}. It must be an integer.") + return jsonify({"status": "Invalid node ID. 
It must be an integer."}), 400 + + # Initialize the graph with the provided repository + g = Graph(repo) + + # Fetch the neighbors of the specified node + neighbors = g.get_neighbors(node_id) + + # Log and return the neighbors + logging.info(f"Successfully retrieved neighbors for node ID {node_id} in repo '{repo}'.") + + response = { + 'status': 'success', + 'neighbors': neighbors + } + + return jsonify(response), 200 + + + @app.route('/process_repo', methods=['POST']) + def process_repo(): + """ + Process a GitHub repository. + + Expected JSON payload: + { + "repo_url": "string", + "ignore": ["string"] # optional + } + + Returns: + JSON response with processing status + """ + + data = request.get_json() + url = data.get('repo_url') + if url is None: + return jsonify({'status': f'Missing mandatory parameter "url"'}), 400 + logger.debug(f'Received repo_url: {url}') + + ignore = data.get('ignore', []) + + proj = Project.from_git_repository(url) + proj.analyze_sources(ignore) + proj.process_git_history(ignore) + + # Create a response + response = { + 'status': 'success', + } + + return jsonify(response), 200 + + @app.route('/process_local_repo', methods=['POST']) + def process_local_repo(): + # Get JSON data from the request + data = request.get_json() + + # Process the data + repo = data.get('repo') + if repo is None: + return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 + logger.debug(f'Received repo: {repo}') + + ignore = data.get('ignore') + if ignore is not None: + logger.debug(f"Ignoring the following paths: {ignore}") + + # Create source code analyzer + analyzer = SourceAnalyzer() + + try: + analyzer.analyze_local_repository(repo, ignore) + except Exception as e: + logger.error(f'An error occurred: {e}') + return jsonify({'status': f'Failed to process repository: {repo}'}), 400 + + # Create a response + response = { + 'status': 'success', + } + + return jsonify(response), 200 + + @app.route('/process_code_coverage', methods=['POST']) + def 
process_code_coverage(): + """ + Endpoint to process code coverage data for a given repository. + + Returns: + JSON response indicating success or an error message. + """ + + # Get JSON data from the request + data = request.get_json() + + # Validate that 'repo' is provided + repo = data.get('repo') + if repo is None: + logging.warning("Missing mandatory parameter 'repo'") + return jsonify({'status': 'error', 'message': 'Missing mandatory parameter "repo"'}), 400 + + # Validate that 'lcov' is provided + lcov = data.get('lcov') + if lcov is None: + logging.warning("Missing mandatory parameter 'lcov'") + return jsonify({'status': f'Missing mandatory parameter "lcov"'}), 400 + + # Process the lcov data for the repository + process_lcov(repo, lcov) + + # Create a success response + response = { + 'status': 'success', + } + + return jsonify(response), 200 + + + @app.route('/switch_commit', methods=['POST']) + def switch_commit(): + """ + Endpoint to switch a repository to a specific commit. + + Returns: + JSON response with the change set or an error message. + """ + + # Get JSON data from the request + data = request.get_json() + + # Validate that 'repo' is provided + repo = data.get('repo') + if repo is None: + return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 + + # Validate that 'commit' is provided + commit = data.get('commit') + if commit is None: + return jsonify({'status': f'Missing mandatory parameter "commit"'}), 400 + + # Attempt to switch the repository to the specified commit + change_set = switch_commit(repo, commit) + + # Create a success response + response = { + 'status': 'success', + 'change_set': change_set + } + + return jsonify(response), 200 + + @app.route('/auto_complete', methods=['POST']) + def auto_complete(): + """ + Endpoint to process auto-completion requests for a repository based on a prefix. + + Returns: + JSON response with auto-completion suggestions or an error message. 
+ """ + + # Get JSON data from the request + data = request.get_json() + + # Validate that 'repo' is provided + repo = data.get('repo') + if repo is None: + return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 + + # Validate that 'prefix' is provided + prefix = data.get('prefix') + if prefix is None: + return jsonify({'status': f'Missing mandatory parameter "prefix"'}), 400 + + # Validate repo exists + if not graph_exists(repo): + return jsonify({'status': f'Missing project {repo}'}), 400 + + # Fetch auto-completion results + completions = prefix_search(repo, prefix) + + # Create a success response + response = { + 'status': 'success', + 'completions': completions + } + + return jsonify(response), 200 + + + @app.route('/list_repos', methods=['GET']) + def list_repos(): + """ + Endpoint to list all available repositories. + + Returns: + JSON response with a list of repositories or an error message. + """ + + # Fetch list of repositories + repos = get_repos() + + # Create a success response with the list of repositories + response = { + 'status': 'success', + 'repositories': repos + } + + return jsonify(response), 200 + + + @app.route('/list_commits', methods=['POST']) + def list_commits(): + """ + Endpoint to list all commits of a specified repository. + + Request JSON Structure: + { + "repo": "repository_name" + } + + Returns: + JSON response with a list of commits or an error message. 
+ """ + + # Get JSON data from the request + data = request.get_json() + + # Validate the presence of the 'repo' parameter + repo = data.get('repo') + if repo is None: + return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 + + # Initialize GitGraph object to interact with the repository + git_graph = GitGraph(GitRepoName(repo)) + + # Fetch commits from the repository + commits = git_graph.list_commits() + + # Return success response with the list of commits + response = { + 'status': 'success', + 'commits': commits + } + + return jsonify(response), 200 + + + @app.route('/repo_info', methods=['POST']) + def repo_info(): + """ + Endpoint to retrieve information about a specific repository. + + Expected JSON payload: + { + "repo": + } + + Returns: + JSON: A response containing the status and graph statistics (node and edge counts). + - 'status': 'success' if successful, or an error message. + - 'info': A dictionary with the node and edge counts if the request is successful. + """ + + # Get JSON data from the request + data = request.get_json() + + # Validate the 'repo' parameter + repo = data.get('repo') + if repo is None: + return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 + + # Initialize the graph with the provided repository name + g = Graph(repo) + + # Retrieve statistics from the graph + stats = g.stats() + info = get_repo_info(repo) + + if stats is None or info is None: + return jsonify({'status': f'Missing repository "{repo}"'}), 400 + + stats |= info + + # Create a response + response = { + 'status': 'success', + 'info': stats + } + + return jsonify(response), 200 + + @app.route('/find_paths', methods=['POST']) + def find_paths(): + """ + Finds all paths between a source node (src) and a destination node (dest) in the graph. + The graph is associated with the repository (repo) provided in the request. + + Request Body (JSON): + - repo (str): Name of the repository. + - src (int): ID of the source node. 
+ - dest (int): ID of the destination node. + + Returns: + A JSON response with: + - status (str): Status of the request ("success" or "error"). + - paths (list): List of paths between the source and destination nodes. + """ + + # Get JSON data from the request + data = request.get_json() + + # Validate 'repo' parameter + repo = data.get('repo') + if repo is None: + return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 + + # Validate 'src' parameter + src = data.get('src') + if src is None: + return jsonify({'status': f'Missing mandatory parameter "src"'}), 400 + if not isinstance(src, int): + return jsonify({'status': "src node id must be int"}), 400 + + # Validate 'dest' parameter + dest = data.get('dest') + if dest is None: + return jsonify({'status': f'Missing mandatory parameter "dest"'}), 400 + if not isinstance(dest, int): + return jsonify({'status': "dest node id must be int"}), 400 + + if not graph_exists(repo): + logging.error(f"Missing project {repo}") + return jsonify({"status": f"Missing project {repo}"}), 400 + + # Initialize graph with provided repo and credentials + g = Graph(repo) + + # Find paths between the source and destination nodes + paths = g.find_paths(src, dest) + + # Create and return a successful response + response = { 'status': 'success', 'paths': paths } + + return jsonify(response), 200 + + + @app.route('/unreachable', methods=['POST']) + def unreachable_entities(): + """ + Endpoint to retrieve unreachable entities in the graph. + Expects 'repo', optional 'label', and optional 'relation' as parameters in the POST request. + + Returns: + JSON response with unreachable entities or error message. 
+ """ + + # Get JSON data from the request + data = request.get_json() + + # Validate 'repo' parameter + repo = data.get('repo') + if repo is None: + return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 + + # Get optional 'label' and 'relation' parameters + lbl = data.get('label', None) + rel = data.get('relation', None) + + # Initialize graph with provided repo and credentials + g = Graph(repo) + + # Fetch unreachable entities based on optional label and relation + unreachable_entities = g.unreachable_entities(lbl, rel) + + # Create and return a successful response + response = { 'status': 'success', 'unreachables ': unreachable_entities } + + return jsonify(response), 200 + + @app.route('/chat', methods=['POST']) + def chat(): + # Get JSON data from the request + data = request.get_json() + + # Validate 'repo' parameter + repo = data.get('repo') + if repo is None: + return jsonify({'status': f'Missing mandatory parameter "repo"'}), 400 + + # Get optional 'label' and 'relation' parameters + msg = data.get('msg') + if msg is None: + return jsonify({'status': f'Missing mandatory parameter "msg"'}), 400 + + answer = ask(repo, msg) + + # Create and return a successful response + response = { 'status': 'success', 'response': answer } + + return jsonify(response), 200 + + + return app + +if __name__ == '__main__': + app = create_app() + app.run(debug=True) + diff --git a/code_graph/auto_complete.py b/code_graph/auto_complete.py new file mode 100644 index 0000000..c986bad --- /dev/null +++ b/code_graph/auto_complete.py @@ -0,0 +1,6 @@ +from .graph import Graph + +def prefix_search(repo: str, prefix: str) -> str: + g = Graph(repo) + return g.prefix_search(prefix) + diff --git a/code_graph/code_coverage/__init__.py b/code_graph/code_coverage/__init__.py new file mode 100644 index 0000000..bb38f8a --- /dev/null +++ b/code_graph/code_coverage/__init__.py @@ -0,0 +1 @@ +from .lcov import * diff --git a/code_graph/code_coverage/lcov/__init__.py 
b/code_graph/code_coverage/lcov/__init__.py new file mode 100644 index 0000000..de111f4 --- /dev/null +++ b/code_graph/code_coverage/lcov/__init__.py @@ -0,0 +1 @@ +from .lcov import process_lcov diff --git a/code_graph/code_coverage/lcov/lcov.py b/code_graph/code_coverage/lcov/lcov.py new file mode 100644 index 0000000..734f285 --- /dev/null +++ b/code_graph/code_coverage/lcov/lcov.py @@ -0,0 +1,216 @@ +import os +import sys +from ...graph import Graph + +def lcovparse(content): + # clean and strip lines + assert 'end_of_record' in content, 'lcov file is missing "end_of_record" line(s)' + files = filter(lambda f: f != '', content.strip().split("end_of_record")) + + records = [] + + for f in files: + record = _part(f) + if record is not None: + records.append(record) + + return records + +def _part(chunk): + # search for TN: marker + lines = chunk.split('\n') + + idx = 0 + for l in lines: + if l.startswith("TN:"): + break + idx += 1 + + if idx == len(lines): + return None + + # remove all lines prior to 'TN:' marker + lines = lines[idx:] + + report = { + "test": None, + "file": None, + "stats": {}, + "lines": [], + "functions": [], + "branches": [] + } + + for l in lines: + _line(l, report) + + return report + +def _line(l, report): + """ + http://ltp.sourceforge.net/test/coverage/lcov.readme.php#10 + """ + if l == '': + return None + + method, content = tuple(l.strip().split(':', 1)) + content = content.strip() + if method == 'TN': + # test title + report["test"] = content + + elif method == 'SF': + # file name + report["file"] = content + + elif method == 'LF': + # lines found + report['stats']['lines'] = int(content) + + elif method == 'LH': + # line hit + report['stats']['hit'] = int(content) + + elif method == 'DA': + if 'null' not in content: + content = content.split(',') + line, hit = int(content[0]), int(content[1]) + report['lines'].append((line, hit)) + + #-------------------------------------------------------------------------- + # Functions + 
#-------------------------------------------------------------------------- + + elif method == 'FNF': + # functions found + report["stats"]["fn_found"] = int(content) + + elif method == 'FNH': + report["stats"]["fn_hit"] = int(content) + + elif method == 'FN': + line, name = content.split(',', 1) + report['functions'].append(dict(line=int(line), name=name)) + + elif method == 'FNDA': + # function names + # FNDA:75,get_user + hit, name = content.split(',', 1) + if hit not in (None, '-', ''): + for fn in report['functions']: + if fn['name'] == name: + fn['hit'] = int(hit) + + #-------------------------------------------------------------------------- + # Branches + #-------------------------------------------------------------------------- + + elif method == 'BRF': + report['stats']['br_found'] = int(content) + + elif method == 'BRH': + report['stats']['br_hit'] = int(content) + + elif method == 'BRDA': + # branch names + # BRDA:10,1,0,1 + line, block, branch, taken = content.split(',', 3) + report['branches'].append(dict( + line=int(line), + block=int(block), + branch=int(branch), + taken=0 if taken == '-' else int(taken))) + + else: + sys.stdout.write("Unknown method name %s" % method) + +def process_lcov(repo: str, lcov_file: str) -> None: + # create report from coverage lcov file + with open(lcov_file, "r") as file: + content = file.read() # Reads the entire file as a single string + + report = lcovparse(content) + + # SF:/__w/FalkorDB/FalkorDB/src/algorithms/detect_cycle.c + prefix = "/__w/FalkorDB/FalkorDB/" # prefix to remove + + g = Graph(repo) + + #--------------------------------------------------------------------------- + # Process report + #--------------------------------------------------------------------------- + + for r in report: + file_path = r['file'] + file_path = file_path[len(prefix):] + + stats = r['stats'] + lines = stats['lines'] + hit = stats['hit'] + hit_percentage = hit / lines + + ext = os.path.splitext(file_path)[1] + path = 
os.path.dirname(file_path) + name = os.path.basename(file_path) + + g.set_file_coverage(path, name, ext, hit_percentage) + + #----------------------------------------------------------------------- + # Process functions + #----------------------------------------------------------------------- + + # for each function compute its coverage + if hit_percentage == 1: + # the entire file is covered + continue + + # get functions in file + funcs = g.get_functions_in_file(path, name, ext) + + # no functions, continue + if len(funcs) == 0: + continue + + # sort lines + r['lines'].sort() + lines = r['lines'] + + # sort functions + funcs.sort(key=lambda obj: obj.src_start) + + for f in funcs: + src_start = f.src_start + src_end = f.src_end + + # find first line within function boundries + idx = 0 + while idx < len(lines) and src_start > lines[idx][0]: + idx += 1 + + # couldn't find line within function boundries + # because functions are sorted there wouldn't be any lines + # within the next function boundry + if idx == len(lines): + f.coverage_precentage = 0 + lines = [] # clear lines + + # count number of lines within function boundry + n = len(lines) + hit_count = 0 + while idx < n and src_start <= lines[idx][0] and src_end >= lines[idx][0]: + idx += 1 + hit_count += 1 + + # update function coverage precentage + f.coverage_precentage = hit_count / max(1, (src_end - src_start)) + + # remove consumed lines + lines = lines[idx:] + + # update functions within the graph + ids = [f.id for f in funcs] + metadata = [{'coverage_precentage': f.coverage_precentage } for f in funcs] + g.set_functions_metadata(ids, metadata) + +if __name__ == '__main__': + process_lcov("src", "./falkordb.lcov") diff --git a/code_graph/entities/__init__.py b/code_graph/entities/__init__.py index 6b23957..d627139 100644 --- a/code_graph/entities/__init__.py +++ b/code_graph/entities/__init__.py @@ -1,10 +1,9 @@ # __init__.py -__all__ = ['File', 'Function', 'Argument', 'Class', 'Struct'] - from .file 
from falkordb import Node, Edge, Path

def encode_node(n: Node) -> dict:
    """
    Serialize a graph Node to a plain dict, hiding the internal
    'Searchable' full-text-index label from API consumers.
    """
    # BUGFIX: original called remove() unconditionally, raising
    # ValueError for nodes (e.g. Commit) that lack the label
    if 'Searchable' in n.labels:
        n.labels.remove('Searchable')
    return vars(n)

def encode_edge(e: Edge) -> dict:
    """Serialize a graph Edge to a plain dict."""
    return vars(e)

def encode_path(p: Path) -> dict:
    """Serialize a graph Path as its component nodes and edges."""
    return {
        'nodes': [encode_node(n) for n in p.nodes()],
        'edges': [encode_edge(e) for e in p.edges()]
    }

def encode_graph_entity(e) -> dict:
    """
    Serialize any graph entity (Node, Edge or Path) to a dict.

    Raises:
        Exception: if the entity type is not recognized.
    """
    if isinstance(e, Node):
        return encode_node(e)
    elif isinstance(e, Edge):
        return encode_edge(e)
    elif isinstance(e, Path):
        return encode_path(e)
    else:
        # BUGFIX: original message lacked the f-prefix, so the offending
        # type was never interpolated into the error text
        raise Exception(f"Unable to encode graph entity, unknown graph entity type: {type(e)}")
'localhost'), + port=os.getenv('FALKORDB_PORT', 6379), + username=os.getenv('FALKORDB_USERNAME', None), + password=os.getenv('FALKORDB_PASSWORD', None)) + + self.g = self.db.select_graph(name) + + # create indicies + # index commit hash + try: + self.g.create_node_range_index("Commit", "hash") + except Exception: + pass + + def _commit_from_node(self, node:Node) -> dict: + """ + Returns a dict representing a commit node + """ + + return {'hash': node.properties['hash'], + 'date': node.properties['date'], + 'author': node.properties['author'], + 'message': node.properties['message']} + + def add_commit(self, commit: Commit) -> None: + """ + Add a new commit to the graph + """ + date = commit.committed_date + author = commit.author.name + hexsha = commit.hexsha + message = commit.message + logging.info(f"Adding commit {hexsha}: {message}") + + q = "MERGE (c:Commit {hash: $hash, author: $author, message: $message, date: $date})" + params = {'hash': hexsha, 'author': author, 'message': message, 'date': date} + self.g.query(q, params) + + def list_commits(self) -> List[Node]: + """ + List all commits + """ + + q = "MATCH (c:Commit) RETURN c ORDER BY c.date" + result_set = self.g.query(q).result_set + + return [self._commit_from_node(row[0]) for row in result_set] + + def get_commits(self, hashes: List[str]) -> List[dict]: + logging.info(f"Searching for commits {hashes}") + + q = """MATCH (c:Commit) + WHERE c.hash IN $hashes + RETURN c""" + + params = {'hashes': hashes} + res = self.g.query(q, params).result_set + + commits = [] + for row in res: + commit = self._commit_from_node(row[0]) + commits.append(commit) + + logging.info(f"retrived commits: {commits}") + return commits + + def get_child_commit(self, parent) -> Optional[dict]: + q = """MATCH (c:Commit {hash: $parent})-[:CHILD]->(child: Commit) + RETURN child""" + + res = self.g.query(q, {'parent': parent}).result_set + + if len(res) > 0: + assert(len(res) == 1) + return self._commit_from_node(res[0][0]) + + return 
None + + def connect_commits(self, child: str, parent: str) -> None: + """ + connect commits via both PARENT and CHILD edges + """ + + logging.info(f"Connecting commits {child} -PARENT-> {parent}") + logging.info(f"Connecting commits {parent} -CHILD-> {child}") + + q = """MATCH (child :Commit {hash: $child_hash}), (parent :Commit {hash: $parent_hash}) + MERGE (child)-[:PARENT]->(parent) + MERGE (parent)-[:CHILD]->(child)""" + + params = {'child_hash': child, 'parent_hash': parent} + + self.g.query(q, params) + + + def set_parent_transition(self, child: str, parent: str, queries: [str], params: [str]) -> None: + """ + Sets the queries and parameters needed to transition the code-graph + from the child commit to the parent commit + """ + + q = """MATCH (child :Commit {hash: $child})-[e:PARENT]->(parent :Commit {hash: $parent}) + SET e.queries = $queries, e.params = $params""" + + _params = {'child': child, 'parent': parent, 'queries': queries, 'params': params} + + self.g.query(q, _params) + + + def set_child_transition(self, child: str, parent: str, queries: [str], params: [str]) -> None: + """ + Sets the queries and parameters needed to transition the code-graph + from the parent commit to the child commit + """ + + q = """MATCH (parent :Commit {hash: $parent})-[e:CHILD]->(child :Commit {hash: $child}) + SET e.queries = $queries, e.params = $params""" + + _params = {'child': child, 'parent': parent, 'queries': queries, 'params': params} + + self.g.query(q, _params) + + + def get_parent_transitions(self, child: str, parent: str) -> List[tuple[str: dict]]: + """ + Get queries and parameters transitioning from child commit to parent commit + """ + q = """MATCH path = (:Commit {hash: $child_hash})-[:PARENT*]->(:Commit {hash: $parent_hash}) + WITH path + LIMIT 1 + UNWIND relationships(path) AS e + WITH e + WHERE e.queries is not NULL + RETURN collect(e.queries), collect(e.params) + """ + + res = self.g.query(q, {'child_hash': child, 'parent_hash': parent}).result_set + 
+ return (res[0][0], res[0][1]) + + + def get_child_transitions(self, child: str, parent: str) -> List[tuple[str: dict]]: + """ + Get queries and parameters transitioning from parent commit to child commit + """ + q = """MATCH path = (:Commit {hash: $parent_hash})-[:CHILD*]->(:Commit {hash: $child_hash}) + WITH path + LIMIT 1 + UNWIND relationships(path) AS e + WITH e + WHERE e.queries is not NULL + RETURN collect(e.queries), collect(e.params) + """ + + res = self.g.query(q, {'child_hash': child, 'parent_hash': parent}).result_set + + return (res[0][0], res[0][1]) + diff --git a/code_graph/git_utils/git_utils.py b/code_graph/git_utils/git_utils.py new file mode 100644 index 0000000..fd1965b --- /dev/null +++ b/code_graph/git_utils/git_utils.py @@ -0,0 +1,383 @@ +import os +import time +import json +import redis +import logging +import threading +import subprocess +from ..info import * +from git import Repo +from pathlib import Path +from ..graph import Graph +from .git_graph import GitGraph +from typing import List, Optional +from ..analyzers import SourceAnalyzer + +# Configure logging +logging.basicConfig(level=logging.DEBUG, format='%(filename)s - %(asctime)s - %(levelname)s - %(message)s') + +def GitRepoName(repo_name): + return "{" + repo_name + "}_git" + +def is_ignored(file_path: str, ignore_list: List[str]) -> bool: + """ + Checks if a file should be ignored based on the ignore list. + + Args: + file_path (str): The file path to check. + ignore_list (List[str]): List of patterns to ignore. + + Returns: + bool: True if the file should be ignored, False otherwise. + """ + + return any(file_path.startswith(ignore) for ignore in ignore_list) + +def classify_changes(diff, ignore_list: List[str]) -> (List[str], List[str], List[str]): + """ + Classifies changes into added, deleted, and modified files. + + Args: + diff: The git diff object representing changes between two commits. + ignore_list (List[str]): List of file patterns to ignore. 
def _supported_deleted_files(deleted, supported_types) -> List[dict]:
    """Filter deleted paths down to supported source files, returned as the
    {'path', 'name', 'ext'} dicts the graph deletion API expects."""
    out = []
    for deleted_file_path in deleted:
        _ext = os.path.splitext(deleted_file_path)[1]
        if _ext in supported_types:
            out.append({'path': os.path.dirname(deleted_file_path),
                        'name': os.path.basename(deleted_file_path),
                        'ext':  _ext})
    return out

# build a graph capturing the git commit history
def build_commit_graph(
        path: str,
        repo_name: str,
        ignore_list: Optional[List[str]] = None
    ) -> GitGraph:
    """
    Builds a graph representation of the git commit history.

    Args:
        path (str): Path to the git repository.
        repo_name (str): Name of the repository.
        ignore_list (List[str], optional): List of file patterns to ignore.

    Returns:
        GitGraph: Graph object representing the commit history.
    """

    # BUGFIX: avoid a shared mutable default argument
    if ignore_list is None:
        ignore_list = []

    # Copy the graph into a temporary graph
    logging.info(f"Cloning source graph {repo_name} -> {repo_name}_tmp")
    # Will be deleted at the end of this function
    g = Graph(repo_name).clone(repo_name + "_tmp")
    g.enable_backlog()

    analyzer        = SourceAnalyzer()
    git_graph       = GitGraph(GitRepoName(repo_name))
    supported_types = analyzer.supported_types()

    # BUGFIX: open the repository at the given `path`; the original opened
    # the current working directory ('.') and silently ignored `path`.
    repo = Repo(path)
    current_commit        = repo.head.commit
    current_commit_hexsha = current_commit.hexsha

    # Add commit to the git graph
    git_graph.add_commit(current_commit)

    #--------------------------------------------------------------------------
    # Process git history going backwards
    #--------------------------------------------------------------------------

    logging.info("Computing transition queries moving backwards")

    child_commit = current_commit
    while len(child_commit.parents) > 0:
        parent_commit = child_commit.parents[0]

        # add commit to the git graph
        git_graph.add_commit(parent_commit)

        # connect child parent commits relation
        git_graph.connect_commits(child_commit.hexsha, parent_commit.hexsha)

        # Represents the changes going backward!
        # e.g. which files need to be deleted when moving back one commit;
        # going forward, delete events would become add events.

        logging.info(f"""Computing diff between
                     child {child_commit.hexsha}: {child_commit.message}
                     and {parent_commit.hexsha}: {parent_commit.message}""")

        diff = child_commit.diff(parent_commit)
        added, deleted, modified = classify_changes(diff, ignore_list)

        # Checkout prev commit
        logging.info(f"Checking out commit: {parent_commit.hexsha}")
        repo.git.checkout(parent_commit.hexsha)

        #-----------------------------------------------------------------------
        # Apply changes going backwards
        #-----------------------------------------------------------------------

        # remove deleted files from the graph
        deleted_files = _supported_deleted_files(deleted, supported_types)
        if len(deleted_files) > 0:
            logging.info(f"Removing deleted files: {deleted_files}")
            g.delete_files(deleted_files)

        for new_file in added:
            # New file been added
            logging.info(f"Introducing a new source file: {new_file}")
            analyzer.analyze_file(new_file, g)

        queries, params = g.clear_backlog()

        # Save transition queries to the git graph
        if len(queries) > 0:
            assert(len(queries) == len(params))

            # Convert parameters from dict to JSON formatted string
            params = [json.dumps(p) for p in params]

            logging.debug(f"""Save graph transition from
                          commit: {child_commit.hexsha}
                          to
                          commit: {parent_commit.hexsha}
                          Queries: {queries}
                          Parameters: {params}
                          """)

            git_graph.set_parent_transition(child_commit.hexsha,
                                            parent_commit.hexsha, queries, params)
        # advance to the next commit
        child_commit = parent_commit

    #--------------------------------------------------------------------------
    # Process git history going forward
    #--------------------------------------------------------------------------

    logging.info("Computing transition queries moving forward")
    parent_commit = child_commit
    while parent_commit.hexsha != current_commit_hexsha:
        child_commit = git_graph.get_child_commit(parent_commit.hexsha)
        child_commit = repo.commit(child_commit['hash'])

        # Represents the changes going forward
        # e.g. which files need to be deleted when moving forward one commit

        logging.info(f"""Computing diff between
                     child {parent_commit.hexsha}: {parent_commit.message}
                     and {child_commit.hexsha}: {child_commit.message}""")

        diff = parent_commit.diff(child_commit)
        added, deleted, modified = classify_changes(diff, ignore_list)

        # Checkout child commit
        logging.info(f"Checking out commit: {child_commit.hexsha}")
        repo.git.checkout(child_commit.hexsha)

        #-----------------------------------------------------------------------
        # Apply changes going forward
        #-----------------------------------------------------------------------

        # remove deleted files from the graph
        deleted_files = _supported_deleted_files(deleted, supported_types)
        if len(deleted_files) > 0:
            logging.info(f"Removing deleted files: {deleted_files}")
            g.delete_files(deleted_files)

        for new_file in added:
            # New file been added
            logging.info(f"Introducing a new source file: {new_file}")
            analyzer.analyze_file(new_file, g)

        queries, params = g.clear_backlog()

        # Save transition queries to the git graph
        if len(queries) > 0:
            assert(len(queries) == len(params))

            # Convert parameters from dict to JSON formatted string
            params = [json.dumps(p) for p in params]

            logging.debug(f"""Save graph transition from
                          commit: {parent_commit.hexsha}
                          to
                          commit: {child_commit.hexsha}
                          Queries: {queries}
                          Parameters: {params}
                          """)

            git_graph.set_child_transition(child_commit.hexsha,
                                           parent_commit.hexsha, queries, params)
        # advance to the child_commit
        parent_commit = child_commit

    logging.debug("Done processing repository commit history")

    #--------------------------------------------------------------------------
    # Clean up
    #--------------------------------------------------------------------------

    # Delete temporary graph
    g.disable_backlog()

    logging.debug(f"Deleting temporary graph {repo_name + '_tmp'}")
    g.delete()

    return git_graph
def switch_commit(repo: str, to: str) -> dict:
    """
    Switches the state of a graph repository from its current commit to the given commit.

    This function handles switching between two git commits for a graph-based repository.
    It identifies the changes (additions, deletions, modifications) in nodes and edges between
    the current commit and the target commit and then applies the necessary transitions.

    Args:
        repo (str): The name of the graph repository to switch commits.
        to (str): The target commit hash to switch the graph to.

    Returns:
        dict: The changes made during the commit switch, organized as
        {'deletions'|'additions'|'modifications': {'nodes': [...], 'edges': [...]}}.

    Raises:
        ValueError: on invalid arguments, or when one of the two commits
        is missing from the git graph.
    """

    # Validate input arguments
    if not repo or not isinstance(repo, str):
        raise ValueError("Invalid repository name")

    if not to or not isinstance(to, str):
        raise ValueError("Invalid desired commit value")

    logging.info(f"Switching to commit: {to}")

    # Initialize return value to an empty change set
    change_set = {
        'deletions': {
            'nodes': [],
            'edges': []
        },
        'additions': {
            'nodes': [],
            'edges': [],
        },
        'modifications': {
            'nodes': [],
            'edges': []
        }
    }

    # Initialize the graph and GitGraph objects
    g = Graph(repo)
    git_graph = GitGraph(GitRepoName(repo))

    # Get the current commit hash of the graph
    current_hash = get_repo_commit(repo)
    logging.info(f"Current graph commit: {current_hash}")

    if current_hash == to:
        # BUGFIX: original debug message lacked the f-prefix and logged
        # the literal "{current_hash}"
        logging.debug(f"Current commit: {current_hash} is the requested commit")
        # No change, remain at the current commit
        return change_set

    # Find the path between the current commit and the desired commit
    commits = git_graph.get_commits([current_hash, to])

    # Ensure both current and target commits are present
    if len(commits) != 2:
        logging.error("Missing commits. Unable to proceed.")
        raise ValueError("Commits not found")

    # Identify the current and new commits based on their hashes
    current_commit, new_commit = (commits if commits[0]['hash'] == current_hash else reversed(commits))

    # Determine the direction of the switch (forward or backward in history)
    if current_commit['date'] > new_commit['date']:
        child_commit  = current_commit
        parent_commit = new_commit
        logging.info(f"Moving backward from {child_commit['hash']} to {parent_commit['hash']}")
        # Get the transitions (queries and parameters) for moving backward
        queries, params = git_graph.get_parent_transitions(child_commit['hash'], parent_commit['hash'])
    else:
        child_commit  = new_commit
        parent_commit = current_commit
        logging.info(f"Moving forward from {parent_commit['hash']} to {child_commit['hash']}")
        # Get the transitions (queries and parameters) for moving forward
        queries, params = git_graph.get_child_transitions(child_commit['hash'], parent_commit['hash'])

    # Apply each transition query with its respective parameters
    for q, p in zip(queries, params):
        for _q, _p in zip(q, p):
            _p = json.loads(_p)
            logging.debug(f"Executing query: {_q} with params: {_p}")

            # Rerun the query with parameters on the graph
            res = g.rerun_query(_q, _p)
            if "DELETE" in _q:
                deleted_nodes = res.result_set[0][0]
                change_set['deletions']['nodes'] += deleted_nodes

    # Update the graph's commit to the new target commit
    set_repo_commit(repo, to)
    logging.info(f"Graph commit updated to {to}")

    return change_set
# Configure the logger
import logging
logging.basicConfig(level=logging.DEBUG,
    format='%(filename)s - %(asctime)s - %(levelname)s - %(message)s')

def _connect() -> FalkorDB:
    """Create a FalkorDB connection from the standard environment variables.

    Centralizes connection construction that was previously duplicated in
    graph_exists(), get_repos() and Graph.__init__()."""
    return FalkorDB(host=os.getenv('FALKORDB_HOST', 'localhost'),
                    port=os.getenv('FALKORDB_PORT', 6379),
                    username=os.getenv('FALKORDB_USERNAME', None),
                    password=os.getenv('FALKORDB_PASSWORD', None))

def graph_exists(name: str) -> bool:
    """True if a graph named `name` exists in the database."""
    return name in _connect().list_graphs()

def get_repos() -> List[str]:
    """
    List processed repositories (all graphs except internal *_git graphs).
    """
    graphs = _connect().list_graphs()
    return [g for g in graphs if not g.endswith('_git')]

class Graph():
    """
    Represents a connection to a graph database using FalkorDB.
    """

    def __init__(self, name: str) -> None:
        self.name = name
        self.db = _connect()
        self.g = self.db.select_graph(name)

        # Initialize the backlog as disabled by default
        self.backlog = None

        # create indices

        # index File path, name and ext fields
        try:
            self.g.create_node_range_index("File", "name", "ext")
        except Exception:
            # index may already exist
            pass

        # index entities using full-text search
        try:
            self.g.create_node_fulltext_index("Searchable", "name")
        except Exception:
            # index may already exist
            pass
+ Create a copy of the graph under the name clone + + Returns: + a new instance of Graph + """ + + # Make sure key clone isn't already exists + if self.db.connection.exists(clone): + raise Exception(f"Can not create clone, key: {clone} already exists.") + + self.g.copy(clone) + + # Wait for the clone to become available + while not self.db.connection.exists(clone): + # TODO: add a waiting limit + time.sleep(1) + + return Graph(clone) + + + def delete(self) -> None: + """ + Delete graph + """ + self.g.delete() + + def enable_backlog(self) -> None: + """ + Enables the backlog by initializing an empty list. + """ + + self.backlog = {'queries': [], 'params': []} + logging.debug("Backlog enabled") + + def disable_backlog(self) -> None: + """ + Disables the backlog by setting it to None. + """ + + self.backlog = None + logging.debug("Backlog disabled") + + def clear_backlog(self) -> Tuple[List[str], List[dict]]: + """ + Clears and returns the backlog of queries and parameters. + + Returns: + Tuple[List[str], List[dict]]: A tuple containing two lists: + - The first list contains the backlog of queries. + - The second list contains the backlog of query parameters. + """ + + res = [], [] # Default return value + + if self.backlog: + params = self.backlog['params'] + queries = self.backlog['queries'] + + # Clear backlog + self.backlog = {'queries': [], 'params': []} + + logging.debug(f"Backlog queries: {queries}") + logging.debug(f"Backlog params: {params}") + + # Set return value + res = queries, params + + logging.debug("Backlog cleared") + + return res + + + def _query(self, q: str, params: Optional[dict] = None) -> QueryResult: + """ + Executes a query on the graph database and logs changes to the backlog if any. Args: - c (Class): The Class object to be added. + q (str): The query string to execute. + params (dict): The parameters for the query. + + Returns: + QueryResult: The result of the query execution. 
""" - q = """MERGE (c:Class {name: $name, path: $path, src_start: $src_start, - src_end: $src_end}) - SET c.doc = $doc - RETURN ID(c)""" + result_set = self.g.query(q, params) - params = { - 'doc': c.doc, - 'name': c.name, - 'path': c.path, - 'src_start': c.src_start, - 'src_end': c.src_end, - } + if self.backlog is not None: + # Check if any change occurred in the query results + change_detected = any( + getattr(result_set, attr) > 0 + for attr in [ + 'relationships_deleted', 'nodes_deleted', 'labels_added', + 'labels_removed', 'nodes_created', 'properties_set', + 'properties_removed', 'relationships_created' + ] + ) + logging.info(f"change_detected: {change_detected}") + + # Append the query and parameters to the backlog if changes occurred + if change_detected: + logging.debug(f"logging queries: {q}") + logging.debug(f"logging params: {params}") + self.backlog['queries'].append(q) + self.backlog['params'].append(params) + + return result_set + + def get_sub_graph(self, l: int) -> dict: + + q = """MATCH (src) + OPTIONAL MATCH (src)-[e]->(dest) + RETURN src, e, dest + LIMIT $limit""" + + sub_graph = {'nodes': [], 'edges': [] } + + result_set = self._query(q, {'limit': l}).result_set + for row in result_set: + src = row[0] + e = row[1] + dest = row[2] + + sub_graph['nodes'].append(encode_node(src)) + + if e is not None: + sub_graph['edges'].append(encode_edge(e)) + sub_graph['nodes'].append(encode_node(dest)) + + return sub_graph + + + def get_neighbors(self, node_id: int, rel: Optional[str] = None, lbl: Optional[str] = None) -> Dict[str, List[dict]]: + """ + Fetch the neighbors of a given node in the graph based on relationship type and/or label. + + Args: + node_id (int): The ID of the source node. + rel (str, optional): The type of relationship to filter by. Defaults to None. + lbl (str, optional): The label of the destination node to filter by. Defaults to None. + + Returns: + dict: A dictionary with lists of 'nodes' and 'edges' for the neighbors. 
+ """ + + # Validate inputs + if not isinstance(node_id, int): + raise ValueError("node_id must be an integer") + + # Build relationship and label query parts + rel_query = f":{rel}" if rel else "" + lbl_query = f":{lbl}" if lbl else "" + + # Parameterized Cypher query to find neighbors + query = f""" + MATCH (n)-[e{rel_query}]->(dest{lbl_query}) + WHERE ID(n) = $node_id + RETURN e, dest + """ + + # Initialize the neighbors structure + neighbors = {'nodes': [], 'edges': []} + + try: + # Execute the graph query with node_id parameter + result_set = self._query(query, {'node_id': node_id}).result_set + + # Iterate over the result set and process nodes and edges + for edge, destination_node in result_set: + neighbors['nodes'].append(encode_node(destination_node)) + neighbors['edges'].append(encode_edge(edge)) + + return neighbors + + except Exception as e: + logging.error(f"Error fetching neighbors for node {node_id}: {e}") + return {'nodes': [], 'edges': []} - res = self.g.query(q, params) - c.id = res.result_set[0][0] def _class_from_node(self, n: Node) -> Class: """ @@ -53,9 +257,34 @@ def _class_from_node(self, n: Node) -> Class: return c + def add_class(self, c: Class) -> None: + """ + Adds a class node to the graph database. + + Args: + c (Class): The Class object to be added. 
+ """ + + q = """MERGE (c:Class:Searchable {name: $name, path: $path, src_start: $src_start, + src_end: $src_end}) + SET c.doc = $doc + RETURN c""" + + params = { + 'doc': c.doc, + 'name': c.name, + 'path': c.path, + 'src_start': c.src_start, + 'src_end': c.src_end, + } + + res = self._query(q, params) + node = res.result_set[0][0] + c.id = node.id + def get_class_by_name(self, class_name: str) -> Optional[Class]: q = "MATCH (c:Class) WHERE c.name = $name RETURN c LIMIT 1" - res = self.g.query(q, {'name': class_name}).result_set + res = self._query(q, {'name': class_name}).result_set if len(res) == 0: return None @@ -67,7 +296,7 @@ def get_class(self, class_id: int) -> Optional[Class]: WHERE ID(c) = $class_id RETURN c""" - res = self.g.query(q, {'class_id': class_id}) + res = self._query(q, {'class_id': class_id}) if len(res.result_set) == 0: return None @@ -75,6 +304,30 @@ def get_class(self, class_id: int) -> Optional[Class]: c = res.result_set[0][0] return self._class_from_node(c) + def _function_from_node(self, n: Node) -> Function: + """ + Create a Function from a graph node + """ + + src = n.properties.get('src') + doc = n.properties.get('doc') + path = n.properties.get('path') + name = n.properties.get('name') + args = n.properties.get('args') + src_end = n.properties.get('src_end') + ret_type = n.properties.get('ret_type') + src_start = n.properties.get('src_start') + + f = Function(path, name, doc, ret_type, src, src_start, src_end) + for arg in args: + name = arg[0] + type_ = arg[1] + f.add_argument(name, type_) + + f.id = n.id + + return f + def add_function(self, func: Function) -> None: """ Adds a function node to the graph database. @@ -83,10 +336,10 @@ def add_function(self, func: Function) -> None: func (Function): The Function object to be added. 
""" - q = """MERGE (f:Function {path: $path, name: $name, + q = """MERGE (f:Function:Searchable {path: $path, name: $name, src_start: $src_start, src_end: $src_end}) SET f.args = $args, f.ret_type = $ret_type, f.src = $src, f.doc = $doc - RETURN ID(f)""" + RETURN f""" # Prepare arguments in a more straightforward manner args = [[arg.name, arg.type] for arg in func.args] @@ -101,49 +354,88 @@ def add_function(self, func: Function) -> None: 'ret_type': func.ret_type } + res = self._query(q, params) + node = res.result_set[0][0] + func.id = node.id - res = self.g.query(q, params) - func.id = res.result_set[0][0] + # set functions metadata + def set_functions_metadata(self, ids: List[int], metadata: List[dict]) -> None: + assert(len(ids) == len(metadata)) - def _function_from_node(self, n: Node) -> Function: - """ - Create a Function from a graph node - """ + # TODO: Match (f:Function) + q = """UNWIND range(0, size($ids)) as i + WITH $ids[i] AS id, $values[i] AS v + MATCH (f) + WHERE ID(f) = id + SET f += v + RETURN f""" + + params = {'ids': ids, 'values': metadata} - src = n.properties.get('src') - doc = n.properties.get('doc') - path = n.properties.get('path') - name = n.properties.get('name') - args = n.properties.get('args') - src_end = n.properties.get('src_end') - ret_type = n.properties.get('ret_type') - src_start = n.properties.get('src_start') + self._query(q, params) - f = Function(path, name, doc, ret_type, src, src_start, src_end) - for arg in args: - name = arg[0] - type_ = arg[1] - f.add_argument(name, type_) - - f.id = n.id + # get all functions defined by file + def get_functions_in_file(self, path: str, name: str, ext: str) -> List[Function]: + q = """MATCH (f:File {path: $path, name: $name, ext: $ext}) + MATCH (f)-[:DEFINES]->(func:Function) + RETURN collect(func)""" - return f + params = {'path': path, 'name': name, 'ext': ext} + funcs = self._query(q, params).result_set[0][0] + + return [self._function_from_node(n) for n in funcs] def 
get_function_by_name(self, name: str) -> Optional[Function]: q = "MATCH (f:Function) WHERE f.name = $name RETURN f LIMIT 1" - res = self.g.query(q, {'name': name}).result_set + res = self._query(q, {'name': name}).result_set if len(res) == 0: return None return self._function_from_node(res[0][0]) + def prefix_search(self, prefix: str) -> str: + """ + Search for entities by prefix using a full-text search on the graph. + The search is limited to 10 nodes. Each node's name and labels are retrieved, + and the results are sorted based on their labels. + + Args: + prefix (str): The prefix string to search for in the graph database. + + Returns: + str: A list of entity names and corresponding labels, sorted by label. + If no results are found or an error occurs, an empty list is returned. + """ + + # Append a wildcard '*' to the prefix for full-text search. + search_prefix = f"{prefix}*" + + # Cypher query to perform full-text search and limit the result to 10 nodes. + # The 'CALL db.idx.fulltext.queryNodes' method searches for nodes labeled 'Searchable' + # that match the given prefix, collects the nodes, and returns the result. + query = """ + CALL db.idx.fulltext.queryNodes('Searchable', $prefix) + YIELD node + WITH node + RETURN node + LIMIT 10 + """ + + # Execute the query using the provided graph database connection. 
+ result_set = self._query(query, {'prefix': search_prefix}).result_set + + completions = [encode_node(row[0]) for row in result_set] + + return completions + + def get_function(self, func_id: int) -> Optional[Function]: q = """MATCH (f:Function) WHERE ID(f) = $func_id RETURN f""" - res = self.g.query(q, {'func_id': func_id}) + res = self._query(q, {'func_id': func_id}) if len(res.result_set) == 0: return None @@ -158,7 +450,7 @@ def function_calls(self, func_id: int) -> List[Function]: MATCH (f)-[:CALLS]->(callee) RETURN callee""" - res = self.g.query(q, {'func_id': func_id}) + res = self._query(q, {'func_id': func_id}) callees = [] for row in res.result_set: @@ -173,7 +465,7 @@ def function_called_by(self, func_id: int) -> List[Function]: MATCH (caller)-[:CALLS]->(f) RETURN caller""" - res = self.g.query(q, {'func_id': func_id}) + res = self._query(q, {'func_id': func_id}) callers = [] for row in res.result_set: @@ -192,12 +484,39 @@ def add_file(self, file: File) -> None: file_ext (str): Extension of the file. """ - q = """MERGE (f:File {path: $path, name: $name, ext: $ext}) - RETURN ID(f)""" + q = """MERGE (f:File:Searchable {path: $path, name: $name, ext: $ext}) + RETURN f""" params = {'path': file.path, 'name': file.name, 'ext': file.ext} - res = self.g.query(q, params) - file.id = res.result_set[0][0] + res = self._query(q, params) + node = res.result_set[0][0] + file.id = node.id + + def delete_files(self, files: List[dict]) -> tuple[str, dict, List[int]]: + """ + Deletes file(s) from the graph in addition to any other entity + defined in the file + + a file is defined by its path, name and extension + files = [{'path':_, 'name': _, 'ext': _}, ...] 
+ """ + + q = """UNWIND $files AS file + MATCH (f:File {path: file['path'], name: file['name'], ext: file['ext']}) + WITH collect(f) AS Fs + UNWIND Fs AS f + OPTIONAL MATCH (f)-[:DEFINES]->(e) + WITH Fs, collect(e) AS Es + WITH Fs + Es AS entities + UNWIND entities AS e + DELETE e + RETURN collect(ID(e)) + """ + + params = {'files': files} + res = self._query(q, params) + + return None def get_file(self, path: str, name: str, ext: str) -> Optional[File]: """ @@ -224,7 +543,7 @@ def get_file(self, path: str, name: str, ext: str) -> Optional[File]: RETURN f""" params = {'path': path, 'name': name, 'ext': ext} - res = self.g.query(q, params) + res = self._query(q, params) if(len(res.result_set) == 0): return None @@ -239,6 +558,20 @@ def get_file(self, path: str, name: str, ext: str) -> Optional[File]: return file + # set file code coverage + # if file coverage is 100% set every defined function coverage to 100% aswell + def set_file_coverage(self, path: str, name: str, ext: str, coverage: float) -> None: + q = """MATCH (f:File {path: $path, name: $name, ext: $ext}) + SET f.coverage_precentage = $coverage + WITH f + WHERE $coverage = 1.0 + MATCH (f)-[:DEFINES]->(func:Function) + SET func.coverage_precentage = 1.0""" + + params = {'path': path, 'name': name, 'ext': ext, 'coverage': coverage} + + res = self._query(q, params) + def connect_entities(self, relation: str, src_id: int, dest_id: int) -> None: """ Establish a relationship between src and dest @@ -250,10 +583,11 @@ def connect_entities(self, relation: str, src_id: int, dest_id: int) -> None: q = f"""MATCH (src), (dest) WHERE ID(src) = $src_id AND ID(dest) = $dest_id - MERGE (src)-[:{relation}]->(dest)""" + MERGE (src)-[e:{relation}]->(dest) + RETURN e""" params = {'src_id': src_id, 'dest_id': dest_id} - self.g.query(q, params) + self._query(q, params) def function_calls_function(self, caller_id: int, callee_id: int, pos: int) -> None: """ @@ -267,35 +601,11 @@ def function_calls_function(self, caller_id: int, 
callee_id: int, pos: int) -> N q = """MATCH (caller:Function), (callee:Function) WHERE ID(caller) = $caller_id AND ID(callee) = $callee_id - MERGE (caller)-[e:CALLS {pos:$pos}]->(callee)""" + MERGE (caller)-[e:CALLS {pos:$pos}]->(callee) + RETURN e""" params = {'caller_id': caller_id, 'callee_id': callee_id, 'pos': pos} - self.g.query(q, params) - - def add_struct(self, s: Struct) -> None: - """ - Adds a struct node to the graph database. - - Args: - s (Struct): The Struct object to be added. - """ - - q = """MERGE (s:Struct {name: $name, path: $path, src_start: $src_start, - src_end: $src_end}) - SET s.doc = $doc, s.fields = $fields - RETURN ID(s)""" - - params = { - 'doc': s.doc, - 'name': s.name, - 'path': s.path, - 'src_start': s.src_start, - 'src_end': s.src_end, - 'fields': s.fields - } - - res = self.g.query(q, params) - s.id = res.result_set[0][0] + self._query(q, params) def _struct_from_node(self, n: Node) -> Struct: """ @@ -322,9 +632,35 @@ def _struct_from_node(self, n: Node) -> Struct: return s + def add_struct(self, s: Struct) -> None: + """ + Adds a struct node to the graph database. + + Args: + s (Struct): The Struct object to be added. 
+ """ + + q = """MERGE (s:Struct:Searchable {name: $name, path: $path, src_start: $src_start, + src_end: $src_end}) + SET s.doc = $doc, s.fields = $fields + RETURN s""" + + params = { + 'doc': s.doc, + 'name': s.name, + 'path': s.path, + 'src_start': s.src_start, + 'src_end': s.src_end, + 'fields': s.fields + } + + res = self._query(q, params) + node = res.result_set[0][0] + s.id = node.id + def get_struct_by_name(self, struct_name: str) -> Optional[Struct]: q = "MATCH (s:Struct) WHERE s.name = $name RETURN s LIMIT 1" - res = self.g.query(q, {'name': struct_name}).result_set + res = self._query(q, {'name': struct_name}).result_set if len(res) == 0: return None @@ -336,7 +672,7 @@ def get_struct(self, struct_id: int) -> Optional[Struct]: WHERE ID(s) = $struct_id RETURN s""" - res = self.g.query(q, {'struct_id': struct_id}) + res = self._query(q, {'struct_id': struct_id}) if len(res.result_set) == 0: return None @@ -344,3 +680,93 @@ def get_struct(self, struct_id: int) -> Optional[Struct]: s = res.result_set[0][0] return self._struct_from_node(s) + def rerun_query(self, q: str, params: dict) -> QueryResult: + """ + Re-run a query to transition the graph from one state to another + """ + + return self._query(q, params) + + def find_paths(self, src: int, dest: int) -> List[Path]: + """ + Find all paths between the source (src) and destination (dest) nodes. + + Args: + src (int): The ID of the source node. + dest (int): The ID of the destination node. + + Returns: + List[Optional[Path]]: A list of paths found between the src and dest nodes. + Returns an empty list if no paths are found. + + Raises: + Exception: If the query fails or the graph database returns an error. + """ + + # Define the query to match paths between src and dest nodes. + q = """MATCH (src), (dest) + WHERE ID(src) = $src_id AND ID(dest) = $dest_id + WITH src, dest + MATCH p = (src)-[:CALLS*]->(dest) + RETURN p + """ + + # Perform the query with the source and destination node IDs. 
+ result_set = self._query(q, {'src_id': src, 'dest_id': dest}).result_set + + paths = [] + + # Extract paths from the query result set. + for row in result_set: + path = [] + p = row[0] + nodes = p.nodes() + edges = p.edges() + + for n, e in zip(nodes, edges): + path.append(encode_node(n)) + path.append(encode_edge(e)) + + # encode last node on path + path.append(encode_node(nodes[-1])) + paths.append(path) + + return paths + + def stats(self) -> dict: + """ + Retrieve statistics about the graph, including the number of nodes and edges. + + Returns: + dict: A dictionary containing: + - 'node_count' (int): The total number of nodes in the graph. + - 'edge_count' (int): The total number of edges in the graph. + """ + + q = "MATCH (n) RETURN count(n)" + node_count = self._query(q).result_set[0][0] + + q = "MATCH ()-[e]->() RETURN count(e)" + edge_count = self._query(q).result_set[0][0] + + # Return the statistics + return {'node_count': node_count, 'edge_count': edge_count} + + def unreachable_entities(self, lbl: Optional[str], rel: Optional[str]) -> List[dict]: + lbl = f": {lbl}" if lbl else "" + rel = f": {rel}" if rel else "" + + q = f""" MATCH (n {lbl}) + WHERE not ()-[{rel}]->(n) + RETURN n + """ + + result_set = self._query(q).result_set + + unreachables = [] + for row in result_set: + node = row[0] + unreachables.append(encode_node(node)) + + return unreachables + diff --git a/code_graph/info.py b/code_graph/info.py new file mode 100644 index 0000000..2c058b8 --- /dev/null +++ b/code_graph/info.py @@ -0,0 +1,116 @@ +import os +import redis +import logging +from typing import Optional, Dict + +# Configure logging +logging.basicConfig(level=logging.INFO) + +def _repo_info_key(repo_name: str) -> str: + return f"{{{repo_name}}}_info" + +def get_redis_connection() -> redis.Redis: + """ + Establishes a connection to Redis using environment variables. + + Returns: + redis.Redis: A Redis connection object. 
+ """ + try: + return redis.Redis( + host = os.getenv('FALKORDB_HOST'), + port = os.getenv('FALKORDB_PORT'), + username = os.getenv('FALKORDB_USERNAME'), + password = os.getenv('FALKORDB_PASSWORD'), + decode_responses = True # To ensure string responses + ) + except Exception as e: + logging.error(f"Error connecting to Redis: {e}") + raise + + +def set_repo_commit(repo_name: str, commit_hash: str) -> None: + """Save processed commit hash to the DB""" + + try: + r = get_redis_connection() + key = _repo_info_key(repo_name) # Safely format the key + + # Save the repository URL + r.hset(key, 'commit', commit_hash) + logging.info(f"Repository set current commit to: {commit_hash}") + + except Exception as e: + logging.error(f"Error saving repo info for '{repo_name}': {e}") + raise + + +def get_repo_commit(repo_name: str) -> str: + """Get the current commit the repo is at""" + + try: + r = get_redis_connection() + key = _repo_info_key(repo_name) + + # Retrieve all information about the repository + commit_hash = r.hget(key, "commit") + if not commit_hash: + logging.warning(f"Failed to retrieve {repo_name} current commit hash") + return None + + logging.info(f"Repository current commit hash: {commit_hash}") + return commit_hash + + except Exception as e: + logging.error(f"Error retrieving '{repo_name}' current commit hash: {e}") + raise + + +def save_repo_info(repo_name: str, repo_url: str) -> None: + """ + Saves repository information (URL) to Redis under a hash named {repo_name}_info. + + Args: + repo_name (str): The name of the repository. + repo_url (str): The URL of the repository. 
+ """ + + try: + r = get_redis_connection() + key = _repo_info_key(repo_name) + + # Save the repository URL + r.hset(key, 'repo_url', repo_url) + logging.info(f"Repository info saved for {repo_name}") + + except Exception as e: + logging.error(f"Error saving repo info for '{repo_name}': {e}") + raise + +def get_repo_info(repo_name: str) -> Optional[Dict[str, str]]: + """ + Retrieves repository information from Redis. + + Args: + repo_name (str): The name of the repository. + + Returns: + Optional[Dict[str, str]]: A dictionary of repository information, or None if not found. + """ + try: + r = get_redis_connection() + key = _repo_info_key(repo_name) + + # Retrieve all information about the repository + repo_info = r.hgetall(key) + if not repo_info: + logging.warning(f"No repository info found for {repo_name}") + return None + + logging.info(f"Repository info retrieved for {repo_name}") + return repo_info + + except Exception as e: + logging.error(f"Error retrieving repo info for '{repo_name}': {e}") + raise + diff --git a/code_graph/llm.py b/code_graph/llm.py new file mode 100644 index 0000000..29dc6f9 --- /dev/null +++ b/code_graph/llm.py @@ -0,0 +1,217 @@ +import logging + +from graphrag_sdk.models.openai import OpenAiGenerativeModel +from graphrag_sdk.models.gemini import GeminiGenerativeModel + +from graphrag_sdk import ( + Ontology, + Entity, + Relation, + Attribute, + AttributeType, + KnowledgeGraph, + KnowledgeGraphModelConfig +) + +# Configure logging +logging.basicConfig(level=logging.DEBUG, format='%(filename)s - %(asctime)s - %(levelname)s - %(message)s') + +def _define_ontology() -> Ontology: + # Build ontology: + ontology = Ontology() + + # Entities: + # 1. File + # 2. Class + # 3. Function + # 4. 
Struct (TODO: Add struct) + + # Relations: + # File - DEFINES -> Class + # File - DEFINES -> Function + # Class - DEFINES -> Class + # Class - DEFINES -> Function + # Function - DEFINES -> Function + # Class - CALLS -> Function + # Function - CALLS -> Function + + # TODO: auto generate ontology + #"call db.labels()" + #"call db.relationshiptypes()" + #"match (n: File) return keys(n) limit 1" + #"match (n: File) return n limit 1" + #"match ()-[e: {}]->() return e limit 1 + + # Function: + # name + # path + # src_start + # src_end + # args "[[cls, Unknown]]" + # src + + function = Entity( + label="Function", + attributes=[ + Attribute( + name="name", + attr_type=AttributeType.STRING, + required=True, + unique=True, + ), + Attribute( + name="path", + attr_type=AttributeType.STRING, + required=False, + unique=False, + ), + Attribute( + name="src_start", + attr_type=AttributeType.NUMBER, + required=False, + unique=False, + ), + Attribute( + name="src_end", + attr_type=AttributeType.NUMBER, + required=False, + unique=False, + ), + Attribute( + name="args", + attr_type=AttributeType.STRING, + required=False, + unique=False, + ), + Attribute( + name="src", + attr_type=AttributeType.STRING, + required=False, + unique=False, + ), + ] + ) + + # File: + # name + # ext + # path + file = Entity( + label="File", + attributes=[ + Attribute( + name="name", + attr_type=AttributeType.STRING, + required=True, + unique=True, + ), + Attribute( + name="path", + attr_type=AttributeType.STRING, + required=False, + unique=False, + ), + Attribute( + name="ext", + attr_type=AttributeType.STRING, + required=False, + unique=False, + ) + ] + ) + + # Class: + # name + # path + # src_start + # src_end + # doc + + cls = Entity( + label="Class", + attributes=[ + Attribute( + name="name", + attr_type=AttributeType.STRING, + required=True, + unique=True, + ), + Attribute( + name="path", + attr_type=AttributeType.STRING, + required=False, + unique=False, + ), + Attribute( + name="src_start", + 
attr_type=AttributeType.NUMBER, + required=False, + unique=False, + ), + Attribute( + name="src_end", + attr_type=AttributeType.NUMBER, + required=False, + unique=False, + ), + Attribute( + name="doc", + attr_type=AttributeType.STRING, + required=False, + unique=False, + ), + ] + ) + + ontology.add_entity(cls) + ontology.add_entity(file) + ontology.add_entity(function) + + # Relations: + # File - DEFINES -> Class + # File - DEFINES -> Function + # Class - DEFINES -> Class + # Class - DEFINES -> Function + # Function - DEFINES -> Function + # Class - CALLS -> Function + # Function - CALLS -> Function + + ontology.add_relation(Relation("CALLS", "Class", "Function")) + ontology.add_relation(Relation("CALLS", "Function", "Function")) + ontology.add_relation(Relation("DEFINES", "File", "Class")) + ontology.add_relation(Relation("DEFINES", "File", "Function")) + ontology.add_relation(Relation("DEFINES", "Class", "Class")) + ontology.add_relation(Relation("DEFINES", "Class", "Function")) + ontology.add_relation(Relation("DEFINES", "Function", "Function")) + + return ontology + +# Global ontology +ontology = _define_ontology() + +def _create_kg_agent(repo_name: str): + global ontology + + openapi_model = OpenAiGenerativeModel("gpt-4o") + gemini_model = GeminiGenerativeModel("gemini-1.5-flash-001") + gemini_model_pro = GeminiGenerativeModel("gemini-1.5-pro") + + #ontology = _define_ontology() + code_graph_kg = KnowledgeGraph( + name=repo_name, + ontology=ontology, + model_config=KnowledgeGraphModelConfig.with_model(gemini_model), + ) + + return code_graph_kg.chat_session() + +def ask(repo_name: str, question: str) -> str: + chat = _create_kg_agent(repo_name) + + logging.debug(f"Question: {question}") + print(f"Question: {question}") + response = chat.send_message(question) + logging.debug(f"Response: {response}") + print(f"Response: {response}") + return response + diff --git a/code_graph/project.py b/code_graph/project.py new file mode 100644 index 0000000..5ae45ea --- 
/dev/null +++ b/code_graph/project.py @@ -0,0 +1,111 @@ +import os +import shutil +import logging +import validators +import subprocess +from git import Repo +from .info import * +from pathlib import Path +from .graph import Graph +from typing import Optional, List +from urllib.parse import urlparse +from .analyzers import SourceAnalyzer +from .git_utils import build_commit_graph, GitGraph + +# Configure logging +logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') + +def _clone_source(url: str, name: str) -> Path: + # path to local repositories + path = Path.cwd() / "repositories" / name + print(f"Cloning repository to: {path}") + + # Delete local repository if exists + if path.exists(): + shutil.rmtree(path) + + # Create directory + path.mkdir(parents=True, exist_ok=True) + + # Clone repository + # Prepare the Git clone command + cmd = ["git", "clone", url, path] + + # Run the git clone command and wait for it to finish + result = subprocess.run(cmd, check=True, capture_output=True, text=True) + + return path + +class Project(): + def __init__(self, name: str, path: Path, url: Optional[str]): + self.url = url + self.name = name + self.path = path + self.graph = Graph(name) + + if url is not None: + save_repo_info(name, url) + + @classmethod + def from_git_repository(cls, url: str): + # Validate url + if not validators.url(url): + raise Exception(f"invalid url: {url}") + + # Extract project name from URL + parsed_url = urlparse(url) + name = parsed_url.path.split('/')[-1] + path = _clone_source(url, name) + + return cls(name, path, url) + + @classmethod + def from_local_repository(cls, path: Path|str): + path = Path(path) if isinstance(path, str) else path + + # Validate path exists + if not path.exists(): + raise Exception(f"missing path: {path}") + + # adjust url + # 'git@github.com:FalkorDB/code_graph.git' + url = Repo(path).remotes[0].url + url = url.replace("git@", "https://").replace(":", "/").replace(".git", "") + + 
name = path.name + + return cls(name, path, url) + + def analyze_sources(self, ignore: Optional[List[str]] = []) -> Graph: + analyzer = SourceAnalyzer() + analyzer.analyze(self.path, self.graph, ignore) + + try: + # Save processed commit hash to the DB + repo = Repo(self.path) + current_commit = repo.head.commit + set_repo_commit(self.name, current_commit.hexsha) + except Exception: + # Probably not .git folder is missing + pass + + return self.graph + + def process_git_history(self, ignore: Optional[List[str]] = []) -> GitGraph: + logging.info(f"processing {self.name} git commit history") + + # Save original working directory for later restore + original_dir = Path.cwd() + + # change working directory to local repository + logging.info(f"Switching current working directory to: {self.path}") + os.chdir(self.path) + + git_graph = build_commit_graph(self.path, self.name, ignore) + + # Restore original working directory + logging.info(f"Restoring current working directory to: {original_dir}") + os.chdir(original_dir) + + return git_graph + diff --git a/main.py b/main.py deleted file mode 100644 index 926ce3f..0000000 --- a/main.py +++ /dev/null @@ -1,197 +0,0 @@ -import os -import redis -import datetime -from code_graph import * -from typing import Optional -from falkordb import FalkorDB -from urllib.parse import urlparse -from flask import Flask, request, jsonify, abort - -# Configuration -FALKORDB_HOST = 'localhost' -FALKORDB_PORT = 6379 -FALKORDB_USERNAME = None -FALKORDB_PASSWORD = None - -app = Flask(__name__, static_folder='static') - -# Configure the logger -import logging -logging.basicConfig(level=logging.DEBUG, - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') -logger = logging.getLogger(__name__) - -def extract_org_name_from_url(url: str) -> Optional[tuple[str, str]]: - components = url.split('/') - n = len(components) - - # https://github.com/falkordb/falkordb - # Expecting atleast 4 components - if n < 4: - return None - - return 
(components[n-2], components[n-1]) - -def get_current_head_commit_hash(repo_url: str) -> str: - import git - - with tempfile.TemporaryDirectory() as temp_dir: - repo = git.Repo.clone_from(repo_url, temp_dir) - commit_hash = repo.head.commit.hexsha - return commit_hash - -def save_repository_metadata(repo_url: str, repo_name: str): - r = redis.Redis(host=FALKORDB_HOST, port=FALKORDB_PORT, - username=FALKORDB_USERNAME, password=FALKORDB_PASSWORD, - decode_responses=True) - - key = f'{repo_name}_metadata' - metadata = { - 'repo_url': repo_url, - 'repo_name': repo_name, - 'date_create': str(datetime.datetime.today().replace(microsecond=0)), - 'commit': get_current_head_commit_hash(repo_url)} - - r.hset(key, mapping=metadata) - -@app.route('/list_repos', methods=['GET']) -def list_repos(): - r = redis.Redis(host=FALKORDB_HOST, port=FALKORDB_PORT, - username=FALKORDB_USERNAME, password=FALKORDB_PASSWORD, - decode_responses=True) - keys = r.keys('*_metadata')[:20] - - repos = [] - for key in keys: - repos.append(r.hgetall(key)) - - return jsonify({'repos': repos}), 200 - -@app.route('/graph_entities', methods=['GET']) -def graph_entities(): - # Access the 'graph_id' parameter from the GET request - graph_id = request.args.get('graph_id') - - # Connect to FalkorDB - db = FalkorDB(host=FALKORDB_HOST, port=FALKORDB_PORT, - username=FALKORDB_USERNAME, password=FALKORDB_PASSWORD) - - # Select graph - g = db.select_graph(graph_id) - - query = """MATCH (src) - OPTIONAL MATCH (src)-[e]->(dest) - RETURN src, e, dest - LIMIT 100""" - - data = [] - res = g.query(query).result_set - for row in res: - src = row[0] - e = row[1] - dest = row[2] - - data.append({'data': {'id': src.id, - 'label': src.labels[0]} }) - - if e is not None: - data.append({'data': {'id': dest.id, - 'label': dest.labels[0]} }) - data.append({'data': {'source': src.id, 'target': dest.id, 'relation': e.relation} }) - - # [ - # { data: { id: 'e' } }, - # { data: { source: 'a', target: 'b' } } - # ] - - return 
jsonify(data), 200 - -@app.route('/get_neighbors', methods=['GET']) -def get_neighbors(): - # Access the 'node_id' parameter from the GET request - node_id = int(request.args.get('node_id')) - graph_id = request.args.get('graph_id') - - # Connect to FalkorDB - db = FalkorDB(host=FALKORDB_HOST, port=FALKORDB_PORT, - username=FALKORDB_USERNAME, password=FALKORDB_PASSWORD) - - # Select graph - g = db.select_graph(graph_id) - - query = """MATCH (n) - WHERE ID(n) = $node_id - MATCH (n)-[e]-(neighbor) - RETURN neighbor, e""" - - data = [] - res = g.query(query, {'node_id': node_id}).result_set - for row in res: - neighbor = row[0] - e = row[1] - - data.append({'data': {'id': neighbor.id, - 'label': neighbor.labels[0]} }) - data.append({'data': {'source': node_id, 'target': neighbor.id, 'relation': e.relation} }) - - # [ - # { data: { id: 'e' } }, - # { data: { source: 'a', target: 'b' } } - # ] - - return jsonify(data), 200 - -@app.route('/process_repo', methods=['POST']) -def process_repo(): - # Get JSON data from the request - data = request.get_json() - - # Process the data - repo_url = data.get('repo_url') - if repo_url is None: - return jsonify({'status': f'Missing mandatory parameter "repo_url"'}), 400 - logger.debug(f'Received repo_url: {repo_url}') - - # Validate URL - try: - urlparse(repo_url) - except ValueError: - return jsonify({'status': 'Invalid repository URL'}), 400 - - # Extract Organization and Repo name from URL - res = extract_org_name_from_url(repo_url) - if res is None: - return jsonify({'status': f'Failed to process repo_url: {repo_url}'}), 400 - - org, name = extract_org_name_from_url(repo_url) - logger.debug(f'Org: {org}, name: {name}') - - # Convert repo_url to git URL - git_url = repo_url + '.git' - logger.debug(f'git_url: {git_url}') - - # Create source code analyzer - analyzer = SourceAnalyzer(host = FALKORDB_HOST, - port = FALKORDB_PORT, - username = FALKORDB_USERNAME, - password = FALKORDB_PASSWORD) - - try: - 
analyzer.analyze_repository(git_url) - except Exception as e: - logger.error(f'An error occurred: {e}') - return jsonify({'status': f'Failed to process repository: {git_url}'}), 400 - - repo_name = f'{org}/{name}' - save_repository_metadata(git_url, repo_name) - - # Create a response - response = { - 'status': 'success', - } - - return jsonify(response), 200 - -if __name__ == '__main__': - app.run(debug=True) - diff --git a/poetry.lock b/poetry.lock index 9e2a807..d98648b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,235 +1,2017 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.6.2.post1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +files = [ + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = 
["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "async-timeout" -version = "4.0.3" +version = "5.0.0" description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +files = [ + {file = "async_timeout-5.0.0-py3-none-any.whl", hash = "sha256:904719a4bd6e0520047d0ddae220aabee67b877f7ca17bf8cea20f67f6247ae0"}, + {file = "async_timeout-5.0.0.tar.gz", hash = "sha256:49675ec889daacfe65ff66d2dde7dd1447a6f4b2f23721022e4ba121f8772a85"}, +] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = 
"sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + +[[package]] +name = "bs4" +version = "0.0.2" +description = "Dummy package for Beautiful Soup (beautifulsoup4)" +optional = false +python-versions = "*" +files = [ + {file = "bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc"}, + {file = "bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925"}, +] + +[package.dependencies] +beautifulsoup4 = "*" + +[[package]] +name = "cachetools" +version = "5.5.0" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.5.0-py3-none-any.whl", hash = 
"sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = 
"cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file 
= "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file 
= "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = 
"cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, 
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = 
"charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = 
"charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = 
"sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "comm" +version = "0.2.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +optional = false +python-versions = ">=3.8" +files = [ + {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, + {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "debugpy" +version = "1.8.7" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "debugpy-1.8.7-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:95fe04a573b8b22896c404365e03f4eda0ce0ba135b7667a1e57bd079793b96b"}, + {file = "debugpy-1.8.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:628a11f4b295ffb4141d8242a9bb52b77ad4a63a2ad19217a93be0f77f2c28c9"}, + {file = "debugpy-1.8.7-cp310-cp310-win32.whl", hash = "sha256:85ce9c1d0eebf622f86cc68618ad64bf66c4fc3197d88f74bb695a416837dd55"}, + {file = "debugpy-1.8.7-cp310-cp310-win_amd64.whl", hash = "sha256:29e1571c276d643757ea126d014abda081eb5ea4c851628b33de0c2b6245b037"}, + {file = 
"debugpy-1.8.7-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:caf528ff9e7308b74a1749c183d6808ffbedbb9fb6af78b033c28974d9b8831f"}, + {file = "debugpy-1.8.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cba1d078cf2e1e0b8402e6bda528bf8fda7ccd158c3dba6c012b7897747c41a0"}, + {file = "debugpy-1.8.7-cp311-cp311-win32.whl", hash = "sha256:171899588bcd412151e593bd40d9907133a7622cd6ecdbdb75f89d1551df13c2"}, + {file = "debugpy-1.8.7-cp311-cp311-win_amd64.whl", hash = "sha256:6e1c4ffb0c79f66e89dfd97944f335880f0d50ad29525dc792785384923e2211"}, + {file = "debugpy-1.8.7-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:4d27d842311353ede0ad572600c62e4bcd74f458ee01ab0dd3a1a4457e7e3706"}, + {file = "debugpy-1.8.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c1fd62ae0356e194f3e7b7a92acd931f71fe81c4b3be2c17a7b8a4b546ec2"}, + {file = "debugpy-1.8.7-cp312-cp312-win32.whl", hash = "sha256:2f729228430ef191c1e4df72a75ac94e9bf77413ce5f3f900018712c9da0aaca"}, + {file = "debugpy-1.8.7-cp312-cp312-win_amd64.whl", hash = "sha256:45c30aaefb3e1975e8a0258f5bbd26cd40cde9bfe71e9e5a7ac82e79bad64e39"}, + {file = "debugpy-1.8.7-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:d050a1ec7e925f514f0f6594a1e522580317da31fbda1af71d1530d6ea1f2b40"}, + {file = "debugpy-1.8.7-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f4349a28e3228a42958f8ddaa6333d6f8282d5edaea456070e48609c5983b7"}, + {file = "debugpy-1.8.7-cp313-cp313-win32.whl", hash = "sha256:11ad72eb9ddb436afb8337891a986302e14944f0f755fd94e90d0d71e9100bba"}, + {file = "debugpy-1.8.7-cp313-cp313-win_amd64.whl", hash = "sha256:2efb84d6789352d7950b03d7f866e6d180284bc02c7e12cb37b489b7083d81aa"}, + {file = "debugpy-1.8.7-cp38-cp38-macosx_14_0_x86_64.whl", hash = 
"sha256:4b908291a1d051ef3331484de8e959ef3e66f12b5e610c203b5b75d2725613a7"}, + {file = "debugpy-1.8.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da8df5b89a41f1fd31503b179d0a84a5fdb752dddd5b5388dbd1ae23cda31ce9"}, + {file = "debugpy-1.8.7-cp38-cp38-win32.whl", hash = "sha256:b12515e04720e9e5c2216cc7086d0edadf25d7ab7e3564ec8b4521cf111b4f8c"}, + {file = "debugpy-1.8.7-cp38-cp38-win_amd64.whl", hash = "sha256:93176e7672551cb5281577cdb62c63aadc87ec036f0c6a486f0ded337c504596"}, + {file = "debugpy-1.8.7-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:90d93e4f2db442f8222dec5ec55ccfc8005821028982f1968ebf551d32b28907"}, + {file = "debugpy-1.8.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6db2a370e2700557a976eaadb16243ec9c91bd46f1b3bb15376d7aaa7632c81"}, + {file = "debugpy-1.8.7-cp39-cp39-win32.whl", hash = "sha256:a6cf2510740e0c0b4a40330640e4b454f928c7b99b0c9dbf48b11efba08a8cda"}, + {file = "debugpy-1.8.7-cp39-cp39-win_amd64.whl", hash = "sha256:6a9d9d6d31846d8e34f52987ee0f1a904c7baa4912bf4843ab39dadf9b8f3e0d"}, + {file = "debugpy-1.8.7-py2.py3-none-any.whl", hash = "sha256:57b00de1c8d2c84a61b90880f7e5b6deaf4c312ecbde3a0e8912f2a56c4ac9ae"}, + {file = "debugpy-1.8.7.zip", hash = "sha256:18b8f731ed3e2e1df8e9cdaa23fb1fc9c24e570cd0081625308ec51c82efe42e"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +files = [ + {file = 
"distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "docstring-parser" +version = "0.16" +description = "Parse Python docstrings in reST, Google and Numpydoc format" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, + {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "2.1.0" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.8" +files = [ + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + +[[package]] +name = "falkordb" +version = "1.0.9" +description = "Python client for interacting with FalkorDB database" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "falkordb-1.0.9.tar.gz", hash = 
"sha256:177008e63c7e4d9ebbdfeb8cad24b0e49175bb0f6e96cac9b4ffb641c0eff0f1"}, +] + +[package.dependencies] +redis = ">=5.0.1,<6.0.0" + +[[package]] +name = "fix-busted-json" +version = "0.0.18" +description = "Fixes broken JSON string objects" +optional = false +python-versions = ">=3.6" +files = [ + {file = "fix-busted-json-0.0.18.tar.gz", hash = "sha256:93c5dab7cae3b5d0b055f2c7043f9fe727a88a80d0be753c5f2c20bb9b69672f"}, + {file = "fix_busted_json-0.0.18-py3-none-any.whl", hash = "sha256:fdce0e02c9a810b3aa28e1c3c32c24b21b44e89f6315ec25d2b963bd52a6ef03"}, +] + +[[package]] +name = "flask" +version = "3.0.3" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, + {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.43" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.43-py3-none-any.whl", hash = 
"sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, + {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] + +[[package]] +name = "google-ai-generativelanguage" +version = "0.6.10" +description = "Google Ai Generativelanguage API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_ai_generativelanguage-0.6.10-py3-none-any.whl", hash = "sha256:854a2bf833d18be05ad5ef13c755567b66a4f4a870f099b62c61fe11bddabcf4"}, + {file = "google_ai_generativelanguage-0.6.10.tar.gz", hash = "sha256:6fa642c964d8728006fe7e8771026fc0b599ae0ebeaf83caf550941e8e693455"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" + +[[package]] +name = "google-api-core" +version = "2.22.0" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_api_core-2.22.0-py3-none-any.whl", hash = "sha256:a6652b6bd51303902494998626653671703c420f6f4c88cfd3f50ed723e9d021"}, + {file = "google_api_core-2.22.0.tar.gz", hash = 
"sha256:26f8d76b96477db42b55fd02a33aae4a42ec8b86b98b94969b7333a2c828bf35"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +grpcio = [ + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, +] +grpcio-status = [ + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, +] +proto-plus = [ + {version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""}, + {version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-api-python-client" +version = "2.151.0" +description = "Google API Client Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_api_python_client-2.151.0-py2.py3-none-any.whl", hash = "sha256:4427b2f47cd88b0355d540c2c52215f68c337f3bc9d6aae1ceeae4525977504c"}, + {file = "google_api_python_client-2.151.0.tar.gz", hash = "sha256:a9d26d630810ed4631aea21d1de3e42072f98240aaf184a8a1a874a371115034"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" +google-auth = ">=1.32.0,<2.24.0 || 
>2.24.0,<2.25.0 || >2.25.0,<3.0.0.dev0" +google-auth-httplib2 = ">=0.2.0,<1.0.0" +httplib2 = ">=0.19.0,<1.dev0" +uritemplate = ">=3.0.1,<5" + +[[package]] +name = "google-auth" +version = "2.35.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, + {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" +description = "Google Authentication Library: httplib2 transport" +optional = false +python-versions = "*" +files = [ + {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, + {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, +] + +[package.dependencies] +google-auth = "*" +httplib2 = ">=0.19.0" + +[[package]] +name = "google-cloud-aiplatform" +version = "1.71.1" +description = "Vertex AI API client library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "google-cloud-aiplatform-1.71.1.tar.gz", hash = "sha256:0013527e06853382ff0885898195bb7f3cf4a70eb7e5d53e4b1a28c8bd1775e2"}, + {file = "google_cloud_aiplatform-1.71.1-py2.py3-none-any.whl", hash = "sha256:4cd49bbc7f8ad88b92029a090b834ebacf9efadc844226f1e74d015d68f69ef5"}, +] + +[package.dependencies] +docstring-parser = "<1" 
+google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +google-cloud-bigquery = ">=1.15.0,<3.20.0 || >3.20.0,<4.0.0dev" +google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" +google-cloud-storage = ">=1.32.0,<3.0.0dev" +packaging = ">=14.3" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" +pydantic = "<3" +shapely = "<3.0.0dev" + +[package.extras] +autologging = ["mlflow (>=1.27.0,<=2.16.0)"] +cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] +endpoint = ["requests (>=2.28.1)"] +evaluation = ["pandas (>=1.0.0)", "tqdm (>=4.23.0)"] +full = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"] +langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "orjson (<=3.10.6)", "tenacity 
(<=8.3)"] +langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "orjson (<=3.10.6)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"] +lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] +metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] +pipelines = ["pyyaml (>=5.3.1,<7)"] +prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.114.0)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] +private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] +ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "setuptools (<70.0.0)"] +ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "ray[train]", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] +reasoningengine = ["cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"] +tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +testing = ["aiohttp", "bigframes", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-api-core (>=2.11,<3.0.0)", 
"google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] +tokenization = ["sentencepiece (>=0.2.0)"] +vizier = ["google-vizier (>=0.1.6)"] +xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] + +[[package]] +name = "google-cloud-bigquery" +version = "3.26.0" +description = "Google BigQuery API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_cloud_bigquery-3.26.0-py2.py3-none-any.whl", hash = "sha256:e0e9ad28afa67a18696e624cbccab284bf2c0a3f6eeb9eeb0426c69b943793a8"}, + {file = "google_cloud_bigquery-3.26.0.tar.gz", hash = "sha256:edbdc788beea659e04c0af7fe4dcd6d9155344b98951a0d5055bd2f15da4ba23"}, +] + +[package.dependencies] +google-api-core = {version = ">=2.11.1,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +google-cloud-core = ">=2.4.1,<3.0.0dev" +google-resumable-media = ">=2.0.0,<3.0dev" +packaging = ">=20.0.0" +python-dateutil = ">=2.7.3,<3.0dev" +requests = ">=2.21.0,<3.0.0dev" + +[package.extras] +all = 
["Shapely (>=1.8.4,<3.0.0dev)", "bigquery-magics (>=0.1.0)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] +bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)"] +bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] +geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] +ipython = ["bigquery-magics (>=0.1.0)"] +ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] +opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] +tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] + +[[package]] +name = "google-cloud-core" +version = "2.4.1" +description = "Google Cloud API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, +] + +[package.dependencies] +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.25.0,<3.0dev" + +[package.extras] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status 
(>=1.38.0,<2.0.dev0)"] + +[[package]] +name = "google-cloud-resource-manager" +version = "1.13.0" +description = "Google Cloud Resource Manager API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_cloud_resource_manager-1.13.0-py2.py3-none-any.whl", hash = "sha256:33beb4528c2b7aee7a97ed843710581a7b4a27f3dd1fa41a0bf3359b3d68853f"}, + {file = "google_cloud_resource_manager-1.13.0.tar.gz", hash = "sha256:ae4bf69443f14b37007d4d84150115b0942e8b01650fd7a1fc6ff4dc1760e5c4"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = [ + {version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""}, + {version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""}, +] +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" + +[[package]] +name = "google-cloud-storage" +version = "2.18.2" +description = "Google Cloud Storage API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_cloud_storage-2.18.2-py2.py3-none-any.whl", hash = "sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166"}, + {file = "google_cloud_storage-2.18.2.tar.gz", hash = "sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99"}, +] + +[package.dependencies] +google-api-core = ">=2.15.0,<3.0.0dev" +google-auth = ">=2.26.1,<3.0dev" +google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" +google-resumable-media = ">=2.7.2" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +protobuf = ["protobuf (<6.0.0dev)"] +tracing = ["opentelemetry-api (>=1.1.0)"] + +[[package]] +name = "google-crc32c" +version = "1.6.0" +description = "A python wrapper of the C library 'Google CRC32C'" 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa"}, + {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9"}, + {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7"}, + {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e"}, + {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc"}, + {file = "google_crc32c-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42"}, + {file = "google_crc32c-1.6.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4"}, + {file = "google_crc32c-1.6.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8"}, + {file = "google_crc32c-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d"}, + {file = "google_crc32c-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f"}, + {file = "google_crc32c-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3"}, + {file = "google_crc32c-1.6.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d"}, + {file = 
"google_crc32c-1.6.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b"}, + {file = "google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00"}, + {file = "google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3"}, + {file = "google_crc32c-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760"}, + {file = "google_crc32c-1.6.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205"}, + {file = "google_crc32c-1.6.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0"}, + {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2"}, + {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871"}, + {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57"}, + {file = "google_crc32c-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c"}, + {file = "google_crc32c-1.6.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc"}, + {file = "google_crc32c-1.6.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d"}, + {file = 
"google_crc32c-1.6.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24"}, + {file = "google_crc32c-1.6.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d"}, + {file = "google_crc32c-1.6.0.tar.gz", hash = "sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc"}, +] + +[package.extras] +testing = ["pytest"] + +[[package]] +name = "google-generativeai" +version = "0.8.3" +description = "Google Generative AI High level API client library and tools." +optional = false +python-versions = ">=3.9" +files = [ + {file = "google_generativeai-0.8.3-py3-none-any.whl", hash = "sha256:1108ff89d5b8e59f51e63d1a8bf84701cd84656e17ca28d73aeed745e736d9b7"}, +] + +[package.dependencies] +google-ai-generativelanguage = "0.6.10" +google-api-core = "*" +google-api-python-client = "*" +google-auth = ">=2.15.0" +protobuf = "*" +pydantic = "*" +tqdm = "*" +typing-extensions = "*" + +[package.extras] +dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "pyyaml"] + +[[package]] +name = "google-resumable-media" +version = "2.7.2" +description = "Utilities for Google Media Downloads and Resumable Uploads" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, + {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, +] + +[package.dependencies] +google-crc32c = ">=1.0,<2.0dev" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] +requests = ["requests (>=2.18.0,<3.0.0dev)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.65.0" +description = "Common protobufs used in Google APIs" 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, + {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, +] + +[package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "graphrag-sdk" +version = "0.2.2" +description = "" +optional = false +python-versions = "<4.0.0,>=3.9.0" +files = [ + {file = "graphrag_sdk-0.2.2.tar.gz", hash = "sha256:63e2ebf007949dab31bd7ffe302fe425ab295c602c23c98de41b69c3b9fe16f9"}, +] + +[package.dependencies] +backoff = ">=2.2.1,<3.0.0" +bs4 = ">=0.0.2,<0.0.3" +falkordb = ">=1.0.9,<2.0.0" +fix-busted-json = ">=0.0.18,<0.0.19" +google-generativeai = ">=0.8.1,<0.9.0" +ipykernel = ">=6.29.5,<7.0.0" +ollama = ">=0.2.1,<0.3.0" +openai = ">=1.35.9,<2.0.0" +pypdf = ">=4.2.0,<5.0.0" +python-abc = ">=0.2.0,<0.3.0" +python-dotenv = ">=1.0.1,<2.0.0" +ratelimit = ">=2.2.1,<3.0.0" +typing-extensions = ">=4.12.1,<5.0.0" +vertexai = ">=1.49.0,<2.0.0" + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.1" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"}, + {file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0dev" +protobuf = ">=3.20.2,<4.21.1 || 
>4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" + +[[package]] +name = "grpcio" +version = "1.67.1" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"}, + {file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"}, + {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:43112046864317498a33bdc4797ae6a268c36345a910de9b9c17159d8346602f"}, + {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9b929f13677b10f63124c1a410994a401cdd85214ad83ab67cc077fc7e480f0"}, + {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7d1797a8a3845437d327145959a2c0c47c05947c9eef5ff1a4c80e499dcc6fa"}, + {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0489063974d1452436139501bf6b180f63d4977223ee87488fe36858c5725292"}, + {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9fd042de4a82e3e7aca44008ee2fb5da01b3e5adb316348c21980f7f58adc311"}, + {file = "grpcio-1.67.1-cp310-cp310-win32.whl", hash = "sha256:638354e698fd0c6c76b04540a850bf1db27b4d2515a19fcd5cf645c48d3eb1ed"}, + {file = "grpcio-1.67.1-cp310-cp310-win_amd64.whl", hash = "sha256:608d87d1bdabf9e2868b12338cd38a79969eaf920c89d698ead08f48de9c0f9e"}, + {file = "grpcio-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb"}, + {file = "grpcio-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e"}, + {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = 
"sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f"}, + {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc"}, + {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96"}, + {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f"}, + {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970"}, + {file = "grpcio-1.67.1-cp311-cp311-win32.whl", hash = "sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744"}, + {file = "grpcio-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5"}, + {file = "grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953"}, + {file = "grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb"}, + {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0"}, + {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af"}, + {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e"}, + {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75"}, + {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38"}, + {file = "grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78"}, + {file = "grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc"}, + {file = "grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b"}, + {file = "grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1"}, + {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af"}, + {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955"}, + {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8"}, + {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62"}, + {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb"}, + {file = "grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121"}, + {file = "grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba"}, + {file = "grpcio-1.67.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:178f5db771c4f9a9facb2ab37a434c46cb9be1a75e820f187ee3d1e7805c4f65"}, + {file = "grpcio-1.67.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f3e49c738396e93b7ba9016e153eb09e0778e776df6090c1b8c91877cc1c426"}, + {file = 
"grpcio-1.67.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:24e8a26dbfc5274d7474c27759b54486b8de23c709d76695237515bc8b5baeab"}, + {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b6c16489326d79ead41689c4b84bc40d522c9a7617219f4ad94bc7f448c5085"}, + {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e6a4dcf5af7bbc36fd9f81c9f372e8ae580870a9e4b6eafe948cd334b81cf3"}, + {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:95b5f2b857856ed78d72da93cd7d09b6db8ef30102e5e7fe0961fe4d9f7d48e8"}, + {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b49359977c6ec9f5d0573ea4e0071ad278ef905aa74e420acc73fd28ce39e9ce"}, + {file = "grpcio-1.67.1-cp38-cp38-win32.whl", hash = "sha256:f5b76ff64aaac53fede0cc93abf57894ab2a7362986ba22243d06218b93efe46"}, + {file = "grpcio-1.67.1-cp38-cp38-win_amd64.whl", hash = "sha256:804c6457c3cd3ec04fe6006c739579b8d35c86ae3298ffca8de57b493524b771"}, + {file = "grpcio-1.67.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:a25bdea92b13ff4d7790962190bf6bf5c4639876e01c0f3dda70fc2769616335"}, + {file = "grpcio-1.67.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc491ae35a13535fd9196acb5afe1af37c8237df2e54427be3eecda3653127e"}, + {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:85f862069b86a305497e74d0dc43c02de3d1d184fc2c180993aa8aa86fbd19b8"}, + {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec74ef02010186185de82cc594058a3ccd8d86821842bbac9873fd4a2cf8be8d"}, + {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01f616a964e540638af5130469451cf580ba8c7329f45ca998ab66e0c7dcdb04"}, + {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:299b3d8c4f790c6bcca485f9963b4846dd92cf6f1b65d3697145d005c80f9fe8"}, + {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:60336bff760fbb47d7e86165408126f1dded184448e9a4c892189eb7c9d3f90f"}, + {file = "grpcio-1.67.1-cp39-cp39-win32.whl", hash = "sha256:5ed601c4c6008429e3d247ddb367fe8c7259c355757448d7c1ef7bd4a6739e8e"}, + {file = "grpcio-1.67.1-cp39-cp39-win_amd64.whl", hash = "sha256:5db70d32d6703b89912af16d6d45d78406374a8b8ef0d28140351dd0ec610e98"}, + {file = "grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.67.1)"] + +[[package]] +name = "grpcio-status" +version = "1.67.1" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio_status-1.67.1-py3-none-any.whl", hash = "sha256:16e6c085950bdacac97c779e6a502ea671232385e6e37f258884d6883392c2bd"}, + {file = "grpcio_status-1.67.1.tar.gz", hash = "sha256:2bf38395e028ceeecfd8866b081f61628114b384da7d51ae064ddc8d766a5d11"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.67.1" +protobuf = ">=5.26.1,<6.0dev" + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.6" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httplib2" +version = "0.22.0" +description = "A comprehensive HTTP client library." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, + {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, +] + +[package.dependencies] +pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", 
"pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipykernel" +version = "6.29.5" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, + {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=24" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.18.1" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.9" +files = [ + {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, + {file = 
"ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +prompt-toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jiter" +version = "0.7.0" +description = "Fast iterable JSON parser." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jiter-0.7.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e14027f61101b3f5e173095d9ecf95c1cac03ffe45a849279bde1d97e559e314"}, + {file = "jiter-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:979ec4711c2e37ac949561858bd42028884c9799516a923e1ff0b501ef341a4a"}, + {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:662d5d3cca58ad6af7a3c6226b641c8655de5beebcb686bfde0df0f21421aafa"}, + {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d89008fb47043a469f97ad90840b97ba54e7c3d62dc7cbb6cbf938bd0caf71d"}, + {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8b16c35c846a323ce9067170d5ab8c31ea3dbcab59c4f7608bbbf20c2c3b43f"}, + {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9e82daaa1b0a68704f9029b81e664a5a9de3e466c2cbaabcda5875f961702e7"}, + {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43a87a9f586636e1f0dd3651a91f79b491ea0d9fd7cbbf4f5c463eebdc48bda7"}, + {file = "jiter-0.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2ec05b1615f96cc3e4901678bc863958611584072967d9962f9e571d60711d52"}, + {file = "jiter-0.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a5cb97e35370bde7aa0d232a7f910f5a0fbbc96bc0a7dbaa044fd5cd6bcd7ec3"}, + {file = 
"jiter-0.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb316dacaf48c8c187cea75d0d7f835f299137e6fdd13f691dff8f92914015c7"}, + {file = "jiter-0.7.0-cp310-none-win32.whl", hash = "sha256:243f38eb4072763c54de95b14ad283610e0cd3bf26393870db04e520f60eebb3"}, + {file = "jiter-0.7.0-cp310-none-win_amd64.whl", hash = "sha256:2221d5603c139f6764c54e37e7c6960c469cbcd76928fb10d15023ba5903f94b"}, + {file = "jiter-0.7.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91cec0ad755bd786c9f769ce8d843af955df6a8e56b17658771b2d5cb34a3ff8"}, + {file = "jiter-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:feba70a28a27d962e353e978dbb6afd798e711c04cb0b4c5e77e9d3779033a1a"}, + {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d866ec066c3616cacb8535dbda38bb1d470b17b25f0317c4540182bc886ce2"}, + {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e7a7a00b6f9f18289dd563596f97ecaba6c777501a8ba04bf98e03087bcbc60"}, + {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9aaf564094c7db8687f2660605e099f3d3e6ea5e7135498486674fcb78e29165"}, + {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4d27e09825c1b3c7a667adb500ce8b840e8fc9f630da8454b44cdd4fb0081bb"}, + {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca7c287da9c1d56dda88da1d08855a787dbb09a7e2bd13c66a2e288700bd7c7"}, + {file = "jiter-0.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:db19a6d160f093cbc8cd5ea2abad420b686f6c0e5fb4f7b41941ebc6a4f83cda"}, + {file = "jiter-0.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e46a63c7f877cf7441ffc821c28287cfb9f533ae6ed707bde15e7d4dfafa7ae"}, + {file = "jiter-0.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ba426fa7ff21cb119fa544b75dd3fbee6a70e55a5829709c0338d07ccd30e6d"}, + {file = 
"jiter-0.7.0-cp311-none-win32.whl", hash = "sha256:c07f55a64912b0c7982377831210836d2ea92b7bd343fca67a32212dd72e38e0"}, + {file = "jiter-0.7.0-cp311-none-win_amd64.whl", hash = "sha256:ed27b2c43e1b5f6c7fedc5c11d4d8bfa627de42d1143d87e39e2e83ddefd861a"}, + {file = "jiter-0.7.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac7930bcaaeb1e229e35c91c04ed2e9f39025b86ee9fc3141706bbf6fff4aeeb"}, + {file = "jiter-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:571feae3e7c901a8eedde9fd2865b0dfc1432fb15cab8c675a8444f7d11b7c5d"}, + {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8af4df8a262fa2778b68c2a03b6e9d1cb4d43d02bea6976d46be77a3a331af1"}, + {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd028d4165097a611eb0c7494d8c1f2aebd46f73ca3200f02a175a9c9a6f22f5"}, + {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6b487247c7836810091e9455efe56a52ec51bfa3a222237e1587d04d3e04527"}, + {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6d28a92f28814e1a9f2824dc11f4e17e1df1f44dc4fdeb94c5450d34bcb2602"}, + {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90443994bbafe134f0b34201dad3ebe1c769f0599004084e046fb249ad912425"}, + {file = "jiter-0.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f9abf464f9faac652542ce8360cea8e68fba2b78350e8a170248f9bcc228702a"}, + {file = "jiter-0.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db7a8d99fc5f842f7d2852f06ccaed066532292c41723e5dff670c339b649f88"}, + {file = "jiter-0.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:15cf691ebd8693b70c94627d6b748f01e6d697d9a6e9f2bc310934fcfb7cf25e"}, + {file = "jiter-0.7.0-cp312-none-win32.whl", hash = "sha256:9dcd54fa422fb66ca398bec296fed5f58e756aa0589496011cfea2abb5be38a5"}, + {file = 
"jiter-0.7.0-cp312-none-win_amd64.whl", hash = "sha256:cc989951f73f9375b8eacd571baaa057f3d7d11b7ce6f67b9d54642e7475bfad"}, + {file = "jiter-0.7.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:24cecd18df540963cd27c08ca5ce1d0179f229ff78066d9eecbe5add29361340"}, + {file = "jiter-0.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d41b46236b90b043cca73785674c23d2a67d16f226394079d0953f94e765ed76"}, + {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b160db0987171365c153e406a45dcab0ee613ae3508a77bfff42515cb4ce4d6e"}, + {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1c8d91e0f0bd78602eaa081332e8ee4f512c000716f5bc54e9a037306d693a7"}, + {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997706c683195eeff192d2e5285ce64d2a610414f37da3a3f2625dcf8517cf90"}, + {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ea52a8a0ff0229ab2920284079becd2bae0688d432fca94857ece83bb49c541"}, + {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d77449d2738cf74752bb35d75ee431af457e741124d1db5e112890023572c7c"}, + {file = "jiter-0.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8203519907a1d81d6cb00902c98e27c2d0bf25ce0323c50ca594d30f5f1fbcf"}, + {file = "jiter-0.7.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41d15ccc53931c822dd7f1aebf09faa3cda2d7b48a76ef304c7dbc19d1302e51"}, + {file = "jiter-0.7.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:febf3179b2fabf71fbd2fd52acb8594163bb173348b388649567a548f356dbf6"}, + {file = "jiter-0.7.0-cp313-none-win32.whl", hash = "sha256:4a8e2d866e7eda19f012444e01b55079d8e1c4c30346aaac4b97e80c54e2d6d3"}, + {file = "jiter-0.7.0-cp313-none-win_amd64.whl", hash = "sha256:7417c2b928062c496f381fb0cb50412eee5ad1d8b53dbc0e011ce45bb2de522c"}, + {file = 
"jiter-0.7.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9c62c737b5368e51e74960a08fe1adc807bd270227291daede78db24d5fbf556"}, + {file = "jiter-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e4640722b1bef0f6e342fe4606aafaae0eb4f4be5c84355bb6867f34400f6688"}, + {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f367488c3b9453eab285424c61098faa1cab37bb49425e69c8dca34f2dfe7d69"}, + {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0cf5d42beb3514236459454e3287db53d9c4d56c4ebaa3e9d0efe81b19495129"}, + {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cc5190ea1113ee6f7252fa8a5fe5a6515422e378356c950a03bbde5cafbdbaab"}, + {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63ee47a149d698796a87abe445fc8dee21ed880f09469700c76c8d84e0d11efd"}, + {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48592c26ea72d3e71aa4bea0a93454df907d80638c3046bb0705507b6704c0d7"}, + {file = "jiter-0.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:79fef541199bd91cfe8a74529ecccb8eaf1aca38ad899ea582ebbd4854af1e51"}, + {file = "jiter-0.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d1ef6bb66041f2514739240568136c81b9dcc64fd14a43691c17ea793b6535c0"}, + {file = "jiter-0.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca4d950863b1c238e315bf159466e064c98743eef3bd0ff9617e48ff63a4715"}, + {file = "jiter-0.7.0-cp38-none-win32.whl", hash = "sha256:897745f230350dcedb8d1ebe53e33568d48ea122c25e6784402b6e4e88169be7"}, + {file = "jiter-0.7.0-cp38-none-win_amd64.whl", hash = "sha256:b928c76a422ef3d0c85c5e98c498ce3421b313c5246199541e125b52953e1bc0"}, + {file = "jiter-0.7.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c9b669ff6f8ba08270dee9ccf858d3b0203b42314a428a1676762f2d390fbb64"}, + {file = 
"jiter-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5be919bacd73ca93801c3042bce6e95cb9c555a45ca83617b9b6c89df03b9c2"}, + {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a282e1e8a396dabcea82d64f9d05acf7efcf81ecdd925b967020dcb0e671c103"}, + {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:17ecb1a578a56e97a043c72b463776b5ea30343125308f667fb8fce4b3796735"}, + {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b6045fa0527129218cdcd8a8b839f678219686055f31ebab35f87d354d9c36e"}, + {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:189cc4262a92e33c19d4fd24018f5890e4e6da5b2581f0059938877943f8298c"}, + {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c138414839effbf30d185e30475c6dc8a16411a1e3681e5fd4605ab1233ac67a"}, + {file = "jiter-0.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2791604acef33da6b72d5ecf885a32384bcaf9aa1e4be32737f3b8b9588eef6a"}, + {file = "jiter-0.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae60ec89037a78d60bbf3d8b127f1567769c8fa24886e0abed3f622791dea478"}, + {file = "jiter-0.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:836f03dea312967635233d826f783309b98cfd9ccc76ac776e224cfcef577862"}, + {file = "jiter-0.7.0-cp39-none-win32.whl", hash = "sha256:ebc30ae2ce4bc4986e1764c404b4ea1924f926abf02ce92516485098f8545374"}, + {file = "jiter-0.7.0-cp39-none-win_amd64.whl", hash = "sha256:abf596f951370c648f37aa9899deab296c42a3829736e598b0dd10b08f77a44d"}, + {file = "jiter-0.7.0.tar.gz", hash = "sha256:c061d9738535497b5509f8970584f20de1e900806b239a39a9994fc191dad630"}, +] + +[[package]] +name = "jupyter-client" +version = "8.6.3" +description = "Jupyter protocol implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, + {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +description = "Jupyter core package. A base package on which Jupyter projects rely." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", 
hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = 
"sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + +[[package]] +name = "numpy" +version = "2.0.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, + {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, + {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, + {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, + {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, + {file = 
"numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, + {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, + {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, + {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, + {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, + {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, +] + +[[package]] +name = "ollama" +version = "0.2.1" +description = "The official Python client for Ollama." 
+optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "ollama-0.2.1-py3-none-any.whl", hash = "sha256:b6e2414921c94f573a903d1069d682ba2fb2607070ea9e19ca4a7872f2a460ec"}, + {file = "ollama-0.2.1.tar.gz", hash = "sha256:fa316baa9a81eac3beb4affb0a17deb3008fdd6ed05b123c26306cfbe4c349b6"}, +] + +[package.dependencies] +httpx = ">=0.27.0,<0.28.0" + +[[package]] +name = "openai" +version = "1.53.0" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-1.53.0-py3-none-any.whl", hash = "sha256:20f408c32fc5cb66e60c6882c994cdca580a5648e10045cd840734194f033418"}, + {file = "openai-1.53.0.tar.gz", hash = "sha256:be2c4e77721b166cce8130e544178b7d579f751b4b074ffbaade3854b6f85ec5"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.11,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", 
"pytest"] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.48" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = 
"prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, + {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "proto-plus" +version = "1.25.0" +description = "Beautiful, Pythonic protocol buffers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961"}, + {file = "proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<6.0.0dev" + +[package.extras] +testing = ["google-api-core (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "5.28.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-5.28.3-cp310-abi3-win32.whl", hash = "sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24"}, + {file = "protobuf-5.28.3-cp310-abi3-win_amd64.whl", hash = "sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868"}, + {file = "protobuf-5.28.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687"}, + {file = "protobuf-5.28.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584"}, + {file = "protobuf-5.28.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135"}, + {file = "protobuf-5.28.3-cp38-cp38-win32.whl", hash = "sha256:3e6101d095dfd119513cde7259aa703d16c6bbdfae2554dfe5cfdbe94e32d548"}, + {file = "protobuf-5.28.3-cp38-cp38-win_amd64.whl", hash = "sha256:27b246b3723692bf1068d5734ddaf2fccc2cdd6e0c9b47fe099244d80200593b"}, + {file = "protobuf-5.28.3-cp39-cp39-win32.whl", 
hash = "sha256:135658402f71bbd49500322c0f736145731b16fc79dc8f367ab544a17eab4535"}, + {file = "protobuf-5.28.3-cp39-cp39-win_amd64.whl", hash = "sha256:70585a70fc2dd4818c51287ceef5bdba6387f88a578c86d47bb34669b5552c36"}, + {file = "protobuf-5.28.3-py3-none-any.whl", hash = "sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed"}, + {file = "protobuf-5.28.3.tar.gz", hash = "sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b"}, +] + +[[package]] +name = "psutil" +version = "6.1.0" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, + {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, + {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, + {file = 
"psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, + {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, + {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, + {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, + {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, + {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, +] + +[package.extras] +dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +description = "Safely evaluate 
AST nodes without side effects" +optional = false +python-versions = "*" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, ] +[package.extras] +tests = ["pytest"] + [[package]] -name = "blinker" -version = "1.8.2" -description = "Fast, simple object-to-object and broadcast signaling" +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" files = [ - {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, - {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" +name = "pyasn1-modules" +version = "0.4.1" +description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = 
"pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] [package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} +pyasn1 = ">=0.4.6,<0.7.0" [[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." +name = "pycparser" +version = "2.22" +description = "C parser in Python" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] -name = "falkordb" -version = "1.0.5" -description = "Python client for interacting with FalkorDB database" +name = "pydantic" +version = "2.9.2" +description = "Data validation using Python type hints" optional = false -python-versions = "<4.0,>=3.8" +python-versions = ">=3.8" files = [ - {file = "falkordb-1.0.5.tar.gz", hash = "sha256:e74b3f0c3d010b76d5dc8feaa1567dbd71785f99a26d751b2c00ce9820abbc6b"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -redis = ">=5.0.1,<6.0.0" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" +typing-extensions = [ + {version = 
">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] -name = "flask" -version = "3.0.3" -description = "A simple framework for building complex web applications." +name = "pydantic-core" +version = "2.23.4" +description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, - {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = 
"pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = 
"pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = 
"pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = 
"pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = 
"pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash 
= "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + 
{file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] -blinker = ">=1.6.2" -click = ">=8.1.3" -importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} -itsdangerous = ">=2.1.2" -Jinja2 = ">=3.1.2" -Werkzeug = ">=3.0.0" +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] [package.extras] -async = ["asgiref (>=3.2)"] -dotenv = ["python-dotenv"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] -name = "gitdb" -version = "4.0.11" -description = "Git Object Database" +name = "pyparsing" +version = "3.2.0" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, - {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, + {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"}, + {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"}, ] -[package.dependencies] -smmap = ">=3.0.1,<6" +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] -name = "gitpython" -version = "3.1.43" -description = "GitPython is a Python library used to interact with Git repositories" +name = "pypdf" +version = "4.3.1" +description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" files = [ - {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, - {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, + {file = "pypdf-4.3.1-py3-none-any.whl", hash = 
"sha256:64b31da97eda0771ef22edb1bfecd5deee4b72c3d1736b7df2689805076d6418"}, + {file = "pypdf-4.3.1.tar.gz", hash = "sha256:b2f37fe9a3030aa97ca86067a56ba3f9d3565f9a791b305c7355d8392c30d91b"}, ] [package.dependencies] -gitdb = ">=4.0.1,<5" +typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] -doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] -test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] +crypto = ["PyCryptodome", "cryptography"] +dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] +docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] +full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] +image = ["Pillow (>=8.0.0)"] [[package]] -name = "importlib-metadata" -version = "8.0.0" -description = "Read metadata from Python packages" +name = "pytest" +version = "8.3.3" +description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] -zipp = ">=0.5" +colorama = {version = "*", markers = "sys_platform == 
\"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] -name = "itsdangerous" -version = "2.2.0" -description = "Safely pass data to untrusted environments and back." +name = "python-abc" +version = "0.2.0" +description = "A python implementation of the ABC Software metric" optional = false -python-versions = ">=3.8" +python-versions = "*" files = [ - {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, - {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, + {file = "python-abc-0.2.0.tar.gz", hash = "sha256:90017d09fbac7bde4b64b2c7e1b5d22da9055b64b821d1a2b4dc805b450b251a"}, ] [[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." 
+name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" optional = false -python-versions = ">=3.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", 
hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] +six = ">=1.5" [[package]] name = "python-dotenv" @@ -245,23 +2027,285 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pywin32" +version = "308" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", 
hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, +] + +[[package]] +name = "pyzmq" +version = "26.2.0" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, + {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, + {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, + {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, + {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = 
"sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, + {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = "sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"}, + {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"}, + {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, + {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, + {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "ratelimit" +version = "2.2.1" +description = "API rate limit decorator" +optional = false +python-versions = "*" +files = [ + {file = "ratelimit-2.2.1.tar.gz", hash = "sha256:af8a9b64b821529aca09ebaf6d8d279100d766f19e90b5059ac6a718ca6dee42"}, +] + [[package]] name = "redis" -version = "5.0.6" +version = "5.2.0" description = "Python client for Redis database and key-value store" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "redis-5.0.6-py3-none-any.whl", hash = "sha256:c0d6d990850c627bbf7be01c5c4cbaadf67b48593e913bb71c9819c30df37eee"}, - {file = "redis-5.0.6.tar.gz", hash = "sha256:38473cd7c6389ad3e44a91f4c3eaf6bcb8a9f746007f29bf4fb20824ff0b2197"}, + {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"}, + {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"}, ] [package.dependencies] async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} [package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] +hiredis = ["hiredis (>=3.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "shapely" +version = "2.0.6" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shapely-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b"}, + {file = "shapely-2.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b"}, + {file = "shapely-2.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad2fae12dca8d2b727fa12b007e46fbc522148a584f5d6546c539f3464dccde"}, + {file = "shapely-2.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3304883bd82d44be1b27a9d17f1167fda8c7f5a02a897958d86c59ec69b705e"}, + {file = "shapely-2.0.6-cp310-cp310-win32.whl", hash = "sha256:3ec3a0eab496b5e04633a39fa3d5eb5454628228201fb24903d38174ee34565e"}, + {file = "shapely-2.0.6-cp310-cp310-win_amd64.whl", hash = 
"sha256:28f87cdf5308a514763a5c38de295544cb27429cfa655d50ed8431a4796090c4"}, + {file = "shapely-2.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aeb0f51a9db176da9a30cb2f4329b6fbd1e26d359012bb0ac3d3c7781667a9e"}, + {file = "shapely-2.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a7a78b0d51257a367ee115f4d41ca4d46edbd0dd280f697a8092dd3989867b2"}, + {file = "shapely-2.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32c23d2f43d54029f986479f7c1f6e09c6b3a19353a3833c2ffb226fb63a855"}, + {file = "shapely-2.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dc9fb0eb56498912025f5eb352b5126f04801ed0e8bdbd867d21bdbfd7cbd0"}, + {file = "shapely-2.0.6-cp311-cp311-win32.whl", hash = "sha256:d93b7e0e71c9f095e09454bf18dad5ea716fb6ced5df3cb044564a00723f339d"}, + {file = "shapely-2.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:c02eb6bf4cfb9fe6568502e85bb2647921ee49171bcd2d4116c7b3109724ef9b"}, + {file = "shapely-2.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cec9193519940e9d1b86a3b4f5af9eb6910197d24af02f247afbfb47bcb3fab0"}, + {file = "shapely-2.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83b94a44ab04a90e88be69e7ddcc6f332da7c0a0ebb1156e1c4f568bbec983c3"}, + {file = "shapely-2.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:537c4b2716d22c92036d00b34aac9d3775e3691f80c7aa517c2c290351f42cd8"}, + {file = "shapely-2.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fea108334be345c283ce74bf064fa00cfdd718048a8af7343c59eb40f59726"}, + {file = "shapely-2.0.6-cp312-cp312-win32.whl", hash = "sha256:42fd4cd4834747e4990227e4cbafb02242c0cffe9ce7ef9971f53ac52d80d55f"}, + {file = "shapely-2.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:665990c84aece05efb68a21b3523a6b2057e84a1afbef426ad287f0796ef8a48"}, + {file = "shapely-2.0.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:42805ef90783ce689a4dde2b6b2f261e2c52609226a0438d882e3ced40bb3013"}, + {file = "shapely-2.0.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6d2cb146191a47bd0cee8ff5f90b47547b82b6345c0d02dd8b25b88b68af62d7"}, + {file = "shapely-2.0.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3fdef0a1794a8fe70dc1f514440aa34426cc0ae98d9a1027fb299d45741c381"}, + {file = "shapely-2.0.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c665a0301c645615a107ff7f52adafa2153beab51daf34587170d85e8ba6805"}, + {file = "shapely-2.0.6-cp313-cp313-win32.whl", hash = "sha256:0334bd51828f68cd54b87d80b3e7cee93f249d82ae55a0faf3ea21c9be7b323a"}, + {file = "shapely-2.0.6-cp313-cp313-win_amd64.whl", hash = "sha256:d37d070da9e0e0f0a530a621e17c0b8c3c9d04105655132a87cfff8bd77cc4c2"}, + {file = "shapely-2.0.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fa7468e4f5b92049c0f36d63c3e309f85f2775752e076378e36c6387245c5462"}, + {file = "shapely-2.0.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed5867e598a9e8ac3291da6cc9baa62ca25706eea186117034e8ec0ea4355653"}, + {file = "shapely-2.0.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81d9dfe155f371f78c8d895a7b7f323bb241fb148d848a2bf2244f79213123fe"}, + {file = "shapely-2.0.6-cp37-cp37m-win32.whl", hash = "sha256:fbb7bf02a7542dba55129062570211cfb0defa05386409b3e306c39612e7fbcc"}, + {file = "shapely-2.0.6-cp37-cp37m-win_amd64.whl", hash = "sha256:837d395fac58aa01aa544495b97940995211e3e25f9aaf87bc3ba5b3a8cd1ac7"}, + {file = "shapely-2.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c6d88ade96bf02f6bfd667ddd3626913098e243e419a0325ebef2bbd481d1eb6"}, + {file = "shapely-2.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8b3b818c4407eaa0b4cb376fd2305e20ff6df757bf1356651589eadc14aab41b"}, + {file = "shapely-2.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1bbc783529a21f2bd50c79cef90761f72d41c45622b3e57acf78d984c50a5d13"}, + {file = "shapely-2.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2423f6c0903ebe5df6d32e0066b3d94029aab18425ad4b07bf98c3972a6e25a1"}, + {file = "shapely-2.0.6-cp38-cp38-win32.whl", hash = "sha256:2de00c3bfa80d6750832bde1d9487e302a6dd21d90cb2f210515cefdb616e5f5"}, + {file = "shapely-2.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:3a82d58a1134d5e975f19268710e53bddd9c473743356c90d97ce04b73e101ee"}, + {file = "shapely-2.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:392f66f458a0a2c706254f473290418236e52aa4c9b476a072539d63a2460595"}, + {file = "shapely-2.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eba5bae271d523c938274c61658ebc34de6c4b33fdf43ef7e938b5776388c1be"}, + {file = "shapely-2.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060566bc4888b0c8ed14b5d57df8a0ead5c28f9b69fb6bed4476df31c51b0af"}, + {file = "shapely-2.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b02154b3e9d076a29a8513dffcb80f047a5ea63c897c0cd3d3679f29363cf7e5"}, + {file = "shapely-2.0.6-cp39-cp39-win32.whl", hash = "sha256:44246d30124a4f1a638a7d5419149959532b99dfa25b54393512e6acc9c211ac"}, + {file = "shapely-2.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:2b542d7f1dbb89192d3512c52b679c822ba916f93479fa5d4fc2fe4fa0b3c9e8"}, + {file = "shapely-2.0.6.tar.gz", hash = "sha256:997f6159b1484059ec239cacaa53467fd8b5564dabe186cd84ac2944663b0bf6"}, +] + +[package.dependencies] +numpy = ">=1.14,<3" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = 
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] [[package]] name = "smmap" @@ -274,6 +2318,113 @@ files = [ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "soupsieve" +version = "2.6" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "tomli" +version = "2.0.2" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ 
+ {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, +] + +[[package]] +name = "tornado" +version = "6.4.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = 
"sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, +] + +[[package]] +name = "tqdm" +version = "4.66.6" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.6-py3-none-any.whl", hash = "sha256:223e8b5359c2efc4b30555531f09e9f2f3589bcd7fdd389271191031b49b7a63"}, + {file = "tqdm-4.66.6.tar.gz", hash = "sha256:4bdd694238bef1485ce839d67967ab50af8f9272aab687c0d7702a01da0be090"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + [[package]] name = "tree-sitter" version = "0.22.3" @@ -336,15 +2487,135 @@ files = [ [package.extras] core = ["tree-sitter (>=0.21,<1.0)"] +[[package]] +name = "tree-sitter-python" +version = "0.21.0" +description = "Python grammar for tree-sitter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tree_sitter_python-0.21.0-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:29e3addfabdaa88fa2aaaa2426d8ff12f0a0346c46b10dd5a76424355e5fa3cc"}, + {file = 
"tree_sitter_python-0.21.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b40e71ebd41046ca4fcde78b734e86f0b3f77055f51f1cac6e2662c37ec0520"}, + {file = "tree_sitter_python-0.21.0-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cda4e742627724eabed95e06ca67b640f8b31e86e776905afc396c928082f032"}, + {file = "tree_sitter_python-0.21.0-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:df142d166aa6b575fdb0726a64d56cb1d8cb7a3ad5377eb5fa90557ffe4caffe"}, + {file = "tree_sitter_python-0.21.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:23a0cf850788c990436704837e6125cbf6535fe5a5729b9a84846fc254e915c7"}, + {file = "tree_sitter_python-0.21.0-cp38-abi3-win32.whl", hash = "sha256:86dce33757fa8d420d1c9089280d352507a7b9601b26732c73b77e9d0ddd8604"}, + {file = "tree_sitter_python-0.21.0-cp38-abi3-win_amd64.whl", hash = "sha256:86b5c81b00f07b9cdc87e4fade0497c0af7b95365908608e31070668564b02e7"}, +] + +[package.extras] +core = ["tree-sitter (>=0.21,<1.0)"] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "uritemplate" +version = "4.1.1" +description = "Implementation of RFC 6570 URI Templates" +optional = false +python-versions = ">=3.6" +files = [ + {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, + {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, +] + +[[package]] +name = 
"urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "validators" +version = "0.34.0" +description = "Python Data Validation for Humans™" +optional = false +python-versions = ">=3.8" +files = [ + {file = "validators-0.34.0-py3-none-any.whl", hash = "sha256:c804b476e3e6d3786fa07a30073a4ef694e617805eb1946ceee3fe5a9b8b1321"}, + {file = "validators-0.34.0.tar.gz", hash = "sha256:647fe407b45af9a74d245b943b18e6a816acf4926974278f6dd617778e1e781f"}, +] + +[package.extras] +crypto-eth-addresses = ["eth-hash[pycryptodome] (>=0.7.0)"] + +[[package]] +name = "vertexai" +version = "1.71.1" +description = "Please run pip install vertexai to use the Vertex SDK." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "vertexai-1.71.1-py3-none-any.whl", hash = "sha256:86162d5fe18badc76044a0950c8cd8accc7a81c16d787adf7430db32881d6063"}, + {file = "vertexai-1.71.1.tar.gz", hash = "sha256:cd74fe42ea05bb155aff0a4c150fd3d8af74df399297560a09027fa85e1fdbd7"}, +] + +[package.dependencies] +google-cloud-aiplatform = {version = "1.71.1", extras = ["all"]} + +[package.extras] +autologging = ["mlflow (>=1.27.0,<=2.1.1)"] +cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] +endpoint = ["requests (>=2.28.1)"] +full = ["cloudpickle (<3.0)", "cloudpickle (>=2.2.1,<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pydantic (<3)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] +langchain = ["langchain (>=0.1.13,<0.2)", "langchain-core (<0.2)", "langchain-google-vertexai (<0.2)"] +langchain-testing = ["langchain (>=0.1.13,<0.2)", "langchain-core (<0.2)", "langchain-google-vertexai (<0.2)", "pytest-xdist"] +lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] +metadata = ["numpy (>=1.15.0)", "pandas 
(>=1.0.0)"] +pipelines = ["pyyaml (>=5.3.1,<7)"] +prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] +preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] +private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] +rapid-evaluation = ["nest-asyncio (>=1.0.0,<1.6.0)", "pandas (>=1.0.0,<2.2.0)"] +ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)"] +ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "scikit-learn", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] +reasoningengine = ["cloudpickle (>=2.2.1,<3.0)", "pydantic (<3)"] +tensorboard = ["tensorflow (>=2.3.0,<3.0.0dev)"] +testing = ["bigframes", "cloudpickle (<3.0)", "cloudpickle (>=2.2.1,<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pydantic (<3)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || 
>2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] +vizier = ["google-vizier (>=0.1.6)"] +xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + [[package]] name = "werkzeug" -version = "3.0.3" +version = "3.1.0" description = "The comprehensive WSGI web application library." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, - {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, + {file = "werkzeug-3.1.0-py3-none-any.whl", hash = "sha256:208a2e31a4a54c8b3d2244f2079ca1d3851629a7a7d546646059c64fb746023a"}, + {file = "werkzeug-3.1.0.tar.gz", hash = "sha256:6f2a0d38f25ba5a75c36c45b4ae350c7a23b57e3b974e9eb2d6851f2c648c00d"}, ] [package.dependencies] @@ -355,20 +2626,24 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest 
(>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "1aa3efe2a7c2460c0eef6c984aba9960ecedd03277550e48bf2519ec5f9c5cb8" +content-hash = "a20f976ff007454985a770d2d3959f63f5c0ca0976b31a288d677c0d9d8c6f2c" diff --git a/pyproject.toml b/pyproject.toml index 7a3573e..223af57 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,9 @@ readme = "README.md" [tool.poetry.dependencies] python = "^3.9" +graphrag-sdk ="^0.2.2" tree-sitter = "^0.22.3" +validators = "^0.34.0" GitPython = "^3.1.43" falkordb = "^1.0.5" tree-sitter-c = "^0.21.4" @@ -15,6 +17,8 @@ tree-sitter-python = "^0.21.0" flask = "^3.0.3" python-dotenv = "^1.0.1" +[tool.poetry.group.test.dependencies] +pytest = "^8.2.0" [build-system] requires = ["poetry-core"] diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..b00a734 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,114 @@ +annotated-types==0.7.0 ; python_version >= "3.9" and python_version < "4.0" +anyio==4.6.2.post1 ; python_version >= "3.9" and python_version < "4.0" +appnope==0.1.4 ; python_version >= "3.9" and python_version < "4.0" and platform_system == "Darwin" +asttokens==2.4.1 ; python_version >= "3.9" and python_version < "4.0" +async-timeout==5.0.0 ; python_version >= "3.9" and python_full_version < "3.11.3" +backoff==2.2.1 ; python_version >= "3.9" and python_version < "4.0" +beautifulsoup4==4.12.3 ; python_version >= "3.9" and python_version < "4.0" +blinker==1.8.2 ; python_version >= "3.9" and python_version < "4.0" +bs4==0.0.2 ; python_version >= "3.9" and python_version < "4.0" +cachetools==5.5.0 ; python_version >= "3.9" and python_version < "4.0" +certifi==2024.8.30 ; python_version >= "3.9" and python_version < "4.0" +cffi==1.17.1 ; python_version >= "3.9" and python_version < "4.0" and implementation_name == "pypy" +charset-normalizer==3.4.0 ; python_version >= "3.9" and python_version < "4.0" +click==8.1.7 ; 
python_version >= "3.9" and python_version < "4.0" +colorama==0.4.6 ; python_version >= "3.9" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") +comm==0.2.2 ; python_version >= "3.9" and python_version < "4.0" +debugpy==1.8.7 ; python_version >= "3.9" and python_version < "4.0" +decorator==5.1.1 ; python_version >= "3.9" and python_version < "4.0" +distro==1.9.0 ; python_version >= "3.9" and python_version < "4.0" +docstring-parser==0.16 ; python_version >= "3.9" and python_version < "4.0" +exceptiongroup==1.2.2 ; python_version >= "3.9" and python_version < "3.11" +executing==2.1.0 ; python_version >= "3.9" and python_version < "4.0" +falkordb==1.0.9 ; python_version >= "3.9" and python_version < "4.0" +fix-busted-json==0.0.18 ; python_version >= "3.9" and python_version < "4.0" +flask==3.0.3 ; python_version >= "3.9" and python_version < "4.0" +gitdb==4.0.11 ; python_version >= "3.9" and python_version < "4.0" +gitpython==3.1.43 ; python_version >= "3.9" and python_version < "4.0" +google-ai-generativelanguage==0.6.10 ; python_version >= "3.9" and python_version < "4.0" +google-api-core==2.22.0 ; python_version >= "3.9" and python_version < "4.0" +google-api-core[grpc]==2.22.0 ; python_version >= "3.9" and python_version < "4.0" +google-api-python-client==2.151.0 ; python_version >= "3.9" and python_version < "4.0" +google-auth-httplib2==0.2.0 ; python_version >= "3.9" and python_version < "4.0" +google-auth==2.35.0 ; python_version >= "3.9" and python_version < "4.0" +google-cloud-aiplatform[all]==1.71.1 ; python_version >= "3.9" and python_version < "4.0" +google-cloud-bigquery==3.26.0 ; python_version >= "3.9" and python_version < "4.0" +google-cloud-core==2.4.1 ; python_version >= "3.9" and python_version < "4.0" +google-cloud-resource-manager==1.13.0 ; python_version >= "3.9" and python_version < "4.0" +google-cloud-storage==2.18.2 ; python_version >= "3.9" and python_version < "4.0" +google-crc32c==1.6.0 ; 
python_version >= "3.9" and python_version < "4.0" +google-generativeai==0.8.3 ; python_version >= "3.9" and python_version < "4.0" +google-resumable-media==2.7.2 ; python_version >= "3.9" and python_version < "4.0" +googleapis-common-protos==1.65.0 ; python_version >= "3.9" and python_version < "4.0" +googleapis-common-protos[grpc]==1.65.0 ; python_version >= "3.9" and python_version < "4.0" +graphrag-sdk==0.2.2 ; python_version >= "3.9" and python_version < "4.0" +grpc-google-iam-v1==0.13.1 ; python_version >= "3.9" and python_version < "4.0" +grpcio-status==1.67.1 ; python_version >= "3.9" and python_version < "4.0" +grpcio==1.67.1 ; python_version >= "3.9" and python_version < "4.0" +h11==0.14.0 ; python_version >= "3.9" and python_version < "4.0" +httpcore==1.0.6 ; python_version >= "3.9" and python_version < "4.0" +httplib2==0.22.0 ; python_version >= "3.9" and python_version < "4.0" +httpx==0.27.2 ; python_version >= "3.9" and python_version < "4.0" +idna==3.10 ; python_version >= "3.9" and python_version < "4.0" +importlib-metadata==8.5.0 ; python_version >= "3.9" and python_version < "3.10" +ipykernel==6.29.5 ; python_version >= "3.9" and python_version < "4.0" +ipython==8.18.1 ; python_version >= "3.9" and python_version < "4.0" +itsdangerous==2.2.0 ; python_version >= "3.9" and python_version < "4.0" +jedi==0.19.1 ; python_version >= "3.9" and python_version < "4.0" +jinja2==3.1.4 ; python_version >= "3.9" and python_version < "4.0" +jiter==0.7.0 ; python_version >= "3.9" and python_version < "4.0" +jupyter-client==8.6.3 ; python_version >= "3.9" and python_version < "4.0" +jupyter-core==5.7.2 ; python_version >= "3.9" and python_version < "4.0" +markupsafe==3.0.2 ; python_version >= "3.9" and python_version < "4.0" +matplotlib-inline==0.1.7 ; python_version >= "3.9" and python_version < "4.0" +nest-asyncio==1.6.0 ; python_version >= "3.9" and python_version < "4.0" +numpy==2.0.2 ; python_version >= "3.9" and python_version < "4.0" +ollama==0.2.1 ; 
python_version >= "3.9" and python_version < "4.0" +openai==1.53.0 ; python_version >= "3.9" and python_version < "4.0" +packaging==24.1 ; python_version >= "3.9" and python_version < "4.0" +parso==0.8.4 ; python_version >= "3.9" and python_version < "4.0" +pexpect==4.9.0 ; python_version >= "3.9" and python_version < "4.0" and sys_platform != "win32" +platformdirs==4.3.6 ; python_version >= "3.9" and python_version < "4.0" +prompt-toolkit==3.0.48 ; python_version >= "3.9" and python_version < "4.0" +proto-plus==1.25.0 ; python_version >= "3.9" and python_version < "4.0" +protobuf==5.28.3 ; python_version >= "3.9" and python_version < "4.0" +psutil==6.1.0 ; python_version >= "3.9" and python_version < "4.0" +ptyprocess==0.7.0 ; python_version >= "3.9" and python_version < "4.0" and sys_platform != "win32" +pure-eval==0.2.3 ; python_version >= "3.9" and python_version < "4.0" +pyasn1-modules==0.4.1 ; python_version >= "3.9" and python_version < "4.0" +pyasn1==0.6.1 ; python_version >= "3.9" and python_version < "4.0" +pycparser==2.22 ; python_version >= "3.9" and python_version < "4.0" and implementation_name == "pypy" +pydantic-core==2.23.4 ; python_version >= "3.9" and python_version < "4.0" +pydantic==2.9.2 ; python_version >= "3.9" and python_version < "4.0" +pygments==2.18.0 ; python_version >= "3.9" and python_version < "4.0" +pyparsing==3.2.0 ; python_version >= "3.9" and python_version < "4.0" +pypdf==4.3.1 ; python_version >= "3.9" and python_version < "4.0" +python-abc==0.2.0 ; python_version >= "3.9" and python_version < "4.0" +python-dateutil==2.9.0.post0 ; python_version >= "3.9" and python_version < "4.0" +python-dotenv==1.0.1 ; python_version >= "3.9" and python_version < "4.0" +pywin32==308 ; sys_platform == "win32" and platform_python_implementation != "PyPy" and python_version >= "3.9" and python_version < "4.0" +pyzmq==26.2.0 ; python_version >= "3.9" and python_version < "4.0" +ratelimit==2.2.1 ; python_version >= "3.9" and python_version < "4.0" 
+redis==5.2.0 ; python_version >= "3.9" and python_version < "4.0" +requests==2.32.3 ; python_version >= "3.9" and python_version < "4.0" +rsa==4.9 ; python_version >= "3.9" and python_version < "4" +shapely==2.0.6 ; python_version >= "3.9" and python_version < "4.0" +six==1.16.0 ; python_version >= "3.9" and python_version < "4.0" +smmap==5.0.1 ; python_version >= "3.9" and python_version < "4.0" +sniffio==1.3.1 ; python_version >= "3.9" and python_version < "4.0" +soupsieve==2.6 ; python_version >= "3.9" and python_version < "4.0" +stack-data==0.6.3 ; python_version >= "3.9" and python_version < "4.0" +tornado==6.4.1 ; python_version >= "3.9" and python_version < "4.0" +tqdm==4.66.6 ; python_version >= "3.9" and python_version < "4.0" +traitlets==5.14.3 ; python_version >= "3.9" and python_version < "4.0" +tree-sitter-c==0.21.4 ; python_version >= "3.9" and python_version < "4.0" +tree-sitter-python==0.21.0 ; python_version >= "3.9" and python_version < "4.0" +tree-sitter==0.22.3 ; python_version >= "3.9" and python_version < "4.0" +typing-extensions==4.12.2 ; python_version >= "3.9" and python_version < "4.0" +uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "4.0" +urllib3==2.2.3 ; python_version >= "3.9" and python_version < "4.0" +validators==0.34.0 ; python_version >= "3.9" and python_version < "4.0" +vertexai==1.71.1 ; python_version >= "3.9" and python_version < "4.0" +wcwidth==0.2.13 ; python_version >= "3.9" and python_version < "4.0" +werkzeug==3.1.0 ; python_version >= "3.9" and python_version < "4.0" +zipp==3.20.2 ; python_version >= "3.9" and python_version < "3.10" diff --git a/tests/endpoints/__init__.py b/tests/endpoints/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/endpoints/test_auto_complete.py b/tests/endpoints/test_auto_complete.py new file mode 100644 index 0000000..57064db --- /dev/null +++ b/tests/endpoints/test_auto_complete.py @@ -0,0 +1,50 @@ +import redis +import pytest +from pathlib import 
Path +from code_graph import create_app, Project + +@pytest.fixture() +def app(): + app = create_app() + app.config.update({ "TESTING": True }) + + # other setup can go here + + redis.Redis().flushall() + + yield app + + # clean up / reset resources here + +@pytest.fixture() +def client(app): + return app.test_client() + +@pytest.fixture() +def runner(app): + return app.test_cli_runner() + +def test_auto_complete(client): + # Start with an empty DB + response = client.post("/auto_complete", json={ "repo": "GraphRAG-SDK", "prefix": "set" }) + status = response.json["status"] + + # Expecting an error + assert status == "Missing project GraphRAG-SDK" + + # Process Git repository + proj = Project.from_git_repository("https://github.com/FalkorDB/GraphRAG-SDK") + proj.analyze_sources() + proj.process_git_history() + + # Re-issue auto complete request + response = client.post("/auto_complete", json={ "repo": "GraphRAG-SDK", "prefix": "set" }) + status = response.json["status"] + completions = response.json["completions"] + + # Expecting successful completions + assert status == "success" + assert len(completions) > 0 + for completion in completions: + assert completion["properties"]["name"].startswith("set") + diff --git a/tests/endpoints/test_find_paths.py b/tests/endpoints/test_find_paths.py new file mode 100644 index 0000000..8dd341c --- /dev/null +++ b/tests/endpoints/test_find_paths.py @@ -0,0 +1,80 @@ +import os +import redis +import pytest +from pathlib import Path +from code_graph import create_app, Project +from falkordb import FalkorDB, Path, Node, QueryResult + +@pytest.fixture() +def app(): + app = create_app() + app.config.update({ + "TESTING": True, + }) + + # other setup can go here + redis.Redis().flushall() + + yield app + + # clean up / reset resources here + +@pytest.fixture() +def client(app): + return app.test_client() + +@pytest.fixture() +def runner(app): + return app.test_cli_runner() + +def test_find_paths(client): + # Start with an empty DB + 
response = client.post("/find_paths", json={"repo": "GraphRAG-SDK", "src": 0, "dest": 0}).json + status = response["status"] + + # Expecting an error + assert status == "Missing project GraphRAG-SDK" + + # Process Git repository + proj = Project.from_git_repository("https://github.com/FalkorDB/GraphRAG-SDK") + proj.analyze_sources() + + # Re-issue with invalid src node id + response = client.post("/find_paths", json={"repo": "GraphRAG-SDK", "src": 'invalid', "dest": 0}).json + status = response["status"] + assert status == "src node id must be int" + + # Re-issue with invalid dest node id + response = client.post("/find_paths", json={"repo": "GraphRAG-SDK", "src": 0, "dest": 'invalid'}).json + status = response["status"] + assert status == "dest node id must be int" + + # Find src and dest nodes that are at least 3 hops apart + db = FalkorDB(host=os.getenv('FALKORDB_HOST', 'localhost'), + port=os.getenv('FALKORDB_PORT', 6379), + username=os.getenv('FALKORDB_USERNAME', None), + password=os.getenv('FALKORDB_PASSWORD', None)) + g = db.select_graph("GraphRAG-SDK") + q = """MATCH (a:Function)-[:CALLS*3..5]->(b:Function) + RETURN ID(a), ID(b) + LIMIT 1""" + + result_set = g.query(q).result_set + src_id = result_set[0][0] + dest_id = result_set[0][1] + + # Re-issue with none existing node id + response = client.post("/find_paths", json={ + "repo": "GraphRAG-SDK", + "src": src_id, + "dest": dest_id}).json + + status = response["status"] + paths = response["paths"] + + for p in paths: + assert p[0]['id'] == src_id + assert p[-1]['id'] == dest_id + assert len(p) % 2 == 1 + assert len(p) >= 3 + diff --git a/tests/endpoints/test_get_neighbors.py b/tests/endpoints/test_get_neighbors.py new file mode 100644 index 0000000..ae0b978 --- /dev/null +++ b/tests/endpoints/test_get_neighbors.py @@ -0,0 +1,63 @@ +import redis +import pytest +from pathlib import Path +from code_graph import create_app, Project + +@pytest.fixture() +def app(): + app = create_app() + app.config.update({ + 
"TESTING": True, + }) + + # other setup can go here + redis.Redis().flushall() + + yield app + + # clean up / reset resources here + +@pytest.fixture() +def client(app): + return app.test_client() + +@pytest.fixture() +def runner(app): + return app.test_cli_runner() + +def test_graph_entities(client): + # Start with an empty DB + response = client.get("/get_neighbors?repo=GraphRAG-SDK&node_id=0").json + status = response["status"] + + # Expecting an error + assert status == "Missing project GraphRAG-SDK" + + # Process Git repository + proj = Project.from_git_repository("https://github.com/FalkorDB/GraphRAG-SDK") + proj.analyze_sources() + + # Re-issue with invalid node id + response = client.get("/get_neighbors?repo=GraphRAG-SDK&node_id=invalid").json + status = response["status"] + assert status == "Invalid node ID. It must be an integer." + + # Re-issue with none existing node id + response = client.get("/get_neighbors?repo=GraphRAG-SDK&node_id=99999999").json + status = response["status"] + neighbors = response["neighbors"] + + assert status == "success" + assert neighbors["nodes"] == [] + assert neighbors["edges"] == [] + + # Re-issue with valid node id + response = client.get("/get_neighbors?repo=GraphRAG-SDK&node_id=0").json + status = response["status"] + neighbors = response["neighbors"] + nodes = neighbors["nodes"] + edges = neighbors["edges"] + + assert status == "success" + assert len(nodes) > 0 and len(nodes) < 1000 + assert len(edges) > 0 and len(edges) < 1000 diff --git a/tests/endpoints/test_graph_entities.py b/tests/endpoints/test_graph_entities.py new file mode 100644 index 0000000..74f1b67 --- /dev/null +++ b/tests/endpoints/test_graph_entities.py @@ -0,0 +1,49 @@ +import redis +import pytest +from pathlib import Path +from code_graph import create_app, Project + +@pytest.fixture() +def app(): + app = create_app() + app.config.update({ + "TESTING": True, + }) + + # other setup can go here + redis.Redis().flushall() + + yield app + + # clean up / 
reset resources here + +@pytest.fixture() +def client(app): + return app.test_client() + +@pytest.fixture() +def runner(app): + return app.test_cli_runner() + +def test_graph_entities(client): + # Start with an empty DB + response = client.get("/graph_entities?repo=GraphRAG-SDK").json + status = response["status"] + + # Expecting an error + assert status == "Missing project GraphRAG-SDK" + + # Process Git repository + proj = Project.from_git_repository("https://github.com/FalkorDB/GraphRAG-SDK") + proj.analyze_sources() + + # Re-issue graph_entities request + response = client.get("/graph_entities?repo=GraphRAG-SDK").json + status = response["status"] + entities = response["entities"] + nodes = entities["nodes"] + edges = entities["edges"] + + assert status == "success" + assert len(nodes) > 10 and len(nodes) < 1000 + assert len(edges) > 10 and len(edges) < 1000 diff --git a/tests/endpoints/test_list_commits.py b/tests/endpoints/test_list_commits.py new file mode 100644 index 0000000..75f2220 --- /dev/null +++ b/tests/endpoints/test_list_commits.py @@ -0,0 +1,63 @@ +import redis +import pytest +from pathlib import Path +from code_graph import create_app, Project + +@pytest.fixture() +def app(): + app = create_app() + app.config.update({ + "TESTING": True, + }) + + # other setup can go here + + redis.Redis().flushall() + + yield app + + # clean up / reset resources here + +@pytest.fixture() +def client(app): + return app.test_client() + +@pytest.fixture() +def runner(app): + return app.test_cli_runner() + +def test_list_commits(client): + # Start with an empty DB + response = client.post("/list_commits", json={ "repo": "git_repo" }) + status = response.json["status"] + commits = response.json["commits"] + + # Expecting an empty response + assert status == "success" + assert commits == [] + + # Process a local repository + path = Path(__file__).absolute() + path = path.parent.parent / "git_repo" + + proj = Project.from_local_repository(path) + + 
proj.analyze_sources() + proj.process_git_history() + + # Reissue list_commits request + response = client.post("/list_commits", json={ "repo": "git_repo" }) + status = response.json["status"] + commits = response.json["commits"] + + expected = [ + {'author': 'Roi Lipman', 'date': 1729068452, 'hash': 'fac1698da4ee14c215316859e68841ae0b0275b0', 'message': 'Initial commit\n'}, + {'author': 'Roi Lipman', 'date': 1729068481, 'hash': 'c4332d05bc1b92a33012f2ff380b807d3fbb9c2e', 'message': 'modified a.py\n'}, + {'author': 'Roi Lipman', 'date': 1729068506, 'hash': '5ec6b14612547393e257098e214ae7748ed12c50', 'message': 'added both b.py and c.py\n'}, + {'author': 'Roi Lipman', 'date': 1729068530, 'hash': 'df8d021dbae077a39693c1e76e8438006d62603e', 'message': 'removed b.py\n'} + ] + + # Expecting the full commit history + assert status == "success" + assert commits == expected + diff --git a/tests/endpoints/test_list_repos.py b/tests/endpoints/test_list_repos.py new file mode 100644 index 0000000..67a92b7 --- /dev/null +++ b/tests/endpoints/test_list_repos.py @@ -0,0 +1,54 @@ +import redis +import pytest +from pathlib import Path +from code_graph import create_app, Project + +@pytest.fixture() +def app(): + app = create_app() + app.config.update({ + "TESTING": True, + }) + + # other setup can go here + redis.Redis().flushall() + + yield app + + # clean up / reset resources here + +@pytest.fixture() +def client(app): + return app.test_client() + +@pytest.fixture() +def runner(app): + return app.test_cli_runner() + +def test_list_repos(client): + # Start with an empty DB + response = client.get("/list_repos").json + status = response["status"] + repositories = response["repositories"] + + # Expecting an empty response + assert status == "success" + assert repositories == [] + + # Process a local repository + path = Path(__file__).absolute() + path = path.parent.parent / "git_repo" + + proj = Project.from_local_repository(path) + + proj.analyze_sources() + proj.process_git_history() + 
+ # Reissue list_repos request + response = client.get("/list_repos").json + status = response["status"] + repositories = response["repositories"] + + # Expecting the processed repository to be listed + assert status == "success" + assert repositories == ['git_repo'] diff --git a/tests/endpoints/test_repo_info.py b/tests/endpoints/test_repo_info.py new file mode 100644 index 0000000..8e9e273 --- /dev/null +++ b/tests/endpoints/test_repo_info.py @@ -0,0 +1,52 @@ +import redis +import pytest +from pathlib import Path +from code_graph import create_app, Project + +@pytest.fixture() +def app(): + app = create_app() + app.config.update({ + "TESTING": True, + }) + + # other setup can go here + + redis.Redis().flushall() + + yield app + + # clean up / reset resources here + +@pytest.fixture() +def client(app): + return app.test_client() + +@pytest.fixture() +def runner(app): + return app.test_cli_runner() + +def test_repo_info(client): + # Start with an empty DB + response = client.post("/repo_info", json={ "repo": "GraphRAG-SDK" }) + status = response.json["status"] + + # Expecting an error + assert status == "Missing repository \"GraphRAG-SDK\"" + + # Process Git repository + proj = Project.from_git_repository("https://github.com/FalkorDB/GraphRAG-SDK") + proj.analyze_sources() + proj.process_git_history() + + # Reissue repo_info request + response = client.post("/repo_info", json={ "repo": "GraphRAG-SDK" }) + status = response.json["status"] + info = response.json["info"] + + # Expecting repository info + assert status == "success" + assert 'edge_count' in info + assert 'node_count' in info + assert info['repo_url'] == 'https://github.com/FalkorDB/GraphRAG-SDK' + diff --git a/tests/git_repo b/tests/git_repo new file mode 160000 index 0000000..df8d021 --- /dev/null +++ b/tests/git_repo @@ -0,0 +1 @@ +Subproject commit df8d021dbae077a39693c1e76e8438006d62603e diff --git a/tests/test_analyzer.py b/tests/test_analyzer.py deleted file mode 100644 index 43a2380..0000000 --- 
a/tests/test_analyzer.py +++ /dev/null @@ -1,34 +0,0 @@ -import unittest -from code_graph import * - -# logging_config.py -import logging - -class TestAnalyzer(unittest.TestCase): - def setUp(self): - # Create a custom logger - logger = logging.getLogger('code_graph') - - # Set the minimum level of messages to handle - logger.setLevel(logging.DEBUG) - - # Create handlers - console_handler = logging.StreamHandler() - - # Set the level for handlers - console_handler.setLevel(logging.DEBUG) - - # Create formatters and add them to the handlers - console_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s') - - console_handler.setFormatter(console_format) - - # Add handlers to the logger - logger.addHandler(console_handler) - - def test_analyzer(self): - analyzer = SourceAnalyzer() - analyzer.analyze_repository('https://github.com/FalkorDB/falkordb-py.git') - analyzer.analyze_repository('https://github.com/psf/requests.git') - analyzer.analyze_repository('https://github.com/antirez/sds.git') - analyzer.analyze_repository('https://github.com/FalkorDB/FalkorDB.git') diff --git a/tests/test_c_analyzer.py b/tests/test_c_analyzer.py index 2454888..8644088 100644 --- a/tests/test_c_analyzer.py +++ b/tests/test_c_analyzer.py @@ -1,22 +1,33 @@ +import os import unittest from pathlib import Path -from code_graph import * +from code_graph import SourceAnalyzer, File, Struct, Function, Graph class Test_C_Analyzer(unittest.TestCase): def test_analyzer(self): path = Path(__file__).parent analyzer = SourceAnalyzer() - g = Graph('test_c') - analyzer.graph = g - analyzer.analyze_sources(str(path)) + # Get the current file path + current_file_path = os.path.abspath(__file__) - f = g.get_file('/source_files/c', 'src.c', '.c') - self.assertEqual(File('/source_files/c', 'src.c', '.c'), f) + # Get the directory of the current file + current_dir = os.path.dirname(current_file_path) + + # Append 'source_files/c' to the current directory + path = os.path.join(current_dir, 
'source_files') + path = os.path.join(path, 'c') + path = str(path) + + g = Graph("c") + analyzer.analyze(path, g) + + f = g.get_file('', 'src.c', '.c') + self.assertEqual(File('', 'src.c', '.c'), f) s = g.get_struct_by_name('exp') - expected_s = Struct('/source_files/c/src.c', 'exp', '', 9, 13) + expected_s = Struct('src.c', 'exp', '', 9, 13) expected_s.add_field('i', 'int') expected_s.add_field('f', 'float') expected_s.add_field('data', 'char[]') @@ -24,7 +35,7 @@ def test_analyzer(self): add = g.get_function_by_name('add') - expected_add = Function('/source_files/c/src.c', 'add', '', 'int', '', 0, 7) + expected_add = Function('src.c', 'add', '', 'int', '', 0, 7) expected_add.add_argument('a', 'int') expected_add.add_argument('b', 'int') self.assertEqual(expected_add, add) @@ -32,7 +43,7 @@ def test_analyzer(self): main = g.get_function_by_name('main') - expected_main = Function('/source_files/c/src.c', 'main', '', 'int', '', 15, 18) + expected_main = Function('src.c', 'main', '', 'int', '', 15, 18) expected_main.add_argument('argv', 'const char**') expected_main.add_argument('argc', 'int') self.assertEqual(expected_main, main) @@ -48,3 +59,4 @@ def test_analyzer(self): self.assertEqual(len(callers), 2) self.assertIn('add', callers) self.assertIn('main', callers) + diff --git a/tests/test_git_history.py b/tests/test_git_history.py new file mode 100644 index 0000000..a4696af --- /dev/null +++ b/tests/test_git_history.py @@ -0,0 +1,226 @@ +import os +import unittest +from git import Repo +from code_graph import ( + Graph, + Project, + switch_commit +) + +repo = None # repository +graph = None # code graph +git_graph = None # git graph + +class Test_Git_History(unittest.TestCase): + @classmethod + def setUpClass(cls): + # This runs once before all tests in this class + + global repo + global graph + global git_graph + + # Get the current file path + current_file_path = os.path.abspath(__file__) + + # Get the directory of the current file + current_dir = 
os.path.dirname(current_file_path) + + # Append 'git_repo' to the current directory + repo_dir = os.path.join(current_dir, 'git_repo') + + # Checkout HEAD commit + repo = Repo(repo_dir) + repo.git.checkout("HEAD") + + proj = Project.from_local_repository(repo_dir) + graph = proj.analyze_sources() + git_graph = proj.process_git_history() + + def assert_file_not_exists(self, path: str, name: str, ext: str) -> None: + f = graph.get_file(path, name, ext) + self.assertIsNone(f) + + def assert_file_exists(self, path: str, name: str, ext: str) -> None: + f = graph.get_file(path, name, ext) + + self.assertIsNotNone(f) + self.assertEqual(f.ext, ext) + self.assertEqual(f.path, path) + self.assertEqual(f.name, name) + + def test_git_graph_structure(self): + # validate git graph structure + c = repo.commit("HEAD") + + while True: + commits = git_graph.get_commits([c.hexsha]) + + self.assertEqual(len(commits), 1) + actual = commits[0] + + self.assertEqual(c.hexsha, actual['hash']) + self.assertEqual(c.message, actual['message']) + self.assertEqual(c.author.name, actual['author']) + self.assertEqual(c.committed_date, actual['date']) + + # Advance to previous commit + if len(c.parents) == 0: + break + + c = c.parents[0] + + def test_git_transitions(self): + # our test git repo: + # + # commit df8d021dbae077a39693c1e76e8438006d62603e (HEAD, main) + # removed b.py + + # commit 5ec6b14612547393e257098e214ae7748ed12c50 + # added both b.py and c.py + + # commit c4332d05bc1b92a33012f2ff380b807d3fbb9c2e + # modified a.py + + # commit fac1698da4ee14c215316859e68841ae0b0275b0 + # created a.py + + #---------------------------------------------------------------------- + # Going backwards + #---------------------------------------------------------------------- + + # HEAD commit + switch_commit('git_repo', 'df8d021dbae077a39693c1e76e8438006d62603e') + + # a.py and c.py should exists + self.assert_file_exists("", "a.py", ".py") + self.assert_file_exists("", "c.py", ".py") + + # b.py should 
NOT exists + self.assert_file_not_exists("", "b.py", ".py") + + #----------------------------------------------------------------------- + # df8d02 -> 5ec6b1 + #----------------------------------------------------------------------- + + switch_commit('git_repo', '5ec6b14612547393e257098e214ae7748ed12c50') + + # a.py, b.py and c.py should exists + self.assert_file_exists("", "a.py", ".py") + self.assert_file_exists("", "b.py", ".py") + self.assert_file_exists("", "c.py", ".py") + + #----------------------------------------------------------------------- + # 5ec6b1 -> c4332d + #----------------------------------------------------------------------- + + switch_commit('git_repo', 'c4332d05bc1b92a33012f2ff380b807d3fbb9c2e') + + # only a.py, should exists + self.assert_file_exists("", "a.py", ".py") + + # b.py and c.py shouldn't exists + self.assert_file_not_exists("", "b.py", ".py") + self.assert_file_not_exists("", "c.py", ".py") + + #----------------------------------------------------------------------- + # c4332d -> fac169 + #----------------------------------------------------------------------- + + switch_commit('git_repo', 'fac1698da4ee14c215316859e68841ae0b0275b0') + + # only a.py, should exists + self.assert_file_exists("", "a.py", ".py") + + # b.py and c.py shouldn't exists + self.assert_file_not_exists("", "b.py", ".py") + self.assert_file_not_exists("", "c.py", ".py") + + #---------------------------------------------------------------------- + # Going forward + #---------------------------------------------------------------------- + + #----------------------------------------------------------------------- + # fac169 -> c4332d0 + #----------------------------------------------------------------------- + + switch_commit('git_repo', 'c4332d05bc1b92a33012f2ff380b807d3fbb9c2e') + + # only a.py, should exists + self.assert_file_exists("", "a.py", ".py") + + # b.py and c.py shouldn't exists + self.assert_file_not_exists("", "b.py", ".py") + 
self.assert_file_not_exists("", "c.py", ".py") + + #----------------------------------------------------------------------- + # c4332d0 -> 5ec6b14 + #----------------------------------------------------------------------- + + #import ipdb; ipdb.set_trace() + switch_commit('git_repo', '5ec6b14612547393e257098e214ae7748ed12c50') + + # a.py, b.py and c.py should exists + self.assert_file_exists("", "a.py", ".py") + self.assert_file_exists("", "b.py", ".py") + self.assert_file_exists("", "c.py", ".py") + + #----------------------------------------------------------------------- + # 5ec6b14 -> HEAD + #----------------------------------------------------------------------- + + switch_commit('git_repo', 'df8d021dbae077a39693c1e76e8438006d62603e') + + # a.py and c.py should exists + self.assert_file_exists("", "a.py", ".py") + self.assert_file_exists("", "c.py", ".py") + + # b.py should NOT exists + self.assert_file_not_exists("", "b.py", ".py") + + + def test_git_multi_commit_transition(self): + # our test git repo: + # + # commit df8d021dbae077a39693c1e76e8438006d62603e (HEAD, main) + # removed b.py + + # commit 5ec6b14612547393e257098e214ae7748ed12c50 + # added both b.py and c.py + + # commit c4332d05bc1b92a33012f2ff380b807d3fbb9c2e + # modified a.py + + # commit fac1698da4ee14c215316859e68841ae0b0275b0 + # created a.py + + # Start at the HEAD commit + switch_commit('git_repo', 'df8d021dbae077a39693c1e76e8438006d62603e') + + # a.py and c.py should exists + self.assert_file_exists("", "a.py", ".py") + self.assert_file_exists("", "c.py", ".py") + + # b.py should NOT exists + self.assert_file_not_exists("", "b.py", ".py") + + # Switch over to the very first commit fac1698da4ee14c215316859e68841ae0b0275b0 + + switch_commit('git_repo', 'fac1698da4ee14c215316859e68841ae0b0275b0') + + # a.py + self.assert_file_exists("", "a.py", ".py") + + # b.py and c.py should NOT exists + self.assert_file_not_exists("", "b.py", ".py") + self.assert_file_not_exists("", "c.py", ".py") + + # 
Switch back to HEAD + switch_commit('git_repo', 'df8d021dbae077a39693c1e76e8438006d62603e') + + # a.py and c.py should exists + self.assert_file_exists("", "a.py", ".py") + self.assert_file_exists("", "c.py", ".py") + + # b.py should NOT exists + self.assert_file_not_exists("", "b.py", ".py") diff --git a/tests/test_graph_ops.py b/tests/test_graph_ops.py index 35ea483..b688404 100644 --- a/tests/test_graph_ops.py +++ b/tests/test_graph_ops.py @@ -10,51 +10,51 @@ def setUp(self): self.g = self.db.select_graph('test') self.graph = Graph(name='test') - def _test_add_function(self): + def test_add_function(self): # Create function func = Function('/path/to/function', 'func', '', 'int', '', 1, 10) func.add_argument('x', 'int') func.add_argument('y', 'float') - func_id = self.graph.add_function(func) - self.assertEqual(func, self.graph.get_function(func_id)) + self.graph.add_function(func) + self.assertEqual(func, self.graph.get_function(func.id)) - def _test_add_file(self): + def test_add_file(self): file = File('/path/to/file', 'file', 'txt') - file_id = self.graph.add_file(file) - self.assertEqual(file, self.graph.get_file(file_id)) + self.graph.add_file(file) + self.assertEqual(file, self.graph.get_file('/path/to/file', 'file', 'txt')) - def _test_file_add_function(self): + def test_file_add_function(self): file = File('/path/to/file', 'file', 'txt') func = Function('/path/to/function', 'func', '', 'int', '', 1, 10) - file_id = self.graph.add_file(file) - func_id = self.graph.add_function(func) + self.graph.add_file(file) + self.graph.add_function(func) - self.graph.file_add_function(file_id=file_id, func_id=func_id) + self.graph.connect_entities("CONTAINS", file.id, func.id) query = """MATCH (file:File)-[:CONTAINS]->(func:Function) WHERE ID(func) = $func_id AND ID(file) = $file_id RETURN true""" - params = {'file_id': file_id, 'func_id': func_id} + params = {'file_id': file.id, 'func_id': func.id} res = self.g.query(query, params).result_set 
self.assertTrue(res[0][0]) - def _test_function_calls_function(self): + def test_function_calls_function(self): caller = Function('/path/to/function', 'func_A', '', 'int', '', 1, 10) callee = Function('/path/to/function', 'func_B', '', 'int', '', 11, 21) - caller_id = self.graph.add_function(caller) - callee_id = self.graph.add_function(callee) - self.graph.function_calls_function(caller_id, callee_id, 10) + self.graph.add_function(caller) + self.graph.add_function(callee) + self.graph.function_calls_function(caller.id, callee.id, 10) query = """MATCH (caller:Function)-[:CALLS]->(callee:Function) WHERE ID(caller) = $caller_id AND ID(callee) = $callee_id RETURN true""" - params = {'caller_id': caller_id, 'callee_id': callee_id} + params = {'caller_id': caller.id, 'callee_id': callee.id} res = self.g.query(query, params).result_set self.assertTrue(res[0][0]) diff --git a/tests/test_py_analyzer.py b/tests/test_py_analyzer.py index 2cb96da..7a21a96 100644 --- a/tests/test_py_analyzer.py +++ b/tests/test_py_analyzer.py @@ -1,39 +1,51 @@ +import os import unittest from pathlib import Path -from code_graph import * + +from code_graph import SourceAnalyzer, File, Class, Function, Graph class Test_PY_Analyzer(unittest.TestCase): def test_analyzer(self): path = Path(__file__).parent analyzer = SourceAnalyzer() - g = Graph('test_py') - analyzer.graph = g - analyzer.analyze_sources(str(path)) + # Get the current file path + current_file_path = os.path.abspath(__file__) + + # Get the directory of the current file + current_dir = os.path.dirname(current_file_path) + + # Append 'source_files/c' to the current directory + path = os.path.join(current_dir, 'source_files') + path = os.path.join(path, 'py') + path = str(path) - f = g.get_file('/source_files/py', 'src.py', '.py') - self.assertEqual(File('/source_files/py', 'src.py', '.py'), f) + g = Graph("py") + analyzer.analyze(path, g) + + f = g.get_file('', 'src.py', '.py') + self.assertEqual(File('', 'src.py', '.py'), f) log = 
g.get_function_by_name('log') - expected_log = Function('/source_files/py/src.py', 'log', None, 'None', '', 0, 1) + expected_log = Function('src.py', 'log', None, 'None', '', 0, 1) expected_log.add_argument('msg', 'str') self.assertEqual(expected_log, log) abort = g.get_function_by_name('abort') - expected_abort = Function('/source_files/py/src.py', 'abort', None, 'Task', '', 9, 11) + expected_abort = Function('src.py', 'abort', None, 'Task', '', 9, 11) expected_abort.add_argument('self', 'Unknown') expected_abort.add_argument('delay', 'float') self.assertEqual(expected_abort, abort) init = g.get_function_by_name('__init__') - expected_init = Function('/source_files/py/src.py', '__init__', None, None, '', 4, 7) + expected_init = Function('src.py', '__init__', None, None, '', 4, 7) expected_init.add_argument('self', 'Unknown') expected_init.add_argument('name', 'str') expected_init.add_argument('duration', 'int') self.assertEqual(expected_init, init) task = g.get_class_by_name('Task') - expected_task = Class('/source_files/py/src.py', 'Task', None, 3, 11) + expected_task = Class('src.py', 'Task', None, 3, 11) self.assertEqual(expected_task, task) callees = g.function_calls(abort.id) @@ -46,3 +58,4 @@ def test_analyzer(self): self.assertIn('__init__', callers) self.assertIn('log', callers) +