diff --git a/.dockerignore b/.dockerignore index 72d14322f1..ed33d0f4c8 100644 --- a/.dockerignore +++ b/.dockerignore @@ -43,7 +43,7 @@ build/ .env.local .env.*.local -# Large data files +# Large data files (LFS archives only) data/* !data/.lfs/ diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000..b6a3420c77 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,30 @@ +## Problem + + + + +Closes DIM-XXX + +## Solution + + + + + +## Breaking Changes + + + + + +## How to Test + + + +## Contributor License Agreement + +- [ ] I have read and approved the [CLA](https://github.com/dimensionalOS/dimos/blob/main/CLA.md). diff --git a/.github/workflows/code-cleanup.yml b/.github/workflows/code-cleanup.yml index 48f6ea281e..745f3852c1 100644 --- a/.github/workflows/code-cleanup.yml +++ b/.github/workflows/code-cleanup.yml @@ -1,6 +1,6 @@ name: code-cleanup on: - push: + pull_request: paths-ignore: - '**.md' diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 5dc19917e5..03de5c3d15 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -7,8 +7,6 @@ on: paths-ignore: - '**.md' pull_request: - paths-ignore: - - '**.md' permissions: contents: read @@ -22,6 +20,7 @@ jobs: ros: ${{ steps.filter.outputs.ros }} python: ${{ steps.filter.outputs.python }} dev: ${{ steps.filter.outputs.dev }} + navigation: ${{ steps.filter.outputs.navigation }} tests: ${{ steps.filter.outputs.tests }} branch-tag: ${{ steps.set-tag.outputs.branch_tag }} steps: @@ -52,6 +51,11 @@ jobs: dev: - docker/dev/** + navigation: + - .github/workflows/_docker-build-template.yml + - .github/workflows/docker.yml + - docker/navigation/** + tests: - dimos/** @@ -130,6 +134,57 @@ jobs: to-image: ghcr.io/dimensionalos/dev:${{ needs.check-changes.outputs.branch-tag }} dockerfile: dev + navigation: + needs: [check-changes] + if: needs.check-changes.outputs.navigation == 'true' + runs-on: 
[self-hosted, Linux] + permissions: + contents: read + packages: write + steps: + - name: Fix permissions + run: | + sudo chown -R $USER:$USER ${{ github.workspace }} || true + + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Checkout ros-navigation-autonomy-stack + uses: actions/checkout@v4 + with: + repository: dimensionalOS/ros-navigation-autonomy-stack + ref: fastlio2 + path: docker/navigation/ros-navigation-autonomy-stack + fetch-depth: 1 + lfs: false + token: ${{ secrets.NAV_REPO_READ_TOKEN }} + + - uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - uses: crazy-max/ghaction-github-runtime@v3 + + - uses: docker/setup-buildx-action@v3 + with: + driver: docker-container + install: true + use: true + + - uses: docker/build-push-action@v6 + with: + push: true + context: . + file: docker/navigation/Dockerfile + tags: ghcr.io/dimensionalos/navigation:${{ needs.check-changes.outputs.branch-tag }} + cache-from: type=gha,scope=navigation + cache-to: type=gha,mode=max,scope=navigation + build-args: | + ROS_DISTRO=humble + ros-dev: needs: [check-changes, ros-python] if: always() @@ -146,92 +201,94 @@ jobs: run-ros-tests: needs: [check-changes, ros-dev] - if: always() + if: ${{ + always() && + needs.check-changes.result == 'success' && + (needs.check-changes.outputs.tests == 'true' || + needs.check-changes.outputs.ros == 'true' || + needs.check-changes.outputs.python == 'true' || + needs.check-changes.outputs.dev == 'true') + }} uses: ./.github/workflows/tests.yml secrets: inherit with: - should-run: ${{ - needs.check-changes.result == 'success' && - ((needs.ros-dev.result == 'success') || - (needs.ros-dev.result == 'skipped' && - needs.check-changes.outputs.tests == 'true')) - }} cmd: "pytest && pytest -m ros" # run tests that depend on ros as well dev-image: ros-dev:${{ (needs.check-changes.outputs.python == 'true' || needs.check-changes.outputs.dev == 'true' || 
needs.check-changes.outputs.ros == 'true') && needs.ros-dev.result == 'success' && needs.check-changes.outputs.branch-tag || 'dev' }} run-tests: needs: [check-changes, dev] - if: always() + if: ${{ + always() && + needs.check-changes.result == 'success' && + (needs.check-changes.outputs.tests == 'true' || + needs.check-changes.outputs.python == 'true' || + needs.check-changes.outputs.dev == 'true') + }} uses: ./.github/workflows/tests.yml secrets: inherit with: - should-run: ${{ - needs.check-changes.result == 'success' && - ((needs.dev.result == 'success') || - (needs.dev.result == 'skipped' && - needs.check-changes.outputs.tests == 'true')) - }} cmd: "pytest" dev-image: dev:${{ (needs.check-changes.outputs.python == 'true' || needs.check-changes.outputs.dev == 'true') && needs.dev.result == 'success' && needs.check-changes.outputs.branch-tag || 'dev' }} # we run in parallel with normal tests for speed run-heavy-tests: needs: [check-changes, dev] - if: always() + if: ${{ + always() && + needs.check-changes.result == 'success' && + (needs.check-changes.outputs.tests == 'true' || + needs.check-changes.outputs.python == 'true' || + needs.check-changes.outputs.dev == 'true') + }} uses: ./.github/workflows/tests.yml secrets: inherit with: - should-run: ${{ - needs.check-changes.result == 'success' && - ((needs.dev.result == 'success') || - (needs.dev.result == 'skipped' && - needs.check-changes.outputs.tests == 'true')) - }} cmd: "pytest -m heavy" dev-image: dev:${{ (needs.check-changes.outputs.python == 'true' || needs.check-changes.outputs.dev == 'true') && needs.dev.result == 'success' && needs.check-changes.outputs.branch-tag || 'dev' }} run-lcm-tests: needs: [check-changes, dev] - if: always() + if: ${{ + always() && + needs.check-changes.result == 'success' && + (needs.check-changes.outputs.tests == 'true' || + needs.check-changes.outputs.python == 'true' || + needs.check-changes.outputs.dev == 'true') + }} uses: ./.github/workflows/tests.yml secrets: inherit 
with: - should-run: ${{ - needs.check-changes.result == 'success' && - ((needs.dev.result == 'success') || - (needs.dev.result == 'skipped' && - needs.check-changes.outputs.tests == 'true')) - }} cmd: "pytest -m lcm" dev-image: dev:${{ (needs.check-changes.outputs.python == 'true' || needs.check-changes.outputs.dev == 'true') && needs.dev.result == 'success' && needs.check-changes.outputs.branch-tag || 'dev' }} run-integration-tests: needs: [check-changes, dev] - if: always() + if: ${{ + always() && + needs.check-changes.result == 'success' && + (needs.check-changes.outputs.tests == 'true' || + needs.check-changes.outputs.python == 'true' || + needs.check-changes.outputs.dev == 'true') + }} uses: ./.github/workflows/tests.yml secrets: inherit with: - should-run: ${{ - needs.check-changes.result == 'success' && - ((needs.dev.result == 'success') || - (needs.dev.result == 'skipped' && - needs.check-changes.outputs.tests == 'true')) - }} cmd: "pytest -m integration" dev-image: dev:${{ (needs.check-changes.outputs.python == 'true' || needs.check-changes.outputs.dev == 'true') && needs.dev.result == 'success' && needs.check-changes.outputs.branch-tag || 'dev' }} run-mypy: needs: [check-changes, ros-dev] - if: always() + if: ${{ + always() && + needs.check-changes.result == 'success' && + (needs.check-changes.outputs.tests == 'true' || + needs.check-changes.outputs.ros == 'true' || + needs.check-changes.outputs.python == 'true' || + needs.check-changes.outputs.dev == 'true') + }} uses: ./.github/workflows/tests.yml secrets: inherit with: - should-run: ${{ - needs.check-changes.result == 'success' && - ((needs.ros-dev.result == 'success') || - (needs.ros-dev.result == 'skipped' && - needs.check-changes.outputs.tests == 'true')) - }} cmd: "MYPYPATH=/opt/ros/humble/lib/python3.10/site-packages mypy dimos" dev-image: ros-dev:${{ (needs.check-changes.outputs.python == 'true' || needs.check-changes.outputs.dev == 'true' || needs.check-changes.outputs.ros == 'true') && 
needs.ros-dev.result == 'success' && needs.check-changes.outputs.branch-tag || 'dev' }} @@ -269,3 +326,16 @@ jobs: # CI: "true" # run: | # /entrypoint.sh bash -c "pytest -m module" + + ci-complete: + needs: [check-changes, ros, python, ros-python, dev, ros-dev, run-tests, run-heavy-tests, run-lcm-tests, run-integration-tests, run-ros-tests, run-mypy] + runs-on: [self-hosted, Linux] + if: always() + steps: + - name: CI gate + if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') }} + run: | + echo "❌ One or more CI jobs failed or were cancelled" + exit 1 + - name: CI passed + run: echo "✅ All CI checks passed or were intentionally skipped" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e84d7d43d2..25273238dc 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -3,10 +3,6 @@ name: tests on: workflow_call: inputs: - should-run: - required: false - type: boolean - default: true dev-image: required: true type: string @@ -20,22 +16,6 @@ permissions: packages: read jobs: - - # cleanup: - # runs-on: dimos-runner-ubuntu-2204 - # steps: - # - name: exit early - # if: ${{ !inputs.should-run }} - # run: | - # exit 0 - - # - name: Free disk space - # run: | - # sudo rm -rf /opt/ghc - # sudo rm -rf /usr/share/dotnet - # sudo rm -rf /usr/local/share/boost - # sudo rm -rf /usr/local/lib/android - run-tests: runs-on: [self-hosted, Linux] container: diff --git a/.gitignore b/.gitignore index e52d08ba32..24a3dd8919 100644 --- a/.gitignore +++ b/.gitignore @@ -54,6 +54,7 @@ yolo11n.pt # symlink one of .envrc.* if you'd like to use .envrc .claude +**/CLAUDE.md .direnv/ /logs @@ -64,3 +65,7 @@ yolo11n.pt *mobileclip* /results +**/cpp/result + +CLAUDE.MD +/assets/teleop_certs/ diff --git a/CLA.md b/CLA.md new file mode 100644 index 0000000000..507849309c --- /dev/null +++ b/CLA.md @@ -0,0 +1,24 @@ +## Dimensional OS Individual Contributor License Agreement + +In order to clarify the intellectual 
property license granted with Contributions from any person or entity, **Dimensional Inc.** ("**Dimensional**") must have a Contributor License Agreement ("CLA") on file that has been signed by each Contributor, indicating agreement to the license terms below. This license is for your protection as a Contributor as well as the protection of Dimensional; it does not change your rights to use your own Contributions for any other purpose. + +You accept and agree to the following terms and conditions for Your present and future Contributions submitted to Dimensional. Except for the license granted herein to Dimensional and recipients of software distributed by Dimensional, You reserve all right, title, and interest in and to Your Contributions. + +1. Definitions. + "You" (or "Your") shall mean the copyright owner or legal entity authorized by the copyright owner that is making this Agreement with Dimensional. For legal entities, the entity making a Contribution and all other entities that control, are controlled by, or are under common control with that entity are considered to be a single Contributor. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + + "Contribution" shall mean any original work of authorship, including any modifications or additions to an existing work, that is intentionally submitted by You to Dimensional for inclusion in, or documentation of, any of the products owned or managed by Dimensional (the "Work"). 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to Dimensional or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, Dimensional for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by You as "Not a Contribution." + +2. Grant of Copyright License. Subject to the terms and conditions of this Agreement, You hereby grant to Dimensional and to recipients of software distributed by Dimensional a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute Your Contributions and such derivative works. + +3. Grant of Patent License. Subject to the terms and conditions of this Agreement, You hereby grant to Dimensional and to recipients of software distributed by Dimensional a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by You that are necessarily infringed by Your Contribution(s) alone or by combination of Your Contribution(s) with the Work to which such Contribution(s) was submitted. If any entity institutes patent litigation against You or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that your Contribution, or the Work to which you have contributed, constitutes direct or contributory patent infringement, then any patent licenses granted to that entity under this Agreement for that Contribution or Work shall terminate as of the date such litigation is filed. + +4. 
You represent that you are legally entitled to grant the above license. If your employer(s) has rights to intellectual property that you create that includes your Contributions, you represent that you have received permission to make Contributions on behalf of that employer, that your employer has waived such rights for your Contributions to Dimensional, or that your employer has executed a separate Corporate CLA with Dimensional. + +5. You represent that each of Your Contributions is Your original creation (see section 7 for submissions on behalf of others). You represent that Your Contribution submissions include complete details of any third-party license or other restriction (including, but not limited to, related patents and trademarks) of which you are personally aware and which are associated with any part of Your Contributions. + +6. You are not expected to provide support for Your Contributions, except to the extent You desire to provide support. You may provide support for free, for a fee, or not at all. Unless required by applicable law or agreed to in writing, You provide Your Contributions on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. + +7. Should You wish to submit work that is not Your original creation, You may submit it to Dimensional separately from any Contribution, identifying the complete details of its source and of any license or other restriction (including, but not limited to, related patents, trademarks, and license agreements) of which you are personally aware, and conspicuously marking the work as "Submitted on behalf of a third-party: [named here]." + +8. You agree to notify Dimensional of any facts or circumstances of which you become aware that would make these representations inaccurate in any respect. 
diff --git a/README.md b/README.md index 5c8ab2128a..a84fe11b0e 100644 --- a/README.md +++ b/README.md @@ -1,158 +1,185 @@ +
- banner_bordered_trimmed -

Program Atoms

-

The Agentive Operating System for Generalist Robotics

+banner_bordered_trimmed -
+

The Agentive Operating System for Generalist Robotics

-[![Discord](https://img.shields.io/discord/1341146487186391173?style=flat-square&logo=discord&logoColor=white&label=Discord&color=5865F2)](https://discord.gg/8m6HMArf) +[![Discord](https://img.shields.io/discord/1341146487186391173?style=flat-square&logo=discord&logoColor=white&label=Discord&color=5865F2)](https://discord.gg/dimos) [![Stars](https://img.shields.io/github/stars/dimensionalOS/dimos?style=flat-square)](https://github.com/dimensionalOS/dimos/stargazers) [![Forks](https://img.shields.io/github/forks/dimensionalOS/dimos?style=flat-square)](https://github.com/dimensionalOS/dimos/fork) [![Contributors](https://img.shields.io/github/contributors/dimensionalOS/dimos?style=flat-square)](https://github.com/dimensionalOS/dimos/graphs/contributors) -
![Nix](https://img.shields.io/badge/Nix-flakes-5277C3?style=flat-square&logo=NixOS&logoColor=white) ![NixOS](https://img.shields.io/badge/NixOS-supported-5277C3?style=flat-square&logo=NixOS&logoColor=white) -![CUDA](https://img.shields.io/badge/CUDA-12.x-76B900?style=flat-square&logo=nvidia&logoColor=white) +![CUDA](https://img.shields.io/badge/CUDA-supported-76B900?style=flat-square&logo=nvidia&logoColor=white) [![Docker](https://img.shields.io/badge/Docker-ready-2496ED?style=flat-square&logo=docker&logoColor=white)](https://www.docker.com/) -

- Features • - Installation • - Documentation • - Development • - Contributing -

- -
- -> \[!NOTE] -> -> ⚠️ **Alpha Pre-Release: Expect Breaking Changes** ⚠️ + -# The Dimensional Framework +[Hardware](#hardware) • +[Installation](#installation) • +[Development](#development) • +[Multi Language](#multi-language-support) • +[ROS](#ros-interop) -Dimensional is the open-source, universal operating system for generalist robotics. On DimOS, developers -can design, build, and run physical ("dimensional") applications that run on any humanoid, quadruped, -drone, or wheeled embodiment. +⚠️ **Alpha Pre-Release: Expect Breaking Changes** ⚠️ -**Programming physical robots is now as simple as programming digital software**: Composable, Modular, Repeatable. + -Core Features: -- **Navigation:** Production navigation stack for any robot with lidar: SLAM, terrain analysis, collision - avoidance, route planning, exploration. -- **Dashboard:** The DimOS command center gives developers the tooling to debug, visualize, compose, and - test dimensional applications in real-time. Control your robot via waypoint, agent query, keyboard, - VR, more. -- **Modules:** Standalone components (equivalent to ROS nodes) that publish and subscribe to typed - In/Out streams that communicate over DimOS transports. The building blocks of Dimensional. -- **Agents (experimental):** DimOS agents understand physical space, subscribe to sensor streams, and call - **physical** tools. Emergence appears when agents have physical agency. -- **MCP (experimental):** Vibecode robots by giving your AI editor (Cursor, Claude Code) MCP access to run - physical commands (move forward 1 meter, jump, etc.). -- **Manipulation (unreleased)** Classical (OMPL, IK, GraspGen), Agentive (TAMP), and VLA-native manipulation stack runs out-of-the-box on any DimOS supported arm embodiment. -- **Transport/Middleware:** DimOS native Python transport supports LCM, DDS, and SHM, plus ROS 2. -- **Robot integrations:** We integrate with the majority of hardware OEMs and are moving fast to cover - them all. 
Supported and/or immediate roadmap: + - | Category | Platforms | - | --- | --- | - | Quadrupeds | Unitree Go2, Unitree B1, AGIBOT D1 Max/Pro, Dobot Rover | - | Drones | DJI Mavic 2, Holybro x500 | - | Humanoids | Unitree G1, Booster K1, AGIBOT X2, ABIBOT A2 | - | Arms | OpenARMs, xARM 6/7, AgileX Piper, HighTorque Pantera | +# Intro + +Dimensional is the modern operating system for generalist robotics. We are setting the next-generation SDK standard, integrating with the majority of robot manufacturers. + +With a simple install and no ROS required, build physical applications entirely in python that run on any humanoid, quadruped, or drone. + +Dimensional is agent native -- "vibecode" your robots in natural language and build (local & hosted) multi-agent systems that work seamlessly with your hardware. Agents run as native modules — subscribing to any embedded stream, from perception (lidar, camera) and spatial memory down to control loops and motor drivers. + + + + + + + + + + + + + + + + + +
+ Navigation + + Perception +
+

Navigation and Mapping

+ SLAM, dynamic obstacle avoidance, route planning, and autonomous exploration — via both DimOS native and ROS
Watch video +
+

Perception

+ Detectors, 3d projections, VLMs, Audio processing +
+ Agents + + Spatial Memory +
+

Agentive Control, MCP

+ "hey Robot, go find the kitchen"
Watch video +
+

Spatial Memory

+ Spatio-temporal RAG, Dynamic memory, Object localization and permanence
Watch video +
+ + +# Hardware + + + + + + + + + + + + + + + + + +
+

Quadruped

+ +
+

Humanoid

+ +
+

Arm

+ +
+

Drone

+ +
+

Misc

+ +
+ 🟩 Unitree Go2 pro/air
+ 🟥 Unitree B1
+
+ 🟨 Unitree G1
+
+ 🟥 Xarm
+ 🟥 AgileX Piper
+
+ 🟥 Mavlink
+ 🟥 DJI SDK
+
+ 🟥 Force Torque Sensor
+
+
+
+🟩 stable 🟨 beta 🟧 alpha 🟥 experimental -# Getting Started +
-## Installation +# Installation -Supported/tested matrix: +## System Install -| Platform | Status | Tested | Required System deps | -| --- | --- | --- | --- | -| Linux | supported | Ubuntu 22.04, 24.04 | See below | -| macOS | experimental beta | not CI-tested | `brew install gnu-sed gcc portaudio git-lfs libjpeg-turbo python` | +To set up your system dependencies, follow one of these guides: -Note: macOS is usable but expect inconsistent/flaky behavior (rather than hard errors/crashes). +- 🟩 [Ubuntu 22.04 / 24.04](docs/installation/ubuntu.md) +- 🟩 [NixOS / General Linux](docs/installation/nix.md) +- 🟧 [macOS](docs/installation/osx.md) -```sh -sudo apt-get update -sudo apt-get install -y curl g++ portaudio19-dev git-lfs libturbojpeg python3-dev -# install uv for python -curl -LsSf https://astral.sh/uv/install.sh | sh && export PATH="$HOME/.local/bin:$PATH" -``` +## Python Install -Option 1: Install in a virtualenv +### Quickstart -```sh +```bash +uv venv --python "3.12" +source .venv/bin/activate +uv pip install dimos[base,unitree] -uv venv && . 
.venv/bin/activate -uv pip install 'dimos[base,unitree]' -# replay recorded data to test that the system is working -# IMPORTANT: First replay run will show a black rerun window while 2.4 GB downloads from LFS +# Replay a recorded Go2 session (no hardware needed) +# NOTE: First run will show a black rerun window while ~2.4 GB downloads from LFS dimos --replay run unitree-go2 ``` -Option 2: Run without installing - -```sh -uvx --from 'dimos[base,unitree]' dimos --replay run unitree-go2 -``` - -### Test Installation - -#### Control a robot in a simulation (no robot required) +```bash +# Install with simulation support +uv pip install dimos[base,unitree,sim] +# Run Go2 in MuJoCo simulation +dimos --simulation run unitree-go2 -```sh -export DISPLAY=:1 # Or DISPLAY=:0 if getting GLFW/OpenGL X11 errors -# ignore the warp warnings -dimos --viewer-backend rerun-web --simulation run unitree-go2 +# Run G1 humanoid in simulation +dimos --simulation run unitree-g1-sim ``` -#### Control a real robot (Unitree Go2 over WebRTC) - -```sh +```bash +# Control a real robot (Unitree Go2 over WebRTC) export ROBOT_IP= -dimos --viewer-backend rerun-web run unitree-go2 +dimos run unitree-go2 ``` -After running dimOS open http://localhost:7779 to control robot movement. +# Usage -#### Dimensional Agents +## Use DimOS as a Library -> \[!NOTE] -> -> **Experimental Beta: Potential unstoppable robot sentience** - -```sh -export OPENAI_API_KEY= -dimos --viewer-backend rerun-web run unitree-go2-agentic -``` - -After running that, open a new terminal and run the following to start giving instructions to the agent. -```sh -# activate the venv in this new terminal -source .venv/bin/activate - -# then tell the agent "explore the room" -# then tell it to go to something, ex: "go to the door" -humancli -``` - -# The Dimensional Library - -### Modules - -Modules are subsystems on a robot that operate autonomously and communicate with other subsystems using standardized messages. 
See below a simple robot connection module that sends streams of continuous `cmd_vel` to the robot and recieves `color_image` to a simple `Listener` module. +See below a simple robot connection module that sends streams of continuous `cmd_vel` to the robot and receives `color_image` to a simple `Listener` module. DimOS Modules are subsystems on a robot that communicate with other modules using standardized messages. ```py import threading, time, numpy as np -from dimos.core import In, Module, Out, rpc -from dimos.core.blueprints import autoconnect +from dimos.core import In, Module, Out, rpc, autoconnect from dimos.msgs.geometry_msgs import Twist -from dimos.msgs.sensor_msgs import Image -from dimos.msgs.sensor_msgs.image_impls.AbstractImage import ImageFormat +from dimos.msgs.sensor_msgs import Image, ImageFormat class RobotConnection(Module): cmd_vel: In[Twist] @@ -186,70 +213,68 @@ if __name__ == "__main__": ).build().loop() ``` -### Blueprints +## Blueprints -Blueprints are how robots are constructed on Dimensional; instructions for how to construct and wire modules. You compose them with -`autoconnect(...)`, which connects streams by `(name, type)` and returns a `ModuleBlueprintSet`. +Blueprints are instructions for how to construct and wire modules. We compose them with +`autoconnect(...)`, which connects streams by `(name, type)` and returns a `Blueprint`. Blueprints can be composed, remapped, and have transports overridden if `autoconnect()` fails due to conflicting variable names or `In[]` and `Out[]` message types. A blueprint example that connects the image stream from a robot to an LLM Agent for reasoning and action execution. 
```py -from dimos.core.blueprints import autoconnect -from dimos.core.transport import LCMTransport +from dimos.core import autoconnect, LCMTransport from dimos.msgs.sensor_msgs import Image -from dimos.robot.unitree.connection.go2 import go2_connection -from dimos.agents.agent import llm_agent +from dimos.robot.unitree.go2.connection import go2_connection +from dimos.agents.agent import agent blueprint = autoconnect( go2_connection(), - llm_agent(), + agent(), ).transports({("color_image", Image): LCMTransport("/color_image", Image)}) # Run the blueprint -blueprint.build().loop() +if __name__ == "__main__": + blueprint.build().loop() ``` -# Development +## Library API -```sh -GIT_LFS_SKIP_SMUDGE=1 git clone -b dev https://github.com/dimensionalOS/dimos.git -cd dimos -``` +- [Modules](docs/usage/modules.md) +- [LCM](docs/usage/lcm.md) +- [Blueprints](docs/usage/blueprints.md) +- [Transports](docs/usage/transports/index.md) +- [Data Streams](docs/usage/data_streams/README.md) +- [Configuration](docs/usage/configuration.md) +- [Visualization](docs/usage/visualization.md) -Then pick one of two development paths: +## Demos -Option A: Devcontainer -```sh -./bin/dev -``` +DimOS Demo -Option B: Editable install with uv -```sh -uv venv && . .venv/bin/activate -uv pip install -e '.[base,dev]' -``` +# Development -For system deps, Nix setups, and testing, see `/docs/development/README.md`. +## Develop on DimOS -### Monitoring & Debugging +```sh +export GIT_LFS_SKIP_SMUDGE=1 +git clone -b dev https://github.com/dimensionalOS/dimos.git +cd dimos -DimOS comes with a number of monitoring tools: -- Run `lcmspy` to see how fast messages are being published on streams. -- Run `skillspy` to see how skills are being called, how long they are running, which are active, etc. -- Run `agentspy` to see the agent's status over time. -- If you suspect there is a bug within DimOS itself, you can enable extreme logging by prefixing the dimos command with `DIMOS_LOG_LEVEL=DEBUG RERUN_SAVE=1 `. 
Ex: `DIMOS_LOG_LEVEL=DEBUG RERUN_SAVE=1 dimos --replay run unitree-go2` +uv sync --all-extras --no-extra dds + +# Run fast test suite +uv run pytest dimos +``` +## Multi Language Support -# Documentation +Python is our glue and prototyping language, but we support many languages via LCM interop. -Concepts: -- [Modules](/docs/concepts/modules.md): The building blocks of DimOS, modules run in parallel and are singleton python classes. -- [Streams](/docs/api/sensor_streams/index.md): How modules communicate, a Pub / Sub system. -- [Blueprints](/dimos/core/README_BLUEPRINTS.md): a way to group modules together and define their connections to each other. -- [RPC](/dimos/core/README_BLUEPRINTS.md#calling-the-methods-of-other-modules): how one module can call a method on another module (arguments get serialized to JSON-like binary data). -- [Skills](/dimos/core/README_BLUEPRINTS.md#defining-skills): An RPC function, except it can be called by an AI agent (a tool for an AI). +Check our language interop examples: +- [C++](examples/language-interop/cpp/) +- [Lua](examples/language-interop/lua/) +- [TypeScript](examples/language-interop/ts/) -## Contributing +## ROS interop -We welcome contributions! See our [Bounty List](https://docs.google.com/spreadsheets/d/1tzYTPvhO7Lou21cU6avSWTQOhACl5H8trSvhtYtsk8U/edit?usp=sharing) for open requests for contributions. If you would like to suggest a feature or sponsor a bounty, open an issue. 
+For researchers, we can talk to ROS directly via [ROS Transports](docs/usage/transports/index.md), or host dockerized ROS deployments as first-class DimOS modules, allowing you easy installation and portability diff --git a/README_installation.md b/README_installation.md deleted file mode 100644 index dc9117798f..0000000000 --- a/README_installation.md +++ /dev/null @@ -1,136 +0,0 @@ -# DimOS - -## Installation - -Clone the repo: - -```bash -git clone -b main --single-branch https://github.com/dimensionalOS/dimos.git -cd dimos -``` - -### System dependencies - -Tested on Ubuntu 22.04/24.04. - -```bash -sudo apt update -sudo apt install git-lfs python3-venv python3-pyaudio portaudio19-dev libturbojpeg0-dev -``` - -### Python dependencies - -Install `uv` by [following their instructions](https://docs.astral.sh/uv/getting-started/installation/) or just run: - -```bash -curl -LsSf https://astral.sh/uv/install.sh | sh -``` - -Install Python dependencies: - -```bash -uv sync -``` - -Depending on what you want to test you might want to install more optional dependencies as well (recommended): - -```bash -uv sync --extra dev --extra cpu --extra sim --extra drone -``` - -### Install Foxglove Studio (robot visualization and control) - -> **Note:** This will be obsolete once we finish our migration to open source [Rerun](https://rerun.io/). - -Download and install [Foxglove Studio](https://foxglove.dev/download): - -```bash -wget https://get.foxglove.dev/desktop/latest/foxglove-studio-latest-linux-amd64.deb -sudo apt install ./foxglove-studio-*.deb -``` - -[Register an account](https://app.foxglove.dev/signup) to use it. - -Open Foxglove Studio: - -```bash -foxglove-studio -``` - -To connect and load our dashboard: - -1. Click on "Open connection" -2. In the popup window, leave the WebSocket URL as `ws://localhost:8765` and click "Open" -3. In the top right, click on the "Default" dropdown, then "Import from file..." -4. 
Navigate to the `dimos` repo and select `assets/foxglove_dashboards/unitree.json` - -### Test the install - -Run the Python tests: - -```bash -uv run pytest dimos -``` - -They should all pass in about 3 minutes. - -### Test a robot replay - -Run the system by playing back recorded data from a robot (the replay data is automatically downloaded via Git LFS): - -```bash -uv run dimos --replay run unitree-go2-basic -``` - -You can visualize the robot data in Foxglove Studio. - -### Run a simulation - -```bash -uv run dimos --simulation run unitree-go2-basic -``` - -This will open a MuJoCo simulation window. You can also visualize data in Foxglove. - -If you want to also teleoperate the simulated robot run: - -```bash -uv run dimos --simulation run unitree-go2-basic --extra-module keyboard_teleop -``` - -This will also open a Keyboard Teleop window. Focus on the window and use WASD to control the robot. - -### Command center - -You can also control the robot from the `command-center` extension to Foxglove. - -First, pull the LFS file: - -```bash -git lfs pull --include="assets/dimensional.command-center-extension-0.0.1.foxe" -``` - -To install it, drag that file over the Foxglove Studio window. The extension will be installed automatically. Then, click on the "Add panel" icon on the top right and add "command-center". - -You can now click on the map to give it a travel goal, or click on "Start Keyboard Control" to teleoperate it. - -### Using `dimos` in your code - -If you want to use dimos in your own project (not the cloned repo), you can install it as a dependency: - -```bash -uv add dimos -``` - -Note, a few dependencies do not have PyPI packages and need to be installed from their Git repositories. 
These are only required for specific features: - -- **CLIP** and **detectron2**: Required for the Detic open-vocabulary object detector -- **contact_graspnet_pytorch**: Required for robotic grasp prediction - -You can install them with: - -```bash -uv add git+https://github.com/openai/CLIP.git -uv add git+https://github.com/dimensionalOS/contact_graspnet_pytorch.git -uv add git+https://github.com/facebookresearch/detectron2.git -``` diff --git a/assets/dimensional-dark.svg b/assets/dimensional-dark.svg new file mode 100644 index 0000000000..95edaadc0e --- /dev/null +++ b/assets/dimensional-dark.svg @@ -0,0 +1,23 @@ + + + + diff --git a/assets/dimensional-light.svg b/assets/dimensional-light.svg new file mode 100644 index 0000000000..f0a107bd10 --- /dev/null +++ b/assets/dimensional-light.svg @@ -0,0 +1,23 @@ + + + + diff --git a/assets/dimensional-text.svg b/assets/dimensional-text.svg new file mode 100644 index 0000000000..0cf32b3d19 --- /dev/null +++ b/assets/dimensional-text.svg @@ -0,0 +1,20 @@ + + + + diff --git a/assets/dimensional.svg b/assets/dimensional.svg new file mode 100644 index 0000000000..f36a0dea40 --- /dev/null +++ b/assets/dimensional.svg @@ -0,0 +1,23 @@ + + + + diff --git a/assets/readme/agentic_control.gif b/assets/readme/agentic_control.gif new file mode 100644 index 0000000000..f9f5970441 --- /dev/null +++ b/assets/readme/agentic_control.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eb0411de5e5967be8773d5d95e692a6a5859f75bb400164451a3b383b1025fb4 +size 2416274 diff --git a/assets/readme/agents.png b/assets/readme/agents.png new file mode 100644 index 0000000000..b05bee0b03 --- /dev/null +++ b/assets/readme/agents.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a255d32f9a0ecff12d99dda9b8a51e0958ac282d7a0f814f93fd39261afaf84d +size 477123 diff --git a/assets/readme/dimos_demo.gif b/assets/readme/dimos_demo.gif new file mode 100644 index 0000000000..5a68bd72ac --- /dev/null +++ 
b/assets/readme/dimos_demo.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fda7f7a859ce98002e0faef88fb2942f395e19995b36b585c48447ec5a9435ee +size 24011189 diff --git a/assets/readme/lidar.gif b/assets/readme/lidar.gif new file mode 100644 index 0000000000..8302c2957d --- /dev/null +++ b/assets/readme/lidar.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d47badc970572aa7badf98c908490c8b86ea9f1cafbb18507cfdb5d08655cdfb +size 5900150 diff --git a/assets/readme/lidar.png b/assets/readme/lidar.png new file mode 100644 index 0000000000..1b499de10f --- /dev/null +++ b/assets/readme/lidar.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:65b1797fd9ac8edae5dce0691397b6aca2e975badfd58462ed8e20a4dace655e +size 927067 diff --git a/assets/readme/navigation.gif b/assets/readme/navigation.gif new file mode 100644 index 0000000000..1402b1e85a --- /dev/null +++ b/assets/readme/navigation.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:64e7965f421916cdb71667a9ed99eab96c14c64bd195bd483628d1b9b9a4e95c +size 4395592 diff --git a/assets/readme/navigation.png b/assets/readme/navigation.png new file mode 100644 index 0000000000..16819a5007 --- /dev/null +++ b/assets/readme/navigation.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:472cabca4b0d661658bf9ffbde78e636668e9ef6499dc38ea0f552557d735bd9 +size 617989 diff --git a/assets/readme/perception.png b/assets/readme/perception.png new file mode 100644 index 0000000000..7ec15aabbf --- /dev/null +++ b/assets/readme/perception.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:48e4c61c1ec588d56d61a74fd9f0d9251eadc042e7a514fb1896826d52a32988 +size 797817 diff --git a/assets/readme/spacer.png b/assets/readme/spacer.png new file mode 100644 index 0000000000..8745fc9687 --- /dev/null +++ b/assets/readme/spacer.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:4a16ec40112698cf02b9abd3d18c8db65ce40f48f2c61076b45de58695f16532 +size 66 diff --git a/assets/readme/spatial_memory.gif b/assets/readme/spatial_memory.gif new file mode 100644 index 0000000000..070c65270b --- /dev/null +++ b/assets/readme/spatial_memory.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:50b9cb7898ae8d238a088252fd96d2278a1be96a0dbb761839bc58c99c17f7a7 +size 4655580 diff --git a/base-requirements.txt b/base-requirements.txt deleted file mode 100644 index 68b485fb9a..0000000000 --- a/base-requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -torch==2.0.1 -torchvision==0.15.2 diff --git a/bin/dev b/bin/dev index 2ccebcb071..459696c018 100755 --- a/bin/dev +++ b/bin/dev @@ -1,26 +1,26 @@ #!/usr/bin/env bash REPO_ROOT=$(git rev-parse --show-toplevel) REL_PATH=$(realpath --relative-to="$REPO_ROOT" "$(pwd)") -IMAGE="ghcr.io/dimensionalos/dev" -IMAGE_NAME="$IMAGE:dev" +IMAGE="ghcr.io/dimensionalos/ros-dev" +IMAGE_TAG="$IMAGE:dev" image_exists() { - docker image inspect "${IMAGE_NAME}" &>/dev/null + docker image inspect "${IMAGE_TAG}" &>/dev/null } image_pull() { - docker pull "$IMAGE_NAME" + docker pull "$IMAGE_TAG" } ensure_image_downloaded() { if ! image_exists "$1"; then - echo "Image ${IMAGE_NAME} not found. Pulling..." + echo "Image ${IMAGE_TAG} not found. Pulling..." image_pull "$1" fi } check_image_running() { - if docker ps -q --filter "ancestor=${IMAGE_NAME}" | grep -q .; then + if docker ps -q --filter "ancestor=${IMAGE_TAG}" | grep -q .; then return 0 else return 1 @@ -28,16 +28,16 @@ check_image_running() { } stop_image() { - if check_image_running ${IMAGE_NAME}; then - echo "Stopping containers from image ${IMAGE_NAME}..." - docker stop $(docker ps -q --filter "ancestor=${IMAGE_NAME}") + if check_image_running ${IMAGE_TAG}; then + echo "Stopping containers from image ${IMAGE_TAG}..." + docker stop $(docker ps -q --filter "ancestor=${IMAGE_TAG}") else - echo "No containers from image ${IMAGE_NAME} are running." 
+ echo "No containers from image ${IMAGE_TAG} are running." fi } -get_tag() { +get_branch_tag() { local branch_name branch_name=$(git rev-parse --abbrev-ref HEAD) @@ -58,16 +58,16 @@ get_tag() { build_image() { local image_tag - image_tag=$(get_tag) + image_tag=$(get_branch_tag) docker build \ --build-arg GIT_COMMIT=$(git rev-parse --short HEAD) \ --build-arg GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD) \ - -t "ghcr.io/dimensionalos/dev:${image_tag}" -f docker/dev/Dockerfile . + -t "ghcr.io/dimensionalos/ros-dev:${image_tag}" -f docker/dev/Dockerfile . } remove_image() { - local tag=$(get_tag) + local tag=$(get_branch_tag) docker rm -f "dimos-dev-${tag}" 2>/dev/null || true } @@ -141,7 +141,7 @@ else shift ;; pull) - docker pull ghcr.io/dimensionalos/dev:dev + docker pull "${IMAGE_TAG}" shift ;; *) diff --git a/bin/doclinks b/bin/doclinks new file mode 100755 index 0000000000..5dee1c69b0 --- /dev/null +++ b/bin/doclinks @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" +python "$REPO_ROOT/dimos/utils/docs/doclinks.py" "$@" diff --git a/data/.lfs/models_graspgen.tar.gz b/data/.lfs/models_graspgen.tar.gz new file mode 100644 index 0000000000..8321530922 --- /dev/null +++ b/data/.lfs/models_graspgen.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:058ff764c043dccc516c1519a1e23207500c20a10c432c15eb5e30104477c0a4 +size 2117602984 diff --git a/data/.lfs/piper_description.tar.gz b/data/.lfs/piper_description.tar.gz new file mode 100644 index 0000000000..3ab8ab227b --- /dev/null +++ b/data/.lfs/piper_description.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d4ce51d4ea15f29d80e69b0fff4a4d667f086e010329bb5c66980a881f1ee539 +size 3091511 diff --git a/data/.lfs/xarm7.tar.gz b/data/.lfs/xarm7.tar.gz new file mode 100644 index 0000000000..8e2cfa368a --- /dev/null +++ b/data/.lfs/xarm7.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:47dd79f13845ae6a35368345b7443a9190c7584d548caddd9c3eae224442c6fc +size 3280557 diff --git a/data/.lfs/xarm_description.tar.gz b/data/.lfs/xarm_description.tar.gz new file mode 100644 index 0000000000..4cccd9ab25 --- /dev/null +++ b/data/.lfs/xarm_description.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6e25f1ede8e4022f5053a61717191a2c338ea5af5b81e26bd2c880343aff1316 +size 12709222 diff --git a/dimos/agents/__init__.py b/dimos/agents/__init__.py deleted file mode 100644 index 2bac584249..0000000000 --- a/dimos/agents/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -from langchain_core.messages import ( - AIMessage, - HumanMessage, - MessageLikeRepresentation, - SystemMessage, - ToolCall, - ToolMessage, -) - -from dimos.agents.agent import Agent, deploy -from dimos.agents.spec import AgentSpec -from dimos.agents.vlm_agent import VLMAgent -from dimos.agents.vlm_stream_tester import VlmStreamTester -from dimos.protocol.skill.skill import skill -from dimos.protocol.skill.type import Output, 
Reducer, Stream - -__all__ = [ - "Agent", - "AgentSpec", - "Output", - "Reducer", - "Stream", - "VLMAgent", - "VlmStreamTester", - "deploy", - "skill", -] diff --git a/dimos/agents/agent.py b/dimos/agents/agent.py index e9c7c5d7b9..76195ccea0 100644 --- a/dimos/agents/agent.py +++ b/dimos/agents/agent.py @@ -1,4 +1,4 @@ -# Copyright 2025-2026 Dimensional Inc. +# Copyright 2026 Dimensional Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,395 +11,195 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import asyncio -import datetime + +from dataclasses import dataclass import json -from operator import itemgetter -import os -from typing import Any, TypedDict +from queue import Empty, Queue +from threading import Event, RLock, Thread +from typing import TYPE_CHECKING, Any, Protocol import uuid -from langchain_core.messages import ( - AIMessage, - HumanMessage, - SystemMessage, - ToolCall, - ToolMessage, -) - -from dimos.agents.llm_init import build_llm, build_system_message -from dimos.agents.spec import AgentSpec, Model, Provider -from dimos.core import DimosCluster, rpc -from dimos.protocol.skill.coordinator import SkillCoordinator, SkillState, SkillStateDict -from dimos.protocol.skill.skill import SkillContainer -from dimos.protocol.skill.type import Output -from dimos.utils.logging_config import setup_logger - -logger = setup_logger() - - -SYSTEM_MSG_APPEND = "\nYour message history will always be appended with a System Overview message that provides situational awareness." 
- - -def toolmsg_from_state(state: SkillState) -> ToolMessage: - if state.skill_config.output != Output.standard: - content = "output attached in separate messages" - else: - content = state.content() # type: ignore[assignment] - - return ToolMessage( - # if agent call has been triggered by another skill, - # and this specific skill didn't finish yet but we need a tool call response - # we return a message explaining that execution is still ongoing - content=content - or "Running, you will be called with an update, no need for subsequent tool calls", - name=state.name, - tool_call_id=state.call_id, - ) - - -class SkillStateSummary(TypedDict): - name: str - call_id: str - state: str - data: Any - - -def summary_from_state(state: SkillState, special_data: bool = False) -> SkillStateSummary: - content = state.content() - if isinstance(content, dict): - content = json.dumps(content) - - if not isinstance(content, str): - content = str(content) - - return { - "name": state.name, - "call_id": state.call_id, - "state": state.state.name, - "data": state.content() if not special_data else "data will be in a separate message", - } - - -def _custom_json_serializers(obj): # type: ignore[no-untyped-def] - if isinstance(obj, datetime.date | datetime.datetime): - return obj.isoformat() - raise TypeError(f"Type {type(obj)} not serializable") - - -# takes an overview of running skills from the coorindator -# and builds messages to be sent to an agent -def snapshot_to_messages( - state: SkillStateDict, - tool_calls: list[ToolCall], -) -> tuple[list[ToolMessage], AIMessage | None]: - # builds a set of tool call ids from a previous agent request - tool_call_ids = set( - map(itemgetter("id"), tool_calls), - ) - - # build a tool msg responses - tool_msgs: list[ToolMessage] = [] - - # build a general skill state overview (for longer running skills) - state_overview: list[dict[str, SkillStateSummary]] = [] - - # for special skills that want to return a separate message - # (images for 
example, requires to be a HumanMessage) - special_msgs: list[HumanMessage] = [] - - # for special skills that want to return a separate message that should - # stay in history, like actual human messages, critical events - history_msgs: list[HumanMessage] = [] - - # Initialize state_msg - state_msg = None - - for skill_state in sorted( - state.values(), - key=lambda skill_state: skill_state.duration(), - ): - if skill_state.call_id in tool_call_ids: - tool_msgs.append(toolmsg_from_state(skill_state)) - - if skill_state.skill_config.output == Output.human: - content = skill_state.content() - if not content: - continue - history_msgs.append(HumanMessage(content=content)) # type: ignore[arg-type] - continue - - special_data = skill_state.skill_config.output == Output.image - if special_data: - content = skill_state.content() - if not content: - continue - special_msgs.append(HumanMessage(content=content)) # type: ignore[arg-type] - - if skill_state.call_id in tool_call_ids: - continue - - state_overview.append(summary_from_state(skill_state, special_data)) # type: ignore[arg-type] - - if state_overview: - state_overview_str = "\n".join( - json.dumps(s, default=_custom_json_serializers) for s in state_overview +from langchain.agents import create_agent +from langchain_core.messages import HumanMessage +from langchain_core.messages.base import BaseMessage +from langchain_core.tools import StructuredTool +from langgraph.graph.state import CompiledStateGraph +from reactivex.disposable import Disposable + +from dimos.agents.system_prompt import SYSTEM_PROMPT +from dimos.agents.utils import pretty_print_langchain_message +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig, SkillInfo +from dimos.core.rpc_client import RpcCall, RPCClient +from dimos.core.stream import In, Out +from dimos.protocol.rpc import RPCSpec +from dimos.spec.utils import Spec + +if TYPE_CHECKING: + from langchain_core.language_models import BaseChatModel + + +@dataclass 
+class AgentConfig(ModuleConfig): + system_prompt: str | None = SYSTEM_PROMPT + model: str = "gpt-4o" + model_fixture: str | None = None + + +class Agent(Module): + default_config: type[AgentConfig] = AgentConfig + config: AgentConfig + agent: Out[BaseMessage] + human_input: In[str] + agent_idle: Out[bool] + + _lock: RLock + _state_graph: CompiledStateGraph[Any, Any, Any, Any] | None + _message_queue: Queue[BaseMessage] + _history: list[BaseMessage] + _thread: Thread + _stop_event: Event + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self._lock = RLock() + self._state_graph = None + self._message_queue = Queue() + self._history = [] + self._thread = Thread( + target=self._thread_loop, + name=f"{self.__class__.__name__}-thread", + daemon=True, ) - state_msg = AIMessage("State Overview:\n" + state_overview_str) - - return { # type: ignore[return-value] - "tool_msgs": tool_msgs, - "history_msgs": history_msgs, - "state_msgs": ([state_msg] if state_msg else []) + special_msgs, - } - - -# Agent class job is to glue skill coordinator state to an agent, builds langchain messages -class Agent(AgentSpec): - system_message: SystemMessage - state_messages: list[AIMessage | HumanMessage] - - def __init__( # type: ignore[no-untyped-def] - self, - *args, - **kwargs, - ) -> None: - AgentSpec.__init__(self, *args, **kwargs) - - self.state_messages = [] - self.coordinator = SkillCoordinator() - self._history = [] # type: ignore[var-annotated] - self._agent_id = str(uuid.uuid4()) - self._agent_stopped = False - - self.system_message = build_system_message(self.config, append=SYSTEM_MSG_APPEND) - self.publish(self.system_message) - self._llm = build_llm(self.config) - - @rpc - def get_agent_id(self) -> str: - return self._agent_id + self._stop_event = Event() @rpc def start(self) -> None: super().start() - self.coordinator.start() + + def _on_human_input(string: str) -> None: + self._message_queue.put(HumanMessage(content=string)) + + 
self._disposables.add(Disposable(self.human_input.subscribe(_on_human_input))) @rpc def stop(self) -> None: - self.coordinator.stop() - self._agent_stopped = True + self._stop_event.set() + if self._thread.is_alive(): + self._thread.join(timeout=2.0) super().stop() - def clear_history(self) -> None: - self._history.clear() - - def append_history(self, *msgs: list[AIMessage | HumanMessage]) -> None: - for msg in msgs: - self.publish(msg) # type: ignore[arg-type] - - self._history.extend(msgs) - - def history(self): # type: ignore[no-untyped-def] - return [self.system_message, *self._history, *self.state_messages] - - # Used by agent to execute tool calls - def execute_tool_calls(self, tool_calls: list[ToolCall]) -> None: - """Execute a list of tool calls from the agent.""" - if self._agent_stopped: - logger.warning("Agent is stopped, cannot execute tool calls.") - return - for tool_call in tool_calls: - logger.info(f"executing skill call {tool_call}") - self.coordinator.call_skill( - tool_call.get("id"), # type: ignore[arg-type] - tool_call.get("name"), - tool_call.get("args"), - ) - - # used to inject skill calls into the agent loop without agent asking for it - def run_implicit_skill(self, skill_name: str, **kwargs) -> None: # type: ignore[no-untyped-def] - if self._agent_stopped: - logger.warning("Agent is stopped, cannot execute implicit skill calls.") - return - self.coordinator.call_skill(False, skill_name, {"args": kwargs}) - - async def agent_loop(self, first_query: str = ""): # type: ignore[no-untyped-def] - # TODO: Should I add a lock here to prevent concurrent calls to agent_loop? - - if self._agent_stopped: - logger.warning("Agent is stopped, cannot run agent loop.") - # return "Agent is stopped." - import traceback - - traceback.print_stack() - return "Agent is stopped." 
- - self.state_messages = [] - if first_query: - self.append_history(HumanMessage(first_query)) # type: ignore[arg-type] - - def _get_state() -> str: - # TODO: FIX THIS EXTREME HACK - update = self.coordinator.generate_snapshot(clear=False) - snapshot_msgs = snapshot_to_messages(update, msg.tool_calls) - return json.dumps(snapshot_msgs, sort_keys=True, default=lambda o: repr(o)) - - try: - while True: - # we are getting tools from the coordinator on each turn - # since this allows for skillcontainers to dynamically provide new skills - tools = self.get_tools() # type: ignore[no-untyped-call] - self._llm = self._llm.bind_tools(tools) # type: ignore[assignment] - - # publish to /agent topic for observability - for state_msg in self.state_messages: - self.publish(state_msg) - - # history() builds our message history dynamically - # ensures we include latest system state, but not old ones. - messages = self.history() # type: ignore[no-untyped-call] - - # Some LLMs don't work without any human messages. Add an initial one. - if len(messages) == 1 and isinstance(messages[0], SystemMessage): - messages.append( - HumanMessage( - "Everything is initialized. I'll let you know when you should act." 
- ) - ) - self.append_history(messages[-1]) - - msg = self._llm.invoke(messages) - - self.append_history(msg) # type: ignore[arg-type] - - logger.info(f"Agent response: {msg.content}") - - state = _get_state() - - if msg.tool_calls: - self.execute_tool_calls(msg.tool_calls) - - # print(self) - # print(self.coordinator) - - self._write_debug_history_file() - - if not self.coordinator.has_active_skills(): - logger.info("No active tasks, exiting agent loop.") - return msg.content - - # coordinator will continue once a skill state has changed in - # such a way that agent call needs to be executed - - if state == _get_state(): - await self.coordinator.wait_for_updates() - - # we request a full snapshot of currently running, finished or errored out skills - # we ask for removal of finished skills from subsequent snapshots (clear=True) - update = self.coordinator.generate_snapshot(clear=True) + @rpc + def on_system_modules(self, modules: list[RPCClient]) -> None: + assert self.rpc is not None - # generate tool_msgs and general state update message, - # depending on a skill having associated tool call from previous interaction - # we will return a tool message, and not a general state message - snapshot_msgs = snapshot_to_messages(update, msg.tool_calls) + if self.config.model.startswith("ollama:"): + from dimos.agents.ollama_agent import ensure_ollama_model - self.state_messages = snapshot_msgs.get("state_msgs", []) # type: ignore[attr-defined] - self.append_history( - *snapshot_msgs.get("tool_msgs", []), # type: ignore[attr-defined] - *snapshot_msgs.get("history_msgs", []), # type: ignore[attr-defined] - ) + ensure_ollama_model(self.config.model.removeprefix("ollama:")) - except Exception as e: - logger.error(f"Error in agent loop: {e}") - import traceback + model: str | BaseChatModel = self.config.model + if self.config.model_fixture is not None: + from dimos.agents.testing import MockModel - traceback.print_exc() + model = MockModel(json_path=self.config.model_fixture) 
- @rpc - def loop_thread(self) -> bool: - asyncio.run_coroutine_threadsafe(self.agent_loop(), self._loop) # type: ignore[arg-type] - return True + with self._lock: + self._state_graph = create_agent( + model=model, + tools=_get_tools_from_modules(self, modules, self.rpc), + system_prompt=self.config.system_prompt, + ) + self._thread.start() @rpc - def query(self, query: str): # type: ignore[no-untyped-def] - # TODO: could this be - # from distributed.utils import sync - # return sync(self._loop, self.agent_loop, query) - return asyncio.run_coroutine_threadsafe(self.agent_loop(query), self._loop).result() # type: ignore[arg-type] + def add_message(self, message: BaseMessage) -> None: + self._message_queue.put(message) + + def _thread_loop(self) -> None: + while not self._stop_event.is_set(): + try: + message = self._message_queue.get(timeout=0.5) + except Empty: + continue - async def query_async(self, query: str): # type: ignore[no-untyped-def] - return await self.agent_loop(query) + with self._lock: + if not self._state_graph: + raise ValueError("No state graph initialized") + self._process_message(self._state_graph, message) - @rpc - def register_skills(self, container, run_implicit_name: str | None = None): # type: ignore[no-untyped-def] - ret = self.coordinator.register_skills(container) # type: ignore[func-returns-value] + def _process_message( + self, state_graph: CompiledStateGraph[Any, Any, Any, Any], message: BaseMessage + ) -> None: + self.agent_idle.publish(False) + self._history.append(message) + pretty_print_langchain_message(message) + self.agent.publish(message) - if run_implicit_name: - self.run_implicit_skill(run_implicit_name) + for update in state_graph.stream({"messages": self._history}, stream_mode="updates"): + for node_output in update.values(): + for msg in node_output.get("messages", []): + self._history.append(msg) + pretty_print_langchain_message(msg) + self.agent.publish(msg) - return ret + if self._message_queue.empty(): + 
self.agent_idle.publish(True) - def get_tools(self): # type: ignore[no-untyped-def] - return self.coordinator.get_tools() - def _write_debug_history_file(self) -> None: - file_path = os.getenv("DEBUG_AGENT_HISTORY_FILE") - if not file_path: - return +class AgentSpec(Spec, Protocol): + def add_message(self, message: BaseMessage) -> None: ... - history = [x.__dict__ for x in self.history()] # type: ignore[no-untyped-call] - with open(file_path, "w") as f: - json.dump(history, f, default=lambda x: repr(x), indent=2) +def _get_tools_from_modules( + agent: Agent, modules: list[RPCClient], rpc: RPCSpec +) -> list[StructuredTool]: + skills = [skill for module in modules for skill in (module.get_skills() or [])] + return [_skill_to_tool(agent, skill, rpc) for skill in skills] -class LlmAgent(Agent): - @rpc - def start(self) -> None: - super().start() - self.loop_thread() +def _skill_to_tool(agent: Agent, skill: SkillInfo, rpc: RPCSpec) -> StructuredTool: + rpc_call = RpcCall(None, rpc, skill.func_name, skill.class_name, []) - @rpc - def stop(self) -> None: - super().stop() + def wrapped_func(*args: Any, **kwargs: Any) -> str | list[dict[str, Any]]: + result = None + try: + result = rpc_call(*args, **kwargs) + except Exception as e: + return f"Exception: Error: {e}" -llm_agent = LlmAgent.blueprint + if result is None: + return "It has started. You will be updated later." 
+ if hasattr(result, "agent_encode"): + uuid_ = str(uuid.uuid4()) + _append_image_to_history(agent, skill, uuid_, result) + return f"Tool call started with UUID: {uuid_}" -def deploy( - dimos: DimosCluster, - system_prompt: str = "You are a helpful assistant for controlling a Unitree Go2 robot.", - model: Model = Model.GPT_4O, - provider: Provider = Provider.OPENAI, # type: ignore[attr-defined] - skill_containers: list[SkillContainer] | None = None, -) -> Agent: - from dimos.agents.cli.human import HumanInput + return str(result) - if skill_containers is None: - skill_containers = [] - agent = dimos.deploy( # type: ignore[attr-defined] - Agent, - system_prompt=system_prompt, - model=model, - provider=provider, + return StructuredTool( + name=skill.func_name, + func=wrapped_func, + args_schema=json.loads(skill.args_schema), ) - human_input = dimos.deploy(HumanInput) # type: ignore[attr-defined] - human_input.start() - agent.register_skills(human_input) - - for skill_container in skill_containers: - print("Registering skill container:", skill_container) - agent.register_skills(skill_container) - - agent.run_implicit_skill("human") - agent.start() - agent.loop_thread() +def _append_image_to_history(agent: Agent, skill: SkillInfo, uuid_: str, result: Any) -> None: + agent.add_message( + HumanMessage( + content=[ + { + "type": "text", + "text": f"This is the artefact for the '{skill.func_name}' tool with UUID:={uuid_}.", + }, + *result.agent_encode(), + ] + ) + ) - return agent # type: ignore[no-any-return] +agent = Agent.blueprint -__all__ = ["Agent", "deploy", "llm_agent"] +__all__ = ["Agent", "AgentSpec", "agent"] diff --git a/dimos/agents/agent_test_runner.py b/dimos/agents/agent_test_runner.py new file mode 100644 index 0000000000..7d7fbab03d --- /dev/null +++ b/dimos/agents/agent_test_runner.py @@ -0,0 +1,80 @@ +# Copyright 2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from threading import Event, Thread + +from langchain_core.messages import AIMessage +from langchain_core.messages.base import BaseMessage +from reactivex.disposable import Disposable + +from dimos.agents.agent import AgentSpec +from dimos.core.core import rpc +from dimos.core.module import Module +from dimos.core.rpc_client import RPCClient +from dimos.core.stream import In, Out + + +class AgentTestRunner(Module): + agent_spec: AgentSpec + agent: In[BaseMessage] + agent_idle: In[bool] + finished: Out[bool] + added: Out[bool] + + def __init__(self, messages: list[BaseMessage]) -> None: + super().__init__() + self._messages = messages + self._idle_event = Event() + self._subscription_ready = Event() + self._thread = Thread(target=self._thread_loop, daemon=True) + + @rpc + def start(self) -> None: + super().start() + self._disposables.add(Disposable(self.agent.subscribe(self._on_agent_message))) + self._disposables.add(Disposable(self.agent_idle.subscribe(self._on_agent_idle))) + # Signal that subscription is ready + self._subscription_ready.set() + + @rpc + def stop(self) -> None: + super().stop() + + @rpc + def on_system_modules(self, _modules: list[RPCClient]) -> None: + self._thread.start() + + def _on_agent_idle(self, idle: bool) -> None: + if idle: + self._idle_event.set() + + def _on_agent_message(self, message: BaseMessage) -> None: + # Check for final AIMessage (no tool calls) to signal completion + is_ai = 
isinstance(message, AIMessage) + has_tool_calls = hasattr(message, "tool_calls") and message.tool_calls + if is_ai and not has_tool_calls: + self.added.publish(True) + + def _thread_loop(self) -> None: + # Ensure subscription is ready before sending messages + if not self._subscription_ready.wait(5): + raise TimeoutError("Timed out waiting for subscription to be ready.") + + for message in self._messages: + self._idle_event.clear() + self.agent_spec.add_message(message) + if not self._idle_event.wait(60): + raise TimeoutError("Timed out waiting for message to be processed.") + + self.finished.publish(True) diff --git a/dimos/agents/annotation.py b/dimos/agents/annotation.py new file mode 100644 index 0000000000..083a3cbc53 --- /dev/null +++ b/dimos/agents/annotation.py @@ -0,0 +1,24 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections.abc import Callable +from typing import Any, TypeVar + +F = TypeVar("F", bound=Callable[..., Any]) + + +def skill(func: F) -> F: + func.__rpc__ = True # type: ignore[attr-defined] + func.__skill__ = True # type: ignore[attr-defined] + return func diff --git a/dimos/agents/cli/human.py b/dimos/agents/cli/human.py deleted file mode 100644 index e842b3cc8a..0000000000 --- a/dimos/agents/cli/human.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import queue - -from reactivex.disposable import Disposable - -from dimos.agents import Output, Reducer, Stream, skill # type: ignore[attr-defined] -from dimos.core import pLCMTransport, rpc -from dimos.core.module import Module -from dimos.core.rpc_client import RpcCall - - -class HumanInput(Module): - running: bool = False - - @skill(stream=Stream.call_agent, reducer=Reducer.string, output=Output.human, hide_skill=True) # type: ignore[arg-type] - def human(self): # type: ignore[no-untyped-def] - """receives human input, no need to run this, it's running implicitly""" - if self.running: - return "already running" - self.running = True - transport = pLCMTransport("/human_input") # type: ignore[var-annotated] - - msg_queue = queue.Queue() # type: ignore[var-annotated] - unsub = transport.subscribe(msg_queue.put) # type: ignore[func-returns-value] - self._disposables.add(Disposable(unsub)) - yield from iter(msg_queue.get, None) - - @rpc - def start(self) -> None: - super().start() - - @rpc - def stop(self) -> None: - super().stop() - - @rpc - def set_AgentSpec_register_skills(self, callable: RpcCall) -> None: - callable.set_rpc(self.rpc) # type: ignore[arg-type] - callable(self, run_implicit_name="human") - - -human_input = HumanInput.blueprint - -__all__ = ["HumanInput", "human_input"] diff --git a/dimos/agents/conftest.py b/dimos/agents/conftest.py index 52d7d5a6bb..23d888b0fe 100644 --- a/dimos/agents/conftest.py +++ 
b/dimos/agents/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2025-2026 Dimensional Inc. +# Copyright 2026 Dimensional Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,74 +12,98 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os from pathlib import Path +from threading import Event +from dotenv import load_dotenv +from langchain_core.messages.base import BaseMessage import pytest from dimos.agents.agent import Agent -from dimos.agents.testing import MockModel -from dimos.protocol.skill.test_coordinator import SkillContainerTest +from dimos.agents.agent_test_runner import AgentTestRunner +from dimos.core.blueprints import autoconnect +from dimos.core.global_config import global_config +from dimos.core.transport import pLCMTransport +load_dotenv() -@pytest.fixture -def fixture_dir(): - return Path(__file__).parent / "fixtures" +FIXTURE_DIR = Path(__file__).parent / "fixtures" @pytest.fixture -def potato_system_prompt() -> str: - return "Your name is Mr. Potato, potatoes are bad at math. 
Use a tools if asked to calculate" +def fixture_dir() -> Path: + return FIXTURE_DIR @pytest.fixture -def skill_container(): - container = SkillContainerTest() - try: - yield container - finally: - container.stop() - - -@pytest.fixture -def create_fake_agent(fixture_dir): - agent = None - - def _agent_factory(*, system_prompt, skill_containers, fixture): - mock_model = MockModel(json_path=fixture_dir / fixture) - - nonlocal agent - agent = Agent(system_prompt=system_prompt, model_instance=mock_model) - - for skill_container in skill_containers: - agent.register_skills(skill_container) - - agent.start() - - return agent - - try: - yield _agent_factory - finally: - if agent: - agent.stop() - - -@pytest.fixture -def create_potato_agent(potato_system_prompt, skill_container, fixture_dir): - agent = None - - def _agent_factory(*, fixture): - mock_model = MockModel(json_path=fixture_dir / fixture) - - nonlocal agent - agent = Agent(system_prompt=potato_system_prompt, model_instance=mock_model) - agent.register_skills(skill_container) - agent.start() - - return agent - - try: - yield _agent_factory - finally: - if agent: - agent.stop() +def agent_setup(request): + coordinator = None + transports: list[pLCMTransport] = [] + unsubs: list = [] + recording = bool(os.getenv("RECORD")) + + def fn( + *, + blueprints, + messages: list[BaseMessage], + dask: bool = False, + system_prompt: str | None = None, + fixture: str | None = None, + ) -> list[BaseMessage]: + history: list[BaseMessage] = [] + finished_event = Event() + + agent_transport: pLCMTransport = pLCMTransport("/agent") + finished_transport: pLCMTransport = pLCMTransport("/finished") + transports.extend([agent_transport, finished_transport]) + + def on_message(msg: BaseMessage) -> None: + history.append(msg) + + unsubs.append(agent_transport.subscribe(on_message)) + unsubs.append(finished_transport.subscribe(lambda _: finished_event.set())) + + # Derive fixture path from test name if not explicitly provided. 
+ if fixture is not None: + fixture_path = FIXTURE_DIR / fixture + else: + fixture_path = FIXTURE_DIR / f"{request.node.name}.json" + + agent_kwargs: dict = {"system_prompt": system_prompt} + + if recording or fixture_path.exists(): + # RECORD=1: use real LLM, save responses to fixture file. + # No RECORD but fixture exists: play back recorded responses. + # The MockModel checks RECORD internally to decide record vs playback. + agent_kwargs["model_fixture"] = str(fixture_path) + + blueprint = autoconnect( + *blueprints, + Agent.blueprint(**agent_kwargs), + AgentTestRunner.blueprint(messages=messages), + ) + + global_config.update( + viewer_backend="none", + dask=dask, + ) + + nonlocal coordinator + coordinator = blueprint.build() + + if not finished_event.wait(60): + raise TimeoutError("Timed out waiting for agent to finish processing messages.") + + return history + + yield fn + + if coordinator is not None: + coordinator.stop() + + for transport in transports: + transport.stop() + + for unsub in unsubs: + unsub() diff --git a/dimos/agents/demo_agent.py b/dimos/agents/demo_agent.py new file mode 100644 index 0000000000..b3250fba5b --- /dev/null +++ b/dimos/agents/demo_agent.py @@ -0,0 +1,32 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.agents.agent import Agent +from dimos.core.blueprints import autoconnect +from dimos.hardware.sensors.camera import zed +from dimos.hardware.sensors.camera.module import camera_module +from dimos.hardware.sensors.camera.webcam import Webcam + +demo_agent = autoconnect(Agent.blueprint()) + +demo_agent_camera = autoconnect( + Agent.blueprint(), + camera_module( + hardware=lambda: Webcam( + camera_index=0, + fps=15, + camera_info=zed.CameraInfo.SingleWebcam, + ), + ), +) diff --git a/dimos/agents/fixtures/test_can_call_again_on_error[False].json b/dimos/agents/fixtures/test_can_call_again_on_error[False].json new file mode 100644 index 0000000000..762b452cff --- /dev/null +++ b/dimos/agents/fixtures/test_can_call_again_on_error[False].json @@ -0,0 +1,34 @@ +{ + "responses": [ + { + "content": "", + "tool_calls": [ + { + "name": "register_user", + "args": { + "name": "Paul" + }, + "id": "call_gizJWFgoQiYOQMCDqjlshkvk", + "type": "tool_call" + } + ] + }, + { + "content": "", + "tool_calls": [ + { + "name": "register_user", + "args": { + "name": "paul" + }, + "id": "call_O9p0ktNw0frMfNXjbul6Do1m", + "type": "tool_call" + } + ] + }, + { + "content": "The user named \"paul\" has been registered successfully.", + "tool_calls": [] + } + ] +} diff --git a/dimos/agents/fixtures/test_can_call_again_on_error[True].json b/dimos/agents/fixtures/test_can_call_again_on_error[True].json new file mode 100644 index 0000000000..b67efe84e0 --- /dev/null +++ b/dimos/agents/fixtures/test_can_call_again_on_error[True].json @@ -0,0 +1,34 @@ +{ + "responses": [ + { + "content": "", + "tool_calls": [ + { + "name": "register_user", + "args": { + "name": "Paul" + }, + "id": "call_4l78eCMbfsbIC2qPL86jA4S0", + "type": "tool_call" + } + ] + }, + { + "content": "", + "tool_calls": [ + { + "name": "register_user", + "args": { + "name": "paul" + }, + "id": "call_Jzi8RU0jRMeCcUDEcRkHzahs", + "type": "tool_call" + } + ] + }, + { + "content": "The user named \"paul\" has been registered 
successfully.", + "tool_calls": [] + } + ] +} diff --git a/dimos/agents/fixtures/test_can_call_tool[False].json b/dimos/agents/fixtures/test_can_call_tool[False].json new file mode 100644 index 0000000000..32d7770899 --- /dev/null +++ b/dimos/agents/fixtures/test_can_call_tool[False].json @@ -0,0 +1,22 @@ +{ + "responses": [ + { + "content": "", + "tool_calls": [ + { + "name": "add", + "args": { + "x": 33333, + "y": 100 + }, + "id": "call_vIdFXzNnojiCXtnEi9J2gXQN", + "type": "tool_call" + } + ] + }, + { + "content": "33333 + 100 is 33433.", + "tool_calls": [] + } + ] +} diff --git a/dimos/agents/fixtures/test_can_call_tool[True].json b/dimos/agents/fixtures/test_can_call_tool[True].json new file mode 100644 index 0000000000..e9431bb8ea --- /dev/null +++ b/dimos/agents/fixtures/test_can_call_tool[True].json @@ -0,0 +1,22 @@ +{ + "responses": [ + { + "content": "", + "tool_calls": [ + { + "name": "add", + "args": { + "x": 33333, + "y": 100 + }, + "id": "call_ZlA2HzNAuHF0H52CKQIPX9Te", + "type": "tool_call" + } + ] + }, + { + "content": "The result of 33333 + 100 is 33433.", + "tool_calls": [] + } + ] +} diff --git a/dimos/agents/fixtures/test_get_gps_position_for_queries.json b/dimos/agents/fixtures/test_get_gps_position_for_queries.json index 5d95b91bac..c2f163598d 100644 --- a/dimos/agents/fixtures/test_get_gps_position_for_queries.json +++ b/dimos/agents/fixtures/test_get_gps_position_for_queries.json @@ -6,19 +6,19 @@ { "name": "get_gps_position_for_queries", "args": { - "args": [ + "queries": [ "Hyde Park", "Regent Park", "Russell Park" ] }, - "id": "call_xO0VDst53tzetEUq8mapKGS1", + "id": "call_KqAjbd5E9VE69YMWBINdWnkw", "type": "tool_call" } ] }, { - "content": "Here are the latitude and longitude coordinates for the parks:\n\n- Hyde Park: Latitude 37.782601, Longitude -122.413201\n- Regent Park: Latitude 37.782602, Longitude -122.413202\n- Russell Park: Latitude 37.782603, Longitude -122.413203", + "content": "Here are the latitude and longitude coordinates 
for the parks you asked about:\n\n- **Hyde Park**: Latitude 37.782601, Longitude -122.413201\n- **Regent Park**: Latitude 37.782602, Longitude -122.413202\n- **Russell Park**: Latitude 37.782603, Longitude -122.413203", "tool_calls": [] } ] diff --git a/dimos/agents/fixtures/test_go_to_object.json b/dimos/agents/fixtures/test_go_to_object.json deleted file mode 100644 index 80f1e95379..0000000000 --- a/dimos/agents/fixtures/test_go_to_object.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "responses": [ - { - "content": "I will navigate to the nearest chair.", - "tool_calls": [ - { - "name": "navigate_with_text", - "args": { - "args": [ - "chair" - ] - }, - "id": "call_LP4eewByfO9XaxMtnnWxDUz7", - "type": "tool_call" - } - ] - }, - { - "content": "I'm on my way to the chair. Let me know if there's anything else you'd like me to do!", - "tool_calls": [] - }, - { - "content": "I have successfully navigated to the chair. Let me know if you need anything else!", - "tool_calls": [] - } - ] -} diff --git a/dimos/agents/fixtures/test_go_to_semantic_location.json b/dimos/agents/fixtures/test_go_to_semantic_location.json index 1a10711543..8ea3006142 100644 --- a/dimos/agents/fixtures/test_go_to_semantic_location.json +++ b/dimos/agents/fixtures/test_go_to_semantic_location.json @@ -6,17 +6,15 @@ { "name": "navigate_with_text", "args": { - "args": [ - "bookshelf" - ] + "query": "bookshelf" }, - "id": "call_yPoqcavMo05ogNNy5LMNQl2a", + "id": "call_PkS6DWAciWAAAdZfatiXoEdu", "type": "tool_call" } ] }, { - "content": "I have successfully arrived at the bookshelf. 
Is there anything specific you need here?", + "content": "I have successfully navigated to the bookshelf.", "tool_calls": [] } ] diff --git a/dimos/agents/fixtures/test_how_much_is_124181112_plus_124124.json b/dimos/agents/fixtures/test_how_much_is_124181112_plus_124124.json deleted file mode 100644 index f4dbe0c3a5..0000000000 --- a/dimos/agents/fixtures/test_how_much_is_124181112_plus_124124.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "responses": [ - { - "content": "", - "tool_calls": [ - { - "name": "add", - "args": { - "args": [ - 124181112, - 124124 - ] - }, - "id": "call_SSoVXz5yihrzR8TWIGnGKSpi", - "type": "tool_call" - } - ] - }, - { - "content": "Let me do some potato math... Calculating this will take some time, hold on! \ud83e\udd54", - "tool_calls": [] - }, - { - "content": "The result of adding 124,181,112 and 124,124 is 124,305,236. Potatoes work well with tools! \ud83e\udd54\ud83c\udf89", - "tool_calls": [] - }, - { - "content": "", - "tool_calls": [ - { - "name": "add", - "args": { - "args": [ - 1000000000, - -1000000 - ] - }, - "id": "call_ge9pv6IRa3yo0vjVaORvrGby", - "type": "tool_call" - } - ] - }, - { - "content": "Let's get those numbers crunched. Potatoes need a bit of time! \ud83e\udd54\ud83d\udcca", - "tool_calls": [] - }, - { - "content": "The result of one billion plus negative one million is 999,000,000. Potatoes are amazing with some help! \ud83e\udd54\ud83d\udca1", - "tool_calls": [] - } - ] -} diff --git a/dimos/agents/fixtures/test_image.json b/dimos/agents/fixtures/test_image.json new file mode 100644 index 0000000000..196c2122ef --- /dev/null +++ b/dimos/agents/fixtures/test_image.json @@ -0,0 +1,23 @@ +{ + "responses": [ + { + "content": "", + "tool_calls": [ + { + "name": "take_a_picture", + "args": {}, + "id": "call_11vyrbtZXlsEgoKuQs8jRjP8", + "type": "tool_call" + } + ] + }, + { + "content": "I have taken a picture, let me analyze it to provide a description.\nBased on the image captured, the setting resembles a \"cafe\". 
It shows an indoor space with tables, chairs, and a warm, inviting atmosphere typical of a cafe environment, where people might gather for drinks and conversation.", + "tool_calls": [] + }, + { + "content": "The image depicts a \"cafe\" setting. It shows people sitting outside at tables, likely enjoying drinks or meals, with a cozy and inviting ambiance typical of a cafe.", + "tool_calls": [] + } + ] +} diff --git a/dimos/agents/fixtures/test_multiple_tool_calls_with_multiple_messages.json b/dimos/agents/fixtures/test_multiple_tool_calls_with_multiple_messages.json new file mode 100644 index 0000000000..7fd8172d35 --- /dev/null +++ b/dimos/agents/fixtures/test_multiple_tool_calls_with_multiple_messages.json @@ -0,0 +1,98 @@ +{ + "responses": [ + { + "content": "", + "tool_calls": [ + { + "name": "locate_person", + "args": { + "name": "John" + }, + "id": "call_w4cTUUpojE2zRMYda93ma3io", + "type": "tool_call" + } + ] + }, + { + "content": "", + "tool_calls": [ + { + "name": "register_person", + "args": { + "name": "John" + }, + "id": "call_Rij2ZZHG1u2yEKALNqH4L0WH", + "type": "tool_call" + } + ] + }, + { + "content": "", + "tool_calls": [ + { + "name": "locate_person", + "args": { + "name": "John" + }, + "id": "call_Pr1HUeq1j2L9bYU3CsTXVmnk", + "type": "tool_call" + } + ] + }, + { + "content": "", + "tool_calls": [ + { + "name": "go_to_location", + "args": { + "description": "kitchen" + }, + "id": "call_F6Pw6Da2mixJ1mmhb0q4uI3F", + "type": "tool_call" + } + ] + }, + { + "content": "I have moved to the kitchen where John is located.", + "tool_calls": [] + }, + { + "content": "", + "tool_calls": [ + { + "name": "register_person", + "args": { + "name": "Jane" + }, + "id": "call_52hL9l50BzPFPruLoPxigoVa", + "type": "tool_call" + }, + { + "name": "locate_person", + "args": { + "name": "Jane" + }, + "id": "call_0NbGZIn9BOCENTfrKvuacarZ", + "type": "tool_call" + } + ] + }, + { + "content": "", + "tool_calls": [ + { + "name": "go_to_location", + "args": { + "description": 
"living room" + }, + "id": "call_XoErhAixvK31yHOeCKiwgrKj", + "type": "tool_call" + } + ] + }, + { + "content": "I have moved to the living room where Jane is located.", + "tool_calls": [] + } + ] +} diff --git a/dimos/agents/fixtures/test_pounce.json b/dimos/agents/fixtures/test_pounce.json index 99e84d003a..4213c12c95 100644 --- a/dimos/agents/fixtures/test_pounce.json +++ b/dimos/agents/fixtures/test_pounce.json @@ -6,32 +6,15 @@ { "name": "execute_sport_command", "args": { - "args": [ - "FrontPounce" - ] + "command_name": "FrontPounce" }, - "id": "call_Ukj6bCAnHQLj28RHRp697blZ", + "id": "call_Z7X0sJfUygGiJUUL67I64eRs", "type": "tool_call" } ] }, { - "content": "", - "tool_calls": [ - { - "name": "speak", - "args": { - "args": [ - "I have successfully performed a front pounce." - ] - }, - "id": "call_FR9DtqEvJ9zSY85qVD2UFrll", - "type": "tool_call" - } - ] - }, - { - "content": "I have successfully performed a front pounce.", + "content": "The robot has successfully performed a \"FrontPounce\" action!", "tool_calls": [] } ] diff --git a/dimos/agents/fixtures/test_prompt.json b/dimos/agents/fixtures/test_prompt.json new file mode 100644 index 0000000000..b924f66f52 --- /dev/null +++ b/dimos/agents/fixtures/test_prompt.json @@ -0,0 +1,8 @@ +{ + "responses": [ + { + "content": "My name is Johnny. How can I assist you today?", + "tool_calls": [] + } + ] +} diff --git a/dimos/agents/fixtures/test_set_gps_travel_points.json b/dimos/agents/fixtures/test_set_gps_travel_points.json index eb5b2a9195..e1392125a1 100644 --- a/dimos/agents/fixtures/test_set_gps_travel_points.json +++ b/dimos/agents/fixtures/test_set_gps_travel_points.json @@ -1,29 +1,36 @@ { "responses": [ { - "content": "I understand you want me to navigate to the specified location. 
I will set the GPS travel point accordingly.", + "content": "", + "tool_calls": [ + { + "name": "set_gps_travel_points", + "args": {}, + "id": "call_0Onzw4fDoT68BWNfZUiGlIbg", + "type": "tool_call" + } + ] + }, + { + "content": "It seems there was an issue with the initial attempt. Let me try again by providing the correct parameter.", "tool_calls": [ { "name": "set_gps_travel_points", "args": { - "args": [ + "points": [ { "lat": 37.782654, "lon": -122.413273 } ] }, - "id": "call_q6JCCYFuyAjqUgUibJHqcIMD", + "id": "call_b0QRHVc09ZtY8jbQaUsny7Yx", "type": "tool_call" } ] }, { - "content": "I'm on my way to the specified location. Let me know if there is anything else I can assist you with!", - "tool_calls": [] - }, - { - "content": "I've reached the specified location. Do you need any further assistance?", + "content": "The GPS travel point has been successfully set to latitude: 37.782654 and longitude: -122.413273.", "tool_calls": [] } ] diff --git a/dimos/agents/fixtures/test_set_gps_travel_points_multiple.json b/dimos/agents/fixtures/test_set_gps_travel_points_multiple.json index 9d8f7e9e00..2391e81fe1 100644 --- a/dimos/agents/fixtures/test_set_gps_travel_points_multiple.json +++ b/dimos/agents/fixtures/test_set_gps_travel_points_multiple.json @@ -1,12 +1,23 @@ { "responses": [ + { + "content": "", + "tool_calls": [ + { + "name": "set_gps_travel_points", + "args": {}, + "id": "call_JWgodQUZD16l2tjePmTXGX7V", + "type": "tool_call" + } + ] + }, { "content": "", "tool_calls": [ { "name": "set_gps_travel_points", "args": { - "args": [ + "points": [ { "lat": 37.782654, "lon": -122.413273 @@ -21,13 +32,13 @@ } ] }, - "id": "call_Q09MRMEgRnJPBOGZpM0j8sL2", + "id": "call_e5szuuZrTdq6deN8mUM5kusY", "type": "tool_call" } ] }, { - "content": "I've successfully set the travel points and will navigate to them sequentially.", + "content": "The GPS travel points have been successfully set in the specified order.", "tool_calls": [] } ] diff --git 
a/dimos/agents/fixtures/test_show_your_love.json b/dimos/agents/fixtures/test_show_your_love.json deleted file mode 100644 index 941906e781..0000000000 --- a/dimos/agents/fixtures/test_show_your_love.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "responses": [ - { - "content": "", - "tool_calls": [ - { - "name": "execute_sport_command", - "args": { - "args": [ - "FingerHeart" - ] - }, - "id": "call_VFp6x9F00FBmiiUiemFWewop", - "type": "tool_call" - } - ] - }, - { - "content": "", - "tool_calls": [ - { - "name": "speak", - "args": { - "args": [ - "Here's a gesture to show you some love!" - ] - }, - "id": "call_WUUmBJ95s9PtVx8YQsmlJ4EU", - "type": "tool_call" - } - ] - }, - { - "content": "Just did a finger heart gesture to show my affection!", - "tool_calls": [] - } - ] -} diff --git a/dimos/agents/fixtures/test_take_a_look_around.json b/dimos/agents/fixtures/test_start_exploration.json similarity index 51% rename from dimos/agents/fixtures/test_take_a_look_around.json rename to dimos/agents/fixtures/test_start_exploration.json index c30fe71017..713e6e2dba 100644 --- a/dimos/agents/fixtures/test_take_a_look_around.json +++ b/dimos/agents/fixtures/test_start_exploration.json @@ -6,17 +6,15 @@ { "name": "start_exploration", "args": { - "args": [ - 10 - ] + "timeout": 10 }, - "id": "call_AMNeD8zTkvyFHKG90DriDPuM", + "id": "call_o5O7xLaI4iayDhszXblOiWVS", "type": "tool_call" } ] }, { - "content": "I have completed a brief exploration of the surroundings. Let me know if there's anything specific you need!", + "content": "I have completed the exploration for 10 seconds. 
If there's anything specific you would like to do next, please let me know!", "tool_calls": [] } ] diff --git a/dimos/agents/fixtures/test_stop_movement.json b/dimos/agents/fixtures/test_stop_movement.json index b80834213e..f9f10af0e0 100644 --- a/dimos/agents/fixtures/test_stop_movement.json +++ b/dimos/agents/fixtures/test_stop_movement.json @@ -5,16 +5,14 @@ "tool_calls": [ { "name": "stop_movement", - "args": { - "args": null - }, - "id": "call_oAKe9W8s3xRGioZhBJJDOZB1", + "args": {}, + "id": "call_dHP1UE2Bw180bzxPT2wI2Dam", "type": "tool_call" } ] }, { - "content": "I have stopped moving. Let me know if you need anything else!", + "content": "The movement has been stopped.", "tool_calls": [] } ] diff --git a/dimos/agents/fixtures/test_what_do_you_see_in_this_picture.json b/dimos/agents/fixtures/test_what_do_you_see_in_this_picture.json deleted file mode 100644 index 27ac3453bc..0000000000 --- a/dimos/agents/fixtures/test_what_do_you_see_in_this_picture.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "responses": [ - { - "content": "", - "tool_calls": [ - { - "name": "take_photo", - "args": { - "args": [] - }, - "id": "call_o6ikJtK3vObuEFD6hDtLoyGQ", - "type": "tool_call" - } - ] - }, - { - "content": "I took a photo, but as an AI, I can't see or interpret images. If there's anything specific you need to know, feel free to ask!", - "tool_calls": [] - }, - { - "content": "It looks like a cozy outdoor cafe where people are sitting and enjoying a meal. There are flowers and a nice, sunny ambiance. If you have any specific questions about the image, let me know!", - "tool_calls": [] - } - ] -} diff --git a/dimos/agents/fixtures/test_what_is_your_name.json b/dimos/agents/fixtures/test_what_is_your_name.json deleted file mode 100644 index a74d793b1d..0000000000 --- a/dimos/agents/fixtures/test_what_is_your_name.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "responses": [ - { - "content": "Hi! My name is Mr. Potato. 
How can I assist you today?", - "tool_calls": [] - } - ] -} diff --git a/dimos/agents/fixtures/test_where_am_i.json b/dimos/agents/fixtures/test_where_am_i.json index 2d274f8fa6..a4a06a5c72 100644 --- a/dimos/agents/fixtures/test_where_am_i.json +++ b/dimos/agents/fixtures/test_where_am_i.json @@ -5,16 +5,14 @@ "tool_calls": [ { "name": "where_am_i", - "args": { - "args": [] - }, - "id": "call_uRJLockZ5JWtGWbsSL1dpHm3", + "args": {}, + "id": "call_4eiRtfr8mI7SgLfR0FTjH7Pp", "type": "tool_call" } ] }, { - "content": "You are on Bourbon Street.", + "content": "You are currently on Bourbon Street.", "tool_calls": [] } ] diff --git a/dimos/agents/llm_init.py b/dimos/agents/llm_init.py deleted file mode 100644 index eb8c33c631..0000000000 --- a/dimos/agents/llm_init.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import cast - -from langchain.chat_models import init_chat_model -from langchain_core.language_models.chat_models import BaseChatModel -from langchain_core.messages import SystemMessage -from langchain_huggingface import ChatHuggingFace, HuggingFacePipeline - -from dimos.agents.ollama_agent import ensure_ollama_model -from dimos.agents.spec import AgentConfig -from dimos.agents.system_prompt import SYSTEM_PROMPT - - -def build_llm(config: AgentConfig) -> BaseChatModel: - if config.model_instance: - return config.model_instance - - if config.provider.value.lower() == "ollama": - ensure_ollama_model(config.model) - - if config.provider.value.lower() == "huggingface": - llm = HuggingFacePipeline.from_model_id( - model_id=config.model, - task="text-generation", - pipeline_kwargs={ - "max_new_tokens": 512, - "temperature": 0.7, - }, - ) - return ChatHuggingFace(llm=llm, model_id=config.model) - - return cast( - "BaseChatModel", - init_chat_model( # type: ignore[call-overload] - model_provider=config.provider, - model=config.model, - ), - ) - - -def build_system_message(config: AgentConfig, *, append: str = "") -> SystemMessage: - if config.system_prompt: - if isinstance(config.system_prompt, str): - return SystemMessage(config.system_prompt + append) - if append: - config.system_prompt.content += append # type: ignore[operator] - return config.system_prompt - - return SystemMessage(SYSTEM_PROMPT + append) diff --git a/dimos/agents/skills/conftest.py b/dimos/agents/skills/conftest.py deleted file mode 100644 index 0e2e3e0636..0000000000 --- a/dimos/agents/skills/conftest.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from functools import partial - -import pytest -from reactivex.scheduler import ThreadPoolScheduler - -from dimos.agents.skills.google_maps_skill_container import GoogleMapsSkillContainer -from dimos.agents.skills.gps_nav_skill import GpsNavSkillContainer -from dimos.agents.skills.navigation import NavigationSkillContainer -from dimos.agents.system_prompt import SYSTEM_PROMPT -from dimos.robot.unitree_webrtc.unitree_skill_container import UnitreeSkillContainer - - -@pytest.fixture(autouse=True) -def cleanup_threadpool_scheduler(monkeypatch): - # TODO: get rid of this global threadpool - """Clean up and recreate the global ThreadPoolScheduler after each test.""" - # Disable ChromaDB telemetry to avoid leaking threads - monkeypatch.setenv("CHROMA_ANONYMIZED_TELEMETRY", "False") - yield - from dimos.utils import threadpool - - # Shutdown the global scheduler's executor - threadpool.scheduler.executor.shutdown(wait=True) - # Recreate it for the next test - threadpool.scheduler = ThreadPoolScheduler(max_workers=threadpool.get_max_workers()) - - -@pytest.fixture -def navigation_skill_container(mocker): - container = NavigationSkillContainer() - container.color_image.connection = mocker.MagicMock() - container.odom.connection = mocker.MagicMock() - container.start() - yield container - container.stop() - - -@pytest.fixture -def gps_nav_skill_container(mocker): - container = GpsNavSkillContainer() - container.gps_location.connection = mocker.MagicMock() - container.gps_goal = mocker.MagicMock() - container.start() - yield container - container.stop() - - 
-@pytest.fixture -def google_maps_skill_container(mocker): - container = GoogleMapsSkillContainer() - container.gps_location.connection = mocker.MagicMock() - container.start() - container._client = mocker.MagicMock() - yield container - container.stop() - - -@pytest.fixture -def unitree_skills(mocker): - container = UnitreeSkillContainer() - container.start() - yield container - container.stop() - - -@pytest.fixture -def create_navigation_agent(navigation_skill_container, create_fake_agent): - return partial( - create_fake_agent, - system_prompt=SYSTEM_PROMPT, - skill_containers=[navigation_skill_container], - ) - - -@pytest.fixture -def create_gps_nav_agent(gps_nav_skill_container, create_fake_agent): - return partial( - create_fake_agent, system_prompt=SYSTEM_PROMPT, skill_containers=[gps_nav_skill_container] - ) - - -@pytest.fixture -def create_google_maps_agent( - gps_nav_skill_container, google_maps_skill_container, create_fake_agent -): - return partial( - create_fake_agent, - system_prompt=SYSTEM_PROMPT, - skill_containers=[gps_nav_skill_container, google_maps_skill_container], - ) - - -@pytest.fixture -def create_unitree_skills_agent(unitree_skills, create_fake_agent): - return partial( - create_fake_agent, - system_prompt=SYSTEM_PROMPT, - skill_containers=[unitree_skills], - ) diff --git a/dimos/agents/skills/demo_calculator_skill.py b/dimos/agents/skills/demo_calculator_skill.py index 2ed8050ca5..61d66e301a 100644 --- a/dimos/agents/skills/demo_calculator_skill.py +++ b/dimos/agents/skills/demo_calculator_skill.py @@ -12,18 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from dimos.core.skill_module import SkillModule -from dimos.protocol.skill.skill import skill +from dimos.agents.annotation import skill +from dimos.core.module import Module -class DemoCalculatorSkill(SkillModule): +class DemoCalculatorSkill(Module): def start(self) -> None: super().start() def stop(self) -> None: super().stop() - @skill() + @skill def sum_numbers(self, n1: int, n2: int, *args: int, **kwargs: int) -> str: """This skill adds two numbers. Always use this tool. Never add up numbers yourself. diff --git a/dimos/agents/skills/demo_google_maps_skill.py b/dimos/agents/skills/demo_google_maps_skill.py index cd8cad9d6a..13f2ebc19b 100644 --- a/dimos/agents/skills/demo_google_maps_skill.py +++ b/dimos/agents/skills/demo_google_maps_skill.py @@ -13,20 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from dotenv import load_dotenv - -from dimos.agents.agent import llm_agent -from dimos.agents.cli.human import human_input +from dimos.agents.agent import agent from dimos.agents.skills.demo_robot import demo_robot from dimos.agents.skills.google_maps_skill_container import google_maps_skill from dimos.core.blueprints import autoconnect -load_dotenv() - - demo_google_maps_skill = autoconnect( demo_robot(), google_maps_skill(), - human_input(), - llm_agent(), + agent(), ) diff --git a/dimos/agents/skills/demo_gps_nav.py b/dimos/agents/skills/demo_gps_nav.py index 4204b23dc7..7a6abd32dd 100644 --- a/dimos/agents/skills/demo_gps_nav.py +++ b/dimos/agents/skills/demo_gps_nav.py @@ -13,20 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from dotenv import load_dotenv - -from dimos.agents.agent import llm_agent -from dimos.agents.cli.human import human_input +from dimos.agents.agent import agent from dimos.agents.skills.demo_robot import demo_robot from dimos.agents.skills.gps_nav_skill import gps_nav_skill from dimos.core.blueprints import autoconnect -load_dotenv() - - -demo_gps_nav_skill = autoconnect( +demo_gps_nav = autoconnect( demo_robot(), gps_nav_skill(), - human_input(), - llm_agent(), + agent(), ) diff --git a/dimos/agents/skills/demo_skill.py b/dimos/agents/skills/demo_skill.py index 547d81c5b8..b067a3fbc2 100644 --- a/dimos/agents/skills/demo_skill.py +++ b/dimos/agents/skills/demo_skill.py @@ -13,18 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from dotenv import load_dotenv - -from dimos.agents.agent import llm_agent -from dimos.agents.cli.human import human_input +from dimos.agents.agent import agent from dimos.agents.skills.demo_calculator_skill import demo_calculator_skill from dimos.core.blueprints import autoconnect -load_dotenv() - - demo_skill = autoconnect( demo_calculator_skill(), - human_input(), - llm_agent(), + agent(), ) diff --git a/dimos/agents/skills/google_maps_skill_container.py b/dimos/agents/skills/google_maps_skill_container.py index d5a30904ed..33b2ee9f10 100644 --- a/dimos/agents/skills/google_maps_skill_container.py +++ b/dimos/agents/skills/google_maps_skill_container.py @@ -15,18 +15,18 @@ import json from typing import Any +from dimos.agents.annotation import skill from dimos.core.core import rpc -from dimos.core.skill_module import SkillModule +from dimos.core.module import Module from dimos.core.stream import In from dimos.mapping.google_maps.google_maps import GoogleMaps from dimos.mapping.types import LatLon -from dimos.protocol.skill.skill import skill from dimos.utils.logging_config import setup_logger logger = setup_logger() -class GoogleMapsSkillContainer(SkillModule): +class 
GoogleMapsSkillContainer(Module): _latest_location: LatLon | None = None _client: GoogleMaps @@ -55,7 +55,7 @@ def _get_latest_location(self) -> LatLon: raise ValueError("The position has not been set yet.") return self._latest_location - @skill() + @skill def where_am_i(self, context_radius: int = 200) -> str: """This skill returns information about what street/locality/city/etc you are in. It also gives you nearby landmarks. @@ -81,7 +81,7 @@ def where_am_i(self, context_radius: int = 200) -> str: return result.model_dump_json() - @skill() + @skill def get_gps_position_for_queries(self, queries: list[str]) -> str: """Get the GPS position (latitude/longitude) from Google Maps for know landmarks or searchable locations. This includes anything that wouldn't be viewable on a physical OSM map including intersections (5th and Natoma) diff --git a/dimos/agents/skills/gps_nav_skill.py b/dimos/agents/skills/gps_nav_skill.py index c7325a5b64..721119f6e6 100644 --- a/dimos/agents/skills/gps_nav_skill.py +++ b/dimos/agents/skills/gps_nav_skill.py @@ -14,19 +14,19 @@ import json +from dimos.agents.annotation import skill from dimos.core.core import rpc +from dimos.core.module import Module from dimos.core.rpc_client import RpcCall -from dimos.core.skill_module import SkillModule from dimos.core.stream import In, Out from dimos.mapping.types import LatLon from dimos.mapping.utils.distance import distance_in_meters -from dimos.protocol.skill.skill import skill from dimos.utils.logging_config import setup_logger logger = setup_logger() -class GpsNavSkillContainer(SkillModule): +class GpsNavSkillContainer(Module): _latest_location: LatLon | None = None _max_valid_distance: int = 50000 _set_gps_travel_goal_points: RpcCall | None = None @@ -59,8 +59,8 @@ def _get_latest_location(self) -> LatLon: raise ValueError("The position has not been set yet.") return self._latest_location - @skill() - def set_gps_travel_points(self, *points: dict[str, float]) -> str: + @skill + def 
set_gps_travel_points(self, points: list[dict[str, float]]) -> str: """Define the movement path determined by GPS coordinates. Requires at least one. You can get the coordinates by using the `get_gps_position_for_queries` skill. Example: diff --git a/dimos/agents/skills/navigation.py b/dimos/agents/skills/navigation.py index 054246d6ee..322a09c2bb 100644 --- a/dimos/agents/skills/navigation.py +++ b/dimos/agents/skills/navigation.py @@ -15,8 +15,11 @@ import time from typing import Any +from reactivex.disposable import Disposable + +from dimos.agents.annotation import skill from dimos.core.core import rpc -from dimos.core.skill_module import SkillModule +from dimos.core.module import Module from dimos.core.stream import In from dimos.models.qwen.video_query import BBox from dimos.models.vl.qwen import QwenVlModel @@ -25,14 +28,13 @@ from dimos.msgs.sensor_msgs import Image from dimos.navigation.base import NavigationState from dimos.navigation.visual.query import get_object_bbox_from_image -from dimos.protocol.skill.skill import skill from dimos.types.robot_location import RobotLocation from dimos.utils.logging_config import setup_logger logger = setup_logger() -class NavigationSkillContainer(SkillModule): +class NavigationSkillContainer(Module): _latest_image: Image | None = None _latest_odom: PoseStamped | None = None _skill_started: bool = False @@ -49,9 +51,6 @@ class NavigationSkillContainer(SkillModule): "ObjectTracking.track", "ObjectTracking.stop_track", "ObjectTracking.is_tracking", - "WavefrontFrontierExplorer.stop_exploration", - "WavefrontFrontierExplorer.explore", - "WavefrontFrontierExplorer.is_exploration_active", ] color_image: In[Image] @@ -64,8 +63,9 @@ def __init__(self) -> None: @rpc def start(self) -> None: - self._disposables.add(self.color_image.subscribe(self._on_color_image)) # type: ignore[arg-type] - self._disposables.add(self.odom.subscribe(self._on_odom)) # type: ignore[arg-type] + super().start() + 
self._disposables.add(Disposable(self.color_image.subscribe(self._on_color_image))) + self._disposables.add(Disposable(self.odom.subscribe(self._on_odom))) self._skill_started = True @rpc @@ -78,7 +78,7 @@ def _on_color_image(self, image: Image) -> None: def _on_odom(self, odom: PoseStamped) -> None: self._latest_odom = odom - @skill() + @skill def tag_location(self, location_name: str) -> str: """Tag this location in the spatial memory with a name. @@ -93,12 +93,12 @@ def tag_location(self, location_name: str) -> str: if not self._skill_started: raise ValueError(f"{self} has not been started.") - tf = self.tf.get("map", "base_link", time_tolerance=2.0) - if not tf: - return "Could not get the robot's current transform." - position = tf.translation - rotation = tf.rotation.to_euler() + if not self._latest_odom: + return "No odometry data received yet, cannot tag location." + + position = self._latest_odom.position + rotation = self._latest_odom.orientation location = RobotLocation( name=location_name, @@ -113,7 +113,7 @@ def tag_location(self, location_name: str) -> str: logger.info(f"Tagged {location}") return f"Tagged '{location_name}': ({position.x},{position.y})." - @skill() + @skill def navigate_with_text(self, query: str) -> str: """Navigate to a location by querying the existing semantic map using natural language. @@ -158,48 +158,31 @@ def _navigate_by_tagged_location(self, query: str) -> str | None: if not robot_location: return None - print("Found tagged location:", robot_location) + logger.info("Found tagged location", location=robot_location) goal_pose = PoseStamped( position=make_vector3(*robot_location.position), orientation=Quaternion.from_euler(Vector3(*robot_location.rotation)), frame_id="map", ) - result = self._navigate_to(goal_pose) - if not result: - return "Error: Faild to reach the tagged location." - - return ( - f"Successfuly arrived at location tagged '{robot_location.name}' from query '{query}'." 
- ) + return self._navigate_to(goal_pose, f"Found a tagged location called '{query}'.") - def _navigate_to(self, pose: PoseStamped) -> bool: + def _navigate_to(self, pose: PoseStamped, message: str) -> str: try: - set_goal_rpc, get_state_rpc, is_goal_reached_rpc = self.get_rpc_calls( - "NavigationInterface.set_goal", - "NavigationInterface.get_state", - "NavigationInterface.is_goal_reached", - ) + set_goal_rpc = self.get_rpc_calls("NavigationInterface.set_goal") except Exception: logger.error("Navigation module not connected properly") - return False + return "Error: Navigation module is not connected, cannot set goal." logger.info( f"Navigating to pose: ({pose.position.x:.2f}, {pose.position.y:.2f}, {pose.position.z:.2f})" ) set_goal_rpc(pose) - time.sleep(1.0) - - while get_state_rpc() == NavigationState.FOLLOWING_PATH: - time.sleep(0.25) - time.sleep(1.0) - if not is_goal_reached_rpc(): - logger.info("Navigation was cancelled or failed") - return False - else: - logger.info("Navigation goal reached") - return True + return ( + f"{message}. Started navigating to that position. " + f"To cancel movement call the 'stop_navigation' tool." + ) def _navigate_to_object(self, query: str) -> str | None: try: @@ -286,24 +269,15 @@ def _navigate_using_semantic_map(self, query: str) -> str: goal_pose = self._get_goal_pose_from_result(best_match) - print("Goal pose for semantic nav:", goal_pose) + logger.info("Goal pose for semantic nav", pose=goal_pose) if not goal_pose: return f"Found a result for '{query}' but it didn't have a valid position." - result = self._navigate_to(goal_pose) + message = f"Found a location in the semantic map matching '{query}'." + return self._navigate_to(goal_pose, message) - if not result: - return f"Failed to navigate for '{query}'" - - return f"Successfuly arrived at '{query}'" - - @skill() - def follow_human(self, person: str) -> str: - """Follow a specific person""" - return "Not implemented yet." 
- - @skill() - def stop_movement(self) -> str: + @skill + def stop_navigation(self) -> str: """Immediatly stop moving.""" if not self._skill_started: @@ -320,57 +294,7 @@ def _cancel_goal_and_stop(self) -> None: logger.warning("Navigation module not connected, cannot cancel goal") return - try: - stop_exploration_rpc = self.get_rpc_calls("WavefrontFrontierExplorer.stop_exploration") - except Exception: - logger.warning("FrontierExplorer module not connected, cannot stop exploration") - return - cancel_goal_rpc() - return stop_exploration_rpc() # type: ignore[no-any-return] - - @skill() - def start_exploration(self, timeout: float = 240.0) -> str: - """A skill that performs autonomous frontier exploration. - - This skill continuously finds and navigates to unknown frontiers in the environment - until no more frontiers are found or the exploration is stopped. - - Don't call any other skills except stop_movement skill when needed. - - Args: - timeout (float, optional): Maximum time (in seconds) allowed for exploration - """ - - if not self._skill_started: - raise ValueError(f"{self} has not been started.") - - try: - return self._start_exploration(timeout) - finally: - self._cancel_goal_and_stop() - - def _start_exploration(self, timeout: float) -> str: - try: - explore_rpc, is_exploration_active_rpc = self.get_rpc_calls( - "WavefrontFrontierExplorer.explore", - "WavefrontFrontierExplorer.is_exploration_active", - ) - except Exception: - return "Error: The WavefrontFrontierExplorer module is not connected." - - logger.info("Starting autonomous frontier exploration") - - start_time = time.time() - - has_started = explore_rpc() - if not has_started: - return "Error: Could not start exploration." 
- - while time.time() - start_time < timeout and is_exploration_active_rpc(): - time.sleep(0.5) - - return "Exploration completed successfuly" def _get_goal_pose_from_result(self, result: dict[str, Any]) -> PoseStamped | None: similarity = 1.0 - (result.get("distance") or 1) @@ -383,9 +307,7 @@ def _get_goal_pose_from_result(self, result: dict[str, Any]) -> PoseStamped | No metadata = result.get("metadata") if not metadata: return None - print(metadata) first = metadata[0] - print(first) pos_x = first.get("pos_x", 0) pos_y = first.get("pos_y", 0) theta = first.get("rot_z", 0) diff --git a/dimos/agents/skills/osm.py b/dimos/agents/skills/osm.py index 71f453069f..9bb731cf72 100644 --- a/dimos/agents/skills/osm.py +++ b/dimos/agents/skills/osm.py @@ -13,19 +13,19 @@ # limitations under the License. -from dimos.core.skill_module import SkillModule +from dimos.agents.annotation import skill +from dimos.core.module import Module from dimos.core.stream import In from dimos.mapping.osm.current_location_map import CurrentLocationMap from dimos.mapping.types import LatLon from dimos.mapping.utils.distance import distance_in_meters from dimos.models.vl.qwen import QwenVlModel -from dimos.protocol.skill.skill import skill from dimos.utils.logging_config import setup_logger logger = setup_logger() -class OsmSkill(SkillModule): +class OsmSkill(Module): _latest_location: LatLon | None _current_location_map: CurrentLocationMap @@ -46,7 +46,7 @@ def stop(self) -> None: def _on_gps_location(self, location: LatLon) -> None: self._latest_location = location - @skill() + @skill def map_query(self, query_sentence: str) -> str: """This skill uses a vision language model to find something on the map based on the query sentence. 
You can query it with something like "Where diff --git a/dimos/agents/skills/person_follow.py b/dimos/agents/skills/person_follow.py index 0d4420632c..641055e6f6 100644 --- a/dimos/agents/skills/person_follow.py +++ b/dimos/agents/skills/person_follow.py @@ -12,16 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from threading import Event, RLock +from threading import Event, RLock, Thread import time from typing import TYPE_CHECKING +from langchain_core.messages import HumanMessage import numpy as np from reactivex.disposable import Disposable +from dimos.agents.agent import AgentSpec +from dimos.agents.annotation import skill from dimos.core.core import rpc from dimos.core.global_config import GlobalConfig -from dimos.core.skill_module import SkillModule +from dimos.core.module import Module from dimos.core.stream import In, Out from dimos.models.qwen.video_query import BBox from dimos.models.segmentation.edge_tam import EdgeTAMProcessor @@ -31,7 +34,6 @@ from dimos.navigation.visual.query import get_object_bbox_from_image from dimos.navigation.visual_servoing.detection_navigation import DetectionNavigation from dimos.navigation.visual_servoing.visual_servoing_2d import VisualServoing2D -from dimos.protocol.skill.skill import skill from dimos.utils.logging_config import setup_logger if TYPE_CHECKING: @@ -40,7 +42,7 @@ logger = setup_logger() -class PersonFollowSkillContainer(SkillModule): +class PersonFollowSkillContainer(Module): """Skill container for following a person. 
This skill uses: @@ -54,28 +56,30 @@ class PersonFollowSkillContainer(SkillModule): global_map: In[PointCloud2] cmd_vel: Out[Twist] + _agent_spec: AgentSpec _frequency: float = 20.0 # Hz - control loop frequency _max_lost_frames: int = 15 # number of frames to wait before declaring person lost def __init__( self, camera_info: CameraInfo, - global_config: GlobalConfig, + cfg: GlobalConfig, use_3d_navigation: bool = False, ) -> None: super().__init__() - self._global_config: GlobalConfig = global_config + self._global_config: GlobalConfig = cfg self._use_3d_navigation: bool = use_3d_navigation self._latest_image: Image | None = None self._latest_pointcloud: PointCloud2 | None = None self._vl_model: VlModel = QwenVlModel() self._tracker: EdgeTAMProcessor | None = None + self._thread: Thread | None = None self._should_stop: Event = Event() self._lock = RLock() # Use MuJoCo camera intrinsics in simulation mode if self._global_config.simulation: - from dimos.robot.unitree_webrtc.mujoco_connection import MujocoConnection + from dimos.robot.unitree.mujoco_connection import MujocoConnection camera_info = MujocoConnection.camera_info_static @@ -102,7 +106,7 @@ def stop(self) -> None: self._vl_model.stop() super().stop() - @skill() + @skill def follow_person(self, query: str) -> str: """Follow a person matching the given description using visual servoing. @@ -139,9 +143,9 @@ def follow_person(self, query: str) -> str: if initial_bbox is None: return f"Could not find '{query}' in the current view." - return self._follow_loop(query, initial_bbox) + return self._follow_person(query, initial_bbox) - @skill() + @skill def stop_following(self) -> str: """Stop following the current person. @@ -152,6 +156,10 @@ def stop_following(self) -> str: self.cmd_vel.publish(Twist.zero()) + if self._thread is not None: + self._thread.join(timeout=2) + self._thread = None + return "Stopped following." 
def _on_color_image(self, image: Image) -> None: @@ -162,7 +170,7 @@ def _on_pointcloud(self, pointcloud: PointCloud2) -> None: with self._lock: self._latest_pointcloud = pointcloud - def _follow_loop(self, query: str, initial_bbox: BBox) -> str: + def _follow_person(self, query: str, initial_bbox: BBox) -> str: x1, y1, x2, y2 = initial_bbox box = np.array([x1, y1, x2, y2], dtype=np.float32) @@ -186,6 +194,15 @@ def _follow_loop(self, query: str, initial_bbox: BBox) -> str: logger.info(f"EdgeTAM initialized with {len(initial_detections)} detections") + self._thread = Thread(target=self._follow_loop, args=(tracker, query)) + self._thread.start() + + return ( + "Found the person. Starting to follow. You can stop following by calling " + "the 'stop_following' tool." + ) + + def _follow_loop(self, tracker: EdgeTAMProcessor, query: str) -> None: lost_count = 0 period = 1.0 / self._frequency next_time = time.monotonic() @@ -204,8 +221,8 @@ def _follow_loop(self, query: str, initial_bbox: BBox) -> str: lost_count += 1 if lost_count > self._max_lost_frames: - self.cmd_vel.publish(Twist.zero()) - return f"Lost track of '{query}'. Stopping." + self._send_stop_reason(query, "lost track of the person") + return else: lost_count = 0 best_detection = max(detections.detections, key=lambda d: d.bbox_2d_volume()) @@ -214,16 +231,16 @@ def _follow_loop(self, query: str, initial_bbox: BBox) -> str: with self._lock: pointcloud = self._latest_pointcloud if pointcloud is None: - self.cmd_vel.publish(Twist.zero()) - return "No pointcloud available for 3D navigation. Stopping." + self._send_stop_reason(query, "no pointcloud available for 3D navigation") + return twist = self._detection_navigation.compute_twist_for_detection_3d( pointcloud, best_detection, latest_image, ) if twist is None: - self.cmd_vel.publish(Twist.zero()) - return f"3D navigation failed for '{query}'. Stopping." 
+ self._send_stop_reason(query, "3D navigation failed") + return else: twist = self._visual_servo.compute_twist( best_detection.bbox, @@ -236,12 +253,17 @@ def _follow_loop(self, query: str, initial_bbox: BBox) -> str: if sleep_duration > 0: time.sleep(sleep_duration) - self.cmd_vel.publish(Twist.zero()) - return "Stopped following as requested." + self._send_stop_reason(query, "it was requested to stop following") def _stop_following(self) -> None: self._should_stop.set() + def _send_stop_reason(self, query: str, reason: str) -> None: + self.cmd_vel.publish(Twist.zero()) + message = f"Person follow stopped for '{query}'. Reason: {reason}." + self._agent_spec.add_message(HumanMessage(message)) + logger.info("Person follow stopped", query=query, reason=reason) + person_follow_skill = PersonFollowSkillContainer.blueprint diff --git a/dimos/agents/skills/speak_skill.py b/dimos/agents/skills/speak_skill.py index 073dda656a..aa06d30ba4 100644 --- a/dimos/agents/skills/speak_skill.py +++ b/dimos/agents/skills/speak_skill.py @@ -17,9 +17,9 @@ from reactivex import Subject +from dimos.agents.annotation import skill from dimos.core.core import rpc -from dimos.core.skill_module import SkillModule -from dimos.protocol.skill.skill import skill +from dimos.core.module import Module from dimos.stream.audio.node_output import SounddeviceAudioOutput from dimos.stream.audio.tts.node_openai import OpenAITTSNode, Voice from dimos.utils.logging_config import setup_logger @@ -27,7 +27,7 @@ logger = setup_logger() -class SpeakSkill(SkillModule): +class SpeakSkill(Module): _tts_node: OpenAITTSNode | None = None _audio_output: SounddeviceAudioOutput | None = None _audio_lock: threading.Lock = threading.Lock() @@ -49,7 +49,7 @@ def stop(self) -> None: self._audio_output = None super().stop() - @skill() + @skill def speak(self, text: str) -> str: """Speak text out loud through the robot's speakers. 
diff --git a/dimos/agents/skills/test_google_maps_skill_container.py b/dimos/agents/skills/test_google_maps_skill_container.py index 0af206fbb1..84da91e886 100644 --- a/dimos/agents/skills/test_google_maps_skill_container.py +++ b/dimos/agents/skills/test_google_maps_skill_container.py @@ -14,34 +14,83 @@ import re +from langchain_core.messages import HumanMessage +import pytest + +from dimos.agents.skills.google_maps_skill_container import GoogleMapsSkillContainer +from dimos.core.module import Module +from dimos.core.stream import Out from dimos.mapping.google_maps.types import Coordinates, LocationContext, Position from dimos.mapping.types import LatLon -def test_where_am_i(create_google_maps_agent, google_maps_skill_container) -> None: - google_maps_skill_container._latest_location = LatLon(lat=37.782654, lon=-122.413273) - google_maps_skill_container._client.get_location_context.return_value = LocationContext( - street="Bourbon Street", coordinates=Coordinates(lat=37.782654, lon=-122.413273) - ) - agent = create_google_maps_agent(fixture="test_where_am_i.json") +class FakeGPS(Module): + """Provides a gps_location output so GoogleMapsSkillContainer's input port gets a transport.""" + + gps_location: Out[LatLon] + + +class FakeLocationClient: + def get_location_context(self, location, radius=200): + return LocationContext( + street="Bourbon Street", + coordinates=Coordinates(lat=37.782654, lon=-122.413273), + ) + + +class MockedWhereAmISkill(GoogleMapsSkillContainer): + def __init__(self): + Module.__init__(self) # Skip GoogleMapsSkillContainer's __init__. 
+ self._client = FakeLocationClient() + self._latest_location = LatLon(lat=37.782654, lon=-122.413273) + self._started = True + self._max_valid_distance = 20000 - response = agent.query("what street am I on") - assert "bourbon" in response.lower() +class FakePositionClient: + def __init__(self): + self._positions = iter( + [ + Position(lat=37.782601, lon=-122.413201, description="address 1"), + Position(lat=37.782602, lon=-122.413202, description="address 2"), + Position(lat=37.782603, lon=-122.413203, description="address 3"), + ] + ) + def get_position(self, query, location): + return next(self._positions) -def test_get_gps_position_for_queries( - create_google_maps_agent, google_maps_skill_container -) -> None: - google_maps_skill_container._latest_location = LatLon(lat=37.782654, lon=-122.413273) - google_maps_skill_container._client.get_position.side_effect = [ - Position(lat=37.782601, lon=-122.413201, description="address 1"), - Position(lat=37.782602, lon=-122.413202, description="address 2"), - Position(lat=37.782603, lon=-122.413203, description="address 3"), - ] - agent = create_google_maps_agent(fixture="test_get_gps_position_for_queries.json") - response = agent.query("what are the lat/lon for hyde park, regent park, russell park?") +class MockedPositionSkill(GoogleMapsSkillContainer): + def __init__(self): + Module.__init__(self) + self._client = FakePositionClient() + self._latest_location = LatLon(lat=37.782654, lon=-122.413273) + self._started = True + self._max_valid_distance = 20000 + + +@pytest.mark.integration +def test_where_am_i(agent_setup) -> None: + history = agent_setup( + blueprints=[FakeGPS.blueprint(), MockedWhereAmISkill.blueprint()], + messages=[HumanMessage("What street am I on? 
Use the where_am_i tool.")], + ) + + assert "bourbon" in history[-1].content.lower() + + +@pytest.mark.integration +def test_get_gps_position_for_queries(agent_setup) -> None: + history = agent_setup( + blueprints=[FakeGPS.blueprint(), MockedPositionSkill.blueprint()], + messages=[ + HumanMessage( + "What are the lat/lon for hyde park, regent park, russell park? " + "Use the get_gps_position_for_queries tool." + ) + ], + ) regex = r".*37\.782601.*122\.413201.*37\.782602.*122\.413202.*37\.782603.*122\.413203.*" - assert re.match(regex, response, re.DOTALL) + assert re.match(regex, history[-1].content, re.DOTALL) diff --git a/dimos/agents/skills/test_gps_nav_skills.py b/dimos/agents/skills/test_gps_nav_skills.py index ab0d1ec318..afcb4d36d0 100644 --- a/dimos/agents/skills/test_gps_nav_skills.py +++ b/dimos/agents/skills/test_gps_nav_skills.py @@ -12,47 +12,57 @@ # See the License for the specific language governing permissions and # limitations under the License. +from langchain_core.messages import HumanMessage +import pytest +from dimos.agents.skills.gps_nav_skill import GpsNavSkillContainer +from dimos.core.module import Module +from dimos.core.stream import Out from dimos.mapping.types import LatLon -def test_set_gps_travel_points(create_gps_nav_agent, gps_nav_skill_container, mocker) -> None: - gps_nav_skill_container._latest_location = LatLon(lat=37.782654, lon=-122.413273) - gps_nav_skill_container._set_gps_travel_goal_points = mocker.Mock() - agent = create_gps_nav_agent(fixture="test_set_gps_travel_points.json") +class FakeGPS(Module): + """Provides a gps_location output so GpsNavSkillContainer's input port gets a transport.""" - agent.query("go to lat: 37.782654, lon: -122.413273") + gps_location: Out[LatLon] - gps_nav_skill_container._set_gps_travel_goal_points.assert_called_once_with( - [LatLon(lat=37.782654, lon=-122.413273)] - ) - gps_nav_skill_container.gps_goal.publish.assert_called_once_with( - [LatLon(lat=37.782654, lon=-122.413273)] - ) +class 
MockedGpsNavSkill(GpsNavSkillContainer): + def __init__(self): + Module.__init__(self) + self._latest_location = LatLon(lat=37.782654, lon=-122.413273) + self._started = True + self._max_valid_distance = 50000 -def test_set_gps_travel_points_multiple( - create_gps_nav_agent, gps_nav_skill_container, mocker -) -> None: - gps_nav_skill_container._latest_location = LatLon(lat=37.782654, lon=-122.413273) - gps_nav_skill_container._set_gps_travel_goal_points = mocker.Mock() - agent = create_gps_nav_agent(fixture="test_set_gps_travel_points_multiple.json") - agent.query( - "go to lat: 37.782654, lon: -122.413273, then 37.782660,-122.413260, and then 37.782670,-122.413270" +@pytest.mark.integration +def test_set_gps_travel_points(agent_setup) -> None: + history = agent_setup( + blueprints=[FakeGPS.blueprint(), MockedGpsNavSkill.blueprint()], + messages=[ + HumanMessage( + 'Set GPS travel points to [{"lat": 37.782654, "lon": -122.413273}]. ' + "Use the set_gps_travel_points tool." + ) + ], ) - gps_nav_skill_container._set_gps_travel_goal_points.assert_called_once_with( - [ - LatLon(lat=37.782654, lon=-122.413273), - LatLon(lat=37.782660, lon=-122.413260), - LatLon(lat=37.782670, lon=-122.413270), - ] - ) - gps_nav_skill_container.gps_goal.publish.assert_called_once_with( - [ - LatLon(lat=37.782654, lon=-122.413273), - LatLon(lat=37.782660, lon=-122.413260), - LatLon(lat=37.782670, lon=-122.413270), - ] + assert "success" in history[-1].content.lower() + + +@pytest.mark.integration +def test_set_gps_travel_points_multiple(agent_setup) -> None: + history = agent_setup( + blueprints=[FakeGPS.blueprint(), MockedGpsNavSkill.blueprint()], + messages=[ + HumanMessage( + "Set GPS travel points to these locations in order: " + '{"lat": 37.782654, "lon": -122.413273}, ' + '{"lat": 37.782660, "lon": -122.413260}, ' + '{"lat": 37.782670, "lon": -122.413270}. ' + "Use the set_gps_travel_points tool." 
+ ) + ], ) + + assert "success" in history[-1].content.lower() diff --git a/dimos/agents/skills/test_navigation.py b/dimos/agents/skills/test_navigation.py index 67e0429cb5..91737ada77 100644 --- a/dimos/agents/skills/test_navigation.py +++ b/dimos/agents/skills/test_navigation.py @@ -12,84 +12,105 @@ # See the License for the specific language governing permissions and # limitations under the License. +from langchain_core.messages import HumanMessage import pytest -from dimos.msgs.geometry_msgs import PoseStamped, Vector3 -from dimos.utils.transform_utils import euler_to_quaternion +from dimos.agents.skills.navigation import NavigationSkillContainer +from dimos.core.module import Module +from dimos.core.stream import Out +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.msgs.sensor_msgs import Image -def test_stop_movement(create_navigation_agent, navigation_skill_container, mocker) -> None: - cancel_goal_mock = mocker.Mock() - stop_exploration_mock = mocker.Mock() - navigation_skill_container._bound_rpc_calls["NavigationInterface.cancel_goal"] = ( - cancel_goal_mock - ) - navigation_skill_container._bound_rpc_calls["WavefrontFrontierExplorer.stop_exploration"] = ( - stop_exploration_mock - ) - agent = create_navigation_agent(fixture="test_stop_movement.json") +class FakeCamera(Module): + color_image: Out[Image] + + +class FakeOdom(Module): + odom: Out[PoseStamped] + + +class MockedStopNavSkill(NavigationSkillContainer): + rpc_calls: list[str] = [] + + def __init__(self): + Module.__init__(self) + self._skill_started = True + + def _cancel_goal_and_stop(self): + pass + + +class MockedExploreNavSkill(NavigationSkillContainer): + rpc_calls: list[str] = [] + + def __init__(self): + Module.__init__(self) + self._skill_started = True + + def _start_exploration(self, timeout): + return "Exploration completed successfuly" + + def _cancel_goal_and_stop(self): + pass + - agent.query("stop") +class MockedSemanticNavSkill(NavigationSkillContainer): + rpc_calls: 
list[str] = [] - cancel_goal_mock.assert_called_once_with() - stop_exploration_mock.assert_called_once_with() + def __init__(self): + Module.__init__(self) + self._skill_started = True + + def _navigate_by_tagged_location(self, query): + return None + + def _navigate_to_object(self, query): + return None + + def _navigate_using_semantic_map(self, query): + return f"Successfuly arrived at '{query}'" @pytest.mark.integration -def test_take_a_look_around(create_navigation_agent, navigation_skill_container, mocker) -> None: - explore_mock = mocker.Mock() - is_exploration_active_mock = mocker.Mock() - navigation_skill_container._bound_rpc_calls["WavefrontFrontierExplorer.explore"] = explore_mock - navigation_skill_container._bound_rpc_calls[ - "WavefrontFrontierExplorer.is_exploration_active" - ] = is_exploration_active_mock - mocker.patch("dimos.agents.skills.navigation.time.sleep") - agent = create_navigation_agent(fixture="test_take_a_look_around.json") - - agent.query("take a look around for 10 seconds") - - explore_mock.assert_called_once_with() - - -def test_go_to_semantic_location( - create_navigation_agent, navigation_skill_container, mocker -) -> None: - mocker.patch( - "dimos.agents.skills.navigation.NavigationSkillContainer._navigate_by_tagged_location", - return_value=None, - ) - mocker.patch( - "dimos.agents.skills.navigation.NavigationSkillContainer._navigate_to_object", - return_value=None, +def test_stop_movement(agent_setup) -> None: + history = agent_setup( + blueprints=[ + FakeCamera.blueprint(), + FakeOdom.blueprint(), + MockedStopNavSkill.blueprint(), + ], + messages=[HumanMessage("Stop moving. 
Use the stop_movement tool.")], ) - navigate_to_mock = mocker.patch( - "dimos.agents.skills.navigation.NavigationSkillContainer._navigate_to", - return_value=True, - ) - query_by_text_mock = mocker.Mock( - return_value=[ - { - "distance": 0.5, - "metadata": [ - { - "pos_x": 1, - "pos_y": 2, - "rot_z": 3, - } - ], - } - ] + + assert "stopped" in history[-1].content.lower() + + +@pytest.mark.integration +def test_start_exploration(agent_setup) -> None: + history = agent_setup( + blueprints=[ + FakeCamera.blueprint(), + FakeOdom.blueprint(), + MockedExploreNavSkill.blueprint(), + ], + messages=[ + HumanMessage("Take a look around for 10 seconds. Use the start_exploration tool.") + ], ) - navigation_skill_container._bound_rpc_calls["SpatialMemory.query_by_text"] = query_by_text_mock - agent = create_navigation_agent(fixture="test_go_to_semantic_location.json") - - agent.query("go to the bookshelf") - - query_by_text_mock.assert_called_once_with("bookshelf") - navigate_to_mock.assert_called_once_with( - PoseStamped( - position=Vector3(1, 2, 0), - orientation=euler_to_quaternion(Vector3(0, 0, 3)), - frame_id="world", - ), + + assert "explor" in history[-1].content.lower() + + +@pytest.mark.integration +def test_go_to_semantic_location(agent_setup) -> None: + history = agent_setup( + blueprints=[ + FakeCamera.blueprint(), + FakeOdom.blueprint(), + MockedSemanticNavSkill.blueprint(), + ], + messages=[HumanMessage("Go to the bookshelf. Use the navigate_with_text tool.")], ) + + assert "success" in history[-1].content.lower() diff --git a/dimos/agents/skills/test_unitree_skill_container.py b/dimos/agents/skills/test_unitree_skill_container.py index 29dfade979..ea1cfba5cf 100644 --- a/dimos/agents/skills/test_unitree_skill_container.py +++ b/dimos/agents/skills/test_unitree_skill_container.py @@ -12,21 +12,35 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import difflib -def test_pounce(mocker, create_unitree_skills_agent, unitree_skills) -> None: - agent = create_unitree_skills_agent(fixture="test_pounce.json") - publish_request_mock = mocker.Mock() - unitree_skills.get_rpc_calls = mocker.Mock(return_value=publish_request_mock) +from langchain_core.messages import HumanMessage +import pytest - response = agent.query("pounce") +from dimos.robot.unitree.unitree_skill_container import _UNITREE_COMMANDS, UnitreeSkillContainer - assert "front pounce" in response.lower() - publish_request_mock.assert_called_once_with("rt/api/sport/request", {"api_id": 1032}) +class MockedUnitreeSkill(UnitreeSkillContainer): + rpc_calls: list[str] = [] -def test_did_you_mean(mocker, unitree_skills) -> None: - unitree_skills.get_rpc_calls = mocker.Mock() - assert ( - unitree_skills.execute_sport_command("Pounce") - == "There's no 'Pounce' command. Did you mean: ['FrontPounce', 'Pose']" + def __init__(self): + super().__init__() + # Provide a fake RPC so the real execute_sport_command runs end-to-end. + self._bound_rpc_calls["GO2Connection.publish_request"] = lambda *args, **kwargs: None + + +@pytest.mark.integration +def test_pounce(agent_setup) -> None: + history = agent_setup( + blueprints=[MockedUnitreeSkill.blueprint()], + messages=[HumanMessage("Pounce! Use the execute_sport_command tool.")], ) + + response = history[-1].content.lower() + assert "pounce" in response + + +def test_did_you_mean() -> None: + suggestions = difflib.get_close_matches("Pounce", _UNITREE_COMMANDS.keys(), n=3, cutoff=0.6) + assert "FrontPounce" in suggestions + assert "Pose" in suggestions diff --git a/dimos/agents/spec.py b/dimos/agents/spec.py deleted file mode 100644 index b0a0324e89..0000000000 --- a/dimos/agents/spec.py +++ /dev/null @@ -1,242 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Base agent module that wraps BaseAgent for DimOS module usage.""" - -from abc import ABC, abstractmethod -from dataclasses import dataclass, field -from enum import Enum -from typing import TYPE_CHECKING, Any, Union - -if TYPE_CHECKING: - from dimos.protocol.skill.skill import SkillContainer - -from langchain.chat_models.base import _SUPPORTED_PROVIDERS -from langchain_core.language_models.chat_models import BaseChatModel -from langchain_core.messages import ( - AIMessage, - HumanMessage, - SystemMessage, - ToolMessage, -) -from rich.console import Console -from rich.table import Table -from rich.text import Text - -from dimos.core import Module, rpc -from dimos.core.module import ModuleConfig -from dimos.protocol.pubsub import PubSub, lcm # type: ignore[attr-defined] -from dimos.protocol.service import Service # type: ignore[attr-defined] -from dimos.protocol.skill.skill import SkillContainer -from dimos.utils.generic import truncate_display_string -from dimos.utils.logging_config import setup_logger - -logger = setup_logger() - - -# Dynamically create ModelProvider enum from LangChain's supported providers -_providers = {provider.upper(): provider for provider in _SUPPORTED_PROVIDERS} -Provider = Enum("Provider", _providers, type=str) # type: ignore[misc] - - -class Model(str, Enum): - """Common model names across providers. - - Note: This is not exhaustive as model names change frequently. - Based on langchain's _attempt_infer_model_provider patterns. 
- """ - - # OpenAI models (prefix: gpt-3, gpt-4, o1, o3) - GPT_4O = "gpt-4o" - GPT_4O_MINI = "gpt-4o-mini" - GPT_4_TURBO = "gpt-4-turbo" - GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview" - GPT_4 = "gpt-4" - GPT_35_TURBO = "gpt-3.5-turbo" - GPT_35_TURBO_16K = "gpt-3.5-turbo-16k" - O1_PREVIEW = "o1-preview" - O1_MINI = "o1-mini" - O3_MINI = "o3-mini" - - # Anthropic models (prefix: claude) - CLAUDE_3_OPUS = "claude-3-opus-20240229" - CLAUDE_3_SONNET = "claude-3-sonnet-20240229" - CLAUDE_3_HAIKU = "claude-3-haiku-20240307" - CLAUDE_35_SONNET = "claude-3-5-sonnet-20241022" - CLAUDE_35_SONNET_LATEST = "claude-3-5-sonnet-latest" - CLAUDE_3_7_SONNET = "claude-3-7-sonnet-20250219" - - # Google models (prefix: gemini) - GEMINI_20_FLASH = "gemini-2.0-flash" - GEMINI_15_PRO = "gemini-1.5-pro" - GEMINI_15_FLASH = "gemini-1.5-flash" - GEMINI_10_PRO = "gemini-1.0-pro" - - # Amazon Bedrock models (prefix: amazon) - AMAZON_TITAN_EXPRESS = "amazon.titan-text-express-v1" - AMAZON_TITAN_LITE = "amazon.titan-text-lite-v1" - - # Cohere models (prefix: command) - COMMAND_R_PLUS = "command-r-plus" - COMMAND_R = "command-r" - COMMAND = "command" - COMMAND_LIGHT = "command-light" - - # Fireworks models (prefix: accounts/fireworks) - FIREWORKS_LLAMA_V3_70B = "accounts/fireworks/models/llama-v3-70b-instruct" - FIREWORKS_MIXTRAL_8X7B = "accounts/fireworks/models/mixtral-8x7b-instruct" - - # Mistral models (prefix: mistral) - MISTRAL_LARGE = "mistral-large" - MISTRAL_MEDIUM = "mistral-medium" - MISTRAL_SMALL = "mistral-small" - MIXTRAL_8X7B = "mixtral-8x7b" - MIXTRAL_8X22B = "mixtral-8x22b" - MISTRAL_7B = "mistral-7b" - - # DeepSeek models (prefix: deepseek) - DEEPSEEK_CHAT = "deepseek-chat" - DEEPSEEK_CODER = "deepseek-coder" - DEEPSEEK_R1_DISTILL_LLAMA_70B = "deepseek-r1-distill-llama-70b" - - # xAI models (prefix: grok) - GROK_1 = "grok-1" - GROK_2 = "grok-2" - - # Perplexity models (prefix: sonar) - SONAR_SMALL_CHAT = "sonar-small-chat" - SONAR_MEDIUM_CHAT = "sonar-medium-chat" - 
SONAR_LARGE_CHAT = "sonar-large-chat" - - # Meta Llama models (various providers) - LLAMA_3_70B = "llama-3-70b" - LLAMA_3_8B = "llama-3-8b" - LLAMA_31_70B = "llama-3.1-70b" - LLAMA_31_8B = "llama-3.1-8b" - LLAMA_33_70B = "llama-3.3-70b" - LLAMA_2_70B = "llama-2-70b" - LLAMA_2_13B = "llama-2-13b" - LLAMA_2_7B = "llama-2-7b" - - -@dataclass -class AgentConfig(ModuleConfig): - system_prompt: str | SystemMessage | None = None - skills: SkillContainer | list[SkillContainer] | None = None - - # we can provide model/provvider enums or instantiated model_instance - model: Model = Model.GPT_4O - provider: Provider = Provider.OPENAI # type: ignore[attr-defined] - model_instance: BaseChatModel | None = None - - agent_transport: type[PubSub] = lcm.PickleLCM # type: ignore[type-arg] - agent_topic: Any = field(default_factory=lambda: lcm.Topic("/agent")) - - -AnyMessage = Union[SystemMessage, ToolMessage, AIMessage, HumanMessage] - - -class AgentSpec(Service[AgentConfig], Module, ABC): - default_config: type[AgentConfig] = AgentConfig - - def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def] - Service.__init__(self, *args, **kwargs) - Module.__init__(self, *args, **kwargs) - - if self.config.agent_transport: - self.transport = self.config.agent_transport() - - def publish(self, msg: AnyMessage) -> None: - if self.transport: - self.transport.publish(self.config.agent_topic, msg) - - def start(self) -> None: - super().start() - - def stop(self) -> None: - if hasattr(self, "transport") and self.transport: - self.transport.stop() # type: ignore[attr-defined] - self.transport = None # type: ignore[assignment] - super().stop() - - @rpc - @abstractmethod - def clear_history(self): ... # type: ignore[no-untyped-def] - - @abstractmethod - def append_history(self, *msgs: list[AIMessage | HumanMessage]): ... # type: ignore[no-untyped-def] - - @abstractmethod - def history(self) -> list[AnyMessage]: ... 
- - @rpc - @abstractmethod - def register_skills( - self, container: "SkillContainer", run_implicit_name: str | None = None - ) -> None: ... - - @rpc - @abstractmethod - def query(self, query: str): ... # type: ignore[no-untyped-def] - - def __str__(self) -> str: - console = Console(force_terminal=True, legacy_windows=False) - table = Table(show_header=True) - - table.add_column("Message Type", style="cyan", no_wrap=True) - table.add_column("Content") - - for message in self.history(): - if isinstance(message, HumanMessage): - content = message.content - if not isinstance(content, str): - content = "" - - table.add_row(Text("Human", style="green"), Text(content, style="green")) - elif isinstance(message, AIMessage): - if hasattr(message, "metadata") and message.metadata.get("state"): - table.add_row( - Text("State Summary", style="blue"), - Text(message.content, style="blue"), # type: ignore[arg-type] - ) - else: - table.add_row( - Text("Agent", style="magenta"), - Text(message.content, style="magenta"), # type: ignore[arg-type] - ) - - for tool_call in message.tool_calls: - table.add_row( - "Tool Call", - Text( - f"{tool_call.get('name')}({tool_call.get('args')})", - style="bold magenta", - ), - ) - elif isinstance(message, ToolMessage): - table.add_row( - "Tool Response", Text(f"{message.name}() -> {message.content}"), style="red" - ) - elif isinstance(message, SystemMessage): - table.add_row( - "System", Text(truncate_display_string(message.content, 800), style="yellow") - ) - else: - table.add_row("Unknown", str(message)) - - # Render to string with title above - with console.capture() as capture: - console.print(Text(f" Agent ({self._agent_id})", style="bold blue")) # type: ignore[attr-defined] - console.print(table) - return capture.get().strip() diff --git a/dimos/agents/temp/webcam_agent.py b/dimos/agents/temp/webcam_agent.py deleted file mode 100644 index b09ec2e1d8..0000000000 --- a/dimos/agents/temp/webcam_agent.py +++ /dev/null @@ -1,151 +0,0 @@ 
-#!/usr/bin/env python3 -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Run script for Unitree Go2 robot with agents framework. -This is the migrated version using the new LangChain-based agent system. -""" - -from threading import Thread -import time - -import reactivex as rx -import reactivex.operators as ops - -from dimos.agents import Agent, Output, Reducer, Stream, skill # type: ignore[attr-defined] -from dimos.agents.cli.human import HumanInput -from dimos.agents.spec import Model, Provider -from dimos.core import LCMTransport, Module, rpc, start -from dimos.hardware.sensors.camera import zed -from dimos.hardware.sensors.camera.module import CameraModule -from dimos.hardware.sensors.camera.webcam import Webcam -from dimos.msgs.geometry_msgs import Quaternion, Transform, Vector3 -from dimos.msgs.sensor_msgs import CameraInfo, Image -from dimos.protocol.skill.test_coordinator import SkillContainerTest -from dimos.web.robot_web_interface import RobotWebInterface - - -class WebModule(Module): - web_interface: RobotWebInterface = None # type: ignore[assignment] - human_query: rx.subject.Subject = None # type: ignore[assignment, type-arg] - agent_response: rx.subject.Subject = None # type: ignore[assignment, type-arg] - - thread: Thread = None # type: ignore[assignment] - - _human_messages_running = False - - def __init__(self) -> None: - super().__init__() - self.agent_response = rx.subject.Subject() - self.human_query = 
rx.subject.Subject() - - @rpc - def start(self) -> None: - super().start() - - text_streams = { - "agent_responses": self.agent_response, - } - - self.web_interface = RobotWebInterface( - port=5555, - text_streams=text_streams, - audio_subject=rx.subject.Subject(), - ) - - unsub = self.web_interface.query_stream.subscribe(self.human_query.on_next) - self._disposables.add(unsub) - - self.thread = Thread(target=self.web_interface.run, daemon=True) - self.thread.start() - - @rpc - def stop(self) -> None: - if self.web_interface: - self.web_interface.stop() # type: ignore[attr-defined] - if self.thread: - # TODO, you can't just wait for a server to close, you have to signal it to end. - self.thread.join(timeout=1.0) - - super().stop() - - @skill(stream=Stream.call_agent, reducer=Reducer.all, output=Output.human) # type: ignore[arg-type] - def human_messages(self): # type: ignore[no-untyped-def] - """Provide human messages from web interface. Don't use this tool, it's running implicitly already""" - if self._human_messages_running: - print("human_messages already running, not starting another") - return "already running" - self._human_messages_running = True - while True: - print("Waiting for human message...") - message = self.human_query.pipe(ops.first()).run() - print(f"Got human message: {message}") - yield message - - -def main() -> None: - dimos = start(4) - # Create agent - agent = Agent( - system_prompt="You are a helpful assistant for controlling a Unitree Go2 robot. 
", - model=Model.GPT_4O, # Could add CLAUDE models to enum - provider=Provider.OPENAI, # type: ignore[attr-defined] # Would need ANTHROPIC provider - ) - - testcontainer = dimos.deploy(SkillContainerTest) # type: ignore[attr-defined] - webcam = dimos.deploy( # type: ignore[attr-defined] - CameraModule, - transform=Transform( - translation=Vector3(0.0, 0.0, 0.0), - rotation=Quaternion(0.0, 0.0, 0.0, 1.0), - frame_id="base_link", - child_frame_id="camera_link", - ), - hardware=lambda: Webcam( - camera_index=0, - fps=15, - stereo_slice="left", - camera_info=zed.CameraInfo.SingleWebcam, - ), - ) - - webcam.camera_info.transport = LCMTransport("/camera_info", CameraInfo) - - webcam.image.transport = LCMTransport("/image", Image) - - webcam.start() - - human_input = dimos.deploy(HumanInput) # type: ignore[attr-defined] - - time.sleep(1) - - agent.register_skills(human_input) - agent.register_skills(webcam) - agent.register_skills(testcontainer) - - agent.run_implicit_skill("video_stream") - agent.run_implicit_skill("human") - - agent.start() - agent.loop_thread() - - while True: - time.sleep(1) - - # webcam.stop() - - -if __name__ == "__main__": - main() diff --git a/dimos/agents/test_agent.py b/dimos/agents/test_agent.py index 934fa0360a..da69dfb7dc 100644 --- a/dimos/agents/test_agent.py +++ b/dimos/agents/test_agent.py @@ -1,4 +1,4 @@ -# Copyright 2025-2026 Dimensional Inc. +# Copyright 2026 Dimensional Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,158 +12,201 @@ # See the License for the specific language governing permissions and # limitations under the License. + +from langchain_core.messages import HumanMessage import pytest -import pytest_asyncio - -from dimos.agents.agent import Agent -from dimos.core import start -from dimos.protocol.skill.test_coordinator import SkillContainerTest - -system_prompt = ( - "Your name is Mr. Potato, potatoes are bad at math. 
Use a tools if asked to calculate" -) - - -@pytest.fixture(scope="session") -def dimos_cluster(): - """Session-scoped fixture to initialize dimos cluster once.""" - dimos = start(2) - try: - yield dimos - finally: - dimos.shutdown() - - -@pytest_asyncio.fixture -async def local(): - """Local context: both agent and testcontainer run locally""" - testcontainer = SkillContainerTest() - agent = Agent(system_prompt=system_prompt) - try: - yield agent, testcontainer - except Exception as e: - print(f"Error: {e}") - import traceback - - traceback.print_exc() - raise e - finally: - # Ensure cleanup happens while event loop is still active - try: - agent.stop() - except Exception: - pass - try: - testcontainer.stop() - except Exception: - pass - - -@pytest_asyncio.fixture -async def dask_mixed(dimos_cluster): - """Dask context: testcontainer on dimos, agent local""" - testcontainer = dimos_cluster.deploy(SkillContainerTest) - agent = Agent(system_prompt=system_prompt) - try: - yield agent, testcontainer - finally: - try: - agent.stop() - except Exception: - pass - try: - testcontainer.stop() - except Exception: - pass - - -@pytest_asyncio.fixture -async def dask_full(dimos_cluster): - """Dask context: both agent and testcontainer deployed on dimos""" - testcontainer = dimos_cluster.deploy(SkillContainerTest) - agent = dimos_cluster.deploy(Agent, system_prompt=system_prompt) - try: - yield agent, testcontainer - finally: - try: - agent.stop() - except Exception: - pass - try: - testcontainer.stop() - except Exception: - pass - - -@pytest_asyncio.fixture(params=["local", "dask_mixed", "dask_full"]) -async def agent_context(request): - """Parametrized fixture that runs tests with different agent configurations""" - param = request.param - - if param == "local": - testcontainer = SkillContainerTest() - agent = Agent(system_prompt=system_prompt) - try: - yield agent, testcontainer - finally: - try: - agent.stop() - except Exception: - pass - try: - testcontainer.stop() - except 
Exception: - pass - elif param == "dask_mixed": - dimos_cluster = request.getfixturevalue("dimos_cluster") - testcontainer = dimos_cluster.deploy(SkillContainerTest) - agent = Agent(system_prompt=system_prompt) - try: - yield agent, testcontainer - finally: - try: - agent.stop() - except Exception: - pass - try: - testcontainer.stop() - except Exception: - pass - elif param == "dask_full": - dimos_cluster = request.getfixturevalue("dimos_cluster") - testcontainer = dimos_cluster.deploy(SkillContainerTest) - agent = dimos_cluster.deploy(Agent, system_prompt=system_prompt) - try: - yield agent, testcontainer - finally: - try: - agent.stop() - except Exception: - pass - try: - testcontainer.stop() - except Exception: - pass - - -# @pytest.mark.timeout(40) -@pytest.mark.tool -@pytest.mark.asyncio -async def test_agent_init(agent_context) -> None: - """Test agent initialization and basic functionality across different configurations""" - agent, testcontainer = agent_context - - agent.register_skills(testcontainer) - agent.start() - - # agent.run_implicit_skill("uptime_seconds") - - print("query agent") - # When running locally, call the async method directly - agent.query( - "hi there, please tell me what's your name and current date, and how much is 124181112 + 124124?" + +from dimos.agents.annotation import skill +from dimos.core.module import Module +from dimos.msgs.sensor_msgs import Image +from dimos.utils.data import get_data + + +class Adder(Module): + @skill + def add(self, x: int, y: int) -> str: + """adds x and y.""" + return str(x + y) + + +@pytest.mark.integration +@pytest.mark.parametrize("dask", [False, True]) +def test_can_call_tool(dask, agent_setup): + history = agent_setup( + blueprints=[Adder.blueprint()], + messages=[HumanMessage("What is 33333 + 100? 
Use the tool.")], + dask=dask, + ) + + assert "33433" in history[-1].content + + +class UserRegistration(Module): + def __init__(self): + super().__init__() + self._first_call = True + self._use_upper = False + + @skill + def register_user(self, name: str) -> str: + """registers a user by name.""" + + # If the agent calls with "paul" or "Paul", always say it's the wrong way + # to force it to try again. + + if self._first_call: + self._first_call = False + self._use_upper = not name[0].isupper() + + if self._use_upper and not name[0].isupper(): + return ValueError("Names must start with an uppercase letter.") + if not self._use_upper and name[0].isupper(): + return ValueError("The names must only use lowercase letters.") + + global _correct_name_registered + _correct_name_registered = True + return "User name registered successfully." + + +@pytest.mark.integration +@pytest.mark.parametrize("dask", [False, True]) +def test_can_call_again_on_error(dask, agent_setup): + history = agent_setup( + blueprints=[UserRegistration.blueprint()], + messages=[ + HumanMessage( + "Register a user named 'Paul'. If there are errors, just try again until you succeed." + ) + ], + dask=dask, + ) + + assert any(message.content == "User name registered successfully." for message in history) + + +class MultipleTools(Module): + def __init__(self): + super().__init__() + self._people = {"Ben": "office", "Bob": "garage"} + + @skill + def register_person(self, name: str) -> str: + """Registers a person by name.""" + if name.lower() == "john": + self._people[name] = "kitchen" + elif name.lower() == "jane": + self._people[name] = "living room" + return f"'{name}' has been registered." + + @skill + def locate_person(self, name: str) -> str: + """Locates a person by name.""" + if name not in self._people: + known_people = list(self._people.keys()) + return ( + f"Error: '{name}' is not registered. People cannot be located until they've " + f"been registered in the system. 
People known so far: {', '.join(known_people)}. " + "Use register_person to register a person." + ) + return f"'{name}' is located at '{self._people[name]}'." + + +class NavigationSkill(Module): + @skill + def go_to_location(self, description: str) -> str: + """Go to a location by a description.""" + if description.strip().lower() not in ["kitchen", "living room"]: + return f"Error: Unknown location description: '{description}'." + return f"Going to the {description}." + + +@pytest.mark.integration +def test_multiple_tool_calls_with_multiple_messages(agent_setup): + history = agent_setup( + blueprints=[MultipleTools.blueprint(), NavigationSkill.blueprint()], + messages=[ + HumanMessage( + "You are a robot assistant. Move to the location where John is. Don't ask me for feedback, just go there." + ), + HumanMessage("Nice job. You did it. Now go to the location where Jane is."), + ], + ) + + # Collect all go_to_location calls from the history + go_to_location_calls = [] + for message in history: + if hasattr(message, "tool_calls"): + for tool_call in message.tool_calls: + if tool_call["name"] == "go_to_location": + go_to_location_calls.append(tool_call) + + # Find the index of the second HumanMessage to split first/second prompt + second_human_idx = None + human_count = 0 + for i, message in enumerate(history): + if isinstance(message, HumanMessage): + human_count += 1 + if human_count == 2: + second_human_idx = i + break + + # Collect go_to_location calls before and after the second prompt + calls_after_first_prompt = [] + calls_after_second_prompt = [] + for i, message in enumerate(history): + if hasattr(message, "tool_calls"): + for tool_call in message.tool_calls: + if tool_call["name"] == "go_to_location": + if i < second_human_idx: + calls_after_first_prompt.append(tool_call) + else: + calls_after_second_prompt.append(tool_call) + + # After the first prompt, go_to_location should be called with "kitchen" + assert len(calls_after_first_prompt) == 1 + assert 
"kitchen" in calls_after_first_prompt[0]["args"]["description"].lower() + + # After the second prompt, go_to_location should be called with "living room" + assert len(calls_after_second_prompt) == 1 + assert "living room" in calls_after_second_prompt[0]["args"]["description"].lower() + + # There should be exactly two go_to_location calls total + assert len(go_to_location_calls) == 2 + + +@pytest.mark.integration +def test_prompt(agent_setup): + history = agent_setup( + blueprints=[], + messages=[HumanMessage("What is your name?")], + system_prompt="You are a helpful assistant named Johnny.", + ) + + assert "Johnny" in history[-1].content + + +class Visualizer(Module): + @skill + def take_a_picture(self) -> Image: + """Takes a picture.""" + return Image.from_file(get_data("cafe-smol.jpg")).to_rgb() + + +@pytest.mark.integration +def test_image(agent_setup): + history = agent_setup( + blueprints=[Visualizer.blueprint()], + messages=[ + HumanMessage( + "What do you see? Take a picture using your camera and describe it. " + "Please mention one of the words which best match the image: " + "'stadium', 'cafe', 'battleship'." + ) + ], + system_prompt="You are a helpful assistant that can use a camera to take pictures.", ) - print("Agent loop finished, asking about camera") - agent.query("tell me what you see on the camera?") - # you can run skillspy and agentspy in parallel with this test for a better observation of what's happening + response = history[-1].content.lower() + assert "cafe" in response + assert "stadium" not in response + assert "battleship" not in response diff --git a/dimos/agents/test_agent_direct.py b/dimos/agents/test_agent_direct.py deleted file mode 100644 index 4fc16a32b0..0000000000 --- a/dimos/agents/test_agent_direct.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2025-2026 Dimensional Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from contextlib import contextmanager - -from dimos.agents.agent import Agent -from dimos.core import start -from dimos.protocol.skill.test_coordinator import SkillContainerTest - -system_prompt = ( - "Your name is Mr. Potato, potatoes are bad at math. Use a tools if asked to calculate" -) - - -@contextmanager -def dimos_cluster(): - dimos = start(2) - try: - yield dimos - finally: - dimos.close_all() - - -@contextmanager -def local(): - """Local context: both agent and testcontainer run locally""" - testcontainer = SkillContainerTest() - agent = Agent(system_prompt=system_prompt) - try: - yield agent, testcontainer - except Exception as e: - print(f"Error: {e}") - import traceback - - traceback.print_exc() - raise e - finally: - # Ensure cleanup happens while event loop is still active - agent.stop() - testcontainer.stop() - - -@contextmanager -def partial(): - """Dask context: testcontainer on dimos, agent local""" - with dimos_cluster() as dimos: - testcontainer = dimos.deploy(SkillContainerTest) - agent = Agent(system_prompt=system_prompt) - try: - yield agent, testcontainer - finally: - agent.stop() - testcontainer.stop() - - -@contextmanager -def full(): - """Dask context: both agent and testcontainer deployed on dimos""" - with dimos_cluster() as dimos: - testcontainer = dimos.deploy(SkillContainerTest) - agent = dimos.deploy(Agent, system_prompt=system_prompt) - try: - yield agent, testcontainer - finally: - 
agent.stop() - testcontainer.stop() - - -def check_agent(agent_context) -> None: - """Test agent initialization and basic functionality across different configurations""" - with agent_context() as [agent, testcontainer]: - agent.register_skills(testcontainer) - agent.start() - - print("query agent") - - agent.query( - "hi there, please tell me what's your name and current date, and how much is 124181112 + 124124?" - ) - - print("Agent loop finished, asking about camera") - - agent.query("tell me what you see on the camera?") - - print("=" * 150) - print("End of test", agent.get_agent_id()) - print("=" * 150) - - # you can run skillspy and agentspy in parallel with this test for a better observation of what's happening - - -if __name__ == "__main__": - list(map(check_agent, [local, partial, full])) diff --git a/dimos/agents/test_agent_fake.py b/dimos/agents/test_agent_fake.py deleted file mode 100644 index e544765758..0000000000 --- a/dimos/agents/test_agent_fake.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pytest - - -@pytest.mark.integration -def test_what_is_your_name(create_potato_agent) -> None: - agent = create_potato_agent(fixture="test_what_is_your_name.json") - response = agent.query("hi there, please tell me what's your name?") - assert "Mr. 
Potato" in response - - -@pytest.mark.integration -def test_how_much_is_124181112_plus_124124(create_potato_agent) -> None: - agent = create_potato_agent(fixture="test_how_much_is_124181112_plus_124124.json") - - response = agent.query("how much is 124181112 + 124124?") - assert "124305236" in response.replace(",", "") - - response = agent.query("how much is one billion plus -1000000, in digits please") - assert "999000000" in response.replace(",", "") - - -@pytest.mark.integration -def test_what_do_you_see_in_this_picture(create_potato_agent) -> None: - agent = create_potato_agent(fixture="test_what_do_you_see_in_this_picture.json") - - response = agent.query("take a photo and tell me what do you see") - assert "outdoor cafe " in response diff --git a/dimos/agents/test_mock_agent.py b/dimos/agents/test_mock_agent.py deleted file mode 100644 index 4f449e973a..0000000000 --- a/dimos/agents/test_mock_agent.py +++ /dev/null @@ -1,203 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Test agent with FakeChatModel for unit testing.""" - -import time - -from dimos_lcm.sensor_msgs import CameraInfo -from langchain_core.messages import AIMessage, HumanMessage -import pytest - -from dimos.agents.agent import Agent -from dimos.agents.testing import MockModel -from dimos.core import LCMTransport, start -from dimos.msgs.geometry_msgs import PoseStamped, Vector3 -from dimos.msgs.sensor_msgs import Image, PointCloud2 -from dimos.protocol.skill.test_coordinator import SkillContainerTest -from dimos.robot.unitree.connection.go2 import GO2Connection - - -@pytest.mark.integration -def test_tool_call() -> None: - """Test agent initialization and tool call execution.""" - # Create a fake model that will respond with tool calls - fake_model = MockModel( - responses=[ - AIMessage( - content="I'll add those numbers for you.", - tool_calls=[ - { - "name": "add", - "args": {"args": {"x": 5, "y": 3}}, - "id": "tool_call_1", - } - ], - ), - AIMessage(content="Let me do some math..."), - AIMessage(content="The result of adding 5 and 3 is 8."), - ] - ) - - # Create agent with the fake model - agent = Agent( - model_instance=fake_model, - system_prompt="You are a helpful robot assistant with math skills.", - ) - - # Register skills with coordinator - skills = SkillContainerTest() - agent.coordinator.register_skills(skills) - agent.start() - - # Query the agent - agent.query("Please add 5 and 3") - - # Check that tools were bound - assert fake_model.tools is not None - assert len(fake_model.tools) > 0 - - # Verify the model was called and history updated - assert len(agent._history) > 0 - - agent.stop() - - -@pytest.mark.integration -def test_image_tool_call() -> None: - """Test agent with image tool call execution.""" - dimos = start(2) - # Create a fake model that will respond with image tool calls - fake_model = MockModel( - responses=[ - AIMessage( - content="I'll take a photo for you.", - tool_calls=[ - { - "name": "take_photo", - "args": {"args": {}}, - "id": 
"tool_call_image_1", - } - ], - ), - AIMessage(content="I've taken the photo. The image shows a cafe scene."), - ] - ) - - # Create agent with the fake model - agent = Agent( - model_instance=fake_model, - system_prompt="You are a helpful robot assistant with camera capabilities.", - ) - - test_skill_module = dimos.deploy(SkillContainerTest) - - agent.register_skills(test_skill_module) - agent.start() - - agent.run_implicit_skill("get_detections") - - # Query the agent - agent.query("Please take a photo") - - # Check that tools were bound - assert fake_model.tools is not None - assert len(fake_model.tools) > 0 - - # Verify the model was called and history updated - assert len(agent._history) > 0 - - # Check that image was handled specially - # Look for HumanMessage with image content in history - human_messages_with_images = [ - msg - for msg in agent._history - if isinstance(msg, HumanMessage) and msg.content and isinstance(msg.content, list) - ] - assert len(human_messages_with_images) >= 0 # May have image messages - agent.stop() - test_skill_module.stop() - dimos.close_all() - - -@pytest.mark.tool -def test_tool_call_implicit_detections() -> None: - """Test agent with image tool call execution.""" - dimos = start(2) - # Create a fake model that will respond with image tool calls - fake_model = MockModel( - responses=[ - AIMessage( - content="I'll take a photo for you.", - tool_calls=[ - { - "name": "take_photo", - "args": {"args": {}}, - "id": "tool_call_image_1", - } - ], - ), - AIMessage(content="I've taken the photo. 
The image shows a cafe scene."), - ] - ) - - # Create agent with the fake model - agent = Agent( - model_instance=fake_model, - system_prompt="You are a helpful robot assistant with camera capabilities.", - ) - - robot_connection = dimos.deploy(GO2Connection, connection_type="fake") - robot_connection.lidar.transport = LCMTransport("/lidar", PointCloud2) - robot_connection.odom.transport = LCMTransport("/odom", PoseStamped) - robot_connection.video.transport = LCMTransport("/image", Image) - robot_connection.cmd_vel.transport = LCMTransport("/cmd_vel", Vector3) - robot_connection.camera_info.transport = LCMTransport("/camera_info", CameraInfo) - robot_connection.start() - - test_skill_module = dimos.deploy(SkillContainerTest) - - agent.register_skills(test_skill_module) - agent.start() - - agent.run_implicit_skill("get_detections") - - print( - "Robot replay pipeline is running in the background.\nWaiting 8.5 seconds for some detections before quering agent" - ) - time.sleep(8.5) - - # Query the agent - agent.query("Please take a photo") - - # Check that tools were bound - assert fake_model.tools is not None - assert len(fake_model.tools) > 0 - - # Verify the model was called and history updated - assert len(agent._history) > 0 - - # Check that image was handled specially - # Look for HumanMessage with image content in history - human_messages_with_images = [ - msg - for msg in agent._history - if isinstance(msg, HumanMessage) and msg.content and isinstance(msg.content, list) - ] - assert len(human_messages_with_images) >= 0 - - agent.stop() - test_skill_module.stop() - robot_connection.stop() - dimos.stop() diff --git a/dimos/agents/test_stash_agent.py b/dimos/agents/test_stash_agent.py deleted file mode 100644 index 2b712fed1a..0000000000 --- a/dimos/agents/test_stash_agent.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pytest - -from dimos.agents.agent import Agent -from dimos.protocol.skill.test_coordinator import SkillContainerTest - - -@pytest.mark.tool -@pytest.mark.asyncio -async def test_agent_init() -> None: - system_prompt = ( - "Your name is Mr. Potato, potatoes are bad at math. Use a tools if asked to calculate" - ) - - # # Uncomment the following lines to use a dimos module system - # dimos = start(2) - # testcontainer = dimos.deploy(SkillContainerTest) - # agent = Agent(system_prompt=system_prompt) - - ## uncomment the following lines to run agents in a main loop without a module system - testcontainer = SkillContainerTest() - agent = Agent(system_prompt=system_prompt) - - agent.register_skills(testcontainer) - agent.start() - - agent.run_implicit_skill("uptime_seconds") - - await agent.query_async( - "hi there, please tell me what's your name and current date, and how much is 124181112 + 124124?" 
- ) - - # agent loop is considered finished once no active skills remain, - # agent will stop it's loop if passive streams are active - print("Agent loop finished, asking about camera") - - # we query again (this shows subsequent querying, but we could have asked for camera image in the original query, - # it all runs in parallel, and agent might get called once or twice depending on timing of skill responses) - # await agent.query_async("tell me what you see on the camera?") - - # you can run skillspy and agentspy in parallel with this test for a better observation of what's happening - await agent.query_async("tell me exactly everything we've talked about until now") - - print("Agent loop finished") - - agent.stop() - testcontainer.stop() - dimos.stop() diff --git a/dimos/agents/testing.py b/dimos/agents/testing.py index dc563b9ea9..d03d3a1263 100644 --- a/dimos/agents/testing.py +++ b/dimos/agents/testing.py @@ -38,6 +38,8 @@ class MockModel(SimpleChatModel): Can operate in two modes: 1. Playback mode (default): Reads responses from a JSON file or list 2. Record mode: Uses a real LLM and saves responses to a JSON file + + Set the RECORD environment variable to enable record mode. 
""" responses: list[str | AIMessage] = [] @@ -47,8 +49,7 @@ class MockModel(SimpleChatModel): real_model: Any | None = None recorded_messages: list[dict[str, Any]] = [] - def __init__(self, **kwargs) -> None: # type: ignore[no-untyped-def] - # Extract custom parameters before calling super().__init__ + def __init__(self, **kwargs: Any) -> None: responses = kwargs.pop("responses", []) json_path = kwargs.pop("json_path", None) model_provider = kwargs.pop("model_provider", "openai") @@ -63,9 +64,8 @@ def __init__(self, **kwargs) -> None: # type: ignore[no-untyped-def] self.recorded_messages = [] if self.record: - # Initialize real model for recording self.real_model = init_chat_model(model_provider=model_provider, model=model_name) - self.responses = [] # Initialize empty for record mode + self.responses = [] elif self.json_path: self.responses = self._load_responses_from_json() # type: ignore[assignment] elif responses: @@ -86,7 +86,6 @@ def _load_responses_from_json(self) -> list[AIMessage]: if isinstance(item, str): responses.append(AIMessage(content=item)) else: - # Reconstruct AIMessage from dict msg = AIMessage( content=item.get("content", ""), tool_calls=item.get("tool_calls", []) ) @@ -109,7 +108,7 @@ def _save_responses_to_json(self) -> None: } with open(self.json_path, "w") as f: - json.dump(data, f, indent=2, default=str) + f.write(json.dumps(data, indent=2, default=str) + "\n") def _call( self, @@ -118,7 +117,6 @@ def _call( run_manager: CallbackManagerForLLMRun | None = None, **kwargs: Any, ) -> str: - """Not used in _generate.""" return "" def _generate( @@ -129,11 +127,9 @@ def _generate( **kwargs: Any, ) -> ChatResult: if self.record: - # Recording mode - use real model and save responses if not self.real_model: raise ValueError("Real model not initialized for recording") - # Bind tools if needed model = self.real_model if self._bound_tools: model = model.bind_tools(self._bound_tools) @@ -145,12 +141,10 @@ def _generate( generation = 
ChatGeneration(message=result) return ChatResult(generations=[generation]) else: - # Playback mode - use predefined responses if not self.responses: - raise ValueError("No responses available for playback. ") + raise ValueError("No responses available for playback.") if self.i >= len(self.responses): - # Don't wrap around - stay at last response response = self.responses[-1] else: response = self.responses[self.i] @@ -171,10 +165,20 @@ def _stream( run_manager: CallbackManagerForLLMRun | None = None, **kwargs: Any, ) -> Iterator[ChatGenerationChunk]: - """Stream not implemented for testing.""" result = self._generate(messages, stop, run_manager, **kwargs) message = result.generations[0].message - chunk = AIMessageChunk(content=message.content) + chunk = AIMessageChunk( + content=message.content, + tool_call_chunks=[ + { + "name": tc["name"], + "args": json.dumps(tc["args"]), + "id": tc["id"], + "index": i, + } + for i, tc in enumerate(getattr(message, "tool_calls", [])) + ], + ) yield ChatGenerationChunk(message=chunk) def bind_tools( @@ -184,14 +188,7 @@ def bind_tools( tool_choice: str | None = None, **kwargs: Any, ) -> Runnable: # type: ignore[type-arg] - """Store tools and return self.""" self._bound_tools = tools if self.record and self.real_model: - # Also bind tools to the real model self.real_model = self.real_model.bind_tools(tools, tool_choice=tool_choice, **kwargs) return self - - @property - def tools(self) -> Sequence[Any] | None: - """Get bound tools for inspection.""" - return self._bound_tools diff --git a/dimos/agents/utils.py b/dimos/agents/utils.py new file mode 100644 index 0000000000..061e5ebb13 --- /dev/null +++ b/dimos/agents/utils.py @@ -0,0 +1,94 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from datetime import datetime +from typing import Any + +from langchain_core.messages.base import BaseMessage + +CYAN = "\033[36m" +YELLOW = "\033[33m" +GREEN = "\033[32m" +MAGENTA = "\033[35m" +BLUE = "\033[34m" +GRAY = "\033[90m" +RESET = "\033[0m" +BOLD = "\033[1m" + +TYPE_WIDTH = 12 + + +def pretty_print_langchain_message(msg: BaseMessage) -> None: + d = msg.__dict__ + msg_type = d.get("type", "unknown") + + type_colors = { + "human": CYAN, + "ai": GREEN, + "tool": YELLOW, + "system": MAGENTA, + } + type_color = type_colors.get(msg_type, RESET) + + print(f"{GRAY}{'-' * 60}{RESET}") + + timestamp = datetime.now().strftime("%H:%M:%S.%f")[:-3] + time_str = f"{GRAY}{timestamp}{RESET} " + type_str = f"{type_color}{msg_type:<{TYPE_WIDTH}}{RESET}" + + content = d.get("content", "") + tool_calls = d.get("tool_calls", []) + + # 12 chars for timestamp + 1 space + TYPE_WIDTH + 1 space + indent = " " * (12 + 1 + TYPE_WIDTH + 1) + first_line = True + + def print_line(text: str) -> None: + nonlocal first_line + if first_line: + print(f"{time_str} {type_str} {text}") + first_line = False + else: + print(f"{indent}{text}") + + if content: + content_str = repr(_try_to_remove_url_data(content)) + if len(content_str) > 2000: + content_str = content_str[:5000] + "... 
[truncated]" + print_line(f"{BOLD}{type_color}{content_str}{RESET}") + + if tool_calls: + print_line(f"{MAGENTA}tool_calls:{RESET}") + for tc in tool_calls: + name = tc.get("name") + args = tc.get("args") + print_line(f" - {BLUE}{name}{RESET}({CYAN}{args}{RESET})") + + if first_line: + print(f"{time_str} {type_str}") + + +def _try_to_remove_url_data(content: Any) -> Any: + if not isinstance(content, list): + return content + + ret = [] + + for item in content: + if isinstance(item, dict) and item.get("type") == "image_url": + ret.append({**item, "image_url": ""}) + else: + ret.append(item) + + return ret diff --git a/dimos/agents/vlm_agent.py b/dimos/agents/vlm_agent.py index 0b99fe4d1c..c99f8afa49 100644 --- a/dimos/agents/vlm_agent.py +++ b/dimos/agents/vlm_agent.py @@ -12,34 +12,53 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any -from langchain_core.messages import AIMessage, HumanMessage +from langchain.chat_models import init_chat_model +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage -from dimos.agents.llm_init import build_llm, build_system_message -from dimos.agents.spec import AgentSpec, AnyMessage -from dimos.core import rpc +from dimos.agents.system_prompt import SYSTEM_PROMPT +from dimos.core import Module, rpc +from dimos.core.module import ModuleConfig from dimos.core.stream import In, Out from dimos.msgs.sensor_msgs import Image from dimos.utils.logging_config import setup_logger +if TYPE_CHECKING: + from langchain_core.language_models.chat_models import BaseChatModel + logger = setup_logger() -class VLMAgent(AgentSpec): +@dataclass +class VLMAgentConfig(ModuleConfig): + model: str = "gpt-4o" + system_prompt: str | None = SYSTEM_PROMPT + + +class VLMAgent(Module): """Stream-first agent for vision queries with optional RPC access.""" + default_config: type[VLMAgentConfig] 
= VLMAgentConfig + config: VLMAgentConfig + color_image: In[Image] query_stream: In[HumanMessage] answer_stream: Out[AIMessage] def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self._llm = build_llm(self.config) + + if self.config.model.startswith("ollama:"): + from dimos.agents.ollama_agent import ensure_ollama_model + + ensure_ollama_model(self.config.model.removeprefix("ollama:")) + + self._llm: BaseChatModel = init_chat_model(self.config.model) # type: ignore[assignment] self._latest_image: Image | None = None self._history: list[AIMessage | HumanMessage] = [] - self._system_message = build_system_message(self.config) - self.publish(self._system_message) + self._system_message = SystemMessage(self.config.system_prompt or SYSTEM_PROMPT) @rpc def start(self) -> None: @@ -76,7 +95,7 @@ def _extract_text(self, msg: HumanMessage) -> str: def _invoke(self, msg: HumanMessage, **kwargs: Any) -> AIMessage: messages = [self._system_message, msg] response = self._llm.invoke(messages, **kwargs) - self.append_history([msg, response]) # type: ignore[arg-type] + self._history.extend([msg, response]) # type: ignore[arg-type] return response # type: ignore[return-value] def _invoke_image( @@ -92,23 +111,6 @@ def _invoke_image( def clear_history(self) -> None: self._history.clear() - def append_history(self, *msgs: list[AIMessage | HumanMessage]) -> None: - for msg_list in msgs: - for msg in msg_list: - self.publish(msg) # type: ignore[arg-type] - self._history.extend(msg_list) - - def history(self) -> list[AnyMessage]: - return [self._system_message, *self._history] - - @rpc - def register_skills(self, container: Any, run_implicit_name: str | None = None) -> None: - logger.warning( - "VLMAgent does not manage skills; register_skills is a no-op", - container=str(container), - run_implicit_name=run_implicit_name, - ) - @rpc def query(self, query: str) -> str: response = self._invoke(HumanMessage(query)) diff --git 
a/dimos/agents/cli/web.py b/dimos/agents/web_human_input.py similarity index 100% rename from dimos/agents/cli/web.py rename to dimos/agents/web_human_input.py diff --git a/dimos/agents_deprecated/agent.py b/dimos/agents_deprecated/agent.py index b7e2acad4c..0443b2cc94 100644 --- a/dimos/agents_deprecated/agent.py +++ b/dimos/agents_deprecated/agent.py @@ -36,7 +36,7 @@ from dotenv import load_dotenv from openai import NOT_GIVEN, OpenAI from pydantic import BaseModel -from reactivex import Observable, Observer, create, empty, just, operators as RxOps +from reactivex import Observable, Observer, create, empty, just, operators as rxops from reactivex.disposable import CompositeDisposable, Disposable from reactivex.subject import Subject @@ -201,8 +201,8 @@ def __init__( if (input_data_stream is None) else ( input_query_stream.pipe( # type: ignore[misc, union-attr] - RxOps.with_latest_from(input_data_stream), - RxOps.map( + rxops.with_latest_from(input_data_stream), + rxops.map( lambda combined: { "query": combined[0], # type: ignore[index] "objects": combined[1] # type: ignore[index] @@ -210,10 +210,10 @@ def __init__( else "No object data available", } ), - RxOps.map( + rxops.map( lambda data: f"{data['query']}\n\nCurrent objects detected:\n{data['objects']}" # type: ignore[index] ), - RxOps.do_action( + rxops.do_action( lambda x: print(f"\033[34mEnriched query: {x.split(chr(10))[0]}\033[0m") # type: ignore[arg-type] or [print(f"\033[34m{line}\033[0m") for line in x.split(chr(10))[1:]] # type: ignore[var-annotated] ), @@ -513,9 +513,9 @@ def _process_frame(emission) -> Observable: # type: ignore[no-untyped-def, type frame = emission return just(frame).pipe( # type: ignore[call-overload, no-any-return] MyOps.print_emission(id="B", **print_emission_args), # type: ignore[arg-type] - RxOps.observe_on(self.pool_scheduler), + rxops.observe_on(self.pool_scheduler), MyOps.print_emission(id="C", **print_emission_args), # type: ignore[arg-type] - 
RxOps.subscribe_on(self.pool_scheduler), + rxops.subscribe_on(self.pool_scheduler), MyOps.print_emission(id="D", **print_emission_args), # type: ignore[arg-type] MyVidOps.with_jpeg_export( self.frame_processor, # type: ignore[arg-type] @@ -525,13 +525,13 @@ def _process_frame(emission) -> Observable: # type: ignore[no-untyped-def, type MyOps.print_emission(id="E", **print_emission_args), # type: ignore[arg-type] MyVidOps.encode_image(), MyOps.print_emission(id="F", **print_emission_args), # type: ignore[arg-type] - RxOps.filter( + rxops.filter( lambda base64_and_dims: base64_and_dims is not None and base64_and_dims[0] is not None # type: ignore[index] and base64_and_dims[1] is not None # type: ignore[index] ), MyOps.print_emission(id="G", **print_emission_args), # type: ignore[arg-type] - RxOps.flat_map( + rxops.flat_map( lambda base64_and_dims: create( # type: ignore[arg-type, return-value] lambda observer, _: self._observable_query( observer, # type: ignore[arg-type] @@ -555,11 +555,11 @@ def process_if_free(emission): # type: ignore[no-untyped-def] is_processing[0] = True return _process_frame(emission).pipe( MyOps.print_emission(id="I", **print_emission_args), # type: ignore[arg-type] - RxOps.observe_on(self.pool_scheduler), + rxops.observe_on(self.pool_scheduler), MyOps.print_emission(id="J", **print_emission_args), # type: ignore[arg-type] - RxOps.subscribe_on(self.pool_scheduler), + rxops.subscribe_on(self.pool_scheduler), MyOps.print_emission(id="K", **print_emission_args), # type: ignore[arg-type] - RxOps.do_action( + rxops.do_action( on_completed=lambda: is_processing.__setitem__(0, False), on_error=lambda e: is_processing.__setitem__(0, False), ), @@ -568,7 +568,7 @@ def process_if_free(emission): # type: ignore[no-untyped-def] observable = frame_observable.pipe( MyOps.print_emission(id="A", **print_emission_args), # type: ignore[arg-type] - RxOps.flat_map(process_if_free), + rxops.flat_map(process_if_free), MyOps.print_emission(id="M", 
**print_emission_args), # type: ignore[arg-type] ) @@ -601,7 +601,7 @@ def _process_query(query) -> Observable: # type: ignore[no-untyped-def, type-ar """ return just(query).pipe( MyOps.print_emission(id="Pr A", **print_emission_args), # type: ignore[arg-type] - RxOps.flat_map( + rxops.flat_map( lambda query: create( # type: ignore[arg-type, return-value] lambda observer, _: self._observable_query(observer, incoming_query=query) # type: ignore[arg-type] ) @@ -622,11 +622,11 @@ def process_if_free(query): # type: ignore[no-untyped-def] logger.info("Processing Query.") return _process_query(query).pipe( MyOps.print_emission(id="B", **print_emission_args), # type: ignore[arg-type] - RxOps.observe_on(self.pool_scheduler), + rxops.observe_on(self.pool_scheduler), MyOps.print_emission(id="C", **print_emission_args), # type: ignore[arg-type] - RxOps.subscribe_on(self.pool_scheduler), + rxops.subscribe_on(self.pool_scheduler), MyOps.print_emission(id="D", **print_emission_args), # type: ignore[arg-type] - RxOps.do_action( + rxops.do_action( on_completed=lambda: is_processing.__setitem__(0, False), on_error=lambda e: is_processing.__setitem__(0, False), ), @@ -635,7 +635,7 @@ def process_if_free(query): # type: ignore[no-untyped-def] observable = query_observable.pipe( MyOps.print_emission(id="A", **print_emission_args), # type: ignore[arg-type] - RxOps.flat_map(lambda query: process_if_free(query)), # type: ignore[no-untyped-call] + rxops.flat_map(lambda query: process_if_free(query)), # type: ignore[no-untyped-call] MyOps.print_emission(id="F", **print_emission_args), # type: ignore[arg-type] ) @@ -654,9 +654,9 @@ def get_response_observable(self) -> Observable: # type: ignore[type-arg] Observable: An observable that emits string responses from the agent. 
""" return self.response_subject.pipe( - RxOps.observe_on(self.pool_scheduler), - RxOps.subscribe_on(self.pool_scheduler), - RxOps.share(), + rxops.observe_on(self.pool_scheduler), + rxops.subscribe_on(self.pool_scheduler), + rxops.share(), ) def run_observable_query(self, query_text: str, **kwargs) -> Observable: # type: ignore[no-untyped-def, type-arg] diff --git a/dimos/agents_deprecated/memory/image_embedding.py b/dimos/agents_deprecated/memory/image_embedding.py index 9c19dc4142..27e16f1aa8 100644 --- a/dimos/agents_deprecated/memory/image_embedding.py +++ b/dimos/agents_deprecated/memory/image_embedding.py @@ -64,7 +64,7 @@ def _initialize_model(self): # type: ignore[no-untyped-def] """Initialize the specified embedding model.""" try: import onnxruntime as ort # type: ignore[import-untyped] - import torch + import torch # noqa: F401 from transformers import ( # type: ignore[import-untyped] AutoFeatureExtractor, AutoModel, diff --git a/dimos/agents_deprecated/memory/test_image_embedding.py b/dimos/agents_deprecated/memory/test_image_embedding.py index 3f2efbcc1a..89f0716e7e 100644 --- a/dimos/agents_deprecated/memory/test_image_embedding.py +++ b/dimos/agents_deprecated/memory/test_image_embedding.py @@ -31,7 +31,6 @@ class TestImageEmbedding: """Test class for CLIP image embedding functionality.""" - @pytest.mark.tofix def test_clip_embedding_initialization(self) -> None: """Test CLIP embedding provider initializes correctly.""" try: @@ -44,7 +43,6 @@ def test_clip_embedding_initialization(self) -> None: except Exception as e: pytest.skip(f"Skipping test due to model initialization error: {e}") - @pytest.mark.tofix def test_clip_embedding_process_video(self) -> None: """Test CLIP embedding provider can process video frames and return embeddings.""" try: @@ -149,7 +147,6 @@ def on_completed() -> None: except Exception as e: pytest.fail(f"Test failed with error: {e}") - @pytest.mark.tofix def test_clip_embedding_similarity(self) -> None: """Test CLIP embedding 
similarity search and text-to-image queries.""" try: diff --git a/dimos/agents_deprecated/modules/gateway/client.py b/dimos/agents_deprecated/modules/gateway/client.py index 6e3c6c6706..772ca445aa 100644 --- a/dimos/agents_deprecated/modules/gateway/client.py +++ b/dimos/agents_deprecated/modules/gateway/client.py @@ -52,6 +52,7 @@ def __init__( self.timeout = timeout self._client = None self._async_client = None + self._aclose_task: asyncio.Task[None] | None = None # Always use TensorZero embedded gateway try: @@ -177,7 +178,7 @@ def __del__(self) -> None: try: loop = asyncio.get_event_loop() if loop.is_running(): - loop.create_task(self.aclose()) + self._aclose_task = loop.create_task(self.aclose()) else: loop.run_until_complete(self.aclose()) except RuntimeError: diff --git a/dimos/conftest.py b/dimos/conftest.py index e0544bea1c..5d1ca2b860 100644 --- a/dimos/conftest.py +++ b/dimos/conftest.py @@ -15,8 +15,36 @@ import asyncio import threading +from dotenv import load_dotenv import pytest +from dimos.protocol.service.lcmservice import autoconf + +load_dotenv() + + +def _has_cuda(): + try: + import torch + except Exception: + return False + + try: + return bool(torch.cuda.is_available()) + except Exception: + return False + + +@pytest.hookimpl() +def pytest_collection_modifyitems(config, items): + if not _has_cuda(): + skip_marker = pytest.mark.skip( + reason="CUDA is not available (torch.cuda.is_available() returned False)" + ) + for item in items: + if item.get_closest_marker("cuda"): + item.add_marker(skip_marker) + @pytest.fixture def event_loop(): @@ -25,6 +53,18 @@ def event_loop(): loop.close() +@pytest.fixture(scope="session", autouse=True) +def _autoconf(request): + """Run autoconf() before all tests with capture suspended so people see `sudo` commands.""" + + capman = request.config.pluginmanager.getplugin("capturemanager") + capman.suspend_global_capture(in_=True) + try: + autoconf() + finally: + capman.resume_global_capture() + + _session_threads = 
set() _seen_threads = set() _seen_threads_lock = threading.RLock() diff --git a/dimos/control/README.md b/dimos/control/README.md index 58490321fa..755bfbd939 100644 --- a/dimos/control/README.md +++ b/dimos/control/README.md @@ -1,4 +1,4 @@ -# Control Orchestrator +# Control Coordinator Centralized control system for multi-arm robots with per-joint arbitration. @@ -6,7 +6,7 @@ Centralized control system for multi-arm robots with per-joint arbitration. ``` ┌─────────────────────────────────────────────────────────────┐ -│ ControlOrchestrator │ +│ ControlCoordinator │ │ │ │ ┌──────────────────────────────────────────────────────┐ │ │ │ TickLoop (100Hz) │ │ @@ -16,8 +16,8 @@ Centralized control system for multi-arm robots with per-joint arbitration. │ │ │ │ │ │ │ ▼ ▼ ▼ ▼ │ │ ┌─────────┐ ┌───────┐ ┌─────────┐ ┌──────────┐ │ -│ │Hardware │ │ Tasks │ │Priority │ │ Backends │ │ -│ │Interface│ │ │ │ Winners │ │ │ │ +│ │Connected│ │ Tasks │ │Priority │ │ Adapters │ │ +│ │Hardware │ │ │ │ Winners │ │ │ │ │ └─────────┘ └───────┘ └─────────┘ └──────────┘ │ └─────────────────────────────────────────────────────────────┘ ``` @@ -25,13 +25,13 @@ Centralized control system for multi-arm robots with per-joint arbitration. ## Quick Start ```bash -# Terminal 1: Run orchestrator -dimos run orchestrator-mock # Single 7-DOF mock arm -dimos run orchestrator-dual-mock # Dual arms (7+6 DOF) -dimos run orchestrator-piper-xarm # Real hardware +# Terminal 1: Run coordinator +dimos run coordinator-mock # Single 7-DOF mock arm +dimos run coordinator-dual-mock # Dual arms (7+6 DOF) +dimos run coordinator-piper-xarm # Real hardware # Terminal 2: Control via CLI -python -m dimos.manipulation.control.orchestrator_client +python -m dimos.manipulation.control.coordinator_client ``` ## Core Concepts @@ -42,17 +42,17 @@ Single deterministic loop at 100Hz: 2. **Compute** - Each task calculates desired output 3. **Arbitrate** - Per-joint, highest priority wins 4. 
**Route** - Group commands by hardware -5. **Write** - Send commands to backends +5. **Write** - Send commands to adapters ### Tasks (Controllers) -Tasks are passive controllers called by the orchestrator: +Tasks are passive controllers called by the coordinator: ```python class MyController: def claim(self) -> ResourceClaim: return ResourceClaim(joints={"joint1", "joint2"}, priority=10) - def compute(self, state: OrchestratorState) -> JointCommandOutput: + def compute(self, state: CoordinatorState) -> JointCommandOutput: # Your control law here (PID, impedance, etc.) return JointCommandOutput( joint_names=["joint1", "joint2"], @@ -83,10 +83,11 @@ def on_preempted(self, by_task: str, joints: frozenset[str]) -> None: ``` dimos/control/ -├── orchestrator.py # Module + RPC interface +├── coordinator.py # Module + RPC interface ├── tick_loop.py # 100Hz control loop ├── task.py # ControlTask protocol + types -├── hardware_interface.py # Backend wrapper +├── hardware_interface.py # ConnectedHardware wrapper +├── components.py # HardwareComponent config + type aliases ├── blueprints.py # Pre-configured setups └── tasks/ └── trajectory_task.py # Joint trajectory controller @@ -95,13 +96,25 @@ dimos/control/ ## Configuration ```python -from dimos.control import control_orchestrator, HardwareConfig, TaskConfig +from dimos.control import control_coordinator, HardwareComponent, TaskConfig -my_robot = control_orchestrator( +my_robot = control_coordinator( tick_rate=100.0, hardware=[ - HardwareConfig(id="left", type="xarm", dof=7, joint_prefix="left", ip="192.168.1.100"), - HardwareConfig(id="right", type="piper", dof=6, joint_prefix="right", can_port="can0"), + HardwareComponent( + hardware_id="left_arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("left_arm", 7), + adapter_type="xarm", + address="192.168.1.100", + ), + HardwareComponent( + hardware_id="right_arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("right_arm", 6), + 
adapter_type="piper", + address="can0", + ), ], tasks=[ TaskConfig(name="traj_left", type="trajectory", joint_names=[...], priority=10), @@ -181,15 +194,15 @@ class PIDController: ## Joint State Output -The orchestrator publishes one aggregated `JointState` message containing all joints: +The coordinator publishes one aggregated `JointState` message containing all joints: ```python JointState( - name=["left_joint1", ..., "right_joint1", ...], # All joints + name=["left_arm_joint1", ..., "right_arm_joint1", ...], # All joints position=[...], velocity=[...], effort=[...], ) ``` -Subscribe via: `/orchestrator/joint_state` +Subscribe via: `/coordinator/joint_state` diff --git a/dimos/control/__init__.py b/dimos/control/__init__.py index 3d7d647cd4..23ac02836b 100644 --- a/dimos/control/__init__.py +++ b/dimos/control/__init__.py @@ -12,81 +12,71 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""ControlOrchestrator - Centralized control for multi-arm coordination. +"""ControlCoordinator - Centralized control for multi-arm coordination. -This module provides a centralized control orchestrator that replaces +This module provides a centralized control coordinator that replaces per-driver/per-controller loops with a single deterministic tick-based system. 
Features: -- Single tick loop (read → compute → arbitrate → route → write) +- Single tick loop (read -> compute -> arbitrate -> route -> write) - Per-joint arbitration (highest priority wins) - Mode conflict detection - Partial command support (hold last value) - Aggregated preemption notifications Example: - >>> from dimos.control import ControlOrchestrator + >>> from dimos.control import ControlCoordinator >>> from dimos.control.tasks import JointTrajectoryTask, JointTrajectoryTaskConfig - >>> from dimos.hardware.manipulators.xarm import XArmBackend + >>> from dimos.hardware.manipulators.xarm import XArmAdapter >>> - >>> # Create orchestrator - >>> orch = ControlOrchestrator(tick_rate=100.0) + >>> # Create coordinator + >>> coord = ControlCoordinator(tick_rate=100.0) >>> >>> # Add hardware - >>> backend = XArmBackend(ip="192.168.1.185", dof=7) - >>> backend.connect() - >>> orch.add_hardware("left_arm", backend, joint_prefix="left") + >>> adapter = XArmAdapter(ip="192.168.1.185", dof=7) + >>> adapter.connect() + >>> coord.add_hardware("left_arm", adapter) >>> >>> # Add task - >>> joints = [f"left_joint{i+1}" for i in range(7)] + >>> joints = [f"left_arm_joint{i+1}" for i in range(7)] >>> task = JointTrajectoryTask( ... "traj_left", ... JointTrajectoryTaskConfig(joint_names=joints, priority=10), ... 
) - >>> orch.add_task(task) + >>> coord.add_task(task) >>> >>> # Start - >>> orch.start() + >>> coord.start() """ -from dimos.control.hardware_interface import ( - BackendHardwareInterface, - HardwareInterface, -) -from dimos.control.orchestrator import ( - ControlOrchestrator, - ControlOrchestratorConfig, - HardwareConfig, - TaskConfig, - control_orchestrator, -) -from dimos.control.task import ( - ControlMode, - ControlTask, - JointCommandOutput, - JointStateSnapshot, - OrchestratorState, - ResourceClaim, -) -from dimos.control.tick_loop import TickLoop +import lazy_loader as lazy -__all__ = [ - # Hardware interface - "BackendHardwareInterface", - "ControlMode", - # Orchestrator - "ControlOrchestrator", - "ControlOrchestratorConfig", - # Task protocol and types - "ControlTask", - "HardwareConfig", - "HardwareInterface", - "JointCommandOutput", - "JointStateSnapshot", - "OrchestratorState", - "ResourceClaim", - "TaskConfig", - # Tick loop - "TickLoop", - "control_orchestrator", -] +__getattr__, __dir__, __all__ = lazy.attach( + __name__, + submod_attrs={ + "components": [ + "HardwareComponent", + "HardwareId", + "HardwareType", + "JointName", + "JointState", + "make_joints", + ], + "coordinator": [ + "ControlCoordinator", + "ControlCoordinatorConfig", + "TaskConfig", + "control_coordinator", + ], + "hardware_interface": ["ConnectedHardware"], + "task": [ + "ControlMode", + "ControlTask", + "CoordinatorState", + "JointCommandOutput", + "JointStateSnapshot", + "ResourceClaim", + ], + "tick_loop": ["TickLoop"], + }, +) diff --git a/dimos/control/blueprints.py b/dimos/control/blueprints.py index d38ac1f81f..8762ebd95b 100644 --- a/dimos/control/blueprints.py +++ b/dimos/control/blueprints.py @@ -12,47 +12,34 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Pre-configured blueprints for the ControlOrchestrator. +"""Pre-configured blueprints for the ControlCoordinator. 
-This module provides ready-to-use orchestrator blueprints for common setups. +This module provides ready-to-use coordinator blueprints for common setups. Usage: # Run via CLI: - dimos run orchestrator-mock # Mock 7-DOF arm - dimos run orchestrator-xarm7 # XArm7 real hardware - dimos run orchestrator-dual-mock # Dual mock arms + dimos run coordinator-mock # Mock 7-DOF arm + dimos run coordinator-xarm7 # XArm7 real hardware + dimos run coordinator-dual-mock # Dual mock arms # Or programmatically: - from dimos.control.blueprints import orchestrator_mock - coordinator = orchestrator_mock.build() + from dimos.control.blueprints import coordinator_mock + coordinator = coordinator_mock.build() coordinator.loop() - -Example with trajectory setter: - # Terminal 1: Run the orchestrator - dimos run orchestrator-mock - - # Terminal 2: Send trajectories via RPC - python -m dimos.control.examples.orchestrator_trajectory_setter --task traj_arm """ from __future__ import annotations -from dimos.control.orchestrator import ( - HardwareConfig, - TaskConfig, - control_orchestrator, -) +from dimos.control.components import HardwareComponent, HardwareType, make_joints +from dimos.control.coordinator import TaskConfig, control_coordinator from dimos.core.transport import LCMTransport +from dimos.msgs.geometry_msgs import PoseStamped from dimos.msgs.sensor_msgs import JointState +from dimos.teleop.quest.quest_types import Buttons +from dimos.utils.data import LfsPath -# ============================================================================= -# Helper function to generate joint names -# ============================================================================= - - -def _joint_names(prefix: str, dof: int) -> list[str]: - """Generate joint names with prefix.""" - return [f"{prefix}_joint{i + 1}" for i in range(dof)] +_PIPER_MODEL_PATH = LfsPath("piper_description/mujoco_model/piper_no_gripper_description.xml") +_XARM6_MODEL_PATH = LfsPath("xarm_description/urdf/xarm6/xarm6.urdf") 
# ============================================================================= @@ -60,44 +47,44 @@ def _joint_names(prefix: str, dof: int) -> list[str]: # ============================================================================= # Mock 7-DOF arm (for testing) -orchestrator_mock = control_orchestrator( +coordinator_mock = control_coordinator( tick_rate=100.0, publish_joint_state=True, - joint_state_frame_id="orchestrator", + joint_state_frame_id="coordinator", hardware=[ - HardwareConfig( - id="arm", - type="mock", - dof=7, - joint_prefix="arm", + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 7), + adapter_type="mock", ), ], tasks=[ TaskConfig( name="traj_arm", type="trajectory", - joint_names=_joint_names("arm", 7), + joint_names=[f"arm_joint{i + 1}" for i in range(7)], priority=10, ), ], ).transports( { - ("joint_state", JointState): LCMTransport("/orchestrator/joint_state", JointState), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), } ) -# XArm7 real hardware (requires IP configuration) -orchestrator_xarm7 = control_orchestrator( +# XArm7 real hardware +coordinator_xarm7 = control_coordinator( tick_rate=100.0, publish_joint_state=True, - joint_state_frame_id="orchestrator", + joint_state_frame_id="coordinator", hardware=[ - HardwareConfig( - id="arm", - type="xarm", - dof=7, - joint_prefix="arm", - ip="192.168.2.235", # Default IP, override via env or config + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 7), + adapter_type="xarm", + address="192.168.2.235", auto_enable=True, ), ], @@ -105,28 +92,28 @@ def _joint_names(prefix: str, dof: int) -> list[str]: TaskConfig( name="traj_arm", type="trajectory", - joint_names=_joint_names("arm", 7), + joint_names=[f"arm_joint{i + 1}" for i in range(7)], priority=10, ), ], ).transports( { - ("joint_state", JointState): 
LCMTransport("/orchestrator/joint_state", JointState), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), } ) # XArm6 real hardware -orchestrator_xarm6 = control_orchestrator( +coordinator_xarm6 = control_coordinator( tick_rate=100.0, publish_joint_state=True, - joint_state_frame_id="orchestrator", + joint_state_frame_id="coordinator", hardware=[ - HardwareConfig( - id="arm", - type="xarm", - dof=6, - joint_prefix="arm", - ip="192.168.1.210", + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="xarm", + address="192.168.1.210", auto_enable=True, ), ], @@ -134,28 +121,28 @@ def _joint_names(prefix: str, dof: int) -> list[str]: TaskConfig( name="traj_xarm", type="trajectory", - joint_names=_joint_names("arm", 6), + joint_names=[f"arm_joint{i + 1}" for i in range(6)], priority=10, ), ], ).transports( { - ("joint_state", JointState): LCMTransport("/orchestrator/joint_state", JointState), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), } ) # Piper arm (6-DOF, CAN bus) -orchestrator_piper = control_orchestrator( +coordinator_piper = control_coordinator( tick_rate=100.0, publish_joint_state=True, - joint_state_frame_id="orchestrator", + joint_state_frame_id="coordinator", hardware=[ - HardwareConfig( - id="arm", - type="piper", - dof=6, - joint_prefix="arm", - can_port="can0", + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="piper", + address="can0", auto_enable=True, ), ], @@ -163,79 +150,80 @@ def _joint_names(prefix: str, dof: int) -> list[str]: TaskConfig( name="traj_piper", type="trajectory", - joint_names=_joint_names("arm", 6), + joint_names=[f"arm_joint{i + 1}" for i in range(6)], priority=10, ), ], ).transports( { - ("joint_state", JointState): LCMTransport("/orchestrator/joint_state", JointState), + ("joint_state", JointState): 
LCMTransport("/coordinator/joint_state", JointState), } ) + # ============================================================================= # Dual Arm Blueprints # ============================================================================= -# Dual mock arms (7-DOF left, 6-DOF right) for testing -orchestrator_dual_mock = control_orchestrator( +# Dual mock arms (7-DOF left, 6-DOF right) +coordinator_dual_mock = control_coordinator( tick_rate=100.0, publish_joint_state=True, - joint_state_frame_id="orchestrator", + joint_state_frame_id="coordinator", hardware=[ - HardwareConfig( - id="left_arm", - type="mock", - dof=7, - joint_prefix="left", - ), - HardwareConfig( - id="right_arm", - type="mock", - dof=6, - joint_prefix="right", + HardwareComponent( + hardware_id="left_arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("left_arm", 7), + adapter_type="mock", + ), + HardwareComponent( + hardware_id="right_arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("right_arm", 6), + adapter_type="mock", ), ], tasks=[ TaskConfig( name="traj_left", type="trajectory", - joint_names=_joint_names("left", 7), + joint_names=[f"left_arm_joint{i + 1}" for i in range(7)], priority=10, ), TaskConfig( name="traj_right", type="trajectory", - joint_names=_joint_names("right", 6), + joint_names=[f"right_arm_joint{i + 1}" for i in range(6)], priority=10, ), ], ).transports( { - ("joint_state", JointState): LCMTransport("/orchestrator/joint_state", JointState), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), } ) -# Dual XArm setup (XArm7 left, XArm6 right) -orchestrator_dual_xarm = control_orchestrator( +# Dual XArm (XArm7 left, XArm6 right) +coordinator_dual_xarm = control_coordinator( tick_rate=100.0, publish_joint_state=True, - joint_state_frame_id="orchestrator", + joint_state_frame_id="coordinator", hardware=[ - HardwareConfig( - id="left_arm", - type="xarm", - dof=7, - joint_prefix="left", - ip="192.168.2.235", + 
HardwareComponent( + hardware_id="left_arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("left_arm", 7), + adapter_type="xarm", + address="192.168.2.235", auto_enable=True, ), - HardwareConfig( - id="right_arm", - type="xarm", - dof=6, - joint_prefix="right", - ip="192.168.1.210", + HardwareComponent( + hardware_id="right_arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("right_arm", 6), + adapter_type="xarm", + address="192.168.1.210", auto_enable=True, ), ], @@ -243,42 +231,42 @@ def _joint_names(prefix: str, dof: int) -> list[str]: TaskConfig( name="traj_left", type="trajectory", - joint_names=_joint_names("left", 7), + joint_names=[f"left_arm_joint{i + 1}" for i in range(7)], priority=10, ), TaskConfig( name="traj_right", type="trajectory", - joint_names=_joint_names("right", 6), + joint_names=[f"right_arm_joint{i + 1}" for i in range(6)], priority=10, ), ], ).transports( { - ("joint_state", JointState): LCMTransport("/orchestrator/joint_state", JointState), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), } ) -# Dual Arm setup (XArm6 , Piper ) -orchestrator_piper_xarm = control_orchestrator( +# Dual arm (XArm6 + Piper) +coordinator_piper_xarm = control_coordinator( tick_rate=100.0, publish_joint_state=True, - joint_state_frame_id="orchestrator", + joint_state_frame_id="coordinator", hardware=[ - HardwareConfig( - id="xarm_arm", - type="xarm", - dof=6, - joint_prefix="xarm", - ip="192.168.1.210", + HardwareComponent( + hardware_id="xarm_arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("xarm_arm", 6), + adapter_type="xarm", + address="192.168.1.210", auto_enable=True, ), - HardwareConfig( - id="piper_arm", - type="piper", - dof=6, - joint_prefix="piper", - can_port="can0", + HardwareComponent( + hardware_id="piper_arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("piper_arm", 6), + adapter_type="piper", + address="can0", auto_enable=True, ), ], @@ 
-286,81 +274,365 @@ def _joint_names(prefix: str, dof: int) -> list[str]: TaskConfig( name="traj_xarm", type="trajectory", - joint_names=_joint_names("xarm", 6), + joint_names=[f"xarm_arm_joint{i + 1}" for i in range(6)], priority=10, ), TaskConfig( name="traj_piper", type="trajectory", - joint_names=_joint_names("piper", 6), + joint_names=[f"piper_arm_joint{i + 1}" for i in range(6)], priority=10, ), ], ).transports( { - ("joint_state", JointState): LCMTransport("/orchestrator/joint_state", JointState), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), } ) + # ============================================================================= -# High-frequency Blueprints (200Hz) +# Streaming Control Blueprints # ============================================================================= -# High-frequency mock for demanding applications -orchestrator_highfreq_mock = control_orchestrator( - tick_rate=200.0, +# XArm6 teleop - streaming position control +coordinator_teleop_xarm6 = control_coordinator( + tick_rate=100.0, publish_joint_state=True, - joint_state_frame_id="orchestrator", + joint_state_frame_id="coordinator", hardware=[ - HardwareConfig( - id="arm", - type="mock", - dof=7, - joint_prefix="arm", + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="xarm", + address="192.168.1.210", + auto_enable=True, ), ], tasks=[ TaskConfig( - name="traj_arm", - type="trajectory", - joint_names=_joint_names("arm", 7), + name="servo_arm", + type="servo", + joint_names=[f"arm_joint{i + 1}" for i in range(6)], + priority=10, + ), + ], +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + ("joint_command", JointState): LCMTransport("/teleop/joint_command", JointState), + } +) + +# XArm6 velocity control - streaming velocity for joystick +coordinator_velocity_xarm6 = control_coordinator( + tick_rate=100.0, + 
publish_joint_state=True, + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="xarm", + address="192.168.1.210", + auto_enable=True, + ), + ], + tasks=[ + TaskConfig( + name="velocity_arm", + type="velocity", + joint_names=[f"arm_joint{i + 1}" for i in range(6)], + priority=10, + ), + ], +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + ("joint_command", JointState): LCMTransport("/joystick/joint_command", JointState), + } +) + +# XArm6 combined (servo + velocity tasks) +coordinator_combined_xarm6 = control_coordinator( + tick_rate=100.0, + publish_joint_state=True, + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="xarm", + address="192.168.1.210", + auto_enable=True, + ), + ], + tasks=[ + TaskConfig( + name="servo_arm", + type="servo", + joint_names=[f"arm_joint{i + 1}" for i in range(6)], + priority=10, + ), + TaskConfig( + name="velocity_arm", + type="velocity", + joint_names=[f"arm_joint{i + 1}" for i in range(6)], priority=10, ), ], ).transports( { - ("joint_state", JointState): LCMTransport("/orchestrator/joint_state", JointState), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + ("joint_command", JointState): LCMTransport("/control/joint_command", JointState), } ) + # ============================================================================= -# Raw Blueprints (no hardware/tasks configured - for programmatic setup) +# Cartesian IK Blueprints (internal Pinocchio IK solver) # ============================================================================= -# Basic orchestrator with transport only (add hardware/tasks programmatically) -orchestrator_basic = control_orchestrator( + +# Mock 6-DOF arm with 
CartesianIK +coordinator_cartesian_ik_mock = control_coordinator( tick_rate=100.0, publish_joint_state=True, - joint_state_frame_id="orchestrator", + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="mock", + ), + ], + tasks=[ + TaskConfig( + name="cartesian_ik_arm", + type="cartesian_ik", + joint_names=[f"arm_joint{i + 1}" for i in range(6)], + priority=10, + model_path=_PIPER_MODEL_PATH, + ee_joint_id=6, + ), + ], ).transports( { - ("joint_state", JointState): LCMTransport("/orchestrator/joint_state", JointState), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + ("cartesian_command", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), } ) +# Piper arm with CartesianIK +coordinator_cartesian_ik_piper = control_coordinator( + tick_rate=100.0, + publish_joint_state=True, + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="piper", + address="can0", + auto_enable=True, + ), + ], + tasks=[ + TaskConfig( + name="cartesian_ik_arm", + type="cartesian_ik", + joint_names=[f"arm_joint{i + 1}" for i in range(6)], + priority=10, + model_path=_PIPER_MODEL_PATH, + ee_joint_id=6, + ), + ], +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + ("cartesian_command", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + } +) + + +# ============================================================================= +# Teleop IK Blueprints (VR teleoperation with internal Pinocchio IK) +# ============================================================================= + +# Single XArm6 with TeleopIK +coordinator_teleop_xarm6 = control_coordinator( + tick_rate=100.0, + 
publish_joint_state=True, + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="xarm", + address="192.168.1.210", + auto_enable=True, + ), + ], + tasks=[ + TaskConfig( + name="teleop_xarm", + type="teleop_ik", + joint_names=[f"arm_joint{i + 1}" for i in range(6)], + priority=10, + model_path=_XARM6_MODEL_PATH, + ee_joint_id=6, + hand="right", + ), + ], +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + ("cartesian_command", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + ("buttons", Buttons): LCMTransport("/teleop/buttons", Buttons), + } +) + +# Single Piper with TeleopIK +coordinator_teleop_piper = control_coordinator( + tick_rate=100.0, + publish_joint_state=True, + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="piper", + address="can0", + auto_enable=True, + ), + ], + tasks=[ + TaskConfig( + name="teleop_piper", + type="teleop_ik", + joint_names=[f"arm_joint{i + 1}" for i in range(6)], + priority=10, + model_path=_PIPER_MODEL_PATH, + ee_joint_id=6, + hand="left", + ), + ], +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + ("cartesian_command", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + ("buttons", Buttons): LCMTransport("/teleop/buttons", Buttons), + } +) + +# Dual arm teleop: XArm6 + Piper with TeleopIK +coordinator_teleop_dual = control_coordinator( + tick_rate=100.0, + publish_joint_state=True, + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="xarm_arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("xarm_arm", 6), + adapter_type="xarm", + address="192.168.1.210", + 
auto_enable=True, + ), + HardwareComponent( + hardware_id="piper_arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("piper_arm", 6), + adapter_type="piper", + address="can0", + auto_enable=True, + ), + ], + tasks=[ + TaskConfig( + name="teleop_xarm", + type="teleop_ik", + joint_names=[f"xarm_arm_joint{i + 1}" for i in range(6)], + priority=10, + model_path=_XARM6_MODEL_PATH, + ee_joint_id=6, + hand="left", + ), + TaskConfig( + name="teleop_piper", + type="teleop_ik", + joint_names=[f"piper_arm_joint{i + 1}" for i in range(6)], + priority=10, + model_path=_PIPER_MODEL_PATH, + ee_joint_id=6, + hand="right", + ), + ], +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + ("cartesian_command", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + ("buttons", Buttons): LCMTransport("/teleop/buttons", Buttons), + } +) + + +# ============================================================================= +# Raw Blueprints (for programmatic setup) +# ============================================================================= + +coordinator_basic = control_coordinator( + tick_rate=100.0, + publish_joint_state=True, + joint_state_frame_id="coordinator", +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + } +) + + +# ============================================================================= +# Exports +# ============================================================================= __all__ = [ - # Raw blueprints (for programmatic setup) - "orchestrator_basic", - # Dual arm blueprints - "orchestrator_dual_mock", - "orchestrator_dual_xarm", - # High-frequency blueprints - "orchestrator_highfreq_mock", - # Single arm blueprints - "orchestrator_mock", - "orchestrator_piper", - "orchestrator_piper_xarm", - "orchestrator_xarm6", - "orchestrator_xarm7", + # Raw + "coordinator_basic", + # Cartesian IK + 
"coordinator_cartesian_ik_mock", + "coordinator_cartesian_ik_piper", + # Streaming control + "coordinator_combined_xarm6", + # Dual arm + "coordinator_dual_mock", + "coordinator_dual_xarm", + # Single arm + "coordinator_mock", + "coordinator_piper", + "coordinator_piper_xarm", + # Teleop IK + "coordinator_teleop_dual", + "coordinator_teleop_piper", + "coordinator_teleop_xarm6", + "coordinator_velocity_xarm6", + "coordinator_xarm6", + "coordinator_xarm7", ] diff --git a/dimos/control/components.py b/dimos/control/components.py new file mode 100644 index 0000000000..e3022468ed --- /dev/null +++ b/dimos/control/components.py @@ -0,0 +1,82 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Hardware component schema for the ControlCoordinator.""" + +from dataclasses import dataclass, field +from enum import Enum + +HardwareId = str +JointName = str +TaskName = str + + +class HardwareType(Enum): + MANIPULATOR = "manipulator" + BASE = "base" + GRIPPER = "gripper" + + +@dataclass(frozen=True) +class JointState: + """State of a single joint.""" + + position: float + velocity: float + effort: float + + +@dataclass +class HardwareComponent: + """Configuration for a hardware component. 
+ + Attributes: + hardware_id: Unique identifier, also used as joint name prefix + hardware_type: Type of hardware (MANIPULATOR, BASE, GRIPPER) + joints: List of joint names (e.g., ["arm_joint1", "arm_joint2", ...]) + adapter_type: Adapter type ("mock", "xarm", "piper") + address: Connection address - IP for TCP, port for CAN + auto_enable: Whether to auto-enable servos + """ + + hardware_id: HardwareId + hardware_type: HardwareType + joints: list[JointName] = field(default_factory=list) + adapter_type: str = "mock" + address: str | None = None + auto_enable: bool = True + + +def make_joints(hardware_id: HardwareId, dof: int) -> list[JointName]: + """Create joint names for hardware. + + Args: + hardware_id: The hardware identifier (e.g., "left_arm") + dof: Degrees of freedom + + Returns: + List of joint names like ["left_arm_joint1", "left_arm_joint2", ...] + """ + return [f"{hardware_id}_joint{i + 1}" for i in range(dof)] + + +__all__ = [ + "HardwareComponent", + "HardwareId", + "HardwareType", + "JointName", + "JointState", + "TaskName", + "make_joints", +] diff --git a/dimos/control/coordinator.py b/dimos/control/coordinator.py new file mode 100644 index 0000000000..5685a9f9c7 --- /dev/null +++ b/dimos/control/coordinator.py @@ -0,0 +1,668 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""ControlCoordinator module. 
+ +Centralized control coordinator that replaces per-driver/per-controller +loops with a single deterministic tick-based system. + +Features: +- Single tick loop (read -> compute -> arbitrate -> route -> write) +- Per-joint arbitration (highest priority wins) +- Mode conflict detection +- Partial command support (hold last value) +- Aggregated preemption notifications +""" + +from __future__ import annotations + +from dataclasses import dataclass, field +import threading +import time +from typing import TYPE_CHECKING, Any + +from dimos.control.components import HardwareComponent, HardwareId, JointName, TaskName +from dimos.control.hardware_interface import ConnectedHardware +from dimos.control.task import ControlTask +from dimos.control.tick_loop import TickLoop +from dimos.core import In, Module, Out, rpc +from dimos.core.module import ModuleConfig +from dimos.msgs.geometry_msgs import ( + PoseStamped, # noqa: TC001 - needed at runtime for In[PoseStamped] +) +from dimos.msgs.sensor_msgs import ( + JointState, # noqa: TC001 - needed at runtime for Out[JointState] +) +from dimos.teleop.quest.quest_types import Buttons # noqa: TC001 - needed for teleop buttons +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from collections.abc import Callable + from pathlib import Path + + from dimos.hardware.manipulators.spec import ManipulatorAdapter + +logger = setup_logger() + + +# ============================================================================= +# Configuration +# ============================================================================= + + +@dataclass +class TaskConfig: + """Configuration for a control task. 
+ + Attributes: + name: Task name (e.g., "traj_arm") + type: Task type ("trajectory", "servo", "velocity", "cartesian_ik", "teleop_ik") + joint_names: List of joint names this task controls + priority: Task priority (higher wins arbitration) + model_path: Path to URDF/MJCF for IK solver (cartesian_ik/teleop_ik only) + ee_joint_id: End-effector joint ID in model (cartesian_ik/teleop_ik only) + """ + + name: str + type: str = "trajectory" + joint_names: list[str] = field(default_factory=lambda: []) + priority: int = 10 + # Cartesian IK / Teleop IK specific + model_path: str | Path | None = None + ee_joint_id: int = 6 + hand: str = "" # teleop_ik only: "left" or "right" controller + + +@dataclass +class ControlCoordinatorConfig(ModuleConfig): + """Configuration for the ControlCoordinator. + + Attributes: + tick_rate: Control loop frequency in Hz (default: 100) + publish_joint_state: Whether to publish aggregated JointState + joint_state_frame_id: Frame ID for published JointState + log_ticks: Whether to log tick information (verbose) + hardware: List of hardware configurations to create on start + tasks: List of task configurations to create on start + """ + + tick_rate: float = 100.0 + publish_joint_state: bool = True + joint_state_frame_id: str = "coordinator" + log_ticks: bool = False + hardware: list[HardwareComponent] = field(default_factory=lambda: []) + tasks: list[TaskConfig] = field(default_factory=lambda: []) + + +# ============================================================================= +# ControlCoordinator Module +# ============================================================================= + + +class ControlCoordinator(Module[ControlCoordinatorConfig]): + """Centralized control coordinator with per-joint arbitration. + + Single tick loop that: + 1. Reads state from all hardware + 2. Runs all active tasks + 3. Arbitrates conflicts per-joint (highest priority wins) + 4. Routes commands to hardware + 5. 
Publishes aggregated joint state + + Key design decisions: + - Joint-centric commands (not hardware-centric) + - Per-joint arbitration (not per-hardware) + - Centralized time (tasks use state.t_now, never time.time()) + - Partial commands OK (hardware holds last value) + - Aggregated preemption (one notification per task per tick) + + Example: + >>> from dimos.control import ControlCoordinator + >>> from dimos.hardware.manipulators.xarm import XArmAdapter + >>> + >>> orch = ControlCoordinator(tick_rate=100.0) + >>> adapter = XArmAdapter(ip="192.168.1.185", dof=7) + >>> adapter.connect() + >>> orch.add_hardware("left_arm", adapter, joint_prefix="left") + >>> orch.start() + """ + + # Output: Aggregated joint state for external consumers + joint_state: Out[JointState] + + # Input: Streaming joint commands for real-time control + joint_command: In[JointState] + + # Input: Streaming cartesian commands for CartesianIKTask + # Uses frame_id as task name for routing + cartesian_command: In[PoseStamped] + + # Input: Teleop buttons for engage/disengage signaling + buttons: In[Buttons] + + config: ControlCoordinatorConfig + default_config = ControlCoordinatorConfig + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + # Connected hardware (keyed by hardware_id) + self._hardware: dict[HardwareId, ConnectedHardware] = {} + self._hardware_lock = threading.Lock() + + # Joint -> hardware mapping (built when hardware added) + self._joint_to_hardware: dict[JointName, HardwareId] = {} + + # Registered tasks + self._tasks: dict[TaskName, ControlTask] = {} + self._task_lock = threading.Lock() + + # Tick loop (created on start) + self._tick_loop: TickLoop | None = None + + # Subscription handles for streaming commands + self._joint_command_unsub: Callable[[], None] | None = None + self._cartesian_command_unsub: Callable[[], None] | None = None + self._buttons_unsub: Callable[[], None] | None = None + + logger.info(f"ControlCoordinator 
initialized at {self.config.tick_rate}Hz") + + # ========================================================================= + # Config-based Setup + # ========================================================================= + + def _setup_from_config(self) -> None: + """Create hardware and tasks from config (called on start).""" + hardware_added: list[str] = [] + + try: + for component in self.config.hardware: + self._setup_hardware(component) + hardware_added.append(component.hardware_id) + + for task_cfg in self.config.tasks: + task = self._create_task_from_config(task_cfg) + self.add_task(task) + + except Exception: + # Rollback: clean up all successfully added hardware + for hw_id in hardware_added: + try: + self.remove_hardware(hw_id) + except Exception: + pass + raise + + def _setup_hardware(self, component: HardwareComponent) -> None: + """Connect and add a single hardware adapter.""" + adapter = self._create_adapter(component) + + if not adapter.connect(): + raise RuntimeError(f"Failed to connect to {component.adapter_type} adapter") + + try: + if component.auto_enable and hasattr(adapter, "write_enable"): + adapter.write_enable(True) + + self.add_hardware(adapter, component) + except Exception: + adapter.disconnect() + raise + + def _create_adapter(self, component: HardwareComponent) -> ManipulatorAdapter: + """Create a manipulator adapter from component config.""" + from dimos.hardware.manipulators.registry import adapter_registry + + return adapter_registry.create( + component.adapter_type, + dof=len(component.joints), + address=component.address, + ) + + def _create_task_from_config(self, cfg: TaskConfig) -> ControlTask: + """Create a control task from config.""" + task_type = cfg.type.lower() + + if task_type == "trajectory": + from dimos.control.tasks import JointTrajectoryTask, JointTrajectoryTaskConfig + + return JointTrajectoryTask( + cfg.name, + JointTrajectoryTaskConfig( + joint_names=cfg.joint_names, + priority=cfg.priority, + ), + ) + + elif 
task_type == "servo": + from dimos.control.tasks import JointServoTask, JointServoTaskConfig + + return JointServoTask( + cfg.name, + JointServoTaskConfig( + joint_names=cfg.joint_names, + priority=cfg.priority, + ), + ) + + elif task_type == "velocity": + from dimos.control.tasks import JointVelocityTask, JointVelocityTaskConfig + + return JointVelocityTask( + cfg.name, + JointVelocityTaskConfig( + joint_names=cfg.joint_names, + priority=cfg.priority, + ), + ) + + elif task_type == "cartesian_ik": + from dimos.control.tasks import CartesianIKTask, CartesianIKTaskConfig + + if cfg.model_path is None: + raise ValueError(f"CartesianIKTask '{cfg.name}' requires model_path in TaskConfig") + + return CartesianIKTask( + cfg.name, + CartesianIKTaskConfig( + joint_names=cfg.joint_names, + model_path=cfg.model_path, + ee_joint_id=cfg.ee_joint_id, + priority=cfg.priority, + ), + ) + + elif task_type == "teleop_ik": + from dimos.control.tasks.teleop_task import TeleopIKTask, TeleopIKTaskConfig + + if cfg.model_path is None: + raise ValueError(f"TeleopIKTask '{cfg.name}' requires model_path in TaskConfig") + + return TeleopIKTask( + cfg.name, + TeleopIKTaskConfig( + joint_names=cfg.joint_names, + model_path=cfg.model_path, + ee_joint_id=cfg.ee_joint_id, + priority=cfg.priority, + hand=cfg.hand, + ), + ) + + else: + raise ValueError(f"Unknown task type: {task_type}") + + # ========================================================================= + # Hardware Management (RPC) + # ========================================================================= + + @rpc + def add_hardware( + self, + adapter: ManipulatorAdapter, + component: HardwareComponent, + ) -> bool: + """Register a hardware adapter with the coordinator.""" + with self._hardware_lock: + if component.hardware_id in self._hardware: + logger.warning(f"Hardware {component.hardware_id} already registered") + return False + + connected = ConnectedHardware( + adapter=adapter, + component=component, + ) + 
self._hardware[component.hardware_id] = connected + + for joint_name in connected.joint_names: + self._joint_to_hardware[joint_name] = component.hardware_id + + logger.info( + f"Added hardware {component.hardware_id} with joints: {connected.joint_names}" + ) + return True + + @rpc + def remove_hardware(self, hardware_id: str) -> bool: + """Remove a hardware interface. + + Note: For safety, call this only when no tasks are actively using this + hardware. Consider stopping the coordinator before removing hardware. + """ + with self._hardware_lock: + if hardware_id not in self._hardware: + return False + + interface = self._hardware[hardware_id] + hw_joints = set(interface.joint_names) + + with self._task_lock: + for task in self._tasks.values(): + if task.is_active(): + claimed_joints = task.claim().joints + overlap = hw_joints & claimed_joints + if overlap: + logger.error( + f"Cannot remove hardware {hardware_id}: " + f"task '{task.name}' is actively using joints {overlap}" + ) + return False + + for joint_name in interface.joint_names: + del self._joint_to_hardware[joint_name] + + interface.disconnect() + del self._hardware[hardware_id] + logger.info(f"Removed hardware {hardware_id}") + return True + + @rpc + def list_hardware(self) -> list[str]: + """List registered hardware IDs.""" + with self._hardware_lock: + return list(self._hardware.keys()) + + @rpc + def list_joints(self) -> list[str]: + """List all joint names across all hardware.""" + with self._hardware_lock: + return list(self._joint_to_hardware.keys()) + + @rpc + def get_joint_positions(self) -> dict[str, float]: + """Get current joint positions for all joints.""" + with self._hardware_lock: + positions: dict[str, float] = {} + for hw in self._hardware.values(): + state = hw.read_state() # {joint_name: JointState} + for joint_name, joint_state in state.items(): + positions[joint_name] = joint_state.position + return positions + + # 
========================================================================= + # Task Management (RPC) + # ========================================================================= + + @rpc + def add_task(self, task: ControlTask) -> bool: + """Register a task with the coordinator.""" + if not isinstance(task, ControlTask): + raise TypeError("task must implement ControlTask") + + with self._task_lock: + if task.name in self._tasks: + logger.warning(f"Task {task.name} already registered") + return False + self._tasks[task.name] = task + logger.info(f"Added task {task.name}") + return True + + @rpc + def remove_task(self, task_name: TaskName) -> bool: + """Remove a task by name.""" + with self._task_lock: + if task_name in self._tasks: + del self._tasks[task_name] + logger.info(f"Removed task {task_name}") + return True + return False + + @rpc + def get_task(self, task_name: TaskName) -> ControlTask | None: + """Get a task by name.""" + with self._task_lock: + return self._tasks.get(task_name) + + @rpc + def list_tasks(self) -> list[str]: + """List registered task names.""" + with self._task_lock: + return list(self._tasks.keys()) + + @rpc + def get_active_tasks(self) -> list[str]: + """List currently active task names.""" + with self._task_lock: + return [name for name, task in self._tasks.items() if task.is_active()] + + # ========================================================================= + # Streaming Control + # ========================================================================= + + def _on_joint_command(self, msg: JointState) -> None: + """Route incoming JointState to streaming tasks by joint name. + + Routes position data to servo tasks and velocity data to velocity tasks. + Each task only receives data for joints it claims. 
+ """ + if not msg.name: + return + + t_now = time.perf_counter() + incoming_joints = set(msg.name) + + with self._task_lock: + for task in self._tasks.values(): + claimed_joints = task.claim().joints + + # Skip if no overlap between incoming and claimed joints + if not (claimed_joints & incoming_joints): + continue + + # Route to servo tasks (position control) + if msg.position: + positions_by_name = dict(zip(msg.name, msg.position, strict=False)) + task.set_target_by_name(positions_by_name, t_now) + + # Route to velocity tasks (velocity control) + elif msg.velocity: + velocities_by_name = dict(zip(msg.name, msg.velocity, strict=False)) + task.set_velocities_by_name(velocities_by_name, t_now) + + def _on_cartesian_command(self, msg: PoseStamped) -> None: + """Route incoming PoseStamped to CartesianIKTask by task name. + + Uses frame_id as the target task name for routing. + """ + task_name = msg.frame_id + if not task_name: + logger.warning("Received cartesian_command with empty frame_id (task name)") + return + + t_now = time.perf_counter() + + with self._task_lock: + task = self._tasks.get(task_name) + if task is None: + logger.warning(f"Cartesian command for unknown task: {task_name}") + return + + task.on_cartesian_command(msg, t_now) + + def _on_buttons(self, msg: Buttons) -> None: + """Forward button state to all tasks.""" + with self._task_lock: + for task in self._tasks.values(): + task.on_buttons(msg) + + @rpc + def task_invoke( + self, task_name: TaskName, method: str, kwargs: dict[str, Any] | None = None + ) -> Any: + """Invoke a method on a task. 
Pass t_now=None to auto-inject current time.""" + with self._task_lock: + task = self._tasks.get(task_name) + if task is None: + logger.warning(f"Task {task_name} not found") + return None + + if not hasattr(task, method): + logger.warning(f"Task {task_name} has no method {method}") + return None + + kwargs = kwargs or {} + + # Auto-inject t_now if requested (None means "use current time") + if "t_now" in kwargs and kwargs["t_now"] is None: + kwargs["t_now"] = time.perf_counter() + + return getattr(task, method)(**kwargs) + + # ========================================================================= + # Gripper + # ========================================================================= + + @rpc + def set_gripper_position(self, hardware_id: str, position: float) -> bool: + """Set gripper position on a specific hardware device. + + Args: + hardware_id: ID of the hardware with the gripper + position: Gripper position in meters + """ + with self._hardware_lock: + hw = self._hardware.get(hardware_id) + if hw is None: + logger.warning(f"Hardware '{hardware_id}' not found for gripper command") + return False + return hw.adapter.write_gripper_position(position) + + @rpc + def get_gripper_position(self, hardware_id: str) -> float | None: + """Get gripper position from a specific hardware device. 
+ + Args: + hardware_id: ID of the hardware with the gripper + """ + with self._hardware_lock: + hw = self._hardware.get(hardware_id) + if hw is None: + return None + return hw.adapter.read_gripper_position() + + # ========================================================================= + # Lifecycle + # ========================================================================= + + @rpc + def start(self) -> None: + """Start the coordinator control loop.""" + if self._tick_loop and self._tick_loop.is_running: + logger.warning("Coordinator already running") + return + + super().start() + + # Setup hardware and tasks from config (if any) + if self.config.hardware or self.config.tasks: + self._setup_from_config() + + # Create and start tick loop + publish_cb = self.joint_state.publish if self.config.publish_joint_state else None + self._tick_loop = TickLoop( + tick_rate=self.config.tick_rate, + hardware=self._hardware, + hardware_lock=self._hardware_lock, + tasks=self._tasks, + task_lock=self._task_lock, + joint_to_hardware=self._joint_to_hardware, + publish_callback=publish_cb, + frame_id=self.config.joint_state_frame_id, + log_ticks=self.config.log_ticks, + ) + self._tick_loop.start() + + # Subscribe to joint commands if any streaming tasks configured + streaming_types = ("servo", "velocity") + has_streaming = any(t.type in streaming_types for t in self.config.tasks) + if has_streaming: + try: + self._joint_command_unsub = self.joint_command.subscribe(self._on_joint_command) + logger.info("Subscribed to joint_command for streaming tasks") + except Exception: + logger.warning( + "Streaming tasks configured but could not subscribe to joint_command. " + "Use task_invoke RPC or set transport via blueprint." 
+ ) + + # Subscribe to cartesian commands if any cartesian_ik tasks configured + has_cartesian_ik = any(t.type in ("cartesian_ik", "teleop_ik") for t in self.config.tasks) + if has_cartesian_ik: + try: + self._cartesian_command_unsub = self.cartesian_command.subscribe( + self._on_cartesian_command + ) + logger.info("Subscribed to cartesian_command for CartesianIK/TeleopIK tasks") + except Exception: + logger.warning( + "CartesianIK/TeleopIK tasks configured but could not subscribe to cartesian_command. " + "Use task_invoke RPC or set transport via blueprint." + ) + + # Subscribe to buttons if any teleop_ik tasks configured (engage/disengage) + has_teleop_ik = any(t.type == "teleop_ik" for t in self.config.tasks) + if has_teleop_ik: + self._buttons_unsub = self.buttons.subscribe(self._on_buttons) + logger.info("Subscribed to buttons for engage/disengage") + + logger.info(f"ControlCoordinator started at {self.config.tick_rate}Hz") + + @rpc + def stop(self) -> None: + """Stop the coordinator.""" + logger.info("Stopping ControlCoordinator...") + + # Unsubscribe from streaming commands + if self._joint_command_unsub: + self._joint_command_unsub() + self._joint_command_unsub = None + if self._cartesian_command_unsub: + self._cartesian_command_unsub() + self._cartesian_command_unsub = None + if self._buttons_unsub: + self._buttons_unsub() + self._buttons_unsub = None + + if self._tick_loop: + self._tick_loop.stop() + + # Disconnect all hardware adapters + with self._hardware_lock: + for hw_id, interface in self._hardware.items(): + try: + interface.disconnect() + logger.info(f"Disconnected hardware {hw_id}") + except Exception as e: + logger.error(f"Error disconnecting hardware {hw_id}: {e}") + + super().stop() + logger.info("ControlCoordinator stopped") + + @rpc + def get_tick_count(self) -> int: + """Get the number of ticks since start.""" + return self._tick_loop.tick_count if self._tick_loop else 0 + + +# Blueprint export +control_coordinator = 
ControlCoordinator.blueprint + + +__all__ = [ + "ControlCoordinator", + "ControlCoordinatorConfig", + "HardwareComponent", + "TaskConfig", + "control_coordinator", +] diff --git a/dimos/control/examples/cartesian_ik_jogger.py b/dimos/control/examples/cartesian_ik_jogger.py new file mode 100644 index 0000000000..d2a2f4d119 --- /dev/null +++ b/dimos/control/examples/cartesian_ik_jogger.py @@ -0,0 +1,349 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Pygame-based cartesian jogger for CartesianIKTask. + +Publishes PoseStamped commands to the coordinator via LCM. +The frame_id is used as the task name for routing. + +Keyboard controls for jogging robot end-effector in world frame: + W/S: +X/-X (forward/backward) + A/D: -Y/+Y (left/right) + Q/E: +Z/-Z (up/down) + R/F: +Roll/-Roll + T/G: +Pitch/-Pitch + Y/H: +Yaw/-Yaw + SPACE: Reset to home pose + ESC: Quit + +Usage: + python -m dimos.control.examples.cartesian_ik_jogger +""" + +from __future__ import annotations + +from dataclasses import dataclass +import time +from typing import Any + +import numpy as np + +try: + import pygame +except ImportError: + print("pygame not installed. 
Install with: pip install pygame") + raise + + +@dataclass +class JogState: + """Current jogging state.""" + + x: float = 0.0 + y: float = 0.0 + z: float = 0.0 + roll: float = 0.0 + pitch: float = 0.0 + yaw: float = 0.0 + + def copy(self) -> JogState: + return JogState( + x=self.x, + y=self.y, + z=self.z, + roll=self.roll, + pitch=self.pitch, + yaw=self.yaw, + ) + + @classmethod + def from_fk(cls, model_path: str, ee_joint_id: int) -> JogState: + """Create JogState from forward kinematics at zero configuration. + + This ensures the initial pose is reachable by the robot. + """ + import pinocchio # type: ignore[import-untyped] + + # Load model + if model_path.endswith(".xml"): + model = pinocchio.buildModelFromMJCF(model_path) + else: + model = pinocchio.buildModelFromUrdf(model_path) + + data = model.createData() + + # Compute FK at zero configuration + q_zero = np.zeros(model.nq) + pinocchio.forwardKinematics(model, data, q_zero) + + # Get EE pose + ee_pose = data.oMi[ee_joint_id] + position = ee_pose.translation + rotation = ee_pose.rotation + + # Convert rotation matrix to RPY + rpy = pinocchio.rpy.matrixToRpy(rotation) + + print("Initial EE pose from FK at q=0:") + print(f" Position: x={position[0]:.3f}, y={position[1]:.3f}, z={position[2]:.3f}") + print( + f" Orientation: roll={np.degrees(rpy[0]):.1f}°, pitch={np.degrees(rpy[1]):.1f}°, yaw={np.degrees(rpy[2]):.1f}°" + ) + + return cls( + x=float(position[0]), + y=float(position[1]), + z=float(position[2]), + roll=float(rpy[0]), + pitch=float(rpy[1]), + yaw=float(rpy[2]), + ) + + def to_pose_stamped(self, task_name: str) -> Any: + """Convert to PoseStamped for LCM publishing. 
+ + Args: + task_name: Task name to use as frame_id for routing + """ + from dimos.msgs.geometry_msgs import PoseStamped + from dimos.msgs.geometry_msgs.Quaternion import Quaternion + from dimos.msgs.geometry_msgs.Vector3 import Vector3 + + position = Vector3(self.x, self.y, self.z) + orientation = Quaternion.from_euler(Vector3(self.roll, self.pitch, self.yaw)) + + return PoseStamped( + ts=time.time(), + frame_id=task_name, # Used for task routing + position=position, + orientation=orientation, + ) + + +# Jog speeds +LINEAR_SPEED = 0.05 # m/s +ANGULAR_SPEED = 0.5 # rad/s + +# Position limits (workspace bounds) - will be updated based on initial pose +X_LIMITS = (-0.5, 0.5) +Y_LIMITS = (-0.5, 0.5) +Z_LIMITS = (-0.2, 0.6) + +# Task name for routing (must match blueprint config) +TASK_NAME = "cartesian_ik_arm" + + +def clamp(value: float, min_val: float, max_val: float) -> float: + return max(min_val, min(max_val, value)) + + +def _get_piper_model_path() -> str: + """Get path to Piper MJCF model.""" + from dimos.utils.data import get_data + + piper_path = get_data("piper_description") + return str(piper_path / "mujoco_model" / "piper_no_gripper_description.xml") + + +def run_jogger_ui(model_path: str | None = None, ee_joint_id: int = 6) -> None: + """Run the pygame-based cartesian jogger UI. + + This is ONLY the UI - it publishes PoseStamped to LCM. + The coordinator must be running separately to receive commands. + + Args: + model_path: Path to robot model (MJCF/URDF) for computing initial FK pose. + If None, uses Piper model. 
+ ee_joint_id: End-effector joint ID in the model + """ + from dimos.core.transport import LCMTransport + from dimos.msgs.geometry_msgs import PoseStamped + + # Use Piper model if not specified + if model_path is None: + model_path = _get_piper_model_path() + + print("Starting Cartesian IK Jogger UI...") + print("Publishing to /coordinator/cartesian_command") + print("(Coordinator must be running separately to receive commands)") + + # Create LCM publisher for sending cartesian commands + transport: LCMTransport[PoseStamped] = LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ) + + # Initialize pygame + pygame.init() + screen = pygame.display.set_mode((600, 400)) + pygame.display.set_caption("Cartesian IK Jogger") + font = pygame.font.Font(None, 28) + clock = pygame.time.Clock() + + # Initial pose from forward kinematics at zero configuration + # This ensures we start at a pose that's reachable from q=[0,0,0,0,0,0] + home_pose = JogState.from_fk(model_path, ee_joint_id) + current_pose = home_pose.copy() + + # Send initial pose via LCM + transport.publish(current_pose.to_pose_stamped(TASK_NAME)) + + running = True + last_time = time.perf_counter() + + print("\nControls:") + print(" W/S: +X/-X (forward/backward)") + print(" A/D: -Y/+Y (left/right)") + print(" Q/E: +Z/-Z (up/down)") + print(" R/F: +Roll/-Roll") + print(" T/G: +Pitch/-Pitch") + print(" Y/H: +Yaw/-Yaw") + print(" SPACE: Reset to home") + print(" ESC: Quit") + print() + + while running: + dt = time.perf_counter() - last_time + last_time = time.perf_counter() + + # Handle events + for event in pygame.event.get(): + if event.type == pygame.QUIT: + running = False + elif event.type == pygame.KEYDOWN: + if event.key == pygame.K_ESCAPE: + running = False + elif event.key == pygame.K_SPACE: + current_pose = home_pose.copy() + print("Reset to home pose") + + # Get pressed keys for continuous jogging + keys = pygame.key.get_pressed() + + # Linear motion + if keys[pygame.K_w]: + current_pose.x += 
LINEAR_SPEED * dt + if keys[pygame.K_s]: + current_pose.x -= LINEAR_SPEED * dt + if keys[pygame.K_a]: + current_pose.y -= LINEAR_SPEED * dt + if keys[pygame.K_d]: + current_pose.y += LINEAR_SPEED * dt + if keys[pygame.K_q]: + current_pose.z += LINEAR_SPEED * dt + if keys[pygame.K_e]: + current_pose.z -= LINEAR_SPEED * dt + + # Angular motion + if keys[pygame.K_r]: + current_pose.roll += ANGULAR_SPEED * dt + if keys[pygame.K_f]: + current_pose.roll -= ANGULAR_SPEED * dt + if keys[pygame.K_t]: + current_pose.pitch += ANGULAR_SPEED * dt + if keys[pygame.K_g]: + current_pose.pitch -= ANGULAR_SPEED * dt + if keys[pygame.K_y]: + current_pose.yaw += ANGULAR_SPEED * dt + if keys[pygame.K_h]: + current_pose.yaw -= ANGULAR_SPEED * dt + + # Clamp to workspace limits + current_pose.x = clamp(current_pose.x, *X_LIMITS) + current_pose.y = clamp(current_pose.y, *Y_LIMITS) + current_pose.z = clamp(current_pose.z, *Z_LIMITS) + + # Publish pose via LCM (frame_id = task name for routing) + transport.publish(current_pose.to_pose_stamped(TASK_NAME)) + + # Draw UI + screen.fill((30, 30, 30)) + + # Title + title = font.render("Cartesian IK Jogger", True, (255, 255, 255)) + screen.blit(title, (200, 20)) + + # Position display + y_offset = 70 + pos_text = ( + f"Position: X={current_pose.x:.3f} Y={current_pose.y:.3f} Z={current_pose.z:.3f}" + ) + pos_surf = font.render(pos_text, True, (100, 255, 100)) + screen.blit(pos_surf, (50, y_offset)) + + # Orientation display + y_offset += 30 + ori_text = f"Orientation: R={np.degrees(current_pose.roll):.1f}° P={np.degrees(current_pose.pitch):.1f}° Y={np.degrees(current_pose.yaw):.1f}°" + ori_surf = font.render(ori_text, True, (100, 200, 255)) + screen.blit(ori_surf, (50, y_offset)) + + # Controls + y_offset += 50 + controls = [ + ("W/S", "+X/-X (forward/back)"), + ("A/D", "-Y/+Y (left/right)"), + ("Q/E", "+Z/-Z (up/down)"), + ("R/F", "+Roll/-Roll"), + ("T/G", "+Pitch/-Pitch"), + ("Y/H", "+Yaw/-Yaw"), + ("SPACE", "Reset to home"), + ("ESC", "Quit"), + 
] + + for key, desc in controls: + text = f"{key}: {desc}" + surf = font.render(text, True, (180, 180, 180)) + screen.blit(surf, (50, y_offset)) + y_offset += 25 + + # Active keys indicator + y_offset += 20 + active_keys = [] + if keys[pygame.K_w]: + active_keys.append("W") + if keys[pygame.K_s]: + active_keys.append("S") + if keys[pygame.K_a]: + active_keys.append("A") + if keys[pygame.K_d]: + active_keys.append("D") + if keys[pygame.K_q]: + active_keys.append("Q") + if keys[pygame.K_e]: + active_keys.append("E") + + if active_keys: + active_text = f"Active: {' '.join(active_keys)}" + active_surf = font.render(active_text, True, (255, 255, 0)) + screen.blit(active_surf, (50, y_offset)) + + pygame.display.flip() + clock.tick(50) # 50 Hz update rate + + # Cleanup + print("Jogger UI stopped.") + pygame.quit() + + +def main() -> None: + """Run the jogger UI standalone. + + Note: This only runs the UI. The coordinator must be started separately: + Terminal 1: dimos run coordinator-cartesian-ik-mock + Terminal 2: python -m dimos.control.examples.cartesian_ik_jogger + """ + run_jogger_ui() + + +if __name__ == "__main__": + main() diff --git a/dimos/control/hardware_interface.py b/dimos/control/hardware_interface.py index ef62f974c6..9f6eb99851 100644 --- a/dimos/control/hardware_interface.py +++ b/dimos/control/hardware_interface.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Hardware interface for the ControlOrchestrator. +"""Connected hardware for the ControlCoordinator. 
-Wraps ManipulatorBackend with orchestrator-specific features: +Wraps ManipulatorAdapter with coordinator-specific features: - Namespaced joint names (e.g., "left_joint1") - Unified read/write interface - Hold-last-value for partial commands @@ -24,92 +24,45 @@ import logging import time -from typing import Protocol, runtime_checkable +from typing import TYPE_CHECKING -from dimos.hardware.manipulators.spec import ControlMode, ManipulatorBackend +from dimos.hardware.manipulators.spec import ControlMode, ManipulatorAdapter -logger = logging.getLogger(__name__) - - -@runtime_checkable -class HardwareInterface(Protocol): - """Protocol for hardware that the orchestrator can control. - - This wraps ManipulatorBackend with orchestrator-specific features: - - Namespaced joint names (e.g., "left_arm_joint1") - - Unified read/write interface - - State caching - """ - - @property - def hardware_id(self) -> str: - """Unique ID for this hardware (e.g., 'left_arm').""" - ... - - @property - def joint_names(self) -> list[str]: - """Ordered list of fully-qualified joint names this hardware controls.""" - ... - - def read_state(self) -> dict[str, tuple[float, float, float]]: - """Read current state. - - Returns: - Dict of joint_name -> (position, velocity, effort) - """ - ... - - def write_command(self, commands: dict[str, float], mode: ControlMode) -> bool: - """Write commands to hardware. +if TYPE_CHECKING: + from dimos.control.components import HardwareComponent, HardwareId, JointName, JointState - IMPORTANT: Accepts partial joint sets. Missing joints hold last value. - - Args: - commands: {joint_name: value} - can be partial - mode: Control mode (POSITION, VELOCITY, TORQUE) +logger = logging.getLogger(__name__) - Returns: - True if command was sent successfully - """ - ... - def disconnect(self) -> None: - """Disconnect the underlying hardware.""" - ... +class ConnectedHardware: + """Runtime wrapper for hardware connected to the coordinator. 
+ Wraps a ManipulatorAdapter with coordinator-specific features: + - Joint names from HardwareComponent config + - Hold-last-value for partial commands + - Converts between joint names and array indices -class BackendHardwareInterface: - """Concrete implementation wrapping a ManipulatorBackend. - - Features: - - Generates namespaced joint names (prefix_joint1, prefix_joint2, ...) - - Holds last commanded value for partial commands - - On first tick, reads current position from hardware for missing joints + Created when hardware is added to the coordinator. One instance + per physical hardware device. """ def __init__( self, - backend: ManipulatorBackend, - hardware_id: str, - joint_prefix: str | None = None, + adapter: ManipulatorAdapter, + component: HardwareComponent, ) -> None: """Initialize hardware interface. Args: - backend: ManipulatorBackend instance (XArmBackend, PiperBackend, etc.) - hardware_id: Unique identifier for this hardware - joint_prefix: Prefix for joint names (defaults to hardware_id) + adapter: ManipulatorAdapter instance (XArmAdapter, PiperAdapter, etc.) + component: Hardware component with joints config """ - if not isinstance(backend, ManipulatorBackend): - raise TypeError("backend must implement ManipulatorBackend") - - self._backend = backend - self._hardware_id = hardware_id - self._prefix = joint_prefix or hardware_id - self._dof = backend.get_dof() + if not isinstance(adapter, ManipulatorAdapter): + raise TypeError("adapter must implement ManipulatorAdapter") - # Generate joint names: prefix_joint1, prefix_joint2, ... 
- self._joint_names = [f"{self._prefix}_joint{i + 1}" for i in range(self._dof)] + self._adapter = adapter + self._component = component + self._joint_names = component.joints # Track last commanded values for hold-last behavior self._last_commanded: dict[str, float] = {} @@ -118,36 +71,52 @@ def __init__( self._current_mode: ControlMode | None = None @property - def hardware_id(self) -> str: + def adapter(self) -> ManipulatorAdapter: + """The underlying hardware adapter.""" + return self._adapter + + @property + def hardware_id(self) -> HardwareId: """Unique ID for this hardware.""" - return self._hardware_id + return self._component.hardware_id @property - def joint_names(self) -> list[str]: + def joint_names(self) -> list[JointName]: """Ordered list of joint names.""" return self._joint_names + @property + def component(self) -> HardwareComponent: + """The hardware component config.""" + return self._component + @property def dof(self) -> int: """Degrees of freedom.""" - return self._dof + return len(self._joint_names) def disconnect(self) -> None: - """Disconnect the underlying backend.""" - self._backend.disconnect() + """Disconnect the underlying adapter.""" + self._adapter.disconnect() - def read_state(self) -> dict[str, tuple[float, float, float]]: - """Read state as {joint_name: (position, velocity, effort)}. + def read_state(self) -> dict[JointName, JointState]: + """Read state as {joint_name: JointState}. 
Returns: - Dict mapping joint name to (position, velocity, effort) tuple + Dict mapping joint name to JointState with position, velocity, effort """ - positions = self._backend.read_joint_positions() - velocities = self._backend.read_joint_velocities() - efforts = self._backend.read_joint_efforts() + from dimos.control.components import JointState + + positions = self._adapter.read_joint_positions() + velocities = self._adapter.read_joint_velocities() + efforts = self._adapter.read_joint_efforts() return { - name: (positions[i], velocities[i], efforts[i]) + name: JointState( + position=positions[i], + velocity=velocities[i], + effort=efforts[i], + ) for i, name in enumerate(self._joint_names) } @@ -176,29 +145,29 @@ def write_command(self, commands: dict[str, float], mode: ControlMode) -> bool: self._last_commanded[joint_name] = value elif joint_name not in self._warned_unknown_joints: logger.warning( - f"Hardware {self._hardware_id} received command for unknown joint " + f"Hardware {self.hardware_id} received command for unknown joint " f"{joint_name}. 
Valid joints: {self._joint_names}" ) self._warned_unknown_joints.add(joint_name) - # Build ordered list for backend + # Build ordered list for adapter ordered = self._build_ordered_command() # Switch control mode if needed if mode != self._current_mode: - if not self._backend.set_control_mode(mode): - logger.warning(f"Hardware {self._hardware_id} failed to switch to {mode.name}") + if not self._adapter.set_control_mode(mode): + logger.warning(f"Hardware {self.hardware_id} failed to switch to {mode.name}") return False self._current_mode = mode - # Send to backend + # Send to adapter match mode: case ControlMode.POSITION | ControlMode.SERVO_POSITION: - return self._backend.write_joint_positions(ordered) + return self._adapter.write_joint_positions(ordered) case ControlMode.VELOCITY: - return self._backend.write_joint_velocities(ordered) + return self._adapter.write_joint_velocities(ordered) case ControlMode.TORQUE: - logger.warning(f"Hardware {self._hardware_id} does not support torque mode") + logger.warning(f"Hardware {self.hardware_id} does not support torque mode") return False case _: return False @@ -207,7 +176,7 @@ def _initialize_last_commanded(self) -> None: """Initialize last_commanded with current hardware positions.""" for _ in range(10): try: - current = self._backend.read_joint_positions() + current = self._adapter.read_joint_positions() for i, name in enumerate(self._joint_names): self._last_commanded[name] = current[i] self._initialized = True @@ -216,7 +185,7 @@ def _initialize_last_commanded(self) -> None: time.sleep(0.01) raise RuntimeError( - f"Hardware {self._hardware_id} failed to read initial positions after retries" + f"Hardware {self.hardware_id} failed to read initial positions after retries" ) def _build_ordered_command(self) -> list[float]: @@ -225,6 +194,5 @@ def _build_ordered_command(self) -> list[float]: __all__ = [ - "BackendHardwareInterface", - "HardwareInterface", + "ConnectedHardware", ] diff --git a/dimos/control/orchestrator.py 
b/dimos/control/orchestrator.py deleted file mode 100644 index 2d64620b13..0000000000 --- a/dimos/control/orchestrator.py +++ /dev/null @@ -1,538 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""ControlOrchestrator module. - -Centralized control orchestrator that replaces per-driver/per-controller -loops with a single deterministic tick-based system. - -Features: -- Single tick loop (read → compute → arbitrate → route → write) -- Per-joint arbitration (highest priority wins) -- Mode conflict detection -- Partial command support (hold last value) -- Aggregated preemption notifications -""" - -from __future__ import annotations - -from dataclasses import dataclass, field -import threading -import time -from typing import TYPE_CHECKING, Any - -from dimos.control.hardware_interface import BackendHardwareInterface, HardwareInterface -from dimos.control.task import ControlTask -from dimos.control.tick_loop import TickLoop -from dimos.core import Module, Out, rpc -from dimos.core.module import ModuleConfig -from dimos.msgs.sensor_msgs import ( - JointState, # noqa: TC001 - needed at runtime for Out[JointState] -) -from dimos.msgs.trajectory_msgs import JointTrajectory, TrajectoryState -from dimos.utils.logging_config import setup_logger - -if TYPE_CHECKING: - from dimos.hardware.manipulators.spec import ManipulatorBackend - -logger = setup_logger() - - -# 
============================================================================= -# Configuration -# ============================================================================= - - -@dataclass -class HardwareConfig: - """Configuration for a hardware backend. - - Attributes: - id: Unique hardware identifier (e.g., "arm", "left_arm") - type: Backend type ("mock", "xarm", "piper") - dof: Degrees of freedom - joint_prefix: Prefix for joint names (defaults to id) - ip: IP address (required for xarm) - can_port: CAN port (for piper, default "can0") - auto_enable: Whether to auto-enable servos (default True) - """ - - id: str - type: str = "mock" - dof: int = 7 - joint_prefix: str | None = None - ip: str | None = None - can_port: str | None = None - auto_enable: bool = True - - -@dataclass -class TaskConfig: - """Configuration for a control task. - - Attributes: - name: Task name (e.g., "traj_arm") - type: Task type ("trajectory") - joint_names: List of joint names this task controls - priority: Task priority (higher wins arbitration) - """ - - name: str - type: str = "trajectory" - joint_names: list[str] = field(default_factory=lambda: []) - priority: int = 10 - - -@dataclass -class TaskStatus: - """Status of a control task. - - Attributes: - active: Whether the task is currently active - state: Task state name (e.g., "IDLE", "RUNNING", "DONE") - progress: Task progress from 0.0 to 1.0 - """ - - active: bool - state: str | None = None - progress: float | None = None - - -@dataclass -class ControlOrchestratorConfig(ModuleConfig): - """Configuration for the ControlOrchestrator. 
- - Attributes: - tick_rate: Control loop frequency in Hz (default: 100) - publish_joint_state: Whether to publish aggregated JointState - joint_state_frame_id: Frame ID for published JointState - log_ticks: Whether to log tick information (verbose) - hardware: List of hardware configurations to create on start - tasks: List of task configurations to create on start - """ - - tick_rate: float = 100.0 - publish_joint_state: bool = True - joint_state_frame_id: str = "orchestrator" - log_ticks: bool = False - hardware: list[HardwareConfig] = field(default_factory=lambda: []) - tasks: list[TaskConfig] = field(default_factory=lambda: []) - - -# ============================================================================= -# ControlOrchestrator Module -# ============================================================================= - - -class ControlOrchestrator(Module[ControlOrchestratorConfig]): - """Centralized control orchestrator with per-joint arbitration. - - Single tick loop that: - 1. Reads state from all hardware - 2. Runs all active tasks - 3. Arbitrates conflicts per-joint (highest priority wins) - 4. Routes commands to hardware - 5. 
Publishes aggregated joint state - - Key design decisions: - - Joint-centric commands (not hardware-centric) - - Per-joint arbitration (not per-hardware) - - Centralized time (tasks use state.t_now, never time.time()) - - Partial commands OK (hardware holds last value) - - Aggregated preemption (one notification per task per tick) - - Example: - >>> from dimos.control import ControlOrchestrator - >>> from dimos.hardware.manipulators.xarm import XArmBackend - >>> - >>> orch = ControlOrchestrator(tick_rate=100.0) - >>> backend = XArmBackend(ip="192.168.1.185", dof=7) - >>> backend.connect() - >>> orch.add_hardware("left_arm", backend, joint_prefix="left") - >>> orch.start() - """ - - # Output: Aggregated joint state for external consumers - joint_state: Out[JointState] - - config: ControlOrchestratorConfig - default_config = ControlOrchestratorConfig - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - # Hardware interfaces (keyed by hardware_id) - self._hardware: dict[str, HardwareInterface] = {} - self._hardware_lock = threading.Lock() - - # Joint -> hardware mapping (built when hardware added) - self._joint_to_hardware: dict[str, str] = {} - - # Registered tasks - self._tasks: dict[str, ControlTask] = {} - self._task_lock = threading.Lock() - - # Tick loop (created on start) - self._tick_loop: TickLoop | None = None - - logger.info(f"ControlOrchestrator initialized at {self.config.tick_rate}Hz") - - # ========================================================================= - # Config-based Setup - # ========================================================================= - - def _setup_from_config(self) -> None: - """Create hardware and tasks from config (called on start).""" - hardware_added: list[str] = [] - - try: - for hw_cfg in self.config.hardware: - self._setup_hardware(hw_cfg) - hardware_added.append(hw_cfg.id) - - for task_cfg in self.config.tasks: - task = self._create_task_from_config(task_cfg) - 
self.add_task(task) - - except Exception: - # Rollback: clean up all successfully added hardware - for hw_id in hardware_added: - try: - self.remove_hardware(hw_id) - except Exception: - pass - raise - - def _setup_hardware(self, hw_cfg: HardwareConfig) -> None: - """Connect and add a single hardware backend.""" - backend = self._create_backend_from_config(hw_cfg) - - if not backend.connect(): - raise RuntimeError(f"Failed to connect to {hw_cfg.type} backend") - - try: - if hw_cfg.auto_enable and hasattr(backend, "write_enable"): - backend.write_enable(True) - self.add_hardware( - hw_cfg.id, - backend, - joint_prefix=hw_cfg.joint_prefix or hw_cfg.id, - ) - except Exception: - backend.disconnect() - raise - - def _create_backend_from_config(self, cfg: HardwareConfig) -> ManipulatorBackend: - """Create a manipulator backend from config.""" - match cfg.type.lower(): - case "mock": - from dimos.hardware.manipulators.mock import MockBackend - - return MockBackend(dof=cfg.dof) - case "xarm": - if cfg.ip is None: - raise ValueError("ip is required for xarm backend") - from dimos.hardware.manipulators.xarm import XArmBackend - - return XArmBackend(ip=cfg.ip, dof=cfg.dof) - case "piper": - from dimos.hardware.manipulators.piper import PiperBackend - - return PiperBackend(can_port=cfg.can_port or "can0", dof=cfg.dof) - case _: - raise ValueError(f"Unknown backend type: {cfg.type}") - - def _create_task_from_config(self, cfg: TaskConfig) -> ControlTask: - """Create a control task from config.""" - task_type = cfg.type.lower() - - if task_type == "trajectory": - from dimos.control.tasks import JointTrajectoryTask, JointTrajectoryTaskConfig - - return JointTrajectoryTask( - cfg.name, - JointTrajectoryTaskConfig( - joint_names=cfg.joint_names, - priority=cfg.priority, - ), - ) - - else: - raise ValueError(f"Unknown task type: {task_type}") - - # ========================================================================= - # Hardware Management (RPC) - # 
========================================================================= - - @rpc - def add_hardware( - self, - hardware_id: str, - backend: ManipulatorBackend, - joint_prefix: str | None = None, - ) -> bool: - """Register a hardware backend with the orchestrator.""" - with self._hardware_lock: - if hardware_id in self._hardware: - logger.warning(f"Hardware {hardware_id} already registered") - return False - - interface = BackendHardwareInterface( - backend=backend, - hardware_id=hardware_id, - joint_prefix=joint_prefix, - ) - self._hardware[hardware_id] = interface - - for joint_name in interface.joint_names: - self._joint_to_hardware[joint_name] = hardware_id - - logger.info(f"Added hardware {hardware_id} with joints: {interface.joint_names}") - return True - - @rpc - def remove_hardware(self, hardware_id: str) -> bool: - """Remove a hardware interface. - - Note: For safety, call this only when no tasks are actively using this - hardware. Consider stopping the orchestrator before removing hardware. 
- """ - with self._hardware_lock: - if hardware_id not in self._hardware: - return False - - interface = self._hardware[hardware_id] - hw_joints = set(interface.joint_names) - - with self._task_lock: - for task in self._tasks.values(): - if task.is_active(): - claimed_joints = task.claim().joints - overlap = hw_joints & claimed_joints - if overlap: - logger.error( - f"Cannot remove hardware {hardware_id}: " - f"task '{task.name}' is actively using joints {overlap}" - ) - return False - - for joint_name in interface.joint_names: - del self._joint_to_hardware[joint_name] - - interface.disconnect() - del self._hardware[hardware_id] - logger.info(f"Removed hardware {hardware_id}") - return True - - @rpc - def list_hardware(self) -> list[str]: - """List registered hardware IDs.""" - with self._hardware_lock: - return list(self._hardware.keys()) - - @rpc - def list_joints(self) -> list[str]: - """List all joint names across all hardware.""" - with self._hardware_lock: - return list(self._joint_to_hardware.keys()) - - @rpc - def get_joint_positions(self) -> dict[str, float]: - """Get current joint positions for all joints.""" - with self._hardware_lock: - positions: dict[str, float] = {} - for hw in self._hardware.values(): - state = hw.read_state() # {joint_name: (pos, vel, effort)} - for joint_name, (pos, _vel, _effort) in state.items(): - positions[joint_name] = pos - return positions - - # ========================================================================= - # Task Management (RPC) - # ========================================================================= - - @rpc - def add_task(self, task: ControlTask) -> bool: - """Register a task with the orchestrator.""" - if not isinstance(task, ControlTask): - raise TypeError("task must implement ControlTask") - - with self._task_lock: - if task.name in self._tasks: - logger.warning(f"Task {task.name} already registered") - return False - self._tasks[task.name] = task - logger.info(f"Added task {task.name}") - return 
True - - @rpc - def remove_task(self, task_name: str) -> bool: - """Remove a task by name.""" - with self._task_lock: - if task_name in self._tasks: - del self._tasks[task_name] - logger.info(f"Removed task {task_name}") - return True - return False - - @rpc - def get_task(self, task_name: str) -> ControlTask | None: - """Get a task by name.""" - with self._task_lock: - return self._tasks.get(task_name) - - @rpc - def list_tasks(self) -> list[str]: - """List registered task names.""" - with self._task_lock: - return list(self._tasks.keys()) - - @rpc - def get_active_tasks(self) -> list[str]: - """List currently active task names.""" - with self._task_lock: - return [name for name, task in self._tasks.items() if task.is_active()] - - # ========================================================================= - # Trajectory Execution (RPC) - # ========================================================================= - - @rpc - def execute_trajectory(self, task_name: str, trajectory: JointTrajectory) -> bool: - """Execute a trajectory on a named task.""" - with self._task_lock: - task = self._tasks.get(task_name) - if task is None: - logger.warning(f"Task {task_name} not found") - return False - - if not hasattr(task, "execute"): - logger.warning(f"Task {task_name} doesn't support execute()") - return False - - logger.info( - f"Executing trajectory on {task_name}: " - f"{len(trajectory.points)} points, duration={trajectory.duration:.3f}s" - ) - return task.execute(trajectory) # type: ignore[attr-defined,no-any-return] - - @rpc - def get_trajectory_status(self, task_name: str) -> TaskStatus | None: - """Get the status of a trajectory task.""" - with self._task_lock: - task = self._tasks.get(task_name) - if task is None: - return None - - state: str | None = None - if hasattr(task, "get_state"): - task_state: TrajectoryState = task.get_state() # type: ignore[attr-defined] - state = ( - task_state.name if isinstance(task_state, TrajectoryState) else str(task_state) - ) - 
- progress: float | None = None - if hasattr(task, "get_progress"): - t_now = time.perf_counter() - progress = task.get_progress(t_now) # type: ignore[attr-defined] - - return TaskStatus(active=task.is_active(), state=state, progress=progress) - - @rpc - def cancel_trajectory(self, task_name: str) -> bool: - """Cancel an active trajectory on a task.""" - with self._task_lock: - task = self._tasks.get(task_name) - if task is None: - logger.warning(f"Task {task_name} not found") - return False - - if not hasattr(task, "cancel"): - logger.warning(f"Task {task_name} doesn't support cancel()") - return False - - logger.info(f"Cancelling trajectory on {task_name}") - return task.cancel() # type: ignore[attr-defined,no-any-return] - - # ========================================================================= - # Lifecycle - # ========================================================================= - - @rpc - def start(self) -> None: - """Start the orchestrator control loop.""" - if self._tick_loop and self._tick_loop.is_running: - logger.warning("Orchestrator already running") - return - - super().start() - - # Setup hardware and tasks from config (if any) - if self.config.hardware or self.config.tasks: - self._setup_from_config() - - # Create and start tick loop - publish_cb = self.joint_state.publish if self.config.publish_joint_state else None - self._tick_loop = TickLoop( - tick_rate=self.config.tick_rate, - hardware=self._hardware, - hardware_lock=self._hardware_lock, - tasks=self._tasks, - task_lock=self._task_lock, - joint_to_hardware=self._joint_to_hardware, - publish_callback=publish_cb, - frame_id=self.config.joint_state_frame_id, - log_ticks=self.config.log_ticks, - ) - self._tick_loop.start() - - logger.info(f"ControlOrchestrator started at {self.config.tick_rate}Hz") - - @rpc - def stop(self) -> None: - """Stop the orchestrator.""" - logger.info("Stopping ControlOrchestrator...") - - if self._tick_loop: - self._tick_loop.stop() - - # Disconnect all hardware 
backends - with self._hardware_lock: - for hw_id, interface in self._hardware.items(): - try: - interface.disconnect() - logger.info(f"Disconnected hardware {hw_id}") - except Exception as e: - logger.error(f"Error disconnecting hardware {hw_id}: {e}") - - super().stop() - logger.info("ControlOrchestrator stopped") - - @rpc - def get_tick_count(self) -> int: - """Get the number of ticks since start.""" - return self._tick_loop.tick_count if self._tick_loop else 0 - - -# Blueprint export -control_orchestrator = ControlOrchestrator.blueprint - - -__all__ = [ - "ControlOrchestrator", - "ControlOrchestratorConfig", - "HardwareConfig", - "TaskConfig", - "control_orchestrator", -] diff --git a/dimos/control/task.py b/dimos/control/task.py index 49589188d9..ecdf9ab7f4 100644 --- a/dimos/control/task.py +++ b/dimos/control/task.py @@ -12,26 +12,31 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""ControlTask protocol and types for the ControlOrchestrator. +"""ControlTask protocol and types for the ControlCoordinator. This module defines: -- Data types used by tasks and the orchestrator (ResourceClaim, JointStateSnapshot, etc.) +- Data types used by tasks and the coordinator (ResourceClaim, JointStateSnapshot, etc.) - ControlTask protocol that all tasks must implement -Tasks are "passive" - they don't own threads. The orchestrator calls +Tasks are "passive" - they don't own threads. The coordinator calls compute() at each tick, passing current state and time. CRITICAL: Tasks must NEVER call time.time() directly. -Use the t_now passed in OrchestratorState. +Use the t_now passed in CoordinatorState. 
""" from __future__ import annotations from dataclasses import dataclass, field -from typing import Protocol, runtime_checkable +from typing import TYPE_CHECKING, Protocol, runtime_checkable +from dimos.control.components import JointName from dimos.hardware.manipulators.spec import ControlMode +if TYPE_CHECKING: + from dimos.msgs.geometry_msgs import Pose, PoseStamped + from dimos.teleop.quest.quest_types import Buttons + # ============================================================================= # Data Types # ============================================================================= @@ -41,7 +46,7 @@ class ResourceClaim: """Declares which joints a task wants to control. - Used by the orchestrator to determine resource ownership and + Used by the coordinator to determine resource ownership and resolve conflicts between competing tasks. Attributes: @@ -52,7 +57,7 @@ class ResourceClaim: mode: Control mode (POSITION, VELOCITY, TORQUE) """ - joints: frozenset[str] + joints: frozenset[JointName] priority: int = 0 mode: ControlMode = ControlMode.POSITION @@ -75,26 +80,26 @@ class JointStateSnapshot: timestamp: Unix timestamp when state was read """ - joint_positions: dict[str, float] = field(default_factory=dict) - joint_velocities: dict[str, float] = field(default_factory=dict) - joint_efforts: dict[str, float] = field(default_factory=dict) + joint_positions: dict[JointName, float] = field(default_factory=dict) + joint_velocities: dict[JointName, float] = field(default_factory=dict) + joint_efforts: dict[JointName, float] = field(default_factory=dict) timestamp: float = 0.0 - def get_position(self, joint_name: str) -> float | None: + def get_position(self, joint_name: JointName) -> float | None: """Get position for a specific joint.""" return self.joint_positions.get(joint_name) - def get_velocity(self, joint_name: str) -> float | None: + def get_velocity(self, joint_name: JointName) -> float | None: """Get velocity for a specific joint.""" return 
self.joint_velocities.get(joint_name) - def get_effort(self, joint_name: str) -> float | None: + def get_effort(self, joint_name: JointName) -> float | None: """Get effort for a specific joint.""" return self.joint_efforts.get(joint_name) @dataclass -class OrchestratorState: +class CoordinatorState: """Complete state snapshot for tasks to read. Passed to each task's compute() method every tick. Contains @@ -109,7 +114,7 @@ class OrchestratorState: """ joints: JointStateSnapshot - t_now: float # Orchestrator time (perf_counter) - USE THIS, NOT time.time()! + t_now: float # Coordinator time (perf_counter) - USE THIS, NOT time.time()! dt: float # Time since last tick @@ -118,7 +123,7 @@ class JointCommandOutput: """Joint-centric command output from a task. Commands are addressed by joint name, NOT by hardware ID. - The orchestrator routes commands to the appropriate hardware. + The coordinator routes commands to the appropriate hardware. This design enables: - WBC spanning multiple hardware interfaces @@ -133,7 +138,7 @@ class JointCommandOutput: mode: Control mode - must match which field is populated """ - joint_names: list[str] + joint_names: list[JointName] positions: list[float] | None = None velocities: list[float] | None = None efforts: list[float] | None = None @@ -170,14 +175,14 @@ def get_values(self) -> list[float] | None: @runtime_checkable class ControlTask(Protocol): - """Protocol for passive tasks that run within the orchestrator. + """Protocol for passive tasks that run within the coordinator. - Tasks are "passive" - they don't own threads. The orchestrator + Tasks are "passive" - they don't own threads. The coordinator calls compute() at each tick, passing current state and time. Lifecycle: - 1. Task is added to orchestrator via add_task() - 2. Orchestrator calls claim() to understand resource needs + 1. Task is added to coordinator via add_task() + 2. Coordinator calls claim() to understand resource needs 3. 
Each tick: is_active() → compute() → output merged via arbitration 4. Task removed via remove_task() or transitions to inactive @@ -199,7 +204,7 @@ class ControlTask(Protocol): ... def is_active(self) -> bool: ... return self._executing ... - ... def compute(self, state: OrchestratorState) -> JointCommandOutput | None: + ... def compute(self, state: CoordinatorState) -> JointCommandOutput | None: ... # Use state.t_now, NOT time.time()! ... t_elapsed = state.t_now - self._start_time ... positions = self._trajectory.sample(t_elapsed) @@ -217,14 +222,14 @@ def name(self) -> str: """Unique identifier for this task instance. Used for logging, debugging, and task management. - Must be unique across all tasks in the orchestrator. + Must be unique across all tasks in the coordinator. """ ... def claim(self) -> ResourceClaim: """Declare resource requirements. - Called by orchestrator to determine: + Called by coordinator to determine: - Which joints this task wants to control - Priority for conflict resolution - Control mode (position/velocity/effort) @@ -233,7 +238,7 @@ def claim(self) -> ResourceClaim: ResourceClaim with joints (frozenset) and priority (int) Note: - The claim can change dynamically - orchestrator calls this + The claim can change dynamically - coordinator calls this every tick for active tasks. """ ... @@ -251,16 +256,16 @@ def is_active(self) -> bool: """ ... - def compute(self, state: OrchestratorState) -> JointCommandOutput | None: + def compute(self, state: CoordinatorState) -> JointCommandOutput | None: """Compute output command given current state. - Called by orchestrator for active tasks each tick. + Called by coordinator for active tasks each tick. CRITICAL: Use state.t_now for timing, NEVER time.time()! This ensures deterministic behavior and enables simulation. 
Args: - state: OrchestratorState containing: + state: CoordinatorState containing: - joints: JointStateSnapshot with all joint states - t_now: Current tick time (use this for all timing!) - dt: Time since last tick @@ -271,7 +276,7 @@ def compute(self, state: OrchestratorState) -> JointCommandOutput | None: """ ... - def on_preempted(self, by_task: str, joints: frozenset[str]) -> None: + def on_preempted(self, by_task: str, joints: frozenset[JointName]) -> None: """Called ONCE per tick with ALL preempted joints aggregated. Called when a higher-priority task takes control of some of this @@ -286,14 +291,56 @@ def on_preempted(self, by_task: str, joints: frozenset[str]) -> None: """ ... + def on_buttons(self, msg: Buttons) -> bool: + """Handle button state from teleop controllers.""" + ... + + def on_cartesian_command(self, pose: Pose | PoseStamped, t_now: float) -> bool: + """Handle incoming cartesian command (target or delta pose).""" + ... + + def set_target_by_name(self, positions: dict[str, float], t_now: float) -> bool: + """Handle servo position commands by joint name.""" + ... + + def set_velocities_by_name(self, velocities: dict[str, float], t_now: float) -> bool: + """Handle velocity commands by joint name.""" + ... + + +class BaseControlTask(ControlTask): + """Base class with no-op defaults for optional listener methods. + + Inherit from this to avoid implementing empty methods + in tasks that don't need them. Only override what your task uses. 
+ """ + + def on_buttons(self, msg: Buttons) -> bool: + """No-op default.""" + return False + + def on_cartesian_command(self, pose: Pose | PoseStamped, t_now: float) -> bool: + """No-op default.""" + return False + + def set_target_by_name(self, positions: dict[str, float], t_now: float) -> bool: + """No-op default.""" + return False + + def set_velocities_by_name(self, velocities: dict[str, float], t_now: float) -> bool: + """No-op default.""" + return False + __all__ = [ + # Protocol + Base + "BaseControlTask", # Types "ControlMode", - # Protocol "ControlTask", + "CoordinatorState", "JointCommandOutput", + "JointName", "JointStateSnapshot", - "OrchestratorState", "ResourceClaim", ] diff --git a/dimos/control/tasks/__init__.py b/dimos/control/tasks/__init__.py index 75460ffa26..5b869b01f9 100644 --- a/dimos/control/tasks/__init__.py +++ b/dimos/control/tasks/__init__.py @@ -12,14 +12,38 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Task implementations for the ControlOrchestrator.""" +"""Task implementations for the ControlCoordinator.""" +from dimos.control.tasks.cartesian_ik_task import ( + CartesianIKTask, + CartesianIKTaskConfig, +) +from dimos.control.tasks.servo_task import ( + JointServoTask, + JointServoTaskConfig, +) +from dimos.control.tasks.teleop_task import ( + TeleopIKTask, + TeleopIKTaskConfig, +) from dimos.control.tasks.trajectory_task import ( JointTrajectoryTask, JointTrajectoryTaskConfig, ) +from dimos.control.tasks.velocity_task import ( + JointVelocityTask, + JointVelocityTaskConfig, +) __all__ = [ + "CartesianIKTask", + "CartesianIKTaskConfig", + "JointServoTask", + "JointServoTaskConfig", "JointTrajectoryTask", "JointTrajectoryTaskConfig", + "JointVelocityTask", + "JointVelocityTaskConfig", + "TeleopIKTask", + "TeleopIKTaskConfig", ] diff --git a/dimos/control/tasks/cartesian_ik_task.py b/dimos/control/tasks/cartesian_ik_task.py new file mode 100644 index 0000000000..6ea5ddc55b --- /dev/null +++ b/dimos/control/tasks/cartesian_ik_task.py @@ -0,0 +1,335 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Cartesian control task with internal Pinocchio IK solver. + +Accepts streaming cartesian poses (e.g., from teleoperation, visual servoing) +and computes inverse kinematics internally to output joint commands. +Participates in joint-level arbitration. 
+ +CRITICAL: Uses t_now from CoordinatorState, never calls time.time() +""" + +from __future__ import annotations + +from dataclasses import dataclass +import threading +from typing import TYPE_CHECKING, Any + +import numpy as np + +from dimos.control.task import ( + BaseControlTask, + ControlMode, + CoordinatorState, + JointCommandOutput, + ResourceClaim, +) +from dimos.manipulation.planning.kinematics.pinocchio_ik import ( + PinocchioIK, + check_joint_delta, + get_worst_joint_delta, + pose_to_se3, +) +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from pathlib import Path + + from numpy.typing import NDArray + import pinocchio # type: ignore[import-untyped] + + from dimos.msgs.geometry_msgs import Pose, PoseStamped + +logger = setup_logger() + + +@dataclass +class CartesianIKTaskConfig: + """Configuration for cartesian IK task. + + Attributes: + joint_names: List of joint names this task controls (must match model DOF) + model_path: Path to URDF or MJCF file for IK solver + ee_joint_id: End-effector joint ID in the kinematic chain + priority: Priority for arbitration (higher wins) + timeout: If no command received for this many seconds, go inactive (0 = never) + max_joint_delta_deg: Maximum allowed joint change per tick (safety limit) + """ + + joint_names: list[str] + model_path: str | Path + ee_joint_id: int + priority: int = 10 + timeout: float = 0.5 + max_joint_delta_deg: float = 15.0 # ~1500°/s at 100Hz + + +class CartesianIKTask(BaseControlTask): + """Cartesian control task with internal Pinocchio IK solver. + + Accepts streaming cartesian poses via on_cartesian_command() and computes IK + internally to output joint commands. Uses current joint state from + CoordinatorState as IK warm-start for fast convergence. + + Unlike CartesianServoTask (which bypasses joint arbitration), this task + outputs JointCommandOutput and participates in joint-level arbitration. 
+ + Example: + >>> from dimos.utils.data import get_data + >>> piper_path = get_data("piper_description") + >>> task = CartesianIKTask( + ... name="cartesian_arm", + ... config=CartesianIKTaskConfig( + ... joint_names=["joint1", "joint2", "joint3", "joint4", "joint5", "joint6"], + ... model_path=piper_path / "mujoco_model" / "piper_no_gripper_description.xml", + ... ee_joint_id=6, + ... priority=10, + ... timeout=0.5, + ... ), + ... ) + >>> coordinator.add_task(task) + >>> task.start() + >>> + >>> # From teleop callback or other source: + >>> task.on_cartesian_command(pose_stamped, t_now=time.perf_counter()) + """ + + def __init__(self, name: str, config: CartesianIKTaskConfig) -> None: + """Initialize cartesian IK task. + + Args: + name: Unique task name + config: Task configuration + """ + if not config.joint_names: + raise ValueError(f"CartesianIKTask '{name}' requires at least one joint") + if not config.model_path: + raise ValueError(f"CartesianIKTask '{name}' requires model_path for IK solver") + + self._name = name + self._config = config + self._joint_names = frozenset(config.joint_names) + self._joint_names_list = list(config.joint_names) + self._num_joints = len(config.joint_names) + + # Create IK solver from model + self._ik = PinocchioIK.from_model_path(config.model_path, config.ee_joint_id) + + # Validate DOF matches joint names + if self._ik.nq != self._num_joints: + logger.warning( + f"CartesianIKTask {name}: model DOF ({self._ik.nq}) != " + f"joint_names count ({self._num_joints})" + ) + + # Thread-safe target state + self._lock = threading.Lock() + self._target_pose: Pose | PoseStamped | None = None + self._last_update_time: float = 0.0 + self._active = False + + # Cache last successful IK solution for warm-starting + self._last_q_solution: NDArray[np.floating[Any]] | None = None + + logger.info( + f"CartesianIKTask {name} initialized with model: {config.model_path}, " + f"ee_joint_id={config.ee_joint_id}, joints={config.joint_names}" + ) + + 
@property + def name(self) -> str: + """Unique task identifier.""" + return self._name + + def claim(self) -> ResourceClaim: + """Declare resource requirements.""" + return ResourceClaim( + joints=self._joint_names, + priority=self._config.priority, + mode=ControlMode.SERVO_POSITION, + ) + + def is_active(self) -> bool: + """Check if task should run this tick.""" + with self._lock: + return self._active and self._target_pose is not None + + def compute(self, state: CoordinatorState) -> JointCommandOutput | None: + """Compute IK and output joint positions. + + Args: + state: Current coordinator state (contains joint positions for IK warm-start) + + Returns: + JointCommandOutput with positions, or None if inactive/timed out/IK failed + """ + with self._lock: + if not self._active or self._target_pose is None: + return None + # Check timeout + if self._config.timeout > 0: + time_since_update = state.t_now - self._last_update_time + if time_since_update > self._config.timeout: + logger.warning( + f"CartesianIKTask {self._name} timed out " + f"(no update for {time_since_update:.3f}s)" + ) + self._active = False + return None + raw_pose = self._target_pose + + # Convert to SE3 right before use + target_pose = pose_to_se3(raw_pose) + # Get current joint positions for IK warm-start + q_current = self._get_current_joints(state) + if q_current is None: + logger.debug(f"CartesianIKTask {self._name}: missing joint state for IK warm-start") + return None + + # Compute IK + q_solution, converged, final_error = self._ik.solve(target_pose, q_current) + # Use the solution even if it didn't fully converge + if not converged: + logger.debug( + f"CartesianIKTask {self._name}: IK did not converge " + f"(error={final_error:.4f}), using partial solution" + ) + + # Safety check: reject if any joint delta exceeds limit + if not check_joint_delta(q_solution, q_current, self._config.max_joint_delta_deg): + worst_idx, worst_deg = get_worst_joint_delta(q_solution, q_current) + logger.warning( 
+ f"CartesianIKTask {self._name}: rejecting motion - " + f"joint {self._joint_names_list[worst_idx]} delta " + f"{worst_deg:.1f}° exceeds limit {self._config.max_joint_delta_deg}°" + ) + return None + + # Cache solution for next warm-start + with self._lock: + self._last_q_solution = q_solution.copy() + return JointCommandOutput( + joint_names=self._joint_names_list, + positions=q_solution.flatten().tolist(), + mode=ControlMode.SERVO_POSITION, + ) + + def _get_current_joints(self, state: CoordinatorState) -> NDArray[np.floating[Any]] | None: + """Get current joint positions from coordinator state. + + Falls back to last IK solution if joint state unavailable. + """ + positions = [] + for joint_name in self._joint_names_list: + pos = state.joints.get_position(joint_name) + if pos is None: + # Fallback to last solution + if self._last_q_solution is not None: + result: NDArray[np.floating[Any]] = self._last_q_solution.copy() + return result + return None + positions.append(pos) + return np.array(positions, dtype=np.float64) + + def on_preempted(self, by_task: str, joints: frozenset[str]) -> None: + """Handle preemption by higher-priority task. + + Args: + by_task: Name of preempting task + joints: Joints that were preempted + """ + if joints & self._joint_names: + logger.warning( + f"CartesianIKTask {self._name} preempted by {by_task} on joints {joints}" + ) + + # ========================================================================= + # Task-specific methods + # ========================================================================= + + def on_cartesian_command(self, pose: Pose | PoseStamped, t_now: float) -> bool: + """Handle incoming cartesian command (target EE pose). 
+ + Args: + pose: Target end-effector pose (Pose or PoseStamped) + t_now: Current time (from coordinator or time.perf_counter()) + + Returns: + True if accepted + """ + with self._lock: + self._target_pose = pose # Store raw, convert to SE3 in compute() + self._last_update_time = t_now + self._active = True + + return True + + def start(self) -> None: + """Activate the task (start accepting and outputting commands).""" + with self._lock: + self._active = True + logger.info(f"CartesianIKTask {self._name} started") + + def stop(self) -> None: + """Deactivate the task (stop outputting commands).""" + with self._lock: + self._active = False + logger.info(f"CartesianIKTask {self._name} stopped") + + def clear(self) -> None: + """Clear current target and deactivate.""" + with self._lock: + self._target_pose = None + self._active = False + logger.info(f"CartesianIKTask {self._name} cleared") + + def is_tracking(self) -> bool: + """Check if actively receiving and outputting commands.""" + with self._lock: + return self._active and self._target_pose is not None + + def get_current_ee_pose(self, state: CoordinatorState) -> pinocchio.SE3 | None: + """Get current end-effector pose via forward kinematics. + + Useful for getting initial pose before starting tracking. + + Args: + state: Current coordinator state + + Returns: + Current EE pose as SE3, or None if joint state unavailable + """ + q_current = self._get_current_joints(state) + if q_current is None: + return None + + return self._ik.forward_kinematics(q_current) + + def forward_kinematics(self, joint_positions: NDArray[np.floating[Any]]) -> pinocchio.SE3: + """Compute end-effector pose from joint positions. 
+ + Args: + joint_positions: Joint angles in radians + + Returns: + End-effector pose as SE3 + """ + return self._ik.forward_kinematics(joint_positions) + + +__all__ = [ + "CartesianIKTask", + "CartesianIKTaskConfig", +] diff --git a/dimos/control/tasks/servo_task.py b/dimos/control/tasks/servo_task.py new file mode 100644 index 0000000000..b69b4dd099 --- /dev/null +++ b/dimos/control/tasks/servo_task.py @@ -0,0 +1,242 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Streaming joint servo task for real-time position control. + +Accepts streaming joint positions (e.g., from teleoperation) and outputs them +directly to hardware each tick. Useful for teleoperation, visual servoing, +or any real-time control where you don't want trajectory planning overhead. + +CRITICAL: Uses t_now from CoordinatorState, never calls time.time() +""" + +from __future__ import annotations + +from dataclasses import dataclass +import threading + +from dimos.control.task import ( + BaseControlTask, + ControlMode, + CoordinatorState, + JointCommandOutput, + ResourceClaim, +) +from dimos.utils.logging_config import setup_logger + +logger = setup_logger() + + +@dataclass +class JointServoTaskConfig: + """Configuration for servo task. 
+ + Attributes: + joint_names: List of joint names this task controls + priority: Priority for arbitration (higher wins) + timeout: If no command received for this many seconds, go inactive (0 = never timeout) + """ + + joint_names: list[str] + priority: int = 10 + timeout: float = 0.5 # 500ms default timeout + + +class JointServoTask(BaseControlTask): + """Streaming joint position control for teleoperation/visual servoing. + + Accepts target positions via set_target() or set_target_by_name() and + outputs them each tick. Uses SERVO_POSITION mode for high-frequency control. + + No trajectory planning - just pass-through with optional timeout. + + Example: + >>> task = JointServoTask( + ... name="servo_arm", + ... config=JointServoTaskConfig( + ... joint_names=["arm_joint1", "arm_joint2", "arm_joint3"], + ... priority=10, + ... timeout=0.5, + ... ), + ... ) + >>> coordinator.add_task(task) + >>> task.start() + >>> + >>> # From teleop callback or other source: + >>> task.set_target([0.1, 0.2, 0.3], t_now=time.perf_counter()) + """ + + def __init__(self, name: str, config: JointServoTaskConfig) -> None: + """Initialize servo task. 
+ + Args: + name: Unique task name + config: Task configuration + """ + if not config.joint_names: + raise ValueError(f"JointServoTask '{name}' requires at least one joint") + + self._name = name + self._config = config + self._joint_names = frozenset(config.joint_names) + self._joint_names_list = list(config.joint_names) + self._num_joints = len(config.joint_names) + + # Current target (thread-safe) + self._lock = threading.Lock() + self._target: list[float] | None = None + self._last_update_time: float = 0.0 + self._active = False + + logger.info(f"JointServoTask {name} initialized for joints: {config.joint_names}") + + @property + def name(self) -> str: + """Unique task identifier.""" + return self._name + + def claim(self) -> ResourceClaim: + """Declare resource requirements.""" + return ResourceClaim( + joints=self._joint_names, + priority=self._config.priority, + mode=ControlMode.SERVO_POSITION, + ) + + def is_active(self) -> bool: + """Check if task should run this tick.""" + with self._lock: + return self._active and self._target is not None + + def compute(self, state: CoordinatorState) -> JointCommandOutput | None: + """Output current target positions. + + Args: + state: Current coordinator state + + Returns: + JointCommandOutput with positions, or None if inactive/timed out + """ + with self._lock: + if not self._active or self._target is None: + return None + + # Check timeout + if self._config.timeout > 0: + time_since_update = state.t_now - self._last_update_time + if time_since_update > self._config.timeout: + logger.warning( + f"JointServoTask {self._name} timed out " + f"(no update for {time_since_update:.3f}s)" + ) + self._active = False + return None + + return JointCommandOutput( + joint_names=self._joint_names_list, + positions=list(self._target), + mode=ControlMode.SERVO_POSITION, + ) + + def on_preempted(self, by_task: str, joints: frozenset[str]) -> None: + """Handle preemption by higher-priority task. 
+ + Args: + by_task: Name of preempting task + joints: Joints that were preempted + """ + if joints & self._joint_names: + logger.warning(f"JointServoTask {self._name} preempted by {by_task} on joints {joints}") + + # ========================================================================= + # Task-specific methods + # ========================================================================= + + def set_target(self, positions: list[float], t_now: float) -> bool: + """Set target joint positions. + + Call this from your teleop callback or other data source. + + Args: + positions: Joint positions in radians (must match joint_names length) + t_now: Current time (from coordinator or time.perf_counter()) + + Returns: + True if accepted, False if wrong number of joints + """ + if len(positions) != self._num_joints: + logger.warning( + f"JointServoTask {self._name}: expected {self._num_joints} " + f"positions, got {len(positions)}" + ) + return False + + with self._lock: + self._target = list(positions) + self._last_update_time = t_now + self._active = True + + return True + + def set_target_by_name(self, positions: dict[str, float], t_now: float) -> bool: + """Set target positions by joint name. + + Extracts only the joints this task controls from the dict. + Useful for routing when multiple tasks share an input stream. 
+ + Args: + positions: {joint_name: position} dict (can contain extra joints) + t_now: Current time + + Returns: + True if all required joints found, False if any missing + """ + ordered = [] + for name in self._joint_names_list: + if name not in positions: + # Missing joint - don't update + return False + ordered.append(positions[name]) + + return self.set_target(ordered, t_now) + + def start(self) -> None: + """Activate the task (start accepting and outputting commands).""" + with self._lock: + self._active = True + logger.info(f"JointServoTask {self._name} started") + + def stop(self) -> None: + """Deactivate the task (stop outputting commands).""" + with self._lock: + self._active = False + logger.info(f"JointServoTask {self._name} stopped") + + def clear(self) -> None: + """Clear current target and deactivate.""" + with self._lock: + self._target = None + self._active = False + logger.info(f"JointServoTask {self._name} cleared") + + def is_streaming(self) -> bool: + """Check if actively receiving and outputting commands.""" + with self._lock: + return self._active and self._target is not None + + +__all__ = [ + "JointServoTask", + "JointServoTaskConfig", +] diff --git a/dimos/control/tasks/teleop_task.py b/dimos/control/tasks/teleop_task.py new file mode 100644 index 0000000000..d1a90eb6d0 --- /dev/null +++ b/dimos/control/tasks/teleop_task.py @@ -0,0 +1,332 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Teleop cartesian control task with internal Pinocchio IK solver. + +Accepts streaming cartesian delta poses from teleoperation and computes +inverse kinematics internally to output joint commands. Deltas are applied +relative to the EE pose captured at engage time. + +Participates in joint-level arbitration. + +CRITICAL: Uses t_now from CoordinatorState, never calls time.time() +""" + +from __future__ import annotations + +from dataclasses import dataclass +import threading +from typing import TYPE_CHECKING, Any + +import numpy as np +import pinocchio # type: ignore[import-untyped] + +from dimos.control.task import ( + BaseControlTask, + ControlMode, + CoordinatorState, + JointCommandOutput, + ResourceClaim, +) +from dimos.manipulation.planning.kinematics.pinocchio_ik import ( + PinocchioIK, + check_joint_delta, + pose_to_se3, +) +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from pathlib import Path + + from numpy.typing import NDArray + + from dimos.msgs.geometry_msgs import Pose, PoseStamped + from dimos.teleop.quest.quest_types import Buttons + +logger = setup_logger() + + +@dataclass +class TeleopIKTaskConfig: + """Configuration for teleop IK task. 
+ + Attributes: + joint_names: List of joint names this task controls (must match model DOF) + model_path: Path to URDF or MJCF file for IK solver + ee_joint_id: End-effector joint ID in the kinematic chain + priority: Priority for arbitration (higher wins) + timeout: If no command received for this many seconds, go inactive (0 = never) + max_joint_delta_deg: Maximum allowed joint change per tick (safety limit) + hand: "left" or "right" — which controller's primary button to listen to + """ + + joint_names: list[str] + model_path: str | Path + ee_joint_id: int + priority: int = 10 + timeout: float = 0.5 + max_joint_delta_deg: float = 5.0 # ~500°/s at 100Hz + hand: str = "" + + +class TeleopIKTask(BaseControlTask): + """Teleop cartesian control task with internal Pinocchio IK solver. + + Accepts streaming cartesian delta poses via on_cartesian_command() and computes IK + internally to output joint commands. Deltas are applied relative to the EE pose + captured at engage time (first compute). + + Uses current joint state from CoordinatorState as IK warm-start for fast convergence. + Outputs JointCommandOutput and participates in joint-level arbitration. + + Example: + >>> from dimos.utils.data import get_data + >>> piper_path = get_data("piper_description") + >>> task = TeleopIKTask( + ... name="teleop_arm", + ... config=TeleopIKTaskConfig( + ... joint_names=["joint1", "joint2", "joint3", "joint4", "joint5", "joint6"], + ... model_path=piper_path / "mujoco_model" / "piper_no_gripper_description.xml", + ... ee_joint_id=6, + ... priority=10, + ... timeout=0.5, + ... ), + ... ) + >>> coordinator.add_task(task) + >>> task.start() + >>> + >>> # From teleop callback: + >>> task.on_cartesian_command(delta_pose, t_now=time.perf_counter()) + """ + + def __init__(self, name: str, config: TeleopIKTaskConfig) -> None: + """Initialize teleop IK task. 
+ + Args: + name: Unique task name + config: Task configuration + """ + if not config.joint_names: + raise ValueError(f"TeleopIKTask '{name}' requires at least one joint") + if not config.model_path: + raise ValueError(f"TeleopIKTask '{name}' requires model_path for IK solver") + + self._name = name + self._config = config + self._joint_names = frozenset(config.joint_names) + self._joint_names_list = list(config.joint_names) + self._num_joints = len(config.joint_names) + + # Create IK solver from model + self._ik = PinocchioIK.from_model_path(config.model_path, config.ee_joint_id) + + # Validate DOF matches joint names + if self._ik.nq != self._num_joints: + logger.warning( + f"TeleopIKTask {name}: model DOF ({self._ik.nq}) != " + f"joint_names count ({self._num_joints})" + ) + + # Thread-safe target state + self._lock = threading.Lock() + self._target_pose: Pose | PoseStamped | None = None + self._last_update_time: float = 0.0 + self._active = False + + # Initial EE pose for delta application + self._initial_ee_pose: pinocchio.SE3 | None = None + self._prev_primary: bool = False + + logger.info( + f"TeleopIKTask {name} initialized with model: {config.model_path}, " + f"ee_joint_id={config.ee_joint_id}, joints={config.joint_names}" + ) + + @property + def name(self) -> str: + """Unique task identifier.""" + return self._name + + def claim(self) -> ResourceClaim: + """Declare resource requirements.""" + return ResourceClaim( + joints=self._joint_names, + priority=self._config.priority, + mode=ControlMode.SERVO_POSITION, + ) + + def is_active(self) -> bool: + """Check if task should run this tick.""" + with self._lock: + return self._active and self._target_pose is not None + + def compute(self, state: CoordinatorState) -> JointCommandOutput | None: + """Compute IK and output joint positions. 
+ + Args: + state: Current coordinator state (contains joint positions for IK warm-start) + + Returns: + JointCommandOutput with positions, or None if inactive/timed out/IK failed + """ + with self._lock: + if not self._active or self._target_pose is None: + return None + + # Timeout safety: stop if teleop stream drops + if self._config.timeout > 0: + time_since_update = state.t_now - self._last_update_time + if time_since_update > self._config.timeout: + logger.warning( + f"TeleopIKTask {self._name} timed out " + f"(no update for {time_since_update:.3f}s)" + ) + self._target_pose = None + self._active = False + return None + raw_pose = self._target_pose + + # Convert to SE3 right before use + delta_se3 = pose_to_se3(raw_pose) + # Capture initial EE pose if not set (first command after engage) + with self._lock: + need_capture = self._initial_ee_pose is None + + if need_capture: + q_current = self._get_current_joints(state) + if q_current is None: + logger.debug( + f"TeleopIKTask {self._name}: cannot capture initial pose, joint state unavailable" + ) + return None + initial_pose = self._ik.forward_kinematics(q_current) + with self._lock: + self._initial_ee_pose = initial_pose + + # Apply delta to initial pose: target = initial + delta + with self._lock: + if self._initial_ee_pose is None: + return None + target_pose = pinocchio.SE3( + delta_se3.rotation @ self._initial_ee_pose.rotation, + self._initial_ee_pose.translation + delta_se3.translation, + ) + + # Get current joint positions for IK warm-start + q_current = self._get_current_joints(state) + if q_current is None: + logger.debug(f"TeleopIKTask {self._name}: missing joint state for IK warm-start") + return None + + # Compute IK + q_solution, converged, final_error = self._ik.solve(target_pose, q_current) + # Use the solution even if it didn't fully converge + if not converged: + logger.debug( + f"TeleopIKTask {self._name}: IK did not converge " + f"(error={final_error:.4f}), using partial solution" + ) + # 
Safety: reject if any joint would jump too far in one tick + if not check_joint_delta(q_solution, q_current, self._config.max_joint_delta_deg): + logger.warning( + f"TeleopIKTask {self._name}: joint delta exceeds " + f"{self._config.max_joint_delta_deg}°, rejecting solution" + ) + return None + + positions = q_solution.flatten().tolist() + return JointCommandOutput( + joint_names=self._joint_names_list, + positions=positions, + mode=ControlMode.SERVO_POSITION, + ) + + def _get_current_joints(self, state: CoordinatorState) -> NDArray[np.floating[Any]] | None: + """Get current joint positions from coordinator state.""" + positions = [] + for joint_name in self._joint_names_list: + pos = state.joints.get_position(joint_name) + if pos is None: + return None + positions.append(pos) + return np.array(positions) + + def on_preempted(self, by_task: str, joints: frozenset[str]) -> None: + """Handle preemption by higher-priority task. + + Args: + by_task: Name of preempting task + joints: Joints that were preempted + """ + if joints & self._joint_names: + logger.warning(f"TeleopIKTask {self._name} preempted by {by_task} on joints {joints}") + + # ========================================================================= + # Task-specific methods + # ========================================================================= + + def on_buttons(self, msg: Buttons) -> bool: + """Press-and-hold engage: hold primary button to track, release to stop. + + Checks only the button matching self._config.hand (left_primary or right_primary). + If hand is not set, listens to both. 
+ """ + hand = self._config.hand + if hand == "left": + primary = msg.left_primary + elif hand == "right": + primary = msg.right_primary + else: + primary = msg.left_primary or msg.right_primary + + if primary and not self._prev_primary: + # Rising edge: reset initial pose so compute() recaptures + logger.info(f"TeleopIKTask {self._name}: engage") + with self._lock: + self._initial_ee_pose = None + elif not primary and self._prev_primary: + # Falling edge: stop tracking + logger.info(f"TeleopIKTask {self._name}: disengage") + with self._lock: + self._target_pose = None + self._initial_ee_pose = None + self._prev_primary = primary + return True + + def on_cartesian_command(self, pose: Pose | PoseStamped, t_now: float) -> bool: + """Handle incoming cartesian command (delta pose from teleop)""" + with self._lock: + self._target_pose = pose # Store raw, convert to SE3 in compute() + self._last_update_time = t_now + self._active = True + + return True + + def start(self) -> None: + """Activate the task (start accepting and outputting commands).""" + with self._lock: + self._active = True + logger.info(f"TeleopIKTask {self._name} started") + + def stop(self) -> None: + """Deactivate the task (stop outputting commands).""" + with self._lock: + self._active = False + logger.info(f"TeleopIKTask {self._name} stopped") + + +__all__ = [ + "TeleopIKTask", + "TeleopIKTaskConfig", +] diff --git a/dimos/control/tasks/trajectory_task.py b/dimos/control/tasks/trajectory_task.py index 08e3ae337e..4d2eaa188b 100644 --- a/dimos/control/tasks/trajectory_task.py +++ b/dimos/control/tasks/trajectory_task.py @@ -12,13 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Joint trajectory task for the ControlOrchestrator. +"""Joint trajectory task for the ControlCoordinator. -Passive trajectory execution - called by orchestrator each tick. +Passive trajectory execution - called by coordinator each tick. 
Unlike JointTrajectoryController which owns a thread, this task -is compute-only and relies on the orchestrator for timing. +is compute-only and relies on the coordinator for timing. -CRITICAL: Uses t_now from OrchestratorState, never calls time.time() +CRITICAL: Uses t_now from CoordinatorState, never calls time.time() """ from __future__ import annotations @@ -26,10 +26,10 @@ from dataclasses import dataclass from dimos.control.task import ( + BaseControlTask, ControlMode, - ControlTask, + CoordinatorState, JointCommandOutput, - OrchestratorState, ResourceClaim, ) from dimos.msgs.trajectory_msgs import JointTrajectory, TrajectoryState @@ -51,13 +51,13 @@ class JointTrajectoryTaskConfig: priority: int = 10 -class JointTrajectoryTask(ControlTask): +class JointTrajectoryTask(BaseControlTask): """Passive trajectory execution task. Unlike JointTrajectoryController which owns a thread, this task - is called by the orchestrator at each tick. + is called by the coordinator at each tick. - CRITICAL: Uses t_now from OrchestratorState, never calls time.time() + CRITICAL: Uses t_now from CoordinatorState, never calls time.time() State Machine: IDLE ──execute()──► EXECUTING ──done──► COMPLETED @@ -74,8 +74,8 @@ class JointTrajectoryTask(ControlTask): ... priority=10, ... ), ... ) - >>> orchestrator.add_task(task) - >>> task.execute(my_trajectory, t_now=orchestrator_t_now) + >>> coordinator.add_task(task) + >>> task.execute(my_trajectory, t_now=coordinator_t_now) """ def __init__(self, name: str, config: JointTrajectoryTaskConfig) -> None: @@ -117,13 +117,13 @@ def is_active(self) -> bool: """Check if task should run this tick.""" return self._state == TrajectoryState.EXECUTING - def compute(self, state: OrchestratorState) -> JointCommandOutput | None: + def compute(self, state: CoordinatorState) -> JointCommandOutput | None: """Compute trajectory output for this tick. CRITICAL: Uses state.t_now for timing, NOT time.time()! 
Args: - state: Current orchestrator state + state: Current coordinator state Returns: JointCommandOutput with positions, or None if not executing @@ -244,7 +244,7 @@ def get_progress(self, t_now: float) -> float: """Get execution progress (0.0 to 1.0). Args: - t_now: Current orchestrator time + t_now: Current coordinator time Returns: Progress as fraction, or 0.0 if not executing diff --git a/dimos/control/tasks/velocity_task.py b/dimos/control/tasks/velocity_task.py new file mode 100644 index 0000000000..163bc09827 --- /dev/null +++ b/dimos/control/tasks/velocity_task.py @@ -0,0 +1,277 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Streaming joint velocity task for real-time velocity control. + +Accepts streaming joint velocities (e.g., from joystick) and outputs them +directly to hardware each tick. Useful for joystick control, force feedback, +or any velocity-mode real-time control. + +SAFETY: On timeout, sends zero velocities to stop motion (configurable). + +CRITICAL: Uses t_now from CoordinatorState, never calls time.time() +""" + +from __future__ import annotations + +from dataclasses import dataclass +import threading + +from dimos.control.task import ( + BaseControlTask, + ControlMode, + CoordinatorState, + JointCommandOutput, + ResourceClaim, +) +from dimos.utils.logging_config import setup_logger + +logger = setup_logger() + + +@dataclass +class JointVelocityTaskConfig: + """Configuration for velocity task. 
+ + Attributes: + joint_names: List of joint names this task controls + priority: Priority for arbitration (higher wins) + timeout: If no command received for this many seconds, trigger timeout behavior + zero_on_timeout: If True, send zero velocities on timeout (safety). If False, go inactive. + """ + + joint_names: list[str] + priority: int = 10 + timeout: float = 0.2 # 200ms default - shorter for safety + zero_on_timeout: bool = True # Send zeros to stop motion + + +class JointVelocityTask(BaseControlTask): + """Streaming joint velocity control for joystick/force feedback. + + Accepts target velocities via set_velocities() or set_velocities_by_name() + and outputs them each tick. Uses VELOCITY mode for direct velocity control. + + SAFETY: On timeout (no update for timeout seconds): + - If zero_on_timeout=True: sends zero velocities to stop motion + - If zero_on_timeout=False: goes inactive (hardware may coast) + + Example: + >>> task = JointVelocityTask( + ... name="velocity_arm", + ... config=JointVelocityTaskConfig( + ... joint_names=["arm_joint1", "arm_joint2", "arm_joint3"], + ... priority=10, + ... timeout=0.2, + ... zero_on_timeout=True, + ... ), + ... ) + >>> coordinator.add_task(task) + >>> task.start() + >>> + >>> # From joystick callback: + >>> task.set_velocities([0.1, -0.05, 0.0], t_now=time.perf_counter()) + """ + + def __init__(self, name: str, config: JointVelocityTaskConfig) -> None: + """Initialize velocity task. 
+ + Args: + name: Unique task name + config: Task configuration + """ + if not config.joint_names: + raise ValueError(f"JointVelocityTask '{name}' requires at least one joint") + + self._name = name + self._config = config + self._joint_names = frozenset(config.joint_names) + self._joint_names_list = list(config.joint_names) + self._num_joints = len(config.joint_names) + + # Current velocities (thread-safe) + self._lock = threading.Lock() + self._velocities: list[float] | None = None + self._last_update_time: float = 0.0 + self._active = False + self._timed_out = False # Track timeout state for logging + + logger.info(f"JointVelocityTask {name} initialized for joints: {config.joint_names}") + + @property + def name(self) -> str: + """Unique task identifier.""" + return self._name + + def claim(self) -> ResourceClaim: + """Declare resource requirements.""" + return ResourceClaim( + joints=self._joint_names, + priority=self._config.priority, + mode=ControlMode.VELOCITY, + ) + + def is_active(self) -> bool: + """Check if task should run this tick.""" + with self._lock: + # Active if started, even if timed out (we still send zeros) + if self._config.zero_on_timeout: + return self._active + else: + return self._active and self._velocities is not None + + def compute(self, state: CoordinatorState) -> JointCommandOutput | None: + """Output current target velocities. 
+ + Args: + state: Current coordinator state + + Returns: + JointCommandOutput with velocities, or None if inactive + """ + with self._lock: + if not self._active: + return None + + # Check timeout + if self._config.timeout > 0 and self._velocities is not None: + time_since_update = state.t_now - self._last_update_time + if time_since_update > self._config.timeout: + if not self._timed_out: + logger.warning( + f"JointVelocityTask {self._name} timed out " + f"(no update for {time_since_update:.3f}s)" + ) + self._timed_out = True + + if self._config.zero_on_timeout: + # SAFETY: Send zeros to stop motion + return JointCommandOutput( + joint_names=self._joint_names_list, + velocities=[0.0] * self._num_joints, + mode=ControlMode.VELOCITY, + ) + else: + # Go inactive + self._active = False + return None + + if self._velocities is None: + return None + + # Reset timeout flag on successful output + self._timed_out = False + + return JointCommandOutput( + joint_names=self._joint_names_list, + velocities=list(self._velocities), + mode=ControlMode.VELOCITY, + ) + + def on_preempted(self, by_task: str, joints: frozenset[str]) -> None: + """Handle preemption by higher-priority task. + + Args: + by_task: Name of preempting task + joints: Joints that were preempted + """ + if joints & self._joint_names: + logger.warning( + f"JointVelocityTask {self._name} preempted by {by_task} on joints {joints}" + ) + + # ========================================================================= + # Task-specific methods + # ========================================================================= + + def set_velocities(self, velocities: list[float], t_now: float) -> bool: + """Set target joint velocities. + + Call this from your joystick callback or other data source. 
+ + Args: + velocities: Joint velocities in rad/s (must match joint_names length) + t_now: Current time (from coordinator or time.perf_counter()) + + Returns: + True if accepted, False if wrong number of joints + """ + if len(velocities) != self._num_joints: + logger.warning( + f"JointVelocityTask {self._name}: expected {self._num_joints} " + f"velocities, got {len(velocities)}" + ) + return False + + with self._lock: + self._velocities = list(velocities) + self._last_update_time = t_now + self._active = True + self._timed_out = False + + return True + + def set_velocities_by_name(self, velocities: dict[str, float], t_now: float) -> bool: + """Set target velocities by joint name. + + Extracts only the joints this task controls from the dict. + Useful for routing when multiple tasks share an input stream. + + Args: + velocities: {joint_name: velocity} dict (can contain extra joints) + t_now: Current time + + Returns: + True if all required joints found, False if any missing + """ + ordered = [] + for name in self._joint_names_list: + if name not in velocities: + # Missing joint - don't update + return False + ordered.append(velocities[name]) + + return self.set_velocities(ordered, t_now) + + def start(self) -> None: + """Activate the task (start accepting and outputting commands).""" + with self._lock: + self._active = True + self._timed_out = False + logger.info(f"JointVelocityTask {self._name} started") + + def stop(self) -> None: + """Deactivate the task (stop outputting commands).""" + with self._lock: + self._active = False + logger.info(f"JointVelocityTask {self._name} stopped") + + def clear(self) -> None: + """Clear current velocities and deactivate.""" + with self._lock: + self._velocities = None + self._active = False + self._timed_out = False + logger.info(f"JointVelocityTask {self._name} cleared") + + def is_streaming(self) -> bool: + """Check if actively receiving and outputting commands.""" + with self._lock: + return self._active and self._velocities 
is not None and not self._timed_out + + +__all__ = [ + "JointVelocityTask", + "JointVelocityTaskConfig", +] diff --git a/dimos/control/test_control.py b/dimos/control/test_control.py index 2522affa60..656678d167 100644 --- a/dimos/control/test_control.py +++ b/dimos/control/test_control.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Tests for the Control Orchestrator module.""" +"""Tests for the Control Coordinator module.""" from __future__ import annotations @@ -22,12 +22,13 @@ import pytest -from dimos.control.hardware_interface import BackendHardwareInterface +from dimos.control.components import HardwareComponent, HardwareType, make_joints +from dimos.control.hardware_interface import ConnectedHardware from dimos.control.task import ( ControlMode, + CoordinatorState, JointCommandOutput, JointStateSnapshot, - OrchestratorState, ResourceClaim, ) from dimos.control.tasks.trajectory_task import ( @@ -36,7 +37,7 @@ TrajectoryState, ) from dimos.control.tick_loop import TickLoop -from dimos.hardware.manipulators.spec import ManipulatorBackend +from dimos.hardware.manipulators.spec import ManipulatorAdapter from dimos.msgs.trajectory_msgs import JointTrajectory, TrajectoryPoint # ============================================================================= @@ -45,27 +46,28 @@ @pytest.fixture -def mock_backend(): - """Create a mock manipulator backend.""" - backend = MagicMock(spec=ManipulatorBackend) - backend.get_dof.return_value = 6 - backend.read_joint_positions.return_value = [0.0] * 6 - backend.read_joint_velocities.return_value = [0.0] * 6 - backend.read_joint_efforts.return_value = [0.0] * 6 - backend.write_joint_positions.return_value = True - backend.write_joint_velocities.return_value = True - backend.set_control_mode.return_value = True - return backend +def mock_adapter(): + """Create a mock manipulator adapter.""" + adapter = MagicMock(spec=ManipulatorAdapter) + 
adapter.get_dof.return_value = 6 + adapter.read_joint_positions.return_value = [0.0] * 6 + adapter.read_joint_velocities.return_value = [0.0] * 6 + adapter.read_joint_efforts.return_value = [0.0] * 6 + adapter.write_joint_positions.return_value = True + adapter.write_joint_velocities.return_value = True + adapter.set_control_mode.return_value = True + return adapter @pytest.fixture -def hardware_interface(mock_backend): - """Create a BackendHardwareInterface with mock backend.""" - return BackendHardwareInterface( - backend=mock_backend, +def connected_hardware(mock_adapter): + """Create a ConnectedHardware instance with mock adapter.""" + component = HardwareComponent( hardware_id="test_arm", - joint_prefix="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), ) + return ConnectedHardware(adapter=mock_adapter, component=component) @pytest.fixture @@ -99,15 +101,15 @@ def simple_trajectory(): @pytest.fixture -def orchestrator_state(): - """Create a sample OrchestratorState.""" +def coordinator_state(): + """Create a sample CoordinatorState.""" joints = JointStateSnapshot( joint_positions={"arm_joint1": 0.0, "arm_joint2": 0.0, "arm_joint3": 0.0}, joint_velocities={"arm_joint1": 0.0, "arm_joint2": 0.0, "arm_joint3": 0.0}, joint_efforts={"arm_joint1": 0.0, "arm_joint2": 0.0, "arm_joint3": 0.0}, timestamp=time.perf_counter(), ) - return OrchestratorState(joints=joints, t_now=time.perf_counter(), dt=0.01) + return CoordinatorState(joints=joints, t_now=time.perf_counter(), dt=0.01) # ============================================================================= @@ -170,13 +172,13 @@ def test_get_position(self): # ============================================================================= -# Test BackendHardwareInterface +# Test ConnectedHardware # ============================================================================= -class TestBackendHardwareInterface: - def test_joint_names_prefixed(self, hardware_interface): - names = 
hardware_interface.joint_names +class TestConnectedHardware: + def test_joint_names_prefixed(self, connected_hardware): + names = connected_hardware.joint_names assert names == [ "arm_joint1", "arm_joint2", @@ -186,22 +188,22 @@ def test_joint_names_prefixed(self, hardware_interface): "arm_joint6", ] - def test_read_state(self, hardware_interface): - state = hardware_interface.read_state() + def test_read_state(self, connected_hardware): + state = connected_hardware.read_state() assert "arm_joint1" in state assert len(state) == 6 - pos, vel, eff = state["arm_joint1"] - assert pos == 0.0 - assert vel == 0.0 - assert eff == 0.0 + joint_state = state["arm_joint1"] + assert joint_state.position == 0.0 + assert joint_state.velocity == 0.0 + assert joint_state.effort == 0.0 - def test_write_command(self, hardware_interface, mock_backend): + def test_write_command(self, connected_hardware, mock_adapter): commands = { "arm_joint1": 0.5, "arm_joint2": 1.0, } - hardware_interface.write_command(commands, ControlMode.POSITION) - mock_backend.write_joint_positions.assert_called() + connected_hardware.write_command(commands, ControlMode.POSITION) + mock_adapter.write_joint_positions.assert_called() # ============================================================================= @@ -229,23 +231,21 @@ def test_execute_trajectory(self, trajectory_task, simple_trajectory): assert trajectory_task.is_active() assert trajectory_task.get_state() == TrajectoryState.EXECUTING - def test_compute_during_trajectory( - self, trajectory_task, simple_trajectory, orchestrator_state - ): + def test_compute_during_trajectory(self, trajectory_task, simple_trajectory, coordinator_state): t_start = time.perf_counter() trajectory_task.execute(simple_trajectory) # First compute sets start time (deferred start) - state0 = OrchestratorState( - joints=orchestrator_state.joints, + state0 = CoordinatorState( + joints=coordinator_state.joints, t_now=t_start, dt=0.01, ) trajectory_task.compute(state0) # 
Compute at 0.5s into trajectory - state = OrchestratorState( - joints=orchestrator_state.joints, + state = CoordinatorState( + joints=coordinator_state.joints, t_now=t_start + 0.5, dt=0.01, ) @@ -256,21 +256,21 @@ def test_compute_during_trajectory( assert len(output.positions) == 3 assert 0.4 < output.positions[0] < 0.6 - def test_trajectory_completes(self, trajectory_task, simple_trajectory, orchestrator_state): + def test_trajectory_completes(self, trajectory_task, simple_trajectory, coordinator_state): t_start = time.perf_counter() trajectory_task.execute(simple_trajectory) # First compute sets start time (deferred start) - state0 = OrchestratorState( - joints=orchestrator_state.joints, + state0 = CoordinatorState( + joints=coordinator_state.joints, t_now=t_start, dt=0.01, ) trajectory_task.compute(state0) # Compute past trajectory duration - state = OrchestratorState( - joints=orchestrator_state.joints, + state = CoordinatorState( + joints=coordinator_state.joints, t_now=t_start + 1.5, dt=0.01, ) @@ -297,13 +297,13 @@ def test_preemption(self, trajectory_task, simple_trajectory): assert trajectory_task.get_state() == TrajectoryState.ABORTED assert not trajectory_task.is_active() - def test_progress(self, trajectory_task, simple_trajectory, orchestrator_state): + def test_progress(self, trajectory_task, simple_trajectory, coordinator_state): t_start = time.perf_counter() trajectory_task.execute(simple_trajectory) # First compute sets start time (deferred start) - state0 = OrchestratorState( - joints=orchestrator_state.joints, + state0 = CoordinatorState( + joints=coordinator_state.joints, t_now=t_start, dt=0.01, ) @@ -428,8 +428,13 @@ def test_non_overlapping_joints(self): class TestTickLoop: - def test_tick_loop_starts_and_stops(self, mock_backend): - hw = BackendHardwareInterface(mock_backend, "arm", "arm") + def test_tick_loop_starts_and_stops(self, mock_adapter): + component = HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, 
+ joints=make_joints("arm", 6), + ) + hw = ConnectedHardware(mock_adapter, component) hardware = {"arm": hw} tasks: dict = {} joint_to_hardware = {f"arm_joint{i + 1}": "arm" for i in range(6)} @@ -452,8 +457,13 @@ def test_tick_loop_starts_and_stops(self, mock_backend): time.sleep(0.02) assert tick_loop.tick_count == final_count - def test_tick_loop_calls_compute(self, mock_backend): - hw = BackendHardwareInterface(mock_backend, "arm", "arm") + def test_tick_loop_calls_compute(self, mock_adapter): + component = HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + ) + hw = ConnectedHardware(mock_adapter, component) hardware = {"arm": hw} mock_task = MagicMock() @@ -494,8 +504,13 @@ def test_tick_loop_calls_compute(self, mock_backend): class TestIntegration: - def test_full_trajectory_execution(self, mock_backend): - hw = BackendHardwareInterface(mock_backend, "arm", "arm") + def test_full_trajectory_execution(self, mock_adapter): + component = HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + ) + hw = ConnectedHardware(mock_adapter, component) hardware = {"arm": hw} config = JointTrajectoryTaskConfig( @@ -539,4 +554,4 @@ def test_full_trajectory_execution(self, mock_backend): tick_loop.stop() assert traj_task.get_state() == TrajectoryState.COMPLETED - assert mock_backend.write_joint_positions.call_count > 0 + assert mock_adapter.write_joint_positions.call_count > 0 diff --git a/dimos/control/tick_loop.py b/dimos/control/tick_loop.py index 03e4e0ebd0..e0020a34da 100644 --- a/dimos/control/tick_loop.py +++ b/dimos/control/tick_loop.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Tick loop for the ControlOrchestrator. +"""Tick loop for the ControlCoordinator. 
This module contains the core control loop logic: - Read state from all hardware @@ -21,7 +21,7 @@ - Route commands to hardware - Publish aggregated joint state -Separated from orchestrator.py following the DimOS pattern of +Separated from coordinator.py following the DimOS pattern of splitting coordination logic from module wrapper. """ @@ -33,9 +33,9 @@ from dimos.control.task import ( ControlTask, + CoordinatorState, JointCommandOutput, JointStateSnapshot, - OrchestratorState, ResourceClaim, ) from dimos.msgs.sensor_msgs import JointState @@ -44,7 +44,8 @@ if TYPE_CHECKING: from collections.abc import Callable - from dimos.control.hardware_interface import HardwareInterface + from dimos.control.components import HardwareId, JointName, TaskName + from dimos.control.hardware_interface import ConnectedHardware from dimos.hardware.manipulators.spec import ControlMode logger = setup_logger() @@ -60,7 +61,7 @@ class JointWinner(NamedTuple): class TickLoop: - """Core tick loop for the control orchestrator. + """Core tick loop for the control coordinator. Runs the deterministic control cycle: 1. 
READ: Collect joint state from all hardware @@ -73,7 +74,7 @@ class TickLoop: Args: tick_rate: Control loop frequency in Hz - hardware: Dict of hardware_id -> HardwareInterface + hardware: Dict of hardware_id -> ConnectedHardware hardware_lock: Lock protecting hardware dict tasks: Dict of task_name -> ControlTask task_lock: Lock protecting tasks dict @@ -86,13 +87,13 @@ class TickLoop: def __init__( self, tick_rate: float, - hardware: dict[str, HardwareInterface], + hardware: dict[HardwareId, ConnectedHardware], hardware_lock: threading.Lock, - tasks: dict[str, ControlTask], + tasks: dict[TaskName, ControlTask], task_lock: threading.Lock, - joint_to_hardware: dict[str, str], + joint_to_hardware: dict[JointName, HardwareId], publish_callback: Callable[[JointState], None] | None = None, - frame_id: str = "orchestrator", + frame_id: str = "coordinator", log_ticks: bool = False, ) -> None: self._tick_rate = tick_rate @@ -133,7 +134,7 @@ def start(self) -> None: self._tick_thread = threading.Thread( target=self._loop, - name="ControlOrchestrator-Tick", + name="ControlCoordinator-Tick", daemon=True, ) self._tick_thread.start() @@ -173,7 +174,7 @@ def _tick(self) -> None: # === PHASE 1: READ ALL HARDWARE === joint_states = self._read_all_hardware() - state = OrchestratorState(joints=joint_states, t_now=t_now, dt=dt) + state = CoordinatorState(joints=joint_states, t_now=t_now, dt=dt) # === PHASE 2: COMPUTE ALL ACTIVE TASKS === commands = self._compute_all_tasks(state) @@ -213,10 +214,10 @@ def _read_all_hardware(self) -> JointStateSnapshot: for hw in self._hardware.values(): try: state = hw.read_state() - for joint_name, (pos, vel, eff) in state.items(): - joint_positions[joint_name] = pos - joint_velocities[joint_name] = vel - joint_efforts[joint_name] = eff + for joint_name, joint_state in state.items(): + joint_positions[joint_name] = joint_state.position + joint_velocities[joint_name] = joint_state.velocity + joint_efforts[joint_name] = joint_state.effort except 
Exception as e: logger.error(f"Failed to read {hw.hardware_id}: {e}") @@ -228,7 +229,7 @@ def _read_all_hardware(self) -> JointStateSnapshot: ) def _compute_all_tasks( - self, state: OrchestratorState + self, state: CoordinatorState ) -> list[tuple[ControlTask, ResourceClaim, JointCommandOutput | None]]: """Compute outputs from all active tasks.""" results: list[tuple[ControlTask, ResourceClaim, JointCommandOutput | None]] = [] diff --git a/dimos/core/README_BLUEPRINTS.md b/dimos/core/README_BLUEPRINTS.md deleted file mode 100644 index 0a3e2ceaf5..0000000000 --- a/dimos/core/README_BLUEPRINTS.md +++ /dev/null @@ -1,319 +0,0 @@ -# Blueprints - -Blueprints (`ModuleBlueprint`) are instructions for how to initialize a `Module`. - -You don't typically want to run a single module, so multiple blueprints are handled together in `ModuleBlueprintSet`. - -You create a `ModuleBlueprintSet` from a single module (say `ConnectionModule`) with: - -```python -blueprint = create_module_blueprint(ConnectionModule, 'arg1', 'arg2', kwarg='value') -``` - -But the same thing can be acomplished more succinctly as: - -```python -connection = ConnectionModule.blueprint -``` - -Now you can create the blueprint with: - -```python -blueprint = connection('arg1', 'arg2', kwarg='value') -``` - -## Linking blueprints - -You can link multiple blueprints together with `autoconnect`: - -```python -blueprint = autoconnect( - module1(), - module2(), - module3(), -) -``` - -`blueprint` itself is a `ModuleBlueprintSet` so you can link it with other modules: - -```python -expanded_blueprint = autoconnect( - blueprint, - module4(), - module5(), -) -``` - -Blueprints are frozen data classes, and `autoconnect()` always constructs an expanded blueprint so you never have to worry about changes in one affecting the other. 
- -### Duplicate module handling - -If the same module appears multiple times in `autoconnect`, the **later blueprint wins** and overrides earlier ones: - -```python -blueprint = autoconnect( - module_a(arg1=1), - module_b(), - module_a(arg1=2), # This one is used, the first is discarded -) -``` - -This is so you can "inherit" from one blueprint but override something you need to change. - -## How transports are linked - -Imagine you have this code: - -```python -class ModuleA(Module): - image: Out[Image] - start_explore: Out[Bool] - -class ModuleB(Module): - image: In[Image] - begin_explore: In[Bool] - -module_a = partial(create_module_blueprint, ModuleA) -module_b = partial(create_module_blueprint, ModuleB) - -autoconnect(module_a(), module_b()) -``` - -Connections are linked based on `(property_name, object_type)`. In this case `('image', Image)` will be connected between the two modules, but `begin_explore` will not be linked to `start_explore`. - -## Topic names - -By default, the name of the property is used to generate the topic name. So for `image`, the topic will be `/image`. - -The property name is used only if it's unique. If two modules have the same property name with different types, then both get a random topic such as `/SGVsbG8sIFdvcmxkI`. - -If you don't like the name you can always override it like in the next section. - -## Which transport is used? - -By default `LCMTransport` is used if the object supports `lcm_encode`. If it doesn't `pLCMTransport` is used (meaning "pickled LCM"). - -You can override transports with the `transports` method. It returns a new blueprint in which the override is set. - -```python -blueprint = autoconnect(...) -expanded_blueprint = autoconnect(blueprint, ...) 
-blueprint = blueprint.transports({ - ("image", Image): pSHMTransport( - "/go2/color_image", default_capacity=DEFAULT_CAPACITY_COLOR_IMAGE - ), - ("start_explore", Bool): pLCMTransport(), -}) -``` - -Note: `expanded_blueprint` does not get the transport overrides because it's created from the initial value of `blueprint`, not the second. - -## Remapping connections - -Sometimes you need to rename a connection to match what other modules expect. You can use `remappings` to rename module connections: - -```python -class ConnectionModule(Module): - color_image: Out[Image] # Outputs on 'color_image' - -class ProcessingModule(Module): - rgb_image: In[Image] # Expects input on 'rgb_image' - -# Without remapping, these wouldn't connect automatically -# With remapping, color_image is renamed to rgb_image -blueprint = ( - autoconnect( - ConnectionModule.blueprint(), - ProcessingModule.blueprint(), - ) - .remappings([ - (ConnectionModule, 'color_image', 'rgb_image'), - ]) -) -``` - -After remapping: -- The `color_image` output from `ConnectionModule` is treated as `rgb_image` -- It automatically connects to any module with an `rgb_image` input of type `Image` -- The topic name becomes `/rgb_image` instead of `/color_image` - -If you want to override the topic, you still have to do it manually: - -```python -blueprint -.remappings([ - (ConnectionModule, 'color_image', 'rgb_image'), -]) -.transports({ - ("rgb_image", Image): LCMTransport("/custom/rgb/image", Image), -}) -``` - -## Overriding global configuration. - -Each module can optionally take a `global_config` option in `__init__`. E.g.: - -```python -class ModuleA(Module): - - def __init__(self, global_config: GlobalConfig | None = None): - ... -``` - -The config is normally taken from .env or from environment variables. 
But you can specifically override the values for a specific blueprint: - -```python -blueprint = blueprint.global_config(n_dask_workers=8) -``` - -## Calling the methods of other modules - -Imagine you have this code: - -```python -class ModuleA(Module): - - @rpc - def get_time(self) -> str: - ... - -class ModuleB(Module): - def request_the_time(self) -> None: - ... -``` - -And you want to call `ModuleA.get_time` in `ModuleB.request_the_time`. - -To do this, you can request a link to the method you want to call in `rpc_calls`. Calling `get_time_rcp` will call the original `ModuleA.get_time`. - -```python -class ModuleB(Module): - rpc_calls: list[str] = [ - "ModuleA.get_time", - ] - - def request_the_time(self) -> None: - get_time_rpc = self.get_rpc_calls("ModuleA.get_time") - print(get_time_rpc()) -``` - -You can also request multiple methods at a time: - -```python -method1_rpc, method2_rpc = self.get_rpc_calls("ModuleX.m1", "ModuleX.m2") -``` - -## Alternative RPC calls - -There is an alternative way of receiving RPC methods. It is useful when you want to perform an action at the time you receive the RPC methods. - -You can use it by defining a method like `set__`: - -```python -class ModuleB(Module): - @rpc # Note that it has to be an rpc method. - def set_ModuleA_get_time(self, rpc_call: RpcCall) -> None: - self._get_time = rpc_call - self._get_time.set_rpc(self.rpc) - - def request_the_time(self) -> None: - print(self._get_time()) -``` - -Note that `RpcCall.rpc` does not serialize, so you have to set it to the one from the module with `rpc_call.set_rpc(self.rpc)` - -## Calling an interface - -In the previous examples, you can only call methods in a module called `ModuleA`. But what if you want to deploy an alternative module in your blueprint? - -You can do so by extracting the common interface as an `ABC` (abstract base class) and linking to the `ABC` instead one particular class. 
- -```python -class TimeInterface(ABC): - @abstractmethod - def get_time(self): ... - -class ProperTime(TimeInterface): - def get_time(self): - return "13:00" - -class BadTime(TimeInterface): - def get_time(self): - return "01:00 PM" - - -class ModuleB(Module): - rpc_calls: list[str] = [ - "TimeInterface.get_time", # TimeInterface instead of ProperTime or BadTime - ] - - def request_the_time(self) -> None: - get_time_rpc = self.get_rpc_calls("TimeInterface.get_time") - print(get_time_rpc()) -``` - -The actual method that you get in `get_time_rpc` depends on which module is deployed. If you deploy `ProperTime`, you get `ProperTime.get_time`: - -```python -blueprint = autoconnect( - ProperTime.blueprint(), - # get_rpc_calls("TimeInterface.get_time") returns ProperTime.get_time - ModuleB.blueprint(), -) -``` - -If both are deployed, the blueprint will throw an error because it's ambiguous. - -## Defining skills - -Skills have to be registered with `AgentSpec.register_skills(self)`. - -```python -class SomeSkill(Module): - - @skill - def some_skill(self) -> None: - ... - - @rpc - def set_AgentSpec_register_skills(self, register_skills: RpcCall) -> None: - register_skills.set_rpc(self.rpc) - register_skills(RPCClient(self, self.__class__)) - - # The agent is just interested in the `@skill` methods, so you'll need this if your class - # has things that cannot be pickled. - def __getstate__(self): - pass - def __setstate__(self, _state): - pass -``` - -Or, you can avoid all of this by inheriting from `SkillModule` which does the above automatically: - -```python -class SomeSkill(SkillModule): - - @skill - def some_skill(self) -> None: - ... -``` - -## Building - -All you have to do to build a blueprint is call: - -```python -module_coordinator = blueprint.build(global_config=config) -``` - -This returns a `ModuleCoordinator` instance that manages all deployed modules. 
- -### Running and shutting down - -You can block the thread until it exits with: - -```python -module_coordinator.loop() -``` - -This will wait for Ctrl+C and then automatically stop all modules and clean up resources. diff --git a/dimos/core/__init__.py b/dimos/core/__init__.py index b56fe74f4f..2b6296b623 100644 --- a/dimos/core/__init__.py +++ b/dimos/core/__init__.py @@ -1,58 +1,44 @@ from __future__ import annotations import multiprocessing as mp -import signal import time +from typing import TYPE_CHECKING, cast -from dask.distributed import Client, LocalCluster +import lazy_loader as lazy from rich.console import Console -import dimos.core.colors as colors from dimos.core.core import rpc -from dimos.core.module import Module, ModuleBase, ModuleConfig, ModuleConfigT -from dimos.core.rpc_client import RPCClient -from dimos.core.stream import In, Out, RemoteIn, RemoteOut, Transport -from dimos.core.transport import ( - LCMTransport, - SHMTransport, - ZenohTransport, - pLCMTransport, - pSHMTransport, -) -from dimos.protocol.rpc import LCMRPC -from dimos.protocol.rpc.spec import RPCSpec -from dimos.protocol.tf import LCMTF, TF, PubSubTF, TFConfig, TFSpec -from dimos.utils.actor_registry import ActorRegistry from dimos.utils.logging_config import setup_logger +if TYPE_CHECKING: + # Avoid runtime import to prevent circular import; ruff's TC001 would otherwise move it. 
+ from dask.distributed import LocalCluster + + from dimos.core._dask_exports import DimosCluster + from dimos.core.module import Module + from dimos.core.rpc_client import ModuleProxy + logger = setup_logger() -__all__ = [ - "LCMRPC", - "LCMTF", - "TF", - "DimosCluster", - "In", - "LCMTransport", - "Module", - "ModuleBase", - "ModuleConfig", - "ModuleConfigT", - "Out", - "PubSubTF", - "RPCSpec", - "RemoteIn", - "RemoteOut", - "SHMTransport", - "TFConfig", - "TFSpec", - "Transport", - "ZenohTransport", - "pLCMTransport", - "pSHMTransport", - "rpc", - "start", -] +__getattr__, __dir__, __all__ = lazy.attach( + __name__, + submodules=["colors"], + submod_attrs={ + "blueprints": ["autoconnect", "Blueprint"], + "_dask_exports": ["DimosCluster"], + "_protocol_exports": ["LCMRPC", "RPCSpec", "LCMTF", "TF", "PubSubTF", "TFConfig", "TFSpec"], + "module": ["Module", "ModuleBase", "ModuleConfig", "ModuleConfigT"], + "stream": ["In", "Out", "RemoteIn", "RemoteOut", "Transport"], + "transport": [ + "LCMTransport", + "SHMTransport", + "ZenohTransport", + "pLCMTransport", + "pSHMTransport", + ], + }, +) +__all__ += ["DimosCluster", "Module", "rpc", "start", "wait_exit"] class CudaCleanupPlugin: @@ -68,7 +54,7 @@ def teardown(self, worker) -> None: # type: ignore[no-untyped-def] import sys if "cupy" in sys.modules: - import cupy as cp # type: ignore[import-not-found] + import cupy as cp # type: ignore[import-not-found, import-untyped] # Clear memory pools mempool = cp.get_default_memory_pool() @@ -85,15 +71,25 @@ def teardown(self, worker) -> None: # type: ignore[no-untyped-def] def patch_actor(actor, cls) -> None: ... 
# type: ignore[no-untyped-def] -DimosCluster = Client - +def patchdask(dask_client: DimosCluster, local_cluster: LocalCluster) -> DimosCluster: + from dimos.core.rpc_client import RPCClient + from dimos.utils.actor_registry import ActorRegistry -def patchdask(dask_client: Client, local_cluster: LocalCluster) -> DimosCluster: def deploy( # type: ignore[no-untyped-def] - actor_class, + actor_class: type[Module], *args, **kwargs, - ): + ) -> ModuleProxy: + from dimos.core.docker_runner import DockerModule, is_docker_module + + # Check if this module should run in Docker (based on its default_config) + if is_docker_module(actor_class): + logger.info("Deploying module in Docker.", module=actor_class.__name__) + dm = DockerModule(actor_class, *args, **kwargs) + dm.start() # Explicit start - follows create -> configure -> start lifecycle + dask_client._docker_modules.append(dm) # type: ignore[attr-defined] + return dm # type: ignore[return-value] + logger.info("Deploying module.", module=actor_class.__name__) actor = dask_client.submit( # type: ignore[no-untyped-call] actor_class, @@ -108,11 +104,12 @@ def deploy( # type: ignore[no-untyped-def] # Register actor deployment in shared memory ActorRegistry.update(str(actor), str(worker)) - return RPCClient(actor, actor_class) + return cast("ModuleProxy", RPCClient(actor, actor_class)) def check_worker_memory() -> None: """Check memory usage of all workers.""" info = dask_client.scheduler_info() + console = Console() total_workers = len(info.get("workers", {})) total_memory_used = 0 @@ -169,12 +166,20 @@ def close_all() -> None: return dask_client._closed = True # type: ignore[attr-defined] + # Stop all Docker modules (in reverse order of deployment) + for dm in reversed(dask_client._docker_modules): # type: ignore[attr-defined] + try: + dm.stop() + except Exception: + pass + dask_client._docker_modules.clear() # type: ignore[attr-defined] + # Stop all SharedMemory transports before closing Dask # This prevents the "leaked 
shared_memory objects" warning and hangs try: import gc - from dimos.protocol.pubsub import shmpubsub + from dimos.protocol.pubsub.impl import shmpubsub for obj in gc.get_objects(): if isinstance(obj, shmpubsub.SharedMemoryPubSubBase): @@ -220,11 +225,12 @@ def close_all() -> None: # This is needed, solves race condition in CI thread check time.sleep(0.1) + dask_client._docker_modules = [] # type: ignore[attr-defined] dask_client.deploy = deploy # type: ignore[attr-defined] dask_client.check_worker_memory = check_worker_memory # type: ignore[attr-defined] dask_client.stop = lambda: dask_client.close() # type: ignore[attr-defined, no-untyped-call] dask_client.close_all = close_all # type: ignore[attr-defined] - return dask_client + return dask_client # type: ignore[return-value] def start(n: int | None = None, memory_limit: str = "auto") -> DimosCluster: @@ -238,6 +244,8 @@ def start(n: int | None = None, memory_limit: str = "auto") -> DimosCluster: DimosCluster: A patched Dask client with deploy(), check_worker_memory(), stop(), and close_all() methods """ + from dask.distributed import Client, LocalCluster + console = Console() if not n: n = mp.cpu_count() @@ -257,31 +265,6 @@ def start(n: int | None = None, memory_limit: str = "auto") -> DimosCluster: ) patched_client = patchdask(client, cluster) - patched_client._shutting_down = False # type: ignore[attr-defined] - - # Signal handler with proper exit handling - def signal_handler(sig, frame) -> None: # type: ignore[no-untyped-def] - # If already shutting down, force exit - if patched_client._shutting_down: # type: ignore[attr-defined] - import os - - console.print("[red]Force exit!") - os._exit(1) - - patched_client._shutting_down = True # type: ignore[attr-defined] - console.print(f"[yellow]Shutting down (signal {sig})...") - - try: - patched_client.close_all() # type: ignore[attr-defined] - except Exception: - pass - - import sys - - sys.exit(0) - - signal.signal(signal.SIGINT, signal_handler) - 
signal.signal(signal.SIGTERM, signal_handler) return patched_client diff --git a/dimos/core/_dask_exports.py b/dimos/core/_dask_exports.py new file mode 100644 index 0000000000..cb257e7804 --- /dev/null +++ b/dimos/core/_dask_exports.py @@ -0,0 +1,17 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dask.distributed import Client as DimosCluster + +__all__ = ["DimosCluster"] diff --git a/dimos/core/_protocol_exports.py b/dimos/core/_protocol_exports.py new file mode 100644 index 0000000000..be77fd8323 --- /dev/null +++ b/dimos/core/_protocol_exports.py @@ -0,0 +1,19 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.protocol.rpc import LCMRPC +from dimos.protocol.rpc.spec import RPCSpec +from dimos.protocol.tf import LCMTF, TF, PubSubTF, TFConfig, TFSpec + +__all__ = ["LCMRPC", "LCMTF", "TF", "PubSubTF", "RPCSpec", "TFConfig", "TFSpec"] diff --git a/dimos/core/blueprints.py b/dimos/core/blueprints.py index 1fa51629bf..605517e6cf 100644 --- a/dimos/core/blueprints.py +++ b/dimos/core/blueprints.py @@ -23,14 +23,12 @@ from types import MappingProxyType from typing import Any, Literal, get_args, get_origin, get_type_hints -import rerun as rr -import rerun.blueprint as rrb - -from dimos.core.global_config import GlobalConfig -from dimos.core.module import Module +from dimos.core.global_config import GlobalConfig, global_config +from dimos.core.module import Module, is_module_type from dimos.core.module_coordinator import ModuleCoordinator from dimos.core.stream import In, Out -from dimos.core.transport import LCMTransport, pLCMTransport +from dimos.core.transport import LCMTransport, PubSubTransport, pLCMTransport +from dimos.spec.utils import Spec, is_spec, spec_annotation_compliance, spec_structural_compliance from dimos.utils.generic import short_id from dimos.utils.logging_config import setup_logger @@ -38,35 +36,87 @@ @dataclass(frozen=True) -class ModuleConnection: +class StreamRef: name: str type: type direction: Literal["in", "out"] @dataclass(frozen=True) -class ModuleBlueprint: +class ModuleRef: + name: str + spec: type[Spec] | type[Module] + + +@dataclass(frozen=True) +class _BlueprintAtom: module: type[Module] - connections: tuple[ModuleConnection, ...] - args: tuple[Any] + streams: tuple[StreamRef, ...] + module_refs: tuple[ModuleRef, ...] + args: tuple[Any, ...] kwargs: dict[str, Any] + @classmethod + def create( + cls, module: type[Module], args: tuple[Any, ...], kwargs: dict[str, Any] + ) -> "_BlueprintAtom": + streams: list[StreamRef] = [] + module_refs: list[ModuleRef] = [] + + # Use get_type_hints() to properly resolve string annotations. 
+ try: + all_annotations = get_type_hints(module) + except Exception: + # Fallback to raw annotations if get_type_hints fails. + all_annotations = {} + for base_class in reversed(module.__mro__): + if hasattr(base_class, "__annotations__"): + all_annotations.update(base_class.__annotations__) + + for name, annotation in all_annotations.items(): + origin = get_origin(annotation) + # Streams + if origin in (In, Out): + direction = "in" if origin == In else "out" + type_ = get_args(annotation)[0] + streams.append( + StreamRef(name=name, type=type_, direction=direction) # type: ignore[arg-type] + ) + # linking to unknown module via Spec + elif is_spec(annotation): + module_refs.append(ModuleRef(name=name, spec=annotation)) + # linking to specific/known module directly + elif is_module_type(annotation): + module_refs.append(ModuleRef(name=name, spec=annotation)) + + return cls( + module=module, + streams=tuple(streams), + module_refs=tuple(module_refs), + args=args, + kwargs=kwargs, + ) + @dataclass(frozen=True) -class ModuleBlueprintSet: - blueprints: tuple[ModuleBlueprint, ...] - # TODO: Replace Any - transport_map: Mapping[tuple[str, type], Any] = field( +class Blueprint: + blueprints: tuple[_BlueprintAtom, ...] + transport_map: Mapping[tuple[str, type], PubSubTransport[Any]] = field( default_factory=lambda: MappingProxyType({}) ) global_config_overrides: Mapping[str, Any] = field(default_factory=lambda: MappingProxyType({})) - remapping_map: Mapping[tuple[type[Module], str], str] = field( + remapping_map: Mapping[tuple[type[Module], str], str | type[Module] | type[Spec]] = field( default_factory=lambda: MappingProxyType({}) ) requirement_checks: tuple[Callable[[], str | None], ...] 
= field(default_factory=tuple) - def transports(self, transports: dict[tuple[str, type], Any]) -> "ModuleBlueprintSet": - return ModuleBlueprintSet( + @classmethod + def create(cls, module: type[Module], *args: Any, **kwargs: Any) -> "Blueprint": + blueprint = _BlueprintAtom.create(module, args, kwargs) + return cls(blueprints=(blueprint,)) + + def transports(self, transports: dict[tuple[str, type], Any]) -> "Blueprint": + return Blueprint( blueprints=self.blueprints, transport_map=MappingProxyType({**self.transport_map, **transports}), global_config_overrides=self.global_config_overrides, @@ -74,8 +124,8 @@ def transports(self, transports: dict[tuple[str, type], Any]) -> "ModuleBlueprin requirement_checks=self.requirement_checks, ) - def global_config(self, **kwargs: Any) -> "ModuleBlueprintSet": - return ModuleBlueprintSet( + def global_config(self, **kwargs: Any) -> "Blueprint": + return Blueprint( blueprints=self.blueprints, transport_map=self.transport_map, global_config_overrides=MappingProxyType({**self.global_config_overrides, **kwargs}), @@ -83,12 +133,14 @@ def global_config(self, **kwargs: Any) -> "ModuleBlueprintSet": requirement_checks=self.requirement_checks, ) - def remappings(self, remappings: list[tuple[type[Module], str, str]]) -> "ModuleBlueprintSet": + def remappings( + self, remappings: list[tuple[type[Module], str, str | type[Module] | type[Spec]]] + ) -> "Blueprint": remappings_dict = dict(self.remapping_map) for module, old, new in remappings: remappings_dict[(module, old)] = new - return ModuleBlueprintSet( + return Blueprint( blueprints=self.blueprints, transport_map=self.transport_map, global_config_overrides=self.global_config_overrides, @@ -96,8 +148,8 @@ def remappings(self, remappings: list[tuple[type[Module], str, str]]) -> "Module requirement_checks=self.requirement_checks, ) - def requirements(self, *checks: Callable[[], str | None]) -> "ModuleBlueprintSet": - return ModuleBlueprintSet( + def requirements(self, *checks: 
Callable[[], str | None]) -> "Blueprint": + return Blueprint( blueprints=self.blueprints, transport_map=self.transport_map, global_config_overrides=self.global_config_overrides, @@ -124,14 +176,14 @@ def _check_ambiguity( f"{modules_str}. Please use a concrete class name instead." ) - def _get_transport_for(self, name: str, type: type) -> Any: - transport = self.transport_map.get((name, type), None) + def _get_transport_for(self, name: str, stream_type: type) -> PubSubTransport[Any]: + transport = self.transport_map.get((name, stream_type), None) if transport: return transport - use_pickled = getattr(type, "lcm_encode", None) is None + use_pickled = getattr(stream_type, "lcm_encode", None) is None topic = f"/{name}" if self._is_name_unique(name) else f"/{short_id()}" - transport = pLCMTransport(topic) if use_pickled else LCMTransport(topic, type) + transport = pLCMTransport(topic) if use_pickled else LCMTransport(topic, stream_type) return transport @@ -140,10 +192,11 @@ def _all_name_types(self) -> set[tuple[str, type]]: # Apply remappings to get the actual names that will be used result = set() for blueprint in self.blueprints: - for conn in blueprint.connections: - # Check if this connection should be remapped + for conn in blueprint.streams: + # Check if this stream should be remapped remapped_name = self.remapping_map.get((blueprint.module, conn.name), conn.name) - result.add((remapped_name, conn.type)) + if isinstance(remapped_name, str): + result.add((remapped_name, conn.type)) return result def _is_name_unique(self, name: str) -> bool: @@ -169,10 +222,10 @@ def _verify_no_name_conflicts(self) -> None: name_to_modules = defaultdict(list) for blueprint in self.blueprints: - for conn in blueprint.connections: - connection_name = self.remapping_map.get((blueprint.module, conn.name), conn.name) - name_to_types[connection_name].add(conn.type) - name_to_modules[connection_name].append((blueprint.module, conn.type)) + for conn in blueprint.streams: + stream_name = 
self.remapping_map.get((blueprint.module, conn.name), conn.name) + name_to_types[stream_name].add(conn.type) + name_to_modules[stream_name].append((blueprint.module, conn.type)) conflicts = {} for conn_name, types in name_to_types.items(): @@ -185,7 +238,7 @@ def _verify_no_name_conflicts(self) -> None: if not conflicts: return - error_lines = ["Blueprint cannot start because there are conflicting connections."] + error_lines = ["Blueprint cannot start because there are conflicting streams."] for name, modules_by_type in conflicts.items(): type_entries = [] for conn_type, modules in modules_by_type.items(): @@ -202,44 +255,126 @@ def _verify_no_name_conflicts(self) -> None: def _deploy_all_modules( self, module_coordinator: ModuleCoordinator, global_config: GlobalConfig ) -> None: + module_specs: list[tuple[type[Module], tuple[Any, ...], dict[str, Any]]] = [] for blueprint in self.blueprints: kwargs = {**blueprint.kwargs} sig = inspect.signature(blueprint.module.__init__) - if "global_config" in sig.parameters: - kwargs["global_config"] = global_config - module_coordinator.deploy(blueprint.module, *blueprint.args, **kwargs) + if "cfg" in sig.parameters: + kwargs["cfg"] = global_config + module_specs.append((blueprint.module, blueprint.args, kwargs)) + + module_coordinator.deploy_parallel(module_specs) - def _connect_transports(self, module_coordinator: ModuleCoordinator) -> None: - # Gather all the In/Out connections with remapping applied. 
- connections = defaultdict(list) - # Track original name -> remapped name for each module - module_conn_mapping = defaultdict(dict) # type: ignore[var-annotated] + def _connect_streams(self, module_coordinator: ModuleCoordinator) -> None: + # dict when given (final/remapped) stream name+type, provides a list of modules + original (non-remapped) stream names + streams = defaultdict(list) for blueprint in self.blueprints: - for conn in blueprint.connections: - # Check if this connection should be remapped + for conn in blueprint.streams: + # Check if this stream should be remapped remapped_name = self.remapping_map.get((blueprint.module, conn.name), conn.name) - # Store the mapping for later use - module_conn_mapping[blueprint.module][conn.name] = remapped_name - # Group by remapped name and type - connections[remapped_name, conn.type].append((blueprint.module, conn.name)) - - # Connect all In/Out connections by remapped name and type. - for remapped_name, type in connections.keys(): - transport = self._get_transport_for(remapped_name, type) - for module, original_name in connections[(remapped_name, type)]: - instance = module_coordinator.get_instance(module) + if isinstance(remapped_name, str): + # Group by remapped name and type + streams[remapped_name, conn.type].append((blueprint.module, conn.name)) + + # Connect all In/Out streams by remapped name and type. 
+ for remapped_name, stream_type in streams.keys(): + transport = self._get_transport_for(remapped_name, stream_type) + for module, original_name in streams[(remapped_name, stream_type)]: + instance = module_coordinator.get_instance(module) # type: ignore[assignment] instance.set_transport(original_name, transport) # type: ignore[union-attr] logger.info( "Transport", name=remapped_name, original_name=original_name, topic=str(getattr(transport, "topic", None)), - type=f"{type.__module__}.{type.__qualname__}", + type=f"{stream_type.__module__}.{stream_type.__qualname__}", module=module.__name__, transport=transport.__class__.__name__, ) + def _connect_module_refs(self, module_coordinator: ModuleCoordinator) -> None: + # partly fill out the mod_and_mod_ref_to_proxy + mod_and_mod_ref_to_proxy = { + (module, name): replacement + for (module, name), replacement in self.remapping_map.items() + if is_spec(replacement) or is_module_type(replacement) + } + + # after this loop we should have an exact module for every module_ref on every blueprint + for blueprint in self.blueprints: + for each_module_ref in blueprint.module_refs: + # we've got to find another module that implements this spec + spec = mod_and_mod_ref_to_proxy.get( + (blueprint.module, each_module_ref.name), each_module_ref.spec + ) + + # if the spec is actually a module, use that (basically a user override) + if is_module_type(spec): + mod_and_mod_ref_to_proxy[blueprint.module, each_module_ref.name] = spec + continue + + # find all available candidates + possible_module_candidates = [ + each_other_blueprint.module + for each_other_blueprint in self.blueprints + if ( + each_other_blueprint != blueprint + and spec_structural_compliance(each_other_blueprint.module, spec) + ) + ] + # we keep valid separate from invalid to provide a better error message for "almost" valid cases + valid_module_candidates = [ + each_candidate + for each_candidate in possible_module_candidates + if 
spec_annotation_compliance(each_candidate, spec) + ] + # none + if len(possible_module_candidates) == 0: + raise Exception( + f"""The {blueprint.module.__name__} has a module reference ({each_module_ref}) which requested a module that fills out the {each_module_ref.spec.__name__} spec. But I couldn't find a module that met that spec.\n""" + ) + # exactly one structurally valid candidate + elif len(possible_module_candidates) == 1: + if len(valid_module_candidates) == 0: + logger.warning( + f"""The {blueprint.module.__name__} has a module reference ({each_module_ref}) which requested a module that fills out the {each_module_ref.spec.__name__} spec. I found a module ({possible_module_candidates[0].__name__}) that met that spec structurally, but it had a mismatch in type annotations.\nPlease either change the {each_module_ref.spec.__name__} spec or the {possible_module_candidates[0].__name__} module.\n""" + ) + mod_and_mod_ref_to_proxy[blueprint.module, each_module_ref.name] = ( + possible_module_candidates[0] + ) + continue + # more than one + elif len(valid_module_candidates) > 1: + raise Exception( + f"""The {blueprint.module.__name__} has a module reference ({each_module_ref}) which requested a module that fills out the {each_module_ref.spec.__name__} spec. But I found multiple modules that met that spec: {possible_module_candidates}.\nTo fix this use .remappings, for example:\n autoconnect(...).remappings([ ({blueprint.module.__name__}, {each_module_ref.name!r}, ) ])\n""" + ) + # structural candidates, but no valid candidates + elif len(valid_module_candidates) == 0: + possible_module_candidates_str = ", ".join( + [each_candidate.__name__ for each_candidate in possible_module_candidates] + ) + raise Exception( + f"""The {blueprint.module.__name__} has a module reference ({each_module_ref}) which requested a module that fills out the {each_module_ref.spec.__name__} spec. 
Some modules ({possible_module_candidates_str}) met the spec structurally but had a mismatch in type annotations\n""" + ) + # one valid candidate (and more than one structurally valid candidate) + else: + mod_and_mod_ref_to_proxy[blueprint.module, each_module_ref.name] = ( + valid_module_candidates[0] + ) + + # now that we know the streams, we mutate the RPCClient objects + for (base_module, module_ref_name), target_module in mod_and_mod_ref_to_proxy.items(): + base_module_proxy = module_coordinator.get_instance(base_module) + target_module_proxy = module_coordinator.get_instance(target_module) # type: ignore[type-var,arg-type] + setattr( + base_module_proxy, + module_ref_name, + target_module_proxy, + ) + # Ensure the remote module instance can use the module ref inside its own RPC handlers. + base_module_proxy.set_module_ref(module_ref_name, target_module_proxy) + def _connect_rpc_methods(self, module_coordinator: ModuleCoordinator) -> None: # Gather all RPC methods. rpc_methods = {} @@ -255,10 +390,13 @@ def _connect_rpc_methods(self, module_coordinator: ModuleCoordinator) -> None: for blueprint in self.blueprints: for method_name in blueprint.module.rpcs.keys(): # type: ignore[attr-defined] - method = getattr(module_coordinator.get_instance(blueprint.module), method_name) + module_proxy = module_coordinator.get_instance(blueprint.module) # type: ignore[assignment] + method_for_rpc_client = getattr(module_proxy, method_name) # Register under concrete class name (backward compatibility) - rpc_methods[f"{blueprint.module.__name__}_{method_name}"] = method - rpc_methods_dot[f"{blueprint.module.__name__}.{method_name}"] = method + rpc_methods[f"{blueprint.module.__name__}_{method_name}"] = method_for_rpc_client + rpc_methods_dot[f"{blueprint.module.__name__}.{method_name}"] = ( + method_for_rpc_client + ) # Also register under any interface names for base in blueprint.module.mro(): @@ -270,10 +408,12 @@ def _connect_rpc_methods(self, module_coordinator: 
ModuleCoordinator) -> None: and getattr(base, method_name, None) is not None ): interface_key = f"{base.__name__}.{method_name}" - interface_methods_dot[interface_key].append((blueprint.module, method)) + interface_methods_dot[interface_key].append( + (blueprint.module, method_for_rpc_client) + ) interface_key_underscore = f"{base.__name__}_{method_name}" interface_methods[interface_key_underscore].append( - (blueprint.module, method) + (blueprint.module, method_for_rpc_client) ) # Check for ambiguity in interface methods and add non-ambiguous ones @@ -286,7 +426,7 @@ def _connect_rpc_methods(self, module_coordinator: ModuleCoordinator) -> None: # Fulfil method requests (so modules can call each other). for blueprint in self.blueprints: - instance = module_coordinator.get_instance(blueprint.module) + instance = module_coordinator.get_instance(blueprint.module) # type: ignore[assignment] for method_name in blueprint.module.rpcs.keys(): # type: ignore[attr-defined] if not method_name.startswith("set_"): @@ -313,123 +453,32 @@ def _connect_rpc_methods(self, module_coordinator: ModuleCoordinator) -> None: requested_method_name, rpc_methods_dot[requested_method_name] ) - def _init_rerun_blueprint(self, module_coordinator: ModuleCoordinator) -> None: - """Compose and send Rerun blueprint from module contributions. - - Collects rerun_views() from all modules and composes them into a unified layout. 
- """ - # Collect view contributions from all modules - side_panels = [] - for blueprint in self.blueprints: - if hasattr(blueprint.module, "rerun_views"): - views = blueprint.module.rerun_views() - if views: - side_panels.extend(views) - - # Always include latency panel if we have any panels - if side_panels: - side_panels.append( - rrb.TimeSeriesView( - name="Latency (ms)", - origin="/metrics", - contents=[ - "+ /metrics/voxel_map/latency_ms", - "+ /metrics/costmap/latency_ms", - ], - ) - ) - - # Compose final layout - if side_panels: - composed_blueprint = rrb.Blueprint( - rrb.Horizontal( - rrb.Spatial3DView( - name="3D View", - origin="world", - background=[0, 0, 0], - ), - rrb.Vertical(*side_panels, row_shares=[2] + [1] * (len(side_panels) - 1)), - column_shares=[3, 1], - ), - rrb.TimePanel(state="collapsed"), - rrb.SelectionPanel(state="collapsed"), - rrb.BlueprintPanel(state="collapsed"), - ) - rr.send_blueprint(composed_blueprint) - def build( self, - global_config: GlobalConfig | None = None, cli_config_overrides: Mapping[str, Any] | None = None, ) -> ModuleCoordinator: - if global_config is None: - global_config = GlobalConfig() - global_config = global_config.model_copy(update=dict(self.global_config_overrides)) + global_config.update(**dict(self.global_config_overrides)) if cli_config_overrides: - global_config = global_config.model_copy(update=dict(cli_config_overrides)) + global_config.update(**dict(cli_config_overrides)) self._check_requirements() self._verify_no_name_conflicts() - # Initialize Rerun server before deploying modules (if backend is Rerun) - if global_config.rerun_enabled and global_config.viewer_backend.startswith("rerun"): - try: - from dimos.dashboard.rerun_init import init_rerun_server - - server_addr = init_rerun_server(viewer_mode=global_config.viewer_backend) - global_config = global_config.model_copy(update={"rerun_server_addr": server_addr}) - logger.info("Rerun server initialized", addr=server_addr) - except Exception as e: - 
logger.warning(f"Failed to initialize Rerun server: {e}") - - module_coordinator = ModuleCoordinator(global_config=global_config) + module_coordinator = ModuleCoordinator(cfg=global_config) module_coordinator.start() + # all module constructors are called here (each of them sets up their own) self._deploy_all_modules(module_coordinator, global_config) - self._connect_transports(module_coordinator) + self._connect_streams(module_coordinator) self._connect_rpc_methods(module_coordinator) + self._connect_module_refs(module_coordinator) module_coordinator.start_all_modules() - # Compose and send Rerun blueprint from module contributions - if global_config.viewer_backend.startswith("rerun"): - self._init_rerun_blueprint(module_coordinator) - return module_coordinator -def _make_module_blueprint( - module: type[Module], args: tuple[Any], kwargs: dict[str, Any] -) -> ModuleBlueprint: - connections: list[ModuleConnection] = [] - - # Use get_type_hints() to properly resolve string annotations. - try: - all_annotations = get_type_hints(module) - except Exception: - # Fallback to raw annotations if get_type_hints fails. 
- all_annotations = {} - for base_class in reversed(module.__mro__): - if hasattr(base_class, "__annotations__"): - all_annotations.update(base_class.__annotations__) - - for name, annotation in all_annotations.items(): - origin = get_origin(annotation) - if origin not in (In, Out): - continue - direction = "in" if origin == In else "out" - type_ = get_args(annotation)[0] - connections.append(ModuleConnection(name=name, type=type_, direction=direction)) # type: ignore[arg-type] - - return ModuleBlueprint(module=module, connections=tuple(connections), args=args, kwargs=kwargs) - - -def create_module_blueprint(module: type[Module], *args: Any, **kwargs: Any) -> ModuleBlueprintSet: - blueprint = _make_module_blueprint(module, args, kwargs) - return ModuleBlueprintSet(blueprints=(blueprint,)) - - -def autoconnect(*blueprints: ModuleBlueprintSet) -> ModuleBlueprintSet: +def autoconnect(*blueprints: Blueprint) -> Blueprint: all_blueprints = tuple(_eliminate_duplicates([bp for bs in blueprints for bp in bs.blueprints])) all_transports = dict( # type: ignore[var-annotated] reduce(operator.iadd, [list(x.transport_map.items()) for x in blueprints], []) @@ -442,7 +491,7 @@ def autoconnect(*blueprints: ModuleBlueprintSet) -> ModuleBlueprintSet: ) all_requirement_checks = tuple(check for bs in blueprints for check in bs.requirement_checks) - return ModuleBlueprintSet( + return Blueprint( blueprints=all_blueprints, transport_map=MappingProxyType(all_transports), global_config_overrides=MappingProxyType(all_config_overrides), @@ -451,7 +500,7 @@ def autoconnect(*blueprints: ModuleBlueprintSet) -> ModuleBlueprintSet: ) -def _eliminate_duplicates(blueprints: list[ModuleBlueprint]) -> list[ModuleBlueprint]: +def _eliminate_duplicates(blueprints: list[_BlueprintAtom]) -> list[_BlueprintAtom]: # The duplicates are eliminated in reverse so that newer blueprints override older ones. 
seen = set() unique_blueprints = [] diff --git a/dimos/core/core.py b/dimos/core/core.py index e7a7d09f58..6c95700926 100644 --- a/dimos/core/core.py +++ b/dimos/core/core.py @@ -17,7 +17,6 @@ from typing import ( TYPE_CHECKING, - Any, TypeVar, ) @@ -30,7 +29,12 @@ register_picklers() T = TypeVar("T") +from typing import ParamSpec, TypeVar -def rpc(fn: Callable[..., Any]) -> Callable[..., Any]: +P = ParamSpec("P") +R = TypeVar("R") + + +def rpc(fn: Callable[P, R]) -> Callable[P, R]: fn.__rpc__ = True # type: ignore[attr-defined] return fn diff --git a/dimos/core/docker_build.py b/dimos/core/docker_build.py new file mode 100644 index 0000000000..7ee90fc5c3 --- /dev/null +++ b/dimos/core/docker_build.py @@ -0,0 +1,120 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Docker image building and Dockerfile conversion utilities. +Converts any Dockerfile into a DimOS module container by appending a footer +that installs DimOS and creates the module entrypoint. 
+""" + +from __future__ import annotations + +import subprocess +from typing import TYPE_CHECKING + +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from pathlib import Path + + from dimos.core.docker_runner import DockerModuleConfig + +logger = setup_logger() + +# Timeout for quick Docker commands +DOCKER_CMD_TIMEOUT = 20 + +# Sentinel value to detect already-converted Dockerfiles (UUID ensures uniqueness) +DIMOS_SENTINEL = "DIMOS-MODULE-CONVERSION-427593ae-c6e8-4cf1-9b2d-ee81a420a5dc" + +# Footer appended to Dockerfiles for DimOS module conversion +DIMOS_FOOTER = f""" +# ==== {DIMOS_SENTINEL} ==== +# Copy DimOS source from build context +COPY dimos /dimos/source/dimos/ +COPY pyproject.toml /dimos/source/ +COPY docker/python/module-install.sh /tmp/module-install.sh + +# Install DimOS and create entrypoint +RUN bash /tmp/module-install.sh /dimos/source && rm /tmp/module-install.sh + +ENTRYPOINT ["/dimos/entrypoint.sh"] +""" + + +def _run(cmd: list[str], *, timeout: float | None = None) -> subprocess.CompletedProcess[str]: + """Run a command and return the result.""" + return subprocess.run(cmd, capture_output=True, text=True, timeout=timeout, check=False) + + +def _run_streaming(cmd: list[str]) -> int: + """Run command and stream output to terminal. Returns exit code.""" + result = subprocess.run(cmd, text=True) + return result.returncode + + +def _docker_bin(cfg: DockerModuleConfig) -> str: + """Get docker binary path.""" + return cfg.docker_bin or "docker" + + +def _image_exists(docker_bin: str, image_name: str) -> bool: + """Check if a Docker image exists locally.""" + r = _run([docker_bin, "image", "inspect", image_name], timeout=DOCKER_CMD_TIMEOUT) + return r.returncode == 0 + + +def _convert_dockerfile(dockerfile: Path) -> Path: + """Append DimOS footer to Dockerfile. Returns path to converted file.""" + content = dockerfile.read_text() + + # Already converted? 
+ if DIMOS_SENTINEL in content: + return dockerfile + + logger.info(f"Converting {dockerfile.name} to DimOS format") + + converted = dockerfile.parent / f".{dockerfile.name}.dimos" + converted.write_text(content.rstrip() + "\n" + DIMOS_FOOTER.lstrip("\n")) + return converted + + +def build_image(cfg: DockerModuleConfig) -> None: + """Build Docker image using footer mode conversion.""" + if cfg.docker_file is None: + raise ValueError("docker_file is required for building Docker images") + dockerfile = _convert_dockerfile(cfg.docker_file) + + context = cfg.docker_build_context or cfg.docker_file.parent + cmd = [_docker_bin(cfg), "build", "-t", cfg.docker_image, "-f", str(dockerfile)] + for k, v in cfg.docker_build_args.items(): + cmd.extend(["--build-arg", f"{k}={v}"]) + cmd.append(str(context)) + + logger.info(f"Building Docker image: {cfg.docker_image}") + exit_code = _run_streaming(cmd) + if exit_code != 0: + raise RuntimeError(f"Docker build failed with exit code {exit_code}") + + +def image_exists(cfg: DockerModuleConfig) -> bool: + """Check if the configured Docker image exists locally.""" + return _image_exists(_docker_bin(cfg), cfg.docker_image) + + +__all__ = [ + "DIMOS_FOOTER", + "build_image", + "image_exists", +] diff --git a/dimos/core/docker_runner.py b/dimos/core/docker_runner.py new file mode 100644 index 0000000000..9be2ff6012 --- /dev/null +++ b/dimos/core/docker_runner.py @@ -0,0 +1,521 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import argparse +from contextlib import suppress +from dataclasses import dataclass, field +import importlib +import json +import os +import signal +import subprocess +import threading +import time +from typing import TYPE_CHECKING, Any + +from dimos.core.docker_build import build_image, image_exists +from dimos.core.module import Module, ModuleConfig +from dimos.core.rpc_client import RpcCall +from dimos.protocol.rpc import LCMRPC +from dimos.utils.logging_config import setup_logger +from dimos.visualization.rerun.bridge import RERUN_GRPC_PORT, RERUN_WEB_PORT + +if TYPE_CHECKING: + from collections.abc import Callable + from pathlib import Path + +logger = setup_logger() + +DOCKER_RUN_TIMEOUT = 120 # Timeout for `docker run` command execution +DOCKER_CMD_TIMEOUT = 20 # Timeout for quick Docker commands (inspect, rm, logs) +DOCKER_STATUS_TIMEOUT = 10 # Timeout for container status checks +DOCKER_STOP_TIMEOUT = 30 # Timeout for `docker stop` command (graceful shutdown) +RPC_READY_TIMEOUT = 3.0 # Timeout for RPC readiness probe during container startup +LOG_TAIL_LINES = 200 # Number of log lines to include in error messages + + +@dataclass(kw_only=True) +class DockerModuleConfig(ModuleConfig): + """ + Configuration for running a DimOS module inside Docker. + + For advanced Docker options not listed here, use docker_extra_args. 
+ Example: docker_extra_args=["--cap-add=SYS_ADMIN", "--read-only"] + """ + + # Build / image + docker_image: str + docker_file: Path | None = None # Required on host for building, not needed in container + docker_build_context: Path | None = None + docker_build_args: dict[str, str] = field(default_factory=dict) + + # Identity + docker_container_name: str | None = None + docker_labels: dict[str, str] = field(default_factory=dict) + + # Networking (host mode recommended for LCM multicast) + docker_network_mode: str = "host" + docker_network: str | None = None + docker_ports: list[tuple[int, int, str]] = field( + default_factory=list + ) # (host, container, proto) + + # Runtime resources + docker_gpus: str | None = "all" + docker_shm_size: str = "2g" + docker_restart_policy: str = "on-failure:3" + + # Env + volumes + devices + docker_env_files: list[str] = field(default_factory=list) + docker_env: dict[str, str] = field(default_factory=dict) + docker_volumes: list[tuple[str, str, str]] = field( + default_factory=list + ) # (host, container, mode) + docker_devices: list[str] = field(default_factory=list) # --device args as strings + + # Security + docker_privileged: bool = False + + # Lifecycle / overrides + docker_rm: bool = False + docker_entrypoint: str | None = None + docker_command: list[str] | None = None + docker_extra_args: list[str] = field(default_factory=list) + + # Startup readiness + docker_startup_timeout: float = 120.0 + docker_poll_interval: float = 1.0 + + # Advanced + docker_bin: str = "docker" + + +def is_docker_module(module_class: type) -> bool: + """Check if a module class should run in Docker based on its default_config.""" + default_config = getattr(module_class, "default_config", None) + return default_config is not None and issubclass(default_config, DockerModuleConfig) + + +# Docker helpers + + +def _run(cmd: list[str], *, timeout: float | None = None) -> subprocess.CompletedProcess[str]: + logger.debug(f"exec: {' '.join(cmd)}") + return 
subprocess.run(cmd, capture_output=True, text=True, timeout=timeout, check=False) + + +def _docker_bin(cfg: DockerModuleConfig) -> str: + """Get docker binary path, defaulting to 'docker' if empty/None.""" + return cfg.docker_bin or "docker" + + +def _remove_container(cfg: DockerModuleConfig, name: str) -> None: + _run([_docker_bin(cfg), "rm", "-f", name], timeout=DOCKER_CMD_TIMEOUT) + + +def _is_container_running(cfg: DockerModuleConfig, name: str) -> bool: + r = _run( + [_docker_bin(cfg), "inspect", "-f", "{{.State.Running}}", name], + timeout=DOCKER_STATUS_TIMEOUT, + ) + return r.returncode == 0 and r.stdout.strip() == "true" + + +def _tail_logs(cfg: DockerModuleConfig, name: str, n: int = LOG_TAIL_LINES) -> str: + r = _run([_docker_bin(cfg), "logs", "--tail", str(n), name], timeout=DOCKER_CMD_TIMEOUT) + out = (r.stdout or "").rstrip() + err = (r.stderr or "").rstrip() + return out + ("\n" + err if err else "") + + +def _extract_module_config(cfg: DockerModuleConfig) -> dict[str, Any]: + """Extract JSON-serializable config fields for the container (excludes docker_* fields).""" + out: dict[str, Any] = {} + for k, v in cfg.__dict__.items(): + if k.startswith("docker_") or isinstance(v, type) or callable(v): + continue + try: + json.dumps(v) + out[k] = v + except (TypeError, ValueError): + logger.debug(f"Config field '{k}' not JSON-serializable, skipping") + return out + + +# Host-side Docker-backed Module handle + + +class DockerModule: + """ + Host-side handle for a module running inside Docker. + + Lifecycle: + - start(): launches container, waits for module ready via RPC + - stop(): stops container + - __getattr__: exposes RpcCall for @rpc methods on remote module + + Communication: All RPC happens via LCM multicast (requires --network=host). 
+ """ + + def __init__(self, module_class: type[Module], *args: Any, **kwargs: Any) -> None: + # Config + config_class = getattr(module_class, "default_config", DockerModuleConfig) + config = config_class(**kwargs) + + # Module info + self._module_class = module_class + self._config = config + self._args = args + self._kwargs = kwargs + self._running = False + self.remote_name = module_class.__name__ + self._container_name = ( + config.docker_container_name + or f"dimos_{module_class.__name__.lower()}_{os.getpid()}_{int(time.time())}" + ) + + # RPC setup + self.rpc = LCMRPC() + self.rpcs = set(module_class.rpcs.keys()) # type: ignore[attr-defined] + self.rpc_calls: list[str] = getattr(module_class, "rpc_calls", []) + self._unsub_fns: list[Callable[[], None]] = [] + self._bound_rpc_calls: dict[str, RpcCall] = {} + + # Build image if needed (but don't start - caller must call start() explicitly) + if not image_exists(config): + logger.info(f"Building {config.docker_image}") + build_image(config) + + def set_rpc_method(self, method: str, callable: RpcCall) -> None: + callable.set_rpc(self.rpc) + self._bound_rpc_calls[method] = callable + + def get_rpc_calls(self, *methods: str) -> RpcCall | tuple[RpcCall, ...]: + # Check all requested methods exist + missing = set(methods) - self._bound_rpc_calls.keys() + if missing: + raise ValueError(f"RPC methods not found: {missing}") + # Return single RpcCall or tuple + calls = tuple(self._bound_rpc_calls[m] for m in methods) + return calls[0] if len(calls) == 1 else calls + + def start(self) -> None: + if self._running: + return + + cfg = self._config + + # Prevent accidental kill of running container with same name + if _is_container_running(cfg, self._container_name): + raise RuntimeError( + f"Container '{self._container_name}' already running. " + "Choose a different container_name or stop the existing container." 
+ ) + _remove_container(cfg, self._container_name) + + cmd = self._build_docker_run_command() + logger.info(f"Starting docker container: {self._container_name}") + r = _run(cmd, timeout=DOCKER_RUN_TIMEOUT) + if r.returncode != 0: + raise RuntimeError( + f"Failed to start container.\nSTDOUT:\n{r.stdout}\nSTDERR:\n{r.stderr}" + ) + + self.rpc.start() + self._running = True + self._wait_for_ready() + + def stop(self) -> None: + """Gracefully stop the Docker container and clean up resources.""" + # Signal remote module, stop RPC, unsubscribe handlers (ignore failures) + with suppress(Exception): + if self._running: + self.rpc.call_nowait(f"{self.remote_name}/stop", ([], {})) + with suppress(Exception): + self.rpc.stop() + for unsub in self._unsub_fns: + with suppress(Exception): + unsub() + self._unsub_fns.clear() + + # Stop and remove container + _run([_docker_bin(self._config), "stop", self._container_name], timeout=DOCKER_STOP_TIMEOUT) + _remove_container(self._config, self._container_name) + self._running = False + logger.info(f"Stopped container: {self._container_name}") + + def status(self) -> dict[str, Any]: + cfg = self._config + return { + "module": self.remote_name, + "container_name": self._container_name, + "image": cfg.docker_image, + "running": bool(self._running and _is_container_running(cfg, self._container_name)), + } + + def tail_logs(self, n: int = 200) -> str: + return _tail_logs(self._config, self._container_name, n=n) + + def set_transport(self, stream_name: str, transport: Any) -> bool: + """Configure stream transport in container. 
Mirrors DaskModule.set_transport() for autoconnect().""" + topic = getattr(transport, "topic", None) + if topic is None: + return False + if hasattr(topic, "topic"): + topic = topic.topic + result, _ = self.rpc.call_sync( + f"{self.remote_name}/configure_stream", ([stream_name, str(topic)], {}) + ) + return bool(result) + + def __getattr__(self, name: str) -> Any: + if name in self.rpcs: + original_method = getattr(self._module_class, name, None) + return RpcCall(original_method, self.rpc, name, self.remote_name, self._unsub_fns, None) + raise AttributeError(f"{name} not found on {self._module_class.__name__}") + + # Docker command building (split into focused helpers for readability) + + def _build_docker_run_command(self) -> list[str]: + """Build the complete `docker run` command.""" + cfg = self._config + self._validate_config(cfg) + + cmd = [_docker_bin(cfg), "run", "-d"] + self._add_lifecycle_args(cmd, cfg) + self._add_network_args(cmd, cfg) + self._add_port_args(cmd, cfg) + self._add_resource_args(cmd, cfg) + self._add_security_args(cmd, cfg) + self._add_device_args(cmd, cfg) + self._add_label_args(cmd, cfg) + self._add_env_args(cmd, cfg) + self._add_volume_args(cmd, cfg) + self._add_entrypoint_args(cmd, cfg) + cmd.extend(cfg.docker_extra_args) + + cmd.append(cfg.docker_image) + cmd.extend(self._build_container_command(cfg)) + return cmd + + def _validate_config(self, cfg: DockerModuleConfig) -> None: + """Validate config before building command.""" + # Warn about network mode - LCM multicast requires host network + using_host_network = cfg.docker_network is None and cfg.docker_network_mode == "host" + if not using_host_network: + logger.warning( + "DockerModule not using host network. LCM multicast requires --network=host. " + "RPC communication may not work with bridge/custom networks." 
+ ) + + def _add_lifecycle_args(self, cmd: list[str], cfg: DockerModuleConfig) -> None: + """Add --rm and --name args.""" + if cfg.docker_rm: + cmd.append("--rm") + if cfg.docker_restart_policy and cfg.docker_restart_policy != "no": + logger.warning( + "--rm with docker_restart_policy is unusual; consider docker_restart_policy='no'." + ) + cmd.extend(["--name", self._container_name]) + + def _add_network_args(self, cmd: list[str], cfg: DockerModuleConfig) -> None: + """Add --network args.""" + if cfg.docker_network and cfg.docker_network_mode != "host": + logger.warning( + "Both 'docker_network' and 'docker_network_mode' set; using 'docker_network' and ignoring 'docker_network_mode'." + ) + if cfg.docker_network: + cmd.extend(["--network", cfg.docker_network]) + else: + cmd.append(f"--network={cfg.docker_network_mode}") + + def _add_port_args(self, cmd: list[str], cfg: DockerModuleConfig) -> None: + """Add -p port args. No-op for host network (ports auto-exposed).""" + if cfg.docker_network is None and cfg.docker_network_mode == "host": + return + # Non-host network: map Rerun ports + any custom ports + for port in (RERUN_GRPC_PORT, RERUN_WEB_PORT): + cmd.extend(["-p", f"{port}:{port}/tcp"]) + for host_port, container_port, proto in cfg.docker_ports: + cmd.extend(["-p", f"{host_port}:{container_port}/{proto or 'tcp'}"]) + + def _add_resource_args(self, cmd: list[str], cfg: DockerModuleConfig) -> None: + """Add --shm-size, --restart, --gpus args.""" + cmd.append(f"--shm-size={cfg.docker_shm_size}") + if cfg.docker_restart_policy: + cmd.append(f"--restart={cfg.docker_restart_policy}") + if cfg.docker_gpus: + cmd.extend(["--gpus", cfg.docker_gpus]) + + def _add_security_args(self, cmd: list[str], cfg: DockerModuleConfig) -> None: + """Add --privileged if enabled.""" + if cfg.docker_privileged: + cmd.append("--privileged") + + def _add_device_args(self, cmd: list[str], cfg: DockerModuleConfig) -> None: + """Add --device args.""" + for dev in cfg.docker_devices: + 
cmd.extend(["--device", dev]) + + def _add_label_args(self, cmd: list[str], cfg: DockerModuleConfig) -> None: + """Add --label args with DimOS defaults.""" + labels = dict(cfg.docker_labels) + labels.setdefault("dimos.kind", "module") + labels.setdefault("dimos.module", self._module_class.__name__) + for k, v in labels.items(): + cmd.extend(["--label", f"{k}={v}"]) + + def _add_env_args(self, cmd: list[str], cfg: DockerModuleConfig) -> None: + """Add -e and --env-file args.""" + cmd.extend(["-e", "PYTHONUNBUFFERED=1"]) + for env_file in cfg.docker_env_files: + cmd.extend(["--env-file", env_file]) + for k, v in cfg.docker_env.items(): + cmd.extend(["-e", f"{k}={v}"]) + + def _add_volume_args(self, cmd: list[str], cfg: DockerModuleConfig) -> None: + """Add -v volume args.""" + for host_path, container_path, mode in cfg.docker_volumes: + cmd.extend(["-v", f"{host_path}:{container_path}:{mode}"]) + + def _add_entrypoint_args(self, cmd: list[str], cfg: DockerModuleConfig) -> None: + """Add --entrypoint override.""" + if cfg.docker_entrypoint: + cmd.extend(["--entrypoint", cfg.docker_entrypoint]) + + def _build_container_command(self, cfg: DockerModuleConfig) -> list[str]: + """Build the container command (module runner or custom).""" + if cfg.docker_command: + return list(cfg.docker_command) + + module_path = f"{self._module_class.__module__}.{self._module_class.__name__}" + # Filter out docker-specific kwargs (paths, etc.) 
- only pass module config + kwargs = {"config": _extract_module_config(cfg)} + payload = {"module_path": module_path, "args": list(self._args), "kwargs": kwargs} + # DimOS base image entrypoint already runs "dimos.core.docker_runner run" + return ["--payload", json.dumps(payload, separators=(",", ":"))] + + def _wait_for_ready(self) -> None: + """Poll the module's RPC endpoint until ready, crashed, or timeout.""" + cfg = self._config + start_time = time.time() + + logger.info(f"Waiting for {self.remote_name} to be ready...") + + while (time.time() - start_time) < cfg.docker_startup_timeout: + if not _is_container_running(cfg, self._container_name): + logs = _tail_logs(cfg, self._container_name) + raise RuntimeError(f"Container died during startup:\n{logs}") + + try: + self.rpc.call_sync( + f"{self.remote_name}/start", ([], {}), rpc_timeout=RPC_READY_TIMEOUT + ) + elapsed = time.time() - start_time + logger.info(f"{self.remote_name} ready ({elapsed:.1f}s)") + return + except (TimeoutError, ConnectionError, OSError): + # Module not ready yet - retry after poll interval + time.sleep(cfg.docker_poll_interval) + + logs = _tail_logs(cfg, self._container_name) + raise RuntimeError( + f"Timeout waiting for {self.remote_name} after {cfg.docker_startup_timeout:.1f}s:\n{logs}" + ) + + +# Container-side runner + + +class StandaloneModuleRunner: + """Runs a module inside Docker container. 
Blocks until SIGTERM/SIGINT.""" + + def __init__(self, module_path: str, args: list[Any], kwargs: dict[str, Any]) -> None: + self._module_path = module_path + self._args = args + self._module: Module | None = None + self._shutdown = threading.Event() + + # Merge config fields into kwargs (Configurable creates config from these) + if "config" in kwargs: + config_dict = kwargs.pop("config") + kwargs = {**config_dict, **kwargs} + self._kwargs = kwargs + + def start(self) -> None: + mod_path, class_name = self._module_path.rsplit(".", 1) + mod = importlib.import_module(mod_path) + module_class = getattr(mod, class_name) + + self._module = module_class(*self._args, **self._kwargs) + logger.info(f"[docker runner] module constructed: {class_name}") + + def stop(self) -> None: + self._shutdown.set() + if self._module is not None: + try: + self._module.stop() + except Exception as e: + logger.error(f"[docker runner] error stopping module: {e}") + + def wait(self) -> None: + self._shutdown.wait() + + +def _install_signal_handlers(runner: StandaloneModuleRunner) -> None: + def shutdown(_sig: int, _frame: Any) -> None: + runner.stop() + + signal.signal(signal.SIGTERM, shutdown) + signal.signal(signal.SIGINT, shutdown) + + +def _cli_run(payload_json: str) -> None: + payload = json.loads(payload_json) + runner = StandaloneModuleRunner( + payload["module_path"], + payload.get("args", []), + payload.get("kwargs", {}), + ) + _install_signal_handlers(runner) + runner.start() + runner.wait() + + +def main(argv: list[str] | None = None) -> None: + parser = argparse.ArgumentParser(prog="dimos.core.docker_runner") + sub = parser.add_subparsers(dest="cmd", required=True) + + runp = sub.add_parser("run", help="Run a module inside a container") + runp.add_argument("--payload", required=True, help="JSON payload with module_path and config") + + args = parser.parse_args(argv) + + if args.cmd == "run": + _cli_run(args.payload) + return + + raise ValueError(f"Unknown cmd: {args.cmd}") + + +if 
__name__ == "__main__": + main() + + +__all__ = [ + "DockerModule", + "DockerModuleConfig", + "is_docker_module", +] diff --git a/dimos/core/global_config.py b/dimos/core/global_config.py index 205c38c361..080c2c8bbc 100644 --- a/dimos/core/global_config.py +++ b/dimos/core/global_config.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from functools import cached_property import re from typing import Literal, TypeAlias @@ -20,7 +19,7 @@ from dimos.mapping.occupancy.path_map import NavigationStrategy -ViewerBackend: TypeAlias = Literal["rerun-web", "rerun-native", "foxglove"] +ViewerBackend: TypeAlias = Literal["rerun", "rerun-web", "foxglove", "none"] def _get_all_numbers(s: str) -> list[float]: @@ -31,8 +30,6 @@ class GlobalConfig(BaseSettings): robot_ip: str | None = None simulation: bool = False replay: bool = False - rerun_enabled: bool = True - rerun_server_addr: str | None = None viewer_backend: ViewerBackend = "rerun-web" n_dask_workers: int = 2 memory_limit: str = "auto" @@ -48,15 +45,22 @@ class GlobalConfig(BaseSettings): robot_rotation_diameter: float = 0.6 planner_strategy: NavigationStrategy = "simple" planner_robot_speed: float | None = None + dask: bool = True model_config = SettingsConfigDict( env_file=".env", env_file_encoding="utf-8", extra="ignore", - frozen=True, ) - @cached_property + def update(self, **kwargs: object) -> None: + """Update config fields in place.""" + for key, value in kwargs.items(): + if not hasattr(self, key): + raise AttributeError(f"GlobalConfig has no field '{key}'") + setattr(self, key, value) + + @property def unitree_connection_type(self) -> str: if self.replay: return "replay" @@ -64,13 +68,16 @@ def unitree_connection_type(self) -> str: return "mujoco" return "webrtc" - @cached_property + @property def mujoco_start_pos_float(self) -> tuple[float, float]: x, y = _get_all_numbers(self.mujoco_start_pos) return (x, y) - @cached_property + 
@property def mujoco_camera_position_float(self) -> tuple[float, ...]: if self.mujoco_camera_position is None: return (-0.906, 0.008, 1.101, 4.931, 89.749, -46.378) return tuple(_get_all_numbers(self.mujoco_camera_position)) + + +global_config = GlobalConfig() diff --git a/dimos/core/introspection/blueprint/dot.py b/dimos/core/introspection/blueprint/dot.py index 4c27c6282d..c60ad06fc8 100644 --- a/dimos/core/introspection/blueprint/dot.py +++ b/dimos/core/introspection/blueprint/dot.py @@ -24,7 +24,7 @@ from collections import defaultdict from enum import Enum, auto -from dimos.core.blueprints import ModuleBlueprintSet +from dimos.core.blueprints import Blueprint from dimos.core.introspection.utils import ( GROUP_COLORS, TYPE_COLORS, @@ -54,13 +54,13 @@ class LayoutAlgo(Enum): def render( - blueprint_set: ModuleBlueprintSet, + blueprint_set: Blueprint, *, layout: set[LayoutAlgo] | None = None, - ignored_connections: set[tuple[str, str]] | None = None, + ignored_streams: set[tuple[str, str]] | None = None, ignored_modules: set[str] | None = None, ) -> str: - """Generate a hub-style DOT graph from a ModuleBlueprintSet. + """Generate a hub-style DOT graph from a Blueprint. This creates intermediate "type nodes" that represent data channels, connecting producers to consumers through a central hub node. @@ -68,7 +68,7 @@ def render( Args: blueprint_set: The blueprint set to visualize. layout: Set of layout algorithms to apply. Default is none (let graphviz decide). - ignored_connections: Set of (name, type_name) tuples to ignore. + ignored_streams: Set of (name, type_name) tuples to ignore. ignored_modules: Set of module names to ignore. 
Returns: @@ -77,8 +77,8 @@ def render( """ if layout is None: layout = set() - if ignored_connections is None: - ignored_connections = DEFAULT_IGNORED_CONNECTIONS + if ignored_streams is None: + ignored_streams = DEFAULT_IGNORED_CONNECTIONS if ignored_modules is None: ignored_modules = DEFAULT_IGNORED_MODULES @@ -91,14 +91,14 @@ def render( for bp in blueprint_set.blueprints: module_classes[bp.module.__name__] = bp.module - for conn in bp.connections: + for conn in bp.streams: # Apply remapping remapped_name = blueprint_set.remapping_map.get((bp.module, conn.name), conn.name) key = (remapped_name, conn.type) if conn.direction == "out": - producers[key].append(bp.module) + producers[key].append(bp.module) # type: ignore[index] else: - consumers[key].append(bp.module) + consumers[key].append(bp.module) # type: ignore[index] # Find all active channels (have both producers AND consumers) active_channels: dict[tuple[str, type], str] = {} # key -> color @@ -107,7 +107,7 @@ def render( type_name = type_.__name__ if key not in consumers: continue - if (name, type_name) in ignored_connections: + if (name, type_name) in ignored_streams: continue # Check if all modules are ignored valid_producers = [m for m in producers[key] if m.__name__ not in ignored_modules] @@ -224,12 +224,12 @@ def get_group(mod_class: type[Module]) -> str: def render_svg( - blueprint_set: ModuleBlueprintSet, + blueprint_set: Blueprint, output_path: str, *, layout: set[LayoutAlgo] | None = None, ) -> None: - """Generate an SVG file from a ModuleBlueprintSet using graphviz. + """Generate an SVG file from a Blueprint using graphviz. Args: blueprint_set: The blueprint set to visualize. 
diff --git a/dimos/core/introspection/svg.py b/dimos/core/introspection/svg.py index cdf87cc093..57b88834e0 100644 --- a/dimos/core/introspection/svg.py +++ b/dimos/core/introspection/svg.py @@ -19,13 +19,13 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from dimos.core.blueprints import ModuleBlueprintSet + from dimos.core.blueprints import Blueprint from dimos.core.introspection.blueprint.dot import LayoutAlgo from dimos.core.introspection.module.info import ModuleInfo def to_svg( - target: ModuleInfo | ModuleBlueprintSet, + target: ModuleInfo | Blueprint, output_path: str, *, layout: set[LayoutAlgo] | None = None, @@ -34,24 +34,24 @@ def to_svg( Dispatches to the appropriate renderer based on input type: - ModuleInfo -> module/dot.render_svg - - ModuleBlueprintSet -> blueprint/dot.render_svg + - Blueprint -> blueprint/dot.render_svg Args: - target: Either a ModuleInfo (single module) or ModuleBlueprintSet (blueprint graph). + target: Either a ModuleInfo (single module) or Blueprint (blueprint graph). output_path: Path to write the SVG file. layout: Layout algorithms (only used for blueprints). 
""" # Avoid circular imports by importing here - from dimos.core.blueprints import ModuleBlueprintSet + from dimos.core.blueprints import Blueprint from dimos.core.introspection.module.info import ModuleInfo if isinstance(target, ModuleInfo): from dimos.core.introspection.module import dot as module_dot module_dot.render_svg(target, output_path) - elif isinstance(target, ModuleBlueprintSet): + elif isinstance(target, Blueprint): from dimos.core.introspection.blueprint import dot as blueprint_dot blueprint_dot.render_svg(target, output_path, layout=layout) else: - raise TypeError(f"Expected ModuleInfo or ModuleBlueprintSet, got {type(target).__name__}") + raise TypeError(f"Expected ModuleInfo or Blueprint, got {type(target).__name__}") diff --git a/dimos/core/module.py b/dimos/core/module.py index 08e428d3c7..d6089a8f0a 100644 --- a/dimos/core/module.py +++ b/dimos/core/module.py @@ -11,10 +11,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import asyncio -from collections.abc import Callable from dataclasses import dataclass from functools import partial +import inspect +import json import sys import threading from typing import ( @@ -26,26 +29,39 @@ overload, ) +from typing_extensions import TypeVar as TypeVarExtension + if TYPE_CHECKING: + from collections.abc import Callable + from dimos.core.introspection.module import ModuleInfo + from dimos.core.rpc_client import RPCClient + +from typing import TypeVar from dask.distributed import Actor, get_worker +from langchain_core.tools import tool from reactivex.disposable import CompositeDisposable -from typing_extensions import TypeVar from dimos.core import colors from dimos.core.core import T, rpc from dimos.core.introspection.module import extract_module_info, render_module_io from dimos.core.resource import Resource -from dimos.core.rpc_client import RpcCall +from dimos.core.rpc_client import RpcCall # noqa: TC001 from dimos.core.stream import In, Out, RemoteIn, RemoteOut, Transport from dimos.protocol.rpc import LCMRPC, RPCSpec from dimos.protocol.service import Configurable # type: ignore[attr-defined] -from dimos.protocol.skill.skill import SkillContainer from dimos.protocol.tf import LCMTF, TFSpec from dimos.utils.generic import classproperty +@dataclass(frozen=True) +class SkillInfo: + class_name: str + func_name: str + args_schema: str + + def get_loop() -> tuple[asyncio.AbstractEventLoop, threading.Thread | None]: # we are actually instantiating a new loop here # to not interfere with an existing dask loop @@ -82,10 +98,10 @@ class ModuleConfig: frame_id: str | None = None -ModuleConfigT = TypeVar("ModuleConfigT", bound=ModuleConfig, default=ModuleConfig) +ModuleConfigT = TypeVarExtension("ModuleConfigT", bound=ModuleConfig, default=ModuleConfig) -class ModuleBase(Configurable[ModuleConfigT], SkillContainer, Resource): +class ModuleBase(Configurable[ModuleConfigT], Resource): _rpc: RPCSpec | None = None 
_tf: TFSpec | None = None _loop: asyncio.AbstractEventLoop | None = None @@ -126,16 +142,23 @@ def start(self) -> None: @rpc def stop(self) -> None: self._close_module() - super().stop() def _close_module(self) -> None: self._close_rpc() - if hasattr(self, "_loop") and self._loop_thread: - if self._loop_thread.is_alive(): - self._loop.call_soon_threadsafe(self._loop.stop) # type: ignore[union-attr] - self._loop_thread.join(timeout=2) + + # Save into local variables to avoid race when stopping concurrently + # (from RPC and worker shutdown) + loop_thread = getattr(self, "_loop_thread", None) + loop = getattr(self, "_loop", None) + + if loop_thread: + if loop_thread.is_alive(): + if loop: + loop.call_soon_threadsafe(loop.stop) + loop_thread.join(timeout=2) self._loop = None self._loop_thread = None + if hasattr(self, "_tf") and self._tf is not None: self._tf.stop() self._tf = None @@ -143,8 +166,7 @@ def _close_module(self) -> None: self._disposables.dispose() def _close_rpc(self) -> None: - # Using hasattr is needed because SkillCoordinator skips ModuleBase.__init__ and self.rpc is never set. 
- if hasattr(self, "rpc") and self.rpc: + if self.rpc: self.rpc.stop() # type: ignore[attr-defined] self.rpc = None # type: ignore[assignment] @@ -264,7 +286,7 @@ class _io_descriptor: """Descriptor that makes io() work on both class and instance.""" def __get__( - self, obj: "ModuleBase | None", objtype: type["ModuleBase"] + self, obj: ModuleBase | None, objtype: type[ModuleBase] ) -> Callable[[bool], str]: if obj is None: return objtype._io_class @@ -273,7 +295,7 @@ def __get__( io = _io_descriptor() @classmethod - def _module_info_class(cls) -> "ModuleInfo": + def _module_info_class(cls) -> ModuleInfo: """Class-level module_info() - returns ModuleInfo from annotations.""" hints = get_type_hints(cls) @@ -309,8 +331,8 @@ class _module_info_descriptor: """Descriptor that makes module_info() work on both class and instance.""" def __get__( - self, obj: "ModuleBase | None", objtype: type["ModuleBase"] - ) -> Callable[[], "ModuleInfo"]: + self, obj: ModuleBase | None, objtype: type[ModuleBase] + ) -> Callable[[], ModuleInfo]: if obj is None: return objtype._module_info_class # For instances, extract from actual streams @@ -326,9 +348,9 @@ def __get__( @classproperty def blueprint(self): # type: ignore[no-untyped-def] # Here to prevent circular imports. - from dimos.core.blueprints import create_module_blueprint + from dimos.core.blueprints import Blueprint - return partial(create_module_blueprint, self) # type: ignore[arg-type] + return partial(Blueprint.create, self) # type: ignore[arg-type] @rpc def get_rpc_method_names(self) -> list[str]: @@ -339,6 +361,10 @@ def set_rpc_method(self, method: str, callable: RpcCall) -> None: callable.set_rpc(self.rpc) # type: ignore[arg-type] self._bound_rpc_calls[method] = callable + @rpc + def set_module_ref(self, name: str, module_ref: RPCClient) -> None: + setattr(self, name, module_ref) + @overload def get_rpc_calls(self, method: str) -> RpcCall: ... 
@@ -354,8 +380,22 @@ def get_rpc_calls(self, *methods: str) -> RpcCall | tuple[RpcCall, ...]: # type result = tuple(self._bound_rpc_calls[m] for m in methods) return result[0] if len(result) == 1 else result - -class DaskModule(ModuleBase[ModuleConfigT]): + @rpc + def get_skills(self) -> list[SkillInfo]: + skills: list[SkillInfo] = [] + for name in dir(self): + attr = getattr(self, name) + if callable(attr) and hasattr(attr, "__skill__"): + schema = json.dumps(tool(attr).args_schema.model_json_schema()) + skills.append( + SkillInfo( + class_name=self.__class__.__name__, func_name=name, args_schema=schema + ) + ) + return skills + + +class Module(ModuleBase[ModuleConfigT]): ref: Actor worker: int @@ -438,6 +478,17 @@ def set_transport(self, stream_name: str, transport: Transport) -> bool: # type stream._transport = transport return True + @rpc + def configure_stream(self, stream_name: str, topic: str) -> bool: + """Configure a stream's transport by topic. Called by DockerModule for stream wiring.""" + from dimos.core.transport import pLCMTransport + + stream = getattr(self, stream_name, None) + if not isinstance(stream, (Out, In)): + return False + stream._transport = pLCMTransport(topic) + return True + # called from remote def connect_stream(self, input_name: str, remote_stream: RemoteOut[T]): # type: ignore[no-untyped-def] input_stream = getattr(self, input_name, None) @@ -454,5 +505,11 @@ def dask_register_subscriber(self, output_name: str, subscriber: RemoteIn[T]) -> getattr(self, output_name).transport.dask_register_subscriber(subscriber) -# global setting -Module = DaskModule +ModuleT = TypeVar("ModuleT", bound="Module") + + +def is_module_type(value: Any) -> bool: + try: + return inspect.isclass(value) and issubclass(value, Module) + except Exception: + return False diff --git a/dimos/core/module_coordinator.py b/dimos/core/module_coordinator.py index 9f38fabe05..c6d975731d 100644 --- a/dimos/core/module_coordinator.py +++ b/dimos/core/module_coordinator.py 
@@ -12,34 +12,43 @@ # See the License for the specific language governing permissions and # limitations under the License. +from concurrent.futures import ThreadPoolExecutor import time -from typing import TypeVar +from typing import TYPE_CHECKING, Any from dimos import core -from dimos.core import DimosCluster, Module -from dimos.core.global_config import GlobalConfig +from dimos.core import DimosCluster +from dimos.core.global_config import GlobalConfig, global_config +from dimos.core.module import Module, ModuleT from dimos.core.resource import Resource +from dimos.core.worker_manager import WorkerManager -T = TypeVar("T", bound="Module") +if TYPE_CHECKING: + from dimos.core.rpc_client import ModuleProxy -class ModuleCoordinator(Resource): - _client: DimosCluster | None = None +class ModuleCoordinator(Resource): # type: ignore[misc] + _client: DimosCluster | WorkerManager | None = None + _global_config: GlobalConfig _n: int | None = None _memory_limit: str = "auto" - _deployed_modules: dict[type[Module], Module] = {} + _deployed_modules: dict[type[Module], "ModuleProxy"] def __init__( self, n: int | None = None, - global_config: GlobalConfig | None = None, + cfg: GlobalConfig = global_config, ) -> None: - cfg = global_config or GlobalConfig() self._n = n if n is not None else cfg.n_dask_workers self._memory_limit = cfg.memory_limit + self._global_config = cfg + self._deployed_modules = {} def start(self) -> None: - self._client = core.start(self._n, self._memory_limit) + if self._global_config.dask: + self._client = core.start(self._n, self._memory_limit) + else: + self._client = WorkerManager() def stop(self) -> None: for module in reversed(self._deployed_modules.values()): @@ -47,20 +56,47 @@ def stop(self) -> None: self._client.close_all() # type: ignore[union-attr] - def deploy(self, module_class: type[T], *args, **kwargs) -> T: # type: ignore[no-untyped-def] + def deploy(self, module_class: type[ModuleT], *args, **kwargs) -> "ModuleProxy": # type: 
ignore[no-untyped-def] if not self._client: - raise ValueError("Not started") + raise ValueError("Trying to dimos.deploy before dask client has started") - module = self._client.deploy(module_class, *args, **kwargs) # type: ignore[attr-defined] + module: ModuleProxy = self._client.deploy(module_class, *args, **kwargs) # type: ignore[union-attr, attr-defined, assignment] self._deployed_modules[module_class] = module - return module # type: ignore[no-any-return] + return module + + def deploy_parallel( + self, module_specs: list[tuple[type[ModuleT], tuple[Any, ...], dict[str, Any]]] + ) -> list["ModuleProxy"]: + if not self._client: + raise ValueError("Not started") + + if isinstance(self._client, WorkerManager): + modules = self._client.deploy_parallel(module_specs) + for (module_class, _, _), module in zip(module_specs, modules, strict=True): + self._deployed_modules[module_class] = module # type: ignore[assignment] + return modules # type: ignore[return-value] + else: + return [ + self.deploy(module_class, *args, **kwargs) + for module_class, args, kwargs in module_specs + ] def start_all_modules(self) -> None: - for module in self._deployed_modules.values(): - module.start() + modules = list(self._deployed_modules.values()) + if isinstance(self._client, WorkerManager): + with ThreadPoolExecutor(max_workers=len(modules)) as executor: + list(executor.map(lambda m: m.start(), modules)) + else: + for module in modules: + module.start() + + module_list = list(self._deployed_modules.values()) + for module in modules: + if hasattr(module, "on_system_modules"): + module.on_system_modules(module_list) - def get_instance(self, module: type[T]) -> T | None: - return self._deployed_modules.get(module) # type: ignore[return-value] + def get_instance(self, module: type[ModuleT]) -> "ModuleProxy": + return self._deployed_modules.get(module) # type: ignore[return-value, no-any-return] def loop(self) -> None: try: diff --git a/dimos/core/native_module.py 
b/dimos/core/native_module.py new file mode 100644 index 0000000000..6a93e6453a --- /dev/null +++ b/dimos/core/native_module.py @@ -0,0 +1,296 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""NativeModule: blueprint-integrated wrapper for native (C/C++) executables. + +A NativeModule is a thin Python Module subclass that declares In/Out ports +for blueprint wiring but delegates all real work to a managed subprocess. +The native process receives its LCM topic names via CLI args and does +pub/sub directly on the LCM multicast bus. + +Example usage:: + + @dataclass(kw_only=True) + class MyConfig(NativeModuleConfig): + executable: str = "./build/my_module" + some_param: float = 1.0 + + class MyCppModule(NativeModule): + default_config = MyConfig + pointcloud: Out[PointCloud2] + cmd_vel: In[Twist] + + # Works with autoconnect, remappings, etc. 
+ autoconnect( + MyCppModule.blueprint(), + SomeConsumer.blueprint(), + ).build().loop() +""" + +from __future__ import annotations + +from dataclasses import dataclass, field, fields +import enum +import inspect +import json +import os +from pathlib import Path +import signal +import subprocess +import threading +from typing import IO, Any + +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig +from dimos.utils.logging_config import setup_logger + +logger = setup_logger() + + +class LogFormat(enum.Enum): + TEXT = "text" + JSON = "json" + + +@dataclass(kw_only=True) +class NativeModuleConfig(ModuleConfig): + """Configuration for a native (C/C++) subprocess module.""" + + executable: str + build_command: str | None = None + cwd: str | None = None + extra_args: list[str] = field(default_factory=list) + extra_env: dict[str, str] = field(default_factory=dict) + shutdown_timeout: float = 10.0 + log_format: LogFormat = LogFormat.TEXT + + # Override in subclasses to exclude fields from CLI arg generation + cli_exclude: frozenset[str] = frozenset() + + def to_cli_args(self) -> list[str]: + """Auto-convert subclass config fields to CLI args. + + Iterates fields defined on the concrete subclass (not NativeModuleConfig + or its parents) and converts them to ``["--name", str(value)]`` pairs. + Skips fields whose values are ``None`` and fields in ``cli_exclude``. 
+ """ + ignore_fields = {f.name for f in fields(NativeModuleConfig)} + args: list[str] = [] + for f in fields(self): + if f.name in ignore_fields: + continue + if f.name in self.cli_exclude: + continue + val = getattr(self, f.name) + if val is None: + continue + if isinstance(val, bool): + args.extend([f"--{f.name}", str(val).lower()]) + elif isinstance(val, list): + args.extend([f"--{f.name}", ",".join(str(v) for v in val)]) + else: + args.extend([f"--{f.name}", str(val)]) + return args + + +class NativeModule(Module[NativeModuleConfig]): + """Module that wraps a native executable as a managed subprocess. + + Subclass this, declare In/Out ports, and set ``default_config`` to a + :class:`NativeModuleConfig` subclass pointing at the executable. + + On ``start()``, the binary is launched with CLI args:: + + -- ... + + The native process should parse these args and pub/sub on the given + LCM topics directly. On ``stop()``, the process receives SIGTERM. + """ + + default_config: type[NativeModuleConfig] = NativeModuleConfig + _process: subprocess.Popen[bytes] | None = None + _watchdog: threading.Thread | None = None + _stopping: bool = False + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self._resolve_paths() + + @rpc + def start(self) -> None: + if self._process is not None and self._process.poll() is None: + logger.warning("Native process already running", pid=self._process.pid) + return + + self._maybe_build() + + topics = self._collect_topics() + + cmd = [self.config.executable] + for name, topic_str in topics.items(): + cmd.extend([f"--{name}", topic_str]) + cmd.extend(self.config.to_cli_args()) + cmd.extend(self.config.extra_args) + + env = {**os.environ, **self.config.extra_env} + cwd = self.config.cwd or str(Path(self.config.executable).resolve().parent) + + logger.info("Starting native process", cmd=" ".join(cmd), cwd=cwd) + self._process = subprocess.Popen( + cmd, + env=env, + cwd=cwd, + 
stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + logger.info("Native process started", pid=self._process.pid) + + self._stopping = False + self._watchdog = threading.Thread(target=self._watch_process, daemon=True) + self._watchdog.start() + + @rpc + def stop(self) -> None: + self._stopping = True + if self._process is not None and self._process.poll() is None: + logger.info("Stopping native process", pid=self._process.pid) + self._process.send_signal(signal.SIGTERM) + try: + self._process.wait(timeout=self.config.shutdown_timeout) + except subprocess.TimeoutExpired: + logger.warning( + "Native process did not exit, sending SIGKILL", pid=self._process.pid + ) + self._process.kill() + self._process.wait(timeout=5) + if self._watchdog is not None and self._watchdog is not threading.current_thread(): + self._watchdog.join(timeout=2) + self._watchdog = None + self._process = None + super().stop() + + def _watch_process(self) -> None: + """Block until the native process exits; trigger stop() if it crashed.""" + if self._process is None: + return + + stdout_t = self._start_reader(self._process.stdout, "info") + stderr_t = self._start_reader(self._process.stderr, "warning") + rc = self._process.wait() + stdout_t.join(timeout=2) + stderr_t.join(timeout=2) + + if self._stopping: + return + logger.error( + "Native process died unexpectedly", + pid=self._process.pid, + returncode=rc, + ) + self.stop() + + def _start_reader(self, stream: IO[bytes] | None, level: str) -> threading.Thread: + """Spawn a daemon thread that pipes a subprocess stream through the logger.""" + t = threading.Thread(target=self._read_log_stream, args=(stream, level), daemon=True) + t.start() + return t + + def _read_log_stream(self, stream: IO[bytes] | None, level: str) -> None: + if stream is None: + return + log_fn = getattr(logger, level) + for raw in stream: + line = raw.decode("utf-8", errors="replace").rstrip() + if not line: + continue + if self.config.log_format == LogFormat.JSON: + try: + 
data = json.loads(line) + event = data.pop("event", line) + log_fn(event, **data) + continue + except (json.JSONDecodeError, TypeError): + logger.warning("malformed JSON from native module", raw=line) + log_fn(line, pid=self._process.pid if self._process else None) + stream.close() + + def _resolve_paths(self) -> None: + """Resolve relative ``cwd`` and ``executable`` against the subclass's source file.""" + if self.config.cwd is not None and not Path(self.config.cwd).is_absolute(): + source_file = inspect.getfile(type(self)) + base_dir = Path(source_file).resolve().parent + self.config.cwd = str(base_dir / self.config.cwd) + if not Path(self.config.executable).is_absolute() and self.config.cwd is not None: + self.config.executable = str(Path(self.config.cwd) / self.config.executable) + + def _maybe_build(self) -> None: + """Run ``build_command`` if the executable does not exist.""" + exe = Path(self.config.executable) + if exe.exists(): + return + if self.config.build_command is None: + raise FileNotFoundError( + f"Executable not found: {exe}. " + "Set build_command in config to auto-build, or build it manually." 
+ ) + logger.info( + "Executable not found, running build", + executable=str(exe), + build_command=self.config.build_command, + ) + proc = subprocess.Popen( + self.config.build_command, + shell=True, + cwd=self.config.cwd, + env={**os.environ, **self.config.extra_env}, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdout, stderr = proc.communicate() + for line in stdout.decode("utf-8", errors="replace").splitlines(): + if line.strip(): + logger.info(line) + for line in stderr.decode("utf-8", errors="replace").splitlines(): + if line.strip(): + logger.warning(line) + if proc.returncode != 0: + raise RuntimeError( + f"Build command failed (exit {proc.returncode}): {self.config.build_command}" + ) + if not exe.exists(): + raise FileNotFoundError( + f"Build command succeeded but executable still not found: {exe}" + ) + + def _collect_topics(self) -> dict[str, str]: + """Extract LCM topic strings from blueprint-assigned stream transports.""" + topics: dict[str, str] = {} + for name in list(self.inputs) + list(self.outputs): + stream = getattr(self, name, None) + if stream is None: + continue + transport = getattr(stream, "_transport", None) + if transport is None: + continue + topic = getattr(transport, "topic", None) + if topic is not None: + topics[name] = str(topic) + return topics + + +__all__ = [ + "LogFormat", + "NativeModule", + "NativeModuleConfig", +] diff --git a/dimos/core/rpc_client.py b/dimos/core/rpc_client.py index a3d1a2da0c..30cc4f3017 100644 --- a/dimos/core/rpc_client.py +++ b/dimos/core/rpc_client.py @@ -13,17 +13,16 @@ # limitations under the License. 
from collections.abc import Callable -from typing import Any +from typing import TYPE_CHECKING, Any -from dimos.protocol.rpc import LCMRPC +from dimos.protocol.rpc import LCMRPC, RPCSpec from dimos.utils.logging_config import setup_logger logger = setup_logger() class RpcCall: - _original_method: Callable[..., Any] | None - _rpc: LCMRPC | None + _rpc: RPCSpec | None _name: str _remote_name: str _unsub_fns: list # type: ignore[type-arg] @@ -32,13 +31,12 @@ class RpcCall: def __init__( self, original_method: Callable[..., Any] | None, - rpc: LCMRPC, + rpc: RPCSpec, name: str, remote_name: str, unsub_fns: list, # type: ignore[type-arg] stop_client: Callable[[], None] | None = None, ) -> None: - self._original_method = original_method self._rpc = rpc self._name = name self._remote_name = remote_name @@ -50,7 +48,7 @@ def __init__( self.__name__ = original_method.__name__ self.__qualname__ = f"{self.__class__.__name__}.{original_method.__name__}" - def set_rpc(self, rpc: LCMRPC) -> None: + def set_rpc(self, rpc: RPCSpec) -> None: self._rpc = rpc def __call__(self, *args, **kwargs): # type: ignore[no-untyped-def] @@ -71,10 +69,10 @@ def __call__(self, *args, **kwargs): # type: ignore[no-untyped-def] return result def __getstate__(self): # type: ignore[no-untyped-def] - return (self._original_method, self._name, self._remote_name) + return (self._name, self._remote_name) def __setstate__(self, state) -> None: # type: ignore[no-untyped-def] - self._original_method, self._name, self._remote_name = state + self._name, self._remote_name = state self._unsub_fns = [] self._rpc = None self._stop_rpc_client = None @@ -139,3 +137,14 @@ def __getattr__(self, name: str): # type: ignore[no-untyped-def] # return super().__getattr__(name) # Try to avoid recursion by directly accessing attributes that are known return self.actor_instance.__getattr__(name) + + +if TYPE_CHECKING: + from dimos.core.module import Module + + # the class below is only ever used for type hinting + # why? 
because the RPCClient instance is going to have all the methods of a Module + # but those methods/attributes are super dynamic, so the type hints can't figure that out + class ModuleProxy(RPCClient, Module): # type: ignore[misc] + def start(self) -> None: ... + def stop(self) -> None: ... diff --git a/dimos/core/skill_module.py b/dimos/core/skill_module.py deleted file mode 100644 index fa5abd381f..0000000000 --- a/dimos/core/skill_module.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from dimos.core.module import Module -from dimos.core.rpc_client import RpcCall, RPCClient -from dimos.protocol.skill.skill import rpc - - -class SkillModule(Module): - """Use this module if you want to auto-register skills to an AgentSpec.""" - - @rpc - def set_AgentSpec_register_skills(self, callable: RpcCall) -> None: - callable.set_rpc(self.rpc) # type: ignore[arg-type] - callable(RPCClient(self, self.__class__)) - - @rpc - def set_MCPModule_register_skills(self, callable: RpcCall) -> None: - callable.set_rpc(self.rpc) # type: ignore[arg-type] - callable(RPCClient(self, self.__class__)) - - def __getstate__(self) -> None: - pass - - def __setstate__(self, _state) -> None: # type: ignore[no-untyped-def] - pass diff --git a/dimos/core/stream.py b/dimos/core/stream.py index 64a1e0edce..77edf45417 100644 --- a/dimos/core/stream.py +++ b/dimos/core/stream.py @@ -69,7 +69,7 @@ def _subscribe(observer, scheduler=None): # type: ignore[no-untyped-def] # default return is backpressured because most # use cases will want this by default - def observable(self): # type: ignore[no-untyped-def] + def observable(self) -> Observable[T]: return backpressure(self.pure_observable()) @@ -255,7 +255,7 @@ def subscribe(self, cb: Callable[[T], Any]) -> Callable[[], None]: # representation of input outside of module -# used for configuring connections, setting a transport +# used for configuring streams, setting a transport class RemoteIn(RemoteStream[T]): def connect(self, other: RemoteOut[T]) -> None: return self.owner.connect_stream(self.name, other).result() # type: ignore[no-any-return, union-attr] diff --git a/dimos/core/test_blueprints.py b/dimos/core/test_blueprints.py index a8b9354f70..09144054c1 100644 --- a/dimos/core/test_blueprints.py +++ b/dimos/core/test_blueprints.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Protocol + import pytest from dimos.core._test_future_annotations_helper import ( @@ -20,24 +22,24 @@ FutureModuleOut, ) from dimos.core.blueprints import ( - ModuleBlueprint, - ModuleBlueprintSet, - ModuleConnection, - _make_module_blueprint, + Blueprint, + StreamRef, + _BlueprintAtom, autoconnect, ) from dimos.core.core import rpc -from dimos.core.global_config import GlobalConfig from dimos.core.module import Module from dimos.core.module_coordinator import ModuleCoordinator from dimos.core.rpc_client import RpcCall from dimos.core.stream import In, Out from dimos.core.transport import LCMTransport +from dimos.msgs.sensor_msgs import Image from dimos.protocol import pubsub +from dimos.spec.utils import Spec # Disable Rerun for tests (prevents viewer spawn and gRPC flush errors) _BUILD_WITHOUT_RERUN = { - "global_config": GlobalConfig(rerun_enabled=False, viewer_backend="foxglove"), + "cli_config_overrides": {"viewer_backend": "none"}, } @@ -66,6 +68,14 @@ class Data3: pass +class SourceModule(Module): + color_image: Out[Data1] + + +class TargetModule(Module): + remapped_data: In[Data1] + + class ModuleA(Module): data1: Out[Data1] data2: Out[Data2] @@ -104,13 +114,14 @@ class ModuleC(Module): def test_get_connection_set() -> None: - assert _make_module_blueprint(CatModule, args=("arg1"), kwargs={"k": "v"}) == ModuleBlueprint( + assert _BlueprintAtom.create(CatModule, args=("arg1",), kwargs={"k": "v"}) == _BlueprintAtom( module=CatModule, - connections=( - ModuleConnection(name="pet_cat", type=Petting, direction="in"), - ModuleConnection(name="scratches", type=Scratch, direction="out"), + streams=( + StreamRef(name="pet_cat", type=Petting, direction="in"), + StreamRef(name="scratches", type=Scratch, direction="out"), ), - args=("arg1"), + module_refs=(), + args=("arg1",), kwargs={"k": "v"}, ) @@ -118,24 +129,26 @@ def test_get_connection_set() -> None: def test_autoconnect() -> None: blueprint_set = autoconnect(module_a(), module_b()) - assert 
blueprint_set == ModuleBlueprintSet( + assert blueprint_set == Blueprint( blueprints=( - ModuleBlueprint( + _BlueprintAtom( module=ModuleA, - connections=( - ModuleConnection(name="data1", type=Data1, direction="out"), - ModuleConnection(name="data2", type=Data2, direction="out"), + streams=( + StreamRef(name="data1", type=Data1, direction="out"), + StreamRef(name="data2", type=Data2, direction="out"), ), + module_refs=(), args=(), kwargs={}, ), - ModuleBlueprint( + _BlueprintAtom( module=ModuleB, - connections=( - ModuleConnection(name="data1", type=Data1, direction="in"), - ModuleConnection(name="data2", type=Data2, direction="in"), - ModuleConnection(name="data3", type=Data3, direction="out"), + streams=( + StreamRef(name="data1", type=Data1, direction="in"), + StreamRef(name="data2", type=Data2, direction="in"), + StreamRef(name="data3", type=Data3, direction="out"), ), + module_refs=(), args=(), kwargs={}, ), @@ -212,7 +225,7 @@ class ModuleB(Module): pytest.fail("Expected ValueError to be raised") except ValueError as e: error_message = str(e) - assert "Blueprint cannot start because there are conflicting connections" in error_message + assert "Blueprint cannot start because there are conflicting streams" in error_message assert "'shared_data' has conflicting types" in error_message assert "Data1 in ModuleA" in error_message assert "Data2 in ModuleB" in error_message @@ -234,7 +247,7 @@ class Module2(Module): pytest.fail("Expected ValueError to be raised") except ValueError as e: error_message = str(e) - assert "Blueprint cannot start because there are conflicting connections" in error_message + assert "Blueprint cannot start because there are conflicting streams" in error_message assert "'sensor_data' has conflicting types" in error_message assert "'control_signal' has conflicting types" in error_message @@ -275,16 +288,9 @@ class Module3(Module): @pytest.mark.integration def test_remapping() -> None: - """Test that remapping connections works correctly.""" 
+ """Test that remapping streams works correctly.""" pubsub.lcm.autoconf() - # Define test modules with connections that will be remapped - class SourceModule(Module): - color_image: Out[Data1] # Will be remapped to 'remapped_data' - - class TargetModule(Module): - remapped_data: In[Data1] # Receives the remapped connection - # Create blueprint with remapping blueprint_set = autoconnect( SourceModule.blueprint(), @@ -304,7 +310,7 @@ class TargetModule(Module): # The original name shouldn't be in the name types since it's remapped assert ("color_image", Data1) not in blueprint_set._all_name_types - # Build and verify connections work + # Build and verify streams work coordinator = blueprint_set.build(**_BUILD_WITHOUT_RERUN) try: @@ -335,22 +341,18 @@ def test_future_annotations_support() -> None: """Test that modules using `from __future__ import annotations` work correctly. PEP 563 (future annotations) stores annotations as strings instead of actual types. - This test verifies that _make_module_blueprint properly resolves string annotations + This test verifies that _BlueprintAtom.create properly resolves string annotations to the actual In/Out types. 
""" - # Test that connections are properly extracted from modules with future annotations - out_blueprint = _make_module_blueprint(FutureModuleOut, args=(), kwargs={}) - assert len(out_blueprint.connections) == 1 - assert out_blueprint.connections[0] == ModuleConnection( - name="data", type=FutureData, direction="out" - ) + # Test that streams are properly extracted from modules with future annotations + out_blueprint = _BlueprintAtom.create(FutureModuleOut, args=(), kwargs={}) + assert len(out_blueprint.streams) == 1 + assert out_blueprint.streams[0] == StreamRef(name="data", type=FutureData, direction="out") - in_blueprint = _make_module_blueprint(FutureModuleIn, args=(), kwargs={}) - assert len(in_blueprint.connections) == 1 - assert in_blueprint.connections[0] == ModuleConnection( - name="data", type=FutureData, direction="in" - ) + in_blueprint = _BlueprintAtom.create(FutureModuleIn, args=(), kwargs={}) + assert len(in_blueprint.streams) == 1 + assert in_blueprint.streams[0] == StreamRef(name="data", type=FutureData, direction="in") @pytest.mark.integration @@ -377,3 +379,127 @@ def test_future_annotations_autoconnect() -> None: finally: coordinator.stop() + + +# ModuleRef / RPC tests +class CalculatorSpec(Spec, Protocol): + @rpc + def compute1(self, a: int, b: int) -> int: ... + + @rpc + def compute2(self, a: float, b: float) -> float: ... + + +class Calculator1(Module): + @rpc + def compute1(self, a: int, b: int) -> int: + return a + b + + @rpc + def compute2(self, a: float, b: float) -> float: + return a + b + + @rpc + def start(self) -> None: ... + + @rpc + def stop(self) -> None: ... + + +class Calculator2(Module): + @rpc + def compute1(self, a: int, b: int) -> int: + return a * b + + @rpc + def compute2(self, a: float, b: float) -> float: + return a * b + + @rpc + def start(self) -> None: ... + + @rpc + def stop(self) -> None: ... 
+ + +# link to a specific module +class Mod1(Module): + stream1: In[Image] + calc: Calculator1 + + @rpc + def start(self) -> None: + _ = self.calc.compute1 + + @rpc + def stop(self) -> None: ... + + +# link to any module that implements a spec (Autoconnect will handle it) +class Mod2(Module): + stream1: In[Image] + calc: CalculatorSpec + + @rpc + def start(self) -> None: + _ = self.calc.compute1 + + @rpc + def stop(self) -> None: ... + + +@pytest.mark.integration +def test_module_ref_direct() -> None: + coordinator = autoconnect( + Calculator1.blueprint(), + Mod1.blueprint(), + ).build(**_BUILD_WITHOUT_RERUN) + + try: + mod1 = coordinator.get_instance(Mod1) + assert mod1 is not None + assert mod1.calc.compute1(2, 3) == 5 + assert mod1.calc.compute2(1.5, 2.5) == 4.0 + finally: + coordinator.stop() + + +@pytest.mark.integration +def test_module_ref_spec() -> None: + coordinator = autoconnect( + Calculator1.blueprint(), + Mod2.blueprint(), + ).build(**_BUILD_WITHOUT_RERUN) + + try: + mod2 = coordinator.get_instance(Mod2) + assert mod2 is not None + assert mod2.calc.compute1(4, 5) == 9 + assert mod2.calc.compute2(3.0, 0.5) == 3.5 + finally: + coordinator.stop() + + +@pytest.mark.integration +def test_module_ref_remap_ambiguous() -> None: + coordinator = ( + autoconnect( + Calculator1.blueprint(), + Calculator2.blueprint(), + Mod2.blueprint(), + ) + .remappings( + [ + (Mod2, "calc", Calculator1), + ] + ) + .build(**_BUILD_WITHOUT_RERUN) + ) + + try: + mod2 = coordinator.get_instance(Mod2) + assert mod2 is not None + assert mod2.calc.compute1(2, 3) == 5 + assert mod2.calc.compute2(2.0, 3.0) == 5.0 + finally: + coordinator.stop() diff --git a/dimos/core/test_core.py b/dimos/core/test_core.py index fde1cf5df2..c229659b84 100644 --- a/dimos/core/test_core.py +++ b/dimos/core/test_core.py @@ -24,12 +24,11 @@ Out, pLCMTransport, rpc, - start, ) from dimos.core.testing import MockRobotClient, dimos from dimos.msgs.geometry_msgs import Vector3 from dimos.msgs.sensor_msgs import 
PointCloud2 -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.robot.unitree.type.odometry import Odometry assert dimos @@ -87,7 +86,7 @@ def test_classmethods() -> None: # Check that we have the expected RPC methods assert "navigate_to" in class_rpcs, "navigate_to should be in rpcs" assert "start" in class_rpcs, "start should be in rpcs" - assert len(class_rpcs) == 8 + assert len(class_rpcs) == 9 # Check that the values are callable assert callable(class_rpcs["navigate_to"]), "navigate_to should be callable" @@ -138,8 +137,3 @@ def test_basic_deployment(dimos) -> None: assert nav.lidar_msg_count >= 8 dimos.shutdown() - - -if __name__ == "__main__": - client = start(1) # single process for CI memory - test_deployment(client) diff --git a/dimos/core/test_modules.py b/dimos/core/test_modules.py index 7bd995c857..d96b58af5f 100644 --- a/dimos/core/test_modules.py +++ b/dimos/core/test_modules.py @@ -89,13 +89,10 @@ def is_module_subclass( target_classes = { "Module", "ModuleBase", - "DaskModule", "dimos.core.Module", "dimos.core.ModuleBase", - "dimos.core.DaskModule", "dimos.core.module.Module", "dimos.core.module.ModuleBase", - "dimos.core.module.DaskModule", } def find_qualified_name(base: str, context_module: str | None = None) -> str: @@ -291,7 +288,7 @@ def get_all_module_subclasses(): filtered_results = [] for class_name, filepath, has_start, has_stop, forbidden_methods in results: # Skip base module classes themselves - if class_name in ("Module", "ModuleBase", "DaskModule", "SkillModule"): + if class_name in ("Module", "ModuleBase"): continue # Skip test-only modules (those defined in test_ files) diff --git a/dimos/core/test_native_module.py b/dimos/core/test_native_module.py new file mode 100644 index 0000000000..8af63b0bf4 --- /dev/null +++ b/dimos/core/test_native_module.py @@ -0,0 +1,176 @@ +# Copyright 2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for NativeModule: blueprint wiring, topic collection, CLI arg generation. + +Every test launches the real native_echo.py subprocess via blueprint.build(). +The echo script writes received CLI args to a temp file for assertions. +""" + +from dataclasses import dataclass +import json +from pathlib import Path +import time + +import pytest + +from dimos.core import DimosCluster +from dimos.core.blueprints import autoconnect +from dimos.core.core import rpc +from dimos.core.module import Module +from dimos.core.native_module import LogFormat, NativeModule, NativeModuleConfig +from dimos.core.stream import In, Out +from dimos.core.transport import LCMTransport +from dimos.msgs.geometry_msgs.Twist import Twist +from dimos.msgs.sensor_msgs.Imu import Imu +from dimos.msgs.sensor_msgs.PointCloud2 import PointCloud2 + +_ECHO = str(Path(__file__).parent / "tests" / "native_echo.py") + + +@pytest.fixture +def args_file(tmp_path: Path) -> str: + """Temp file path where native_echo.py writes the CLI args it received.""" + return str(tmp_path / "native_echo_args.json") + + +def read_json_file(path: str) -> dict[str, str]: + """Read and parse --key value pairs from the echo output file.""" + raw: list[str] = json.loads(Path(path).read_text()) + result = {} + i = 0 + while i < len(raw): + if raw[i].startswith("--") and i + 1 < len(raw): + result[raw[i][2:]] = raw[i + 1] + i += 2 + else: + i += 1 + return result + + 
+@dataclass(kw_only=True) +class StubNativeConfig(NativeModuleConfig): + executable: str = _ECHO + log_format: LogFormat = LogFormat.TEXT + output_file: str | None = None + die_after: float | None = None + some_param: float = 1.5 + + +class StubNativeModule(NativeModule): + default_config = StubNativeConfig + pointcloud: Out[PointCloud2] + imu: Out[Imu] + cmd_vel: In[Twist] + + +class StubConsumer(Module): + pointcloud: In[PointCloud2] + imu: In[Imu] + + @rpc + def start(self) -> None: + pass + + +class StubProducer(Module): + cmd_vel: Out[Twist] + + @rpc + def start(self) -> None: + pass + + +def test_process_crash_triggers_stop() -> None: + """When the native process dies unexpectedly, the watchdog calls stop().""" + mod = StubNativeModule(die_after=0.2) + mod.pointcloud.transport = LCMTransport("/pc", PointCloud2) + mod.start() + + assert mod._process is not None + pid = mod._process.pid + + # Wait for the process to die and the watchdog to call stop() + for _ in range(30): + time.sleep(0.1) + if mod._process is None: + break + + assert mod._process is None, f"Watchdog did not clean up after process {pid} died" + + +def test_manual(dimos_cluster: DimosCluster, args_file: str) -> None: + native_module = dimos_cluster.deploy( # type: ignore[attr-defined] + StubNativeModule, + some_param=2.5, + output_file=args_file, + ) + + native_module.pointcloud.transport = LCMTransport("/my/custom/lidar", PointCloud2) + native_module.cmd_vel.transport = LCMTransport("/cmd_vel", Twist) + native_module.start() + time.sleep(1) + native_module.stop() + + assert read_json_file(args_file) == { + "cmd_vel": "/cmd_vel#geometry_msgs.Twist", + "pointcloud": "/my/custom/lidar#sensor_msgs.PointCloud2", + "output_file": args_file, + "some_param": "2.5", + } + + +@pytest.mark.heavy +def test_autoconnect(args_file: str) -> None: + """autoconnect passes correct topic args to the native subprocess.""" + blueprint = autoconnect( + StubNativeModule.blueprint( + some_param=2.5, + 
output_file=args_file, + ), + StubConsumer.blueprint(), + StubProducer.blueprint(), + ).transports( + { + ("pointcloud", PointCloud2): LCMTransport("/my/custom/lidar", PointCloud2), + }, + ) + + coordinator = blueprint.global_config(viewer_backend="none").build() + try: + # Validate blueprint wiring: all modules deployed + native = coordinator.get_instance(StubNativeModule) # type: ignore[type-var] + consumer = coordinator.get_instance(StubConsumer) + producer = coordinator.get_instance(StubProducer) + assert native is not None + assert consumer is not None + assert producer is not None + + # Out→In topics match between connected modules + assert native.pointcloud.transport.topic == consumer.pointcloud.transport.topic + assert native.imu.transport.topic == consumer.imu.transport.topic + assert producer.cmd_vel.transport.topic == native.cmd_vel.transport.topic + + # Custom transport was applied + assert native.pointcloud.transport.topic.topic == "/my/custom/lidar" + finally: + coordinator.stop() + + assert read_json_file(args_file) == { + "cmd_vel": "/cmd_vel#geometry_msgs.Twist", + "pointcloud": "/my/custom/lidar#sensor_msgs.PointCloud2", + "imu": "/imu#sensor_msgs.Imu", + "output_file": args_file, + "some_param": "2.5", + } diff --git a/dimos/core/test_stream.py b/dimos/core/test_stream.py index b963022c50..836f879b67 100644 --- a/dimos/core/test_stream.py +++ b/dimos/core/test_stream.py @@ -25,7 +25,7 @@ ) from dimos.core.testing import MockRobotClient, dimos from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.robot.unitree.type.odometry import Odometry assert dimos diff --git a/dimos/core/test_worker.py b/dimos/core/test_worker.py new file mode 100644 index 0000000000..98a7c5782d --- /dev/null +++ b/dimos/core/test_worker.py @@ -0,0 +1,153 @@ +# Copyright 2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from dimos.core import In, Module, Out, rpc +from dimos.core.worker_manager import WorkerManager +from dimos.msgs.geometry_msgs import Vector3 + + +class SimpleModule(Module): + output: Out[Vector3] + input: In[Vector3] + + counter: int = 0 + + @rpc + def start(self) -> None: + pass + + @rpc + def increment(self) -> int: + self.counter += 1 + return self.counter + + @rpc + def get_counter(self) -> int: + return self.counter + + +class AnotherModule(Module): + value: int = 100 + + @rpc + def start(self) -> None: + pass + + @rpc + def add(self, n: int) -> int: + self.value += n + return self.value + + @rpc + def get_value(self) -> int: + return self.value + + +class ThirdModule(Module): + multiplier: int = 1 + + @rpc + def start(self) -> None: + pass + + @rpc + def multiply(self, n: int) -> int: + self.multiplier *= n + return self.multiplier + + @rpc + def get_multiplier(self) -> int: + return self.multiplier + + +@pytest.fixture +def worker_manager(): + manager = WorkerManager() + try: + yield manager + finally: + manager.close_all() + + +@pytest.mark.integration +def test_worker_manager_basic(worker_manager): + module = worker_manager.deploy(SimpleModule) + module.start() + + result = module.increment() + assert result == 1 + + result = module.increment() + assert result == 2 + + result = module.get_counter() + assert result == 2 + + module.stop() + + +@pytest.mark.integration +def 
test_worker_manager_multiple_different_modules(worker_manager): + module1 = worker_manager.deploy(SimpleModule) + module2 = worker_manager.deploy(AnotherModule) + + module1.start() + module2.start() + + # Each module has its own state + module1.increment() + module1.increment() + module2.add(10) + + assert module1.get_counter() == 2 + assert module2.get_value() == 110 + + # Stop modules to clean up threads + module1.stop() + module2.stop() + + +@pytest.mark.integration +def test_worker_manager_parallel_deployment(worker_manager): + modules = worker_manager.deploy_parallel( + [ + (SimpleModule, (), {}), + (AnotherModule, (), {}), + (ThirdModule, (), {}), + ] + ) + + assert len(modules) == 3 + module1, module2, module3 = modules + + # Start all modules + module1.start() + module2.start() + module3.start() + + # Each module has its own state + module1.increment() + module2.add(50) + module3.multiply(5) + + assert module1.get_counter() == 1 + assert module2.get_value() == 150 + assert module3.get_multiplier() == 5 + + # Stop modules + module1.stop() + module2.stop() + module3.stop() diff --git a/dimos/core/testing.py b/dimos/core/testing.py index 38774ef327..dee25aaa45 100644 --- a/dimos/core/testing.py +++ b/dimos/core/testing.py @@ -20,8 +20,8 @@ from dimos.core import In, Module, Out, rpc, start from dimos.msgs.geometry_msgs import Vector3 from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.robot.unitree_webrtc.type.lidar import pointcloud2_from_webrtc_lidar -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.robot.unitree.type.lidar import pointcloud2_from_webrtc_lidar +from dimos.robot.unitree.type.odometry import Odometry from dimos.utils.testing import SensorReplay @@ -79,6 +79,5 @@ def odomloop(self) -> None: self.odometry.publish(odom) lidarmsg = next(lidariter) - lidarmsg.pubtime = time.perf_counter() # type: ignore[union-attr] self.lidar.publish(lidarmsg) time.sleep(0.1) diff --git a/dimos/core/tests/native_echo.py 
b/dimos/core/tests/native_echo.py new file mode 100755 index 0000000000..6723b0b0d1 --- /dev/null +++ b/dimos/core/tests/native_echo.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python3 +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Echo binary for NativeModule tests. + +Parses --output_file and --die_after from CLI args, writes remaining +args as JSON to the output file, then waits for SIGTERM. +""" + +import argparse +import json +import signal +import sys +import time + +print("this message goes to stdout") +print("this message goes to stderr", file=sys.stderr) + +signal.signal(signal.SIGTERM, lambda *_: sys.exit(0)) + +parser = argparse.ArgumentParser() +parser.add_argument("--output_file", default=None) +parser.add_argument("--die_after", type=float, default=None) +args, _ = parser.parse_known_args() + +if args.output_file: + with open(args.output_file, "w") as f: + json.dump(sys.argv[1:], f) + +print("my args:", json.dumps(sys.argv[1:])) + +if args.die_after is not None: + time.sleep(args.die_after) + sys.exit(42) + +signal.pause() diff --git a/dimos/core/transport.py b/dimos/core/transport.py index 4c1b19ee2e..2586706feb 100644 --- a/dimos/core/transport.py +++ b/dimos/core/transport.py @@ -14,29 +14,53 @@ from __future__ import annotations -from typing import Any, TypeVar - -import dimos.core.colors as colors - -T = TypeVar("T") - +import threading from typing import ( TYPE_CHECKING, + Any, TypeVar, ) +import dimos.core.colors 
as colors from dimos.core.stream import In, Out, Stream, Transport from dimos.msgs.protocol import DimosMsg -from dimos.protocol.pubsub.jpeg_shm import JpegSharedMemory -from dimos.protocol.pubsub.lcmpubsub import LCM, JpegLCM, PickleLCM, Topic as LCMTopic -from dimos.protocol.pubsub.rospubsub import DimosROS, ROSTopic -from dimos.protocol.pubsub.shmpubsub import BytesSharedMemory, PickleSharedMemory + +try: + import cyclonedds as _cyclonedds # noqa: F401 + + DDS_AVAILABLE = True +except ImportError: + DDS_AVAILABLE = False +from dimos.protocol.pubsub.impl.jpeg_shm import JpegSharedMemory +from dimos.protocol.pubsub.impl.lcmpubsub import LCM, JpegLCM, PickleLCM, Topic as LCMTopic +from dimos.protocol.pubsub.impl.rospubsub import DimosROS, ROSTopic +from dimos.protocol.pubsub.impl.shmpubsub import BytesSharedMemory, PickleSharedMemory if TYPE_CHECKING: from collections.abc import Callable T = TypeVar("T") # type: ignore[misc] +# TODO +# Transports need to be rewritten and simplified, +# +# there is no need for them to get a reference to "a stream" on publish/subscribe calls +# this is a legacy from dask transports. +# +# new transport should literally have 2 functions (next to start/stop) +# "send(msg)" and "receive(callback)" and that's all +# +# we can also consider pubsubs conforming directly to Transport specs +# and removing PubSubTransport glue entirely +# +# Why not ONLY pubsubs without Transport abstraction? 
+# +# General idea for transports (and why they exist at all) +# is that they can be * anything * like +# +# a web camera rtsp stream for Image, audio stream from mic, etc +# http binary streams, tcp connections etc + class PubSubTransport(Transport[T]): topic: Any @@ -73,7 +97,7 @@ def subscribe( ) -> Callable[[], None]: if not self._started: self.start() - return self.lcm.subscribe(self.topic, lambda msg, topic: callback(msg)) + return self.lcm.subscribe(LCMTopic(self.topic), lambda msg, topic: callback(msg)) def start(self) -> None: self.lcm.start() @@ -112,7 +136,7 @@ def broadcast(self, _, msg) -> None: # type: ignore[no-untyped-def] def subscribe(self, callback: Callable[[T], None], selfstream: In[T] = None) -> None: # type: ignore[assignment, override] if not self._started: self.start() - return self.lcm.subscribe(self.topic, lambda msg, topic: callback(msg)) # type: ignore[return-value] + return self.lcm.subscribe(self.topic, lambda msg, topic: callback(msg)) # type: ignore[return-value, arg-type] class JpegLcmTransport(LCMTransport): # type: ignore[type-arg] @@ -258,4 +282,41 @@ def stop(self) -> None: self._ros = None +if DDS_AVAILABLE: + from dimos.protocol.pubsub.impl.ddspubsub import DDS, Topic as DDSTopic + + class DDSTransport(PubSubTransport[T]): + def __init__(self, topic: str, type: type, **kwargs) -> None: # type: ignore[no-untyped-def] + super().__init__(DDSTopic(topic, type)) + self.dds = DDS(**kwargs) + self._started: bool = False + self._start_lock = threading.RLock() + + def start(self) -> None: + with self._start_lock: + if not self._started: + self.dds.start() + self._started = True + + def stop(self) -> None: + with self._start_lock: + if self._started: + self.dds.stop() + self._started = False + + def broadcast(self, _, msg) -> None: # type: ignore[no-untyped-def] + with self._start_lock: + if not self._started: + self.start() + self.dds.publish(self.topic, msg) + + def subscribe( + self, callback: Callable[[T], None], selfstream: 
Stream[T] | None = None + ) -> Callable[[], None]: + with self._start_lock: + if not self._started: + self.start() + return self.dds.subscribe(self.topic, lambda msg, topic: callback(msg)) + + class ZenohTransport(PubSubTransport[T]): ... diff --git a/dimos/core/worker.py b/dimos/core/worker.py new file mode 100644 index 0000000000..d6ff71918c --- /dev/null +++ b/dimos/core/worker.py @@ -0,0 +1,227 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import multiprocessing as mp +from multiprocessing.connection import Connection +import traceback +from typing import Any + +from dimos.core.module import ModuleT +from dimos.core.rpc_client import RPCClient +from dimos.utils.actor_registry import ActorRegistry +from dimos.utils.logging_config import setup_logger +from dimos.utils.sequential_ids import SequentialIds + +logger = setup_logger() + + +class ActorFuture: + """Mimics Dask's ActorFuture - wraps a result with .result() method.""" + + def __init__(self, value: Any) -> None: + self._value = value + + def result(self, _timeout: float | None = None) -> Any: + return self._value + + +class Actor: + """Proxy that forwards method calls to the worker process.""" + + def __init__( + self, conn: Connection | None, module_class: type[ModuleT], worker_id: int + ) -> None: + self._conn = conn + self._cls = module_class + self._worker_id = worker_id + + def __reduce__(self) -> tuple[type, tuple[None, type, int]]: + """Exclude the connection 
when pickling - it can't be used in other processes.""" + return (Actor, (None, self._cls, self._worker_id)) + + def _send_request_to_worker(self, request: dict[str, Any]) -> Any: + if self._conn is None: + raise RuntimeError("Actor connection not available - cannot send requests") + self._conn.send(request) + response = self._conn.recv() + if response.get("error"): + if "AttributeError" in response["error"]: # TODO: better error handling + raise AttributeError(response["error"]) + raise RuntimeError(f"Worker error: {response['error']}") + return response.get("result") + + def set_ref(self, ref: Any) -> ActorFuture: + """Set the actor reference on the remote module.""" + result = self._send_request_to_worker({"type": "set_ref", "ref": ref}) + return ActorFuture(result) + + def __getattr__(self, name: str) -> Any: + """Proxy attribute access to the worker process.""" + if name.startswith("_"): + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'") + + return self._send_request_to_worker({"type": "getattr", "name": name}) + + +# Global forkserver context. Using `forkserver` instead of `fork` because it +# avoids CUDA context corruption issues. +_forkserver_ctx: Any = None + + +def get_forkserver_context() -> Any: + global _forkserver_ctx + if _forkserver_ctx is None: + _forkserver_ctx = mp.get_context("forkserver") + return _forkserver_ctx + + +def reset_forkserver_context() -> None: + """Reset the forkserver context. Used in tests to ensure clean state.""" + global _forkserver_ctx + _forkserver_ctx = None + + +_seq_ids = SequentialIds() + + +class Worker: + def __init__( + self, + module_class: type[ModuleT], + args: tuple[Any, ...] = (), + kwargs: dict[Any, Any] | None = None, + ) -> None: + self._module_class: type[ModuleT] = module_class + self._args: tuple[Any, ...] 
= args + self._kwargs: dict[Any, Any] = kwargs or {} + self._process: Any = None + self._conn: Connection | None = None + self._actor: Actor | None = None + self._worker_id: int = _seq_ids.next() + self._ready: bool = False + + def start_process(self) -> None: + ctx = get_forkserver_context() + parent_conn, child_conn = ctx.Pipe() + self._conn = parent_conn + + self._process = ctx.Process( + target=_worker_entrypoint, + args=(child_conn, self._module_class, self._args, self._kwargs, self._worker_id), + daemon=True, + ) + self._process.start() + self._actor = Actor(parent_conn, self._module_class, self._worker_id) + + def wait_until_ready(self) -> None: + if self._ready: + return + if self._actor is None: + raise RuntimeError("Worker process not started") + + worker_id = self._actor.set_ref(self._actor).result() + ActorRegistry.update(str(self._actor), str(worker_id)) + self._ready = True + + logger.info( + "Deployed module.", module=self._module_class.__name__, worker_id=self._worker_id + ) + + def deploy(self) -> None: + self.start_process() + self.wait_until_ready() + + def get_instance(self) -> RPCClient: + if self._actor is None: + raise RuntimeError("Worker not deployed") + return RPCClient(self._actor, self._module_class) + + def shutdown(self) -> None: + if self._conn is not None: + try: + self._conn.send({"type": "shutdown"}) + self._conn.recv() + except (BrokenPipeError, EOFError): + pass + finally: + self._conn.close() + self._conn = None + + if self._process is not None: + self._process.join(timeout=2) + if self._process.is_alive(): + self._process.terminate() + self._process.join(timeout=1) + self._process = None + + +def _worker_entrypoint( + conn: Connection, + module_class: type[ModuleT], + args: tuple[Any, ...], + kwargs: dict[Any, Any], + worker_id: int, +) -> None: + instance = None + + try: + instance = module_class(*args, **kwargs) + instance.worker = worker_id + + _worker_loop(conn, instance, worker_id) + except Exception as e: + 
logger.error(f"Worker process error: {e}", exc_info=True) + finally: + if instance is not None: + try: + instance.stop() + except Exception: + logger.error("Error during worker shutdown", exc_info=True) + + +def _worker_loop(conn: Connection, instance: Any, worker_id: int) -> None: + while True: + try: + if not conn.poll(timeout=0.1): + continue + request = conn.recv() + except (EOFError, KeyboardInterrupt): + break + + response: dict[str, Any] = {} + try: + req_type = request.get("type") + + if req_type == "set_ref": + instance.ref = request.get("ref") + response["result"] = worker_id + + elif req_type == "getattr": + response["result"] = getattr(instance, request["name"]) + + elif req_type == "shutdown": + response["result"] = True + conn.send(response) + break + + else: + response["error"] = f"Unknown request type: {req_type}" + + except Exception as e: + response["error"] = f"{e.__class__.__name__}: {e}\n{traceback.format_exc()}" + + try: + conn.send(response) + except (BrokenPipeError, EOFError): + break diff --git a/dimos/core/worker_manager.py b/dimos/core/worker_manager.py new file mode 100644 index 0000000000..175b650fd2 --- /dev/null +++ b/dimos/core/worker_manager.py @@ -0,0 +1,74 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Any + +from dimos.core.module import ModuleT +from dimos.core.rpc_client import RPCClient +from dimos.core.worker import Worker +from dimos.utils.actor_registry import ActorRegistry +from dimos.utils.logging_config import setup_logger + +logger = setup_logger() + + +class WorkerManager: + def __init__(self) -> None: + self._workers: list[Worker] = [] + self._closed = False + + def deploy(self, module_class: type[ModuleT], *args: Any, **kwargs: Any) -> RPCClient: + if self._closed: + raise RuntimeError("WorkerManager is closed") + + worker = Worker(module_class, args=args, kwargs=kwargs) + worker.deploy() + self._workers.append(worker) + return worker.get_instance() + + def deploy_parallel( + self, module_specs: list[tuple[type[ModuleT], tuple[Any, ...], dict[Any, Any]]] + ) -> list[RPCClient]: + if self._closed: + raise RuntimeError("WorkerManager is closed") + + workers: list[Worker] = [] + for module_class, args, kwargs in module_specs: + worker = Worker(module_class, args=args, kwargs=kwargs) + worker.start_process() + workers.append(worker) + + for worker in workers: + worker.wait_until_ready() + self._workers.append(worker) + + return [worker.get_instance() for worker in workers] + + def close_all(self) -> None: + if self._closed: + return + self._closed = True + + logger.info("Shutting down all workers...") + + for worker in reversed(self._workers): + try: + worker.shutdown() + except Exception as e: + logger.error(f"Error shutting down worker: {e}", exc_info=True) + + self._workers.clear() + ActorRegistry.clear() + + logger.info("All workers shut down") diff --git a/dimos/dashboard/__init__.py b/dimos/dashboard/__init__.py deleted file mode 100644 index fc97805936..0000000000 --- a/dimos/dashboard/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2025 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Dashboard module for visualization and monitoring. - -Rerun Initialization: - Main process (e.g., blueprints.build) starts Rerun server automatically. - Worker modules connect to the server via connect_rerun(). - -Usage in modules: - import rerun as rr - from dimos.dashboard.rerun_init import connect_rerun - - class MyModule(Module): - def start(self): - super().start() - connect_rerun() # Connect to Rerun server - rr.log("my/entity", my_data.to_rerun()) -""" - -from dimos.dashboard.rerun_init import connect_rerun, init_rerun_server, shutdown_rerun - -__all__ = ["connect_rerun", "init_rerun_server", "shutdown_rerun"] diff --git a/dimos/dashboard/dimos.rbl b/dimos/dashboard/dimos.rbl deleted file mode 100644 index 160180e27a..0000000000 Binary files a/dimos/dashboard/dimos.rbl and /dev/null differ diff --git a/dimos/dashboard/rerun_init.py b/dimos/dashboard/rerun_init.py deleted file mode 100644 index 4ccec8209d..0000000000 --- a/dimos/dashboard/rerun_init.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Rerun initialization with multi-process support. - -Architecture: - - Main process calls init_rerun_server() to start gRPC server + viewer - - Worker processes call connect_rerun() to connect to the server - - All processes share the same Rerun recording stream - -Viewer modes (set via VIEWER_BACKEND config or environment variable): - - "rerun-web" (default): Web viewer on port 9090 - - "rerun-native": Native Rerun viewer (requires display) - - "foxglove": Use Foxglove instead of Rerun - -Usage: - # Set via environment: - VIEWER_BACKEND=rerun-web # or rerun-native or foxglove - - # Or via .env file: - viewer_backend=rerun-native - - # In main process (blueprints.py handles this automatically): - from dimos.dashboard.rerun_init import init_rerun_server - server_addr = init_rerun_server(viewer_mode="rerun-web") - - # In worker modules: - from dimos.dashboard.rerun_init import connect_rerun - connect_rerun() - - # On shutdown: - from dimos.dashboard.rerun_init import shutdown_rerun - shutdown_rerun() -""" - -import atexit -import threading - -import rerun as rr - -from dimos.core.global_config import GlobalConfig -from dimos.utils.logging_config import setup_logger - -logger = setup_logger() - -RERUN_GRPC_PORT = 9876 -RERUN_WEB_PORT = 9090 -RERUN_GRPC_ADDR = f"rerun+http://127.0.0.1:{RERUN_GRPC_PORT}/proxy" - -# Track initialization state -_server_started = False -_connected = False -_rerun_init_lock = threading.Lock() - - -def init_rerun_server(viewer_mode: str = "rerun-web", memory_limit: str = "4GB") -> str: - """Initialize Rerun server in the main process. - - Starts the gRPC server and optionally the web/native viewer. - Should only be called once from the main process. - - Args: - viewer_mode: One of "rerun-web", "rerun-native", or "rerun-grpc-only" - memory_limit: Maximum memory for Rerun viewer (e.g., "16GB", "25%"). Default 16GB. 
- - Returns: - Server address for workers to connect to. - - Raises: - RuntimeError: If server initialization fails. - """ - global _server_started - - if _server_started: - logger.debug("Rerun server already started") - return RERUN_GRPC_ADDR - - rr.init("dimos") - - if viewer_mode == "rerun-native": - # Spawn native viewer (requires display) - rr.spawn(port=RERUN_GRPC_PORT, connect=True, memory_limit=memory_limit) - logger.info("Rerun: spawned native viewer", port=RERUN_GRPC_PORT, memory_limit=memory_limit) - elif viewer_mode == "rerun-web": - # Start gRPC + web viewer (headless friendly) - server_uri = rr.serve_grpc(grpc_port=RERUN_GRPC_PORT) - rr.serve_web_viewer(web_port=RERUN_WEB_PORT, open_browser=False, connect_to=server_uri) - logger.info( - "Rerun: web viewer started", - web_port=RERUN_WEB_PORT, - url=f"http://localhost:{RERUN_WEB_PORT}", - ) - else: - # Just gRPC server, no viewer (connect externally) - rr.serve_grpc(grpc_port=RERUN_GRPC_PORT) - logger.info( - "Rerun: gRPC server only", - port=RERUN_GRPC_PORT, - connect_command=f"rerun --connect {RERUN_GRPC_ADDR}", - ) - - _server_started = True - - # Register shutdown handler - atexit.register(shutdown_rerun) - - return RERUN_GRPC_ADDR - - -def connect_rerun( - global_config: GlobalConfig | None = None, - server_addr: str | None = None, -) -> None: - """Connect to Rerun server from a worker process. - - Modules should check global_config.viewer_backend before calling this. - - Args: - global_config: Global configuration (checks viewer_backend) - server_addr: Server address to connect to. Defaults to RERUN_GRPC_ADDR. 
- """ - global _connected - - with _rerun_init_lock: - if _connected: - logger.debug("Already connected to Rerun server") - return - - # Skip if foxglove backend selected - if global_config and not global_config.viewer_backend.startswith("rerun"): - logger.debug("Rerun connection skipped", viewer_backend=global_config.viewer_backend) - return - - addr = server_addr or RERUN_GRPC_ADDR - - rr.init("dimos") - rr.connect_grpc(addr) - logger.info("Rerun: connected to server", addr=addr) - - _connected = True - - -def shutdown_rerun() -> None: - """Disconnect from Rerun and cleanup resources.""" - global _server_started, _connected - - if _server_started or _connected: - try: - rr.disconnect() - logger.info("Rerun: disconnected") - except Exception as e: - logger.warning("Rerun: error during disconnect", error=str(e)) - - _server_started = False - _connected = False diff --git a/dimos/dashboard/rerun_scene_wiring.py b/dimos/dashboard/rerun_scene_wiring.py deleted file mode 100644 index 56efe306ea..0000000000 --- a/dimos/dashboard/rerun_scene_wiring.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Rerun scene wiring helpers (static attachments, URDF, pinholes). - -This module is intentionally *not* a TF visualizer. -It only provides static Rerun scene setup: -- view coordinates -- attach semantic entity paths (world/robot/...) under named TF frames (base_link, camera_optical, ...) 
-- optional URDF logging -- optional axes gizmo + camera pinhole(s) - -Dynamic TF visualization remains the responsibility of `TFRerunModule`. -""" - -from __future__ import annotations - -from pathlib import Path -from typing import TYPE_CHECKING, Any, Protocol - -import rerun as rr - -from dimos.core import Module, rpc -from dimos.core.global_config import GlobalConfig -from dimos.dashboard.rerun_init import connect_rerun - -if TYPE_CHECKING: - from collections.abc import Sequence - - -class _HasToRerun(Protocol): - def to_rerun(self) -> Any: ... - - -def _attach_entity(entity_path: str, parent_frame: str) -> None: - """Attach an entity path's implicit frame (tf#/...) under a named frame.""" - rr.log( - entity_path, - rr.Transform3D( - translation=[0.0, 0.0, 0.0], - rotation=rr.Quaternion(xyzw=[0.0, 0.0, 0.0, 1.0]), - parent_frame=parent_frame, # type: ignore[call-arg] - ), - static=True, - ) - - -class RerunSceneWiringModule(Module): - """Static Rerun scene wiring for semantic entity paths.""" - - _global_config: GlobalConfig - - # Semantic entity roots - world_entity: str - robot_entity: str - robot_axes_entity: str - - # Named TF frames to attach to - world_frame: str - robot_frame: str - - # Optional assets - urdf_path: str | Path | None - axes_size: float | None - - # Multi-camera wiring: - # tuple = (camera_entity_path, camera_named_frame, camera_info_static) - cameras: Sequence[tuple[str, str, _HasToRerun]] - camera_rgb_suffix: str - - def __init__( - self, - *, - global_config: GlobalConfig | None = None, - world_entity: str = "world", - robot_entity: str = "world/robot", - robot_axes_entity: str = "world/robot/axes", - world_frame: str = "world", - robot_frame: str = "base_link", - urdf_path: str | Path | None = None, - axes_size: float | None = 0.5, - cameras: Sequence[tuple[str, str, _HasToRerun]] = (), - camera_rgb_suffix: str = "rgb", - **kwargs: Any, - ) -> None: - super().__init__(**kwargs) - self._global_config = global_config or GlobalConfig() - 
- self.world_entity = world_entity - self.robot_entity = robot_entity - self.robot_axes_entity = robot_axes_entity - - self.world_frame = world_frame - self.robot_frame = robot_frame - - self.urdf_path = urdf_path - self.axes_size = axes_size - - self.cameras = cameras - self.camera_rgb_suffix = camera_rgb_suffix - - @rpc - def start(self) -> None: - super().start() - - if not self._global_config.viewer_backend.startswith("rerun"): - return - - connect_rerun(global_config=self._global_config) - - # Global view coordinates (applies to views at/under this origin). - rr.log(self.world_entity, rr.ViewCoordinates.RIGHT_HAND_Z_UP, static=True) - - # Attach semantic entity paths to named TF frames. - _attach_entity(self.world_entity, self.world_frame) - _attach_entity(self.robot_entity, self.robot_frame) - - if self.axes_size is not None: - rr.log(self.robot_axes_entity, rr.TransformAxes3D(self.axes_size), static=True) # type: ignore[attr-defined] - - # Optional URDF load (purely visual). - if self.urdf_path is not None: - p = Path(self.urdf_path) - if p.exists(): - rr.log_file_from_path( - str(p), - entity_path_prefix=self.robot_entity, - static=True, - ) - - # Multi-camera: attach camera entities + log static pinholes. - for cam_entity, cam_frame, cam_info in self.cameras: - _attach_entity(cam_entity, cam_frame) - rr.log(cam_entity, cam_info.to_rerun(), static=True) # type: ignore[no-untyped-call] - - @rpc - def stop(self) -> None: - super().stop() - - -rerun_scene_wiring = RerunSceneWiringModule.blueprint diff --git a/dimos/dashboard/tf_rerun_module.py b/dimos/dashboard/tf_rerun_module.py deleted file mode 100644 index bca05ce2e4..0000000000 --- a/dimos/dashboard/tf_rerun_module.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""TF Rerun Module - Snapshot TF visualization in Rerun. - -This module polls the TF buffer at a configurable rate and logs the latest -transform for each edge to Rerun. This provides stable, rate-limited TF -visualization without subscribing to the /tf transport from here. - -Usage: - # In blueprints: - from dimos.dashboard.tf_rerun_module import tf_rerun - - def my_robot(): - return ( - robot_connection() - + tf_rerun() # Add TF visualization - + other_modules() - ) -""" - -from collections.abc import Sequence -import threading -import time -from typing import Any, cast - -import rerun as rr - -from dimos.core import Module, rpc -from dimos.core.blueprints import ModuleBlueprintSet, autoconnect -from dimos.core.global_config import GlobalConfig -from dimos.dashboard.rerun_init import connect_rerun -from dimos.dashboard.rerun_scene_wiring import rerun_scene_wiring -from dimos.utils.logging_config import setup_logger - -logger = setup_logger() - - -class TFRerunModule(Module): - """Polls TF buffer and logs snapshot transforms to Rerun. 
- - This module automatically visualizes the TF tree in Rerun by: - - Using `self.tf` (the system TF service) to maintain the TF buffer - - Polling at a configurable rate and logging the latest transform per edge - """ - - _global_config: GlobalConfig - _poll_thread: threading.Thread | None = None - _stop_event: threading.Event | None = None - _poll_hz: float - _last_ts_by_edge: dict[tuple[str, str], float] - - def __init__( - self, - global_config: GlobalConfig | None = None, - poll_hz: float = 30.0, - **kwargs: Any, - ) -> None: - """Initialize TFRerunModule. - - Args: - global_config: Optional global configuration for viewer backend settings - **kwargs: Additional arguments passed to parent Module - """ - super().__init__(**kwargs) - self._global_config = global_config or GlobalConfig() - self._poll_hz = poll_hz - self._last_ts_by_edge = {} - - @rpc - def start(self) -> None: - """Start the TF visualization module.""" - super().start() - - # Only connect if Rerun backend is selected - if self._global_config.viewer_backend.startswith("rerun"): - connect_rerun(global_config=self._global_config) - - # Ensure TF transport is started so its internal subscription populates the buffer. - self.tf.start(sub=True) - - self._stop_event = threading.Event() - self._poll_thread = threading.Thread(target=self._poll_loop, daemon=True) - self._poll_thread.start() - logger.info("TFRerunModule: started TF snapshot polling", poll_hz=self._poll_hz) - - def _poll_loop(self) -> None: - assert self._stop_event is not None - period_s = 1.0 / max(self._poll_hz, 0.1) - - while not self._stop_event.is_set(): - # Snapshot keys to avoid concurrent modification while TF buffer updates. 
- items = list(self.tf.buffers.items()) # type: ignore[attr-defined] - for (parent, child), buffer in items: - latest = buffer.get() - if latest is None: - continue - last_ts = self._last_ts_by_edge.get((parent, child)) - if last_ts is not None and latest.ts == last_ts: - continue - - # Log under `world/tf/...` so it is visible under the default 3D view origin. - rr.log(f"world/tf/{child}", latest.to_rerun()) # type: ignore[no-untyped-call] - self._last_ts_by_edge[(parent, child)] = latest.ts - - time.sleep(period_s) - - @rpc - def stop(self) -> None: - """Stop the TF visualization module and cleanup LCM subscription.""" - if self._stop_event is not None: - self._stop_event.set() - self._stop_event = None - - if self._poll_thread is not None and self._poll_thread.is_alive(): - self._poll_thread.join(timeout=1.0) - self._poll_thread = None - - super().stop() - - -def tf_rerun( - *, - poll_hz: float = 30.0, - scene: bool = True, - # Scene wiring kwargs (only used if scene=True) - world_entity: str = "world", - robot_entity: str = "world/robot", - robot_axes_entity: str = "world/robot/axes", - world_frame: str = "world", - robot_frame: str = "base_link", - urdf_path: str | None = None, - axes_size: float | None = 0.5, - cameras: Sequence[tuple[str, str, Any]] = (), - camera_rgb_suffix: str = "rgb", -) -> ModuleBlueprintSet: - """Convenience blueprint: TF snapshot polling + (optional) static scene wiring. - - - TF visualization stays in `TFRerunModule` (poll TF buffer, log to `world/tf/*`). - - Scene wiring is handled by `RerunSceneWiringModule` (view coords, attachments, URDF, pinholes). 
- """ - tf_bp = cast("ModuleBlueprintSet", TFRerunModule.blueprint(poll_hz=poll_hz)) - if not scene: - return tf_bp - - scene_bp = cast( - "ModuleBlueprintSet", - rerun_scene_wiring( - world_entity=world_entity, - robot_entity=robot_entity, - robot_axes_entity=robot_axes_entity, - world_frame=world_frame, - robot_frame=robot_frame, - urdf_path=urdf_path, - axes_size=axes_size, - cameras=cameras, - camera_rgb_suffix=camera_rgb_suffix, - ), - ) - - return autoconnect(tf_bp, scene_bp) diff --git a/dimos/e2e_tests/lcm_spy.py b/dimos/e2e_tests/lcm_spy.py index de0864dcd2..9efed09d5e 100644 --- a/dimos/e2e_tests/lcm_spy.py +++ b/dimos/e2e_tests/lcm_spy.py @@ -22,8 +22,9 @@ import lcm +from dimos.msgs import DimosMsg from dimos.msgs.geometry_msgs import PoseStamped -from dimos.protocol.service.lcmservice import LCMMsg, LCMService +from dimos.protocol.service.lcmservice import LCMService class LcmSpy(LCMService): @@ -155,7 +156,7 @@ def listener(msg: bytes) -> None: def wait_for_message_result( self, topic: str, - type: type[LCMMsg], + type: type[DimosMsg], predicate: Callable[[Any], bool], fail_message: str, timeout: float = 30.0, diff --git a/dimos/e2e_tests/test_control_orchestrator.py b/dimos/e2e_tests/test_control_coordinator.py similarity index 58% rename from dimos/e2e_tests/test_control_orchestrator.py rename to dimos/e2e_tests/test_control_coordinator.py index aa820d66ec..f6e520831d 100644 --- a/dimos/e2e_tests/test_control_orchestrator.py +++ b/dimos/e2e_tests/test_control_coordinator.py @@ -12,13 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""End-to-end tests for the ControlOrchestrator. +"""End-to-end tests for the ControlCoordinator. -These tests start a real orchestrator process and communicate via LCM/RPC. +These tests start a real coordinator process and communicate via LCM/RPC. Unlike unit tests, these verify the full system integration. 
- -Run with: - pytest dimos/e2e_tests/test_control_orchestrator.py -v -s """ import os @@ -26,7 +23,7 @@ import pytest -from dimos.control.orchestrator import ControlOrchestrator +from dimos.control.coordinator import ControlCoordinator from dimos.core.rpc_client import RPCClient from dimos.msgs.sensor_msgs import JointState from dimos.msgs.trajectory_msgs import JointTrajectory, TrajectoryPoint, TrajectoryState @@ -34,28 +31,25 @@ @pytest.mark.skipif(bool(os.getenv("CI")), reason="LCM doesn't work in CI.") @pytest.mark.e2e -class TestControlOrchestratorE2E: - """End-to-end tests for ControlOrchestrator.""" +class TestControlCoordinatorE2E: + """End-to-end tests for ControlCoordinator.""" - def test_orchestrator_starts_and_responds_to_rpc(self, lcm_spy, start_blueprint) -> None: - """Test that orchestrator starts and responds to RPC queries.""" + def test_coordinator_starts_and_responds_to_rpc(self, lcm_spy, start_blueprint) -> None: + """Test that coordinator starts and responds to RPC queries.""" # Save topics we care about (LCM topics include type suffix) - joint_state_topic = "/orchestrator/joint_state#sensor_msgs.JointState" + joint_state_topic = "/coordinator/joint_state#sensor_msgs.JointState" lcm_spy.save_topic(joint_state_topic) - lcm_spy.save_topic("/rpc/ControlOrchestrator/list_joints/res") - lcm_spy.save_topic("/rpc/ControlOrchestrator/list_tasks/res") + lcm_spy.save_topic("/rpc/ControlCoordinator/list_joints/res") + lcm_spy.save_topic("/rpc/ControlCoordinator/list_tasks/res") - # Start the mock orchestrator blueprint - start_blueprint("orchestrator-mock") + # Start the mock coordinator blueprint + start_blueprint("coordinator-mock") # Wait for joint state to be published (proves tick loop is running) - lcm_spy.wait_for_saved_topic( - joint_state_topic, - timeout=10.0, - ) + lcm_spy.wait_for_saved_topic(joint_state_topic) # Create RPC client and query - client = RPCClient(None, ControlOrchestrator) + client = RPCClient(None, ControlCoordinator) try: # 
Test list_joints RPC joints = client.list_joints() @@ -75,23 +69,19 @@ def test_orchestrator_starts_and_responds_to_rpc(self, lcm_spy, start_blueprint) finally: client.stop_rpc_client() - def test_orchestrator_executes_trajectory(self, lcm_spy, start_blueprint) -> None: - """Test that orchestrator executes a trajectory via RPC.""" + def test_coordinator_executes_trajectory(self, lcm_spy, start_blueprint) -> None: + """Test that coordinator executes a trajectory via RPC.""" # Save topics - lcm_spy.save_topic("/orchestrator/joint_state#sensor_msgs.JointState") - lcm_spy.save_topic("/rpc/ControlOrchestrator/execute_trajectory/res") - lcm_spy.save_topic("/rpc/ControlOrchestrator/get_trajectory_status/res") + lcm_spy.save_topic("/coordinator/joint_state#sensor_msgs.JointState") - # Start orchestrator - start_blueprint("orchestrator-mock") + # Start coordinator + start_blueprint("coordinator-mock") # Wait for it to be ready - lcm_spy.wait_for_saved_topic( - "/orchestrator/joint_state#sensor_msgs.JointState", timeout=10.0 - ) + lcm_spy.wait_for_saved_topic("/coordinator/joint_state#sensor_msgs.JointState") # Create RPC client - client = RPCClient(None, ControlOrchestrator) + client = RPCClient(None, ControlCoordinator) try: # Get initial joint positions initial_positions = client.get_joint_positions() @@ -114,8 +104,8 @@ def test_orchestrator_executes_trajectory(self, lcm_spy, start_blueprint) -> Non ], ) - # Execute trajectory - result = client.execute_trajectory("traj_arm", trajectory) + # Execute trajectory via task_invoke + result = client.task_invoke("traj_arm", "execute", {"trajectory": trajectory}) assert result is True # Poll for completion @@ -124,8 +114,8 @@ def test_orchestrator_executes_trajectory(self, lcm_spy, start_blueprint) -> Non completed = False while time.time() - start_time < timeout: - status = client.get_trajectory_status("traj_arm") - if status is not None and status.state == TrajectoryState.COMPLETED.name: + state = client.task_invoke("traj_arm", 
"get_state") + if state is not None and state == TrajectoryState.COMPLETED: completed = True break time.sleep(0.1) @@ -134,16 +124,16 @@ def test_orchestrator_executes_trajectory(self, lcm_spy, start_blueprint) -> Non finally: client.stop_rpc_client() - def test_orchestrator_joint_state_published(self, lcm_spy, start_blueprint) -> None: + def test_coordinator_joint_state_published(self, lcm_spy, start_blueprint) -> None: """Test that joint state messages are published at expected rate.""" - joint_state_topic = "/orchestrator/joint_state#sensor_msgs.JointState" + joint_state_topic = "/coordinator/joint_state#sensor_msgs.JointState" lcm_spy.save_topic(joint_state_topic) - # Start orchestrator - start_blueprint("orchestrator-mock") + # Start coordinator + start_blueprint("coordinator-mock") # Wait for initial message - lcm_spy.wait_for_saved_topic(joint_state_topic, timeout=10.0) + lcm_spy.wait_for_saved_topic(joint_state_topic) # Collect messages for 1 second time.sleep(1.0) @@ -164,17 +154,15 @@ def test_orchestrator_joint_state_published(self, lcm_spy, start_blueprint) -> N assert len(joint_state.position) == 7 assert "arm_joint1" in joint_state.name - def test_orchestrator_cancel_trajectory(self, lcm_spy, start_blueprint) -> None: + def test_coordinator_cancel_trajectory(self, lcm_spy, start_blueprint) -> None: """Test that a running trajectory can be cancelled.""" - lcm_spy.save_topic("/orchestrator/joint_state#sensor_msgs.JointState") + lcm_spy.save_topic("/coordinator/joint_state#sensor_msgs.JointState") - # Start orchestrator - start_blueprint("orchestrator-mock") - lcm_spy.wait_for_saved_topic( - "/orchestrator/joint_state#sensor_msgs.JointState", timeout=10.0 - ) + # Start coordinator + start_blueprint("coordinator-mock") + lcm_spy.wait_for_saved_topic("/coordinator/joint_state#sensor_msgs.JointState") - client = RPCClient(None, ControlOrchestrator) + client = RPCClient(None, ControlCoordinator) try: # Create a long trajectory (5 seconds) trajectory = 
JointTrajectory( @@ -193,38 +181,36 @@ def test_orchestrator_cancel_trajectory(self, lcm_spy, start_blueprint) -> None: ], ) - # Start trajectory - result = client.execute_trajectory("traj_arm", trajectory) + # Start trajectory via task_invoke + result = client.task_invoke("traj_arm", "execute", {"trajectory": trajectory}) assert result is True # Wait a bit then cancel time.sleep(0.5) - cancel_result = client.cancel_trajectory("traj_arm") + cancel_result = client.task_invoke("traj_arm", "cancel") assert cancel_result is True # Check status is ABORTED - status = client.get_trajectory_status("traj_arm") - assert status is not None - assert status.state == TrajectoryState.ABORTED.name + state = client.task_invoke("traj_arm", "get_state") + assert state is not None + assert state == TrajectoryState.ABORTED finally: client.stop_rpc_client() - def test_dual_arm_orchestrator(self, lcm_spy, start_blueprint) -> None: - """Test dual-arm orchestrator with independent trajectories.""" - lcm_spy.save_topic("/orchestrator/joint_state#sensor_msgs.JointState") + def test_dual_arm_coordinator(self, lcm_spy, start_blueprint) -> None: + """Test dual-arm coordinator with independent trajectories.""" + lcm_spy.save_topic("/coordinator/joint_state#sensor_msgs.JointState") - # Start dual-arm mock orchestrator - start_blueprint("orchestrator-dual-mock") - lcm_spy.wait_for_saved_topic( - "/orchestrator/joint_state#sensor_msgs.JointState", timeout=10.0 - ) + # Start dual-arm mock coordinator + start_blueprint("coordinator-dual-mock") + lcm_spy.wait_for_saved_topic("/coordinator/joint_state#sensor_msgs.JointState") - client = RPCClient(None, ControlOrchestrator) + client = RPCClient(None, ControlCoordinator) try: # Verify both arms present joints = client.list_joints() - assert "left_joint1" in joints - assert "right_joint1" in joints + assert "left_arm_joint1" in joints + assert "right_arm_joint1" in joints tasks = client.list_tasks() assert "traj_left" in tasks @@ -232,7 +218,7 @@ def 
test_dual_arm_orchestrator(self, lcm_spy, start_blueprint) -> None: # Create trajectories for both arms left_trajectory = JointTrajectory( - joint_names=[f"left_joint{i + 1}" for i in range(7)], + joint_names=[f"left_arm_joint{i + 1}" for i in range(7)], points=[ TrajectoryPoint(time_from_start=0.0, positions=[0.0] * 7), TrajectoryPoint(time_from_start=0.5, positions=[0.2] * 7), @@ -240,25 +226,30 @@ def test_dual_arm_orchestrator(self, lcm_spy, start_blueprint) -> None: ) right_trajectory = JointTrajectory( - joint_names=[f"right_joint{i + 1}" for i in range(6)], + joint_names=[f"right_arm_joint{i + 1}" for i in range(6)], points=[ TrajectoryPoint(time_from_start=0.0, positions=[0.0] * 6), TrajectoryPoint(time_from_start=0.5, positions=[0.3] * 6), ], ) - # Execute both - assert client.execute_trajectory("traj_left", left_trajectory) is True - assert client.execute_trajectory("traj_right", right_trajectory) is True + # Execute both via task_invoke + assert ( + client.task_invoke("traj_left", "execute", {"trajectory": left_trajectory}) is True + ) + assert ( + client.task_invoke("traj_right", "execute", {"trajectory": right_trajectory}) + is True + ) # Wait for completion time.sleep(1.0) # Both should complete - left_status = client.get_trajectory_status("traj_left") - right_status = client.get_trajectory_status("traj_right") + left_state = client.task_invoke("traj_left", "get_state") + right_state = client.task_invoke("traj_right", "get_state") - assert left_status is not None and left_status.state == TrajectoryState.COMPLETED.name - assert right_status is not None and right_status.state == TrajectoryState.COMPLETED.name + assert left_state == TrajectoryState.COMPLETED + assert right_state == TrajectoryState.COMPLETED finally: client.stop_rpc_client() diff --git a/dimos/e2e_tests/test_dimos_cli_e2e.py b/dimos/e2e_tests/test_dimos_cli_e2e.py index f91db1b2fc..ede0ec7a3a 100644 --- a/dimos/e2e_tests/test_dimos_cli_e2e.py +++ b/dimos/e2e_tests/test_dimos_cli_e2e.py @@ 
-21,17 +21,14 @@ @pytest.mark.skipif(not os.getenv("OPENAI_API_KEY"), reason="OPENAI_API_KEY not set.") @pytest.mark.e2e def test_dimos_skills(lcm_spy, start_blueprint, human_input) -> None: - lcm_spy.save_topic("/rpc/DemoCalculatorSkill/set_AgentSpec_register_skills/res") - lcm_spy.save_topic("/rpc/HumanInput/start/res") lcm_spy.save_topic("/agent") + lcm_spy.save_topic("/rpc/Agent/on_system_modules/res") lcm_spy.save_topic("/rpc/DemoCalculatorSkill/sum_numbers/req") lcm_spy.save_topic("/rpc/DemoCalculatorSkill/sum_numbers/res") start_blueprint("run", "demo-skill") - lcm_spy.wait_for_saved_topic("/rpc/DemoCalculatorSkill/set_AgentSpec_register_skills/res") - lcm_spy.wait_for_saved_topic("/rpc/HumanInput/start/res") - lcm_spy.wait_for_saved_topic_content("/agent", b"AIMessage") + lcm_spy.wait_for_saved_topic("/rpc/Agent/on_system_modules/res") human_input("what is 52983 + 587237") diff --git a/dimos/e2e_tests/test_person_follow.py b/dimos/e2e_tests/test_person_follow.py index 709f4e4511..abb9cfb4fa 100644 --- a/dimos/e2e_tests/test_person_follow.py +++ b/dimos/e2e_tests/test_person_follow.py @@ -64,10 +64,8 @@ def test_person_follow( ) -> None: start_blueprint("--mujoco-start-pos", "-6.18 0.96", "run", "unitree-go2-agentic") - lcm_spy.save_topic("/rpc/HumanInput/start/res") - lcm_spy.wait_for_saved_topic("/rpc/HumanInput/start/res", timeout=120.0) - lcm_spy.save_topic("/agent") - lcm_spy.wait_for_saved_topic_content("/agent", b"AIMessage", timeout=120.0) + lcm_spy.save_topic("/rpc/Agent/on_system_modules/res") + lcm_spy.wait_for_saved_topic("/rpc/Agent/on_system_modules/res", timeout=120.0) time.sleep(5) diff --git a/dimos/e2e_tests/test_simulation_module.py b/dimos/e2e_tests/test_simulation_module.py new file mode 100644 index 0000000000..6c15f62056 --- /dev/null +++ b/dimos/e2e_tests/test_simulation_module.py @@ -0,0 +1,86 @@ +# Copyright 2025-2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""End-to-end tests for the simulation module.""" + +import os + +import pytest + +from dimos.msgs.sensor_msgs import JointCommand, JointState, RobotState + + +def _positions_within_tolerance( + positions: list[float], + target: list[float], + tolerance: float, +) -> bool: + if len(positions) < len(target): + return False + return all(abs(positions[i] - target[i]) <= tolerance for i in range(len(target))) + + +@pytest.mark.skipif(bool(os.getenv("CI")), reason="LCM doesn't work in CI.") +@pytest.mark.e2e +class TestSimulationModuleE2E: + def test_xarm7_joint_state_published(self, lcm_spy, start_blueprint) -> None: + joint_state_topic = "/xarm/joint_states#sensor_msgs.JointState" + lcm_spy.save_topic(joint_state_topic) + + start_blueprint("xarm7-trajectory-sim") + lcm_spy.wait_for_saved_topic(joint_state_topic, timeout=15.0) + + with lcm_spy._messages_lock: + raw_joint_state = lcm_spy.messages[joint_state_topic][0] + + joint_state = JointState.lcm_decode(raw_joint_state) + assert len(joint_state.name) == 8 + assert len(joint_state.position) == 8 + + def test_xarm7_robot_state_published(self, lcm_spy, start_blueprint) -> None: + robot_state_topic = "/xarm/robot_state#sensor_msgs.RobotState" + lcm_spy.save_topic(robot_state_topic) + + start_blueprint("xarm7-trajectory-sim") + lcm_spy.wait_for_saved_topic(robot_state_topic, timeout=15.0) + + with lcm_spy._messages_lock: + raw_robot_state = 
lcm_spy.messages[robot_state_topic][0] + + robot_state = RobotState.lcm_decode(raw_robot_state) + assert robot_state.mt_able in (0, 1) + + def test_xarm7_joint_command_updates_joint_state(self, lcm_spy, start_blueprint) -> None: + joint_state_topic = "/xarm/joint_states#sensor_msgs.JointState" + joint_command_topic = "/xarm/joint_position_command#sensor_msgs.JointCommand" + lcm_spy.save_topic(joint_state_topic) + + start_blueprint("xarm7-trajectory-sim") + lcm_spy.wait_for_saved_topic(joint_state_topic, timeout=15.0) + + target_positions = [0.2, -0.2, 0.1, -0.1, 0.15, -0.15, 0.05] + lcm_spy.publish(joint_command_topic, JointCommand(positions=target_positions)) + + tolerance = 0.03 + lcm_spy.wait_for_message_result( + joint_state_topic, + JointState, + predicate=lambda msg: _positions_within_tolerance( + list(msg.position), + target_positions, + tolerance, + ), + fail_message=("joint_state did not reach commanded positions within tolerance"), + timeout=10.0, + ) diff --git a/dimos/e2e_tests/test_spatial_memory.py b/dimos/e2e_tests/test_spatial_memory.py index 7c08800a6f..8b03a9915c 100644 --- a/dimos/e2e_tests/test_spatial_memory.py +++ b/dimos/e2e_tests/test_spatial_memory.py @@ -34,10 +34,8 @@ def test_spatial_memory_navigation( ) -> None: start_blueprint("run", "unitree-go2-agentic") - lcm_spy.save_topic("/rpc/HumanInput/start/res") - lcm_spy.wait_for_saved_topic("/rpc/HumanInput/start/res", timeout=120.0) - lcm_spy.save_topic("/agent") - lcm_spy.wait_for_saved_topic_content("/agent", b"AIMessage", timeout=120.0) + lcm_spy.save_topic("/rpc/Agent/on_system_modules/res") + lcm_spy.wait_for_saved_topic("/rpc/Agent/on_system_modules/res", timeout=120.0) time.sleep(5) diff --git a/dimos/hardware/README.md b/dimos/hardware/README.md deleted file mode 100644 index 2587e3595d..0000000000 --- a/dimos/hardware/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# Hardware - -## Remote camera stream with timestamps - -### Required Ubuntu packages: - -```bash -sudo apt install 
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly gstreamer1.0-libav python3-gi python3-gi-cairo gir1.2-gstreamer-1.0 gir1.2-gst-plugins-base-1.0 v4l-utils gstreamer1.0-vaapi -``` - -### Usage - -On sender machine (with the camera): - -```bash -python3 dimos/hardware/gstreamer_sender.py --device /dev/video0 --host 0.0.0.0 --port 5000 -``` - -If it's a stereo camera and you only want to send the left side (the left camera): - -```bash -python3 dimos/hardware/gstreamer_sender.py --device /dev/video0 --host 0.0.0.0 --port 5000 --single-camera -``` - -On receiver machine: - -```bash -python3 dimos/hardware/gstreamer_camera_test_script.py --host 10.0.0.227 --port 5000 -``` diff --git a/dimos/hardware/manipulators/README.md b/dimos/hardware/manipulators/README.md index d3e54d4cb0..60d3c94567 100644 --- a/dimos/hardware/manipulators/README.md +++ b/dimos/hardware/manipulators/README.md @@ -1,6 +1,6 @@ # Manipulator Drivers -This module provides manipulator arm drivers using the **B-lite architecture**: Protocol-only with injectable backends. +This module provides manipulator arm drivers: Protocol-only with injectable adapters. 
## Architecture Overview @@ -14,35 +14,32 @@ This module provides manipulator arm drivers using the **B-lite architecture**: └─────────────────────┬───────────────────────────────────────┘ │ uses ┌─────────────────────▼───────────────────────────────────────┐ -│ Backend (implements Protocol) │ +│ Adapter (implements Protocol) │ │ - Handles SDK communication │ │ - Unit conversions (radians ↔ vendor units) │ -│ - Swappable: XArmBackend, PiperBackend, MockBackend │ +│ - Swappable: XArmAdapter, PiperAdapter, MockAdapter │ └─────────────────────────────────────────────────────────────┘ ``` ## Key Benefits -- **Testable**: Inject `MockBackend` for unit tests without hardware +- **Testable**: Inject `MockAdapter` for unit tests without hardware - **Flexible**: Each arm controls its own threading/timing - **Simple**: No ABC inheritance required - just implement the Protocol -- **Type-safe**: Full type checking via `ManipulatorBackend` Protocol +- **Type-safe**: Full type checking via `ManipulatorAdapter` Protocol ## Directory Structure ``` manipulators/ -├── spec.py # ManipulatorBackend Protocol + shared types +├── spec.py # ManipulatorAdapter Protocol + shared types +├── registry.py # Adapter registry with auto-discovery ├── mock/ -│ └── backend.py # MockBackend for testing +│ └── adapter.py # MockAdapter for testing ├── xarm/ -│ ├── backend.py # XArmBackend (SDK wrapper) -│ ├── arm.py # XArm driver module -│ └── blueprints.py # Pre-configured blueprints +│ ├── adapter.py # XArmAdapter (SDK wrapper) └── piper/ - ├── backend.py # PiperBackend (SDK wrapper) - ├── arm.py # Piper driver module - └── blueprints.py # Pre-configured blueprints + ├── adapter.py # PiperAdapter (SDK wrapper) ``` ## Quick Start @@ -71,20 +68,20 @@ coordinator.loop() ### Testing Without Hardware ```python -from dimos.hardware.manipulators.mock import MockBackend +from dimos.hardware.manipulators.mock import MockAdapter from dimos.hardware.manipulators.xarm import XArm -arm = 
XArm(backend=MockBackend(dof=6)) +arm = XArm(adapter=MockAdapter(dof=6)) arm.start() # No hardware needed! arm.move_joint([0.1, 0.2, 0.3, 0.4, 0.5, 0.6]) ``` ## Adding a New Arm -1. **Create the backend** (`backend.py`): +1. **Create the adapter** (`adapter.py`): ```python -class MyArmBackend: # No inheritance needed - just match the Protocol +class MyArmAdapter: # No inheritance needed - just match the Protocol def __init__(self, ip: str = "192.168.1.100", dof: int = 6) -> None: self._ip = ip self._dof = dof @@ -100,16 +97,16 @@ class MyArmBackend: # No inheritance needed - just match the Protocol ```python from dimos.core import Module, ModuleConfig, In, Out, rpc -from .backend import MyArmBackend +from .adapter import MyArmAdapter class MyArm(Module[MyArmConfig]): joint_state: Out[JointState] robot_state: Out[RobotState] joint_position_command: In[JointCommand] - def __init__(self, backend=None, **kwargs): + def __init__(self, adapter=None, **kwargs): super().__init__(**kwargs) - self.backend = backend or MyArmBackend( + self.adapter = adapter or MyArmAdapter( ip=self.config.ip, dof=self.config.dof, ) @@ -118,9 +115,9 @@ class MyArm(Module[MyArmConfig]): 3. **Create blueprints** (`blueprints.py`) for common configurations. 
-## ManipulatorBackend Protocol +## ManipulatorAdapter Protocol -All backends must implement these core methods: +All adapters must implement these core methods: | Category | Methods | |----------|---------| @@ -138,7 +135,7 @@ Optional methods (return `None`/`False` if unsupported): ## Unit Conventions -All backends convert to/from SI units: +All adapters convert to/from SI units: | Quantity | Unit | |----------|------| @@ -147,17 +144,3 @@ All backends convert to/from SI units: | Torque | Nm | | Position | meters | | Force | Newtons | - -## Available Blueprints - -### XArm -- `xarm_servo` - Basic servo control (6-DOF) -- `xarm5_servo`, `xarm7_servo` - 5/7-DOF variants -- `xarm_trajectory` - Driver + trajectory controller -- `xarm_cartesian` - Driver + cartesian controller - -### Piper -- `piper_servo` - Basic servo control -- `piper_servo_gripper` - With gripper support -- `piper_trajectory` - Driver + trajectory controller -- `piper_left`, `piper_right` - Dual arm configurations diff --git a/dimos/hardware/manipulators/__init__.py b/dimos/hardware/manipulators/__init__.py index e4133dbb51..58986c9211 100644 --- a/dimos/hardware/manipulators/__init__.py +++ b/dimos/hardware/manipulators/__init__.py @@ -14,12 +14,11 @@ """Manipulator drivers for robotic arms. -Architecture: B-lite (Protocol-based backends with per-arm drivers) - -- spec.py: ManipulatorBackend Protocol and shared types -- xarm/: XArm driver and backend -- piper/: Piper driver and backend -- mock/: Mock backend for testing +Architecture: Protocol-based adapters for different manipulator hardware. 
+- spec.py: ManipulatorAdapter Protocol and shared types +- xarm/: XArm adapter +- piper/: Piper adapter +- mock/: Mock adapter for testing Usage: >>> from dimos.hardware.manipulators.xarm import XArm @@ -30,8 +29,8 @@ Testing: >>> from dimos.hardware.manipulators.xarm import XArm - >>> from dimos.hardware.manipulators.mock import MockBackend - >>> arm = XArm(backend=MockBackend()) + >>> from dimos.hardware.manipulators.mock import MockAdapter + >>> arm = XArm(adapter=MockAdapter()) >>> arm.start() # No hardware needed! """ @@ -39,7 +38,7 @@ ControlMode, DriverStatus, JointLimits, - ManipulatorBackend, + ManipulatorAdapter, ManipulatorInfo, ) @@ -47,6 +46,6 @@ "ControlMode", "DriverStatus", "JointLimits", - "ManipulatorBackend", + "ManipulatorAdapter", "ManipulatorInfo", ] diff --git a/dimos/hardware/manipulators/mock/__init__.py b/dimos/hardware/manipulators/mock/__init__.py index 87428973a4..63be6f7e98 100644 --- a/dimos/hardware/manipulators/mock/__init__.py +++ b/dimos/hardware/manipulators/mock/__init__.py @@ -12,17 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Mock backend for testing manipulator drivers without hardware. +"""Mock adapter for testing manipulator drivers without hardware. Usage: >>> from dimos.hardware.manipulators.xarm import XArm - >>> from dimos.hardware.manipulators.mock import MockBackend - >>> arm = XArm(backend=MockBackend()) + >>> from dimos.hardware.manipulators.mock import MockAdapter + >>> arm = XArm(adapter=MockAdapter()) >>> arm.start() # No hardware needed! 
>>> arm.move_joint([0.1, 0.2, 0.3, 0.4, 0.5, 0.6]) - >>> assert arm.backend.read_joint_positions() == [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] + >>> assert arm.adapter.read_joint_positions() == [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] """ -from dimos.hardware.manipulators.mock.backend import MockBackend +from dimos.hardware.manipulators.mock.adapter import MockAdapter -__all__ = ["MockBackend"] +__all__ = ["MockAdapter"] diff --git a/dimos/hardware/manipulators/mock/backend.py b/dimos/hardware/manipulators/mock/adapter.py similarity index 92% rename from dimos/hardware/manipulators/mock/backend.py rename to dimos/hardware/manipulators/mock/adapter.py index 80b3543739..ff299669f7 100644 --- a/dimos/hardware/manipulators/mock/backend.py +++ b/dimos/hardware/manipulators/mock/adapter.py @@ -12,16 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Mock backend for testing - no hardware required. +"""Mock adapter for testing - no hardware required. Usage: >>> from dimos.hardware.manipulators.xarm import XArm - >>> from dimos.hardware.manipulators.mock import MockBackend - >>> arm = XArm(backend=MockBackend()) + >>> from dimos.hardware.manipulators.mock import MockAdapter + >>> arm = XArm(adapter=MockAdapter()) >>> arm.start() # No hardware! """ +from __future__ import annotations + import math +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dimos.hardware.manipulators.registry import AdapterRegistry from dimos.hardware.manipulators.spec import ( ControlMode, @@ -30,17 +36,17 @@ ) -class MockBackend: - """Fake backend for unit tests. +class MockAdapter: + """Fake adapter for unit tests. - Implements ManipulatorBackend protocol with in-memory state. + Implements ManipulatorAdapter protocol with in-memory state. 
Useful for: - Unit testing driver logic without hardware - Integration testing with predictable behavior - Development without physical robot """ - def __init__(self, dof: int = 6) -> None: + def __init__(self, dof: int = 6, **_: object) -> None: self._dof = dof self._positions = [0.0] * dof self._velocities = [0.0] * dof @@ -247,4 +253,9 @@ def set_efforts(self, efforts: list[float]) -> None: self._efforts = list(efforts) -__all__ = ["MockBackend"] +def register(registry: AdapterRegistry) -> None: + """Register this adapter with the registry.""" + registry.register("mock", MockAdapter) + + +__all__ = ["MockAdapter"] diff --git a/dimos/hardware/manipulators/piper/__init__.py b/dimos/hardware/manipulators/piper/__init__.py index 16c6e451cd..bfeb89b1c0 100644 --- a/dimos/hardware/manipulators/piper/__init__.py +++ b/dimos/hardware/manipulators/piper/__init__.py @@ -12,15 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Piper manipulator hardware backend. +"""Piper manipulator hardware adapter. 
Usage: - >>> from dimos.hardware.manipulators.piper import PiperBackend - >>> backend = PiperBackend(can_port="can0") - >>> backend.connect() - >>> positions = backend.read_joint_positions() + >>> from dimos.hardware.manipulators.piper import PiperAdapter + >>> adapter = PiperAdapter(can_port="can0") + >>> adapter.connect() + >>> positions = adapter.read_joint_positions() """ -from dimos.hardware.manipulators.piper.backend import PiperBackend +from dimos.hardware.manipulators.piper.adapter import PiperAdapter -__all__ = ["PiperBackend"] +__all__ = ["PiperAdapter"] diff --git a/dimos/hardware/manipulators/piper/backend.py b/dimos/hardware/manipulators/piper/adapter.py similarity index 86% rename from dimos/hardware/manipulators/piper/backend.py rename to dimos/hardware/manipulators/piper/adapter.py index 1ce91dccd1..68b5769a95 100644 --- a/dimos/hardware/manipulators/piper/backend.py +++ b/dimos/hardware/manipulators/piper/adapter.py @@ -12,32 +12,45 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Piper backend - implements ManipulatorBackend protocol. +"""Piper adapter - implements ManipulatorAdapter protocol. -Handles all Piper SDK communication and unit conversion. 
+SDK Units: angles=0.001 degrees (millidegrees), distance=mm +DimOS Units: angles=radians, distance=meters """ +from __future__ import annotations + import math import time -from typing import Any +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from dimos.hardware.manipulators.registry import AdapterRegistry from dimos.hardware.manipulators.spec import ( ControlMode, JointLimits, - ManipulatorBackend, + ManipulatorAdapter, ManipulatorInfo, ) # Unit conversion constants -# Piper uses 0.001 degrees internally -RAD_TO_PIPER = 57295.7795 # radians to Piper units (0.001 degrees) -PIPER_TO_RAD = 1.0 / RAD_TO_PIPER # Piper units to radians +# Piper uses 0.001 degrees (millidegrees) for angles +RAD_TO_MILLIDEG = 57295.7795 # radians -> millidegrees +MILLIDEG_TO_RAD = 1.0 / RAD_TO_MILLIDEG # millidegrees -> radians +MM_TO_M = 0.001 # mm -> meters + +# Hardware specs +GRIPPER_MAX_OPENING_M = 0.08 # Max gripper opening in meters + +# Default configurable parameters +DEFAULT_GRIPPER_SPEED = 1000 -class PiperBackend(ManipulatorBackend): - """Piper-specific backend. +class PiperAdapter(ManipulatorAdapter): + """Piper-specific adapter. - Implements ManipulatorBackend protocol via duck typing. + Implements ManipulatorAdapter protocol via duck typing. No inheritance required - just matching method signatures. 
Unit conversions: @@ -45,11 +58,18 @@ class PiperBackend(ManipulatorBackend): - Velocities: Piper uses internal units, we use rad/s """ - def __init__(self, can_port: str = "can0", dof: int = 6) -> None: + def __init__( + self, + address: str = "can0", + dof: int = 6, + gripper_speed: int = DEFAULT_GRIPPER_SPEED, + **_: object, + ) -> None: if dof != 6: - raise ValueError(f"PiperBackend only supports 6 DOF (got {dof})") - self._can_port = can_port + raise ValueError(f"PiperAdapter only supports 6 DOF (got {dof})") + self._can_port = address self._dof = dof + self._gripper_speed = gripper_speed self._sdk: Any = None self._connected: bool = False self._enabled: bool = False @@ -202,12 +222,12 @@ def read_joint_positions(self) -> list[float]: js = joint_msgs.joint_state return [ - js.joint_1 * PIPER_TO_RAD, - js.joint_2 * PIPER_TO_RAD, - js.joint_3 * PIPER_TO_RAD, - js.joint_4 * PIPER_TO_RAD, - js.joint_5 * PIPER_TO_RAD, - js.joint_6 * PIPER_TO_RAD, + js.joint_1 * MILLIDEG_TO_RAD, + js.joint_2 * MILLIDEG_TO_RAD, + js.joint_3 * MILLIDEG_TO_RAD, + js.joint_4 * MILLIDEG_TO_RAD, + js.joint_5 * MILLIDEG_TO_RAD, + js.joint_6 * MILLIDEG_TO_RAD, ] def read_joint_velocities(self) -> list[float]: @@ -294,7 +314,7 @@ def write_joint_positions( return False # Convert radians to Piper units (0.001 degrees) - piper_joints = [round(rad * RAD_TO_PIPER) for rad in positions] + piper_joints = [round(rad * RAD_TO_MILLIDEG) for rad in positions] # Set speed rate if not full speed if velocity < 1.0: @@ -426,12 +446,12 @@ def read_cartesian_position(self) -> dict[str, float] | None: if pose_msgs and pose_msgs.end_pose: ep = pose_msgs.end_pose return { - "x": ep.X_axis / 1000.0, # mm -> m - "y": ep.Y_axis / 1000.0, - "z": ep.Z_axis / 1000.0, - "roll": ep.RX_axis * PIPER_TO_RAD, - "pitch": ep.RY_axis * PIPER_TO_RAD, - "yaw": ep.RZ_axis * PIPER_TO_RAD, + "x": ep.X_axis * MM_TO_M, + "y": ep.Y_axis * MM_TO_M, + "z": ep.Z_axis * MM_TO_M, + "roll": ep.RX_axis * MILLIDEG_TO_RAD, + "pitch": 
ep.RY_axis * MILLIDEG_TO_RAD, + "yaw": ep.RZ_axis * MILLIDEG_TO_RAD, } except Exception: pass @@ -464,9 +484,8 @@ def read_gripper_position(self) -> float | None: gripper_msgs = self._sdk.GetArmGripperMsgs() if gripper_msgs and gripper_msgs.gripper_state: # Piper gripper position is 0-100 percentage - # Convert to meters (assume max opening 0.08m) - pos = gripper_msgs.gripper_state.grippers_angle - return float(pos / 100.0) * 0.08 + pos: float = gripper_msgs.gripper_state.grippers_angle + return (pos / 100.0) * GRIPPER_MAX_OPENING_M except Exception: pass @@ -480,10 +499,9 @@ def write_gripper_position(self, position: float) -> bool: try: if hasattr(self._sdk, "GripperCtrl"): # Convert meters to percentage (0-100) - # Assume max opening 0.08m - percentage = int((position / 0.08) * 100) + percentage = int((position / GRIPPER_MAX_OPENING_M) * 100) percentage = max(0, min(100, percentage)) - self._sdk.GripperCtrl(percentage, 1000, 0x01, 0) + self._sdk.GripperCtrl(percentage, self._gripper_speed, 0x01, 0) return True except Exception: pass @@ -502,4 +520,9 @@ def read_force_torque(self) -> list[float] | None: return None -__all__ = ["PiperBackend"] +def register(registry: AdapterRegistry) -> None: + """Register this adapter with the registry.""" + registry.register("piper", PiperAdapter) + + +__all__ = ["PiperAdapter"] diff --git a/dimos/hardware/manipulators/registry.py b/dimos/hardware/manipulators/registry.py new file mode 100644 index 0000000000..65dbe74b50 --- /dev/null +++ b/dimos/hardware/manipulators/registry.py @@ -0,0 +1,99 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Adapter registry with auto-discovery. + +Automatically discovers and registers manipulator adapters from subpackages. +Each adapter provides a `register()` function in its adapter.py module. + +Usage: + from dimos.hardware.manipulators.registry import adapter_registry + + # Create an adapter by name + adapter = adapter_registry.create("xarm", ip="192.168.1.185", dof=6) + adapter = adapter_registry.create("piper", can_port="can0", dof=6) + adapter = adapter_registry.create("mock", dof=7) + + # List available adapters + print(adapter_registry.available()) # ["mock", "piper", "xarm"] +""" + +from __future__ import annotations + +import importlib +import logging +import pkgutil +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from dimos.hardware.manipulators.spec import ManipulatorAdapter + +logger = logging.getLogger(__name__) + + +class AdapterRegistry: + """Registry for manipulator adapters with auto-discovery.""" + + def __init__(self) -> None: + self._adapters: dict[str, type[ManipulatorAdapter]] = {} + + def register(self, name: str, cls: type[ManipulatorAdapter]) -> None: + """Register an adapter class.""" + self._adapters[name.lower()] = cls + + def create(self, name: str, **kwargs: Any) -> ManipulatorAdapter: + """Create an adapter instance by name. 
+ + Args: + name: Adapter name (e.g., "xarm", "piper", "mock") + **kwargs: Arguments passed to adapter constructor + + Returns: + Configured adapter instance + + Raises: + KeyError: If adapter name is not found + """ + key = name.lower() + if key not in self._adapters: + raise KeyError(f"Unknown adapter: {name}. Available: {self.available()}") + + return self._adapters[key](**kwargs) + + def available(self) -> list[str]: + """List available adapter names.""" + return sorted(self._adapters.keys()) + + def discover(self) -> None: + """Discover and register adapters from subpackages. + + Can be called multiple times to pick up newly added adapters. + """ + import dimos.hardware.manipulators as pkg + + for _, name, ispkg in pkgutil.iter_modules(pkg.__path__): + if not ispkg: + continue + try: + module = importlib.import_module(f"dimos.hardware.manipulators.{name}.adapter") + if hasattr(module, "register"): + module.register(self) + except ImportError as e: + logger.debug(f"Skipping adapter {name}: {e}") + + +adapter_registry = AdapterRegistry() +adapter_registry.discover() + +__all__ = ["AdapterRegistry", "adapter_registry"] diff --git a/dimos/hardware/manipulators/spec.py b/dimos/hardware/manipulators/spec.py index 585043421e..ff4d38c54f 100644 --- a/dimos/hardware/manipulators/spec.py +++ b/dimos/hardware/manipulators/spec.py @@ -16,7 +16,7 @@ This file defines: 1. Shared enums and dataclasses used by all arms -2. ManipulatorBackend Protocol that backends must implement +2. ManipulatorAdapter Protocol that adapters must implement Note: No ABC for drivers. Each arm implements its own driver with full control over threading and logic. 
@@ -84,12 +84,12 @@ def default_base_transform() -> Transform: # ============================================================================ -# BACKEND PROTOCOL +# ADAPTER PROTOCOL # ============================================================================ @runtime_checkable -class ManipulatorBackend(Protocol): +class ManipulatorAdapter(Protocol): """Protocol for hardware-specific IO. Implement this per vendor SDK. All methods use SI units: @@ -255,7 +255,7 @@ def read_force_torque(self) -> list[float] | None: "ControlMode", "DriverStatus", "JointLimits", - "ManipulatorBackend", + "ManipulatorAdapter", "ManipulatorInfo", "default_base_transform", ] diff --git a/dimos/hardware/manipulators/xarm/__init__.py b/dimos/hardware/manipulators/xarm/__init__.py index 343ebc4e0e..8bcab667c1 100644 --- a/dimos/hardware/manipulators/xarm/__init__.py +++ b/dimos/hardware/manipulators/xarm/__init__.py @@ -12,15 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""XArm manipulator hardware backend. +"""XArm manipulator hardware adapter. 
Usage: - >>> from dimos.hardware.manipulators.xarm import XArmBackend - >>> backend = XArmBackend(ip="192.168.1.185", dof=6) - >>> backend.connect() - >>> positions = backend.read_joint_positions() + >>> from dimos.hardware.manipulators.xarm import XArmAdapter + >>> adapter = XArmAdapter(ip="192.168.1.185", dof=6) + >>> adapter.connect() + >>> positions = adapter.read_joint_positions() """ -from dimos.hardware.manipulators.xarm.backend import XArmBackend +from dimos.hardware.manipulators.xarm.adapter import XArmAdapter -__all__ = ["XArmBackend"] +__all__ = ["XArmAdapter"] diff --git a/dimos/hardware/manipulators/xarm/backend.py b/dimos/hardware/manipulators/xarm/adapter.py similarity index 82% rename from dimos/hardware/manipulators/xarm/backend.py rename to dimos/hardware/manipulators/xarm/adapter.py index 9adcdca24f..dd9f764031 100644 --- a/dimos/hardware/manipulators/xarm/backend.py +++ b/dimos/hardware/manipulators/xarm/adapter.py @@ -12,22 +12,34 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""XArm backend - implements ManipulatorBackend protocol. +"""XArm adapter - implements ManipulatorAdapter protocol. -Handles all XArm SDK communication and unit conversion. 
+SDK Units: angles=degrees, distance=mm, velocity=deg/s +DimOS Units: angles=radians, distance=meters, velocity=rad/s """ +from __future__ import annotations + import math +from typing import TYPE_CHECKING from xarm.wrapper import XArmAPI +if TYPE_CHECKING: + from dimos.hardware.manipulators.registry import AdapterRegistry + from dimos.hardware.manipulators.spec import ( ControlMode, JointLimits, - ManipulatorBackend, + ManipulatorAdapter, ManipulatorInfo, ) +# Unit conversion constants +MM_TO_M = 0.001 +M_TO_MM = 1000.0 +MAX_CARTESIAN_SPEED_MM = 500.0 # Max cartesian speed in mm/s + # XArm mode codes _XARM_MODE_POSITION = 0 _XARM_MODE_SERVO_CARTESIAN = 1 @@ -36,48 +48,17 @@ _XARM_MODE_JOINT_TORQUE = 6 -class XArmBackend(ManipulatorBackend): - """XArm-specific backend. +class XArmAdapter(ManipulatorAdapter): + """XArm-specific adapter. - Implements ManipulatorBackend protocol via duck typing. + Implements ManipulatorAdapter protocol via duck typing. No inheritance required - just matching method signatures. - - Unit conversions: - - Angles: XArm uses degrees, we use radians - - Positions: XArm uses mm, we use meters - - Velocities: XArm uses deg/s, we use rad/s - - TODO: Consider creating XArmPose/XArmVelocity types to encapsulate - unit conversions instead of helper methods. See ManipulatorPose discussion. 
""" - # ========================================================================= - # Unit Conversions (SI <-> XArm units) - # ========================================================================= - - @staticmethod - def _m_to_mm(m: float) -> float: - return m * 1000.0 - - @staticmethod - def _mm_to_m(mm: float) -> float: - return mm / 1000.0 - - @staticmethod - def _rad_to_deg(rad: float) -> float: - return math.degrees(rad) - - @staticmethod - def _deg_to_rad(deg: float) -> float: - return math.radians(deg) - - @staticmethod - def _velocity_to_speed_mm(velocity: float) -> float: - """Convert 0-1 velocity fraction to mm/s (max ~500 mm/s).""" - return velocity * 500 - - def __init__(self, ip: str, dof: int = 6) -> None: - self._ip = ip + def __init__(self, address: str, dof: int = 6, **_: object) -> None: + if not address: + raise ValueError("address (IP) is required for XArmAdapter") + self._ip = address self._dof = dof self._arm: XArmAPI | None = None self._control_mode: ControlMode = ControlMode.POSITION @@ -319,12 +300,12 @@ def read_cartesian_position(self) -> dict[str, float] | None: _, pose = self._arm.get_position() if pose and len(pose) >= 6: return { - "x": self._mm_to_m(pose[0]), - "y": self._mm_to_m(pose[1]), - "z": self._mm_to_m(pose[2]), - "roll": self._deg_to_rad(pose[3]), - "pitch": self._deg_to_rad(pose[4]), - "yaw": self._deg_to_rad(pose[5]), + "x": pose[0] * MM_TO_M, + "y": pose[1] * MM_TO_M, + "z": pose[2] * MM_TO_M, + "roll": math.radians(pose[3]), + "pitch": math.radians(pose[4]), + "yaw": math.radians(pose[5]), } return None @@ -338,13 +319,13 @@ def write_cartesian_position( return False code: int = self._arm.set_position( - x=self._m_to_mm(pose.get("x", 0)), - y=self._m_to_mm(pose.get("y", 0)), - z=self._m_to_mm(pose.get("z", 0)), - roll=self._rad_to_deg(pose.get("roll", 0)), - pitch=self._rad_to_deg(pose.get("pitch", 0)), - yaw=self._rad_to_deg(pose.get("yaw", 0)), - speed=self._velocity_to_speed_mm(velocity), + x=pose.get("x", 0) * 
M_TO_MM, + y=pose.get("y", 0) * M_TO_MM, + z=pose.get("z", 0) * M_TO_MM, + roll=math.degrees(pose.get("roll", 0)), + pitch=math.degrees(pose.get("pitch", 0)), + yaw=math.degrees(pose.get("yaw", 0)), + speed=velocity * MAX_CARTESIAN_SPEED_MM, wait=False, ) return code == 0 @@ -362,7 +343,7 @@ def read_gripper_position(self) -> float | None: code: int = result[0] pos: float | None = result[1] if code == 0 and pos is not None: - return pos / 1000.0 # mm -> m + return pos * MM_TO_M return None def write_gripper_position(self, position: float) -> bool: @@ -370,8 +351,9 @@ def write_gripper_position(self, position: float) -> bool: if not self._arm: return False - pos_mm = position * 1000.0 # m -> mm - code: int = self._arm.set_gripper_position(pos_mm) + self._arm.set_gripper_enable(True) + pos_mm = position * M_TO_MM + code: int = self._arm.set_gripper_position(pos_mm, wait=True) return code == 0 # ========================================================================= @@ -389,4 +371,9 @@ def read_force_torque(self) -> list[float] | None: return None -__all__ = ["XArmBackend"] +def register(registry: AdapterRegistry) -> None: + """Register this adapter with the registry.""" + registry.register("xarm", XArmAdapter) + + +__all__ = ["XArmAdapter"] diff --git a/dimos/hardware/sensors/camera/gstreamer/gstreamer_camera.py b/dimos/hardware/sensors/camera/gstreamer/gstreamer_camera.py index 949330881a..9161185d50 100644 --- a/dimos/hardware/sensors/camera/gstreamer/gstreamer_camera.py +++ b/dimos/hardware/sensors/camera/gstreamer/gstreamer_camera.py @@ -22,7 +22,9 @@ import numpy as np -from dimos.core import Module, ModuleConfig, Out, rpc +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig +from dimos.core.stream import Out from dimos.msgs.sensor_msgs import Image, ImageFormat from dimos.utils.logging_config import setup_logger diff --git a/dimos/hardware/sensors/camera/gstreamer/readme.md b/dimos/hardware/sensors/camera/gstreamer/readme.md 
deleted file mode 100644 index 29198aea24..0000000000 --- a/dimos/hardware/sensors/camera/gstreamer/readme.md +++ /dev/null @@ -1 +0,0 @@ -This gstreamer stuff is obsoleted but could be adopted as an alternative hardware for camera module if needed diff --git a/dimos/hardware/sensors/camera/module.py b/dimos/hardware/sensors/camera/module.py index 6f51febfef..11821d4724 100644 --- a/dimos/hardware/sensors/camera/module.py +++ b/dimos/hardware/sensors/camera/module.py @@ -12,24 +12,26 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections.abc import Callable, Generator +from collections.abc import Callable from dataclasses import dataclass, field import time from typing import Any import reactivex as rx -from reactivex import operators as ops -from dimos.agents import Output, Reducer, Stream, skill -from dimos.core import Module, ModuleConfig, Out, rpc +from dimos.agents.annotation import skill from dimos.core.blueprints import autoconnect +from dimos.core.core import rpc +from dimos.core.global_config import GlobalConfig, global_config +from dimos.core.module import Module, ModuleConfig +from dimos.core.stream import Out from dimos.hardware.sensors.camera.spec import CameraHardware from dimos.hardware.sensors.camera.webcam import Webcam from dimos.msgs.geometry_msgs import Quaternion, Transform, Vector3 from dimos.msgs.sensor_msgs.CameraInfo import CameraInfo from dimos.msgs.sensor_msgs.Image import Image, sharpness_barrier from dimos.spec import perception -from dimos.utils.reactive import iter_observable +from dimos.visualization.rerun.bridge import rerun_bridge def default_transform() -> Transform: @@ -57,12 +59,17 @@ class CameraModule(Module[CameraModuleConfig], perception.Camera): config: CameraModuleConfig default_config = CameraModuleConfig + _global_config: GlobalConfig - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args: Any, cfg: GlobalConfig = 
global_config, **kwargs: Any) -> None: + self._global_config = cfg + self._latest_image: Image | None = None super().__init__(*args, **kwargs) @rpc def start(self) -> None: + super().start() + if callable(self.config.hardware): self.hardware = self.config.hardware() else: @@ -73,8 +80,12 @@ def start(self) -> None: if self.config.frequency > 0: stream = stream.pipe(sharpness_barrier(self.config.frequency)) + def on_image(image: Image) -> None: + self.color_image.publish(image) + self._latest_image = image + self._disposables.add( - stream.subscribe(self.color_image.publish), + stream.subscribe(on_image), ) self._disposables.add( @@ -101,11 +112,12 @@ def publish_metadata(self) -> None: self.tf.publish(camera_link, camera_optical) - # actually skills should support on_demand passive skills so we don't emit this periodically - # but just provide the latest frame on demand - @skill(stream=Stream.passive, output=Output.image, reducer=Reducer.latest) # type: ignore[arg-type] - def video_stream(self) -> Generator[Image, None, None]: - yield from iter_observable(self.hardware.image_stream().pipe(ops.sample(1.0))) + @skill + def take_a_picture(self) -> Image: + """Grabs and returns the latest image from the camera.""" + if self._latest_image is None: + raise RuntimeError("No image received from camera yet.") + return self._latest_image def stop(self) -> None: if self.hardware and hasattr(self.hardware, "stop"): @@ -117,7 +129,7 @@ def stop(self) -> None: demo_camera = autoconnect( camera_module(), + rerun_bridge(), ) - __all__ = ["CameraModule", "camera_module"] diff --git a/dimos/hardware/sensors/camera/realsense/__init__.py b/dimos/hardware/sensors/camera/realsense/__init__.py index c3e63d77d8..58f519a12e 100644 --- a/dimos/hardware/sensors/camera/realsense/__init__.py +++ b/dimos/hardware/sensors/camera/realsense/__init__.py @@ -12,10 +12,32 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from dimos.hardware.sensors.camera.realsense.camera import ( - RealSenseCamera, - RealSenseCameraConfig, - realsense_camera, -) +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dimos.hardware.sensors.camera.realsense.camera import ( + RealSenseCamera, + RealSenseCameraConfig, + realsense_camera, + ) __all__ = ["RealSenseCamera", "RealSenseCameraConfig", "realsense_camera"] + + +def __getattr__(name: str) -> object: + if name in __all__: + from dimos.hardware.sensors.camera.realsense.camera import ( + RealSenseCamera, + RealSenseCameraConfig, + realsense_camera, + ) + + globals().update( + RealSenseCamera=RealSenseCamera, + RealSenseCameraConfig=RealSenseCameraConfig, + realsense_camera=realsense_camera, + ) + return globals()[name] + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/dimos/hardware/sensors/camera/realsense/camera.py b/dimos/hardware/sensors/camera/realsense/camera.py index 3613dbf0a2..4ff0ccf6c4 100644 --- a/dimos/hardware/sensors/camera/realsense/camera.py +++ b/dimos/hardware/sensors/camera/realsense/camera.py @@ -12,18 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import atexit from dataclasses import dataclass, field import threading import time +from typing import TYPE_CHECKING import cv2 import numpy as np -import pyrealsense2 as rs # type: ignore[import-not-found] import reactivex as rx from scipy.spatial.transform import Rotation # type: ignore[import-untyped] -from dimos.core import Module, ModuleConfig, Out, rpc +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig from dimos.core.module_coordinator import ModuleCoordinator from dimos.core.transport import LCMTransport from dimos.hardware.sensors.camera.spec import ( @@ -39,6 +42,11 @@ from dimos.spec import perception from dimos.utils.reactive import backpressure +if TYPE_CHECKING: + import pyrealsense2 as rs # type: ignore[import-not-found] + + from dimos.core.stream import Out + def default_base_transform() -> Transform: """Default identity transform for camera mounting.""" @@ -112,6 +120,8 @@ def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def] @rpc def start(self) -> None: + import pyrealsense2 as rs # type: ignore[import-not-found] + self._pipeline = rs.pipeline() config = rs.config() @@ -178,6 +188,8 @@ def _publish_camera_info(self) -> None: self.depth_camera_info.publish(self._depth_camera_info) def _build_camera_info(self) -> None: + import pyrealsense2 as rs # type: ignore[import-not-found] + if self._profile is None: return @@ -203,6 +215,8 @@ def _build_camera_info(self) -> None: ) def _intrinsics_to_camera_info(self, intrinsics: rs.intrinsics, frame_id: str) -> CameraInfo: + import pyrealsense2 as rs # type: ignore[import-not-found] + fx, fy = intrinsics.fx, intrinsics.fy cx, cy = intrinsics.ppx, intrinsics.ppy @@ -230,6 +244,8 @@ def _intrinsics_to_camera_info(self, intrinsics: rs.intrinsics, frame_id: str) - ) def _get_extrinsics(self) -> None: + import pyrealsense2 as rs # type: ignore[import-not-found] + if self._profile is None or not 
self.config.enable_depth: return diff --git a/dimos/hardware/sensors/camera/realsense/handeyeout_xarm6/calibration.json b/dimos/hardware/sensors/camera/realsense/handeyeout_xarm6/calibration.json new file mode 100644 index 0000000000..2591a2f04a --- /dev/null +++ b/dimos/hardware/sensors/camera/realsense/handeyeout_xarm6/calibration.json @@ -0,0 +1,31 @@ +{ + "transform_id": "EEF_TO_COLOR_OPT", + "parent_frame": "eef", + "child_frame": "camera_color_optical_frame", + "direction": "T_parent_child", + "units": { + "translation": "meters", + "angles": "radians" + }, + "translation_m": { + "x": 0.067052239, + "y": -0.0311387575, + "z": 0.021611456 + }, + "rotation_rpy_rad": { + "roll": -0.004202176, + "pitch": -0.00848499, + "yaw": 1.5898775, + "convention": "R = Rz(yaw) * Ry(pitch) * Rx(roll)" + }, + "rotation_quat_wxyz": { + "w": 0.7003270047, + "x": 0.0015569323, + "y": -0.0044709112, + "z": 0.7138064706 + }, + "notes": [ + "This is an extrinsic transform from end-effector frame to RealSense color optical frame.", + "Keep quaternion as the source of truth; use RPY mainly for debugging/printing." 
+ ] +} diff --git a/dimos/hardware/sensors/camera/webcam.py b/dimos/hardware/sensors/camera/webcam.py index 54989ca568..51199624fe 100644 --- a/dimos/hardware/sensors/camera/webcam.py +++ b/dimos/hardware/sensors/camera/webcam.py @@ -19,12 +19,11 @@ from typing import Literal import cv2 -from dimos_lcm.sensor_msgs import CameraInfo from reactivex import create from reactivex.observable import Observable from dimos.hardware.sensors.camera.spec import CameraConfig, CameraHardware -from dimos.msgs.sensor_msgs import Image +from dimos.msgs.sensor_msgs import CameraInfo, Image from dimos.msgs.sensor_msgs.Image import ImageFormat from dimos.utils.reactive import backpressure diff --git a/dimos/hardware/sensors/camera/zed/__init__.py b/dimos/hardware/sensors/camera/zed/__init__.py index dd23096c1d..f8e73273bf 100644 --- a/dimos/hardware/sensors/camera/zed/__init__.py +++ b/dimos/hardware/sensors/camera/zed/__init__.py @@ -20,7 +20,7 @@ # Check if ZED SDK is available try: - import pyzed.sl as sl + import pyzed.sl as sl # noqa: F401 HAS_ZED_SDK = True except ImportError: @@ -43,6 +43,12 @@ def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def] "ZED SDK not installed. Please install pyzed package to use ZED camera functionality." ) + def zed_camera(*args: object, **kwargs: object) -> None: # type: ignore[no-redef] + raise ModuleNotFoundError( + "ZED SDK not installed. 
Please install pyzed package to use ZED camera functionality.", + name="pyzed", + ) + # Set up camera calibration provider (always available) CALIBRATION_DIR = Path(__file__).parent diff --git a/dimos/hardware/sensors/camera/zed/camera.py b/dimos/hardware/sensors/camera/zed/camera.py index 67d80af4b0..171b706ff9 100644 --- a/dimos/hardware/sensors/camera/zed/camera.py +++ b/dimos/hardware/sensors/camera/zed/camera.py @@ -18,12 +18,14 @@ from dataclasses import dataclass, field import threading import time +from typing import TYPE_CHECKING import cv2 import pyzed.sl as sl import reactivex as rx -from dimos.core import Module, ModuleConfig, Out, rpc +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig from dimos.core.module_coordinator import ModuleCoordinator from dimos.core.transport import LCMTransport from dimos.hardware.sensors.camera.spec import ( @@ -39,6 +41,9 @@ from dimos.spec import perception from dimos.utils.reactive import backpressure +if TYPE_CHECKING: + from dimos.core.stream import Out + def default_base_transform() -> Transform: """Default identity transform for camera mounting.""" diff --git a/dimos/hardware/sensors/fake_zed_module.py b/dimos/hardware/sensors/fake_zed_module.py index e8fc51bf31..ec5613077d 100644 --- a/dimos/hardware/sensors/fake_zed_module.py +++ b/dimos/hardware/sensors/fake_zed_module.py @@ -24,7 +24,9 @@ from dimos_lcm.sensor_msgs import CameraInfo import numpy as np -from dimos.core import Module, ModuleConfig, Out, rpc +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig +from dimos.core.stream import Out from dimos.msgs.geometry_msgs import PoseStamped from dimos.msgs.sensor_msgs import Image, ImageFormat from dimos.msgs.std_msgs import Header diff --git a/dimos/models/manipulation/__init__.py b/dimos/hardware/sensors/lidar/__init__.py similarity index 100% rename from dimos/models/manipulation/__init__.py rename to dimos/hardware/sensors/lidar/__init__.py diff 
--git a/dimos/hardware/sensors/lidar/common/dimos_native_module.hpp b/dimos/hardware/sensors/lidar/common/dimos_native_module.hpp new file mode 100644 index 0000000000..cdd5d85914 --- /dev/null +++ b/dimos/hardware/sensors/lidar/common/dimos_native_module.hpp @@ -0,0 +1,86 @@ +// Copyright 2026 Dimensional Inc. +// SPDX-License-Identifier: Apache-2.0 +// +// Lightweight header-only helper for dimos NativeModule C++ binaries. +// Parses --<name> <value> CLI args passed by the Python NativeModule wrapper. + +#pragma once + +#include <atomic> +#include <map> +#include <stdexcept> +#include <string> + +#include "std_msgs/Header.hpp" +#include "std_msgs/Time.hpp" + +namespace dimos { + +class NativeModule { +public: + NativeModule(int argc, char** argv) { + for (int i = 1; i < argc; ++i) { + std::string arg(argv[i]); + if (arg.size() > 2 && arg[0] == '-' && arg[1] == '-' && i + 1 < argc) { + args_[arg.substr(2)] = argv[++i]; + } + } + } + + /// Get the full LCM channel string for a declared port. + /// Format is "<topic>#<type>", e.g. "/pointcloud#sensor_msgs.PointCloud2". + /// This is the exact channel name used by Python LCMTransport subscribers. + const std::string& topic(const std::string& port) const { + auto it = args_.find(port); + if (it == args_.end()) { + throw std::runtime_error("NativeModule: no topic for port '" + port + "'"); + } + return it->second; + } + + /// Get a string arg value, or a default if not present. + std::string arg(const std::string& key, const std::string& default_val = "") const { + auto it = args_.find(key); + return it != args_.end() ? it->second : default_val; + } + + /// Get a float arg value, or a default if not present. + float arg_float(const std::string& key, float default_val = 0.0f) const { + auto it = args_.find(key); + return it != args_.end() ? std::stof(it->second) : default_val; + } + + /// Get an int arg value, or a default if not present. + int arg_int(const std::string& key, int default_val = 0) const { + auto it = args_.find(key); + return it != args_.end() ? 
std::stoi(it->second) : default_val; + } + + /// Check if a port/arg was provided. + bool has(const std::string& key) const { + return args_.count(key) > 0; + } + +private: + std::map<std::string, std::string> args_; +}; + +/// Convert seconds (double) to a ROS-style Time message. +inline std_msgs::Time time_from_seconds(double t) { + std_msgs::Time ts; + ts.sec = static_cast<int32_t>(t); + ts.nsec = static_cast<int32_t>((t - ts.sec) * 1e9); + return ts; +} + +/// Build a stamped Header with auto-incrementing sequence number. +inline std_msgs::Header make_header(const std::string& frame_id, double ts) { + static std::atomic<uint32_t> seq{0}; + std_msgs::Header h; + h.seq = seq.fetch_add(1, std::memory_order_relaxed); + h.stamp = time_from_seconds(ts); + h.frame_id = frame_id; + return h; +} + +} // namespace dimos diff --git a/dimos/hardware/sensors/lidar/common/livox_sdk_config.hpp b/dimos/hardware/sensors/lidar/common/livox_sdk_config.hpp new file mode 100644 index 0000000000..d7101c850e --- /dev/null +++ b/dimos/hardware/sensors/lidar/common/livox_sdk_config.hpp @@ -0,0 +1,116 @@ +// Copyright 2026 Dimensional Inc. +// SPDX-License-Identifier: Apache-2.0 +// +// Shared Livox SDK2 configuration utilities for dimos native modules. +// Used by both mid360_native and fastlio2_native. 
+ +#pragma once + +#include <cstdint> +#include <cstdio> + +#include <string> +#include <utility> + +#include <sys/mman.h> +#include <unistd.h> +#include <livox_lidar_api.h> + +namespace livox_common { + +// Gravity constant for converting accelerometer data from g to m/s^2 +inline constexpr double GRAVITY_MS2 = 9.80665; + +// Livox data_type values (not provided as named constants in SDK2 header) +inline constexpr uint8_t DATA_TYPE_IMU = 0x00; +inline constexpr uint8_t DATA_TYPE_CARTESIAN_HIGH = 0x01; +inline constexpr uint8_t DATA_TYPE_CARTESIAN_LOW = 0x02; + +// SDK network port configuration for Livox Mid-360 +struct SdkPorts { + int cmd_data = 56100; + int push_msg = 56200; + int point_data = 56300; + int imu_data = 56400; + int log_data = 56500; + int host_cmd_data = 56101; + int host_push_msg = 56201; + int host_point_data = 56301; + int host_imu_data = 56401; + int host_log_data = 56501; +}; + +// Write Livox SDK JSON config to an in-memory file (memfd_create). +// Returns {fd, path} — caller must close(fd) after LivoxLidarSdkInit reads it. +inline std::pair<int, std::string> write_sdk_config(const std::string& host_ip, + const std::string& lidar_ip, + const SdkPorts& ports) { + int fd = memfd_create("livox_sdk_config", 0); + if (fd < 0) { + perror("memfd_create"); + return {-1, ""}; + } + + FILE* fp = fdopen(fd, "w"); + if (!fp) { + perror("fdopen"); + close(fd); + return {-1, ""}; + } + + fprintf(fp, + "{\n" + " \"MID360\": {\n" + " \"lidar_net_info\": {\n" + " \"cmd_data_port\": %d,\n" + " \"push_msg_port\": %d,\n" + " \"point_data_port\": %d,\n" + " \"imu_data_port\": %d,\n" + " \"log_data_port\": %d\n" + " },\n" + " \"host_net_info\": [\n" + " {\n" + " \"host_ip\": \"%s\",\n" + " \"multicast_ip\": \"224.1.1.5\",\n" + " \"cmd_data_port\": %d,\n" + " \"push_msg_port\": %d,\n" + " \"point_data_port\": %d,\n" + " \"imu_data_port\": %d,\n" + " \"log_data_port\": %d\n" + " }\n" + " ]\n" + " }\n" + "}\n", + ports.cmd_data, ports.push_msg, ports.point_data, + ports.imu_data, ports.log_data, + host_ip.c_str(), + ports.host_cmd_data, ports.host_push_msg, 
ports.host_point_data, + ports.host_imu_data, ports.host_log_data); + fflush(fp); // flush but don't fclose — that would close fd + + char path[64]; + snprintf(path, sizeof(path), "/proc/self/fd/%d", fd); + return {fd, path}; +} + +// Initialize Livox SDK from in-memory config. +// Returns true on success. Handles fd lifecycle internally. +inline bool init_livox_sdk(const std::string& host_ip, + const std::string& lidar_ip, + const SdkPorts& ports) { + auto [fd, path] = write_sdk_config(host_ip, lidar_ip, ports); + if (fd < 0) { + fprintf(stderr, "Error: failed to write SDK config\n"); + return false; + } + + bool ok = LivoxLidarSdkInit(path.c_str(), host_ip.c_str()); + close(fd); + + if (!ok) { + fprintf(stderr, "Error: LivoxLidarSdkInit failed\n"); + } + return ok; +} + +} // namespace livox_common diff --git a/dimos/robot/unitree_webrtc/testing/__init__.py b/dimos/hardware/sensors/lidar/fastlio2/__init__.py similarity index 100% rename from dimos/robot/unitree_webrtc/testing/__init__.py rename to dimos/hardware/sensors/lidar/fastlio2/__init__.py diff --git a/dimos/hardware/sensors/lidar/fastlio2/config/avia.yaml b/dimos/hardware/sensors/lidar/fastlio2/config/avia.yaml new file mode 100644 index 0000000000..8447b64658 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/config/avia.yaml @@ -0,0 +1,35 @@ +common: + lid_topic: "/livox/lidar" + imu_topic: "/livox/imu" + time_sync_en: false # ONLY turn on when external time synchronization is really not possible + time_offset_lidar_to_imu: 0.0 # Time offset between lidar and IMU calibrated by other algorithms, e.g. LI-Init (can be found in README). + # This param will take effect no matter what time_sync_en is. 
So if the time offset is not known exactly, please set as 0.0 + +preprocess: + lidar_type: 1 # 1 for Livox serials LiDAR, 2 for Velodyne LiDAR, 3 for ouster LiDAR, + scan_line: 6 + blind: 4 + +mapping: + acc_cov: 0.1 + gyr_cov: 0.1 + b_acc_cov: 0.0001 + b_gyr_cov: 0.0001 + fov_degree: 90 + det_range: 450.0 + extrinsic_est_en: false # true: enable the online estimation of IMU-LiDAR extrinsic + extrinsic_T: [ 0.04165, 0.02326, -0.0284 ] + extrinsic_R: [ 1, 0, 0, + 0, 1, 0, + 0, 0, 1] + +publish: + path_en: false + scan_publish_en: true # false: close all the point cloud output + dense_publish_en: true # false: low down the points number in a global-frame point clouds scan. + scan_bodyframe_pub_en: true # true: output the point cloud scans in IMU-body-frame + +pcd_save: + pcd_save_en: true + interval: -1 # how many LiDAR frames saved in each pcd file; + # -1 : all frames will be saved in ONE pcd file, may lead to memory crash when having too much frames. diff --git a/dimos/hardware/sensors/lidar/fastlio2/config/horizon.yaml b/dimos/hardware/sensors/lidar/fastlio2/config/horizon.yaml new file mode 100644 index 0000000000..43db0c3bff --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/config/horizon.yaml @@ -0,0 +1,35 @@ +common: + lid_topic: "/livox/lidar" + imu_topic: "/livox/imu" + time_sync_en: false # ONLY turn on when external time synchronization is really not possible + time_offset_lidar_to_imu: 0.0 # Time offset between lidar and IMU calibrated by other algorithms, e.g. LI-Init (can be found in README). + # This param will take effect no matter what time_sync_en is. 
So if the time offset is not known exactly, please set as 0.0 + +preprocess: + lidar_type: 1 # 1 for Livox serials LiDAR, 2 for Velodyne LiDAR, 3 for ouster LiDAR, + scan_line: 6 + blind: 4 + +mapping: + acc_cov: 0.1 + gyr_cov: 0.1 + b_acc_cov: 0.0001 + b_gyr_cov: 0.0001 + fov_degree: 100 + det_range: 260.0 + extrinsic_est_en: true # true: enable the online estimation of IMU-LiDAR extrinsic + extrinsic_T: [ 0.05512, 0.02226, -0.0297 ] + extrinsic_R: [ 1, 0, 0, + 0, 1, 0, + 0, 0, 1] + +publish: + path_en: false + scan_publish_en: true # false: close all the point cloud output + dense_publish_en: true # false: low down the points number in a global-frame point clouds scan. + scan_bodyframe_pub_en: true # true: output the point cloud scans in IMU-body-frame + +pcd_save: + pcd_save_en: true + interval: -1 # how many LiDAR frames saved in each pcd file; + # -1 : all frames will be saved in ONE pcd file, may lead to memory crash when having too much frames. diff --git a/dimos/hardware/sensors/lidar/fastlio2/config/marsim.yaml b/dimos/hardware/sensors/lidar/fastlio2/config/marsim.yaml new file mode 100644 index 0000000000..ad6c89121a --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/config/marsim.yaml @@ -0,0 +1,35 @@ +common: + lid_topic: "/quad0_pcl_render_node/sensor_cloud" + imu_topic: "/quad_0/imu" + time_sync_en: false # ONLY turn on when external time synchronization is really not possible + time_offset_lidar_to_imu: 0.0 # Time offset between lidar and IMU calibrated by other algorithms, e.g. LI-Init (can be found in README). + # This param will take effect no matter what time_sync_en is. 
So if the time offset is not known exactly, please set as 0.0 + +preprocess: + lidar_type: 4 # 1 for Livox serials LiDAR, 2 for Velodyne LiDAR, 3 for ouster LiDAR, + scan_line: 4 + blind: 0.5 + +mapping: + acc_cov: 0.1 + gyr_cov: 0.1 + b_acc_cov: 0.0001 + b_gyr_cov: 0.0001 + fov_degree: 90 + det_range: 50.0 + extrinsic_est_en: false # true: enable the online estimation of IMU-LiDAR extrinsic + extrinsic_T: [ -0.0, -0.0, 0.0 ] + extrinsic_R: [ 1, 0, 0, + 0, 1, 0, + 0, 0, 1] + +publish: + path_en: false + scan_publish_en: true # false: close all the point cloud output + dense_publish_en: true # false: low down the points number in a global-frame point clouds scan. + scan_bodyframe_pub_en: true # true: output the point cloud scans in IMU-body-frame + +pcd_save: + pcd_save_en: true + interval: -1 # how many LiDAR frames saved in each pcd file; + # -1 : all frames will be saved in ONE pcd file, may lead to memory crash when having too much frames. diff --git a/dimos/hardware/sensors/lidar/fastlio2/config/mid360.yaml b/dimos/hardware/sensors/lidar/fastlio2/config/mid360.yaml new file mode 100644 index 0000000000..512047ee48 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/config/mid360.yaml @@ -0,0 +1,35 @@ +common: + lid_topic: "/livox/lidar" + imu_topic: "/livox/imu" + time_sync_en: false # ONLY turn on when external time synchronization is really not possible + time_offset_lidar_to_imu: 0.0 # Time offset between lidar and IMU calibrated by other algorithms, e.g. LI-Init (can be found in README). + # This param will take effect no matter what time_sync_en is. 
So if the time offset is not known exactly, please set as 0.0 + +preprocess: + lidar_type: 1 # 1 for Livox serials LiDAR, 2 for Velodyne LiDAR, 3 for ouster LiDAR, + scan_line: 4 + blind: 0.5 + +mapping: + acc_cov: 0.1 + gyr_cov: 0.1 + b_acc_cov: 0.0001 + b_gyr_cov: 0.0001 + fov_degree: 360 + det_range: 100.0 + extrinsic_est_en: false # true: enable the online estimation of IMU-LiDAR extrinsic + extrinsic_T: [ -0.011, -0.02329, 0.04412 ] + extrinsic_R: [ 1, 0, 0, + 0, 1, 0, + 0, 0, 1] + +publish: + path_en: false + scan_publish_en: true # false: close all the point cloud output + dense_publish_en: true # false: low down the points number in a global-frame point clouds scan. + scan_bodyframe_pub_en: true # true: output the point cloud scans in IMU-body-frame + +pcd_save: + pcd_save_en: true + interval: -1 # how many LiDAR frames saved in each pcd file; + # -1 : all frames will be saved in ONE pcd file, may lead to memory crash when having too much frames. diff --git a/dimos/hardware/sensors/lidar/fastlio2/config/ouster64.yaml b/dimos/hardware/sensors/lidar/fastlio2/config/ouster64.yaml new file mode 100644 index 0000000000..9d891bbeba --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/config/ouster64.yaml @@ -0,0 +1,36 @@ +common: + lid_topic: "/os_cloud_node/points" + imu_topic: "/os_cloud_node/imu" + time_sync_en: false # ONLY turn on when external time synchronization is really not possible + time_offset_lidar_to_imu: 0.0 # Time offset between lidar and IMU calibrated by other algorithms, e.g. LI-Init (can be found in README). + # This param will take effect no matter what time_sync_en is. So if the time offset is not known exactly, please set as 0.0 + +preprocess: + lidar_type: 3 # 1 for Livox serials LiDAR, 2 for Velodyne LiDAR, 3 for ouster LiDAR, + scan_line: 64 + timestamp_unit: 3 # 0-second, 1-milisecond, 2-microsecond, 3-nanosecond. 
+ blind: 4 + +mapping: + acc_cov: 0.1 + gyr_cov: 0.1 + b_acc_cov: 0.0001 + b_gyr_cov: 0.0001 + fov_degree: 180 + det_range: 150.0 + extrinsic_est_en: false # true: enable the online estimation of IMU-LiDAR extrinsic + extrinsic_T: [ 0.0, 0.0, 0.0 ] + extrinsic_R: [1, 0, 0, + 0, 1, 0, + 0, 0, 1] + +publish: + path_en: false + scan_publish_en: true # false: close all the point cloud output + dense_publish_en: true # false: low down the points number in a global-frame point clouds scan. + scan_bodyframe_pub_en: true # true: output the point cloud scans in IMU-body-frame + +pcd_save: + pcd_save_en: true + interval: -1 # how many LiDAR frames saved in each pcd file; + # -1 : all frames will be saved in ONE pcd file, may lead to memory crash when having too much frames. diff --git a/dimos/hardware/sensors/lidar/fastlio2/config/velodyne.yaml b/dimos/hardware/sensors/lidar/fastlio2/config/velodyne.yaml new file mode 100644 index 0000000000..450eda48b8 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/config/velodyne.yaml @@ -0,0 +1,37 @@ +common: + lid_topic: "/velodyne_points" + imu_topic: "/imu/data" + time_sync_en: false # ONLY turn on when external time synchronization is really not possible + time_offset_lidar_to_imu: 0.0 # Time offset between lidar and IMU calibrated by other algorithms, e.g. LI-Init (can be found in README). + # This param will take effect no matter what time_sync_en is. So if the time offset is not known exactly, please set as 0.0 + +preprocess: + lidar_type: 2 # 1 for Livox serials LiDAR, 2 for Velodyne LiDAR, 3 for ouster LiDAR, + scan_line: 32 + scan_rate: 10 # only need to be set for velodyne, unit: Hz, + timestamp_unit: 2 # the unit of time/t field in the PointCloud2 rostopic: 0-second, 1-milisecond, 2-microsecond, 3-nanosecond. 
+ blind: 2 + +mapping: + acc_cov: 0.1 + gyr_cov: 0.1 + b_acc_cov: 0.0001 + b_gyr_cov: 0.0001 + fov_degree: 180 + det_range: 100.0 + extrinsic_est_en: false # true: enable the online estimation of IMU-LiDAR extrinsic, + extrinsic_T: [ 0, 0, 0.28] + extrinsic_R: [ 1, 0, 0, + 0, 1, 0, + 0, 0, 1] + +publish: + path_en: false + scan_publish_en: true # false: close all the point cloud output + dense_publish_en: true # false: low down the points number in a global-frame point clouds scan. + scan_bodyframe_pub_en: true # true: output the point cloud scans in IMU-body-frame + +pcd_save: + pcd_save_en: true + interval: -1 # how many LiDAR frames saved in each pcd file; + # -1 : all frames will be saved in ONE pcd file, may lead to memory crash when having too much frames. diff --git a/dimos/hardware/sensors/lidar/fastlio2/cpp/CMakeLists.txt b/dimos/hardware/sensors/lidar/fastlio2/cpp/CMakeLists.txt new file mode 100644 index 0000000000..39f9f90443 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/cpp/CMakeLists.txt @@ -0,0 +1,117 @@ +cmake_minimum_required(VERSION 3.14) +project(fastlio2_native CXX) + +set(CMAKE_CXX_STANDARD 17) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3") + +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${CMAKE_SOURCE_DIR}/result" CACHE PATH "" FORCE) +endif() + +# OpenMP for parallel processing +find_package(OpenMP QUIET) +if(OpenMP_CXX_FOUND) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") +endif() + +# MP defines (same logic as FAST-LIO) +message("CPU architecture: ${CMAKE_SYSTEM_PROCESSOR}") +if(CMAKE_SYSTEM_PROCESSOR MATCHES "(x86)|(X86)|(amd64)|(AMD64)") + include(ProcessorCount) + ProcessorCount(N) + if(N GREATER 4) + add_definitions(-DMP_EN -DMP_PROC_NUM=3) + elseif(N GREATER 3) + add_definitions(-DMP_EN -DMP_PROC_NUM=2) + else() + add_definitions(-DMP_PROC_NUM=1) + endif() +else() + add_definitions(-DMP_PROC_NUM=1) +endif() + +# Fetch dependencies 
+include(FetchContent) + +# FAST-LIO-NON-ROS (pass -DFASTLIO_DIR= or auto-fetched from GitHub) +if(NOT FASTLIO_DIR) + message(STATUS "FASTLIO_DIR not set, fetching FAST-LIO-NON-ROS from GitHub...") + FetchContent_Declare(fast_lio + GIT_REPOSITORY https://github.com/leshy/FAST-LIO-NON-ROS.git + GIT_TAG dimos-integration + GIT_SHALLOW TRUE + ) + FetchContent_MakeAvailable(fast_lio) + set(FASTLIO_DIR ${fast_lio_SOURCE_DIR}) +endif() + +# dimos-lcm C++ message headers +FetchContent_Declare(dimos_lcm + GIT_REPOSITORY https://github.com/dimensionalOS/dimos-lcm.git + GIT_TAG main + GIT_SHALLOW TRUE +) +FetchContent_MakeAvailable(dimos_lcm) + +# LCM +find_package(PkgConfig REQUIRED) +pkg_check_modules(LCM REQUIRED lcm) + +# Eigen3 +find_package(Eigen3 REQUIRED) + +# PCL (only components we need — avoid full PCL which drags in VTK via io) +find_package(PCL 1.8 REQUIRED COMPONENTS common filters) + +# yaml-cpp (FAST-LIO config parsing — standard YAML format) +find_package(yaml-cpp REQUIRED) + +# Livox SDK2 (from nix or /usr/local fallback) +find_library(LIVOX_SDK livox_lidar_sdk_shared) +if(NOT LIVOX_SDK) + message(FATAL_ERROR "Livox SDK2 not found. Available via nix flake in lidar/livox/") +endif() +get_filename_component(LIVOX_SDK_LIB_DIR ${LIVOX_SDK} DIRECTORY) +get_filename_component(LIVOX_SDK_PREFIX ${LIVOX_SDK_LIB_DIR} DIRECTORY) +set(LIVOX_SDK_INCLUDE_DIR ${LIVOX_SDK_PREFIX}/include) + +add_executable(fastlio2_native + main.cpp + ${FASTLIO_DIR}/src/preprocess.cpp + ${FASTLIO_DIR}/include/ikd-Tree/ikd_Tree.cpp +) + +# Shared Livox common headers (livox_sdk_config.hpp etc.) 
+if(NOT LIVOX_COMMON_DIR) + set(LIVOX_COMMON_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../common) +endif() + +target_include_directories(fastlio2_native PRIVATE + ${FASTLIO_DIR}/include + ${FASTLIO_DIR}/src + ${dimos_lcm_SOURCE_DIR}/generated/cpp_lcm_msgs + ${LCM_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} + ${PCL_INCLUDE_DIRS} + ${CMAKE_CURRENT_SOURCE_DIR} + ${LIVOX_COMMON_DIR} + ${LIVOX_SDK_INCLUDE_DIR} +) + +target_link_libraries(fastlio2_native PRIVATE + ${LCM_LIBRARIES} + ${LIVOX_SDK} + ${PCL_LIBRARIES} + yaml-cpp::yaml-cpp +) + +if(OpenMP_CXX_FOUND) + target_link_libraries(fastlio2_native PRIVATE OpenMP::OpenMP_CXX) +endif() + +target_link_directories(fastlio2_native PRIVATE + ${LCM_LIBRARY_DIRS} +) + +install(TARGETS fastlio2_native DESTINATION bin) diff --git a/dimos/hardware/sensors/lidar/fastlio2/cpp/README.md b/dimos/hardware/sensors/lidar/fastlio2/cpp/README.md new file mode 100644 index 0000000000..da6e7c8803 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/cpp/README.md @@ -0,0 +1,109 @@ +# FAST-LIO2 Native Module (C++) + +Real-time LiDAR SLAM using FAST-LIO2 with integrated Livox Mid-360 driver. +Binds Livox SDK2 directly into FAST-LIO-NON-ROS: SDK callbacks feed +CustomMsg/Imu to FastLio, which performs EKF-LOAM SLAM. Registered +(world-frame) point clouds and odometry are published on LCM. + +## Build + +### Nix (recommended) + +```bash +cd dimos/hardware/sensors/lidar/fastlio2/cpp +nix build .#fastlio2_native +``` + +Binary lands at `result/bin/fastlio2_native`. + +The flake pulls Livox SDK2 from the livox sub-flake and +[FAST-LIO-NON-ROS](https://github.com/leshy/FAST-LIO-NON-ROS) from GitHub +automatically. 
+ +### Native (CMake) + +Requires: +- CMake >= 3.14 +- [LCM](https://lcm-proj.github.io/) (`pacman -S lcm` or build from source) +- [Livox SDK2](https://github.com/Livox-SDK/Livox-SDK2) installed to `/usr/local` +- Eigen3, PCL (common, filters), yaml-cpp, Boost, OpenMP +- [FAST-LIO-NON-ROS](https://github.com/leshy/FAST-LIO-NON-ROS) checked out locally + +```bash +cd dimos/hardware/sensors/lidar/fastlio2/cpp +cmake -B build -DFASTLIO_DIR=$HOME/coding/FAST-LIO-NON-ROS +cmake --build build -j$(nproc) +cmake --install build +``` + +Binary lands at `result/bin/fastlio2_native` (same location as nix). + +If `-DFASTLIO_DIR` is omitted, CMake auto-fetches FAST-LIO-NON-ROS from GitHub. + +## Network setup + +The Mid-360 communicates over USB ethernet. Configure the interface: + +```bash +sudo nmcli con add type ethernet ifname usbeth0 con-name livox-mid360 \ + ipv4.addresses 192.168.1.5/24 ipv4.method manual +sudo nmcli con up livox-mid360 +``` + +This persists across reboots. The lidar defaults to `192.168.1.155`. + +## Usage + +Normally launched by `FastLio2` via the NativeModule framework: + +```python +from dimos.hardware.sensors.lidar.fastlio2.module import FastLio2 +from dimos.core.blueprints import autoconnect + +autoconnect( + FastLio2.blueprint(host_ip="192.168.1.5"), + SomeConsumer.blueprint(), +).build().loop() +``` + +### Manual invocation (for debugging) + +```bash +./result/bin/fastlio2_native \ + --lidar '/pointcloud#sensor_msgs.PointCloud2' \ + --odometry '/odometry#nav_msgs.Odometry' \ + --host_ip 192.168.1.5 \ + --lidar_ip 192.168.1.155 \ + --config_path ../config/mid360.yaml +``` + +Topic strings must include the `#type` suffix -- this is the actual LCM channel +name used by dimos subscribers. + +For full vis: +```sh +rerun-bridge +``` + +For LCM traffic: +```sh +lcm-spy +``` + +## Configuration + +FAST-LIO2 config files live in `config/`. The YAML config controls filter +parameters, EKF tuning, and point cloud processing settings. 
+ +## File overview + +| File | Description | +|---------------------------|--------------------------------------------------------------| +| `main.cpp` | Livox SDK2 + FAST-LIO2 integration, EKF SLAM, LCM publishing | +| `cloud_filter.hpp` | Point cloud filtering (range, voxel downsampling) | +| `voxel_map.hpp` | Global voxel map accumulation | +| `dimos_native_module.hpp` | Reusable header for parsing NativeModule CLI args | +| `config/` | FAST-LIO2 YAML configuration files | +| `flake.nix` | Nix flake for hermetic builds | +| `CMakeLists.txt` | Build config, fetches dimos-lcm headers automatically | +| `../module.py` | Python NativeModule wrapper (`FastLio2`) | diff --git a/dimos/hardware/sensors/lidar/fastlio2/cpp/cloud_filter.hpp b/dimos/hardware/sensors/lidar/fastlio2/cpp/cloud_filter.hpp new file mode 100644 index 0000000000..352ba9bef5 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/cpp/cloud_filter.hpp @@ -0,0 +1,51 @@ +// Copyright 2026 Dimensional Inc. +// SPDX-License-Identifier: Apache-2.0 +// +// Point cloud filtering utilities: voxel grid downsampling and +// statistical outlier removal using PCL. + +#ifndef CLOUD_FILTER_HPP_ +#define CLOUD_FILTER_HPP_ + +#include +#include +#include +#include + +struct CloudFilterConfig { + float voxel_size = 0.1f; + int sor_mean_k = 50; + float sor_stddev = 1.0f; +}; + +/// Apply voxel grid downsample + statistical outlier removal in-place. +/// Returns the filtered cloud (new allocation). 
+template <typename PointT>
+typename pcl::PointCloud<PointT>::Ptr filter_cloud(
+    const typename pcl::PointCloud<PointT>::Ptr& input,
+    const CloudFilterConfig& cfg) {
+
+  if (!input || input->empty()) return input;
+
+  // Voxel grid downsample
+  typename pcl::PointCloud<PointT>::Ptr voxelized(new pcl::PointCloud<PointT>());
+  pcl::VoxelGrid<PointT> vg;
+  vg.setInputCloud(input);
+  vg.setLeafSize(cfg.voxel_size, cfg.voxel_size, cfg.voxel_size);
+  vg.filter(*voxelized);
+
+  // Statistical outlier removal
+  if (cfg.sor_mean_k > 0 && voxelized->size() > static_cast<size_t>(cfg.sor_mean_k)) {
+    typename pcl::PointCloud<PointT>::Ptr cleaned(new pcl::PointCloud<PointT>());
+    pcl::StatisticalOutlierRemoval<PointT> sor;
+    sor.setInputCloud(voxelized);
+    sor.setMeanK(cfg.sor_mean_k);
+    sor.setStddevMulThresh(cfg.sor_stddev);
+    sor.filter(*cleaned);
+    return cleaned;
+  }
+
+  return voxelized;
+}
+
+#endif
diff --git a/dimos/hardware/sensors/lidar/fastlio2/cpp/config/mid360.json b/dimos/hardware/sensors/lidar/fastlio2/cpp/config/mid360.json
new file mode 100644
index 0000000000..ff6cc6dbf6
--- /dev/null
+++ b/dimos/hardware/sensors/lidar/fastlio2/cpp/config/mid360.json
+{
+  "common": {
+    "time_sync_en": false,
+    "time_offset_lidar_to_imu": 0.0,
+    "msr_freq": 50.0,
+    "main_freq": 5000.0
+  },
+  "preprocess": {
+    "lidar_type": 1,
+    "scan_line": 1,
+    "blind": 1
+  },
+  "mapping": {
+    "acc_cov": 0.1,
+    "gyr_cov": 0.1,
+    "b_acc_cov": 0.0001,
+    "b_gyr_cov": 0.0001,
+    "fov_degree": 360,
+    "det_range": 100.0,
+    "extrinsic_est_en": true,
+    "extrinsic_T": [
+      0.04165,
+      0.02326,
+      -0.0284
+    ],
+    "extrinsic_R": [
+      1.0,
+      0.0,
+      0.0,
+      0.0,
+      1.0,
+      0.0,
+      0.0,
+      0.0,
+      1.0
+    ]
+  }
+}
diff --git a/dimos/hardware/sensors/lidar/fastlio2/cpp/flake.lock b/dimos/hardware/sensors/lidar/fastlio2/cpp/flake.lock
new file mode 100644
index 0000000000..2636f00ada
--- /dev/null
+++ b/dimos/hardware/sensors/lidar/fastlio2/cpp/flake.lock
+{
+  "nodes": {
+    "dimos-lcm": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1769774949,
+        "narHash":
"sha256-icRK7jerqNlwK1WZBrnIP04I2WozzFqTD7qsmnPxQuo=", + "owner": "dimensionalOS", + "repo": "dimos-lcm", + "rev": "0aa72b7b1bd3a65f50f5c03485ee9b728df56afe", + "type": "github" + }, + "original": { + "owner": "dimensionalOS", + "ref": "main", + "repo": "dimos-lcm", + "type": "github" + } + }, + "dimos-lcm_2": { + "flake": false, + "locked": { + "lastModified": 1769774949, + "narHash": "sha256-icRK7jerqNlwK1WZBrnIP04I2WozzFqTD7qsmnPxQuo=", + "owner": "dimensionalOS", + "repo": "dimos-lcm", + "rev": "0aa72b7b1bd3a65f50f5c03485ee9b728df56afe", + "type": "github" + }, + "original": { + "owner": "dimensionalOS", + "ref": "main", + "repo": "dimos-lcm", + "type": "github" + } + }, + "fast-lio": { + "flake": false, + "locked": { + "lastModified": 1770976391, + "narHash": "sha256-OjSHk6qs3oCZ7XNjDyq4/K/Rb1VhqyADtra2q3F8V5U=", + "owner": "leshy", + "repo": "FAST-LIO-NON-ROS", + "rev": "47606ac6bbafcae9231936b4662b94c84fe87339", + "type": "github" + }, + "original": { + "owner": "leshy", + "ref": "dimos-integration", + "repo": "FAST-LIO-NON-ROS", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "livox-sdk": { + "inputs": { + "dimos-lcm": "dimos-lcm_2", + "flake-utils": [ + "flake-utils" + ], + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "path": "../../livox/cpp", + "type": "path" + }, + "original": { + "path": "../../livox/cpp", + "type": "path" + }, + "parent": [] + }, + "nixpkgs": { + "locked": { + "lastModified": 1770841267, + "narHash": "sha256-9xejG0KoqsoKEGp2kVbXRlEYtFFcDTHjidiuX8hGO44=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "ec7c70d12ce2fc37cb92aff673dcdca89d187bae", + "type": "github" + 
}, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "dimos-lcm": "dimos-lcm", + "fast-lio": "fast-lio", + "flake-utils": "flake-utils", + "livox-sdk": "livox-sdk", + "nixpkgs": "nixpkgs" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/dimos/hardware/sensors/lidar/fastlio2/cpp/flake.nix b/dimos/hardware/sensors/lidar/fastlio2/cpp/flake.nix new file mode 100644 index 0000000000..7a58aceb76 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/cpp/flake.nix @@ -0,0 +1,59 @@ +{ + description = "FAST-LIO2 + Livox Mid-360 native module"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + flake-utils.url = "github:numtide/flake-utils"; + livox-sdk.url = "path:../../livox/cpp"; + livox-sdk.inputs.nixpkgs.follows = "nixpkgs"; + livox-sdk.inputs.flake-utils.follows = "flake-utils"; + dimos-lcm = { + url = "github:dimensionalOS/dimos-lcm/main"; + flake = false; + }; + fast-lio = { + url = "github:leshy/FAST-LIO-NON-ROS/dimos-integration"; + flake = false; + }; + }; + + outputs = { self, nixpkgs, flake-utils, livox-sdk, dimos-lcm, fast-lio, ... 
}: + flake-utils.lib.eachDefaultSystem (system: + let + pkgs = import nixpkgs { inherit system; }; + livox-sdk2 = livox-sdk.packages.${system}.livox-sdk2; + + livox-common = ../../common; + + fastlio2_native = pkgs.stdenv.mkDerivation { + pname = "fastlio2_native"; + version = "0.1.0"; + + src = ./.; + + nativeBuildInputs = [ pkgs.cmake pkgs.pkg-config ]; + buildInputs = [ + livox-sdk2 + pkgs.lcm + pkgs.glib + pkgs.eigen + pkgs.pcl + pkgs.yaml-cpp + pkgs.boost + pkgs.llvmPackages.openmp + ]; + + cmakeFlags = [ + "-DCMAKE_POLICY_VERSION_MINIMUM=3.5" + "-DFETCHCONTENT_SOURCE_DIR_DIMOS_LCM=${dimos-lcm}" + "-DFASTLIO_DIR=${fast-lio}" + "-DLIVOX_COMMON_DIR=${livox-common}" + ]; + }; + in { + packages = { + default = fastlio2_native; + inherit fastlio2_native; + }; + }); +} diff --git a/dimos/hardware/sensors/lidar/fastlio2/cpp/main.cpp b/dimos/hardware/sensors/lidar/fastlio2/cpp/main.cpp new file mode 100644 index 0000000000..60b8d9cdb2 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/cpp/main.cpp @@ -0,0 +1,522 @@ +// Copyright 2026 Dimensional Inc. +// SPDX-License-Identifier: Apache-2.0 +// +// FAST-LIO2 + Livox Mid-360 native module for dimos NativeModule framework. +// +// Binds Livox SDK2 directly into FAST-LIO-NON-ROS: SDK callbacks feed +// CustomMsg/Imu to FastLio, which performs EKF-LOAM SLAM. Registered +// (world-frame) point clouds and odometry are published on LCM. 
+// +// Usage: +// ./fastlio2_native \ +// --lidar '/lidar#sensor_msgs.PointCloud2' \ +// --odometry '/odometry#nav_msgs.Odometry' \ +// --config_path /path/to/mid360.yaml \ +// --host_ip 192.168.1.5 --lidar_ip 192.168.1.155 + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "livox_sdk_config.hpp" + +#include "cloud_filter.hpp" +#include "dimos_native_module.hpp" +#include "voxel_map.hpp" + +// dimos LCM message headers +#include "geometry_msgs/Quaternion.hpp" +#include "geometry_msgs/Vector3.hpp" +#include "nav_msgs/Odometry.hpp" +#include "sensor_msgs/Imu.hpp" +#include "sensor_msgs/PointCloud2.hpp" +#include "sensor_msgs/PointField.hpp" + +// FAST-LIO (header-only core, compiled sources linked via CMake) +#include "fast_lio.hpp" + +using livox_common::GRAVITY_MS2; +using livox_common::DATA_TYPE_IMU; +using livox_common::DATA_TYPE_CARTESIAN_HIGH; +using livox_common::DATA_TYPE_CARTESIAN_LOW; + +// --------------------------------------------------------------------------- +// Global state +// --------------------------------------------------------------------------- + +static std::atomic g_running{true}; +static lcm::LCM* g_lcm = nullptr; +static FastLio* g_fastlio = nullptr; + +static std::string g_lidar_topic; +static std::string g_odometry_topic; +static std::string g_map_topic; +static std::string g_frame_id = "map"; +static std::string g_child_frame_id = "body"; +static float g_frequency = 10.0f; + +// Frame accumulator (Livox SDK raw → CustomMsg) +static std::mutex g_pc_mutex; +static std::vector g_accumulated_points; +static uint64_t g_frame_start_ns = 0; +static bool g_frame_has_timestamp = false; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +static uint64_t get_timestamp_ns(const LivoxLidarEthernetPacket* pkt) { + uint64_t ns = 0; + 
std::memcpy(&ns, pkt->timestamp, sizeof(uint64_t));
+  return ns;
+}
+
+using dimos::time_from_seconds;
+using dimos::make_header;
+
+// ---------------------------------------------------------------------------
+// Publish lidar (world-frame point cloud)
+// ---------------------------------------------------------------------------
+
+static void publish_lidar(PointCloudXYZI::Ptr cloud, double timestamp,
+                          const std::string& topic = "") {
+  const std::string& chan = topic.empty() ? g_lidar_topic : topic;
+  if (!g_lcm || !cloud || cloud->empty() || chan.empty()) return;
+
+  int num_points = static_cast<int>(cloud->size());
+
+  sensor_msgs::PointCloud2 pc;
+  pc.header = make_header(g_frame_id, timestamp);
+  pc.height = 1;
+  pc.width = num_points;
+  pc.is_bigendian = 0;
+  pc.is_dense = 1;
+
+  // Fields: x, y, z, intensity (float32 each)
+  pc.fields_length = 4;
+  pc.fields.resize(4);
+
+  auto make_field = [](const std::string& name, int32_t offset) {
+    sensor_msgs::PointField f;
+    f.name = name;
+    f.offset = offset;
+    f.datatype = sensor_msgs::PointField::FLOAT32;
+    f.count = 1;
+    return f;
+  };
+
+  pc.fields[0] = make_field("x", 0);
+  pc.fields[1] = make_field("y", 4);
+  pc.fields[2] = make_field("z", 8);
+  pc.fields[3] = make_field("intensity", 12);
+
+  pc.point_step = 16;
+  pc.row_step = pc.point_step * num_points;
+
+  pc.data_length = pc.row_step;
+  pc.data.resize(pc.data_length);
+
+  for (int i = 0; i < num_points; ++i) {
+    float* dst = reinterpret_cast<float*>(pc.data.data() + i * 16);
+    dst[0] = cloud->points[i].x;
+    dst[1] = cloud->points[i].y;
+    dst[2] = cloud->points[i].z;
+    dst[3] = cloud->points[i].intensity;
+  }
+
+  g_lcm->publish(chan, &pc);
+}
+
+// ---------------------------------------------------------------------------
+// Publish odometry
+// ---------------------------------------------------------------------------
+
+static void publish_odometry(const custom_messages::Odometry& odom, double timestamp) {
+  if (!g_lcm) return;
+
+  nav_msgs::Odometry msg;
+  msg.header = make_header(g_frame_id, timestamp);
+  msg.child_frame_id = g_child_frame_id;
+
+  // Pose
+  msg.pose.pose.position.x = odom.pose.pose.position.x;
+  msg.pose.pose.position.y = odom.pose.pose.position.y;
+  msg.pose.pose.position.z = odom.pose.pose.position.z;
+  msg.pose.pose.orientation.x = odom.pose.pose.orientation.x;
+  msg.pose.pose.orientation.y = odom.pose.pose.orientation.y;
+  msg.pose.pose.orientation.z = odom.pose.pose.orientation.z;
+  msg.pose.pose.orientation.w = odom.pose.pose.orientation.w;
+
+  // Covariance (fixed-size double[36])
+  for (int i = 0; i < 36; ++i) {
+    msg.pose.covariance[i] = odom.pose.covariance[i];
+  }
+
+  // Twist (zero — FAST-LIO doesn't output velocity directly)
+  msg.twist.twist.linear.x = 0;
+  msg.twist.twist.linear.y = 0;
+  msg.twist.twist.linear.z = 0;
+  msg.twist.twist.angular.x = 0;
+  msg.twist.twist.angular.y = 0;
+  msg.twist.twist.angular.z = 0;
+  std::memset(msg.twist.covariance, 0, sizeof(msg.twist.covariance));
+
+  g_lcm->publish(g_odometry_topic, &msg);
+}
+
+// ---------------------------------------------------------------------------
+// Livox SDK callbacks
+// ---------------------------------------------------------------------------
+
+static void on_point_cloud(const uint32_t /*handle*/, const uint8_t /*dev_type*/,
+                           LivoxLidarEthernetPacket* data, void* /*client_data*/) {
+  if (!g_running.load() || data == nullptr) return;
+
+  uint64_t ts_ns = get_timestamp_ns(data);
+  uint16_t dot_num = data->dot_num;
+
+  std::lock_guard<std::mutex> lock(g_pc_mutex);
+
+  if (!g_frame_has_timestamp) {
+    g_frame_start_ns = ts_ns;
+    g_frame_has_timestamp = true;
+  }
+
+  if (data->data_type == DATA_TYPE_CARTESIAN_HIGH) {
+    auto* pts = reinterpret_cast<LivoxLidarCartesianHighRawPoint*>(data->data);
+    for (uint16_t i = 0; i < dot_num; ++i) {
+      custom_messages::CustomPoint cp;
+      cp.x = static_cast<double>(pts[i].x) / 1000.0;  // mm → m
+      cp.y = static_cast<double>(pts[i].y) / 1000.0;
+      cp.z = static_cast<double>(pts[i].z) / 1000.0;
+      cp.reflectivity = pts[i].reflectivity;
+      cp.tag = pts[i].tag;
+      cp.line = 0;  // Mid-360: non-repetitive, single "line"
+      cp.offset_time = static_cast<uint32_t>(ts_ns - g_frame_start_ns);
+      g_accumulated_points.push_back(cp);
+    }
+  } else if (data->data_type == DATA_TYPE_CARTESIAN_LOW) {
+    auto* pts = reinterpret_cast<LivoxLidarCartesianLowRawPoint*>(data->data);
+    for (uint16_t i = 0; i < dot_num; ++i) {
+      custom_messages::CustomPoint cp;
+      cp.x = static_cast<double>(pts[i].x) / 100.0;  // cm → m
+      cp.y = static_cast<double>(pts[i].y) / 100.0;
+      cp.z = static_cast<double>(pts[i].z) / 100.0;
+      cp.reflectivity = pts[i].reflectivity;
+      cp.tag = pts[i].tag;
+      cp.line = 0;
+      cp.offset_time = static_cast<uint32_t>(ts_ns - g_frame_start_ns);
+      g_accumulated_points.push_back(cp);
+    }
+  }
+}
+
+static void on_imu_data(const uint32_t /*handle*/, const uint8_t /*dev_type*/,
+                        LivoxLidarEthernetPacket* data, void* /*client_data*/) {
+  if (!g_running.load() || data == nullptr || !g_fastlio) return;
+
+  double ts = static_cast<double>(get_timestamp_ns(data)) / 1e9;
+  auto* imu_pts = reinterpret_cast<LivoxLidarImuRawPoint*>(data->data);
+  uint16_t dot_num = data->dot_num;
+
+  for (uint16_t i = 0; i < dot_num; ++i) {
+    auto imu_msg = boost::make_shared<custom_messages::Imu>();
+    imu_msg->header.stamp = custom_messages::Time().fromSec(ts);
+    imu_msg->header.seq = 0;
+    imu_msg->header.frame_id = "livox_frame";
+
+    imu_msg->orientation.x = 0.0;
+    imu_msg->orientation.y = 0.0;
+    imu_msg->orientation.z = 0.0;
+    imu_msg->orientation.w = 1.0;
+    for (int j = 0; j < 9; ++j)
+      imu_msg->orientation_covariance[j] = 0.0;
+
+    imu_msg->angular_velocity.x = static_cast<double>(imu_pts[i].gyro_x);
+    imu_msg->angular_velocity.y = static_cast<double>(imu_pts[i].gyro_y);
+    imu_msg->angular_velocity.z = static_cast<double>(imu_pts[i].gyro_z);
+    for (int j = 0; j < 9; ++j)
+      imu_msg->angular_velocity_covariance[j] = 0.0;
+
+    imu_msg->linear_acceleration.x = static_cast<double>(imu_pts[i].acc_x) * GRAVITY_MS2;
+    imu_msg->linear_acceleration.y = static_cast<double>(imu_pts[i].acc_y) * GRAVITY_MS2;
+    imu_msg->linear_acceleration.z = static_cast<double>(imu_pts[i].acc_z) * GRAVITY_MS2;
+    for (int j = 0; j < 9; ++j)
+
imu_msg->linear_acceleration_covariance[j] = 0.0; + + g_fastlio->feed_imu(imu_msg); + } +} + +static void on_info_change(const uint32_t handle, const LivoxLidarInfo* info, + void* /*client_data*/) { + if (info == nullptr) return; + + char sn[17] = {}; + std::memcpy(sn, info->sn, 16); + char ip[17] = {}; + std::memcpy(ip, info->lidar_ip, 16); + + printf("[fastlio2] Device connected: handle=%u type=%u sn=%s ip=%s\n", + handle, info->dev_type, sn, ip); + + SetLivoxLidarWorkMode(handle, kLivoxLidarNormal, nullptr, nullptr); + EnableLivoxLidarImuData(handle, nullptr, nullptr); +} + +// --------------------------------------------------------------------------- +// Signal handling +// --------------------------------------------------------------------------- + +static void signal_handler(int /*sig*/) { + g_running.store(false); +} + +// --------------------------------------------------------------------------- +// Main +// --------------------------------------------------------------------------- + +int main(int argc, char** argv) { + dimos::NativeModule mod(argc, argv); + + // Required: LCM topics for output ports + g_lidar_topic = mod.has("lidar") ? mod.topic("lidar") : ""; + g_odometry_topic = mod.has("odometry") ? mod.topic("odometry") : ""; + g_map_topic = mod.has("global_map") ? 
mod.topic("global_map") : ""; + + if (g_lidar_topic.empty() && g_odometry_topic.empty()) { + fprintf(stderr, "Error: at least one of --lidar or --odometry is required\n"); + return 1; + } + + // FAST-LIO config path + std::string config_path = mod.arg("config_path", ""); + if (config_path.empty()) { + fprintf(stderr, "Error: --config_path is required\n"); + return 1; + } + + // FAST-LIO internal processing rates + double msr_freq = mod.arg_float("msr_freq", 50.0f); + double main_freq = mod.arg_float("main_freq", 5000.0f); + + // Livox hardware config + std::string host_ip = mod.arg("host_ip", "192.168.1.5"); + std::string lidar_ip = mod.arg("lidar_ip", "192.168.1.155"); + g_frequency = mod.arg_float("frequency", 10.0f); + g_frame_id = mod.arg("frame_id", "map"); + g_child_frame_id = mod.arg("child_frame_id", "body"); + float pointcloud_freq = mod.arg_float("pointcloud_freq", 5.0f); + float odom_freq = mod.arg_float("odom_freq", 50.0f); + CloudFilterConfig filter_cfg; + filter_cfg.voxel_size = mod.arg_float("voxel_size", 0.1f); + filter_cfg.sor_mean_k = mod.arg_int("sor_mean_k", 50); + filter_cfg.sor_stddev = mod.arg_float("sor_stddev", 1.0f); + float map_voxel_size = mod.arg_float("map_voxel_size", 0.1f); + float map_max_range = mod.arg_float("map_max_range", 100.0f); + float map_freq = mod.arg_float("map_freq", 0.0f); + + // SDK network ports (defaults from SdkPorts struct in livox_sdk_config.hpp) + livox_common::SdkPorts ports; + const livox_common::SdkPorts port_defaults; + ports.cmd_data = mod.arg_int("cmd_data_port", port_defaults.cmd_data); + ports.push_msg = mod.arg_int("push_msg_port", port_defaults.push_msg); + ports.point_data = mod.arg_int("point_data_port", port_defaults.point_data); + ports.imu_data = mod.arg_int("imu_data_port", port_defaults.imu_data); + ports.log_data = mod.arg_int("log_data_port", port_defaults.log_data); + ports.host_cmd_data = mod.arg_int("host_cmd_data_port", port_defaults.host_cmd_data); + ports.host_push_msg = 
mod.arg_int("host_push_msg_port", port_defaults.host_push_msg); + ports.host_point_data = mod.arg_int("host_point_data_port", port_defaults.host_point_data); + ports.host_imu_data = mod.arg_int("host_imu_data_port", port_defaults.host_imu_data); + ports.host_log_data = mod.arg_int("host_log_data_port", port_defaults.host_log_data); + + printf("[fastlio2] Starting FAST-LIO2 + Livox Mid-360 native module\n"); + printf("[fastlio2] lidar topic: %s\n", + g_lidar_topic.empty() ? "(disabled)" : g_lidar_topic.c_str()); + printf("[fastlio2] odometry topic: %s\n", + g_odometry_topic.empty() ? "(disabled)" : g_odometry_topic.c_str()); + printf("[fastlio2] global_map topic: %s\n", + g_map_topic.empty() ? "(disabled)" : g_map_topic.c_str()); + printf("[fastlio2] config: %s\n", config_path.c_str()); + printf("[fastlio2] host_ip: %s lidar_ip: %s frequency: %.1f Hz\n", + host_ip.c_str(), lidar_ip.c_str(), g_frequency); + printf("[fastlio2] pointcloud_freq: %.1f Hz odom_freq: %.1f Hz\n", + pointcloud_freq, odom_freq); + printf("[fastlio2] voxel_size: %.3f sor_mean_k: %d sor_stddev: %.1f\n", + filter_cfg.voxel_size, filter_cfg.sor_mean_k, filter_cfg.sor_stddev); + if (!g_map_topic.empty()) + printf("[fastlio2] map_voxel_size: %.3f map_max_range: %.1f map_freq: %.1f Hz\n", + map_voxel_size, map_max_range, map_freq); + + // Signal handlers + signal(SIGTERM, signal_handler); + signal(SIGINT, signal_handler); + + // Init LCM + lcm::LCM lcm; + if (!lcm.good()) { + fprintf(stderr, "Error: LCM init failed\n"); + return 1; + } + g_lcm = &lcm; + + // Init FAST-LIO with config + printf("[fastlio2] Initializing FAST-LIO...\n"); + FastLio fast_lio(config_path, msr_freq, main_freq); + g_fastlio = &fast_lio; + printf("[fastlio2] FAST-LIO initialized.\n"); + + // Init Livox SDK (in-memory config, no temp files) + if (!livox_common::init_livox_sdk(host_ip, lidar_ip, ports)) { + return 1; + } + + // Register SDK callbacks + SetLivoxLidarPointCloudCallBack(on_point_cloud, nullptr); + 
SetLivoxLidarImuDataCallback(on_imu_data, nullptr); + SetLivoxLidarInfoChangeCallback(on_info_change, nullptr); + + // Start SDK + if (!LivoxLidarSdkStart()) { + fprintf(stderr, "Error: LivoxLidarSdkStart failed\n"); + LivoxLidarSdkUninit(); + return 1; + } + + printf("[fastlio2] SDK started, waiting for device...\n"); + + // Main loop + auto frame_interval = std::chrono::microseconds( + static_cast(1e6 / g_frequency)); + auto last_emit = std::chrono::steady_clock::now(); + const double process_period_ms = 1000.0 / main_freq; + + // Rate limiters for output publishing + auto pc_interval = std::chrono::microseconds( + static_cast(1e6 / pointcloud_freq)); + auto odom_interval = std::chrono::microseconds( + static_cast(1e6 / odom_freq)); + auto last_pc_publish = std::chrono::steady_clock::now(); + auto last_odom_publish = std::chrono::steady_clock::now(); + + // Global voxel map (only if map topic is configured AND map_freq > 0) + std::unique_ptr global_map; + std::chrono::microseconds map_interval{0}; + auto last_map_publish = std::chrono::steady_clock::now(); + if (!g_map_topic.empty() && map_freq > 0.0f) { + global_map = std::make_unique(map_voxel_size, map_max_range); + map_interval = std::chrono::microseconds( + static_cast(1e6 / map_freq)); + } + + while (g_running.load()) { + auto loop_start = std::chrono::high_resolution_clock::now(); + + // At frame rate: build CustomMsg from accumulated points and feed to FAST-LIO + auto now = std::chrono::steady_clock::now(); + if (now - last_emit >= frame_interval) { + std::vector points; + uint64_t frame_start = 0; + + { + std::lock_guard lock(g_pc_mutex); + if (!g_accumulated_points.empty()) { + points.swap(g_accumulated_points); + frame_start = g_frame_start_ns; + g_frame_has_timestamp = false; + } + } + + if (!points.empty()) { + // Build CustomMsg + auto lidar_msg = boost::make_shared(); + lidar_msg->header.seq = 0; + lidar_msg->header.stamp = custom_messages::Time().fromSec( + static_cast(frame_start) / 1e9); + 
lidar_msg->header.frame_id = "livox_frame"; + lidar_msg->timebase = frame_start; + lidar_msg->lidar_id = 0; + for (int i = 0; i < 3; i++) + lidar_msg->rsvd[i] = 0; + lidar_msg->point_num = static_cast(points.size()); + lidar_msg->points = std::move(points); + + fast_lio.feed_lidar(lidar_msg); + } + + last_emit = now; + } + + // Run FAST-LIO processing step (high frequency) + fast_lio.process(); + + // Check for new results and accumulate/publish (rate-limited) + auto pose = fast_lio.get_pose(); + if (!pose.empty() && (pose[0] != 0.0 || pose[1] != 0.0 || pose[2] != 0.0)) { + double ts = std::chrono::duration( + std::chrono::system_clock::now().time_since_epoch()).count(); + + auto world_cloud = fast_lio.get_world_cloud(); + if (world_cloud && !world_cloud->empty()) { + auto filtered = filter_cloud(world_cloud, filter_cfg); + + // Per-scan publish at pointcloud_freq + if (!g_lidar_topic.empty() && now - last_pc_publish >= pc_interval) { + publish_lidar(filtered, ts); + last_pc_publish = now; + } + + // Global map: insert, prune, and publish at map_freq + if (global_map) { + global_map->insert(filtered); + + if (now - last_map_publish >= map_interval) { + global_map->prune( + static_cast(pose[0]), + static_cast(pose[1]), + static_cast(pose[2])); + auto map_cloud = global_map->to_cloud(); + publish_lidar(map_cloud, ts, g_map_topic); + last_map_publish = now; + } + } + } + + // Publish odometry (rate-limited to odom_freq) + if (!g_odometry_topic.empty() && (now - last_odom_publish >= odom_interval)) { + publish_odometry(fast_lio.get_odometry(), ts); + last_odom_publish = now; + } + } + + // Handle LCM messages + lcm.handleTimeout(0); + + // Rate control (~5kHz processing) + auto loop_end = std::chrono::high_resolution_clock::now(); + auto elapsed_ms = std::chrono::duration(loop_end - loop_start).count(); + if (elapsed_ms < process_period_ms) { + std::this_thread::sleep_for(std::chrono::microseconds( + static_cast((process_period_ms - elapsed_ms) * 1000))); + } + } + + 
// Cleanup + printf("[fastlio2] Shutting down...\n"); + g_fastlio = nullptr; + LivoxLidarSdkUninit(); + g_lcm = nullptr; + + printf("[fastlio2] Done.\n"); + return 0; +} diff --git a/dimos/hardware/sensors/lidar/fastlio2/cpp/voxel_map.hpp b/dimos/hardware/sensors/lidar/fastlio2/cpp/voxel_map.hpp new file mode 100644 index 0000000000..a50740cd04 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/cpp/voxel_map.hpp @@ -0,0 +1,297 @@ +// Copyright 2026 Dimensional Inc. +// SPDX-License-Identifier: Apache-2.0 +// +// Efficient global voxel map using a hash map. +// Supports O(1) insert/update, distance-based pruning, and +// raycasting-based free space clearing via Amanatides & Woo 3D DDA. +// FOV is discovered dynamically from incoming point cloud data. + +#ifndef VOXEL_MAP_HPP_ +#define VOXEL_MAP_HPP_ + +#include +#include +#include + +#include +#include + +struct VoxelKey { + int32_t x, y, z; + bool operator==(const VoxelKey& o) const { return x == o.x && y == o.y && z == o.z; } +}; + +struct VoxelKeyHash { + size_t operator()(const VoxelKey& k) const { + // Fast spatial hash — large primes reduce collisions for grid coords + size_t h = static_cast(k.x) * 73856093u; + h ^= static_cast(k.y) * 19349669u; + h ^= static_cast(k.z) * 83492791u; + return h; + } +}; + +struct Voxel { + float x, y, z; // running centroid + float intensity; + uint32_t count; // points merged into this voxel + uint8_t miss_count; // consecutive scans where a ray passed through without hitting +}; + +/// Config for raycast-based free space clearing. 
+struct RaycastConfig { + int subsample = 4; // raycast every Nth point + int max_misses = 3; // erase after this many consecutive misses + float fov_margin_rad = 0.035f; // ~2° safety margin added to discovered FOV +}; + +class VoxelMap { +public: + explicit VoxelMap(float voxel_size, float max_range = 100.0f) + : voxel_size_(voxel_size), max_range_(max_range) { + map_.reserve(500000); + } + + /// Insert a point cloud into the map, merging into existing voxels. + /// Resets miss_count for hit voxels. + template + void insert(const typename pcl::PointCloud::Ptr& cloud) { + if (!cloud) return; + float inv = 1.0f / voxel_size_; + for (const auto& pt : cloud->points) { + VoxelKey key{ + static_cast(std::floor(pt.x * inv)), + static_cast(std::floor(pt.y * inv)), + static_cast(std::floor(pt.z * inv))}; + + auto it = map_.find(key); + if (it != map_.end()) { + // Running average update + auto& v = it->second; + float n = static_cast(v.count); + float n1 = n + 1.0f; + v.x = (v.x * n + pt.x) / n1; + v.y = (v.y * n + pt.y) / n1; + v.z = (v.z * n + pt.z) / n1; + v.intensity = (v.intensity * n + pt.intensity) / n1; + v.count++; + v.miss_count = 0; + } else { + map_.emplace(key, Voxel{pt.x, pt.y, pt.z, pt.intensity, 1, 0}); + } + } + } + + /// Cast rays from sensor origin through each point in the cloud. + /// Discovers the sensor FOV from the cloud's elevation angle range, + /// then marks intermediate voxels as missed and erases those exceeding + /// the miss threshold within the discovered FOV. + /// + /// Orientation quaternion (qx,qy,qz,qw) is body→world. 
+ template + void raycast_clear(float ox, float oy, float oz, + float qx, float qy, float qz, float qw, + const typename pcl::PointCloud::Ptr& cloud, + const RaycastConfig& cfg) { + if (!cloud || cloud->empty() || cfg.max_misses <= 0) return; + + // Phase 0: discover FOV from this scan's elevation angles in sensor-local frame + update_fov(ox, oy, oz, qx, qy, qz, qw, cloud); + + // Skip raycasting until we have a valid FOV (need at least a few scans) + if (!fov_valid_) return; + + float inv = 1.0f / voxel_size_; + int n_pts = static_cast(cloud->size()); + float fov_up = fov_up_ + cfg.fov_margin_rad; + float fov_down = fov_down_ - cfg.fov_margin_rad; + + // Phase 1: walk rays, increment miss_count for intermediate voxels + for (int i = 0; i < n_pts; i += cfg.subsample) { + const auto& pt = cloud->points[i]; + raycast_single(ox, oy, oz, pt.x, pt.y, pt.z, inv); + } + + // Phase 2: erase voxels that exceeded miss threshold and are within FOV + for (auto it = map_.begin(); it != map_.end();) { + if (it->second.miss_count > static_cast(cfg.max_misses)) { + if (in_sensor_fov(ox, oy, oz, qx, qy, qz, qw, + it->second.x, it->second.y, it->second.z, + fov_up, fov_down)) { + it = map_.erase(it); + continue; + } + } + ++it; + } + } + + /// Remove voxels farther than max_range from the given position. + void prune(float px, float py, float pz) { + float r2 = max_range_ * max_range_; + for (auto it = map_.begin(); it != map_.end();) { + float dx = it->second.x - px; + float dy = it->second.y - py; + float dz = it->second.z - pz; + if (dx * dx + dy * dy + dz * dz > r2) + it = map_.erase(it); + else + ++it; + } + } + + /// Export all voxel centroids as a point cloud. 
+ template + typename pcl::PointCloud::Ptr to_cloud() const { + typename pcl::PointCloud::Ptr cloud( + new pcl::PointCloud(map_.size(), 1)); + size_t i = 0; + for (const auto& [key, v] : map_) { + auto& pt = cloud->points[i++]; + pt.x = v.x; + pt.y = v.y; + pt.z = v.z; + pt.intensity = v.intensity; + } + return cloud; + } + + size_t size() const { return map_.size(); } + void clear() { map_.clear(); } + void set_max_range(float r) { max_range_ = r; } + float fov_up_deg() const { return fov_up_ * 180.0f / static_cast(M_PI); } + float fov_down_deg() const { return fov_down_ * 180.0f / static_cast(M_PI); } + bool fov_valid() const { return fov_valid_; } + +private: + std::unordered_map map_; + float voxel_size_; + float max_range_; + + // Dynamically discovered sensor FOV (accumulated over scans) + float fov_up_ = -static_cast(M_PI); // start narrow, expand from data + float fov_down_ = static_cast(M_PI); + int fov_scan_count_ = 0; + bool fov_valid_ = false; + static constexpr int FOV_WARMUP_SCANS = 5; // require N scans before trusting FOV + + /// Update discovered FOV from a scan's elevation angles in sensor-local frame. 
+ template + void update_fov(float ox, float oy, float oz, + float qx, float qy, float qz, float qw, + const typename pcl::PointCloud::Ptr& cloud) { + // Inverse quaternion for world→sensor rotation + float nqx = -qx, nqy = -qy, nqz = -qz; + + for (const auto& pt : cloud->points) { + float wx = pt.x - ox, wy = pt.y - oy, wz = pt.z - oz; + + // Rotate to sensor-local frame + float tx = 2.0f * (nqy * wz - nqz * wy); + float ty = 2.0f * (nqz * wx - nqx * wz); + float tz = 2.0f * (nqx * wy - nqy * wx); + float lx = wx + qw * tx + (nqy * tz - nqz * ty); + float ly = wy + qw * ty + (nqz * tx - nqx * tz); + float lz = wz + qw * tz + (nqx * ty - nqy * tx); + + float horiz_dist = std::sqrt(lx * lx + ly * ly); + if (horiz_dist < 1e-6f) continue; + float elevation = std::atan2(lz, horiz_dist); + + if (elevation > fov_up_) fov_up_ = elevation; + if (elevation < fov_down_) fov_down_ = elevation; + } + + if (++fov_scan_count_ >= FOV_WARMUP_SCANS && !fov_valid_) { + fov_valid_ = true; + printf("[voxel_map] FOV discovered: [%.1f, %.1f] deg\n", + fov_down_deg(), fov_up_deg()); + } + } + + /// Amanatides & Woo 3D DDA: walk from (ox,oy,oz) to (px,py,pz), + /// incrementing miss_count for all intermediate voxels. + void raycast_single(float ox, float oy, float oz, + float px, float py, float pz, float inv) { + float dx = px - ox, dy = py - oy, dz = pz - oz; + float len = std::sqrt(dx * dx + dy * dy + dz * dz); + if (len < 1e-6f) return; + dx /= len; dy /= len; dz /= len; + + int32_t cx = static_cast(std::floor(ox * inv)); + int32_t cy = static_cast(std::floor(oy * inv)); + int32_t cz = static_cast(std::floor(oz * inv)); + int32_t ex = static_cast(std::floor(px * inv)); + int32_t ey = static_cast(std::floor(py * inv)); + int32_t ez = static_cast(std::floor(pz * inv)); + + int sx = (dx >= 0) ? 1 : -1; + int sy = (dy >= 0) ? 1 : -1; + int sz = (dz >= 0) ? 
1 : -1; + + // tMax: parametric distance along ray to next voxel boundary per axis + // tDelta: parametric distance to cross one full voxel per axis + float tMaxX = (std::abs(dx) < 1e-10f) ? 1e30f + : (((dx > 0 ? cx + 1 : cx) * voxel_size_ - ox) / dx); + float tMaxY = (std::abs(dy) < 1e-10f) ? 1e30f + : (((dy > 0 ? cy + 1 : cy) * voxel_size_ - oy) / dy); + float tMaxZ = (std::abs(dz) < 1e-10f) ? 1e30f + : (((dz > 0 ? cz + 1 : cz) * voxel_size_ - oz) / dz); + + float tDeltaX = (std::abs(dx) < 1e-10f) ? 1e30f : std::abs(voxel_size_ / dx); + float tDeltaY = (std::abs(dy) < 1e-10f) ? 1e30f : std::abs(voxel_size_ / dy); + float tDeltaZ = (std::abs(dz) < 1e-10f) ? 1e30f : std::abs(voxel_size_ / dz); + + // Walk through voxels (skip endpoint — it was hit) + int max_steps = static_cast(len * inv) + 3; // safety bound + for (int step = 0; step < max_steps; ++step) { + if (cx == ex && cy == ey && cz == ez) break; // reached endpoint + + VoxelKey key{cx, cy, cz}; + auto it = map_.find(key); + if (it != map_.end() && it->second.miss_count < 255) { + it->second.miss_count++; + } + + // Step to next voxel on the axis with smallest tMax + if (tMaxX < tMaxY && tMaxX < tMaxZ) { + cx += sx; tMaxX += tDeltaX; + } else if (tMaxY < tMaxZ) { + cy += sy; tMaxY += tDeltaY; + } else { + cz += sz; tMaxZ += tDeltaZ; + } + } + } + + /// Check if a voxel centroid falls within the sensor's vertical FOV. + /// Rotates the vector (sensor→voxel) into sensor-local frame using the + /// inverse of the body→world quaternion, then checks elevation angle. 
+ static bool in_sensor_fov(float ox, float oy, float oz, + float qx, float qy, float qz, float qw, + float vx, float vy, float vz, + float fov_up_rad, float fov_down_rad) { + // Vector from sensor origin to voxel in world frame + float wx = vx - ox, wy = vy - oy, wz = vz - oz; + + // Rotate by quaternion inverse (conjugate): q* = (-qx,-qy,-qz,qw) + float nqx = -qx, nqy = -qy, nqz = -qz; + // t = 2 * cross(q.xyz, v) + float tx = 2.0f * (nqy * wz - nqz * wy); + float ty = 2.0f * (nqz * wx - nqx * wz); + float tz = 2.0f * (nqx * wy - nqy * wx); + // v' = v + qw * t + cross(q.xyz, t) + float lx = wx + qw * tx + (nqy * tz - nqz * ty); + float ly = wy + qw * ty + (nqz * tx - nqx * tz); + float lz = wz + qw * tz + (nqx * ty - nqy * tx); + + // Elevation angle in sensor-local frame + float horiz_dist = std::sqrt(lx * lx + ly * ly); + if (horiz_dist < 1e-6f) return true; // directly above/below, treat as in FOV + float elevation = std::atan2(lz, horiz_dist); + + return elevation >= fov_down_rad && elevation <= fov_up_rad; + } +}; + +#endif diff --git a/dimos/hardware/sensors/lidar/fastlio2/fastlio_blueprints.py b/dimos/hardware/sensors/lidar/fastlio2/fastlio_blueprints.py new file mode 100644 index 0000000000..05801729e3 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/fastlio_blueprints.py @@ -0,0 +1,50 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.core.blueprints import autoconnect +from dimos.hardware.sensors.lidar.fastlio2.module import FastLio2 +from dimos.mapping.voxels import VoxelGridMapper +from dimos.visualization.rerun.bridge import rerun_bridge + +voxel_size = 0.05 + +mid360_fastlio = autoconnect( + FastLio2.blueprint(voxel_size=voxel_size, map_voxel_size=voxel_size, map_freq=-1), + rerun_bridge( + visual_override={ + "world/lidar": lambda grid: grid.to_rerun(voxel_size=voxel_size, mode="boxes"), + } + ), +).global_config(n_dask_workers=2, robot_model="mid360_fastlio2") + +mid360_fastlio_voxels = autoconnect( + FastLio2.blueprint(), + VoxelGridMapper.blueprint(publish_interval=1.0, voxel_size=voxel_size, carve_columns=False), + rerun_bridge( + visual_override={ + "world/global_map": lambda grid: grid.to_rerun(voxel_size=voxel_size, mode="boxes"), + "world/lidar": None, + } + ), +).global_config(n_dask_workers=3, robot_model="mid360_fastlio2_voxels") + +mid360_fastlio_voxels_native = autoconnect( + FastLio2.blueprint(voxel_size=voxel_size, map_voxel_size=voxel_size, map_freq=3.0), + rerun_bridge( + visual_override={ + "world/lidar": None, + "world/global_map": lambda grid: grid.to_rerun(voxel_size=voxel_size, mode="boxes"), + } + ), +).global_config(n_dask_workers=2, robot_model="mid360_fastlio2") diff --git a/dimos/hardware/sensors/lidar/fastlio2/module.py b/dimos/hardware/sensors/lidar/fastlio2/module.py new file mode 100644 index 0000000000..ee9a0783a0 --- /dev/null +++ b/dimos/hardware/sensors/lidar/fastlio2/module.py @@ -0,0 +1,148 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Python NativeModule wrapper for the FAST-LIO2 + Livox Mid-360 binary. + +Binds Livox SDK2 directly into FAST-LIO-NON-ROS for real-time LiDAR SLAM. +Outputs registered (world-frame) point clouds and odometry with covariance. + +Usage:: + + from dimos.hardware.sensors.lidar.fastlio2.module import FastLio2 + from dimos.core.blueprints import autoconnect + + autoconnect( + FastLio2.blueprint(host_ip="192.168.1.5"), + SomeConsumer.blueprint(), + ).build().loop() +""" + +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path +from typing import TYPE_CHECKING + +from dimos.core import Out # noqa: TC001 +from dimos.core.native_module import NativeModule, NativeModuleConfig +from dimos.hardware.sensors.lidar.livox.ports import ( + SDK_CMD_DATA_PORT, + SDK_HOST_CMD_DATA_PORT, + SDK_HOST_IMU_DATA_PORT, + SDK_HOST_LOG_DATA_PORT, + SDK_HOST_POINT_DATA_PORT, + SDK_HOST_PUSH_MSG_PORT, + SDK_IMU_DATA_PORT, + SDK_LOG_DATA_PORT, + SDK_POINT_DATA_PORT, + SDK_PUSH_MSG_PORT, +) +from dimos.msgs.nav_msgs.Odometry import Odometry # noqa: TC001 +from dimos.msgs.sensor_msgs.PointCloud2 import PointCloud2 # noqa: TC001 +from dimos.spec import mapping, perception + +_CONFIG_DIR = Path(__file__).parent / "config" + + +@dataclass(kw_only=True) +class FastLio2Config(NativeModuleConfig): + """Config for the FAST-LIO2 + Livox Mid-360 native module.""" + + cwd: str | None = "cpp" + executable: str = "result/bin/fastlio2_native" + build_command: str | None = "nix build .#fastlio2_native" + + # Livox SDK hardware config + host_ip: str = 
"192.168.1.5" + lidar_ip: str = "192.168.1.155" + frequency: float = 10.0 + + # Frame IDs for output messages + frame_id: str = "map" + child_frame_id: str = "body" + + # FAST-LIO internal processing rates + msr_freq: float = 50.0 + main_freq: float = 5000.0 + + # Output publish rates (Hz) + pointcloud_freq: float = 10.0 + odom_freq: float = 30.0 + + # Point cloud filtering + voxel_size: float = 0.1 + sor_mean_k: int = 50 + sor_stddev: float = 1.0 + + # Global voxel map (disabled when map_freq <= 0) + map_freq: float = 0.0 + map_voxel_size: float = 0.1 + map_max_range: float = 100.0 + + # FAST-LIO YAML config (relative to config/ dir, or absolute path) + # C++ binary reads YAML directly via yaml-cpp + config: str = "mid360.yaml" + + # SDK port configuration (see livox/ports.py for defaults) + cmd_data_port: int = SDK_CMD_DATA_PORT + push_msg_port: int = SDK_PUSH_MSG_PORT + point_data_port: int = SDK_POINT_DATA_PORT + imu_data_port: int = SDK_IMU_DATA_PORT + log_data_port: int = SDK_LOG_DATA_PORT + host_cmd_data_port: int = SDK_HOST_CMD_DATA_PORT + host_push_msg_port: int = SDK_HOST_PUSH_MSG_PORT + host_point_data_port: int = SDK_HOST_POINT_DATA_PORT + host_imu_data_port: int = SDK_HOST_IMU_DATA_PORT + host_log_data_port: int = SDK_HOST_LOG_DATA_PORT + + # Resolved in __post_init__, passed as --config_path to the binary + config_path: str | None = None + + # config is not a CLI arg (config_path is) + cli_exclude: frozenset[str] = frozenset({"config"}) + + def __post_init__(self) -> None: + if self.config_path is None: + path = Path(self.config) + if not path.is_absolute(): + path = _CONFIG_DIR / path + self.config_path = str(path.resolve()) + + +class FastLio2(NativeModule, perception.Lidar, perception.Odometry, mapping.GlobalPointcloud): + """FAST-LIO2 SLAM module with integrated Livox Mid-360 driver. + + Ports: + lidar (Out[PointCloud2]): World-frame registered point cloud. + odometry (Out[Odometry]): Pose with covariance at LiDAR scan rate. 
+ global_map (Out[PointCloud2]): Global voxel map (optional, enable via map_freq > 0). + """ + + default_config: type[FastLio2Config] = FastLio2Config # type: ignore[assignment] + lidar: Out[PointCloud2] + odometry: Out[Odometry] + global_map: Out[PointCloud2] + + +fastlio2_module = FastLio2.blueprint + +__all__ = [ + "FastLio2", + "FastLio2Config", + "fastlio2_module", +] + +# Verify protocol port compliance (mypy will flag missing ports) +if TYPE_CHECKING: + FastLio2() diff --git a/dimos/hardware/sensors/lidar/livox/__init__.py b/dimos/hardware/sensors/lidar/livox/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dimos/hardware/sensors/lidar/livox/cpp/CMakeLists.txt b/dimos/hardware/sensors/lidar/livox/cpp/CMakeLists.txt new file mode 100644 index 0000000000..b6641a2fc6 --- /dev/null +++ b/dimos/hardware/sensors/lidar/livox/cpp/CMakeLists.txt @@ -0,0 +1,57 @@ +cmake_minimum_required(VERSION 3.14) +project(livox_mid360_native CXX) + +set(CMAKE_CXX_STANDARD 17) +set(CMAKE_CXX_STANDARD_REQUIRED ON) + +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${CMAKE_SOURCE_DIR}/result" CACHE PATH "" FORCE) +endif() + +# Fetch dimos-lcm for C++ message headers +include(FetchContent) +FetchContent_Declare(dimos_lcm + GIT_REPOSITORY https://github.com/dimensionalOS/dimos-lcm.git + GIT_TAG main + GIT_SHALLOW TRUE +) +FetchContent_MakeAvailable(dimos_lcm) + +# Find LCM +find_package(PkgConfig REQUIRED) +pkg_check_modules(LCM REQUIRED lcm) + +# Livox SDK2 (from nix or /usr/local fallback) +find_library(LIVOX_SDK livox_lidar_sdk_shared) +if(NOT LIVOX_SDK) + message(FATAL_ERROR "Livox SDK2 not found. 
Available via nix flake in lidar/livox/") +endif() +get_filename_component(LIVOX_SDK_LIB_DIR ${LIVOX_SDK} DIRECTORY) +get_filename_component(LIVOX_SDK_PREFIX ${LIVOX_SDK_LIB_DIR} DIRECTORY) +set(LIVOX_SDK_INCLUDE_DIR ${LIVOX_SDK_PREFIX}/include) + +add_executable(mid360_native main.cpp) + +# Shared Livox common headers (livox_sdk_config.hpp etc.) +if(NOT LIVOX_COMMON_DIR) + set(LIVOX_COMMON_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../common) +endif() + +target_include_directories(mid360_native PRIVATE + ${dimos_lcm_SOURCE_DIR}/generated/cpp_lcm_msgs + ${LCM_INCLUDE_DIRS} + ${CMAKE_CURRENT_SOURCE_DIR} + ${LIVOX_COMMON_DIR} + ${LIVOX_SDK_INCLUDE_DIR} +) + +target_link_libraries(mid360_native PRIVATE + ${LCM_LIBRARIES} + ${LIVOX_SDK} +) + +target_link_directories(mid360_native PRIVATE + ${LCM_LIBRARY_DIRS} +) + +install(TARGETS mid360_native DESTINATION bin) diff --git a/dimos/hardware/sensors/lidar/livox/cpp/README.md b/dimos/hardware/sensors/lidar/livox/cpp/README.md new file mode 100644 index 0000000000..4db5248ce1 --- /dev/null +++ b/dimos/hardware/sensors/lidar/livox/cpp/README.md @@ -0,0 +1,114 @@ +# Livox Mid-360 Native Module (C++) + +Native C++ driver for the Livox Mid-360 LiDAR. Publishes PointCloud2 and IMU +data directly on LCM, bypassing Python for minimal latency. + +## Build + +### Nix (recommended) + +```bash +cd dimos/hardware/sensors/lidar/livox/cpp +nix build .#mid360_native +``` + +Binary lands at `result/bin/mid360_native`. + +To build just the Livox SDK2 library: + +```bash +nix build .#livox-sdk2 +``` + +### Native (CMake) + +Requires: +- CMake >= 3.14 +- [LCM](https://lcm-proj.github.io/) (`pacman -S lcm` or build from source) +- [Livox SDK2](https://github.com/Livox-SDK/Livox-SDK2) installed to `/usr/local` + +Installing Livox SDK2 manually: + +```bash +cd ~/src +git clone https://github.com/Livox-SDK/Livox-SDK2.git +cd Livox-SDK2 && mkdir build && cd build +cmake .. 
&& make -j$(nproc) +sudo make install +``` + +Then build: + +```bash +cd dimos/hardware/sensors/lidar/livox/cpp +cmake -B build +cmake --build build -j$(nproc) +cmake --install build +``` + +Binary lands at `result/bin/mid360_native` (same location as nix). + +CMake automatically fetches [dimos-lcm](https://github.com/dimensionalOS/dimos-lcm) +for the C++ message headers on first configure. + +## Network setup + +The Mid-360 communicates over USB ethernet. Configure the interface: + +```bash +sudo nmcli con add type ethernet ifname usbeth0 con-name livox-mid360 \ + ipv4.addresses 192.168.1.5/24 ipv4.method manual +sudo nmcli con up livox-mid360 +``` + +This persists across reboots. The lidar defaults to `192.168.1.155`. + +## Usage + +Normally launched by `Mid360` via the NativeModule framework: + +```python +from dimos.hardware.sensors.lidar.livox.module import Mid360 +from dimos.core.blueprints import autoconnect + +autoconnect( + Mid360.blueprint(host_ip="192.168.1.5"), + SomeConsumer.blueprint(), +).build().loop() +``` + +### Manual invocation (for debugging) + +```bash +./result/bin/mid360_native \ + --pointcloud '/pointcloud#sensor_msgs.PointCloud2' \ + --imu '/imu#sensor_msgs.Imu' \ + --host_ip 192.168.1.5 \ + --lidar_ip 192.168.1.155 \ + --frequency 10 +``` + +Topic strings must include the `#type` suffix -- this is the actual LCM channel +name used by dimos subscribers. 
+ +View data in another terminal: + +For full vis: +```sh +rerun-bridge +``` + +For LCM traffic: +```sh +lcm-spy +``` + +## File overview + +| File | Description | +|---------------------------|----------------------------------------------------------| +| `main.cpp` | Livox SDK2 callbacks, frame accumulation, LCM publishing | +| `dimos_native_module.hpp` | Reusable header for parsing NativeModule CLI args | +| `flake.nix` | Nix flake for hermetic builds | +| `CMakeLists.txt` | Build config, fetches dimos-lcm headers automatically | +| `../module.py` | Python NativeModule wrapper (`Mid360`) | diff --git a/dimos/hardware/sensors/lidar/livox/cpp/flake.lock b/dimos/hardware/sensors/lidar/livox/cpp/flake.lock new file mode 100644 index 0000000000..58e8252be8 --- /dev/null +++ b/dimos/hardware/sensors/lidar/livox/cpp/flake.lock @@ -0,0 +1,79 @@ +{ + "nodes": { + "dimos-lcm": { + "flake": false, + "locked": { + "lastModified": 1769774949, + "narHash": "sha256-icRK7jerqNlwK1WZBrnIP04I2WozzFqTD7qsmnPxQuo=", + "owner": "dimensionalOS", + "repo": "dimos-lcm", + "rev": "0aa72b7b1bd3a65f50f5c03485ee9b728df56afe", + "type": "github" + }, + "original": { + "owner": "dimensionalOS", + "ref": "main", + "repo": "dimos-lcm", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1770841267, + "narHash": "sha256-9xejG0KoqsoKEGp2kVbXRlEYtFFcDTHjidiuX8hGO44=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "ec7c70d12ce2fc37cb92aff673dcdca89d187bae", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, 
+ "root": { + "inputs": { + "dimos-lcm": "dimos-lcm", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/dimos/hardware/sensors/lidar/livox/cpp/flake.nix b/dimos/hardware/sensors/lidar/livox/cpp/flake.nix new file mode 100644 index 0000000000..eeb06b33a6 --- /dev/null +++ b/dimos/hardware/sensors/lidar/livox/cpp/flake.nix @@ -0,0 +1,71 @@ +{ + description = "Livox SDK2 and Mid-360 native module"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + flake-utils.url = "github:numtide/flake-utils"; + dimos-lcm = { + url = "github:dimensionalOS/dimos-lcm/main"; + flake = false; + }; + }; + + outputs = { self, nixpkgs, flake-utils, dimos-lcm, ... 
}: + flake-utils.lib.eachDefaultSystem (system: + let + pkgs = import nixpkgs { inherit system; }; + + livox-sdk2 = pkgs.stdenv.mkDerivation rec { + pname = "livox-sdk2"; + version = "1.2.5"; + + src = pkgs.fetchFromGitHub { + owner = "Livox-SDK"; + repo = "Livox-SDK2"; + rev = "v${version}"; + hash = "sha256-NGscO/vLiQ17yQJtdPyFzhhMGE89AJ9kTL5cSun/bpU="; + }; + + nativeBuildInputs = [ pkgs.cmake ]; + + cmakeFlags = [ + "-DBUILD_SHARED_LIBS=ON" + "-DCMAKE_POLICY_VERSION_MINIMUM=3.5" + ]; + + preConfigure = '' + substituteInPlace CMakeLists.txt \ + --replace-fail "add_subdirectory(samples)" "" + sed -i '1i #include ' sdk_core/comm/define.h + sed -i '1i #include ' sdk_core/logger_handler/file_manager.h + ''; + }; + + livox-common = ../../common; + + mid360_native = pkgs.stdenv.mkDerivation { + pname = "mid360_native"; + version = "0.1.0"; + + src = ./.; + + nativeBuildInputs = [ pkgs.cmake pkgs.pkg-config ]; + buildInputs = [ livox-sdk2 pkgs.lcm pkgs.glib ]; + + cmakeFlags = [ + "-DCMAKE_POLICY_VERSION_MINIMUM=3.5" + "-DFETCHCONTENT_SOURCE_DIR_DIMOS_LCM=${dimos-lcm}" + "-DLIVOX_COMMON_DIR=${livox-common}" + ]; + }; + in { + packages = { + default = mid360_native; + inherit livox-sdk2 mid360_native; + }; + + devShells.default = pkgs.mkShell { + buildInputs = [ livox-sdk2 ]; + }; + }); +} diff --git a/dimos/hardware/sensors/lidar/livox/cpp/main.cpp b/dimos/hardware/sensors/lidar/livox/cpp/main.cpp new file mode 100644 index 0000000000..cdf083ef3b --- /dev/null +++ b/dimos/hardware/sensors/lidar/livox/cpp/main.cpp @@ -0,0 +1,341 @@ +// Copyright 2026 Dimensional Inc. +// SPDX-License-Identifier: Apache-2.0 +// +// Livox Mid-360 native module for dimos NativeModule framework. +// +// Publishes PointCloud2 and Imu messages on LCM topics received via CLI args. +// Usage: ./mid360_native --lidar --imu [--host_ip ] [--lidar_ip ] ... 
+ +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "livox_sdk_config.hpp" + +#include "dimos_native_module.hpp" + +#include "geometry_msgs/Quaternion.hpp" +#include "geometry_msgs/Vector3.hpp" +#include "sensor_msgs/Imu.hpp" +#include "sensor_msgs/PointCloud2.hpp" +#include "sensor_msgs/PointField.hpp" + +using livox_common::GRAVITY_MS2; +using livox_common::DATA_TYPE_IMU; +using livox_common::DATA_TYPE_CARTESIAN_HIGH; +using livox_common::DATA_TYPE_CARTESIAN_LOW; + +// --------------------------------------------------------------------------- +// Global state +// --------------------------------------------------------------------------- + +static std::atomic g_running{true}; +static lcm::LCM* g_lcm = nullptr; +static std::string g_lidar_topic; +static std::string g_imu_topic; +static std::string g_frame_id = "lidar_link"; +static std::string g_imu_frame_id = "imu_link"; +static float g_frequency = 10.0f; + +// Frame accumulator +static std::mutex g_pc_mutex; +static std::vector g_accumulated_xyz; // interleaved x,y,z +static std::vector g_accumulated_intensity; // per-point intensity +static double g_frame_timestamp = 0.0; +static bool g_frame_has_timestamp = false; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +static double get_timestamp_ns(const LivoxLidarEthernetPacket* pkt) { + uint64_t ns = 0; + std::memcpy(&ns, pkt->timestamp, sizeof(uint64_t)); + return static_cast(ns); +} + +using dimos::time_from_seconds; +using dimos::make_header; + +// --------------------------------------------------------------------------- +// Build and publish PointCloud2 +// --------------------------------------------------------------------------- + +static void publish_pointcloud(const std::vector& xyz, + const std::vector& intensity, + double timestamp) { + if (!g_lcm || 
xyz.empty()) return; + + int num_points = static_cast(xyz.size()) / 3; + + sensor_msgs::PointCloud2 pc; + pc.header = make_header(g_frame_id, timestamp); + pc.height = 1; + pc.width = num_points; + pc.is_bigendian = 0; + pc.is_dense = 1; + + // Fields: x, y, z (float32), intensity (float32) + pc.fields_length = 4; + pc.fields.resize(4); + + auto make_field = [](const std::string& name, int32_t offset) { + sensor_msgs::PointField f; + f.name = name; + f.offset = offset; + f.datatype = sensor_msgs::PointField::FLOAT32; + f.count = 1; + return f; + }; + + pc.fields[0] = make_field("x", 0); + pc.fields[1] = make_field("y", 4); + pc.fields[2] = make_field("z", 8); + pc.fields[3] = make_field("intensity", 12); + + pc.point_step = 16; // 4 floats * 4 bytes + pc.row_step = pc.point_step * num_points; + + // Pack point data + pc.data_length = pc.row_step; + pc.data.resize(pc.data_length); + + for (int i = 0; i < num_points; ++i) { + float* dst = reinterpret_cast(pc.data.data() + i * 16); + dst[0] = xyz[i * 3 + 0]; + dst[1] = xyz[i * 3 + 1]; + dst[2] = xyz[i * 3 + 2]; + dst[3] = intensity[i]; + } + + g_lcm->publish(g_lidar_topic, &pc); +} + +// --------------------------------------------------------------------------- +// SDK callbacks +// --------------------------------------------------------------------------- + +static void on_point_cloud(const uint32_t /*handle*/, const uint8_t /*dev_type*/, + LivoxLidarEthernetPacket* data, void* /*client_data*/) { + if (!g_running.load() || data == nullptr) return; + + double ts_ns = get_timestamp_ns(data); + double ts = ts_ns / 1e9; + uint16_t dot_num = data->dot_num; + + std::lock_guard lock(g_pc_mutex); + + if (!g_frame_has_timestamp) { + g_frame_timestamp = ts; + g_frame_has_timestamp = true; + } + + if (data->data_type == DATA_TYPE_CARTESIAN_HIGH) { + auto* pts = reinterpret_cast(data->data); + for (uint16_t i = 0; i < dot_num; ++i) { + // Livox high-precision coordinates are in mm, convert to meters + 
g_accumulated_xyz.push_back(static_cast(pts[i].x) / 1000.0f); + g_accumulated_xyz.push_back(static_cast(pts[i].y) / 1000.0f); + g_accumulated_xyz.push_back(static_cast(pts[i].z) / 1000.0f); + g_accumulated_intensity.push_back(static_cast(pts[i].reflectivity) / 255.0f); + } + } else if (data->data_type == DATA_TYPE_CARTESIAN_LOW) { + auto* pts = reinterpret_cast(data->data); + for (uint16_t i = 0; i < dot_num; ++i) { + // Livox low-precision coordinates are in cm, convert to meters + g_accumulated_xyz.push_back(static_cast(pts[i].x) / 100.0f); + g_accumulated_xyz.push_back(static_cast(pts[i].y) / 100.0f); + g_accumulated_xyz.push_back(static_cast(pts[i].z) / 100.0f); + g_accumulated_intensity.push_back(static_cast(pts[i].reflectivity) / 255.0f); + } + } +} + +static void on_imu_data(const uint32_t /*handle*/, const uint8_t /*dev_type*/, + LivoxLidarEthernetPacket* data, void* /*client_data*/) { + if (!g_running.load() || data == nullptr || !g_lcm) return; + if (g_imu_topic.empty()) return; + + double ts = get_timestamp_ns(data) / 1e9; + auto* imu_pts = reinterpret_cast(data->data); + uint16_t dot_num = data->dot_num; + + for (uint16_t i = 0; i < dot_num; ++i) { + sensor_msgs::Imu msg; + msg.header = make_header(g_imu_frame_id, ts); + + // Orientation unknown — set to identity with high covariance + msg.orientation.x = 0.0; + msg.orientation.y = 0.0; + msg.orientation.z = 0.0; + msg.orientation.w = 1.0; + msg.orientation_covariance[0] = -1.0; // indicates unknown + + msg.angular_velocity.x = static_cast(imu_pts[i].gyro_x); + msg.angular_velocity.y = static_cast(imu_pts[i].gyro_y); + msg.angular_velocity.z = static_cast(imu_pts[i].gyro_z); + + msg.linear_acceleration.x = static_cast(imu_pts[i].acc_x) * GRAVITY_MS2; + msg.linear_acceleration.y = static_cast(imu_pts[i].acc_y) * GRAVITY_MS2; + msg.linear_acceleration.z = static_cast(imu_pts[i].acc_z) * GRAVITY_MS2; + + g_lcm->publish(g_imu_topic, &msg); + } +} + +static void on_info_change(const uint32_t handle, const 
LivoxLidarInfo* info, + void* /*client_data*/) { + if (info == nullptr) return; + + char sn[17] = {}; + std::memcpy(sn, info->sn, 16); + char ip[17] = {}; + std::memcpy(ip, info->lidar_ip, 16); + + printf("[mid360] Device connected: handle=%u type=%u sn=%s ip=%s\n", + handle, info->dev_type, sn, ip); + + // Set to normal work mode + SetLivoxLidarWorkMode(handle, kLivoxLidarNormal, nullptr, nullptr); + + // Enable IMU + if (!g_imu_topic.empty()) { + EnableLivoxLidarImuData(handle, nullptr, nullptr); + } +} + +// --------------------------------------------------------------------------- +// Signal handling +// --------------------------------------------------------------------------- + +static void signal_handler(int /*sig*/) { + g_running.store(false); +} + +// --------------------------------------------------------------------------- +// Main +// --------------------------------------------------------------------------- + +int main(int argc, char** argv) { + dimos::NativeModule mod(argc, argv); + + // Required: LCM topics for ports + g_lidar_topic = mod.has("lidar") ? mod.topic("lidar") : ""; + g_imu_topic = mod.has("imu") ? 
mod.topic("imu") : ""; + + if (g_lidar_topic.empty()) { + fprintf(stderr, "Error: --lidar is required\n"); + return 1; + } + + // Optional config args + std::string host_ip = mod.arg("host_ip", "192.168.1.5"); + std::string lidar_ip = mod.arg("lidar_ip", "192.168.1.155"); + g_frequency = mod.arg_float("frequency", 10.0f); + g_frame_id = mod.arg("frame_id", "lidar_link"); + g_imu_frame_id = mod.arg("imu_frame_id", "imu_link"); + + // SDK network ports (defaults from SdkPorts struct in livox_sdk_config.hpp) + livox_common::SdkPorts ports; + const livox_common::SdkPorts port_defaults; + ports.cmd_data = mod.arg_int("cmd_data_port", port_defaults.cmd_data); + ports.push_msg = mod.arg_int("push_msg_port", port_defaults.push_msg); + ports.point_data = mod.arg_int("point_data_port", port_defaults.point_data); + ports.imu_data = mod.arg_int("imu_data_port", port_defaults.imu_data); + ports.log_data = mod.arg_int("log_data_port", port_defaults.log_data); + ports.host_cmd_data = mod.arg_int("host_cmd_data_port", port_defaults.host_cmd_data); + ports.host_push_msg = mod.arg_int("host_push_msg_port", port_defaults.host_push_msg); + ports.host_point_data = mod.arg_int("host_point_data_port", port_defaults.host_point_data); + ports.host_imu_data = mod.arg_int("host_imu_data_port", port_defaults.host_imu_data); + ports.host_log_data = mod.arg_int("host_log_data_port", port_defaults.host_log_data); + + printf("[mid360] Starting native Livox Mid-360 module\n"); + printf("[mid360] lidar topic: %s\n", g_lidar_topic.c_str()); + printf("[mid360] imu topic: %s\n", g_imu_topic.empty() ? 
"(disabled)" : g_imu_topic.c_str()); + printf("[mid360] host_ip: %s lidar_ip: %s frequency: %.1f Hz\n", + host_ip.c_str(), lidar_ip.c_str(), g_frequency); + + // Signal handlers + signal(SIGTERM, signal_handler); + signal(SIGINT, signal_handler); + + // Init LCM + lcm::LCM lcm; + if (!lcm.good()) { + fprintf(stderr, "Error: LCM init failed\n"); + return 1; + } + g_lcm = &lcm; + + // Init Livox SDK (in-memory config, no temp files) + if (!livox_common::init_livox_sdk(host_ip, lidar_ip, ports)) { + return 1; + } + + // Register callbacks + SetLivoxLidarPointCloudCallBack(on_point_cloud, nullptr); + if (!g_imu_topic.empty()) { + SetLivoxLidarImuDataCallback(on_imu_data, nullptr); + } + SetLivoxLidarInfoChangeCallback(on_info_change, nullptr); + + // Start SDK + if (!LivoxLidarSdkStart()) { + fprintf(stderr, "Error: LivoxLidarSdkStart failed\n"); + LivoxLidarSdkUninit(); + return 1; + } + + printf("[mid360] SDK started, waiting for device...\n"); + + // Main loop: periodically emit accumulated point clouds + auto frame_interval = std::chrono::microseconds( + static_cast(1e6 / g_frequency)); + auto last_emit = std::chrono::steady_clock::now(); + + while (g_running.load()) { + // Handle LCM (for any subscriptions, though we mostly publish) + lcm.handleTimeout(10); // 10ms timeout + + auto now = std::chrono::steady_clock::now(); + if (now - last_emit >= frame_interval) { + // Swap out the accumulated data + std::vector xyz; + std::vector intensity; + double ts = 0.0; + + { + std::lock_guard lock(g_pc_mutex); + if (!g_accumulated_xyz.empty()) { + xyz.swap(g_accumulated_xyz); + intensity.swap(g_accumulated_intensity); + ts = g_frame_timestamp; + g_frame_has_timestamp = false; + } + } + + if (!xyz.empty()) { + publish_pointcloud(xyz, intensity, ts); + } + + last_emit = now; + } + } + + // Cleanup + printf("[mid360] Shutting down...\n"); + LivoxLidarSdkUninit(); + g_lcm = nullptr; + + printf("[mid360] Done.\n"); + return 0; +} diff --git 
a/dimos/hardware/sensors/lidar/livox/livox_blueprints.py b/dimos/hardware/sensors/lidar/livox/livox_blueprints.py new file mode 100644 index 0000000000..0cda912b73 --- /dev/null +++ b/dimos/hardware/sensors/lidar/livox/livox_blueprints.py @@ -0,0 +1,22 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dimos.core.blueprints import autoconnect +from dimos.hardware.sensors.lidar.livox.module import Mid360 +from dimos.visualization.rerun.bridge import rerun_bridge + +mid360 = autoconnect( + Mid360.blueprint(), + rerun_bridge(), +).global_config(n_dask_workers=2, robot_model="mid360") diff --git a/dimos/hardware/sensors/lidar/livox/module.py b/dimos/hardware/sensors/lidar/livox/module.py new file mode 100644 index 0000000000..672968a0eb --- /dev/null +++ b/dimos/hardware/sensors/lidar/livox/module.py @@ -0,0 +1,104 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Python NativeModule wrapper for the C++ Livox Mid-360 driver. + +Usage:: + from dimos.hardware.sensors.lidar.livox.module import Mid360 + from dimos.core.blueprints import autoconnect + + autoconnect( + Mid360.blueprint(host_ip="192.168.1.5"), + SomeConsumer.blueprint(), + ).build().loop() +""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from dimos.core import Out # noqa: TC001 +from dimos.core.native_module import NativeModule, NativeModuleConfig +from dimos.hardware.sensors.lidar.livox.ports import ( + SDK_CMD_DATA_PORT, + SDK_HOST_CMD_DATA_PORT, + SDK_HOST_IMU_DATA_PORT, + SDK_HOST_LOG_DATA_PORT, + SDK_HOST_POINT_DATA_PORT, + SDK_HOST_PUSH_MSG_PORT, + SDK_IMU_DATA_PORT, + SDK_LOG_DATA_PORT, + SDK_POINT_DATA_PORT, + SDK_PUSH_MSG_PORT, +) +from dimos.msgs.sensor_msgs.Imu import Imu # noqa: TC001 +from dimos.msgs.sensor_msgs.PointCloud2 import PointCloud2 # noqa: TC001 +from dimos.spec import perception + + +@dataclass(kw_only=True) +class Mid360Config(NativeModuleConfig): + """Config for the C++ Mid-360 native module.""" + + cwd: str | None = "cpp" + executable: str = "result/bin/mid360_native" + build_command: str | None = "nix build .#mid360_native" + + host_ip: str = "192.168.1.5" + lidar_ip: str = "192.168.1.155" + frequency: float = 10.0 + enable_imu: bool = True + frame_id: str = "lidar_link" + imu_frame_id: str = "imu_link" + + # SDK port configuration (see livox/ports.py for defaults) + cmd_data_port: int = SDK_CMD_DATA_PORT + push_msg_port: int = SDK_PUSH_MSG_PORT + point_data_port: int = SDK_POINT_DATA_PORT + imu_data_port: int = SDK_IMU_DATA_PORT + log_data_port: int = SDK_LOG_DATA_PORT + host_cmd_data_port: int = SDK_HOST_CMD_DATA_PORT + host_push_msg_port: int = SDK_HOST_PUSH_MSG_PORT + host_point_data_port: int = SDK_HOST_POINT_DATA_PORT + host_imu_data_port: int = SDK_HOST_IMU_DATA_PORT + host_log_data_port: int = SDK_HOST_LOG_DATA_PORT + + +class Mid360(NativeModule, 
perception.Lidar, perception.IMU): + """Livox Mid-360 LiDAR module backed by a native C++ binary. + + Ports: + lidar (Out[PointCloud2]): Point cloud frames at configured frequency. + imu (Out[Imu]): IMU data at ~200 Hz (if enabled). + """ + + config: Mid360Config + default_config = Mid360Config + + lidar: Out[PointCloud2] + imu: Out[Imu] + + +mid360_module = Mid360.blueprint + +__all__ = [ + "Mid360", + "Mid360Config", + "mid360_module", +] + +# Verify protocol port compliance (mypy will flag missing ports) +if TYPE_CHECKING: + Mid360() diff --git a/dimos/hardware/sensors/lidar/livox/ports.py b/dimos/hardware/sensors/lidar/livox/ports.py new file mode 100644 index 0000000000..9ad83251d6 --- /dev/null +++ b/dimos/hardware/sensors/lidar/livox/ports.py @@ -0,0 +1,31 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Default Livox SDK2 network port constants. + +These match the defaults in ``common/livox_sdk_config.hpp`` (``SdkPorts``). +Both the Mid-360 driver and FAST-LIO2 modules reference this single source +so port numbers are defined in one place on the Python side. 
+""" + +SDK_CMD_DATA_PORT = 56100 +SDK_PUSH_MSG_PORT = 56200 +SDK_POINT_DATA_PORT = 56300 +SDK_IMU_DATA_PORT = 56400 +SDK_LOG_DATA_PORT = 56500 +SDK_HOST_CMD_DATA_PORT = 56101 +SDK_HOST_PUSH_MSG_PORT = 56201 +SDK_HOST_POINT_DATA_PORT = 56301 +SDK_HOST_IMU_DATA_PORT = 56401 +SDK_HOST_LOG_DATA_PORT = 56501 diff --git a/dimos/manipulation/__init__.py b/dimos/manipulation/__init__.py index e69de29bb2..3ed1863092 100644 --- a/dimos/manipulation/__init__.py +++ b/dimos/manipulation/__init__.py @@ -0,0 +1,29 @@ +# Copyright 2025 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Manipulation module for robot arm motion planning and control.""" + +from dimos.manipulation.manipulation_module import ( + ManipulationModule, + ManipulationModuleConfig, + ManipulationState, + manipulation_module, +) + +__all__ = [ + "ManipulationModule", + "ManipulationModuleConfig", + "ManipulationState", + "manipulation_module", +] diff --git a/dimos/manipulation/control/orchestrator_client.py b/dimos/manipulation/control/coordinator_client.py similarity index 81% rename from dimos/manipulation/control/orchestrator_client.py rename to dimos/manipulation/control/coordinator_client.py index 84e85dfb3d..4e277fae97 100644 --- a/dimos/manipulation/control/orchestrator_client.py +++ b/dimos/manipulation/control/coordinator_client.py @@ -14,30 +14,30 @@ # limitations under the License. """ -Interactive client for the ControlOrchestrator. 
+Interactive client for the ControlCoordinator. -Interfaces with a running ControlOrchestrator via RPC to: +Interfaces with a running ControlCoordinator via RPC to: - Query hardware and task status - Plan and execute trajectories on single or multiple arms - Monitor execution progress Usage: - # Terminal 1: Start the orchestrator - dimos run orchestrator-mock # Single arm - dimos run orchestrator-dual-mock # Dual arm + # Terminal 1: Start the coordinator + dimos run coordinator-mock # Single arm + dimos run coordinator-dual-mock # Dual arm # Terminal 2: Run this client - python -m dimos.manipulation.control.orchestrator_client - python -m dimos.manipulation.control.orchestrator_client --task traj_left - python -m dimos.manipulation.control.orchestrator_client --task traj_right + python -m dimos.manipulation.control.coordinator_client + python -m dimos.manipulation.control.coordinator_client --task traj_left + python -m dimos.manipulation.control.coordinator_client --task traj_right How it works: - 1. Connects to ControlOrchestrator via LCM RPC + 1. Connects to ControlCoordinator via LCM RPC 2. Queries available hardware/tasks/joints 3. You add waypoints (joint positions) 4. Generates trajectory with trapezoidal velocity profile - 5. Sends trajectory to orchestrator via execute_trajectory() RPC - 6. Orchestrator's tick loop executes it at 100Hz + 5. Sends trajectory to coordinator via execute_trajectory() RPC + 6. 
Coordinator's tick loop executes it at 100Hz """ from __future__ import annotations @@ -47,25 +47,27 @@ import time from typing import TYPE_CHECKING, Any -from dimos.control.orchestrator import ControlOrchestrator +from dimos.control.coordinator import ControlCoordinator from dimos.core.rpc_client import RPCClient -from dimos.manipulation.planning import JointTrajectoryGenerator +from dimos.manipulation.planning.trajectory_generator.joint_trajectory_generator import ( + JointTrajectoryGenerator, +) if TYPE_CHECKING: from dimos.msgs.trajectory_msgs import JointTrajectory -class OrchestratorClient: +class CoordinatorClient: """ - RPC client for the ControlOrchestrator. + RPC client for the ControlCoordinator. - Connects to a running orchestrator and provides methods to: + Connects to a running coordinator and provides methods to: - Query state (joints, tasks, hardware) - Execute trajectories on any task - Monitor progress Example: - client = OrchestratorClient() + client = CoordinatorClient() # Query state print(client.list_hardware()) # ['left_arm', 'right_arm'] @@ -84,8 +86,8 @@ class OrchestratorClient: """ def __init__(self) -> None: - """Initialize connection to orchestrator via RPC.""" - self._rpc = RPCClient(None, ControlOrchestrator) + """Initialize connection to coordinator via RPC.""" + self._rpc = RPCClient(None, ControlCoordinator) # Per-task state self._current_task: str | None = None @@ -121,20 +123,25 @@ def get_joint_positions(self) -> dict[str, float]: return self._rpc.get_joint_positions() or {} def get_trajectory_status(self, task_name: str) -> dict[str, Any]: - """Get status of a trajectory task.""" - return self._rpc.get_trajectory_status(task_name) or {} + """Get status of a trajectory task via task_invoke.""" + result = self._rpc.task_invoke(task_name, "get_state", {}) + if result is not None: + return {"state": int(result), "task": task_name} + return {} # ========================================================================= - # Trajectory 
execution (RPC calls) + # Trajectory execution (via task_invoke) # ========================================================================= def execute_trajectory(self, task_name: str, trajectory: JointTrajectory) -> bool: - """Execute a trajectory on a task.""" - return self._rpc.execute_trajectory(task_name, trajectory) or False + """Execute a trajectory on a task via task_invoke.""" + result = self._rpc.task_invoke(task_name, "execute", {"trajectory": trajectory}) + return bool(result) def cancel_trajectory(self, task_name: str) -> bool: - """Cancel an active trajectory.""" - return self._rpc.cancel_trajectory(task_name) or False + """Cancel an active trajectory via task_invoke.""" + result = self._rpc.task_invoke(task_name, "cancel", {}) + return bool(result) # ========================================================================= # Task selection and setup @@ -144,7 +151,7 @@ def select_task(self, task_name: str) -> bool: """ Select a task and setup its trajectory generator. - This queries the orchestrator to find which joints the task controls, + This queries the coordinator to find which joints the task controls, then creates a trajectory generator for those joints. 
""" tasks = self.list_tasks() @@ -155,14 +162,18 @@ def select_task(self, task_name: str) -> bool: self._current_task = task_name # Get joints for this task (infer from task name pattern) - # e.g., "traj_left" -> joints starting with "left_" + # e.g., "traj_left" -> joints starting with "left_arm_" (hardware_id based naming) # e.g., "traj_arm" -> joints starting with "arm_" all_joints = self.list_joints() - # Try to infer prefix from task name + # Try to infer hardware_id from task name if "_" in task_name: - prefix = task_name.split("_", 1)[1] # "traj_left" -> "left" - task_joints = [j for j in all_joints if j.startswith(prefix + "_")] + suffix = task_name.split("_", 1)[1] # "traj_left" -> "left" + # Try both patterns: exact suffix (e.g., "arm_") and with "_arm" suffix (e.g., "left_arm_") + task_joints = [j for j in all_joints if j.startswith(suffix + "_")] + if not task_joints: + # Try with "_arm" suffix for dual-arm setups (left -> left_arm) + task_joints = [j for j in all_joints if j.startswith(suffix + "_arm_")] else: task_joints = all_joints @@ -313,36 +324,42 @@ def preview_trajectory(trajectory: JointTrajectory, joint_names: list[str]) -> N print("=" * 70) -def wait_for_completion(client: OrchestratorClient, task_name: str, timeout: float = 60.0) -> bool: - """Wait for trajectory to complete with progress display.""" +def wait_for_completion(client: CoordinatorClient, task_name: str, timeout: float = 60.0) -> bool: + """Wait for trajectory to complete by polling task state. + + TrajectoryState is an IntEnum: IDLE=0, EXECUTING=1, COMPLETED=2, ABORTED=3, FAULT=4. 
+ """ start = time.time() - last_progress = -1.0 + _STATE_NAMES = {0: "IDLE", 1: "EXECUTING", 2: "COMPLETED", 3: "ABORTED", 4: "FAULT"} while time.time() - start < timeout: status = client.get_trajectory_status(task_name) - if not status.get("active", False): - state: str = status.get("state", "UNKNOWN") - print(f"\nTrajectory finished: {state}") - return state == "COMPLETED" + if not status: + print("\nCould not get trajectory status") + return False - progress = status.get("progress", 0.0) - if progress != last_progress: - bar_len = 30 - filled = int(bar_len * progress) - bar = "=" * filled + "-" * (bar_len - filled) - print(f"\r[{bar}] {progress * 100:.1f}%", end="", flush=True) - last_progress = progress + state_val = status.get("state") + state_name = _STATE_NAMES.get(state_val, f"UNKNOWN({state_val})") # type: ignore[arg-type] - time.sleep(0.05) + if state_val in (0, 2): # IDLE or COMPLETED + print(f"\nTrajectory finished: {state_name}") + return True + if state_val in (3, 4): # ABORTED or FAULT + print(f"\nTrajectory failed: {state_name}") + return False + # state_val == 1 means EXECUTING, keep polling + elapsed = time.time() - start + print(f"\r Executing... 
({elapsed:.1f}s)", end="", flush=True) + time.sleep(0.1) print("\nTimeout waiting for trajectory") return False -class OrchestratorShell: - """IPython shell interface for orchestrator control.""" +class CoordinatorShell: + """IPython shell interface for coordinator control.""" - def __init__(self, client: OrchestratorClient, initial_task: str) -> None: + def __init__(self, client: CoordinatorClient, initial_task: str) -> None: self._client = client self._current_task = initial_task self._waypoints: list[list[float]] = [] @@ -359,7 +376,7 @@ def _num_joints(self) -> int: def help(self) -> None: """Show available commands.""" - print("\nOrchestrator Client Commands:") + print("\nCoordinator Client Commands:") print("=" * 60) print("Waypoint Commands:") print(" here() - Add current position as waypoint") @@ -465,12 +482,12 @@ def run(self) -> None: def status(self) -> None: """Show task status.""" + _STATE_NAMES = {0: "IDLE", 1: "EXECUTING", 2: "COMPLETED", 3: "ABORTED", 4: "FAULT"} status = self._client.get_trajectory_status(self._current_task) + state_val = status.get("state") + state_name = _STATE_NAMES.get(state_val, f"UNKNOWN({state_val})") # type: ignore[arg-type] print(f"\nTask: {self._current_task}") - print(f" Active: {status.get('active', False)}") - print(f" State: {status.get('state', 'UNKNOWN')}") - if "progress" in status: - print(f" Progress: {status['progress'] * 100:.1f}%") + print(f" State: {state_name} ({state_val})") def cancel(self) -> None: """Cancel active trajectory.""" @@ -557,14 +574,14 @@ def accel(self, value: float | None = None) -> None: print(f"Max acceleration: {value:.2f} rad/s^2") -def interactive_mode(client: OrchestratorClient, initial_task: str) -> None: +def interactive_mode(client: CoordinatorClient, initial_task: str) -> None: """Start IPython interactive mode.""" import IPython - shell = OrchestratorShell(client, initial_task) + shell = CoordinatorShell(client, initial_task) print("\n" + "=" * 60) - print(f"Orchestrator Client 
(IPython) - Task: {initial_task}") + print(f"Coordinator Client (IPython) - Task: {initial_task}") print("=" * 60) print(f"Joints: {', '.join(shell._joints())}") print("\nType help() for available commands") @@ -596,15 +613,15 @@ def interactive_mode(client: OrchestratorClient, initial_task: str) -> None: ) -def _run_client(client: OrchestratorClient, task: str, vel: float, accel: float) -> int: +def _run_client(client: CoordinatorClient, task: str, vel: float, accel: float) -> int: """Run the client with the given configuration.""" try: hardware = client.list_hardware() tasks = client.list_tasks() if not hardware: - print("\nWarning: No hardware found. Is the orchestrator running?") - print("Start with: dimos run orchestrator-mock") + print("\nWarning: No hardware found. Is the coordinator running?") + print("Start with: dimos run coordinator-mock") response = input("Continue anyway? [y/N]: ").strip().lower() if response != "y": return 0 @@ -614,7 +631,7 @@ def _run_client(client: OrchestratorClient, task: str, vel: float, accel: float) except Exception as e: print(f"\nConnection error: {e}") - print("Make sure orchestrator is running: dimos run orchestrator-mock") + print("Make sure coordinator is running: dimos run coordinator-mock") return 1 if task not in tasks and tasks: @@ -636,18 +653,18 @@ def main() -> int: import argparse parser = argparse.ArgumentParser( - description="Interactive client for ControlOrchestrator", + description="Interactive client for ControlCoordinator", formatter_class=argparse.RawDescriptionHelpFormatter, epilog=""" Examples: - # Single arm (with orchestrator-mock running) - python -m dimos.manipulation.control.orchestrator_client + # Single arm (with coordinator-mock running) + python -m dimos.manipulation.control.coordinator_client # Dual arm - control left arm - python -m dimos.manipulation.control.orchestrator_client --task traj_left + python -m dimos.manipulation.control.coordinator_client --task traj_left # Dual arm - control 
right arm - python -m dimos.manipulation.control.orchestrator_client --task traj_right + python -m dimos.manipulation.control.coordinator_client --task traj_right """, ) parser.add_argument( @@ -671,11 +688,11 @@ def main() -> int: args = parser.parse_args() print("\n" + "=" * 70) - print("Orchestrator Client") + print("Coordinator Client") print("=" * 70) - print("\nConnecting to ControlOrchestrator via RPC...") + print("\nConnecting to ControlCoordinator via RPC...") - client = OrchestratorClient() + client = CoordinatorClient() try: return _run_client(client, args.task, args.vel, args.accel) finally: diff --git a/dimos/manipulation/control/dual_trajectory_setter.py b/dimos/manipulation/control/dual_trajectory_setter.py index 4b54f0e3e5..4f8a8802e1 100644 --- a/dimos/manipulation/control/dual_trajectory_setter.py +++ b/dimos/manipulation/control/dual_trajectory_setter.py @@ -34,7 +34,9 @@ import time from dimos import core -from dimos.manipulation.planning import JointTrajectoryGenerator +from dimos.manipulation.planning.trajectory_generator.joint_trajectory_generator import ( + JointTrajectoryGenerator, +) from dimos.msgs.sensor_msgs import JointState from dimos.msgs.trajectory_msgs import JointTrajectory diff --git a/dimos/manipulation/control/trajectory_setter.py b/dimos/manipulation/control/trajectory_setter.py index 5b8b2ff234..bad3854521 100644 --- a/dimos/manipulation/control/trajectory_setter.py +++ b/dimos/manipulation/control/trajectory_setter.py @@ -33,7 +33,9 @@ import time from dimos import core -from dimos.manipulation.planning import JointTrajectoryGenerator +from dimos.manipulation.planning.trajectory_generator.joint_trajectory_generator import ( + JointTrajectoryGenerator, +) from dimos.msgs.sensor_msgs import JointState from dimos.msgs.trajectory_msgs import JointTrajectory diff --git a/dimos/manipulation/grasping/__init__.py b/dimos/manipulation/grasping/__init__.py new file mode 100644 index 0000000000..41779f55e7 --- /dev/null +++ 
b/dimos/manipulation/grasping/__init__.py @@ -0,0 +1,30 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from dimos.manipulation.grasping.graspgen_module import ( + GraspGenConfig, + GraspGenModule, + graspgen, +) +from dimos.manipulation.grasping.grasping import ( + GraspingModule, + grasping_module, +) + +__all__ = [ + "GraspGenConfig", + "GraspGenModule", + "GraspingModule", + "graspgen", + "grasping_module", +] diff --git a/dimos/manipulation/grasping/demo_grasping.py b/dimos/manipulation/grasping/demo_grasping.py new file mode 100644 index 0000000000..7c6e94d2af --- /dev/null +++ b/dimos/manipulation/grasping/demo_grasping.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from pathlib import Path + +from dimos.agents.agent import agent +from dimos.core.blueprints import autoconnect +from dimos.hardware.sensors.camera.realsense import realsense_camera +from dimos.manipulation.grasping import graspgen +from dimos.manipulation.grasping.grasping import grasping_module +from dimos.perception.detection.detectors.yoloe import YoloePromptMode +from dimos.perception.object_scene_registration import object_scene_registration_module +from dimos.robot.foxglove_bridge import foxglove_bridge + +camera_module = realsense_camera(enable_pointcloud=False) + +demo_grasping = autoconnect( + camera_module, + object_scene_registration_module( + target_frame="camera_color_optical_frame", prompt_mode=YoloePromptMode.PROMPT + ), + grasping_module(), + graspgen( + docker_file_path=Path(__file__).parent / "docker_context" / "Dockerfile", + docker_build_context=Path(__file__).parent.parent.parent.parent, # repo root + gripper_type="robotiq_2f_140", # out of the box ships "robotiq_2f_140", "franka_panda", "single_suction_cup_30mm" + num_grasps=400, + topk_num_grasps=100, + filter_collisions=False, + save_visualization_data=False, # to just see the visualization simply run ``grasping/visualize_grasps.py`` as a standalone script + docker_volumes=[ + ("/tmp", "/tmp", "rw") + ], # Grasp visualization debug standalone: python -m dimos.manipulation.grasping.visualize_grasps + ), + foxglove_bridge(), + agent(), +).global_config(viewer_backend="foxglove") diff --git a/dimos/manipulation/grasping/docker_context/Dockerfile b/dimos/manipulation/grasping/docker_context/Dockerfile new file mode 100644 index 0000000000..d10b3cac76 --- /dev/null +++ b/dimos/manipulation/grasping/docker_context/Dockerfile @@ -0,0 +1,72 @@ +# GraspGen - Grasp Pose Generation Model +# https://github.com/NVlabs/GraspGen +# +# This Dockerfile packages the GraspGen model for grasp pose generation. +# Requires CUDA 12.8+ for PyTorch and CUDA extensions.
+ +FROM nvidia/cuda:12.8.1-cudnn-devel-ubuntu22.04 + +# System dependencies for GraspGen +RUN apt-get update && apt-get install -y \ + git wget curl build-essential \ + libgl1-mesa-glx libglib2.0-0 libsm6 libxext6 libxrender-dev \ + libglu1-mesa libglu1-mesa-dev libegl1-mesa-dev \ + iproute2 \ + && rm -rf /var/lib/apt/lists/* + +# Python environment (Miniconda) +ENV CONDA_DIR=/opt/conda +RUN wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O /tmp/miniconda.sh && \ + bash /tmp/miniconda.sh -b -p $CONDA_DIR && rm /tmp/miniconda.sh +ENV PATH=$CONDA_DIR/bin:$PATH + +RUN conda init bash && \ + conda config --set channel_priority flexible && \ + echo 'yes' | conda tos accept --override-channels --channel defaults 2>/dev/null || true && \ + conda create -n app python=3.10 -y && conda clean -afy + +# Clone GraspGen repository +WORKDIR /app +RUN git clone https://github.com/NVlabs/GraspGen.git && cd GraspGen && git checkout main + +# Install PyTorch with CUDA 12.8 +RUN conda run -n app pip install --no-cache-dir \ + torch==2.7.0 torchvision==0.22.0 --index-url https://download.pytorch.org/whl/cu128 + +# Install GraspGen package +WORKDIR /app/GraspGen +RUN conda run -n app pip install --no-cache-dir -e . + +# Build CUDA extensions (pointnet2_ops) +RUN conda run -n app bash -c "\ + export TORCH_CUDA_ARCH_LIST='8.0 8.6 8.9 9.0 12.0' && \ + export FORCE_CUDA=1 && \ + export CUDA_HOME=/usr/local/cuda && \ + cd pointnet2_ops && pip install --no-build-isolation ." 
+ +# Install torch-scatter and torch-cluster (require CUDA compilation) +RUN conda run -n app pip install --no-cache-dir --no-build-isolation torch-scatter torch-cluster + +# Additional dependencies +RUN conda run -n app pip install --no-cache-dir \ + numpy trimesh pillow pyrender imageio scipy + +# Model checkpoints from LFS archive +COPY data/.lfs/models_graspgen.tar.gz /tmp/ +RUN tar -xzf /tmp/models_graspgen.tar.gz -C /app/GraspGen/ && \ + rm /tmp/models_graspgen.tar.gz + +# Verify checkpoints exist +RUN test -f /app/GraspGen/checkpoints/graspgen_robotiq_2f_140_gen.pth || \ + (echo "ERROR: Model checkpoints not found" && exit 1) + +# Environment variables for GraspGen +ENV GRASPGEN_PATH=/app/GraspGen +ENV DEFAULT_GRIPPER=robotiq_2f_140 +ENV PYOPENGL_PLATFORM=egl +ENV CUDA_LAUNCH_BLOCKING=0 +ENV TORCH_CUDA_ARCH_LIST="8.0 8.6 8.9 9.0 12.0" +ENV PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True + +# Default command +CMD ["conda", "run", "-n", "app", "python", "-c", "print('GraspGen ready')"] diff --git a/dimos/manipulation/grasping/graspgen_module.py b/dimos/manipulation/grasping/graspgen_module.py new file mode 100644 index 0000000000..47520ea0e5 --- /dev/null +++ b/dimos/manipulation/grasping/graspgen_module.py @@ -0,0 +1,279 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import annotations + +from dataclasses import dataclass +import os +from pathlib import Path +import sys +import time +from typing import TYPE_CHECKING, Any + +import numpy as np + +from dimos.core.core import rpc +from dimos.core.docker_runner import DockerModuleConfig +from dimos.core.module import Module +from dimos.msgs.geometry_msgs import PoseArray +from dimos.msgs.std_msgs import Header +from dimos.utils.logging_config import setup_logger +from dimos.utils.transform_utils import matrix_to_pose + +if TYPE_CHECKING: + from dimos.core.stream import Out + from dimos.msgs.sensor_msgs import PointCloud2 + +logger = setup_logger() + +# Inference constants +MIN_POINTS_FOR_INFERENCE = 50 +OUTLIER_REMOVAL_THRESHOLD = 100 +COLLISION_FILTER_THRESHOLD = 0.02 + + +@dataclass +class GraspGenConfig(DockerModuleConfig): + """Configuration for GraspGen module.""" + + # Docker defaults + docker_image: str = "dimos-graspgen:latest" + docker_gpus: str = "all" + docker_shm_size: str = "4g" + + # GraspGen settings + gripper_type: str = ( + "robotiq_2f_140" # use any from robotiq_2f_140", "franka_panda", "single_suction_cup_30mm" + ) + num_grasps: int = 400 + topk_num_grasps: int = 100 + grasp_threshold: float = -1.0 + filter_collisions: bool = False + save_visualization_data: bool = False + visualization_output_path: str = "/tmp/grasp_visualization.json" + + +class GraspGenModule(Module[GraspGenConfig]): + """Grasp generation module running in Docker.""" + + default_config = GraspGenConfig + grasps: Out[PoseArray] + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self._sampler = self._gripper_info = None + self._initialized = False + + @rpc + def start(self) -> None: + super().start() + if not self._initialize_graspgen(): + raise RuntimeError("Failed to initialize GraspGen") + logger.info(f"GraspGenModule started (gripper={self.config.gripper_type})") + + @rpc + def stop(self) -> None: + self._sampler = 
self._gripper_info = None + self._initialized = False + super().stop() + + @rpc + def generate_grasps( + self, + pointcloud: PointCloud2, + scene_pointcloud: PointCloud2 | None = None, + ) -> PoseArray | None: + """Generate grasp poses for the given pointcloud.""" + try: + points = self._extract_points(pointcloud) + if len(points) < 10: + return None + + # Run inference (with optional collision filtering) + scene_points = None + if scene_pointcloud is not None and self.config.filter_collisions: + scene_points = self._extract_points(scene_pointcloud) + grasps, scores = self._run_inference(points, scene_points) + if len(grasps) == 0: + return None + + # Convert and publish results + pose_array = self._grasps_to_pose_array(grasps, scores, pointcloud.frame_id) + self.grasps.publish(pose_array) + + if self.config.save_visualization_data: + self._save_visualization_data(points, grasps, scores, pointcloud.frame_id) + return pose_array + except Exception as e: + logger.error(f"Grasp generation failed: {e}") + return None + + def _initialize_graspgen(self) -> bool: + """Load GraspGen model and gripper info. Returns True on success.""" + if self._initialized: + return True + + try: + # Setup GraspGen path and environment (must be set by Dockerfile) + graspgen_path = os.environ.get("GRASPGEN_PATH") + if graspgen_path is None: + raise RuntimeError( + "GRASPGEN_PATH environment variable not set. Ensure Dockerfile sets ENV GRASPGEN_PATH." 
+ ) + if graspgen_path not in sys.path: + sys.path.insert(0, graspgen_path) + os.environ["PYOPENGL_PLATFORM"] = "egl" + + # Load model and gripper (Docker-only imports) + from grasp_gen.grasp_server import ( # type: ignore[import-not-found] + GraspGenSampler, + load_grasp_cfg, + ) + from grasp_gen.robot import get_gripper_info # type: ignore[import-not-found] + + grasp_cfg = load_grasp_cfg(self._get_gripper_config_path()) + self._sampler = GraspGenSampler(grasp_cfg) + self._gripper_info = get_gripper_info(self.config.gripper_type) + self._initialized = True + logger.info("GraspGen initialized") + return True + except Exception as e: + logger.error(f"Failed to initialize GraspGen: {e}") + self._sampler = self._gripper_info = None + return False + + def _get_gripper_config_path(self) -> str: + graspgen_path = os.environ.get("GRASPGEN_PATH") + if graspgen_path is None: + raise RuntimeError("GRASPGEN_PATH environment variable not set") + config_name = f"graspgen_{self.config.gripper_type}.yml" + + for subdir in ("GraspGenModels/checkpoints", "checkpoints"): + path = os.path.join(graspgen_path, subdir, config_name) + if os.path.exists(path): + return path + + return os.path.join(graspgen_path, "checkpoints", config_name) + + def _run_inference( + self, object_pc: np.ndarray[Any, Any], scene_pc: np.ndarray[Any, Any] | None = None + ) -> tuple[np.ndarray[Any, Any], np.ndarray[Any, Any]]: + if self._sampler is None: + return np.array([]), np.array([]) + + from grasp_gen.grasp_server import GraspGenSampler # type: ignore[import-not-found] + from grasp_gen.utils.point_cloud_utils import ( # type: ignore[import-not-found] + filter_colliding_grasps, + point_cloud_outlier_removal, + ) + import torch # type: ignore[import-not-found] + import trimesh.transformations as tra # type: ignore[import-not-found] + + pc_torch = torch.from_numpy(object_pc) + + if len(object_pc) > OUTLIER_REMOVAL_THRESHOLD: + pc_filtered, _ = point_cloud_outlier_removal(pc_torch) + object_pc_filtered = 
pc_filtered.numpy() + if len(object_pc_filtered) < MIN_POINTS_FOR_INFERENCE: + object_pc_filtered = object_pc + else: + object_pc_filtered = object_pc + + if len(object_pc_filtered) < MIN_POINTS_FOR_INFERENCE: + return np.array([]), np.array([]) + + grasps, scores = GraspGenSampler.run_inference( + object_pc_filtered, + self._sampler, + grasp_threshold=self.config.grasp_threshold, + num_grasps=self.config.num_grasps, + topk_num_grasps=self.config.topk_num_grasps, + remove_outliers=False, + ) + + if len(grasps) == 0: + return np.array([]), np.array([]) + + grasps_np = grasps.cpu().numpy() + scores_np = scores.cpu().numpy() + + if self.config.filter_collisions and scene_pc is not None: + if self._gripper_info is None: + return grasps_np, scores_np + + pc_mean = object_pc_filtered.mean(axis=0) + T_center = tra.translation_matrix(-pc_mean) + grasps_centered = np.array([T_center @ g for g in grasps_np]) + scene_pc_centered = tra.transform_points(scene_pc, T_center) + + collision_free_mask = filter_colliding_grasps( + scene_pc=scene_pc_centered, + grasp_poses=grasps_centered, + gripper_collision_mesh=self._gripper_info.collision_mesh, + collision_threshold=COLLISION_FILTER_THRESHOLD, + ) + grasps_np = grasps_np[collision_free_mask] + scores_np = scores_np[collision_free_mask] + + return grasps_np, scores_np + + def _extract_points(self, msg: PointCloud2) -> np.ndarray[Any, Any]: + points = msg.points().numpy() # type: ignore[no-untyped-call] + if not np.isfinite(points).all(): + raise ValueError("Point cloud contains NaN/Inf") + return points # type: ignore[no-any-return] + + def _grasps_to_pose_array( + self, grasps: np.ndarray[Any, Any], scores: np.ndarray[Any, Any], frame_id: str + ) -> PoseArray: + sorted_indices = np.argsort(scores)[::-1] + poses = [matrix_to_pose(grasps[idx]) for idx in sorted_indices] + return PoseArray(header=Header(frame_id), poses=poses) + + def _save_visualization_data( + self, + points: np.ndarray[Any, Any], + grasps: np.ndarray[Any, Any], + 
scores: np.ndarray[Any, Any], + frame_id: str, + ) -> None: + import json + + try: + data = { + "point_cloud": points.tolist(), + "grasps": [g.tolist() for g in grasps], + "scores": scores.tolist(), + "frame_id": frame_id, + "timestamp": time.time(), + } + output_path = Path(self.config.visualization_output_path) + output_path.parent.mkdir(parents=True, exist_ok=True) + with open(output_path, "w") as f: + json.dump(data, f) + except Exception as e: + logger.warning(f"Failed to save visualization: {e}") + + +def graspgen( + docker_file_path: Path | str, docker_build_context: Path | str | None = None, **kwargs: Any +) -> Any: + """Create a GraspGen module blueprint. All kwargs passed through to config.""" + dockerfile = Path(docker_file_path) + build_context = Path(docker_build_context) if docker_build_context else dockerfile.parent + return GraspGenModule.blueprint( + docker_file=dockerfile, docker_build_context=build_context, **kwargs + ) + + +__all__ = ["GraspGenConfig", "GraspGenModule", "graspgen"] diff --git a/dimos/manipulation/grasping/grasping.py b/dimos/manipulation/grasping/grasping.py new file mode 100644 index 0000000000..783f899a83 --- /dev/null +++ b/dimos/manipulation/grasping/grasping.py @@ -0,0 +1,151 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Grasping skill module + +Provides @skill interface for agents and orchestrates the grasp generation pipeline: +perception (get pointcloud) to graspgen (generate grasps in Docker) to output grasps +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from dimos.agents.annotation import skill +from dimos.core.core import rpc +from dimos.core.module import Module +from dimos.utils.logging_config import setup_logger +from dimos.utils.transform_utils import quaternion_to_euler + +if TYPE_CHECKING: + from dimos.core.stream import Out + from dimos.msgs.geometry_msgs import PoseArray + from dimos.msgs.sensor_msgs import PointCloud2 + +logger = setup_logger() + + +class GraspingModule(Module): + """Grasping skill and orchestrator module""" + + grasps: Out[PoseArray] + + rpc_calls: list[str] = [ + "ObjectSceneRegistrationModule.get_object_pointcloud_by_name", + "ObjectSceneRegistrationModule.get_object_pointcloud_by_object_id", + "ObjectSceneRegistrationModule.get_full_scene_pointcloud", + "GraspGenModule.generate_grasps", + ] + + @rpc + def start(self) -> None: + super().start() + logger.info("GraspingModule started") + + @rpc + def stop(self) -> None: + super().stop() + logger.info("GraspingModule stopped") + + @skill + def generate_grasps( + self, + object_name: str = "object", + object_id: str | None = None, + filter_collisions: bool = True, + ) -> str: + """Generate grasp poses for the specified object. + + Args: + object_name: Name of the object to grasp (e.g. "coke can", "cup", "bottle"). + object_id: Optional unique object ID from perception. If provided, uses this + instead of object_name for lookup. + filter_collisions: Whether to filter grasps that collide with scene geometry. 
+ + """ + # Get object pointcloud from perception + pc = self._get_object_pointcloud(object_name, object_id) + if pc is None: + msg = f"No pointcloud found for '{object_id or object_name}'" + logger.warning(msg) + return msg + + # Get scene pointcloud for collision filtering + scene_pc = None + if filter_collisions: + scene_pc = self._get_scene_pointcloud(exclude_object_id=object_id) + + # Call GraspGenModule RPC (running in Docker) + try: + generate = self.get_rpc_calls("GraspGenModule.generate_grasps") + result = generate(pc, scene_pc) + except Exception as e: + msg = f"Grasp generation failed: {e}" + logger.error(msg) + return msg + + if result is None or len(result.poses) == 0: + msg = f"No grasps generated for '{object_name}'" + logger.info(msg) + return msg + + self.grasps.publish(result) + logger.info(f"Generated {len(result.poses)} grasps for '{object_name}'") + + # Format result for agent/human + return self._format_grasp_result(result, object_name) + + def _get_object_pointcloud( + self, object_name: str, object_id: str | None = None + ) -> PointCloud2 | None: + """Fetch object pointcloud from perception.""" + try: + if object_id is not None: + get_pc = self.get_rpc_calls( + "ObjectSceneRegistrationModule.get_object_pointcloud_by_object_id" + ) + return get_pc(object_id) # type: ignore[no-any-return] + + get_pc = self.get_rpc_calls( + "ObjectSceneRegistrationModule.get_object_pointcloud_by_name" + ) + return get_pc(object_name) # type: ignore[no-any-return] + except Exception as e: + logger.error(f"Failed to get object pointcloud: {e}") + return None + + def _get_scene_pointcloud(self, exclude_object_id: str | None = None) -> PointCloud2 | None: + """Fetch scene pointcloud from perception for collision filtering.""" + try: + get_scene = self.get_rpc_calls( + "ObjectSceneRegistrationModule.get_full_scene_pointcloud" + ) + return get_scene(exclude_object_id=exclude_object_id) # type: ignore[no-any-return] + except Exception as e: + logger.debug(f"Could not 
get scene pointcloud: {e}") + return None + + def _format_grasp_result(self, grasps: PoseArray, object_name: str) -> str: + """Format grasp result for agent/human consumption.""" + best = grasps.poses[0] + pos = best.position + rpy = quaternion_to_euler(best.orientation, degrees=True) + return ( + f"Generated {len(grasps.poses)}" + f"Best grasp: pos=({pos.x:.4f}, {pos.y:.4f}, {pos.z:.4f}), " + f"rpy=({rpy.x:.1f}, {rpy.y:.1f}, {rpy.z:.1f}) degrees" + ) + + +grasping_module = GraspingModule.blueprint +__all__ = ["GraspingModule", "grasping_module"] diff --git a/dimos/manipulation/grasping/visualize_grasps.py b/dimos/manipulation/grasping/visualize_grasps.py new file mode 100644 index 0000000000..53edc1bb16 --- /dev/null +++ b/dimos/manipulation/grasping/visualize_grasps.py @@ -0,0 +1,90 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Grasp visualization debug tool: python -m dimos.manipulation.grasping.visualize_grasps""" + +from __future__ import annotations + +import json +from pathlib import Path +from typing import Any + +import numpy as np +import open3d as o3d # type: ignore[import-untyped] + +GRIPPER_WIDTH = 0.086 +FINGER_LENGTH = 0.052 +PALM_DEPTH = 0.04 +MAX_GRASPS = 100 +VISUALIZATION_FILE = "/tmp/grasp_visualization.json" + + +def create_gripper_geometry(transform: np.ndarray[Any, Any], color: list[float]) -> list[Any]: + w = GRIPPER_WIDTH / 2.0 + fl = FINGER_LENGTH + pd = PALM_DEPTH + wrist = np.array([0.0, 0.0, -(pd + fl)]) + palm = np.array([0.0, 0.0, -fl]) + l_base = np.array([-w, 0.0, -fl]) + r_base = np.array([w, 0.0, -fl]) + l_tip = np.array([-w, 0.0, 0.25 * fl]) + r_tip = np.array([w, 0.0, 0.25 * fl]) + points = np.vstack([wrist, palm, l_base, r_base, l_tip, r_tip]) + lines = [[0, 1], [1, 2], [1, 3], [2, 4], [3, 5]] + points_h = np.hstack([points, np.ones((len(points), 1))]) + points_world = (transform @ points_h.T).T[:, :3] + line_set = o3d.geometry.LineSet() + line_set.points = o3d.utility.Vector3dVector(points_world) + line_set.lines = o3d.utility.Vector2iVector(lines) + line_set.colors = o3d.utility.Vector3dVector([color] * len(lines)) + + return [line_set] + + +def visualize_grasps(point_cloud: np.ndarray[Any, Any], grasps: list[np.ndarray[Any, Any]]) -> None: + geometries = [] + + pcd = o3d.geometry.PointCloud() + pcd.points = o3d.utility.Vector3dVector(point_cloud) + pcd.paint_uniform_color([0.0, 0.8, 0.8]) + geometries.append(pcd) + coord_frame = o3d.geometry.TriangleMesh.create_coordinate_frame(size=0.1) + geometries.append(coord_frame) + + num_to_show = min(len(grasps), MAX_GRASPS) + for i in range(num_to_show): + t = i / max(num_to_show - 1, 1) if i > 0 else 0.0 + color = [min(1.0, 2 * t), max(0.0, 1.0 - t), 0.0] + geometries.extend(create_gripper_geometry(grasps[i], color)) + + o3d.visualization.draw_geometries(geometries, window_name="GraspGen", width=1280, 
height=720) + + +def main() -> int: + filepath = Path(VISUALIZATION_FILE) + if not filepath.exists(): + print(f"File not found: {filepath}") + return 1 + + with open(filepath) as f: + data = json.load(f) + + point_cloud = np.array(data["point_cloud"]) + grasps = [np.array(g).reshape(4, 4) for g in data["grasps"]] + + visualize_grasps(point_cloud, grasps) + return 0 + + +if __name__ == "__main__": + exit(main()) diff --git a/dimos/manipulation/manipulation_blueprints.py b/dimos/manipulation/manipulation_blueprints.py new file mode 100644 index 0000000000..e95e415373 --- /dev/null +++ b/dimos/manipulation/manipulation_blueprints.py @@ -0,0 +1,410 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Blueprints for manipulation module integration with ControlCoordinator. 
+ +Usage: + # Non-agentic (manual RPC): + dimos run coordinator-mock + dimos run xarm-perception + + # Agentic (LLM agent with skills): + dimos run coordinator-mock + dimos run xarm-perception-agent +""" + +import math +from pathlib import Path + +from dimos.agents.agent import Agent +from dimos.core.blueprints import autoconnect +from dimos.core.transport import LCMTransport +from dimos.hardware.sensors.camera.realsense import realsense_camera +from dimos.manipulation.manipulation_module import manipulation_module +from dimos.manipulation.planning.spec import RobotModelConfig +from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Transform, Vector3 +from dimos.msgs.sensor_msgs import JointState +from dimos.perception.object_scene_registration import object_scene_registration_module +from dimos.robot.foxglove_bridge import foxglove_bridge # TODO: migrate to rerun +from dimos.utils.data import get_data + +# ============================================================================= +# Pose Helpers +# ============================================================================= + + +def _make_base_pose( + x: float = 0.0, + y: float = 0.0, + z: float = 0.0, + roll: float = 0.0, + pitch: float = 0.0, + yaw: float = 0.0, +) -> PoseStamped: + """Create a base pose with optional xyz offset and rpy orientation. 
+ + Args: + x, y, z: Position offset in meters + roll, pitch, yaw: Orientation in radians (Euler angles) + """ + return PoseStamped( + position=Vector3(x=x, y=y, z=z), + orientation=Quaternion.from_euler(Vector3(x=roll, y=pitch, z=yaw)), + ) + + +# ============================================================================= +# URDF Helpers +# ============================================================================= + + +def _get_xarm_urdf_path() -> Path: + """Get path to xarm URDF.""" + return get_data("xarm_description") / "urdf/xarm_device.urdf.xacro" + + +def _get_xarm_package_paths() -> dict[str, Path]: + """Get package paths for xarm xacro resolution.""" + return {"xarm_description": get_data("xarm_description")} + + +def _get_piper_urdf_path() -> Path: + """Get path to piper URDF.""" + return get_data("piper_description") / "urdf/piper_description.xacro" + + +def _get_piper_package_paths() -> dict[str, Path]: + """Get package paths for piper xacro resolution.""" + return {"piper_description": get_data("piper_description")} + + +# Piper gripper collision exclusions (parallel jaw gripper) +# The gripper fingers (link7, link8) can touch each other and gripper_base +PIPER_GRIPPER_COLLISION_EXCLUSIONS: list[tuple[str, str]] = [ + ("gripper_base", "link7"), + ("gripper_base", "link8"), + ("link7", "link8"), + ("link6", "gripper_base"), +] + + +# XArm gripper collision exclusions (parallel linkage mechanism) +# The gripper uses mimic joints where non-adjacent links can overlap legitimately +XARM_GRIPPER_COLLISION_EXCLUSIONS: list[tuple[str, str]] = [ + # Inner knuckle <-> outer knuckle (parallel linkage) + ("right_inner_knuckle", "right_outer_knuckle"), + ("left_inner_knuckle", "left_outer_knuckle"), + # Inner knuckle <-> finger (parallel linkage) + ("right_inner_knuckle", "right_finger"), + ("left_inner_knuckle", "left_finger"), + # Cross-finger pairs (mimic joint symmetry) + ("left_finger", "right_finger"), + ("left_outer_knuckle", "right_outer_knuckle"), + 
("left_inner_knuckle", "right_inner_knuckle"), + # Outer knuckle <-> opposite finger + ("left_outer_knuckle", "right_finger"), + ("right_outer_knuckle", "left_finger"), + # Gripper base <-> all moving parts (can touch at limits) + ("xarm_gripper_base_link", "left_inner_knuckle"), + ("xarm_gripper_base_link", "right_inner_knuckle"), + ("xarm_gripper_base_link", "left_finger"), + ("xarm_gripper_base_link", "right_finger"), + # Arm link6 <-> gripper (attached via fixed joint, can touch) + ("link6", "xarm_gripper_base_link"), + ("link6", "left_outer_knuckle"), + ("link6", "right_outer_knuckle"), +] + + +# ============================================================================= +# Robot Configs +# ============================================================================= + + +def _make_xarm6_config( + name: str = "arm", + y_offset: float = 0.0, + joint_prefix: str = "", + coordinator_task: str | None = None, + add_gripper: bool = True, +) -> RobotModelConfig: + """Create XArm6 robot config. 
+ + Args: + name: Robot name in Drake world + y_offset: Y-axis offset for base pose (for multi-arm setups) + joint_prefix: Prefix for joint name mapping (e.g., "left_" or "right_") + coordinator_task: Task name for coordinator RPC execution + add_gripper: Whether to add the xarm gripper + """ + joint_names = ["joint1", "joint2", "joint3", "joint4", "joint5", "joint6"] + joint_mapping = {f"{joint_prefix}{j}": j for j in joint_names} if joint_prefix else {} + + xacro_args: dict[str, str] = { + "dof": "6", + "limited": "true", + "attach_xyz": f"0 {y_offset} 0", + } + if add_gripper: + xacro_args["add_gripper"] = "true" + + return RobotModelConfig( + name=name, + urdf_path=_get_xarm_urdf_path(), + base_pose=_make_base_pose(y=y_offset), + joint_names=joint_names, + end_effector_link="link_tcp" if add_gripper else "link6", + base_link="link_base", + package_paths=_get_xarm_package_paths(), + xacro_args=xacro_args, + collision_exclusion_pairs=XARM_GRIPPER_COLLISION_EXCLUSIONS if add_gripper else [], + auto_convert_meshes=True, + max_velocity=1.0, + max_acceleration=2.0, + joint_name_mapping=joint_mapping, + coordinator_task_name=coordinator_task, + home_joints=[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) + + +def _make_xarm7_config( + name: str = "arm", + y_offset: float = 0.0, + z_offset: float = 0.0, + pitch: float = 0.0, + joint_prefix: str = "", + coordinator_task: str | None = None, + add_gripper: bool = False, + gripper_hardware_id: str | None = None, + tf_extra_links: list[str] | None = None, +) -> RobotModelConfig: + """Create XArm7 robot config. 
+ + Args: + name: Robot name in Drake world + y_offset: Y-axis offset for base pose (for multi-arm setups) + z_offset: Z-axis offset for base pose (e.g., table height) + pitch: Base pitch angle in radians (e.g., tilted mount) + joint_prefix: Prefix for joint name mapping (e.g., "left_" or "right_") + coordinator_task: Task name for coordinator RPC execution + add_gripper: Whether to add the xarm gripper + gripper_hardware_id: Coordinator hardware ID for gripper control + tf_extra_links: Additional links to publish TF for (e.g., ["link7"] for camera mount) + """ + joint_names = ["joint1", "joint2", "joint3", "joint4", "joint5", "joint6", "joint7"] + joint_mapping = {f"{joint_prefix}{j}": j for j in joint_names} if joint_prefix else {} + + xacro_args: dict[str, str] = { + "dof": "7", + "limited": "true", + "attach_xyz": f"0 {y_offset} {z_offset}", + "attach_rpy": f"0 {pitch} 0", + } + if add_gripper: + xacro_args["add_gripper"] = "true" + + return RobotModelConfig( + name=name, + urdf_path=_get_xarm_urdf_path(), + base_pose=_make_base_pose(y=y_offset, z=z_offset, pitch=pitch), + joint_names=joint_names, + end_effector_link="link_tcp" if add_gripper else "link7", + base_link="link_base", + package_paths=_get_xarm_package_paths(), + xacro_args=xacro_args, + collision_exclusion_pairs=XARM_GRIPPER_COLLISION_EXCLUSIONS if add_gripper else [], + auto_convert_meshes=True, + max_velocity=1.0, + max_acceleration=2.0, + joint_name_mapping=joint_mapping, + coordinator_task_name=coordinator_task, + gripper_hardware_id=gripper_hardware_id, + tf_extra_links=tf_extra_links or [], + # Home configuration: arm extended forward, elbow up (safe observe pose) + home_joints=[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) + + +def _make_piper_config( + name: str = "piper", + y_offset: float = 0.0, + joint_prefix: str = "", + coordinator_task: str | None = None, +) -> RobotModelConfig: + """Create Piper robot config. 
+ + Args: + name: Robot name in Drake world + y_offset: Y-axis offset for base pose (for multi-arm setups) + joint_prefix: Prefix for joint name mapping (e.g., "piper_") + coordinator_task: Task name for coordinator RPC execution + + Note: + Piper has 6 revolute joints (joint1-joint6) for the arm and 2 prismatic + joints (joint7, joint8) for the parallel jaw gripper. + """ + # Piper arm joints (6-DOF) + joint_names = ["joint1", "joint2", "joint3", "joint4", "joint5", "joint6"] + joint_mapping = {f"{joint_prefix}{j}": j for j in joint_names} if joint_prefix else {} + + return RobotModelConfig( + name=name, + urdf_path=_get_piper_urdf_path(), + base_pose=_make_base_pose(y=y_offset), + joint_names=joint_names, + end_effector_link="gripper_base", # End of arm, before gripper fingers + base_link="arm_base", + package_paths=_get_piper_package_paths(), + xacro_args={}, # Piper xacro doesn't need special args + collision_exclusion_pairs=PIPER_GRIPPER_COLLISION_EXCLUSIONS, + auto_convert_meshes=True, + max_velocity=1.0, + max_acceleration=2.0, + joint_name_mapping=joint_mapping, + coordinator_task_name=coordinator_task, + home_joints=[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) + + +# ============================================================================= +# Blueprints +# ============================================================================= + + +# Single XArm6 planner (standalone, no coordinator) +xarm6_planner_only = manipulation_module( + robots=[_make_xarm6_config()], + planning_timeout=10.0, + enable_viz=True, +).transports( + { + ("joint_state", JointState): LCMTransport("/xarm/joint_states", JointState), + } +) + + +# Dual XArm6 planner with coordinator integration +# Usage: Start with coordinator_dual_mock, then plan/execute via RPC +dual_xarm6_planner = manipulation_module( + robots=[ + _make_xarm6_config( + "left_arm", y_offset=0.5, joint_prefix="left_", coordinator_task="traj_left" + ), + _make_xarm6_config( + "right_arm", y_offset=-0.5, 
joint_prefix="right_", coordinator_task="traj_right" + ), + ], + planning_timeout=10.0, + enable_viz=True, +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + } +) + + +# Single XArm7 planner for coordinator-mock +# Usage: dimos run coordinator-mock, then dimos run xarm7-planner-coordinator +xarm7_planner_coordinator = manipulation_module( + robots=[_make_xarm7_config("arm", joint_prefix="arm_", coordinator_task="traj_arm")], + planning_timeout=10.0, + enable_viz=True, +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + } +) + + +# XArm7 with eye-in-hand RealSense camera for perception-based manipulation +# TF chain: world → link7 (ManipulationModule) → camera_link (RealSense) +# Usage: dimos run coordinator-mock, then dimos run xarm-perception +_XARM_PERCEPTION_CAMERA_TRANSFORM = Transform( + translation=Vector3(x=0.06693724, y=-0.0309563, z=0.00691482), + rotation=Quaternion(0.70513398, 0.00535696, 0.70897578, -0.01052180), # xyzw +) + +xarm_perception = ( + autoconnect( + manipulation_module( + robots=[ + _make_xarm7_config( + "arm", + pitch=math.radians(45), + joint_prefix="arm_", + coordinator_task="traj_arm", + add_gripper=True, + gripper_hardware_id="arm", + tf_extra_links=["link7"], + ), + ], + planning_timeout=10.0, + enable_viz=True, + ), + realsense_camera( + base_frame_id="link7", + base_transform=_XARM_PERCEPTION_CAMERA_TRANSFORM, + ), + object_scene_registration_module(target_frame="world"), + foxglove_bridge(), # TODO: migrate to rerun + ) + .transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + } + ) + .global_config(viewer_backend="foxglove") +) + + +# XArm7 perception + LLM agent for agentic manipulation +# Skills (pick, place, move_to_pose, etc.) auto-register with the agent's SkillCoordinator. 
+# Usage: dimos run coordinator-mock, then dimos run xarm-perception-agent +_MANIPULATION_AGENT_SYSTEM_PROMPT = """\ +You are a robotic manipulation assistant controlling an xArm7 robot arm. + +Use ONLY these ManipulationModule skills for manipulation tasks: +- scan_objects: Scan scene and list detected objects with 3D positions. Always call this first. +- pick: Pick up an object by name. Requires scan_objects first. +- place: Place a held object at x, y, z position. +- place_back: Place a held object back at its original pick position. +- pick_and_place: Pick an object and place it at a target location. +- move_to_pose: Move end-effector to x, y, z with optional roll, pitch, yaw. +- move_to_joints: Move to a joint configuration (comma-separated radians). +- open_gripper / close_gripper / set_gripper: Control the gripper. +- go_home: Move to the home/observe position. +- go_init: Return to the startup position. +- get_scene_info: Get full robot state, detected objects, and scene info. + +Do NOT use the 'detect' or 'select' skills — use scan_objects instead. +For robot_name parameters, always omit or pass None (single-arm setup). +After pick or place, return to init with go_init unless another action follows immediately. +""" + +xarm_perception_agent = autoconnect( + xarm_perception, + Agent.blueprint(system_prompt=_MANIPULATION_AGENT_SYSTEM_PROMPT), +) + + +__all__ = [ + "PIPER_GRIPPER_COLLISION_EXCLUSIONS", + "XARM_GRIPPER_COLLISION_EXCLUSIONS", + "dual_xarm6_planner", + "xarm6_planner_only", + "xarm7_planner_coordinator", + "xarm_perception", + "xarm_perception_agent", +] diff --git a/dimos/manipulation/manipulation_history.py b/dimos/manipulation/manipulation_history.py deleted file mode 100644 index 8d9b281d76..0000000000 --- a/dimos/manipulation/manipulation_history.py +++ /dev/null @@ -1,417 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0) -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Module for manipulation history tracking and search.""" - -from dataclasses import dataclass, field -from datetime import datetime -import json -import os -import pickle -import time -from typing import Any - -from dimos.types.manipulation import ( - ManipulationTask, -) -from dimos.utils.logging_config import setup_logger - -logger = setup_logger() - - -@dataclass -class ManipulationHistoryEntry: - """An entry in the manipulation history. 
- - Attributes: - task: The manipulation task executed - timestamp: When the manipulation was performed - result: Result of the manipulation (success/failure) - manipulation_response: Response from the motion planner/manipulation executor - """ - - task: ManipulationTask - timestamp: float = field(default_factory=time.time) - result: dict[str, Any] = field(default_factory=dict) - manipulation_response: str | None = ( - None # Any elaborative response from the motion planner / manipulation executor - ) - - def __str__(self) -> str: - status = self.result.get("status", "unknown") - return f"ManipulationHistoryEntry(task='{self.task.description}', status={status}, time={datetime.fromtimestamp(self.timestamp).strftime('%H:%M:%S')})" - - -class ManipulationHistory: - """A simplified, dictionary-based storage for manipulation history. - - This class provides an efficient way to store and query manipulation tasks, - focusing on quick lookups and flexible search capabilities. - """ - - def __init__(self, output_dir: str | None = None, new_memory: bool = False) -> None: - """Initialize a new manipulation history. 
- - Args: - output_dir: Directory to save history to - new_memory: If True, creates a new memory instead of loading existing one - """ - self._history: list[ManipulationHistoryEntry] = [] - self._output_dir = output_dir - - if output_dir and not new_memory: - self.load_from_dir(output_dir) - elif output_dir: - os.makedirs(output_dir, exist_ok=True) - logger.info(f"Created new manipulation history at {output_dir}") - - def __len__(self) -> int: - """Return the number of entries in the history.""" - return len(self._history) - - def __str__(self) -> str: - """Return a string representation of the history.""" - if not self._history: - return "ManipulationHistory(empty)" - - return ( - f"ManipulationHistory(entries={len(self._history)}, " - f"time_range={datetime.fromtimestamp(self._history[0].timestamp).strftime('%Y-%m-%d %H:%M:%S')} to " - f"{datetime.fromtimestamp(self._history[-1].timestamp).strftime('%Y-%m-%d %H:%M:%S')})" - ) - - def clear(self) -> None: - """Clear all entries from the history.""" - self._history.clear() - logger.info("Cleared manipulation history") - - if self._output_dir: - self.save_history() - - def add_entry(self, entry: ManipulationHistoryEntry) -> None: - """Add an entry to the history. 
- - Args: - entry: The entry to add - """ - self._history.append(entry) - self._history.sort(key=lambda e: e.timestamp) - - if self._output_dir: - self.save_history() - - def save_history(self) -> None: - """Save the history to the output directory.""" - if not self._output_dir: - logger.warning("Cannot save history: no output directory specified") - return - - os.makedirs(self._output_dir, exist_ok=True) - history_path = os.path.join(self._output_dir, "manipulation_history.pickle") - - with open(history_path, "wb") as f: - pickle.dump(self._history, f) - - logger.info(f"Saved manipulation history to {history_path}") - - # Also save a JSON representation for easier inspection - json_path = os.path.join(self._output_dir, "manipulation_history.json") - try: - history_data = [ - { - "task": { - "description": entry.task.description, - "target_object": entry.task.target_object, - "target_point": entry.task.target_point, - "timestamp": entry.task.timestamp, - "task_id": entry.task.task_id, - "metadata": entry.task.metadata, - }, - "result": entry.result, - "timestamp": entry.timestamp, - "manipulation_response": entry.manipulation_response, - } - for entry in self._history - ] - - with open(json_path, "w") as f: - json.dump(history_data, f, indent=2) - - logger.info(f"Saved JSON representation to {json_path}") - except Exception as e: - logger.error(f"Failed to save JSON representation: {e}") - - def load_from_dir(self, directory: str) -> None: - """Load history from the specified directory. 
- - Args: - directory: Directory to load history from - """ - history_path = os.path.join(directory, "manipulation_history.pickle") - - if not os.path.exists(history_path): - logger.warning(f"No history found at {history_path}") - return - - try: - with open(history_path, "rb") as f: - self._history = pickle.load(f) - - logger.info( - f"Loaded manipulation history from {history_path} with {len(self._history)} entries" - ) - except Exception as e: - logger.error(f"Failed to load history: {e}") - - def get_all_entries(self) -> list[ManipulationHistoryEntry]: - """Get all entries in chronological order. - - Returns: - List of all manipulation history entries - """ - return self._history.copy() - - def get_entry_by_index(self, index: int) -> ManipulationHistoryEntry | None: - """Get an entry by its index. - - Args: - index: Index of the entry to retrieve - - Returns: - The entry at the specified index or None if index is out of bounds - """ - if 0 <= index < len(self._history): - return self._history[index] - return None - - def get_entries_by_timerange( - self, start_time: float, end_time: float - ) -> list[ManipulationHistoryEntry]: - """Get entries within a specific time range. - - Args: - start_time: Start time (UNIX timestamp) - end_time: End time (UNIX timestamp) - - Returns: - List of entries within the specified time range - """ - return [entry for entry in self._history if start_time <= entry.timestamp <= end_time] - - def get_entries_by_object(self, object_name: str) -> list[ManipulationHistoryEntry]: - """Get entries related to a specific object. 
- - Args: - object_name: Name of the object to search for - - Returns: - List of entries related to the specified object - """ - return [entry for entry in self._history if entry.task.target_object == object_name] - - def create_task_entry( - self, - task: ManipulationTask, - result: dict[str, Any] | None = None, - agent_response: str | None = None, - ) -> ManipulationHistoryEntry: - """Create a new manipulation history entry. - - Args: - task: The manipulation task - result: Result of the manipulation - agent_response: Response from the agent about this manipulation - - Returns: - The created history entry - """ - entry = ManipulationHistoryEntry( - task=task, result=result or {}, manipulation_response=agent_response - ) - self.add_entry(entry) - return entry - - def search(self, **kwargs) -> list[ManipulationHistoryEntry]: # type: ignore[no-untyped-def] - """Flexible search method that can search by any field in ManipulationHistoryEntry using dot notation. - - This method supports dot notation to access nested fields. String values automatically use - substring matching (contains), while all other types use exact matching. - - Examples: - # Time-based searches: - - search(**{"task.metadata.timestamp": ('>', start_time)}) - entries after start_time - - search(**{"task.metadata.timestamp": ('>=', time - 1800)}) - entries in last 30 mins - - # Constraint searches: - - search(**{"task.constraints.*.reference_point.x": 2.5}) - tasks with x=2.5 reference point - - search(**{"task.constraints.*.end_angle.x": 90}) - tasks with 90-degree x rotation - - search(**{"task.constraints.*.lock_x": True}) - tasks with x-axis translation locked - - # Object and result searches: - - search(**{"task.metadata.objects.*.label": "cup"}) - tasks involving cups - - search(**{"result.status": "success"}) - successful tasks - - search(**{"result.error": "Collision"}) - tasks that had collisions - - Args: - **kwargs: Key-value pairs for searching using dot notation for field paths. 
- - Returns: - List of matching entries - """ - if not kwargs: - return self._history.copy() - - results = self._history.copy() - - for key, value in kwargs.items(): - # For all searches, automatically determine if we should use contains for strings - results = [e for e in results if self._check_field_match(e, key, value)] - - return results - - def _check_field_match(self, entry, field_path, value) -> bool: # type: ignore[no-untyped-def] - """Check if a field matches the value, with special handling for strings, collections and comparisons. - - For string values, we automatically use substring matching (contains). - For collections (returned by * path), we check if any element matches. - For numeric values (like timestamps), supports >, <, >= and <= comparisons. - For all other types, we use exact matching. - - Args: - entry: The entry to check - field_path: Dot-separated path to the field - value: Value to match against. For comparisons, use tuples like: - ('>', timestamp) - greater than - ('<', timestamp) - less than - ('>=', timestamp) - greater or equal - ('<=', timestamp) - less or equal - - Returns: - True if the field matches the value, False otherwise - """ - try: - field_value = self._get_value_by_path(entry, field_path) # type: ignore[no-untyped-call] - - # Handle comparison operators for timestamps and numbers - if isinstance(value, tuple) and len(value) == 2: - op, compare_value = value - if op == ">": - return field_value > compare_value # type: ignore[no-any-return] - elif op == "<": - return field_value < compare_value # type: ignore[no-any-return] - elif op == ">=": - return field_value >= compare_value # type: ignore[no-any-return] - elif op == "<=": - return field_value <= compare_value # type: ignore[no-any-return] - - # Handle lists (from collection searches) - if isinstance(field_value, list): - for item in field_value: - # String values use contains matching - if isinstance(item, str) and isinstance(value, str): - if value in item: - return 
True - # All other types use exact matching - elif item == value: - return True - return False - - # String values use contains matching - elif isinstance(field_value, str) and isinstance(value, str): - return value in field_value - # All other types use exact matching - else: - return field_value == value # type: ignore[no-any-return] - - except (AttributeError, KeyError): - return False - - def _get_value_by_path(self, obj, path): # type: ignore[no-untyped-def] - """Get a value from an object using a dot-separated path. - - This method handles three special cases: - 1. Regular attribute access (obj.attr) - 2. Dictionary key access (dict[key]) - 3. Collection search (dict.*.attr) - when * is used, it searches all values in the collection - - Args: - obj: Object to get value from - path: Dot-separated path to the field (e.g., "task.metadata.robot") - - Returns: - Value at the specified path or list of values for collection searches - - Raises: - AttributeError: If an attribute in the path doesn't exist - KeyError: If a dictionary key in the path doesn't exist - """ - current = obj - parts = path.split(".") - - for i, part in enumerate(parts): - # Collection search (*.attr) - search across all items in a collection - if part == "*": - # Get remaining path parts - remaining_path = ".".join(parts[i + 1 :]) - - # Handle different collection types - if isinstance(current, dict): - items = current.values() - if not remaining_path: # If * is the last part, return all values - return list(items) - elif isinstance(current, list): - items = current # type: ignore[assignment] - if not remaining_path: # If * is the last part, return all items - return items - else: # Not a collection - raise AttributeError( - f"Cannot use wildcard on non-collection type: {type(current)}" - ) - - # Apply remaining path to each item in the collection - results = [] - for item in items: - try: - # Recursively get values from each item - value = self._get_value_by_path(item, remaining_path) # 
type: ignore[no-untyped-call] - if isinstance(value, list): # Flatten nested lists - results.extend(value) - else: - results.append(value) - except (AttributeError, KeyError): - # Skip items that don't have the attribute - pass - return results - - # Regular attribute/key access - elif isinstance(current, dict): - current = current[part] - else: - current = getattr(current, part) - - return current diff --git a/dimos/manipulation/manipulation_interface.py b/dimos/manipulation/manipulation_interface.py index 10e71fbc66..524562520d 100644 --- a/dimos/manipulation/manipulation_interface.py +++ b/dimos/manipulation/manipulation_interface.py @@ -13,19 +13,14 @@ # limitations under the License. """ -ManipulationInterface provides a unified interface for accessing manipulation history. +ManipulationInterface provides a unified interface for accessing manipulation data. This module defines the ManipulationInterface class, which serves as an access point -for the robot's manipulation history, agent-generated constraints, and manipulation -metadata streams. +for agent-generated constraints, manipulation tasks, and perception streams. """ -import os from typing import TYPE_CHECKING, Any -from dimos.manipulation.manipulation_history import ( - ManipulationHistory, -) from dimos.types.manipulation import ( AbstractConstraint, ManipulationTask, @@ -50,28 +45,16 @@ class ManipulationInterface: def __init__( self, - output_dir: str, - new_memory: bool = False, perception_stream: Any = None, ) -> None: """ Initialize a new ManipulationInterface instance. 
Args: - output_dir: Directory for storing manipulation data - new_memory: If True, creates a new manipulation history from scratch perception_stream: ObjectDetectionStream instance for real-time object data """ - self.output_dir = output_dir - - # Create manipulation history directory - manipulation_dir = os.path.join(output_dir, "manipulation_history") - os.makedirs(manipulation_dir, exist_ok=True) - - # Initialize manipulation history - self.manipulation_history: ManipulationHistory = ManipulationHistory( - output_dir=manipulation_dir, new_memory=new_memory - ) + # List of manipulation tasks + self._tasks: list[ManipulationTask] = [] # List of constraints generated by the Agent via constraint generation skills self.agent_constraints: list[AbstractConstraint] = [] @@ -123,21 +106,15 @@ def get_constraint(self, constraint_id: str) -> AbstractConstraint | None: logger.warning(f"Constraint with ID {constraint_id} not found") return None - def add_manipulation_task( - self, task: ManipulationTask, manipulation_response: str | None = None - ) -> None: + def add_manipulation_task(self, task: ManipulationTask) -> None: """ - Add a manipulation task to ManipulationHistory. + Add a manipulation task. 
Args: task: The ManipulationTask to add - manipulation_response: Optional response from the motion planner/executor - """ - # Add task to history - self.manipulation_history.add_entry( # type: ignore[call-arg] - task=task, result=None, notes=None, manipulation_response=manipulation_response - ) + self._tasks.append(task) + logger.info(f"Added manipulation task: {task.task_id or 'unknown'}") def get_manipulation_task(self, task_id: str) -> ManipulationTask | None: """ @@ -149,7 +126,10 @@ def get_manipulation_task(self, task_id: str) -> ManipulationTask | None: Returns: The task object or None if not found """ - return self.history.get_manipulation_task(task_id) # type: ignore[attr-defined, no-any-return] + for task in self._tasks: + if task.task_id == task_id: + return task + return None def get_all_manipulation_tasks(self) -> list[ManipulationTask]: """ @@ -158,23 +138,24 @@ def get_all_manipulation_tasks(self) -> list[ManipulationTask]: Returns: List of all manipulation tasks """ - return self.history.get_all_manipulation_tasks() # type: ignore[attr-defined, no-any-return] + return list(self._tasks) - def update_task_status( - self, task_id: str, status: str, result: dict[str, Any] | None = None - ) -> ManipulationTask | None: + def update_task_result(self, task_id: str, result: dict[str, Any]) -> ManipulationTask | None: """ - Update the status and result of a manipulation task. + Update the result of a manipulation task. 
Args: task_id: ID of the task to update - status: New status for the task (e.g., 'completed', 'failed') - result: Optional dictionary with result data + result: Result data from task execution Returns: The updated task or None if task not found """ - return self.history.update_task_status(task_id, status, result) # type: ignore[attr-defined, no-any-return] + task = self.get_manipulation_task(task_id) + if task is not None: + task.result = result + return task + return None # === Perception stream methods === @@ -260,13 +241,13 @@ def cleanup_perception_subscription(self) -> None: # === Utility methods === - def clear_history(self) -> None: + def clear(self) -> None: """ - Clear all manipulation history data and agent constraints. + Clear all manipulation tasks and agent constraints. """ - self.manipulation_history.clear() + self._tasks.clear() self.agent_constraints.clear() - logger.info("Cleared manipulation history and agent constraints") + logger.info("Cleared manipulation tasks and agent constraints") def __str__(self) -> str: """ @@ -276,7 +257,7 @@ def __str__(self) -> str: String representation with key stats """ has_stream = self.perception_stream is not None - return f"ManipulationInterface(history={self.manipulation_history}, agent_constraints={len(self.agent_constraints)}, perception_stream={has_stream}, detected_objects={len(self.latest_objects)})" + return f"ManipulationInterface(tasks={len(self._tasks)}, constraints={len(self.agent_constraints)}, perception_stream={has_stream}, detected_objects={len(self.latest_objects)})" def __del__(self) -> None: """ diff --git a/dimos/manipulation/manipulation_module.py b/dimos/manipulation/manipulation_module.py new file mode 100644 index 0000000000..310b77d766 --- /dev/null +++ b/dimos/manipulation/manipulation_module.py @@ -0,0 +1,1615 @@ +# Copyright 2025-2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Manipulation Module - Motion planning with ControlCoordinator execution. + +Interface layers: +- @rpc: Low-level building blocks (plan_to_pose, plan_to_joints, preview_path, execute) +- @skill (short-horizon): Single-step actions (move_to_pose, open_gripper, scan_objects, go_init) +- @skill (long-horizon): Multi-step composed behaviors (pick, place, place_back, pick_and_place) +""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from enum import Enum +import math +from pathlib import Path +import threading +import time +from typing import TYPE_CHECKING, Any, TypeAlias + +from dimos.agents.annotation import skill +from dimos.constants import DIMOS_PROJECT_ROOT +from dimos.core import In, Module, rpc +from dimos.core.docker_runner import DockerModule as DockerRunner +from dimos.core.module import ModuleConfig +from dimos.manipulation.grasping.graspgen_module import GraspGenModule +from dimos.manipulation.planning import ( + JointPath, + JointTrajectoryGenerator, + KinematicsSpec, + Obstacle, + ObstacleType, + PlannerSpec, + RobotModelConfig, + RobotName, + WorldRobotID, + create_kinematics, + create_planner, +) +from dimos.manipulation.planning.monitor import WorldMonitor +from dimos.msgs.geometry_msgs import Pose, Quaternion, Vector3 + +# These must be imported at runtime (not TYPE_CHECKING) for In/Out port creation +from dimos.msgs.sensor_msgs import JointState +from 
dimos.msgs.trajectory_msgs import JointTrajectory +from dimos.perception.detection.type.detection3d.object import Object as DetObject +from dimos.utils.data import get_data +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from dimos.core.rpc_client import RPCClient + from dimos.msgs.geometry_msgs import PoseArray + from dimos.msgs.sensor_msgs import PointCloud2 + from dimos.perception.detection.type.detection3d.object import Object as DetObject + +logger = setup_logger() + +# Composite type aliases for readability (using semantic IDs from planning.spec) +RobotEntry: TypeAlias = tuple[WorldRobotID, RobotModelConfig, JointTrajectoryGenerator] +"""(world_robot_id, config, trajectory_generator)""" + +RobotRegistry: TypeAlias = dict[RobotName, RobotEntry] +"""Maps robot_name -> RobotEntry""" + +PlannedPaths: TypeAlias = dict[RobotName, JointPath] +"""Maps robot_name -> planned joint path""" + +PlannedTrajectories: TypeAlias = dict[RobotName, JointTrajectory] +"""Maps robot_name -> planned trajectory""" + +# The host-side path (graspgen_visualization_output_path) is volume-mounted here. 
+_GRASPGEN_VIZ_CONTAINER_DIR = "/output/graspgen" +_GRASPGEN_VIZ_CONTAINER_PATH = f"{_GRASPGEN_VIZ_CONTAINER_DIR}/visualization.json" + + +class ManipulationState(Enum): + """State machine for manipulation module.""" + + IDLE = 0 + PLANNING = 1 + EXECUTING = 2 + COMPLETED = 3 + FAULT = 4 + + +@dataclass +class ManipulationModuleConfig(ModuleConfig): + """Configuration for ManipulationModule.""" + + robots: list[RobotModelConfig] = field(default_factory=list) + planning_timeout: float = 10.0 + enable_viz: bool = False + planner_name: str = "rrt_connect" # "rrt_connect" + kinematics_name: str = "jacobian" # "jacobian" or "drake_optimization" + + # GraspGen Docker settings (optional) + graspgen_docker_image: str = "dimos-graspgen:latest" + graspgen_gripper_type: str = "robotiq_2f_140" + graspgen_num_grasps: int = 400 + graspgen_topk_num_grasps: int = 100 + graspgen_grasp_threshold: float = -1.0 + graspgen_filter_collisions: bool = False + graspgen_save_visualization_data: bool = False + graspgen_visualization_output_path: Path = field( + default_factory=lambda: Path.home() / ".dimos" / "graspgen" / "visualization.json" + ) + + +class ManipulationModule(Module): + """Motion planning module with ControlCoordinator execution. 
+ + - @rpc: Low-level building blocks (plan, execute, obstacles) + - @skill (short-horizon): Single-step actions (move_to_pose, open_gripper, scan_objects) + - @skill (long-horizon): Multi-step behaviors (pick, place, pick_and_place) + """ + + default_config = ManipulationModuleConfig + + # Type annotation for the config attribute (mypy uses this) + config: ManipulationModuleConfig + + # Input: Joint state from coordinator (for world sync) + joint_state: In[JointState] + + # Input: Objects from perception (for obstacle integration) + objects: In[list[DetObject]] + + def __init__(self, *args: object, **kwargs: object) -> None: + super().__init__(*args, **kwargs) + + # State machine + self._state = ManipulationState.IDLE + self._lock = threading.Lock() + self._error_message = "" + + # Planning components (initialized in start()) + self._world_monitor: WorldMonitor | None = None + self._planner: PlannerSpec | None = None + self._kinematics: KinematicsSpec | None = None + + # Robot registry: maps robot_name -> (world_robot_id, config, trajectory_gen) + self._robots: RobotRegistry = {} + + # Stored path for plan/preview/execute workflow (per robot) + self._planned_paths: PlannedPaths = {} + self._planned_trajectories: PlannedTrajectories = {} + + # Coordinator integration (lazy initialized) + self._coordinator_client: RPCClient | None = None + + # GraspGen Docker runner (lazy initialized on first generate_grasps call) + self._graspgen: DockerRunner | None = None + # Init joints: captured from first joint state received, used by go_init + self._init_joints: JointState | None = None + + # Last pick position: stored during pick so place_back() can return the object + self._last_pick_position: Vector3 | None = None + + # Snapshotted detections from the last scan_objects/refresh call. + # The live detection cache is volatile (labels change every frame), + # so pick/place use this stable snapshot instead. 
+ self._detection_snapshot: list[DetObject] = [] + + # TF publishing thread + self._tf_stop_event = threading.Event() + self._tf_thread: threading.Thread | None = None + + logger.info("ManipulationModule initialized") + + @rpc + def start(self) -> None: + """Start the manipulation module.""" + super().start() + + # Initialize planning stack + self._initialize_planning() + + # Subscribe to joint state via port + if self.joint_state is not None: + self.joint_state.subscribe(self._on_joint_state) + logger.info("Subscribed to joint_state port") + + # Subscribe to objects port for perception obstacle integration + if self.objects is not None: + self.objects.observable().subscribe(self._on_objects) # type: ignore[no-untyped-call] + logger.info("Subscribed to objects port (async)") + + logger.info("ManipulationModule started") + + def _initialize_planning(self) -> None: + """Initialize world, planner, and trajectory generator.""" + if not self.config.robots: + logger.warning("No robots configured, planning disabled") + return + + self._world_monitor = WorldMonitor(enable_viz=self.config.enable_viz) + + for robot_config in self.config.robots: + robot_id = self._world_monitor.add_robot(robot_config) + traj_gen = JointTrajectoryGenerator( + num_joints=len(robot_config.joint_names), + max_velocity=robot_config.max_velocity, + max_acceleration=robot_config.max_acceleration, + ) + self._robots[robot_config.name] = (robot_id, robot_config, traj_gen) + + self._world_monitor.finalize() + + for _, (robot_id, _, _) in self._robots.items(): + self._world_monitor.start_state_monitor(robot_id) + + # Start obstacle monitor for perception integration + self._world_monitor.start_obstacle_monitor() + + if self.config.enable_viz: + self._world_monitor.start_visualization_thread(rate_hz=10.0) + if url := self._world_monitor.get_visualization_url(): + logger.info(f"Visualization: {url}") + + self._planner = create_planner(name=self.config.planner_name) + self._kinematics = 
create_kinematics(name=self.config.kinematics_name) + + # Start TF publishing thread if any robot has tf_extra_links + if any(c.tf_extra_links for _, c, _ in self._robots.values()): + _ = self.tf # Eager init — lazy init blocks in Dask workers + self._tf_stop_event.clear() + self._tf_thread = threading.Thread( + target=self._tf_publish_loop, name="ManipTFThread", daemon=True + ) + self._tf_thread.start() + logger.info("TF publishing thread started") + + def _get_default_robot_name(self) -> RobotName | None: + """Get default robot name (first robot if only one, else None).""" + if len(self._robots) == 1: + return next(iter(self._robots.keys())) + return None + + def _get_robot( + self, robot_name: RobotName | None = None + ) -> tuple[RobotName, WorldRobotID, RobotModelConfig, JointTrajectoryGenerator] | None: + """Get robot by name or default. + + Args: + robot_name: Robot name or None for default (if single robot) + + Returns: + (robot_name, robot_id, config, traj_gen) or None if not found + """ + if not robot_name: # None or empty string (LLMs often pass "") + robot_name = self._get_default_robot_name() + if robot_name is None: + logger.error("Multiple robots configured, must specify robot_name") + return None + + if robot_name not in self._robots: + logger.error(f"Unknown robot: {robot_name}") + return None + + robot_id, config, traj_gen = self._robots[robot_name] + return (robot_name, robot_id, config, traj_gen) + + def _on_joint_state(self, msg: JointState) -> None: + """Callback when joint state received from driver.""" + try: + # Forward to world monitor for state synchronization. + # Pass robot_id=None to broadcast to all monitors - each monitor + # extracts only its robot's joints based on joint_name_mapping. 
+ if self._world_monitor is not None: + self._world_monitor.on_joint_state(msg, robot_id=None) + + # Capture initial joint positions on first callback + if self._init_joints is None and msg.position: + self._init_joints = JointState(name=list(msg.name), position=list(msg.position)) + logger.info( + f"Init joints captured: [{', '.join(f'{j:.3f}' for j in msg.position)}]" + ) + + except Exception as e: + logger.error(f"Exception in _on_joint_state: {e}") + import traceback + + logger.error(traceback.format_exc()) + + def _on_objects(self, objects: list[DetObject]) -> None: + """Callback when objects received from perception (runs on RxPY thread pool).""" + try: + if self._world_monitor is not None: + self._world_monitor.on_objects(objects) + except Exception as e: + logger.error(f"Exception in _on_objects: {e}") + + def _tf_publish_loop(self) -> None: + """Publish TF transforms at 10Hz for EE and extra links.""" + from dimos.msgs.geometry_msgs import Transform + + period = 0.1 # 10Hz + while not self._tf_stop_event.is_set(): + try: + if self._world_monitor is None: + break + transforms: list[Transform] = [] + for robot_id, config, _ in self._robots.values(): + # Publish world → EE + ee_pose = self._world_monitor.get_ee_pose(robot_id) + if ee_pose is not None: + ee_tf = Transform.from_pose(config.end_effector_link, ee_pose) + ee_tf.frame_id = "world" + transforms.append(ee_tf) + + # Publish world → each extra link + for link_name in config.tf_extra_links: + link_pose = self._world_monitor.get_link_pose(robot_id, link_name) + if link_pose is not None: + link_tf = Transform.from_pose(link_name, link_pose) + link_tf.frame_id = "world" + transforms.append(link_tf) + + if transforms: + self.tf.publish(*transforms) + except Exception as e: + logger.debug(f"TF publish error: {e}") + + self._tf_stop_event.wait(period) + + # ========================================================================= + # RPC Methods + # 
========================================================================= + + @rpc + def get_state(self) -> str: + """Get current manipulation state name.""" + return self._state.name + + @rpc + def get_error(self) -> str: + """Get last error message. + + Returns: + Error message or empty string + """ + return self._error_message + + @rpc + def cancel(self) -> bool: + """Cancel current motion.""" + if self._state != ManipulationState.EXECUTING: + return False + self._state = ManipulationState.IDLE + logger.info("Motion cancelled") + return True + + @rpc + def reset(self) -> bool: + """Reset to IDLE state (fails if EXECUTING).""" + if self._state == ManipulationState.EXECUTING: + return False + self._state = ManipulationState.IDLE + self._error_message = "" + return True + + @rpc + def get_current_joints(self, robot_name: RobotName | None = None) -> list[float] | None: + """Get current joint positions. + + Args: + robot_name: Robot to query (required if multiple robots configured) + """ + if (robot := self._get_robot(robot_name)) and self._world_monitor: + state = self._world_monitor.get_current_joint_state(robot[1]) + if state is not None: + return list(state.position) + return None + + @rpc + def get_ee_pose(self, robot_name: RobotName | None = None) -> Pose | None: + """Get current end-effector pose. + + Args: + robot_name: Robot to query (required if multiple robots configured) + """ + if (robot := self._get_robot(robot_name)) and self._world_monitor: + return self._world_monitor.get_ee_pose(robot[1], joint_state=None) + return None + + @rpc + def is_collision_free(self, joints: list[float], robot_name: RobotName | None = None) -> bool: + """Check if joint configuration is collision-free. 
+ + Args: + joints: Joint configuration to check + robot_name: Robot to check (required if multiple robots configured) + """ + if (robot := self._get_robot(robot_name)) and self._world_monitor: + _, robot_id, config, _ = robot + joint_state = JointState(name=config.joint_names, position=joints) + return self._world_monitor.is_state_valid(robot_id, joint_state) + return False + + # ========================================================================= + # Plan/Preview/Execute Workflow RPC Methods + # ========================================================================= + + def _begin_planning( + self, robot_name: RobotName | None = None + ) -> tuple[RobotName, WorldRobotID] | None: + """Check state and begin planning. Returns (robot_name, robot_id) or None. + + Args: + robot_name: Robot to plan for (required if multiple robots configured) + """ + if self._world_monitor is None: + logger.error("Planning not initialized") + return None + if (robot := self._get_robot(robot_name)) is None: + return None + with self._lock: + if self._state not in (ManipulationState.IDLE, ManipulationState.COMPLETED): + logger.warning(f"Cannot plan: state is {self._state.name}") + return None + self._state = ManipulationState.PLANNING + return robot[0], robot[1] + + def _fail(self, msg: str) -> bool: + """Set FAULT state with error message.""" + logger.warning(msg) + self._state = ManipulationState.FAULT + self._error_message = msg + return False + + def _dismiss_preview(self, robot_id: WorldRobotID) -> None: + """Hide the preview ghost if the world supports it.""" + if self._world_monitor is None: + return + world = self._world_monitor.world + if hasattr(world, "hide_preview"): + world.hide_preview(robot_id) # type: ignore[attr-defined] + world.publish_visualization() + + @rpc + def plan_to_pose(self, pose: Pose, robot_name: RobotName | None = None) -> bool: + """Plan motion to pose. Use preview_path() then execute(). 
+ + Args: + pose: Target end-effector pose + robot_name: Robot to plan for (required if multiple robots configured) + """ + if self._kinematics is None or (r := self._begin_planning(robot_name)) is None: + return False + robot_name, robot_id = r + assert self._world_monitor # guaranteed by _begin_planning + + current = self._world_monitor.get_current_joint_state(robot_id) + if current is None: + return self._fail("No joint state") + + # Convert Pose to PoseStamped for the IK solver + from dimos.msgs.geometry_msgs import PoseStamped + + target_pose = PoseStamped( + frame_id="world", + position=pose.position, + orientation=pose.orientation, + ) + + ik = self._kinematics.solve( + world=self._world_monitor.world, + robot_id=robot_id, + target_pose=target_pose, + seed=current, + check_collision=True, + ) + if not ik.is_success() or ik.joint_state is None: + return self._fail(f"IK failed: {ik.status.name}") + + logger.info(f"IK solved, error: {ik.position_error:.4f}m") + return self._plan_path_only(robot_name, robot_id, ik.joint_state) + + @rpc + def plan_to_joints(self, joints: JointState, robot_name: RobotName | None = None) -> bool: + """Plan motion to joint config. Use preview_path() then execute(). 
+ + Args: + joints: Target joint state (names + positions) + robot_name: Robot to plan for (required if multiple robots configured) + """ + if (r := self._begin_planning(robot_name)) is None: + return False + robot_name, robot_id = r + logger.info(f"Planning to joints for {robot_name}: {[f'{j:.3f}' for j in joints.position]}") + return self._plan_path_only(robot_name, robot_id, joints) + + def _plan_path_only( + self, robot_name: RobotName, robot_id: WorldRobotID, goal: JointState + ) -> bool: + """Plan path from current position to goal, store result.""" + assert self._world_monitor and self._planner # guaranteed by _begin_planning + self._dismiss_preview(robot_id) + start = self._world_monitor.get_current_joint_state(robot_id) + if start is None: + return self._fail("No joint state") + + result = self._planner.plan_joint_path( + world=self._world_monitor.world, + robot_id=robot_id, + start=start, + goal=goal, + timeout=self.config.planning_timeout, + ) + if not result.is_success(): + return self._fail(f"Planning failed: {result.status.name}") + + logger.info(f"Path: {len(result.path)} waypoints") + self._planned_paths[robot_name] = result.path + + _, _, traj_gen = self._robots[robot_name] + # Convert JointState path to list of position lists for trajectory generator + traj = traj_gen.generate([list(state.position) for state in result.path]) + self._planned_trajectories[robot_name] = traj + logger.info(f"Trajectory: {traj.duration:.3f}s") + + self._state = ManipulationState.COMPLETED + return True + + @rpc + def preview_path(self, duration: float = 3.0, robot_name: RobotName | None = None) -> bool: + """Preview the planned path in the visualizer. 
+ + Args: + duration: Total animation duration in seconds + robot_name: Robot to preview (required if multiple robots configured) + """ + from dimos.manipulation.planning.utils.path_utils import interpolate_path + + if self._world_monitor is None: + return False + + robot = self._get_robot(robot_name) + if robot is None: + return False + robot_name, robot_id, _, _ = robot + + planned_path = self._planned_paths.get(robot_name) + if planned_path is None or len(planned_path) == 0: + logger.warning(f"No planned path to preview for {robot_name}") + return False + + # Interpolate and animate + interpolated = interpolate_path(planned_path, resolution=0.1) + self._world_monitor.world.animate_path(robot_id, interpolated, duration) + return True + + @rpc + def has_planned_path(self) -> bool: + """Check if there's a planned path ready. + + Returns: + True if a path is planned and ready + """ + robot = self._get_robot() + if robot is None: + return False + robot_name, _, _, _ = robot + + path = self._planned_paths.get(robot_name) + return path is not None and len(path) > 0 + + @rpc + def get_visualization_url(self) -> str | None: + """Get the visualization URL. + + Returns: + URL string or None if visualization not enabled + """ + if self._world_monitor is None: + return None + return self._world_monitor.get_visualization_url() + + @rpc + def clear_planned_path(self) -> bool: + """Clear the stored planned path. + + Returns: + True if cleared + """ + robot = self._get_robot() + if robot is None: + return False + robot_name, _, _, _ = robot + + self._planned_paths.pop(robot_name, None) + self._planned_trajectories.pop(robot_name, None) + return True + + @rpc + def list_robots(self) -> list[str]: + """List all configured robot names. + + Returns: + List of robot names + """ + return list(self._robots.keys()) + + @rpc + def get_robot_info(self, robot_name: RobotName | None = None) -> dict[str, Any] | None: + """Get information about a robot. 
+ + Args: + robot_name: Robot name (uses default if None) + + Returns: + Dict with robot info or None if not found + """ + robot = self._get_robot(robot_name) + if robot is None: + return None + + robot_name, robot_id, config, _ = robot + + return { + "name": config.name, + "world_robot_id": robot_id, + "joint_names": config.joint_names, + "end_effector_link": config.end_effector_link, + "base_link": config.base_link, + "max_velocity": config.max_velocity, + "max_acceleration": config.max_acceleration, + "has_joint_name_mapping": bool(config.joint_name_mapping), + "coordinator_task_name": config.coordinator_task_name, + "home_joints": config.home_joints, + "pre_grasp_offset": config.pre_grasp_offset, + "init_joints": list(self._init_joints.position) if self._init_joints else None, + } + + @rpc + def get_init_joints(self) -> JointState | None: + """Get the init joint state (captured at startup or set manually).""" + return self._init_joints + + @rpc + def set_init_joints(self, joint_state: JointState) -> bool: + """Set the init joint state. + + Args: + joint_state: New init joint state (names + positions) + """ + self._init_joints = joint_state + logger.info(f"Init joints set: [{', '.join(f'{j:.3f}' for j in joint_state.position)}]") + return True + + @rpc + def set_init_joints_to_current(self, robot_name: RobotName | None = None) -> bool: + """Set init joints to the current joint positions. 
+ + Args: + robot_name: Robot to capture from (required if multiple robots configured) + """ + robot = self._get_robot(robot_name) + if robot is None: + return False + _, robot_id, _, _ = robot + if self._world_monitor is None: + return False + current = self._world_monitor.get_current_joint_state(robot_id) + if current is None: + logger.error("Cannot capture init joints — no current joint state") + return False + self._init_joints = current + logger.info( + f"Init joints set to current: [{', '.join(f'{j:.3f}' for j in current.position)}]" + ) + return True + + # ========================================================================= + # Coordinator Integration RPC Methods + # ========================================================================= + + def _get_coordinator_client(self) -> RPCClient | None: + """Get or create coordinator RPC client (lazy init).""" + if not any( + c.coordinator_task_name or c.gripper_hardware_id for _, c, _ in self._robots.values() + ): + return None + if self._coordinator_client is None: + from dimos.control.coordinator import ControlCoordinator + from dimos.core.rpc_client import RPCClient + + self._coordinator_client = RPCClient(None, ControlCoordinator) + return self._coordinator_client + + def _translate_trajectory_to_coordinator( + self, + trajectory: JointTrajectory, + robot_config: RobotModelConfig, + ) -> JointTrajectory: + """Translate trajectory joint names from URDF to coordinator namespace. 
+ + Args: + trajectory: Trajectory with URDF joint names + robot_config: Robot config with joint name mapping + + Returns: + Trajectory with coordinator joint names + """ + if not robot_config.joint_name_mapping: + return trajectory # No translation needed + + # Translate joint names + coordinator_names = [ + robot_config.get_coordinator_joint_name(j) for j in trajectory.joint_names + ] + + # Create new trajectory with translated names + # Note: duration is computed automatically from points in JointTrajectory.__init__ + return JointTrajectory( + joint_names=coordinator_names, + points=trajectory.points, + timestamp=trajectory.timestamp, + ) + + @rpc + def execute(self, robot_name: RobotName | None = None) -> bool: + """Execute planned trajectory via ControlCoordinator.""" + if (robot := self._get_robot(robot_name)) is None: + return False + robot_name, _, config, _ = robot + + if (traj := self._planned_trajectories.get(robot_name)) is None: + logger.warning("No planned trajectory") + return False + if not config.coordinator_task_name: + logger.error(f"No coordinator_task_name for '{robot_name}'") + return False + if (client := self._get_coordinator_client()) is None: + logger.error("No coordinator client") + return False + + translated = self._translate_trajectory_to_coordinator(traj, config) + logger.info( + f"Executing: task='{config.coordinator_task_name}', {len(translated.points)} pts, {translated.duration:.2f}s" + ) + + self._state = ManipulationState.EXECUTING + result = client.task_invoke( + config.coordinator_task_name, "execute", {"trajectory": translated} + ) + if result: + logger.info("Trajectory accepted") + self._state = ManipulationState.COMPLETED + return True + else: + return self._fail("Coordinator rejected trajectory") + + @rpc + def get_trajectory_status(self, robot_name: RobotName | None = None) -> dict[str, Any] | None: + """Get trajectory execution status via coordinator task_invoke.""" + if (robot := self._get_robot(robot_name)) is None: + 
return None + _, _, config, _ = robot + if not config.coordinator_task_name or (client := self._get_coordinator_client()) is None: + return None + try: + state = client.task_invoke(config.coordinator_task_name, "get_state", {}) + if state is not None: + return {"state": int(state), "task": config.coordinator_task_name} + return None + except Exception: + return None + + def _get_graspgen(self) -> DockerRunner: + """Get or create GraspGen Docker module (lazy init, thread-safe).""" + # Fast path: already initialized (no lock needed for read) + if self._graspgen is not None: + return self._graspgen + + # Slow path: need to initialize (acquire lock to prevent race condition) + with self._lock: + # Double-check: another thread may have initialized while we waited for lock + if self._graspgen is not None: + return self._graspgen + + # Ensure GraspGen model checkpoints are pulled from LFS + get_data("models_graspgen") + + docker_file = ( + DIMOS_PROJECT_ROOT + / "dimos" + / "manipulation" + / "grasping" + / "docker_context" + / "Dockerfile" + ) + + # Auto-mount host directory for visualization output when enabled. 
+ docker_volumes: list[tuple[str, str, str]] = [] + if self.config.graspgen_save_visualization_data: + host_dir = self.config.graspgen_visualization_output_path.parent + host_dir.mkdir(parents=True, exist_ok=True) + docker_volumes.append((str(host_dir), _GRASPGEN_VIZ_CONTAINER_DIR, "rw")) + + graspgen = DockerRunner( + GraspGenModule, # type: ignore[arg-type] + docker_file=docker_file, + docker_build_context=DIMOS_PROJECT_ROOT, + docker_image=self.config.graspgen_docker_image, + docker_env={"CI": "1"}, # skip interactive system config prompt in container + docker_volumes=docker_volumes, + gripper_type=self.config.graspgen_gripper_type, + num_grasps=self.config.graspgen_num_grasps, + topk_num_grasps=self.config.graspgen_topk_num_grasps, + grasp_threshold=self.config.graspgen_grasp_threshold, + filter_collisions=self.config.graspgen_filter_collisions, + save_visualization_data=self.config.graspgen_save_visualization_data, + visualization_output_path=_GRASPGEN_VIZ_CONTAINER_PATH, + ) + graspgen.start() + self._graspgen = graspgen # cache only after successful start + return self._graspgen + + @rpc + def generate_grasps( + self, + pointcloud: PointCloud2, + scene_pointcloud: PointCloud2 | None = None, + ) -> PoseArray | None: + """Generate grasp poses for the given point cloud via GraspGen Docker module.""" + try: + graspgen = self._get_graspgen() + return graspgen.generate_grasps(pointcloud, scene_pointcloud) # type: ignore[no-any-return] + except Exception as e: + logger.error(f"Grasp generation failed: {e}") + return None + + @property + def world_monitor(self) -> WorldMonitor | None: + """Access the world monitor for advanced obstacle/world operations.""" + return self._world_monitor + + @rpc + def add_obstacle( + self, + name: str, + pose: Pose, + shape: str, + dimensions: list[float] | None = None, + mesh_path: str | None = None, + ) -> str: + """Add obstacle: shape='box'|'sphere'|'cylinder'|'mesh'. 
Returns obstacle_id.""" + if not self._world_monitor: + return "" + + # Map shape string to ObstacleType + shape_map = { + "box": ObstacleType.BOX, + "sphere": ObstacleType.SPHERE, + "cylinder": ObstacleType.CYLINDER, + "mesh": ObstacleType.MESH, + } + obstacle_type = shape_map.get(shape) + if obstacle_type is None: + logger.warning(f"Unknown obstacle shape: {shape}") + return "" + + # Validate mesh_path for mesh type + if obstacle_type == ObstacleType.MESH and not mesh_path: + logger.warning("mesh_path required for mesh obstacles") + return "" + + # Import PoseStamped here to avoid circular imports + from dimos.msgs.geometry_msgs import PoseStamped + + obstacle = Obstacle( + name=name, + obstacle_type=obstacle_type, + pose=PoseStamped(position=pose.position, orientation=pose.orientation), + dimensions=tuple(dimensions) if dimensions else (), + mesh_path=mesh_path, + ) + return self._world_monitor.add_obstacle(obstacle) + + @rpc + def remove_obstacle(self, obstacle_id: str) -> bool: + """Remove an obstacle from the planning world.""" + if self._world_monitor is None: + return False + return self._world_monitor.remove_obstacle(obstacle_id) + + # ========================================================================= + # Perception RPC Methods + # ========================================================================= + + @rpc + def refresh_obstacles(self, min_duration: float = 0.0) -> list[dict[str, Any]]: + """Refresh perception obstacles. Returns the list of obstacles added. + + Also snapshots the current detections so pick/place can use stable labels. 
+ """ + if self._world_monitor is None: + return [] + result = self._world_monitor.refresh_obstacles(min_duration) + # Snapshot detections at refresh time — the live cache is volatile + self._detection_snapshot = self._world_monitor.get_cached_objects() + logger.info(f"Detection snapshot: {[d.name for d in self._detection_snapshot]}") + return result + + @rpc + def clear_perception_obstacles(self) -> int: + """Remove all perception obstacles. Returns count removed.""" + if self._world_monitor is None: + return 0 + return self._world_monitor.clear_perception_obstacles() + + @rpc + def get_perception_status(self) -> dict[str, int]: + """Get perception obstacle status (cached/added counts).""" + if self._world_monitor is None: + return {"cached": 0, "added": 0} + return self._world_monitor.get_perception_status() + + @rpc + def list_cached_detections(self) -> list[dict[str, Any]]: + """List cached detections from perception.""" + if self._world_monitor is None: + return [] + return self._world_monitor.list_cached_detections() + + @rpc + def list_added_obstacles(self) -> list[dict[str, Any]]: + """List perception obstacles currently in the planning world.""" + if self._world_monitor is None: + return [] + return self._world_monitor.list_added_obstacles() + + # ========================================================================= + # Gripper Methods + # ========================================================================= + + def _get_gripper_hardware_id(self, robot_name: RobotName | None = None) -> str | None: + """Get gripper hardware ID for a robot.""" + robot = self._get_robot(robot_name) + if robot is None: + return None + _, _, config, _ = robot + if not config.gripper_hardware_id: + logger.warning(f"No gripper_hardware_id configured for '{config.name}'") + return None + return str(config.gripper_hardware_id) + + def _set_gripper_position(self, position: float, robot_name: RobotName | None = None) -> bool: + """Internal: set gripper position in meters.""" 
+ hw_id = self._get_gripper_hardware_id(robot_name) + if hw_id is None: + return False + client = self._get_coordinator_client() + if client is None: + logger.error("No coordinator client for gripper control") + return False + return bool(client.set_gripper_position(hw_id, position)) + + @rpc + def get_gripper(self, robot_name: RobotName | None = None) -> float | None: + """Get gripper position in meters. + + Args: + robot_name: Robot to query (required if multiple robots configured) + """ + hw_id = self._get_gripper_hardware_id(robot_name) + if hw_id is None: + return None + client = self._get_coordinator_client() + if client is None: + return None + result = client.get_gripper_position(hw_id) + return float(result) if result is not None else None + + @skill + def set_gripper(self, position: float, robot_name: str | None = None) -> str: + """Set gripper to a specific opening in meters. + + Args: + position: Gripper opening in meters (0.0 = closed, 0.85 = fully open). + robot_name: Robot to control (only needed for multi-arm setups). + """ + if self._set_gripper_position(position, robot_name): + return f"Gripper set to {position:.3f}m" + return "Error: Failed to set gripper position" + + @skill + def open_gripper(self, robot_name: str | None = None) -> str: + """Open the robot gripper fully. + + Args: + robot_name: Robot to control (only needed for multi-arm setups). + """ + if self._set_gripper_position(0.85, robot_name): + return "Gripper opened" + return "Error: Failed to open gripper" + + @skill + def close_gripper(self, robot_name: str | None = None) -> str: + """Close the robot gripper fully. + + Args: + robot_name: Robot to control (only needed for multi-arm setups). 
+ """ + if self._set_gripper_position(0.0, robot_name): + return "Gripper closed" + return "Error: Failed to close gripper" + + # ========================================================================= + # Skill Helpers (internal) + # ========================================================================= + + def _wait_for_trajectory_completion( + self, robot_name: RobotName | None = None, timeout: float = 60.0, poll_interval: float = 0.2 + ) -> bool: + """Wait for trajectory execution to complete. + + Polls the coordinator task state via task_invoke. Falls back to waiting + for the trajectory duration if the coordinator is unavailable. + + Args: + robot_name: Robot to monitor + timeout: Maximum wait time in seconds + poll_interval: Time between status checks + + Returns: + True if trajectory completed successfully + """ + robot = self._get_robot(robot_name) + if robot is None: + return True + rname, _, config, _ = robot + client = self._get_coordinator_client() + + if client is None or not config.coordinator_task_name: + # No coordinator — wait for trajectory duration as fallback + traj = self._planned_trajectories.get(rname) + if traj is not None: + logger.info(f"No coordinator status — waiting {traj.duration:.1f}s for trajectory") + time.sleep(traj.duration + 0.5) + return True + + # Poll task state via task_invoke + start = time.time() + while (time.time() - start) < timeout: + try: + state = client.task_invoke(config.coordinator_task_name, "get_state", {}) + # TrajectoryState is an IntEnum: IDLE=0, EXECUTING=1, COMPLETED=2, ABORTED=3, FAULT=4 + if state is not None: + state_val = int(state) + if state_val in (0, 2): # IDLE or COMPLETED + return True + if state_val in (3, 4): # ABORTED or FAULT + logger.warning(f"Trajectory failed: state={state}") + return False + # state_val == 1 means EXECUTING, keep polling + else: + # task_invoke returned None — task not found, assume done + return True + except Exception: + # Fallback: wait for trajectory duration + 
traj = self._planned_trajectories.get(rname) + if traj is not None: + remaining = traj.duration - (time.time() - start) + if remaining > 0: + logger.info(f"Status poll failed — waiting {remaining:.1f}s for trajectory") + time.sleep(remaining + 0.5) + return True + time.sleep(poll_interval) + + logger.warning(f"Trajectory execution timed out after {timeout}s") + return False + + def _preview_execute_wait( + self, robot_name: RobotName | None = None, preview_duration: float = 0.5 + ) -> str | None: + """Preview planned path, execute, and wait for completion. + + Returns None on success, or an error string on failure. + + Args: + robot_name: Robot to operate on + preview_duration: Duration to animate the preview in Meshcat (seconds) + """ + logger.info("Previewing trajectory...") + self.preview_path(preview_duration, robot_name) + + logger.info("Executing trajectory...") + if not self.execute(robot_name): + return "Error: Trajectory execution failed" + + if not self._wait_for_trajectory_completion(robot_name): + return "Error: Trajectory execution timed out" + + return None + + def _compute_pre_grasp_pose(self, grasp_pose: Pose, offset: float = 0.10) -> Pose: + """Compute a pre-grasp pose offset along the approach direction (local -Z). + + Args: + grasp_pose: The final grasp pose + offset: Distance to retract along the approach direction (meters) + + Returns: + Pre-grasp pose offset from the grasp pose + """ + from dimos.utils.transform_utils import offset_distance + + return offset_distance(grasp_pose, offset) + + def _find_object_in_detections( + self, object_name: str, object_id: str | None = None + ) -> DetObject | None: + """Find an object in the detection snapshot by name or ID. + + Uses the snapshot taken during the last scan_objects/refresh call, + not the volatile live cache (which changes labels every frame). 
+ + Args: + object_name: Name/label to search for + object_id: Optional specific object ID + + Returns: + Matching DetObject, or None + """ + if not self._detection_snapshot: + logger.warning("No detection snapshot — call scan_objects() first") + return None + + for det in self._detection_snapshot: + if object_id and det.object_id == object_id: + return det + if object_name.lower() in det.name.lower() or det.name.lower() in object_name.lower(): + return det + + available = [det.name for det in self._detection_snapshot] + logger.warning(f"Object '{object_name}' not found in snapshot. Available: {available}") + return None + + def _generate_grasps_for_pick( + self, object_name: str, object_id: str | None = None + ) -> list[Pose] | None: + """Generate grasp poses for an object. + + Computes a top-down approach grasp from the object's detected position. + + Args: + object_name: Name of the object + object_id: Optional object ID + + Returns: + List of grasp poses (best first), or None if object not found + """ + det = self._find_object_in_detections(object_name, object_id) + if det is None: + logger.warning(f"Object '{object_name}' not found in detections") + return None + + c = det.center + grasp_pose = Pose(Vector3(c.x, c.y, c.z), Quaternion.from_euler(Vector3(0.0, math.pi, 0.0))) + logger.info(f"Heuristic grasp for '{object_name}' at ({c.x:.3f}, {c.y:.3f}, {c.z:.3f})") + return [grasp_pose] + + # ========================================================================= + # Short-Horizon Skills — Single-step actions + # ========================================================================= + + @skill + def move_to_pose( + self, + x: float, + y: float, + z: float, + roll: float = 0.0, + pitch: float = 0.0, + yaw: float = 0.0, + robot_name: str | None = None, + ) -> str: + """Move the robot end-effector to a target pose. + + Plans a collision-free trajectory and executes it. + + Args: + x: Target X position in meters. + y: Target Y position in meters. 
+ z: Target Z position in meters. + roll: Target roll in radians (default 0). + pitch: Target pitch in radians (default 0). + yaw: Target yaw in radians (default 0). + robot_name: Robot to move (only needed for multi-arm setups). + """ + logger.info(f"Planning motion to ({x:.3f}, {y:.3f}, {z:.3f})...") + pose = Pose(Vector3(x, y, z), Quaternion.from_euler(Vector3(roll, pitch, yaw))) + + if not self.plan_to_pose(pose, robot_name): + return f"Error: Planning failed — pose ({x:.3f}, {y:.3f}, {z:.3f}) may be unreachable or in collision" + + err = self._preview_execute_wait(robot_name) + if err: + return err + + return f"Reached target pose ({x:.3f}, {y:.3f}, {z:.3f})" + + @skill + def move_to_joints( + self, + joints: str, + robot_name: str | None = None, + ) -> str: + """Move the robot to a target joint configuration. + + Plans a collision-free trajectory and executes it. + + Args: + joints: Comma-separated joint positions in radians, e.g. "0.1, -0.5, 1.2, 0.0, 0.3, -0.1". + robot_name: Robot to move (only needed for multi-arm setups). + """ + try: + joint_values = [float(j.strip()) for j in joints.split(",")] + except ValueError: + return f"Error: Invalid joints format '{joints}'. Expected comma-separated floats." + + robot = self._get_robot(robot_name) + if robot is None: + return "Error: Robot not found" + rname, _, config, _ = robot + goal = JointState(name=config.joint_names, position=joint_values) + + logger.info(f"Planning motion to joints [{', '.join(f'{j:.3f}' for j in joint_values)}]...") + if not self.plan_to_joints(goal, rname): + return "Error: Planning failed — joint configuration may be unreachable or in collision" + + err = self._preview_execute_wait(robot_name) + if err: + return err + + return "Reached target joint configuration" + + @skill + def get_scene_info(self, robot_name: str | None = None) -> str: + """Get current robot state, detected objects, and scene information. 
+ + Returns a summary of the robot's joint positions, end-effector pose, + gripper state, detected objects, and obstacle count. + + Args: + robot_name: Robot to query (only needed for multi-arm setups). + """ + lines: list[str] = [] + + # Robot state + joints = self.get_current_joints(robot_name) + if joints is not None: + lines.append(f"Joints: [{', '.join(f'{j:.3f}' for j in joints)}]") + else: + lines.append("Joints: unavailable (no state received)") + + ee_pose = self.get_ee_pose(robot_name) + if ee_pose is not None: + p = ee_pose.position + lines.append(f"EE pose: ({p.x:.4f}, {p.y:.4f}, {p.z:.4f})") + else: + lines.append("EE pose: unavailable") + + # Gripper + gripper_pos = self.get_gripper(robot_name) + if gripper_pos is not None: + lines.append(f"Gripper: {gripper_pos:.3f}m") + else: + lines.append("Gripper: not configured") + + # Perception + perception = self.get_perception_status() + lines.append( + f"Perception: {perception.get('cached', 0)} cached, {perception.get('added', 0)} obstacles added" + ) + + detections = self._detection_snapshot + if detections: + lines.append(f"Detected objects ({len(detections)}):") + for det in detections: + c = det.center + lines.append(f" - {det.name}: ({c.x:.3f}, {c.y:.3f}, {c.z:.3f})") + else: + lines.append("Detected objects: none") + + # Visualization + url = self.get_visualization_url() + if url: + lines.append(f"Visualization: {url}") + + # State + lines.append(f"State: {self.get_state()}") + + return "\n".join(lines) + + @skill + def scan_objects(self, min_duration: float = 1.0, robot_name: str | None = None) -> str: + """Scan the scene and list detected objects with their 3D positions. + + Refreshes perception obstacles from the latest sensor data and returns + a formatted list of all detected objects. + + Args: + min_duration: Minimum time in seconds to wait for stable detections. + robot_name: Robot context (only needed for multi-arm setups). 
+ """ + obstacles = self.refresh_obstacles(min_duration) + + detections = self._detection_snapshot + if not detections: + return "No objects detected in scene" + + lines = [f"Detected {len(detections)} object(s):"] + for det in detections: + c = det.center + lines.append(f" - {det.name}: ({c.x:.3f}, {c.y:.3f}, {c.z:.3f})") + + if obstacles: + lines.append(f"\n{len(obstacles)} obstacle(s) added to planning world") + + return "\n".join(lines) + + # ========================================================================= + # Long-Horizon Skills — Multi-step composed behaviors + # ========================================================================= + + @skill + def go_home(self, robot_name: str | None = None) -> str: + """Move the robot to its home/observe joint configuration. + + Opens the gripper and moves to the predefined home position. + + Args: + robot_name: Robot to move (only needed for multi-arm setups). + """ + robot = self._get_robot(robot_name) + if robot is None: + return "Error: Robot not found" + rname, _, config, _ = robot + + if config.home_joints is None: + return "Error: No home_joints configured for this robot" + + logger.info("Opening gripper...") + self._set_gripper_position(0.85, rname) + time.sleep(0.5) + + goal = JointState(name=config.joint_names, position=config.home_joints) + logger.info("Planning motion to home position...") + if not self.plan_to_joints(goal, rname): + return "Error: Failed to plan path to home position" + + err = self._preview_execute_wait(robot_name) + if err: + return err + + return "Reached home position" + + @skill + def go_init(self, robot_name: str | None = None) -> str: + """Move the robot to its init position (captured at startup or set manually). + + The init position is the joint configuration the robot was in when the + module first received joint state. It can be changed with set_init_joints(). + + Args: + robot_name: Robot to move (only needed for multi-arm setups). 
+ """ + if self._init_joints is None: + return "Error: No init joints captured — robot may not have reported joint state yet" + + logger.info( + f"Planning motion to init position [{', '.join(f'{j:.3f}' for j in self._init_joints.position)}]..." + ) + if not self.plan_to_joints(self._init_joints, robot_name): + return "Error: Failed to plan path to init position" + + err = self._preview_execute_wait(robot_name) + if err: + return err + + return "Reached init position" + + @skill + def pick( + self, + object_name: str, + object_id: str | None = None, + robot_name: str | None = None, + ) -> str: + """Pick up an object by name using grasp planning and motion execution. + + Generates grasp poses, plans collision-free approach/grasp/retract motions, + and executes them. + + Args: + object_name: Name of the object to pick (e.g. "cup", "bottle", "can"). + object_id: Optional unique object ID from perception for precise identification. + robot_name: Robot to use (only needed for multi-arm setups). + """ + robot = self._get_robot(robot_name) + if robot is None: + return "Error: Robot not found" + rname, _, config, _ = robot + pre_grasp_offset = config.pre_grasp_offset + + # 1. Generate grasps (uses already-cached detections — call scan_objects first) + logger.info(f"Generating grasp poses for '{object_name}'...") + grasp_poses = self._generate_grasps_for_pick(object_name, object_id) + if not grasp_poses: + return f"Error: No grasp poses found for '{object_name}'. Object may not be detected." + + # 2. 
Try each grasp candidate + max_attempts = min(len(grasp_poses), 5) + for i, grasp_pose in enumerate(grasp_poses[:max_attempts]): + pre_grasp_pose = self._compute_pre_grasp_pose(grasp_pose, pre_grasp_offset) + + logger.info(f"Planning approach to pre-grasp (attempt {i + 1}/{max_attempts})...") + if not self.plan_to_pose(pre_grasp_pose, rname): + logger.info(f"Grasp candidate {i + 1} approach planning failed, trying next") + continue # Try next candidate + + # Open gripper before approach + logger.info("Opening gripper...") + self._set_gripper_position(0.85, rname) + time.sleep(0.5) + + # 3. Preview + execute approach + err = self._preview_execute_wait(rname) + if err: + return err + + # 4. Move to grasp pose + logger.info("Moving to grasp position...") + if not self.plan_to_pose(grasp_pose, rname): + return "Error: Grasp pose planning failed" + err = self._preview_execute_wait(rname) + if err: + return err + + # 5. Close gripper + logger.info("Closing gripper...") + self._set_gripper_position(0.0, rname) + time.sleep(1.5) # Wait for gripper to close + + # 6. Retract to pre-grasp + logger.info("Retracting with object...") + if not self.plan_to_pose(pre_grasp_pose, rname): + return "Error: Retract planning failed" + err = self._preview_execute_wait(rname) + if err: + return err + + # Store pick position so place_back() can return the object + self._last_pick_position = grasp_pose.position + + return f"Pick complete — grasped '{object_name}' successfully" + + return f"Error: All {max_attempts} grasp attempts failed for '{object_name}'" + + @skill + def place( + self, + x: float, + y: float, + z: float, + robot_name: str | None = None, + ) -> str: + """Place a held object at the specified position. + + Plans and executes an approach, lowers to the target, releases the gripper, + and retracts. + + Args: + x: Target X position in meters. + y: Target Y position in meters. + z: Target Z position in meters. + robot_name: Robot to use (only needed for multi-arm setups). 
+ """ + robot = self._get_robot(robot_name) + if robot is None: + return "Error: Robot not found" + rname, _, config, _ = robot + pre_place_offset = config.pre_grasp_offset + + # Compute place pose (top-down approach) + place_pose = Pose(Vector3(x, y, z), Quaternion.from_euler(Vector3(0.0, math.pi, 0.0))) + pre_place_pose = self._compute_pre_grasp_pose(place_pose, pre_place_offset) + + # 1. Move to pre-place + logger.info(f"Planning approach to place position ({x:.3f}, {y:.3f}, {z:.3f})...") + if not self.plan_to_pose(pre_place_pose, rname): + return "Error: Pre-place approach planning failed" + + err = self._preview_execute_wait(rname) + if err: + return err + + # 2. Lower to place position + logger.info("Lowering to place position...") + if not self.plan_to_pose(place_pose, rname): + return "Error: Place pose planning failed" + err = self._preview_execute_wait(rname) + if err: + return err + + # 3. Release + logger.info("Releasing object...") + self._set_gripper_position(0.85, rname) + time.sleep(1.0) + + # 4. Retract + logger.info("Retracting...") + if not self.plan_to_pose(pre_place_pose, rname): + return "Error: Retract planning failed" + err = self._preview_execute_wait(rname) + if err: + return err + + return f"Place complete — object released at ({x:.3f}, {y:.3f}, {z:.3f})" + + @skill + def place_back(self, robot_name: str | None = None) -> str: + """Place the held object back at its original pick position. + + Uses the position stored from the last successful pick operation. + + Args: + robot_name: Robot to use (only needed for multi-arm setups). 
+ """ + if self._last_pick_position is None: + return "Error: No previous pick position stored — run pick() first" + + p = self._last_pick_position + logger.info(f"Placing back at original position ({p.x:.3f}, {p.y:.3f}, {p.z:.3f})...") + return self.place(p.x, p.y, p.z, robot_name) + + @skill + def pick_and_place( + self, + object_name: str, + place_x: float, + place_y: float, + place_z: float, + object_id: str | None = None, + robot_name: str | None = None, + ) -> str: + """Pick up an object and place it at a target location. + + Combines the pick and place skills into a single end-to-end operation. + + Args: + object_name: Name of the object to pick (e.g. "cup", "bottle"). + place_x: Target X position to place the object (meters). + place_y: Target Y position to place the object (meters). + place_z: Target Z position to place the object (meters). + object_id: Optional unique object ID from perception. + robot_name: Robot to use (only needed for multi-arm setups). + """ + logger.info( + f"Starting pick and place: pick '{object_name}' → place at ({place_x:.3f}, {place_y:.3f}, {place_z:.3f})" + ) + + # Pick phase + result = self.pick(object_name, object_id, robot_name) + if result.startswith("Error:"): + return result + + # Place phase + return self.place(place_x, place_y, place_z, robot_name) + + # ========================================================================= + # Lifecycle + # ========================================================================= + + @rpc + def stop(self) -> None: + """Stop the manipulation module.""" + logger.info("Stopping ManipulationModule") + + # Stop GraspGen Docker container (thread-safe access to shared state) + with self._lock: + if self._graspgen is not None: + self._graspgen.stop() + self._graspgen = None + + # Stop TF thread + if self._tf_thread is not None: + self._tf_stop_event.set() + self._tf_thread.join(timeout=1.0) + self._tf_thread = None + + # Stop world monitor (includes visualization thread) + if 
self._world_monitor is not None: + self._world_monitor.stop_all_monitors() + + super().stop() + + +# Expose blueprint for declarative composition +manipulation_module = ManipulationModule.blueprint diff --git a/dimos/manipulation/planning/README.md b/dimos/manipulation/planning/README.md new file mode 100644 index 0000000000..803d8b166e --- /dev/null +++ b/dimos/manipulation/planning/README.md @@ -0,0 +1,178 @@ +# Manipulation Planning Stack + +Motion planning for robotic manipulators. Backend-agnostic design with Drake implementation. + +## Quick Start + +```bash +# Terminal 1: Mock coordinator +dimos run coordinator-mock + +# Terminal 2: Manipulation planner +dimos run xarm7-planner-coordinator + +# Terminal 3: IPython client +python -m dimos.manipulation.planning.examples.manipulation_client +``` + +In IPython: +```python +joints() # Get current joints +plan([0.1] * 7) # Plan to target +preview() # Preview in Meshcat (url() for link) +execute() # Execute via coordinator +``` + +## Architecture + +``` +┌─────────────────────────────────────────────────────────────┐ +│ ManipulationModule │ +│ (RPC interface, state machine, multi-robot) │ +└─────────────────────────────────────────────────────────────┘ + │ +┌─────────────────────────────────────────────────────────────┐ +│ Backend-Agnostic Components │ +│ ┌──────────────────┐ ┌─────────────────────────────┐ │ +│ │ RRTConnectPlanner│ │ JacobianIK │ │ +│ │ (rrt_planner.py) │ │ (iterative & differential) │ │ +│ └──────────────────┘ └─────────────────────────────┘ │ +│ Uses only WorldSpec interface │ +└─────────────────────────────────────────────────────────────┘ + │ +┌─────────────────────────────────────────────────────────────┐ +│ WorldSpec Protocol │ +│ Context management, collision checking, FK, Jacobian │ +└─────────────────────────────────────────────────────────────┘ + │ +┌─────────────────────────────────────────────────────────────┐ +│ Backend-Specific Implementations │ +│ ┌──────────────────┐ 
┌─────────────────────────────┐ │ +│ │ DrakeWorld │ │ DrakeOptimizationIK │ │ +│ │ (physics/viz) │ │ (nonlinear IK) │ │ +│ └──────────────────┘ └─────────────────────────────┘ │ +└─────────────────────────────────────────────────────────────┘ +``` + +## Using ManipulationModule + +```python +from pathlib import Path +from dimos.manipulation import ManipulationModule +from dimos.manipulation.planning.spec import RobotModelConfig + +config = RobotModelConfig( + name="xarm7", + urdf_path=Path("/path/to/xarm7.urdf"), + base_pose=PoseStamped(position=Vector3(), orientation=Quaternion()), + joint_names=["joint1", "joint2", "joint3", "joint4", "joint5", "joint6", "joint7"], + end_effector_link="link7", + base_link="link_base", + joint_name_mapping={"arm_joint1": "joint1", ...}, # coordinator <-> URDF + coordinator_task_name="traj_arm", +) + +module = ManipulationModule( + robots=[config], + planning_timeout=10.0, + enable_viz=True, + planner_name="rrt_connect", # Only option + kinematics_name="drake_optimization", # Or "jacobian" +) +module.start() +module.plan_to_joints([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7]) +module.execute() # Sends to coordinator +``` + +## RobotModelConfig Fields + +| Field | Description | +|-------|-------------| +| `name` | Robot identifier | +| `urdf_path` | Path to URDF/XACRO file | +| `base_pose` | PoseStamped for robot base in world frame | +| `joint_names` | Joint names in URDF | +| `end_effector_link` | EE link name | +| `base_link` | Base link name | +| `max_velocity` | Max joint velocity (rad/s) | +| `max_acceleration` | Max acceleration (rad/s²) | +| `joint_name_mapping` | Coordinator → URDF name mapping | +| `coordinator_task_name` | Task name for execution RPC | +| `package_paths` | ROS package paths for meshes | +| `xacro_args` | Xacro arguments (e.g., `{"dof": "7"}`) | + +## Components + +### Planners (Backend-Agnostic) + +| Planner | Description | +|---------|-------------| +| `RRTConnectPlanner` | Bi-directional RRT-Connect (fast, 
reliable) | + +### IK Solvers + +| Solver | Type | Description | +|--------|------|-------------| +| `JacobianIK` | Backend-agnostic | Iterative damped least-squares | +| `DrakeOptimizationIK` | Drake-specific | Full nonlinear optimization | + +### World Backends + +| Backend | Description | +|---------|-------------| +| `DrakeWorld` | Drake physics with Meshcat visualization | + +## Blueprints + +| Blueprint | Description | +|-----------|-------------| +| `xarm6_planner_only` | XArm 6-DOF standalone (no coordinator) | +| `xarm7-planner-coordinator` | XArm 7-DOF with coordinator | +| `dual-xarm6-planner` | Dual XArm 6-DOF | + +## Directory Structure + +``` +planning/ +├── spec.py # Protocols (WorldSpec, KinematicsSpec, PlannerSpec) +├── factory.py # create_world, create_kinematics, create_planner +├── world/ +│ └── drake_world.py # DrakeWorld implementation +├── kinematics/ +│ ├── jacobian_ik.py # Backend-agnostic Jacobian IK +│ └── drake_optimization_ik.py # Drake nonlinear IK +├── planners/ +│ └── rrt_planner.py # RRTConnectPlanner +├── monitor/ # WorldMonitor (live state sync) +├── trajectory_generator/ # Time-parameterized trajectories +└── examples/ + ├── planning_tester.py # Standalone CLI tester + └── manipulation_client.py # IPython RPC client +``` + +## Obstacle Types + +| Type | Dimensions | +|------|------------| +| `BOX` | (width, height, depth) | +| `SPHERE` | (radius,) | +| `CYLINDER` | (radius, height) | +| `MESH` | mesh_path | + +## Supported Robots + +| Robot | DOF | +|-------|-----| +| `piper` | 6 | +| `xarm6` | 6 | +| `xarm7` | 7 | + +## Testing + +```bash +# Unit tests (fast, no Drake) +pytest dimos/manipulation/test_manipulation_unit.py -v + +# Integration tests (requires Drake) +pytest dimos/e2e_tests/test_manipulation_module.py -v +``` diff --git a/dimos/manipulation/planning/__init__.py b/dimos/manipulation/planning/__init__.py index d197980a96..8aaf0caa25 100644 --- a/dimos/manipulation/planning/__init__.py +++ 
b/dimos/manipulation/planning/__init__.py @@ -15,11 +15,70 @@ """ Manipulation Planning Module -Trajectory generation and motion planning for robotic manipulators. -""" +Motion planning stack for robotic manipulators using Protocol-based architecture. + +## Architecture + +- WorldSpec: Core backend owning physics/collision (DrakeWorld, future: MuJoCoWorld) +- KinematicsSpec: IK solvers + - JacobianIK: Backend-agnostic iterative/differential IK + - DrakeOptimizationIK: Drake-specific nonlinear optimization IK +- PlannerSpec: Backend-agnostic joint-space path planning + - RRTConnectPlanner: Bi-directional RRT-Connect + - RRTStarPlanner: RRT* (asymptotically optimal) + +## Factory Functions -from dimos.manipulation.planning.trajectory_generator.joint_trajectory_generator import ( - JointTrajectoryGenerator, +Use factory functions to create components: + +```python +from dimos.manipulation.planning.factory import ( + create_world, + create_kinematics, + create_planner, ) -__all__ = ["JointTrajectoryGenerator"] +world = create_world(backend="drake", enable_viz=True) +kinematics = create_kinematics(name="jacobian") # or "drake_optimization" +planner = create_planner(name="rrt_connect") # backend-agnostic +``` + +## Monitors + +Use WorldMonitor for reactive state synchronization: + +```python +from dimos.manipulation.planning.monitor import WorldMonitor + +monitor = WorldMonitor(enable_viz=True) +robot_id = monitor.add_robot(config) +monitor.finalize() +monitor.start_state_monitor(robot_id) +``` +""" + +import lazy_loader as lazy + +__getattr__, __dir__, __all__ = lazy.attach( + __name__, + submod_attrs={ + "factory": ["create_kinematics", "create_planner", "create_planning_stack", "create_world"], + "spec": [ + "CollisionObjectMessage", + "IKResult", + "IKStatus", + "JointPath", + "KinematicsSpec", + "Obstacle", + "ObstacleType", + "PlannerSpec", + "PlanningResult", + "PlanningStatus", + "RobotModelConfig", + "RobotName", + "WorldRobotID", + "WorldSpec", + ], + 
"trajectory_generator.joint_trajectory_generator": ["JointTrajectoryGenerator"], + }, +) diff --git a/dimos/manipulation/planning/examples/__init__.py b/dimos/manipulation/planning/examples/__init__.py new file mode 100644 index 0000000000..7971835dab --- /dev/null +++ b/dimos/manipulation/planning/examples/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2025 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Manipulation planning examples. +""" diff --git a/dimos/manipulation/planning/factory.py b/dimos/manipulation/planning/factory.py new file mode 100644 index 0000000000..d392bac563 --- /dev/null +++ b/dimos/manipulation/planning/factory.py @@ -0,0 +1,91 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Factory functions for manipulation planning components.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from dimos.manipulation.planning.spec import ( + KinematicsSpec, + PlannerSpec, + WorldSpec, + ) + + +def create_world( + backend: str = "drake", + enable_viz: bool = False, + **kwargs: Any, +) -> WorldSpec: + """Create a world instance. backend='drake', enable_viz for Meshcat.""" + if backend == "drake": + from dimos.manipulation.planning.world.drake_world import DrakeWorld + + return DrakeWorld(enable_viz=enable_viz, **kwargs) + else: + raise ValueError(f"Unknown backend: {backend}. Available: ['drake']") + + +def create_kinematics( + name: str = "jacobian", + **kwargs: Any, +) -> KinematicsSpec: + """Create IK solver. name='jacobian'|'drake_optimization'.""" + if name == "jacobian": + from dimos.manipulation.planning.kinematics.jacobian_ik import JacobianIK + + return JacobianIK(**kwargs) + elif name == "drake_optimization": + from dimos.manipulation.planning.kinematics.drake_optimization_ik import ( + DrakeOptimizationIK, + ) + + return DrakeOptimizationIK(**kwargs) + else: + raise ValueError( + f"Unknown kinematics solver: {name}. Available: ['jacobian', 'drake_optimization']" + ) + + +def create_planner( + name: str = "rrt_connect", + **kwargs: Any, +) -> PlannerSpec: + """Create motion planner. name='rrt_connect'.""" + if name == "rrt_connect": + from dimos.manipulation.planning.planners.rrt_planner import RRTConnectPlanner + + return RRTConnectPlanner(**kwargs) + else: + raise ValueError(f"Unknown planner: {name}. Available: ['rrt_connect']") + + +def create_planning_stack( + robot_config: Any, + enable_viz: bool = False, + planner_name: str = "rrt_connect", + kinematics_name: str = "jacobian", +) -> tuple[WorldSpec, KinematicsSpec, PlannerSpec, str]: + """Create complete planning stack. 
Returns (world, kinematics, planner, robot_id).""" + world = create_world(backend="drake", enable_viz=enable_viz) + kinematics = create_kinematics(name=kinematics_name) + planner = create_planner(name=planner_name) + + robot_id = world.add_robot(robot_config) + world.finalize() + + return world, kinematics, planner, robot_id diff --git a/dimos/manipulation/planning/kinematics/__init__.py b/dimos/manipulation/planning/kinematics/__init__.py new file mode 100644 index 0000000000..dacd2007cb --- /dev/null +++ b/dimos/manipulation/planning/kinematics/__init__.py @@ -0,0 +1,51 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Kinematics Module + +Contains IK solver implementations that use WorldSpec. 
+ +## Implementations + +- JacobianIK: Backend-agnostic iterative/differential IK (works with any WorldSpec) +- DrakeOptimizationIK: Drake-specific nonlinear optimization IK (requires DrakeWorld) + +## Usage + +Use factory functions to create IK solvers: + +```python +from dimos.manipulation.planning.factory import create_kinematics + +# Backend-agnostic (works with any WorldSpec) +kinematics = create_kinematics(name="jacobian") + +# Drake-specific (requires DrakeWorld, more accurate) +kinematics = create_kinematics(name="drake_optimization") + +result = kinematics.solve(world, robot_id, target_pose) +``` +""" + +from dimos.manipulation.planning.kinematics.drake_optimization_ik import ( + DrakeOptimizationIK, +) +from dimos.manipulation.planning.kinematics.jacobian_ik import JacobianIK +from dimos.manipulation.planning.kinematics.pinocchio_ik import ( + PinocchioIK, + PinocchioIKConfig, +) + +__all__ = ["DrakeOptimizationIK", "JacobianIK", "PinocchioIK", "PinocchioIKConfig"] diff --git a/dimos/manipulation/planning/kinematics/drake_optimization_ik.py b/dimos/manipulation/planning/kinematics/drake_optimization_ik.py new file mode 100644 index 0000000000..1e6b1962a5 --- /dev/null +++ b/dimos/manipulation/planning/kinematics/drake_optimization_ik.py @@ -0,0 +1,269 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Drake optimization-based IK using SNOPT/IPOPT. 
Requires DrakeWorld.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import numpy as np + +from dimos.manipulation.planning.spec import IKResult, IKStatus, WorldRobotID, WorldSpec +from dimos.manipulation.planning.utils.kinematics_utils import compute_pose_error +from dimos.msgs.geometry_msgs import PoseStamped, Transform +from dimos.msgs.sensor_msgs import JointState +from dimos.utils.logging_config import setup_logger +from dimos.utils.transform_utils import pose_to_matrix + +if TYPE_CHECKING: + from numpy.typing import NDArray + +try: + from pydrake.math import RigidTransform, RotationMatrix # type: ignore[import-not-found] + from pydrake.multibody.inverse_kinematics import ( # type: ignore[import-not-found] + InverseKinematics, + ) + from pydrake.solvers import Solve # type: ignore[import-not-found] + + DRAKE_AVAILABLE = True +except ImportError: + DRAKE_AVAILABLE = False + +logger = setup_logger() + + +class DrakeOptimizationIK: + """Drake optimization-based IK solver using constrained nonlinear optimization. + + Requires DrakeWorld. For backend-agnostic IK, use JacobianIK. + """ + + def __init__(self) -> None: + if not DRAKE_AVAILABLE: + raise ImportError("Drake is not installed. 
Install with: pip install drake") + + def _validate_world(self, world: WorldSpec) -> IKResult | None: + from dimos.manipulation.planning.world.drake_world import DrakeWorld + + if not isinstance(world, DrakeWorld): + return _create_failure_result( + IKStatus.NO_SOLUTION, "DrakeOptimizationIK requires DrakeWorld" + ) + if not world.is_finalized: + return _create_failure_result(IKStatus.NO_SOLUTION, "World must be finalized before IK") + return None + + def solve( + self, + world: WorldSpec, + robot_id: WorldRobotID, + target_pose: PoseStamped, + seed: JointState | None = None, + position_tolerance: float = 0.001, + orientation_tolerance: float = 0.01, + check_collision: bool = True, + max_attempts: int = 10, + ) -> IKResult: + """Solve IK with multiple random restarts, returning the best collision-free solution.""" + error = self._validate_world(world) + if error is not None: + return error + + # Convert PoseStamped to 4x4 matrix via Transform + target_matrix = Transform( + translation=target_pose.position, + rotation=target_pose.orientation, + ).to_matrix() + + # Get joint limits + lower_limits, upper_limits = world.get_joint_limits(robot_id) + + # Get seed from current state if not provided + if seed is None: + with world.scratch_context() as ctx: + seed = world.get_joint_state(ctx, robot_id) + + # Extract joint names and seed positions + joint_names = seed.name + seed_positions = np.array(seed.position, dtype=np.float64) + + # Target transform + target_transform = RigidTransform(target_matrix) + + best_result: IKResult | None = None + best_error = float("inf") + + for attempt in range(max_attempts): + # Generate seed positions + if attempt == 0: + current_seed = seed_positions + else: + # Random seed within joint limits + current_seed = np.random.uniform(lower_limits, upper_limits) + + # Solve IK + result = self._solve_single( + world=world, + robot_id=robot_id, + target_transform=target_transform, + seed=current_seed, + joint_names=joint_names, + 
position_tolerance=position_tolerance, + orientation_tolerance=orientation_tolerance, + lower_limits=lower_limits, + upper_limits=upper_limits, + ) + + if result.is_success() and result.joint_state is not None: + # Check collision if requested + if check_collision: + if not world.check_config_collision_free(robot_id, result.joint_state): + continue # Try another seed + + # Check error + total_error = result.position_error + result.orientation_error + if total_error < best_error: + best_error = total_error + best_result = result + + # If error is within tolerance, we're done + if ( + result.position_error <= position_tolerance + and result.orientation_error <= orientation_tolerance + ): + return result + + if best_result is not None: + return best_result + + return _create_failure_result( + IKStatus.NO_SOLUTION, + f"IK failed after {max_attempts} attempts", + ) + + def _solve_single( + self, + world: WorldSpec, + robot_id: WorldRobotID, + target_transform: RigidTransform, + seed: NDArray[np.float64], + joint_names: list[str], + position_tolerance: float, + orientation_tolerance: float, + lower_limits: NDArray[np.float64], + upper_limits: NDArray[np.float64], + ) -> IKResult: + # Get robot data from world internals (Drake-specific access) + robot_data = world._robots[robot_id] # type: ignore[attr-defined] + plant = world.plant # type: ignore[attr-defined] + + # Create IK problem + ik = InverseKinematics(plant) + + # Get end-effector frame + ee_frame = robot_data.ee_frame + + # Add position constraint + ik.AddPositionConstraint( + frameB=ee_frame, + p_BQ=np.array([0.0, 0.0, 0.0]), # type: ignore[arg-type] + frameA=plant.world_frame(), + p_AQ_lower=target_transform.translation() - np.array([position_tolerance] * 3), + p_AQ_upper=target_transform.translation() + np.array([position_tolerance] * 3), + ) + + # Add orientation constraint + ik.AddOrientationConstraint( + frameAbar=plant.world_frame(), + R_AbarA=target_transform.rotation(), + frameBbar=ee_frame, + 
R_BbarB=RotationMatrix(), + theta_bound=orientation_tolerance, + ) + + # Get program and set initial guess + prog = ik.get_mutable_prog() + q = ik.q() + + # Set initial guess (full positions vector) + full_seed = np.zeros(plant.num_positions()) + for i, joint_idx in enumerate(robot_data.joint_indices): + full_seed[joint_idx] = seed[i] + prog.SetInitialGuess(q, full_seed) + + # Solve + result = Solve(prog) + + if not result.is_success(): + return _create_failure_result( + IKStatus.NO_SOLUTION, + f"Optimization failed: {result.get_solution_result()}", + ) + + # Extract solution for this robot's joints + full_solution = result.GetSolution(q) + joint_solution = np.array([full_solution[idx] for idx in robot_data.joint_indices]) + + # Clip to limits + joint_solution = np.clip(joint_solution, lower_limits, upper_limits) + + # Compute actual error using FK + solution_state = JointState(name=joint_names, position=joint_solution.tolist()) + with world.scratch_context() as ctx: + world.set_joint_state(ctx, robot_id, solution_state) + actual_pose = world.get_ee_pose(ctx, robot_id) + + position_error, orientation_error = compute_pose_error( + pose_to_matrix(actual_pose), + target_transform.GetAsMatrix4(), # type: ignore[arg-type] + ) + + return _create_success_result( + joint_names=joint_names, + joint_positions=joint_solution, + position_error=position_error, + orientation_error=orientation_error, + iterations=1, + ) + + +def _create_success_result( + joint_names: list[str], + joint_positions: NDArray[np.float64], + position_error: float, + orientation_error: float, + iterations: int, +) -> IKResult: + return IKResult( + status=IKStatus.SUCCESS, + joint_state=JointState(name=joint_names, position=joint_positions.tolist()), + position_error=position_error, + orientation_error=orientation_error, + iterations=iterations, + message="IK solution found", + ) + + +def _create_failure_result( + status: IKStatus, + message: str, + iterations: int = 0, +) -> IKResult: + return IKResult( 
+ status=status, + joint_state=None, + iterations=iterations, + message=message, + ) diff --git a/dimos/manipulation/planning/kinematics/jacobian_ik.py b/dimos/manipulation/planning/kinematics/jacobian_ik.py new file mode 100644 index 0000000000..5f80642058 --- /dev/null +++ b/dimos/manipulation/planning/kinematics/jacobian_ik.py @@ -0,0 +1,430 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Backend-agnostic Jacobian-based inverse kinematics. + +JacobianIK provides iterative and differential IK methods that work with any +WorldSpec implementation. It only uses the standard WorldSpec interface methods +(get_jacobian, get_ee_pose, get_joint_limits) and doesn't depend on any specific +physics backend. + +For full nonlinear optimization IK with Drake, use DrakeOptimizationIK. 
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import numpy as np + +from dimos.manipulation.planning.spec import IKResult, IKStatus, WorldRobotID, WorldSpec +from dimos.manipulation.planning.utils.kinematics_utils import ( + check_singularity, + compute_error_twist, + compute_pose_error, + damped_pseudoinverse, +) +from dimos.utils.logging_config import setup_logger +from dimos.utils.transform_utils import pose_to_matrix + +if TYPE_CHECKING: + from numpy.typing import NDArray + +from dimos.msgs.geometry_msgs import PoseStamped, Transform, Twist, Vector3 +from dimos.msgs.sensor_msgs import JointState + +logger = setup_logger() + + +class JacobianIK: + """Backend-agnostic Jacobian-based IK solver. + + This class provides iterative and differential IK methods using only + the standard WorldSpec interface. It works with any physics backend + (Drake, MuJoCo, PyBullet, etc.). + + Methods: + - solve_iterative(): Iterative Jacobian-based IK until convergence + - solve_differential(): Single Jacobian step for velocity control + - solve_differential_position_only(): Position-only differential IK + - solve(): Wrapper for solve_iterative with multiple random restarts + + Example: + ik = JacobianIK(damping=0.01) + result = ik.solve_iterative( + world, robot_id, + target_pose=target, + seed=current_joints, + ) + if result.is_success(): + print(f"Solution: {result.joint_positions}") + """ + + def __init__( + self, + damping: float = 0.05, + max_iterations: int = 200, + singularity_threshold: float = 1e-6, + ): + """Create Jacobian IK solver. 

        Args:
            damping: Damping factor for pseudoinverse (higher = more stable near singularities)
            max_iterations: Default maximum iterations for iterative IK
            singularity_threshold: Manipulability threshold for singularity detection
        """
        self._damping = damping
        self._max_iterations = max_iterations
        self._singularity_threshold = singularity_threshold

    def solve(
        self,
        world: WorldSpec,
        robot_id: WorldRobotID,
        target_pose: PoseStamped,
        seed: JointState | None = None,
        position_tolerance: float = 0.001,
        orientation_tolerance: float = 0.01,
        check_collision: bool = True,
        max_attempts: int = 10,
    ) -> IKResult:
        """Solve IK with multiple random restarts.

        Tries iterative IK from multiple starting configurations to find
        a collision-free solution.

        Args:
            world: World for FK/collision checking
            robot_id: Robot to solve IK for
            target_pose: Target end-effector pose
            seed: Initial guess (uses current state if None)
            position_tolerance: Required position accuracy (meters)
            orientation_tolerance: Required orientation accuracy (radians)
            check_collision: Whether to check collision of solution
            max_attempts: Maximum random restart attempts

        Returns:
            IKResult with solution or failure status
        """
        if not world.is_finalized:
            return _create_failure_result(IKStatus.NO_SOLUTION, "World must be finalized before IK")

        lower_limits, upper_limits = world.get_joint_limits(robot_id)

        # Get seed from current state if not provided
        if seed is None:
            with world.scratch_context() as ctx:
                seed = world.get_joint_state(ctx, robot_id)

        # Extract joint names for creating random seeds
        joint_names = seed.name

        # Track the best (lowest combined error) solution seen so far, so we
        # can return a near-miss if no attempt reaches full tolerance.
        best_result: IKResult | None = None
        best_error = float("inf")

        for attempt in range(max_attempts):
            # Generate seed JointState: first attempt uses the caller's seed,
            # later attempts draw uniformly within the joint limits.
            if attempt == 0:
                current_seed = seed
            else:
                # Random seed within joint limits
                # NOTE(review): uses the global, unseeded np.random state, so
                # restart seeds are nondeterministic across runs — confirm
                # this is acceptable for reproducibility.
                random_positions = np.random.uniform(lower_limits, upper_limits)
                current_seed = JointState(name=joint_names, position=random_positions.tolist())

            # Solve iterative IK
            result = self.solve_iterative(
                world=world,
                robot_id=robot_id,
                target_pose=target_pose,
                seed=current_seed,
                max_iterations=self._max_iterations,
                position_tolerance=position_tolerance,
                orientation_tolerance=orientation_tolerance,
            )

            if result.is_success() and result.joint_state is not None:
                # Check collision if requested
                if check_collision:
                    if not world.check_config_collision_free(robot_id, result.joint_state):
                        continue  # Try another seed

                # Check error
                total_error = result.position_error + result.orientation_error
                if total_error < best_error:
                    best_error = total_error
                    best_result = result

                # If error is within tolerance, we're done
                if (
                    result.position_error <= position_tolerance
                    and result.orientation_error <= orientation_tolerance
                ):
                    return result

        if best_result is not None:
            return best_result

        return _create_failure_result(
            IKStatus.NO_SOLUTION,
            f"IK failed after {max_attempts} attempts",
        )

    def solve_iterative(
        self,
        world: WorldSpec,
        robot_id: WorldRobotID,
        target_pose: PoseStamped,
        seed: JointState,
        max_iterations: int = 100,
        position_tolerance: float = 0.001,
        orientation_tolerance: float = 0.01,
    ) -> IKResult:
        """Iterative Jacobian-based IK until convergence.

        Uses the damped pseudoinverse method with adaptive step size.
        Converges when both position and orientation errors are within tolerance.

        Args:
            world: World for FK/Jacobian computation
            robot_id: Robot to solve IK for
            target_pose: Target end-effector pose
            seed: Initial joint configuration
            max_iterations: Maximum iterations before giving up
            position_tolerance: Required position accuracy (meters)
            orientation_tolerance: Required orientation accuracy (radians)

        Returns:
            IKResult with solution or failure status
        """
        # Convert to internal representation
        target_matrix = Transform(
            translation=target_pose.position,
            rotation=target_pose.orientation,
        ).to_matrix()
        current_joints = np.array(seed.position, dtype=np.float64)
        joint_names = seed.name

        # NOTE(review): a caller-passed max_iterations of 0 falls through to
        # the instance default because of `or` — confirm intended.
        max_iterations = max_iterations or self._max_iterations
        lower_limits, upper_limits = world.get_joint_limits(robot_id)

        for iteration in range(max_iterations):
            with world.scratch_context() as ctx:
                # Set current position (convert to JointState for API)
                current_state = JointState(name=joint_names, position=current_joints.tolist())
                world.set_joint_state(ctx, robot_id, current_state)

                # Get current pose (as matrix for error computation)
                current_pose = pose_to_matrix(world.get_ee_pose(ctx, robot_id))

                # Compute error
                pos_error, ori_error = compute_pose_error(current_pose, target_matrix)

                # Check convergence
                if pos_error <= position_tolerance and ori_error <= orientation_tolerance:
                    return _create_success_result(
                        joint_names=joint_names,
                        joint_positions=current_joints,
                        position_error=pos_error,
                        orientation_error=ori_error,
                        iterations=iteration + 1,
                    )

                # Compute twist to reduce error
                twist = compute_error_twist(current_pose, target_matrix, gain=0.5)

                # Get Jacobian
                J = world.get_jacobian(ctx, robot_id)

                # Adaptive damping near singularities
                if check_singularity(J, threshold=self._singularity_threshold):
                    # Increase damping near singularity instead of failing
                    effective_damping = self._damping * 10.0
                else:
                    effective_damping = self._damping

                # Compute joint velocities
                J_pinv = damped_pseudoinverse(J, effective_damping)
                q_dot = J_pinv @ twist

                # Clamp maximum joint change per iteration (like reference implementations)
                max_delta = 0.1  # radians per iteration
                max_change = np.max(np.abs(q_dot))
                if max_change > max_delta:
                    q_dot = q_dot * (max_delta / max_change)

            current_joints = current_joints + q_dot

            # Clip to limits
            current_joints = np.clip(current_joints, lower_limits, upper_limits)

        # Compute final error
        with world.scratch_context() as ctx:
            final_state = JointState(name=joint_names, position=current_joints.tolist())
            world.set_joint_state(ctx, robot_id, final_state)
            final_pose = pose_to_matrix(world.get_ee_pose(ctx, robot_id))
            pos_error, ori_error = compute_pose_error(final_pose, target_matrix)

        return _create_failure_result(
            IKStatus.NO_SOLUTION,
            f"Did not converge after {max_iterations} iterations (pos_err={pos_error:.4f}, ori_err={ori_error:.4f})",
            iterations=max_iterations,
        )

    def solve_differential(
        self,
        world: WorldSpec,
        robot_id: WorldRobotID,
        current_joints: JointState,
        twist: Twist,
        dt: float,
    ) -> JointState | None:
        """Single Jacobian step for velocity control.

        Computes joint velocities from desired end-effector twist using
        the damped pseudoinverse method. Returns None if near singularity.

        Args:
            world: World for Jacobian computation
            robot_id: Robot to compute for
            current_joints: Current joint configuration
            twist: Desired end-effector twist (linear + angular velocity)
            dt: Time step (not used, but kept for interface compatibility)

        Returns:
            JointState with velocities, or None if near singularity
        """
        # Convert Twist to 6D array [vx, vy, vz, wx, wy, wz]
        twist_array = np.array(
            [
                twist.linear.x,
                twist.linear.y,
                twist.linear.z,
                twist.angular.x,
                twist.angular.y,
                twist.angular.z,
            ],
            dtype=np.float64,
        )

        joint_names = current_joints.name
        with world.scratch_context() as ctx:
            world.set_joint_state(ctx, robot_id, current_joints)
            J = world.get_jacobian(ctx, robot_id)

        # Check for singularity
        if check_singularity(J, threshold=self._singularity_threshold):
            logger.warning("Near singularity in differential IK")
            return None

        # Compute damped pseudoinverse
        J_pinv = damped_pseudoinverse(J, self._damping)

        # Compute joint velocities
        q_dot = J_pinv @ twist_array

        # Apply velocity limits if available
        config = world.get_robot_config(robot_id)
        if config.velocity_limits is not None:
            velocity_limits = np.array(config.velocity_limits)
            # Only consider joints with non-zero velocity limits
            nonzero_mask = velocity_limits > 0
            if np.any(nonzero_mask):
                # Uniformly scale down so the worst-offending joint stays at its limit.
                max_ratio = np.max(np.abs(q_dot[nonzero_mask]) / velocity_limits[nonzero_mask])
                if max_ratio > 1.0:
                    q_dot = q_dot / max_ratio

        return JointState(name=joint_names, velocity=q_dot.tolist())

    def solve_differential_position_only(
        self,
        world: WorldSpec,
        robot_id: WorldRobotID,
        current_joints: JointState,
        linear_velocity: Vector3,
    ) -> JointState | None:
        """Position-only differential IK using linear Jacobian.

        Computes joint velocities from desired linear velocity, ignoring
        orientation. Returns None if near singularity.

        Args:
            world: World for Jacobian computation
            robot_id: Robot to compute for
            current_joints: Current joint configuration
            linear_velocity: Desired linear velocity

        Returns:
            JointState with velocities, or None if singular
        """
        # Convert Vector3 to array
        vel_array = np.array(
            [linear_velocity.x, linear_velocity.y, linear_velocity.z], dtype=np.float64
        )

        joint_names = current_joints.name
        with world.scratch_context() as ctx:
            world.set_joint_state(ctx, robot_id, current_joints)
            J = world.get_jacobian(ctx, robot_id)

        # Extract linear part (first 3 rows)
        J_linear = J[:3, :]

        # Check for singularity
        # Yoshikawa manipulability for the translational sub-Jacobian.
        JJT = J_linear @ J_linear.T
        manipulability = np.sqrt(max(0, np.linalg.det(JJT)))
        if manipulability < self._singularity_threshold:
            return None

        # Compute damped pseudoinverse
        I = np.eye(3)
        J_pinv = J_linear.T @ np.linalg.inv(JJT + self._damping**2 * I)

        # Compute joint velocities
        q_dot = J_pinv @ vel_array
        return JointState(name=joint_names, velocity=q_dot.tolist())


# ============= Result Helpers =============


def _create_success_result(
    joint_names: list[str],
    joint_positions: NDArray[np.float64],
    position_error: float,
    orientation_error: float,
    iterations: int,
) -> IKResult:
    """Create a successful IK result."""
    return IKResult(
        status=IKStatus.SUCCESS,
        joint_state=JointState(name=joint_names, position=joint_positions.tolist()),
        position_error=position_error,
        orientation_error=orientation_error,
        iterations=iterations,
        message="IK solution found",
    )


def _create_failure_result(
    status: IKStatus,
    message: str,
    iterations: int = 0,
) -> IKResult:
    """Create a failed IK result."""
    return IKResult(
        status=status,
        joint_state=None,
        iterations=iterations,
        message=message,
    )
diff --git a/dimos/manipulation/planning/kinematics/pinocchio_ik.py b/dimos/manipulation/planning/kinematics/pinocchio_ik.py
new file mode 100644
index 0000000000..4224dda556
--- /dev/null
+++ b/dimos/manipulation/planning/kinematics/pinocchio_ik.py
@@ -0,0 +1,291 @@
# Copyright 2025-2026 Dimensional Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Pinocchio-based inverse kinematics solver.

Standalone IK solver using Pinocchio for forward kinematics and Jacobian
computation. Uses damped least-squares (Levenberg-Marquardt) for robust
convergence near singularities.

Unlike JacobianIK (which uses the WorldSpec interface), this solver operates
directly on a Pinocchio model. This makes it suitable for lightweight,
real-time IK in control tasks where a full WorldSpec is not needed.

Usage:
    >>> from dimos.manipulation.planning.kinematics.pinocchio_ik import PinocchioIK
    >>> ik = PinocchioIK.from_model_path("robot.urdf", ee_joint_id=6)
    >>> q_solution, converged, error = ik.solve(target_se3, q_init)
    >>> ee_pose = ik.forward_kinematics(q_solution)
"""

from __future__ import annotations

from dataclasses import dataclass
from pathlib import Path
from typing import TYPE_CHECKING, Any

import numpy as np
from numpy.linalg import norm, solve
import pinocchio  # type: ignore[import-untyped]

from dimos.utils.logging_config import setup_logger

if TYPE_CHECKING:
    from numpy.typing import NDArray

    from dimos.msgs.geometry_msgs import Pose, PoseStamped

logger = setup_logger()


# =============================================================================
# Configuration
# =============================================================================


@dataclass
class PinocchioIKConfig:
    """Configuration for the Pinocchio IK solver.

    Attributes:
        max_iter: Maximum IK solver iterations
        eps: Convergence threshold (SE3 log-error norm)
        damp: Damping factor for singularity handling (higher = more stable)
        dt: Integration step size
        max_velocity: Max joint velocity per iteration (rad/s), clamps near singularities
    """

    max_iter: int = 100
    eps: float = 1e-4
    damp: float = 1e-2
    dt: float = 1.0
    max_velocity: float = 10.0


# =============================================================================
# PinocchioIK Solver
# =============================================================================


class PinocchioIK:
    """Pinocchio-based damped least-squares IK solver.

    Loads a URDF or MJCF model and provides:
    - solve(): Damped least-squares IK from SE3 target
    - forward_kinematics(): FK from joint angles to EE pose

    Thread safety: NOT thread-safe. Each caller should use its own instance
    or protect calls with an external lock. Control tasks typically hold a
    lock around compute() which covers IK calls.

    Example:
        >>> ik = PinocchioIK.from_model_path("robot.urdf", ee_joint_id=6)
        >>> target = pose_to_se3(pose_stamped)
        >>> q, converged, err = ik.solve(target, q_current)
        >>> if converged:
        ...     ee = ik.forward_kinematics(q)
    """

    def __init__(
        self,
        model: pinocchio.Model,
        data: pinocchio.Data,
        ee_joint_id: int,
        config: PinocchioIKConfig | None = None,
    ) -> None:
        """Initialize solver with an existing Pinocchio model.

        Args:
            model: Pinocchio model
            data: Pinocchio data (created from model)
            ee_joint_id: End-effector joint ID in the kinematic chain
            config: Solver configuration (uses defaults if None)
        """
        self._model = model
        self._data = data
        self._ee_joint_id = ee_joint_id
        self._config = config or PinocchioIKConfig()

    @classmethod
    def from_model_path(
        cls,
        model_path: str | Path,
        ee_joint_id: int,
    ) -> PinocchioIK:
        """Create solver by loading a URDF or MJCF file.

        Args:
            model_path: Path to URDF (.urdf) or MJCF (.xml) file
            ee_joint_id: End-effector joint ID in the kinematic chain

        Returns:
            Configured PinocchioIK instance

        Raises:
            FileNotFoundError: If model file doesn't exist
        """
        path = Path(str(model_path))
        if not path.exists():
            raise FileNotFoundError(f"Model file not found: {path}")

        # NOTE(review): only the ".xml" suffix selects the MJCF loader; every
        # other suffix is treated as URDF — confirm no other MJCF extensions
        # (e.g. ".mjcf") are expected.
        if path.suffix == ".xml":
            model = pinocchio.buildModelFromMJCF(str(path))
        else:
            model = pinocchio.buildModelFromUrdf(str(path))

        data = model.createData()
        return cls(model, data, ee_joint_id)

    @property
    def model(self) -> pinocchio.Model:
        """The Pinocchio model."""
        return self._model

    @property
    def nq(self) -> int:
        """Number of configuration variables (DOF)."""
        return int(self._model.nq)

    @property
    def ee_joint_id(self) -> int:
        """End-effector joint ID."""
        return self._ee_joint_id

    # =========================================================================
    # Core IK
    # =========================================================================

    def solve(
        self,
        target_pose: pinocchio.SE3,
        q_init: NDArray[np.floating[Any]],
        config: PinocchioIKConfig | None = None,
    ) -> tuple[NDArray[np.floating[Any]], bool, float]:
        """Solve IK using damped least-squares (Levenberg-Marquardt).

        Args:
            target_pose: Target end-effector pose as SE3
            q_init: Initial joint configuration (warm-start)
            config: Override solver config for this call (uses instance config if None)

        Returns:
            Tuple of (joint_angles, converged, final_error)
        """
        cfg = config or self._config
        # Copy so the caller's warm-start array is never mutated.
        q = q_init.copy()
        final_err = float("inf")

        for _ in range(cfg.max_iter):
            pinocchio.forwardKinematics(self._model, self._data, q)
            # Error transform from current EE frame to the target.
            iMd = self._data.oMi[self._ee_joint_id].actInv(target_pose)

            # 6D log-map error in the local frame; converged when its norm is small.
            err = pinocchio.log(iMd).vector
            final_err = float(norm(err))
            if final_err < cfg.eps:
                return q, True, final_err

            # Damped least-squares update (follows the Pinocchio CLIK example).
            J = pinocchio.computeJointJacobian(self._model, self._data, q, self._ee_joint_id)
            J = -np.dot(pinocchio.Jlog6(iMd.inverse()), J)
            v = -J.T.dot(solve(J.dot(J.T) + cfg.damp * np.eye(6), err))

            # Clamp velocity to prevent explosion near singularities
            v_norm = norm(v)
            if v_norm > cfg.max_velocity:
                v = v * (cfg.max_velocity / v_norm)

            # Integrate on the configuration manifold (handles continuous joints).
            q = pinocchio.integrate(self._model, q, v * cfg.dt)

        return q, False, final_err

    # =========================================================================
    # Forward Kinematics
    # =========================================================================

    def forward_kinematics(self, joint_positions: NDArray[np.floating[Any]]) -> pinocchio.SE3:
        """Compute end-effector pose from joint positions.

        Args:
            joint_positions: Joint angles in radians

        Returns:
            End-effector pose as SE3
        """
        pinocchio.forwardKinematics(self._model, self._data, joint_positions)
        # Copy so later FK calls don't mutate the returned pose.
        return self._data.oMi[self._ee_joint_id].copy()


# =============================================================================
# Pose Conversion Helpers
# =============================================================================


def pose_to_se3(pose: Pose | PoseStamped) -> pinocchio.SE3:
    """Convert Pose or PoseStamped to pinocchio SE3."""

    # NOTE(review): reads pose.x/y/z directly while other modules use
    # pose.position — confirm dimos Pose/PoseStamped expose flat x/y/z.
    position = np.array([pose.x, pose.y, pose.z])
    quat = pose.orientation
    # Eigen/Pinocchio quaternion constructor takes (w, x, y, z).
    rotation = pinocchio.Quaternion(quat.w, quat.x, quat.y, quat.z).toRotationMatrix()
    return pinocchio.SE3(rotation, position)


# =============================================================================
# Safety Utilities
# =============================================================================


def check_joint_delta(
    q_new: NDArray[np.floating[Any]],
    q_current: NDArray[np.floating[Any]],
    max_delta_deg: float,
) -> bool:
    """Check if joint position change is within safety limits.

    Args:
        q_new: Proposed joint positions (radians)
        q_current: Current joint positions (radians)
        max_delta_deg: Maximum allowed change per joint (degrees)

    Returns:
        True if all joint deltas are within limits
    """
    max_delta_rad = np.radians(max_delta_deg)
    joint_deltas = np.abs(q_new - q_current)
    return bool(np.all(joint_deltas <= max_delta_rad))


def get_worst_joint_delta(
    q_new: NDArray[np.floating[Any]],
    q_current: NDArray[np.floating[Any]],
) -> tuple[int, float]:
    """Find the joint with the largest position change.

    Args:
        q_new: Proposed joint positions (radians)
        q_current: Current joint positions (radians)

    Returns:
        Tuple of (joint_index, delta_in_degrees)
    """
    joint_deltas = np.abs(q_new - q_current)
    worst_idx = int(np.argmax(joint_deltas))
    return worst_idx, float(np.degrees(joint_deltas[worst_idx]))


__all__ = [
    "PinocchioIK",
    "PinocchioIKConfig",
    "check_joint_delta",
    "get_worst_joint_delta",
    "pose_to_se3",
]
diff --git a/dimos/manipulation/planning/monitor/__init__.py b/dimos/manipulation/planning/monitor/__init__.py
new file mode 100644
index 0000000000..c280bd4d56
--- /dev/null
+++ b/dimos/manipulation/planning/monitor/__init__.py
@@ -0,0 +1,63 @@
# Copyright 2025 Dimensional Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
World Monitor Module

Provides reactive monitoring for keeping WorldSpec synchronized with the real world.

## Components

- WorldMonitor: Top-level monitor using WorldSpec Protocol
- WorldStateMonitor: Syncs joint state to WorldSpec
- WorldObstacleMonitor: Syncs obstacles to WorldSpec

All monitors use the factory pattern and Protocol types.

## Example

```python
from dimos.manipulation.planning.monitor import WorldMonitor

monitor = WorldMonitor(enable_viz=True)
robot_id = monitor.add_robot(config)
monitor.finalize()

# Start monitoring
monitor.start_state_monitor(robot_id)
monitor.start_obstacle_monitor()

# Handle joint state messages
monitor.on_joint_state(msg, robot_id)

# Thread-safe collision checking
is_valid = monitor.is_state_valid(robot_id, q_test)
```
"""

from dimos.manipulation.planning.monitor.world_monitor import WorldMonitor
from dimos.manipulation.planning.monitor.world_obstacle_monitor import (
    WorldObstacleMonitor,
)
from dimos.manipulation.planning.monitor.world_state_monitor import WorldStateMonitor

# Re-export message types from spec for convenience
from dimos.manipulation.planning.spec import CollisionObjectMessage

__all__ = [
    "CollisionObjectMessage",
    "WorldMonitor",
    "WorldObstacleMonitor",
    "WorldStateMonitor",
]
diff --git a/dimos/manipulation/planning/monitor/world_monitor.py b/dimos/manipulation/planning/monitor/world_monitor.py
new file mode 100644
index 0000000000..33017957dc
--- /dev/null
+++ b/dimos/manipulation/planning/monitor/world_monitor.py
@@ -0,0 +1,483 @@
# Copyright 2025-2026 Dimensional Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+ +"""World Monitor - keeps WorldSpec synchronized with real robot state and obstacles.""" + +from __future__ import annotations + +from contextlib import contextmanager +import threading +from typing import TYPE_CHECKING, Any + +from dimos.manipulation.planning.factory import create_world +from dimos.manipulation.planning.monitor.world_obstacle_monitor import WorldObstacleMonitor +from dimos.manipulation.planning.monitor.world_state_monitor import WorldStateMonitor +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.msgs.sensor_msgs import JointState +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from collections.abc import Generator + + import numpy as np + from numpy.typing import NDArray + + from dimos.manipulation.planning.spec import ( + CollisionObjectMessage, + JointPath, + Obstacle, + RobotModelConfig, + WorldRobotID, + WorldSpec, + ) + from dimos.msgs.vision_msgs import Detection3D + from dimos.perception.detection.type.detection3d.object import Object + +logger = setup_logger() + + +class WorldMonitor: + """Manages WorldSpec with state/obstacle monitors. Thread-safe via RLock.""" + + def __init__( + self, + backend: str = "drake", + enable_viz: bool = False, + **kwargs: Any, + ): + self._backend = backend + self._world: WorldSpec = create_world(backend=backend, enable_viz=enable_viz, **kwargs) + self._lock = threading.RLock() + self._robot_joints: dict[WorldRobotID, list[str]] = {} + self._state_monitors: dict[WorldRobotID, WorldStateMonitor] = {} + self._obstacle_monitor: WorldObstacleMonitor | None = None + self._viz_thread: threading.Thread | None = None + self._viz_stop_event = threading.Event() + self._viz_rate_hz: float = 10.0 + + # ============= Robot Management ============= + + def add_robot(self, config: RobotModelConfig) -> WorldRobotID: + """Add a robot. 
Returns robot_id.""" + with self._lock: + robot_id = self._world.add_robot(config) + self._robot_joints[robot_id] = config.joint_names + logger.info(f"Added robot '{config.name}' as '{robot_id}'") + return robot_id + + def get_robot_ids(self) -> list[WorldRobotID]: + """Get all robot IDs.""" + with self._lock: + return self._world.get_robot_ids() + + def get_robot_config(self, robot_id: WorldRobotID) -> RobotModelConfig: + """Get robot configuration.""" + with self._lock: + return self._world.get_robot_config(robot_id) + + def get_joint_limits( + self, robot_id: WorldRobotID + ) -> tuple[NDArray[np.float64], NDArray[np.float64]]: + """Get joint limits for a robot.""" + with self._lock: + return self._world.get_joint_limits(robot_id) + + # ============= Obstacle Management ============= + + def add_obstacle(self, obstacle: Obstacle) -> str: + """Add an obstacle. Returns obstacle_id.""" + with self._lock: + return self._world.add_obstacle(obstacle) + + def remove_obstacle(self, obstacle_id: str) -> bool: + """Remove an obstacle.""" + with self._lock: + return self._world.remove_obstacle(obstacle_id) + + def clear_obstacles(self) -> None: + """Remove all obstacles.""" + with self._lock: + self._world.clear_obstacles() + + # ============= Monitor Control ============= + + def start_state_monitor( + self, + robot_id: WorldRobotID, + joint_names: list[str] | None = None, + joint_name_mapping: dict[str, str] | None = None, + ) -> None: + """Start monitoring joint states. 
Uses config defaults if args are None.""" + with self._lock: + if robot_id in self._state_monitors: + logger.warning(f"State monitor for '{robot_id}' already started") + return + + # Get config for defaults + config = self._world.get_robot_config(robot_id) + + # Get joint names from config if not provided + if joint_names is None: + if robot_id in self._robot_joints: + joint_names = self._robot_joints[robot_id] + else: + joint_names = config.joint_names + + # Get joint name mapping from config if not provided + if joint_name_mapping is None and config.joint_name_mapping: + joint_name_mapping = config.joint_name_mapping + + monitor = WorldStateMonitor( + world=self._world, + lock=self._lock, + robot_id=robot_id, + joint_names=joint_names, + joint_name_mapping=joint_name_mapping, + ) + monitor.start() + self._state_monitors[robot_id] = monitor + logger.info(f"State monitor started for '{robot_id}'") + + def start_obstacle_monitor(self) -> None: + """Start monitoring obstacle updates.""" + with self._lock: + if self._obstacle_monitor is not None: + logger.warning("Obstacle monitor already started") + return + + self._obstacle_monitor = WorldObstacleMonitor( + world=self._world, + lock=self._lock, + ) + self._obstacle_monitor.start() + logger.info("Obstacle monitor started") + + def stop_all_monitors(self) -> None: + """Stop all monitors and visualization thread.""" + # Stop visualization thread first (outside lock to avoid deadlock) + self.stop_visualization_thread() + + with self._lock: + for _robot_id, monitor in self._state_monitors.items(): + monitor.stop() + self._state_monitors.clear() + + if self._obstacle_monitor is not None: + self._obstacle_monitor.stop() + self._obstacle_monitor = None + + logger.info("All monitors stopped") + + self._world.close() + + # ============= Message Handlers ============= + + def on_joint_state(self, msg: JointState, robot_id: WorldRobotID | None = None) -> None: + """Handle joint state message. 
Broadcasts to all monitors if robot_id is None.""" + try: + if robot_id is not None: + if robot_id in self._state_monitors: + self._state_monitors[robot_id].on_joint_state(msg) + else: + logger.warning(f"No state monitor for robot_id: {robot_id}") + else: + # Broadcast to all monitors + for monitor in self._state_monitors.values(): + monitor.on_joint_state(msg) + except Exception as e: + logger.error(f"[WorldMonitor] Exception in on_joint_state: {e}") + import traceback + + logger.error(traceback.format_exc()) + + def on_collision_object(self, msg: CollisionObjectMessage) -> None: + """Handle collision object message.""" + if self._obstacle_monitor is not None: + self._obstacle_monitor.on_collision_object(msg) + + def on_detections(self, detections: list[Detection3D]) -> None: + """Handle perception detections (Detection3D from dimos.msgs.vision_msgs).""" + if self._obstacle_monitor is not None: + self._obstacle_monitor.on_detections(detections) + + def on_objects(self, objects: object) -> None: + """Handle Object detections from ObjectDB (preserves object_id).""" + if self._obstacle_monitor is not None and isinstance(objects, list): + self._obstacle_monitor.on_objects(objects) + + def refresh_obstacles(self, min_duration: float = 0.0) -> list[dict[str, Any]]: + """Refresh perception obstacles from cache. Returns list of added obstacles.""" + if self._obstacle_monitor is not None: + return self._obstacle_monitor.refresh_obstacles(min_duration) + return [] + + def clear_perception_obstacles(self) -> int: + """Remove all perception obstacles. 
Returns count removed.""" + if self._obstacle_monitor is not None: + return self._obstacle_monitor.clear_perception_obstacles() + return 0 + + def get_perception_status(self) -> dict[str, int]: + """Get perception obstacle status.""" + if self._obstacle_monitor is not None: + return self._obstacle_monitor.get_perception_status() + return {"cached": 0, "added": 0} + + def get_cached_objects(self) -> list[Object]: + """Get cached Object instances from perception.""" + if self._obstacle_monitor is not None: + return self._obstacle_monitor.get_cached_objects() + return [] + + def list_cached_detections(self) -> list[dict[str, Any]]: + """List cached detections from perception.""" + if self._obstacle_monitor is not None: + return self._obstacle_monitor.list_cached_detections() + return [] + + def list_added_obstacles(self) -> list[dict[str, Any]]: + """List perception obstacles currently in the planning world.""" + if self._obstacle_monitor is not None: + return self._obstacle_monitor.list_added_obstacles() + return [] + + # ============= State Access ============= + + def get_current_joint_state(self, robot_id: WorldRobotID) -> JointState | None: + """Get current joint state. Returns None if not yet received.""" + # Try state monitor first for positions + if robot_id in self._state_monitors: + positions = self._state_monitors[robot_id].get_current_positions() + velocities = self._state_monitors[robot_id].get_current_velocities() + if positions is not None: + joint_names = self._robot_joints.get(robot_id, []) + return JointState( + name=joint_names, + position=positions.tolist(), + velocity=velocities.tolist() if velocities is not None else [], + ) + + # Fall back to world's live context + with self._lock: + ctx = self._world.get_live_context() + return self._world.get_joint_state(ctx, robot_id) + + def get_current_velocities(self, robot_id: WorldRobotID) -> JointState | None: + """Get current joint velocities as JointState. 
Returns None if not available.""" + if robot_id in self._state_monitors: + velocities = self._state_monitors[robot_id].get_current_velocities() + if velocities is not None: + joint_names = self._robot_joints.get(robot_id, []) + return JointState(name=joint_names, velocity=velocities.tolist()) + return None + + def wait_for_state(self, robot_id: WorldRobotID, timeout: float = 1.0) -> bool: + """Wait until state is received. Returns False on timeout.""" + if robot_id in self._state_monitors: + return self._state_monitors[robot_id].wait_for_state(timeout) + return False + + def is_state_stale(self, robot_id: WorldRobotID, max_age: float = 1.0) -> bool: + """Check if state is stale.""" + if robot_id in self._state_monitors: + return self._state_monitors[robot_id].is_state_stale(max_age) + return True + + # ============= Context Management ============= + + @contextmanager + def scratch_context(self) -> Generator[Any, None, None]: + """Thread-safe scratch context for planning.""" + with self._world.scratch_context() as ctx: + yield ctx + + def get_live_context(self) -> Any: + """Get live context. Prefer scratch_context() for planning.""" + return self._world.get_live_context() + + # ============= Collision Checking ============= + + def is_state_valid(self, robot_id: WorldRobotID, joint_state: JointState) -> bool: + """Check if configuration is collision-free.""" + return self._world.check_config_collision_free(robot_id, joint_state) + + def is_path_valid( + self, robot_id: WorldRobotID, path: JointPath, step_size: float = 0.05 + ) -> bool: + """Check if path is collision-free with interpolation. 
+ + Args: + robot_id: Robot to check + path: List of JointState waypoints + step_size: Max step size for interpolation (radians) + + Returns: + True if entire path is collision-free + """ + if len(path) < 2: + return len(path) == 0 or self._world.check_config_collision_free(robot_id, path[0]) + + # Check each edge + for i in range(len(path) - 1): + if not self._world.check_edge_collision_free(robot_id, path[i], path[i + 1], step_size): + return False + + return True + + def get_min_distance(self, robot_id: WorldRobotID) -> float: + """Get minimum distance to obstacles for current state.""" + with self._world.scratch_context() as ctx: + return self._world.get_min_distance(ctx, robot_id) + + # ============= Kinematics ============= + + def get_ee_pose( + self, robot_id: WorldRobotID, joint_state: JointState | None = None + ) -> PoseStamped: + """Get end-effector pose. Uses current state if joint_state is None.""" + with self._world.scratch_context() as ctx: + # If no state provided, fetch current from state monitor + if joint_state is None: + joint_state = self.get_current_joint_state(robot_id) + + if joint_state is not None: + self._world.set_joint_state(ctx, robot_id, joint_state) + + return self._world.get_ee_pose(ctx, robot_id) + + def get_link_pose( + self, robot_id: WorldRobotID, link_name: str, joint_state: JointState | None = None + ) -> PoseStamped | None: + """Get arbitrary link pose as PoseStamped. 
+ + Args: + robot_id: Robot to query + link_name: Name of the link in the URDF + joint_state: Joint state to use (uses current if None) + """ + from dimos.msgs.geometry_msgs import Quaternion + + with self._world.scratch_context() as ctx: + if joint_state is None: + joint_state = self.get_current_joint_state(robot_id) + if joint_state is not None: + self._world.set_joint_state(ctx, robot_id, joint_state) + try: + mat = self._world.get_link_pose(ctx, robot_id, link_name) + except KeyError: + logger.warning(f"Link '{link_name}' not found in robot '{robot_id}'") + return None + + pos = mat[:3, 3] + rot = mat[:3, :3] + quat = Quaternion.from_rotation_matrix(rot) + return PoseStamped( + frame_id="world", + position=[float(pos[0]), float(pos[1]), float(pos[2])], + orientation=[float(quat.x), float(quat.y), float(quat.z), float(quat.w)], + ) + + def get_jacobian(self, robot_id: WorldRobotID, joint_state: JointState) -> NDArray[np.float64]: + """Get 6xN Jacobian matrix.""" + with self._world.scratch_context() as ctx: + self._world.set_joint_state(ctx, robot_id, joint_state) + return self._world.get_jacobian(ctx, robot_id) + + # ============= Lifecycle ============= + + def finalize(self) -> None: + """Finalize world. 
Must be called before collision checking.""" + with self._lock: + self._world.finalize() + logger.info("World finalized") + + @property + def is_finalized(self) -> bool: + """Check if world is finalized.""" + return self._world.is_finalized + + # ============= Visualization ============= + + def get_visualization_url(self) -> str | None: + """Get visualization URL or None if not enabled.""" + if hasattr(self._world, "get_visualization_url"): + url = self._world.get_visualization_url() + return str(url) if url else None + return None + + def publish_visualization(self) -> None: + """Force publish current state to visualization.""" + if hasattr(self._world, "publish_visualization"): + self._world.publish_visualization() + + def start_visualization_thread(self, rate_hz: float = 10.0) -> None: + """Start background thread for visualization updates at given rate.""" + if self._viz_thread is not None and self._viz_thread.is_alive(): + logger.warning("Visualization thread already running") + return + + if not hasattr(self._world, "publish_visualization"): + logger.warning("World does not support visualization") + return + + self._viz_rate_hz = rate_hz + self._viz_stop_event.clear() + self._viz_thread = threading.Thread( + target=self._visualization_loop, + name="MeshcatVizThread", + daemon=True, + ) + self._viz_thread.start() + logger.info(f"Visualization thread started at {rate_hz}Hz") + + def stop_visualization_thread(self) -> None: + """Stop the visualization thread.""" + if self._viz_thread is None: + return + + self._viz_stop_event.set() + self._viz_thread.join(timeout=1.0) + if self._viz_thread.is_alive(): + logger.warning("Visualization thread did not stop cleanly") + self._viz_thread = None + logger.info("Visualization thread stopped") + + def _visualization_loop(self) -> None: + """Internal: Visualization update loop.""" + import time + + period = 1.0 / self._viz_rate_hz + while not self._viz_stop_event.is_set(): + try: + if hasattr(self._world, 
"publish_visualization"): + self._world.publish_visualization() + except Exception as e: + logger.debug(f"Visualization publish failed: {e}") + time.sleep(period) + + # ============= Direct World Access ============= + + @property + def world(self) -> WorldSpec: + """Get underlying WorldSpec. Not thread-safe for modifications.""" + return self._world + + def get_state_monitor(self, robot_id: str) -> WorldStateMonitor | None: + """Get state monitor for a robot (may be None).""" + return self._state_monitors.get(robot_id) + + @property + def obstacle_monitor(self) -> WorldObstacleMonitor | None: + """Get obstacle monitor (may be None if not started).""" + return self._obstacle_monitor diff --git a/dimos/manipulation/planning/monitor/world_obstacle_monitor.py b/dimos/manipulation/planning/monitor/world_obstacle_monitor.py new file mode 100644 index 0000000000..6082ab93a9 --- /dev/null +++ b/dimos/manipulation/planning/monitor/world_obstacle_monitor.py @@ -0,0 +1,607 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +World Obstacle Monitor + +Monitors obstacle updates and applies them to a WorldSpec instance. +This is the WorldSpec-based replacement for WorldGeometryMonitor. 
+ +Example: + monitor = WorldObstacleMonitor(world, lock) + monitor.start() + monitor.on_collision_object(collision_msg) # Called by subscriber +""" + +from __future__ import annotations + +import time +from typing import TYPE_CHECKING, Any + +from dimos.manipulation.planning.spec import ( + CollisionObjectMessage, + Obstacle, + ObstacleType, +) +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from collections.abc import Callable + import threading + + from dimos.manipulation.planning.spec import WorldSpec + from dimos.msgs.vision_msgs import Detection3D + from dimos.perception.detection.type.detection3d.object import Object + +logger = setup_logger() + + +class WorldObstacleMonitor: + """Monitors world obstacles and updates WorldSpec. + + This class handles updates from: + - Explicit collision objects (CollisionObjectMessage) + - Perception detections (Detection3D from dimos.msgs.vision_msgs) + + ## Thread Safety + + All obstacle operations are protected by the provided lock. + Callbacks can be called from any thread. + + ## Comparison with WorldGeometryMonitor + + - WorldGeometryMonitor: Works with PlanningScene ABC + - WorldObstacleMonitor: Works with WorldSpec Protocol + """ + + def __init__( + self, + world: WorldSpec, + lock: threading.RLock, + detection_timeout: float = 2.0, + use_mesh_obstacles: bool = True, + ): + """Create a world obstacle monitor. 
+ + Args: + world: WorldSpec instance to update + lock: Shared lock for thread-safe access + detection_timeout: Time before removing stale detections (seconds) + use_mesh_obstacles: Use convex hull meshes from pointclouds instead of bounding boxes + """ + self._world = world + self._lock = lock + self._detection_timeout = detection_timeout + self._use_mesh_obstacles = use_mesh_obstacles + + # Track obstacles from different sources + self._collision_objects: dict[str, str] = {} # msg_id -> obstacle_id + self._perception_objects: dict[str, str] = {} # detection_id -> obstacle_id + self._perception_timestamps: dict[str, float] = {} # detection_id -> timestamp + + # Object-based cache (from ObjectDB, keyed by object_id) + # object_id -> (Object, first_seen, last_seen) + self._object_cache: dict[str, tuple[Object, float, float]] = {} + # object_id -> obstacle_id (objects currently added to Drake world) + self._object_obstacles: dict[str, str] = {} + + # Running state + self._running = False + + # Callbacks: (operation, obstacle_id, obstacle) where operation is "add"/"update"/"remove" + self._obstacle_callbacks: list[Callable[[str, str, Obstacle | None], None]] = [] + + def start(self) -> None: + """Start the obstacle monitor.""" + self._running = True + logger.info("World obstacle monitor started") + + def stop(self) -> None: + """Stop the obstacle monitor.""" + self._running = False + logger.info("World obstacle monitor stopped") + + def is_running(self) -> bool: + """Check if monitor is running.""" + return self._running + + def on_collision_object(self, msg: CollisionObjectMessage) -> None: + """Handle explicit collision object message. 
+ + Args: + msg: Collision object message + """ + if not self._running: + return + + with self._lock: + if msg.operation == "add": + self._add_collision_object(msg) + elif msg.operation == "remove": + self._remove_collision_object(msg.id) + elif msg.operation == "update": + self._update_collision_object(msg) + else: + logger.warning(f"Unknown collision object operation: {msg.operation}") + + def _add_collision_object(self, msg: CollisionObjectMessage) -> None: + """Add a collision object from message.""" + if msg.id in self._collision_objects: + logger.debug(f"Collision object '{msg.id}' already exists, updating") + self._update_collision_object(msg) + return + + obstacle = self._msg_to_obstacle(msg) + if obstacle is None: + logger.warning(f"Failed to create obstacle from message: {msg.id}") + return + + obstacle_id = self._world.add_obstacle(obstacle) + self._collision_objects[msg.id] = obstacle_id + + logger.debug(f"Added collision object '{msg.id}' as '{obstacle_id}'") + + # Notify callbacks + for callback in self._obstacle_callbacks: + try: + callback("add", obstacle_id, obstacle) + except Exception as e: + logger.error(f"Obstacle callback error: {e}") + + def _remove_collision_object(self, msg_id: str) -> None: + """Remove a collision object.""" + if msg_id not in self._collision_objects: + logger.debug(f"Collision object '{msg_id}' not found") + return + + obstacle_id = self._collision_objects[msg_id] + self._world.remove_obstacle(obstacle_id) + del self._collision_objects[msg_id] + + logger.debug(f"Removed collision object '{msg_id}'") + + # Notify callbacks + for callback in self._obstacle_callbacks: + try: + callback("remove", obstacle_id, None) + except Exception as e: + logger.error(f"Obstacle callback error: {e}") + + def _update_collision_object(self, msg: CollisionObjectMessage) -> None: + """Update a collision object pose.""" + if msg.id not in self._collision_objects: + # Treat as add if doesn't exist + self._add_collision_object(msg) + return + + 
obstacle_id = self._collision_objects[msg.id] + + if msg.pose is not None: + self._world.update_obstacle_pose(obstacle_id, msg.pose) + logger.debug(f"Updated collision object '{msg.id}' pose") + + # Notify callbacks + for callback in self._obstacle_callbacks: + try: + callback("update", obstacle_id, None) + except Exception as e: + logger.error(f"Obstacle callback error: {e}") + + def _msg_to_obstacle(self, msg: CollisionObjectMessage) -> Obstacle | None: + """Convert collision object message to Obstacle.""" + if msg.primitive_type is None or msg.pose is None or msg.dimensions is None: + return None + + type_map = { + "box": ObstacleType.BOX, + "sphere": ObstacleType.SPHERE, + "cylinder": ObstacleType.CYLINDER, + } + + obstacle_type = type_map.get(msg.primitive_type.lower()) + if obstacle_type is None: + logger.warning(f"Unknown primitive type: {msg.primitive_type}") + return None + + return Obstacle( + name=msg.id, + obstacle_type=obstacle_type, + pose=msg.pose, + dimensions=msg.dimensions, + color=msg.color, + ) + + def on_detections(self, detections: list[Detection3D]) -> None: + """Handle perception detection results. 
+ + Updates obstacles based on detections: + - Adds new obstacles for new detections + - Updates existing obstacles + - Removes obstacles for detections that are no longer present + + Args: + detections: List of Detection3D messages from dimos.msgs.vision_msgs + """ + if not self._running: + return + + with self._lock: + current_time = time.time() + seen_ids = set() + + for detection in detections: + det_id = detection.id + seen_ids.add(det_id) + + pose = self._detection3d_to_pose(detection) + + if det_id in self._perception_objects: + # Update existing obstacle + obstacle_id = self._perception_objects[det_id] + self._world.update_obstacle_pose(obstacle_id, pose) + self._perception_timestamps[det_id] = current_time + else: + # Add new obstacle + obstacle = self._detection_to_obstacle(detection) + obstacle_id = self._world.add_obstacle(obstacle) + self._perception_objects[det_id] = obstacle_id + self._perception_timestamps[det_id] = current_time + + logger.debug(f"Added perception object '{det_id}' as '{obstacle_id}'") + + # Notify callbacks + for callback in self._obstacle_callbacks: + try: + callback("add", obstacle_id, obstacle) + except Exception as e: + logger.error(f"Obstacle callback error: {e}") + + # Remove stale detections + self._cleanup_stale_detections(current_time, seen_ids) + + def _detection3d_to_pose(self, detection: Detection3D) -> PoseStamped: + """Convert Detection3D bbox.center to PoseStamped.""" + center = detection.bbox.center + return PoseStamped( + position=center.position, + orientation=center.orientation, + ) + + def _detection_to_obstacle(self, detection: Detection3D) -> Obstacle: + """Convert Detection3D to Obstacle.""" + pose = self._detection3d_to_pose(detection) + size = detection.bbox.size + return Obstacle( + name=f"detection_{detection.id}", + obstacle_type=ObstacleType.BOX, + pose=pose, + dimensions=(size.x, size.y, size.z), + color=(0.2, 0.8, 0.2, 0.6), # Green for perception objects + ) + + def _cleanup_stale_detections( + self, 
+ current_time: float, + seen_ids: set[str], + ) -> None: + """Remove detections that haven't been seen recently.""" + stale_ids = [] + + for det_id, timestamp in self._perception_timestamps.items(): + age = current_time - timestamp + if det_id not in seen_ids and age > self._detection_timeout: + stale_ids.append(det_id) + + for det_id in stale_ids: + obstacle_id = self._perception_objects[det_id] + removed = self._world.remove_obstacle(obstacle_id) + if not removed: + logger.warning(f"Obstacle '{obstacle_id}' not found in world during cleanup") + del self._perception_objects[det_id] + del self._perception_timestamps[det_id] + + logger.debug(f"Removed stale perception object '{det_id}'") + + # Notify callbacks + for callback in self._obstacle_callbacks: + try: + callback("remove", obstacle_id, None) + except Exception as e: + logger.error(f"Obstacle callback error: {e}") + + def add_static_obstacle( + self, + name: str, + obstacle_type: str, + pose: PoseStamped, + dimensions: tuple[float, ...], + color: tuple[float, float, float, float] = (0.8, 0.2, 0.2, 0.8), + ) -> str: + """Manually add a static obstacle. + + Args: + name: Unique name for the obstacle + obstacle_type: Type ("box", "sphere", "cylinder") + pose: Pose of the obstacle in world frame + dimensions: Type-specific dimensions + color: RGBA color + + Returns: + Obstacle ID + """ + msg = CollisionObjectMessage( + id=name, + operation="add", + primitive_type=obstacle_type, + pose=pose, + dimensions=dimensions, + color=color, + ) + self.on_collision_object(msg) + return self._collision_objects.get(name, "") + + def remove_static_obstacle(self, name: str) -> bool: + """Remove a static obstacle by name. 
+ + Args: + name: Name of the obstacle + + Returns: + True if removed + """ + if name not in self._collision_objects: + return False + + msg = CollisionObjectMessage(id=name, operation="remove") + self.on_collision_object(msg) + return True + + def clear_all_obstacles(self) -> None: + """Remove all tracked obstacles.""" + with self._lock: + # Clear collision objects + for msg_id in list(self._collision_objects.keys()): + self._remove_collision_object(msg_id) + + # Clear perception objects + for det_id, obstacle_id in list(self._perception_objects.items()): + self._world.remove_obstacle(obstacle_id) + del self._perception_objects[det_id] + del self._perception_timestamps[det_id] + + def get_obstacle_count(self) -> int: + """Get total number of tracked obstacles.""" + with self._lock: + return len(self._collision_objects) + len(self._perception_objects) + + def add_obstacle_callback( + self, + callback: Callable[[str, str, Obstacle | None], None], + ) -> None: + """Add callback for obstacle changes. + + Args: + callback: Function called with (operation, obstacle_id, obstacle) + where operation is "add", "update", or "remove" + """ + self._obstacle_callbacks.append(callback) + + def remove_obstacle_callback( + self, + callback: Callable[[str, str, Obstacle | None], None], + ) -> None: + """Remove an obstacle callback.""" + if callback in self._obstacle_callbacks: + self._obstacle_callbacks.remove(callback) + + # ============= Object-Based Perception (from ObjectDB) ============= + + def on_objects(self, objects: list[object]) -> None: + """Cache objects from ObjectDB (preserves stable object_id). + + Unlike on_detections(), this receives Object instances with stable IDs + from ObjectDB deduplication, making the cache trivially keyed by object_id. 
+ + Args: + objects: List of Object instances from ObjectDB + """ + if not self._running: + return + + from dimos.perception.detection.type.detection3d.object import Object + + now = time.time() + seen: set[str] = set() + + with self._lock: + for obj in objects: + if not isinstance(obj, Object): + continue + oid = obj.object_id + seen.add(oid) + if oid in self._object_cache: + _, first, _ = self._object_cache[oid] + self._object_cache[oid] = (obj, first, now) + else: + self._object_cache[oid] = (obj, now, now) + + # Remove objects no longer reported by ObjectDB + stale = [oid for oid in self._object_cache if oid not in seen] + for oid in stale: + del self._object_cache[oid] + + def refresh_obstacles(self, min_duration: float = 0.0) -> list[dict[str, Any]]: + """Full sync: remove all object obstacles, re-add from cache. + + Args: + min_duration: Minimum seconds an object must have been seen to be included + + Returns: + List of added obstacles with object_id, obstacle_id, name, center, size + """ + from dimos.perception.detection.type.detection3d.object import Object + + # Step 1: snapshot eligible objects under lock (fast) + eligible: list[tuple[str, Object]] = [] + with self._lock: + for oid, (obj, first_seen, last_seen) in self._object_cache.items(): + if not isinstance(obj, Object): + continue + if last_seen - first_seen < min_duration: + continue + eligible.append((oid, obj)) + + # Step 2: compute obstacles OUTSIDE lock (convex hull can be slow) + prepared: list[tuple[str, Object, Obstacle]] = [] + for oid, obj in eligible: + obstacle = self._object_to_obstacle(obj) + prepared.append((oid, obj, obstacle)) + + # Step 3: apply to Drake world under lock (fast) + with self._lock: + for obs_id in self._object_obstacles.values(): + self._world.remove_obstacle(obs_id) + self._object_obstacles.clear() + + result: list[dict[str, Any]] = [] + for oid, obj, obstacle in prepared: + assert isinstance(obj, Object) + obs_id = self._world.add_obstacle(obstacle) + 
self._object_obstacles[oid] = obs_id + result.append( + { + "object_id": oid, + "obstacle_id": obs_id, + "name": obj.name, + "center": [float(obj.center.x), float(obj.center.y), float(obj.center.z)], + "size": [float(obj.size.x), float(obj.size.y), float(obj.size.z)], + } + ) + logger.debug(f"Added object obstacle '{oid}' ({obj.name}) as '{obs_id}'") + + return result + + def clear_perception_obstacles(self) -> int: + """Remove all object obstacles from the planning world. + + Returns: + Number of obstacles removed + """ + with self._lock: + count = len(self._object_obstacles) + for obs_id in self._object_obstacles.values(): + self._world.remove_obstacle(obs_id) + self._object_obstacles.clear() + return count + + def get_perception_status(self) -> dict[str, int]: + """Get perception obstacle status.""" + with self._lock: + return { + "cached": len(self._object_cache), + "added": len(self._object_obstacles), + } + + def get_cached_objects(self) -> list[Object]: + """Get cached Object instances from perception. + + Returns raw Object instances for typed access to .name, .center, .size etc. 
+ """ + from dimos.perception.detection.type.detection3d.object import Object as _Object + + with self._lock: + return [obj for obj, _, _ in self._object_cache.values() if isinstance(obj, _Object)] + + def list_cached_detections(self) -> list[dict[str, Any]]: + """List cached detections from perception.""" + from dimos.perception.detection.type.detection3d.object import Object + + with self._lock: + result: list[dict[str, Any]] = [] + for oid, (obj, first_seen, last_seen) in self._object_cache.items(): + if not isinstance(obj, Object): + continue + result.append( + { + "object_id": oid, + "name": obj.name, + "center": [float(obj.center.x), float(obj.center.y), float(obj.center.z)], + "size": [float(obj.size.x), float(obj.size.y), float(obj.size.z)], + "duration": round(last_seen - first_seen, 1), + "in_world": oid in self._object_obstacles, + } + ) + return result + + def list_added_obstacles(self) -> list[dict[str, Any]]: + """List perception obstacles currently in the planning world.""" + from dimos.perception.detection.type.detection3d.object import Object + + with self._lock: + result: list[dict[str, Any]] = [] + for oid, obs_id in self._object_obstacles.items(): + entry = self._object_cache.get(oid) + if entry is None: + continue + obj, first_seen, last_seen = entry + if not isinstance(obj, Object): + continue + result.append( + { + "object_id": oid, + "obstacle_id": obs_id, + "name": obj.name, + "center": [float(obj.center.x), float(obj.center.y), float(obj.center.z)], + "size": [float(obj.size.x), float(obj.size.y), float(obj.size.z)], + } + ) + return result + + def _object_to_obstacle(self, obj: object) -> Obstacle: + """Convert Object to obstacle. 
Uses bounding box by default, convex hull if use_mesh_obstacles=True.""" + from dimos.perception.detection.type.detection3d.object import Object + + assert isinstance(obj, Object) + name = f"object_{obj.object_id}" + + # Try convex hull from pointcloud (opt-in) + if self._use_mesh_obstacles and obj.pointcloud is not None: + try: + from dimos.manipulation.planning.utils.mesh_utils import ( + pointcloud_to_convex_hull_obj, + ) + + points, _ = obj.pointcloud.as_numpy() + if points is not None and points.shape[0] >= 4: + mesh_path = pointcloud_to_convex_hull_obj(points) + if mesh_path is not None: + return Obstacle( + name=name, + obstacle_type=ObstacleType.MESH, + pose=obj.pose, + color=(0.2, 0.8, 0.2, 0.6), + mesh_path=mesh_path, + ) + except Exception as e: + logger.debug(f"Convex hull failed for {name}, falling back to box: {e}") + + # Default: bounding box + return Obstacle( + name=name, + obstacle_type=ObstacleType.BOX, + pose=obj.pose or PoseStamped(position=obj.center), + dimensions=(float(obj.size.x), float(obj.size.y), float(obj.size.z)), + color=(0.2, 0.8, 0.2, 0.6), + ) diff --git a/dimos/manipulation/planning/monitor/world_state_monitor.py b/dimos/manipulation/planning/monitor/world_state_monitor.py new file mode 100644 index 0000000000..87d61bb66f --- /dev/null +++ b/dimos/manipulation/planning/monitor/world_state_monitor.py @@ -0,0 +1,331 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" +World State Monitor + +Monitors joint state updates and syncs them to a WorldSpec instance. +This is the WorldSpec-based replacement for StateMonitor. + +Example: + monitor = WorldStateMonitor(world, lock, robot_id, joint_names) + monitor.start() + monitor.on_joint_state(joint_state_msg) # Called by subscriber +""" + +from __future__ import annotations + +import time +from typing import TYPE_CHECKING + +import numpy as np + +from dimos.msgs.sensor_msgs import JointState +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from collections.abc import Callable + import threading + + from numpy.typing import NDArray + + from dimos.manipulation.planning.spec import WorldSpec + +logger = setup_logger() + + +class WorldStateMonitor: + """Monitors joint state updates and syncs them to WorldSpec. + + This class subscribes to joint state messages and calls + world.sync_from_joint_state() to keep the world's live context + synchronized with the real robot state. + + ## Thread Safety + + All state updates are protected by the provided lock. The on_joint_state + callback can be called from any thread. + + ## Comparison with StateMonitor + + - StateMonitor: Works with PlanningScene ABC + - WorldStateMonitor: Works with WorldSpec Protocol + """ + + def __init__( + self, + world: WorldSpec, + lock: threading.RLock, + robot_id: str, + joint_names: list[str], + joint_name_mapping: dict[str, str] | None = None, + timeout: float = 1.0, + ): + """Create a world state monitor. + + Args: + world: WorldSpec instance to sync state to + lock: Shared lock for thread-safe access + robot_id: ID of the robot to monitor + joint_names: Ordered list of joint names for this robot (URDF names) + joint_name_mapping: Maps coordinator joint names to URDF joint names. + Example: {"left_joint1": "joint1"} means messages with "left_joint1" + will be mapped to URDF "joint1". If None, names must match exactly. 
+ timeout: Timeout for waiting for initial state (seconds) + """ + self._world = world + self._lock = lock + self._robot_id = robot_id + self._joint_names = joint_names + self._timeout = timeout + + # Joint name mapping: coordinator name -> URDF name + self._joint_name_mapping = joint_name_mapping or {} + # Build reverse mapping: URDF name -> coordinator name + self._reverse_mapping = {v: k for k, v in self._joint_name_mapping.items()} + + # Latest state + self._latest_positions: NDArray[np.float64] | None = None + self._latest_velocities: NDArray[np.float64] | None = None + self._last_update_time: float | None = None + + # Running state + self._running = False + + # Callbacks: (robot_id, joint_state) called on each state update + self._state_callbacks: list[Callable[[str, JointState], None]] = [] + + def start(self) -> None: + """Start the state monitor.""" + self._running = True + logger.info(f"World state monitor started for robot '{self._robot_id}'") + + def stop(self) -> None: + """Stop the state monitor.""" + self._running = False + logger.info(f"World state monitor stopped for robot '{self._robot_id}'") + + def is_running(self) -> bool: + """Check if monitor is running.""" + return self._running + + @property + def robot_id(self) -> str: + """Get the robot ID being monitored.""" + return self._robot_id + + def on_joint_state(self, msg: JointState) -> None: + """Handle incoming joint state message. + + This is called by the subscriber when a new JointState message arrives. + It extracts joint positions and syncs them to the world. 
+ + Args: + msg: JointState message with joint names and positions + """ + try: + if not self._running: + return + + # Extract positions for our robot's joints + positions = self._extract_positions(msg) + if positions is None: + logger.debug( + "[WorldStateMonitor] Failed to extract positions - joint names mismatch" + ) + logger.debug(f" Expected joints: {self._joint_names}") + logger.debug(f" Received joints: {msg.name}") + return # Not all joints present in message + + velocities = self._extract_velocities(msg) + + # Track message count for debugging + self._msg_count = getattr(self, "_msg_count", 0) + 1 + + with self._lock: + current_time = time.time() + + # Store latest state FIRST - this ensures planning always has + # current positions even if sync_from_joint_state fails + # (e.g., after dynamically adding obstacles) + self._latest_positions = positions + self._latest_velocities = velocities + self._last_update_time = current_time + + # Sync to world's live context (for visualization) + try: + # Create JointState for world sync (API uses JointState) + joint_state = JointState( + name=self._joint_names, + position=positions.tolist(), + ) + self._world.sync_from_joint_state(self._robot_id, joint_state) + except Exception as e: + logger.error(f"Failed to sync joint state to live context: {e}") + + # Call registered callbacks + for callback in self._state_callbacks: + try: + callback(self._robot_id, joint_state) + except Exception as e: + logger.error(f"State callback error: {e}") + + except Exception as e: + logger.error(f"[WorldStateMonitor] Unexpected exception in on_joint_state: {e}") + import traceback + + logger.error(traceback.format_exc()) + + def _extract_positions(self, msg: JointState) -> NDArray[np.float64] | None: + """Extract positions for our joints from JointState message. + + Handles joint name translation from coordinator namespace to URDF namespace. + If joint_name_mapping is set, message names are looked up via the reverse mapping. 
+ + Args: + msg: JointState message (may use coordinator joint names) + + Returns: + Array of joint positions or None if any joint is missing + """ + # Build name->index map from message (coordinator names) + name_to_idx = {name: i for i, name in enumerate(msg.name)} + + positions = [] + for urdf_joint_name in self._joint_names: + # Try direct match first (when no mapping or names already match) + if urdf_joint_name in name_to_idx: + idx = name_to_idx[urdf_joint_name] + else: + # Try reverse mapping: URDF name -> coordinator name -> msg index + orch_name = self._reverse_mapping.get(urdf_joint_name) + if orch_name is None or orch_name not in name_to_idx: + return None # Missing joint + idx = name_to_idx[orch_name] + + if idx >= len(msg.position): + return None # Position not available + positions.append(msg.position[idx]) + + return np.array(positions, dtype=np.float64) + + def _extract_velocities(self, msg: JointState) -> NDArray[np.float64] | None: + """Extract velocities for our joints. + + Uses same name translation as _extract_positions. + """ + if not msg.velocity or len(msg.velocity) == 0: + return None + + name_to_idx = {name: i for i, name in enumerate(msg.name)} + + velocities = [] + for urdf_joint_name in self._joint_names: + # Try direct match first + if urdf_joint_name in name_to_idx: + idx = name_to_idx[urdf_joint_name] + else: + # Try reverse mapping + orch_name = self._reverse_mapping.get(urdf_joint_name) + if orch_name is None or orch_name not in name_to_idx: + return None + idx = name_to_idx[orch_name] + + if idx >= len(msg.velocity): + return None + velocities.append(msg.velocity[idx]) + + return np.array(velocities, dtype=np.float64) + + def get_current_positions(self) -> NDArray[np.float64] | None: + """Get current joint positions (thread-safe). 
+ + Returns: + Current positions or None if not yet received + """ + with self._lock: + return self._latest_positions.copy() if self._latest_positions is not None else None + + def get_current_velocities(self) -> NDArray[np.float64] | None: + """Get current joint velocities (thread-safe). + + Returns: + Current velocities or None if not available + """ + with self._lock: + return self._latest_velocities.copy() if self._latest_velocities is not None else None + + def wait_for_state(self, timeout: float | None = None) -> bool: + """Wait until a state is received. + + Args: + timeout: Maximum time to wait (uses default if None) + + Returns: + True if state was received, False if timeout + """ + timeout = timeout if timeout is not None else self._timeout + start_time = time.time() + + while time.time() - start_time < timeout: + with self._lock: + if self._latest_positions is not None: + return True + time.sleep(0.01) + + return False + + def get_state_age(self) -> float | None: + """Get age of the latest state in seconds. + + Returns: + Age in seconds or None if no state received + """ + with self._lock: + if self._last_update_time is None: + return None + return time.time() - self._last_update_time + + def is_state_stale(self, max_age: float = 1.0) -> bool: + """Check if state is stale (older than max_age). + + Args: + max_age: Maximum acceptable age in seconds + + Returns: + True if state is stale or not received + """ + age = self.get_state_age() + if age is None: + return True + return age > max_age + + def add_state_callback( + self, + callback: Callable[[str, JointState], None], + ) -> None: + """Add callback for state updates. 
+ + Args: + callback: Function called with (robot_id, joint_state) on each update + """ + self._state_callbacks.append(callback) + + def remove_state_callback( + self, + callback: Callable[[str, JointState], None], + ) -> None: + """Remove a state callback.""" + if callback in self._state_callbacks: + self._state_callbacks.remove(callback) diff --git a/dimos/manipulation/planning/planners/__init__.py b/dimos/manipulation/planning/planners/__init__.py new file mode 100644 index 0000000000..8fb8ae042b --- /dev/null +++ b/dimos/manipulation/planning/planners/__init__.py @@ -0,0 +1,41 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Motion Planners Module + +Contains motion planning implementations that use WorldSpec. + +All planners are backend-agnostic - they only use WorldSpec methods and +work with any physics backend (Drake, MuJoCo, PyBullet, etc.). 
+ +## Implementations + +- RRTConnectPlanner: Bi-directional RRT-Connect planner (fast, reliable) + +## Usage + +Use factory functions to create planners: + +```python +from dimos.manipulation.planning.factory import create_planner + +planner = create_planner(name="rrt_connect") # Returns PlannerSpec +result = planner.plan_joint_path(world, robot_id, q_start, q_goal) +``` +""" + +from dimos.manipulation.planning.planners.rrt_planner import RRTConnectPlanner + +__all__ = ["RRTConnectPlanner"] diff --git a/dimos/manipulation/planning/planners/rrt_planner.py b/dimos/manipulation/planning/planners/rrt_planner.py new file mode 100644 index 0000000000..f2be8736d5 --- /dev/null +++ b/dimos/manipulation/planning/planners/rrt_planner.py @@ -0,0 +1,350 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""RRT-Connect and RRT* motion planners implementing PlannerSpec. + +These planners are backend-agnostic - they only use WorldSpec methods and can work +with any physics backend (Drake, MuJoCo, PyBullet, etc.). 
+""" + +from __future__ import annotations + +from dataclasses import dataclass, field +import time +from typing import TYPE_CHECKING + +import numpy as np + +from dimos.manipulation.planning.spec import ( + JointPath, + PlanningResult, + PlanningStatus, + WorldRobotID, + WorldSpec, +) +from dimos.manipulation.planning.utils.path_utils import compute_path_length +from dimos.msgs.sensor_msgs import JointState +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from numpy.typing import NDArray + +logger = setup_logger() + + +@dataclass(eq=False) +class TreeNode: + """Node in RRT tree with optional cost tracking (for RRT*).""" + + config: NDArray[np.float64] + parent: TreeNode | None = None + children: list[TreeNode] = field(default_factory=list) + cost: float = 0.0 + + def path_to_root(self) -> list[NDArray[np.float64]]: + """Get path from this node to root.""" + path = [] + node: TreeNode | None = self + while node is not None: + path.append(node.config) + node = node.parent + return list(reversed(path)) + + +class RRTConnectPlanner: + """Bi-directional RRT-Connect planner. + + This planner is backend-agnostic - it only uses WorldSpec methods for + collision checking and can work with any physics backend. 
+ """ + + def __init__( + self, + step_size: float = 0.1, + connect_step_size: float = 0.05, + goal_tolerance: float = 0.1, + collision_step_size: float = 0.02, + ): + self._step_size = step_size + self._connect_step_size = connect_step_size + self._goal_tolerance = goal_tolerance + self._collision_step_size = collision_step_size + + def plan_joint_path( + self, + world: WorldSpec, + robot_id: WorldRobotID, + start: JointState, + goal: JointState, + timeout: float = 10.0, + max_iterations: int = 5000, + ) -> PlanningResult: + """Plan collision-free path using bi-directional RRT.""" + start_time = time.time() + + # Extract positions as numpy arrays for internal computation + q_start = np.array(start.position, dtype=np.float64) + q_goal = np.array(goal.position, dtype=np.float64) + joint_names = start.name # Store for converting back to JointState + + error = self._validate_inputs(world, robot_id, start, goal) + if error is not None: + return error + + lower, upper = world.get_joint_limits(robot_id) + start_tree = [TreeNode(config=q_start.copy())] + goal_tree = [TreeNode(config=q_goal.copy())] + trees_swapped = False + + for iteration in range(max_iterations): + if time.time() - start_time > timeout: + return _create_failure_result( + PlanningStatus.TIMEOUT, + f"Timeout after {iteration} iterations", + time.time() - start_time, + iteration, + ) + + sample = np.random.uniform(lower, upper) + extended = self._extend_tree( + world, robot_id, start_tree, sample, self._step_size, joint_names + ) + + if extended is not None: + connected = self._connect_tree( + world, + robot_id, + goal_tree, + extended.config, + self._connect_step_size, + joint_names, + ) + if connected is not None: + path = self._extract_path(extended, connected, joint_names) + if trees_swapped: + path = list(reversed(path)) + path = self._simplify_path(world, robot_id, path) + return _create_success_result(path, time.time() - start_time, iteration + 1) + + start_tree, goal_tree = goal_tree, start_tree + 
trees_swapped = not trees_swapped + + return _create_failure_result( + PlanningStatus.NO_SOLUTION, + f"No path found after {max_iterations} iterations", + time.time() - start_time, + max_iterations, + ) + + def get_name(self) -> str: + """Get planner name.""" + return "RRTConnect" + + def _validate_inputs( + self, + world: WorldSpec, + robot_id: WorldRobotID, + start: JointState, + goal: JointState, + ) -> PlanningResult | None: + """Validate planning inputs, returns error result or None if valid.""" + # Check world is finalized + if not world.is_finalized: + return _create_failure_result( + PlanningStatus.NO_SOLUTION, + "World must be finalized before planning", + ) + + # Check robot exists + if robot_id not in world.get_robot_ids(): + return _create_failure_result( + PlanningStatus.NO_SOLUTION, + f"Robot '{robot_id}' not found", + ) + + # Check start validity using context-free method + if not world.check_config_collision_free(robot_id, start): + return _create_failure_result( + PlanningStatus.COLLISION_AT_START, + "Start configuration is in collision", + ) + + # Check goal validity using context-free method + if not world.check_config_collision_free(robot_id, goal): + return _create_failure_result( + PlanningStatus.COLLISION_AT_GOAL, + "Goal configuration is in collision", + ) + + # Check limits with small tolerance for driver floating-point drift + lower, upper = world.get_joint_limits(robot_id) + q_start = np.array(start.position, dtype=np.float64) + q_goal = np.array(goal.position, dtype=np.float64) + limit_eps = 1e-3 # ~0.06 degrees + + if np.any(q_start < lower - limit_eps) or np.any(q_start > upper + limit_eps): + return _create_failure_result( + PlanningStatus.INVALID_START, + "Start configuration is outside joint limits", + ) + + if np.any(q_goal < lower - limit_eps) or np.any(q_goal > upper + limit_eps): + return _create_failure_result( + PlanningStatus.INVALID_GOAL, + "Goal configuration is outside joint limits", + ) + + return None + + def 
_extend_tree( + self, + world: WorldSpec, + robot_id: WorldRobotID, + tree: list[TreeNode], + target: NDArray[np.float64], + step_size: float, + joint_names: list[str], + ) -> TreeNode | None: + """Extend tree toward target, returns new node if successful.""" + # Find nearest node + nearest = min(tree, key=lambda n: float(np.linalg.norm(n.config - target))) + + # Compute new config + diff = target - nearest.config + dist = float(np.linalg.norm(diff)) + + if dist <= step_size: + new_config = target.copy() + else: + new_config = nearest.config + step_size * (diff / dist) + + # Check validity of edge using context-free method + start_state = JointState(name=joint_names, position=nearest.config.tolist()) + end_state = JointState(name=joint_names, position=new_config.tolist()) + if world.check_edge_collision_free( + robot_id, start_state, end_state, self._collision_step_size + ): + new_node = TreeNode(config=new_config, parent=nearest) + nearest.children.append(new_node) + tree.append(new_node) + return new_node + + return None + + def _connect_tree( + self, + world: WorldSpec, + robot_id: WorldRobotID, + tree: list[TreeNode], + target: NDArray[np.float64], + step_size: float, + joint_names: list[str], + ) -> TreeNode | None: + """Try to connect tree to target, returns connected node if successful.""" + # Keep extending toward target + while True: + result = self._extend_tree(world, robot_id, tree, target, step_size, joint_names) + + if result is None: + return None # Extension failed + + # Check if reached target + if float(np.linalg.norm(result.config - target)) < self._goal_tolerance: + return result + + def _extract_path( + self, + start_node: TreeNode, + goal_node: TreeNode, + joint_names: list[str], + ) -> JointPath: + """Extract path from two connected nodes.""" + # Path from start node to its root (reversed to be root->node) + start_path = start_node.path_to_root() + + # Path from goal node to its root + goal_path = goal_node.path_to_root() + + # Combine: 
start_root -> start_node -> goal_node -> goal_root + # But we need start -> goal, so reverse the goal path + full_path_arrays = start_path + list(reversed(goal_path)) + + # Convert to list of JointState + return [JointState(name=joint_names, position=q.tolist()) for q in full_path_arrays] + + def _simplify_path( + self, + world: WorldSpec, + robot_id: WorldRobotID, + path: JointPath, + max_iterations: int = 100, + ) -> JointPath: + """Simplify path by random shortcutting.""" + if len(path) <= 2: + return path + + simplified = list(path) + + for _ in range(max_iterations): + if len(simplified) <= 2: + break + + # Pick two random indices (at least 2 apart) + i = np.random.randint(0, len(simplified) - 2) + j = np.random.randint(i + 2, len(simplified)) + + # Check if direct connection is valid using context-free method + # path elements are already JointState + if world.check_edge_collision_free( + robot_id, simplified[i], simplified[j], self._collision_step_size + ): + # Remove intermediate waypoints + simplified = simplified[: i + 1] + simplified[j:] + + return simplified + + +# ============= Result Helpers ============= + + +def _create_success_result( + path: JointPath, + planning_time: float, + iterations: int, +) -> PlanningResult: + """Create a successful planning result.""" + return PlanningResult( + status=PlanningStatus.SUCCESS, + path=path, + planning_time=planning_time, + path_length=compute_path_length(path), + iterations=iterations, + message="Path found", + ) + + +def _create_failure_result( + status: PlanningStatus, + message: str, + planning_time: float = 0.0, + iterations: int = 0, +) -> PlanningResult: + """Create a failed planning result.""" + return PlanningResult( + status=status, + path=[], + planning_time=planning_time, + iterations=iterations, + message=message, + ) diff --git a/dimos/manipulation/planning/spec/__init__.py b/dimos/manipulation/planning/spec/__init__.py new file mode 100644 index 0000000000..a78fb6e5fd --- /dev/null +++ 
b/dimos/manipulation/planning/spec/__init__.py @@ -0,0 +1,51 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Manipulation Planning Specifications.""" + +from dimos.manipulation.planning.spec.config import RobotModelConfig +from dimos.manipulation.planning.spec.enums import IKStatus, ObstacleType, PlanningStatus +from dimos.manipulation.planning.spec.protocols import ( + KinematicsSpec, + PlannerSpec, + WorldSpec, +) +from dimos.manipulation.planning.spec.types import ( + CollisionObjectMessage, + IKResult, + Jacobian, + JointPath, + Obstacle, + PlanningResult, + RobotName, + WorldRobotID, +) + +__all__ = [ + "CollisionObjectMessage", + "IKResult", + "IKStatus", + "Jacobian", + "JointPath", + "KinematicsSpec", + "Obstacle", + "ObstacleType", + "PlannerSpec", + "PlanningResult", + "PlanningStatus", + "RobotModelConfig", + "RobotName", + "WorldRobotID", + "WorldSpec", +] diff --git a/dimos/manipulation/planning/spec/config.py b/dimos/manipulation/planning/spec/config.py new file mode 100644 index 0000000000..dc302689ea --- /dev/null +++ b/dimos/manipulation/planning/spec/config.py @@ -0,0 +1,99 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Robot configuration for manipulation planning.""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pathlib import Path + + from dimos.msgs.geometry_msgs import PoseStamped + + +@dataclass +class RobotModelConfig: + """Configuration for adding a robot to the world. + + Attributes: + name: Human-readable robot name + urdf_path: Path to URDF file (can be .urdf or .xacro) + base_pose: Pose of robot base in world frame (position + orientation) + joint_names: Ordered list of controlled joint names (in URDF namespace) + end_effector_link: Name of the end-effector link for FK/IK + base_link: Name of the base link (default: "base_link") + package_paths: Dict mapping package names to filesystem Paths + joint_limits_lower: Lower joint limits (radians) + joint_limits_upper: Upper joint limits (radians) + velocity_limits: Joint velocity limits (rad/s) + auto_convert_meshes: Auto-convert DAE/STL meshes to OBJ for Drake + xacro_args: Arguments to pass to xacro processor (for .xacro files) + collision_exclusion_pairs: List of (link1, link2) pairs to exclude from collision. + Useful for parallel linkage mechanisms like grippers where non-adjacent + links may legitimately overlap (e.g., mimic joints). + max_velocity: Maximum joint velocity for trajectory generation (rad/s) + max_acceleration: Maximum joint acceleration for trajectory generation (rad/s^2) + joint_name_mapping: Maps coordinator joint names to URDF joint names. 
+ Example: {"left_joint1": "joint1"} means coordinator's "left_joint1" + corresponds to URDF's "joint1". If empty, names are assumed to match. + coordinator_task_name: Task name for executing trajectories via coordinator RPC. + If set, trajectories can be executed via execute_trajectory() RPC. + """ + + name: str + urdf_path: Path + base_pose: PoseStamped + joint_names: list[str] + end_effector_link: str + base_link: str = "base_link" + package_paths: dict[str, Path] = field(default_factory=dict) + joint_limits_lower: list[float] | None = None + joint_limits_upper: list[float] | None = None + velocity_limits: list[float] | None = None + auto_convert_meshes: bool = False + xacro_args: dict[str, str] = field(default_factory=dict) + collision_exclusion_pairs: list[tuple[str, str]] = field(default_factory=list) + # Motion constraints for trajectory generation + max_velocity: float = 1.0 + max_acceleration: float = 2.0 + # Coordinator integration + joint_name_mapping: dict[str, str] = field(default_factory=dict) + coordinator_task_name: str | None = None + gripper_hardware_id: str | None = None + # TF publishing for extra links (e.g., camera mount) + tf_extra_links: list[str] = field(default_factory=list) + # Home/observe joint configuration for go_home skill + home_joints: list[float] | None = None + # Pre-grasp offset distance in meters (along approach direction) + pre_grasp_offset: float = 0.10 + + def get_urdf_joint_name(self, coordinator_name: str) -> str: + """Translate coordinator joint name to URDF joint name.""" + return self.joint_name_mapping.get(coordinator_name, coordinator_name) + + def get_coordinator_joint_name(self, urdf_name: str) -> str: + """Translate URDF joint name to coordinator joint name.""" + for coord_name, u_name in self.joint_name_mapping.items(): + if u_name == urdf_name: + return coord_name + return urdf_name + + def get_coordinator_joint_names(self) -> list[str]: + """Get joint names in coordinator namespace.""" + if not 
self.joint_name_mapping: + return self.joint_names + return [self.get_coordinator_joint_name(j) for j in self.joint_names] diff --git a/dimos/manipulation/planning/spec/enums.py b/dimos/manipulation/planning/spec/enums.py new file mode 100644 index 0000000000..66a17ee199 --- /dev/null +++ b/dimos/manipulation/planning/spec/enums.py @@ -0,0 +1,49 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Enumerations for manipulation planning.""" + +from enum import Enum, auto + + +class ObstacleType(Enum): + """Type of obstacle geometry.""" + + BOX = auto() + SPHERE = auto() + CYLINDER = auto() + MESH = auto() + + +class IKStatus(Enum): + """Status of IK solution.""" + + SUCCESS = auto() + NO_SOLUTION = auto() + SINGULARITY = auto() + JOINT_LIMITS = auto() + COLLISION = auto() + TIMEOUT = auto() + + +class PlanningStatus(Enum): + """Status of motion planning.""" + + SUCCESS = auto() + NO_SOLUTION = auto() + TIMEOUT = auto() + INVALID_START = auto() + INVALID_GOAL = auto() + COLLISION_AT_START = auto() + COLLISION_AT_GOAL = auto() diff --git a/dimos/manipulation/planning/spec/protocols.py b/dimos/manipulation/planning/spec/protocols.py new file mode 100644 index 0000000000..dea4718abb --- /dev/null +++ b/dimos/manipulation/planning/spec/protocols.py @@ -0,0 +1,231 @@ +# Copyright 2025-2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Protocol definitions for manipulation planning. + +All code should use these Protocol types (not concrete classes). +Use factory functions from dimos.manipulation.planning.factory to create instances. +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable + +if TYPE_CHECKING: + from contextlib import AbstractContextManager + + import numpy as np + from numpy.typing import NDArray + + from dimos.manipulation.planning.spec.config import RobotModelConfig + from dimos.manipulation.planning.spec.types import ( + IKResult, + JointPath, + Obstacle, + PlanningResult, + WorldRobotID, + ) + from dimos.msgs.geometry_msgs import PoseStamped + from dimos.msgs.sensor_msgs import JointState + + +@runtime_checkable +class WorldSpec(Protocol): + """Protocol for the world/scene backend. + + The world owns the physics/collision backend and provides: + - Robot/obstacle management + - Collision checking + - Forward kinematics + - Context management for thread safety + + Context Management: + - Live context: Mirrors current robot state (synced from driver) + - Scratch contexts: Thread-safe clones for planning/IK operations + + Implementations: + - DrakeWorld: Uses Drake's MultibodyPlant and SceneGraph + """ + + # Robot Management + def add_robot(self, config: RobotModelConfig) -> WorldRobotID: + """Add a robot to the world. Returns unique robot ID.""" + ... 
+ + def get_robot_ids(self) -> list[WorldRobotID]: + """Get all robot IDs.""" + ... + + def get_robot_config(self, robot_id: WorldRobotID) -> RobotModelConfig: + """Get robot configuration.""" + ... + + def get_joint_limits( + self, robot_id: WorldRobotID + ) -> tuple[NDArray[np.float64], NDArray[np.float64]]: # lower limits, upper limits + """Get joint limits (lower, upper) for a robot.""" + ... + + # Obstacle Management + def add_obstacle(self, obstacle: Obstacle) -> str: + """Add an obstacle to the world. Returns unique obstacle ID.""" + ... + + def remove_obstacle(self, obstacle_id: str) -> bool: + """Remove an obstacle. Returns True if removed.""" + ... + + def update_obstacle_pose(self, obstacle_id: str, pose: PoseStamped) -> bool: + """Update obstacle pose. Returns True if updated.""" + ... + + def clear_obstacles(self) -> None: + """Remove all obstacles.""" + ... + + # Lifecycle + def finalize(self) -> None: + """Finalize the world. Must be called after adding robots.""" + ... + + @property + def is_finalized(self) -> bool: + """Check if world is finalized.""" + ... + + # Context Management + def get_live_context(self) -> Any: + """Get the live context (mirrors real robot state).""" + ... + + def scratch_context(self) -> AbstractContextManager[Any]: + """Get a scratch context for planning (thread-safe clone).""" + ... + + def sync_from_joint_state(self, robot_id: WorldRobotID, joint_state: JointState) -> None: + """Sync live context from joint state message.""" + ... + + # State Operations (require context) + def set_joint_state(self, ctx: Any, robot_id: WorldRobotID, joint_state: JointState) -> None: + """Set robot joint state in a context.""" + ... + + def get_joint_state(self, ctx: Any, robot_id: WorldRobotID) -> JointState: + """Get robot joint state from a context.""" + ... + + # Collision Checking (require context) + def is_collision_free(self, ctx: Any, robot_id: WorldRobotID) -> bool: + """Check if robot configuration is collision-free.""" + ... 
+ + def get_min_distance(self, ctx: Any, robot_id: WorldRobotID) -> float: + """Get minimum distance to obstacles (negative if collision).""" + ... + + # Collision Checking (context-free, for planning) + def check_config_collision_free(self, robot_id: WorldRobotID, joint_state: JointState) -> bool: + """Check if a joint state is collision-free (manages context internally).""" + ... + + def check_edge_collision_free( + self, + robot_id: WorldRobotID, + start: JointState, + end: JointState, + step_size: float = 0.05, + ) -> bool: + """Check if the entire edge between two joint states is collision-free.""" + ... + + # Forward Kinematics (require context) + def get_ee_pose(self, ctx: Any, robot_id: WorldRobotID) -> PoseStamped: + """Get end-effector pose.""" + ... + + def get_link_pose( + self, ctx: Any, robot_id: WorldRobotID, link_name: str + ) -> NDArray[np.float64]: + """Get link pose as 4x4 homogeneous transform.""" + ... + + def get_jacobian(self, ctx: Any, robot_id: WorldRobotID) -> NDArray[np.float64]: + """Get end-effector Jacobian (6 x n_joints).""" + ... + + # Visualization (optional) + def get_visualization_url(self) -> str | None: + """Get visualization URL if enabled.""" + ... + + def publish_visualization(self, ctx: Any | None = None) -> None: + """Publish current state to visualization.""" + ... + + def animate_path(self, robot_id: WorldRobotID, path: JointPath, duration: float = 3.0) -> None: + """Animate a path in visualization.""" + ... + + def close(self) -> None: + """Release visualization resources.""" + ... + + +@runtime_checkable +class KinematicsSpec(Protocol): + """Protocol for inverse kinematics solvers. 
Stateless, uses WorldSpec for FK/collision.""" + + def solve( + self, + world: WorldSpec, + robot_id: WorldRobotID, + target_pose: PoseStamped, + seed: JointState | None = None, + position_tolerance: float = 0.001, + orientation_tolerance: float = 0.01, + check_collision: bool = True, + max_attempts: int = 10, + ) -> IKResult: + """Solve IK with optional collision checking.""" + ... + + +@runtime_checkable +class PlannerSpec(Protocol): + """Protocol for motion planner. + + Planners find collision-free paths from start to goal configurations. + They use WorldSpec for collision checking and are stateless. + All planners are backend-agnostic - they only use WorldSpec methods. + + Implementations: + - RRTConnectPlanner: Bi-directional RRT-Connect planner + - RRTStarPlanner: RRT* planner (asymptotically optimal) + """ + + def plan_joint_path( + self, + world: WorldSpec, + robot_id: WorldRobotID, + start: JointState, + goal: JointState, + timeout: float = 10.0, + ) -> PlanningResult: + """Plan a collision-free joint-space path.""" + ... + + def get_name(self) -> str: + """Get planner name.""" + ... diff --git a/dimos/manipulation/planning/spec/types.py b/dimos/manipulation/planning/spec/types.py new file mode 100644 index 0000000000..a38cc0da26 --- /dev/null +++ b/dimos/manipulation/planning/spec/types.py @@ -0,0 +1,161 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Data types for manipulation planning.""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, TypeAlias + +from dimos.manipulation.planning.spec.enums import ( + IKStatus, + ObstacleType, + PlanningStatus, +) + +if TYPE_CHECKING: + import numpy as np + from numpy.typing import NDArray + + from dimos.msgs.geometry_msgs import PoseStamped + from dimos.msgs.sensor_msgs import JointState + +# ============================================================================= +# Semantic ID Types (documentation only, not enforced at runtime) +# ============================================================================= + +RobotName: TypeAlias = str +"""User-facing robot name (e.g., 'left_arm', 'right_arm')""" + +WorldRobotID: TypeAlias = str +"""Internal Drake world robot ID""" + +JointPath: TypeAlias = "list[JointState]" +"""List of joint states forming a path (each waypoint has names + positions)""" + +# ============================================================================= +# Numeric Array Types +# ============================================================================= + +Jacobian: TypeAlias = "NDArray[np.float64]" +"""6 x n Jacobian matrix (rows: [vx, vy, vz, wx, wy, wz])""" + + +# ============================================================================= +# Data Classes +# ============================================================================= + + +@dataclass +class Obstacle: + """Obstacle specification for collision avoidance. 
+ + Attributes: + name: Unique name for the obstacle + obstacle_type: Type of geometry (BOX, SPHERE, CYLINDER, MESH) + pose: Pose of the obstacle in world frame + dimensions: Type-specific dimensions: + - BOX: (width, height, depth) + - SPHERE: (radius,) + - CYLINDER: (radius, height) + - MESH: Not used + color: RGBA color tuple (0-1 range) + mesh_path: Path to mesh file (for MESH type) + """ + + name: str + obstacle_type: ObstacleType + pose: PoseStamped + dimensions: tuple[float, ...] = () + color: tuple[float, float, float, float] = (0.8, 0.2, 0.2, 0.8) + mesh_path: str | None = None + + +@dataclass +class IKResult: + """Result of an IK solve. + + Attributes: + status: Solution status + joint_state: Solution joint state with names and positions (None if failed) + position_error: Cartesian position error (meters) + orientation_error: Orientation error (radians) + iterations: Number of iterations taken + message: Human-readable status message + """ + + status: IKStatus + joint_state: JointState | None = None + position_error: float = 0.0 + orientation_error: float = 0.0 + iterations: int = 0 + message: str = "" + + def is_success(self) -> bool: + """Check if IK was successful.""" + return self.status == IKStatus.SUCCESS + + +@dataclass +class PlanningResult: + """Result of motion planning. + + Attributes: + status: Planning status + path: List of joint states forming the path (empty if failed). + Each JointState contains names, positions, and optionally velocities. + planning_time: Time taken to plan (seconds) + path_length: Total path length in joint space (radians) + iterations: Number of iterations/nodes expanded + message: Human-readable status message + timestamps: Optional timestamps for each waypoint (seconds from start). + If provided by the planner, trajectory generator can use these directly. 
+ """ + + status: PlanningStatus + path: list[JointState] = field(default_factory=list) + planning_time: float = 0.0 + path_length: float = 0.0 + iterations: int = 0 + message: str = "" + # Optional timing (set by optimization-based planners) + timestamps: list[float] | None = None + + def is_success(self) -> bool: + """Check if planning was successful.""" + return self.status == PlanningStatus.SUCCESS + + +@dataclass +class CollisionObjectMessage: + """Message for adding/updating/removing obstacles. + + Used by monitors to handle obstacle updates from external sources. + + Attributes: + id: Unique identifier for the object + operation: "add", "update", or "remove" + primitive_type: "box", "sphere", or "cylinder" (for add/update) + pose: Pose of the obstacle (for add/update) + dimensions: Type-specific dimensions (for add/update) + color: RGBA color tuple + """ + + id: str + operation: str # "add", "update", "remove" + primitive_type: str | None = None + pose: PoseStamped | None = None + dimensions: tuple[float, ...] | None = None + color: tuple[float, float, float, float] = (0.8, 0.2, 0.2, 0.8) diff --git a/dimos/manipulation/planning/utils/__init__.py b/dimos/manipulation/planning/utils/__init__.py new file mode 100644 index 0000000000..04ec1806b5 --- /dev/null +++ b/dimos/manipulation/planning/utils/__init__.py @@ -0,0 +1,51 @@ +# Copyright 2025 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" +Manipulation Planning Utilities + +Standalone utility functions for kinematics and path operations. +These are extracted from the old ABC base classes to enable composition over inheritance. + +## Modules + +- kinematics_utils: Jacobian operations, singularity detection, pose error computation +- path_utils: Path interpolation, simplification, length computation +""" + +from dimos.manipulation.planning.utils.kinematics_utils import ( + check_singularity, + compute_error_twist, + compute_pose_error, + damped_pseudoinverse, + get_manipulability, +) +from dimos.manipulation.planning.utils.path_utils import ( + compute_path_length, + interpolate_path, + interpolate_segment, +) + +__all__ = [ + # Kinematics utilities + "check_singularity", + "compute_error_twist", + # Path utilities + "compute_path_length", + "compute_pose_error", + "damped_pseudoinverse", + "get_manipulability", + "interpolate_path", + "interpolate_segment", +] diff --git a/dimos/manipulation/planning/utils/kinematics_utils.py b/dimos/manipulation/planning/utils/kinematics_utils.py new file mode 100644 index 0000000000..c9f3f95a3d --- /dev/null +++ b/dimos/manipulation/planning/utils/kinematics_utils.py @@ -0,0 +1,296 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Kinematics Utilities + +Standalone utility functions for inverse kinematics operations. +These functions are stateless and can be used by any IK solver implementation. 
+ +## Functions + +- damped_pseudoinverse(): Compute damped pseudoinverse of Jacobian +- check_singularity(): Check if Jacobian is near singularity +- get_manipulability(): Compute manipulability measure +- compute_pose_error(): Compute position/orientation error between poses +- compute_error_twist(): Compute error twist for differential IK +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import numpy as np + +if TYPE_CHECKING: + from numpy.typing import NDArray + + from dimos.manipulation.planning.spec import Jacobian + + +def damped_pseudoinverse( + J: Jacobian, + damping: float = 0.01, +) -> NDArray[np.float64]: + """Compute damped pseudoinverse of Jacobian. + + Uses the damped least-squares formula: + J_pinv = J^T @ (J @ J^T + λ²I)^(-1) + + This avoids numerical issues near singularities where J @ J^T becomes + ill-conditioned. The damping factor λ controls the trade-off between + accuracy and stability. + + Args: + J: 6 x n Jacobian matrix (rows: [vx, vy, vz, wx, wy, wz]) + damping: Damping factor λ (higher = more regularization, more stable) + + Returns: + n x 6 pseudoinverse matrix + + Example: + J = world.get_jacobian(ctx, robot_id) + J_pinv = damped_pseudoinverse(J, damping=0.01) + q_dot = J_pinv @ twist + """ + JJT = J @ J.T + I = np.eye(JJT.shape[0]) + result: NDArray[np.float64] = J.T @ np.linalg.inv(JJT + damping**2 * I) + return result + + +def check_singularity( + J: Jacobian, + threshold: float = 0.01, +) -> bool: + """Check if Jacobian is near singularity. + + Computes the manipulability measure (sqrt(det(J @ J^T))) and checks + if it's below the threshold. Near singularities, the manipulability + approaches zero. 
+ + Args: + J: 6 x n Jacobian matrix + threshold: Manipulability threshold (default 0.01) + + Returns: + True if near singularity (manipulability < threshold) + + Example: + J = world.get_jacobian(ctx, robot_id) + if check_singularity(J, threshold=0.001): + logger.warning("Near singularity, using damped IK") + """ + return get_manipulability(J) < threshold + + +def get_manipulability(J: Jacobian) -> float: + """Compute manipulability measure. + + The manipulability measure w = sqrt(det(J @ J^T)) represents the + volume of the velocity ellipsoid - how well the robot can move + in all directions. + + Values: + - Higher = better manipulability + - Zero = singularity + - Lower = near singularity + + Args: + J: 6 x n Jacobian matrix + + Returns: + Manipulability measure (non-negative) + + Example: + J = world.get_jacobian(ctx, robot_id) + w = get_manipulability(J) + print(f"Manipulability: {w:.4f}") + """ + JJT = J @ J.T + det = np.linalg.det(JJT) + return float(np.sqrt(max(0, det))) + + +def compute_pose_error( + current_pose: NDArray[np.float64], + target_pose: NDArray[np.float64], +) -> tuple[float, float]: + """Compute position and orientation error between two poses. + + Position error is the Euclidean distance between origins. + Orientation error is the angle of the rotation matrix relating the two frames. 
+ + Args: + current_pose: Current 4x4 homogeneous transform + target_pose: Target 4x4 homogeneous transform + + Returns: + Tuple of (position_error, orientation_error) in meters and radians + + Example: + current = world.get_ee_pose(ctx, robot_id) + pos_err, ori_err = compute_pose_error(current, target) + converged = pos_err < 0.001 and ori_err < 0.01 + """ + # Position error (Euclidean distance) + position_error = float(np.linalg.norm(target_pose[:3, 3] - current_pose[:3, 3])) + + # Orientation error using rotation matrices + R_current = current_pose[:3, :3] + R_target = target_pose[:3, :3] + R_error = R_target @ R_current.T + + # Convert to axis-angle for scalar error + trace = np.trace(R_error) + # Clamp to valid range for arccos (numerical stability) + cos_angle = (trace - 1) / 2 + cos_angle = np.clip(cos_angle, -1, 1) + orientation_error = float(np.arccos(cos_angle)) + + return position_error, orientation_error + + +def compute_error_twist( + current_pose: NDArray[np.float64], + target_pose: NDArray[np.float64], + gain: float = 1.0, +) -> NDArray[np.float64]: + """Compute error twist for differential IK. + + Computes the 6D twist (linear + angular velocity) that would move + from the current pose toward the target pose. Used in iterative IK. 
+ + The twist is expressed in the world frame: + twist = [vx, vy, vz, wx, wy, wz] + + Args: + current_pose: Current 4x4 homogeneous transform + target_pose: Target 4x4 homogeneous transform + gain: Proportional gain (higher = faster convergence, less stable) + + Returns: + 6D twist vector [vx, vy, vz, wx, wy, wz] + + Example: + twist = compute_error_twist(current_pose, target_pose, gain=0.5) + q_dot = damped_pseudoinverse(J) @ twist + q_new = q + q_dot * dt + """ + # Position error (linear velocity direction) + pos_error = target_pose[:3, 3] - current_pose[:3, 3] + + # Orientation error -> angular velocity + R_current = current_pose[:3, :3] + R_target = target_pose[:3, :3] + R_error = R_target @ R_current.T + + # Extract axis-angle from rotation matrix + # Using Rodrigues' formula inverse + trace = np.trace(R_error) + cos_angle = (trace - 1) / 2 + cos_angle = np.clip(cos_angle, -1, 1) + angle = np.arccos(cos_angle) + + if angle < 1e-6: + # No rotation needed + angular_error = np.zeros(3) + elif angle > np.pi - 1e-6: + # 180 degree rotation - extract axis from diagonal + diag = np.diag(R_error) + idx = np.argmax(diag) + axis = np.zeros(3) + axis[idx] = 1.0 + # Refine axis + axis = axis * np.sqrt((diag[idx] + 1) / 2) + angular_error = axis * angle + else: + # General case: axis from skew-symmetric part + sin_angle = np.sin(angle) + axis = np.array( + [ + R_error[2, 1] - R_error[1, 2], + R_error[0, 2] - R_error[2, 0], + R_error[1, 0] - R_error[0, 1], + ] + ) / (2 * sin_angle) + angular_error = axis * angle + + # Combine into twist with gain + twist: NDArray[np.float64] = np.concatenate([pos_error * gain, angular_error * gain]) + + return twist + + +def skew_symmetric(v: NDArray[np.float64]) -> NDArray[np.float64]: + """Create skew-symmetric matrix from 3D vector. 
+ + The skew-symmetric matrix [v]_x satisfies: [v]_x @ w = v cross w + + Args: + v: 3D vector + + Returns: + 3x3 skew-symmetric matrix + """ + return np.array( + [ + [0, -v[2], v[1]], + [v[2], 0, -v[0]], + [-v[1], v[0], 0], + ] + ) + + +def rotation_matrix_to_axis_angle(R: NDArray[np.float64]) -> tuple[NDArray[np.float64], float]: + """Convert rotation matrix to axis-angle representation. + + Args: + R: 3x3 rotation matrix + + Returns: + Tuple of (axis, angle) where axis is unit vector and angle is radians + """ + trace = np.trace(R) + cos_angle = (trace - 1) / 2 + cos_angle = np.clip(cos_angle, -1, 1) + angle = float(np.arccos(cos_angle)) + + if angle < 1e-6: + # Identity rotation + return np.array([1.0, 0.0, 0.0]), 0.0 + + if angle > np.pi - 1e-6: + # 180 degree rotation + diag = np.diag(R) + idx = int(np.argmax(diag)) + axis = np.zeros(3) + axis[idx] = np.sqrt((diag[idx] + 1) / 2) + if axis[idx] > 1e-12: + for j in range(3): + if j != idx: + axis[j] = R[idx, j] / (2 * axis[idx]) + return axis, angle + + # General case + sin_angle = np.sin(angle) + axis = np.array( + [ + R[2, 1] - R[1, 2], + R[0, 2] - R[2, 0], + R[1, 0] - R[0, 1], + ] + ) / (2 * sin_angle) + + return axis, angle diff --git a/dimos/manipulation/planning/utils/mesh_utils.py b/dimos/manipulation/planning/utils/mesh_utils.py new file mode 100644 index 0000000000..92fcfc6eca --- /dev/null +++ b/dimos/manipulation/planning/utils/mesh_utils.py @@ -0,0 +1,354 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Mesh Utilities for Drake + +Provides utilities for preparing URDF files for use with Drake: +- Xacro processing +- Mesh format conversion (DAE/STL to OBJ) +- Package path resolution + +Example: + urdf_path = prepare_urdf_for_drake( + urdf_path="/path/to/robot.xacro", + package_paths={"robot_description": "/path/to/robot_description"}, + xacro_args={"use_sim": "true"}, + convert_meshes=True, + ) +""" + +from __future__ import annotations + +import hashlib +from pathlib import Path +import re +import shutil +import tempfile +from typing import TYPE_CHECKING + +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + import numpy as np + from numpy.typing import NDArray + +logger = setup_logger() + +# Cache directory for processed URDFs +_CACHE_DIR = Path(tempfile.gettempdir()) / "dimos_urdf_cache" + + +def prepare_urdf_for_drake( + urdf_path: Path | str, + package_paths: dict[str, Path] | None = None, + xacro_args: dict[str, str] | None = None, + convert_meshes: bool = False, +) -> str: + """Prepare a URDF/xacro file for use with Drake. + + This function: + 1. Processes xacro files if needed + 2. Resolves package:// URIs in mesh paths + 3. 
Optionally converts DAE/STL meshes to OBJ format + + Args: + urdf_path: Path to URDF or xacro file + package_paths: Dict mapping package names to filesystem paths + xacro_args: Arguments to pass to xacro processor + convert_meshes: Convert DAE/STL meshes to OBJ for Drake compatibility + + Returns: + Path to the prepared URDF file (may be cached) + """ + urdf_path = Path(urdf_path) + package_paths = package_paths or {} + xacro_args = xacro_args or {} + + # Generate cache key + cache_key = _generate_cache_key(urdf_path, package_paths, xacro_args, convert_meshes) + cache_path = _CACHE_DIR / cache_key / urdf_path.stem + cache_path.mkdir(parents=True, exist_ok=True) + cached_urdf = cache_path / f"{urdf_path.stem}.urdf" + + # Check cache + if cached_urdf.exists(): + logger.debug(f"Using cached URDF: {cached_urdf}") + return str(cached_urdf) + + # Process xacro if needed + if urdf_path.suffix in (".xacro", ".urdf.xacro"): + urdf_content = _process_xacro(urdf_path, package_paths, xacro_args) + else: + urdf_content = urdf_path.read_text() + + # Strip transmission blocks (Drake doesn't need them, and they can cause issues) + urdf_content = _strip_transmission_blocks(urdf_content) + + # Resolve package:// URIs + urdf_content = _resolve_package_uris(urdf_content, package_paths, cache_path) + + # Convert meshes if requested + if convert_meshes: + urdf_content = _convert_meshes(urdf_content, cache_path) + + # Write processed URDF + cached_urdf.write_text(urdf_content) + logger.info(f"Prepared URDF cached at: {cached_urdf}") + + return str(cached_urdf) + + +def _generate_cache_key( + urdf_path: Path, + package_paths: dict[str, Path], + xacro_args: dict[str, str], + convert_meshes: bool, +) -> str: + """Generate a cache key for the URDF configuration. + + Includes a version number to invalidate cache when processing logic changes. 
+ """ + # Include file modification time + mtime = urdf_path.stat().st_mtime if urdf_path.exists() else 0 + + # Version number to invalidate cache when processing logic changes + # Increment this when adding new processing steps (e.g., stripping transmission blocks) + processing_version = "v2" + + key_data = f"{processing_version}:{urdf_path}:{mtime}:{sorted(package_paths.items())}:{sorted(xacro_args.items())}:{convert_meshes}" + return hashlib.md5(key_data.encode()).hexdigest()[:16] + + +def _process_xacro( + xacro_path: Path, + package_paths: dict[str, Path], + xacro_args: dict[str, str], +) -> str: + """Process xacro file to URDF.""" + try: + import xacro # type: ignore[import-not-found,import-untyped] + except ImportError: + raise ImportError( + "xacro is required for processing .xacro files. Install with: pip install xacro" + ) + + # Create a custom substitution_args_context that resolves $(find pkg) to our paths + # This avoids requiring ROS package discovery + from xacro import substitution_args + + # Store original function + original_find = substitution_args._find + + def custom_find(resolved: str, a: str, args: list[str], context: dict[str, str]) -> str: + """Custom $(find pkg) handler that uses our package_paths.""" + pkg_name = args[0] if args else "" + if pkg_name in package_paths: + pkg_path = str(Path(package_paths[pkg_name]).resolve()) + return resolved.replace(f"$({a})", pkg_path) + # Fall back to original behavior + return str(original_find(resolved, a, args, context)) + + # Monkey-patch the find function temporarily + substitution_args._find = custom_find + + try: + # Process xacro with our mappings + doc = xacro.process_file( + str(xacro_path), + mappings=xacro_args, + ) + return str(doc.toprettyxml(indent=" ")) + finally: + # Restore original function + substitution_args._find = original_find + + +def _strip_transmission_blocks(urdf_content: str) -> str: + """Remove transmission blocks from URDF content. 
+ + Drake doesn't need transmission blocks (they're for Gazebo/ROS control), + and they can cause parsing errors if they contain malformed actuator names. + + Args: + urdf_content: URDF XML content as string + + Returns: + URDF content with transmission blocks removed + """ + # Pattern to match <transmission ...>...</transmission> blocks and self-closing <transmission .../> + # Uses non-greedy matching and handles nested tags + pattern = r"<transmission[^>]*(?:/>|>.*?</transmission>)" + + # Remove transmission blocks (with flags for multiline and dotall) + result = re.sub(pattern, "", urdf_content, flags=re.DOTALL | re.MULTILINE) + + # Also remove any standalone <gazebo> blocks that might reference transmissions + # (some URDFs have gazebo plugins that reference transmissions) + gazebo_pattern = r"<gazebo>.*?<plugin[^>]*gazebo_ros_control[^>]*>.*?</plugin>.*?</gazebo>" + result = re.sub(gazebo_pattern, "", result, flags=re.DOTALL | re.MULTILINE) + + return result + + +def _resolve_package_uris( + urdf_content: str, + package_paths: dict[str, Path], + output_dir: Path, +) -> str: + """Resolve package:// URIs to filesystem paths.""" + # Pattern for package:// URIs (handles both single and double quotes) + # Note: Use triple quotes so \s is correctly interpreted as whitespace, not literal 's' + pattern = r"""package://([^/]+)/(.+?)(["'<>\s])""" + + def replace_uri(match: re.Match[str]) -> str: + pkg_name = match.group(1) + rel_path = match.group(2) + suffix = match.group(3) + + if pkg_name in package_paths: + # Ensure absolute path for proper resolution + pkg_path = Path(package_paths[pkg_name]).resolve() + full_path = pkg_path / rel_path + if full_path.exists(): + return f"{full_path}{suffix}" + else: + logger.warning(f"File not found: {full_path}") + + # Return original if not found + return match.group(0) + + return re.sub(pattern, replace_uri, urdf_content) + + +def _convert_meshes(urdf_content: str, output_dir: Path) -> str: + """Convert DAE/STL meshes to OBJ format for Drake compatibility.""" + try: + import trimesh + except ImportError: + logger.warning("trimesh not installed, skipping 
mesh conversion") + return urdf_content + + mesh_dir = output_dir / "meshes" + mesh_dir.mkdir(exist_ok=True) + + # Find mesh file references + pattern = r'filename="([^"]+\.(dae|stl|DAE|STL))"' + + converted: dict[str, str] = {} + + def convert_mesh(match: re.Match[str]) -> str: + original_path = match.group(1) + + if original_path in converted: + return f'filename="{converted[original_path]}"' + + try: + # Load mesh + mesh = trimesh.load(original_path, force="mesh") + + # Generate output path + mesh_name = Path(original_path).stem + obj_path = mesh_dir / f"{mesh_name}.obj" + + # Export as OBJ (trimesh.export returns None, ignore) + mesh.export(str(obj_path), file_type="obj") # type: ignore[no-untyped-call] + logger.debug(f"Converted mesh: {original_path} -> {obj_path}") + + converted[original_path] = str(obj_path) + return f'filename="{obj_path}"' + + except Exception as e: + logger.warning(f"Failed to convert mesh {original_path}: {e}") + return match.group(0) + + return re.sub(pattern, convert_mesh, urdf_content) + + +def pointcloud_to_convex_hull_obj( + points: NDArray[np.float64], + output_path: Path | str | None = None, + *, + voxel_size: float = 0.005, + min_points: int = 4, +) -> str | None: + """Compute convex hull from point cloud and save as OBJ file. + + Points are centered at origin so the mesh is in local frame. + The caller sets the obstacle pose to place it in the world. + + Args: + points: Nx3 numpy array of 3D points (world frame) + output_path: Where to save OBJ. If None, uses a temp file. 
+ voxel_size: Downsample voxel size in meters (0 to skip) + min_points: Minimum points required for convex hull + + Returns: + Path to OBJ file, or None if hull computation fails + """ + import numpy as np + + if points.shape[0] < min_points: + logger.warning(f"Too few points ({points.shape[0]}) for convex hull") + return None + + try: + import open3d as o3d # type: ignore[import-untyped] + except ImportError: + logger.warning("open3d not installed, cannot compute convex hull") + return None + + # Center at origin so mesh is in local frame + centered = points - points.mean(axis=0) + + pcd = o3d.geometry.PointCloud() + pcd.points = o3d.utility.Vector3dVector(centered.astype(np.float64)) + + if voxel_size > 0 and len(pcd.points) > 100: + pcd = pcd.voxel_down_sample(voxel_size) + + if len(pcd.points) < min_points: + logger.warning(f"Too few points after downsample ({len(pcd.points)})") + return None + + try: + hull, _ = pcd.compute_convex_hull() + except Exception as e: + logger.warning(f"Convex hull computation failed: {e}") + return None + + if output_path is None: + hull_dir = _CACHE_DIR / "convex_hulls" + hull_dir.mkdir(parents=True, exist_ok=True) + output_path = hull_dir / f"hull_{id(points):x}.obj" + + output_path = Path(output_path) + output_path.parent.mkdir(parents=True, exist_ok=True) + + try: + o3d.io.write_triangle_mesh(str(output_path), hull) + logger.debug( + f"Convex hull: {len(hull.vertices)} verts, {len(hull.triangles)} faces -> {output_path}" + ) + return str(output_path) + except Exception as e: + logger.warning(f"Failed to write convex hull OBJ: {e}") + return None + + +def clear_cache() -> None: + """Clear the URDF cache directory.""" + if _CACHE_DIR.exists(): + shutil.rmtree(_CACHE_DIR) + logger.info(f"Cleared URDF cache: {_CACHE_DIR}") diff --git a/dimos/manipulation/planning/utils/path_utils.py b/dimos/manipulation/planning/utils/path_utils.py new file mode 100644 index 0000000000..fbf8af4032 --- /dev/null +++ 
b/dimos/manipulation/planning/utils/path_utils.py @@ -0,0 +1,299 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Path Utilities + +Standalone utility functions for path manipulation and post-processing. +These functions are stateless and can be used by any planner implementation. + +## Functions + +- interpolate_path(): Interpolate path to uniform resolution +- interpolate_segment(): Interpolate between two configurations +- simplify_path(): Remove unnecessary waypoints (requires WorldSpec) +- compute_path_length(): Compute total path length in joint space +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import numpy as np + +from dimos.msgs.sensor_msgs import JointState + +if TYPE_CHECKING: + from numpy.typing import NDArray + + from dimos.manipulation.planning.spec import JointPath, WorldRobotID, WorldSpec + + +def interpolate_path( + path: JointPath, + resolution: float = 0.05, +) -> JointPath: + """Interpolate path to have uniform resolution. + + Adds intermediate waypoints so that the maximum joint-space distance + between consecutive waypoints is at most `resolution`. 
+ + Args: + path: Original path (list of JointState waypoints) + resolution: Maximum distance between waypoints (radians) + + Returns: + Interpolated path with more waypoints + + Example: + # After planning, interpolate for smoother execution + raw_path = planner.plan_joint_path(world, robot_id, start, goal).path + smooth_path = interpolate_path(raw_path, resolution=0.02) + """ + if len(path) <= 1: + return list(path) + + interpolated: list[JointState] = [path[0]] + joint_names = path[0].name + + for i in range(len(path) - 1): + q_start = np.array(path[i].position, dtype=np.float64) + q_end = np.array(path[i + 1].position, dtype=np.float64) + + diff = q_end - q_start + max_diff = float(np.max(np.abs(diff))) + + if max_diff <= resolution: + interpolated.append(path[i + 1]) + else: + num_steps = int(np.ceil(max_diff / resolution)) + for step in range(1, num_steps + 1): + alpha = step / num_steps + q_interp = q_start + alpha * diff + interpolated.append(JointState(name=joint_names, position=q_interp.tolist())) + + return interpolated + + +def interpolate_segment( + start: JointState, + end: JointState, + step_size: float, +) -> JointPath: + """Interpolate between two configurations. + + Returns a list of configurations from start to end (inclusive) + with at most `step_size` distance between consecutive points. 
+ + Args: + start: Start joint configuration + end: End joint configuration + step_size: Maximum step size (radians) + + Returns: + List of interpolated JointState waypoints [start, ..., end] + + Example: + # Check collision along a segment + segment = interpolate_segment(start_state, end_state, step_size=0.02) + for state in segment: + if not world.check_config_collision_free(robot_id, state): + return False + """ + q_start = np.array(start.position, dtype=np.float64) + q_end = np.array(end.position, dtype=np.float64) + joint_names = start.name + + diff = q_end - q_start + distance = float(np.linalg.norm(diff)) + + if distance <= step_size: + return [start, end] + + num_steps = int(np.ceil(distance / step_size)) + segment: JointPath = [] + + for i in range(num_steps + 1): + alpha = i / num_steps + q_interp = q_start + alpha * diff + segment.append(JointState(name=joint_names, position=q_interp.tolist())) + + return segment + + +def simplify_path( + world: WorldSpec, + robot_id: WorldRobotID, + path: JointPath, + max_iterations: int = 100, + collision_step_size: float = 0.02, +) -> JointPath: + """Simplify path by removing unnecessary waypoints. + + Uses random shortcutting: randomly select two points and check if + the direct connection is collision-free. If so, remove intermediate + waypoints. 
+ + Args: + world: World for collision checking + robot_id: Which robot + path: Original path (list of JointState waypoints) + max_iterations: Maximum shortcutting attempts + collision_step_size: Step size for collision checking along shortcuts + + Returns: + Simplified path with fewer waypoints + + Example: + raw_path = planner.plan_joint_path(world, robot_id, start, goal).path + simplified = simplify_path(world, robot_id, raw_path) + """ + if len(path) <= 2: + return list(path) + + simplified = list(path) + + for _ in range(max_iterations): + if len(simplified) <= 2: + break + + # Pick two random indices (at least 2 apart) + i = np.random.randint(0, len(simplified) - 2) + j = np.random.randint(i + 2, len(simplified)) + + # Check if direct connection is valid using context-free API + if world.check_edge_collision_free( + robot_id, simplified[i], simplified[j], collision_step_size + ): + # Remove intermediate waypoints + simplified = simplified[: i + 1] + simplified[j:] + + return simplified + + +def compute_path_length(path: JointPath) -> float: + """Compute total path length in joint space. + + Sums the Euclidean distances between consecutive waypoints. + + Args: + path: Path to measure (list of JointState waypoints) + + Returns: + Total length in radians + + Example: + length = compute_path_length(path) + print(f"Path length: {length:.2f} rad") + """ + if len(path) <= 1: + return 0.0 + + length = 0.0 + for i in range(len(path) - 1): + q_curr = np.array(path[i].position, dtype=np.float64) + q_next = np.array(path[i + 1].position, dtype=np.float64) + length += float(np.linalg.norm(q_next - q_curr)) + + return length + + +def is_path_within_limits( + path: JointPath, + lower_limits: NDArray[np.float64], + upper_limits: NDArray[np.float64], +) -> bool: + """Check if all waypoints in path are within joint limits. 
+ + Args: + path: Path to check (list of JointState waypoints) + lower_limits: Lower joint limits (radians) + upper_limits: Upper joint limits (radians) + + Returns: + True if all waypoints are within limits + """ + for state in path: + q = np.array(state.position, dtype=np.float64) + if np.any(q < lower_limits) or np.any(q > upper_limits): + return False + return True + + +def clip_path_to_limits( + path: JointPath, + lower_limits: NDArray[np.float64], + upper_limits: NDArray[np.float64], +) -> JointPath: + """Clip all waypoints in path to joint limits. + + Args: + path: Path to clip (list of JointState waypoints) + lower_limits: Lower joint limits (radians) + upper_limits: Upper joint limits (radians) + + Returns: + Path with all waypoints clipped to limits + """ + clipped: list[JointState] = [] + for state in path: + q = np.array(state.position, dtype=np.float64) + q_clipped = np.clip(q, lower_limits, upper_limits) + clipped.append(JointState(name=state.name, position=q_clipped.tolist())) + return clipped + + +def reverse_path(path: JointPath) -> JointPath: + """Reverse a path (for returning to start, etc.). + + Args: + path: Path to reverse + + Returns: + Reversed path + """ + return list(reversed(path)) + + +def concatenate_paths( + *paths: JointPath, + remove_duplicates: bool = True, +) -> JointPath: + """Concatenate multiple paths into one. 
+ + Args: + *paths: Paths to concatenate (each is a list of JointState waypoints) + remove_duplicates: If True, remove duplicate waypoints at junctions + + Returns: + Single concatenated path + """ + result: list[JointState] = [] + + for path in paths: + if not path: + continue + + if remove_duplicates and result: + # Check if last point matches first point (tight tolerance for joint space) + q_last = np.array(result[-1].position, dtype=np.float64) + q_first = np.array(path[0].position, dtype=np.float64) + if np.allclose(q_last, q_first, atol=1e-6, rtol=0): + result.extend(path[1:]) + else: + result.extend(path) + else: + result.extend(path) + + return result diff --git a/dimos/manipulation/planning/world/__init__.py b/dimos/manipulation/planning/world/__init__.py new file mode 100644 index 0000000000..8ddef7fdff --- /dev/null +++ b/dimos/manipulation/planning/world/__init__.py @@ -0,0 +1,27 @@ +# Copyright 2025 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +World Module + +Contains world implementations that own the physics/collision backend. 
+ +## Implementations + +- DrakeWorld: Uses Drake MultibodyPlant + SceneGraph +""" + +from dimos.manipulation.planning.world.drake_world import DrakeWorld + +__all__ = ["DrakeWorld"] diff --git a/dimos/manipulation/planning/world/drake_world.py b/dimos/manipulation/planning/world/drake_world.py new file mode 100644 index 0000000000..2ab996f410 --- /dev/null +++ b/dimos/manipulation/planning/world/drake_world.py @@ -0,0 +1,1047 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Drake World Implementation - WorldSpec using Drake's MultibodyPlant and SceneGraph.""" + +from __future__ import annotations + +from concurrent.futures import ThreadPoolExecutor +from contextlib import contextmanager +from dataclasses import dataclass, field +from pathlib import Path +from threading import RLock, current_thread +from typing import TYPE_CHECKING, Any + +import numpy as np + +from dimos.manipulation.planning.spec import ( + JointPath, + Obstacle, + ObstacleType, + RobotModelConfig, + WorldRobotID, + WorldSpec, +) +from dimos.manipulation.planning.utils.mesh_utils import prepare_urdf_for_drake +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from collections.abc import Generator + + from numpy.typing import NDArray + +from dimos.msgs.geometry_msgs import PoseStamped, Transform +from dimos.msgs.sensor_msgs import JointState + +try: + from pydrake.geometry import ( # type: ignore[import-not-found] + AddContactMaterial, + Box, + CollisionFilterDeclaration, + Convex, + Cylinder, + GeometryInstance, + GeometrySet, + IllustrationProperties, + MakePhongIllustrationProperties, + Meshcat, + MeshcatVisualizer, + MeshcatVisualizerParams, + ProximityProperties, + Rgba, + Role, + RoleAssign, + SceneGraph, + Sphere, + ) + from pydrake.math import RigidTransform # type: ignore[import-not-found] + from pydrake.multibody.parsing import Parser # type: ignore[import-not-found] + from pydrake.multibody.plant import ( # type: ignore[import-not-found] + AddMultibodyPlantSceneGraph, + CoulombFriction, + MultibodyPlant, + ) + from pydrake.multibody.tree import JacobianWrtVariable # type: ignore[import-not-found] + from pydrake.systems.framework import Context, DiagramBuilder # type: ignore[import-not-found] + + DRAKE_AVAILABLE = True +except ImportError: + DRAKE_AVAILABLE = False + +logger = setup_logger() + + +@dataclass +class _RobotData: + """Internal data for tracking a robot in the world.""" + + robot_id: WorldRobotID + config: 
RobotModelConfig + model_instance: Any # ModelInstanceIndex + joint_indices: list[int] # Indices into plant's position vector + ee_frame: Any # BodyFrame for end-effector + base_frame: Any # BodyFrame for base + preview_model_instance: Any = None # ModelInstanceIndex for preview (yellow) robot + preview_joint_indices: list[int] = field(default_factory=list) + + +@dataclass +class _ObstacleData: + """Internal data for tracking an obstacle in the world.""" + + obstacle_id: str + obstacle: Obstacle + geometry_id: Any # GeometryId + source_id: Any # SourceId + + +class _ThreadSafeMeshcat: + """Wraps Drake Meshcat so all calls run on the creator thread. + + Drake throws SystemExit from non-creator threads for every Meshcat operation. + This class creates a single-thread executor, constructs Meshcat on it, + and proxies all calls through it. + """ + + def __init__(self) -> None: + self._executor = ThreadPoolExecutor(max_workers=1, thread_name_prefix="meshcat") + self._thread = self._executor.submit(current_thread).result() + self._inner: Meshcat = self._executor.submit(Meshcat).result() + + def _call(self, fn: Any, *args: Any, **kwargs: Any) -> Any: + if current_thread() is self._thread: + return fn(*args, **kwargs) + return self._executor.submit(fn, *args, **kwargs).result() + + # --- Meshcat proxies --- + + def SetObject(self, *args: Any, **kwargs: Any) -> Any: + return self._call(self._inner.SetObject, *args, **kwargs) + + def SetTransform(self, *args: Any, **kwargs: Any) -> Any: + return self._call(self._inner.SetTransform, *args, **kwargs) + + def SetProperty(self, *args: Any, **kwargs: Any) -> Any: + return self._call(self._inner.SetProperty, *args, **kwargs) + + def Delete(self, *args: Any, **kwargs: Any) -> Any: + return self._call(self._inner.Delete, *args, **kwargs) + + def web_url(self) -> str: + result: str = self._call(self._inner.web_url) + return result + + def forced_publish(self, visualizer: Any, viz_ctx: Any) -> None: + """Run 
MeshcatVisualizer.ForcedPublish on the creator thread.""" + self._call(visualizer.ForcedPublish, viz_ctx) + + def close(self) -> None: + self._executor.shutdown(wait=False) + + +class DrakeWorld(WorldSpec): + """Drake implementation of WorldSpec with MultibodyPlant, SceneGraph, optional Meshcat.""" + + def __init__(self, time_step: float = 0.0, enable_viz: bool = False): + if not DRAKE_AVAILABLE: + raise ImportError("Drake is not installed. Install with: pip install drake") + + self._time_step = time_step + self._enable_viz = enable_viz + self._lock = RLock() + + # Build Drake diagram + self._builder = DiagramBuilder() + self._plant: MultibodyPlant + self._scene_graph: SceneGraph + self._plant, self._scene_graph = AddMultibodyPlantSceneGraph( + self._builder, time_step=time_step + ) + self._parser = Parser(self._plant) + # Enable auto-renaming to avoid conflicts when adding multiple robots + # with the same URDF (e.g., 4 XArm6 arms all have model name "UF_ROBOT") + self._parser.SetAutoRenaming(True) + + # Visualization — wrapped to enforce Drake's thread affinity + self._meshcat: _ThreadSafeMeshcat | None = None + self._meshcat_visualizer: MeshcatVisualizer | None = None + if enable_viz: + self._meshcat = _ThreadSafeMeshcat() + + # Create model instance for obstacles + self._obstacles_model_instance = self._plant.AddModelInstance("obstacles") + + # Tracking data + self._robots: dict[WorldRobotID, _RobotData] = {} + self._obstacles: dict[str, _ObstacleData] = {} + self._robot_counter = 0 + self._obstacle_counter = 0 + + # Built diagram and contexts (created after finalize) + self._diagram: Any = None + self._live_context: Context | None = None + self._plant_context: Context | None = None + self._scene_graph_context: Context | None = None + self._finalized = False + + # Obstacle source for dynamic obstacles + self._obstacle_source_id: Any = None + + def add_robot(self, config: RobotModelConfig) -> WorldRobotID: + """Add a robot to the world. 
Returns robot_id.""" + if self._finalized: + raise RuntimeError("Cannot add robot after world is finalized") + + with self._lock: + self._robot_counter += 1 + robot_id = f"robot_{self._robot_counter}" + + model_instance = self._load_urdf(config) + self._weld_base_if_needed(config, model_instance) + self._validate_joints(config, model_instance) + + ee_frame = self._plant.GetBodyByName( + config.end_effector_link, model_instance + ).body_frame() + base_frame = self._plant.GetBodyByName(config.base_link, model_instance).body_frame() + + # Load a second copy of the URDF as the preview (yellow ghost) robot + preview_model_instance = None + if self._enable_viz: + preview_model_instance = self._load_urdf(config) + self._weld_base_if_needed(config, preview_model_instance) + + self._robots[robot_id] = _RobotData( + robot_id=robot_id, + config=config, + model_instance=model_instance, + joint_indices=[], + ee_frame=ee_frame, + base_frame=base_frame, + preview_model_instance=preview_model_instance, + ) + + logger.info(f"Added robot '{robot_id}' ({config.name})") + return robot_id + + def _load_urdf(self, config: RobotModelConfig) -> Any: + """Load URDF/xacro and return model instance.""" + original_path = config.urdf_path.resolve() + if not original_path.exists(): + raise FileNotFoundError(f"URDF/xacro not found: {original_path}") + + urdf_path = prepare_urdf_for_drake( + urdf_path=original_path, + package_paths=config.package_paths, + xacro_args=config.xacro_args, + convert_meshes=config.auto_convert_meshes, + ) + urdf_path_obj = Path(urdf_path) + logger.info(f"Using prepared URDF: {urdf_path_obj}") + + # Register package paths + if config.package_paths: + for pkg_name, pkg_path in config.package_paths.items(): + self._parser.package_map().Add(pkg_name, Path(pkg_path)) + else: + self._parser.package_map().Add(f"{config.name}_description", urdf_path_obj.parent) + + model_instances = self._parser.AddModels(urdf_path_obj) + if not model_instances: + raise ValueError(f"Failed to 
parse URDF: {urdf_path}") + return model_instances[0] + + def _weld_base_if_needed(self, config: RobotModelConfig, model_instance: Any) -> None: + """Weld robot base to world if not already welded in URDF.""" + base_body = self._plant.GetBodyByName(config.base_link, model_instance) + + # Check if any joint already connects world to base_link + for joint_index in self._plant.GetJointIndices(model_instance): + joint = self._plant.get_joint(joint_index) + if ( + joint.parent_body().name() == "world" + and joint.child_body().name() == config.base_link + ): + logger.info( + f"URDF already has joint '{joint.name()}' welding " + f"world→{config.base_link}, skipping weld" + ) + return + + # Weld base to world + base_transform = self._pose_to_rigid_transform(config.base_pose) + self._plant.WeldFrames( + self._plant.world_frame(), + base_body.body_frame(), + base_transform, + ) + + def _validate_joints(self, config: RobotModelConfig, model_instance: Any) -> None: + """Validate that all configured joints exist in URDF.""" + for joint_name in config.joint_names: + try: + self._plant.GetJointByName(joint_name, model_instance) + except RuntimeError: + raise ValueError(f"Joint '{joint_name}' not found in URDF") + + def get_robot_ids(self) -> list[WorldRobotID]: + """Get all robot IDs in the world.""" + return list(self._robots.keys()) + + def get_robot_config(self, robot_id: WorldRobotID) -> RobotModelConfig: + """Get robot configuration by ID.""" + if robot_id not in self._robots: + raise KeyError(f"Robot '{robot_id}' not found") + return self._robots[robot_id].config + + def get_joint_limits( + self, robot_id: WorldRobotID + ) -> tuple[NDArray[np.float64], NDArray[np.float64]]: + """Get joint limits (lower, upper) in radians.""" + if robot_id not in self._robots: + raise KeyError(f"Robot '{robot_id}' not found") + + config = self._robots[robot_id].config + + if config.joint_limits_lower is not None and config.joint_limits_upper is not None: + return ( + 
np.array(config.joint_limits_lower), + np.array(config.joint_limits_upper), + ) + + # Default to ±π + n_joints = len(config.joint_names) + return ( + np.full(n_joints, -np.pi), + np.full(n_joints, np.pi), + ) + + # ============= Obstacle Management ============= + + def add_obstacle(self, obstacle: Obstacle) -> str: + """Add an obstacle to the world.""" + with self._lock: + # Use obstacle's name as ID (allows external ID management) + obstacle_id = obstacle.name + + # Check for duplicate in our tracking + if obstacle_id in self._obstacles: + logger.debug(f"Obstacle '{obstacle_id}' already exists, skipping") + return obstacle_id + + try: + if not self._finalized: + geometry_id = self._add_obstacle_to_plant(obstacle, obstacle_id) + self._obstacles[obstacle_id] = _ObstacleData( + obstacle_id=obstacle_id, + obstacle=obstacle, + geometry_id=geometry_id, + source_id=self._plant.get_source_id(), + ) + else: + geometry_id = self._add_obstacle_to_scene_graph(obstacle, obstacle_id) + self._obstacles[obstacle_id] = _ObstacleData( + obstacle_id=obstacle_id, + obstacle=obstacle, + geometry_id=geometry_id, + source_id=self._obstacle_source_id, + ) + + logger.debug(f"Added obstacle '{obstacle_id}': {obstacle.obstacle_type.value}") + except RuntimeError as e: + # Handle case where geometry name already exists in SceneGraph + # (can happen with concurrent access) + if "already been used" in str(e): + logger.debug(f"Obstacle '{obstacle_id}' already in SceneGraph, skipping") + else: + raise + + return obstacle_id + + def _add_obstacle_to_plant(self, obstacle: Obstacle, obstacle_id: str) -> Any: + """Add obstacle to plant (before finalization).""" + shape = self._create_shape(obstacle) + + body = self._plant.AddRigidBody( + obstacle_id, + self._obstacles_model_instance, # type: ignore[arg-type] + ) + + transform = self._pose_to_rigid_transform(obstacle.pose) + geometry_id = self._plant.RegisterCollisionGeometry( + body, + RigidTransform(), + shape, + obstacle_id + "_collision", + 
ProximityProperties(), + ) + + diffuse_color = np.array(obstacle.color) + self._plant.RegisterVisualGeometry( + body, + RigidTransform(), + shape, + obstacle_id + "_visual", + diffuse_color, # type: ignore[arg-type] + ) + + self._plant.WeldFrames( + self._plant.world_frame(), + body.body_frame(), + transform, + ) + + return geometry_id + + def _add_obstacle_to_scene_graph(self, obstacle: Obstacle, obstacle_id: str) -> Any: + """Add obstacle to scene graph (after finalization).""" + if self._obstacle_source_id is None: + raise RuntimeError("Obstacle source not initialized") + + shape = self._create_shape(obstacle) + transform = self._pose_to_rigid_transform(obstacle.pose) + # MakePhongIllustrationProperties expects numpy array, not Rgba + rgba_array = np.array(obstacle.color, dtype=np.float64) + + # Create proximity properties with contact material for collision detection + # Without these properties, the geometry is invisible to collision queries + proximity_props = ProximityProperties() + AddContactMaterial( + dissipation=0.0, + point_stiffness=1e6, + friction=CoulombFriction(static_friction=1.0, dynamic_friction=1.0), + properties=proximity_props, + ) + + geometry_instance = GeometryInstance( + X_PG=transform, + shape=shape, + name=obstacle_id, + ) + geometry_instance.set_illustration_properties( + MakePhongIllustrationProperties(rgba_array) # type: ignore[arg-type] + ) + geometry_instance.set_proximity_properties(proximity_props) + + frame_id = self._scene_graph.world_frame_id() + geometry_id = self._scene_graph.RegisterGeometry( + self._obstacle_source_id, + frame_id, + geometry_instance, + ) + + # Also add to Meshcat directly (MeshcatVisualizer doesn't show dynamic geometries) + if self._meshcat is not None: + self._add_obstacle_to_meshcat(obstacle, obstacle_id) + + return geometry_id + + def _add_obstacle_to_meshcat(self, obstacle: Obstacle, obstacle_id: str) -> None: + """Add obstacle visualization directly to Meshcat.""" + if self._meshcat is None: + return 
+ + # Use Drake's geometry types for Meshcat + path = f"obstacles/{obstacle_id}" + transform = self._pose_to_rigid_transform(obstacle.pose) + rgba = Rgba(*obstacle.color) + + # Create Drake shape and add to Meshcat + drake_shape = self._create_shape(obstacle) + self._meshcat.SetObject(path, drake_shape, rgba) + self._meshcat.SetTransform(path, transform) + + def _pose_to_rigid_transform(self, pose: PoseStamped) -> Any: + """Convert PoseStamped to Drake RigidTransform.""" + pose_matrix = Transform( + translation=pose.position, + rotation=pose.orientation, + ).to_matrix() + return RigidTransform(pose_matrix) + + def _create_shape(self, obstacle: Obstacle) -> Any: + """Create Drake shape from obstacle specification.""" + if obstacle.obstacle_type == ObstacleType.BOX: + return Box(*obstacle.dimensions) + elif obstacle.obstacle_type == ObstacleType.SPHERE: + return Sphere(obstacle.dimensions[0]) + elif obstacle.obstacle_type == ObstacleType.CYLINDER: + return Cylinder(obstacle.dimensions[0], obstacle.dimensions[1]) + elif obstacle.obstacle_type == ObstacleType.MESH: + if not obstacle.mesh_path: + raise ValueError("MESH obstacle requires mesh_path") + return Convex(Path(obstacle.mesh_path)) + else: + raise ValueError(f"Unsupported obstacle type: {obstacle.obstacle_type}") + + def remove_obstacle(self, obstacle_id: str) -> bool: + """Remove an obstacle by ID.""" + with self._lock: + if obstacle_id not in self._obstacles: + return False + + obstacle_data = self._obstacles[obstacle_id] + + if self._finalized and self._scene_graph_context is not None: + self._scene_graph.RemoveGeometry( + obstacle_data.source_id, + obstacle_data.geometry_id, + ) + + # Also remove from Meshcat + if self._meshcat is not None: + path = f"obstacles/{obstacle_id}" + self._meshcat.Delete(path) + + del self._obstacles[obstacle_id] + logger.debug(f"Removed obstacle '{obstacle_id}'") + return True + + def update_obstacle_pose(self, obstacle_id: str, pose: PoseStamped) -> bool: + """Update obstacle 
pose.""" + with self._lock: + if obstacle_id not in self._obstacles: + return False + + # Store PoseStamped directly + self._obstacles[obstacle_id].obstacle.pose = pose + + # Update Meshcat visualization + if self._meshcat is not None: + path = f"obstacles/{obstacle_id}" + transform = self._pose_to_rigid_transform(pose) + self._meshcat.SetTransform(path, transform) + + # Note: SceneGraph geometry pose is fixed after registration + # Meshcat is updated for visualization, but collision checking + # uses the original pose. For dynamic obstacles, remove and re-add. + + return True + + def clear_obstacles(self) -> None: + """Remove all obstacles.""" + with self._lock: + obstacle_ids = list(self._obstacles.keys()) + for obs_id in obstacle_ids: + self.remove_obstacle(obs_id) + + # ============= Preview Robot Setup ============= + + def _set_preview_colors(self) -> None: + """Set all preview robot visual geometries to yellow/semi-transparent.""" + source_id: Any = self._plant.get_source_id() + preview_color = Rgba(1.0, 0.8, 0.0, 0.4) + + for robot_data in self._robots.values(): + if robot_data.preview_model_instance is None: + continue + for body_idx in self._plant.GetBodyIndices(robot_data.preview_model_instance): + body = self._plant.get_body(body_idx) + for geom_id in self._plant.GetVisualGeometriesForBody(body): + props = IllustrationProperties() + props.AddProperty("phong", "diffuse", preview_color) + self._scene_graph.AssignRole(source_id, geom_id, props, RoleAssign.kReplace) # type: ignore[call-overload] + + def _remove_preview_collision_roles(self) -> None: + """Remove proximity (collision) role from all preview robot geometries.""" + source_id: Any = self._plant.get_source_id() # SourceId + + for robot_data in self._robots.values(): + if robot_data.preview_model_instance is None: + continue + for body_idx in self._plant.GetBodyIndices(robot_data.preview_model_instance): + body = self._plant.get_body(body_idx) + for geom_id in 
self._plant.GetCollisionGeometriesForBody(body): + self._scene_graph.RemoveRole(source_id, geom_id, Role.kProximity) + + # ============= Lifecycle ============= + + def finalize(self) -> None: + """Finalize world - locks robot topology, enables collision checking.""" + if self._finalized: + logger.warning("World already finalized") + return + + with self._lock: + # Finalize plant + self._plant.Finalize() + + # Compute joint indices for each robot (live + preview) + for robot_id, robot_data in self._robots.items(): + joint_indices: list[int] = [] + for joint_name in robot_data.config.joint_names: + joint = self._plant.GetJointByName(joint_name, robot_data.model_instance) + start_idx = joint.position_start() + num_positions = joint.num_positions() + joint_indices.extend(range(start_idx, start_idx + num_positions)) + robot_data.joint_indices = joint_indices + logger.debug(f"Robot '{robot_id}' joint indices: {joint_indices}") + + # Compute preview joint indices + if robot_data.preview_model_instance is not None: + preview_indices: list[int] = [] + for joint_name in robot_data.config.joint_names: + joint = self._plant.GetJointByName( + joint_name, robot_data.preview_model_instance + ) + start_idx = joint.position_start() + num_positions = joint.num_positions() + preview_indices.extend(range(start_idx, start_idx + num_positions)) + robot_data.preview_joint_indices = preview_indices + logger.debug(f"Robot '{robot_id}' preview joint indices: {preview_indices}") + + # Setup collision filters + self._setup_collision_filters() + + # Remove collision roles from preview robots (visual-only) + self._remove_preview_collision_roles() + + # Set preview robots to yellow/semi-transparent + self._set_preview_colors() + + # Register obstacle source for dynamic obstacles + self._obstacle_source_id = self._scene_graph.RegisterSource("dynamic_obstacles") + + # Add visualization if enabled + if self._meshcat is not None: + params = MeshcatVisualizerParams() + params.role = 
Role.kIllustration + self._meshcat_visualizer = MeshcatVisualizer.AddToBuilder( + self._builder, + self._scene_graph, + self._meshcat._inner, + params, + ) + + # Build diagram + self._diagram = self._builder.Build() + self._live_context = self._diagram.CreateDefaultContext() + + # Get subsystem contexts + self._plant_context = self._diagram.GetMutableSubsystemContext( + self._plant, self._live_context + ) + self._scene_graph_context = self._diagram.GetMutableSubsystemContext( + self._scene_graph, self._live_context + ) + + self._finalized = True + logger.info(f"World finalized with {len(self._robots)} robots") + + # Initial visualization publish (routed to Meshcat thread) + if self._meshcat_visualizer is not None: + self.publish_visualization() + # Hide all preview robots initially + for robot_id in self._robots: + self.hide_preview(robot_id) + + @property + def is_finalized(self) -> bool: + """Check if world is finalized.""" + return self._finalized + + def _setup_collision_filters(self) -> None: + """Filter collisions between adjacent links and user-specified pairs.""" + for robot_data in self._robots.values(): + # Filter parent-child pairs (adjacent links always "collide") + for joint_idx in self._plant.GetJointIndices(robot_data.model_instance): + joint = self._plant.get_joint(joint_idx) + parent, child = joint.parent_body(), joint.child_body() + if parent.index() != self._plant.world_body().index(): + self._exclude_body_pair(parent, child) + + # Filter user-specified pairs (e.g., parallel linkage grippers) + for name1, name2 in robot_data.config.collision_exclusion_pairs: + try: + body1 = self._plant.GetBodyByName(name1, robot_data.model_instance) + body2 = self._plant.GetBodyByName(name2, robot_data.model_instance) + self._exclude_body_pair(body1, body2) + except RuntimeError: + logger.warning(f"Collision exclusion: link not found: {name1} or {name2}") + + logger.info("Collision filters applied") + + def _exclude_body_pair(self, body1: Any, body2: Any) -> 
None: + """Exclude collision between two bodies.""" + geoms1 = self._plant.GetCollisionGeometriesForBody(body1) + geoms2 = self._plant.GetCollisionGeometriesForBody(body2) + if geoms1 and geoms2: + self._scene_graph.collision_filter_manager().Apply( + CollisionFilterDeclaration().ExcludeBetween( + GeometrySet(geoms1), GeometrySet(geoms2) + ) + ) + + # ============= Context Management ============= + + def get_live_context(self) -> Context: + """Get the live context (mirrors current robot state). + + WARNING: Not thread-safe for reads during writes. + Use scratch_context() for planning operations. + """ + if not self._finalized or self._live_context is None: + raise RuntimeError("World must be finalized first") + return self._live_context + + @contextmanager + def scratch_context(self) -> Generator[Context, None, None]: + """Thread-safe context for planning. Copies current robot states for inter-robot collision checking.""" + if not self._finalized: + raise RuntimeError("World must be finalized first") + + ctx = self._diagram.CreateDefaultContext() + + # Copy live robot states so inter-robot collision checking works + with self._lock: + if self._plant_context is not None: + plant_ctx = self._diagram.GetMutableSubsystemContext(self._plant, ctx) + for robot_data in self._robots.values(): + try: + positions = self._plant.GetPositions( + self._plant_context, robot_data.model_instance + ) + self._plant.SetPositions(plant_ctx, robot_data.model_instance, positions) + except RuntimeError: + pass # Robot not yet synced + + yield ctx + + def sync_from_joint_state(self, robot_id: WorldRobotID, joint_state: JointState) -> None: + """Sync live context from driver's joint state message. + + Called by StateMonitor when new JointState arrives. 
+ """ + if not self._finalized or self._plant_context is None: + return # Silently ignore before finalization + + # Extract positions as numpy array for internal use + positions = np.array(joint_state.position, dtype=np.float64) + + with self._lock: + self._set_positions_internal(self._plant_context, robot_id, positions) + + # NOTE: ForcedPublish is intentionally NOT called here. + # Calling ForcedPublish from the LCM callback thread blocks message processing. + # Visualization can be updated via publish_to_meshcat() from non-callback contexts. + + # ============= State Operations (context-based) ============= + + def set_joint_state( + self, ctx: Context, robot_id: WorldRobotID, joint_state: JointState + ) -> None: + """Set robot joint state in given context.""" + if not self._finalized: + raise RuntimeError("World must be finalized first") + + # Extract positions as numpy array for internal use + positions = np.array(joint_state.position, dtype=np.float64) + + # Get plant context from diagram context + plant_ctx = self._diagram.GetMutableSubsystemContext(self._plant, ctx) + self._set_positions_internal(plant_ctx, robot_id, positions) + + def _set_positions_internal( + self, plant_ctx: Context, robot_id: WorldRobotID, positions: NDArray[np.float64] + ) -> None: + """Internal: Set positions in a plant context.""" + if robot_id not in self._robots: + raise KeyError(f"Robot '{robot_id}' not found") + + robot_data = self._robots[robot_id] + full_positions = self._plant.GetPositions(plant_ctx).copy() + + for i, joint_idx in enumerate(robot_data.joint_indices): + full_positions[joint_idx] = positions[i] + + self._plant.SetPositions(plant_ctx, full_positions) + + def get_joint_state(self, ctx: Context, robot_id: WorldRobotID) -> JointState: + """Get robot joint state from given context.""" + if not self._finalized: + raise RuntimeError("World must be finalized first") + + if robot_id not in self._robots: + raise KeyError(f"Robot '{robot_id}' not found") + + robot_data = 
self._robots[robot_id] + plant_ctx = self._diagram.GetSubsystemContext(self._plant, ctx) + full_positions = self._plant.GetPositions(plant_ctx) + + positions = [float(full_positions[idx]) for idx in robot_data.joint_indices] + return JointState(name=robot_data.config.joint_names, position=positions) + + # ============= Collision Checking (context-based) ============= + + def is_collision_free(self, ctx: Context, robot_id: WorldRobotID) -> bool: + """Check if current configuration in context is collision-free.""" + if not self._finalized: + raise RuntimeError("World must be finalized first") + + if robot_id not in self._robots: + raise KeyError(f"Robot '{robot_id}' not found") + + scene_graph_ctx = self._diagram.GetSubsystemContext(self._scene_graph, ctx) + query_object = self._scene_graph.get_query_output_port().Eval(scene_graph_ctx) + + return not query_object.HasCollisions() # type: ignore[attr-defined] + + def get_min_distance(self, ctx: Context, robot_id: WorldRobotID) -> float: + """Get minimum signed distance (positive = clearance, negative = penetration).""" + if not self._finalized: + raise RuntimeError("World must be finalized first") + + scene_graph_ctx = self._diagram.GetSubsystemContext(self._scene_graph, ctx) + query_object = self._scene_graph.get_query_output_port().Eval(scene_graph_ctx) + + signed_distance_pairs = query_object.ComputeSignedDistancePairwiseClosestPoints() # type: ignore[attr-defined] + + if not signed_distance_pairs: + return float("inf") + + return float(min(pair.distance for pair in signed_distance_pairs)) + + # ============= Collision Checking (context-free, for planning) ============= + + def check_config_collision_free(self, robot_id: WorldRobotID, joint_state: JointState) -> bool: + """Check if a joint state is collision-free (manages context internally). + + This is a convenience method for planners that don't need to manage contexts. 
+ """ + with self.scratch_context() as ctx: + self.set_joint_state(ctx, robot_id, joint_state) + return self.is_collision_free(ctx, robot_id) + + def check_edge_collision_free( + self, + robot_id: WorldRobotID, + start: JointState, + end: JointState, + step_size: float = 0.05, + ) -> bool: + """Check if the entire edge between two joint states is collision-free. + + Interpolates between start and end at the given step_size and checks + each configuration for collisions. This is more efficient than checking + each configuration separately as it uses a single scratch context. + """ + # Extract positions as numpy arrays for interpolation + q_start = np.array(start.position, dtype=np.float64) + q_end = np.array(end.position, dtype=np.float64) + + # Compute number of steps needed + dist = float(np.linalg.norm(q_end - q_start)) + if dist < 1e-8: + return self.check_config_collision_free(robot_id, start) + + n_steps = max(2, int(np.ceil(dist / step_size)) + 1) + + with self.scratch_context() as ctx: + for i in range(n_steps): + t = i / (n_steps - 1) + q = q_start + t * (q_end - q_start) + # Create interpolated JointState + interp_state = JointState(name=start.name, position=q.tolist()) + self.set_joint_state(ctx, robot_id, interp_state) + if not self.is_collision_free(ctx, robot_id): + return False + + return True + + # ============= Forward Kinematics (context-based) ============= + + def get_ee_pose(self, ctx: Context, robot_id: WorldRobotID) -> PoseStamped: + """Get end-effector pose.""" + if not self._finalized: + raise RuntimeError("World must be finalized first") + + if robot_id not in self._robots: + raise KeyError(f"Robot '{robot_id}' not found") + + robot_data = self._robots[robot_id] + plant_ctx = self._diagram.GetSubsystemContext(self._plant, ctx) + + ee_body = robot_data.ee_frame.body() + X_WE = self._plant.EvalBodyPoseInWorld(plant_ctx, ee_body) + + # Extract position and quaternion from Drake transform + pos = X_WE.translation() + quat = 
X_WE.rotation().ToQuaternion() # Drake returns [w, x, y, z] + + return PoseStamped( + frame_id="world", + position=[float(pos[0]), float(pos[1]), float(pos[2])], + orientation=[float(quat.x()), float(quat.y()), float(quat.z()), float(quat.w())], + ) + + def get_link_pose( + self, ctx: Context, robot_id: WorldRobotID, link_name: str + ) -> NDArray[np.float64]: + """Get link pose as 4x4 transform.""" + if not self._finalized: + raise RuntimeError("World must be finalized first") + + if robot_id not in self._robots: + raise KeyError(f"Robot '{robot_id}' not found") + + robot_data = self._robots[robot_id] + plant_ctx = self._diagram.GetSubsystemContext(self._plant, ctx) + + try: + body = self._plant.GetBodyByName(link_name, robot_data.model_instance) + except RuntimeError: + raise KeyError(f"Link '{link_name}' not found in robot '{robot_id}'") + + X_WL = self._plant.EvalBodyPoseInWorld(plant_ctx, body) + + result = X_WL.GetAsMatrix4() + return result # type: ignore[no-any-return, return-value] + + def get_jacobian(self, ctx: Context, robot_id: WorldRobotID) -> NDArray[np.float64]: + """Get geometric Jacobian (6 x n_joints). 
+ + Rows: [vx, vy, vz, wx, wy, wz] (linear, then angular) + """ + if not self._finalized: + raise RuntimeError("World must be finalized first") + + if robot_id not in self._robots: + raise KeyError(f"Robot '{robot_id}' not found") + + robot_data = self._robots[robot_id] + plant_ctx = self._diagram.GetSubsystemContext(self._plant, ctx) + + # Compute full Jacobian + J_full = self._plant.CalcJacobianSpatialVelocity( + plant_ctx, + JacobianWrtVariable.kQDot, + robot_data.ee_frame, + np.array([0.0, 0.0, 0.0]), # type: ignore[arg-type] # Point on end-effector + self._plant.world_frame(), + self._plant.world_frame(), + ) + + # Extract columns for this robot's joints + n_joints = len(robot_data.joint_indices) + J_robot = np.zeros((6, n_joints)) + + for i, joint_idx in enumerate(robot_data.joint_indices): + J_robot[:, i] = J_full[:, joint_idx] + + # Reorder rows: Drake uses [angular, linear], we want [linear, angular] + J_reordered = np.vstack([J_robot[3:6, :], J_robot[0:3, :]]) + + return J_reordered + + # ============= Visualization ============= + + def get_visualization_url(self) -> str | None: + """Get visualization URL if enabled.""" + if self._meshcat is not None: + return self._meshcat.web_url() + return None + + def publish_visualization(self, ctx: Context | None = None) -> None: + """Publish current state to visualization.""" + if self._meshcat_visualizer is None or self._meshcat is None: + return + if ctx is None: + ctx = self._live_context + if ctx is not None: + viz_ctx = self._diagram.GetSubsystemContext(self._meshcat_visualizer, ctx) + self._meshcat.forced_publish(self._meshcat_visualizer, viz_ctx) + + def _set_preview_positions( + self, plant_ctx: Context, robot_id: WorldRobotID, positions: NDArray[np.float64] + ) -> None: + """Set preview robot positions in a plant context.""" + robot_data = self._robots.get(robot_id) + if robot_data is None or robot_data.preview_model_instance is None: + return + + full_positions = 
self._plant.GetPositions(plant_ctx).copy() + for i, idx in enumerate(robot_data.preview_joint_indices): + full_positions[idx] = positions[i] + self._plant.SetPositions(plant_ctx, full_positions) + + def show_preview(self, robot_id: WorldRobotID) -> None: + """Show the preview (yellow ghost) robot in Meshcat.""" + if self._meshcat is None: + return + robot_data = self._robots.get(robot_id) + if robot_data is None or robot_data.preview_model_instance is None: + return + model_name = self._plant.GetModelInstanceName(robot_data.preview_model_instance) + self._meshcat.SetProperty(f"visualizer/{model_name}", "visible", True) + + def hide_preview(self, robot_id: WorldRobotID) -> None: + """Hide the preview (yellow ghost) robot in Meshcat.""" + if self._meshcat is None: + return + robot_data = self._robots.get(robot_id) + if robot_data is None or robot_data.preview_model_instance is None: + return + model_name = self._plant.GetModelInstanceName(robot_data.preview_model_instance) + self._meshcat.SetProperty(f"visualizer/{model_name}", "visible", False) + + def animate_path( + self, + robot_id: WorldRobotID, + path: JointPath, + duration: float = 3.0, + ) -> None: + """Animate a path using the preview (yellow ghost) robot. + + The preview stays visible after animation completes. 
+ """ + if self._meshcat is None or len(path) < 2: + return + + robot_data = self._robots.get(robot_id) + if robot_data is None or robot_data.preview_model_instance is None: + return + + import time + + self.show_preview(robot_id) + dt = duration / (len(path) - 1) + for joint_state in path: + positions = np.array(joint_state.position, dtype=np.float64) + with self._lock: + assert self._plant_context is not None + self._set_preview_positions(self._plant_context, robot_id, positions) + self.publish_visualization() + time.sleep(dt) + + def close(self) -> None: + """Shut down the viz thread.""" + if self._meshcat is not None: + self._meshcat.close() + + # ============= Direct Access (use with caution) ============= + + @property + def plant(self) -> MultibodyPlant: + """Get underlying MultibodyPlant.""" + return self._plant + + @property + def scene_graph(self) -> SceneGraph: + """Get underlying SceneGraph.""" + return self._scene_graph + + @property + def diagram(self) -> Any: + """Get underlying Diagram.""" + return self._diagram diff --git a/dimos/manipulation/test_manipulation_history.py b/dimos/manipulation/test_manipulation_history.py deleted file mode 100644 index ec4e503bed..0000000000 --- a/dimos/manipulation/test_manipulation_history.py +++ /dev/null @@ -1,458 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Copyright 2025-2026 Dimensional Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0) -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import tempfile -import time - -import pytest - -from dimos.manipulation.manipulation_history import ManipulationHistory, ManipulationHistoryEntry -from dimos.types.manipulation import ( - ForceConstraint, - ManipulationTask, - RotationConstraint, - TranslationConstraint, -) -from dimos.types.vector import Vector - - -@pytest.fixture -def sample_task(): - """Create a sample manipulation task for testing.""" - return ManipulationTask( - description="Pick up the cup", - target_object="cup", - target_point=(100, 200), - task_id="task1", - metadata={ - "timestamp": time.time(), - "objects": { - "cup1": { - "object_id": 1, - "label": "cup", - "confidence": 0.95, - "position": {"x": 1.5, "y": 2.0, "z": 0.5}, - }, - "table1": { - "object_id": 2, - "label": "table", - "confidence": 0.98, - "position": {"x": 0.0, "y": 0.0, "z": 0.0}, - }, - }, - }, - ) - - -@pytest.fixture -def sample_task_with_constraints(): - """Create a sample manipulation task with constraints for testing.""" - task = ManipulationTask( - description="Rotate the bottle", - target_object="bottle", - target_point=(150, 250), - task_id="task2", - metadata={ - "timestamp": time.time(), - "objects": { - "bottle1": { - "object_id": 3, - "label": "bottle", - "confidence": 0.92, - "position": {"x": 2.5, "y": 1.0, "z": 0.3}, - } - }, - }, - ) - - # Add rich translation constraint - translation_constraint = 
TranslationConstraint( - translation_axis="y", - reference_point=Vector(2.5, 1.0, 0.3), - bounds_min=Vector(2.0, 0.5, 0.3), - bounds_max=Vector(3.0, 1.5, 0.3), - target_point=Vector(2.7, 1.2, 0.3), - description="Constrained translation along Y-axis only", - ) - task.add_constraint(translation_constraint) - - # Add rich rotation constraint - rotation_constraint = RotationConstraint( - rotation_axis="roll", - start_angle=Vector(0, 0, 0), - end_angle=Vector(90, 0, 0), - pivot_point=Vector(2.5, 1.0, 0.3), - secondary_pivot_point=Vector(2.5, 1.0, 0.5), - description="Constrained rotation around X-axis (roll only)", - ) - task.add_constraint(rotation_constraint) - - # Add force constraint - force_constraint = ForceConstraint( - min_force=2.0, - max_force=5.0, - force_direction=Vector(0, 0, -1), - description="Apply moderate downward force during manipulation", - ) - task.add_constraint(force_constraint) - - return task - - -@pytest.fixture -def temp_output_dir(): - """Create a temporary directory for testing history saving/loading.""" - with tempfile.TemporaryDirectory() as temp_dir: - yield temp_dir - - -@pytest.fixture -def populated_history(sample_task, sample_task_with_constraints): - """Create a populated history with multiple entries for testing.""" - history = ManipulationHistory() - - # Add first entry - entry1 = ManipulationHistoryEntry( - task=sample_task, - result={"status": "success", "execution_time": 2.5}, - manipulation_response="Successfully picked up the cup", - ) - history.add_entry(entry1) - - # Add second entry - entry2 = ManipulationHistoryEntry( - task=sample_task_with_constraints, - result={"status": "failure", "error": "Collision detected"}, - manipulation_response="Failed to rotate the bottle due to collision", - ) - history.add_entry(entry2) - - return history - - -def test_manipulation_history_init() -> None: - """Test initialization of ManipulationHistory.""" - # Default initialization - history = ManipulationHistory() - assert len(history) 
== 0 - assert str(history) == "ManipulationHistory(empty)" - - # With output directory - with tempfile.TemporaryDirectory() as temp_dir: - history = ManipulationHistory(output_dir=temp_dir, new_memory=True) - assert len(history) == 0 - assert os.path.exists(temp_dir) - - -def test_manipulation_history_add_entry(sample_task) -> None: - """Test adding entries to ManipulationHistory.""" - history = ManipulationHistory() - - # Create and add entry - entry = ManipulationHistoryEntry( - task=sample_task, result={"status": "success"}, manipulation_response="Task completed" - ) - history.add_entry(entry) - - assert len(history) == 1 - assert history.get_entry_by_index(0) == entry - - -def test_manipulation_history_create_task_entry(sample_task) -> None: - """Test creating a task entry directly.""" - history = ManipulationHistory() - - entry = history.create_task_entry( - task=sample_task, result={"status": "success"}, agent_response="Task completed" - ) - - assert len(history) == 1 - assert entry.task == sample_task - assert entry.result["status"] == "success" - assert entry.manipulation_response == "Task completed" - - -def test_manipulation_history_save_load(temp_output_dir, sample_task) -> None: - """Test saving and loading history from disk.""" - # Create history and add entry - history = ManipulationHistory(output_dir=temp_output_dir) - history.create_task_entry( - task=sample_task, result={"status": "success"}, agent_response="Task completed" - ) - - # Check that files were created - pickle_path = os.path.join(temp_output_dir, "manipulation_history.pickle") - json_path = os.path.join(temp_output_dir, "manipulation_history.json") - assert os.path.exists(pickle_path) - assert os.path.exists(json_path) - - # Create new history that loads from the saved files - loaded_history = ManipulationHistory(output_dir=temp_output_dir) - assert len(loaded_history) == 1 - assert loaded_history.get_entry_by_index(0).task.description == sample_task.description - - -def 
test_manipulation_history_clear(populated_history) -> None: - """Test clearing the history.""" - assert len(populated_history) > 0 - - populated_history.clear() - assert len(populated_history) == 0 - assert str(populated_history) == "ManipulationHistory(empty)" - - -def test_manipulation_history_get_methods(populated_history) -> None: - """Test various getter methods of ManipulationHistory.""" - # get_all_entries - entries = populated_history.get_all_entries() - assert len(entries) == 2 - - # get_entry_by_index - entry = populated_history.get_entry_by_index(0) - assert entry.task.task_id == "task1" - - # Out of bounds index - assert populated_history.get_entry_by_index(100) is None - - # get_entries_by_timerange - start_time = time.time() - 3600 # 1 hour ago - end_time = time.time() + 3600 # 1 hour from now - entries = populated_history.get_entries_by_timerange(start_time, end_time) - assert len(entries) == 2 - - # get_entries_by_object - cup_entries = populated_history.get_entries_by_object("cup") - assert len(cup_entries) == 1 - assert cup_entries[0].task.task_id == "task1" - - bottle_entries = populated_history.get_entries_by_object("bottle") - assert len(bottle_entries) == 1 - assert bottle_entries[0].task.task_id == "task2" - - -def test_manipulation_history_search_basic(populated_history) -> None: - """Test basic search functionality.""" - # Search by exact match on top-level fields - results = populated_history.search(timestamp=populated_history.get_entry_by_index(0).timestamp) - assert len(results) == 1 - - # Search by task fields - results = populated_history.search(**{"task.task_id": "task1"}) - assert len(results) == 1 - assert results[0].task.target_object == "cup" - - # Search by result fields - results = populated_history.search(**{"result.status": "success"}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - # Search by manipulation_response (substring match for strings) - results = 
populated_history.search(manipulation_response="picked up") - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - -def test_manipulation_history_search_nested(populated_history) -> None: - """Test search with nested field paths.""" - # Search by nested metadata fields - results = populated_history.search( - **{ - "task.metadata.timestamp": populated_history.get_entry_by_index(0).task.metadata[ - "timestamp" - ] - } - ) - assert len(results) == 1 - - # Search by nested object fields - results = populated_history.search(**{"task.metadata.objects.cup1.label": "cup"}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - # Search by position values - results = populated_history.search(**{"task.metadata.objects.cup1.position.x": 1.5}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - -def test_manipulation_history_search_wildcards(populated_history) -> None: - """Test search with wildcard patterns.""" - # Search for any object with label "cup" - results = populated_history.search(**{"task.metadata.objects.*.label": "cup"}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - # Search for any object with confidence > 0.95 - results = populated_history.search(**{"task.metadata.objects.*.confidence": 0.98}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - # Search for any object position with x=2.5 - results = populated_history.search(**{"task.metadata.objects.*.position.x": 2.5}) - assert len(results) == 1 - assert results[0].task.task_id == "task2" - - -def test_manipulation_history_search_constraints(populated_history) -> None: - """Test search by constraint properties.""" - # Find entries with any TranslationConstraint with y-axis - results = populated_history.search(**{"task.constraints.*.translation_axis": "y"}) - assert len(results) == 1 - assert results[0].task.task_id == "task2" - - # Find entries with any RotationConstraint with roll axis - results = 
populated_history.search(**{"task.constraints.*.rotation_axis": "roll"}) - assert len(results) == 1 - assert results[0].task.task_id == "task2" - - -def test_manipulation_history_search_string_contains(populated_history) -> None: - """Test string contains searching.""" - # Basic string contains - results = populated_history.search(**{"task.description": "Pick"}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - # Nested string contains - results = populated_history.search(manipulation_response="collision") - assert len(results) == 1 - assert results[0].task.task_id == "task2" - - -def test_manipulation_history_search_multiple_criteria(populated_history) -> None: - """Test search with multiple criteria.""" - # Multiple criteria - all must match - results = populated_history.search(**{"task.target_object": "cup", "result.status": "success"}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - # Multiple criteria with no matches - results = populated_history.search(**{"task.target_object": "cup", "result.status": "failure"}) - assert len(results) == 0 - - # Combination of direct and wildcard paths - results = populated_history.search( - **{"task.target_object": "bottle", "task.metadata.objects.*.position.z": 0.3} - ) - assert len(results) == 1 - assert results[0].task.task_id == "task2" - - -def test_manipulation_history_search_nonexistent_fields(populated_history) -> None: - """Test search with fields that don't exist.""" - # Search by nonexistent field - results = populated_history.search(nonexistent_field="value") - assert len(results) == 0 - - # Search by nonexistent nested field - results = populated_history.search(**{"task.nonexistent_field": "value"}) - assert len(results) == 0 - - # Search by nonexistent object - results = populated_history.search(**{"task.metadata.objects.nonexistent_object": "value"}) - assert len(results) == 0 - - -def test_manipulation_history_search_timestamp_ranges(populated_history) -> 
None: - """Test searching by timestamp ranges.""" - # Get reference timestamps - entry1_time = populated_history.get_entry_by_index(0).task.metadata["timestamp"] - entry2_time = populated_history.get_entry_by_index(1).task.metadata["timestamp"] - mid_time = (entry1_time + entry2_time) / 2 - - # Search for timestamps before second entry - results = populated_history.search(**{"task.metadata.timestamp": ("<", entry2_time)}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - # Search for timestamps after first entry - results = populated_history.search(**{"task.metadata.timestamp": (">", entry1_time)}) - assert len(results) == 1 - assert results[0].task.task_id == "task2" - - # Search within a time window using >= and <= - results = populated_history.search(**{"task.metadata.timestamp": (">=", mid_time - 1800)}) - assert len(results) == 2 - assert results[0].task.task_id == "task1" - assert results[1].task.task_id == "task2" - - -def test_manipulation_history_search_vector_fields(populated_history) -> None: - """Test searching by vector components in constraints.""" - # Search by reference point components - results = populated_history.search(**{"task.constraints.*.reference_point.x": 2.5}) - assert len(results) == 1 - assert results[0].task.task_id == "task2" - - # Search by target point components - results = populated_history.search(**{"task.constraints.*.target_point.z": 0.3}) - assert len(results) == 1 - assert results[0].task.task_id == "task2" - - # Search by rotation angles - results = populated_history.search(**{"task.constraints.*.end_angle.x": 90}) - assert len(results) == 1 - assert results[0].task.task_id == "task2" - - -def test_manipulation_history_search_execution_details(populated_history) -> None: - """Test searching by execution time and error patterns.""" - # Search by execution time - results = populated_history.search(**{"result.execution_time": 2.5}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - 
- # Search by error message pattern - results = populated_history.search(**{"result.error": "Collision"}) - assert len(results) == 1 - assert results[0].task.task_id == "task2" - - # Search by status - results = populated_history.search(**{"result.status": "success"}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - -def test_manipulation_history_search_multiple_criteria(populated_history) -> None: - """Test search with multiple criteria.""" - # Multiple criteria - all must match - results = populated_history.search(**{"task.target_object": "cup", "result.status": "success"}) - assert len(results) == 1 - assert results[0].task.task_id == "task1" - - # Multiple criteria with no matches - results = populated_history.search(**{"task.target_object": "cup", "result.status": "failure"}) - assert len(results) == 0 - - # Combination of direct and wildcard paths - results = populated_history.search( - **{"task.target_object": "bottle", "task.metadata.objects.*.position.z": 0.3} - ) - assert len(results) == 1 - assert results[0].task.task_id == "task2" diff --git a/dimos/manipulation/test_manipulation_module.py b/dimos/manipulation/test_manipulation_module.py new file mode 100644 index 0000000000..c30ba9b55c --- /dev/null +++ b/dimos/manipulation/test_manipulation_module.py @@ -0,0 +1,293 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Integration tests for ManipulationModule. 
+ +These tests verify the full planning stack with Drake backend. +They require Drake to be installed and will be skipped otherwise. +""" + +from __future__ import annotations + +import importlib.util +from unittest.mock import MagicMock + +import pytest + +from dimos.manipulation.manipulation_module import ( + ManipulationModule, + ManipulationState, +) +from dimos.manipulation.planning.spec import RobotModelConfig +from dimos.msgs.geometry_msgs import Pose, PoseStamped, Quaternion, Vector3 +from dimos.msgs.sensor_msgs import JointState +from dimos.utils.data import get_data + + +def _drake_available() -> bool: + return importlib.util.find_spec("pydrake") is not None + + +def _xarm_urdf_available() -> bool: + try: + desc_path = get_data("xarm_description") + urdf_path = desc_path / "urdf/xarm_device.urdf.xacro" + return urdf_path.exists() + except Exception: + return False + + +def _get_xarm7_config() -> RobotModelConfig: + """Create XArm7 robot config for testing.""" + desc_path = get_data("xarm_description") + return RobotModelConfig( + name="test_arm", + urdf_path=desc_path / "urdf/xarm_device.urdf.xacro", + base_pose=PoseStamped(position=Vector3(), orientation=Quaternion()), + joint_names=["joint1", "joint2", "joint3", "joint4", "joint5", "joint6", "joint7"], + end_effector_link="link7", + base_link="link_base", + package_paths={"xarm_description": desc_path}, + xacro_args={"dof": "7", "limited": "true"}, + auto_convert_meshes=True, + max_velocity=1.0, + max_acceleration=2.0, + joint_name_mapping={ + "arm_joint1": "joint1", + "arm_joint2": "joint2", + "arm_joint3": "joint3", + "arm_joint4": "joint4", + "arm_joint5": "joint5", + "arm_joint6": "joint6", + "arm_joint7": "joint7", + }, + coordinator_task_name="traj_arm", + ) + + +@pytest.fixture +def xarm7_config(): + return _get_xarm7_config() + + +@pytest.fixture +def joint_state_zeros(): + """Create a JointState message with zeros for XArm7.""" + return JointState( + name=[ + "arm_joint1", + "arm_joint2", + 
"arm_joint3", + "arm_joint4", + "arm_joint5", + "arm_joint6", + "arm_joint7", + ], + position=[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + velocity=[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + effort=[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) + + +@pytest.fixture +def module(xarm7_config): + """Create a started ManipulationModule with ports disabled.""" + mod = ManipulationModule( + robots=[xarm7_config], + planning_timeout=10.0, + enable_viz=False, + ) + mod.joint_state = None + mod.objects = None + mod.start() + yield mod + mod.stop() + + +@pytest.mark.skipif(not _drake_available(), reason="Drake not installed") +@pytest.mark.skipif(not _xarm_urdf_available(), reason="XArm URDF not available") +class TestManipulationModuleIntegration: + """Integration tests for ManipulationModule with real Drake backend.""" + + def test_module_initialization(self, module): + """Test module initializes with real Drake world.""" + assert module._state == ManipulationState.IDLE + assert module._world_monitor is not None + assert module._planner is not None + assert module._kinematics is not None + assert "test_arm" in module._robots + + def test_joint_state_sync(self, module, joint_state_zeros): + """Test joint state synchronization to Drake world.""" + module._on_joint_state(joint_state_zeros) + + joints = module.get_current_joints() + assert joints is not None + assert len(joints) == 7 + assert all(abs(j) < 0.01 for j in joints) + + def test_collision_check(self, module, joint_state_zeros): + """Test collision checking at a configuration.""" + module._on_joint_state(joint_state_zeros) + + is_free = module.is_collision_free([0.0] * 7) + assert is_free is True + + def test_plan_to_joints(self, module, joint_state_zeros): + """Test planning to a joint configuration.""" + module._on_joint_state(joint_state_zeros) + + target = JointState(position=[0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1]) + success = module.plan_to_joints(target) + + assert success is True + assert module._state == 
ManipulationState.COMPLETED + assert module.has_planned_path() is True + + assert "test_arm" in module._planned_trajectories + traj = module._planned_trajectories["test_arm"] + assert len(traj.points) > 1 + assert traj.duration > 0 + + def test_add_and_remove_obstacle(self, module, joint_state_zeros): + """Test adding and removing obstacles.""" + module._on_joint_state(joint_state_zeros) + + pose = Pose( + position=Vector3(0.5, 0.0, 0.3), + orientation=Quaternion(), # default is identity (w=1) + ) + obstacle_id = module.add_obstacle("test_box", pose, "box", [0.1, 0.1, 0.1]) + + assert obstacle_id != "" + assert obstacle_id is not None + + removed = module.remove_obstacle(obstacle_id) + assert removed is True + + def test_robot_info(self, module): + """Test getting robot information.""" + info = module.get_robot_info() + + assert info is not None + assert info["name"] == "test_arm" + assert len(info["joint_names"]) == 7 + assert info["end_effector_link"] == "link7" + assert info["coordinator_task_name"] == "traj_arm" + assert info["has_joint_name_mapping"] is True + + def test_ee_pose(self, module, joint_state_zeros): + """Test getting end-effector pose.""" + module._on_joint_state(joint_state_zeros) + + pose = module.get_ee_pose() + + assert pose is not None + assert hasattr(pose, "x") + assert hasattr(pose, "y") + assert hasattr(pose, "z") + + def test_trajectory_name_translation(self, module, joint_state_zeros): + """Test that trajectory joint names are translated for coordinator.""" + module._on_joint_state(joint_state_zeros) + + success = module.plan_to_joints(JointState(position=[0.05] * 7)) + assert success is True + + traj = module._planned_trajectories["test_arm"] + robot_config = module._robots["test_arm"][1] + + translated = module._translate_trajectory_to_coordinator(traj, robot_config) + + for name in translated.joint_names: + assert name.startswith("arm_") # Should have arm_ prefix + + +@pytest.mark.skipif(not _drake_available(), reason="Drake not 
installed") +@pytest.mark.skipif(not _xarm_urdf_available(), reason="XArm URDF not available") +class TestCoordinatorIntegration: + """Test coordinator integration with mocked RPC client.""" + + def test_execute_with_mock_coordinator(self, module, joint_state_zeros): + """Test execute sends trajectory to coordinator.""" + module._on_joint_state(joint_state_zeros) + + success = module.plan_to_joints(JointState(position=[0.05] * 7)) + assert success is True + + # Mock the coordinator client + mock_client = MagicMock() + mock_client.task_invoke.return_value = True + module._coordinator_client = mock_client + + result = module.execute() + + assert result is True + assert module._state == ManipulationState.COMPLETED + + # Verify coordinator was called + mock_client.task_invoke.assert_called_once() + call_args = mock_client.task_invoke.call_args + task_name, method_name, kwargs = call_args[0] + + assert task_name == "traj_arm" + assert method_name == "execute" + trajectory = kwargs["trajectory"] + assert len(trajectory.points) > 1 + # Joint names should be translated + assert all(n.startswith("arm_") for n in trajectory.joint_names) + + def test_execute_rejected_by_coordinator(self, module, joint_state_zeros): + """Test handling of coordinator rejection.""" + module._on_joint_state(joint_state_zeros) + + module.plan_to_joints(JointState(position=[0.05] * 7)) + + # Mock coordinator to reject + mock_client = MagicMock() + mock_client.task_invoke.return_value = False + module._coordinator_client = mock_client + + result = module.execute() + + assert result is False + assert module._state == ManipulationState.FAULT + assert "rejected" in module._error_message.lower() + + def test_state_transitions_during_execution(self, module, joint_state_zeros): + """Test state transitions during plan and execute.""" + assert module._state == ManipulationState.IDLE + + module._on_joint_state(joint_state_zeros) + + # Plan - should go through PLANNING -> COMPLETED + 
module.plan_to_joints(JointState(position=[0.05] * 7)) + assert module._state == ManipulationState.COMPLETED + + # Reset works from COMPLETED + module.reset() + assert module._state == ManipulationState.IDLE + + # Plan again + module.plan_to_joints(JointState(position=[0.05] * 7)) + + # Mock coordinator + mock_client = MagicMock() + mock_client.task_invoke.return_value = True + module._coordinator_client = mock_client + + # Execute - should go to EXECUTING then COMPLETED + module.execute() + assert module._state == ManipulationState.COMPLETED diff --git a/dimos/manipulation/test_manipulation_unit.py b/dimos/manipulation/test_manipulation_unit.py new file mode 100644 index 0000000000..de551d99cd --- /dev/null +++ b/dimos/manipulation/test_manipulation_unit.py @@ -0,0 +1,308 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Unit tests for the ManipulationModule.""" + +from __future__ import annotations + +from pathlib import Path +import threading +from unittest.mock import MagicMock, patch + +import pytest + +from dimos.manipulation.manipulation_module import ( + ManipulationModule, + ManipulationState, +) +from dimos.manipulation.planning.spec import RobotModelConfig +from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Vector3 +from dimos.msgs.trajectory_msgs import JointTrajectory, TrajectoryPoint + +# ============================================================================= +# Fixtures +# ============================================================================= + + +@pytest.fixture +def robot_config(): + """Create a robot config for testing.""" + return RobotModelConfig( + name="test_arm", + urdf_path=Path("/path/to/robot.urdf"), + base_pose=PoseStamped(position=Vector3(), orientation=Quaternion()), + joint_names=["joint1", "joint2", "joint3"], + end_effector_link="link_tcp", + base_link="link_base", + max_velocity=1.0, + max_acceleration=2.0, + coordinator_task_name="traj_arm", + ) + + +@pytest.fixture +def robot_config_with_mapping(): + """Create a robot config with joint name mapping (dual-arm scenario).""" + return RobotModelConfig( + name="left_arm", + urdf_path=Path("/path/to/robot.urdf"), + base_pose=PoseStamped(position=Vector3(), orientation=Quaternion()), + joint_names=["joint1", "joint2", "joint3"], + end_effector_link="link_tcp", + base_link="link_base", + joint_name_mapping={ + "left_joint1": "joint1", + "left_joint2": "joint2", + "left_joint3": "joint3", + }, + coordinator_task_name="traj_left", + ) + + +@pytest.fixture +def simple_trajectory(): + """Create a simple trajectory for testing.""" + return JointTrajectory( + joint_names=["joint1", "joint2", "joint3"], + points=[ + TrajectoryPoint( + positions=[0.0, 0.0, 0.0], velocities=[0.0, 0.0, 0.0], time_from_start=0.0 + ), + TrajectoryPoint( + positions=[0.5, 0.5, 0.5], velocities=[0.0, 0.0, 
0.0], time_from_start=1.0 + ), + ], + ) + + +def _make_module(): + """Create a ManipulationModule instance with mocked __init__.""" + with patch.object(ManipulationModule, "__init__", lambda self: None): + module = ManipulationModule.__new__(ManipulationModule) + module._state = ManipulationState.IDLE + module._lock = threading.Lock() + module._error_message = "" + module._robots = {} + module._planned_paths = {} + module._planned_trajectories = {} + module._world_monitor = None + module._planner = None + module._kinematics = None + module._coordinator_client = None + module._graspgen = None + return module + + +# ============================================================================= +# Test State Machine +# ============================================================================= + + +class TestStateMachine: + """Test state transitions.""" + + def test_cancel_only_during_execution(self): + """Cancel only works in EXECUTING state.""" + module = _make_module() + + module._state = ManipulationState.IDLE + assert module.cancel() is False + + module._state = ManipulationState.EXECUTING + assert module.cancel() is True + assert module._state == ManipulationState.IDLE + + def test_reset_not_during_execution(self): + """Reset works in any state except EXECUTING.""" + module = _make_module() + + module._state = ManipulationState.FAULT + module._error_message = "Error" + assert module.reset() is True + assert module._state == ManipulationState.IDLE + assert module._error_message == "" + + module._state = ManipulationState.EXECUTING + assert module.reset() is False + + def test_fail_sets_fault_state(self): + """_fail helper sets FAULT state and message.""" + module = _make_module() + module._state = ManipulationState.PLANNING + + result = module._fail("Test error") + assert result is False + assert module._state == ManipulationState.FAULT + assert module._error_message == "Test error" + + def test_begin_planning_state_checks(self, robot_config): + 
"""_begin_planning only allowed from IDLE or COMPLETED.""" + module = _make_module() + module._world_monitor = MagicMock() + module._robots = {"test_arm": ("robot_id", robot_config, MagicMock())} + + # From IDLE - OK + module._state = ManipulationState.IDLE + assert module._begin_planning() == ("test_arm", "robot_id") + assert module._state == ManipulationState.PLANNING + + # From COMPLETED - OK + module._state = ManipulationState.COMPLETED + assert module._begin_planning() == ("test_arm", "robot_id") + + # From EXECUTING - Fail + module._state = ManipulationState.EXECUTING + assert module._begin_planning() is None + + +# ============================================================================= +# Test Robot Selection +# ============================================================================= + + +class TestRobotSelection: + """Test robot selection logic.""" + + def test_single_robot_default(self, robot_config): + """Single robot is used by default.""" + module = _make_module() + module._robots = {"arm": ("id", robot_config, MagicMock())} + + result = module._get_robot() + assert result is not None + assert result[0] == "arm" + + def test_multiple_robots_require_name(self, robot_config): + """Multiple robots require explicit name.""" + module = _make_module() + module._robots = { + "left": ("id1", robot_config, MagicMock()), + "right": ("id2", robot_config, MagicMock()), + } + + # No name - fails + assert module._get_robot() is None + + # With name - works + result = module._get_robot("left") + assert result is not None + assert result[0] == "left" + + +# ============================================================================= +# Test Joint Name Translation (for coordinator integration) +# ============================================================================= + + +class TestJointNameTranslation: + """Test trajectory joint name translation for coordinator.""" + + def test_no_mapping_returns_original(self, robot_config, simple_trajectory): + 
"""Without mapping, trajectory is returned unchanged.""" + module = _make_module() + + result = module._translate_trajectory_to_coordinator(simple_trajectory, robot_config) + assert result is simple_trajectory # Same object + + def test_mapping_translates_names(self, robot_config_with_mapping, simple_trajectory): + """With mapping, joint names are translated.""" + module = _make_module() + + result = module._translate_trajectory_to_coordinator( + simple_trajectory, robot_config_with_mapping + ) + assert result.joint_names == ["left_joint1", "left_joint2", "left_joint3"] + assert len(result.points) == 2 # Points preserved + + +# ============================================================================= +# Test Execute Method +# ============================================================================= + + +class TestExecute: + """Test coordinator execution.""" + + def test_execute_requires_trajectory(self, robot_config): + """Execute fails without planned trajectory.""" + module = _make_module() + module._robots = {"test_arm": ("id", robot_config, MagicMock())} + module._planned_trajectories = {} + + assert module.execute() is False + + def test_execute_requires_task_name(self): + """Execute fails without coordinator_task_name.""" + module = _make_module() + config_no_task = RobotModelConfig( + name="arm", + urdf_path=Path("/path"), + base_pose=PoseStamped(position=Vector3(), orientation=Quaternion()), + joint_names=["j1"], + end_effector_link="ee", + ) + module._robots = {"arm": ("id", config_no_task, MagicMock())} + module._planned_trajectories = {"arm": MagicMock()} + + assert module.execute() is False + + def test_execute_success(self, robot_config, simple_trajectory): + """Successful execute calls coordinator via task_invoke.""" + module = _make_module() + module._robots = {"test_arm": ("id", robot_config, MagicMock())} + module._planned_trajectories = {"test_arm": simple_trajectory} + + mock_client = MagicMock() + mock_client.task_invoke.return_value = 
True + module._coordinator_client = mock_client + + assert module.execute() is True + assert module._state == ManipulationState.COMPLETED + mock_client.task_invoke.assert_called_once_with( + "traj_arm", "execute", {"trajectory": simple_trajectory} + ) + + def test_execute_rejected(self, robot_config, simple_trajectory): + """Rejected execution sets FAULT state.""" + module = _make_module() + module._robots = {"test_arm": ("id", robot_config, MagicMock())} + module._planned_trajectories = {"test_arm": simple_trajectory} + + mock_client = MagicMock() + mock_client.task_invoke.return_value = False + module._coordinator_client = mock_client + + assert module.execute() is False + assert module._state == ManipulationState.FAULT + + +# ============================================================================= +# Test RobotModelConfig Mapping Helpers +# ============================================================================= + + +class TestRobotModelConfigMapping: + """Test RobotModelConfig joint name mapping helpers.""" + + def test_bidirectional_mapping(self, robot_config_with_mapping): + """Test URDF <-> coordinator name translation.""" + config = robot_config_with_mapping + + # Coordinator -> URDF + assert config.get_urdf_joint_name("left_joint1") == "joint1" + assert config.get_urdf_joint_name("unknown") == "unknown" + + # URDF -> Coordinator + assert config.get_coordinator_joint_name("joint1") == "left_joint1" + assert config.get_coordinator_joint_name("unknown") == "unknown" diff --git a/dimos/mapping/costmapper.py b/dimos/mapping/costmapper.py index 97c96f8180..70cd770777 100644 --- a/dimos/mapping/costmapper.py +++ b/dimos/mapping/costmapper.py @@ -16,13 +16,10 @@ import time from reactivex import operators as ops -import rerun as rr -import rerun.blueprint as rrb from dimos.core import In, Module, Out, rpc -from dimos.core.global_config import GlobalConfig +from dimos.core.global_config import GlobalConfig, global_config from dimos.core.module import 
ModuleConfig -from dimos.dashboard.rerun_init import connect_rerun from dimos.mapping.pointclouds.occupancy import ( OCCUPANCY_ALGOS, HeightCostConfig, @@ -48,66 +45,17 @@ class CostMapper(Module): global_map: In[PointCloud2] global_costmap: Out[OccupancyGrid] - @classmethod - def rerun_views(cls): # type: ignore[no-untyped-def] - """Return Rerun view blueprints for costmap visualization.""" - return [ - rrb.Spatial2DView( - name="Costmap", - origin="world/nav/costmap/image", - ), - rrb.TimeSeriesView( - name="Costmap (ms)", - origin="/metrics/costmap", - contents=["+ /metrics/costmap/calc_ms"], - ), - ] - - def __init__(self, global_config: GlobalConfig | None = None, **kwargs: object) -> None: + def __init__(self, cfg: GlobalConfig = global_config, **kwargs: object) -> None: super().__init__(**kwargs) - self._global_config = global_config or GlobalConfig() + self._global_config = cfg @rpc def start(self) -> None: super().start() - # Only start Rerun logging if Rerun backend is selected - if self._global_config.viewer_backend.startswith("rerun"): - connect_rerun(global_config=self._global_config) - logger.info("CostMapper: Rerun logging enabled (sync)") - def _publish_costmap(grid: OccupancyGrid, calc_time_ms: float, rx_monotonic: float) -> None: - # Publish to downstream first. self.global_costmap.publish(grid) - # Synchronous Rerun logging (no queues/threads). 
- if self._global_config.viewer_backend.startswith("rerun"): - try: - # 2D image panel - rr.log( - "world/nav/costmap/image", - grid.to_rerun( - mode="image", - colormap="RdBu_r", - ), - ) - - # 3D floor overlay (mesh) - rr.log( - "world/nav/costmap/floor", - grid.to_rerun( - mode="mesh", - colormap=None, # grayscale / foxglove-style - z_offset=0.07, - ), - ) - - rr.log("metrics/costmap/calc_ms", rr.Scalars(calc_time_ms)) - latency_ms = (time.monotonic() - rx_monotonic) * 1000 - rr.log("metrics/costmap/latency_ms", rr.Scalars(latency_ms)) - except Exception as e: - logger.warning(f"Rerun logging error: {e}") - def _calculate_and_time( msg: PointCloud2, ) -> tuple[OccupancyGrid, float, float]: diff --git a/dimos/mapping/occupancy/visualizations.py b/dimos/mapping/occupancy/visualizations.py index 33a1336874..2ed0364257 100644 --- a/dimos/mapping/occupancy/visualizations.py +++ b/dimos/mapping/occupancy/visualizations.py @@ -21,8 +21,7 @@ from dimos.msgs.nav_msgs import Path from dimos.msgs.nav_msgs.OccupancyGrid import OccupancyGrid -from dimos.msgs.sensor_msgs import Image -from dimos.msgs.sensor_msgs.image_impls.AbstractImage import ImageFormat +from dimos.msgs.sensor_msgs.Image import Image, ImageFormat Palette: TypeAlias = Literal["rainbow", "turbo"] diff --git a/dimos/mapping/occupancy/visualize_path.py b/dimos/mapping/occupancy/visualize_path.py index 1a6e4887f1..0662582f72 100644 --- a/dimos/mapping/occupancy/visualize_path.py +++ b/dimos/mapping/occupancy/visualize_path.py @@ -18,8 +18,7 @@ from dimos.mapping.occupancy.visualizations import visualize_occupancy_grid from dimos.msgs.nav_msgs import Path from dimos.msgs.nav_msgs.OccupancyGrid import OccupancyGrid -from dimos.msgs.sensor_msgs.Image import Image -from dimos.msgs.sensor_msgs.image_impls.AbstractImage import ImageFormat +from dimos.msgs.sensor_msgs.Image import Image, ImageFormat def visualize_path( diff --git a/dimos/mapping/osm/demo_osm.py b/dimos/mapping/osm/demo_osm.py index 
3e4ba8e61b..97622cfaf2 100644 --- a/dimos/mapping/osm/demo_osm.py +++ b/dimos/mapping/osm/demo_osm.py @@ -13,20 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from dotenv import load_dotenv - -from dimos.agents.agent import llm_agent -from dimos.agents.cli.human import human_input +from dimos.agents.agent import agent from dimos.agents.skills.demo_robot import demo_robot from dimos.agents.skills.osm import osm_skill from dimos.core.blueprints import autoconnect -load_dotenv() - - demo_osm = autoconnect( demo_robot(), osm_skill(), - human_input(), - llm_agent(), + agent(), ) diff --git a/dimos/mapping/osm/query.py b/dimos/mapping/osm/query.py index fd6e3694f6..410f879c20 100644 --- a/dimos/mapping/osm/query.py +++ b/dimos/mapping/osm/query.py @@ -27,7 +27,7 @@ def query_for_one_position(vl_model: VlModel, map_image: MapImage, query: str) -> LatLon | None: full_query = f"{_PROLOGUE} {query} {_JSON} If there's a match return the x, y coordinates from the image. Example: `[123, 321]`. If there's no match return `null`." - response = vl_model.query(map_image.image.data, full_query) + response = vl_model.query(map_image.image, full_query) coords = tuple(map(int, re.findall(r"\d+", response))) if len(coords) != 2: return None @@ -42,7 +42,7 @@ def query_for_one_position_and_context( my_location = f"I'm currently at x={x}, y={y}." full_query = f"{_PROLOGUE} {my_location} {query} {_JSON} If there's a match return the x, y coordinates from the image and what is there. Example response: `{example}`. If there's no match return `null`." 
logger.info(f"Qwen query: `{full_query}`") - response = vl_model.query(map_image.image.data, full_query) + response = vl_model.query(map_image.image, full_query) try: doc = extract_json_from_llm_response(response) diff --git a/dimos/mapping/pointclouds/test_occupancy_speed.py b/dimos/mapping/pointclouds/test_occupancy_speed.py index c34c2865f2..2def839dd5 100644 --- a/dimos/mapping/pointclouds/test_occupancy_speed.py +++ b/dimos/mapping/pointclouds/test_occupancy_speed.py @@ -20,7 +20,7 @@ from dimos.mapping.pointclouds.occupancy import OCCUPANCY_ALGOS from dimos.mapping.voxels import VoxelGridMapper from dimos.utils.cli.plot import bar -from dimos.utils.data import _get_data_dir, get_data +from dimos.utils.data import get_data, get_data_dir from dimos.utils.testing import TimedSensorReplay @@ -28,11 +28,10 @@ def test_build_map(): mapper = VoxelGridMapper(publish_interval=-1) - for ts, frame in TimedSensorReplay("unitree_go2_bigoffice/lidar").iterate_duration(): - print(ts, frame) + for _ts, frame in TimedSensorReplay("unitree_go2_bigoffice/lidar").iterate(): mapper.add_frame(frame) - pickle_file = _get_data_dir() / "unitree_go2_bigoffice_map.pickle" + pickle_file = get_data_dir() / "unitree_go2_bigoffice_map.pickle" global_pcd = mapper.get_global_pointcloud2() with open(pickle_file, "wb") as f: diff --git a/dimos/mapping/voxels.py b/dimos/mapping/voxels.py index 6570d9ba33..4c1805e059 100644 --- a/dimos/mapping/voxels.py +++ b/dimos/mapping/voxels.py @@ -21,13 +21,10 @@ from reactivex import interval, operators as ops from reactivex.disposable import Disposable from reactivex.subject import Subject -import rerun as rr -import rerun.blueprint as rrb from dimos.core import In, Module, Out, rpc -from dimos.core.global_config import GlobalConfig +from dimos.core.global_config import GlobalConfig, global_config from dimos.core.module import ModuleConfig -from dimos.dashboard.rerun_init import connect_rerun from dimos.msgs.sensor_msgs import PointCloud2 from 
dimos.utils.decorators import simple_mcache from dimos.utils.logging_config import setup_logger @@ -54,29 +51,9 @@ class VoxelGridMapper(Module): lidar: In[PointCloud2] global_map: Out[PointCloud2] - @classmethod - def rerun_views(cls): # type: ignore[no-untyped-def] - """Return Rerun view blueprints for voxel map visualization.""" - return [ - rrb.TimeSeriesView( - name="Voxel Pipeline (ms)", - origin="/metrics/voxel_map", - contents=[ - "+ /metrics/voxel_map/extract_ms", - "+ /metrics/voxel_map/transport_ms", - "+ /metrics/voxel_map/publish_ms", - ], - ), - rrb.TimeSeriesView( - name="Voxel Count", - origin="/metrics/voxel_map", - contents=["+ /metrics/voxel_map/voxel_count"], - ), - ] - - def __init__(self, global_config: GlobalConfig | None = None, **kwargs: object) -> None: + def __init__(self, cfg: GlobalConfig = global_config, **kwargs: object) -> None: super().__init__(**kwargs) - self._global_config = global_config or GlobalConfig() + self._global_config = cfg dev = ( o3c.Device(self.config.device) @@ -84,7 +61,7 @@ def __init__(self, global_config: GlobalConfig | None = None, **kwargs: object) else o3c.Device("CPU:0") ) - print(f"VoxelGridMapper using device: {dev}") + logger.info(f"VoxelGridMapper using device: {dev}") self.vbg = o3d.t.geometry.VoxelBlockGrid( attr_names=("dummy",), @@ -100,18 +77,11 @@ def __init__(self, global_config: GlobalConfig | None = None, **kwargs: object) self._voxel_hashmap = self.vbg.hashmap() self._key_dtype = self._voxel_hashmap.key_tensor().dtype self._latest_frame_ts: float = 0.0 - # Monotonic timestamp of last received frame (for accurate latency in replay) - self._latest_frame_rx_monotonic: float | None = None @rpc def start(self) -> None: super().start() - # Only start Rerun logging if Rerun backend is selected - if self._global_config.viewer_backend.startswith("rerun"): - connect_rerun(global_config=self._global_config) - logger.info("VoxelGridMapper: Rerun logging enabled (sync)") - # Subject to trigger publishing, 
with backpressure to drop if busy self._publish_trigger: Subject[None] = Subject() self._disposables.add( @@ -136,52 +106,13 @@ def stop(self) -> None: super().stop() def _on_frame(self, frame: PointCloud2) -> None: - # Track receipt time with monotonic clock (works correctly in replay) - self._latest_frame_rx_monotonic = time.monotonic() self.add_frame(frame) if self.config.publish_interval == 0: self._publish_trigger.on_next(None) def publish_global_map(self) -> None: - # Snapshot monotonic timestamp once (won't be overwritten during slow publish) - rx_monotonic = self._latest_frame_rx_monotonic - - start_total = time.perf_counter() - - # 1. Extract pointcloud from GPU hashmap - t1 = time.perf_counter() pc = self.get_global_pointcloud2() - extract_ms = (time.perf_counter() - t1) * 1000 - - # 2. Publish to downstream (NO auto-logging - fast!) - t2 = time.perf_counter() self.global_map.publish(pc) - publish_ms = (time.perf_counter() - t2) * 1000 - # 3. Synchronous Rerun logging (no queues/threads). 
- if self._global_config.viewer_backend.startswith("rerun"): - try: - rr.log( - "world/map", - pc.to_rerun( - mode="boxes", - size=self.config.voxel_size, - colormap="turbo", - ), - ) - except Exception as e: - logger.warning(f"Rerun logging error: {e}") - - # Log detailed timing breakdown to Rerun - total_ms = (time.perf_counter() - start_total) * 1000 - rr.log("metrics/voxel_map/publish_ms", rr.Scalars(total_ms)) - rr.log("metrics/voxel_map/extract_ms", rr.Scalars(extract_ms)) - rr.log("metrics/voxel_map/transport_ms", rr.Scalars(publish_ms)) - rr.log("metrics/voxel_map/voxel_count", rr.Scalars(float(len(pc)))) - - # Log pipeline latency (time from frame receipt to publish complete) - if rx_monotonic is not None: - latency_ms = (time.monotonic() - rx_monotonic) * 1000 - rr.log("metrics/voxel_map/latency_ms", rr.Scalars(latency_ms)) def size(self) -> int: return self._voxel_hashmap.size() # type: ignore[no-any-return] diff --git a/dimos/memory/embedding.py b/dimos/memory/embedding.py new file mode 100644 index 0000000000..20dd82422c --- /dev/null +++ b/dimos/memory/embedding.py @@ -0,0 +1,105 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections.abc import Callable +from dataclasses import dataclass, field +from typing import cast + +import reactivex as rx +from reactivex import operators as ops +from reactivex.observable import Observable + +from dimos.core import In, rpc +from dimos.core.module import Module, ModuleConfig +from dimos.models.embedding.base import Embedding, EmbeddingModel +from dimos.models.embedding.clip import CLIPModel +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.msgs.nav_msgs import OccupancyGrid +from dimos.msgs.sensor_msgs import Image +from dimos.msgs.sensor_msgs.Image import Image, sharpness_barrier +from dimos.utils.reactive import getter_hot + + +@dataclass +class Config(ModuleConfig): + embedding_model: EmbeddingModel = field(default_factory=CLIPModel) + + +@dataclass +class SpatialEntry: + image: Image + pose: PoseStamped + + +@dataclass +class SpatialEmbedding(SpatialEntry): + embedding: Embedding + + +class EmbeddingMemory(Module[Config]): + default_config = Config + config: Config + color_image: In[Image] + global_costmap: In[OccupancyGrid] + + _costmap_getter: Callable[[], OccupancyGrid] | None = None + + def get_costmap(self) -> OccupancyGrid: + if self._costmap_getter is None: + self._costmap_getter = getter_hot(self.global_costmap.pure_observable()) + self._disposables.add(self._costmap_getter) + return self._costmap_getter() + + @rpc + def query_costmap(self, text: str) -> OccupancyGrid: + costmap = self.get_costmap() + # overlay costmap with embedding heat + return costmap + + @rpc + def start(self) -> None: + # would be cool if this sharpness_barrier was somehow self-calibrating + # + # we need a Governor system, sharpness_barrier frequency shouldn't + # be a fixed float but an observable that adjusts based on downstream load + # + # (also voxel size for mapper for example would benefit from this) + self.color_image.pure_observable().pipe( + sharpness_barrier(0.5), + ops.flat_map(self._try_create_spatial_entry), + 
ops.map(self._embed_spatial_entry), + ops.map(self._store_spatial_entry), + ).subscribe(print) + + def _try_create_spatial_entry(self, img: Image) -> Observable[SpatialEntry]: + pose = self.tf.get_pose("world", "base_link") + if not pose: + return rx.empty() + return rx.of(SpatialEntry(image=img, pose=pose)) + + def _embed_spatial_entry(self, spatial_entry: SpatialEntry) -> SpatialEmbedding: + embedding = cast("Embedding", self.config.embedding_model.embed(spatial_entry.image)) + return SpatialEmbedding( + image=spatial_entry.image, + pose=spatial_entry.pose, + embedding=embedding, + ) + + def _store_spatial_entry(self, spatial_embedding: SpatialEmbedding) -> SpatialEmbedding: + return spatial_embedding + + def query_text(self, query: str) -> list[SpatialEmbedding]: + self.config.embedding_model.embed_text(query) + results: list[SpatialEmbedding] = [] + return results diff --git a/dimos/memory/test_embedding.py b/dimos/memory/test_embedding.py new file mode 100644 index 0000000000..b7e7fbb294 --- /dev/null +++ b/dimos/memory/test_embedding.py @@ -0,0 +1,53 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +from dimos.memory.embedding import EmbeddingMemory, SpatialEntry +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.utils.data import get_data +from dimos.utils.testing import TimedSensorReplay + +dir_name = "unitree_go2_bigoffice" + + +@pytest.mark.skip +def test_embed_frame() -> None: + """Test embedding a single frame.""" + # Load a frame from recorded data + video = TimedSensorReplay(get_data(dir_name) / "video") + frame = video.find_closest_seek(10) + + # Create memory and embed + memory = EmbeddingMemory() + + try: + # Create a spatial entry with dummy pose (no TF needed for this test) + dummy_pose = PoseStamped( + position=[0, 0, 0], + orientation=[0, 0, 0, 1], # identity quaternion + ) + spatial_entry = SpatialEntry(image=frame, pose=dummy_pose) + + # Embed the frame + result = memory._embed_spatial_entry(spatial_entry) + + # Verify + assert result is not None + assert result.embedding is not None + assert result.embedding.vector is not None + print(f"Embedding shape: {result.embedding.vector.shape}") + print(f"Embedding vector (first 5): {result.embedding.vector[:5]}") + finally: + memory.stop() diff --git a/dimos/memory/timeseries/__init__.py b/dimos/memory/timeseries/__init__.py new file mode 100644 index 0000000000..debc14ab3a --- /dev/null +++ b/dimos/memory/timeseries/__init__.py @@ -0,0 +1,41 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Time series storage and replay.""" + +from dimos.memory.timeseries.base import TimeSeriesStore +from dimos.memory.timeseries.inmemory import InMemoryStore +from dimos.memory.timeseries.pickledir import PickleDirStore +from dimos.memory.timeseries.sqlite import SqliteStore + + +def __getattr__(name: str): # type: ignore[no-untyped-def] + if name == "PostgresStore": + from dimos.memory.timeseries.postgres import PostgresStore + + return PostgresStore + if name == "reset_db": + from dimos.memory.timeseries.postgres import reset_db + + return reset_db + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") + + +__all__ = [ + "InMemoryStore", + "PickleDirStore", + "PostgresStore", + "SqliteStore", + "TimeSeriesStore", + "reset_db", +] diff --git a/dimos/memory/timeseries/base.py b/dimos/memory/timeseries/base.py new file mode 100644 index 0000000000..0d88355b5b --- /dev/null +++ b/dimos/memory/timeseries/base.py @@ -0,0 +1,367 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Unified time series storage and replay.""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +import time +from typing import TYPE_CHECKING, Generic, TypeVar + +import reactivex as rx +from reactivex import operators as ops +from reactivex.disposable import CompositeDisposable, Disposable +from reactivex.scheduler import TimeoutScheduler + +if TYPE_CHECKING: + from collections.abc import Iterator + + from reactivex.observable import Observable + + from dimos.types.timestamped import Timestamped + +T = TypeVar("T", bound="Timestamped") + + +class TimeSeriesStore(Generic[T], ABC): + """Unified storage + replay for sensor data. + + Implement abstract methods for your backend (in-memory, pickle, sqlite, etc.). + All iteration, streaming, and seek logic comes free from the base class. + + T must be a Timestamped subclass — timestamps are taken from .ts attribute. + """ + + @abstractmethod + def _save(self, timestamp: float, data: T) -> None: + """Save data at timestamp.""" + ... + + @abstractmethod + def _load(self, timestamp: float) -> T | None: + """Load data at exact timestamp. Returns None if not found.""" + ... + + @abstractmethod + def _delete(self, timestamp: float) -> T | None: + """Delete data at exact timestamp. Returns the deleted item or None.""" + ... + + @abstractmethod + def _iter_items( + self, start: float | None = None, end: float | None = None + ) -> Iterator[tuple[float, T]]: + """Lazy iteration of (timestamp, data) in range.""" + ... + + @abstractmethod + def _find_closest_timestamp( + self, timestamp: float, tolerance: float | None = None + ) -> float | None: + """Find closest timestamp. Backend can optimize (binary search, db index, etc.).""" + ... + + @abstractmethod + def _count(self) -> int: + """Return number of stored items.""" + ... + + @abstractmethod + def _last_timestamp(self) -> float | None: + """Return the last (largest) timestamp, or None if empty.""" + ... 
+ + @abstractmethod + def _find_before(self, timestamp: float) -> tuple[float, T] | None: + """Find the last (ts, data) strictly before the given timestamp.""" + ... + + @abstractmethod + def _find_after(self, timestamp: float) -> tuple[float, T] | None: + """Find the first (ts, data) strictly after the given timestamp.""" + ... + + # --- Collection API (built on abstract methods) --- + + def __len__(self) -> int: + return self._count() + + def __iter__(self) -> Iterator[T]: + """Iterate over data items in timestamp order.""" + for _, data in self._iter_items(): + yield data + + def last_timestamp(self) -> float | None: + """Get the last timestamp in the store.""" + return self._last_timestamp() + + def last(self) -> T | None: + """Get the last data item in the store.""" + ts = self._last_timestamp() + if ts is None: + return None + return self._load(ts) + + @property + def start_ts(self) -> float | None: + """Get the start timestamp of the store.""" + return self.first_timestamp() + + @property + def end_ts(self) -> float | None: + """Get the end timestamp of the store.""" + return self._last_timestamp() + + def time_range(self) -> tuple[float, float] | None: + """Get the time range (start, end) of the store.""" + s = self.first_timestamp() + e = self._last_timestamp() + if s is None or e is None: + return None + return (s, e) + + def duration(self) -> float: + """Get the duration of the store in seconds.""" + r = self.time_range() + return (r[1] - r[0]) if r else 0.0 + + def find_before(self, timestamp: float) -> T | None: + """Find the last item strictly before the given timestamp.""" + result = self._find_before(timestamp) + return result[1] if result else None + + def find_after(self, timestamp: float) -> T | None: + """Find the first item strictly after the given timestamp.""" + result = self._find_after(timestamp) + return result[1] if result else None + + def slice_by_time(self, start: float, end: float) -> list[T]: + """Return items in [start, end) 
range.""" + return [data for _, data in self._iter_items(start=start, end=end)] + + def save(self, *data: T) -> None: + """Save one or more Timestamped items.""" + for item in data: + self._save(item.ts, item) + + def pipe_save(self, source: Observable[T]) -> Observable[T]: + """Operator for Observable.pipe() — saves items using .ts. + + Usage: + observable.pipe(store.pipe_save).subscribe(...) + """ + + def _save_and_return(data: T) -> T: + self._save(data.ts, data) + return data + + return source.pipe(ops.map(_save_and_return)) + + def consume_stream(self, observable: Observable[T]) -> rx.abc.DisposableBase: + """Subscribe to an observable and save items using .ts. + + Usage: + disposable = store.consume_stream(observable) + """ + return observable.subscribe(on_next=lambda data: self._save(data.ts, data)) + + def load(self, timestamp: float) -> T | None: + """Load data at exact timestamp.""" + return self._load(timestamp) + + def prune_old(self, cutoff: float) -> None: + """Prune items older than cutoff timestamp.""" + to_delete = [ts for ts, _ in self._iter_items(end=cutoff)] + for ts in to_delete: + self._delete(ts) + + def find_closest( + self, + timestamp: float, + tolerance: float | None = None, + ) -> T | None: + """Find data closest to the given absolute timestamp.""" + closest_ts = self._find_closest_timestamp(timestamp, tolerance) + if closest_ts is None: + return None + return self._load(closest_ts) + + def find_closest_seek( + self, + relative_seconds: float, + tolerance: float | None = None, + ) -> T | None: + """Find data closest to a time relative to the start.""" + first = self.first_timestamp() + if first is None: + return None + return self.find_closest(first + relative_seconds, tolerance) + + def first_timestamp(self) -> float | None: + """Get the first timestamp in the store.""" + for ts, _ in self._iter_items(): + return ts + return None + + def first(self) -> T | None: + """Get the first data item in the store.""" + for _, data in 
self._iter_items(): + return data + return None + + def iterate_items( + self, + seek: float | None = None, + duration: float | None = None, + from_timestamp: float | None = None, + loop: bool = False, + ) -> Iterator[tuple[float, T]]: + """Iterate over (timestamp, data) tuples with optional seek/duration.""" + first = self.first_timestamp() + if first is None: + return + + if from_timestamp is not None: + start = from_timestamp + elif seek is not None: + start = first + seek + else: + start = None + + end = None + if duration is not None: + start_ts = start if start is not None else first + end = start_ts + duration + + while True: + yield from self._iter_items(start=start, end=end) + if not loop: + break + + def iterate( + self, + seek: float | None = None, + duration: float | None = None, + from_timestamp: float | None = None, + loop: bool = False, + ) -> Iterator[T]: + """Iterate over data items with optional seek/duration.""" + for _, data in self.iterate_items( + seek=seek, duration=duration, from_timestamp=from_timestamp, loop=loop + ): + yield data + + def iterate_realtime( + self, + speed: float = 1.0, + seek: float | None = None, + duration: float | None = None, + from_timestamp: float | None = None, + loop: bool = False, + ) -> Iterator[T]: + """Iterate data, sleeping to match original timing.""" + prev_ts: float | None = None + for ts, data in self.iterate_items( + seek=seek, duration=duration, from_timestamp=from_timestamp, loop=loop + ): + if prev_ts is not None: + delay = (ts - prev_ts) / speed + if delay > 0: + time.sleep(delay) + prev_ts = ts + yield data + + def stream( + self, + speed: float = 1.0, + seek: float | None = None, + duration: float | None = None, + from_timestamp: float | None = None, + loop: bool = False, + ) -> Observable[T]: + """Stream data as Observable with timing control. + + Uses scheduler-based timing with absolute time reference to prevent drift. 
+ """ + + def subscribe( + observer: rx.abc.ObserverBase[T], + scheduler: rx.abc.SchedulerBase | None = None, + ) -> rx.abc.DisposableBase: + sched = scheduler or TimeoutScheduler() + disp = CompositeDisposable() + is_disposed = False + + iterator = self.iterate_items( + seek=seek, duration=duration, from_timestamp=from_timestamp, loop=loop + ) + + try: + first_ts, first_data = next(iterator) + except StopIteration: + observer.on_completed() + return Disposable() + + start_local_time = time.time() + start_replay_time = first_ts + + observer.on_next(first_data) + + try: + next_message: tuple[float, T] | None = next(iterator) + except StopIteration: + observer.on_completed() + return disp + + def schedule_emission(message: tuple[float, T]) -> None: + nonlocal next_message, is_disposed + + if is_disposed: + return + + msg_ts, msg_data = message + + try: + next_message = next(iterator) + except StopIteration: + next_message = None + + target_time = start_local_time + (msg_ts - start_replay_time) / speed + delay = max(0.0, target_time - time.time()) + + def emit( + _scheduler: rx.abc.SchedulerBase, _state: object + ) -> rx.abc.DisposableBase | None: + if is_disposed: + return None + observer.on_next(msg_data) + if next_message is not None: + schedule_emission(next_message) + else: + observer.on_completed() + return None + + sched.schedule_relative(delay, emit) + + if next_message is not None: + schedule_emission(next_message) + + def dispose() -> None: + nonlocal is_disposed + is_disposed = True + disp.dispose() + + return Disposable(dispose) + + return rx.create(subscribe) diff --git a/dimos/memory/timeseries/inmemory.py b/dimos/memory/timeseries/inmemory.py new file mode 100644 index 0000000000..b67faca644 --- /dev/null +++ b/dimos/memory/timeseries/inmemory.py @@ -0,0 +1,119 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""In-memory backend for TimeSeriesStore.""" + +from collections.abc import Iterator + +from sortedcontainers import SortedKeyList # type: ignore[import-untyped] + +from dimos.memory.timeseries.base import T, TimeSeriesStore + + +class InMemoryStore(TimeSeriesStore[T]): + """In-memory storage using SortedKeyList. O(log n) insert, lookup, and range queries.""" + + def __init__(self) -> None: + self._entries: SortedKeyList = SortedKeyList(key=lambda e: e.ts) + + def _bisect_exact(self, timestamp: float) -> int | None: + """Return index of entry with exact timestamp, or None.""" + pos = self._entries.bisect_key_left(timestamp) + if pos < len(self._entries) and self._entries[pos].ts == timestamp: + return pos # type: ignore[no-any-return] + return None + + def _save(self, timestamp: float, data: T) -> None: + self._entries.add(data) + + def _load(self, timestamp: float) -> T | None: + idx = self._bisect_exact(timestamp) + if idx is not None: + return self._entries[idx] # type: ignore[no-any-return] + return None + + def _delete(self, timestamp: float) -> T | None: + idx = self._bisect_exact(timestamp) + if idx is not None: + data = self._entries[idx] + del self._entries[idx] + return data # type: ignore[no-any-return] + return None + + def __iter__(self) -> Iterator[T]: + yield from self._entries + + def _iter_items( + self, start: float | None = None, end: float | None = None + ) -> Iterator[tuple[float, T]]: + if start is not None and end is not None: + it = self._entries.irange_key(start, end, (True, False)) + elif start is not None: + it = 
self._entries.irange_key(min_key=start) + elif end is not None: + it = self._entries.irange_key(max_key=end, inclusive=(True, False)) + else: + it = iter(self._entries) + for e in it: + yield (e.ts, e) + + def _find_closest_timestamp( + self, timestamp: float, tolerance: float | None = None + ) -> float | None: + if not self._entries: + return None + + pos = self._entries.bisect_key_left(timestamp) + + candidates: list[float] = [] + if pos > 0: + candidates.append(self._entries[pos - 1].ts) + if pos < len(self._entries): + candidates.append(self._entries[pos].ts) + + if not candidates: + return None + + # On ties, prefer the later timestamp (more recent data) + closest = max(candidates, key=lambda ts: (-abs(ts - timestamp), ts)) + + if tolerance is not None and abs(closest - timestamp) > tolerance: + return None + + return closest + + def _count(self) -> int: + return len(self._entries) + + def _last_timestamp(self) -> float | None: + if not self._entries: + return None + return self._entries[-1].ts # type: ignore[no-any-return] + + def _find_before(self, timestamp: float) -> tuple[float, T] | None: + if not self._entries: + return None + pos = self._entries.bisect_key_left(timestamp) + if pos > 0: + e = self._entries[pos - 1] + return (e.ts, e) + return None + + def _find_after(self, timestamp: float) -> tuple[float, T] | None: + if not self._entries: + return None + pos = self._entries.bisect_key_right(timestamp) + if pos < len(self._entries): + e = self._entries[pos] + return (e.ts, e) + return None diff --git a/dimos/memory/timeseries/legacy.py b/dimos/memory/timeseries/legacy.py new file mode 100644 index 0000000000..821d306d2d --- /dev/null +++ b/dimos/memory/timeseries/legacy.py @@ -0,0 +1,398 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Legacy pickle directory backend for TimeSeriesStore. + +Compatible with TimedSensorReplay/TimedSensorStorage file format. +""" + +from collections.abc import Callable, Iterator +import glob +import os +from pathlib import Path +import pickle +import re +import time +from typing import Any, cast + +import reactivex as rx +from reactivex.disposable import CompositeDisposable, Disposable +from reactivex.observable import Observable +from reactivex.scheduler import TimeoutScheduler + +from dimos.memory.timeseries.base import T, TimeSeriesStore +from dimos.utils.data import get_data, get_data_dir + + +class LegacyPickleStore(TimeSeriesStore[T]): + """Legacy pickle backend compatible with TimedSensorReplay/TimedSensorStorage. + + File format: + {name}/ + 000.pickle # contains (timestamp, data) tuple + 001.pickle + ... + + Files are assumed to be in chronological order (timestamps increase with file number). + No index is built - iteration is lazy and memory-efficient for large datasets. 
+ + Usage: + # Load existing recording (auto-downloads from LFS if needed) + store = LegacyPickleStore("unitree_go2_bigoffice/lidar") + data = store.find_closest_seek(10.0) + + # Create new recording (directory created on first save) + store = LegacyPickleStore("my_recording/images") + store.save_ts(image) # uses image.ts for timestamp + + Backward compatibility: + This class also supports the old TimedSensorReplay/SensorReplay API: + - iterate_ts() - iterate returning (timestamp, data) tuples + - files - property returning list of file paths + - load_one() - load a single pickle file + """ + + def __init__(self, name: str | Path, autocast: Callable[[Any], T] | None = None) -> None: + """ + Args: + name: Data directory name (e.g. "unitree_go2_bigoffice/lidar") or absolute path. + autocast: Optional function to transform data after loading (for replay) or + before saving (for storage). E.g., `Odometry.from_msg`. + """ + self._name = str(name) + self._root_dir: Path | None = None + self._counter: int = 0 + self._autocast = autocast + + def _get_root_dir(self, for_write: bool = False) -> Path: + """Get root directory, creating on first write if needed.""" + if self._root_dir is not None: + # Ensure directory exists if writing + if for_write: + self._root_dir.mkdir(parents=True, exist_ok=True) + return self._root_dir + + # If absolute path, use directly + if Path(self._name).is_absolute(): + self._root_dir = Path(self._name) + if for_write: + self._root_dir.mkdir(parents=True, exist_ok=True) + elif for_write: + # For writing: use get_data_dir and create if needed + self._root_dir = get_data_dir(self._name) + self._root_dir.mkdir(parents=True, exist_ok=True) + else: + # For reading: use get_data (handles LFS download) + self._root_dir = get_data(self._name) + + return self._root_dir + + def _iter_files(self) -> Iterator[Path]: + """Iterate pickle files in sorted order (by number in filename).""" + + def extract_number(filepath: str) -> int: + basename = 
os.path.basename(filepath) + match = re.search(r"(\d+)\.pickle$", basename) + return int(match.group(1)) if match else 0 + + root_dir = self._get_root_dir() + files = sorted( + glob.glob(os.path.join(root_dir, "*.pickle")), + key=extract_number, + ) + for f in files: + yield Path(f) + + def _save(self, timestamp: float, data: T) -> None: + root_dir = self._get_root_dir(for_write=True) + + # Initialize counter from existing files if needed + if self._counter == 0: + existing = list(root_dir.glob("*.pickle")) + if existing: + # Find highest existing counter + max_num = 0 + for filepath in existing: + match = re.search(r"(\d+)\.pickle$", filepath.name) + if match: + max_num = max(max_num, int(match.group(1))) + self._counter = max_num + 1 + + full_path = root_dir / f"{self._counter:03d}.pickle" + + if full_path.exists(): + raise RuntimeError(f"File {full_path} already exists") + + # Save as (timestamp, data) tuple for legacy compatibility + with open(full_path, "wb") as f: + pickle.dump((timestamp, data), f) + + self._counter += 1 + + def _load(self, timestamp: float) -> T | None: + """Load data at exact timestamp (linear scan).""" + for ts, data in self._iter_items(): + if ts == timestamp: + return data + return None + + def _delete(self, timestamp: float) -> T | None: + """Delete not supported for legacy pickle format.""" + raise NotImplementedError("LegacyPickleStore does not support deletion") + + def _iter_items( + self, start: float | None = None, end: float | None = None + ) -> Iterator[tuple[float, T]]: + """Lazy iteration - loads one file at a time. + + Handles both timed format (timestamp, data) and non-timed format (just data). + For non-timed data, uses file index as synthetic timestamp. 
+ """ + for idx, filepath in enumerate(self._iter_files()): + try: + with open(filepath, "rb") as f: + raw = pickle.load(f) + + # Handle both timed (timestamp, data) and non-timed (just data) formats + if isinstance(raw, tuple) and len(raw) == 2: + ts, data = raw + ts = float(ts) + else: + # Non-timed format: use index as synthetic timestamp + ts = float(idx) + data = raw + except Exception: + continue + + if start is not None and ts < start: + continue + if end is not None and ts >= end: + break + + if self._autocast is not None: + data = self._autocast(data) + yield (ts, cast("T", data)) + + def _find_closest_timestamp( + self, timestamp: float, tolerance: float | None = None + ) -> float | None: + """Linear scan with early exit (assumes timestamps are monotonically increasing).""" + closest_ts: float | None = None + closest_diff = float("inf") + + for ts, _ in self._iter_items(): + diff = abs(ts - timestamp) + + if diff < closest_diff: + closest_diff = diff + closest_ts = ts + elif diff > closest_diff: + # Moving away from target, can stop + break + + if closest_ts is None: + return None + + if tolerance is not None and closest_diff > tolerance: + return None + + return closest_ts + + def _count(self) -> int: + return sum(1 for _ in self._iter_files()) + + def _last_timestamp(self) -> float | None: + last_ts: float | None = None + for ts, _ in self._iter_items(): + last_ts = ts + return last_ts + + def _find_before(self, timestamp: float) -> tuple[float, T] | None: + result: tuple[float, T] | None = None + for ts, data in self._iter_items(): + if ts < timestamp: + result = (ts, data) + else: + break + return result + + def _find_after(self, timestamp: float) -> tuple[float, T] | None: + for ts, data in self._iter_items(): + if ts > timestamp: + return (ts, data) + return None + + # === Backward-compatible API (TimedSensorReplay/SensorReplay) === + + @property + def files(self) -> list[Path]: + """Return list of pickle files (backward compatibility with 
SensorReplay).""" + return list(self._iter_files()) + + def load_one(self, name: int | str | Path) -> T | Any: + """Load a single pickle file (backward compatibility with SensorReplay). + + Args: + name: File index (int), filename without extension (str), or full path (Path) + + Returns: + For TimedSensorReplay: (timestamp, data) tuple + For SensorReplay: just the data + """ + root_dir = self._get_root_dir() + + if isinstance(name, int): + full_path = root_dir / f"{name:03d}.pickle" + elif isinstance(name, Path): + full_path = name + else: + full_path = root_dir / Path(f"{name}.pickle") + + with open(full_path, "rb") as f: + data = pickle.load(f) + + # Legacy format: (timestamp, data) tuple + if isinstance(data, tuple) and len(data) == 2: + ts, payload = data + if self._autocast is not None: + payload = self._autocast(payload) + return (ts, payload) + + # Non-timed format: just data + if self._autocast is not None: + data = self._autocast(data) + return data + + def iterate_ts( + self, + seek: float | None = None, + duration: float | None = None, + from_timestamp: float | None = None, + loop: bool = False, + ) -> Iterator[tuple[float, T]]: + """Iterate with timestamps (backward compatibility with TimedSensorReplay). 
+ + Args: + seek: Relative seconds from start + duration: Duration window in seconds + from_timestamp: Absolute timestamp to start from + loop: Whether to loop the data + + Yields: + (timestamp, data) tuples + """ + first = self.first_timestamp() + if first is None: + return + + # Calculate start timestamp + start: float | None = None + if from_timestamp is not None: + start = from_timestamp + elif seek is not None: + start = first + seek + + # Calculate end timestamp + end: float | None = None + if duration is not None: + start_ts = start if start is not None else first + end = start_ts + duration + + while True: + yield from self._iter_items(start=start, end=end) + if not loop: + break + + def stream( + self, + speed: float = 1.0, + seek: float | None = None, + duration: float | None = None, + from_timestamp: float | None = None, + loop: bool = False, + ) -> Observable[T]: + """Stream data as Observable with timing control. + + Uses stored timestamps from pickle files for timing (not data.ts). 
+ """ + + def subscribe( + observer: rx.abc.ObserverBase[T], + scheduler: rx.abc.SchedulerBase | None = None, + ) -> rx.abc.DisposableBase: + sched = scheduler or TimeoutScheduler() + disp = CompositeDisposable() + is_disposed = False + + iterator = self.iterate_ts( + seek=seek, duration=duration, from_timestamp=from_timestamp, loop=loop + ) + + try: + first_ts, first_data = next(iterator) + except StopIteration: + observer.on_completed() + return Disposable() + + start_local_time = time.time() + start_replay_time = first_ts + + observer.on_next(first_data) + + try: + next_message: tuple[float, T] | None = next(iterator) + except StopIteration: + observer.on_completed() + return disp + + def schedule_emission(message: tuple[float, T]) -> None: + nonlocal next_message, is_disposed + + if is_disposed: + return + + ts, data = message + + try: + next_message = next(iterator) + except StopIteration: + next_message = None + + target_time = start_local_time + (ts - start_replay_time) / speed + delay = max(0.0, target_time - time.time()) + + def emit( + _scheduler: rx.abc.SchedulerBase, _state: object + ) -> rx.abc.DisposableBase | None: + if is_disposed: + return None + observer.on_next(data) + if next_message is not None: + schedule_emission(next_message) + else: + observer.on_completed() + return None + + sched.schedule_relative(delay, emit) + + if next_message is not None: + schedule_emission(next_message) + + def dispose() -> None: + nonlocal is_disposed + is_disposed = True + disp.dispose() + + return Disposable(dispose) + + return rx.create(subscribe) diff --git a/dimos/memory/timeseries/pickledir.py b/dimos/memory/timeseries/pickledir.py new file mode 100644 index 0000000000..9e8cd5a249 --- /dev/null +++ b/dimos/memory/timeseries/pickledir.py @@ -0,0 +1,198 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Pickle directory backend for TimeSeriesStore.""" + +import bisect +from collections.abc import Iterator +import glob +import os +from pathlib import Path +import pickle + +from dimos.memory.timeseries.base import T, TimeSeriesStore +from dimos.utils.data import get_data, get_data_dir + + +class PickleDirStore(TimeSeriesStore[T]): + """Pickle directory backend. Files named by timestamp. + + Directory structure: + {name}/ + 1704067200.123.pickle + 1704067200.456.pickle + ... + + Usage: + # Load existing recording (auto-downloads from LFS if needed) + store = PickleDirStore("unitree_go2_bigoffice/lidar") + data = store.find_closest_seek(10.0) + + # Create new recording (directory created on first save) + store = PickleDirStore("my_recording/images") + store.save(image) # saves using image.ts + """ + + def __init__(self, name: str) -> None: + """ + Args: + name: Data directory name (e.g. 
"unitree_go2_bigoffice/lidar") + """ + self._name = name + self._root_dir: Path | None = None + + # Cached sorted timestamps for find_closest + self._timestamps: list[float] | None = None + + def _get_root_dir(self, for_write: bool = False) -> Path: + """Get root directory, creating on first write if needed.""" + if self._root_dir is not None: + return self._root_dir + + # If absolute path, use directly + if Path(self._name).is_absolute(): + self._root_dir = Path(self._name) + if for_write: + self._root_dir.mkdir(parents=True, exist_ok=True) + elif for_write: + # For writing: use get_data_dir and create if needed + self._root_dir = get_data_dir(self._name) + self._root_dir.mkdir(parents=True, exist_ok=True) + else: + # For reading: use get_data (handles LFS download) + self._root_dir = get_data(self._name) + + return self._root_dir + + def _save(self, timestamp: float, data: T) -> None: + root_dir = self._get_root_dir(for_write=True) + full_path = root_dir / f"{timestamp}.pickle" + + if full_path.exists(): + raise RuntimeError(f"File {full_path} already exists") + + with open(full_path, "wb") as f: + pickle.dump(data, f) + + self._timestamps = None # Invalidate cache + + def _load(self, timestamp: float) -> T | None: + filepath = self._get_root_dir() / f"{timestamp}.pickle" + if filepath.exists(): + return self._load_file(filepath) + return None + + def _delete(self, timestamp: float) -> T | None: + filepath = self._get_root_dir() / f"{timestamp}.pickle" + if filepath.exists(): + data = self._load_file(filepath) + filepath.unlink() + self._timestamps = None # Invalidate cache + return data + return None + + def _iter_items( + self, start: float | None = None, end: float | None = None + ) -> Iterator[tuple[float, T]]: + for ts in self._get_timestamps(): + if start is not None and ts < start: + continue + if end is not None and ts >= end: + break + data = self._load(ts) + if data is not None: + yield (ts, data) + + def _find_closest_timestamp( + self, timestamp: 
float, tolerance: float | None = None
+    ) -> float | None:
+        timestamps = self._get_timestamps()
+        if not timestamps:
+            return None
+
+        pos = bisect.bisect_left(timestamps, timestamp)
+
+        # Check neighbors
+        candidates = []
+        if pos > 0:
+            candidates.append(timestamps[pos - 1])
+        if pos < len(timestamps):
+            candidates.append(timestamps[pos])
+
+        if not candidates:
+            return None
+
+        closest = min(candidates, key=lambda ts: abs(ts - timestamp))
+
+        if tolerance is not None and abs(closest - timestamp) > tolerance:
+            return None
+
+        return closest
+
+    def _get_timestamps(self) -> list[float]:
+        """Get sorted list of all timestamps."""
+        if self._timestamps is not None:
+            return self._timestamps
+
+        timestamps: list[float] = []
+        root_dir = self._get_root_dir()
+        for filepath in glob.glob(os.path.join(root_dir, "*.pickle")):
+            try:
+                ts = float(Path(filepath).stem)
+                timestamps.append(ts)
+            except ValueError:
+                continue
+
+        timestamps.sort()
+        self._timestamps = timestamps
+        return timestamps
+
+    def _count(self) -> int:
+        return len(self._get_timestamps())
+
+    def _last_timestamp(self) -> float | None:
+        timestamps = self._get_timestamps()
+        return timestamps[-1] if timestamps else None
+
+    def _find_before(self, timestamp: float) -> tuple[float, T] | None:
+        timestamps = self._get_timestamps()
+        if not timestamps:
+            return None
+        pos = bisect.bisect_left(timestamps, timestamp)
+        if pos > 0:
+            ts = timestamps[pos - 1]
+            data = self._load(ts)
+            if data is not None:
+                return (ts, data)
+        return None
+
+    def _find_after(self, timestamp: float) -> tuple[float, T] | None:
+        timestamps = self._get_timestamps()
+        if not timestamps:
+            return None
+        pos = bisect.bisect_right(timestamps, timestamp)
+        if pos < len(timestamps):
+            ts = timestamps[pos]
+            data = self._load(ts)
+            if data is not None:
+                return (ts, data)
+        return None
+
+    def _load_file(self, filepath: Path) -> T | None:
+        """Load data from a pickle file; returns None on read/unpickle failure."""
+        try:
+            with open(filepath, "rb") as f:
+                
data: T = pickle.load(f) + return data + except Exception: + return None diff --git a/dimos/memory/timeseries/postgres.py b/dimos/memory/timeseries/postgres.py new file mode 100644 index 0000000000..0daae44adb --- /dev/null +++ b/dimos/memory/timeseries/postgres.py @@ -0,0 +1,312 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""PostgreSQL backend for TimeSeriesStore.""" + +from collections.abc import Iterator +import pickle +import re + +import psycopg2 # type: ignore[import-untyped] +import psycopg2.extensions # type: ignore[import-untyped] + +from dimos.core.resource import Resource +from dimos.memory.timeseries.base import T, TimeSeriesStore + +# Valid SQL identifier: alphanumeric and underscores, not starting with digit +_VALID_IDENTIFIER = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$") + + +def _validate_identifier(name: str) -> str: + """Validate SQL identifier to prevent injection.""" + if not _VALID_IDENTIFIER.match(name): + raise ValueError( + f"Invalid identifier '{name}': must be alphanumeric/underscore, not start with digit" + ) + if len(name) > 128: + raise ValueError(f"Identifier too long: {len(name)} > 128") + return name + + +class PostgresStore(TimeSeriesStore[T], Resource): + """PostgreSQL backend for sensor data. + + Multiple stores can share the same database with different tables. + Implements Resource for lifecycle management (start/stop/dispose). 
+ + Usage: + # Create store + store = PostgresStore("lidar") + store.start() # open connection + + # Use store + store.save(data) # saves using data.ts + data = store.find_closest_seek(10.0) + + # Cleanup + store.stop() # close connection + + # Multiple sensors in same db + lidar = PostgresStore("lidar") + images = PostgresStore("images") + + # Manual run management via table naming + run1_lidar = PostgresStore("run1_lidar") + """ + + def __init__( + self, + table: str, + db: str = "dimensional", + host: str = "localhost", + port: int = 5432, + user: str | None = None, + ) -> None: + """ + Args: + table: Table name for this sensor's data (alphanumeric/underscore only). + db: Database name (alphanumeric/underscore only). + host: PostgreSQL host. + port: PostgreSQL port. + user: PostgreSQL user. Defaults to current system user. + """ + self._table = _validate_identifier(table) + self._db = _validate_identifier(db) + self._host = host + self._port = port + self._user = user + self._conn: psycopg2.extensions.connection | None = None + self._table_created = False + + def start(self) -> None: + """Open database connection.""" + if self._conn is not None: + return + self._conn = psycopg2.connect( + dbname=self._db, + host=self._host, + port=self._port, + user=self._user, + ) + + def stop(self) -> None: + """Close database connection.""" + if self._conn is not None: + self._conn.close() + self._conn = None + + def _get_conn(self) -> psycopg2.extensions.connection: + """Get connection, starting if needed.""" + if self._conn is None: + self.start() + assert self._conn is not None + return self._conn + + def _ensure_table(self) -> None: + """Create table if it doesn't exist.""" + if self._table_created: + return + conn = self._get_conn() + with conn.cursor() as cur: + cur.execute(f""" + CREATE TABLE IF NOT EXISTS {self._table} ( + timestamp DOUBLE PRECISION PRIMARY KEY, + data BYTEA NOT NULL + ) + """) + cur.execute(f""" + CREATE INDEX IF NOT EXISTS idx_{self._table}_ts + ON 
{self._table}(timestamp) + """) + conn.commit() + self._table_created = True + + def _save(self, timestamp: float, data: T) -> None: + self._ensure_table() + conn = self._get_conn() + blob = pickle.dumps(data) + with conn.cursor() as cur: + cur.execute( + f""" + INSERT INTO {self._table} (timestamp, data) VALUES (%s, %s) + ON CONFLICT (timestamp) DO UPDATE SET data = EXCLUDED.data + """, + (timestamp, psycopg2.Binary(blob)), + ) + conn.commit() + + def _load(self, timestamp: float) -> T | None: + self._ensure_table() + conn = self._get_conn() + with conn.cursor() as cur: + cur.execute(f"SELECT data FROM {self._table} WHERE timestamp = %s", (timestamp,)) + row = cur.fetchone() + if row is None: + return None + data: T = pickle.loads(row[0]) + return data + + def _delete(self, timestamp: float) -> T | None: + data = self._load(timestamp) + if data is not None: + conn = self._get_conn() + with conn.cursor() as cur: + cur.execute(f"DELETE FROM {self._table} WHERE timestamp = %s", (timestamp,)) + conn.commit() + return data + + def _iter_items( + self, start: float | None = None, end: float | None = None + ) -> Iterator[tuple[float, T]]: + self._ensure_table() + conn = self._get_conn() + + query = f"SELECT timestamp, data FROM {self._table}" + params: list[float] = [] + conditions = [] + + if start is not None: + conditions.append("timestamp >= %s") + params.append(start) + if end is not None: + conditions.append("timestamp < %s") + params.append(end) + + if conditions: + query += " WHERE " + " AND ".join(conditions) + query += " ORDER BY timestamp" + + with conn.cursor() as cur: + cur.execute(query, params) + for row in cur: + ts: float = row[0] + data: T = pickle.loads(row[1]) + yield (ts, data) + + def _find_closest_timestamp( + self, timestamp: float, tolerance: float | None = None + ) -> float | None: + self._ensure_table() + conn = self._get_conn() + + with conn.cursor() as cur: + # Get closest timestamp <= target + cur.execute( + f""" + SELECT timestamp FROM 
{self._table} + WHERE timestamp <= %s + ORDER BY timestamp DESC LIMIT 1 + """, + (timestamp,), + ) + before = cur.fetchone() + + # Get closest timestamp >= target + cur.execute( + f""" + SELECT timestamp FROM {self._table} + WHERE timestamp >= %s + ORDER BY timestamp ASC LIMIT 1 + """, + (timestamp,), + ) + after = cur.fetchone() + + candidates: list[float] = [] + if before: + candidates.append(before[0]) + if after: + candidates.append(after[0]) + + if not candidates: + return None + + closest = min(candidates, key=lambda ts: abs(ts - timestamp)) + + if tolerance is not None and abs(closest - timestamp) > tolerance: + return None + + return closest + + def _count(self) -> int: + self._ensure_table() + conn = self._get_conn() + with conn.cursor() as cur: + cur.execute(f"SELECT COUNT(*) FROM {self._table}") + row = cur.fetchone() + return row[0] if row else 0 # type: ignore[no-any-return] + + def _last_timestamp(self) -> float | None: + self._ensure_table() + conn = self._get_conn() + with conn.cursor() as cur: + cur.execute(f"SELECT MAX(timestamp) FROM {self._table}") + row = cur.fetchone() + if row is None or row[0] is None: + return None + return row[0] # type: ignore[no-any-return] + + def _find_before(self, timestamp: float) -> tuple[float, T] | None: + self._ensure_table() + conn = self._get_conn() + with conn.cursor() as cur: + cur.execute( + f"SELECT timestamp, data FROM {self._table} WHERE timestamp < %s ORDER BY timestamp DESC LIMIT 1", + (timestamp,), + ) + row = cur.fetchone() + if row is None: + return None + return (row[0], pickle.loads(row[1])) + + def _find_after(self, timestamp: float) -> tuple[float, T] | None: + self._ensure_table() + conn = self._get_conn() + with conn.cursor() as cur: + cur.execute( + f"SELECT timestamp, data FROM {self._table} WHERE timestamp > %s ORDER BY timestamp ASC LIMIT 1", + (timestamp,), + ) + row = cur.fetchone() + if row is None: + return None + return (row[0], pickle.loads(row[1])) + + +def reset_db(db: str = 
"dimensional", host: str = "localhost", port: int = 5432) -> None: + """Drop and recreate database. Simple migration strategy. + + WARNING: This deletes all data in the database! + + Args: + db: Database name to reset (alphanumeric/underscore only). + host: PostgreSQL host. + port: PostgreSQL port. + """ + db = _validate_identifier(db) + # Connect to 'postgres' database to drop/create + conn = psycopg2.connect(dbname="postgres", host=host, port=port) + conn.autocommit = True + with conn.cursor() as cur: + # Terminate existing connections + cur.execute( + """ + SELECT pg_terminate_backend(pid) + FROM pg_stat_activity + WHERE datname = %s AND pid <> pg_backend_pid() + """, + (db,), + ) + cur.execute(f"DROP DATABASE IF EXISTS {db}") + cur.execute(f"CREATE DATABASE {db}") + conn.close() diff --git a/dimos/memory/timeseries/sqlite.py b/dimos/memory/timeseries/sqlite.py new file mode 100644 index 0000000000..6e2ac7a7f5 --- /dev/null +++ b/dimos/memory/timeseries/sqlite.py @@ -0,0 +1,268 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""SQLite backend for TimeSeriesStore.""" + +from collections.abc import Iterator +from pathlib import Path +import pickle +import re +import sqlite3 + +from dimos.memory.timeseries.base import T, TimeSeriesStore +from dimos.utils.data import get_data, get_data_dir + +# Valid SQL identifier: alphanumeric and underscores, not starting with digit +_VALID_IDENTIFIER = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$") + + +def _validate_identifier(name: str) -> str: + """Validate SQL identifier to prevent injection.""" + if not _VALID_IDENTIFIER.match(name): + raise ValueError( + f"Invalid identifier '{name}': must be alphanumeric/underscore, not start with digit" + ) + if len(name) > 128: + raise ValueError(f"Identifier too long: {len(name)} > 128") + return name + + +class SqliteStore(TimeSeriesStore[T]): + """SQLite backend for sensor data. Good for indexed queries and single-file storage. + + Data is stored as pickled BLOBs with timestamp as indexed column. + + Usage: + # Named store (uses data/ directory, auto-downloads from LFS if needed) + store = SqliteStore("recordings/lidar") # -> data/recordings/lidar.db + store.save(data) # saves using data.ts + + # Absolute path + store = SqliteStore("/path/to/sensors.db") + + # In-memory (for testing) + store = SqliteStore(":memory:") + + # Multiple tables in one DB + store = SqliteStore("recordings/sensors", table="lidar") + """ + + def __init__(self, name: str | Path, table: str = "sensor_data") -> None: + """ + Args: + name: Data name (e.g. "recordings/lidar") resolved via get_data, + absolute path, or ":memory:" for in-memory. + table: Table name for this sensor's data (alphanumeric/underscore only). 
+ """ + self._name = str(name) + self._table = _validate_identifier(table) + self._db_path: str | None = None + self._conn: sqlite3.Connection | None = None + + def _get_db_path(self, for_write: bool = False) -> str: + """Get database path, resolving via get_data if needed.""" + if self._db_path is not None: + return self._db_path + + # Special case for in-memory + if self._name == ":memory:": + self._db_path = ":memory:" + return self._db_path + + # If absolute path, use directly + if Path(self._name).is_absolute(): + self._db_path = self._name + elif for_write: + # For writing: use get_data_dir + db_file = get_data_dir(self._name + ".db") + db_file.parent.mkdir(parents=True, exist_ok=True) + self._db_path = str(db_file) + else: + # For reading: use get_data (handles LFS download) + # Try with .db extension first + try: + db_file = get_data(self._name + ".db") + self._db_path = str(db_file) + except FileNotFoundError: + # Fall back to get_data_dir for new databases + db_file = get_data_dir(self._name + ".db") + db_file.parent.mkdir(parents=True, exist_ok=True) + self._db_path = str(db_file) + + return self._db_path + + def _get_conn(self) -> sqlite3.Connection: + """Get or create database connection.""" + if self._conn is None: + db_path = self._get_db_path(for_write=True) + self._conn = sqlite3.connect(db_path, check_same_thread=False) + self._create_table() + return self._conn + + def _create_table(self) -> None: + """Create table if it doesn't exist.""" + conn = self._conn + assert conn is not None + conn.execute(f""" + CREATE TABLE IF NOT EXISTS {self._table} ( + timestamp REAL PRIMARY KEY, + data BLOB NOT NULL + ) + """) + conn.execute(f""" + CREATE INDEX IF NOT EXISTS idx_{self._table}_timestamp + ON {self._table}(timestamp) + """) + conn.commit() + + def _save(self, timestamp: float, data: T) -> None: + conn = self._get_conn() + blob = pickle.dumps(data) + conn.execute( + f"INSERT OR REPLACE INTO {self._table} (timestamp, data) VALUES (?, ?)", + (timestamp, 
blob), + ) + conn.commit() + + def _load(self, timestamp: float) -> T | None: + conn = self._get_conn() + cursor = conn.execute(f"SELECT data FROM {self._table} WHERE timestamp = ?", (timestamp,)) + row = cursor.fetchone() + if row is None: + return None + data: T = pickle.loads(row[0]) + return data + + def _delete(self, timestamp: float) -> T | None: + data = self._load(timestamp) + if data is not None: + conn = self._get_conn() + conn.execute(f"DELETE FROM {self._table} WHERE timestamp = ?", (timestamp,)) + conn.commit() + return data + + def _iter_items( + self, start: float | None = None, end: float | None = None + ) -> Iterator[tuple[float, T]]: + conn = self._get_conn() + + # Build query with optional range filters + query = f"SELECT timestamp, data FROM {self._table}" + params: list[float] = [] + conditions = [] + + if start is not None: + conditions.append("timestamp >= ?") + params.append(start) + if end is not None: + conditions.append("timestamp < ?") + params.append(end) + + if conditions: + query += " WHERE " + " AND ".join(conditions) + query += " ORDER BY timestamp" + + cursor = conn.execute(query, params) + for row in cursor: + ts: float = row[0] + data: T = pickle.loads(row[1]) + yield (ts, data) + + def _find_closest_timestamp( + self, timestamp: float, tolerance: float | None = None + ) -> float | None: + conn = self._get_conn() + + # Find closest timestamp using SQL + # Get the closest timestamp <= target + cursor = conn.execute( + f""" + SELECT timestamp FROM {self._table} + WHERE timestamp <= ? + ORDER BY timestamp DESC LIMIT 1 + """, + (timestamp,), + ) + before = cursor.fetchone() + + # Get the closest timestamp >= target + cursor = conn.execute( + f""" + SELECT timestamp FROM {self._table} + WHERE timestamp >= ? 
+ ORDER BY timestamp ASC LIMIT 1 + """, + (timestamp,), + ) + after = cursor.fetchone() + + # Find the closest of the two + candidates: list[float] = [] + if before: + candidates.append(before[0]) + if after: + candidates.append(after[0]) + + if not candidates: + return None + + closest = min(candidates, key=lambda ts: abs(ts - timestamp)) + + if tolerance is not None and abs(closest - timestamp) > tolerance: + return None + + return closest + + def _count(self) -> int: + conn = self._get_conn() + cursor = conn.execute(f"SELECT COUNT(*) FROM {self._table}") + return cursor.fetchone()[0] # type: ignore[no-any-return] + + def _last_timestamp(self) -> float | None: + conn = self._get_conn() + cursor = conn.execute(f"SELECT MAX(timestamp) FROM {self._table}") + row = cursor.fetchone() + if row is None or row[0] is None: + return None + return row[0] # type: ignore[no-any-return] + + def _find_before(self, timestamp: float) -> tuple[float, T] | None: + conn = self._get_conn() + cursor = conn.execute( + f"SELECT timestamp, data FROM {self._table} WHERE timestamp < ? ORDER BY timestamp DESC LIMIT 1", + (timestamp,), + ) + row = cursor.fetchone() + if row is None: + return None + return (row[0], pickle.loads(row[1])) + + def _find_after(self, timestamp: float) -> tuple[float, T] | None: + conn = self._get_conn() + cursor = conn.execute( + f"SELECT timestamp, data FROM {self._table} WHERE timestamp > ? 
ORDER BY timestamp ASC LIMIT 1", + (timestamp,), + ) + row = cursor.fetchone() + if row is None: + return None + return (row[0], pickle.loads(row[1])) + + def close(self) -> None: + """Close the database connection.""" + if self._conn is not None: + self._conn.close() + self._conn = None + + def __del__(self) -> None: + self.close() diff --git a/dimos/memory/timeseries/test_base.py b/dimos/memory/timeseries/test_base.py new file mode 100644 index 0000000000..9491d2c93c --- /dev/null +++ b/dimos/memory/timeseries/test_base.py @@ -0,0 +1,468 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for TimeSeriesStore implementations.""" + +from dataclasses import dataclass +from pathlib import Path +import tempfile +import uuid + +import pytest + +from dimos.memory.timeseries.base import TimeSeriesStore +from dimos.memory.timeseries.inmemory import InMemoryStore +from dimos.memory.timeseries.legacy import LegacyPickleStore +from dimos.memory.timeseries.pickledir import PickleDirStore +from dimos.memory.timeseries.sqlite import SqliteStore +from dimos.types.timestamped import Timestamped + + +@dataclass +class SampleData(Timestamped): + """Simple timestamped data for testing.""" + + value: str + + def __init__(self, value: str, ts: float) -> None: + super().__init__(ts) + self.value = value + + def __eq__(self, other: object) -> bool: + if isinstance(other, SampleData): + return self.value == other.value and self.ts == other.ts + return False + + +@pytest.fixture +def temp_dir(): + """Create a temporary directory for file-based store tests.""" + with tempfile.TemporaryDirectory() as tmpdir: + yield tmpdir + + +def make_in_memory_store() -> TimeSeriesStore[SampleData]: + return InMemoryStore[SampleData]() + + +def make_pickle_dir_store(tmpdir: str) -> TimeSeriesStore[SampleData]: + return PickleDirStore[SampleData](tmpdir) + + +def make_sqlite_store(tmpdir: str) -> TimeSeriesStore[SampleData]: + return SqliteStore[SampleData](Path(tmpdir) / "test.db") + + +def make_legacy_pickle_store(tmpdir: str) -> TimeSeriesStore[SampleData]: + return LegacyPickleStore[SampleData](Path(tmpdir) / "legacy") + + +# Base test data (always available) +testdata: list[tuple[object, str]] = [ + (lambda _: make_in_memory_store(), "InMemoryStore"), + (lambda tmpdir: make_pickle_dir_store(tmpdir), "PickleDirStore"), + (lambda tmpdir: make_sqlite_store(tmpdir), "SqliteStore"), + (lambda tmpdir: make_legacy_pickle_store(tmpdir), "LegacyPickleStore"), +] + +# Track postgres tables to clean up +_postgres_tables: list[str] = [] + +try: + import psycopg2 + + from 
dimos.memory.timeseries.postgres import PostgresStore + + # Test connection + _test_conn = psycopg2.connect(dbname="dimensional") + _test_conn.close() + + def make_postgres_store(_tmpdir: str) -> TimeSeriesStore[SampleData]: + """Create PostgresStore with unique table name.""" + table = f"test_{uuid.uuid4().hex[:8]}" + _postgres_tables.append(table) + store = PostgresStore[SampleData](table) + store.start() + return store + + testdata.append((lambda tmpdir: make_postgres_store(tmpdir), "PostgresStore")) + + @pytest.fixture(autouse=True) + def cleanup_postgres_tables(): + """Clean up postgres test tables after each test.""" + yield + if _postgres_tables: + try: + conn = psycopg2.connect(dbname="dimensional") + conn.autocommit = True + with conn.cursor() as cur: + for table in _postgres_tables: + cur.execute(f"DROP TABLE IF EXISTS {table}") + conn.close() + except Exception: + pass # Ignore cleanup errors + _postgres_tables.clear() + +except Exception: + print("PostgreSQL not available") + + +@pytest.mark.parametrize("store_factory,store_name", testdata) +class TestTimeSeriesStore: + """Parametrized tests for all TimeSeriesStore implementations.""" + + def test_save_and_load(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("data_at_1", 1.0)) + store.save(SampleData("data_at_2", 2.0)) + + assert store.load(1.0) == SampleData("data_at_1", 1.0) + assert store.load(2.0) == SampleData("data_at_2", 2.0) + assert store.load(3.0) is None + + def test_find_closest_timestamp(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + + # Exact match + assert store._find_closest_timestamp(2.0) == 2.0 + + # Closest to 1.4 is 1.0 + assert store._find_closest_timestamp(1.4) == 1.0 + + # Closest to 1.6 is 2.0 + assert store._find_closest_timestamp(1.6) == 2.0 + + # With tolerance + assert store._find_closest_timestamp(1.4, 
tolerance=0.5) == 1.0 + assert store._find_closest_timestamp(1.4, tolerance=0.3) is None + + def test_iter_items(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + + # Should iterate in timestamp order + items = list(store._iter_items()) + assert items == [ + (1.0, SampleData("a", 1.0)), + (2.0, SampleData("b", 2.0)), + (3.0, SampleData("c", 3.0)), + ] + + def test_iter_items_with_range(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save( + SampleData("a", 1.0), + SampleData("b", 2.0), + SampleData("c", 3.0), + SampleData("d", 4.0), + ) + + # Start only + items = list(store._iter_items(start=2.0)) + assert items == [ + (2.0, SampleData("b", 2.0)), + (3.0, SampleData("c", 3.0)), + (4.0, SampleData("d", 4.0)), + ] + + # End only + items = list(store._iter_items(end=3.0)) + assert items == [(1.0, SampleData("a", 1.0)), (2.0, SampleData("b", 2.0))] + + # Both + items = list(store._iter_items(start=2.0, end=4.0)) + assert items == [(2.0, SampleData("b", 2.0)), (3.0, SampleData("c", 3.0))] + + def test_empty_store(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + + assert store.load(1.0) is None + assert store._find_closest_timestamp(1.0) is None + assert list(store._iter_items()) == [] + + def test_first_and_first_timestamp(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + + # Empty store + assert store.first() is None + assert store.first_timestamp() is None + + # Add data (in chronological order) + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + + # Should return first by timestamp + assert store.first_timestamp() == 1.0 + assert store.first() == SampleData("a", 1.0) + + def test_find_closest(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("a", 1.0), SampleData("b", 2.0), 
SampleData("c", 3.0)) + + # Exact match + assert store.find_closest(2.0) == SampleData("b", 2.0) + + # Closest to 1.4 is 1.0 + assert store.find_closest(1.4) == SampleData("a", 1.0) + + # Closest to 1.6 is 2.0 + assert store.find_closest(1.6) == SampleData("b", 2.0) + + # With tolerance + assert store.find_closest(1.4, tolerance=0.5) == SampleData("a", 1.0) + assert store.find_closest(1.4, tolerance=0.3) is None + + def test_find_closest_seek(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("a", 10.0), SampleData("b", 11.0), SampleData("c", 12.0)) + + # Seek 0 = first item (10.0) + assert store.find_closest_seek(0.0) == SampleData("a", 10.0) + + # Seek 1.0 = 11.0 + assert store.find_closest_seek(1.0) == SampleData("b", 11.0) + + # Seek 1.4 -> closest to 11.4 is 11.0 + assert store.find_closest_seek(1.4) == SampleData("b", 11.0) + + # Seek 1.6 -> closest to 11.6 is 12.0 + assert store.find_closest_seek(1.6) == SampleData("c", 12.0) + + # With tolerance + assert store.find_closest_seek(1.4, tolerance=0.5) == SampleData("b", 11.0) + assert store.find_closest_seek(1.4, tolerance=0.3) is None + + def test_iterate(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + + # Should iterate in timestamp order, returning data only (not tuples) + items = list(store.iterate()) + assert items == [ + SampleData("a", 1.0), + SampleData("b", 2.0), + SampleData("c", 3.0), + ] + + def test_iterate_with_seek_and_duration(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save( + SampleData("a", 10.0), + SampleData("b", 11.0), + SampleData("c", 12.0), + SampleData("d", 13.0), + ) + + # Seek from start + items = list(store.iterate(seek=1.0)) + assert items == [ + SampleData("b", 11.0), + SampleData("c", 12.0), + SampleData("d", 13.0), + ] + + # Duration + items = list(store.iterate(duration=2.0)) + 
assert items == [SampleData("a", 10.0), SampleData("b", 11.0)] + + # Seek + duration + items = list(store.iterate(seek=1.0, duration=2.0)) + assert items == [SampleData("b", 11.0), SampleData("c", 12.0)] + + # from_timestamp + items = list(store.iterate(from_timestamp=12.0)) + assert items == [SampleData("c", 12.0), SampleData("d", 13.0)] + + def test_variadic_save(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + + # Save multiple items at once + store.save( + SampleData("a", 1.0), + SampleData("b", 2.0), + SampleData("c", 3.0), + ) + + assert store.load(1.0) == SampleData("a", 1.0) + assert store.load(2.0) == SampleData("b", 2.0) + assert store.load(3.0) == SampleData("c", 3.0) + + def test_pipe_save(self, store_factory, store_name, temp_dir): + import reactivex as rx + + store = store_factory(temp_dir) + + # Create observable with test data + source = rx.of( + SampleData("a", 1.0), + SampleData("b", 2.0), + SampleData("c", 3.0), + ) + + # Pipe through store.pipe_save — should save and pass through + results: list[SampleData] = [] + source.pipe(store.pipe_save).subscribe(results.append) + + # Data should be saved + assert store.load(1.0) == SampleData("a", 1.0) + assert store.load(2.0) == SampleData("b", 2.0) + assert store.load(3.0) == SampleData("c", 3.0) + + # Data should also pass through + assert results == [ + SampleData("a", 1.0), + SampleData("b", 2.0), + SampleData("c", 3.0), + ] + + def test_consume_stream(self, store_factory, store_name, temp_dir): + import reactivex as rx + + store = store_factory(temp_dir) + + # Create observable with test data + source = rx.of( + SampleData("a", 1.0), + SampleData("b", 2.0), + SampleData("c", 3.0), + ) + + # Consume stream — should save all items + disposable = store.consume_stream(source) + + # Data should be saved + assert store.load(1.0) == SampleData("a", 1.0) + assert store.load(2.0) == SampleData("b", 2.0) + assert store.load(3.0) == SampleData("c", 3.0) + + disposable.dispose() + 
+ def test_iterate_items(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + + items = list(store.iterate_items()) + assert items == [ + (1.0, SampleData("a", 1.0)), + (2.0, SampleData("b", 2.0)), + (3.0, SampleData("c", 3.0)), + ] + + # With seek + items = list(store.iterate_items(seek=1.0)) + assert len(items) == 2 + assert items[0] == (2.0, SampleData("b", 2.0)) + + def test_stream_basic(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + + # Stream at high speed (essentially instant) + results: list[SampleData] = [] + store.stream(speed=1000.0).subscribe( + on_next=results.append, + on_completed=lambda: None, + ) + + # Give it a moment to complete + import time + + time.sleep(0.1) + + assert results == [ + SampleData("a", 1.0), + SampleData("b", 2.0), + SampleData("c", 3.0), + ] + + +@pytest.mark.parametrize("store_factory,store_name", testdata) +class TestCollectionAPI: + """Test new collection API methods on all backends.""" + + def test_len(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + assert len(store) == 0 + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + assert len(store) == 3 + + def test_iter(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("a", 1.0), SampleData("b", 2.0)) + items = list(store) + assert items == [SampleData("a", 1.0), SampleData("b", 2.0)] + + def test_last_timestamp(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + assert store.last_timestamp() is None + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + assert store.last_timestamp() == 3.0 + + def test_last(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + assert 
store.last() is None + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + assert store.last() == SampleData("c", 3.0) + + def test_start_end_ts(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + assert store.start_ts is None + assert store.end_ts is None + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + assert store.start_ts == 1.0 + assert store.end_ts == 3.0 + + def test_time_range(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + assert store.time_range() is None + store.save(SampleData("a", 1.0), SampleData("b", 5.0)) + assert store.time_range() == (1.0, 5.0) + + def test_duration(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + assert store.duration() == 0.0 + store.save(SampleData("a", 1.0), SampleData("b", 5.0)) + assert store.duration() == 4.0 + + def test_find_before(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + + assert store.find_before(0.5) is None + assert store.find_before(1.0) is None # strictly before + assert store.find_before(1.5) == SampleData("a", 1.0) + assert store.find_before(2.5) == SampleData("b", 2.0) + assert store.find_before(10.0) == SampleData("c", 3.0) + + def test_find_after(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save(SampleData("a", 1.0), SampleData("b", 2.0), SampleData("c", 3.0)) + + assert store.find_after(0.5) == SampleData("a", 1.0) + assert store.find_after(1.0) == SampleData("b", 2.0) # strictly after + assert store.find_after(2.5) == SampleData("c", 3.0) + assert store.find_after(3.0) is None # strictly after + assert store.find_after(10.0) is None + + def test_slice_by_time(self, store_factory, store_name, temp_dir): + store = store_factory(temp_dir) + store.save( + SampleData("a", 1.0), + SampleData("b", 2.0), + 
SampleData("c", 3.0), + SampleData("d", 4.0), + ) + + # [2.0, 4.0) should include b, c + result = store.slice_by_time(2.0, 4.0) + assert result == [SampleData("b", 2.0), SampleData("c", 3.0)] diff --git a/dimos/memory/timeseries/test_legacy.py b/dimos/memory/timeseries/test_legacy.py new file mode 100644 index 0000000000..aaad962a95 --- /dev/null +++ b/dimos/memory/timeseries/test_legacy.py @@ -0,0 +1,48 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests specific to LegacyPickleStore.""" + +from dimos.memory.timeseries.legacy import LegacyPickleStore + + +class TestLegacyPickleStoreRealData: + """Test LegacyPickleStore with real recorded data.""" + + def test_read_lidar_recording(self) -> None: + """Test reading from unitree_go2_bigoffice/lidar recording.""" + store = LegacyPickleStore("unitree_go2_bigoffice/lidar") + + # Check first timestamp exists + first_ts = store.first_timestamp() + assert first_ts is not None + assert first_ts > 0 + + # Check first data + first = store.first() + assert first is not None + assert hasattr(first, "ts") + + # Check find_closest_seek works + data_at_10s = store.find_closest_seek(10.0) + assert data_at_10s is not None + + # Check iteration returns monotonically increasing timestamps + prev_ts = None + for i, item in enumerate(store.iterate()): + assert item.ts is not None + if prev_ts is not None: + assert item.ts >= prev_ts, "Timestamps should be monotonically increasing" + prev_ts = item.ts + if i >= 10: # Only check first 10 items + break diff --git a/dimos/models/depth/metric3d.py b/dimos/models/depth/metric3d.py index 41b5086991..a668ea321e 100644 --- a/dimos/models/depth/metric3d.py +++ b/dimos/models/depth/metric3d.py @@ -78,7 +78,10 @@ def infer_depth(self, img, debug: bool = False): # type: ignore[no-untyped-def] try: if isinstance(img, str): print(f"Image type string: {type(img)}") - self.rgb_origin = cv2.imread(img)[:, :, ::-1] + img_data = cv2.imread(img) + if img_data is None: + raise ValueError(f"Failed to load image from {img}") + self.rgb_origin = img_data[:, :, ::-1] else: # print(f"Image type not string: {type(img)}, cv2 conversion assumed to be handled. 
If not, this will throw an error") self.rgb_origin = img @@ -172,11 +175,13 @@ def unpad_transform_depth(self, pred_depth): # type: ignore[no-untyped-def] def eval_predicted_depth(self, depth_file, pred_depth) -> None: # type: ignore[no-untyped-def] if depth_file is not None: - gt_depth = cv2.imread(depth_file, -1) - gt_depth = gt_depth / self.gt_depth_scale # type: ignore[assignment] - gt_depth = torch.from_numpy(gt_depth).float().to(self.device) # type: ignore[assignment] + gt_depth_np = cv2.imread(depth_file, -1) + if gt_depth_np is None: + raise ValueError(f"Failed to load depth file from {depth_file}") + gt_depth_scaled = gt_depth_np / self.gt_depth_scale + gt_depth = torch.from_numpy(gt_depth_scaled).float().to(self.device) assert gt_depth.shape == pred_depth.shape - mask = gt_depth > 1e-8 - abs_rel_err = (torch.abs(pred_depth[mask] - gt_depth[mask]) / gt_depth[mask]).mean() + mask = gt_depth > 1e-8 # type: ignore[operator] + abs_rel_err = (torch.abs(pred_depth[mask] - gt_depth[mask]) / gt_depth[mask]).mean() # type: ignore[index] print("abs_rel_err:", abs_rel_err.item()) diff --git a/dimos/models/depth/test_metric3d.py b/dimos/models/depth/test_metric3d.py index 050100047b..33e39f6a29 100644 --- a/dimos/models/depth/test_metric3d.py +++ b/dimos/models/depth/test_metric3d.py @@ -1,3 +1,5 @@ +from contextlib import contextmanager + import numpy as np import pytest @@ -6,12 +8,22 @@ from dimos.utils.data import get_data +@contextmanager +def skip_xformers_unsupported(): + try: + yield + except NotImplementedError as e: + if "memory_efficient_attention" in str(e): + pytest.skip(f"xformers not supported on this GPU: {e}") + raise + + @pytest.fixture def sample_intrinsics() -> list[float]: """Sample camera intrinsics [fx, fy, cx, cy].""" return [500.0, 500.0, 320.0, 240.0] - +@pytest.mark.cuda @pytest.mark.gpu def test_metric3d_init(sample_intrinsics: list[float]) -> None: """Test Metric3D initialization.""" @@ -30,7 +42,6 @@ def 
test_metric3d_update_intrinsic(sample_intrinsics: list[float]) -> None: model.update_intrinsic(new_intrinsics) assert model.intrinsic == new_intrinsics - @pytest.mark.gpu def test_metric3d_update_intrinsic_invalid(sample_intrinsics: list[float]) -> None: """Test that invalid intrinsics raise an error.""" @@ -40,6 +51,7 @@ def test_metric3d_update_intrinsic_invalid(sample_intrinsics: list[float]) -> No model.update_intrinsic([1.0, 2.0]) # Only 2 values +@pytest.mark.cuda @pytest.mark.gpu def test_metric3d_infer_depth(sample_intrinsics: list[float]) -> None: """Test depth inference on a sample image.""" @@ -51,7 +63,8 @@ def test_metric3d_infer_depth(sample_intrinsics: list[float]) -> None: rgb_array = image.data # Run inference - depth_map = model.infer_depth(rgb_array) + with skip_xformers_unsupported(): + depth_map = model.infer_depth(rgb_array) # Verify output assert isinstance(depth_map, np.ndarray) @@ -65,6 +78,7 @@ def test_metric3d_infer_depth(sample_intrinsics: list[float]) -> None: model.stop() +@pytest.mark.cuda @pytest.mark.gpu def test_metric3d_multiple_inferences(sample_intrinsics: list[float]) -> None: """Test multiple depth inferences.""" @@ -77,7 +91,8 @@ def test_metric3d_multiple_inferences(sample_intrinsics: list[float]) -> None: # Run multiple inferences depths = [] for _ in range(3): - depth = model.infer_depth(rgb_array) + with skip_xformers_unsupported(): + depth = model.infer_depth(rgb_array) depths.append(depth) # Results should be consistent diff --git a/dimos/models/embedding/__init__.py b/dimos/models/embedding/__init__.py index 981e25e5c2..050d35467e 100644 --- a/dimos/models/embedding/__init__.py +++ b/dimos/models/embedding/__init__.py @@ -7,24 +7,24 @@ # Optional: CLIP support try: - from dimos.models.embedding.clip import CLIPEmbedding, CLIPModel + from dimos.models.embedding.clip import CLIPModel - __all__.extend(["CLIPEmbedding", "CLIPModel"]) + __all__.append("CLIPModel") except ImportError: pass # Optional: MobileCLIP support 
try: - from dimos.models.embedding.mobileclip import MobileCLIPEmbedding, MobileCLIPModel + from dimos.models.embedding.mobileclip import MobileCLIPModel - __all__.extend(["MobileCLIPEmbedding", "MobileCLIPModel"]) + __all__.append("MobileCLIPModel") except ImportError: pass # Optional: TorchReID support try: - from dimos.models.embedding.treid import TorchReIDEmbedding, TorchReIDModel + from dimos.models.embedding.treid import TorchReIDModel - __all__.extend(["TorchReIDEmbedding", "TorchReIDModel"]) + __all__.append("TorchReIDModel") except ImportError: pass diff --git a/dimos/models/embedding/base.py b/dimos/models/embedding/base.py index eba5e45894..c6b78fcf2c 100644 --- a/dimos/models/embedding/base.py +++ b/dimos/models/embedding/base.py @@ -17,7 +17,7 @@ from abc import ABC, abstractmethod from dataclasses import dataclass import time -from typing import TYPE_CHECKING, Generic, TypeVar +from typing import TYPE_CHECKING import numpy as np import torch @@ -90,16 +90,13 @@ def to_cpu(self) -> Embedding: return self -E = TypeVar("E", bound="Embedding") - - -class EmbeddingModel(ABC, Generic[E]): +class EmbeddingModel(ABC): """Abstract base class for embedding models supporting vision and language.""" device: str @abstractmethod - def embed(self, *images: Image) -> E | list[E]: + def embed(self, *images: Image) -> Embedding | list[Embedding]: """ Embed one or more images. Returns single Embedding if one image, list if multiple. @@ -107,14 +104,14 @@ def embed(self, *images: Image) -> E | list[E]: pass @abstractmethod - def embed_text(self, *texts: str) -> E | list[E]: + def embed_text(self, *texts: str) -> Embedding | list[Embedding]: """ Embed one or more text strings. Returns single Embedding if one text, list if multiple. 
""" pass - def compare_one_to_many(self, query: E, candidates: list[E]) -> torch.Tensor: + def compare_one_to_many(self, query: Embedding, candidates: list[Embedding]) -> torch.Tensor: """ Efficiently compare one query against many candidates on GPU. @@ -129,7 +126,9 @@ def compare_one_to_many(self, query: E, candidates: list[E]) -> torch.Tensor: candidate_tensors = torch.stack([c.to_torch(self.device) for c in candidates]) return query_tensor @ candidate_tensors.T - def compare_many_to_many(self, queries: list[E], candidates: list[E]) -> torch.Tensor: + def compare_many_to_many( + self, queries: list[Embedding], candidates: list[Embedding] + ) -> torch.Tensor: """ Efficiently compare all queries against all candidates on GPU. @@ -144,7 +143,9 @@ def compare_many_to_many(self, queries: list[E], candidates: list[E]) -> torch.T candidate_tensors = torch.stack([c.to_torch(self.device) for c in candidates]) return query_tensors @ candidate_tensors.T - def query(self, query_emb: E, candidates: list[E], top_k: int = 5) -> list[tuple[int, float]]: + def query( + self, query_emb: Embedding, candidates: list[Embedding], top_k: int = 5 + ) -> list[tuple[int, float]]: """ Find top-k most similar candidates to query (GPU accelerated). @@ -160,6 +161,5 @@ def query(self, query_emb: E, candidates: list[E], top_k: int = 5) -> list[tuple top_values, top_indices = similarities.topk(k=min(top_k, len(candidates))) return [(idx.item(), val.item()) for idx, val in zip(top_indices, top_values, strict=False)] - def warmup(self) -> None: - """Optional warmup method to pre-load model.""" - pass + + ... 
diff --git a/dimos/models/embedding/clip.py b/dimos/models/embedding/clip.py index d8a62efcb2..1b8d3e68bb 100644 --- a/dimos/models/embedding/clip.py +++ b/dimos/models/embedding/clip.py @@ -17,7 +17,7 @@ from PIL import Image as PILImage import torch -import torch.nn.functional as F +import torch.nn.functional as functional from transformers import CLIPModel as HFCLIPModel, CLIPProcessor # type: ignore[import-untyped] from dimos.models.base import HuggingFaceModel @@ -25,16 +25,13 @@ from dimos.msgs.sensor_msgs import Image -class CLIPEmbedding(Embedding): ... - - @dataclass class CLIPModelConfig(HuggingFaceEmbeddingModelConfig): model_name: str = "openai/clip-vit-base-patch32" dtype: torch.dtype = torch.float32 -class CLIPModel(EmbeddingModel[CLIPEmbedding], HuggingFaceModel): +class CLIPModel(EmbeddingModel, HuggingFaceModel): """CLIP embedding model for vision-language re-identification.""" default_config = CLIPModelConfig @@ -50,7 +47,7 @@ def _model(self) -> HFCLIPModel: def _processor(self) -> CLIPProcessor: return CLIPProcessor.from_pretrained(self.config.model_name) - def embed(self, *images: Image) -> CLIPEmbedding | list[CLIPEmbedding]: + def embed(self, *images: Image) -> Embedding | list[Embedding]: """Embed one or more images. Returns embeddings as torch.Tensor on device for efficient GPU comparisons. 
@@ -64,17 +61,17 @@ def embed(self, *images: Image) -> CLIPEmbedding | list[CLIPEmbedding]: image_features = self._model.get_image_features(**inputs) if self.config.normalize: - image_features = F.normalize(image_features, dim=-1) + image_features = functional.normalize(image_features, dim=-1) # Create embeddings (keep as torch.Tensor on device) - embeddings = [] + embeddings: list[Embedding] = [] for i, feat in enumerate(image_features): timestamp = images[i].ts - embeddings.append(CLIPEmbedding(vector=feat, timestamp=timestamp)) + embeddings.append(Embedding(vector=feat, timestamp=timestamp)) return embeddings[0] if len(images) == 1 else embeddings - def embed_text(self, *texts: str) -> CLIPEmbedding | list[CLIPEmbedding]: + def embed_text(self, *texts: str) -> Embedding | list[Embedding]: """Embed one or more text strings. Returns embeddings as torch.Tensor on device for efficient GPU comparisons. @@ -86,12 +83,12 @@ def embed_text(self, *texts: str) -> CLIPEmbedding | list[CLIPEmbedding]: text_features = self._model.get_text_features(**inputs) if self.config.normalize: - text_features = F.normalize(text_features, dim=-1) + text_features = functional.normalize(text_features, dim=-1) # Create embeddings (keep as torch.Tensor on device) - embeddings = [] + embeddings: list[Embedding] = [] for feat in text_features: - embeddings.append(CLIPEmbedding(vector=feat)) + embeddings.append(Embedding(vector=feat)) return embeddings[0] if len(texts) == 1 else embeddings diff --git a/dimos/models/embedding/mobileclip.py b/dimos/models/embedding/mobileclip.py index 7c3d7adc69..c02361b367 100644 --- a/dimos/models/embedding/mobileclip.py +++ b/dimos/models/embedding/mobileclip.py @@ -27,15 +27,12 @@ from dimos.utils.data import get_data -class MobileCLIPEmbedding(Embedding): ... 
- - @dataclass class MobileCLIPModelConfig(EmbeddingModelConfig): model_name: str = "MobileCLIP2-S4" -class MobileCLIPModel(EmbeddingModel[MobileCLIPEmbedding], LocalModel): +class MobileCLIPModel(EmbeddingModel, LocalModel): """MobileCLIP embedding model for vision-language re-identification.""" default_config = MobileCLIPModelConfig @@ -62,7 +59,7 @@ def _preprocess(self) -> Any: def _tokenizer(self) -> Any: return open_clip.get_tokenizer(self.config.model_name) - def embed(self, *images: Image) -> MobileCLIPEmbedding | list[MobileCLIPEmbedding]: + def embed(self, *images: Image) -> Embedding | list[Embedding]: """Embed one or more images. Returns embeddings as torch.Tensor on device for efficient GPU comparisons. @@ -83,11 +80,11 @@ def embed(self, *images: Image) -> MobileCLIPEmbedding | list[MobileCLIPEmbeddin embeddings = [] for i, feat in enumerate(feats): timestamp = images[i].ts - embeddings.append(MobileCLIPEmbedding(vector=feat, timestamp=timestamp)) + embeddings.append(Embedding(vector=feat, timestamp=timestamp)) return embeddings[0] if len(images) == 1 else embeddings - def embed_text(self, *texts: str) -> MobileCLIPEmbedding | list[MobileCLIPEmbedding]: + def embed_text(self, *texts: str) -> Embedding | list[Embedding]: """Embed one or more text strings. Returns embeddings as torch.Tensor on device for efficient GPU comparisons. 
@@ -101,7 +98,7 @@ def embed_text(self, *texts: str) -> MobileCLIPEmbedding | list[MobileCLIPEmbedd # Create embeddings (keep as torch.Tensor on device) embeddings = [] for feat in feats: - embeddings.append(MobileCLIPEmbedding(vector=feat)) + embeddings.append(Embedding(vector=feat)) return embeddings[0] if len(texts) == 1 else embeddings diff --git a/dimos/models/embedding/treid.py b/dimos/models/embedding/treid.py index a8893d38e4..85e32cd39b 100644 --- a/dimos/models/embedding/treid.py +++ b/dimos/models/embedding/treid.py @@ -12,11 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +import warnings + +warnings.filterwarnings("ignore", message="Cython evaluation.*unavailable", category=UserWarning) + from dataclasses import dataclass from functools import cached_property import torch -import torch.nn.functional as F +import torch.nn.functional as functional from torchreid import utils as torchreid_utils from dimos.models.base import LocalModel @@ -25,9 +29,6 @@ from dimos.utils.data import get_data -class TorchReIDEmbedding(Embedding): ... - - # osnet models downloaded from https://kaiyangzhou.github.io/deep-person-reid/MODEL_ZOO.html # into dimos/data/models_torchreid/ # feel free to add more @@ -36,7 +37,7 @@ class TorchReIDModelConfig(EmbeddingModelConfig): model_name: str = "osnet_x1_0" -class TorchReIDModel(EmbeddingModel[TorchReIDEmbedding], LocalModel): +class TorchReIDModel(EmbeddingModel, LocalModel): """TorchReID embedding model for person re-identification.""" default_config = TorchReIDModelConfig @@ -51,7 +52,7 @@ def _model(self) -> torchreid_utils.FeatureExtractor: device=self.config.device, ) - def embed(self, *images: Image) -> TorchReIDEmbedding | list[TorchReIDEmbedding]: + def embed(self, *images: Image) -> Embedding | list[Embedding]: """Embed one or more images. Returns embeddings as torch.Tensor on device for efficient GPU comparisons. 
@@ -70,17 +71,17 @@ def embed(self, *images: Image) -> TorchReIDEmbedding | list[TorchReIDEmbedding] features_tensor = torch.from_numpy(features).to(self.config.device) if self.config.normalize: - features_tensor = F.normalize(features_tensor, dim=-1) + features_tensor = functional.normalize(features_tensor, dim=-1) # Create embeddings (keep as torch.Tensor on device) embeddings = [] for i, feat in enumerate(features_tensor): timestamp = images[i].ts - embeddings.append(TorchReIDEmbedding(vector=feat, timestamp=timestamp)) + embeddings.append(Embedding(vector=feat, timestamp=timestamp)) return embeddings[0] if len(images) == 1 else embeddings - def embed_text(self, *texts: str) -> TorchReIDEmbedding | list[TorchReIDEmbedding]: + def embed_text(self, *texts: str) -> Embedding | list[Embedding]: """Text embedding not supported for ReID models. TorchReID models are vision-only person re-identification models diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/README.md b/dimos/models/manipulation/contact_graspnet_pytorch/README.md deleted file mode 100644 index bf95fa39cd..0000000000 --- a/dimos/models/manipulation/contact_graspnet_pytorch/README.md +++ /dev/null @@ -1,52 +0,0 @@ -# ContactGraspNet PyTorch Module - -This module provides a PyTorch implementation of ContactGraspNet for robotic grasping on dimOS. - -## Setup Instructions - -### 1. Install Required Dependencies - -Install the manipulation extras from the main repository: - -```bash -# From the root directory of the dimos repository -pip install -e ".[manipulation]" -``` - -This will install all the necessary dependencies for using the contact_graspnet_pytorch module, including: -- PyTorch -- Open3D -- Other manipulation-specific dependencies - -### 2. 
Testing the Module - -To test that the module is properly installed and functioning: - -```bash -# From the root directory of the dimos repository -pytest -s dimos/models/manipulation/contact_graspnet_pytorch/test_contact_graspnet.py -``` - -The test will verify that: -- The model can be loaded -- Inference runs correctly -- Grasping outputs are generated as expected - -### 3. Using in Your Code - -Reference ```inference.py``` for usage example. - -### Troubleshooting - -If you encounter issues with imports or missing dependencies: - -1. Verify that the manipulation extras are properly installed: - ```python - import contact_graspnet_pytorch - print("Module loaded successfully!") - ``` - -2. If LFS data files are missing, ensure Git LFS is installed and initialized: - ```bash - git lfs pull - ``` \ No newline at end of file diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/inference.py b/dimos/models/manipulation/contact_graspnet_pytorch/inference.py deleted file mode 100644 index 76bb377869..0000000000 --- a/dimos/models/manipulation/contact_graspnet_pytorch/inference.py +++ /dev/null @@ -1,117 +0,0 @@ -import argparse -import glob -import os - -from contact_graspnet_pytorch import config_utils # type: ignore[import-not-found] -from contact_graspnet_pytorch.contact_grasp_estimator import ( # type: ignore[import-not-found] - GraspEstimator, -) -from contact_graspnet_pytorch.data import ( # type: ignore[import-not-found] - load_available_input_data, -) -import numpy as np -import torch - -from dimos.utils.data import get_data - - -def inference(global_config, # type: ignore[no-untyped-def] - ckpt_dir, - input_paths, - local_regions: bool=True, - filter_grasps: bool=True, - skip_border_objects: bool=False, - z_range = None, - forward_passes: int=1, - K=None,): - """ - Predict 6-DoF grasp distribution for given model and input data - - :param global_config: config.yaml from checkpoint directory - :param checkpoint_dir: checkpoint directory - :param 
input_paths: .png/.npz/.npy file paths that contain depth/pointcloud and optionally intrinsics/segmentation/rgb - :param K: Camera Matrix with intrinsics to convert depth to point cloud - :param local_regions: Crop 3D local regions around given segments. - :param skip_border_objects: When extracting local_regions, ignore segments at depth map boundary. - :param filter_grasps: Filter and assign grasp contacts according to segmap. - :param segmap_id: only return grasps from specified segmap_id. - :param z_range: crop point cloud at a minimum/maximum z distance from camera to filter out outlier points. Default: [0.2, 1.8] m - :param forward_passes: Number of forward passes to run on each point cloud. Default: 1 - """ - # Build the model - if z_range is None: - z_range = [0.2, 1.8] - grasp_estimator = GraspEstimator(global_config) - - # Load the weights - model_checkpoint_dir = get_data(ckpt_dir) - checkpoint_path = os.path.join(model_checkpoint_dir, 'model.pt') - state_dict = torch.load(checkpoint_path, weights_only=False) - grasp_estimator.model.load_state_dict(state_dict['model']) - - os.makedirs('results', exist_ok=True) - - # Process example test scenes - for p in glob.glob(input_paths): - print('Loading ', p) - - pc_segments = {} - segmap, rgb, depth, cam_K, pc_full, pc_colors = load_available_input_data(p, K=K) - - if segmap is None and (local_regions or filter_grasps): - raise ValueError('Need segmentation map to extract local regions or filter grasps') - - if pc_full is None: - print('Converting depth to point cloud(s)...') - pc_full, pc_segments, pc_colors = grasp_estimator.extract_point_clouds(depth, cam_K, segmap=segmap, rgb=rgb, - skip_border_objects=skip_border_objects, - z_range=z_range) - - print(pc_full.shape) - - print('Generating Grasps...') - pred_grasps_cam, scores, contact_pts, _ = grasp_estimator.predict_scene_grasps(pc_full, - pc_segments=pc_segments, - local_regions=local_regions, - filter_grasps=filter_grasps, - forward_passes=forward_passes) 
- - # Save results - np.savez('results/predictions_{}'.format(os.path.basename(p.replace('png','npz').replace('npy','npz'))), - pc_full=pc_full, pred_grasps_cam=pred_grasps_cam, scores=scores, contact_pts=contact_pts, pc_colors=pc_colors) - - # Visualize results - # show_image(rgb, segmap) - # visualize_grasps(pc_full, pred_grasps_cam, scores, plot_opencv_cam=True, pc_colors=pc_colors) - - if not glob.glob(input_paths): - print('No files found: ', input_paths) - -if __name__ == "__main__": - - parser = argparse.ArgumentParser() - parser.add_argument('--ckpt_dir', default='models_contact_graspnet', help='Log dir') - parser.add_argument('--np_path', default='test_data/7.npy', help='Input data: npz/npy file with keys either "depth" & camera matrix "K" or just point cloud "pc" in meters. Optionally, a 2D "segmap"') - parser.add_argument('--K', default=None, help='Flat Camera Matrix, pass as "[fx, 0, cx, 0, fy, cy, 0, 0 ,1]"') - parser.add_argument('--z_range', default=[0.2,1.8], help='Z value threshold to crop the input point cloud') - parser.add_argument('--local_regions', action='store_true', default=True, help='Crop 3D local regions around given segments.') - parser.add_argument('--filter_grasps', action='store_true', default=True, help='Filter grasp contacts according to segmap.') - parser.add_argument('--skip_border_objects', action='store_true', default=False, help='When extracting local_regions, ignore segments at depth map boundary.') - parser.add_argument('--forward_passes', type=int, default=1, help='Run multiple parallel forward passes to mesh_utils more potential contact points.') - parser.add_argument('--arg_configs', nargs="*", type=str, default=[], help='overwrite config parameters') - FLAGS = parser.parse_args() - - global_config = config_utils.load_config(FLAGS.ckpt_dir, batch_size=FLAGS.forward_passes, arg_configs=FLAGS.arg_configs) - - print(str(global_config)) - print(f'pid: {os.getpid()!s}') - - inference(global_config, - FLAGS.ckpt_dir, - 
FLAGS.np_path, - local_regions=FLAGS.local_regions, - filter_grasps=FLAGS.filter_grasps, - skip_border_objects=FLAGS.skip_border_objects, - z_range=eval(str(FLAGS.z_range)), - forward_passes=FLAGS.forward_passes, - K=eval(str(FLAGS.K))) diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_contact_graspnet.py b/dimos/models/manipulation/contact_graspnet_pytorch/test_contact_graspnet.py deleted file mode 100644 index 7ee0f49451..0000000000 --- a/dimos/models/manipulation/contact_graspnet_pytorch/test_contact_graspnet.py +++ /dev/null @@ -1,72 +0,0 @@ -import glob -import os - -import numpy as np -import pytest - - -def is_manipulation_installed() -> bool: - """Check if the manipulation extras are installed.""" - try: - import contact_graspnet_pytorch - return True - except ImportError: - return False - -@pytest.mark.integration -@pytest.mark.skipif(not is_manipulation_installed(), - reason="This test requires 'pip install .[manipulation]' to be run") -def test_contact_graspnet_inference() -> None: - """Test contact graspnet inference with local regions and filter grasps.""" - # Skip test if manipulation dependencies not installed - if not is_manipulation_installed(): - pytest.skip("contact_graspnet_pytorch not installed. Run 'pip install .[manipulation]' first.") - return - - try: - from contact_graspnet_pytorch import config_utils - - from dimos.models.manipulation.contact_graspnet_pytorch.inference import inference - from dimos.utils.data import get_data - except ImportError: - pytest.skip("Required modules could not be imported. 
Make sure you have run 'pip install .[manipulation]'.") - return - - # Test data path - use the default test data path - test_data_path = os.path.join(get_data("models_contact_graspnet"), "test_data/0.npy") - - # Check if test data exists - test_files = glob.glob(test_data_path) - if not test_files: - pytest.fail(f"No test data found at {test_data_path}") - - # Load config with default values - ckpt_dir = 'models_contact_graspnet' - global_config = config_utils.load_config(ckpt_dir, batch_size=1) - - # Run inference function with the same params as the command line - result_files_before = glob.glob('results/predictions_*.npz') - - inference( - global_config=global_config, - ckpt_dir=ckpt_dir, - input_paths=test_data_path, - local_regions=True, - filter_grasps=True, - skip_border_objects=False, - z_range=[0.2, 1.8], - forward_passes=1, - K=None - ) - - # Verify results were created - result_files_after = glob.glob('results/predictions_*.npz') - assert len(result_files_after) >= len(result_files_before), "No result files were generated" - - # Load at least one result file and verify it contains expected data - if result_files_after: - latest_result = sorted(result_files_after)[-1] - result_data = np.load(latest_result, allow_pickle=True) - expected_keys = ['pc_full', 'pred_grasps_cam', 'scores', 'contact_pts', 'pc_colors'] - for key in expected_keys: - assert key in result_data.files, f"Expected key '{key}' not found in results" diff --git a/dimos/models/segmentation/edge_tam.py b/dimos/models/segmentation/edge_tam.py index ba351be130..54158b2b92 100644 --- a/dimos/models/segmentation/edge_tam.py +++ b/dimos/models/segmentation/edge_tam.py @@ -36,7 +36,7 @@ from dimos.utils.logging_config import setup_logger if TYPE_CHECKING: - from sam2.sam2_video_predictor import SAM2VideoPredictor # type: ignore[import-untyped] + from sam2.sam2_video_predictor import SAM2VideoPredictor os.environ['TQDM_DISABLE'] = '1' diff --git a/dimos/models/vl/__init__.py 
b/dimos/models/vl/__init__.py index e4bb68e03c..482a907cbd 100644 --- a/dimos/models/vl/__init__.py +++ b/dimos/models/vl/__init__.py @@ -1,16 +1,13 @@ -from dimos.models.vl.base import Captioner, VlModel -from dimos.models.vl.florence import Florence2Model -from dimos.models.vl.moondream import MoondreamVlModel -from dimos.models.vl.moondream_hosted import MoondreamHostedVlModel -from dimos.models.vl.openai import OpenAIVlModel -from dimos.models.vl.qwen import QwenVlModel +import lazy_loader as lazy -__all__ = [ - "Captioner", - "Florence2Model", - "MoondreamHostedVlModel", - "MoondreamVlModel", - "OpenAIVlModel", - "QwenVlModel", - "VlModel", -] +__getattr__, __dir__, __all__ = lazy.attach( + __name__, + submod_attrs={ + "base": ["Captioner", "VlModel"], + "florence": ["Florence2Model"], + "moondream": ["MoondreamVlModel"], + "moondream_hosted": ["MoondreamHostedVlModel"], + "openai": ["OpenAIVlModel"], + "qwen": ["QwenVlModel"], + }, +) diff --git a/dimos/msgs/__init__.py b/dimos/msgs/__init__.py index b2bcabab01..4395dbcc51 100644 --- a/dimos/msgs/__init__.py +++ b/dimos/msgs/__init__.py @@ -1,3 +1,4 @@ +from dimos.msgs.helpers import resolve_msg_type from dimos.msgs.protocol import DimosMsg -__all__ = ["DimosMsg"] +__all__ = ["DimosMsg", "resolve_msg_type"] diff --git a/dimos/msgs/geometry_msgs/Pose.py b/dimos/msgs/geometry_msgs/Pose.py index bf6a821cc8..261aae6452 100644 --- a/dimos/msgs/geometry_msgs/Pose.py +++ b/dimos/msgs/geometry_msgs/Pose.py @@ -20,18 +20,6 @@ Pose as LCMPose, Transform as LCMTransform, ) - -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - Point as ROSPoint, - Pose as ROSPose, - Quaternion as ROSQuaternion, - ) -except ImportError: - ROSPose = None # type: ignore[assignment, misc] - ROSPoint = None # type: ignore[assignment, misc] - ROSQuaternion = None # type: ignore[assignment, misc] - from plum import dispatch from dimos.msgs.geometry_msgs.Quaternion import Quaternion, QuaternionConvertable @@ -222,42 +210,18 @@ 
def __add__(self, other: Pose | PoseConvertable | LCMTransform | Transform) -> P return Pose(new_position, new_orientation) - @classmethod - def from_ros_msg(cls, ros_msg: ROSPose) -> Pose: - """Create a Pose from a ROS geometry_msgs/Pose message. - - Args: - ros_msg: ROS Pose message - - Returns: - Pose instance - """ - position = Vector3(ros_msg.position.x, ros_msg.position.y, ros_msg.position.z) - orientation = Quaternion( - ros_msg.orientation.x, - ros_msg.orientation.y, - ros_msg.orientation.z, - ros_msg.orientation.w, - ) - return cls(position, orientation) + def __sub__(self, other: Pose) -> Pose: + """Compute the delta pose: self - other. - def to_ros_msg(self) -> ROSPose: - """Convert to a ROS geometry_msgs/Pose message. + For position: simple subtraction. + For orientation: delta_quat = self.orientation * inverse(other.orientation) Returns: - ROS Pose message + A new Pose representing the delta transformation """ - ros_msg = ROSPose() # type: ignore[no-untyped-call] - ros_msg.position = ROSPoint( # type: ignore[no-untyped-call] - x=float(self.position.x), y=float(self.position.y), z=float(self.position.z) - ) - ros_msg.orientation = ROSQuaternion( # type: ignore[no-untyped-call] - x=float(self.orientation.x), - y=float(self.orientation.y), - z=float(self.orientation.z), - w=float(self.orientation.w), - ) - return ros_msg + delta_position = self.position - other.position + delta_orientation = self.orientation * other.orientation.inverse() + return Pose(delta_position, delta_orientation) @dispatch diff --git a/dimos/msgs/geometry_msgs/PoseArray.py b/dimos/msgs/geometry_msgs/PoseArray.py new file mode 100644 index 0000000000..e27f56b6bf --- /dev/null +++ b/dimos/msgs/geometry_msgs/PoseArray.py @@ -0,0 +1,97 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""PoseArray message type for Dimos.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from dimos.msgs.std_msgs.Header import Header + +if TYPE_CHECKING: + from collections.abc import Iterator + + from dimos.msgs.geometry_msgs.Pose import Pose + + +class PoseArray: + """ + An array of poses with a header for reference frame and timestamp. + + This is commonly used for representing multiple candidate positions, + such as grasp poses, particle filter samples, or waypoints. + """ + + msg_name = "geometry_msgs.PoseArray" + + def __init__(self, header: Header | None = None, poses: list[Pose] | None = None) -> None: + """ + Initialize a PoseArray. + + Args: + header: Header with frame_id and timestamp + poses: List of Pose objects + """ + self.header = header if header is not None else Header() + self.poses = poses if poses is not None else [] + + def __repr__(self) -> str: + return f"PoseArray(header={self.header!r}, poses={len(self.poses)} poses)" + + def __str__(self) -> str: + return f"PoseArray(frame_id={self.header.frame_id}, num_poses={len(self.poses)})" + + def __len__(self) -> int: + """Return the number of poses in the array.""" + return len(self.poses) + + def __getitem__(self, index: int) -> Pose: + """Get pose at index.""" + return self.poses[index] + + def __iter__(self) -> Iterator[Pose]: + """Iterate over poses.""" + return iter(self.poses) + + def append(self, pose: Pose) -> None: + """Add a pose to the array.""" + self.poses.append(pose) + + def encode(self) -> bytes: + """ + Encode to bytes for LCM transmission. 
+ + Note: This is a simple implementation. For production use, + consider using proper LCM encoding. + """ + import pickle + + return pickle.dumps({"header": self.header, "poses": self.poses}) + + @classmethod + def decode(cls, data: bytes) -> PoseArray: + """ + Decode from bytes. + + Args: + data: Pickled PoseArray data + + Returns: + Decoded PoseArray + """ + import pickle + + decoded = pickle.loads(data) + return cls(header=decoded["header"], poses=decoded["poses"]) diff --git a/dimos/msgs/geometry_msgs/PoseStamped.py b/dimos/msgs/geometry_msgs/PoseStamped.py index 406c5d7ac7..acf0af8b32 100644 --- a/dimos/msgs/geometry_msgs/PoseStamped.py +++ b/dimos/msgs/geometry_msgs/PoseStamped.py @@ -16,18 +16,13 @@ import math import time -from typing import BinaryIO, TypeAlias +from typing import TYPE_CHECKING, BinaryIO, TypeAlias -from dimos_lcm.geometry_msgs import PoseStamped as LCMPoseStamped +if TYPE_CHECKING: + from rerun._baseclasses import Archetype -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - PoseStamped as ROSPoseStamped, - ) -except ImportError: - ROSPoseStamped = None # type: ignore[assignment, misc] +from dimos_lcm.geometry_msgs import PoseStamped as LCMPoseStamped from plum import dispatch -import rerun as rr from dimos.msgs.geometry_msgs.Pose import Pose from dimos.msgs.geometry_msgs.Quaternion import Quaternion, QuaternionConvertable @@ -87,12 +82,14 @@ def __str__(self) -> str: f"euler=[{math.degrees(self.roll):.1f}, {math.degrees(self.pitch):.1f}, {math.degrees(self.yaw):.1f}])" ) - def to_rerun(self): # type: ignore[no-untyped-def] + def to_rerun(self) -> Archetype: """Convert to rerun Transform3D format. Returns a Transform3D that can be logged to Rerun to position child entities in the transform hierarchy. 
""" + import rerun as rr + return rr.Transform3D( translation=[self.x, self.y, self.z], rotation=rr.Quaternion( @@ -107,6 +104,8 @@ def to_rerun(self): # type: ignore[no-untyped-def] def to_rerun_arrow(self, length: float = 0.5): # type: ignore[no-untyped-def] """Convert to rerun Arrows3D format for visualization.""" + import rerun as rr + origin = [[self.x, self.y, self.z]] forward = self.orientation.rotate_vector(Vector3(length, 0, 0)) vector = [[forward.x, forward.y, forward.z]] @@ -139,44 +138,3 @@ def find_transform(self, other: PoseStamped) -> Transform: translation=local_translation, rotation=relative_rotation, ) - - @classmethod - def from_ros_msg(cls, ros_msg: ROSPoseStamped) -> PoseStamped: # type: ignore[override] - """Create a PoseStamped from a ROS geometry_msgs/PoseStamped message. - - Args: - ros_msg: ROS PoseStamped message - - Returns: - PoseStamped instance - """ - # Convert timestamp from ROS header - ts = ros_msg.header.stamp.sec + (ros_msg.header.stamp.nanosec / 1_000_000_000) - - # Convert pose - pose = Pose.from_ros_msg(ros_msg.pose) - - return cls( - ts=ts, - frame_id=ros_msg.header.frame_id, - position=pose.position, - orientation=pose.orientation, - ) - - def to_ros_msg(self) -> ROSPoseStamped: # type: ignore[override] - """Convert to a ROS geometry_msgs/PoseStamped message. 
- - Returns: - ROS PoseStamped message - """ - ros_msg = ROSPoseStamped() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1_000_000_000) - - # Set pose - ros_msg.pose = Pose.to_ros_msg(self) - - return ros_msg diff --git a/dimos/msgs/geometry_msgs/PoseWithCovariance.py b/dimos/msgs/geometry_msgs/PoseWithCovariance.py index b619679a78..03ce7fd081 100644 --- a/dimos/msgs/geometry_msgs/PoseWithCovariance.py +++ b/dimos/msgs/geometry_msgs/PoseWithCovariance.py @@ -14,7 +14,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, TypeAlias +from typing import TYPE_CHECKING, Any, TypeAlias from dimos_lcm.geometry_msgs import ( PoseWithCovariance as LCMPoseWithCovariance, @@ -22,13 +22,6 @@ import numpy as np from plum import dispatch -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - PoseWithCovariance as ROSPoseWithCovariance, - ) -except ImportError: - ROSPoseWithCovariance = None # type: ignore[assignment, misc] - from dimos.msgs.geometry_msgs.Pose import Pose, PoseConvertable if TYPE_CHECKING: @@ -45,6 +38,7 @@ class PoseWithCovariance(LCMPoseWithCovariance): # type: ignore[misc] pose: Pose + covariance: np.ndarray[tuple[int], np.dtype[np.floating[Any]]] msg_name = "geometry_msgs.PoseWithCovariance" @dispatch @@ -201,33 +195,3 @@ def lcm_decode(cls, data: bytes) -> PoseWithCovariance: ], ) return cls(pose, lcm_msg.covariance) - - @classmethod - def from_ros_msg(cls, ros_msg: ROSPoseWithCovariance) -> PoseWithCovariance: - """Create a PoseWithCovariance from a ROS geometry_msgs/PoseWithCovariance message. 
- - Args: - ros_msg: ROS PoseWithCovariance message - - Returns: - PoseWithCovariance instance - """ - - pose = Pose.from_ros_msg(ros_msg.pose) - return cls(pose, list(ros_msg.covariance)) - - def to_ros_msg(self) -> ROSPoseWithCovariance: - """Convert to a ROS geometry_msgs/PoseWithCovariance message. - - Returns: - ROS PoseWithCovariance message - """ - - ros_msg = ROSPoseWithCovariance() # type: ignore[no-untyped-call] - ros_msg.pose = self.pose.to_ros_msg() - # ROS expects list, not numpy array - if isinstance(self.covariance, np.ndarray): # type: ignore[has-type] - ros_msg.covariance = self.covariance.tolist() # type: ignore[has-type] - else: - ros_msg.covariance = list(self.covariance) # type: ignore[has-type] - return ros_msg diff --git a/dimos/msgs/geometry_msgs/PoseWithCovarianceStamped.py b/dimos/msgs/geometry_msgs/PoseWithCovarianceStamped.py index c6138fd064..9e92aa06f0 100644 --- a/dimos/msgs/geometry_msgs/PoseWithCovarianceStamped.py +++ b/dimos/msgs/geometry_msgs/PoseWithCovarianceStamped.py @@ -23,13 +23,6 @@ import numpy as np from plum import dispatch -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - PoseWithCovarianceStamped as ROSPoseWithCovarianceStamped, - ) -except ImportError: - ROSPoseWithCovarianceStamped = None # type: ignore[assignment, misc] - from dimos.msgs.geometry_msgs.Pose import Pose, PoseConvertable from dimos.msgs.geometry_msgs.PoseWithCovariance import PoseWithCovariance from dimos.types.timestamped import Timestamped @@ -115,51 +108,3 @@ def __str__(self) -> str: f"euler=[{self.roll:.3f}, {self.pitch:.3f}, {self.yaw:.3f}], " f"cov_trace={np.trace(self.covariance_matrix):.3f})" ) - - @classmethod - def from_ros_msg(cls, ros_msg: ROSPoseWithCovarianceStamped) -> PoseWithCovarianceStamped: # type: ignore[override] - """Create a PoseWithCovarianceStamped from a ROS geometry_msgs/PoseWithCovarianceStamped message. 
- - Args: - ros_msg: ROS PoseWithCovarianceStamped message - - Returns: - PoseWithCovarianceStamped instance - """ - - # Convert timestamp from ROS header - ts = ros_msg.header.stamp.sec + (ros_msg.header.stamp.nanosec / 1_000_000_000) - - # Convert pose with covariance - pose_with_cov = PoseWithCovariance.from_ros_msg(ros_msg.pose) - - return cls( - ts=ts, - frame_id=ros_msg.header.frame_id, - pose=pose_with_cov.pose, - covariance=pose_with_cov.covariance, # type: ignore[has-type] - ) - - def to_ros_msg(self) -> ROSPoseWithCovarianceStamped: # type: ignore[override] - """Convert to a ROS geometry_msgs/PoseWithCovarianceStamped message. - - Returns: - ROS PoseWithCovarianceStamped message - """ - - ros_msg = ROSPoseWithCovarianceStamped() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1_000_000_000) - - # Set pose with covariance - ros_msg.pose.pose = self.pose.to_ros_msg() - # ROS expects list, not numpy array - if isinstance(self.covariance, np.ndarray): # type: ignore[has-type] - ros_msg.pose.covariance = self.covariance.tolist() # type: ignore[has-type] - else: - ros_msg.pose.covariance = list(self.covariance) # type: ignore[has-type] - - return ros_msg diff --git a/dimos/msgs/geometry_msgs/Quaternion.py b/dimos/msgs/geometry_msgs/Quaternion.py index 02c9592ea6..8e83a32e50 100644 --- a/dimos/msgs/geometry_msgs/Quaternion.py +++ b/dimos/msgs/geometry_msgs/Quaternion.py @@ -17,7 +17,10 @@ from collections.abc import Sequence from io import BytesIO import struct -from typing import BinaryIO, TypeAlias +from typing import TYPE_CHECKING, BinaryIO, TypeAlias + +if TYPE_CHECKING: + import rerun as rr from dimos_lcm.geometry_msgs import Quaternion as LCMQuaternion import numpy as np @@ -112,6 +115,11 @@ def to_radians(self) -> Vector3: """Radians representation of the quaternion (x, y, z, w).""" return self.to_euler() + 
def to_rerun(self) -> rr.Quaternion: + import rerun as rr + + return rr.Quaternion(xyzw=[self.x, self.y, self.z, self.w]) + @classmethod def from_euler(cls, vector: Vector3) -> Quaternion: """Convert Euler angles (roll, pitch, yaw) in radians to quaternion. diff --git a/dimos/msgs/geometry_msgs/Transform.py b/dimos/msgs/geometry_msgs/Transform.py index 3a52f5a8c0..5f50f9b9d1 100644 --- a/dimos/msgs/geometry_msgs/Transform.py +++ b/dimos/msgs/geometry_msgs/Transform.py @@ -15,27 +15,18 @@ from __future__ import annotations import time -from typing import BinaryIO +from typing import TYPE_CHECKING, BinaryIO + +if TYPE_CHECKING: + import rerun as rr + + from dimos.msgs.geometry_msgs.PoseStamped import PoseStamped from dimos_lcm.geometry_msgs import ( Transform as LCMTransform, TransformStamped as LCMTransformStamped, ) -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - Quaternion as ROSQuaternion, - Transform as ROSTransform, - TransformStamped as ROSTransformStamped, - Vector3 as ROSVector3, - ) -except ImportError: - ROSTransformStamped = None # type: ignore[assignment, misc] - ROSTransform = None # type: ignore[assignment, misc] - ROSVector3 = None # type: ignore[assignment, misc] - ROSQuaternion = None # type: ignore[assignment, misc] -import rerun as rr - from dimos.msgs.geometry_msgs.Quaternion import Quaternion from dimos.msgs.geometry_msgs.Vector3 import Vector3 from dimos.msgs.std_msgs import Header @@ -166,70 +157,6 @@ def inverse(self) -> Transform: ts=self.ts, ) - @classmethod - def from_ros_transform_stamped(cls, ros_msg: ROSTransformStamped) -> Transform: - """Create a Transform from a ROS geometry_msgs/TransformStamped message. 
- - Args: - ros_msg: ROS TransformStamped message - - Returns: - Transform instance - """ - - # Convert timestamp - ts = ros_msg.header.stamp.sec + (ros_msg.header.stamp.nanosec / 1_000_000_000) - - # Convert translation - translation = Vector3( - ros_msg.transform.translation.x, - ros_msg.transform.translation.y, - ros_msg.transform.translation.z, - ) - - # Convert rotation - rotation = Quaternion( - ros_msg.transform.rotation.x, - ros_msg.transform.rotation.y, - ros_msg.transform.rotation.z, - ros_msg.transform.rotation.w, - ) - - return cls( - translation=translation, - rotation=rotation, - frame_id=ros_msg.header.frame_id, - child_frame_id=ros_msg.child_frame_id, - ts=ts, - ) - - def to_ros_transform_stamped(self) -> ROSTransformStamped: - """Convert to a ROS geometry_msgs/TransformStamped message. - - Returns: - ROS TransformStamped message - """ - - ros_msg = ROSTransformStamped() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1_000_000_000) - - # Set child frame - ros_msg.child_frame_id = self.child_frame_id - - # Set transform - ros_msg.transform.translation = ROSVector3( # type: ignore[no-untyped-call] - x=self.translation.x, y=self.translation.y, z=self.translation.z - ) - ros_msg.transform.rotation = ROSQuaternion( # type: ignore[no-untyped-call] - x=self.rotation.x, y=self.rotation.y, z=self.rotation.z, w=self.rotation.w - ) - - return ros_msg - def __neg__(self) -> Transform: """Unary minus operator returns the inverse transform.""" return self.inverse() @@ -266,7 +193,7 @@ def from_pose(cls, frame_id: str, pose: Pose | PoseStamped) -> Transform: # typ else: raise TypeError(f"Expected Pose or PoseStamped, got {type(pose).__name__}") - def to_pose(self, **kwargs) -> PoseStamped: # type: ignore[name-defined, no-untyped-def] + def to_pose(self, **kwargs: object) -> PoseStamped: """Create a Transform from a 
Pose or PoseStamped. Args: @@ -276,10 +203,10 @@ def to_pose(self, **kwargs) -> PoseStamped: # type: ignore[name-defined, no-unt A Transform with the same translation and rotation as the pose """ # Import locally to avoid circular imports - from dimos.msgs.geometry_msgs.PoseStamped import PoseStamped + from dimos.msgs.geometry_msgs.PoseStamped import PoseStamped as _PoseStamped # Handle both Pose and PoseStamped - return PoseStamped( + result: PoseStamped = _PoseStamped( **{ "position": self.translation, "orientation": self.rotation, @@ -287,6 +214,7 @@ def to_pose(self, **kwargs) -> PoseStamped: # type: ignore[name-defined, no-unt }, **kwargs, ) + return result def to_matrix(self) -> np.ndarray: # type: ignore[name-defined] """Convert Transform to a 4x4 transformation matrix. @@ -361,17 +289,17 @@ def lcm_decode(cls, data: bytes | BinaryIO) -> Transform: ts=ts, ) - def to_rerun(self): # type: ignore[no-untyped-def] + def to_rerun(self) -> rr.Transform3D: """Convert to rerun Transform3D format with frame IDs. 
Returns: rr.Transform3D archetype for logging to rerun with parent/child frames """ + import rerun as rr + return rr.Transform3D( translation=[self.translation.x, self.translation.y, self.translation.z], - rotation=rr.Quaternion( - xyzw=[self.rotation.x, self.rotation.y, self.rotation.z, self.rotation.w] - ), - parent_frame=self.frame_id, # type: ignore[call-arg] - child_frame=self.child_frame_id, # type: ignore[call-arg] + rotation=self.rotation.to_rerun(), + parent_frame="tf#/" + self.frame_id, + child_frame="tf#/" + self.child_frame_id, ) diff --git a/dimos/msgs/geometry_msgs/Twist.py b/dimos/msgs/geometry_msgs/Twist.py index 5184afc5f7..be5d9a34a0 100644 --- a/dimos/msgs/geometry_msgs/Twist.py +++ b/dimos/msgs/geometry_msgs/Twist.py @@ -17,15 +17,6 @@ from dimos_lcm.geometry_msgs import Twist as LCMTwist from plum import dispatch -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - Twist as ROSTwist, - Vector3 as ROSVector3, - ) -except ImportError: - ROSTwist = None # type: ignore[assignment, misc] - ROSVector3 = None # type: ignore[assignment, misc] - # Import Quaternion at runtime for beartype compatibility # (beartype needs to resolve forward references at runtime) from dimos.msgs.geometry_msgs.Quaternion import Quaternion @@ -97,6 +88,24 @@ def is_zero(self) -> bool: """Check if this is a zero twist (no linear or angular velocity).""" return self.linear.is_zero() and self.angular.is_zero() + def __sub__(self, other: Twist) -> Twist: + """Component-wise subtraction: self - other.""" + if not isinstance(other, Twist): + return NotImplemented + return Twist( + linear=self.linear - other.linear, + angular=self.angular - other.angular, + ) + + def __add__(self, other: Twist) -> Twist: + """Component-wise addition: self + other.""" + if not isinstance(other, Twist): + return NotImplemented + return Twist( + linear=self.linear + other.linear, + angular=self.angular + other.angular, + ) + def __bool__(self) -> bool: """Boolean conversion for 
Twist. @@ -108,32 +117,5 @@ def __bool__(self) -> bool: """ return not self.is_zero() - @classmethod - def from_ros_msg(cls, ros_msg: ROSTwist) -> Twist: - """Create a Twist from a ROS geometry_msgs/Twist message. - - Args: - ros_msg: ROS Twist message - - Returns: - Twist instance - """ - - linear = Vector3(ros_msg.linear.x, ros_msg.linear.y, ros_msg.linear.z) - angular = Vector3(ros_msg.angular.x, ros_msg.angular.y, ros_msg.angular.z) - return cls(linear, angular) - - def to_ros_msg(self) -> ROSTwist: - """Convert to a ROS geometry_msgs/Twist message. - - Returns: - ROS Twist message - """ - - ros_msg = ROSTwist() # type: ignore[no-untyped-call] - ros_msg.linear = ROSVector3(x=self.linear.x, y=self.linear.y, z=self.linear.z) # type: ignore[no-untyped-call] - ros_msg.angular = ROSVector3(x=self.angular.x, y=self.angular.y, z=self.angular.z) # type: ignore[no-untyped-call] - return ros_msg - __all__ = ["Quaternion", "Twist"] diff --git a/dimos/msgs/geometry_msgs/TwistStamped.py b/dimos/msgs/geometry_msgs/TwistStamped.py index f5305509e5..ab3dc507b9 100644 --- a/dimos/msgs/geometry_msgs/TwistStamped.py +++ b/dimos/msgs/geometry_msgs/TwistStamped.py @@ -20,13 +20,6 @@ from dimos_lcm.geometry_msgs import TwistStamped as LCMTwistStamped from plum import dispatch -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - TwistStamped as ROSTwistStamped, - ) -except ImportError: - ROSTwistStamped = None # type: ignore[assignment, misc] - from dimos.msgs.geometry_msgs.Twist import Twist from dimos.msgs.geometry_msgs.Vector3 import VectorConvertable from dimos.types.timestamped import Timestamped @@ -75,46 +68,3 @@ def __str__(self) -> str: f"TwistStamped(linear=[{self.linear.x:.3f}, {self.linear.y:.3f}, {self.linear.z:.3f}], " f"angular=[{self.angular.x:.3f}, {self.angular.y:.3f}, {self.angular.z:.3f}])" ) - - @classmethod - def from_ros_msg(cls, ros_msg: ROSTwistStamped) -> TwistStamped: # type: ignore[override] - """Create a TwistStamped from a ROS 
geometry_msgs/TwistStamped message. - - Args: - ros_msg: ROS TwistStamped message - - Returns: - TwistStamped instance - """ - - # Convert timestamp from ROS header - ts = ros_msg.header.stamp.sec + (ros_msg.header.stamp.nanosec / 1_000_000_000) - - # Convert twist - twist = Twist.from_ros_msg(ros_msg.twist) - - return cls( - ts=ts, - frame_id=ros_msg.header.frame_id, - linear=twist.linear, - angular=twist.angular, - ) - - def to_ros_msg(self) -> ROSTwistStamped: # type: ignore[override] - """Convert to a ROS geometry_msgs/TwistStamped message. - - Returns: - ROS TwistStamped message - """ - - ros_msg = ROSTwistStamped() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1_000_000_000) - - # Set twist - ros_msg.twist = Twist.to_ros_msg(self) - - return ros_msg diff --git a/dimos/msgs/geometry_msgs/TwistWithCovariance.py b/dimos/msgs/geometry_msgs/TwistWithCovariance.py index 1abbe54468..90ddb94d7c 100644 --- a/dimos/msgs/geometry_msgs/TwistWithCovariance.py +++ b/dimos/msgs/geometry_msgs/TwistWithCovariance.py @@ -14,7 +14,7 @@ from __future__ import annotations -from typing import TypeAlias +from typing import Any, TypeAlias from dimos_lcm.geometry_msgs import ( TwistWithCovariance as LCMTwistWithCovariance, @@ -22,13 +22,6 @@ import numpy as np from plum import dispatch -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - TwistWithCovariance as ROSTwistWithCovariance, - ) -except ImportError: - ROSTwistWithCovariance = None # type: ignore[assignment, misc] - from dimos.msgs.geometry_msgs.Twist import Twist from dimos.msgs.geometry_msgs.Vector3 import Vector3, VectorConvertable @@ -42,6 +35,7 @@ class TwistWithCovariance(LCMTwistWithCovariance): # type: ignore[misc] twist: Twist + covariance: np.ndarray[tuple[int], np.dtype[np.floating[Any]]] msg_name = "geometry_msgs.TwistWithCovariance" @dispatch @@ 
-197,33 +191,3 @@ def lcm_decode(cls, data: bytes) -> TwistWithCovariance: angular=[lcm_msg.twist.angular.x, lcm_msg.twist.angular.y, lcm_msg.twist.angular.z], ) return cls(twist, lcm_msg.covariance) - - @classmethod - def from_ros_msg(cls, ros_msg: ROSTwistWithCovariance) -> TwistWithCovariance: - """Create a TwistWithCovariance from a ROS geometry_msgs/TwistWithCovariance message. - - Args: - ros_msg: ROS TwistWithCovariance message - - Returns: - TwistWithCovariance instance - """ - - twist = Twist.from_ros_msg(ros_msg.twist) - return cls(twist, list(ros_msg.covariance)) - - def to_ros_msg(self) -> ROSTwistWithCovariance: - """Convert to a ROS geometry_msgs/TwistWithCovariance message. - - Returns: - ROS TwistWithCovariance message - """ - - ros_msg = ROSTwistWithCovariance() # type: ignore[no-untyped-call] - ros_msg.twist = self.twist.to_ros_msg() - # ROS expects list, not numpy array - if isinstance(self.covariance, np.ndarray): # type: ignore[has-type] - ros_msg.covariance = self.covariance.tolist() # type: ignore[has-type] - else: - ros_msg.covariance = list(self.covariance) # type: ignore[has-type] - return ros_msg diff --git a/dimos/msgs/geometry_msgs/TwistWithCovarianceStamped.py b/dimos/msgs/geometry_msgs/TwistWithCovarianceStamped.py index 3b1df6819b..82d0ba7eb2 100644 --- a/dimos/msgs/geometry_msgs/TwistWithCovarianceStamped.py +++ b/dimos/msgs/geometry_msgs/TwistWithCovarianceStamped.py @@ -23,13 +23,6 @@ import numpy as np from plum import dispatch -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - TwistWithCovarianceStamped as ROSTwistWithCovarianceStamped, - ) -except ImportError: - ROSTwistWithCovarianceStamped = None # type: ignore[assignment, misc] - from dimos.msgs.geometry_msgs.Twist import Twist from dimos.msgs.geometry_msgs.TwistWithCovariance import TwistWithCovariance from dimos.msgs.geometry_msgs.Vector3 import VectorConvertable @@ -123,51 +116,3 @@ def __str__(self) -> str: f"angular=[{self.angular.x:.3f}, 
{self.angular.y:.3f}, {self.angular.z:.3f}], " f"cov_trace={np.trace(self.covariance_matrix):.3f})" ) - - @classmethod - def from_ros_msg(cls, ros_msg: ROSTwistWithCovarianceStamped) -> TwistWithCovarianceStamped: # type: ignore[override] - """Create a TwistWithCovarianceStamped from a ROS geometry_msgs/TwistWithCovarianceStamped message. - - Args: - ros_msg: ROS TwistWithCovarianceStamped message - - Returns: - TwistWithCovarianceStamped instance - """ - - # Convert timestamp from ROS header - ts = ros_msg.header.stamp.sec + (ros_msg.header.stamp.nanosec / 1_000_000_000) - - # Convert twist with covariance - twist_with_cov = TwistWithCovariance.from_ros_msg(ros_msg.twist) - - return cls( - ts=ts, - frame_id=ros_msg.header.frame_id, - twist=twist_with_cov.twist, - covariance=twist_with_cov.covariance, # type: ignore[has-type] - ) - - def to_ros_msg(self) -> ROSTwistWithCovarianceStamped: # type: ignore[override] - """Convert to a ROS geometry_msgs/TwistWithCovarianceStamped message. - - Returns: - ROS TwistWithCovarianceStamped message - """ - - ros_msg = ROSTwistWithCovarianceStamped() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1_000_000_000) - - # Set twist with covariance - ros_msg.twist.twist = self.twist.to_ros_msg() - # ROS expects list, not numpy array - if isinstance(self.covariance, np.ndarray): # type: ignore[has-type] - ros_msg.twist.covariance = self.covariance.tolist() # type: ignore[has-type] - else: - ros_msg.twist.covariance = list(self.covariance) # type: ignore[has-type] - - return ros_msg diff --git a/dimos/msgs/geometry_msgs/__init__.py b/dimos/msgs/geometry_msgs/__init__.py index fd47d5f0ed..3c6a742fec 100644 --- a/dimos/msgs/geometry_msgs/__init__.py +++ b/dimos/msgs/geometry_msgs/__init__.py @@ -1,4 +1,5 @@ from dimos.msgs.geometry_msgs.Pose import Pose, PoseLike, to_pose +from 
dimos.msgs.geometry_msgs.PoseArray import PoseArray from dimos.msgs.geometry_msgs.PoseStamped import PoseStamped from dimos.msgs.geometry_msgs.PoseWithCovariance import PoseWithCovariance from dimos.msgs.geometry_msgs.PoseWithCovarianceStamped import PoseWithCovarianceStamped @@ -14,6 +15,7 @@ __all__ = [ "Pose", + "PoseArray", "PoseLike", "PoseStamped", "PoseWithCovariance", diff --git a/dimos/msgs/geometry_msgs/test_Pose.py b/dimos/msgs/geometry_msgs/test_Pose.py index 50bfaf1388..dac5ed6207 100644 --- a/dimos/msgs/geometry_msgs/test_Pose.py +++ b/dimos/msgs/geometry_msgs/test_Pose.py @@ -18,13 +18,6 @@ import numpy as np import pytest -try: - from geometry_msgs.msg import Point as ROSPoint, Pose as ROSPose, Quaternion as ROSQuaternion -except ImportError: - ROSPose = None - ROSPoint = None - ROSQuaternion = None - from dimos.msgs.geometry_msgs.Pose import Pose, to_pose from dimos.msgs.geometry_msgs.Quaternion import Quaternion from dimos.msgs.geometry_msgs.Vector3 import Vector3 @@ -754,55 +747,3 @@ def test_pose_addition_3d_rotation() -> None: assert np.isclose(result.position.x, 1.0, atol=1e-10) # X unchanged assert np.isclose(result.position.y, cos45 - sin45, atol=1e-10) assert np.isclose(result.position.z, sin45 + cos45, atol=1e-10) - - -@pytest.mark.ros -def test_pose_from_ros_msg() -> None: - """Test creating a Pose from a ROS Pose message.""" - ros_msg = ROSPose() - ros_msg.position = ROSPoint(x=1.0, y=2.0, z=3.0) - ros_msg.orientation = ROSQuaternion(x=0.1, y=0.2, z=0.3, w=0.9) - - pose = Pose.from_ros_msg(ros_msg) - - assert pose.position.x == 1.0 - assert pose.position.y == 2.0 - assert pose.position.z == 3.0 - assert pose.orientation.x == 0.1 - assert pose.orientation.y == 0.2 - assert pose.orientation.z == 0.3 - assert pose.orientation.w == 0.9 - - -@pytest.mark.ros -def test_pose_to_ros_msg() -> None: - """Test converting a Pose to a ROS Pose message.""" - pose = Pose(1.0, 2.0, 3.0, 0.1, 0.2, 0.3, 0.9) - - ros_msg = pose.to_ros_msg() - - assert 
isinstance(ros_msg, ROSPose) - assert ros_msg.position.x == 1.0 - assert ros_msg.position.y == 2.0 - assert ros_msg.position.z == 3.0 - assert ros_msg.orientation.x == 0.1 - assert ros_msg.orientation.y == 0.2 - assert ros_msg.orientation.z == 0.3 - assert ros_msg.orientation.w == 0.9 - - -@pytest.mark.ros -def test_pose_ros_roundtrip() -> None: - """Test round-trip conversion between Pose and ROS Pose.""" - original = Pose(1.5, 2.5, 3.5, 0.15, 0.25, 0.35, 0.85) - - ros_msg = original.to_ros_msg() - restored = Pose.from_ros_msg(ros_msg) - - assert restored.position.x == original.position.x - assert restored.position.y == original.position.y - assert restored.position.z == original.position.z - assert restored.orientation.x == original.orientation.x - assert restored.orientation.y == original.orientation.y - assert restored.orientation.z == original.orientation.z - assert restored.orientation.w == original.orientation.w diff --git a/dimos/msgs/geometry_msgs/test_PoseStamped.py b/dimos/msgs/geometry_msgs/test_PoseStamped.py index 603723b610..82250a9113 100644 --- a/dimos/msgs/geometry_msgs/test_PoseStamped.py +++ b/dimos/msgs/geometry_msgs/test_PoseStamped.py @@ -15,13 +15,6 @@ import pickle import time -import pytest - -try: - from geometry_msgs.msg import PoseStamped as ROSPoseStamped -except ImportError: - ROSPoseStamped = None - from dimos.msgs.geometry_msgs import PoseStamped @@ -60,80 +53,3 @@ def test_pickle_encode_decode() -> None: assert isinstance(pose_dest, PoseStamped) assert pose_dest is not pose_source assert pose_dest == pose_source - - -@pytest.mark.ros -def test_pose_stamped_from_ros_msg() -> None: - """Test creating a PoseStamped from a ROS PoseStamped message.""" - ros_msg = ROSPoseStamped() - ros_msg.header.frame_id = "world" - ros_msg.header.stamp.sec = 123 - ros_msg.header.stamp.nanosec = 456000000 - ros_msg.pose.position.x = 1.0 - ros_msg.pose.position.y = 2.0 - ros_msg.pose.position.z = 3.0 - ros_msg.pose.orientation.x = 0.1 - 
ros_msg.pose.orientation.y = 0.2 - ros_msg.pose.orientation.z = 0.3 - ros_msg.pose.orientation.w = 0.9 - - pose_stamped = PoseStamped.from_ros_msg(ros_msg) - - assert pose_stamped.frame_id == "world" - assert pose_stamped.ts == 123.456 - assert pose_stamped.position.x == 1.0 - assert pose_stamped.position.y == 2.0 - assert pose_stamped.position.z == 3.0 - assert pose_stamped.orientation.x == 0.1 - assert pose_stamped.orientation.y == 0.2 - assert pose_stamped.orientation.z == 0.3 - assert pose_stamped.orientation.w == 0.9 - - -@pytest.mark.ros -def test_pose_stamped_to_ros_msg() -> None: - """Test converting a PoseStamped to a ROS PoseStamped message.""" - pose_stamped = PoseStamped( - ts=123.456, - frame_id="base_link", - position=(1.0, 2.0, 3.0), - orientation=(0.1, 0.2, 0.3, 0.9), - ) - - ros_msg = pose_stamped.to_ros_msg() - - assert isinstance(ros_msg, ROSPoseStamped) - assert ros_msg.header.frame_id == "base_link" - assert ros_msg.header.stamp.sec == 123 - assert ros_msg.header.stamp.nanosec == 456000000 - assert ros_msg.pose.position.x == 1.0 - assert ros_msg.pose.position.y == 2.0 - assert ros_msg.pose.position.z == 3.0 - assert ros_msg.pose.orientation.x == 0.1 - assert ros_msg.pose.orientation.y == 0.2 - assert ros_msg.pose.orientation.z == 0.3 - assert ros_msg.pose.orientation.w == 0.9 - - -@pytest.mark.ros -def test_pose_stamped_ros_roundtrip() -> None: - """Test round-trip conversion between PoseStamped and ROS PoseStamped.""" - original = PoseStamped( - ts=123.789, - frame_id="odom", - position=(1.5, 2.5, 3.5), - orientation=(0.15, 0.25, 0.35, 0.85), - ) - - ros_msg = original.to_ros_msg() - restored = PoseStamped.from_ros_msg(ros_msg) - - assert restored.frame_id == original.frame_id - assert restored.ts == original.ts - assert restored.position.x == original.position.x - assert restored.position.y == original.position.y - assert restored.position.z == original.position.z - assert restored.orientation.x == original.orientation.x - assert 
restored.orientation.y == original.orientation.y - assert restored.orientation.z == original.orientation.z - assert restored.orientation.w == original.orientation.w diff --git a/dimos/msgs/geometry_msgs/test_PoseWithCovariance.py b/dimos/msgs/geometry_msgs/test_PoseWithCovariance.py index d62ca6e806..f6936db9f7 100644 --- a/dimos/msgs/geometry_msgs/test_PoseWithCovariance.py +++ b/dimos/msgs/geometry_msgs/test_PoseWithCovariance.py @@ -16,19 +16,6 @@ import numpy as np import pytest -try: - from geometry_msgs.msg import ( - Point as ROSPoint, - Pose as ROSPose, - PoseWithCovariance as ROSPoseWithCovariance, - Quaternion as ROSQuaternion, - ) -except ImportError: - ROSPoseWithCovariance = None - ROSPose = None - ROSPoint = None - ROSQuaternion = None - from dimos.msgs.geometry_msgs.Pose import Pose from dimos.msgs.geometry_msgs.PoseWithCovariance import PoseWithCovariance @@ -288,59 +275,6 @@ def test_pose_with_covariance_lcm_encode_decode() -> None: assert isinstance(decoded.covariance, np.ndarray) -@pytest.mark.ros -def test_pose_with_covariance_from_ros_msg() -> None: - """Test creating from ROS message.""" - ros_msg = ROSPoseWithCovariance() - ros_msg.pose.position = ROSPoint(x=1.0, y=2.0, z=3.0) - ros_msg.pose.orientation = ROSQuaternion(x=0.1, y=0.2, z=0.3, w=0.9) - ros_msg.covariance = [float(i) for i in range(36)] - - pose_cov = PoseWithCovariance.from_ros_msg(ros_msg) - - assert pose_cov.pose.position.x == 1.0 - assert pose_cov.pose.position.y == 2.0 - assert pose_cov.pose.position.z == 3.0 - assert pose_cov.pose.orientation.x == 0.1 - assert pose_cov.pose.orientation.y == 0.2 - assert pose_cov.pose.orientation.z == 0.3 - assert pose_cov.pose.orientation.w == 0.9 - assert np.array_equal(pose_cov.covariance, np.arange(36)) - - -@pytest.mark.ros -def test_pose_with_covariance_to_ros_msg() -> None: - """Test converting to ROS message.""" - pose = Pose(1.0, 2.0, 3.0, 0.1, 0.2, 0.3, 0.9) - covariance = np.arange(36, dtype=float) - pose_cov = 
PoseWithCovariance(pose, covariance) - - ros_msg = pose_cov.to_ros_msg() - - assert isinstance(ros_msg, ROSPoseWithCovariance) - assert ros_msg.pose.position.x == 1.0 - assert ros_msg.pose.position.y == 2.0 - assert ros_msg.pose.position.z == 3.0 - assert ros_msg.pose.orientation.x == 0.1 - assert ros_msg.pose.orientation.y == 0.2 - assert ros_msg.pose.orientation.z == 0.3 - assert ros_msg.pose.orientation.w == 0.9 - assert list(ros_msg.covariance) == list(range(36)) - - -@pytest.mark.ros -def test_pose_with_covariance_ros_roundtrip() -> None: - """Test round-trip conversion with ROS messages.""" - pose = Pose(1.5, 2.5, 3.5, 0.15, 0.25, 0.35, 0.85) - covariance = np.random.rand(36) - original = PoseWithCovariance(pose, covariance) - - ros_msg = original.to_ros_msg() - restored = PoseWithCovariance.from_ros_msg(ros_msg) - - assert restored == original - - def test_pose_with_covariance_zero_covariance() -> None: """Test with zero covariance matrix.""" pose = Pose(1.0, 2.0, 3.0) diff --git a/dimos/msgs/geometry_msgs/test_PoseWithCovarianceStamped.py b/dimos/msgs/geometry_msgs/test_PoseWithCovarianceStamped.py index 1d04bd8e87..1e910b53e7 100644 --- a/dimos/msgs/geometry_msgs/test_PoseWithCovarianceStamped.py +++ b/dimos/msgs/geometry_msgs/test_PoseWithCovarianceStamped.py @@ -15,27 +15,6 @@ import time import numpy as np -import pytest - -try: - from builtin_interfaces.msg import Time as ROSTime - from geometry_msgs.msg import ( - Point as ROSPoint, - Pose as ROSPose, - PoseWithCovariance as ROSPoseWithCovariance, - PoseWithCovarianceStamped as ROSPoseWithCovarianceStamped, - Quaternion as ROSQuaternion, - ) - from std_msgs.msg import Header as ROSHeader -except ImportError: - ROSHeader = None - ROSPoseWithCovarianceStamped = None - ROSPose = None - ROSQuaternion = None - ROSPoint = None - ROSTime = None - ROSPoseWithCovariance = None - from dimos.msgs.geometry_msgs.Pose import Pose from dimos.msgs.geometry_msgs.PoseWithCovariance import PoseWithCovariance @@ -44,20 
+23,6 @@ def test_pose_with_covariance_stamped_default_init() -> None: """Test default initialization.""" - if ROSPoseWithCovariance is None: - pytest.skip("ROS not available") - if ROSTime is None: - pytest.skip("ROS not available") - if ROSPoint is None: - pytest.skip("ROS not available") - if ROSQuaternion is None: - pytest.skip("ROS not available") - if ROSPose is None: - pytest.skip("ROS not available") - if ROSPoseWithCovarianceStamped is None: - pytest.skip("ROS not available") - if ROSHeader is None: - pytest.skip("ROS not available") pose_cov_stamped = PoseWithCovarianceStamped() # Should have current timestamp @@ -180,98 +145,6 @@ def test_pose_with_covariance_stamped_lcm_encode_decode() -> None: assert np.array_equal(decoded.covariance, covariance) -@pytest.mark.ros -def test_pose_with_covariance_stamped_from_ros_msg() -> None: - """Test creating from ROS message.""" - ros_msg = ROSPoseWithCovarianceStamped() - - # Set header - ros_msg.header = ROSHeader() - ros_msg.header.stamp = ROSTime() - ros_msg.header.stamp.sec = 1234567890 - ros_msg.header.stamp.nanosec = 123456000 - ros_msg.header.frame_id = "laser" - - # Set pose with covariance - ros_msg.pose = ROSPoseWithCovariance() - ros_msg.pose.pose = ROSPose() - ros_msg.pose.pose.position = ROSPoint(x=1.0, y=2.0, z=3.0) - ros_msg.pose.pose.orientation = ROSQuaternion(x=0.1, y=0.2, z=0.3, w=0.9) - ros_msg.pose.covariance = [float(i) for i in range(36)] - - pose_cov_stamped = PoseWithCovarianceStamped.from_ros_msg(ros_msg) - - assert pose_cov_stamped.ts == 1234567890.123456 - assert pose_cov_stamped.frame_id == "laser" - assert pose_cov_stamped.pose.position.x == 1.0 - assert pose_cov_stamped.pose.position.y == 2.0 - assert pose_cov_stamped.pose.position.z == 3.0 - assert pose_cov_stamped.pose.orientation.x == 0.1 - assert pose_cov_stamped.pose.orientation.y == 0.2 - assert pose_cov_stamped.pose.orientation.z == 0.3 - assert pose_cov_stamped.pose.orientation.w == 0.9 - assert 
np.array_equal(pose_cov_stamped.covariance, np.arange(36)) - - -@pytest.mark.ros -def test_pose_with_covariance_stamped_to_ros_msg() -> None: - """Test converting to ROS message.""" - ts = 1234567890.567890 - frame_id = "imu" - pose = Pose(1.0, 2.0, 3.0, 0.1, 0.2, 0.3, 0.9) - covariance = np.arange(36, dtype=float) - - pose_cov_stamped = PoseWithCovarianceStamped( - ts=ts, frame_id=frame_id, pose=pose, covariance=covariance - ) - - ros_msg = pose_cov_stamped.to_ros_msg() - - assert isinstance(ros_msg, ROSPoseWithCovarianceStamped) - assert ros_msg.header.frame_id == frame_id - assert ros_msg.header.stamp.sec == 1234567890 - assert abs(ros_msg.header.stamp.nanosec - 567890000) < 100 # Allow small rounding error - - assert ros_msg.pose.pose.position.x == 1.0 - assert ros_msg.pose.pose.position.y == 2.0 - assert ros_msg.pose.pose.position.z == 3.0 - assert ros_msg.pose.pose.orientation.x == 0.1 - assert ros_msg.pose.pose.orientation.y == 0.2 - assert ros_msg.pose.pose.orientation.z == 0.3 - assert ros_msg.pose.pose.orientation.w == 0.9 - assert list(ros_msg.pose.covariance) == list(range(36)) - - -@pytest.mark.ros -def test_pose_with_covariance_stamped_ros_roundtrip() -> None: - """Test round-trip conversion with ROS messages.""" - ts = 2147483647.987654 # Max int32 value for ROS Time.sec - frame_id = "robot_base" - pose = Pose(1.5, 2.5, 3.5, 0.15, 0.25, 0.35, 0.85) - covariance = np.random.rand(36) - - original = PoseWithCovarianceStamped(ts=ts, frame_id=frame_id, pose=pose, covariance=covariance) - - ros_msg = original.to_ros_msg() - restored = PoseWithCovarianceStamped.from_ros_msg(ros_msg) - - # Check timestamp (loses some precision in conversion) - assert abs(restored.ts - ts) < 1e-6 - assert restored.frame_id == frame_id - - # Check pose - assert restored.pose.position.x == original.pose.position.x - assert restored.pose.position.y == original.pose.position.y - assert restored.pose.position.z == original.pose.position.z - assert restored.pose.orientation.x == 
original.pose.orientation.x - assert restored.pose.orientation.y == original.pose.orientation.y - assert restored.pose.orientation.z == original.pose.orientation.z - assert restored.pose.orientation.w == original.pose.orientation.w - - # Check covariance - assert np.allclose(restored.covariance, original.covariance) - - def test_pose_with_covariance_stamped_zero_timestamp() -> None: """Test that zero timestamp gets replaced with current time.""" pose_cov_stamped = PoseWithCovarianceStamped(ts=0.0) @@ -330,24 +203,6 @@ def test_pose_with_covariance_stamped_sec_nsec() -> None: assert ns == 0 -@pytest.mark.ros -@pytest.mark.parametrize( - "frame_id", - ["", "map", "odom", "base_link", "camera_optical_frame", "sensor/lidar/front"], -) -def test_pose_with_covariance_stamped_frame_ids(frame_id) -> None: - """Test various frame ID values.""" - pose_cov_stamped = PoseWithCovarianceStamped(frame_id=frame_id) - assert pose_cov_stamped.frame_id == frame_id - - # Test roundtrip through ROS - ros_msg = pose_cov_stamped.to_ros_msg() - assert ros_msg.header.frame_id == frame_id - - restored = PoseWithCovarianceStamped.from_ros_msg(ros_msg) - assert restored.frame_id == frame_id - - def test_pose_with_covariance_stamped_different_covariances() -> None: """Test with different covariance patterns.""" pose = Pose(1.0, 2.0, 3.0) diff --git a/dimos/msgs/geometry_msgs/test_Transform.py b/dimos/msgs/geometry_msgs/test_Transform.py index 2a1daff684..0c15610b05 100644 --- a/dimos/msgs/geometry_msgs/test_Transform.py +++ b/dimos/msgs/geometry_msgs/test_Transform.py @@ -18,12 +18,6 @@ import numpy as np import pytest -try: - from geometry_msgs.msg import TransformStamped as ROSTransformStamped -except ImportError: - ROSTransformStamped = None - - from dimos.msgs.geometry_msgs import Pose, PoseStamped, Quaternion, Transform, Vector3 @@ -424,86 +418,3 @@ def test_transform_from_pose_invalid_type() -> None: with pytest.raises(TypeError): Transform.from_pose(None) - - -@pytest.mark.ros -def 
test_transform_from_ros_transform_stamped() -> None: - """Test creating a Transform from a ROS TransformStamped message.""" - ros_msg = ROSTransformStamped() - ros_msg.header.frame_id = "world" - ros_msg.header.stamp.sec = 123 - ros_msg.header.stamp.nanosec = 456000000 - ros_msg.child_frame_id = "robot" - ros_msg.transform.translation.x = 1.0 - ros_msg.transform.translation.y = 2.0 - ros_msg.transform.translation.z = 3.0 - ros_msg.transform.rotation.x = 0.1 - ros_msg.transform.rotation.y = 0.2 - ros_msg.transform.rotation.z = 0.3 - ros_msg.transform.rotation.w = 0.9 - - transform = Transform.from_ros_transform_stamped(ros_msg) - - assert transform.frame_id == "world" - assert transform.child_frame_id == "robot" - assert transform.ts == 123.456 - assert transform.translation.x == 1.0 - assert transform.translation.y == 2.0 - assert transform.translation.z == 3.0 - assert transform.rotation.x == 0.1 - assert transform.rotation.y == 0.2 - assert transform.rotation.z == 0.3 - assert transform.rotation.w == 0.9 - - -@pytest.mark.ros -def test_transform_to_ros_transform_stamped() -> None: - """Test converting a Transform to a ROS TransformStamped message.""" - transform = Transform( - translation=Vector3(4.0, 5.0, 6.0), - rotation=Quaternion(0.15, 0.25, 0.35, 0.85), - frame_id="base_link", - child_frame_id="sensor", - ts=124.789, - ) - - ros_msg = transform.to_ros_transform_stamped() - - assert isinstance(ros_msg, ROSTransformStamped) - assert ros_msg.header.frame_id == "base_link" - assert ros_msg.child_frame_id == "sensor" - assert ros_msg.header.stamp.sec == 124 - assert ros_msg.header.stamp.nanosec == 789000000 - assert ros_msg.transform.translation.x == 4.0 - assert ros_msg.transform.translation.y == 5.0 - assert ros_msg.transform.translation.z == 6.0 - assert ros_msg.transform.rotation.x == 0.15 - assert ros_msg.transform.rotation.y == 0.25 - assert ros_msg.transform.rotation.z == 0.35 - assert ros_msg.transform.rotation.w == 0.85 - - -@pytest.mark.ros -def 
test_transform_ros_roundtrip() -> None: - """Test round-trip conversion between Transform and ROS TransformStamped.""" - original = Transform( - translation=Vector3(7.5, 8.5, 9.5), - rotation=Quaternion(0.0, 0.0, 0.383, 0.924), # ~45 degrees around Z - frame_id="odom", - child_frame_id="base_footprint", - ts=99.123, - ) - - ros_msg = original.to_ros_transform_stamped() - restored = Transform.from_ros_transform_stamped(ros_msg) - - assert restored.frame_id == original.frame_id - assert restored.child_frame_id == original.child_frame_id - assert restored.ts == original.ts - assert restored.translation.x == original.translation.x - assert restored.translation.y == original.translation.y - assert restored.translation.z == original.translation.z - assert restored.rotation.x == original.rotation.x - assert restored.rotation.y == original.rotation.y - assert restored.rotation.z == original.rotation.z - assert restored.rotation.w == original.rotation.w diff --git a/dimos/msgs/geometry_msgs/test_Twist.py b/dimos/msgs/geometry_msgs/test_Twist.py index f83ffa3fdd..df4bd8b6a2 100644 --- a/dimos/msgs/geometry_msgs/test_Twist.py +++ b/dimos/msgs/geometry_msgs/test_Twist.py @@ -12,16 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import numpy as np -import pytest - -try: - from geometry_msgs.msg import Twist as ROSTwist, Vector3 as ROSVector3 -except ImportError: - ROSTwist = None - ROSVector3 = None - from dimos_lcm.geometry_msgs import Twist as LCMTwist +import numpy as np from dimos.msgs.geometry_msgs import Quaternion, Twist, Vector3 @@ -205,97 +197,3 @@ def test_twist_with_lists() -> None: tw2 = Twist(linear=np.array([4, 5, 6]), angular=np.array([0.4, 0.5, 0.6])) assert tw2.linear == Vector3(4, 5, 6) assert tw2.angular == Vector3(0.4, 0.5, 0.6) - - -@pytest.mark.ros -def test_twist_from_ros_msg() -> None: - """Test Twist.from_ros_msg conversion.""" - # Create ROS message - ros_msg = ROSTwist() - ros_msg.linear = ROSVector3(x=10.0, y=20.0, z=30.0) - ros_msg.angular = ROSVector3(x=1.0, y=2.0, z=3.0) - - # Convert to LCM - lcm_msg = Twist.from_ros_msg(ros_msg) - - assert isinstance(lcm_msg, Twist) - assert lcm_msg.linear.x == 10.0 - assert lcm_msg.linear.y == 20.0 - assert lcm_msg.linear.z == 30.0 - assert lcm_msg.angular.x == 1.0 - assert lcm_msg.angular.y == 2.0 - assert lcm_msg.angular.z == 3.0 - - -@pytest.mark.ros -def test_twist_to_ros_msg() -> None: - """Test Twist.to_ros_msg conversion.""" - # Create LCM message - lcm_msg = Twist(linear=Vector3(40.0, 50.0, 60.0), angular=Vector3(4.0, 5.0, 6.0)) - - # Convert to ROS - ros_msg = lcm_msg.to_ros_msg() - - assert isinstance(ros_msg, ROSTwist) - assert ros_msg.linear.x == 40.0 - assert ros_msg.linear.y == 50.0 - assert ros_msg.linear.z == 60.0 - assert ros_msg.angular.x == 4.0 - assert ros_msg.angular.y == 5.0 - assert ros_msg.angular.z == 6.0 - - -@pytest.mark.ros -def test_ros_zero_twist_conversion() -> None: - """Test conversion of zero twist messages between ROS and LCM.""" - # Test ROS to LCM with zero twist - ros_zero = ROSTwist() - lcm_zero = Twist.from_ros_msg(ros_zero) - assert lcm_zero.is_zero() - - # Test LCM to ROS with zero twist - lcm_zero2 = Twist.zero() - ros_zero2 = lcm_zero2.to_ros_msg() - assert ros_zero2.linear.x == 
0.0 - assert ros_zero2.linear.y == 0.0 - assert ros_zero2.linear.z == 0.0 - assert ros_zero2.angular.x == 0.0 - assert ros_zero2.angular.y == 0.0 - assert ros_zero2.angular.z == 0.0 - - -@pytest.mark.ros -def test_ros_negative_values_conversion() -> None: - """Test ROS conversion with negative values.""" - # Create ROS message with negative values - ros_msg = ROSTwist() - ros_msg.linear = ROSVector3(x=-1.5, y=-2.5, z=-3.5) - ros_msg.angular = ROSVector3(x=-0.1, y=-0.2, z=-0.3) - - # Convert to LCM and back - lcm_msg = Twist.from_ros_msg(ros_msg) - ros_msg2 = lcm_msg.to_ros_msg() - - assert ros_msg2.linear.x == -1.5 - assert ros_msg2.linear.y == -2.5 - assert ros_msg2.linear.z == -3.5 - assert ros_msg2.angular.x == -0.1 - assert ros_msg2.angular.y == -0.2 - assert ros_msg2.angular.z == -0.3 - - -@pytest.mark.ros -def test_ros_roundtrip_conversion() -> None: - """Test round-trip conversion maintains data integrity.""" - # LCM -> ROS -> LCM - original_lcm = Twist(linear=Vector3(1.234, 5.678, 9.012), angular=Vector3(0.111, 0.222, 0.333)) - ros_intermediate = original_lcm.to_ros_msg() - final_lcm = Twist.from_ros_msg(ros_intermediate) - - assert final_lcm == original_lcm - assert final_lcm.linear.x == 1.234 - assert final_lcm.linear.y == 5.678 - assert final_lcm.linear.z == 9.012 - assert final_lcm.angular.x == 0.111 - assert final_lcm.angular.y == 0.222 - assert final_lcm.angular.z == 0.333 diff --git a/dimos/msgs/geometry_msgs/test_TwistStamped.py b/dimos/msgs/geometry_msgs/test_TwistStamped.py index 7ba2f59e7d..afb8489032 100644 --- a/dimos/msgs/geometry_msgs/test_TwistStamped.py +++ b/dimos/msgs/geometry_msgs/test_TwistStamped.py @@ -15,13 +15,6 @@ import pickle import time -import pytest - -try: - from geometry_msgs.msg import TwistStamped as ROSTwistStamped -except ImportError: - ROSTwistStamped = None - from dimos.msgs.geometry_msgs.TwistStamped import TwistStamped @@ -61,98 +54,13 @@ def test_pickle_encode_decode() -> None: assert twist_dest == twist_source 
-@pytest.mark.ros -def test_twist_stamped_from_ros_msg() -> None: - """Test creating a TwistStamped from a ROS TwistStamped message.""" - ros_msg = ROSTwistStamped() - ros_msg.header.frame_id = "world" - ros_msg.header.stamp.sec = 123 - ros_msg.header.stamp.nanosec = 456000000 - ros_msg.twist.linear.x = 1.0 - ros_msg.twist.linear.y = 2.0 - ros_msg.twist.linear.z = 3.0 - ros_msg.twist.angular.x = 0.1 - ros_msg.twist.angular.y = 0.2 - ros_msg.twist.angular.z = 0.3 - - twist_stamped = TwistStamped.from_ros_msg(ros_msg) - - assert twist_stamped.frame_id == "world" - assert twist_stamped.ts == 123.456 - assert twist_stamped.linear.x == 1.0 - assert twist_stamped.linear.y == 2.0 - assert twist_stamped.linear.z == 3.0 - assert twist_stamped.angular.x == 0.1 - assert twist_stamped.angular.y == 0.2 - assert twist_stamped.angular.z == 0.3 - - -@pytest.mark.ros -def test_twist_stamped_to_ros_msg() -> None: - """Test converting a TwistStamped to a ROS TwistStamped message.""" - twist_stamped = TwistStamped( - ts=123.456, - frame_id="base_link", - linear=(1.0, 2.0, 3.0), - angular=(0.1, 0.2, 0.3), - ) - - ros_msg = twist_stamped.to_ros_msg() - - assert isinstance(ros_msg, ROSTwistStamped) - assert ros_msg.header.frame_id == "base_link" - assert ros_msg.header.stamp.sec == 123 - assert ros_msg.header.stamp.nanosec == 456000000 - assert ros_msg.twist.linear.x == 1.0 - assert ros_msg.twist.linear.y == 2.0 - assert ros_msg.twist.linear.z == 3.0 - assert ros_msg.twist.angular.x == 0.1 - assert ros_msg.twist.angular.y == 0.2 - assert ros_msg.twist.angular.z == 0.3 - - -@pytest.mark.ros -def test_twist_stamped_ros_roundtrip() -> None: - """Test round-trip conversion between TwistStamped and ROS TwistStamped.""" - original = TwistStamped( - ts=123.789, - frame_id="odom", - linear=(1.5, 2.5, 3.5), - angular=(0.15, 0.25, 0.35), - ) - - ros_msg = original.to_ros_msg() - restored = TwistStamped.from_ros_msg(ros_msg) - - assert restored.frame_id == original.frame_id - assert restored.ts == 
original.ts - assert restored.linear.x == original.linear.x - assert restored.linear.y == original.linear.y - assert restored.linear.z == original.linear.z - assert restored.angular.x == original.angular.x - assert restored.angular.y == original.angular.y - assert restored.angular.z == original.angular.z - - if __name__ == "__main__": print("Running test_lcm_encode_decode...") test_lcm_encode_decode() - print("✓ test_lcm_encode_decode passed") + print("test_lcm_encode_decode passed") print("Running test_pickle_encode_decode...") test_pickle_encode_decode() - print("✓ test_pickle_encode_decode passed") - - print("Running test_twist_stamped_from_ros_msg...") - test_twist_stamped_from_ros_msg() - print("✓ test_twist_stamped_from_ros_msg passed") - - print("Running test_twist_stamped_to_ros_msg...") - test_twist_stamped_to_ros_msg() - print("✓ test_twist_stamped_to_ros_msg passed") - - print("Running test_twist_stamped_ros_roundtrip...") - test_twist_stamped_ros_roundtrip() - print("✓ test_twist_stamped_ros_roundtrip passed") + print("test_pickle_encode_decode passed") print("\nAll tests passed!") diff --git a/dimos/msgs/geometry_msgs/test_TwistWithCovariance.py b/dimos/msgs/geometry_msgs/test_TwistWithCovariance.py index 746b0c3646..1d8ae820ad 100644 --- a/dimos/msgs/geometry_msgs/test_TwistWithCovariance.py +++ b/dimos/msgs/geometry_msgs/test_TwistWithCovariance.py @@ -12,22 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from dimos_lcm.geometry_msgs import TwistWithCovariance as LCMTwistWithCovariance import numpy as np import pytest -try: - from geometry_msgs.msg import ( - Twist as ROSTwist, - TwistWithCovariance as ROSTwistWithCovariance, - Vector3 as ROSVector3, - ) -except ImportError: - ROSTwist = None - ROSTwistWithCovariance = None - ROSVector3 = None - -from dimos_lcm.geometry_msgs import TwistWithCovariance as LCMTwistWithCovariance - from dimos.msgs.geometry_msgs.Twist import Twist from dimos.msgs.geometry_msgs.TwistWithCovariance import TwistWithCovariance from dimos.msgs.geometry_msgs.Vector3 import Vector3 @@ -35,10 +23,6 @@ def test_twist_with_covariance_default_init() -> None: """Test that default initialization creates a zero twist with zero covariance.""" - if ROSVector3 is None: - pytest.skip("ROS not available") - if ROSTwistWithCovariance is None: - pytest.skip("ROS not available") twist_cov = TwistWithCovariance() # Twist should be zero @@ -317,57 +301,6 @@ def test_twist_with_covariance_lcm_encode_decode() -> None: assert isinstance(decoded.covariance, np.ndarray) -@pytest.mark.ros -def test_twist_with_covariance_from_ros_msg() -> None: - """Test creating from ROS message.""" - ros_msg = ROSTwistWithCovariance() - ros_msg.twist.linear = ROSVector3(x=1.0, y=2.0, z=3.0) - ros_msg.twist.angular = ROSVector3(x=0.1, y=0.2, z=0.3) - ros_msg.covariance = [float(i) for i in range(36)] - - twist_cov = TwistWithCovariance.from_ros_msg(ros_msg) - - assert twist_cov.twist.linear.x == 1.0 - assert twist_cov.twist.linear.y == 2.0 - assert twist_cov.twist.linear.z == 3.0 - assert twist_cov.twist.angular.x == 0.1 - assert twist_cov.twist.angular.y == 0.2 - assert twist_cov.twist.angular.z == 0.3 - assert np.array_equal(twist_cov.covariance, np.arange(36)) - - -@pytest.mark.ros -def test_twist_with_covariance_to_ros_msg() -> None: - """Test converting to ROS message.""" - twist = Twist(Vector3(1.0, 2.0, 3.0), Vector3(0.1, 0.2, 0.3)) - covariance = np.arange(36, dtype=float) 
- twist_cov = TwistWithCovariance(twist, covariance) - - ros_msg = twist_cov.to_ros_msg() - - assert isinstance(ros_msg, ROSTwistWithCovariance) - assert ros_msg.twist.linear.x == 1.0 - assert ros_msg.twist.linear.y == 2.0 - assert ros_msg.twist.linear.z == 3.0 - assert ros_msg.twist.angular.x == 0.1 - assert ros_msg.twist.angular.y == 0.2 - assert ros_msg.twist.angular.z == 0.3 - assert list(ros_msg.covariance) == list(range(36)) - - -@pytest.mark.ros -def test_twist_with_covariance_ros_roundtrip() -> None: - """Test round-trip conversion with ROS messages.""" - twist = Twist(Vector3(1.5, 2.5, 3.5), Vector3(0.15, 0.25, 0.35)) - covariance = np.random.rand(36) - original = TwistWithCovariance(twist, covariance) - - ros_msg = original.to_ros_msg() - restored = TwistWithCovariance.from_ros_msg(ros_msg) - - assert restored == original - - def test_twist_with_covariance_zero_covariance() -> None: """Test with zero covariance matrix.""" twist = Twist(Vector3(1.0, 2.0, 3.0), Vector3(0.1, 0.2, 0.3)) diff --git a/dimos/msgs/geometry_msgs/test_TwistWithCovarianceStamped.py b/dimos/msgs/geometry_msgs/test_TwistWithCovarianceStamped.py index f0d7e5b4ab..2be647bff1 100644 --- a/dimos/msgs/geometry_msgs/test_TwistWithCovarianceStamped.py +++ b/dimos/msgs/geometry_msgs/test_TwistWithCovarianceStamped.py @@ -15,25 +15,6 @@ import time import numpy as np -import pytest - -try: - from builtin_interfaces.msg import Time as ROSTime - from geometry_msgs.msg import ( - Twist as ROSTwist, - TwistWithCovariance as ROSTwistWithCovariance, - TwistWithCovarianceStamped as ROSTwistWithCovarianceStamped, - Vector3 as ROSVector3, - ) - from std_msgs.msg import Header as ROSHeader -except ImportError: - ROSTwistWithCovarianceStamped = None - ROSTwist = None - ROSHeader = None - ROSTime = None - ROSTwistWithCovariance = None - ROSVector3 = None - from dimos.msgs.geometry_msgs.Twist import Twist from dimos.msgs.geometry_msgs.TwistWithCovariance import TwistWithCovariance @@ -43,18 +24,6 @@ def 
test_twist_with_covariance_stamped_default_init() -> None: """Test default initialization.""" - if ROSVector3 is None: - pytest.skip("ROS not available") - if ROSTwistWithCovariance is None: - pytest.skip("ROS not available") - if ROSTime is None: - pytest.skip("ROS not available") - if ROSHeader is None: - pytest.skip("ROS not available") - if ROSTwist is None: - pytest.skip("ROS not available") - if ROSTwistWithCovarianceStamped is None: - pytest.skip("ROS not available") twist_cov_stamped = TwistWithCovarianceStamped() # Should have current timestamp @@ -191,97 +160,6 @@ def test_twist_with_covariance_stamped_lcm_encode_decode() -> None: assert np.array_equal(decoded.covariance, covariance) -@pytest.mark.ros -def test_twist_with_covariance_stamped_from_ros_msg() -> None: - """Test creating from ROS message.""" - ros_msg = ROSTwistWithCovarianceStamped() - - # Set header - ros_msg.header = ROSHeader() - ros_msg.header.stamp = ROSTime() - ros_msg.header.stamp.sec = 1234567890 - ros_msg.header.stamp.nanosec = 123456000 - ros_msg.header.frame_id = "laser" - - # Set twist with covariance - ros_msg.twist = ROSTwistWithCovariance() - ros_msg.twist.twist = ROSTwist() - ros_msg.twist.twist.linear = ROSVector3(x=1.0, y=2.0, z=3.0) - ros_msg.twist.twist.angular = ROSVector3(x=0.1, y=0.2, z=0.3) - ros_msg.twist.covariance = [float(i) for i in range(36)] - - twist_cov_stamped = TwistWithCovarianceStamped.from_ros_msg(ros_msg) - - assert twist_cov_stamped.ts == 1234567890.123456 - assert twist_cov_stamped.frame_id == "laser" - assert twist_cov_stamped.twist.linear.x == 1.0 - assert twist_cov_stamped.twist.linear.y == 2.0 - assert twist_cov_stamped.twist.linear.z == 3.0 - assert twist_cov_stamped.twist.angular.x == 0.1 - assert twist_cov_stamped.twist.angular.y == 0.2 - assert twist_cov_stamped.twist.angular.z == 0.3 - assert np.array_equal(twist_cov_stamped.covariance, np.arange(36)) - - -@pytest.mark.ros -def test_twist_with_covariance_stamped_to_ros_msg() -> None: - """Test 
converting to ROS message.""" - ts = 1234567890.567890 - frame_id = "imu" - twist = Twist(Vector3(1.0, 2.0, 3.0), Vector3(0.1, 0.2, 0.3)) - covariance = np.arange(36, dtype=float) - - twist_cov_stamped = TwistWithCovarianceStamped( - ts=ts, frame_id=frame_id, twist=twist, covariance=covariance - ) - - ros_msg = twist_cov_stamped.to_ros_msg() - - assert isinstance(ros_msg, ROSTwistWithCovarianceStamped) - assert ros_msg.header.frame_id == frame_id - assert ros_msg.header.stamp.sec == 1234567890 - assert abs(ros_msg.header.stamp.nanosec - 567890000) < 100 # Allow small rounding error - - assert ros_msg.twist.twist.linear.x == 1.0 - assert ros_msg.twist.twist.linear.y == 2.0 - assert ros_msg.twist.twist.linear.z == 3.0 - assert ros_msg.twist.twist.angular.x == 0.1 - assert ros_msg.twist.twist.angular.y == 0.2 - assert ros_msg.twist.twist.angular.z == 0.3 - assert list(ros_msg.twist.covariance) == list(range(36)) - - -@pytest.mark.ros -def test_twist_with_covariance_stamped_ros_roundtrip() -> None: - """Test round-trip conversion with ROS messages.""" - ts = 2147483647.987654 # Max int32 value for ROS Time.sec - frame_id = "robot_base" - twist = Twist(Vector3(1.5, 2.5, 3.5), Vector3(0.15, 0.25, 0.35)) - covariance = np.random.rand(36) - - original = TwistWithCovarianceStamped( - ts=ts, frame_id=frame_id, twist=twist, covariance=covariance - ) - - ros_msg = original.to_ros_msg() - restored = TwistWithCovarianceStamped.from_ros_msg(ros_msg) - - # Check timestamp (loses some precision in conversion) - assert abs(restored.ts - ts) < 1e-6 - assert restored.frame_id == frame_id - - # Check twist - assert restored.twist.linear.x == original.twist.linear.x - assert restored.twist.linear.y == original.twist.linear.y - assert restored.twist.linear.z == original.twist.linear.z - assert restored.twist.angular.x == original.twist.angular.x - assert restored.twist.angular.y == original.twist.angular.y - assert restored.twist.angular.z == original.twist.angular.z - - # Check 
covariance - assert np.allclose(restored.covariance, original.covariance) - - def test_twist_with_covariance_stamped_zero_timestamp() -> None: """Test that zero timestamp gets replaced with current time.""" twist_cov_stamped = TwistWithCovarianceStamped(ts=0.0) @@ -354,24 +232,6 @@ def test_twist_with_covariance_stamped_sec_nsec() -> None: assert ns == 0 -@pytest.mark.ros -@pytest.mark.parametrize( - "frame_id", - ["", "map", "odom", "base_link", "cmd_vel", "sensor/velocity/front"], -) -def test_twist_with_covariance_stamped_frame_ids(frame_id) -> None: - """Test various frame ID values.""" - twist_cov_stamped = TwistWithCovarianceStamped(frame_id=frame_id) - assert twist_cov_stamped.frame_id == frame_id - - # Test roundtrip through ROS - ros_msg = twist_cov_stamped.to_ros_msg() - assert ros_msg.header.frame_id == frame_id - - restored = TwistWithCovarianceStamped.from_ros_msg(ros_msg) - assert restored.frame_id == frame_id - - def test_twist_with_covariance_stamped_different_covariances() -> None: """Test with different covariance patterns.""" twist = Twist(Vector3(1.0, 0.0, 0.0), Vector3(0.0, 0.0, 0.5)) diff --git a/dimos/msgs/helpers.py b/dimos/msgs/helpers.py new file mode 100644 index 0000000000..8464ec4ab1 --- /dev/null +++ b/dimos/msgs/helpers.py @@ -0,0 +1,53 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from functools import lru_cache +import importlib +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dimos.msgs import DimosMsg + + +@lru_cache(maxsize=256) +def resolve_msg_type(type_name: str) -> type[DimosMsg] | None: + """Resolve a message type name to its class. + + Args: + type_name: Type name in format "module.ClassName" (e.g., "geometry_msgs.Vector3") + + Returns: + The message class or None if not found. + """ + try: + module_name, class_name = type_name.rsplit(".", 1) + except ValueError: + return None + + # Try different import paths + import_paths = [ + f"dimos.msgs.{module_name}", + f"dimos_lcm.{module_name}", + ] + + for path in import_paths: + try: + module = importlib.import_module(path) + return getattr(module, class_name) # type: ignore[no-any-return] + except (ImportError, AttributeError): + continue + + return None diff --git a/dimos/msgs/nav_msgs/OccupancyGrid.py b/dimos/msgs/nav_msgs/OccupancyGrid.py index 3876b44fab..d45e1b6232 100644 --- a/dimos/msgs/nav_msgs/OccupancyGrid.py +++ b/dimos/msgs/nav_msgs/OccupancyGrid.py @@ -17,7 +17,7 @@ from enum import IntEnum from functools import lru_cache import time -from typing import TYPE_CHECKING, Any, BinaryIO +from typing import TYPE_CHECKING, BinaryIO from dimos_lcm.nav_msgs import ( MapMetaData, @@ -27,7 +27,6 @@ import matplotlib.pyplot as plt import numpy as np from PIL import Image -import rerun as rr from dimos.msgs.geometry_msgs import Pose, Vector3, VectorLike from dimos.types.timestamped import Timestamped @@ -43,6 +42,7 @@ def _get_matplotlib_cmap(name: str): # type: ignore[no-untyped-def] from pathlib import Path from numpy.typing import NDArray + from rerun._baseclasses import Archetype class CostValues(IntEnum): @@ -427,256 +427,166 @@ def cell_value(self, world_position: Vector3) -> int: return int(self.grid[y, x]) - def to_rerun( # type: ignore[no-untyped-def] + def _generate_rgba_texture( self, colormap: str | None = None, - mode: 
str = "image", - z_offset: float = 0.01, - **kwargs: Any, - ): # type: ignore[no-untyped-def] - """Convert to Rerun visualization format. + opacity: float = 1.0, + cost_range: tuple[int, int] | None = None, + background: str | None = None, + ) -> NDArray[np.uint8]: + """Generate RGBA texture for the occupancy grid. Args: - colormap: Optional colormap name (e.g., "RdBu_r" for blue=free, red=occupied). - If None, uses grayscale for image mode or default colors for 3D modes. - mode: Visualization mode: - - "image": 2D grayscale/colored image (default) - - "mesh": 3D textured plane overlay on floor - - "points": 3D points for occupied cells only - z_offset: Height offset for 3D modes (default 0.01m above floor) - **kwargs: Additional args (ignored for compatibility) + colormap: Optional matplotlib colormap name. + opacity: Blend factor (0.0 to 1.0). Blends towards background color. + cost_range: Optional (min, max) cost range. Cells outside range use background. + background: Hex color for background (e.g. "#484981"). Default is black. Returns: - Rerun archetype for logging (rr.Image, rr.Mesh3D, or rr.Points3D) - - The visualization uses: - - Free space (value 0): white/blue - - Unknown space (value -1): gray/transparent - - Occupied space (value > 0): black/red with gradient + RGBA numpy array of shape (height, width, 4). + Note: NOT flipped - caller handles orientation. 
""" - if self.grid.size == 0: - if mode == "image": - return rr.Image(np.zeros((1, 1), dtype=np.uint8), color_model="L") - elif mode == "mesh": - return rr.Mesh3D(vertex_positions=[]) - else: - return rr.Points3D([]) - - if mode == "points": - return self._to_rerun_points(colormap, z_offset) - elif mode == "mesh": - return self._to_rerun_mesh(colormap, z_offset) + # Parse background hex to RGB + if background is not None: + bg = background.lstrip("#") + bg_rgb = np.array([int(bg[i : i + 2], 16) for i in (0, 2, 4)], dtype=np.float32) else: - return self._to_rerun_image(colormap) - - def _to_rerun_image(self, colormap: str | None = None): # type: ignore[no-untyped-def] - """Convert to 2D image visualization.""" - # Use existing cached visualization functions for supported palettes - if colormap in ("turbo", "rainbow"): - from dimos.mapping.occupancy.visualizations import rainbow_image, turbo_image - - if colormap == "turbo": - bgr_image = turbo_image(self.grid) - else: - bgr_image = rainbow_image(self.grid) + bg_rgb = np.array([0, 0, 0], dtype=np.float32) - # Convert BGR to RGB and flip for world coordinates - rgb_image = np.flipud(bgr_image[:, :, ::-1]) - return rr.Image(rgb_image, color_model="RGB") + # Determine which cells are in range (if cost_range specified) + if cost_range is not None: + in_range_mask = (self.grid >= cost_range[0]) & (self.grid <= cost_range[1]) + else: + in_range_mask = None if colormap is not None: - # Use matplotlib colormap (cached for performance) cmap = _get_matplotlib_cmap(colormap) - grid_float = self.grid.astype(np.float32) - # Create RGBA image vis = np.zeros((self.height, self.width, 4), dtype=np.uint8) - # Free space: low cost (blue in RdBu_r) free_mask = self.grid == 0 - # Occupied: high cost (red in RdBu_r) occupied_mask = self.grid > 0 - # Unknown: transparent gray - unknown_mask = self.grid == -1 - # Map free to 0, costs to normalized value if np.any(free_mask): - colors_free = (cmap(0.0)[:3] * np.array([255, 255, 
255])).astype(np.uint8) - vis[free_mask, :3] = colors_free + fg = np.array(cmap(0.0)[:3]) * 255 + blended = fg * opacity + bg_rgb * (1 - opacity) + vis[free_mask, :3] = blended.astype(np.uint8) vis[free_mask, 3] = 255 if np.any(occupied_mask): - # Normalize costs 1-100 to 0.5-1.0 range costs = grid_float[occupied_mask] cost_norm = 0.5 + (costs / 100) * 0.5 - colors_occ = (cmap(cost_norm)[:, :3] * 255).astype(np.uint8) - vis[occupied_mask, :3] = colors_occ + fg = cmap(cost_norm)[:, :3] * 255 + blended = fg * opacity + bg_rgb * (1 - opacity) + vis[occupied_mask, :3] = blended.astype(np.uint8) vis[occupied_mask, 3] = 255 - if np.any(unknown_mask): - vis[unknown_mask] = [128, 128, 128, 100] # Semi-transparent gray - - # Flip vertically to match world coordinates (y=0 at bottom) - return rr.Image(np.flipud(vis), color_model="RGBA") + # Unknown cells: always black + unknown_mask = self.grid == -1 + vis[unknown_mask, :3] = 0 + vis[unknown_mask, 3] = 255 - # Grayscale visualization (no colormap) - vis_gray = np.zeros((self.height, self.width), dtype=np.uint8) + # Apply cost_range filter - set out-of-range cells to background + if in_range_mask is not None: + out_of_range = ~in_range_mask & (self.grid != -1) + vis[out_of_range, :3] = bg_rgb.astype(np.uint8) + vis[out_of_range, 3] = 255 - # Free space = white - vis_gray[self.grid == 0] = 255 + return vis - # Unknown = gray - vis_gray[self.grid == -1] = 128 + # Default: Foxglove-style coloring + vis = np.zeros((self.height, self.width, 4), dtype=np.uint8) - # Occupied (100) = black, costs (1-99) = gradient + free_mask = self.grid == 0 occupied_mask = self.grid > 0 + + # Free space: blue-purple #484981, blended with background + fg_free = np.array([72, 73, 129], dtype=np.float32) + blended_free = fg_free * opacity + bg_rgb * (1 - opacity) + vis[free_mask, :3] = blended_free.astype(np.uint8) + vis[free_mask, 3] = 255 + + # Occupied: gradient from blue-purple to black, blended with background if np.any(occupied_mask): - # Map 
1-100 to 127-0 (darker = more occupied) costs = self.grid[occupied_mask].astype(np.float32) - vis_gray[occupied_mask] = (127 * (1 - costs / 100)).astype(np.uint8) - - # Flip vertically to match world coordinates (y=0 at bottom) - return rr.Image(np.flipud(vis_gray), color_model="L") + factor = (1 - costs / 100).clip(0, 1) + fg_occ = np.column_stack([72 * factor, 73 * factor, 129 * factor]) + blended_occ = fg_occ * opacity + bg_rgb * (1 - opacity) + vis[occupied_mask, :3] = blended_occ.astype(np.uint8) + vis[occupied_mask, 3] = 255 + + # Unknown cells: always black + unknown_mask = self.grid == -1 + vis[unknown_mask, :3] = 0 + vis[unknown_mask, 3] = 255 + + # Apply cost_range filter - set out-of-range cells to background + if in_range_mask is not None: + out_of_range = ~in_range_mask & (self.grid != -1) + vis[out_of_range, :3] = bg_rgb.astype(np.uint8) + vis[out_of_range, 3] = 255 + + return vis + + def to_rerun( + self, + colormap: str | None = None, + z_offset: float = 0.01, + opacity: float = 1.0, + cost_range: tuple[int, int] | None = None, + background: str | None = None, + ) -> Archetype: + """Convert to 3D textured mesh overlay on floor plane. + + Uses a single quad with the occupancy grid as a texture. + Much more efficient than per-cell quads (4 vertices vs n_cells*4). 
+ """ + import rerun as rr - def _to_rerun_points(self, colormap: str | None = None, z_offset: float = 0.01): # type: ignore[no-untyped-def] - """Convert to 3D points for occupied cells.""" - # Find occupied cells (cost > 0) - occupied_mask = self.grid > 0 - if not np.any(occupied_mask): - return rr.Points3D([]) + if self.grid.size == 0: + return rr.Mesh3D(vertex_positions=[]) - # Get grid coordinates of occupied cells - gy, gx = np.where(occupied_mask) - costs = self.grid[occupied_mask].astype(np.float32) + # Generate RGBA texture and flip to match world coordinates + # Grid row 0 is at world y=origin (bottom), but texture row 0 is at UV v=0 (top) + rgba = np.flipud(self._generate_rgba_texture(colormap, opacity, cost_range, background)) - # Convert to world coordinates + # Single quad covering entire grid ox = self.origin.position.x oy = self.origin.position.y - wx = ox + (gx + 0.5) * self.resolution - wy = oy + (gy + 0.5) * self.resolution - wz = np.full_like(wx, z_offset) - - points = np.column_stack([wx, wy, wz]) - - # Determine colors - if colormap is not None: - # Normalize costs to 0-1 range - cost_norm = costs / 100.0 - cmap = _get_matplotlib_cmap(colormap) - point_colors = (cmap(cost_norm)[:, :3] * 255).astype(np.uint8) - else: - # Default: red gradient based on cost - intensity = (costs / 100.0 * 255).astype(np.uint8) - point_colors = np.column_stack( - [intensity, np.zeros_like(intensity), np.zeros_like(intensity)] - ) - - return rr.Points3D( - positions=points, - radii=self.resolution / 2, - colors=point_colors, + w = self.width * self.resolution + h = self.height * self.resolution + + vertices = np.array( + [ + [ox, oy, z_offset], # 0: bottom-left (world) + [ox + w, oy, z_offset], # 1: bottom-right + [ox + w, oy + h, z_offset], # 2: top-right + [ox, oy + h, z_offset], # 3: top-left + ], + dtype=np.float32, ) - def _to_rerun_mesh(self, colormap: str | None = None, z_offset: float = 0.01): # type: ignore[no-untyped-def] - """Convert to 3D mesh overlay on 
floor plane. - - Only renders known cells (free or occupied), skipping unknown cells. - Uses per-vertex colors for proper alpha blending. - Fully vectorized for performance (~100x faster than loop version). - """ - # Only render known cells (not unknown = -1) - known_mask = self.grid != -1 - if not np.any(known_mask): - return rr.Mesh3D(vertex_positions=[]) - - # Get grid coordinates of known cells - gy, gx = np.where(known_mask) - n_cells = len(gy) - - ox = self.origin.position.x - oy = self.origin.position.y - r = self.resolution - - # === VECTORIZED VERTEX GENERATION === - # World positions of cell corners (bottom-left of each cell) - wx = ox + gx.astype(np.float32) * r - wy = oy + gy.astype(np.float32) * r - - # Each cell has 4 vertices: (wx,wy), (wx+r,wy), (wx+r,wy+r), (wx,wy+r) - # Shape: (n_cells, 4, 3) - vertices = np.zeros((n_cells, 4, 3), dtype=np.float32) - vertices[:, 0, 0] = wx - vertices[:, 0, 1] = wy - vertices[:, 0, 2] = z_offset - vertices[:, 1, 0] = wx + r - vertices[:, 1, 1] = wy - vertices[:, 1, 2] = z_offset - vertices[:, 2, 0] = wx + r - vertices[:, 2, 1] = wy + r - vertices[:, 2, 2] = z_offset - vertices[:, 3, 0] = wx - vertices[:, 3, 1] = wy + r - vertices[:, 3, 2] = z_offset - # Flatten to (n_cells*4, 3) - flat_vertices = vertices.reshape(-1, 3) - - # === VECTORIZED INDEX GENERATION === - # Base vertex indices for each cell: [0, 4, 8, 12, ...] 
- base_v = np.arange(n_cells, dtype=np.uint32) * 4 - # Two triangles per cell: (0,1,2) and (0,2,3) relative to base - indices = np.zeros((n_cells, 2, 3), dtype=np.uint32) - indices[:, 0, 0] = base_v - indices[:, 0, 1] = base_v + 1 - indices[:, 0, 2] = base_v + 2 - indices[:, 1, 0] = base_v - indices[:, 1, 1] = base_v + 2 - indices[:, 1, 2] = base_v + 3 - # Flatten to (n_cells*2, 3) - flat_indices = indices.reshape(-1, 3) - - # === VECTORIZED COLOR GENERATION === - cell_values = self.grid[gy, gx] # Get all cell values at once - - if colormap: - cmap = _get_matplotlib_cmap(colormap) - # Normalize costs: free(0) -> 0.0, cost(1-100) -> 0.5-1.0 - cost_norm = np.where(cell_values == 0, 0.0, 0.5 + (cell_values / 100) * 0.5) - # Sample colormap for all cells at once (returns Nx4 RGBA float) - rgba_float = cmap(cost_norm)[:, :3] # Drop alpha, we set our own - rgb = (rgba_float * 255).astype(np.uint8) - # Alpha: 180 for free, 220 for occupied - alpha = np.where(cell_values == 0, 180, 220).astype(np.uint8) - else: - # Foxglove-style coloring: blue-purple for free, black for occupied - # Free (0): #484981 = RGB(72, 73, 129) - # Occupied (100): #000000 = RGB(0, 0, 0) - rgb = np.zeros((n_cells, 3), dtype=np.uint8) - is_free = cell_values == 0 - is_occupied = ~is_free - - # Free space: blue-purple #484981 - rgb[is_free] = [72, 73, 129] - - # Occupied: gradient from blue-purple to black based on cost - # cost 1 -> mostly blue-purple, cost 100 -> black - if np.any(is_occupied): - costs = cell_values[is_occupied].astype(np.float32) - # Linear interpolation: (1 - cost/100) * blue-purple - factor = (1 - costs / 100).clip(0, 1) - rgb[is_occupied, 0] = (72 * factor).astype(np.uint8) - rgb[is_occupied, 1] = (73 * factor).astype(np.uint8) - rgb[is_occupied, 2] = (129 * factor).astype(np.uint8) - - alpha = np.where(is_free, 180, 220).astype(np.uint8) - - # Combine RGB and alpha into RGBA - colors_per_cell = np.column_stack([rgb, alpha]) # (n_cells, 4) - # Repeat each color 4 times (one per 
vertex) - colors = np.repeat(colors_per_cell, 4, axis=0) # (n_cells*4, 4) + indices = np.array([[0, 1, 2], [0, 2, 3]], dtype=np.uint32) + + # UV coords: Rerun uses top-left origin for textures + # Grid row 0 is at world y=oy (bottom), row H-1 at y=oy+h (top) + # Texture row 0 = grid row 0, so: + # world bottom (v0,v1) -> texture v=1 (bottom of texture) + # world top (v2,v3) -> texture v=0 (top of texture) + texcoords = np.array( + [ + [0.0, 1.0], # v0: bottom-left world -> bottom-left tex + [1.0, 1.0], # v1: bottom-right world -> bottom-right tex + [1.0, 0.0], # v2: top-right world -> top-right tex + [0.0, 0.0], # v3: top-left world -> top-left tex + ], + dtype=np.float32, + ) return rr.Mesh3D( - vertex_positions=flat_vertices, - triangle_indices=flat_indices, - vertex_colors=colors, + vertex_positions=vertices, + triangle_indices=indices, + vertex_texcoords=texcoords, + albedo_texture=rgba, ) diff --git a/dimos/msgs/nav_msgs/Odometry.py b/dimos/msgs/nav_msgs/Odometry.py index 3cdd631aa7..a958f8dba0 100644 --- a/dimos/msgs/nav_msgs/Odometry.py +++ b/dimos/msgs/nav_msgs/Odometry.py @@ -15,16 +15,13 @@ from __future__ import annotations import time -from typing import TYPE_CHECKING, TypeAlias +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from rerun._baseclasses import Archetype from dimos_lcm.nav_msgs import Odometry as LCMOdometry import numpy as np -from plum import dispatch - -try: - from nav_msgs.msg import Odometry as ROSOdometry # type: ignore[attr-defined] -except ImportError: - ROSOdometry = None # type: ignore[assignment, misc] from dimos.msgs.geometry_msgs.Pose import Pose from dimos.msgs.geometry_msgs.PoseWithCovariance import PoseWithCovariance @@ -33,28 +30,15 @@ from dimos.types.timestamped import Timestamped if TYPE_CHECKING: + from dimos.msgs.geometry_msgs.Quaternion import Quaternion from dimos.msgs.geometry_msgs.Vector3 import Vector3 -# Types that can be converted to/from Odometry -OdometryConvertable: TypeAlias = ( - LCMOdometry | 
dict[str, float | str | PoseWithCovariance | TwistWithCovariance | Pose | Twist] -) +class Odometry(Timestamped): + """Odometry message with pose, twist, and frame information.""" -def sec_nsec(ts): # type: ignore[no-untyped-def] - s = int(ts) - return [s, int((ts - s) * 1_000_000_000)] - - -class Odometry(LCMOdometry, Timestamped): # type: ignore[misc] - pose: PoseWithCovariance - twist: TwistWithCovariance msg_name = "nav_msgs.Odometry" - ts: float - frame_id: str - child_frame_id: str - @dispatch def __init__( self, ts: float = 0.0, @@ -63,234 +47,113 @@ def __init__( pose: PoseWithCovariance | Pose | None = None, twist: TwistWithCovariance | Twist | None = None, ) -> None: - """Initialize with timestamp, frame IDs, pose and twist. - - Args: - ts: Timestamp in seconds (defaults to current time if 0) - frame_id: Reference frame ID (e.g., "odom", "map") - child_frame_id: Child frame ID (e.g., "base_link", "base_footprint") - pose: Pose with covariance (or just Pose, covariance will be zero) - twist: Twist with covariance (or just Twist, covariance will be zero) - """ self.ts = ts if ts != 0 else time.time() self.frame_id = frame_id self.child_frame_id = child_frame_id - # Handle pose if pose is None: self.pose = PoseWithCovariance() - elif isinstance(pose, PoseWithCovariance): - self.pose = pose elif isinstance(pose, Pose): self.pose = PoseWithCovariance(pose) else: - self.pose = PoseWithCovariance(Pose(pose)) - - # Handle twist - if twist is None: - self.twist = TwistWithCovariance() - elif isinstance(twist, TwistWithCovariance): - self.twist = twist - elif isinstance(twist, Twist): - self.twist = TwistWithCovariance(twist) - else: - self.twist = TwistWithCovariance(Twist(twist)) - - @dispatch # type: ignore[no-redef] - def __init__(self, odometry: Odometry) -> None: - """Initialize from another Odometry (copy constructor).""" - self.ts = odometry.ts - self.frame_id = odometry.frame_id - self.child_frame_id = odometry.child_frame_id - self.pose = 
PoseWithCovariance(odometry.pose) - self.twist = TwistWithCovariance(odometry.twist) - - @dispatch # type: ignore[no-redef] - def __init__(self, lcm_odometry: LCMOdometry) -> None: - """Initialize from an LCM Odometry.""" - self.ts = lcm_odometry.header.stamp.sec + (lcm_odometry.header.stamp.nsec / 1_000_000_000) - self.frame_id = lcm_odometry.header.frame_id - self.child_frame_id = lcm_odometry.child_frame_id - self.pose = PoseWithCovariance(lcm_odometry.pose) - self.twist = TwistWithCovariance(lcm_odometry.twist) - - @dispatch # type: ignore[no-redef] - def __init__( - self, - odometry_dict: dict[ - str, float | str | PoseWithCovariance | TwistWithCovariance | Pose | Twist - ], - ) -> None: - """Initialize from a dictionary.""" - self.ts = odometry_dict.get("ts", odometry_dict.get("timestamp", time.time())) - self.frame_id = odometry_dict.get("frame_id", "") - self.child_frame_id = odometry_dict.get("child_frame_id", "") - - # Handle pose - pose = odometry_dict.get("pose") - if pose is None: - self.pose = PoseWithCovariance() - elif isinstance(pose, PoseWithCovariance): self.pose = pose - elif isinstance(pose, Pose): - self.pose = PoseWithCovariance(pose) - else: - self.pose = PoseWithCovariance(Pose(pose)) - # Handle twist - twist = odometry_dict.get("twist") if twist is None: self.twist = TwistWithCovariance() - elif isinstance(twist, TwistWithCovariance): - self.twist = twist elif isinstance(twist, Twist): self.twist = TwistWithCovariance(twist) else: - self.twist = TwistWithCovariance(Twist(twist)) + self.twist = twist + + # -- Convenience properties -- @property def position(self) -> Vector3: - """Get position from pose.""" return self.pose.position @property - def orientation(self): # type: ignore[no-untyped-def] - """Get orientation from pose.""" + def orientation(self) -> Quaternion: return self.pose.orientation @property def linear_velocity(self) -> Vector3: - """Get linear velocity from twist.""" return self.twist.linear @property def 
angular_velocity(self) -> Vector3: - """Get angular velocity from twist.""" return self.twist.angular @property def x(self) -> float: - """X position.""" return self.pose.x @property def y(self) -> float: - """Y position.""" return self.pose.y @property def z(self) -> float: - """Z position.""" return self.pose.z @property def vx(self) -> float: - """Linear velocity in X.""" return self.twist.linear.x @property def vy(self) -> float: - """Linear velocity in Y.""" return self.twist.linear.y @property def vz(self) -> float: - """Linear velocity in Z.""" return self.twist.linear.z @property def wx(self) -> float: - """Angular velocity around X (roll rate).""" return self.twist.angular.x @property def wy(self) -> float: - """Angular velocity around Y (pitch rate).""" return self.twist.angular.y @property def wz(self) -> float: - """Angular velocity around Z (yaw rate).""" return self.twist.angular.z @property def roll(self) -> float: - """Roll angle in radians.""" return self.pose.roll @property def pitch(self) -> float: - """Pitch angle in radians.""" return self.pose.pitch @property def yaw(self) -> float: - """Yaw angle in radians.""" return self.pose.yaw - def __repr__(self) -> str: - return ( - f"Odometry(ts={self.ts:.6f}, frame_id='{self.frame_id}', " - f"child_frame_id='{self.child_frame_id}', pose={self.pose!r}, twist={self.twist!r})" - ) - - def __str__(self) -> str: - return ( - f"Odometry:\n" - f" Timestamp: {self.ts:.6f}\n" - f" Frame: {self.frame_id} -> {self.child_frame_id}\n" - f" Position: [{self.x:.3f}, {self.y:.3f}, {self.z:.3f}]\n" - f" Orientation: [roll={self.roll:.3f}, pitch={self.pitch:.3f}, yaw={self.yaw:.3f}]\n" - f" Linear Velocity: [{self.vx:.3f}, {self.vy:.3f}, {self.vz:.3f}]\n" - f" Angular Velocity: [{self.wx:.3f}, {self.wy:.3f}, {self.wz:.3f}]" - ) - - def __eq__(self, other) -> bool: # type: ignore[no-untyped-def] - """Check if two Odometry messages are equal.""" - if not isinstance(other, Odometry): - return False - return ( - 
abs(self.ts - other.ts) < 1e-6 - and self.frame_id == other.frame_id - and self.child_frame_id == other.child_frame_id - and self.pose == other.pose - and self.twist == other.twist - ) + # -- Serialization -- def lcm_encode(self) -> bytes: - """Encode to LCM binary format.""" lcm_msg = LCMOdometry() - # Set header - [lcm_msg.header.stamp.sec, lcm_msg.header.stamp.nsec] = sec_nsec(self.ts) # type: ignore[no-untyped-call] + lcm_msg.header.stamp.sec, lcm_msg.header.stamp.nsec = self.ros_timestamp() lcm_msg.header.frame_id = self.frame_id lcm_msg.child_frame_id = self.child_frame_id - # Set pose with covariance lcm_msg.pose.pose = self.pose.pose - if isinstance(self.pose.covariance, np.ndarray): # type: ignore[has-type] - lcm_msg.pose.covariance = self.pose.covariance.tolist() # type: ignore[has-type] - else: - lcm_msg.pose.covariance = list(self.pose.covariance) # type: ignore[has-type] + lcm_msg.pose.covariance = list(np.asarray(self.pose.covariance)) - # Set twist with covariance lcm_msg.twist.twist = self.twist.twist - if isinstance(self.twist.covariance, np.ndarray): # type: ignore[has-type] - lcm_msg.twist.covariance = self.twist.covariance.tolist() # type: ignore[has-type] - else: - lcm_msg.twist.covariance = list(self.twist.covariance) # type: ignore[has-type] + lcm_msg.twist.covariance = list(np.asarray(self.twist.covariance)) return lcm_msg.lcm_encode() # type: ignore[no-any-return] @classmethod def lcm_decode(cls, data: bytes) -> Odometry: - """Decode from LCM binary format.""" lcm_msg = LCMOdometry.lcm_decode(data) - # Extract timestamp ts = lcm_msg.header.stamp.sec + (lcm_msg.header.stamp.nsec / 1_000_000_000) - # Create pose with covariance pose = Pose( position=[ lcm_msg.pose.pose.position.x, @@ -304,9 +167,6 @@ def lcm_decode(cls, data: bytes) -> Odometry: lcm_msg.pose.pose.orientation.w, ], ) - pose_with_cov = PoseWithCovariance(pose, lcm_msg.pose.covariance) - - # Create twist with covariance twist = Twist( linear=[ lcm_msg.twist.twist.linear.x, @@ 
-319,63 +179,57 @@ def lcm_decode(cls, data: bytes) -> Odometry: lcm_msg.twist.twist.angular.z, ], ) - twist_with_cov = TwistWithCovariance(twist, lcm_msg.twist.covariance) return cls( ts=ts, frame_id=lcm_msg.header.frame_id, child_frame_id=lcm_msg.child_frame_id, - pose=pose_with_cov, - twist=twist_with_cov, + pose=PoseWithCovariance(pose, lcm_msg.pose.covariance), + twist=TwistWithCovariance(twist, lcm_msg.twist.covariance), ) - @classmethod - def from_ros_msg(cls, ros_msg: ROSOdometry) -> Odometry: - """Create an Odometry from a ROS nav_msgs/Odometry message. - - Args: - ros_msg: ROS Odometry message - - Returns: - Odometry instance - """ + # -- Comparison / display -- - # Convert timestamp from ROS header - ts = ros_msg.header.stamp.sec + (ros_msg.header.stamp.nanosec / 1_000_000_000) - - # Convert pose and twist with covariance - pose_with_cov = PoseWithCovariance.from_ros_msg(ros_msg.pose) - twist_with_cov = TwistWithCovariance.from_ros_msg(ros_msg.twist) - - return cls( - ts=ts, - frame_id=ros_msg.header.frame_id, - child_frame_id=ros_msg.child_frame_id, - pose=pose_with_cov, - twist=twist_with_cov, + def __eq__(self, other: object) -> bool: + if not isinstance(other, Odometry): + return False + return ( + abs(self.ts - other.ts) < 1e-6 + and self.frame_id == other.frame_id + and self.child_frame_id == other.child_frame_id + and self.pose == other.pose + and self.twist == other.twist ) - def to_ros_msg(self) -> ROSOdometry: - """Convert to a ROS nav_msgs/Odometry message. 
- - Returns: - ROS Odometry message - """ - - ros_msg = ROSOdometry() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1_000_000_000) - - # Set child frame ID - ros_msg.child_frame_id = self.child_frame_id - - # Set pose with covariance - ros_msg.pose = self.pose.to_ros_msg() + def __repr__(self) -> str: + return ( + f"Odometry(ts={self.ts:.6f}, frame_id='{self.frame_id}', " + f"child_frame_id='{self.child_frame_id}', pose={self.pose!r}, twist={self.twist!r})" + ) - # Set twist with covariance - ros_msg.twist = self.twist.to_ros_msg() + def __str__(self) -> str: + return ( + f"Odometry:\n" + f" Timestamp: {self.ts:.6f}\n" + f" Frame: {self.frame_id} -> {self.child_frame_id}\n" + f" Position: [{self.x:.3f}, {self.y:.3f}, {self.z:.3f}]\n" + f" Orientation: [roll={self.roll:.3f}, pitch={self.pitch:.3f}, yaw={self.yaw:.3f}]\n" + f" Linear Velocity: [{self.vx:.3f}, {self.vy:.3f}, {self.vz:.3f}]\n" + f" Angular Velocity: [{self.wx:.3f}, {self.wy:.3f}, {self.wz:.3f}]" + ) - return ros_msg + def to_rerun(self) -> Archetype: + """Convert to rerun Transform3D for visualizing the pose.""" + import rerun as rr + + return rr.Transform3D( + translation=[self.x, self.y, self.z], + rotation=rr.Quaternion( + xyzw=[ + self.orientation.x, + self.orientation.y, + self.orientation.z, + self.orientation.w, + ] + ), + ) diff --git a/dimos/msgs/nav_msgs/Path.py b/dimos/msgs/nav_msgs/Path.py index e92eab17a4..1582c4b775 100644 --- a/dimos/msgs/nav_msgs/Path.py +++ b/dimos/msgs/nav_msgs/Path.py @@ -26,18 +26,14 @@ from dimos_lcm.nav_msgs import Path as LCMPath from dimos_lcm.std_msgs import Header as LCMHeader, Time as LCMTime -try: - from nav_msgs.msg import Path as ROSPath # type: ignore[attr-defined] -except ImportError: - ROSPath = None # type: ignore[assignment, misc] -import rerun as rr - from dimos.msgs.geometry_msgs.PoseStamped import 
PoseStamped from dimos.types.timestamped import Timestamped if TYPE_CHECKING: from collections.abc import Iterator + from rerun._baseclasses import Archetype + def sec_nsec(ts): # type: ignore[no-untyped-def] s = int(ts) @@ -192,53 +188,12 @@ def clear(self) -> None: """Clear all poses from this path (mutable).""" self.poses.clear() - @classmethod - def from_ros_msg(cls, ros_msg: ROSPath) -> Path: - """Create a Path from a ROS nav_msgs/Path message. - - Args: - ros_msg: ROS Path message - - Returns: - Path instance - """ - - # Convert timestamp from ROS header - ts = ros_msg.header.stamp.sec + (ros_msg.header.stamp.nanosec / 1_000_000_000) - - # Convert poses - poses = [] - for ros_pose_stamped in ros_msg.poses: - poses.append(PoseStamped.from_ros_msg(ros_pose_stamped)) - - return cls(ts=ts, frame_id=ros_msg.header.frame_id, poses=poses) - - def to_ros_msg(self) -> ROSPath: - """Convert to a ROS nav_msgs/Path message. - - Returns: - ROS Path message - """ - - ros_msg = ROSPath() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1_000_000_000) - - # Convert poses - for pose in self.poses: - ros_msg.poses.append(pose.to_ros_msg()) - - return ros_msg - - def to_rerun( # type: ignore[no-untyped-def] + def to_rerun( self, color: tuple[int, int, int] = (0, 255, 128), - z_offset: float = 0.2, + z_offset: float = 0.5, radii: float = 0.05, - ): + ) -> Archetype: """Convert to rerun LineStrips3D format. 
Args: @@ -249,6 +204,8 @@ def to_rerun( # type: ignore[no-untyped-def] Returns: rr.LineStrips3D archetype for logging to rerun """ + import rerun as rr + if not self.poses: return rr.LineStrips3D([]) diff --git a/dimos/msgs/nav_msgs/test_OccupancyGrid.py b/dimos/msgs/nav_msgs/test_OccupancyGrid.py index 262a872c68..29ef196de8 100644 --- a/dimos/msgs/nav_msgs/test_OccupancyGrid.py +++ b/dimos/msgs/nav_msgs/test_OccupancyGrid.py @@ -26,7 +26,7 @@ from dimos.msgs.geometry_msgs import Pose from dimos.msgs.nav_msgs import OccupancyGrid from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.protocol.pubsub.lcmpubsub import LCM, Topic +from dimos.protocol.pubsub.impl.lcmpubsub import LCM, Topic from dimos.utils.data import get_data diff --git a/dimos/msgs/nav_msgs/test_Odometry.py b/dimos/msgs/nav_msgs/test_Odometry.py index ecdc83c6b4..c532aed1ca 100644 --- a/dimos/msgs/nav_msgs/test_Odometry.py +++ b/dimos/msgs/nav_msgs/test_Odometry.py @@ -15,33 +15,6 @@ import time import numpy as np -import pytest - -try: - from builtin_interfaces.msg import Time as ROSTime - from geometry_msgs.msg import ( - Point as ROSPoint, - Pose as ROSPose, - PoseWithCovariance as ROSPoseWithCovariance, - Quaternion as ROSQuaternion, - Twist as ROSTwist, - TwistWithCovariance as ROSTwistWithCovariance, - Vector3 as ROSVector3, - ) - from nav_msgs.msg import Odometry as ROSOdometry - from std_msgs.msg import Header as ROSHeader -except ImportError: - ROSTwist = None - ROSHeader = None - ROSPose = None - ROSPoseWithCovariance = None - ROSQuaternion = None - ROSOdometry = None - ROSPoint = None - ROSTime = None - ROSTwistWithCovariance = None - ROSVector3 = None - from dimos.msgs.geometry_msgs.Pose import Pose from dimos.msgs.geometry_msgs.PoseWithCovariance import PoseWithCovariance @@ -52,68 +25,34 @@ def test_odometry_default_init() -> None: - """Test default initialization.""" - if ROSVector3 is None: - pytest.skip("ROS not available") - if ROSTwistWithCovariance is None: - 
pytest.skip("ROS not available") - if ROSTime is None: - pytest.skip("ROS not available") - if ROSPoint is None: - pytest.skip("ROS not available") - if ROSOdometry is None: - pytest.skip("ROS not available") - if ROSQuaternion is None: - pytest.skip("ROS not available") - if ROSPoseWithCovariance is None: - pytest.skip("ROS not available") - if ROSPose is None: - pytest.skip("ROS not available") - if ROSHeader is None: - pytest.skip("ROS not available") - if ROSTwist is None: - pytest.skip("ROS not available") odom = Odometry() - # Should have current timestamp assert odom.ts > 0 assert odom.frame_id == "" assert odom.child_frame_id == "" - # Pose should be at origin with identity orientation assert odom.pose.position.x == 0.0 assert odom.pose.position.y == 0.0 assert odom.pose.position.z == 0.0 assert odom.pose.orientation.w == 1.0 - # Twist should be zero assert odom.twist.linear.x == 0.0 - assert odom.twist.linear.y == 0.0 - assert odom.twist.linear.z == 0.0 assert odom.twist.angular.x == 0.0 - assert odom.twist.angular.y == 0.0 - assert odom.twist.angular.z == 0.0 - # Covariances should be zero assert np.all(odom.pose.covariance == 0.0) assert np.all(odom.twist.covariance == 0.0) def test_odometry_with_frames() -> None: - """Test initialization with frame IDs.""" ts = 1234567890.123456 - frame_id = "odom" - child_frame_id = "base_link" - - odom = Odometry(ts=ts, frame_id=frame_id, child_frame_id=child_frame_id) + odom = Odometry(ts=ts, frame_id="odom", child_frame_id="base_link") assert odom.ts == ts - assert odom.frame_id == frame_id - assert odom.child_frame_id == child_frame_id + assert odom.frame_id == "odom" + assert odom.child_frame_id == "base_link" def test_odometry_with_pose_and_twist() -> None: - """Test initialization with pose and twist.""" pose = Pose(1.0, 2.0, 3.0, 0.1, 0.2, 0.3, 0.9) twist = Twist(Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.1)) @@ -127,7 +66,6 @@ def test_odometry_with_pose_and_twist() -> None: def 
test_odometry_with_covariances() -> None: - """Test initialization with pose and twist with covariances.""" pose = Pose(1.0, 2.0, 3.0) pose_cov = np.arange(36, dtype=float) pose_with_cov = PoseWithCovariance(pose, pose_cov) @@ -150,88 +88,35 @@ def test_odometry_with_covariances() -> None: assert np.array_equal(odom.twist.covariance, twist_cov) -def test_odometry_copy_constructor() -> None: - """Test copy constructor.""" - original = Odometry( - ts=1000.0, - frame_id="odom", - child_frame_id="base_link", - pose=Pose(1.0, 2.0, 3.0), - twist=Twist(Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.1)), - ) - - copy = Odometry(original) - - assert copy == original - assert copy is not original - assert copy.pose is not original.pose - assert copy.twist is not original.twist - - -def test_odometry_dict_init() -> None: - """Test initialization from dictionary.""" - odom_dict = { - "ts": 1000.0, - "frame_id": "odom", - "child_frame_id": "base_link", - "pose": Pose(1.0, 2.0, 3.0), - "twist": Twist(Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.1)), - } - - odom = Odometry(odom_dict) - - assert odom.ts == 1000.0 - assert odom.frame_id == "odom" - assert odom.child_frame_id == "base_link" - assert odom.pose.position.x == 1.0 - assert odom.twist.linear.x == 0.5 - - def test_odometry_properties() -> None: - """Test convenience properties.""" pose = Pose(1.0, 2.0, 3.0, 0.1, 0.2, 0.3, 0.9) twist = Twist(Vector3(0.5, 0.6, 0.7), Vector3(0.1, 0.2, 0.3)) odom = Odometry(ts=1000.0, frame_id="odom", child_frame_id="base_link", pose=pose, twist=twist) - # Position properties assert odom.x == 1.0 assert odom.y == 2.0 assert odom.z == 3.0 assert odom.position.x == 1.0 - assert odom.position.y == 2.0 - assert odom.position.z == 3.0 - # Orientation properties assert odom.orientation.x == 0.1 - assert odom.orientation.y == 0.2 - assert odom.orientation.z == 0.3 - assert odom.orientation.w == 0.9 - # Velocity properties assert odom.vx == 0.5 assert odom.vy == 0.6 assert odom.vz == 0.7 assert 
odom.linear_velocity.x == 0.5 - assert odom.linear_velocity.y == 0.6 - assert odom.linear_velocity.z == 0.7 - # Angular velocity properties assert odom.wx == 0.1 assert odom.wy == 0.2 assert odom.wz == 0.3 assert odom.angular_velocity.x == 0.1 - assert odom.angular_velocity.y == 0.2 - assert odom.angular_velocity.z == 0.3 - # Euler angles assert odom.roll == pose.roll assert odom.pitch == pose.pitch assert odom.yaw == pose.yaw def test_odometry_str_repr() -> None: - """Test string representations.""" odom = Odometry( ts=1234567890.123456, frame_id="odom", @@ -240,22 +125,16 @@ def test_odometry_str_repr() -> None: twist=Twist(Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.1)), ) - repr_str = repr(odom) - assert "Odometry" in repr_str - assert "1234567890.123456" in repr_str - assert "odom" in repr_str - assert "base_link" in repr_str + assert "Odometry" in repr(odom) + assert "1234567890.123456" in repr(odom) - str_repr = str(odom) - assert "Odometry" in str_repr - assert "odom -> base_link" in str_repr - assert "1.234" in str_repr - assert "0.500" in str_repr + s = str(odom) + assert "odom -> base_link" in s + assert "1.234" in s def test_odometry_equality() -> None: - """Test equality comparison.""" - odom1 = Odometry( + kwargs = dict( ts=1000.0, frame_id="odom", child_frame_id="base_link", @@ -263,29 +142,12 @@ def test_odometry_equality() -> None: twist=Twist(Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.1)), ) - odom2 = Odometry( - ts=1000.0, - frame_id="odom", - child_frame_id="base_link", - pose=Pose(1.0, 2.0, 3.0), - twist=Twist(Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.1)), - ) - - odom3 = Odometry( - ts=1000.0, - frame_id="odom", - child_frame_id="base_link", - pose=Pose(1.1, 2.0, 3.0), # Different position - twist=Twist(Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.1)), - ) - - assert odom1 == odom2 - assert odom1 != odom3 - assert odom1 != "not an odometry" + assert Odometry(**kwargs) == Odometry(**kwargs) + assert Odometry(**kwargs) != Odometry(**{**kwargs, 
"pose": Pose(1.1, 2.0, 3.0)}) + assert Odometry(**kwargs) != "not an odometry" -def test_odometry_lcm_encode_decode() -> None: - """Test LCM encoding and decoding.""" +def test_odometry_lcm_roundtrip() -> None: pose = Pose(1.0, 2.0, 3.0, 0.1, 0.2, 0.3, 0.9) pose_cov = np.arange(36, dtype=float) twist = Twist(Vector3(0.5, 0.6, 0.7), Vector3(0.1, 0.2, 0.3)) @@ -299,11 +161,8 @@ def test_odometry_lcm_encode_decode() -> None: twist=TwistWithCovariance(twist, twist_cov), ) - # Encode and decode - binary_msg = source.lcm_encode() - decoded = Odometry.lcm_decode(binary_msg) + decoded = Odometry.lcm_decode(source.lcm_encode()) - # Check values (allowing for timestamp precision loss) assert abs(decoded.ts - source.ts) < 1e-6 assert decoded.frame_id == source.frame_id assert decoded.child_frame_id == source.child_frame_id @@ -311,194 +170,39 @@ def test_odometry_lcm_encode_decode() -> None: assert decoded.twist == source.twist -@pytest.mark.ros -def test_odometry_from_ros_msg() -> None: - """Test creating from ROS message.""" - ros_msg = ROSOdometry() - - # Set header - ros_msg.header = ROSHeader() - ros_msg.header.stamp = ROSTime() - ros_msg.header.stamp.sec = 1234567890 - ros_msg.header.stamp.nanosec = 123456000 - ros_msg.header.frame_id = "odom" - ros_msg.child_frame_id = "base_link" - - # Set pose with covariance - ros_msg.pose = ROSPoseWithCovariance() - ros_msg.pose.pose = ROSPose() - ros_msg.pose.pose.position = ROSPoint(x=1.0, y=2.0, z=3.0) - ros_msg.pose.pose.orientation = ROSQuaternion(x=0.1, y=0.2, z=0.3, w=0.9) - ros_msg.pose.covariance = [float(i) for i in range(36)] - - # Set twist with covariance - ros_msg.twist = ROSTwistWithCovariance() - ros_msg.twist.twist = ROSTwist() - ros_msg.twist.twist.linear = ROSVector3(x=0.5, y=0.6, z=0.7) - ros_msg.twist.twist.angular = ROSVector3(x=0.1, y=0.2, z=0.3) - ros_msg.twist.covariance = [float(i) for i in range(36, 72)] - - odom = Odometry.from_ros_msg(ros_msg) - - assert odom.ts == 1234567890.123456 - assert 
odom.frame_id == "odom" - assert odom.child_frame_id == "base_link" - assert odom.pose.position.x == 1.0 - assert odom.twist.linear.x == 0.5 - assert np.array_equal(odom.pose.covariance, np.arange(36)) - assert np.array_equal(odom.twist.covariance, np.arange(36, 72)) - - -@pytest.mark.ros -def test_odometry_to_ros_msg() -> None: - """Test converting to ROS message.""" - pose = Pose(1.0, 2.0, 3.0, 0.1, 0.2, 0.3, 0.9) - pose_cov = np.arange(36, dtype=float) - twist = Twist(Vector3(0.5, 0.6, 0.7), Vector3(0.1, 0.2, 0.3)) - twist_cov = np.arange(36, 72, dtype=float) - - odom = Odometry( - ts=1234567890.567890, - frame_id="odom", - child_frame_id="base_link", - pose=PoseWithCovariance(pose, pose_cov), - twist=TwistWithCovariance(twist, twist_cov), - ) - - ros_msg = odom.to_ros_msg() - - assert isinstance(ros_msg, ROSOdometry) - assert ros_msg.header.frame_id == "odom" - assert ros_msg.header.stamp.sec == 1234567890 - assert abs(ros_msg.header.stamp.nanosec - 567890000) < 100 # Allow small rounding error - assert ros_msg.child_frame_id == "base_link" - - # Check pose - assert ros_msg.pose.pose.position.x == 1.0 - assert ros_msg.pose.pose.position.y == 2.0 - assert ros_msg.pose.pose.position.z == 3.0 - assert ros_msg.pose.pose.orientation.x == 0.1 - assert ros_msg.pose.pose.orientation.y == 0.2 - assert ros_msg.pose.pose.orientation.z == 0.3 - assert ros_msg.pose.pose.orientation.w == 0.9 - assert list(ros_msg.pose.covariance) == list(range(36)) - - # Check twist - assert ros_msg.twist.twist.linear.x == 0.5 - assert ros_msg.twist.twist.linear.y == 0.6 - assert ros_msg.twist.twist.linear.z == 0.7 - assert ros_msg.twist.twist.angular.x == 0.1 - assert ros_msg.twist.twist.angular.y == 0.2 - assert ros_msg.twist.twist.angular.z == 0.3 - assert list(ros_msg.twist.covariance) == list(range(36, 72)) - - -@pytest.mark.ros -def test_odometry_ros_roundtrip() -> None: - """Test round-trip conversion with ROS messages.""" - pose = Pose(1.5, 2.5, 3.5, 0.15, 0.25, 0.35, 0.85) - 
pose_cov = np.random.rand(36) - twist = Twist(Vector3(0.55, 0.65, 0.75), Vector3(0.15, 0.25, 0.35)) - twist_cov = np.random.rand(36) - - original = Odometry( - ts=2147483647.987654, # Max int32 value for ROS Time.sec - frame_id="world", - child_frame_id="robot", - pose=PoseWithCovariance(pose, pose_cov), - twist=TwistWithCovariance(twist, twist_cov), - ) - - ros_msg = original.to_ros_msg() - restored = Odometry.from_ros_msg(ros_msg) - - # Check values (allowing for timestamp precision loss) - assert abs(restored.ts - original.ts) < 1e-6 - assert restored.frame_id == original.frame_id - assert restored.child_frame_id == original.child_frame_id - assert restored.pose == original.pose - assert restored.twist == original.twist - - def test_odometry_zero_timestamp() -> None: - """Test that zero timestamp gets replaced with current time.""" odom = Odometry(ts=0.0) - - # Should have been replaced with current time assert odom.ts > 0 assert odom.ts <= time.time() def test_odometry_with_just_pose() -> None: - """Test initialization with just a Pose (no covariance).""" - pose = Pose(1.0, 2.0, 3.0) - - odom = Odometry(pose=pose) + odom = Odometry(pose=Pose(1.0, 2.0, 3.0)) assert odom.pose.position.x == 1.0 - assert odom.pose.position.y == 2.0 - assert odom.pose.position.z == 3.0 - assert np.all(odom.pose.covariance == 0.0) # Should have zero covariance - assert np.all(odom.twist.covariance == 0.0) # Twist should also be zero + assert np.all(odom.pose.covariance == 0.0) + assert np.all(odom.twist.covariance == 0.0) def test_odometry_with_just_twist() -> None: - """Test initialization with just a Twist (no covariance).""" - twist = Twist(Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.1)) - - odom = Odometry(twist=twist) + odom = Odometry(twist=Twist(Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.1))) assert odom.twist.linear.x == 0.5 assert odom.twist.angular.z == 0.1 - assert np.all(odom.twist.covariance == 0.0) # Should have zero covariance - assert np.all(odom.pose.covariance == 
0.0) # Pose should also be zero - - -@pytest.mark.ros -@pytest.mark.parametrize( - "frame_id,child_frame_id", - [ - ("odom", "base_link"), - ("map", "odom"), - ("world", "robot"), - ("base_link", "camera_link"), - ("", ""), # Empty frames - ], -) -def test_odometry_frame_combinations(frame_id, child_frame_id) -> None: - """Test various frame ID combinations.""" - odom = Odometry(frame_id=frame_id, child_frame_id=child_frame_id) - - assert odom.frame_id == frame_id - assert odom.child_frame_id == child_frame_id - - # Test roundtrip through ROS - ros_msg = odom.to_ros_msg() - assert ros_msg.header.frame_id == frame_id - assert ros_msg.child_frame_id == child_frame_id - - restored = Odometry.from_ros_msg(ros_msg) - assert restored.frame_id == frame_id - assert restored.child_frame_id == child_frame_id + assert np.all(odom.twist.covariance == 0.0) def test_odometry_typical_robot_scenario() -> None: - """Test a typical robot odometry scenario.""" - # Robot moving forward at 0.5 m/s with slight rotation odom = Odometry( ts=1000.0, frame_id="odom", child_frame_id="base_footprint", - pose=Pose(10.0, 5.0, 0.0, 0.0, 0.0, np.sin(0.1), np.cos(0.1)), # 0.2 rad yaw - twist=Twist( - Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.05) - ), # Moving forward, turning slightly + pose=Pose(10.0, 5.0, 0.0, 0.0, 0.0, np.sin(0.1), np.cos(0.1)), + twist=Twist(Vector3(0.5, 0.0, 0.0), Vector3(0.0, 0.0, 0.05)), ) - # Check we can access all the typical properties assert odom.x == 10.0 assert odom.y == 5.0 - assert odom.z == 0.0 - assert abs(odom.yaw - 0.2) < 0.01 # Approximately 0.2 radians - assert odom.vx == 0.5 # Forward velocity - assert odom.wz == 0.05 # Yaw rate + assert abs(odom.yaw - 0.2) < 0.01 + assert odom.vx == 0.5 + assert odom.wz == 0.05 diff --git a/dimos/msgs/nav_msgs/test_Path.py b/dimos/msgs/nav_msgs/test_Path.py index d933123b2b..9bd0cc92b6 100644 --- a/dimos/msgs/nav_msgs/test_Path.py +++ b/dimos/msgs/nav_msgs/test_Path.py @@ -13,15 +13,6 @@ # limitations under the License. 
-import pytest - -try: - from geometry_msgs.msg import PoseStamped as ROSPoseStamped - from nav_msgs.msg import Path as ROSPath -except ImportError: - ROSPoseStamped = None - ROSPath = None - from dimos.msgs.geometry_msgs.PoseStamped import PoseStamped from dimos.msgs.geometry_msgs.Quaternion import Quaternion from dimos.msgs.nav_msgs.Path import Path @@ -294,98 +285,3 @@ def test_str_representation() -> None: path.push_mut(create_test_pose(1, 1, 0)) path.push_mut(create_test_pose(2, 2, 0)) assert str(path) == "Path(frame_id='map', poses=2)" - - -@pytest.mark.ros -def test_path_from_ros_msg() -> None: - """Test creating a Path from a ROS Path message.""" - ros_msg = ROSPath() - ros_msg.header.frame_id = "map" - ros_msg.header.stamp.sec = 123 - ros_msg.header.stamp.nanosec = 456000000 - - # Add some poses - for i in range(3): - ros_pose = ROSPoseStamped() - ros_pose.header.frame_id = "map" - ros_pose.header.stamp.sec = 123 + i - ros_pose.header.stamp.nanosec = 0 - ros_pose.pose.position.x = float(i) - ros_pose.pose.position.y = float(i * 2) - ros_pose.pose.position.z = float(i * 3) - ros_pose.pose.orientation.x = 0.0 - ros_pose.pose.orientation.y = 0.0 - ros_pose.pose.orientation.z = 0.0 - ros_pose.pose.orientation.w = 1.0 - ros_msg.poses.append(ros_pose) - - path = Path.from_ros_msg(ros_msg) - - assert path.frame_id == "map" - assert path.ts == 123.456 - assert len(path.poses) == 3 - - for i, pose in enumerate(path.poses): - assert pose.position.x == float(i) - assert pose.position.y == float(i * 2) - assert pose.position.z == float(i * 3) - assert pose.orientation.w == 1.0 - - -@pytest.mark.ros -def test_path_to_ros_msg() -> None: - """Test converting a Path to a ROS Path message.""" - poses = [ - PoseStamped( - ts=124.0 + i, frame_id="odom", position=[i, i * 2, i * 3], orientation=[0, 0, 0, 1] - ) - for i in range(3) - ] - - path = Path(ts=123.456, frame_id="odom", poses=poses) - - ros_msg = path.to_ros_msg() - - assert isinstance(ros_msg, ROSPath) - assert 
ros_msg.header.frame_id == "odom" - assert ros_msg.header.stamp.sec == 123 - assert ros_msg.header.stamp.nanosec == 456000000 - assert len(ros_msg.poses) == 3 - - for i, ros_pose in enumerate(ros_msg.poses): - assert ros_pose.pose.position.x == float(i) - assert ros_pose.pose.position.y == float(i * 2) - assert ros_pose.pose.position.z == float(i * 3) - assert ros_pose.pose.orientation.w == 1.0 - - -@pytest.mark.ros -def test_path_ros_roundtrip() -> None: - """Test round-trip conversion between Path and ROS Path.""" - poses = [ - PoseStamped( - ts=100.0 + i * 0.1, - frame_id="world", - position=[i * 1.5, i * 2.5, i * 3.5], - orientation=[0.1, 0.2, 0.3, 0.9], - ) - for i in range(3) - ] - - original = Path(ts=99.789, frame_id="world", poses=poses) - - ros_msg = original.to_ros_msg() - restored = Path.from_ros_msg(ros_msg) - - assert restored.frame_id == original.frame_id - assert restored.ts == original.ts - assert len(restored.poses) == len(original.poses) - - for orig_pose, rest_pose in zip(original.poses, restored.poses, strict=False): - assert rest_pose.position.x == orig_pose.position.x - assert rest_pose.position.y == orig_pose.position.y - assert rest_pose.position.z == orig_pose.position.z - assert rest_pose.orientation.x == orig_pose.orientation.x - assert rest_pose.orientation.y == orig_pose.orientation.y - assert rest_pose.orientation.z == orig_pose.orientation.z - assert rest_pose.orientation.w == orig_pose.orientation.w diff --git a/dimos/msgs/sensor_msgs/CameraInfo.py b/dimos/msgs/sensor_msgs/CameraInfo.py index 855276b4e6..a371475675 100644 --- a/dimos/msgs/sensor_msgs/CameraInfo.py +++ b/dimos/msgs/sensor_msgs/CameraInfo.py @@ -15,24 +15,16 @@ from __future__ import annotations import time +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dimos.visualization.rerun.bridge import RerunData, RerunMulti # Import LCM types from dimos_lcm.sensor_msgs import CameraInfo as LCMCameraInfo from dimos_lcm.std_msgs.Header import Header import numpy as 
np -# Import ROS types -try: - from sensor_msgs.msg import ( # type: ignore[attr-defined] - CameraInfo as ROSCameraInfo, - RegionOfInterest as ROSRegionOfInterest, - ) - from std_msgs.msg import Header as ROSHeader # type: ignore[attr-defined] - - ROS_AVAILABLE = True -except ImportError: - ROS_AVAILABLE = False - from dimos.types.timestamped import Timestamped @@ -276,89 +268,6 @@ def lcm_decode(cls, data: bytes) -> CameraInfo: return camera_info - @classmethod - def from_ros_msg(cls, ros_msg: ROSCameraInfo) -> CameraInfo: - """Create CameraInfo from ROS sensor_msgs/CameraInfo message. - - Args: - ros_msg: ROS CameraInfo message - - Returns: - CameraInfo instance - """ - if not ROS_AVAILABLE: - raise ImportError("ROS packages not available. Cannot convert from ROS message.") - - # Extract timestamp - ts = ros_msg.header.stamp.sec + ros_msg.header.stamp.nanosec / 1e9 - - camera_info = cls( - height=ros_msg.height, - width=ros_msg.width, - distortion_model=ros_msg.distortion_model, - D=list(ros_msg.d), - K=list(ros_msg.k), - R=list(ros_msg.r), - P=list(ros_msg.p), - binning_x=ros_msg.binning_x, - binning_y=ros_msg.binning_y, - frame_id=ros_msg.header.frame_id, - ts=ts, - ) - - # Set ROI - camera_info.roi_x_offset = ros_msg.roi.x_offset - camera_info.roi_y_offset = ros_msg.roi.y_offset - camera_info.roi_height = ros_msg.roi.height - camera_info.roi_width = ros_msg.roi.width - camera_info.roi_do_rectify = ros_msg.roi.do_rectify - - return camera_info - - def to_ros_msg(self) -> ROSCameraInfo: - """Convert to ROS sensor_msgs/CameraInfo message. - - Returns: - ROS CameraInfo message - """ - if not ROS_AVAILABLE: - raise ImportError("ROS packages not available. 
Cannot convert to ROS message.") - - ros_msg = ROSCameraInfo() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header = ROSHeader() # type: ignore[no-untyped-call] - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1e9) - - # Image dimensions - ros_msg.height = self.height - ros_msg.width = self.width - - # Distortion model and coefficients - ros_msg.distortion_model = self.distortion_model - ros_msg.d = self.D - - # Camera matrices (all row-major) - ros_msg.k = self.K - ros_msg.r = self.R - ros_msg.p = self.P - - # Binning - ros_msg.binning_x = self.binning_x - ros_msg.binning_y = self.binning_y - - # ROI - ros_msg.roi = ROSRegionOfInterest() # type: ignore[no-untyped-call] - ros_msg.roi.x_offset = self.roi_x_offset - ros_msg.roi.y_offset = self.roi_y_offset - ros_msg.roi.height = self.roi_height - ros_msg.roi.width = self.roi_width - ros_msg.roi.do_rectify = self.roi_do_rectify - - return ros_msg - def __repr__(self) -> str: """String representation.""" return ( @@ -395,7 +304,20 @@ def __eq__(self, other) -> bool: # type: ignore[no-untyped-def] and self.frame_id == other.frame_id ) - def to_rerun(self, image_plane_distance: float = 0.5): # type: ignore[no-untyped-def] + def to_rerun( + self, + image_plane_distance: float = 1.0, + # These are defaults for a typical RGB camera with a known transform + # + # TODO this should be done by the actual emitting modules, + # they know the camera image topic, spatial relationships etc + # + # Poor CameraInfo class has no idea on this + # We just provide the parameters here for convenience in case your + # module doesn't implement this correctly + image_topic: str | None = None, + optical_frame: str | None = None, + ) -> RerunData: """Convert to Rerun Pinhole archetype for camera frustum visualization. 
Args: @@ -411,7 +333,7 @@ def to_rerun(self, image_plane_distance: float = 0.5): # type: ignore[no-untype fx, fy = self.K[0], self.K[4] cx, cy = self.K[2], self.K[5] - return rr.Pinhole( + pinhole = rr.Pinhole( focal_length=[fx, fy], principal_point=[cx, cy], width=self.width, @@ -419,6 +341,43 @@ def to_rerun(self, image_plane_distance: float = 0.5): # type: ignore[no-untype image_plane_distance=image_plane_distance, ) + # If no image topic is specified, We don't know which Image this CameraInfo refers to + # return just the pinhole + if not image_topic: + return pinhole + + ret: RerunMulti = [] + + # Add pinhole under world/image_topic (we know which Image this CameraInfo refers to) + # Note: parent_frame is supposed to work according to: + # https://rerun.io/docs/reference/types/archetypes/pinhole + # But it doesn't, so we add the transform separately below + ret.append( + ( + image_topic, + rr.Pinhole( + focal_length=[fx, fy], + principal_point=[cx, cy], + width=self.width, + height=self.height, + image_plane_distance=image_plane_distance, + ), + ) + ) + + if not optical_frame: + return ret + + # Add 3d transform from optical frame to world/image_topic (We know where the camera is) + ret.append( + ( + image_topic, + rr.Transform3D(parent_frame=f"tf#/{optical_frame}"), + ) + ) + + return ret + class CalibrationProvider: """Provides lazy-loaded access to camera calibration YAML files in a directory.""" diff --git a/dimos/msgs/sensor_msgs/Image.py b/dimos/msgs/sensor_msgs/Image.py index de3e7abeca..66c2876b62 100644 --- a/dimos/msgs/sensor_msgs/Image.py +++ b/dimos/msgs/sensor_msgs/Image.py @@ -15,6 +15,8 @@ from __future__ import annotations import base64 +from dataclasses import dataclass, field +from enum import Enum import time from typing import TYPE_CHECKING, Any, Literal, TypedDict @@ -24,37 +26,51 @@ import numpy as np import reactivex as rx from reactivex import operators as ops +import rerun as rr from turbojpeg import TurboJPEG # type: 
ignore[import-untyped] -from dimos.msgs.sensor_msgs.image_impls.AbstractImage import ( - HAS_CUDA, - HAS_NVIMGCODEC, - NVIMGCODEC_LAST_USED, - ImageFormat, -) -from dimos.msgs.sensor_msgs.image_impls.CudaImage import CudaImage -from dimos.msgs.sensor_msgs.image_impls.NumpyImage import NumpyImage from dimos.types.timestamped import Timestamped, TimestampedBufferCollection, to_human_readable from dimos.utils.reactive import quality_barrier if TYPE_CHECKING: + from collections.abc import Callable import os from reactivex.observable import Observable - from dimos.msgs.sensor_msgs.image_impls.AbstractImage import ( - AbstractImage, - ) - -try: - import cupy as cp # type: ignore[import-not-found] -except Exception: - cp = None -try: - from sensor_msgs.msg import Image as ROSImage # type: ignore[attr-defined] -except ImportError: - ROSImage = None # type: ignore[assignment, misc] +class ImageFormat(Enum): + BGR = "BGR" + RGB = "RGB" + RGBA = "RGBA" + BGRA = "BGRA" + GRAY = "GRAY" + GRAY16 = "GRAY16" + DEPTH = "DEPTH" + DEPTH16 = "DEPTH16" + + +def _format_to_rerun(data: np.ndarray, fmt: ImageFormat) -> Any: # type: ignore[type-arg] + """Convert image data to Rerun archetype based on format.""" + match fmt: + case ImageFormat.RGB: + return rr.Image(data, color_model="RGB") + case ImageFormat.RGBA: + return rr.Image(data, color_model="RGBA") + case ImageFormat.BGR: + return rr.Image(data, color_model="BGR") + case ImageFormat.BGRA: + return rr.Image(data, color_model="BGRA") + case ImageFormat.GRAY: + return rr.Image(data, color_model="L") + case ImageFormat.GRAY16: + return rr.Image(data, color_model="L") + case ImageFormat.DEPTH: + return rr.DepthImage(data) + case ImageFormat.DEPTH16: + return rr.DepthImage(data) + case _: + raise ValueError(f"Unsupported format for Rerun: {fmt}") class AgentImageMessage(TypedDict): @@ -66,108 +82,104 @@ class AgentImageMessage(TypedDict): data: str # Base64 encoded image data +@dataclass class Image(Timestamped): + """Simple NumPy-based 
image container.""" + msg_name = "sensor_msgs.Image" - def __init__( # type: ignore[no-untyped-def] - self, - impl: AbstractImage | None = None, - *, - data=None, - format: ImageFormat | None = None, - frame_id: str | None = None, - ts: float | None = None, - ) -> None: - """Construct an Image facade. + data: np.ndarray[Any, np.dtype[Any]] = field( + default_factory=lambda: np.zeros((1, 1, 3), dtype=np.uint8) + ) + format: ImageFormat = field(default=ImageFormat.BGR) + frame_id: str = field(default="") + ts: float = field(default_factory=time.time) - Usage: - - Image(impl=) - - Image(data=, format=ImageFormat.RGB, frame_id=str, ts=float) + def __post_init__(self) -> None: + if not isinstance(self.data, np.ndarray): + self.data = np.asarray(self.data) + if self.data.ndim < 2: + raise ValueError("Image requires a 2D/3D NumPy array") - Notes: - - When constructed from `data`, uses CudaImage if `data` is a CuPy array and CUDA is available; otherwise NumpyImage. - - `format` defaults to ImageFormat.RGB; `frame_id` defaults to ""; `ts` defaults to `time.time()`. 
- """ - # Disallow mixing impl with raw kwargs - if impl is not None and any(x is not None for x in (data, format, frame_id, ts)): - raise TypeError( - "Provide either 'impl' or ('data', 'format', 'frame_id', 'ts'), not both" - ) + def __str__(self) -> str: + return ( + f"Image(shape={self.shape}, format={self.format.value}, dtype={self.dtype}, " + f"ts={to_human_readable(self.ts)})" + ) - if impl is not None: - self._impl = impl - return + def __repr__(self) -> str: + return f"Image(shape={self.shape}, format={self.format.value}, dtype={self.dtype}, frame_id='{self.frame_id}', ts={self.ts})" - # Raw constructor path - if data is None: - raise TypeError("'data' is required when constructing Image without 'impl'") - fmt = format if format is not None else ImageFormat.BGR - fid = frame_id if frame_id is not None else "" - tstamp = ts if ts is not None else time.time() + def __eq__(self, other: object) -> bool: + if not isinstance(other, Image): + return False + return ( + np.array_equal(self.data, other.data) + and self.format == other.format + and self.frame_id == other.frame_id + and abs(self.ts - other.ts) < 1e-6 + ) - # Detect CuPy array without a hard dependency - is_cu = False - try: - import cupy as _cp + def __len__(self) -> int: + return int(self.height * self.width) - is_cu = isinstance(data, _cp.ndarray) - except Exception: - is_cu = False + def __getstate__(self) -> dict[str, Any]: + return {"data": self.data, "format": self.format, "frame_id": self.frame_id, "ts": self.ts} - if is_cu and HAS_CUDA: - self._impl = CudaImage(data, fmt, fid, tstamp) - else: - self._impl = NumpyImage(np.asarray(data), fmt, fid, tstamp) + def __setstate__(self, state: dict[str, Any]) -> None: + self.data = state.get("data", np.zeros((1, 1, 3), dtype=np.uint8)) + self.format = state.get("format", ImageFormat.BGR) + self.frame_id = state.get("frame_id", "") + self.ts = state.get("ts", time.time()) - def __str__(self) -> str: - dev = "cuda" if self.is_cuda else "cpu" - return ( - 
f"Image(shape={self.shape}, format={self.format.value}, dtype={self.dtype}, " - f"dev={dev}, ts={to_human_readable(self.ts)})" - ) + @property + def height(self) -> int: + return int(self.data.shape[0]) - @classmethod - def from_impl(cls, impl: AbstractImage) -> Image: - return cls(impl) + @property + def width(self) -> int: + return int(self.data.shape[1]) + + @property + def channels(self) -> int: + if self.data.ndim == 2: + return 1 + if self.data.ndim == 3: + return int(self.data.shape[2]) + raise ValueError("Invalid image dimensions") + + @property + def shape(self) -> tuple[int, ...]: + return tuple(self.data.shape) + + @property + def dtype(self) -> np.dtype[Any]: + return self.data.dtype + + def copy(self) -> Image: + return Image(data=self.data.copy(), format=self.format, frame_id=self.frame_id, ts=self.ts) @classmethod - def from_numpy( # type: ignore[no-untyped-def] + def from_numpy( cls, np_image: np.ndarray, # type: ignore[type-arg] format: ImageFormat = ImageFormat.BGR, - to_cuda: bool = False, - **kwargs, + frame_id: str = "", + ts: float | None = None, ) -> Image: - if kwargs.pop("to_gpu", False): - to_cuda = True - if to_cuda and HAS_CUDA: - return cls( - CudaImage( - np_image if hasattr(np_image, "shape") else np.asarray(np_image), - format, - kwargs.get("frame_id", ""), - kwargs.get("ts", time.time()), - ) - ) return cls( - NumpyImage( - np.asarray(np_image), - format, - kwargs.get("frame_id", ""), - kwargs.get("ts", time.time()), - ) + data=np.asarray(np_image), + format=format, + frame_id=frame_id, + ts=ts if ts is not None else time.time(), ) @classmethod - def from_file( # type: ignore[no-untyped-def] + def from_file( cls, filepath: str | os.PathLike[str], format: ImageFormat = ImageFormat.RGB, - to_cuda: bool = False, - **kwargs, ) -> Image: - if kwargs.pop("to_gpu", False): - to_cuda = True arr = cv2.imread(str(filepath), cv2.IMREAD_UNCHANGED) if arr is None: raise ValueError(f"Could not load image from {filepath}") @@ -179,156 +191,143 @@ 
def from_file( # type: ignore[no-untyped-def] detected = ImageFormat.BGRA # OpenCV default else: detected = format - return cls(CudaImage(arr, detected) if to_cuda and HAS_CUDA else NumpyImage(arr, detected)) + return cls(data=arr, format=detected) @classmethod - def from_opencv( # type: ignore[no-untyped-def] + def from_opencv( cls, cv_image: np.ndarray, # type: ignore[type-arg] format: ImageFormat = ImageFormat.BGR, - **kwargs, + frame_id: str = "", + ts: float | None = None, ) -> Image: """Construct from an OpenCV image (NumPy array).""" return cls( - NumpyImage(cv_image, format, kwargs.get("frame_id", ""), kwargs.get("ts", time.time())) - ) - - @classmethod - def from_depth( # type: ignore[no-untyped-def] - cls, depth_data, frame_id: str = "", ts: float | None = None, to_cuda: bool = False - ) -> Image: - arr = np.asarray(depth_data) - if arr.dtype != np.float32: - arr = arr.astype(np.float32) - impl = ( - CudaImage(arr, ImageFormat.DEPTH, frame_id, time.time() if ts is None else ts) - if to_cuda and HAS_CUDA - else NumpyImage(arr, ImageFormat.DEPTH, frame_id, time.time() if ts is None else ts) - ) - return cls(impl) - - # Delegation - @property - def is_cuda(self) -> bool: - return self._impl.is_cuda - - @property - def data(self): # type: ignore[no-untyped-def] - return self._impl.data - - @data.setter - def data(self, value) -> None: # type: ignore[no-untyped-def] - # Preserve backend semantics: ensure array type matches implementation - if isinstance(self._impl, NumpyImage): - self._impl.data = np.asarray(value) - elif isinstance(self._impl, CudaImage): - if cp is None: - raise RuntimeError("CuPy not available to set CUDA image data") - self._impl.data = cp.asarray(value) - else: - self._impl.data = value - - @property - def format(self) -> ImageFormat: - return self._impl.format - - @format.setter - def format(self, value) -> None: # type: ignore[no-untyped-def] - if isinstance(value, ImageFormat): - self._impl.format = value - elif isinstance(value, str): 
- try: - self._impl.format = ImageFormat[value] - except KeyError as e: - raise ValueError(f"Invalid ImageFormat: {value}") from e - else: - raise TypeError("format must be ImageFormat or str name") - - @property - def frame_id(self) -> str: - return self._impl.frame_id - - @frame_id.setter - def frame_id(self, value: str) -> None: - self._impl.frame_id = str(value) - - @property - def ts(self) -> float: - return self._impl.ts - - @ts.setter - def ts(self, value: float) -> None: - self._impl.ts = float(value) - - @property - def height(self) -> int: - return self._impl.height - - @property - def width(self) -> int: - return self._impl.width - - @property - def channels(self) -> int: - return self._impl.channels - - @property - def shape(self): # type: ignore[no-untyped-def] - return self._impl.shape - - @property - def dtype(self): # type: ignore[no-untyped-def] - return self._impl.dtype - - def copy(self) -> Image: - return Image(self._impl.copy()) - - def to_cpu(self) -> Image: - if isinstance(self._impl, NumpyImage): - return self.copy() - - data = self._impl.data.get() # CuPy array to NumPy - - return Image( - NumpyImage( - data, - self._impl.format, - self._impl.frame_id, - self._impl.ts, - ) - ) - - def to_cupy(self) -> Image: - if isinstance(self._impl, CudaImage): - return self.copy() - return Image( - CudaImage( - np.asarray(self._impl.data), self._impl.format, self._impl.frame_id, self._impl.ts - ) + data=cv_image, + format=format, + frame_id=frame_id, + ts=ts if ts is not None else time.time(), ) def to_opencv(self) -> np.ndarray: # type: ignore[type-arg] - return self._impl.to_opencv() + """Convert to OpenCV BGR format.""" + arr = self.data + if self.format == ImageFormat.BGR: + return arr + if self.format == ImageFormat.RGB: + return cv2.cvtColor(arr, cv2.COLOR_RGB2BGR) + if self.format == ImageFormat.RGBA: + return cv2.cvtColor(arr, cv2.COLOR_RGBA2BGR) + if self.format == ImageFormat.BGRA: + return cv2.cvtColor(arr, cv2.COLOR_BGRA2BGR) + if 
self.format in ( + ImageFormat.GRAY, + ImageFormat.GRAY16, + ImageFormat.DEPTH, + ImageFormat.DEPTH16, + ): + return arr + raise ValueError(f"Unsupported format: {self.format}") def as_numpy(self) -> np.ndarray: # type: ignore[type-arg] - """Get image data as numpy array in RGB format.""" - return np.asarray(self.data) + """Get image data as numpy array.""" + return self.data def to_rgb(self) -> Image: - return Image(self._impl.to_rgb()) + if self.format == ImageFormat.RGB: + return self.copy() + arr = self.data + if self.format == ImageFormat.BGR: + return Image( + data=cv2.cvtColor(arr, cv2.COLOR_BGR2RGB), + format=ImageFormat.RGB, + frame_id=self.frame_id, + ts=self.ts, + ) + if self.format == ImageFormat.RGBA: + return self.copy() # RGBA contains RGB + alpha + if self.format == ImageFormat.BGRA: + rgba = cv2.cvtColor(arr, cv2.COLOR_BGRA2RGBA) + return Image(data=rgba, format=ImageFormat.RGBA, frame_id=self.frame_id, ts=self.ts) + if self.format in (ImageFormat.GRAY, ImageFormat.GRAY16, ImageFormat.DEPTH16): + gray8 = (arr / 256).astype(np.uint8) if self.format != ImageFormat.GRAY else arr + rgb = cv2.cvtColor(gray8, cv2.COLOR_GRAY2RGB) + return Image(data=rgb, format=ImageFormat.RGB, frame_id=self.frame_id, ts=self.ts) + return self.copy() def to_bgr(self) -> Image: - return Image(self._impl.to_bgr()) + if self.format == ImageFormat.BGR: + return self.copy() + arr = self.data + if self.format == ImageFormat.RGB: + return Image( + data=cv2.cvtColor(arr, cv2.COLOR_RGB2BGR), + format=ImageFormat.BGR, + frame_id=self.frame_id, + ts=self.ts, + ) + if self.format == ImageFormat.RGBA: + return Image( + data=cv2.cvtColor(arr, cv2.COLOR_RGBA2BGR), + format=ImageFormat.BGR, + frame_id=self.frame_id, + ts=self.ts, + ) + if self.format == ImageFormat.BGRA: + return Image( + data=cv2.cvtColor(arr, cv2.COLOR_BGRA2BGR), + format=ImageFormat.BGR, + frame_id=self.frame_id, + ts=self.ts, + ) + if self.format in (ImageFormat.GRAY, ImageFormat.GRAY16, ImageFormat.DEPTH16): + gray8 
= (arr / 256).astype(np.uint8) if self.format != ImageFormat.GRAY else arr + return Image( + data=cv2.cvtColor(gray8, cv2.COLOR_GRAY2BGR), + format=ImageFormat.BGR, + frame_id=self.frame_id, + ts=self.ts, + ) + return self.copy() def to_grayscale(self) -> Image: - return Image(self._impl.to_grayscale()) + if self.format in (ImageFormat.GRAY, ImageFormat.GRAY16, ImageFormat.DEPTH): + return self.copy() + if self.format == ImageFormat.BGR: + return Image( + data=cv2.cvtColor(self.data, cv2.COLOR_BGR2GRAY), + format=ImageFormat.GRAY, + frame_id=self.frame_id, + ts=self.ts, + ) + if self.format == ImageFormat.RGB: + return Image( + data=cv2.cvtColor(self.data, cv2.COLOR_RGB2GRAY), + format=ImageFormat.GRAY, + frame_id=self.frame_id, + ts=self.ts, + ) + if self.format in (ImageFormat.RGBA, ImageFormat.BGRA): + code = cv2.COLOR_RGBA2GRAY if self.format == ImageFormat.RGBA else cv2.COLOR_BGRA2GRAY + return Image( + data=cv2.cvtColor(self.data, code), + format=ImageFormat.GRAY, + frame_id=self.frame_id, + ts=self.ts, + ) + raise ValueError(f"Unsupported format: {self.format}") def to_rerun(self) -> Any: """Convert to rerun Image format.""" - return self._impl.to_rerun() + return _format_to_rerun(self.data, self.format) def resize(self, width: int, height: int, interpolation: int = cv2.INTER_LINEAR) -> Image: - return Image(self._impl.resize(width, height, interpolation)) + return Image( + data=cv2.resize(self.data, (width, height), interpolation=interpolation), + format=self.format, + frame_id=self.frame_id, + ts=self.ts, + ) def resize_to_fit( self, max_width: int, max_height: int, interpolation: int = cv2.INTER_LINEAR @@ -349,29 +348,48 @@ def resize_to_fit( return self.resize(new_width, new_height, interpolation), scale def crop(self, x: int, y: int, width: int, height: int) -> Image: - return Image(self._impl.crop(x, y, width, height)) # type: ignore[attr-defined] + """Crop the image to the specified region. 
- @property - def sharpness(self) -> float: - """Return sharpness score.""" - return self._impl.sharpness() + Args: + x: Starting x coordinate (left edge) + y: Starting y coordinate (top edge) + width: Width of the cropped region + height: Height of the cropped region + + Returns: + A new Image containing the cropped region + """ + img_height, img_width = self.data.shape[:2] - def to_depth_meters(self) -> Image: - """Return a depth image normalized to meters as float32.""" - depth_cv = self.to_opencv() - fmt = self.format + # Clamp the crop region to image bounds + x = max(0, min(x, img_width)) + y = max(0, min(y, img_height)) + x_end = min(x + width, img_width) + y_end = min(y + height, img_height) - if fmt == ImageFormat.DEPTH16: - depth_cv = depth_cv.astype(np.float32) / 1000.0 - fmt = ImageFormat.DEPTH - elif depth_cv.dtype != np.float32: - depth_cv = depth_cv.astype(np.float32) - fmt = ImageFormat.DEPTH if fmt == ImageFormat.DEPTH else fmt + # Perform the crop using array slicing + if self.data.ndim == 2: + cropped_data = self.data[y:y_end, x:x_end] + else: + cropped_data = self.data[y:y_end, x:x_end, :] + + return Image(data=cropped_data, format=self.format, frame_id=self.frame_id, ts=self.ts) - return Image.from_numpy(depth_cv, format=fmt, frame_id=self.frame_id, ts=self.ts) + @property + def sharpness(self) -> float: + """Return sharpness score.""" + gray = self.to_grayscale() + sx = cv2.Sobel(gray.data, cv2.CV_32F, 1, 0, ksize=5) + sy = cv2.Sobel(gray.data, cv2.CV_32F, 0, 1, ksize=5) + magnitude = cv2.magnitude(sx, sy) + mean_mag = float(magnitude.mean()) + if mean_mag <= 0: + return 0.0 + return float(np.clip((np.log10(mean_mag + 1) - 1.7) / 2.0, 0.0, 1.0)) def save(self, filepath: str) -> bool: - return self._impl.save(filepath) + arr = self.to_opencv() + return cv2.imwrite(filepath, arr) def to_base64( self, @@ -448,34 +466,32 @@ def lcm_encode(self, frame_id: str | None = None) -> bytes: channels = 1 if self.data.ndim == 2 else self.data.shape[2] 
msg.step = self.width * self.dtype.itemsize * channels - view = memoryview(np.ascontiguousarray(self.data)).cast("B") + view = memoryview(np.ascontiguousarray(self.data)).cast("B") # type: ignore[arg-type] msg.data_length = len(view) msg.data = view return msg.lcm_encode() # type: ignore[no-any-return] @classmethod - def lcm_decode(cls, data: bytes, **kwargs) -> Image: # type: ignore[no-untyped-def] + def lcm_decode(cls, data: bytes, **kwargs: Any) -> Image: msg = LCMImage.lcm_decode(data) fmt, dtype, channels = _parse_lcm_encoding(msg.encoding) - arr = np.frombuffer(msg.data, dtype=dtype) + arr: np.ndarray[Any, Any] = np.frombuffer(msg.data, dtype=dtype) if channels == 1: arr = arr.reshape((msg.height, msg.width)) else: arr = arr.reshape((msg.height, msg.width, channels)) return cls( - NumpyImage( - arr, - fmt, - msg.header.frame_id if hasattr(msg, "header") else "", - ( - msg.header.stamp.sec + msg.header.stamp.nsec / 1e9 - if hasattr(msg, "header") - and hasattr(msg.header, "stamp") - and msg.header.stamp.sec > 0 - else time.time() - ), - ) + data=arr, + format=fmt, + frame_id=msg.header.frame_id if hasattr(msg, "header") else "", + ts=( + msg.header.stamp.sec + msg.header.stamp.nsec / 1e9 + if hasattr(msg, "header") + and hasattr(msg.header, "stamp") + and msg.header.stamp.sec > 0 + else time.time() + ), ) def lcm_jpeg_encode(self, quality: int = 75, frame_id: str | None = None) -> bytes: @@ -524,7 +540,7 @@ def lcm_jpeg_encode(self, quality: int = 75, frame_id: str | None = None) -> byt return msg.lcm_encode() # type: ignore[no-any-return] @classmethod - def lcm_jpeg_decode(cls, data: bytes, **kwargs) -> Image: # type: ignore[no-untyped-def] + def lcm_jpeg_decode(cls, data: bytes, **kwargs: Any) -> Image: """Decode an LCM Image message with JPEG-compressed data. 
Args: @@ -543,159 +559,21 @@ def lcm_jpeg_decode(cls, data: bytes, **kwargs) -> Image: # type: ignore[no-unt bgr_array = jpeg.decode(msg.data) return cls( - NumpyImage( - bgr_array, - ImageFormat.BGR, - msg.header.frame_id if hasattr(msg, "header") else "", - ( - msg.header.stamp.sec + msg.header.stamp.nsec / 1e9 - if hasattr(msg, "header") - and hasattr(msg.header, "stamp") - and msg.header.stamp.sec > 0 - else time.time() - ), - ) - ) - - # PnP wrappers - def solve_pnp(self, *args, **kwargs): # type: ignore[no-untyped-def] - return self._impl.solve_pnp(*args, **kwargs) # type: ignore[attr-defined] - - def solve_pnp_ransac(self, *args, **kwargs): # type: ignore[no-untyped-def] - return self._impl.solve_pnp_ransac(*args, **kwargs) # type: ignore[attr-defined] - - def solve_pnp_batch(self, *args, **kwargs): # type: ignore[no-untyped-def] - return self._impl.solve_pnp_batch(*args, **kwargs) # type: ignore[attr-defined] - - def create_csrt_tracker(self, *args, **kwargs): # type: ignore[no-untyped-def] - return self._impl.create_csrt_tracker(*args, **kwargs) # type: ignore[attr-defined] - - def csrt_update(self, *args, **kwargs): # type: ignore[no-untyped-def] - return self._impl.csrt_update(*args, **kwargs) # type: ignore[attr-defined] - - @classmethod - def from_ros_msg(cls, ros_msg: ROSImage) -> Image: - """Create an Image from a ROS sensor_msgs/Image message. 
- - Args: - ros_msg: ROS Image message - - Returns: - Image instance - """ - # Convert timestamp from ROS header - ts = ros_msg.header.stamp.sec + (ros_msg.header.stamp.nanosec / 1_000_000_000) - - # Parse encoding to determine format and data type - format_info = cls._parse_encoding(ros_msg.encoding) - - # Convert data from ROS message (array.array) to numpy array - data_array = np.frombuffer(ros_msg.data, dtype=format_info["dtype"]) - - # Reshape to image dimensions - if format_info["channels"] == 1: - data_array = data_array.reshape((ros_msg.height, ros_msg.width)) - else: - data_array = data_array.reshape( - (ros_msg.height, ros_msg.width, format_info["channels"]) - ) - - # Crop to center 1/3 of the image (simulate 120-degree FOV from 360-degree) - original_width = data_array.shape[1] - crop_width = original_width // 3 - start_x = (original_width - crop_width) // 2 - end_x = start_x + crop_width - - # Crop the image horizontally to center 1/3 - if len(data_array.shape) == 2: - # Grayscale image - data_array = data_array[:, start_x:end_x] - else: - # Color image - data_array = data_array[:, start_x:end_x, :] - - # Fix color channel order: if ROS sends RGB but we expect BGR, swap channels - # ROS typically uses rgb8 encoding, but OpenCV/our system expects BGR - if format_info["format"] == ImageFormat.RGB: - # Convert RGB to BGR by swapping channels - if len(data_array.shape) == 3 and data_array.shape[2] == 3: - data_array = data_array[:, :, [2, 1, 0]] # RGB -> BGR - format_info["format"] = ImageFormat.BGR - elif format_info["format"] == ImageFormat.RGBA: - # Convert RGBA to BGRA by swapping channels - if len(data_array.shape) == 3 and data_array.shape[2] == 4: - data_array = data_array[:, :, [2, 1, 0, 3]] # RGBA -> BGRA - format_info["format"] = ImageFormat.BGRA - - return cls( - data=data_array, - format=format_info["format"], - frame_id=ros_msg.header.frame_id, - ts=ts, + data=bgr_array, + format=ImageFormat.BGR, + frame_id=msg.header.frame_id if hasattr(msg, 
"header") else "", + ts=( + msg.header.stamp.sec + msg.header.stamp.nsec / 1e9 + if hasattr(msg, "header") + and hasattr(msg.header, "stamp") + and msg.header.stamp.sec > 0 + else time.time() + ), ) - @staticmethod - def _parse_encoding(encoding: str) -> dict: # type: ignore[type-arg] - """Translate ROS encoding strings into format metadata.""" - encoding_map = { - "mono8": {"format": ImageFormat.GRAY, "dtype": np.uint8, "channels": 1}, - "mono16": {"format": ImageFormat.GRAY16, "dtype": np.uint16, "channels": 1}, - "rgb8": {"format": ImageFormat.RGB, "dtype": np.uint8, "channels": 3}, - "rgba8": {"format": ImageFormat.RGBA, "dtype": np.uint8, "channels": 4}, - "bgr8": {"format": ImageFormat.BGR, "dtype": np.uint8, "channels": 3}, - "bgra8": {"format": ImageFormat.BGRA, "dtype": np.uint8, "channels": 4}, - "32FC1": {"format": ImageFormat.DEPTH, "dtype": np.float32, "channels": 1}, - "32FC3": {"format": ImageFormat.RGB, "dtype": np.float32, "channels": 3}, - "64FC1": {"format": ImageFormat.DEPTH, "dtype": np.float64, "channels": 1}, - "16UC1": {"format": ImageFormat.DEPTH16, "dtype": np.uint16, "channels": 1}, - "16SC1": {"format": ImageFormat.DEPTH16, "dtype": np.int16, "channels": 1}, - } - - key = encoding.strip() - for candidate in (key, key.lower(), key.upper()): - if candidate in encoding_map: - return dict(encoding_map[candidate]) - - raise ValueError(f"Unsupported encoding: {encoding}") - def __repr__(self) -> str: - dev = "cuda" if self.is_cuda else "cpu" - return f"Image(shape={self.shape}, format={self.format.value}, dtype={self.dtype}, dev={dev}, frame_id='{self.frame_id}', ts={self.ts})" - - def __eq__(self, other) -> bool: # type: ignore[no-untyped-def] - if not isinstance(other, Image): - return False - return ( - np.array_equal(self.data, other.data) - and self.format == other.format - and self.frame_id == other.frame_id - and abs(self.ts - other.ts) < 1e-6 - ) - - def __len__(self) -> int: - return int(self.height * self.width) - - def 
__getstate__(self): # type: ignore[no-untyped-def] - return {"data": self.data, "format": self.format, "frame_id": self.frame_id, "ts": self.ts} - - def __setstate__(self, state) -> None: # type: ignore[no-untyped-def] - self.__init__( # type: ignore[misc] - data=state.get("data"), - format=state.get("format"), - frame_id=state.get("frame_id"), - ts=state.get("ts"), - ) - - -# Re-exports for tests -HAS_CUDA = HAS_CUDA -ImageFormat = ImageFormat -NVIMGCODEC_LAST_USED = NVIMGCODEC_LAST_USED -HAS_NVIMGCODEC = HAS_NVIMGCODEC __all__ = [ - "HAS_CUDA", - "HAS_NVIMGCODEC", - "NVIMGCODEC_LAST_USED", + "Image", "ImageFormat", "sharpness_barrier", "sharpness_window", @@ -704,27 +582,29 @@ def __setstate__(self, state) -> None: # type: ignore[no-untyped-def] def sharpness_window(target_frequency: float, source: Observable[Image]) -> Observable[Image]: """Emit the sharpest Image seen within each sliding time window.""" + from reactivex.scheduler import ThreadPoolScheduler + if target_frequency <= 0: raise ValueError("target_frequency must be positive") window = TimestampedBufferCollection(1.0 / target_frequency) # type: ignore[var-annotated] source.subscribe(window.add) - thread_scheduler = ThreadPoolScheduler(max_workers=1) # type: ignore[name-defined] + thread_scheduler = ThreadPoolScheduler(max_workers=1) - def find_best(*_args): # type: ignore[no-untyped-def] - if not window._items: + def find_best(*_args: Any) -> Image | None: + if len(window) == 0: return None - return max(window._items, key=lambda img: img.sharpness) + return max(window, key=lambda img: img.sharpness) # type: ignore[no-any-return] - return rx.interval(1.0 / target_frequency).pipe( + return rx.interval(1.0 / target_frequency).pipe( # type: ignore[misc] ops.observe_on(thread_scheduler), ops.map(find_best), ops.filter(lambda img: img is not None), ) -def sharpness_barrier(target_frequency: float): # type: ignore[no-untyped-def] +def sharpness_barrier(target_frequency: float) -> 
Callable[[Observable[Image]], Observable[Image]]: """Select the sharpest Image within each time window.""" if target_frequency <= 0: raise ValueError("target_frequency must be positive") @@ -760,7 +640,7 @@ def _get_lcm_encoding(fmt: ImageFormat, dtype: np.dtype) -> str: # type: ignore raise ValueError(f"Unsupported LCM encoding for fmt={fmt}, dtype={dtype}") -def _parse_lcm_encoding(enc: str): # type: ignore[no-untyped-def] +def _parse_lcm_encoding(enc: str) -> tuple[ImageFormat, type, int]: m = { "mono8": (ImageFormat.GRAY, np.uint8, 1), "mono16": (ImageFormat.GRAY16, np.uint16, 1), diff --git a/dimos/msgs/sensor_msgs/Imu.py b/dimos/msgs/sensor_msgs/Imu.py new file mode 100644 index 0000000000..7fe03ce03f --- /dev/null +++ b/dimos/msgs/sensor_msgs/Imu.py @@ -0,0 +1,118 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import time + +from dimos_lcm.sensor_msgs.Imu import Imu as LCMImu + +from dimos.msgs.geometry_msgs import Quaternion, Vector3 +from dimos.types.timestamped import Timestamped + + +class Imu(Timestamped): + """IMU sensor message mirroring ROS sensor_msgs/Imu. + + Contains orientation, angular velocity, and linear acceleration + with optional covariance matrices (3x3 row-major as flat 9-element lists). 
+ """ + + msg_name = "sensor_msgs.Imu" + + def __init__( + self, + angular_velocity: Vector3 | None = None, + linear_acceleration: Vector3 | None = None, + orientation: Quaternion | None = None, + orientation_covariance: list[float] | None = None, + angular_velocity_covariance: list[float] | None = None, + linear_acceleration_covariance: list[float] | None = None, + frame_id: str = "imu_link", + ts: float | None = None, + ) -> None: + self.ts = ts if ts is not None else time.time() # type: ignore[assignment] + self.frame_id = frame_id + self.angular_velocity = angular_velocity or Vector3(0.0, 0.0, 0.0) + self.linear_acceleration = linear_acceleration or Vector3(0.0, 0.0, 0.0) + self.orientation = orientation or Quaternion(0.0, 0.0, 0.0, 1.0) + self.orientation_covariance = orientation_covariance or [0.0] * 9 + self.angular_velocity_covariance = angular_velocity_covariance or [0.0] * 9 + self.linear_acceleration_covariance = linear_acceleration_covariance or [0.0] * 9 + + def lcm_encode(self) -> bytes: + msg = LCMImu() + [msg.header.stamp.sec, msg.header.stamp.nsec] = self.ros_timestamp() + msg.header.frame_id = self.frame_id + + msg.orientation.x = self.orientation.x + msg.orientation.y = self.orientation.y + msg.orientation.z = self.orientation.z + msg.orientation.w = self.orientation.w + msg.orientation_covariance = self.orientation_covariance + + msg.angular_velocity.x = self.angular_velocity.x + msg.angular_velocity.y = self.angular_velocity.y + msg.angular_velocity.z = self.angular_velocity.z + msg.angular_velocity_covariance = self.angular_velocity_covariance + + msg.linear_acceleration.x = self.linear_acceleration.x + msg.linear_acceleration.y = self.linear_acceleration.y + msg.linear_acceleration.z = self.linear_acceleration.z + msg.linear_acceleration_covariance = self.linear_acceleration_covariance + + return msg.lcm_encode() # type: ignore[no-any-return] + + @classmethod + def lcm_decode(cls, data: bytes) -> Imu: + msg = LCMImu.lcm_decode(data) + ts = 
msg.header.stamp.sec + (msg.header.stamp.nsec / 1_000_000_000) + return cls( + angular_velocity=Vector3( + msg.angular_velocity.x, + msg.angular_velocity.y, + msg.angular_velocity.z, + ), + linear_acceleration=Vector3( + msg.linear_acceleration.x, + msg.linear_acceleration.y, + msg.linear_acceleration.z, + ), + orientation=Quaternion( + msg.orientation.x, + msg.orientation.y, + msg.orientation.z, + msg.orientation.w, + ), + orientation_covariance=list(msg.orientation_covariance), + angular_velocity_covariance=list(msg.angular_velocity_covariance), + linear_acceleration_covariance=list(msg.linear_acceleration_covariance), + frame_id=msg.header.frame_id, + ts=ts, + ) + + def __str__(self) -> str: + return ( + f"Imu(frame_id='{self.frame_id}', " + f"gyro=({self.angular_velocity.x:.3f}, {self.angular_velocity.y:.3f}, {self.angular_velocity.z:.3f}), " + f"accel=({self.linear_acceleration.x:.3f}, {self.linear_acceleration.y:.3f}, {self.linear_acceleration.z:.3f}))" + ) + + def __repr__(self) -> str: + return ( + f"Imu(ts={self.ts}, frame_id='{self.frame_id}', " + f"angular_velocity={self.angular_velocity}, " + f"linear_acceleration={self.linear_acceleration}, " + f"orientation={self.orientation})" + ) diff --git a/dimos/msgs/sensor_msgs/JointState.py b/dimos/msgs/sensor_msgs/JointState.py index 2936012bcc..9faf0be42c 100644 --- a/dimos/msgs/sensor_msgs/JointState.py +++ b/dimos/msgs/sensor_msgs/JointState.py @@ -18,12 +18,6 @@ from typing import TypeAlias from dimos_lcm.sensor_msgs import JointState as LCMJointState - -try: - from sensor_msgs.msg import JointState as ROSJointState # type: ignore[attr-defined] -except ImportError: - ROSJointState = None # type: ignore[assignment, misc] - from plum import dispatch from dimos.types.timestamped import Timestamped @@ -150,46 +144,3 @@ def __eq__(self, other) -> bool: # type: ignore[no-untyped-def] and self.effort == other.effort and self.frame_id == other.frame_id ) - - @classmethod - def from_ros_msg(cls, ros_msg: 
ROSJointState) -> JointState: - """Create a JointState from a ROS sensor_msgs/JointState message. - - Args: - ros_msg: ROS JointState message - - Returns: - JointState instance - """ - # Convert timestamp from ROS header - ts = ros_msg.header.stamp.sec + (ros_msg.header.stamp.nanosec / 1_000_000_000) - - return cls( - ts=ts, - frame_id=ros_msg.header.frame_id, - name=list(ros_msg.name), - position=list(ros_msg.position), - velocity=list(ros_msg.velocity), - effort=list(ros_msg.effort), - ) - - def to_ros_msg(self) -> ROSJointState: - """Convert to a ROS sensor_msgs/JointState message. - - Returns: - ROS JointState message - """ - ros_msg = ROSJointState() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1_000_000_000) - - # Set joint data - ros_msg.name = self.name - ros_msg.position = self.position - ros_msg.velocity = self.velocity - ros_msg.effort = self.effort - - return ros_msg diff --git a/dimos/msgs/sensor_msgs/Joy.py b/dimos/msgs/sensor_msgs/Joy.py index c8c2fbcd3e..3823f132b7 100644 --- a/dimos/msgs/sensor_msgs/Joy.py +++ b/dimos/msgs/sensor_msgs/Joy.py @@ -18,12 +18,6 @@ from typing import TypeAlias from dimos_lcm.sensor_msgs import Joy as LCMJoy - -try: - from sensor_msgs.msg import Joy as ROSJoy # type: ignore[attr-defined] -except ImportError: - ROSJoy = None # type: ignore[assignment, misc] - from plum import dispatch from dimos.types.timestamped import Timestamped @@ -140,42 +134,3 @@ def __eq__(self, other) -> bool: # type: ignore[no-untyped-def] and self.buttons == other.buttons and self.frame_id == other.frame_id ) - - @classmethod - def from_ros_msg(cls, ros_msg: ROSJoy) -> Joy: - """Create a Joy from a ROS sensor_msgs/Joy message. 
- - Args: - ros_msg: ROS Joy message - - Returns: - Joy instance - """ - # Convert timestamp from ROS header - ts = ros_msg.header.stamp.sec + (ros_msg.header.stamp.nanosec / 1_000_000_000) - - return cls( - ts=ts, - frame_id=ros_msg.header.frame_id, - axes=list(ros_msg.axes), - buttons=list(ros_msg.buttons), - ) - - def to_ros_msg(self) -> ROSJoy: - """Convert to a ROS sensor_msgs/Joy message. - - Returns: - ROS Joy message - """ - ros_msg = ROSJoy() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1_000_000_000) - - # Set axes and buttons - ros_msg.axes = self.axes - ros_msg.buttons = self.buttons - - return ros_msg diff --git a/dimos/msgs/sensor_msgs/PointCloud2.py b/dimos/msgs/sensor_msgs/PointCloud2.py index d68c62f51d..3f28edc680 100644 --- a/dimos/msgs/sensor_msgs/PointCloud2.py +++ b/dimos/msgs/sensor_msgs/PointCloud2.py @@ -16,6 +16,7 @@ import functools import struct +from typing import TYPE_CHECKING, Any # Import LCM types from dimos_lcm.sensor_msgs.PointCloud2 import ( @@ -28,24 +29,11 @@ import open3d.core as o3c # type: ignore[import-untyped] from dimos.msgs.geometry_msgs import Transform, Vector3 - -# Import ROS types -try: - from sensor_msgs.msg import ( # type: ignore[attr-defined] - PointCloud2 as ROSPointCloud2, - PointField as ROSPointField, - ) - from std_msgs.msg import Header as ROSHeader # type: ignore[attr-defined] - - ROS_AVAILABLE = True -except ImportError: - ROS_AVAILABLE = False - -from typing import TYPE_CHECKING, Any - from dimos.types.timestamped import Timestamped if TYPE_CHECKING: + from rerun._baseclasses import Archetype + from dimos.msgs.sensor_msgs.CameraInfo import CameraInfo from dimos.msgs.sensor_msgs.Image import Image @@ -618,25 +606,23 @@ def __len__(self) -> int: return 0 return int(self._pcd_tensor.point["positions"].shape[0]) - def to_rerun( # type: ignore[no-untyped-def] 
+ def to_rerun( self, - radii: float = 0.02, + voxel_size: float = 0.05, colormap: str | None = None, colors: list[int] | None = None, - mode: str = "boxes", + mode: str = "points", size: float | None = None, fill_mode: str = "solid", - **kwargs, # type: ignore[no-untyped-def] - ): # type: ignore[no-untyped-def] - import rerun as rr - - """Convert to Rerun Points3D or Boxes3D archetype. + **kwargs: object, + ) -> Archetype: + """Convert to Rerun archetype for visualization. Args: - radii: Point radius for visualization (only for mode="points") + voxel_size: size for visualization colormap: Optional colormap name (e.g., "turbo", "viridis") to color by height colors: Optional RGB color [r, g, b] for all points (0-255) - mode: Visualization mode - "points" for spheres, "boxes" for cubes (default) + mode: "points" for raw points, "boxes" for cubes (default), or "spheres" for sized spheres size: Box size for mode="boxes" (e.g., voxel_size). Defaults to radii*2. fill_mode: Fill mode for boxes - "solid", "majorwireframe", or "densewireframe" **kwargs: Additional args (ignored for compatibility) @@ -644,14 +630,17 @@ def to_rerun( # type: ignore[no-untyped-def] Returns: rr.Points3D or rr.Boxes3D archetype for logging to Rerun """ + import rerun as rr + points, _ = self.as_numpy() if len(points) == 0: - return rr.Points3D([]) if mode == "points" else rr.Boxes3D(centers=[]) + return rr.Points3D([]) if mode != "boxes" else rr.Boxes3D(centers=[]) + if colors is None and colormap is None: + colormap = "turbo" # Default colormap if no colors provided # Determine colors point_colors = None if colormap is not None: - # Color by height (z-coordinate) z = points[:, 2] z_norm = (z - z.min()) / (z.max() - z.min() + 1e-8) cmap = _get_matplotlib_cmap(colormap) @@ -659,10 +648,19 @@ def to_rerun( # type: ignore[no-untyped-def] elif colors is not None: point_colors = colors - if mode == "boxes": - # Use boxes for voxel visualization - box_size = size if size is not None else radii * 2 + 
if mode == "points": + return rr.Points3D( + positions=points, + colors=point_colors, + ) + elif mode == "boxes": + box_size = size if size is not None else voxel_size half = box_size / 2 + # Snap points to voxel grid centers so boxes tile properly + points = np.floor(points / box_size) * box_size + half + points, unique_idx = np.unique(points, axis=0, return_index=True) + if point_colors is not None and isinstance(point_colors, np.ndarray): + point_colors = point_colors[unique_idx] return rr.Boxes3D( centers=points, half_sizes=[half, half, half], @@ -672,7 +670,7 @@ def to_rerun( # type: ignore[no-untyped-def] else: return rr.Points3D( positions=points, - radii=radii, + radii=voxel_size / 2, colors=point_colors, ) @@ -751,204 +749,3 @@ def filter_by_height( def __repr__(self) -> str: """String representation.""" return f"PointCloud(points={len(self)}, frame_id='{self.frame_id}', ts={self.ts})" - - @classmethod - def from_ros_msg(cls, ros_msg: ROSPointCloud2) -> PointCloud2: - """Convert from ROS sensor_msgs/PointCloud2 message. - - Args: - ros_msg: ROS PointCloud2 message - - Returns: - PointCloud2 instance - """ - if not ROS_AVAILABLE: - raise ImportError("ROS packages not available. 
Cannot convert from ROS message.") - - # Handle empty point cloud - if ros_msg.width == 0 or ros_msg.height == 0: - pc = o3d.geometry.PointCloud() - return cls( - pointcloud=pc, - frame_id=ros_msg.header.frame_id, - ts=ros_msg.header.stamp.sec + ros_msg.header.stamp.nanosec / 1e9, - ) - - # Parse field information to find X, Y, Z offsets - x_offset = y_offset = z_offset = None - for field in ros_msg.fields: - if field.name == "x": - x_offset = field.offset - elif field.name == "y": - y_offset = field.offset - elif field.name == "z": - z_offset = field.offset - - if any(offset is None for offset in [x_offset, y_offset, z_offset]): - raise ValueError("PointCloud2 message missing X, Y, or Z fields") - - # Extract points from binary data using numpy for bulk conversion - num_points = ros_msg.width * ros_msg.height - data = ros_msg.data - point_step = ros_msg.point_step - - # Determine byte order - byte_order = ">" if ros_msg.is_bigendian else "<" - - # Check if we can use fast numpy path (common case: sequential float32 x,y,z) - if ( - x_offset == 0 - and y_offset == 4 - and z_offset == 8 - and point_step >= 12 - and not ros_msg.is_bigendian - ): - # Fast path: direct numpy reshape for tightly packed float32 x,y,z - # This is the most common case for point clouds - if point_step == 12: - # Perfectly packed x,y,z with no padding - points = np.frombuffer(data, dtype=np.float32).reshape(-1, 3) - else: - # Has additional fields after x,y,z, need to extract with stride - dt = np.dtype( - [("x", " 0: # type: ignore[operator] - dt_fields.append(("_pad_x", f"V{x_offset}")) - dt_fields.append(("x", f"{byte_order}f4")) - - # Add padding between x and y if needed - gap_xy = y_offset - x_offset - 4 # type: ignore[operator] - if gap_xy > 0: - dt_fields.append(("_pad_xy", f"V{gap_xy}")) - dt_fields.append(("y", f"{byte_order}f4")) - - # Add padding between y and z if needed - gap_yz = z_offset - y_offset - 4 # type: ignore[operator] - if gap_yz > 0: - dt_fields.append(("_pad_yz", 
f"V{gap_yz}")) - dt_fields.append(("z", f"{byte_order}f4")) - - # Add padding at the end to match point_step - remaining = point_step - z_offset - 4 - if remaining > 0: - dt_fields.append(("_pad_end", f"V{remaining}")) - - dt = np.dtype(dt_fields) - structured = np.frombuffer(data, dtype=dt, count=num_points) - points = np.column_stack((structured["x"], structured["y"], structured["z"])) - - # Filter out NaN and Inf values if not dense - if not ros_msg.is_dense: - mask = np.isfinite(points).all(axis=1) - points = points[mask] # type: ignore[assignment] - - # Create Open3D point cloud - pc = o3d.geometry.PointCloud() - pc.points = o3d.utility.Vector3dVector(points) - - # Extract timestamp - ts = ros_msg.header.stamp.sec + ros_msg.header.stamp.nanosec / 1e9 - - return cls( - pointcloud=pc, - frame_id=ros_msg.header.frame_id, - ts=ts, - ) - - def to_ros_msg(self) -> ROSPointCloud2: - """Convert to ROS sensor_msgs/PointCloud2 message. - - Includes RGB color data if the pointcloud has colors. - - Returns: - ROS PointCloud2 message - """ - if not ROS_AVAILABLE: - raise ImportError("ROS packages not available. 
Cannot convert to ROS message.") - - ros_msg = ROSPointCloud2() # type: ignore[no-untyped-call] - - # Set header - ros_msg.header = ROSHeader() # type: ignore[no-untyped-call] - ros_msg.header.frame_id = self.frame_id - ros_msg.header.stamp.sec = int(self.ts) - ros_msg.header.stamp.nanosec = int((self.ts - int(self.ts)) * 1e9) - - points, _ = self.as_numpy() - - if len(points) == 0: - # Empty point cloud - ros_msg.height = 0 - ros_msg.width = 0 - ros_msg.fields = [] - ros_msg.is_bigendian = False - ros_msg.point_step = 0 - ros_msg.row_step = 0 - ros_msg.data = b"" - ros_msg.is_dense = True - return ros_msg - - # Set dimensions - ros_msg.height = 1 # Unorganized point cloud - ros_msg.width = len(points) - - # Check if pointcloud has colors - has_colors = self.pointcloud.has_colors() - - if has_colors: - # Include RGB field - pack as XYZRGB - ros_msg.fields = [ - ROSPointField(name="x", offset=0, datatype=ROSPointField.FLOAT32, count=1), # type: ignore[no-untyped-call] - ROSPointField(name="y", offset=4, datatype=ROSPointField.FLOAT32, count=1), # type: ignore[no-untyped-call] - ROSPointField(name="z", offset=8, datatype=ROSPointField.FLOAT32, count=1), # type: ignore[no-untyped-call] - ROSPointField(name="rgb", offset=12, datatype=ROSPointField.UINT32, count=1), # type: ignore[no-untyped-call] - ] - ros_msg.point_step = 16 # 3 floats + 1 uint32 - - # Get colors and convert to packed RGB uint32 - colors = np.asarray(self.pointcloud.colors) # (N, 3) in [0, 1] - colors_uint8 = (colors * 255).astype(np.uint8) - rgb_packed = ( - (colors_uint8[:, 0].astype(np.uint32) << 16) - | (colors_uint8[:, 1].astype(np.uint32) << 8) - | colors_uint8[:, 2].astype(np.uint32) - ) - - # Create structured array with x, y, z, rgb - cloud_data = np.zeros( - len(points), - dtype=[("x", np.float32), ("y", np.float32), ("z", np.float32), ("rgb", np.uint32)], - ) - cloud_data["x"] = points[:, 0] - cloud_data["y"] = points[:, 1] - cloud_data["z"] = points[:, 2] - cloud_data["rgb"] = rgb_packed - 
- ros_msg.data = cloud_data.tobytes() - else: - # No colors - just XYZ - ros_msg.fields = [ - ROSPointField(name="x", offset=0, datatype=ROSPointField.FLOAT32, count=1), # type: ignore[no-untyped-call] - ROSPointField(name="y", offset=4, datatype=ROSPointField.FLOAT32, count=1), # type: ignore[no-untyped-call] - ROSPointField(name="z", offset=8, datatype=ROSPointField.FLOAT32, count=1), # type: ignore[no-untyped-call] - ] - ros_msg.point_step = 12 # 3 floats * 4 bytes each - - ros_msg.data = points.astype(np.float32).tobytes() - - ros_msg.row_step = ros_msg.point_step * ros_msg.width - - # Set properties - ros_msg.is_bigendian = False # Little endian - ros_msg.is_dense = True # No invalid points - - return ros_msg diff --git a/dimos/msgs/sensor_msgs/__init__.py b/dimos/msgs/sensor_msgs/__init__.py index b58dda8db5..7fec2d2793 100644 --- a/dimos/msgs/sensor_msgs/__init__.py +++ b/dimos/msgs/sensor_msgs/__init__.py @@ -1,5 +1,6 @@ from dimos.msgs.sensor_msgs.CameraInfo import CameraInfo from dimos.msgs.sensor_msgs.Image import Image, ImageFormat +from dimos.msgs.sensor_msgs.Imu import Imu from dimos.msgs.sensor_msgs.JointCommand import JointCommand from dimos.msgs.sensor_msgs.JointState import JointState from dimos.msgs.sensor_msgs.Joy import Joy @@ -10,6 +11,7 @@ "CameraInfo", "Image", "ImageFormat", + "Imu", "JointCommand", "JointState", "Joy", diff --git a/dimos/msgs/sensor_msgs/image_impls/AbstractImage.py b/dimos/msgs/sensor_msgs/image_impls/AbstractImage.py index b71c5476fc..80a1bb7cec 100644 --- a/dimos/msgs/sensor_msgs/image_impls/AbstractImage.py +++ b/dimos/msgs/sensor_msgs/image_impls/AbstractImage.py @@ -1,4 +1,4 @@ -# Copyright 2025-2026 Dimensional Inc. +# Copyright 2026 Dimensional Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,262 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import annotations +# Backwards compatibility stub for unpickling old data. +# AbstractImage and ImageFormat were moved to Image. +from dimos.msgs.sensor_msgs.Image import Image as AbstractImage, ImageFormat -from abc import ABC, abstractmethod -import base64 -from enum import Enum -import os -from typing import Any - -import cv2 -import numpy as np -import rerun as rr - -try: - import cupy as cp # type: ignore[import-not-found] - - HAS_CUDA = True -except Exception: # pragma: no cover - optional dependency - cp = None - HAS_CUDA = False - -# NVRTC defaults to C++11; libcu++ in recent CUDA requires at least C++17. -if HAS_CUDA: - try: - import cupy.cuda.compiler as _cupy_compiler # type: ignore[import-not-found] - - if not getattr(_cupy_compiler, "_dimos_force_cxx17", False): - _orig_compile_using_nvrtc = _cupy_compiler.compile_using_nvrtc - - def _compile_using_nvrtc( # type: ignore[no-untyped-def] - source, options=(), *args, **kwargs - ): - filtered = tuple( - opt - for opt in options - if opt not in ("-std=c++11", "--std=c++11", "-std=c++14", "--std=c++14") - ) - if "--std=c++17" not in filtered and "-std=c++17" not in filtered: - filtered = (*filtered, "--std=c++17") - return _orig_compile_using_nvrtc(source, filtered, *args, **kwargs) - - _cupy_compiler.compile_using_nvrtc = _compile_using_nvrtc - _cupy_compiler._dimos_force_cxx17 = True - except Exception: - pass - -# Optional nvImageCodec (preferred GPU codec) -USE_NVIMGCODEC = os.environ.get("USE_NVIMGCODEC", "0") == "1" -NVIMGCODEC_LAST_USED = False -try: # pragma: no cover - optional dependency - if HAS_CUDA and USE_NVIMGCODEC: - from nvidia import nvimgcodec # type: ignore[import-untyped] - - try: - _enc_probe = nvimgcodec.Encoder() - HAS_NVIMGCODEC = True - except Exception: - nvimgcodec = None - HAS_NVIMGCODEC = False - else: - nvimgcodec = None - HAS_NVIMGCODEC = False -except Exception: # pragma: no cover - optional dependency - nvimgcodec = None - HAS_NVIMGCODEC = False - - -class 
ImageFormat(Enum): - BGR = "BGR" - RGB = "RGB" - RGBA = "RGBA" - BGRA = "BGRA" - GRAY = "GRAY" - GRAY16 = "GRAY16" - DEPTH = "DEPTH" - DEPTH16 = "DEPTH16" - - -def _is_cu(x) -> bool: # type: ignore[no-untyped-def] - return HAS_CUDA and cp is not None and isinstance(x, cp.ndarray) - - -def _ascontig(x): # type: ignore[no-untyped-def] - if _is_cu(x): - return x if x.flags["C_CONTIGUOUS"] else cp.ascontiguousarray(x) - return x if x.flags["C_CONTIGUOUS"] else np.ascontiguousarray(x) - - -def _to_cpu(x): # type: ignore[no-untyped-def] - return cp.asnumpy(x) if _is_cu(x) else x - - -def _to_cu(x): # type: ignore[no-untyped-def] - if HAS_CUDA and cp is not None and isinstance(x, np.ndarray): - return cp.asarray(x) - return x - - -def _encode_nvimgcodec_cuda(bgr_cu, quality: int = 80) -> bytes: # type: ignore[no-untyped-def] # pragma: no cover - optional - if not HAS_NVIMGCODEC or nvimgcodec is None: - raise RuntimeError("nvimgcodec not available") - if bgr_cu.ndim != 3 or bgr_cu.shape[2] != 3: - raise RuntimeError("nvimgcodec expects HxWx3 image") - if bgr_cu.dtype != cp.uint8: - raise RuntimeError("nvimgcodec requires uint8 input") - if not bgr_cu.flags["C_CONTIGUOUS"]: - bgr_cu = cp.ascontiguousarray(bgr_cu) - encoder = nvimgcodec.Encoder() - try: - img = nvimgcodec.Image(bgr_cu, nvimgcodec.PixelFormat.BGR) - except Exception: - img = nvimgcodec.Image(cp.asnumpy(bgr_cu), nvimgcodec.PixelFormat.BGR) - if hasattr(nvimgcodec, "EncodeParams"): - params = nvimgcodec.EncodeParams(quality=quality) - bitstreams = encoder.encode([img], [params]) - else: - bitstreams = encoder.encode([img]) - bs0 = bitstreams[0] - if hasattr(bs0, "buf"): - return bytes(bs0.buf) - return bytes(bs0) - - -def format_to_rerun(data, fmt: ImageFormat): # type: ignore[no-untyped-def] - """Convert image data to Rerun archetype based on format. 
- - Args: - data: Image data (numpy array or cupy array on CPU) - fmt: ImageFormat enum value - - Returns: - Rerun archetype (rr.Image or rr.DepthImage) - """ - match fmt: - case ImageFormat.RGB: - return rr.Image(data, color_model="RGB") - case ImageFormat.RGBA: - return rr.Image(data, color_model="RGBA") - case ImageFormat.BGR: - return rr.Image(data, color_model="BGR") - case ImageFormat.BGRA: - return rr.Image(data, color_model="BGRA") - case ImageFormat.GRAY: - return rr.Image(data, color_model="L") - case ImageFormat.GRAY16: - return rr.Image(data, color_model="L") - case ImageFormat.DEPTH: - return rr.DepthImage(data) - case ImageFormat.DEPTH16: - return rr.DepthImage(data) - case _: - raise ValueError(f"Unsupported format for Rerun: {fmt}") - - -class AbstractImage(ABC): - data: Any - format: ImageFormat - frame_id: str - ts: float - - @property - @abstractmethod - def is_cuda(self) -> bool: # pragma: no cover - abstract - ... - - @property - def height(self) -> int: - return int(self.data.shape[0]) - - @property - def width(self) -> int: - return int(self.data.shape[1]) - - @property - def channels(self) -> int: - if getattr(self.data, "ndim", 0) == 2: - return 1 - if getattr(self.data, "ndim", 0) == 3: - return int(self.data.shape[2]) - raise ValueError("Invalid image dimensions") - - @property - def shape(self): # type: ignore[no-untyped-def] - return tuple(self.data.shape) - - @property - def dtype(self): # type: ignore[no-untyped-def] - return self.data.dtype - - @abstractmethod - def to_opencv(self) -> np.ndarray: # type: ignore[type-arg] # pragma: no cover - abstract - ... - - @abstractmethod - def to_rgb(self) -> AbstractImage: # pragma: no cover - abstract - ... - - @abstractmethod - def to_bgr(self) -> AbstractImage: # pragma: no cover - abstract - ... - - @abstractmethod - def to_grayscale(self) -> AbstractImage: # pragma: no cover - abstract - ... 
- - @abstractmethod - def resize( - self, width: int, height: int, interpolation: int = cv2.INTER_LINEAR - ) -> AbstractImage: # pragma: no cover - abstract - ... - - @abstractmethod - def to_rerun(self) -> Any: # pragma: no cover - abstract - ... - - @abstractmethod - def sharpness(self) -> float: # pragma: no cover - abstract - ... - - def copy(self) -> AbstractImage: - return self.__class__( # type: ignore[call-arg] - data=self.data.copy(), format=self.format, frame_id=self.frame_id, ts=self.ts - ) - - def save(self, filepath: str) -> bool: - global NVIMGCODEC_LAST_USED - if self.is_cuda and HAS_NVIMGCODEC and nvimgcodec is not None: - try: - bgr = self.to_bgr() - if _is_cu(bgr.data): - jpeg = _encode_nvimgcodec_cuda(bgr.data) - NVIMGCODEC_LAST_USED = True - with open(filepath, "wb") as f: - f.write(jpeg) - return True - except Exception: - NVIMGCODEC_LAST_USED = False - arr = self.to_opencv() - return cv2.imwrite(filepath, arr) - - def to_base64(self, quality: int = 80) -> str: - global NVIMGCODEC_LAST_USED - if self.is_cuda and HAS_NVIMGCODEC and nvimgcodec is not None: - try: - bgr = self.to_bgr() - if _is_cu(bgr.data): - jpeg = _encode_nvimgcodec_cuda(bgr.data, quality=quality) - NVIMGCODEC_LAST_USED = True - return base64.b64encode(jpeg).decode("utf-8") - except Exception: - NVIMGCODEC_LAST_USED = False - bgr = self.to_bgr() - success, buffer = cv2.imencode( - ".jpg", - _to_cpu(bgr.data), # type: ignore[no-untyped-call] - [int(cv2.IMWRITE_JPEG_QUALITY), int(quality)], - ) - if not success: - raise ValueError("Failed to encode image as JPEG") - return base64.b64encode(buffer.tobytes()).decode("utf-8") +__all__ = ["AbstractImage", "ImageFormat"] diff --git a/dimos/msgs/sensor_msgs/image_impls/CudaImage.py b/dimos/msgs/sensor_msgs/image_impls/CudaImage.py deleted file mode 100644 index cdfa1bf088..0000000000 --- a/dimos/msgs/sensor_msgs/image_impls/CudaImage.py +++ /dev/null @@ -1,960 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import annotations - -from dataclasses import dataclass, field -import time - -import cv2 -import numpy as np - -from dimos.msgs.sensor_msgs.image_impls.AbstractImage import ( - HAS_CUDA, - AbstractImage, - ImageFormat, - _ascontig, - _is_cu, - _to_cpu, -) - -try: - import cupy as cp # type: ignore[import-not-found] - from cupyx.scipy import ( # type: ignore[import-not-found] - ndimage as cndimage, - signal as csignal, - ) -except Exception: # pragma: no cover - cp = None - cndimage = None - csignal = None - - -_CUDA_SRC = r""" -extern "C" { - -__device__ __forceinline__ void rodrigues_R(const float r[3], float R[9]){ - float theta = sqrtf(r[0]*r[0] + r[1]*r[1] + r[2]*r[2]); - if(theta < 1e-8f){ - R[0]=1.f; R[1]=0.f; R[2]=0.f; - R[3]=0.f; R[4]=1.f; R[5]=0.f; - R[6]=0.f; R[7]=0.f; R[8]=1.f; - return; - } - float kx=r[0]/theta, ky=r[1]/theta, kz=r[2]/theta; - float c=cosf(theta), s=sinf(theta), v=1.f-c; - R[0]=kx*kx*v + c; R[1]=kx*ky*v - kz*s; R[2]=kx*kz*v + ky*s; - R[3]=ky*kx*v + kz*s; R[4]=ky*ky*v + c; R[5]=ky*kz*v - kx*s; - R[6]=kz*kx*v - ky*s; R[7]=kz*ky*v + kx*s; R[8]=kz*kz*v + c; -} - -__device__ __forceinline__ void mat3x3_vec3(const float R[9], const float x[3], float y[3]){ - y[0] = R[0]*x[0] + R[1]*x[1] + R[2]*x[2]; - y[1] = R[3]*x[0] + R[4]*x[1] + R[5]*x[2]; - y[2] = R[6]*x[0] + R[7]*x[1] + R[8]*x[2]; -} - -__device__ __forceinline__ void cross_mat(const float v[3], float S[9]){ - S[0]=0.f; 
S[1]=-v[2]; S[2]= v[1]; - S[3]= v[2]; S[4]=0.f; S[5]=-v[0]; - S[6]=-v[1]; S[7]= v[0]; S[8]=0.f; -} - -// Solve a 6x6 system (JTJ * x = JTr) with Gauss-Jordan; JTJ is SPD after damping. -__device__ void solve6_gauss_jordan(float A[36], float b[6], float x[6]){ - float M[6][7]; - #pragma unroll - for(int r=0;r<6;++r){ - #pragma unroll - for(int c=0;c<6;++c) M[r][c] = A[r*6 + c]; - M[r][6] = b[r]; - } - for(int piv=0;piv<6;++piv){ - float invd = 1.f / M[piv][piv]; - for(int c=piv;c<7;++c) M[piv][c] *= invd; - for(int r=0;r<6;++r){ - if(r==piv) continue; - float f = M[r][piv]; - if(fabsf(f) < 1e-20f) continue; - for(int c=piv;c<7;++c) M[r][c] -= f * M[piv][c]; - } - } - #pragma unroll - for(int r=0;r<6;++r) x[r] = M[r][6]; -} - -// One block solves one pose; dynamic shared memory holds per-thread accumulators. -__global__ void pnp_gn_batch( - const float* __restrict__ obj, // (B,N,3) - const float* __restrict__ img, // (B,N,2) - const int N, - const float* __restrict__ intr, // (B,4) -> fx, fy, cx, cy - const int max_iters, - const float damping, - float* __restrict__ rvec_out, // (B,3) - float* __restrict__ tvec_out // (B,3) -){ - if(N <= 0) return; - int b = blockIdx.x; - const float* obj_b = obj + b * N * 3; - const float* img_b = img + b * N * 2; - float fx = intr[4*b + 0]; - float fy = intr[4*b + 1]; - float cx = intr[4*b + 2]; - float cy = intr[4*b + 3]; - - __shared__ float s_R[9]; - __shared__ float s_rvec[3]; - __shared__ float s_tvec[3]; - __shared__ float s_JTJ[36]; - __shared__ float s_JTr[6]; - __shared__ int s_done; - - extern __shared__ float scratch[]; - float* sh_JTJ = scratch; - float* sh_JTr = scratch + 36 * blockDim.x; - - if(threadIdx.x==0){ - s_rvec[0]=0.f; s_rvec[1]=0.f; s_rvec[2]=0.f; - s_tvec[0]=0.f; s_tvec[1]=0.f; s_tvec[2]=2.f; - } - __syncthreads(); - - for(int it=0; itmatrix) for NumPy/CuPy arrays.""" - - if cp is not None and ( - isinstance(x, cp.ndarray) or getattr(x, "__cuda_array_interface__", None) is not None - ): - xp = cp - else: - 
xp = np - arr = xp.asarray(x, dtype=xp.float64) - - if not inverse and arr.ndim >= 2 and arr.shape[-2:] == (3, 3): - inverse = True - - if not inverse: - vec = arr - if vec.ndim >= 2 and vec.shape[-1] == 1: - vec = vec[..., 0] - if vec.shape[-1] != 3: - raise ValueError("Rodrigues expects vectors of shape (..., 3)") - orig_shape = vec.shape[:-1] - vec = vec.reshape(-1, 3) - n = vec.shape[0] - theta = xp.linalg.norm(vec, axis=1) - small = theta < 1e-12 - - def _skew(v): # type: ignore[no-untyped-def] - vx, vy, vz = v[:, 0], v[:, 1], v[:, 2] - O = xp.zeros_like(vx) - return xp.stack( - [ - xp.stack([O, -vz, vy], axis=-1), - xp.stack([vz, O, -vx], axis=-1), - xp.stack([-vy, vx, O], axis=-1), - ], - axis=-2, - ) - - K = _skew(vec) # type: ignore[no-untyped-call] - theta2 = theta * theta - theta4 = theta2 * theta2 - theta_safe = xp.where(small, 1.0, theta) - theta2_safe = xp.where(small, 1.0, theta2) - A = xp.where(small, 1.0 - theta2 / 6.0 + theta4 / 120.0, xp.sin(theta) / theta_safe)[ - :, None, None - ] - B = xp.where( - small, - 0.5 - theta2 / 24.0 + theta4 / 720.0, - (1.0 - xp.cos(theta)) / theta2_safe, - )[:, None, None] - I = xp.eye(3, dtype=arr.dtype) - I = I[None, :, :] if n == 1 else xp.broadcast_to(I, (n, 3, 3)) - KK = xp.matmul(K, K) - out = I + A * K + B * KK - return out.reshape((*orig_shape, 3, 3)) if orig_shape else out[0] - - mat = arr - if mat.shape[-2:] != (3, 3): - raise ValueError("Rodrigues expects rotation matrices of shape (..., 3, 3)") - orig_shape = mat.shape[:-2] - mat = mat.reshape(-1, 3, 3) - trace = xp.trace(mat, axis1=1, axis2=2) - trace = xp.clip((trace - 1.0) / 2.0, -1.0, 1.0) - theta = xp.arccos(trace) - v = xp.stack( - [ - mat[:, 2, 1] - mat[:, 1, 2], - mat[:, 0, 2] - mat[:, 2, 0], - mat[:, 1, 0] - mat[:, 0, 1], - ], - axis=1, - ) - norm_v = xp.linalg.norm(v, axis=1) - small = theta < 1e-7 - eps = 1e-8 - norm_safe = xp.where(norm_v < eps, 1.0, norm_v) - r_general = theta[:, None] * v / norm_safe[:, None] - r_small = 0.5 * v - r = 
xp.where(small[:, None], r_small, r_general) - pi_mask = xp.abs(theta - xp.pi) < 1e-4 - if np.any(pi_mask) if xp is np else bool(cp.asnumpy(pi_mask).any()): - diag = xp.diagonal(mat, axis1=1, axis2=2) - axis_candidates = xp.clip((diag + 1.0) / 2.0, 0.0, None) - axis = xp.sqrt(axis_candidates) - signs = xp.sign(v) - axis = xp.where(signs == 0, axis, xp.copysign(axis, signs)) - axis_norm = xp.linalg.norm(axis, axis=1) - axis_norm = xp.where(axis_norm < eps, 1.0, axis_norm) - axis = axis / axis_norm[:, None] - r_pi = theta[:, None] * axis - r = xp.where(pi_mask[:, None], r_pi, r) - out = r.reshape((*orig_shape, 3)) if orig_shape else r[0] - return out - - -def _undistort_points_cuda( - img_px: cp.ndarray, K: cp.ndarray, dist: cp.ndarray, iterations: int = 8 -) -> cp.ndarray: - """Iteratively undistort pixel coordinates on device (Brown–Conrady). - - Returns pixel coordinates after undistortion (fx*xu+cx, fy*yu+cy). - """ - N = img_px.shape[0] - ones = cp.ones((N, 1), dtype=cp.float64) - uv1 = cp.concatenate([img_px.astype(cp.float64), ones], axis=1) - Kinv = cp.linalg.inv(K) - xdyd1 = uv1 @ Kinv.T - xd = xdyd1[:, 0] - yd = xdyd1[:, 1] - xu = xd.copy() - yu = yd.copy() - k1 = dist[0] - k2 = dist[1] if dist.size > 1 else 0.0 - p1 = dist[2] if dist.size > 2 else 0.0 - p2 = dist[3] if dist.size > 3 else 0.0 - k3 = dist[4] if dist.size > 4 else 0.0 - for _ in range(iterations): - r2 = xu * xu + yu * yu - r4 = r2 * r2 - r6 = r4 * r2 - radial = 1.0 + k1 * r2 + k2 * r4 + k3 * r6 - delta_x = 2.0 * p1 * xu * yu + p2 * (r2 + 2.0 * xu * xu) - delta_y = p1 * (r2 + 2.0 * yu * yu) + 2.0 * p2 * xu * yu - xu = (xd - delta_x) / radial - yu = (yd - delta_y) / radial - fx, fy, cx, cy = K[0, 0], K[1, 1], K[0, 2], K[1, 2] - return cp.stack([fx * xu + cx, fy * yu + cy], axis=1) - - -@dataclass -class CudaImage(AbstractImage): - data: any # type: ignore[valid-type] # cupy.ndarray - format: ImageFormat = field(default=ImageFormat.BGR) - frame_id: str = field(default="") - ts: float = 
field(default_factory=time.time) - - def __post_init__(self): # type: ignore[no-untyped-def] - if not HAS_CUDA or cp is None: - raise RuntimeError("CuPy/CUDA not available") - if not _is_cu(self.data): - # Accept NumPy arrays and move to device automatically - try: - self.data = cp.asarray(self.data) - except Exception as e: - raise ValueError("CudaImage requires a CuPy array") from e - if self.data.ndim < 2: # type: ignore[attr-defined] - raise ValueError("Image data must be at least 2D") - self.data = _ascontig(self.data) # type: ignore[no-untyped-call] - - @property - def is_cuda(self) -> bool: - return True - - def to_opencv(self) -> np.ndarray: # type: ignore[type-arg] - if self.format in (ImageFormat.BGR, ImageFormat.RGB, ImageFormat.RGBA, ImageFormat.BGRA): - return _to_cpu(self.to_bgr().data) # type: ignore[no-any-return, no-untyped-call] - return _to_cpu(self.data) # type: ignore[no-any-return, no-untyped-call] - - def to_rgb(self) -> CudaImage: - if self.format == ImageFormat.RGB: - return self.copy() # type: ignore[return-value] - if self.format == ImageFormat.BGR: - return CudaImage(_bgr_to_rgb_cuda(self.data), ImageFormat.RGB, self.frame_id, self.ts) # type: ignore[no-untyped-call] - if self.format == ImageFormat.RGBA: - return self.copy() # type: ignore[return-value] - if self.format == ImageFormat.BGRA: - return CudaImage( - _bgra_to_rgba_cuda(self.data), # type: ignore[no-untyped-call] - ImageFormat.RGBA, - self.frame_id, - self.ts, - ) - if self.format == ImageFormat.GRAY: - return CudaImage(_gray_to_rgb_cuda(self.data), ImageFormat.RGB, self.frame_id, self.ts) # type: ignore[no-untyped-call] - if self.format in (ImageFormat.GRAY16, ImageFormat.DEPTH16): - gray8 = (self.data.astype(cp.float32) / 256.0).clip(0, 255).astype(cp.uint8) # type: ignore[attr-defined] - return CudaImage(_gray_to_rgb_cuda(gray8), ImageFormat.RGB, self.frame_id, self.ts) # type: ignore[no-untyped-call] - return self.copy() # type: ignore[return-value] - - def to_bgr(self) -> 
CudaImage: - if self.format == ImageFormat.BGR: - return self.copy() # type: ignore[return-value] - if self.format == ImageFormat.RGB: - return CudaImage(_rgb_to_bgr_cuda(self.data), ImageFormat.BGR, self.frame_id, self.ts) # type: ignore[no-untyped-call] - if self.format == ImageFormat.RGBA: - return CudaImage( - _rgba_to_bgra_cuda(self.data)[..., :3], # type: ignore[no-untyped-call] - ImageFormat.BGR, - self.frame_id, - self.ts, - ) - if self.format == ImageFormat.BGRA: - return CudaImage(self.data[..., :3], ImageFormat.BGR, self.frame_id, self.ts) # type: ignore[index] - if self.format in (ImageFormat.GRAY, ImageFormat.DEPTH): - return CudaImage( - _rgb_to_bgr_cuda(_gray_to_rgb_cuda(self.data)), # type: ignore[no-untyped-call] - ImageFormat.BGR, - self.frame_id, - self.ts, - ) - if self.format in (ImageFormat.GRAY16, ImageFormat.DEPTH16): - gray8 = (self.data.astype(cp.float32) / 256.0).clip(0, 255).astype(cp.uint8) # type: ignore[attr-defined] - return CudaImage( - _rgb_to_bgr_cuda(_gray_to_rgb_cuda(gray8)), # type: ignore[no-untyped-call] - ImageFormat.BGR, - self.frame_id, - self.ts, - ) - return self.copy() # type: ignore[return-value] - - def to_grayscale(self) -> CudaImage: - if self.format in (ImageFormat.GRAY, ImageFormat.GRAY16, ImageFormat.DEPTH): - return self.copy() # type: ignore[return-value] - if self.format == ImageFormat.BGR: - return CudaImage( - _rgb_to_gray_cuda(_bgr_to_rgb_cuda(self.data)), # type: ignore[no-untyped-call] - ImageFormat.GRAY, - self.frame_id, - self.ts, - ) - if self.format == ImageFormat.RGB: - return CudaImage(_rgb_to_gray_cuda(self.data), ImageFormat.GRAY, self.frame_id, self.ts) # type: ignore[no-untyped-call] - if self.format in (ImageFormat.RGBA, ImageFormat.BGRA): - rgb = ( - self.data[..., :3] # type: ignore[index] - if self.format == ImageFormat.RGBA - else _bgra_to_rgba_cuda(self.data)[..., :3] # type: ignore[no-untyped-call] - ) - return CudaImage(_rgb_to_gray_cuda(rgb), ImageFormat.GRAY, self.frame_id, self.ts) # 
type: ignore[no-untyped-call] - raise ValueError(f"Unsupported format: {self.format}") - - def resize(self, width: int, height: int, interpolation: int = cv2.INTER_LINEAR) -> CudaImage: - return CudaImage( - _resize_bilinear_hwc_cuda(self.data, height, width), self.format, self.frame_id, self.ts - ) - - def to_rerun(self): # type: ignore[no-untyped-def] - """Convert to rerun Image format. - - Transfers data from GPU to CPU and converts to appropriate format. - - Returns: - rr.Image or rr.DepthImage archetype for logging to rerun - """ - from dimos.msgs.sensor_msgs.image_impls.AbstractImage import format_to_rerun - - # Transfer to CPU - cpu_data = cp.asnumpy(self.data) - return format_to_rerun(cpu_data, self.format) - - def crop(self, x: int, y: int, width: int, height: int) -> CudaImage: - """Crop the image to the specified region. - - Args: - x: Starting x coordinate (left edge) - y: Starting y coordinate (top edge) - width: Width of the cropped region - height: Height of the cropped region - - Returns: - A new CudaImage containing the cropped region - """ - # Get current image dimensions - img_height, img_width = self.data.shape[:2] # type: ignore[attr-defined] - - # Clamp the crop region to image bounds - x = max(0, min(x, img_width)) - y = max(0, min(y, img_height)) - x_end = min(x + width, img_width) - y_end = min(y + height, img_height) - - # Perform the crop using array slicing - if self.data.ndim == 2: # type: ignore[attr-defined] - # Grayscale image - cropped_data = self.data[y:y_end, x:x_end] # type: ignore[index] - else: - # Color image (HxWxC) - cropped_data = self.data[y:y_end, x:x_end, :] # type: ignore[index] - - # Return a new CudaImage with the cropped data - return CudaImage(cropped_data, self.format, self.frame_id, self.ts) - - def sharpness(self) -> float: - if cp is None: - return 0.0 - try: - from cupyx.scipy import ndimage as cndimage - - gray = self.to_grayscale().data.astype(cp.float32) # type: ignore[attr-defined] - deriv5 = cp.asarray([1, 
2, 0, -2, -1], dtype=cp.float32) - smooth5 = cp.asarray([1, 4, 6, 4, 1], dtype=cp.float32) - gx = cndimage.convolve1d(gray, deriv5, axis=1, mode="reflect") - gx = cndimage.convolve1d(gx, smooth5, axis=0, mode="reflect") - gy = cndimage.convolve1d(gray, deriv5, axis=0, mode="reflect") - gy = cndimage.convolve1d(gy, smooth5, axis=1, mode="reflect") - magnitude = cp.hypot(gx, gy) - mean_mag = float(cp.asnumpy(magnitude.mean())) - except Exception: - raise - if mean_mag <= 0: - return 0.0 - return float(np.clip((np.log10(mean_mag + 1) - 1.7) / 2.0, 0.0, 1.0)) - - # CUDA tracker (template NCC with small scale pyramid) - @dataclass - class BBox: - x: int - y: int - w: int - h: int - - def create_csrt_tracker(self, bbox: BBox): # type: ignore[no-untyped-def] - if csignal is None: - raise RuntimeError("cupyx.scipy.signal not available for CUDA tracker") - x, y, w, h = map(int, bbox) # type: ignore[call-overload] - gray = self.to_grayscale().data.astype(cp.float32) # type: ignore[attr-defined] - tmpl = gray[y : y + h, x : x + w] - if tmpl.size == 0: - raise ValueError("Invalid bbox for CUDA tracker") - return _CudaTemplateTracker(tmpl, x0=x, y0=y) - - def csrt_update(self, tracker) -> tuple[bool, tuple[int, int, int, int]]: # type: ignore[no-untyped-def] - if not isinstance(tracker, _CudaTemplateTracker): - raise TypeError("Expected CUDA tracker instance") - gray = self.to_grayscale().data.astype(cp.float32) # type: ignore[attr-defined] - x, y, w, h = tracker.update(gray) - return True, (int(x), int(y), int(w), int(h)) - - # PnP – Gauss–Newton (no distortion in batch), iterative per-instance - def solve_pnp( - self, - object_points: np.ndarray, # type: ignore[type-arg] - image_points: np.ndarray, # type: ignore[type-arg] - camera_matrix: np.ndarray, # type: ignore[type-arg] - dist_coeffs: np.ndarray | None = None, # type: ignore[type-arg] - flags: int = cv2.SOLVEPNP_ITERATIVE, - ) -> tuple[bool, np.ndarray, np.ndarray]: # type: ignore[type-arg] - if not HAS_CUDA or cp is 
None or (dist_coeffs is not None and np.any(dist_coeffs)): - obj = np.asarray(object_points, dtype=np.float32).reshape(-1, 3) - img = np.asarray(image_points, dtype=np.float32).reshape(-1, 2) - K = np.asarray(camera_matrix, dtype=np.float64) - dist = None if dist_coeffs is None else np.asarray(dist_coeffs, dtype=np.float64) - ok, rvec, tvec = cv2.solvePnP(obj, img, K, dist, flags=flags) # type: ignore[arg-type] - return bool(ok), rvec.astype(np.float64), tvec.astype(np.float64) - - rvec, tvec = _solve_pnp_cuda_kernel(object_points, image_points, camera_matrix) - ok = np.isfinite(rvec).all() and np.isfinite(tvec).all() - return ok, rvec, tvec - - def solve_pnp_batch( - self, - object_points_batch: np.ndarray, # type: ignore[type-arg] - image_points_batch: np.ndarray, # type: ignore[type-arg] - camera_matrix: np.ndarray, # type: ignore[type-arg] - dist_coeffs: np.ndarray | None = None, # type: ignore[type-arg] - iterations: int = 15, - damping: float = 1e-6, - ) -> tuple[np.ndarray, np.ndarray]: # type: ignore[type-arg] - """Batched PnP (each block = one instance).""" - if not HAS_CUDA or cp is None or (dist_coeffs is not None and np.any(dist_coeffs)): - obj = np.asarray(object_points_batch, dtype=np.float32) - img = np.asarray(image_points_batch, dtype=np.float32) - if obj.ndim != 3 or img.ndim != 3 or obj.shape[:2] != img.shape[:2]: - raise ValueError( - "Batched object/image arrays must be shaped (B,N,...) 
with matching sizes" - ) - K = np.asarray(camera_matrix, dtype=np.float64) - dist = None if dist_coeffs is None else np.asarray(dist_coeffs, dtype=np.float64) - B = obj.shape[0] - r_list = np.empty((B, 3, 1), dtype=np.float64) - t_list = np.empty((B, 3, 1), dtype=np.float64) - for b in range(B): - K_b = K if K.ndim == 2 else K[b] - dist_b = None - if dist is not None: - if dist.ndim == 1: - dist_b = dist - elif dist.ndim == 2: - dist_b = dist[b] - else: - raise ValueError("dist_coeffs must be 1D or batched 2D") - ok, rvec, tvec = cv2.solvePnP( - obj[b], - img[b], - K_b, - dist_b, # type: ignore[arg-type] - flags=cv2.SOLVEPNP_ITERATIVE, - ) - if not ok: - raise RuntimeError(f"cv2.solvePnP failed for batch index {b}") - r_list[b] = rvec.astype(np.float64) - t_list[b] = tvec.astype(np.float64) - return r_list, t_list - - return _solve_pnp_cuda_kernel( # type: ignore[no-any-return] - object_points_batch, - image_points_batch, - camera_matrix, - iterations=iterations, - damping=damping, - ) - - def solve_pnp_ransac( - self, - object_points: np.ndarray, # type: ignore[type-arg] - image_points: np.ndarray, # type: ignore[type-arg] - camera_matrix: np.ndarray, # type: ignore[type-arg] - dist_coeffs: np.ndarray | None = None, # type: ignore[type-arg] - iterations_count: int = 100, - reprojection_error: float = 3.0, - confidence: float = 0.99, - min_sample: int = 6, - ) -> tuple[bool, np.ndarray, np.ndarray, np.ndarray]: # type: ignore[type-arg] - """RANSAC with CUDA PnP solver.""" - if not HAS_CUDA or cp is None or (dist_coeffs is not None and np.any(dist_coeffs)): - obj = np.asarray(object_points, dtype=np.float32) - img = np.asarray(image_points, dtype=np.float32) - K = np.asarray(camera_matrix, dtype=np.float64) - dist = None if dist_coeffs is None else np.asarray(dist_coeffs, dtype=np.float64) - ok, rvec, tvec, mask = cv2.solvePnPRansac( - obj, - img, - K, - dist, # type: ignore[arg-type] - iterationsCount=int(iterations_count), - 
reprojectionError=float(reprojection_error), - confidence=float(confidence), - flags=cv2.SOLVEPNP_ITERATIVE, - ) - mask_flat = np.zeros((obj.shape[0],), dtype=np.uint8) - if mask is not None and len(mask) > 0: - mask_flat[mask.flatten()] = 1 - return bool(ok), rvec.astype(np.float64), tvec.astype(np.float64), mask_flat - - obj = cp.asarray(object_points, dtype=cp.float32) - img = cp.asarray(image_points, dtype=cp.float32) - camera_matrix_np = np.asarray(_to_cpu(camera_matrix), dtype=np.float32) # type: ignore[no-untyped-call] - fx = float(camera_matrix_np[0, 0]) - fy = float(camera_matrix_np[1, 1]) - cx = float(camera_matrix_np[0, 2]) - cy = float(camera_matrix_np[1, 2]) - N = obj.shape[0] - rng = cp.random.RandomState(1234) - best_inliers = -1 - _best_r, _best_t, best_mask = None, None, None - - for _ in range(iterations_count): - idx = rng.choice(N, size=min_sample, replace=False) - rvec, tvec = _solve_pnp_cuda_kernel(obj[idx], img[idx], camera_matrix_np) - R = _rodrigues(cp.asarray(rvec.flatten())) - Xc = obj @ R.T + cp.asarray(tvec.flatten()) - invZ = 1.0 / cp.clip(Xc[:, 2], 1e-6, None) - u_hat = fx * Xc[:, 0] * invZ + cx - v_hat = fy * Xc[:, 1] * invZ + cy - err = cp.sqrt((img[:, 0] - u_hat) ** 2 + (img[:, 1] - v_hat) ** 2) - mask = (err < reprojection_error).astype(cp.uint8) - inliers = int(mask.sum()) - if inliers > best_inliers: - best_inliers, _best_r, _best_t, best_mask = inliers, rvec, tvec, mask - if inliers >= int(confidence * N): - break - - if best_inliers <= 0: - return False, np.zeros((3, 1)), np.zeros((3, 1)), np.zeros((N,), dtype=np.uint8) - in_idx = cp.nonzero(best_mask)[0] - rvec, tvec = _solve_pnp_cuda_kernel(obj[in_idx], img[in_idx], camera_matrix_np) - return True, rvec, tvec, cp.asnumpy(best_mask) - - -class _CudaTemplateTracker: - def __init__( - self, - tmpl: cp.ndarray, - scale_step: float = 1.05, - lr: float = 0.1, - search_radius: int = 16, - x0: int = 0, - y0: int = 0, - ) -> None: - self.tmpl = tmpl.astype(cp.float32) - self.h, 
self.w = int(tmpl.shape[0]), int(tmpl.shape[1]) - self.scale_step = float(scale_step) - self.lr = float(lr) - self.search_radius = int(search_radius) - # Cosine window - wy = cp.hanning(self.h).astype(cp.float32) - wx = cp.hanning(self.w).astype(cp.float32) - self.window = wy[:, None] * wx[None, :] - self.tmpl = self.tmpl * self.window - self.y = int(y0) - self.x = int(x0) - - def update(self, gray: cp.ndarray): # type: ignore[no-untyped-def] - H, W = int(gray.shape[0]), int(gray.shape[1]) - r = self.search_radius - x0 = max(0, self.x - r) - y0 = max(0, self.y - r) - x1 = min(W, self.x + self.w + r) - y1 = min(H, self.y + self.h + r) - search = gray[y0:y1, x0:x1] - if search.shape[0] < self.h or search.shape[1] < self.w: - search = gray - x0 = y0 = 0 - best = (self.x, self.y, self.w, self.h) - best_score = -1e9 - for s in (1.0 / self.scale_step, 1.0, self.scale_step): - th = max(1, round(self.h * s)) - tw = max(1, round(self.w * s)) - tmpl_s = _resize_bilinear_hwc_cuda(self.tmpl, th, tw) - if tmpl_s.ndim == 3: - tmpl_s = tmpl_s[..., 0] - tmpl_s = tmpl_s.astype(cp.float32) - tmpl_zm = tmpl_s - tmpl_s.mean() - tmpl_energy = cp.sqrt(cp.sum(tmpl_zm * tmpl_zm)) + 1e-6 - # NCC via correlate2d and local std - ones = cp.ones((th, tw), dtype=cp.float32) - num = csignal.correlate2d(search, tmpl_zm, mode="valid") - sumS = csignal.correlate2d(search, ones, mode="valid") - sumS2 = csignal.correlate2d(search * search, ones, mode="valid") - n = float(th * tw) - meanS = sumS / n - varS = cp.clip(sumS2 - n * meanS * meanS, 0.0, None) - stdS = cp.sqrt(varS) + 1e-6 - res = num / (stdS * tmpl_energy) - ij = cp.unravel_index(cp.argmax(res), res.shape) - dy, dx = int(ij[0].get()), int(ij[1].get()) - score = float(res[ij].get()) - if score > best_score: - best_score = score - best = (x0 + dx, y0 + dy, tw, th) - x, y, w, h = best - patch = gray[y : y + h, x : x + w] - if patch.shape[0] != self.h or patch.shape[1] != self.w: - patch = _resize_bilinear_hwc_cuda(patch, self.h, self.w) - if 
patch.ndim == 3: - patch = patch[..., 0] - patch = patch.astype(cp.float32) * self.window - self.tmpl = (1.0 - self.lr) * self.tmpl + self.lr * patch - self.x, self.y, self.w, self.h = x, y, w, h - return x, y, w, h diff --git a/dimos/msgs/sensor_msgs/image_impls/NumpyImage.py b/dimos/msgs/sensor_msgs/image_impls/NumpyImage.py deleted file mode 100644 index 250b951371..0000000000 --- a/dimos/msgs/sensor_msgs/image_impls/NumpyImage.py +++ /dev/null @@ -1,249 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import annotations - -from dataclasses import dataclass, field -import time - -import cv2 -import numpy as np - -from dimos.msgs.sensor_msgs.image_impls.AbstractImage import ( - AbstractImage, - ImageFormat, -) - - -@dataclass -class NumpyImage(AbstractImage): - data: np.ndarray # type: ignore[type-arg] - format: ImageFormat = field(default=ImageFormat.BGR) - frame_id: str = field(default="") - ts: float = field(default_factory=time.time) - - def __post_init__(self): # type: ignore[no-untyped-def] - if not isinstance(self.data, np.ndarray) or self.data.ndim < 2: - raise ValueError("NumpyImage requires a 2D/3D NumPy array") - - @property - def is_cuda(self) -> bool: - return False - - def to_opencv(self) -> np.ndarray: # type: ignore[type-arg] - arr = self.data - if self.format == ImageFormat.BGR: - return arr - if self.format == ImageFormat.RGB: - return cv2.cvtColor(arr, cv2.COLOR_RGB2BGR) - if self.format == ImageFormat.RGBA: - return cv2.cvtColor(arr, cv2.COLOR_RGBA2BGR) - if self.format == ImageFormat.BGRA: - return cv2.cvtColor(arr, cv2.COLOR_BGRA2BGR) - if self.format in ( - ImageFormat.GRAY, - ImageFormat.GRAY16, - ImageFormat.DEPTH, - ImageFormat.DEPTH16, - ): - return arr - raise ValueError(f"Unsupported format: {self.format}") - - def to_rgb(self) -> NumpyImage: - if self.format == ImageFormat.RGB: - return self.copy() # type: ignore[return-value] - arr = self.data - if self.format == ImageFormat.BGR: - return NumpyImage( - cv2.cvtColor(arr, cv2.COLOR_BGR2RGB), ImageFormat.RGB, self.frame_id, self.ts - ) - if self.format == ImageFormat.RGBA: - return self.copy() # type: ignore[return-value] # RGBA contains RGB + alpha - if self.format == ImageFormat.BGRA: - rgba = cv2.cvtColor(arr, cv2.COLOR_BGRA2RGBA) - return NumpyImage(rgba, ImageFormat.RGBA, self.frame_id, self.ts) - if self.format in (ImageFormat.GRAY, ImageFormat.GRAY16, ImageFormat.DEPTH16): - gray8 = (arr / 256).astype(np.uint8) if self.format != ImageFormat.GRAY else arr - rgb 
= cv2.cvtColor(gray8, cv2.COLOR_GRAY2RGB) - return NumpyImage(rgb, ImageFormat.RGB, self.frame_id, self.ts) - return self.copy() # type: ignore[return-value] - - def to_bgr(self) -> NumpyImage: - if self.format == ImageFormat.BGR: - return self.copy() # type: ignore[return-value] - arr = self.data - if self.format == ImageFormat.RGB: - return NumpyImage( - cv2.cvtColor(arr, cv2.COLOR_RGB2BGR), ImageFormat.BGR, self.frame_id, self.ts - ) - if self.format == ImageFormat.RGBA: - return NumpyImage( - cv2.cvtColor(arr, cv2.COLOR_RGBA2BGR), ImageFormat.BGR, self.frame_id, self.ts - ) - if self.format == ImageFormat.BGRA: - return NumpyImage( - cv2.cvtColor(arr, cv2.COLOR_BGRA2BGR), ImageFormat.BGR, self.frame_id, self.ts - ) - if self.format in (ImageFormat.GRAY, ImageFormat.GRAY16, ImageFormat.DEPTH16): - gray8 = (arr / 256).astype(np.uint8) if self.format != ImageFormat.GRAY else arr - return NumpyImage( - cv2.cvtColor(gray8, cv2.COLOR_GRAY2BGR), ImageFormat.BGR, self.frame_id, self.ts - ) - return self.copy() # type: ignore[return-value] - - def to_grayscale(self) -> NumpyImage: - if self.format in (ImageFormat.GRAY, ImageFormat.GRAY16, ImageFormat.DEPTH): - return self.copy() # type: ignore[return-value] - if self.format == ImageFormat.BGR: - return NumpyImage( - cv2.cvtColor(self.data, cv2.COLOR_BGR2GRAY), - ImageFormat.GRAY, - self.frame_id, - self.ts, - ) - if self.format == ImageFormat.RGB: - return NumpyImage( - cv2.cvtColor(self.data, cv2.COLOR_RGB2GRAY), - ImageFormat.GRAY, - self.frame_id, - self.ts, - ) - if self.format in (ImageFormat.RGBA, ImageFormat.BGRA): - code = cv2.COLOR_RGBA2GRAY if self.format == ImageFormat.RGBA else cv2.COLOR_BGRA2GRAY - return NumpyImage( - cv2.cvtColor(self.data, code), ImageFormat.GRAY, self.frame_id, self.ts - ) - raise ValueError(f"Unsupported format: {self.format}") - - def to_rerun(self): # type: ignore[no-untyped-def] - """Convert to rerun Image format.""" - from dimos.msgs.sensor_msgs.image_impls.AbstractImage import 
format_to_rerun - - return format_to_rerun(self.data, self.format) - - def resize(self, width: int, height: int, interpolation: int = cv2.INTER_LINEAR) -> NumpyImage: - return NumpyImage( - cv2.resize(self.data, (width, height), interpolation=interpolation), - self.format, - self.frame_id, - self.ts, - ) - - def crop(self, x: int, y: int, width: int, height: int) -> NumpyImage: - """Crop the image to the specified region. - - Args: - x: Starting x coordinate (left edge) - y: Starting y coordinate (top edge) - width: Width of the cropped region - height: Height of the cropped region - - Returns: - A new NumpyImage containing the cropped region - """ - # Get current image dimensions - img_height, img_width = self.data.shape[:2] - - # Clamp the crop region to image bounds - x = max(0, min(x, img_width)) - y = max(0, min(y, img_height)) - x_end = min(x + width, img_width) - y_end = min(y + height, img_height) - - # Perform the crop using array slicing - if self.data.ndim == 2: - # Grayscale image - cropped_data = self.data[y:y_end, x:x_end] - else: - # Color image (HxWxC) - cropped_data = self.data[y:y_end, x:x_end, :] - - # Return a new NumpyImage with the cropped data - return NumpyImage(cropped_data, self.format, self.frame_id, self.ts) - - def sharpness(self) -> float: - gray = self.to_grayscale() - sx = cv2.Sobel(gray.data, cv2.CV_32F, 1, 0, ksize=5) - sy = cv2.Sobel(gray.data, cv2.CV_32F, 0, 1, ksize=5) - magnitude = cv2.magnitude(sx, sy) - mean_mag = float(magnitude.mean()) - if mean_mag <= 0: - return 0.0 - return float(np.clip((np.log10(mean_mag + 1) - 1.7) / 2.0, 0.0, 1.0)) - - # PnP wrappers - def solve_pnp( - self, - object_points: np.ndarray, # type: ignore[type-arg] - image_points: np.ndarray, # type: ignore[type-arg] - camera_matrix: np.ndarray, # type: ignore[type-arg] - dist_coeffs: np.ndarray | None = None, # type: ignore[type-arg] - flags: int = cv2.SOLVEPNP_ITERATIVE, - ) -> tuple[bool, np.ndarray, np.ndarray]: # type: ignore[type-arg] - obj = 
np.asarray(object_points, dtype=np.float32).reshape(-1, 3) - img = np.asarray(image_points, dtype=np.float32).reshape(-1, 2) - K = np.asarray(camera_matrix, dtype=np.float64) - dist = None if dist_coeffs is None else np.asarray(dist_coeffs, dtype=np.float64) - ok, rvec, tvec = cv2.solvePnP(obj, img, K, dist, flags=flags) # type: ignore[arg-type] - return bool(ok), rvec.astype(np.float64), tvec.astype(np.float64) - - def create_csrt_tracker(self, bbox: tuple[int, int, int, int]): # type: ignore[no-untyped-def] - tracker = None - if hasattr(cv2, "legacy") and hasattr(cv2.legacy, "TrackerCSRT_create"): - tracker = cv2.legacy.TrackerCSRT_create() - elif hasattr(cv2, "TrackerCSRT_create"): - tracker = cv2.TrackerCSRT_create() - else: - raise RuntimeError("OpenCV CSRT tracker not available") - ok = tracker.init(self.to_bgr().to_opencv(), tuple(map(int, bbox))) - if not ok: - raise RuntimeError("Failed to initialize CSRT tracker") - return tracker - - def csrt_update(self, tracker) -> tuple[bool, tuple[int, int, int, int]]: # type: ignore[no-untyped-def] - ok, box = tracker.update(self.to_bgr().to_opencv()) - if not ok: - return False, (0, 0, 0, 0) - x, y, w, h = map(int, box) - return True, (x, y, w, h) - - def solve_pnp_ransac( - self, - object_points: np.ndarray, # type: ignore[type-arg] - image_points: np.ndarray, # type: ignore[type-arg] - camera_matrix: np.ndarray, # type: ignore[type-arg] - dist_coeffs: np.ndarray | None = None, # type: ignore[type-arg] - iterations_count: int = 100, - reprojection_error: float = 3.0, - confidence: float = 0.99, - min_sample: int = 6, - ) -> tuple[bool, np.ndarray, np.ndarray, np.ndarray]: # type: ignore[type-arg] - obj = np.asarray(object_points, dtype=np.float32).reshape(-1, 3) - img = np.asarray(image_points, dtype=np.float32).reshape(-1, 2) - K = np.asarray(camera_matrix, dtype=np.float64) - dist = None if dist_coeffs is None else np.asarray(dist_coeffs, dtype=np.float64) - ok, rvec, tvec, inliers = cv2.solvePnPRansac( - obj, - 
img, - K, - dist, # type: ignore[arg-type] - iterationsCount=int(iterations_count), - reprojectionError=float(reprojection_error), - confidence=float(confidence), - flags=cv2.SOLVEPNP_ITERATIVE, - ) - mask = np.zeros((obj.shape[0],), dtype=np.uint8) - if inliers is not None and len(inliers) > 0: - mask[inliers.flatten()] = 1 - return bool(ok), rvec.astype(np.float64), tvec.astype(np.float64), mask diff --git a/dimos/msgs/sensor_msgs/image_impls/test_image_backend_utils.py b/dimos/msgs/sensor_msgs/image_impls/test_image_backend_utils.py deleted file mode 100644 index 9ddc15fe85..0000000000 --- a/dimos/msgs/sensor_msgs/image_impls/test_image_backend_utils.py +++ /dev/null @@ -1,287 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import numpy as np -import pytest - -from dimos.msgs.sensor_msgs import Image, ImageFormat - -try: - HAS_CUDA = True - print("Running image backend utils tests with CUDA/CuPy support (GPU mode)") -except: - HAS_CUDA = False - print("Running image backend utils tests in CPU-only mode") - -from dimos.perception.common.utils import ( - colorize_depth, - draw_bounding_box, - draw_object_detection_visualization, - draw_segmentation_mask, - project_2d_points_to_3d, - project_3d_points_to_2d, - rectify_image, -) - - -def _has_cupy() -> bool: - try: - import cupy as cp - - try: - ndev = cp.cuda.runtime.getDeviceCount() - if ndev <= 0: - return False - x = cp.array([1, 2, 3]) - _ = int(x.sum().get()) - return True - except Exception: - return False - except Exception: - return False - - -@pytest.mark.parametrize( - "shape,fmt", [((64, 64, 3), ImageFormat.BGR), ((64, 64), ImageFormat.GRAY)] -) -def test_rectify_image_cpu(shape, fmt) -> None: - arr = (np.random.rand(*shape) * (255 if fmt != ImageFormat.GRAY else 65535)).astype( - np.uint8 if fmt != ImageFormat.GRAY else np.uint16 - ) - img = Image(data=arr, format=fmt, frame_id="cam", ts=123.456) - K = np.array( - [[100.0, 0, arr.shape[1] / 2], [0, 100.0, arr.shape[0] / 2], [0, 0, 1]], dtype=np.float64 - ) - D = np.zeros(5, dtype=np.float64) - out = rectify_image(img, K, D) - assert out.shape[:2] == arr.shape[:2] - assert out.format == fmt - assert out.frame_id == "cam" - assert abs(out.ts - 123.456) < 1e-9 - # With zero distortion, pixels should match - np.testing.assert_array_equal(out.data, arr) - - -@pytest.mark.skipif(not _has_cupy(), reason="CuPy/CUDA not available") -@pytest.mark.parametrize( - "shape,fmt", [((32, 32, 3), ImageFormat.BGR), ((32, 32), ImageFormat.GRAY)] -) -def test_rectify_image_gpu_parity(shape, fmt) -> None: - import cupy as cp - - arr_np = (np.random.rand(*shape) * (255 if fmt != ImageFormat.GRAY else 65535)).astype( - np.uint8 if fmt != ImageFormat.GRAY else np.uint16 - ) - arr_cu = 
cp.asarray(arr_np) - img = Image(data=arr_cu, format=fmt, frame_id="cam", ts=1.23) - K = np.array( - [[80.0, 0, arr_np.shape[1] / 2], [0, 80.0, arr_np.shape[0] / 2], [0, 0, 1.0]], - dtype=np.float64, - ) - D = np.zeros(5, dtype=np.float64) - out = rectify_image(img, K, D) - # Zero distortion parity and backend preservation - assert out.format == fmt - assert out.frame_id == "cam" - assert abs(out.ts - 1.23) < 1e-9 - assert out.data.__class__.__module__.startswith("cupy") - np.testing.assert_array_equal(cp.asnumpy(out.data), arr_np) - - -@pytest.mark.skipif(not _has_cupy(), reason="CuPy/CUDA not available") -def test_rectify_image_gpu_nonzero_dist_close() -> None: - import cupy as cp - - H, W = 64, 96 - # Structured pattern to make interpolation deterministic enough - x = np.linspace(0, 255, W, dtype=np.float32) - y = np.linspace(0, 255, H, dtype=np.float32) - xv, yv = np.meshgrid(x, y) - arr_np = np.stack( - [ - xv.astype(np.uint8), - yv.astype(np.uint8), - ((xv + yv) / 2).astype(np.uint8), - ], - axis=2, - ) - img_cpu = Image(data=arr_np, format=ImageFormat.BGR, frame_id="cam", ts=0.5) - img_gpu = Image(data=cp.asarray(arr_np), format=ImageFormat.BGR, frame_id="cam", ts=0.5) - - fx, fy = 120.0, 125.0 - cx, cy = W / 2.0, H / 2.0 - K = np.array([[fx, 0, cx], [0, fy, cy], [0, 0, 1.0]], dtype=np.float64) - D = np.array([0.05, -0.02, 0.001, -0.001, 0.0], dtype=np.float64) - - out_cpu = rectify_image(img_cpu, K, D) - out_gpu = rectify_image(img_gpu, K, D) - # Compare within a small tolerance - # Small numeric differences may remain due to model and casting; keep tight tolerance - np.testing.assert_allclose( - cp.asnumpy(out_gpu.data).astype(np.int16), out_cpu.data.astype(np.int16), atol=4 - ) - - -def test_project_roundtrip_cpu() -> None: - pts3d = np.array([[0.1, 0.2, 1.0], [0.0, 0.0, 2.0], [0.5, -0.3, 3.0]], dtype=np.float32) - fx, fy, cx, cy = 200.0, 220.0, 64.0, 48.0 - K = np.array([[fx, 0, cx], [0, fy, cy], [0, 0, 1.0]], dtype=np.float64) - uv = 
project_3d_points_to_2d(pts3d, K) - assert uv.shape == (3, 2) - Z = pts3d[:, 2] - pts3d_back = project_2d_points_to_3d(uv.astype(np.float32), Z.astype(np.float32), K) - # Allow small rounding differences due to int rounding in 2D - assert pts3d_back.shape == (3, 3) - assert np.all(pts3d_back[:, 2] > 0) - - -@pytest.mark.skipif(not _has_cupy(), reason="CuPy/CUDA not available") -def test_project_parity_gpu_cpu() -> None: - import cupy as cp - - pts3d_np = np.array([[0.1, 0.2, 1.0], [0.0, 0.0, 2.0], [0.5, -0.3, 3.0]], dtype=np.float32) - fx, fy, cx, cy = 200.0, 220.0, 64.0, 48.0 - K_np = np.array([[fx, 0, cx], [0, fy, cy], [0, 0, 1.0]], dtype=np.float64) - uv_cpu = project_3d_points_to_2d(pts3d_np, K_np) - uv_gpu = project_3d_points_to_2d(cp.asarray(pts3d_np), cp.asarray(K_np)) - np.testing.assert_array_equal(cp.asnumpy(uv_gpu), uv_cpu) - - Z_np = pts3d_np[:, 2] - pts3d_cpu = project_2d_points_to_3d(uv_cpu.astype(np.float32), Z_np.astype(np.float32), K_np) - pts3d_gpu = project_2d_points_to_3d( - cp.asarray(uv_cpu.astype(np.float32)), cp.asarray(Z_np.astype(np.float32)), cp.asarray(K_np) - ) - assert pts3d_cpu.shape == cp.asnumpy(pts3d_gpu).shape - - -@pytest.mark.skipif(not _has_cupy(), reason="CuPy/CUDA not available") -def test_project_parity_gpu_cpu_random() -> None: - import cupy as cp - - rng = np.random.RandomState(0) - N = 1000 - Z = rng.uniform(0.1, 5.0, size=(N, 1)).astype(np.float32) - XY = rng.uniform(-1.0, 1.0, size=(N, 2)).astype(np.float32) - pts3d_np = np.concatenate([XY, Z], axis=1) - - fx, fy = 300.0, 320.0 - cx, cy = 128.0, 96.0 - K_np = np.array([[fx, 0, cx], [0, fy, cy], [0, 0, 1.0]], dtype=np.float64) - - uv_cpu = project_3d_points_to_2d(pts3d_np, K_np) - uv_gpu = project_3d_points_to_2d(cp.asarray(pts3d_np), cp.asarray(K_np)) - np.testing.assert_array_equal(cp.asnumpy(uv_gpu), uv_cpu) - - # Roundtrip - Z_flat = pts3d_np[:, 2] - pts3d_cpu = project_2d_points_to_3d(uv_cpu.astype(np.float32), Z_flat.astype(np.float32), K_np) - pts3d_gpu = 
project_2d_points_to_3d( - cp.asarray(uv_cpu.astype(np.float32)), - cp.asarray(Z_flat.astype(np.float32)), - cp.asarray(K_np), - ) - assert pts3d_cpu.shape == cp.asnumpy(pts3d_gpu).shape - - -def test_colorize_depth_cpu() -> None: - depth = np.zeros((32, 48), dtype=np.float32) - depth[8:16, 12:24] = 1.5 - out = colorize_depth(depth, max_depth=3.0, overlay_stats=False) - assert isinstance(out, np.ndarray) - assert out.shape == (32, 48, 3) - assert out.dtype == np.uint8 - - -@pytest.mark.skipif(not _has_cupy(), reason="CuPy/CUDA not available") -def test_colorize_depth_gpu_parity() -> None: - import cupy as cp - - depth_np = np.zeros((16, 20), dtype=np.float32) - depth_np[4:8, 5:15] = 2.0 - out_cpu = colorize_depth(depth_np, max_depth=4.0, overlay_stats=False) - out_gpu = colorize_depth(cp.asarray(depth_np), max_depth=4.0, overlay_stats=False) - np.testing.assert_array_equal(cp.asnumpy(out_gpu), out_cpu) - - -def test_draw_bounding_box_cpu() -> None: - img = np.zeros((20, 30, 3), dtype=np.uint8) - out = draw_bounding_box(img, [2, 3, 10, 12], color=(255, 0, 0), thickness=1) - assert isinstance(out, np.ndarray) - assert out.shape == img.shape - assert out.dtype == img.dtype - - -@pytest.mark.skipif(not _has_cupy(), reason="CuPy/CUDA not available") -def test_draw_bounding_box_gpu_parity() -> None: - import cupy as cp - - img_np = np.zeros((20, 30, 3), dtype=np.uint8) - out_cpu = draw_bounding_box(img_np.copy(), [2, 3, 10, 12], color=(0, 255, 0), thickness=2) - img_cu = cp.asarray(img_np) - out_gpu = draw_bounding_box(img_cu, [2, 3, 10, 12], color=(0, 255, 0), thickness=2) - np.testing.assert_array_equal(cp.asnumpy(out_gpu), out_cpu) - - -def test_draw_segmentation_mask_cpu() -> None: - img = np.zeros((20, 30, 3), dtype=np.uint8) - mask = np.zeros((20, 30), dtype=np.uint8) - mask[5:10, 8:15] = 1 - out = draw_segmentation_mask(img, mask, color=(0, 200, 200), alpha=0.5) - assert out.shape == img.shape - - -@pytest.mark.skipif(not _has_cupy(), reason="CuPy/CUDA not 
available") -def test_draw_segmentation_mask_gpu_parity() -> None: - import cupy as cp - - img_np = np.zeros((20, 30, 3), dtype=np.uint8) - mask_np = np.zeros((20, 30), dtype=np.uint8) - mask_np[2:12, 3:20] = 1 - out_cpu = draw_segmentation_mask(img_np.copy(), mask_np, color=(100, 50, 200), alpha=0.4) - out_gpu = draw_segmentation_mask( - cp.asarray(img_np), cp.asarray(mask_np), color=(100, 50, 200), alpha=0.4 - ) - np.testing.assert_array_equal(cp.asnumpy(out_gpu), out_cpu) - - -def test_draw_object_detection_visualization_cpu() -> None: - img = np.zeros((30, 40, 3), dtype=np.uint8) - objects = [ - { - "object_id": 1, - "bbox": [5, 6, 20, 25], - "label": "box", - "confidence": 0.9, - } - ] - out = draw_object_detection_visualization(img, objects) - assert out.shape == img.shape - - -@pytest.mark.skipif(not _has_cupy(), reason="CuPy/CUDA not available") -def test_draw_object_detection_visualization_gpu_parity() -> None: - import cupy as cp - - img_np = np.zeros((30, 40, 3), dtype=np.uint8) - objects = [ - { - "object_id": 1, - "bbox": [5, 6, 20, 25], - "label": "box", - "confidence": 0.9, - } - ] - out_cpu = draw_object_detection_visualization(img_np.copy(), objects) - out_gpu = draw_object_detection_visualization(cp.asarray(img_np), objects) - np.testing.assert_array_equal(cp.asnumpy(out_gpu), out_cpu) diff --git a/dimos/msgs/sensor_msgs/image_impls/test_image_backends.py b/dimos/msgs/sensor_msgs/image_impls/test_image_backends.py deleted file mode 100644 index 7951a095b3..0000000000 --- a/dimos/msgs/sensor_msgs/image_impls/test_image_backends.py +++ /dev/null @@ -1,792 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time - -import cv2 -import numpy as np -import pytest - -from dimos.msgs.sensor_msgs.Image import HAS_CUDA, Image, ImageFormat -from dimos.utils.data import get_data - -IMAGE_PATH = get_data("chair-image.png") - -if HAS_CUDA: - print("Running image backend tests with CUDA/CuPy support (GPU mode)") -else: - print("Running image backend tests in CPU-only mode") - - -def _load_chair_image() -> np.ndarray: - img = cv2.imread(IMAGE_PATH, cv2.IMREAD_UNCHANGED) - if img is None: - raise FileNotFoundError(f"unable to load test image at {IMAGE_PATH}") - return img - - -_CHAIR_BGRA = _load_chair_image() - - -def _prepare_image(fmt: ImageFormat, shape=None) -> np.ndarray: - base = _CHAIR_BGRA - if fmt == ImageFormat.BGR: - arr = cv2.cvtColor(base, cv2.COLOR_BGRA2BGR) - elif fmt == ImageFormat.RGB: - arr = cv2.cvtColor(base, cv2.COLOR_BGRA2RGB) - elif fmt == ImageFormat.BGRA: - arr = base.copy() - elif fmt == ImageFormat.GRAY: - arr = cv2.cvtColor(base, cv2.COLOR_BGRA2GRAY) - else: - raise ValueError(f"unsupported image format {fmt}") - - if shape is None: - return arr.copy() - - if len(shape) == 2: - height, width = shape - orig_h, orig_w = arr.shape[:2] - interp = cv2.INTER_AREA if height <= orig_h and width <= orig_w else cv2.INTER_LINEAR - resized = cv2.resize(arr, (width, height), interpolation=interp) - return resized.copy() - - if len(shape) == 3: - height, width, channels = shape - orig_h, orig_w = arr.shape[:2] - interp = cv2.INTER_AREA if height <= orig_h and width <= orig_w else cv2.INTER_LINEAR - resized = cv2.resize(arr, (width, height), 
interpolation=interp) - if resized.ndim == 2: - resized = np.repeat(resized[:, :, None], channels, axis=2) - elif resized.shape[2] != channels: - if channels == 4 and resized.shape[2] == 3: - alpha = np.full((height, width, 1), 255, dtype=resized.dtype) - resized = np.concatenate([resized, alpha], axis=2) - elif channels == 3 and resized.shape[2] == 4: - resized = resized[:, :, :3] - else: - raise ValueError(f"cannot adjust image to {channels} channels") - return resized.copy() - - raise ValueError("shape must be a tuple of length 2 or 3") - - -@pytest.fixture -def alloc_timer(request): - """Helper fixture for adaptive testing with optional GPU support.""" - - def _alloc( - arr: np.ndarray, fmt: ImageFormat, *, to_cuda: bool | None = None, label: str | None = None - ): - tag = label or request.node.name - - # Always create CPU image - start = time.perf_counter() - cpu = Image.from_numpy(arr, format=fmt, to_cuda=False) - cpu_time = time.perf_counter() - start - - # Optionally create GPU image if CUDA is available - gpu = None - gpu_time = None - if to_cuda is None: - to_cuda = HAS_CUDA - - if to_cuda and HAS_CUDA: - arr_gpu = np.array(arr, copy=True) - start = time.perf_counter() - gpu = Image.from_numpy(arr_gpu, format=fmt, to_cuda=True) - gpu_time = time.perf_counter() - start - - if gpu_time is not None: - print(f"[alloc {tag}] cpu={cpu_time:.6f}s gpu={gpu_time:.6f}s") - else: - print(f"[alloc {tag}] cpu={cpu_time:.6f}s") - return cpu, gpu, cpu_time, gpu_time - - return _alloc - - -@pytest.mark.parametrize( - "shape,fmt", - [ - ((64, 64, 3), ImageFormat.BGR), - ((64, 64, 4), ImageFormat.BGRA), - ((64, 64, 3), ImageFormat.RGB), - ((64, 64), ImageFormat.GRAY), - ], -) -def test_color_conversions(shape, fmt, alloc_timer) -> None: - """Test color conversions with NumpyImage always, add CudaImage parity when available.""" - arr = _prepare_image(fmt, shape) - cpu, gpu, _, _ = alloc_timer(arr, fmt) - - # Always test CPU backend - cpu_round = 
cpu.to_rgb().to_bgr().to_opencv() - assert cpu_round.shape[0] == shape[0] - assert cpu_round.shape[1] == shape[1] - assert cpu_round.shape[2] == 3 # to_opencv always returns BGR (3 channels) - assert cpu_round.dtype == np.uint8 - - # Optionally test GPU parity when CUDA is available - if gpu is not None: - gpu_round = gpu.to_rgb().to_bgr().to_opencv() - assert gpu_round.shape == cpu_round.shape - assert gpu_round.dtype == cpu_round.dtype - # Exact match for uint8 color ops - assert np.array_equal(cpu_round, gpu_round) - - -def test_grayscale(alloc_timer) -> None: - """Test grayscale conversion with NumpyImage always, add CudaImage parity when available.""" - arr = _prepare_image(ImageFormat.BGR, (48, 32, 3)) - cpu, gpu, _, _ = alloc_timer(arr, ImageFormat.BGR) - - # Always test CPU backend - cpu_gray = cpu.to_grayscale().to_opencv() - assert cpu_gray.shape == (48, 32) # Grayscale has no channel dimension in OpenCV - assert cpu_gray.dtype == np.uint8 - - # Optionally test GPU parity when CUDA is available - if gpu is not None: - gpu_gray = gpu.to_grayscale().to_opencv() - assert gpu_gray.shape == cpu_gray.shape - assert gpu_gray.dtype == cpu_gray.dtype - # Allow tiny rounding differences (<=1 LSB) — visually indistinguishable - diff = np.abs(cpu_gray.astype(np.int16) - gpu_gray.astype(np.int16)) - assert diff.max() <= 1 - - -@pytest.mark.parametrize("fmt", [ImageFormat.BGR, ImageFormat.RGB, ImageFormat.BGRA]) -def test_resize(fmt, alloc_timer) -> None: - """Test resize with NumpyImage always, add CudaImage parity when available.""" - shape = (60, 80, 3) if fmt in (ImageFormat.BGR, ImageFormat.RGB) else (60, 80, 4) - arr = _prepare_image(fmt, shape) - cpu, gpu, _, _ = alloc_timer(arr, fmt) - - new_w, new_h = 37, 53 - - # Always test CPU backend - cpu_res = cpu.resize(new_w, new_h).to_opencv() - assert ( - cpu_res.shape == (53, 37, 3) if fmt != ImageFormat.BGRA else (53, 37, 3) - ) # to_opencv drops alpha - assert cpu_res.dtype == np.uint8 - - # Optionally test GPU 
parity when CUDA is available - if gpu is not None: - gpu_res = gpu.resize(new_w, new_h).to_opencv() - assert gpu_res.shape == cpu_res.shape - assert gpu_res.dtype == cpu_res.dtype - # Allow small tolerance due to float interpolation differences - assert np.max(np.abs(cpu_res.astype(np.int16) - gpu_res.astype(np.int16))) <= 1 - - -def test_perf_alloc(alloc_timer) -> None: - """Test allocation performance with NumpyImage always, add CudaImage when available.""" - arr = _prepare_image(ImageFormat.BGR, (480, 640, 3)) - alloc_timer(arr, ImageFormat.BGR, label="test_perf_alloc-setup") - - runs = 5 - - # Always test CPU allocation - t0 = time.perf_counter() - for _ in range(runs): - _ = Image.from_numpy(arr, format=ImageFormat.BGR, to_cuda=False) - cpu_t = (time.perf_counter() - t0) / runs - assert cpu_t > 0 - - # Optionally test GPU allocation when CUDA is available - if HAS_CUDA: - t0 = time.perf_counter() - for _ in range(runs): - _ = Image.from_numpy(arr, format=ImageFormat.BGR, to_cuda=True) - gpu_t = (time.perf_counter() - t0) / runs - print(f"alloc (avg per call) cpu={cpu_t:.6f}s gpu={gpu_t:.6f}s") - assert gpu_t > 0 - else: - print(f"alloc (avg per call) cpu={cpu_t:.6f}s") - - -def test_sharpness(alloc_timer) -> None: - """Test sharpness computation with NumpyImage always, add CudaImage parity when available.""" - arr = _prepare_image(ImageFormat.BGR, (64, 64, 3)) - cpu = alloc_timer(arr, ImageFormat.BGR)[0] - - # Always test CPU backend - s_cpu = cpu.sharpness - assert s_cpu >= 0 # Sharpness should be non-negative - assert s_cpu < 1000 # Reasonable upper bound - - -def test_to_opencv(alloc_timer) -> None: - """Test to_opencv conversion with NumpyImage always, add CudaImage parity when available.""" - # BGRA should drop alpha and produce BGR - arr = _prepare_image(ImageFormat.BGRA, (32, 32, 4)) - cpu, gpu, _, _ = alloc_timer(arr, ImageFormat.BGRA) - - # Always test CPU backend - cpu_bgr = cpu.to_opencv() - assert cpu_bgr.shape == (32, 32, 3) - assert 
cpu_bgr.dtype == np.uint8 - - # Optionally test GPU parity when CUDA is available - if gpu is not None: - gpu_bgr = gpu.to_opencv() - assert gpu_bgr.shape == cpu_bgr.shape - assert gpu_bgr.dtype == cpu_bgr.dtype - assert np.array_equal(cpu_bgr, gpu_bgr) - - -def test_solve_pnp(alloc_timer) -> None: - """Test solve_pnp with NumpyImage always, add CudaImage parity when available.""" - # Synthetic camera and 3D points - K = np.array([[400.0, 0.0, 32.0], [0.0, 400.0, 24.0], [0.0, 0.0, 1.0]], dtype=np.float64) - dist = None - obj = np.array( - [ - [-0.5, -0.5, 0.0], - [0.5, -0.5, 0.0], - [0.5, 0.5, 0.0], - [-0.5, 0.5, 0.0], - [0.0, 0.0, 0.5], - [0.0, 0.0, 1.0], - ], - dtype=np.float32, - ) - - rvec_true = np.zeros((3, 1), dtype=np.float64) - tvec_true = np.array([[0.0], [0.0], [2.0]], dtype=np.float64) - img_pts, _ = cv2.projectPoints(obj, rvec_true, tvec_true, K, dist) - img_pts = img_pts.reshape(-1, 2).astype(np.float32) - - # Build images using deterministic fixture content - base_bgr = _prepare_image(ImageFormat.BGR, (48, 64, 3)) - cpu, gpu, _, _ = alloc_timer(base_bgr, ImageFormat.BGR) - - # Always test CPU backend - ok_cpu, r_cpu, t_cpu = cpu.solve_pnp(obj, img_pts, K, dist) - assert ok_cpu - - # Validate reprojection error for CPU solver - proj_cpu, _ = cv2.projectPoints(obj, r_cpu, t_cpu, K, dist) - proj_cpu = proj_cpu.reshape(-1, 2) - err_cpu = np.linalg.norm(proj_cpu - img_pts, axis=1) - assert err_cpu.mean() < 1e-3 - assert err_cpu.max() < 1e-2 - - # Optionally test GPU parity when CUDA is available - if gpu is not None: - ok_gpu, r_gpu, t_gpu = gpu.solve_pnp(obj, img_pts, K, dist) - assert ok_gpu - - # Validate reprojection error for GPU solver - proj_gpu, _ = cv2.projectPoints(obj, r_gpu, t_gpu, K, dist) - proj_gpu = proj_gpu.reshape(-1, 2) - err_gpu = np.linalg.norm(proj_gpu - img_pts, axis=1) - assert err_gpu.mean() < 1e-3 - assert err_gpu.max() < 1e-2 - - -def test_perf_grayscale(alloc_timer) -> None: - """Test grayscale performance with NumpyImage 
always, add CudaImage when available.""" - arr = _prepare_image(ImageFormat.BGR, (480, 640, 3)) - cpu, gpu, _, _ = alloc_timer(arr, ImageFormat.BGR, label="test_perf_grayscale-setup") - - runs = 10 - - # Always test CPU performance - t0 = time.perf_counter() - for _ in range(runs): - _ = cpu.to_grayscale() - cpu_t = (time.perf_counter() - t0) / runs - assert cpu_t > 0 - - # Optionally test GPU performance when CUDA is available - if gpu is not None: - t0 = time.perf_counter() - for _ in range(runs): - _ = gpu.to_grayscale() - gpu_t = (time.perf_counter() - t0) / runs - print(f"grayscale (avg per call) cpu={cpu_t:.6f}s gpu={gpu_t:.6f}s") - assert gpu_t > 0 - else: - print(f"grayscale (avg per call) cpu={cpu_t:.6f}s") - - -def test_perf_resize(alloc_timer) -> None: - """Test resize performance with NumpyImage always, add CudaImage when available.""" - arr = _prepare_image(ImageFormat.BGR, (480, 640, 3)) - cpu, gpu, _, _ = alloc_timer(arr, ImageFormat.BGR, label="test_perf_resize-setup") - - runs = 5 - - # Always test CPU performance - t0 = time.perf_counter() - for _ in range(runs): - _ = cpu.resize(320, 240) - cpu_t = (time.perf_counter() - t0) / runs - assert cpu_t > 0 - - # Optionally test GPU performance when CUDA is available - if gpu is not None: - t0 = time.perf_counter() - for _ in range(runs): - _ = gpu.resize(320, 240) - gpu_t = (time.perf_counter() - t0) / runs - print(f"resize (avg per call) cpu={cpu_t:.6f}s gpu={gpu_t:.6f}s") - assert gpu_t > 0 - else: - print(f"resize (avg per call) cpu={cpu_t:.6f}s") - - -@pytest.mark.integration -def test_perf_sharpness(alloc_timer) -> None: - """Test sharpness performance with NumpyImage always, add CudaImage when available.""" - arr = _prepare_image(ImageFormat.BGR, (480, 640, 3)) - cpu, gpu, _, _ = alloc_timer(arr, ImageFormat.BGR, label="test_perf_sharpness-setup") - - runs = 3 - - # Always test CPU performance - t0 = time.perf_counter() - for _ in range(runs): - _ = cpu.sharpness - cpu_t = (time.perf_counter() - 
t0) / runs - assert cpu_t > 0 - - # Optionally test GPU performance when CUDA is available - if gpu is not None: - t0 = time.perf_counter() - for _ in range(runs): - _ = gpu.sharpness - gpu_t = (time.perf_counter() - t0) / runs - print(f"sharpness (avg per call) cpu={cpu_t:.6f}s gpu={gpu_t:.6f}s") - assert gpu_t > 0 - else: - print(f"sharpness (avg per call) cpu={cpu_t:.6f}s") - - -def test_perf_solvepnp(alloc_timer) -> None: - """Test solve_pnp performance with NumpyImage always, add CudaImage when available.""" - K = np.array([[600.0, 0.0, 320.0], [0.0, 600.0, 240.0], [0.0, 0.0, 1.0]], dtype=np.float64) - dist = None - rng = np.random.default_rng(123) - obj = rng.standard_normal((200, 3)).astype(np.float32) - rvec_true = np.array([[0.1], [-0.2], [0.05]]) - tvec_true = np.array([[0.0], [0.0], [3.0]]) - img_pts, _ = cv2.projectPoints(obj, rvec_true, tvec_true, K, dist) - img_pts = img_pts.reshape(-1, 2).astype(np.float32) - base_bgr = _prepare_image(ImageFormat.BGR, (480, 640, 3)) - cpu, gpu, _, _ = alloc_timer(base_bgr, ImageFormat.BGR, label="test_perf_solvepnp-setup") - - runs = 5 - - # Always test CPU performance - t0 = time.perf_counter() - for _ in range(runs): - _ = cpu.solve_pnp(obj, img_pts, K, dist) - cpu_t = (time.perf_counter() - t0) / runs - assert cpu_t > 0 - - # Optionally test GPU performance when CUDA is available - if gpu is not None: - t0 = time.perf_counter() - for _ in range(runs): - _ = gpu.solve_pnp(obj, img_pts, K, dist) - gpu_t = (time.perf_counter() - t0) / runs - print(f"solvePnP (avg per call) cpu={cpu_t:.6f}s gpu={gpu_t:.6f}s") - assert gpu_t > 0 - else: - print(f"solvePnP (avg per call) cpu={cpu_t:.6f}s") - - -# this test is failing with -# raise RuntimeError("OpenCV CSRT tracker not available") -@pytest.mark.skip -def test_perf_tracker(alloc_timer) -> None: - """Test tracker performance with NumpyImage always, add CudaImage when available.""" - # Don't check - just let it fail if CSRT isn't available - - H, W = 240, 320 - img_base = 
_prepare_image(ImageFormat.BGR, (H, W, 3)) - img1 = img_base.copy() - img2 = img_base.copy() - bbox0 = (80, 60, 40, 30) - x0, y0, w0, h0 = bbox0 - cv2.rectangle(img1, (x0, y0), (x0 + w0, y0 + h0), (255, 255, 255), thickness=-1) - dx, dy = 8, 5 - cv2.rectangle( - img2, - (x0 + dx, y0 + dy), - (x0 + dx + w0, y0 + dy + h0), - (255, 255, 255), - thickness=-1, - ) - cpu1, gpu1, _, _ = alloc_timer(img1, ImageFormat.BGR, label="test_perf_tracker-frame1") - cpu2, gpu2, _, _ = alloc_timer(img2, ImageFormat.BGR, label="test_perf_tracker-frame2") - - # Always test CPU tracker - trk_cpu = cpu1.create_csrt_tracker(bbox0) - - runs = 10 - t0 = time.perf_counter() - for _ in range(runs): - _ = cpu2.csrt_update(trk_cpu) - cpu_t = (time.perf_counter() - t0) / runs - assert cpu_t > 0 - - # Optionally test GPU performance when CUDA is available - if gpu1 is not None and gpu2 is not None: - trk_gpu = gpu1.create_csrt_tracker(bbox0) - t0 = time.perf_counter() - for _ in range(runs): - _ = gpu2.csrt_update(trk_gpu) - gpu_t = (time.perf_counter() - t0) / runs - print(f"tracker (avg per call) cpu={cpu_t:.6f}s gpu={gpu_t:.6f}s") - assert gpu_t > 0 - else: - print(f"tracker (avg per call) cpu={cpu_t:.6f}s") - - -# this test is failing with -# raise RuntimeError("OpenCV CSRT tracker not available") -@pytest.mark.skip -def test_csrt_tracker(alloc_timer) -> None: - """Test CSRT tracker with NumpyImage always, add CudaImage parity when available.""" - # Don't check - just let it fail if CSRT isn't available - - H, W = 100, 100 - # Create two frames with a moving rectangle - img_base = _prepare_image(ImageFormat.BGR, (H, W, 3)) - img1 = img_base.copy() - img2 = img_base.copy() - bbox0 = (30, 30, 20, 15) - x0, y0, w0, h0 = bbox0 - # draw rect in img1 - cv2.rectangle(img1, (x0, y0), (x0 + w0, y0 + h0), (255, 255, 255), thickness=-1) - # shift by (dx,dy) - dx, dy = 5, 3 - cv2.rectangle( - img2, - (x0 + dx, y0 + dy), - (x0 + dx + w0, y0 + dy + h0), - (255, 255, 255), - thickness=-1, - ) - - cpu1, 
gpu1, _, _ = alloc_timer(img1, ImageFormat.BGR, label="test_csrt_tracker-frame1") - cpu2, gpu2, _, _ = alloc_timer(img2, ImageFormat.BGR, label="test_csrt_tracker-frame2") - - # Always test CPU tracker - trk_cpu = cpu1.create_csrt_tracker(bbox0) - ok_cpu, bbox_cpu = cpu2.csrt_update(trk_cpu) - assert ok_cpu - - # Compare to ground-truth expected bbox - expected = (x0 + dx, y0 + dy, w0, h0) - err_cpu = sum(abs(a - b) for a, b in zip(bbox_cpu, expected, strict=False)) - assert err_cpu <= 8 - - # Optionally test GPU parity when CUDA is available - if gpu1 is not None and gpu2 is not None: - trk_gpu = gpu1.create_csrt_tracker(bbox0) - ok_gpu, bbox_gpu = gpu2.csrt_update(trk_gpu) - assert ok_gpu - - err_gpu = sum(abs(a - b) for a, b in zip(bbox_gpu, expected, strict=False)) - assert err_gpu <= 10 # allow some slack for scale/window effects - - -def test_solve_pnp_ransac(alloc_timer) -> None: - """Test solve_pnp_ransac with NumpyImage always, add CudaImage when available.""" - # Camera with distortion - K = np.array([[500.0, 0.0, 320.0], [0.0, 500.0, 240.0], [0.0, 0.0, 1.0]], dtype=np.float64) - dist = np.array([0.1, -0.05, 0.001, 0.001, 0.0], dtype=np.float64) - rng = np.random.default_rng(202) - obj = rng.uniform(-1.0, 1.0, size=(200, 3)).astype(np.float32) - obj[:, 2] = np.abs(obj[:, 2]) + 2.0 # keep in front of camera - rvec_true = np.array([[0.1], [-0.15], [0.05]], dtype=np.float64) - tvec_true = np.array([[0.2], [-0.1], [3.0]], dtype=np.float64) - img_pts, _ = cv2.projectPoints(obj, rvec_true, tvec_true, K, dist) - img_pts = img_pts.reshape(-1, 2) - # Add outliers - n_out = 20 - idx = rng.choice(len(img_pts), size=n_out, replace=False) - img_pts[idx] += rng.uniform(-50, 50, size=(n_out, 2)) - img_pts = img_pts.astype(np.float32) - - base_bgr = _prepare_image(ImageFormat.BGR, (480, 640, 3)) - cpu, gpu, _, _ = alloc_timer(base_bgr, ImageFormat.BGR, label="test_solve_pnp_ransac-setup") - - # Always test CPU backend - ok_cpu, r_cpu, t_cpu, mask_cpu = 
cpu.solve_pnp_ransac( - obj, img_pts, K, dist, iterations_count=150, reprojection_error=3.0 - ) - assert ok_cpu - inlier_ratio = mask_cpu.mean() - assert inlier_ratio > 0.7 - - # Reprojection error on inliers - in_idx = np.nonzero(mask_cpu)[0] - proj_cpu, _ = cv2.projectPoints(obj[in_idx], r_cpu, t_cpu, K, dist) - proj_cpu = proj_cpu.reshape(-1, 2) - err = np.linalg.norm(proj_cpu - img_pts[in_idx], axis=1) - assert err.mean() < 1.5 - assert err.max() < 4.0 - - # Optionally test GPU parity when CUDA is available - if gpu is not None: - ok_gpu, r_gpu, t_gpu, mask_gpu = gpu.solve_pnp_ransac( - obj, img_pts, K, dist, iterations_count=150, reprojection_error=3.0 - ) - assert ok_gpu - inlier_ratio_gpu = mask_gpu.mean() - assert inlier_ratio_gpu > 0.7 - - # Reprojection error on inliers for GPU - in_idx_gpu = np.nonzero(mask_gpu)[0] - proj_gpu, _ = cv2.projectPoints(obj[in_idx_gpu], r_gpu, t_gpu, K, dist) - proj_gpu = proj_gpu.reshape(-1, 2) - err_gpu = np.linalg.norm(proj_gpu - img_pts[in_idx_gpu], axis=1) - assert err_gpu.mean() < 1.5 - assert err_gpu.max() < 4.0 - - -def test_solve_pnp_batch(alloc_timer) -> None: - """Test solve_pnp batch processing with NumpyImage always, add CudaImage when available.""" - # Note: Batch processing is primarily a GPU feature, but we can still test CPU loop - # Generate batched problems - B, N = 8, 50 - rng = np.random.default_rng(99) - obj = rng.uniform(-1.0, 1.0, size=(B, N, 3)).astype(np.float32) - obj[:, :, 2] = np.abs(obj[:, :, 2]) + 2.0 - K = np.array([[600.0, 0.0, 320.0], [0.0, 600.0, 240.0], [0.0, 0.0, 1.0]], dtype=np.float64) - r_true = np.zeros((B, 3, 1), dtype=np.float64) - t_true = np.tile(np.array([[0.0], [0.0], [3.0]], dtype=np.float64), (B, 1, 1)) - img = [] - for b in range(B): - ip, _ = cv2.projectPoints(obj[b], r_true[b], t_true[b], K, None) - img.append(ip.reshape(-1, 2)) - img = np.stack(img, axis=0).astype(np.float32) - - base_bgr = _prepare_image(ImageFormat.BGR, (10, 10, 3)) - cpu, gpu, _, _ = 
alloc_timer(base_bgr, ImageFormat.BGR, label="test_solve_pnp_batch-setup") - - # Always test CPU loop - t0 = time.perf_counter() - r_list = [] - t_list = [] - for b in range(B): - ok, r, t = cpu.solve_pnp(obj[b], img[b], K, None) - assert ok - r_list.append(r) - t_list.append(t) - cpu_total = time.perf_counter() - t0 - cpu_t = cpu_total / B - - # Check reprojection for CPU results - for b in range(min(B, 2)): - proj, _ = cv2.projectPoints(obj[b], r_list[b], t_list[b], K, None) - err = np.linalg.norm(proj.reshape(-1, 2) - img[b], axis=1) - assert err.mean() < 1e-2 - assert err.max() < 1e-1 - - # Optionally test GPU batch when CUDA is available - if gpu is not None and hasattr(gpu._impl, "solve_pnp_batch"): - t0 = time.perf_counter() - r_b, t_b = gpu.solve_pnp_batch(obj, img, K) - gpu_total = time.perf_counter() - t0 - gpu_t = gpu_total / B - print(f"solvePnP-batch (avg per pose) cpu={cpu_t:.6f}s gpu={gpu_t:.6f}s (B={B}, N={N})") - - # Check reprojection for GPU batches - for b in range(min(B, 4)): - proj, _ = cv2.projectPoints(obj[b], r_b[b], t_b[b], K, None) - err = np.linalg.norm(proj.reshape(-1, 2) - img[b], axis=1) - assert err.mean() < 1e-2 - assert err.max() < 1e-1 - else: - print(f"solvePnP-batch (avg per pose) cpu={cpu_t:.6f}s (GPU batch not available)") - - -def test_nvimgcodec_flag_and_fallback(monkeypatch) -> None: - # Test that to_base64() works with and without nvimgcodec by patching runtime flags - import dimos.msgs.sensor_msgs.image_impls.AbstractImage as AbstractImageMod - - arr = _prepare_image(ImageFormat.BGR, (32, 32, 3)) - - # Save original values - original_has_nvimgcodec = AbstractImageMod.HAS_NVIMGCODEC - original_nvimgcodec = AbstractImageMod.nvimgcodec - - try: - # Test 1: Simulate nvimgcodec not available - monkeypatch.setattr(AbstractImageMod, "HAS_NVIMGCODEC", False) - monkeypatch.setattr(AbstractImageMod, "nvimgcodec", None) - - # Should work via cv2 fallback for CPU - img_cpu = Image.from_numpy(arr, format=ImageFormat.BGR, 
to_cuda=False) - b64_cpu = img_cpu.to_base64() - assert isinstance(b64_cpu, str) and len(b64_cpu) > 0 - - # If CUDA available, test GPU fallback to CPU encoding - if HAS_CUDA: - img_gpu = Image.from_numpy(arr, format=ImageFormat.BGR, to_cuda=True) - b64_gpu = img_gpu.to_base64() - assert isinstance(b64_gpu, str) and len(b64_gpu) > 0 - # Should have fallen back to CPU encoding - assert not AbstractImageMod.NVIMGCODEC_LAST_USED - - # Test 2: Restore nvimgcodec if it was originally available - if original_has_nvimgcodec: - monkeypatch.setattr(AbstractImageMod, "HAS_NVIMGCODEC", True) - monkeypatch.setattr(AbstractImageMod, "nvimgcodec", original_nvimgcodec) - - # Test it still works with nvimgcodec "available" - img2 = Image.from_numpy(arr, format=ImageFormat.BGR, to_cuda=HAS_CUDA) - b64_2 = img2.to_base64() - assert isinstance(b64_2, str) and len(b64_2) > 0 - - finally: - pass - - -@pytest.mark.skipif(not HAS_CUDA, reason="CuPy/CUDA not available") -def test_nvimgcodec_gpu_path(monkeypatch) -> None: - """Test nvimgcodec GPU encoding path when CUDA is available. - - This test specifically verifies that when nvimgcodec is available, - GPU images can be encoded directly without falling back to CPU. 
- """ - import dimos.msgs.sensor_msgs.image_impls.AbstractImage as AbstractImageMod - - # Check if nvimgcodec was originally available - if not AbstractImageMod.HAS_NVIMGCODEC: - pytest.skip("nvimgcodec library not available") - - # Save original nvimgcodec module reference - - # Create a CUDA image and encode using the actual nvimgcodec if available - arr = _prepare_image(ImageFormat.BGR, (32, 32, 3)) - - # Test with nvimgcodec enabled (should be the default if available) - img = Image.from_numpy(arr, format=ImageFormat.BGR, to_cuda=True) - b64 = img.to_base64() - assert isinstance(b64, str) and len(b64) > 0 - - # Check if GPU encoding was actually used - # Some builds may import nvimgcodec but not support CuPy device buffers - if not getattr(AbstractImageMod, "NVIMGCODEC_LAST_USED", False): - pytest.skip("nvimgcodec present but encode fell back to CPU in this environment") - - # Now test that we can disable nvimgcodec and still encode via fallback - monkeypatch.setattr(AbstractImageMod, "HAS_NVIMGCODEC", False) - monkeypatch.setattr(AbstractImageMod, "nvimgcodec", None) - - # Create another GPU image - should fall back to CPU encoding - img2 = Image.from_numpy(arr, format=ImageFormat.BGR, to_cuda=True) - b64_2 = img2.to_base64() - assert isinstance(b64_2, str) and len(b64_2) > 0 - # Should have fallen back to CPU encoding - assert not AbstractImageMod.NVIMGCODEC_LAST_USED - - -@pytest.mark.skipif(not HAS_CUDA, reason="CuPy/CUDA not available") -def test_to_cpu_format_preservation() -> None: - """Test that to_cpu() preserves image format correctly. - - This tests the fix for the bug where to_cpu() was using to_opencv() - which always returns BGR, but keeping the original format label. 
- """ - # Test RGB format preservation - rgb_array = np.random.randint(0, 255, (100, 100, 3), dtype=np.uint8) - gpu_img_rgb = Image.from_numpy(rgb_array, format=ImageFormat.RGB, to_cuda=True) - cpu_img_rgb = gpu_img_rgb.to_cpu() - - # Verify format is preserved - assert cpu_img_rgb.format == ImageFormat.RGB, ( - f"Format mismatch: expected RGB, got {cpu_img_rgb.format}" - ) - # Verify data is actually in RGB format (not BGR) - np.testing.assert_array_equal(cpu_img_rgb.data, rgb_array) - - # Test RGBA format preservation - rgba_array = np.random.randint(0, 255, (100, 100, 4), dtype=np.uint8) - gpu_img_rgba = Image.from_numpy(rgba_array, format=ImageFormat.RGBA, to_cuda=True) - cpu_img_rgba = gpu_img_rgba.to_cpu() - - assert cpu_img_rgba.format == ImageFormat.RGBA, ( - f"Format mismatch: expected RGBA, got {cpu_img_rgba.format}" - ) - np.testing.assert_array_equal(cpu_img_rgba.data, rgba_array) - - # Test BGR format (should be unchanged since to_opencv returns BGR) - bgr_array = np.random.randint(0, 255, (100, 100, 3), dtype=np.uint8) - gpu_img_bgr = Image.from_numpy(bgr_array, format=ImageFormat.BGR, to_cuda=True) - cpu_img_bgr = gpu_img_bgr.to_cpu() - - assert cpu_img_bgr.format == ImageFormat.BGR, ( - f"Format mismatch: expected BGR, got {cpu_img_bgr.format}" - ) - np.testing.assert_array_equal(cpu_img_bgr.data, bgr_array) - - # Test BGRA format - bgra_array = np.random.randint(0, 255, (100, 100, 4), dtype=np.uint8) - gpu_img_bgra = Image.from_numpy(bgra_array, format=ImageFormat.BGRA, to_cuda=True) - cpu_img_bgra = gpu_img_bgra.to_cpu() - - assert cpu_img_bgra.format == ImageFormat.BGRA, ( - f"Format mismatch: expected BGRA, got {cpu_img_bgra.format}" - ) - np.testing.assert_array_equal(cpu_img_bgra.data, bgra_array) - - # Test GRAY format - gray_array = np.random.randint(0, 255, (100, 100), dtype=np.uint8) - gpu_img_gray = Image.from_numpy(gray_array, format=ImageFormat.GRAY, to_cuda=True) - cpu_img_gray = gpu_img_gray.to_cpu() - - assert cpu_img_gray.format == 
ImageFormat.GRAY, ( - f"Format mismatch: expected GRAY, got {cpu_img_gray.format}" - ) - np.testing.assert_array_equal(cpu_img_gray.data, gray_array) - - # Test DEPTH format (float32) - depth_array = np.random.uniform(0.5, 10.0, (100, 100)).astype(np.float32) - gpu_img_depth = Image.from_numpy(depth_array, format=ImageFormat.DEPTH, to_cuda=True) - cpu_img_depth = gpu_img_depth.to_cpu() - - assert cpu_img_depth.format == ImageFormat.DEPTH, ( - f"Format mismatch: expected DEPTH, got {cpu_img_depth.format}" - ) - np.testing.assert_array_equal(cpu_img_depth.data, depth_array) - - # Test DEPTH16 format (uint16) - depth16_array = np.random.randint(100, 65000, (100, 100), dtype=np.uint16) - gpu_img_depth16 = Image.from_numpy(depth16_array, format=ImageFormat.DEPTH16, to_cuda=True) - cpu_img_depth16 = gpu_img_depth16.to_cpu() - - assert cpu_img_depth16.format == ImageFormat.DEPTH16, ( - f"Format mismatch: expected DEPTH16, got {cpu_img_depth16.format}" - ) - np.testing.assert_array_equal(cpu_img_depth16.data, depth16_array) - - # Test GRAY16 format (uint16) - gray16_array = np.random.randint(0, 65535, (100, 100), dtype=np.uint16) - gpu_img_gray16 = Image.from_numpy(gray16_array, format=ImageFormat.GRAY16, to_cuda=True) - cpu_img_gray16 = gpu_img_gray16.to_cpu() - - assert cpu_img_gray16.format == ImageFormat.GRAY16, ( - f"Format mismatch: expected GRAY16, got {cpu_img_gray16.format}" - ) - np.testing.assert_array_equal(cpu_img_gray16.data, gray16_array) diff --git a/dimos/msgs/sensor_msgs/test_CameraInfo.py b/dimos/msgs/sensor_msgs/test_CameraInfo.py index d66a39727f..0cf51d15c7 100644 --- a/dimos/msgs/sensor_msgs/test_CameraInfo.py +++ b/dimos/msgs/sensor_msgs/test_CameraInfo.py @@ -14,15 +14,6 @@ # limitations under the License. 
import numpy as np -import pytest - -try: - from sensor_msgs.msg import CameraInfo as ROSCameraInfo, RegionOfInterest as ROSRegionOfInterest - from std_msgs.msg import Header as ROSHeader -except ImportError: - ROSCameraInfo = None - ROSRegionOfInterest = None - ROSHeader = None from dimos.msgs.sensor_msgs.CameraInfo import CalibrationProvider, CameraInfo from dimos.utils.path_utils import get_project_root @@ -186,175 +177,6 @@ def test_numpy_matrix_operations() -> None: print("✓ All numpy matrix operations passed!") -@pytest.mark.ros -def test_ros_conversion() -> None: - """Test ROS message conversion preserves CameraInfo data.""" - print("\nTesting ROS CameraInfo conversion...") - - # Create test camera info - original = CameraInfo( - height=720, - width=1280, - distortion_model="rational_polynomial", - D=[0.1, -0.2, 0.001, 0.002, -0.05, 0.01, -0.02, 0.003], # 8 coefficients - K=[600.0, 0.0, 640.0, 0.0, 600.0, 360.0, 0.0, 0.0, 1.0], - R=[0.999, -0.01, 0.02, 0.01, 0.999, -0.01, -0.02, 0.01, 0.999], - P=[ - 600.0, - 0.0, - 640.0, - -60.0, # Stereo baseline of 0.1m - 0.0, - 600.0, - 360.0, - 0.0, - 0.0, - 0.0, - 1.0, - 0.0, - ], - binning_x=1, - binning_y=1, - frame_id="left_camera_optical", - ts=1234567890.987654, - ) - - # Set ROI - original.roi_x_offset = 200 - original.roi_y_offset = 100 - original.roi_height = 400 - original.roi_width = 800 - original.roi_do_rectify = False - - # Test 1: Convert to ROS and back - ros_msg = original.to_ros_msg() - converted = CameraInfo.from_ros_msg(ros_msg) - - # Check all properties - assert original.height == converted.height, ( - f"Height mismatch: {original.height} vs {converted.height}" - ) - assert original.width == converted.width, ( - f"Width mismatch: {original.width} vs {converted.width}" - ) - print(f"✓ Dimensions preserved: {converted.width}x{converted.height}") - - assert original.distortion_model == converted.distortion_model, ( - f"Distortion model mismatch: '{original.distortion_model}' vs 
'{converted.distortion_model}'" - ) - print(f"✓ Distortion model preserved: '{converted.distortion_model}'") - - np.testing.assert_allclose( - original.D, - converted.D, - rtol=1e-9, - atol=1e-9, - err_msg="D coefficients don't match after ROS conversion", - ) - print(f"✓ Distortion coefficients preserved: {len(converted.D)} coefficients") - - np.testing.assert_allclose( - original.K, - converted.K, - rtol=1e-9, - atol=1e-9, - err_msg="K matrix doesn't match after ROS conversion", - ) - print("✓ K matrix preserved") - - np.testing.assert_allclose( - original.R, - converted.R, - rtol=1e-9, - atol=1e-9, - err_msg="R matrix doesn't match after ROS conversion", - ) - print("✓ R matrix preserved") - - np.testing.assert_allclose( - original.P, - converted.P, - rtol=1e-9, - atol=1e-9, - err_msg="P matrix doesn't match after ROS conversion", - ) - print("✓ P matrix preserved") - - assert original.binning_x == converted.binning_x, "Binning X mismatch" - assert original.binning_y == converted.binning_y, "Binning Y mismatch" - print(f"✓ Binning preserved: {converted.binning_x}x{converted.binning_y}") - - assert original.roi_x_offset == converted.roi_x_offset, "ROI x_offset mismatch" - assert original.roi_y_offset == converted.roi_y_offset, "ROI y_offset mismatch" - assert original.roi_height == converted.roi_height, "ROI height mismatch" - assert original.roi_width == converted.roi_width, "ROI width mismatch" - assert original.roi_do_rectify == converted.roi_do_rectify, "ROI do_rectify mismatch" - print("✓ ROI preserved") - - assert original.frame_id == converted.frame_id, ( - f"Frame ID mismatch: '{original.frame_id}' vs '{converted.frame_id}'" - ) - print(f"✓ Frame ID preserved: '{converted.frame_id}'") - - assert abs(original.ts - converted.ts) < 1e-6, ( - f"Timestamp mismatch: {original.ts} vs {converted.ts}" - ) - print(f"✓ Timestamp preserved: {converted.ts}") - - # Test 2: Create ROS message directly and convert to DIMOS - ros_msg2 = ROSCameraInfo() - ros_msg2.header = 
ROSHeader() - ros_msg2.header.frame_id = "test_camera" - ros_msg2.header.stamp.sec = 1234567890 - ros_msg2.header.stamp.nanosec = 500000000 - - ros_msg2.height = 1080 - ros_msg2.width = 1920 - ros_msg2.distortion_model = "plumb_bob" - ros_msg2.d = [-0.3, 0.15, 0.0, 0.0, 0.0] - ros_msg2.k = [1000.0, 0.0, 960.0, 0.0, 1000.0, 540.0, 0.0, 0.0, 1.0] - ros_msg2.r = [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0] - ros_msg2.p = [1000.0, 0.0, 960.0, 0.0, 0.0, 1000.0, 540.0, 0.0, 0.0, 0.0, 1.0, 0.0] - ros_msg2.binning_x = 4 - ros_msg2.binning_y = 4 - - ros_msg2.roi = ROSRegionOfInterest() - ros_msg2.roi.x_offset = 10 - ros_msg2.roi.y_offset = 20 - ros_msg2.roi.height = 100 - ros_msg2.roi.width = 200 - ros_msg2.roi.do_rectify = True - - # Convert to DIMOS - dimos_info = CameraInfo.from_ros_msg(ros_msg2) - - assert dimos_info.height == 1080, ( - f"Height not preserved: expected 1080, got {dimos_info.height}" - ) - assert dimos_info.width == 1920, f"Width not preserved: expected 1920, got {dimos_info.width}" - assert dimos_info.frame_id == "test_camera", ( - f"Frame ID not preserved: expected 'test_camera', got '{dimos_info.frame_id}'" - ) - assert dimos_info.distortion_model == "plumb_bob", "Distortion model not preserved" - assert len(dimos_info.D) == 5, ( - f"Wrong number of distortion coefficients: expected 5, got {len(dimos_info.D)}" - ) - print("✓ ROS to DIMOS conversion works correctly") - - # Test 3: Empty/minimal CameraInfo - minimal = CameraInfo(frame_id="minimal_camera", ts=1234567890.0) - minimal_ros = minimal.to_ros_msg() - minimal_converted = CameraInfo.from_ros_msg(minimal_ros) - - assert minimal.frame_id == minimal_converted.frame_id, ( - "Minimal CameraInfo frame_id not preserved" - ) - assert len(minimal_converted.D) == 0, "Minimal CameraInfo should have empty D" - print("✓ Minimal CameraInfo handling works") - - print("\n✓ All ROS conversion tests passed!") - - def test_equality() -> None: """Test CameraInfo equality comparison.""" print("\nTesting CameraInfo 
equality...") diff --git a/dimos/msgs/sensor_msgs/test_Joy.py b/dimos/msgs/sensor_msgs/test_Joy.py index 77b47f4983..499c2bb860 100644 --- a/dimos/msgs/sensor_msgs/test_Joy.py +++ b/dimos/msgs/sensor_msgs/test_Joy.py @@ -14,18 +14,6 @@ # limitations under the License. -import pytest - -try: - from sensor_msgs.msg import Joy as ROSJoy - from std_msgs.msg import Header as ROSHeader - - ROS_AVAILABLE = True -except ImportError: - ROSJoy = None - ROSHeader = None - ROS_AVAILABLE = False - from dimos.msgs.sensor_msgs.Joy import Joy @@ -165,38 +153,6 @@ def test_string_representation() -> None: print("✓ Joy string representation test passed") -@pytest.mark.ros -def test_ros_conversion() -> None: - """Test conversion to/from ROS Joy messages.""" - print("Testing Joy ROS conversion...") - - # Create a ROS Joy message - ros_msg = ROSJoy() - ros_msg.header = ROSHeader() - ros_msg.header.stamp.sec = 1234567890 - ros_msg.header.stamp.nanosec = 123456789 - ros_msg.header.frame_id = "ros_gamepad" - ros_msg.axes = [0.25, -0.75, 0.0, 1.0, -1.0] - ros_msg.buttons = [1, 1, 0, 0, 1, 0, 1, 0] - - # Convert from ROS - joy = Joy.from_ros_msg(ros_msg) - assert abs(joy.ts - 1234567890.123456789) < 1e-9 - assert joy.frame_id == "ros_gamepad" - assert joy.axes == [0.25, -0.75, 0.0, 1.0, -1.0] - assert joy.buttons == [1, 1, 0, 0, 1, 0, 1, 0] - - # Convert back to ROS - ros_msg2 = joy.to_ros_msg() - assert ros_msg2.header.frame_id == "ros_gamepad" - assert ros_msg2.header.stamp.sec == 1234567890 - assert abs(ros_msg2.header.stamp.nanosec - 123456789) < 100 # Allow small rounding - assert list(ros_msg2.axes) == [0.25, -0.75, 0.0, 1.0, -1.0] - assert list(ros_msg2.buttons) == [1, 1, 0, 0, 1, 0, 1, 0] - - print("✓ Joy ROS conversion test passed") - - def test_edge_cases() -> None: """Test Joy with edge cases.""" print("Testing Joy edge cases...") diff --git a/dimos/msgs/sensor_msgs/test_PointCloud2.py b/dimos/msgs/sensor_msgs/test_PointCloud2.py index 652ff08921..501a4cd441 100644 --- 
a/dimos/msgs/sensor_msgs/test_PointCloud2.py +++ b/dimos/msgs/sensor_msgs/test_PointCloud2.py @@ -15,26 +15,11 @@ import numpy as np -import pytest - -try: - from sensor_msgs.msg import PointCloud2 as ROSPointCloud2, PointField as ROSPointField - from std_msgs.msg import Header as ROSHeader -except ImportError: - ROSPointCloud2 = None - ROSPointField = None - ROSHeader = None from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.robot.unitree_webrtc.type.lidar import pointcloud2_from_webrtc_lidar +from dimos.robot.unitree.type.lidar import pointcloud2_from_webrtc_lidar from dimos.utils.testing import SensorReplay -# Try to import ROS types for testing -try: - ROS_AVAILABLE = True -except ImportError: - ROS_AVAILABLE = False - def test_lcm_encode_decode() -> None: """Test LCM encode/decode preserves pointcloud data.""" @@ -97,141 +82,6 @@ def test_lcm_encode_decode() -> None: print("✓ LCM encode/decode test passed - all properties preserved!") -@pytest.mark.ros -def test_ros_conversion() -> None: - """Test ROS message conversion preserves pointcloud data.""" - if not ROS_AVAILABLE: - print("ROS packages not available - skipping ROS conversion test") - return - - print("\nTesting ROS PointCloud2 conversion...") - - # Create a simple test point cloud - import open3d as o3d - - points = np.array( - [ - [1.0, 2.0, 3.0], - [4.0, 5.0, 6.0], - [-1.0, -2.0, -3.0], - [0.5, 0.5, 0.5], - ], - dtype=np.float32, - ) - - pc = o3d.geometry.PointCloud() - pc.points = o3d.utility.Vector3dVector(points) - - # Create DIMOS PointCloud2 - original = PointCloud2( - pointcloud=pc, - frame_id="test_frame", - ts=1234567890.123456, - ) - - # Test 1: Convert to ROS and back - ros_msg = original.to_ros_msg() - converted = PointCloud2.from_ros_msg(ros_msg) - - # Check points are preserved - original_points, _ = original.as_numpy() - converted_points, _ = converted.as_numpy() - - assert len(original_points) == len(converted_points), ( - f"Point count mismatch: {len(original_points)} vs 
{len(converted_points)}" - ) - - np.testing.assert_allclose( - original_points, - converted_points, - rtol=1e-6, - atol=1e-6, - err_msg="Points don't match after ROS conversion", - ) - print(f"✓ Points preserved: {len(converted_points)} points match") - - # Check metadata - assert original.frame_id == converted.frame_id, ( - f"Frame ID mismatch: '{original.frame_id}' vs '{converted.frame_id}'" - ) - print(f"✓ Frame ID preserved: '{converted.frame_id}'") - - assert abs(original.ts - converted.ts) < 1e-6, ( - f"Timestamp mismatch: {original.ts} vs {converted.ts}" - ) - print(f"✓ Timestamp preserved: {converted.ts}") - - # Test 2: Create ROS message directly and convert to DIMOS - ros_msg2 = ROSPointCloud2() - ros_msg2.header = ROSHeader() - ros_msg2.header.frame_id = "ros_test_frame" - ros_msg2.header.stamp.sec = 1234567890 - ros_msg2.header.stamp.nanosec = 123456000 - - # Set up point cloud data - ros_msg2.height = 1 - ros_msg2.width = 3 - ros_msg2.fields = [ - ROSPointField(name="x", offset=0, datatype=ROSPointField.FLOAT32, count=1), - ROSPointField(name="y", offset=4, datatype=ROSPointField.FLOAT32, count=1), - ROSPointField(name="z", offset=8, datatype=ROSPointField.FLOAT32, count=1), - ] - ros_msg2.is_bigendian = False - ros_msg2.point_step = 12 - ros_msg2.row_step = 36 - - # Pack test points - test_points = np.array( - [ - [1.0, 2.0, 3.0], - [4.0, 5.0, 6.0], - [7.0, 8.0, 9.0], - ], - dtype=np.float32, - ) - ros_msg2.data = test_points.tobytes() - ros_msg2.is_dense = True - - # Convert to DIMOS - dimos_pc = PointCloud2.from_ros_msg(ros_msg2) - - assert dimos_pc.frame_id == "ros_test_frame", ( - f"Frame ID not preserved: expected 'ros_test_frame', got '{dimos_pc.frame_id}'" - ) - - decoded_points, _ = dimos_pc.as_numpy() - assert len(decoded_points) == 3, ( - f"Wrong number of points: expected 3, got {len(decoded_points)}" - ) - - np.testing.assert_allclose( - test_points, - decoded_points, - rtol=1e-6, - atol=1e-6, - err_msg="Points from ROS message don't 
match", - ) - print("✓ ROS to DIMOS conversion works correctly") - - # Test 3: Empty point cloud - empty_pc = PointCloud2( - pointcloud=o3d.geometry.PointCloud(), - frame_id="empty_frame", - ts=1234567890.0, - ) - - empty_ros = empty_pc.to_ros_msg() - assert empty_ros.width == 0, "Empty cloud should have width 0" - assert empty_ros.height == 0, "Empty cloud should have height 0" - assert len(empty_ros.data) == 0, "Empty cloud should have no data" - - empty_converted = PointCloud2.from_ros_msg(empty_ros) - assert len(empty_converted) == 0, "Empty cloud conversion failed" - print("✓ Empty point cloud handling works") - - print("\n✓ All ROS conversion tests passed!") - - def test_bounding_box_intersects() -> None: """Test bounding_box_intersects method with various scenarios.""" # Test 1: Overlapping boxes @@ -307,5 +157,4 @@ def test_bounding_box_intersects() -> None: if __name__ == "__main__": test_lcm_encode_decode() - test_ros_conversion() test_bounding_box_intersects() diff --git a/dimos/msgs/std_msgs/Bool.py b/dimos/msgs/std_msgs/Bool.py index c11743573f..4421447edf 100644 --- a/dimos/msgs/std_msgs/Bool.py +++ b/dimos/msgs/std_msgs/Bool.py @@ -17,41 +17,12 @@ from dimos_lcm.std_msgs import Bool as LCMBool -try: - from std_msgs.msg import Bool as ROSBool # type: ignore[attr-defined] -except ImportError: - ROSBool = None # type: ignore[assignment, misc] - class Bool(LCMBool): # type: ignore[misc] - """ROS-compatible Bool message.""" + """Bool message.""" msg_name = "std_msgs.Bool" def __init__(self, data: bool = False) -> None: """Initialize Bool with data value.""" self.data = data - - @classmethod - def from_ros_msg(cls, ros_msg: ROSBool) -> "Bool": - """Create a Bool from a ROS std_msgs/Bool message. - - Args: - ros_msg: ROS Bool message - - Returns: - Bool instance - """ - return cls(data=ros_msg.data) - - def to_ros_msg(self) -> ROSBool: - """Convert to a ROS std_msgs/Bool message. 
- - Returns: - ROS Bool message - """ - if ROSBool is None: - raise ImportError("ROS std_msgs not available") - ros_msg = ROSBool() # type: ignore[no-untyped-call] - ros_msg.data = bool(self.data) - return ros_msg diff --git a/dimos/msgs/std_msgs/Int8.py b/dimos/msgs/std_msgs/Int8.py index b07e965e3f..ca87140353 100644 --- a/dimos/msgs/std_msgs/Int8.py +++ b/dimos/msgs/std_msgs/Int8.py @@ -21,41 +21,12 @@ from dimos_lcm.std_msgs import Int8 as LCMInt8 -try: - from std_msgs.msg import Int8 as ROSInt8 # type: ignore[attr-defined] -except ImportError: - ROSInt8 = None # type: ignore[assignment, misc] - class Int8(LCMInt8): # type: ignore[misc] - """ROS-compatible Int32 message.""" + """Int8 message.""" msg_name: ClassVar[str] = "std_msgs.Int8" def __init__(self, data: int = 0) -> None: """Initialize Int8 with data value.""" self.data = data - - @classmethod - def from_ros_msg(cls, ros_msg: ROSInt8) -> "Int8": - """Create a Bool from a ROS std_msgs/Bool message. - - Args: - ros_msg: ROS Int8 message - - Returns: - Int8 instance - """ - return cls(data=ros_msg.data) - - def to_ros_msg(self) -> ROSInt8: - """Convert to a ROS std_msgs/Bool message. - - Returns: - ROS Int8 message - """ - if ROSInt8 is None: - raise ImportError("ROS std_msgs not available") - ros_msg = ROSInt8() # type: ignore[no-untyped-call] - ros_msg.data = self.data - return ros_msg diff --git a/dimos/robot/unitree/go2/go2.py b/dimos/msgs/std_msgs/UInt32.py similarity index 50% rename from dimos/robot/unitree/go2/go2.py rename to dimos/msgs/std_msgs/UInt32.py index d2e7e74674..e617c782fe 100644 --- a/dimos/robot/unitree/go2/go2.py +++ b/dimos/msgs/std_msgs/UInt32.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2025-2026 Dimensional Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,26 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import logging +"""UInt32 message type.""" -from dimos.core import DimosCluster -from dimos.robot import foxglove_bridge -from dimos.robot.unitree.connection import go2 -from dimos.utils.logging_config import setup_logger +from typing import ClassVar -logger = setup_logger(level=logging.INFO) +from dimos_lcm.std_msgs import UInt32 as LCMUInt32 -def deploy(dimos: DimosCluster, ip: str): # type: ignore[no-untyped-def] - connection = go2.deploy(dimos, ip) - foxglove_bridge.deploy(dimos) +class UInt32(LCMUInt32): # type: ignore[misc] + """ROS-compatible UInt32 message.""" - # detector = moduleDB.deploy( - # dimos, - # camera=connection, - # lidar=connection, - # ) + msg_name: ClassVar[str] = "std_msgs.UInt32" - # agent = agents.deploy(dimos) - # agent.register_skills(detector) - return connection + def __init__(self, data: int = 0) -> None: + """Initialize UInt32 with data value.""" + self.data = data diff --git a/dimos/msgs/std_msgs/__init__.py b/dimos/msgs/std_msgs/__init__.py index 9002b8c4ef..ae8e3dd8f6 100644 --- a/dimos/msgs/std_msgs/__init__.py +++ b/dimos/msgs/std_msgs/__init__.py @@ -16,5 +16,6 @@ from .Header import Header from .Int8 import Int8 from .Int32 import Int32 +from .UInt32 import UInt32 -__all__ = ["Bool", "Header", "Int8", "Int32"] +__all__ = ["Bool", "Header", "Int8", "Int32", "UInt32"] diff --git a/dimos/msgs/tf2_msgs/TFMessage.py b/dimos/msgs/tf2_msgs/TFMessage.py index 54eaaf9215..7a47a96e6d 100644 --- a/dimos/msgs/tf2_msgs/TFMessage.py +++ b/dimos/msgs/tf2_msgs/TFMessage.py @@ -31,15 +31,6 @@ from dimos_lcm.tf2_msgs import TFMessage as LCMTFMessage -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - TransformStamped as ROSTransformStamped, - ) - from tf2_msgs.msg import TFMessage as ROSTFMessage # type: ignore[attr-defined] -except ImportError: - ROSTFMessage = None # type: ignore[assignment, misc] - ROSTransformStamped = None # type: ignore[assignment, misc] - from dimos.msgs.geometry_msgs.Quaternion import Quaternion 
from dimos.msgs.geometry_msgs.Transform import Transform from dimos.msgs.geometry_msgs.Vector3 import Vector3 @@ -47,6 +38,8 @@ if TYPE_CHECKING: from collections.abc import Iterator + from dimos.visualization.rerun.bridge import RerunMulti + class TFMessage: """TFMessage that accepts Transform objects and encodes to LCM format.""" @@ -125,42 +118,12 @@ def __repr__(self) -> str: def __str__(self) -> str: lines = [f"TFMessage with {len(self.transforms)} transforms:"] for i, transform in enumerate(self.transforms): - lines.append(f" [{i}] {transform.frame_id} @ {transform.ts:.3f}") + lines.append( + f" [{i}] {transform.frame_id} → {transform.child_frame_id} @ {transform.ts:.3f}" + ) return "\n".join(lines) - @classmethod - def from_ros_msg(cls, ros_msg: ROSTFMessage) -> TFMessage: - """Create a TFMessage from a ROS tf2_msgs/TFMessage message. - - Args: - ros_msg: ROS TFMessage message - - Returns: - TFMessage instance - """ - transforms = [] - for ros_transform_stamped in ros_msg.transforms: - # Convert from ROS TransformStamped to our Transform - transform = Transform.from_ros_transform_stamped(ros_transform_stamped) - transforms.append(transform) - - return cls(*transforms) - - def to_ros_msg(self) -> ROSTFMessage: - """Convert to a ROS tf2_msgs/TFMessage message. - - Returns: - ROS TFMessage message - """ - ros_msg = ROSTFMessage() # type: ignore[no-untyped-call] - - # Convert each Transform to ROS TransformStamped - for transform in self.transforms: - ros_msg.transforms.append(transform.to_ros_transform_stamped()) - - return ros_msg - - def to_rerun(self): # type: ignore[no-untyped-def] + def to_rerun(self) -> RerunMulti: """Convert to a list of rerun Transform3D archetypes. 
Returns a list of tuples (entity_path, Transform3D) for each transform @@ -176,8 +139,8 @@ def to_rerun(self): # type: ignore[no-untyped-def] for path, transform in tf_msg.to_rerun(): rr.log(path, transform) """ - results = [] + results: RerunMulti = [] for transform in self.transforms: entity_path = f"world/tf/{transform.child_frame_id}" - results.append((entity_path, transform.to_rerun())) # type: ignore[no-untyped-call] + results.append((entity_path, transform.to_rerun())) return results diff --git a/dimos/msgs/tf2_msgs/test_TFMessage.py b/dimos/msgs/tf2_msgs/test_TFMessage.py index 783692fb35..8567de9988 100644 --- a/dimos/msgs/tf2_msgs/test_TFMessage.py +++ b/dimos/msgs/tf2_msgs/test_TFMessage.py @@ -12,15 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest - -try: - from geometry_msgs.msg import TransformStamped as ROSTransformStamped - from tf2_msgs.msg import TFMessage as ROSTFMessage -except ImportError: - ROSTransformStamped = None - ROSTFMessage = None - from dimos_lcm.tf2_msgs import TFMessage as LCMTFMessage from dimos.msgs.geometry_msgs import Quaternion, Transform, Vector3 @@ -70,6 +61,38 @@ def test_tfmessage_add_transform() -> None: assert msg[0] == tf +def test_tfmessage_tree() -> None: + """Test adding transforms to TFMessage.""" + msg = TFMessage() + + msg.add_transform( + Transform( + translation=Vector3(1, 2, 0.5), frame_id="world", child_frame_id="robot", ts=200.0 + ) + ) + msg.add_transform( + Transform( + translation=Vector3(0.1, 0, 1.0), frame_id="robot", child_frame_id="camera", ts=200.0 + ) + ) + msg.add_transform( + Transform( + translation=Vector3(0.2, 0, 0.2), frame_id="robot", child_frame_id="lidar", ts=200.0 + ) + ) + msg.add_transform( + Transform( + translation=Vector3(0.05, 0, 0.0), + frame_id="lidar", + child_frame_id="lidar_scanner", + ts=200.0, + ) + ) + + assert len(msg) == 4 + print(msg) + + def test_tfmessage_lcm_encode_decode() -> None: """Test 
encoding TFMessage to LCM bytes.""" # Create transforms @@ -115,155 +138,3 @@ def test_tfmessage_lcm_encode_decode() -> None: assert ts2.child_frame_id == "target" assert ts2.transform.rotation.z == 0.707 assert ts2.transform.rotation.w == 0.707 - - -@pytest.mark.ros -def test_tfmessage_from_ros_msg() -> None: - """Test creating a TFMessage from a ROS TFMessage message.""" - - ros_msg = ROSTFMessage() - - # Add first transform - tf1 = ROSTransformStamped() - tf1.header.frame_id = "world" - tf1.header.stamp.sec = 123 - tf1.header.stamp.nanosec = 456000000 - tf1.child_frame_id = "robot" - tf1.transform.translation.x = 1.0 - tf1.transform.translation.y = 2.0 - tf1.transform.translation.z = 3.0 - tf1.transform.rotation.x = 0.0 - tf1.transform.rotation.y = 0.0 - tf1.transform.rotation.z = 0.0 - tf1.transform.rotation.w = 1.0 - ros_msg.transforms.append(tf1) - - # Add second transform - tf2 = ROSTransformStamped() - tf2.header.frame_id = "robot" - tf2.header.stamp.sec = 124 - tf2.header.stamp.nanosec = 567000000 - tf2.child_frame_id = "sensor" - tf2.transform.translation.x = 4.0 - tf2.transform.translation.y = 5.0 - tf2.transform.translation.z = 6.0 - tf2.transform.rotation.x = 0.0 - tf2.transform.rotation.y = 0.0 - tf2.transform.rotation.z = 0.707 - tf2.transform.rotation.w = 0.707 - ros_msg.transforms.append(tf2) - - # Convert to TFMessage - tfmsg = TFMessage.from_ros_msg(ros_msg) - - assert len(tfmsg) == 2 - - # Check first transform - assert tfmsg[0].frame_id == "world" - assert tfmsg[0].child_frame_id == "robot" - assert tfmsg[0].ts == 123.456 - assert tfmsg[0].translation.x == 1.0 - assert tfmsg[0].translation.y == 2.0 - assert tfmsg[0].translation.z == 3.0 - assert tfmsg[0].rotation.w == 1.0 - - # Check second transform - assert tfmsg[1].frame_id == "robot" - assert tfmsg[1].child_frame_id == "sensor" - assert tfmsg[1].ts == 124.567 - assert tfmsg[1].translation.x == 4.0 - assert tfmsg[1].translation.y == 5.0 - assert tfmsg[1].translation.z == 6.0 - assert 
tfmsg[1].rotation.z == 0.707 - assert tfmsg[1].rotation.w == 0.707 - - -@pytest.mark.ros -def test_tfmessage_to_ros_msg() -> None: - """Test converting a TFMessage to a ROS TFMessage message.""" - # Create transforms - tf1 = Transform( - translation=Vector3(1.0, 2.0, 3.0), - rotation=Quaternion(0.0, 0.0, 0.0, 1.0), - frame_id="map", - child_frame_id="base_link", - ts=123.456, - ) - tf2 = Transform( - translation=Vector3(7.0, 8.0, 9.0), - rotation=Quaternion(0.1, 0.2, 0.3, 0.9), - frame_id="base_link", - child_frame_id="lidar", - ts=125.789, - ) - - tfmsg = TFMessage(tf1, tf2) - - # Convert to ROS message - ros_msg = tfmsg.to_ros_msg() - - assert isinstance(ros_msg, ROSTFMessage) - assert len(ros_msg.transforms) == 2 - - # Check first transform - assert ros_msg.transforms[0].header.frame_id == "map" - assert ros_msg.transforms[0].child_frame_id == "base_link" - assert ros_msg.transforms[0].header.stamp.sec == 123 - assert ros_msg.transforms[0].header.stamp.nanosec == 456000000 - assert ros_msg.transforms[0].transform.translation.x == 1.0 - assert ros_msg.transforms[0].transform.translation.y == 2.0 - assert ros_msg.transforms[0].transform.translation.z == 3.0 - assert ros_msg.transforms[0].transform.rotation.w == 1.0 - - # Check second transform - assert ros_msg.transforms[1].header.frame_id == "base_link" - assert ros_msg.transforms[1].child_frame_id == "lidar" - assert ros_msg.transforms[1].header.stamp.sec == 125 - assert ros_msg.transforms[1].header.stamp.nanosec == 789000000 - assert ros_msg.transforms[1].transform.translation.x == 7.0 - assert ros_msg.transforms[1].transform.translation.y == 8.0 - assert ros_msg.transforms[1].transform.translation.z == 9.0 - assert ros_msg.transforms[1].transform.rotation.x == 0.1 - assert ros_msg.transforms[1].transform.rotation.y == 0.2 - assert ros_msg.transforms[1].transform.rotation.z == 0.3 - assert ros_msg.transforms[1].transform.rotation.w == 0.9 - - -@pytest.mark.ros -def test_tfmessage_ros_roundtrip() -> None: - 
"""Test round-trip conversion between TFMessage and ROS TFMessage.""" - # Create transforms with various properties - tf1 = Transform( - translation=Vector3(1.5, 2.5, 3.5), - rotation=Quaternion(0.15, 0.25, 0.35, 0.85), - frame_id="odom", - child_frame_id="base_footprint", - ts=100.123, - ) - tf2 = Transform( - translation=Vector3(0.1, 0.2, 0.3), - rotation=Quaternion(0.0, 0.0, 0.383, 0.924), - frame_id="base_footprint", - child_frame_id="camera", - ts=100.456, - ) - - original = TFMessage(tf1, tf2) - - # Convert to ROS and back - ros_msg = original.to_ros_msg() - restored = TFMessage.from_ros_msg(ros_msg) - - assert len(restored) == len(original) - - for orig_tf, rest_tf in zip(original, restored, strict=False): - assert rest_tf.frame_id == orig_tf.frame_id - assert rest_tf.child_frame_id == orig_tf.child_frame_id - assert rest_tf.ts == orig_tf.ts - assert rest_tf.translation.x == orig_tf.translation.x - assert rest_tf.translation.y == orig_tf.translation.y - assert rest_tf.translation.z == orig_tf.translation.z - assert rest_tf.rotation.x == orig_tf.rotation.x - assert rest_tf.rotation.y == orig_tf.rotation.y - assert rest_tf.rotation.z == orig_tf.rotation.z - assert rest_tf.rotation.w == orig_tf.rotation.w diff --git a/dimos/msgs/tf2_msgs/test_TFMessage_lcmpub.py b/dimos/msgs/tf2_msgs/test_TFMessage_lcmpub.py index 0846f91ee6..396d796193 100644 --- a/dimos/msgs/tf2_msgs/test_TFMessage_lcmpub.py +++ b/dimos/msgs/tf2_msgs/test_TFMessage_lcmpub.py @@ -18,7 +18,7 @@ from dimos.msgs.geometry_msgs import Quaternion, Transform, Vector3 from dimos.msgs.tf2_msgs import TFMessage -from dimos.protocol.pubsub.lcmpubsub import LCM, Topic +from dimos.protocol.pubsub.impl.lcmpubsub import LCM, Topic # Publishes a series of transforms representing a robot kinematic chain diff --git a/dimos/navigation/demo_ros_navigation.py b/dimos/navigation/demo_ros_navigation.py index 733f66c1b7..4919ab0efd 100644 --- a/dimos/navigation/demo_ros_navigation.py +++ 
b/dimos/navigation/demo_ros_navigation.py @@ -14,13 +14,9 @@ import time -import rclpy - from dimos import core -from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Twist, Vector3 -from dimos.msgs.nav_msgs import Path -from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.navigation.rosnav import ROSNav +from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Vector3 +from dimos.navigation import rosnav from dimos.protocol import pubsub from dimos.utils.logging_config import setup_logger @@ -31,16 +27,7 @@ def main() -> None: pubsub.lcm.autoconf() # type: ignore[attr-defined] dimos = core.start(2) - ros_nav = dimos.deploy(ROSNav) # type: ignore[attr-defined] - - ros_nav.goal_req.transport = core.LCMTransport("/goal", PoseStamped) - ros_nav.pointcloud.transport = core.LCMTransport("/pointcloud_map", PointCloud2) - ros_nav.global_pointcloud.transport = core.LCMTransport("/global_pointcloud", PointCloud2) - ros_nav.goal_active.transport = core.LCMTransport("/goal_active", PoseStamped) - ros_nav.path_active.transport = core.LCMTransport("/path_active", Path) - ros_nav.cmd_vel.transport = core.LCMTransport("/cmd_vel", Twist) - - ros_nav.start() + ros_nav = rosnav.deploy(dimos) logger.info("\nTesting navigation in 2 seconds...") time.sleep(2) @@ -48,13 +35,17 @@ def main() -> None: test_pose = PoseStamped( ts=time.time(), frame_id="map", - position=Vector3(2.0, 2.0, 0.0), + position=Vector3(10.0, 10.0, 0.0), orientation=Quaternion(0.0, 0.0, 0.0, 1.0), ) - logger.info("Sending navigation goal to: (2.0, 2.0, 0.0)") - success = ros_nav.navigate_to(test_pose, timeout=30.0) - logger.info(f"Navigated successfully: {success}") + logger.info("Sending navigation goal to: (10.0, 10.0, 0.0)") + ros_nav.set_goal(test_pose) + time.sleep(5) + + logger.info("Cancelling goal after 5 seconds...") + cancelled = ros_nav.cancel_goal() + logger.info(f"Goal cancelled: {cancelled}") try: logger.info("\nNavBot running. 
Press Ctrl+C to stop.") @@ -64,9 +55,6 @@ def main() -> None: logger.info("\nShutting down...") ros_nav.stop() - if rclpy.ok(): # type: ignore[attr-defined] - rclpy.shutdown() - if __name__ == "__main__": main() diff --git a/dimos/navigation/frontier_exploration/wavefront_frontier_goal_selector.py b/dimos/navigation/frontier_exploration/wavefront_frontier_goal_selector.py index c5d5ab2659..3adfc1c598 100644 --- a/dimos/navigation/frontier_exploration/wavefront_frontier_goal_selector.py +++ b/dimos/navigation/frontier_exploration/wavefront_frontier_goal_selector.py @@ -28,6 +28,7 @@ import numpy as np from reactivex.disposable import Disposable +from dimos.agents.annotation import skill from dimos.core import In, Module, Out, rpc from dimos.mapping.occupancy.inflation import simple_inflate from dimos.msgs.geometry_msgs import PoseStamped, Vector3 @@ -100,7 +101,7 @@ class WavefrontFrontierExplorer(Module): # LCM outputs goal_request: Out[PoseStamped] - def __init__( # type: ignore[no-untyped-def] + def __init__( self, min_frontier_perimeter: float = 0.5, occupancy_threshold: int = 99, @@ -110,7 +111,6 @@ def __init__( # type: ignore[no-untyped-def] info_gain_threshold: float = 0.03, num_no_gain_attempts: int = 2, goal_timeout: float = 15.0, - **kwargs, ) -> None: """ Initialize the frontier explorer. 
@@ -122,7 +122,7 @@ def __init__( # type: ignore[no-untyped-def] info_gain_threshold: Minimum percentage increase in costmap information required to continue exploration (0.05 = 5%) num_no_gain_attempts: Maximum number of consecutive attempts with no information gain """ - super().__init__(**kwargs) + super().__init__() self.min_frontier_perimeter = min_frontier_perimeter self.occupancy_threshold = occupancy_threshold self.safe_distance = safe_distance @@ -149,8 +149,6 @@ def __init__( # type: ignore[no-untyped-def] self.exploration_thread: threading.Thread | None = None self.stop_event = threading.Event() - logger.info("WavefrontFrontierExplorer module initialized") - @rpc def start(self) -> None: super().start() @@ -737,6 +735,16 @@ def stop_exploration(self) -> bool: ): self.exploration_thread.join(timeout=2.0) + # Publish current location as goal to stop the robot. + if self.latest_odometry is not None: + goal = PoseStamped( + position=self.latest_odometry.position, + orientation=self.latest_odometry.orientation, + frame_id="world", + ts=self.latest_odometry.ts, + ) + self.goal_request.publish(goal) + logger.info("Stopped autonomous frontier exploration") return True @@ -814,6 +822,26 @@ def _exploration_loop(self) -> None: ) threading.Event().wait(2.0) + @skill + def begin_exploration(self) -> str: + """Command the robot to move around and explore the area. Cancelled with end_exploration.""" + started = self.explore() + if not started: + return "Exploration skill is already active. Use end_exploration to stop before starting again." + return ( + "Started exploration skill. The robot is now moving. Use end_exploration " + "to stop. You also need to cancel before starting a new movement tool." + ) + + @skill + def end_exploration(self) -> str: + """Cancel the exploration. The robot will stop moving and remain where it is.""" + stopped = self.stop_exploration() + if stopped: + return "Stopped exploration. The robot has stopped moving." 
+ else: + return "Exploration skill was not active, so nothing was stopped." + wavefront_frontier_explorer = WavefrontFrontierExplorer.blueprint diff --git a/dimos/navigation/replanning_a_star/global_planner.py b/dimos/navigation/replanning_a_star/global_planner.py index 8dc1a42ccf..df2680a4a7 100644 --- a/dimos/navigation/replanning_a_star/global_planner.py +++ b/dimos/navigation/replanning_a_star/global_planner.py @@ -28,7 +28,6 @@ from dimos.msgs.geometry_msgs.Vector3 import Vector3 from dimos.msgs.nav_msgs.OccupancyGrid import CostValues, OccupancyGrid from dimos.msgs.nav_msgs.Path import Path -from dimos.msgs.sensor_msgs import Image from dimos.navigation.base import NavigationState from dimos.navigation.replanning_a_star.goal_validator import find_safe_goal from dimos.navigation.replanning_a_star.local_planner import LocalPlanner, StopMessage @@ -156,8 +155,8 @@ def cmd_vel(self) -> Subject[Twist]: return self._local_planner.cmd_vel @property - def debug_navigation(self) -> Subject[Image]: - return self._local_planner.debug_navigation + def navigation_costmap(self) -> Subject[OccupancyGrid]: + return self._local_planner.navigation_costmap def _thread_entrypoint(self) -> None: """Monitor if the robot is stuck, veers off track, or stopped navigating.""" diff --git a/dimos/navigation/replanning_a_star/local_planner.py b/dimos/navigation/replanning_a_star/local_planner.py index 65a18d0637..a5f8d9e457 100644 --- a/dimos/navigation/replanning_a_star/local_planner.py +++ b/dimos/navigation/replanning_a_star/local_planner.py @@ -23,11 +23,9 @@ from dimos.core.global_config import GlobalConfig from dimos.core.resource import Resource -from dimos.mapping.occupancy.visualize_path import visualize_path from dimos.msgs.geometry_msgs import Twist from dimos.msgs.geometry_msgs.PoseStamped import PoseStamped -from dimos.msgs.nav_msgs import Path -from dimos.msgs.sensor_msgs import Image +from dimos.msgs.nav_msgs import OccupancyGrid, Path from dimos.navigation.base import 
NavigationState from dimos.navigation.replanning_a_star.controllers import Controller, PController from dimos.navigation.replanning_a_star.navigation_map import NavigationMap @@ -47,7 +45,7 @@ class LocalPlanner(Resource): cmd_vel: Subject[Twist] stopped_navigating: Subject[StopMessage] - debug_navigation: Subject[Image] + navigation_costmap: Subject[OccupancyGrid] _thread: Thread | None = None _path: Path | None = None @@ -68,15 +66,15 @@ class LocalPlanner(Resource): _speed: float = 0.55 _control_frequency: float = 10 _orientation_tolerance: float = 0.35 - _debug_navigation_interval: float = 1.0 - _debug_navigation_last: float = 0.0 + _navigation_costmap_interval: float = 1.0 + _navigation_costmap_last: float = 0.0 def __init__( self, global_config: GlobalConfig, navigation_map: NavigationMap, goal_tolerance: float ) -> None: self.cmd_vel = Subject() self.stopped_navigating = Subject() - self.debug_navigation = Subject() + self.navigation_costmap = Subject() self._pose_index = 0 self._lock = RLock() @@ -195,7 +193,7 @@ def _loop(self) -> None: path_clearance.update_costmap(self._navigation_map.binary_costmap) path_clearance.update_pose_index(self._pose_index) - self._send_debug_navigation(path, path_clearance) + self._send_navigation_costmap(path, path_clearance) if path_clearance.is_obstacle_ahead(): logger.info("Obstacle detected ahead, stopping local planner.") @@ -313,51 +311,14 @@ def _reset_state(self) -> None: self._pose_index = 0 self._controller.reset_errors() - def _send_debug_navigation(self, path: Path, path_clearance: PathClearance) -> None: + def _send_navigation_costmap(self, path: Path, path_clearance: PathClearance) -> None: if "DEBUG_NAVIGATION" not in os.environ: return now = time.time() - if now - self._debug_navigation_last < self._debug_navigation_interval: + if now - self._navigation_costmap_last < self._navigation_costmap_interval: return - self._debug_navigation_last = now + self._navigation_costmap_last = now - 
self.debug_navigation.on_next(self._make_debug_navigation_image(path, path_clearance)) - - def _make_debug_navigation_image(self, path: Path, path_clearance: PathClearance) -> Image: - scale = 8 - image = visualize_path( - self._navigation_map.gradient_costmap, - path, - self._global_config.robot_width, - self._global_config.robot_rotation_diameter, - 2, - scale, - ) - image.data = np.flipud(image.data) - - # Add path mask. - mask = path_clearance.mask - scaled_mask = np.repeat(np.repeat(mask, scale, axis=0), scale, axis=1) - scaled_mask = np.flipud(scaled_mask) - white = np.array([255, 255, 255], dtype=np.int16) - image.data[scaled_mask] = (image.data[scaled_mask].astype(np.int16) * 3 + white * 7) // 10 - - with self._lock: - current_odom = self._current_odom - - # Draw robot position. - if current_odom is not None: - grid_pos = self._navigation_map.gradient_costmap.world_to_grid(current_odom.position) - x = int(grid_pos.x * scale) - y = image.data.shape[0] - 1 - int(grid_pos.y * scale) - radius = 8 - for dy in range(-radius, radius + 1): - for dx in range(-radius, radius + 1): - if dx * dx + dy * dy <= radius * radius: - py, px = y + dy, x + dx - if 0 <= py < image.data.shape[0] and 0 <= px < image.data.shape[1]: - image.data[py, px] = [255, 255, 255] - - return image + self.navigation_costmap.on_next(self._navigation_map.gradient_costmap) diff --git a/dimos/navigation/replanning_a_star/module.py b/dimos/navigation/replanning_a_star/module.py index 6ba1ae0ba1..d1d87cbbf6 100644 --- a/dimos/navigation/replanning_a_star/module.py +++ b/dimos/navigation/replanning_a_star/module.py @@ -16,14 +16,11 @@ from dimos_lcm.std_msgs import Bool, String from reactivex.disposable import Disposable -import rerun as rr from dimos.core import In, Module, Out, rpc -from dimos.core.global_config import GlobalConfig -from dimos.dashboard.rerun_init import connect_rerun +from dimos.core.global_config import GlobalConfig, global_config from dimos.msgs.geometry_msgs import PoseStamped, 
Twist from dimos.msgs.nav_msgs import OccupancyGrid, Path -from dimos.msgs.sensor_msgs import Image from dimos.navigation.base import NavigationInterface, NavigationState from dimos.navigation.replanning_a_star.global_planner import GlobalPlanner @@ -38,29 +35,20 @@ class ReplanningAStarPlanner(Module, NavigationInterface): navigation_state: Out[String] # TODO: set it cmd_vel: Out[Twist] path: Out[Path] - debug_navigation: Out[Image] + navigation_costmap: Out[OccupancyGrid] _planner: GlobalPlanner _global_config: GlobalConfig - def __init__(self, global_config: GlobalConfig | None = None) -> None: + def __init__(self, cfg: GlobalConfig = global_config) -> None: super().__init__() - self._global_config = global_config or GlobalConfig() + self._global_config = cfg self._planner = GlobalPlanner(self._global_config) @rpc def start(self) -> None: super().start() - if self._global_config.viewer_backend.startswith("rerun"): - connect_rerun(global_config=self._global_config) - - # Manual Rerun logging for path - def _log_path_to_rerun(path: Path) -> None: - rr.log("world/nav/path", path.to_rerun()) # type: ignore[no-untyped-call] - - self._disposables.add(self._planner.path.subscribe(_log_path_to_rerun)) - self._disposables.add(Disposable(self.odom.subscribe(self._planner.handle_odom))) self._disposables.add( Disposable(self.global_costmap.subscribe(self._planner.handle_global_costmap)) @@ -78,7 +66,7 @@ def _log_path_to_rerun(path: Path) -> None: if "DEBUG_NAVIGATION" in os.environ: self._disposables.add( - self._planner.debug_navigation.subscribe(self.debug_navigation.publish) + self._planner.navigation_costmap.subscribe(self.navigation_costmap.publish) ) self._planner.start() diff --git a/dimos/navigation/rosnav.py b/dimos/navigation/rosnav.py index 88fa7985eb..8efabaebee 100644 --- a/dimos/navigation/rosnav.py +++ b/dimos/navigation/rosnav.py @@ -15,49 +15,36 @@ """ NavBot class for navigation-related functionality. 
-Encapsulates ROS bridge and topic remapping for Unitree robots. +Encapsulates ROS transport and topic remapping for Unitree robots. """ -from collections.abc import Generator from dataclasses import dataclass, field import logging import threading import time -from geometry_msgs.msg import ( # type: ignore[attr-defined] - PointStamped as ROSPointStamped, - PoseStamped as ROSPoseStamped, - TwistStamped as ROSTwistStamped, -) -from nav_msgs.msg import Path as ROSPath # type: ignore[attr-defined] -import rclpy -from rclpy.node import Node from reactivex import operators as ops from reactivex.subject import Subject -from sensor_msgs.msg import ( # type: ignore[attr-defined] - Joy as ROSJoy, - PointCloud2 as ROSPointCloud2, -) -from std_msgs.msg import ( # type: ignore[attr-defined] - Bool as ROSBool, - Int8 as ROSInt8, -) -from tf2_msgs.msg import TFMessage as ROSTFMessage # type: ignore[attr-defined] from dimos import spec -from dimos.agents import Reducer, Stream, skill # type: ignore[attr-defined] -from dimos.core import DimosCluster, In, LCMTransport, Module, Out, pSHMTransport, rpc -from dimos.core.module import ModuleConfig +from dimos.agents.annotation import skill +from dimos.core import DimosCluster, In, LCMTransport, Module, Out, rpc +from dimos.core._dask_exports import DimosCluster +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig +from dimos.core.stream import In, Out +from dimos.core.transport import LCMTransport, ROSTransport from dimos.msgs.geometry_msgs import ( PoseStamped, Quaternion, Transform, Twist, + TwistStamped, Vector3, ) from dimos.msgs.nav_msgs import Path -from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.msgs.std_msgs import Bool +from dimos.msgs.sensor_msgs import Joy, PointCloud2 +from dimos.msgs.std_msgs import Bool, Int8 from dimos.msgs.tf2_msgs.TFMessage import TFMessage from dimos.navigation.base import NavigationInterface, NavigationState from dimos.utils.logging_config import 
setup_logger @@ -69,33 +56,48 @@ @dataclass class Config(ModuleConfig): local_pointcloud_freq: float = 2.0 - global_pointcloud_freq: float = 1.0 + global_map_freq: float = 1.0 sensor_to_base_link_transform: Transform = field( default_factory=lambda: Transform(frame_id="sensor", child_frame_id="base_link") ) class ROSNav( - Module, NavigationInterface, spec.Nav, spec.Global3DMap, spec.Pointcloud, spec.LocalPlanner + Module, NavigationInterface, spec.Nav, spec.GlobalPointcloud, spec.Pointcloud, spec.LocalPlanner ): config: Config default_config = Config + # Existing ports (default LCM/pSHM transport) goal_req: In[PoseStamped] pointcloud: Out[PointCloud2] - global_pointcloud: Out[PointCloud2] + global_map: Out[PointCloud2] goal_active: Out[PoseStamped] path_active: Out[Path] cmd_vel: Out[Twist] + # ROS In ports (receiving from ROS topics via ROSTransport) + ros_goal_reached: In[Bool] + ros_cmd_vel: In[TwistStamped] + ros_way_point: In[PoseStamped] + ros_registered_scan: In[PointCloud2] + ros_global_map: In[PointCloud2] + ros_path: In[Path] + ros_tf: In[TFMessage] + + # ROS Out ports (publishing to ROS topics via ROSTransport) + ros_goal_pose: Out[PoseStamped] + ros_cancel_goal: Out[Bool] + ros_soft_stop: Out[Int8] + ros_joy: Out[Joy] + # Using RxPY Subjects for reactive data flow instead of storing state _local_pointcloud_subject: Subject # type: ignore[type-arg] - _global_pointcloud_subject: Subject # type: ignore[type-arg] + _global_map_subject: Subject # type: ignore[type-arg] _current_position_running: bool = False - _spin_thread: threading.Thread | None = None _goal_reach: bool | None = None # Navigation state tracking for NavigationInterface @@ -110,46 +112,14 @@ def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def] # Initialize RxPY Subjects for streaming data self._local_pointcloud_subject = Subject() - self._global_pointcloud_subject = Subject() + self._global_map_subject = Subject() # Initialize state tracking self._state_lock = 
threading.Lock() self._navigation_state = NavigationState.IDLE self._goal_reached = False - if not rclpy.ok(): # type: ignore[attr-defined] - rclpy.init() - - self._node = Node("navigation_module") - - # ROS2 Publishers - self.goal_pose_pub = self._node.create_publisher(ROSPoseStamped, "/goal_pose", 10) - self.cancel_goal_pub = self._node.create_publisher(ROSBool, "/cancel_goal", 10) - self.soft_stop_pub = self._node.create_publisher(ROSInt8, "/stop", 10) - self.joy_pub = self._node.create_publisher(ROSJoy, "/joy", 10) - - # ROS2 Subscribers - self.goal_reached_sub = self._node.create_subscription( - ROSBool, "/goal_reached", self._on_ros_goal_reached, 10 - ) - self.cmd_vel_sub = self._node.create_subscription( - ROSTwistStamped, "/cmd_vel", self._on_ros_cmd_vel, 10 - ) - self.goal_waypoint_sub = self._node.create_subscription( - ROSPointStamped, "/way_point", self._on_ros_goal_waypoint, 10 - ) - self.registered_scan_sub = self._node.create_subscription( - ROSPointCloud2, "/registered_scan", self._on_ros_registered_scan, 10 - ) - - self.global_pointcloud_sub = self._node.create_subscription( - ROSPointCloud2, "/terrain_map_ext", self._on_ros_global_pointcloud, 10 - ) - - self.path_sub = self._node.create_subscription(ROSPath, "/path", self._on_ros_path, 10) - self.tf_sub = self._node.create_subscription(ROSTFMessage, "/tf", self._on_ros_tf, 10) - - logger.info("NavigationModule initialized with ROS2 node") + logger.info("NavigationModule initialized") @rpc def start(self) -> None: @@ -157,8 +127,7 @@ def start(self) -> None: self._disposables.add( self._local_pointcloud_subject.pipe( - ops.sample(1.0 / self.config.local_pointcloud_freq), # Sample at desired frequency - ops.map(lambda msg: PointCloud2.from_ros_msg(msg)), # type: ignore[arg-type] + ops.sample(1.0 / self.config.local_pointcloud_freq), ).subscribe( on_next=self.pointcloud.publish, on_error=lambda e: logger.error(f"Lidar stream error: {e}"), @@ -166,65 +135,50 @@ def start(self) -> None: ) 
self._disposables.add( - self._global_pointcloud_subject.pipe( - ops.sample(1.0 / self.config.global_pointcloud_freq), # Sample at desired frequency - ops.map(lambda msg: PointCloud2.from_ros_msg(msg)), # type: ignore[arg-type] + self._global_map_subject.pipe( + ops.sample(1.0 / self.config.global_map_freq), ).subscribe( - on_next=self.global_pointcloud.publish, + on_next=self.global_map.publish, on_error=lambda e: logger.error(f"Map stream error: {e}"), ) ) - # Create and start the spin thread for ROS2 node spinning - self._spin_thread = threading.Thread( - target=self._spin_node, daemon=True, name="ROS2SpinThread" - ) - self._spin_thread.start() + # Subscribe to ROS In ports + self.ros_goal_reached.subscribe(self._on_ros_goal_reached) + self.ros_cmd_vel.subscribe(self._on_ros_cmd_vel) + self.ros_way_point.subscribe(self._on_ros_goal_waypoint) + self.ros_registered_scan.subscribe(self._on_ros_registered_scan) + self.ros_global_map.subscribe(self._on_ros_global_map) + self.ros_path.subscribe(self._on_ros_path) + self.ros_tf.subscribe(self._on_ros_tf) self.goal_req.subscribe(self._on_goal_pose) - logger.info("NavigationModule started with ROS2 spinning and RxPY streams") + logger.info("NavigationModule started with ROS transport and RxPY streams") - def _spin_node(self) -> None: - while self._running and rclpy.ok(): # type: ignore[attr-defined] - try: - rclpy.spin_once(self._node, timeout_sec=0.1) - except Exception as e: - if self._running: - logger.error(f"ROS2 spin error: {e}") - - def _on_ros_goal_reached(self, msg: ROSBool) -> None: + def _on_ros_goal_reached(self, msg: Bool) -> None: self._goal_reach = msg.data if msg.data: with self._state_lock: self._goal_reached = True self._navigation_state = NavigationState.IDLE - def _on_ros_goal_waypoint(self, msg: ROSPointStamped) -> None: - dimos_pose = PoseStamped( - ts=time.time(), - frame_id=msg.header.frame_id, - position=Vector3(msg.point.x, msg.point.y, msg.point.z), - orientation=Quaternion(0.0, 0.0, 0.0, 1.0), 
- ) - self.goal_active.publish(dimos_pose) + def _on_ros_goal_waypoint(self, msg: PoseStamped) -> None: + self.goal_active.publish(msg) - def _on_ros_cmd_vel(self, msg: ROSTwistStamped) -> None: - self.cmd_vel.publish(Twist.from_ros_msg(msg.twist)) + def _on_ros_cmd_vel(self, msg: TwistStamped) -> None: + self.cmd_vel.publish(Twist(linear=msg.linear, angular=msg.angular)) - def _on_ros_registered_scan(self, msg: ROSPointCloud2) -> None: + def _on_ros_registered_scan(self, msg: PointCloud2) -> None: self._local_pointcloud_subject.on_next(msg) - def _on_ros_global_pointcloud(self, msg: ROSPointCloud2) -> None: - self._global_pointcloud_subject.on_next(msg) - - def _on_ros_path(self, msg: ROSPath) -> None: - dimos_path = Path.from_ros_msg(msg) - dimos_path.frame_id = "base_link" - self.path_active.publish(dimos_path) + def _on_ros_global_map(self, msg: PointCloud2) -> None: + self._global_map_subject.on_next(msg) - def _on_ros_tf(self, msg: ROSTFMessage) -> None: - ros_tf = TFMessage.from_ros_msg(msg) + def _on_ros_path(self, msg: Path) -> None: + msg.frame_id = "base_link" + self.path_active.publish(msg) + def _on_ros_tf(self, msg: TFMessage) -> None: map_to_world_tf = Transform( translation=Vector3(0.0, 0.0, 0.0), rotation=euler_to_quaternion(Vector3(0.0, 0.0, 0.0)), @@ -236,7 +190,7 @@ def _on_ros_tf(self, msg: ROSTFMessage) -> None: self.tf.publish( self.config.sensor_to_base_link_transform.now(), map_to_world_tf, - *ros_tf.transforms, + *msg.transforms, ) def _on_goal_pose(self, msg: PoseStamped) -> None: @@ -247,48 +201,15 @@ def _on_cancel_goal(self, msg: Bool) -> None: self.stop() def _set_autonomy_mode(self) -> None: - joy_msg = ROSJoy() # type: ignore[no-untyped-call] - joy_msg.axes = [ - 0.0, # axis 0 - 0.0, # axis 1 - -1.0, # axis 2 - 0.0, # axis 3 - 1.0, # axis 4 - 1.0, # axis 5 - 0.0, # axis 6 - 0.0, # axis 7 - ] - joy_msg.buttons = [ - 0, # button 0 - 0, # button 1 - 0, # button 2 - 0, # button 3 - 0, # button 4 - 0, # button 5 - 0, # button 6 - 1, # 
button 7 - controls autonomy mode - 0, # button 8 - 0, # button 9 - 0, # button 10 - ] - self.joy_pub.publish(joy_msg) + joy_msg = Joy( + axes=[0.0, 0.0, -1.0, 0.0, 1.0, 1.0, 0.0, 0.0], + buttons=[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0], + ) + self.ros_joy.publish(joy_msg) logger.info("Setting autonomy mode via Joy message") - @skill(stream=Stream.passive, reducer=Reducer.latest) # type: ignore[arg-type] - def current_position(self): # type: ignore[no-untyped-def] - """passively stream the current position of the robot every second""" - if self._current_position_running: - return "already running" - while True: - self._current_position_running = True - time.sleep(1.0) - tf = self.tf.get("map", "base_link") - if not tf: - continue - yield f"current position {tf.translation.x}, {tf.translation.y}" - - @skill(stream=Stream.call_agent, reducer=Reducer.string) # type: ignore[arg-type] - def goto(self, x: float, y: float): # type: ignore[no-untyped-def] + @skill + def goto(self, x: float, y: float) -> str: """ move the robot in relative coordinates x is forward, y is left @@ -302,12 +223,11 @@ def goto(self, x: float, y: float): # type: ignore[no-untyped-def] ts=time.time(), ) - yield "moving, please wait..." self.navigate_to(pose_to) - yield "arrived" + return "arrived" - @skill(stream=Stream.call_agent, reducer=Reducer.string) # type: ignore[arg-type] - def goto_global(self, x: float, y: float) -> Generator[str, None, None]: + @skill + def goto_global(self, x: float, y: float) -> str: """ go to coordinates x,y in the map frame 0,0 is your starting position @@ -319,13 +239,9 @@ def goto_global(self, x: float, y: float) -> Generator[str, None, None]: orientation=Quaternion(0.0, 0.0, 0.0, 0.0), ) - pos = self.tf.get("base_link", "map").translation - - yield f"moving from {pos.x:.2f}, {pos.y:.2f} to {x:.2f}, {y:.2f}, please wait..." 
- self.navigate_to(target) - yield "arrived to {x:.2f}, {y:.2f}" + return f"arrived to {x:.2f}, {y:.2f}" @rpc def navigate_to(self, pose: PoseStamped, timeout: float = 60.0) -> bool: @@ -347,19 +263,14 @@ def navigate_to(self, pose: PoseStamped, timeout: float = 60.0) -> bool: self._set_autonomy_mode() # Enable soft stop (0 = enable) - soft_stop_msg = ROSInt8() # type: ignore[no-untyped-call] - soft_stop_msg.data = 0 - self.soft_stop_pub.publish(soft_stop_msg) - - ros_pose = pose.to_ros_msg() - self.goal_pose_pub.publish(ros_pose) + self.ros_soft_stop.publish(Int8(data=0)) + self.ros_goal_pose.publish(pose) # Wait for goal to be reached start_time = time.time() while time.time() - start_time < timeout: if self._goal_reach is not None: - soft_stop_msg.data = 2 - self.soft_stop_pub.publish(soft_stop_msg) + self.ros_soft_stop.publish(Int8(data=2)) return self._goal_reach time.sleep(0.1) @@ -377,13 +288,8 @@ def stop_navigation(self) -> bool: """ logger.info("Stopping navigation") - cancel_msg = ROSBool() # type: ignore[no-untyped-call] - cancel_msg.data = True - self.cancel_goal_pub.publish(cancel_msg) - - soft_stop_msg = ROSInt8() # type: ignore[no-untyped-call] - soft_stop_msg.data = 2 - self.soft_stop_pub.publish(soft_stop_msg) + self.ros_cancel_goal.publish(Bool(data=True)) + self.ros_soft_stop.publish(Int8(data=2)) with self._state_lock: self._navigation_state = NavigationState.IDLE @@ -461,13 +367,7 @@ def stop(self) -> None: self._running = False self._local_pointcloud_subject.on_completed() - self._global_pointcloud_subject.on_completed() - - if self._spin_thread and self._spin_thread.is_alive(): - self._spin_thread.join(timeout=1.0) - - if hasattr(self, "_node") and self._node: - self._node.destroy_node() # type: ignore[no-untyped-call] + self._global_map_subject.on_completed() except Exception as e: logger.error(f"Error during shutdown: {e}") @@ -481,13 +381,29 @@ def stop(self) -> None: def deploy(dimos: DimosCluster): # type: ignore[no-untyped-def] nav = 
dimos.deploy(ROSNav) # type: ignore[attr-defined] - nav.pointcloud.transport = pSHMTransport("/lidar") - nav.global_pointcloud.transport = pSHMTransport("/map") + # Existing ports on LCM transports + nav.pointcloud.transport = LCMTransport("/lidar", PointCloud2) + nav.global_map.transport = LCMTransport("/map", PointCloud2) nav.goal_req.transport = LCMTransport("/goal_req", PoseStamped) nav.goal_active.transport = LCMTransport("/goal_active", PoseStamped) nav.path_active.transport = LCMTransport("/path_active", Path) nav.cmd_vel.transport = LCMTransport("/cmd_vel", Twist) + # ROS In transports (receiving from ROS navigation stack) + nav.ros_goal_reached.transport = ROSTransport("/goal_reached", Bool) + nav.ros_cmd_vel.transport = ROSTransport("/cmd_vel", TwistStamped) + nav.ros_way_point.transport = ROSTransport("/way_point", PoseStamped) + nav.ros_registered_scan.transport = ROSTransport("/registered_scan", PointCloud2) + nav.ros_global_map.transport = ROSTransport("/terrain_map_ext", PointCloud2) + nav.ros_path.transport = ROSTransport("/path", Path) + nav.ros_tf.transport = ROSTransport("/tf", TFMessage) + + # ROS Out transports (publishing to ROS navigation stack) + nav.ros_goal_pose.transport = ROSTransport("/goal_pose", PoseStamped) + nav.ros_cancel_goal.transport = ROSTransport("/cancel_goal", Bool) + nav.ros_soft_stop.transport = ROSTransport("/stop", Int8) + nav.ros_joy.transport = ROSTransport("/joy", Joy) + nav.start() return nav diff --git a/dimos/perception/common/__init__.py b/dimos/perception/common/__init__.py index 16281fe0b6..5902f54bb8 100644 --- a/dimos/perception/common/__init__.py +++ b/dimos/perception/common/__init__.py @@ -1 +1,81 @@ -from .utils import * +from .utils import ( + BoundingBox2D, + CameraInfo, + Detection2D, + Detection3D, + Header, + Image, + ObjectData, + Pose, + Quaternion, + Union, + Vector, + Vector3, + bbox2d_to_corners, + colorize_depth, + combine_object_data, + cp, + cv2, + detection_results_to_object_data, + 
draw_bounding_box, + draw_object_detection_visualization, + draw_segmentation_mask, + extract_pose_from_detection3d, + find_clicked_detection, + load_camera_info, + load_camera_info_opencv, + logger, + np, + point_in_bbox, + project_2d_points_to_3d, + project_2d_points_to_3d_cpu, + project_2d_points_to_3d_cuda, + project_3d_points_to_2d, + project_3d_points_to_2d_cpu, + project_3d_points_to_2d_cuda, + rectify_image, + setup_logger, + torch, + yaml, +) + +__all__ = [ + "BoundingBox2D", + "CameraInfo", + "Detection2D", + "Detection3D", + "Header", + "Image", + "ObjectData", + "Pose", + "Quaternion", + "Union", + "Vector", + "Vector3", + "bbox2d_to_corners", + "colorize_depth", + "combine_object_data", + "cp", + "cv2", + "detection_results_to_object_data", + "draw_bounding_box", + "draw_object_detection_visualization", + "draw_segmentation_mask", + "extract_pose_from_detection3d", + "find_clicked_detection", + "load_camera_info", + "load_camera_info_opencv", + "logger", + "np", + "point_in_bbox", + "project_2d_points_to_3d", + "project_2d_points_to_3d_cpu", + "project_2d_points_to_3d_cuda", + "project_3d_points_to_2d", + "project_3d_points_to_2d_cpu", + "project_3d_points_to_2d_cuda", + "rectify_image", + "setup_logger", + "torch", + "yaml", +] diff --git a/dimos/perception/common/utils.py b/dimos/perception/common/utils.py index 1144234d71..c5f550ade3 100644 --- a/dimos/perception/common/utils.py +++ b/dimos/perception/common/utils.py @@ -34,9 +34,50 @@ logger = setup_logger() +__all__ = [ + "BoundingBox2D", + "CameraInfo", + "Detection2D", + "Detection3D", + "Header", + "Image", + "ObjectData", + "Pose", + "Quaternion", + "Union", + "Vector", + "Vector3", + "bbox2d_to_corners", + "colorize_depth", + "combine_object_data", + "cp", + "cv2", + "detection_results_to_object_data", + "draw_bounding_box", + "draw_object_detection_visualization", + "draw_segmentation_mask", + "extract_pose_from_detection3d", + "find_clicked_detection", + "load_camera_info", + 
"load_camera_info_opencv", + "logger", + "np", + "point_in_bbox", + "project_2d_points_to_3d", + "project_2d_points_to_3d_cpu", + "project_2d_points_to_3d_cuda", + "project_3d_points_to_2d", + "project_3d_points_to_2d_cpu", + "project_3d_points_to_2d_cuda", + "rectify_image", + "setup_logger", + "torch", + "yaml", +] + # Optional CuPy support try: # pragma: no cover - optional dependency - import cupy as cp # type: ignore[import-not-found] + import cupy as cp # type: ignore[import-not-found, import-untyped] _HAS_CUDA = True except Exception: # pragma: no cover - optional dependency @@ -147,152 +188,15 @@ def load_camera_info_opencv(yaml_path: str) -> tuple[np.ndarray, np.ndarray]: # return K, dist -def rectify_image_cpu(image: Image, camera_matrix: np.ndarray, dist_coeffs: np.ndarray) -> Image: # type: ignore[type-arg] +def rectify_image(image: Image, camera_matrix: np.ndarray, dist_coeffs: np.ndarray) -> Image: # type: ignore[type-arg] """CPU rectification using OpenCV. Preserves backend by caller. Returns an Image with numpy or cupy data depending on caller choice. """ - src = _to_numpy(image.data) # type: ignore[no-untyped-call] - rect = cv2.undistort(src, camera_matrix, dist_coeffs) - # Caller decides whether to convert back to GPU. - return Image(data=rect, format=image.format, frame_id=image.frame_id, ts=image.ts) - - -def rectify_image_cuda(image: Image, camera_matrix: np.ndarray, dist_coeffs: np.ndarray) -> Image: # type: ignore[type-arg] - """GPU rectification using CuPy bilinear sampling. - - Generates an undistorted output grid and samples from the distorted source. - Falls back to CPU if CUDA not available. 
- """ - if not _HAS_CUDA or cp is None or not image.is_cuda: - return rectify_image_cpu(image, camera_matrix, dist_coeffs) - - xp = cp - - # Source (distorted) image on device - src = image.data - if src.ndim not in (2, 3): - raise ValueError("Unsupported image rank for rectification") - H, W = int(src.shape[0]), int(src.shape[1]) - - # Extract intrinsics and distortion as float64 - K = xp.asarray(camera_matrix, dtype=xp.float64) - dist = xp.asarray(dist_coeffs, dtype=xp.float64).reshape(-1) - fx, fy, cx, cy = K[0, 0], K[1, 1], K[0, 2], K[1, 2] - k1 = dist[0] if dist.size > 0 else 0.0 - k2 = dist[1] if dist.size > 1 else 0.0 - p1 = dist[2] if dist.size > 2 else 0.0 - p2 = dist[3] if dist.size > 3 else 0.0 - k3 = dist[4] if dist.size > 4 else 0.0 - - # Build undistorted target grid (pixel coords) - u = xp.arange(W, dtype=xp.float64) - v = xp.arange(H, dtype=xp.float64) - uu, vv = xp.meshgrid(u, v, indexing="xy") - - # Convert to normalized undistorted coords - xu = (uu - cx) / fx - yu = (vv - cy) / fy - - # Apply forward distortion model to get distorted normalized coords - r2 = xu * xu + yu * yu - r4 = r2 * r2 - r6 = r4 * r2 - radial = 1.0 + k1 * r2 + k2 * r4 + k3 * r6 - delta_x = 2.0 * p1 * xu * yu + p2 * (r2 + 2.0 * xu * xu) - delta_y = p1 * (r2 + 2.0 * yu * yu) + 2.0 * p2 * xu * yu - xd = xu * radial + delta_x - yd = yu * radial + delta_y - - # Back to pixel coordinates in the source (distorted) image - us = fx * xd + cx - vs = fy * yd + cy - - # Bilinear sample from src at (vs, us) - def _bilinear_sample_cuda(img, x_src, y_src): # type: ignore[no-untyped-def] - h, w = int(img.shape[0]), int(img.shape[1]) - # Base integer corners (not clamped) - x0i = xp.floor(x_src).astype(xp.int32) - y0i = xp.floor(y_src).astype(xp.int32) - x1i = x0i + 1 - y1i = y0i + 1 - - # Masks for in-bounds neighbors (BORDER_CONSTANT behavior) - m00 = (x0i >= 0) & (x0i < w) & (y0i >= 0) & (y0i < h) - m10 = (x1i >= 0) & (x1i < w) & (y0i >= 0) & (y0i < h) - m01 = (x0i >= 0) & (x0i < w) & 
(y1i >= 0) & (y1i < h) - m11 = (x1i >= 0) & (x1i < w) & (y1i >= 0) & (y1i < h) - - # Clamp indices for safe gather, but multiply contributions by masks - x0 = xp.clip(x0i, 0, w - 1) - y0 = xp.clip(y0i, 0, h - 1) - x1 = xp.clip(x1i, 0, w - 1) - y1 = xp.clip(y1i, 0, h - 1) - - # Weights - wx = (x_src - x0i).astype(xp.float64) - wy = (y_src - y0i).astype(xp.float64) - w00 = (1.0 - wx) * (1.0 - wy) - w10 = wx * (1.0 - wy) - w01 = (1.0 - wx) * wy - w11 = wx * wy - - # Cast masks for arithmetic - m00f = m00.astype(xp.float64) - m10f = m10.astype(xp.float64) - m01f = m01.astype(xp.float64) - m11f = m11.astype(xp.float64) - - if img.ndim == 2: - Ia = img[y0, x0].astype(xp.float64) - Ib = img[y0, x1].astype(xp.float64) - Ic = img[y1, x0].astype(xp.float64) - Id = img[y1, x1].astype(xp.float64) - out = w00 * m00f * Ia + w10 * m10f * Ib + w01 * m01f * Ic + w11 * m11f * Id - else: - Ia = img[y0, x0].astype(xp.float64) - Ib = img[y0, x1].astype(xp.float64) - Ic = img[y1, x0].astype(xp.float64) - Id = img[y1, x1].astype(xp.float64) - # Expand weights and masks for channel broadcasting - w00e = (w00 * m00f)[..., None] - w10e = (w10 * m10f)[..., None] - w01e = (w01 * m01f)[..., None] - w11e = (w11 * m11f)[..., None] - out = w00e * Ia + w10e * Ib + w01e * Ic + w11e * Id - - # Cast back to original dtype with clipping for integers - if img.dtype == xp.uint8: - out = xp.clip(xp.rint(out), 0, 255).astype(xp.uint8) - elif img.dtype == xp.uint16: - out = xp.clip(xp.rint(out), 0, 65535).astype(xp.uint16) - elif img.dtype == xp.int16: - out = xp.clip(xp.rint(out), -32768, 32767).astype(xp.int16) - else: - out = out.astype(img.dtype, copy=False) - return out - - rect = _bilinear_sample_cuda(src, us, vs) # type: ignore[no-untyped-call] + rect = cv2.undistort(image.data, camera_matrix, dist_coeffs) return Image(data=rect, format=image.format, frame_id=image.frame_id, ts=image.ts) -def rectify_image(image: Image, camera_matrix: np.ndarray, dist_coeffs: np.ndarray) -> Image: # type: 
ignore[type-arg] - """ - Rectify (undistort) an image using camera calibration parameters. - - Args: - image: Input Image object to rectify - camera_matrix: 3x3 camera intrinsic matrix (K) - dist_coeffs: Distortion coefficients array - - Returns: - Image: Rectified Image object with same format and metadata - """ - if image.is_cuda and _HAS_CUDA: - return rectify_image_cuda(image, camera_matrix, dist_coeffs) - return rectify_image_cpu(image, camera_matrix, dist_coeffs) - - def project_3d_points_to_2d_cuda( points_3d: "cp.ndarray", camera_intrinsics: Union[list[float], "cp.ndarray"] ) -> "cp.ndarray": diff --git a/dimos/perception/demo_object_scene_registration.py b/dimos/perception/demo_object_scene_registration.py index d1d879d0ab..c02f7d2984 100644 --- a/dimos/perception/demo_object_scene_registration.py +++ b/dimos/perception/demo_object_scene_registration.py @@ -13,8 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from dimos.agents.agent import llm_agent -from dimos.agents.cli.human import human_input +from dimos.agents.agent import agent from dimos.core.blueprints import autoconnect from dimos.hardware.sensors.camera.realsense import realsense_camera from dimos.hardware.sensors.camera.zed import zed_camera @@ -35,6 +34,5 @@ camera_module, object_scene_registration_module(target_frame="world", prompt_mode=YoloePromptMode.LRPC), foxglove_bridge(), - human_input(), - llm_agent(), + agent(), ).global_config(viewer_backend="foxglove") diff --git a/dimos/perception/detection/__init__.py b/dimos/perception/detection/__init__.py index 72663a69b0..ae9f8cb14d 100644 --- a/dimos/perception/detection/__init__.py +++ b/dimos/perception/detection/__init__.py @@ -1,7 +1,10 @@ -from dimos.perception.detection.detectors import * -from dimos.perception.detection.module2D import ( - Detection2DModule, -) -from dimos.perception.detection.module3D import ( - Detection3DModule, +import lazy_loader as lazy + 
+__getattr__, __dir__, __all__ = lazy.attach( + __name__, + submod_attrs={ + "detectors": ["Detector", "Yolo2DDetector"], + "module2D": ["Detection2DModule"], + "module3D": ["Detection3DModule"], + }, ) diff --git a/dimos/perception/detection/conftest.py b/dimos/perception/detection/conftest.py index 8c6953e410..3b24422c47 100644 --- a/dimos/perception/detection/conftest.py +++ b/dimos/perception/detection/conftest.py @@ -35,8 +35,8 @@ ImageDetections3DPC, ) from dimos.protocol.tf import TF -from dimos.robot.unitree.connection import go2 -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.robot.unitree.go2 import connection +from dimos.robot.unitree.type.odometry import Odometry from dimos.utils.data import get_data from dimos.utils.testing import TimedSensorReplay @@ -100,7 +100,7 @@ def moment_provider(**kwargs) -> Moment: if odom_frame is None: raise ValueError("No odom frame found") - transforms = go2.GO2Connection._odom_to_tf(odom_frame) + transforms = connection.GO2Connection._odom_to_tf(odom_frame) tf.receive_transform(*transforms) @@ -108,7 +108,7 @@ def moment_provider(**kwargs) -> Moment: "odom_frame": odom_frame, "lidar_frame": lidar_frame, "image_frame": image_frame, - "camera_info": go2._camera_info_static(), + "camera_info": connection._camera_info_static(), "transforms": transforms, "tf": tf, } @@ -260,8 +260,8 @@ def object_db_module(get_moment): from dimos.perception.detection.detectors import Yolo2DDetector module2d = Detection2DModule(detector=lambda: Yolo2DDetector(device="cpu")) - module3d = Detection3DModule(camera_info=go2._camera_info_static()) - moduleDB = ObjectDBModule(camera_info=go2._camera_info_static()) + module3d = Detection3DModule(camera_info=connection._camera_info_static()) + moduleDB = ObjectDBModule(camera_info=connection._camera_info_static()) # Process 5 frames to build up object history for i in range(5): diff --git a/dimos/perception/detection/detectors/__init__.py 
b/dimos/perception/detection/detectors/__init__.py index d6383d084e..2f151fe3ef 100644 --- a/dimos/perception/detection/detectors/__init__.py +++ b/dimos/perception/detection/detectors/__init__.py @@ -1,3 +1,8 @@ # from dimos.perception.detection.detectors.detic import Detic2DDetector from dimos.perception.detection.detectors.types import Detector from dimos.perception.detection.detectors.yolo import Yolo2DDetector + +__all__ = [ + "Detector", + "Yolo2DDetector", +] diff --git a/dimos/perception/detection/module3D.py b/dimos/perception/detection/module3D.py index 037376f995..d275fbc85f 100644 --- a/dimos/perception/detection/module3D.py +++ b/dimos/perception/detection/module3D.py @@ -13,6 +13,8 @@ # limitations under the License. +from typing import TYPE_CHECKING, Any + from dimos_lcm.foxglove_msgs.ImageAnnotations import ( ImageAnnotations, ) @@ -21,20 +23,24 @@ from reactivex.observable import Observable from dimos import spec -from dimos.agents import skill # type: ignore[attr-defined] -from dimos.core import DimosCluster, In, Out, rpc +from dimos.agents.annotation import skill +from dimos.core.core import rpc +from dimos.core.stream import In, Out from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Transform, Vector3 from dimos.msgs.sensor_msgs import Image, PointCloud2 from dimos.msgs.vision_msgs import Detection2DArray from dimos.perception.detection.module2D import Detection2DModule -from dimos.perception.detection.type import ( - ImageDetections2D, - ImageDetections3DPC, -) +from dimos.perception.detection.type.detection2d.imageDetections2D import ImageDetections2D from dimos.perception.detection.type.detection3d import Detection3DPC +from dimos.perception.detection.type.detection3d.imageDetections3DPC import ImageDetections3DPC from dimos.types.timestamped import align_timestamped from dimos.utils.reactive import backpressure +if TYPE_CHECKING: + from dask.distributed import Client as DimosCluster +else: + DimosCluster = Any + class 
Detection3DModule(Detection2DModule): color_image: In[Image] @@ -105,7 +111,7 @@ def pixel_to_3d( # Camera optical frame: X right, Y down, Z forward return Vector3(x_norm * assumed_depth, y_norm * assumed_depth, assumed_depth) - @skill() + @skill def ask_vlm(self, question: str) -> str: """asks a visual model about the view of the robot, for example is the bannana in the trunk? diff --git a/dimos/perception/detection/moduleDB.py b/dimos/perception/detection/moduleDB.py index c37dff8dea..bc0a346a59 100644 --- a/dimos/perception/detection/moduleDB.py +++ b/dimos/perception/detection/moduleDB.py @@ -23,13 +23,15 @@ from lcm_msgs.foxglove_msgs import SceneUpdate # type: ignore[import-not-found] from reactivex.observable import Observable -from dimos.core import In, Out, rpc +from dimos.core.core import rpc +from dimos.core.stream import In, Out from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Transform, Vector3 from dimos.msgs.sensor_msgs import Image, PointCloud2 from dimos.msgs.vision_msgs import Detection2DArray from dimos.perception.detection.module3D import Detection3DModule -from dimos.perception.detection.type import ImageDetections3DPC, TableStr from dimos.perception.detection.type.detection3d import Detection3DPC +from dimos.perception.detection.type.detection3d.imageDetections3DPC import ImageDetections3DPC +from dimos.perception.detection.type.utils import TableStr # Represents an object in space, as collection of 3d detections over time @@ -307,6 +309,6 @@ def __len__(self) -> int: return len(self.objects.values()) -detectionDB_module = ObjectDBModule.blueprint +detection_db_module = ObjectDBModule.blueprint -__all__ = ["ObjectDBModule", "detectionDB_module"] +__all__ = ["ObjectDBModule", "detection_db_module"] diff --git a/dimos/perception/detection/objectDB.py b/dimos/perception/detection/objectDB.py index 6ade2d8c8d..9af8058c55 100644 --- a/dimos/perception/detection/objectDB.py +++ b/dimos/perception/detection/objectDB.py @@ -44,10 +44,10 
@@ class ObjectDB: def __init__( self, - distance_threshold: float = 0.1, + distance_threshold: float = 0.2, min_detections_for_permanent: int = 6, pending_ttl_s: float = 5.0, - track_id_ttl_s: float = 2.0, + track_id_ttl_s: float = 5.0, ) -> None: self._distance_threshold = distance_threshold self._min_detections = min_detections_for_permanent @@ -139,6 +139,15 @@ def find_by_name(self, name: str) -> list[Object]: with self._lock: return [obj for obj in self._objects.values() if obj.name == name] + def find_by_object_id(self, object_id: str) -> Object | None: + """Find an object by its object_id (searches pending and permanent).""" + with self._lock: + if object_id in self._objects: + return self._objects[object_id] + if object_id in self._pending_objects: + return self._pending_objects[object_id] + return None + def find_nearest( self, position: Vector3, diff --git a/dimos/perception/detection/reid/embedding_id_system.py b/dimos/perception/detection/reid/embedding_id_system.py index 9b57e1eb6c..15bb491f5c 100644 --- a/dimos/perception/detection/reid/embedding_id_system.py +++ b/dimos/perception/detection/reid/embedding_id_system.py @@ -33,7 +33,7 @@ class EmbeddingIDSystem(IDSystem): def __init__( self, - model: Callable[[], EmbeddingModel[Embedding]], + model: Callable[[], EmbeddingModel], padding: int = 0, similarity_threshold: float = 0.63, comparison_mode: Literal["max", "mean", "top_k_mean"] = "top_k_mean", diff --git a/dimos/perception/detection/reid/module.py b/dimos/perception/detection/reid/module.py index f3f2a5a126..0a359746d3 100644 --- a/dimos/perception/detection/reid/module.py +++ b/dimos/perception/detection/reid/module.py @@ -20,13 +20,15 @@ from reactivex import operators as ops from reactivex.observable import Observable -from dimos.core import In, Module, ModuleConfig, Out, rpc +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig +from dimos.core.stream import In, Out from dimos.msgs.foxglove_msgs.Color import 
Color from dimos.msgs.sensor_msgs import Image from dimos.msgs.vision_msgs import Detection2DArray from dimos.perception.detection.reid.embedding_id_system import EmbeddingIDSystem from dimos.perception.detection.reid.type import IDSystem -from dimos.perception.detection.type import ImageDetections2D +from dimos.perception.detection.type.detection2d.imageDetections2D import ImageDetections2D from dimos.types.timestamped import align_timestamped, to_ros_stamp from dimos.utils.reactive import backpressure diff --git a/dimos/perception/detection/reid/type.py b/dimos/perception/detection/reid/type.py index 28ea719f81..61571e418f 100644 --- a/dimos/perception/detection/reid/type.py +++ b/dimos/perception/detection/reid/type.py @@ -15,8 +15,12 @@ from __future__ import annotations from abc import ABC, abstractmethod +from typing import TYPE_CHECKING -from dimos.perception.detection.type import Detection2DBBox, ImageDetections2D +from dimos.perception.detection.type.detection2d.bbox import Detection2DBBox + +if TYPE_CHECKING: + from dimos.perception.detection.type.detection2d.imageDetections2D import ImageDetections2D class IDSystem(ABC): diff --git a/dimos/perception/detection/test_moduleDB.py b/dimos/perception/detection/test_moduleDB.py index e9815f1f3e..23885a1c60 100644 --- a/dimos/perception/detection/test_moduleDB.py +++ b/dimos/perception/detection/test_moduleDB.py @@ -22,19 +22,19 @@ from dimos.msgs.sensor_msgs import Image, PointCloud2 from dimos.msgs.vision_msgs import Detection2DArray from dimos.perception.detection.moduleDB import ObjectDBModule -from dimos.robot.unitree.connection import go2 +from dimos.robot.unitree.go2 import connection as go2_connection @pytest.mark.module def test_moduleDB(dimos_cluster) -> None: - connection = go2.deploy(dimos_cluster, "fake") + connection = go2_connection.deploy(dimos_cluster, "fake") moduleDB = dimos_cluster.deploy( ObjectDBModule, - camera_info=go2._camera_info_static(), + 
camera_info=go2_connection._camera_info_static(), goto=lambda obj_id: print(f"Going to {obj_id}"), ) - moduleDB.image.connect(connection.video) + moduleDB.image.connect(connection.color_image) moduleDB.pointcloud.connect(connection.lidar) moduleDB.annotations.transport = LCMTransport("/annotations", ImageAnnotations) diff --git a/dimos/perception/detection/type/__init__.py b/dimos/perception/detection/type/__init__.py index d69d00ba97..00cf943db3 100644 --- a/dimos/perception/detection/type/__init__.py +++ b/dimos/perception/detection/type/__init__.py @@ -1,45 +1,28 @@ -from dimos.perception.detection.type.detection2d import ( # type: ignore[attr-defined] - Detection2D, - Detection2DBBox, - Detection2DPerson, - Detection2DPoint, - Filter2D, - ImageDetections2D, -) -from dimos.perception.detection.type.detection3d import ( - Detection3D, - Detection3DBBox, - Detection3DPC, - ImageDetections3DPC, - PointCloudFilter, - height_filter, - radius_outlier, - raycast, - statistical, -) -from dimos.perception.detection.type.imageDetections import ImageDetections -from dimos.perception.detection.type.utils import TableStr +import lazy_loader as lazy -__all__ = [ - # 2D Detection types - "Detection2D", - "Detection2DBBox", - "Detection2DPerson", - "Detection2DPoint", - # 3D Detection types - "Detection3D", - "Detection3DBBox", - "Detection3DPC", - "Filter2D", - # Base types - "ImageDetections", - "ImageDetections2D", - "ImageDetections3DPC", - # Point cloud filters - "PointCloudFilter", - "TableStr", - "height_filter", - "radius_outlier", - "raycast", - "statistical", -] +__getattr__, __dir__, __all__ = lazy.attach( + __name__, + submod_attrs={ + "detection2d": [ + "Detection2D", + "Detection2DBBox", + "Detection2DPerson", + "Detection2DPoint", + "Filter2D", + "ImageDetections2D", + ], + "detection3d": [ + "Detection3D", + "Detection3DBBox", + "Detection3DPC", + "ImageDetections3DPC", + "PointCloudFilter", + "height_filter", + "radius_outlier", + "raycast", + "statistical", + 
], + "imageDetections": ["ImageDetections"], + "utils": ["TableStr"], + }, +) diff --git a/dimos/perception/detection/type/detection2d/__init__.py b/dimos/perception/detection/type/detection2d/__init__.py index 8994d840b6..dc81916679 100644 --- a/dimos/perception/detection/type/detection2d/__init__.py +++ b/dimos/perception/detection/type/detection2d/__init__.py @@ -25,5 +25,6 @@ "Detection2DPerson", "Detection2DPoint", "Detection2DSeg", + "Filter2D", "ImageDetections2D", ] diff --git a/dimos/perception/detection/type/detection2d/seg.py b/dimos/perception/detection/type/detection2d/seg.py index 21f8e8e689..5d4d55d0c3 100644 --- a/dimos/perception/detection/type/detection2d/seg.py +++ b/dimos/perception/detection/type/detection2d/seg.py @@ -183,8 +183,10 @@ def to_points_annotation(self) -> list[PointsAnnotation]: approx = cv2.approxPolyDP(contour, epsilon, True) points = [] - for pt in approx: - points.append(Point2(x=float(pt[0][0]), y=float(pt[0][1]))) + for i in range(len(approx)): + x_coord = float(approx[i, 0, 0]) + y_coord = float(approx[i, 0, 1]) + points.append(Point2(x=x_coord, y=y_coord)) if len(points) < 3: continue diff --git a/dimos/perception/detection/type/detection3d/object.py b/dimos/perception/detection/type/detection3d/object.py index 00d4d88661..43702917e1 100644 --- a/dimos/perception/detection/type/detection3d/object.py +++ b/dimos/perception/detection/type/detection3d/object.py @@ -122,7 +122,8 @@ def to_detection3d_msg(self) -> ROSDetection3D: def agent_encode(self) -> dict[str, Any]: """Encode for agent consumption.""" return { - "id": self.track_id, + "object_id": self.object_id, + "track_id": self.track_id, "name": self.name, "detections": self.detections_count, "last_seen": f"{round(time.time() - self.ts)}s ago", @@ -303,7 +304,7 @@ def aggregate_pointclouds(objects: list[Object]) -> PointCloud2: all_points = [] all_colors = [] - for _i, obj in enumerate(objects): + for obj in objects: points, colors = obj.pointcloud.as_numpy() if 
len(points) == 0: continue @@ -361,3 +362,6 @@ def to_detection3d_array(objects: list[Object]) -> Detection3DArray: array.detections.append(obj.to_detection3d_msg()) return array + + +__all__ = ["Object", "aggregate_pointclouds", "to_detection3d_array"] diff --git a/dimos/perception/experimental/temporal_memory/clip_filter.py b/dimos/perception/experimental/temporal_memory/clip_filter.py index 8faac3fad8..d747899452 100644 --- a/dimos/perception/experimental/temporal_memory/clip_filter.py +++ b/dimos/perception/experimental/temporal_memory/clip_filter.py @@ -14,7 +14,7 @@ """CLIP-based frame filtering for selecting diverse frames from video windows.""" -from typing import Any, cast +from typing import Any import numpy as np @@ -38,7 +38,7 @@ def _get_image_data(image: Image) -> np.ndarray[Any, Any]: """Extract numpy array from Image.""" if not hasattr(image, "data"): raise AttributeError(f"Image missing .data attribute: {type(image)}") - return cast("np.ndarray[Any, Any]", image.data) + return image.data if CLIP_AVAILABLE: diff --git a/dimos/perception/experimental/temporal_memory/temporal_memory.py b/dimos/perception/experimental/temporal_memory/temporal_memory.py index 29d4ecf3d9..66b6fce911 100644 --- a/dimos/perception/experimental/temporal_memory/temporal_memory.py +++ b/dimos/perception/experimental/temporal_memory/temporal_memory.py @@ -32,10 +32,10 @@ from reactivex import Subject, interval from reactivex.disposable import Disposable -from dimos.agents import skill -from dimos.core import In, rpc -from dimos.core.module import ModuleConfig -from dimos.core.skill_module import SkillModule +from dimos.agents.annotation import skill +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig +from dimos.core.stream import In from dimos.models.vl.base import VlModel from dimos.msgs.sensor_msgs import Image from dimos.msgs.sensor_msgs.Image import sharpness_barrier @@ -100,7 +100,7 @@ class TemporalMemoryConfig(ModuleConfig): 
nearby_distance_meters: float = 5.0 # "Nearby" threshold -class TemporalMemory(SkillModule): +class TemporalMemory(Module): """ builds temporal understanding of video streams using vlms. @@ -449,7 +449,7 @@ def _update_rolling_summary(self, w_end: float) -> None: except Exception as e: logger.error(f"summary update failed: {e}", exc_info=True) - @skill() + @skill def query(self, question: str) -> str: """Answer a question about the video stream using temporal memory and graph knowledge. diff --git a/dimos/perception/experimental/temporal_memory/temporal_memory_deploy.py b/dimos/perception/experimental/temporal_memory/temporal_memory_deploy.py index 611385630e..ab3cc7a0f5 100644 --- a/dimos/perception/experimental/temporal_memory/temporal_memory_deploy.py +++ b/dimos/perception/experimental/temporal_memory/temporal_memory_deploy.py @@ -17,17 +17,21 @@ """ import os +from typing import TYPE_CHECKING -from dimos import spec -from dimos.core import DimosCluster +from dimos.core._dask_exports import DimosCluster from dimos.models.vl.base import VlModel +from dimos.spec import Camera as CameraSpec from .temporal_memory import TemporalMemory, TemporalMemoryConfig +if TYPE_CHECKING: + from dimos.msgs.sensor_msgs import Image + def deploy( dimos: DimosCluster, - camera: spec.Camera, + camera: CameraSpec, vlm: VlModel | None = None, config: TemporalMemoryConfig | None = None, ) -> TemporalMemory: @@ -52,7 +56,7 @@ def deploy( if camera.color_image.transport is None: from dimos.core.transport import JpegShmTransport - transport = JpegShmTransport("/temporal_memory/color_image") + transport: JpegShmTransport[Image] = JpegShmTransport("/temporal_memory/color_image") camera.color_image.transport = transport temporal_memory.color_image.connect(camera.color_image) diff --git a/dimos/perception/experimental/temporal_memory/temporal_utils/graph_utils.py b/dimos/perception/experimental/temporal_memory/temporal_utils/graph_utils.py index 315d267a0c..8d05f8c1e1 100644 --- 
a/dimos/perception/experimental/temporal_memory/temporal_utils/graph_utils.py +++ b/dimos/perception/experimental/temporal_memory/temporal_utils/graph_utils.py @@ -129,6 +129,7 @@ def build_graph_context( "relationships": [], "spatial_info": [], "semantic_knowledge": [], + "entity_timestamps": [], } # Convert time_window_s to a (start_ts, end_ts) tuple if provided @@ -143,6 +144,25 @@ def build_graph_context( ref_time = max((e.get("last_seen_ts", 0) for e in all_entities), default=0) time_window_tuple = (max(0, ref_time - time_window_s), ref_time) + # Get entity timestamp information for visibility duration queries + for entity_id in entity_ids: + entity = graph_db.get_entity(entity_id) + if entity: + first_seen = entity.get("first_seen_ts") + last_seen = entity.get("last_seen_ts") + duration_s = None + if first_seen is not None and last_seen is not None: + duration_s = last_seen - first_seen + + graph_context["entity_timestamps"].append( + { + "entity_id": entity_id, + "first_seen_ts": first_seen, + "last_seen_ts": last_seen, + "duration_s": duration_s, + } + ) + # Get recent relationships for each entity for entity_id in entity_ids: # Get relationships (Graph 1: interactions) diff --git a/dimos/perception/experimental/temporal_memory/temporal_utils/prompts.py b/dimos/perception/experimental/temporal_memory/temporal_utils/prompts.py index 61399fd3f1..5269a3d67d 100644 --- a/dimos/perception/experimental/temporal_memory/temporal_utils/prompts.py +++ b/dimos/perception/experimental/temporal_memory/temporal_utils/prompts.py @@ -240,9 +240,15 @@ def build_query_prompt( - {currently_present_str} - The 'entity_roster' contains all known entities with their descriptions - The 'rolling_summary' describes what has happened over time +- The 'graph_knowledge.entity_timestamps' contains visibility information for each entity: + - 'first_seen_ts': timestamp (in seconds) when the entity was first detected + - 'last_seen_ts': timestamp (in seconds) when the entity was last 
detected + - 'duration_s': total time span from first to last appearance (last_seen_ts - first_seen_ts) + - Use this information to answer questions about when entities appeared, disappeared, or how long they were visible - If 'currently_present_entities' is empty, it means no entities were detected in recent windows, but entities may still exist in the roster from earlier -- Answer based on the provided context (entity_roster, rolling_summary, currently_present_entities) AND what you see in the current frame +- Answer based on the provided context (entity_roster, rolling_summary, currently_present_entities, graph_knowledge) AND what you see in the current frame - If the context says entities were present but you don't see them in the current frame, mention both: what was recently detected AND what you currently see +- For duration questions, use the 'duration_s' field from 'entity_timestamps' if available Provide a concise answer. """ diff --git a/dimos/perception/experimental/temporal_memory/test_temporal_memory_module.py b/dimos/perception/experimental/temporal_memory/test_temporal_memory_module.py index 7b38e4ce40..1d0dab007b 100644 --- a/dimos/perception/experimental/temporal_memory/test_temporal_memory_module.py +++ b/dimos/perception/experimental/temporal_memory/test_temporal_memory_module.py @@ -27,7 +27,6 @@ from dimos.models.vl.openai import OpenAIVlModel from dimos.msgs.sensor_msgs import Image from dimos.perception.experimental.temporal_memory import TemporalMemory, TemporalMemoryConfig -from dimos.protocol import pubsub from dimos.utils.data import get_data from dimos.utils.logging_config import setup_logger from dimos.utils.testing import TimedSensorReplay @@ -37,8 +36,6 @@ logger = setup_logger() -pubsub.lcm.autoconf() - class VideoReplayModule(Module): """Module that replays video data from TimedSensorReplay.""" diff --git a/dimos/perception/object_scene_registration.py b/dimos/perception/object_scene_registration.py index c21e31bf33..cfc4ab8d3e 
100644 --- a/dimos/perception/object_scene_registration.py +++ b/dimos/perception/object_scene_registration.py @@ -15,13 +15,17 @@ import time from typing import Any +import cv2 import numpy as np from numpy.typing import NDArray +import open3d as o3d # type: ignore[import-untyped] +from dimos.agents.annotation import skill from dimos.core import In, Out, rpc -from dimos.core.skill_module import SkillModule +from dimos.core.module import Module from dimos.msgs.foxglove_msgs import ImageAnnotations from dimos.msgs.sensor_msgs import CameraInfo, Image, PointCloud2 +from dimos.msgs.sensor_msgs.Image import ImageFormat from dimos.msgs.std_msgs import Header from dimos.msgs.vision_msgs import Detection2DArray, Detection3DArray from dimos.perception.detection.detectors.yoloe import Yoloe2DDetector, YoloePromptMode @@ -29,10 +33,10 @@ from dimos.perception.detection.type import ImageDetections2D from dimos.perception.detection.type.detection3d.object import ( Object, + Object as DetObject, aggregate_pointclouds, to_detection3d_array, ) -from dimos.protocol.skill.skill import skill from dimos.types.timestamped import align_timestamped from dimos.utils.logging_config import setup_logger from dimos.utils.reactive import backpressure @@ -40,7 +44,7 @@ logger = setup_logger() -class ObjectSceneRegistrationModule(SkillModule): +class ObjectSceneRegistrationModule(Module): """Module for detecting objects in camera images using YOLO-E with 2D and 3D detection.""" color_image: In[Image] @@ -49,12 +53,15 @@ class ObjectSceneRegistrationModule(SkillModule): detections_2d: Out[Detection2DArray] detections_3d: Out[Detection3DArray] + objects: Out[list[DetObject]] overlay: Out[ImageAnnotations] pointcloud: Out[PointCloud2] _detector: Yoloe2DDetector | None = None _camera_info: CameraInfo | None = None _object_db: ObjectDB + _latest_depth_image: Image | None = None + _latest_camera_transform: Any = None def __init__( self, @@ -127,9 +134,96 @@ def get_object_track_ids(self) -> 
list[int]: """Get track_ids of all permanent objects.""" return [obj.track_id for obj in self._object_db.get_all_objects()] - @skill() + @rpc + def get_detected_objects(self) -> list[dict[str, Any]]: + """Get all detected objects with object_id (UUID) and name.""" + return [obj.agent_encode() for obj in self._object_db.get_all_objects()] + + @rpc + def get_object_pointcloud_by_name(self, name: str) -> PointCloud2 | None: + """Get pointcloud for an object by class name.""" + objects = self._object_db.find_by_name(name) + return objects[0].pointcloud if objects else None + + @rpc + def get_object_pointcloud_by_object_id(self, object_id: str) -> PointCloud2 | None: + """Get pointcloud for an object by its stable object_id (searches all objects).""" + obj = self._object_db.find_by_object_id(object_id) + if obj is None: + logger.warning(f"No object found with object_id='{object_id}'") + return None + pc = obj.pointcloud + num_points = len(pc.pointcloud.points) if pc else 0 + logger.info(f"Found object '{object_id}' ({obj.name}) with {num_points} points") + return pc + + def _get_object_mask(self, object_id: str) -> NDArray[np.uint8] | None: + """Get dilated mask for an object by ID.""" + for obj in self._object_db.get_all_objects(): + if obj.object_id != object_id: + continue + if obj.mask is None: + return None + + mask = obj.mask.astype(np.uint8) + if mask.max() == 1: + mask = (mask * 255).astype(np.uint8) + + kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (15, 15)) + return cv2.dilate(mask, kernel).astype(np.uint8) + + return None + + @rpc + def get_full_scene_pointcloud( + self, + exclude_object_id: str | None = None, + depth_trunc: float = 2.0, + voxel_size: float = 0.01, + ) -> PointCloud2 | None: + """Get full scene pointcloud from depth, including table/surfaces for collision filtering.""" + if self._latest_depth_image is None or self._camera_info is None: + return None + + depth_cv = self._latest_depth_image.to_opencv() + h, w = depth_cv.shape[:2] + + # 
Zero out excluded object's depth + if exclude_object_id: + exclude_mask = self._get_object_mask(exclude_object_id) + if exclude_mask is not None: + depth_cv = depth_cv.copy() + depth_cv[exclude_mask > 0] = 0 + + # Build pointcloud from depth + fx, fy = self._camera_info.K[0], self._camera_info.K[4] + cx, cy = self._camera_info.K[2], self._camera_info.K[5] + intrinsic = o3d.camera.PinholeCameraIntrinsic(w, h, fx, fy, cx, cy) + + depth_o3d = o3d.geometry.Image(depth_cv.astype(np.float32)) + pcd = o3d.geometry.PointCloud.create_from_depth_image( + depth_o3d, intrinsic, depth_scale=1.0, depth_trunc=depth_trunc + ) + + if len(pcd.points) < 100: + return None + + pcd = pcd.voxel_down_sample(voxel_size) + + pc = PointCloud2( + pcd, + frame_id=self._latest_depth_image.frame_id, + ts=self._latest_depth_image.ts, + ) + + if self._latest_camera_transform is not None: + pc = pc.transform(self._latest_camera_transform) + + return pc + + @skill def detect(self, *prompts: str) -> str: - """Detect objects matching the given text prompts. Returns track_ids after 2 seconds of detection. + """Detect objects matching the given text prompts. Do NOT call this tool multiple times for one query. Pass all objects in a single call. For example, to detect a cup and mouse, call detect("cup", "mouse") not detect("cup") then detect("mouse"). @@ -138,11 +232,11 @@ def detect(self, *prompts: str) -> str: prompts (str): Text descriptions of objects to detect (e.g., "person", "car", "dog") Returns: - str: A message containing the track_ids of detected objects + str: Detected objects with their object_id (stable UUID) and name. Example: detect("person", "car", "dog") - detect("person") + detect("cup") """ if not prompts: return "No prompts provided." 
@@ -152,12 +246,14 @@ def detect(self, *prompts: str) -> str: self._detector.set_prompts(text=list(prompts)) time.sleep(2.0) - track_ids = self.get_object_track_ids() - if not track_ids: + detected = self.get_detected_objects() + if not detected: return "No objects detected." - return f"Detected objects with track_ids: {track_ids}" - @skill() + obj_list = [f" - {obj['name']} (object_id='{obj['object_id']}')" for obj in detected] + return f"Detected {len(detected)} object(s):\n" + "\n".join(obj_list) + + @skill def select(self, track_id: int) -> str: """Select an object by track_id and promote it to permanent. @@ -179,7 +275,15 @@ def _process_images(self, color_msg: Image, depth_msg: Image) -> None: return color_image = color_msg - depth_image = depth_msg.to_depth_meters() + # Convert depth to meters (float32) + depth_cv = depth_msg.to_opencv() + if depth_msg.format == ImageFormat.DEPTH16: + depth_cv = depth_cv.astype(np.float32) / 1000.0 + elif depth_cv.dtype != np.float32: + depth_cv = depth_cv.astype(np.float32) + depth_image = Image( + data=depth_cv, format=ImageFormat.DEPTH, frame_id=depth_msg.frame_id, ts=depth_msg.ts + ) # Run 2D detection detections_2d: ImageDetections2D[Any] = self._detector.process_image(color_image) @@ -207,6 +311,9 @@ def _process_3d_detections( if self._camera_info is None: return + # Cache depth image for full scene pointcloud generation + self._latest_depth_image = depth_image + # Look up transform from camera frame to target frame (e.g., map) camera_transform = None if self._target_frame != color_image.frame_id: @@ -220,6 +327,9 @@ def _process_3d_detections( logger.warning("Failed to lookup transform from camera frame to target frame") return + # Cache camera transform for full scene pointcloud + self._latest_camera_transform = camera_transform + objects = Object.from_2d_to_list( detections_2d=detections_2d, color_image=color_image, @@ -235,6 +345,7 @@ def _process_3d_detections( detections_3d = to_detection3d_array(objects) 
self.detections_3d.publish(detections_3d) + self.objects.publish(objects) objects_for_pc = self._object_db.get_objects() aggregated_pc = aggregate_pointclouds(objects_for_pc) diff --git a/dimos/perception/object_tracker.py b/dimos/perception/object_tracker.py index 54a5873435..da415ac32a 100644 --- a/dimos/perception/object_tracker.py +++ b/dimos/perception/object_tracker.py @@ -25,9 +25,12 @@ ObjectHypothesisWithPose, ) import numpy as np +from numpy.typing import NDArray from reactivex.disposable import Disposable -from dimos.core import In, Module, ModuleConfig, Out, rpc +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig +from dimos.core.stream import In, Out from dimos.msgs.geometry_msgs import Pose, Quaternion, Transform, Vector3 from dimos.msgs.sensor_msgs import ( CameraInfo, @@ -555,9 +558,9 @@ def _process_tracking(self) -> None: viz_msg = Image.from_numpy(viz_image) self.tracked_overlay.publish(viz_msg) - def _draw_reid_matches(self, image: np.ndarray) -> np.ndarray: # type: ignore[type-arg] + def _draw_reid_matches(self, image: NDArray[np.uint8]) -> NDArray[np.uint8]: # type: ignore[type-arg] """Draw REID feature matches on the image.""" - viz_image = image.copy() + viz_image: NDArray[np.uint8] = image.copy() # type: ignore[type-arg] x1, y1, _x2, _y2 = self.last_roi_bbox # type: ignore[misc] diff --git a/dimos/perception/object_tracker_2d.py b/dimos/perception/object_tracker_2d.py index f5d39745c3..1264b0e92b 100644 --- a/dimos/perception/object_tracker_2d.py +++ b/dimos/perception/object_tracker_2d.py @@ -29,9 +29,12 @@ Pose2D, ) import numpy as np +from numpy.typing import NDArray from reactivex.disposable import Disposable -from dimos.core import In, Module, ModuleConfig, Out, rpc +from dimos.core.core import rpc +from dimos.core.module import Module, ModuleConfig +from dimos.core.stream import In, Out from dimos.msgs.sensor_msgs import Image, ImageFormat from dimos.msgs.std_msgs import Header from 
dimos.msgs.vision_msgs import Detection2DArray @@ -289,9 +292,9 @@ def _process_tracking(self) -> None: viz_msg = Image.from_numpy(viz_copy, format=ImageFormat.RGB) self.tracked_overlay.publish(viz_msg) - def _draw_visualization(self, image: np.ndarray, bbox: list[int]) -> np.ndarray: # type: ignore[type-arg] + def _draw_visualization(self, image: NDArray[np.uint8], bbox: list[int]) -> NDArray[np.uint8]: # type: ignore[type-arg] """Draw tracking visualization.""" - viz_image = image.copy() + viz_image: NDArray[np.uint8] = image.copy() # type: ignore[type-arg] x1, y1, x2, y2 = bbox cv2.rectangle(viz_image, (x1, y1), (x2, y2), (0, 255, 0), 2) cv2.putText(viz_image, "TRACKING", (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2) diff --git a/dimos/perception/object_tracker_3d.py b/dimos/perception/object_tracker_3d.py index fa6361ac65..f8143dc861 100644 --- a/dimos/perception/object_tracker_3d.py +++ b/dimos/perception/object_tracker_3d.py @@ -283,7 +283,7 @@ def _draw_reid_overlay(self, image: np.ndarray) -> np.ndarray: # type: ignore[t """Draw Re-ID feature matches on visualization.""" import cv2 - viz_image = image.copy() + viz_image: np.ndarray = image.copy() # type: ignore[type-arg] x1, y1, _x2, _y2 = self.last_roi_bbox # type: ignore[attr-defined] # Draw keypoints diff --git a/dimos/perception/spatial_perception.py b/dimos/perception/spatial_perception.py index e33820f22c..d7f27c31dc 100644 --- a/dimos/perception/spatial_perception.py +++ b/dimos/perception/spatial_perception.py @@ -33,7 +33,7 @@ from dimos.agents_deprecated.memory.visual_memory import VisualMemory from dimos.constants import DIMOS_PROJECT_ROOT from dimos.core import DimosCluster, In, rpc -from dimos.core.skill_module import SkillModule +from dimos.core.module import Module from dimos.msgs.sensor_msgs import Image from dimos.types.robot_location import RobotLocation from dimos.utils.logging_config import setup_logger @@ -51,7 +51,7 @@ logger = setup_logger() -class 
SpatialMemory(SkillModule): +class SpatialMemory(Module): """ A Dask module for building and querying Robot spatial memory. @@ -203,7 +203,7 @@ def set_video(image_msg: Image) -> None: # Start periodic processing using interval unsub = interval(self._process_interval).subscribe(lambda _: self._process_frame()) # type: ignore[assignment] - self._disposables.add(Disposable(unsub)) + self._disposables.add(unsub) @rpc def stop(self) -> None: diff --git a/dimos/perception/test_spatial_memory_module.py b/dimos/perception/test_spatial_memory_module.py index 47518b889b..98ec7a1212 100644 --- a/dimos/perception/test_spatial_memory_module.py +++ b/dimos/perception/test_spatial_memory_module.py @@ -24,16 +24,13 @@ from dimos.core import Module, Out, rpc from dimos.msgs.sensor_msgs import Image from dimos.perception.spatial_perception import SpatialMemory -from dimos.protocol import pubsub -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.robot.unitree.type.odometry import Odometry from dimos.utils.data import get_data from dimos.utils.logging_config import setup_logger from dimos.utils.testing import TimedSensorReplay logger = setup_logger() -pubsub.lcm.autoconf() - class VideoReplayModule(Module): """Module that replays video data from TimedSensorReplay.""" diff --git a/dimos/protocol/mcp/README.md b/dimos/protocol/mcp/README.md index 2a3c382484..233e852669 100644 --- a/dimos/protocol/mcp/README.md +++ b/dimos/protocol/mcp/README.md @@ -4,16 +4,21 @@ Expose DimOS robot skills to Claude Code via Model Context Protocol. 
## Setup +```bash +uv sync --extra base --extra unitree +``` + Add to Claude Code (one command): ```bash claude mcp add --transport stdio dimos --scope project -- python -m dimos.protocol.mcp ``` + ## Usage **Terminal 1** - Start DimOS: ```bash -dimos --replay run unitree-go2-agentic +uv run dimos run unitree-go2-agentic-mcp ``` **Claude Code** - Use robot skills: @@ -25,6 +30,6 @@ dimos --replay run unitree-go2-agentic ## How It Works -1. `llm_agent(mcp_port=9990)` in the blueprint starts a TCP server +1. `MCPModule` in the blueprint starts a TCP server on port 9990 2. Claude Code spawns the bridge (`--bridge`) which connects to `localhost:9990` 3. Skills are exposed as MCP tools (e.g., `relative_move`, `navigate_with_text`) diff --git a/dimos/protocol/mcp/__init__.py b/dimos/protocol/mcp/__init__.py index 51432ba0cf..e69de29bb2 100644 --- a/dimos/protocol/mcp/__init__.py +++ b/dimos/protocol/mcp/__init__.py @@ -1,17 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from dimos.protocol.mcp.mcp import MCPModule - -__all__ = ["MCPModule"] diff --git a/dimos/protocol/mcp/mcp.py b/dimos/protocol/mcp/mcp.py index f7427cd613..78d19c64db 100644 --- a/dimos/protocol/mcp/mcp.py +++ b/dimos/protocol/mcp/mcp.py @@ -16,26 +16,28 @@ import asyncio import json from typing import TYPE_CHECKING, Any -import uuid from dimos.core import Module, rpc -from dimos.protocol.skill.coordinator import SkillCoordinator, SkillStateEnum +from dimos.core.rpc_client import RpcCall, RPCClient if TYPE_CHECKING: - from dimos.protocol.skill.coordinator import SkillState + from dimos.core.module import SkillInfo class MCPModule(Module): - def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def] + _skills: list[SkillInfo] + _rpc_calls: dict[str, RpcCall] + + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.coordinator = SkillCoordinator() + self._skills = [] + self._rpc_calls = {} self._server: asyncio.AbstractServer | None = None self._server_future: object | None = None @rpc def start(self) -> None: super().start() - self.coordinator.start() self._start_server() @rpc @@ -48,12 +50,16 @@ def stop(self) -> None: self._server = None if self._server_future and hasattr(self._server_future, "cancel"): self._server_future.cancel() - self.coordinator.stop() super().stop() @rpc - def register_skills(self, container) -> None: # type: ignore[no-untyped-def] - self.coordinator.register_skills(container) + def on_system_modules(self, modules: list[RPCClient]) -> None: + assert self.rpc is not None + self._skills = [skill for module in modules for skill in (module.get_skills() or [])] + self._rpc_calls = { + skill.func_name: RpcCall(None, self.rpc, skill.func_name, skill.class_name, []) + for skill in self._skills + } def _start_server(self, port: int = 9990) -> None: async def handle_client(reader, writer) -> None: # type: ignore[no-untyped-def] @@ -85,15 +91,16 @@ async def _handle_request(self, request: 
dict[str, Any]) -> dict[str, Any]: } return {"jsonrpc": "2.0", "id": req_id, "result": init_result} if method == "tools/list": - tools = [ - { - "name": c.name, - "description": c.schema.get("function", {}).get("description", ""), - "inputSchema": c.schema.get("function", {}).get("parameters", {}), - } - for c in self.coordinator.skills().values() - if not c.hide_skill - ] + tools = [] + for skill in self._skills: + schema = json.loads(skill.args_schema) + tools.append( + { + "name": skill.func_name, + "description": schema.get("description", ""), + "inputSchema": schema, + } + ) return {"jsonrpc": "2.0", "id": req_id, "result": {"tools": tools}} if method == "tools/call": name = params.get("name") @@ -106,21 +113,20 @@ async def _handle_request(self, request: dict[str, Any]) -> dict[str, Any]: } if not isinstance(args, dict): args = {} - call_id = str(uuid.uuid4()) - self.coordinator.call_skill(call_id, name, args) - result: SkillState | None = self.coordinator._skill_state.get(call_id) + rpc_call = self._rpc_calls.get(name) + if rpc_call is None: + return { + "jsonrpc": "2.0", + "id": req_id, + "result": {"content": [{"type": "text", "text": "Skill not found"}]}, + } try: - await asyncio.wait_for(self.coordinator.wait_for_updates(), timeout=5.0) - except asyncio.TimeoutError: - pass - if result is None: - text = "Skill not found" - elif result.state == SkillStateEnum.completed: - text = str(result.content()) if result.content() else "Completed" - elif result.state == SkillStateEnum.error: - text = f"Error: {result.content()}" - else: - text = f"Started ({result.state.name})" + result = await asyncio.get_event_loop().run_in_executor( + None, lambda: rpc_call(**args) + ) + text = str(result) if result is not None else "Completed" + except Exception as e: + text = f"Error: {e}" return { "jsonrpc": "2.0", "id": req_id, diff --git a/dimos/protocol/mcp/test_mcp_module.py b/dimos/protocol/mcp/test_mcp_module.py index 2a247e6ff0..050e24f13b 100644 --- 
a/dimos/protocol/mcp/test_mcp_module.py +++ b/dimos/protocol/mcp/test_mcp_module.py @@ -16,58 +16,52 @@ import asyncio import json -import os from pathlib import Path -import socket -import subprocess -import sys - -import pytest +from unittest.mock import MagicMock +from dimos.core.module import SkillInfo from dimos.protocol.mcp.mcp import MCPModule -from dimos.protocol.skill.coordinator import SkillStateEnum -from dimos.protocol.skill.skill import SkillContainer, skill + + +def _make_mcp(skills: list[SkillInfo], call_results: dict[str, object]) -> MCPModule: + """Create an MCPModule with pre-populated skills and mock RPC calls.""" + mcp = MCPModule.__new__(MCPModule) + mcp._skills = skills + mcp._rpc_calls = {} + for skill in skills: + mock_call = MagicMock() + if skill.func_name in call_results: + mock_call.return_value = call_results[skill.func_name] + else: + mock_call.return_value = None + mcp._rpc_calls[skill.func_name] = mock_call + return mcp def test_unitree_blueprint_has_mcp() -> None: - contents = Path("dimos/robot/unitree_webrtc/unitree_go2_blueprints.py").read_text() + contents = Path( + "dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic_mcp.py" + ).read_text() assert "agentic_mcp" in contents assert "MCPModule.blueprint()" in contents def test_mcp_module_request_flow() -> None: - class DummySkill: - def __init__(self) -> None: - self.name = "add" - self.hide_skill = False - self.schema = {"function": {"description": "", "parameters": {"type": "object"}}} - - class DummyState: - def __init__(self, content: int) -> None: - self.state = SkillStateEnum.completed - self._content = content - - def content(self) -> int: - return self._content - - class DummyCoordinator: - def __init__(self) -> None: - self._skill_state: dict[str, DummyState] = {} - - def skills(self) -> dict[str, DummySkill]: - return {"add": DummySkill()} - - def call_skill(self, call_id: str, _name: str, args: dict[str, int]) -> None: - self._skill_state[call_id] = 
DummyState(args["x"] + args["y"]) - - async def wait_for_updates(self) -> bool: - return True + schema = json.dumps( + { + "type": "object", + "description": "Add two numbers", + "properties": {"x": {"type": "integer"}, "y": {"type": "integer"}}, + "required": ["x", "y"], + } + ) + skills = [SkillInfo(class_name="TestSkills", func_name="add", args_schema=schema)] - mcp = MCPModule.__new__(MCPModule) - mcp.coordinator = DummyCoordinator() + mcp = _make_mcp(skills, {"add": 5}) response = asyncio.run(mcp._handle_request({"method": "tools/list", "id": 1})) assert response["result"]["tools"][0]["name"] == "add" + assert response["result"]["tools"][0]["description"] == "Add two numbers" response = asyncio.run( mcp._handle_request( @@ -81,128 +75,56 @@ async def wait_for_updates(self) -> bool: assert response["result"]["content"][0]["text"] == "5" -def test_mcp_module_handles_hidden_and_errors() -> None: - class DummySkill: - def __init__(self, name: str, hide_skill: bool) -> None: - self.name = name - self.hide_skill = hide_skill - self.schema = {"function": {"description": "", "parameters": {"type": "object"}}} - - class DummyState: - def __init__(self, state: SkillStateEnum, content: str | None) -> None: - self.state = state - self._content = content - - def content(self) -> str | None: - return self._content - - class DummyCoordinator: - def __init__(self) -> None: - self._skill_state: dict[str, DummyState] = {} - self._skills = { - "visible": DummySkill("visible", False), - "hidden": DummySkill("hidden", True), - "fail": DummySkill("fail", False), - } - - def skills(self) -> dict[str, DummySkill]: - return self._skills +def test_mcp_module_handles_errors() -> None: + schema = json.dumps({"type": "object", "properties": {}}) + skills = [ + SkillInfo(class_name="TestSkills", func_name="ok_skill", args_schema=schema), + SkillInfo(class_name="TestSkills", func_name="fail_skill", args_schema=schema), + ] - def call_skill(self, call_id: str, name: str, _args: dict[str, 
int]) -> None: - if name == "fail": - self._skill_state[call_id] = DummyState(SkillStateEnum.error, "boom") - elif name in self._skills: - self._skill_state[call_id] = DummyState(SkillStateEnum.running, None) - - async def wait_for_updates(self) -> bool: - return True - - mcp = MCPModule.__new__(MCPModule) - mcp.coordinator = DummyCoordinator() + mcp = _make_mcp(skills, {"ok_skill": "done"}) + mcp._rpc_calls["fail_skill"] = MagicMock(side_effect=RuntimeError("boom")) + # All skills listed response = asyncio.run(mcp._handle_request({"method": "tools/list", "id": 1})) tool_names = {tool["name"] for tool in response["result"]["tools"]} - assert "visible" in tool_names - assert "hidden" not in tool_names + assert "ok_skill" in tool_names + assert "fail_skill" in tool_names + # Error skill returns error text response = asyncio.run( mcp._handle_request( - {"method": "tools/call", "id": 2, "params": {"name": "fail", "arguments": {}}} + {"method": "tools/call", "id": 2, "params": {"name": "fail_skill", "arguments": {}}} ) ) assert "Error:" in response["result"]["content"][0]["text"] + assert "boom" in response["result"]["content"][0]["text"] - -@pytest.mark.integration -def test_mcp_end_to_end_lcm_bridge() -> None: - try: - import lcm # type: ignore[import-untyped] - - lcm.LCM() - except Exception as exc: - if os.environ.get("CI"): - pytest.fail(f"LCM unavailable for MCP end-to-end test: {exc}") - pytest.skip("LCM unavailable for MCP end-to-end test.") - - try: - socket.socket(socket.AF_INET, socket.SOCK_STREAM).close() - except PermissionError: - if os.environ.get("CI"): - pytest.fail("Socket creation not permitted in CI environment.") - pytest.skip("Socket creation not permitted in this environment.") - - class TestSkills(SkillContainer): - @skill() - def add(self, x: int, y: int) -> int: - return x + y - - mcp = MCPModule() - mcp.start() - - try: - mcp.register_skills(TestSkills()) - - env = {"MCP_HOST": "127.0.0.1", "MCP_PORT": "9990"} - proc = subprocess.Popen( - 
[sys.executable, "-m", "dimos.protocol.mcp"], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env={**os.environ, **env}, - text=True, + # Unknown skill returns not found + response = asyncio.run( + mcp._handle_request( + {"method": "tools/call", "id": 3, "params": {"name": "no_such", "arguments": {}}} ) - try: - request = {"jsonrpc": "2.0", "id": 1, "method": "tools/list"} - proc.stdin.write(json.dumps(request) + "\n") - proc.stdin.flush() - stdout = proc.stdout.readline() - assert '"tools"' in stdout - assert '"add"' in stdout - finally: - proc.terminate() - proc.wait(timeout=5) - - proc = subprocess.Popen( - [sys.executable, "-m", "dimos.protocol.mcp"], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env={**os.environ, **env}, - text=True, + ) + assert "not found" in response["result"]["content"][0]["text"].lower() + + +def test_mcp_module_initialize_and_unknown() -> None: + mcp = _make_mcp([], {}) + + response = asyncio.run(mcp._handle_request({"method": "initialize", "id": 1})) + assert response["result"]["serverInfo"]["name"] == "dimensional" + + response = asyncio.run(mcp._handle_request({"method": "unknown/method", "id": 2})) + assert response["error"]["code"] == -32601 + + +def test_mcp_module_invalid_tool_name() -> None: + mcp = _make_mcp([], {}) + + response = asyncio.run( + mcp._handle_request( + {"method": "tools/call", "id": 1, "params": {"name": 123, "arguments": {}}} ) - try: - request = { - "jsonrpc": "2.0", - "id": 2, - "method": "tools/call", - "params": {"name": "add", "arguments": {"x": 2, "y": 3}}, - } - proc.stdin.write(json.dumps(request) + "\n") - proc.stdin.flush() - stdout = proc.stdout.readline() - assert "5" in stdout - finally: - proc.terminate() - proc.wait(timeout=5) - finally: - mcp.stop() + ) + assert response["error"]["code"] == -32602 diff --git a/dimos/protocol/pubsub/__init__.py b/dimos/protocol/pubsub/__init__.py index 89bd292fda..94a58b60de 100644 --- 
a/dimos/protocol/pubsub/__init__.py +++ b/dimos/protocol/pubsub/__init__.py @@ -1,3 +1,9 @@ -import dimos.protocol.pubsub.lcmpubsub as lcm -from dimos.protocol.pubsub.memory import Memory +import dimos.protocol.pubsub.impl.lcmpubsub as lcm +from dimos.protocol.pubsub.impl.memory import Memory from dimos.protocol.pubsub.spec import PubSub + +__all__ = [ + "Memory", + "PubSub", + "lcm", +] diff --git a/dimos/protocol/pubsub/benchmark/test_benchmark.py b/dimos/protocol/pubsub/benchmark/test_benchmark.py index 865c4ee324..39a4421c35 100644 --- a/dimos/protocol/pubsub/benchmark/test_benchmark.py +++ b/dimos/protocol/pubsub/benchmark/test_benchmark.py @@ -82,6 +82,7 @@ def benchmark_results() -> Generator[BenchmarkResults, None, None]: results.print_heatmap() results.print_bandwidth_heatmap() results.print_latency_heatmap() + results.print_loss_heatmap() @pytest.mark.tool diff --git a/dimos/protocol/pubsub/benchmark/testdata.py b/dimos/protocol/pubsub/benchmark/testdata.py index beb140227f..ad604131e0 100644 --- a/dimos/protocol/pubsub/benchmark/testdata.py +++ b/dimos/protocol/pubsub/benchmark/testdata.py @@ -14,15 +14,27 @@ from collections.abc import Generator from contextlib import contextmanager -from typing import Any +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any import numpy as np from dimos.msgs.sensor_msgs.Image import Image, ImageFormat from dimos.protocol.pubsub.benchmark.type import Case -from dimos.protocol.pubsub.lcmpubsub import LCM, LCMPubSubBase, Topic as LCMTopic -from dimos.protocol.pubsub.memory import Memory -from dimos.protocol.pubsub.shmpubsub import BytesSharedMemory, LCMSharedMemory, PickleSharedMemory + +try: + import cyclonedds as _cyclonedds # noqa: F401 + + DDS_AVAILABLE = True +except ImportError: + DDS_AVAILABLE = False +from dimos.protocol.pubsub.impl.lcmpubsub import LCM, LCMPubSubBase, Topic as LCMTopic +from dimos.protocol.pubsub.impl.memory import Memory +from dimos.protocol.pubsub.impl.shmpubsub import ( + 
BytesSharedMemory, + LCMSharedMemory, + PickleSharedMemory, +) def make_data_bytes(size: int) -> bytes: @@ -169,9 +181,70 @@ def shm_lcm_pubsub_channel() -> Generator[LCMSharedMemory, None, None]: ) ) +if DDS_AVAILABLE: + from cyclonedds.idl import IdlStruct + from cyclonedds.idl.types import sequence, uint8 + from cyclonedds.qos import Policy, Qos + + from dimos.protocol.pubsub.impl.ddspubsub import ( + DDS, + Topic as DDSTopic, + ) + + @dataclass + class DDSBenchmarkData(IdlStruct): # type: ignore[misc] + """DDS message type for benchmarking with variable-size byte payload.""" + + data: sequence[uint8] # type: ignore[type-arg] + + @contextmanager + def dds_high_throughput_pubsub_channel() -> Generator[DDS, None, None]: + """DDS with high-throughput QoS preset.""" + HIGH_THROUGHPUT_QOS = Qos( + Policy.Reliability.BestEffort, + Policy.History.KeepLast(depth=1), + Policy.Durability.Volatile, + ) + dds_pubsub = DDS(qos=HIGH_THROUGHPUT_QOS) + dds_pubsub.start() + yield dds_pubsub + dds_pubsub.stop() + + @contextmanager + def dds_reliable_pubsub_channel() -> Generator[DDS, None, None]: + """DDS with reliable QoS preset.""" + RELIABLE_QOS = Qos( + Policy.Reliability.Reliable(max_blocking_time=0), + Policy.History.KeepLast(depth=5000), + Policy.Durability.Volatile, + ) + dds_pubsub = DDS(qos=RELIABLE_QOS) + dds_pubsub.start() + yield dds_pubsub + dds_pubsub.stop() + + def dds_msggen(size: int) -> tuple[DDSTopic, DDSBenchmarkData]: + """Generate DDS message for benchmark.""" + topic = DDSTopic(name="benchmark/dds", data_type=DDSBenchmarkData) + return (topic, DDSBenchmarkData(data=list(make_data_bytes(size)))) # type: ignore[arg-type] + + testcases.append( + Case( + pubsub_context=dds_high_throughput_pubsub_channel, + msg_gen=dds_msggen, + ) + ) + + testcases.append( + Case( + pubsub_context=dds_reliable_pubsub_channel, + msg_gen=dds_msggen, + ) + ) + try: - from dimos.protocol.pubsub.redispubsub import Redis + from dimos.protocol.pubsub.impl.redispubsub import Redis 
@contextmanager def redis_pubsub_channel() -> Generator[Redis, None, None]: @@ -199,15 +272,29 @@ def redis_msggen(size: int) -> tuple[str, Any]: print("Redis not available") -from dimos.protocol.pubsub.rospubsub import ROS_AVAILABLE, RawROS, RawROSTopic +from dimos.protocol.pubsub.impl.rospubsub import ( + ROS_AVAILABLE, + DimosROS, + RawROS, + RawROSTopic, + ROSTopic, +) + +if TYPE_CHECKING: + from numpy.typing import NDArray if ROS_AVAILABLE: - from rclpy.qos import QoSDurabilityPolicy, QoSHistoryPolicy, QoSProfile, QoSReliabilityPolicy - from sensor_msgs.msg import Image as ROSImage + from rclpy.qos import ( # type: ignore[no-untyped-call] + QoSDurabilityPolicy, + QoSHistoryPolicy, + QoSProfile, + QoSReliabilityPolicy, + ) + from sensor_msgs.msg import Image as ROSImage # type: ignore[attr-defined,no-untyped-call] @contextmanager def ros_best_effort_pubsub_channel() -> Generator[RawROS, None, None]: - qos = QoSProfile( + qos = QoSProfile( # type: ignore[no-untyped-call] reliability=QoSReliabilityPolicy.BEST_EFFORT, history=QoSHistoryPolicy.KEEP_LAST, durability=QoSDurabilityPolicy.VOLATILE, @@ -220,7 +307,7 @@ def ros_best_effort_pubsub_channel() -> Generator[RawROS, None, None]: @contextmanager def ros_reliable_pubsub_channel() -> Generator[RawROS, None, None]: - qos = QoSProfile( + qos = QoSProfile( # type: ignore[no-untyped-call] reliability=QoSReliabilityPolicy.RELIABLE, history=QoSHistoryPolicy.KEEP_LAST, durability=QoSDurabilityPolicy.VOLATILE, @@ -235,21 +322,21 @@ def ros_msggen(size: int) -> tuple[RawROSTopic, ROSImage]: import numpy as np # Create image data - data = np.frombuffer(make_data_bytes(size), dtype=np.uint8).reshape(-1) - padded_size = ((len(data) + 2) // 3) * 3 - data = np.pad(data, (0, padded_size - len(data))) - pixels = len(data) // 3 + raw_data: NDArray[np.uint8] = np.frombuffer(make_data_bytes(size), dtype=np.uint8) + padded_size = ((len(raw_data) + 2) // 3) * 3 + padded_data: NDArray[np.uint8] = np.pad(raw_data, (0, padded_size - 
len(raw_data))) + pixels = len(padded_data) // 3 height = max(1, int(pixels**0.5)) width = pixels // height - data = data[: height * width * 3] + final_data: NDArray[np.uint8] = padded_data[: height * width * 3] # Create ROS Image message - msg = ROSImage() + msg = ROSImage() # type: ignore[no-untyped-call] msg.height = height msg.width = width msg.encoding = "rgb8" msg.step = width * 3 - msg.data = data.tobytes() + msg.data = bytes(final_data) topic = RawROSTopic(topic="/benchmark/ros", ros_type=ROSImage) return (topic, msg) @@ -267,3 +354,49 @@ def ros_msggen(size: int) -> tuple[RawROSTopic, ROSImage]: msg_gen=ros_msggen, ) ) + + @contextmanager + def dimos_ros_best_effort_pubsub_channel() -> Generator[DimosROS, None, None]: + qos = QoSProfile( # type: ignore[no-untyped-call] + reliability=QoSReliabilityPolicy.BEST_EFFORT, + history=QoSHistoryPolicy.KEEP_LAST, + durability=QoSDurabilityPolicy.VOLATILE, + depth=5000, + ) + ros_pubsub = DimosROS(node_name="benchmark_dimos_ros_best_effort", qos=qos) + ros_pubsub.start() + yield ros_pubsub + ros_pubsub.stop() + + @contextmanager + def dimos_ros_reliable_pubsub_channel() -> Generator[DimosROS, None, None]: + qos = QoSProfile( # type: ignore[no-untyped-call] + reliability=QoSReliabilityPolicy.RELIABLE, + history=QoSHistoryPolicy.KEEP_LAST, + durability=QoSDurabilityPolicy.VOLATILE, + depth=5000, + ) + ros_pubsub = DimosROS(node_name="benchmark_dimos_ros_reliable", qos=qos) + ros_pubsub.start() + yield ros_pubsub + ros_pubsub.stop() + + def dimos_ros_msggen(size: int) -> tuple[ROSTopic, Image]: + topic = ROSTopic(topic="/benchmark/dimos_ros", msg_type=Image) + return (topic, make_data_image(size)) + + # commented to save benchmarking time, + # since reliable and best effort are very similar in performance for local pubsub + # testcases.append( + # Case( + # pubsub_context=dimos_ros_best_effort_pubsub_channel, + # msg_gen=dimos_ros_msggen, + # ) + # ) + + testcases.append( + Case( + 
pubsub_context=dimos_ros_reliable_pubsub_channel, + msg_gen=dimos_ros_msggen, + ) + ) diff --git a/dimos/protocol/pubsub/benchmark/type.py b/dimos/protocol/pubsub/benchmark/type.py index 79101df9c5..a9ef80fe7a 100644 --- a/dimos/protocol/pubsub/benchmark/type.py +++ b/dimos/protocol/pubsub/benchmark/type.py @@ -41,24 +41,31 @@ def __len__(self) -> int: TestData = Sequence[Case[Any, Any]] -def _format_size(size_bytes: int) -> str: - """Format byte size to human-readable string.""" - if size_bytes >= 1048576: - return f"{size_bytes / 1048576:.1f} MB" - if size_bytes >= 1024: - return f"{size_bytes / 1024:.1f} KB" - return f"{size_bytes} B" - - -def _format_throughput(bytes_per_sec: float) -> str: - """Format throughput to human-readable string.""" - if bytes_per_sec >= 1e9: - return f"{bytes_per_sec / 1e9:.2f} GB/s" - if bytes_per_sec >= 1e6: - return f"{bytes_per_sec / 1e6:.2f} MB/s" - if bytes_per_sec >= 1e3: - return f"{bytes_per_sec / 1e3:.2f} KB/s" - return f"{bytes_per_sec:.2f} B/s" +def _format_mib(value: float) -> str: + """Format bytes as MiB with intelligent rounding. 
+ + >= 10 MiB: integer (e.g., "42") + 1-10 MiB: 1 decimal (e.g., "2.5") + < 1 MiB: 2 decimals (e.g., "0.07") + """ + mib = value / (1024**2) + if mib >= 10: + return f"{mib:.0f}" + if mib >= 1: + return f"{mib:.1f}" + return f"{mib:.2f}" + + +def _format_iec(value: float, concise: bool = False, decimals: int = 2) -> str: + """Format bytes with IEC units (Ki/Mi/Gi = 1024^1/2/3)""" + k = 1024.0 + units = ["B", "K", "M", "G", "T"] if concise else ["B", "KiB", "MiB", "GiB", "TiB"] + + for unit in units[:-1]: + if abs(value) < k: + return f"{value:.{decimals}f}{unit}" if concise else f"{value:.{decimals}f} {unit}" + value /= k + return f"{value:.{decimals}f}{units[-1]}" if concise else f"{value:.{decimals}f} {units[-1]}" @dataclass @@ -117,7 +124,7 @@ def print_summary(self) -> None: table.add_column("Sent", justify="right") table.add_column("Recv", justify="right") table.add_column("Msgs/s", justify="right", style="green") - table.add_column("Throughput", justify="right", style="green") + table.add_column("MiB/s", justify="right", style="green") table.add_column("Latency", justify="right") table.add_column("Loss", justify="right") @@ -126,11 +133,11 @@ def print_summary(self) -> None: recv_style = "yellow" if r.receive_time > 0.1 else "dim" table.add_row( r.transport, - _format_size(r.msg_size_bytes), + _format_iec(r.msg_size_bytes, decimals=0), f"{r.msgs_sent:,}", f"{r.msgs_received:,}", f"{r.throughput_msgs:,.0f}", - _format_throughput(r.throughput_bytes), + _format_mib(r.throughput_bytes), f"[{recv_style}]{r.receive_time * 1000:.0f}ms[/{recv_style}]", f"[{loss_style}]{r.loss_pct:.1f}%[/{loss_style}]", ) @@ -149,13 +156,6 @@ def _print_heatmap( if not self.results: return - def size_id(size: int) -> str: - if size >= 1048576: - return f"{size // 1048576}MB" - if size >= 1024: - return f"{size // 1024}KB" - return f"{size}B" - transports = sorted(set(r.transport for r in self.results)) sizes = sorted(set(r.msg_size_bytes for r in self.results)) @@ -211,7 +211,7 @@ def 
val_to_color(v: float) -> int: return gradient[int(t * (len(gradient) - 1))] reset = "\033[0m" - size_labels = [size_id(s) for s in sizes] + size_labels = [_format_iec(s, concise=True, decimals=0) for s in sizes] col_w = max(8, max(len(s) for s in size_labels) + 1) transport_w = max(len(t) for t in transports) + 1 @@ -245,15 +245,9 @@ def print_bandwidth_heatmap(self) -> None: """Print bandwidth heatmap.""" def fmt(v: float) -> str: - if v >= 1e9: - return f"{v / 1e9:.1f}G" - if v >= 1e6: - return f"{v / 1e6:.0f}M" - if v >= 1e3: - return f"{v / 1e3:.0f}K" - return f"{v:.0f}" + return _format_iec(v, concise=True, decimals=1) - self._print_heatmap("Bandwidth", lambda r: r.throughput_bytes, fmt) + self._print_heatmap("Bandwidth (IEC)", lambda r: r.throughput_bytes, fmt) def print_latency_heatmap(self) -> None: """Print latency heatmap (time waiting for messages after publishing).""" @@ -263,4 +257,17 @@ def fmt(v: float) -> str: return f"{v:.1f}s" return f"{v * 1000:.0f}ms" - self._print_heatmap("Latency", lambda r: r.receive_time, fmt, high_is_good=False) + self._print_heatmap( + "Latency", + lambda r: r.receive_time, + fmt, + high_is_good=False, + ) + + def print_loss_heatmap(self) -> None: + """Print message loss percentage heatmap.""" + + def fmt(v: float) -> str: + return f"{v:.1f}%" + + self._print_heatmap("Loss %", lambda r: r.loss_pct, fmt, high_is_good=False) diff --git a/dimos/protocol/pubsub/bridge.py b/dimos/protocol/pubsub/bridge.py new file mode 100644 index 0000000000..f312caed7b --- /dev/null +++ b/dimos/protocol/pubsub/bridge.py @@ -0,0 +1,97 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Bridge utilities for connecting pubsub systems.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING, Generic, Protocol, TypeVar + +from dimos.protocol.service.spec import Service + +if TYPE_CHECKING: + from collections.abc import Callable + + from dimos.protocol.pubsub.spec import AllPubSub, PubSub + + +TopicT = TypeVar("TopicT") +MsgT = TypeVar("MsgT") +TopicFrom = TypeVar("TopicFrom") +TopicTo = TypeVar("TopicTo") +MsgFrom = TypeVar("MsgFrom") +MsgTo = TypeVar("MsgTo") + + +class Translator(Protocol[TopicFrom, TopicTo, MsgFrom, MsgTo]): # type: ignore[misc] + """Protocol for translating topics and messages between pubsub systems.""" + + def topic(self, topic: TopicFrom) -> TopicTo: + """Translate a topic from source to destination format.""" + ... + + def msg(self, msg: MsgFrom) -> MsgTo: + """Translate a message from source to destination format.""" + ... 
+ + +def bridge( + pubsub1: AllPubSub[TopicFrom, MsgFrom], + pubsub2: PubSub[TopicTo, MsgTo], + translator: Translator[TopicFrom, TopicTo, MsgFrom, MsgTo], + # optionally we can override subscribe_all + # and only bridge a specific part of the pubsub tree + topic_from: TopicFrom | None = None, +) -> Callable[[], None]: + def pass_msg(msg: MsgFrom, topic: TopicFrom) -> None: + pubsub2.publish(translator.topic(topic), translator.msg(msg)) + + # Bridge only specific messages from pubsub1 to pubsub2 + if topic_from: + return pubsub1.subscribe(topic_from, pass_msg) + + # Bridge all messages from pubsub1 to pubsub2 + return pubsub1.subscribe_all(pass_msg) + + +@dataclass +class BridgeConfig(Generic[TopicFrom, TopicTo, MsgFrom, MsgTo]): + """Configuration for a one-way bridge.""" + + source: AllPubSub[TopicFrom, MsgFrom] + destination: PubSub[TopicTo, MsgTo] + translator: Translator[TopicFrom, TopicTo, MsgFrom, MsgTo] + subscribe_topic: TopicFrom | None = None + + +class Bridge(Service[BridgeConfig[TopicFrom, TopicTo, MsgFrom, MsgTo]]): + """Service that bridges messages from one pubsub to another.""" + + _unsubscribe: Callable[[], None] | None = None + + def start(self) -> None: + super().start() + self._unsubscribe = bridge( + self.config.source, + self.config.destination, + self.config.translator, + self.config.subscribe_topic, + ) + + def stop(self) -> None: + if self._unsubscribe: + self._unsubscribe() + self._unsubscribe = None + super().stop() diff --git a/dimos/protocol/pubsub/encoders.py b/dimos/protocol/pubsub/encoders.py new file mode 100644 index 0000000000..6b2056fa8b --- /dev/null +++ b/dimos/protocol/pubsub/encoders.py @@ -0,0 +1,130 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Encoder mixins for PubSub implementations.""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +import pickle +from typing import TYPE_CHECKING, Generic, Protocol, TypeVar, cast + +from dimos.msgs import DimosMsg +from dimos.msgs.sensor_msgs import Image + +if TYPE_CHECKING: + from collections.abc import Callable + +TopicT = TypeVar("TopicT") +MsgT = TypeVar("MsgT") +EncodingT = TypeVar("EncodingT") + + +class DecodingError(Exception): + """Raised by decode() to skip a message without calling the callback.""" + + pass + + +class PubSubEncoderMixin(Generic[TopicT, MsgT, EncodingT], ABC): + """Mixin that encodes messages before publishing and decodes them after receiving. + + This will override publish and subscribe methods to add encoding/decoding. + Must be mixed with a class implementing PubSubProtocol[TopicT, EncodingT]. + + Usage: Just specify encoder and decoder as a subclass: + + class MyPubSubWithJSON(PubSubEncoderMixin, MyPubSub): + def encoder(msg, topic): + json.dumps(msg).encode('utf-8') + def decoder(msg, topic): + data: json.loads(data.decode('utf-8')) + """ + + # Declare expected methods from PubSubProtocol for type checking + if TYPE_CHECKING: + _base_publish: Callable[[TopicT, EncodingT], None] + _base_subscribe: Callable[[TopicT, Callable[[EncodingT, TopicT], None]], Callable[[], None]] + + @abstractmethod + def encode(self, msg: MsgT, topic: TopicT) -> EncodingT: ... + + @abstractmethod + def decode(self, msg: EncodingT, topic: TopicT) -> MsgT: ... 
+ + def publish(self, topic: TopicT, message: MsgT) -> None: + """Encode the message and publish it.""" + encoded_message = self.encode(message, topic) + if encoded_message is None: + return + super().publish(topic, encoded_message) # type: ignore[misc] + + def subscribe( + self, topic: TopicT, callback: Callable[[MsgT, TopicT], None] + ) -> Callable[[], None]: + """Subscribe with automatic decoding.""" + + def wrapper_cb(encoded_data: EncodingT, topic: TopicT) -> None: + try: + decoded_message = self.decode(encoded_data, topic) + except DecodingError: + return + callback(decoded_message, topic) + + return cast("Callable[[], None]", super().subscribe(topic, wrapper_cb)) # type: ignore[misc] + + +class PickleEncoderMixin(PubSubEncoderMixin[TopicT, MsgT, bytes]): + """Encoder mixin that uses pickle for serialization. Works with any Python object.""" + + def encode(self, msg: MsgT, _: TopicT) -> bytes: + return pickle.dumps(msg) + + def decode(self, msg: bytes, _: TopicT) -> MsgT: + return cast("MsgT", pickle.loads(msg)) + + +class LCMTopicProto(Protocol): + """Protocol for topics usable with LCM encoders.""" + + topic: str # At decode time, always concrete string + lcm_type: type[DimosMsg] | None + + +class LCMEncoderMixin(PubSubEncoderMixin[LCMTopicProto, DimosMsg, bytes]): + """Encoder mixin for DimosMsg using LCM binary encoding.""" + + def encode(self, msg: DimosMsg | bytes, _: LCMTopicProto) -> bytes: + if isinstance(msg, bytes): + return msg + return msg.lcm_encode() + + def decode(self, msg: bytes, topic: LCMTopicProto) -> DimosMsg: + if topic.lcm_type is None: + raise DecodingError(f"Cannot decode: topic {topic.topic!r} has no lcm_type") + return topic.lcm_type.lcm_decode(msg) + + +class JpegEncoderMixin(PubSubEncoderMixin[LCMTopicProto, Image, bytes]): + """Encoder mixin for DimosMsg using JPEG encoding (for images).""" + + def encode(self, msg: Image, _: LCMTopicProto) -> bytes: + return msg.lcm_jpeg_encode() + + def decode(self, msg: bytes, topic: 
LCMTopicProto) -> Image: + if topic.topic == "LCM_SELF_TEST": + raise DecodingError("Ignoring LCM_SELF_TEST topic") + if topic.lcm_type is None: + raise DecodingError(f"Cannot decode: topic {topic.topic!r} has no lcm_type") + return cast("type[Image]", topic.lcm_type).lcm_jpeg_decode(msg) diff --git a/dimos/protocol/pubsub/impl/__init__.py b/dimos/protocol/pubsub/impl/__init__.py new file mode 100644 index 0000000000..63a5bfa6d6 --- /dev/null +++ b/dimos/protocol/pubsub/impl/__init__.py @@ -0,0 +1,6 @@ +from dimos.protocol.pubsub.impl.lcmpubsub import ( + LCM as LCM, + LCMPubSubBase as LCMPubSubBase, + PickleLCM as PickleLCM, +) +from dimos.protocol.pubsub.impl.memory import Memory as Memory diff --git a/dimos/protocol/pubsub/impl/ddspubsub.py b/dimos/protocol/pubsub/impl/ddspubsub.py new file mode 100644 index 0000000000..1e6dc36296 --- /dev/null +++ b/dimos/protocol/pubsub/impl/ddspubsub.py @@ -0,0 +1,161 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +import threading +from typing import TYPE_CHECKING, Any, TypeAlias + +from cyclonedds.core import Listener +from cyclonedds.pub import DataWriter as DDSDataWriter +from cyclonedds.qos import Policy, Qos +from cyclonedds.sub import DataReader as DDSDataReader +from cyclonedds.topic import Topic as DDSTopic + +from dimos.protocol.pubsub.spec import PubSub +from dimos.protocol.service.ddsservice import DDSService +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from cyclonedds.idl import IdlStruct + +logger = setup_logger() + + +@dataclass(frozen=True) +class Topic: + """Represents a DDS topic.""" + + name: str + data_type: type[IdlStruct] + + def __str__(self) -> str: + return f"{self.name}#{self.data_type.__name__}" + + +MessageCallback: TypeAlias = Callable[[Any, Topic], None] + + +class _DDSMessageListener(Listener): # type: ignore[misc] + """Listener for DataReader that dispatches messages to callbacks.""" + + __slots__ = ("_callbacks", "_lock", "_topic") + + def __init__(self, topic: Topic) -> None: + super().__init__() # type: ignore[no-untyped-call] + self._topic = topic + self._callbacks: tuple[MessageCallback, ...] 
= () + self._lock = threading.Lock() + + def add_callback(self, callback: MessageCallback) -> None: + """Add a callback to the listener.""" + with self._lock: + self._callbacks = (*self._callbacks, callback) + + def remove_callback(self, callback: MessageCallback) -> None: + """Remove a callback from the listener.""" + with self._lock: + self._callbacks = tuple(cb for cb in self._callbacks if cb is not callback) + + def on_data_available(self, reader: DDSDataReader[Any]) -> None: + """Called when data is available on the reader.""" + try: + samples = reader.take() + except Exception as e: + logger.error(f"Error reading from topic {self._topic}: {e}", exc_info=True) + return + for sample in samples: + if sample is not None: + for callback in self._callbacks: + try: + callback(sample, self._topic) + except Exception as e: + logger.error(f"Callback error on topic {self._topic}: {e}", exc_info=True) + + +class DDS(DDSService, PubSub[Topic, Any]): + def __init__(self, qos: Qos | None = None, **kwargs: Any) -> None: + super().__init__(**kwargs) + self._qos = qos + self._writers: dict[Topic, DDSDataWriter[Any]] = {} + self._writer_lock = threading.Lock() + self._readers: dict[Topic, DDSDataReader[Any]] = {} + self._reader_lock = threading.Lock() + self._listeners: dict[Topic, _DDSMessageListener] = {} + + @property + def qos(self) -> Qos | None: + """Get the QoS settings.""" + return self._qos + + def _get_writer(self, topic: Topic) -> DDSDataWriter[Any]: + """Get or create a DataWriter for the given topic.""" + with self._writer_lock: + if topic not in self._writers: + dds_topic = DDSTopic(self.participant, topic.name, topic.data_type) + self._writers[topic] = DDSDataWriter(self.participant, dds_topic, qos=self._qos) + return self._writers[topic] + + def publish(self, topic: Topic, message: Any) -> None: + """Publish a message to a DDS topic.""" + writer = self._get_writer(topic) + try: + writer.write(message) + except Exception as e: + logger.error(f"Error publishing to 
topic {topic}: {e}", exc_info=True) + + def _get_listener(self, topic: Topic) -> _DDSMessageListener: + """Get or create a listener and reader for the given topic.""" + with self._reader_lock: + if topic not in self._readers: + dds_topic = DDSTopic(self.participant, topic.name, topic.data_type) + listener = _DDSMessageListener(topic) + self._readers[topic] = DDSDataReader( + self.participant, dds_topic, qos=self._qos, listener=listener + ) + self._listeners[topic] = listener + return self._listeners[topic] + + def subscribe(self, topic: Topic, callback: MessageCallback) -> Callable[[], None]: + """Subscribe to a DDS topic with a callback.""" + listener = self._get_listener(topic) + listener.add_callback(callback) + return lambda: self._unsubscribe_callback(topic, callback) + + def _unsubscribe_callback(self, topic: Topic, callback: MessageCallback) -> None: + """Unsubscribe a callback from a topic.""" + with self._reader_lock: + listener = self._listeners.get(topic) + if listener: + listener.remove_callback(callback) + + def stop(self) -> None: + """Stop the DDS service and clean up resources.""" + with self._reader_lock: + self._readers.clear() + self._listeners.clear() + with self._writer_lock: + self._writers.clear() + super().stop() + + +__all__ = [ + "DDS", + "MessageCallback", + "Policy", + "Qos", + "Topic", +] diff --git a/dimos/protocol/pubsub/jpeg_shm.py b/dimos/protocol/pubsub/impl/jpeg_shm.py similarity index 86% rename from dimos/protocol/pubsub/jpeg_shm.py rename to dimos/protocol/pubsub/impl/jpeg_shm.py index f2c9e35814..074f9fb76d 100644 --- a/dimos/protocol/pubsub/jpeg_shm.py +++ b/dimos/protocol/pubsub/impl/jpeg_shm.py @@ -16,10 +16,9 @@ from turbojpeg import TurboJPEG # type: ignore[import-untyped] -from dimos.msgs.sensor_msgs.Image import Image -from dimos.msgs.sensor_msgs.image_impls.AbstractImage import ImageFormat -from dimos.protocol.pubsub.shmpubsub import SharedMemoryPubSubBase -from dimos.protocol.pubsub.spec import PubSubEncoderMixin 
+from dimos.msgs.sensor_msgs.Image import Image, ImageFormat +from dimos.protocol.pubsub.encoders import PubSubEncoderMixin +from dimos.protocol.pubsub.impl.shmpubsub import SharedMemoryPubSubBase class JpegSharedMemoryEncoderMixin(PubSubEncoderMixin[str, Image, bytes]): diff --git a/dimos/protocol/pubsub/impl/lcmpubsub.py b/dimos/protocol/pubsub/impl/lcmpubsub.py new file mode 100644 index 0000000000..bf6bbd0dec --- /dev/null +++ b/dimos/protocol/pubsub/impl/lcmpubsub.py @@ -0,0 +1,171 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from dataclasses import dataclass +import re +from typing import TYPE_CHECKING, Any + +from dimos.protocol.pubsub.encoders import ( + JpegEncoderMixin, + LCMEncoderMixin, + PickleEncoderMixin, +) +from dimos.protocol.pubsub.patterns import Glob +from dimos.protocol.pubsub.spec import AllPubSub +from dimos.protocol.service.lcmservice import LCMConfig, LCMService, autoconf +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from collections.abc import Callable + import threading + + from dimos.msgs import DimosMsg + +logger = setup_logger() + + +@dataclass +class Topic: + topic: str | re.Pattern[str] | Glob + lcm_type: type[DimosMsg] | None = None + + @property + def is_pattern(self) -> bool: + return isinstance(self.topic, re.Pattern | Glob) + + @property + def pattern(self) -> str: + if isinstance(self.topic, re.Pattern): + return self.topic.pattern + if isinstance(self.topic, Glob): + return self.topic.pattern + return self.topic + + def __str__(self) -> str: + if self.lcm_type is None: + return self.pattern + return f"{self.pattern}#{self.lcm_type.msg_name}" + + @staticmethod + def from_channel_str(channel: str, default_lcm_type: type[DimosMsg] | None = None) -> Topic: + """Create Topic from channel string. + + Channel format: /topic#module.ClassName + Falls back to default_lcm_type if type cannot be parsed. + """ + from dimos.msgs import resolve_msg_type + + if "#" not in channel: + return Topic(topic=channel, lcm_type=default_lcm_type) + + topic_str, type_name = channel.rsplit("#", 1) + lcm_type = resolve_msg_type(type_name) + return Topic(topic=topic_str, lcm_type=lcm_type or default_lcm_type) + + +class LCMPubSubBase(LCMService, AllPubSub[Topic, Any]): + """LCM-based PubSub with native regex subscription support. + + LCM natively supports regex patterns in subscribe(), so we implement + RegexSubscribable directly without needing discovery-based fallback. 
+ """ + + default_config = LCMConfig + _stop_event: threading.Event + _thread: threading.Thread | None + + def publish(self, topic: Topic | str, message: bytes) -> None: + """Publish a message to the specified channel.""" + if self.l is None: + logger.error("Tried to publish after LCM was closed") + return + + topic_str = str(topic) if isinstance(topic, Topic) else topic + self.l.publish(topic_str, message) + + def subscribe_all(self, callback: Callable[[bytes, Topic], Any]) -> Callable[[], None]: + return self.subscribe(Topic(re.compile(".*")), callback) # type: ignore[arg-type] + + def subscribe( + self, topic: Topic, callback: Callable[[bytes, Topic], None] + ) -> Callable[[], None]: + if self.l is None: + logger.error("Tried to subscribe after LCM was closed") + + def noop() -> None: + pass + + return noop + + if topic.is_pattern: + + def handler(channel: str, msg: bytes) -> None: + if channel == "LCM_SELF_TEST": + return + callback(msg, Topic.from_channel_str(channel, topic.lcm_type)) + + pattern_str = str(topic) + if not pattern_str.endswith("*"): + pattern_str = f"{pattern_str}(#.*)?" + + lcm_subscription = self.l.subscribe(pattern_str, handler) + else: + topic_str = str(topic) + lcm_subscription = self.l.subscribe(topic_str, lambda _, msg: callback(msg, topic)) + + # Set queue capacity to 10000 to handle high-volume bursts + lcm_subscription.set_queue_capacity(10000) + + def unsubscribe() -> None: + if self.l is None: + return + self.l.unsubscribe(lcm_subscription) + + return unsubscribe + + +# these ignoress might be unsolvable +# and should use composition not inheritance for encoding/decoding + + +class LCM( # type: ignore[misc] + LCMEncoderMixin, # type: ignore[type-arg] + LCMPubSubBase, +): ... + + +class PickleLCM( # type: ignore[misc] + PickleEncoderMixin, # type: ignore[type-arg] + LCMPubSubBase, +): ... + + +class JpegLCM( # type: ignore[misc] + JpegEncoderMixin, # type: ignore[type-arg] + LCMPubSubBase, +): ... 
+ + +__all__ = [ + "LCM", + "Glob", + "JpegLCM", + "LCMEncoderMixin", + "LCMPubSubBase", + "PickleLCM", + "Topic", + "autoconf", +] diff --git a/dimos/protocol/pubsub/memory.py b/dimos/protocol/pubsub/impl/memory.py similarity index 94% rename from dimos/protocol/pubsub/memory.py rename to dimos/protocol/pubsub/impl/memory.py index e46fc10500..3425a5ee3d 100644 --- a/dimos/protocol/pubsub/memory.py +++ b/dimos/protocol/pubsub/impl/memory.py @@ -17,7 +17,8 @@ from typing import Any from dimos.protocol import encode -from dimos.protocol.pubsub.spec import PubSub, PubSubEncoderMixin +from dimos.protocol.pubsub.encoders import PubSubEncoderMixin +from dimos.protocol.pubsub.spec import PubSub class Memory(PubSub[str, Any]): diff --git a/dimos/protocol/pubsub/redispubsub.py b/dimos/protocol/pubsub/impl/redispubsub.py similarity index 100% rename from dimos/protocol/pubsub/redispubsub.py rename to dimos/protocol/pubsub/impl/redispubsub.py diff --git a/dimos/protocol/pubsub/rospubsub.py b/dimos/protocol/pubsub/impl/rospubsub.py similarity index 97% rename from dimos/protocol/pubsub/rospubsub.py rename to dimos/protocol/pubsub/impl/rospubsub.py index fdb64aa257..1a3c989a4d 100644 --- a/dimos/protocol/pubsub/rospubsub.py +++ b/dimos/protocol/pubsub/impl/rospubsub.py @@ -38,7 +38,7 @@ import uuid from dimos.msgs import DimosMsg -from dimos.protocol.pubsub.rospubsub_conversion import ( +from dimos.protocol.pubsub.impl.rospubsub_conversion import ( derive_ros_type, dimos_to_ros, ros_to_dimos, @@ -104,7 +104,7 @@ def __init__(self, node_name: str | None = None, qos: "QoSProfile | None" = None if qos is not None: self._qos = qos else: - self._qos = QoSProfile( + self._qos = QoSProfile( # type: ignore[no-untyped-call] # Haven't noticed any difference between BEST_EFFORT and RELIABLE for local comms in our tests # ./bin/dev python -m pytest -svm tool -k ros dimos/protocol/pubsub/benchmark/test_benchmark.py # @@ -120,7 +120,7 @@ def start(self) -> None: if self._spin_thread is not 
None: return - if not rclpy.ok(): + if not rclpy.ok(): # type: ignore[attr-defined] rclpy.init() self._stop_event.clear() @@ -160,7 +160,7 @@ def stop(self) -> None: self._node.destroy_publisher(publisher) if self._node: - self._node.destroy_node() + self._node.destroy_node() # type: ignore[no-untyped-call] self._node = None self._executor = None diff --git a/dimos/protocol/pubsub/rospubsub_conversion.py b/dimos/protocol/pubsub/impl/rospubsub_conversion.py similarity index 99% rename from dimos/protocol/pubsub/rospubsub_conversion.py rename to dimos/protocol/pubsub/impl/rospubsub_conversion.py index 18181d76b3..275033a5ac 100644 --- a/dimos/protocol/pubsub/rospubsub_conversion.py +++ b/dimos/protocol/pubsub/impl/rospubsub_conversion.py @@ -30,7 +30,7 @@ if TYPE_CHECKING: from dimos.msgs import DimosMsg - from dimos.protocol.pubsub.rospubsub import ROSMessage + from dimos.protocol.pubsub.impl.rospubsub import ROSMessage # Complex types that need LCM roundtrip (explicit list) diff --git a/dimos/protocol/pubsub/shmpubsub.py b/dimos/protocol/pubsub/impl/shmpubsub.py similarity index 98% rename from dimos/protocol/pubsub/shmpubsub.py rename to dimos/protocol/pubsub/impl/shmpubsub.py index 89efb82ac3..db0a91e579 100644 --- a/dimos/protocol/pubsub/shmpubsub.py +++ b/dimos/protocol/pubsub/impl/shmpubsub.py @@ -32,9 +32,10 @@ import numpy as np import numpy.typing as npt -from dimos.protocol.pubsub.lcmpubsub import LCMEncoderMixin, Topic +from dimos.protocol.pubsub.encoders import LCMEncoderMixin, PickleEncoderMixin +from dimos.protocol.pubsub.impl.lcmpubsub import Topic from dimos.protocol.pubsub.shm.ipc_factory import CpuShmChannel -from dimos.protocol.pubsub.spec import PickleEncoderMixin, PubSub, PubSubEncoderMixin +from dimos.protocol.pubsub.spec import PubSub from dimos.utils.logging_config import setup_logger if TYPE_CHECKING: @@ -335,7 +336,7 @@ def reconfigure(self, topic: Topic, *, capacity: int) -> dict: # type: ignore[t return self._shm.reconfigure(str(topic), 
capacity=capacity) -class LCMSharedMemory( +class LCMSharedMemory( # type: ignore[misc] LCMEncoderMixin, LCMSharedMemoryPubSubBase, ): diff --git a/dimos/protocol/pubsub/test_lcmpubsub.py b/dimos/protocol/pubsub/impl/test_lcmpubsub.py similarity index 99% rename from dimos/protocol/pubsub/test_lcmpubsub.py rename to dimos/protocol/pubsub/impl/test_lcmpubsub.py index 8165be9fef..9467e6a4cc 100644 --- a/dimos/protocol/pubsub/test_lcmpubsub.py +++ b/dimos/protocol/pubsub/impl/test_lcmpubsub.py @@ -19,7 +19,7 @@ import pytest from dimos.msgs.geometry_msgs import Pose, Quaternion, Vector3 -from dimos.protocol.pubsub.lcmpubsub import ( +from dimos.protocol.pubsub.impl.lcmpubsub import ( LCM, LCMPubSubBase, PickleLCM, diff --git a/dimos/protocol/pubsub/test_rospubsub.py b/dimos/protocol/pubsub/impl/test_rospubsub.py similarity index 99% rename from dimos/protocol/pubsub/test_rospubsub.py rename to dimos/protocol/pubsub/impl/test_rospubsub.py index 3a3a020586..6cf49c37b2 100644 --- a/dimos/protocol/pubsub/test_rospubsub.py +++ b/dimos/protocol/pubsub/impl/test_rospubsub.py @@ -23,7 +23,7 @@ from dimos.msgs.geometry_msgs.Twist import Twist from dimos.msgs.geometry_msgs.Vector3 import Vector3 from dimos.msgs.sensor_msgs.PointCloud2 import PointCloud2 -from dimos.protocol.pubsub.rospubsub import DimosROS, ROSTopic +from dimos.protocol.pubsub.impl.rospubsub import DimosROS, ROSTopic # Add msg_name to LCM PointStamped for testing nested message conversion PointStamped.msg_name = "geometry_msgs.PointStamped" diff --git a/dimos/protocol/pubsub/lcmpubsub.py b/dimos/protocol/pubsub/lcmpubsub.py deleted file mode 100644 index 471b8d6076..0000000000 --- a/dimos/protocol/pubsub/lcmpubsub.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import annotations - -from dataclasses import dataclass -from typing import TYPE_CHECKING, Any - -from dimos.protocol.pubsub.spec import PickleEncoderMixin, PubSub, PubSubEncoderMixin -from dimos.protocol.service.lcmservice import ( - LCMConfig, - LCMService, - autoconf, -) -from dimos.utils.logging_config import setup_logger - -if TYPE_CHECKING: - from collections.abc import Callable - import threading - - from dimos.msgs import DimosMsg - -logger = setup_logger() - - -@dataclass -class Topic: - topic: str = "" - lcm_type: type[DimosMsg] | None = None - - def __str__(self) -> str: - if self.lcm_type is None: - return self.topic - return f"{self.topic}#{self.lcm_type.msg_name}" - - -class LCMPubSubBase(LCMService, PubSub[Topic, Any]): - default_config = LCMConfig - _stop_event: threading.Event - _thread: threading.Thread | None - _callbacks: dict[str, list[Callable[[Any], None]]] - - def __init__(self, **kwargs) -> None: # type: ignore[no-untyped-def] - super().__init__(**kwargs) - self._callbacks = {} - - def publish(self, topic: Topic, message: bytes) -> None: - """Publish a message to the specified channel.""" - if self.l is None: - logger.error("Tried to publish after LCM was closed") - return - - self.l.publish(str(topic), message) - - def subscribe( - self, topic: Topic, callback: Callable[[bytes, Topic], Any] - ) -> Callable[[], None]: - if self.l is None: - logger.error("Tried to subscribe after LCM was closed") - - def noop() -> None: - pass - - return noop - - lcm_subscription = self.l.subscribe(str(topic), lambda _, msg: 
callback(msg, topic)) - - # Set queue capacity to 10000 to handle high-volume bursts - lcm_subscription.set_queue_capacity(10000) - - def unsubscribe() -> None: - if self.l is None: - return - self.l.unsubscribe(lcm_subscription) - - return unsubscribe - - -class LCMEncoderMixin(PubSubEncoderMixin[Topic, Any, bytes]): - def encode(self, msg: DimosMsg, _: Topic) -> bytes: - return msg.lcm_encode() - - def decode(self, msg: bytes, topic: Topic) -> DimosMsg: - if topic.lcm_type is None: - raise ValueError( - f"Cannot decode message for topic '{topic.topic}': no lcm_type specified" - ) - return topic.lcm_type.lcm_decode(msg) - - -class JpegEncoderMixin(PubSubEncoderMixin[Topic, Any, bytes]): - def encode(self, msg: DimosMsg, _: Topic) -> bytes: - return msg.lcm_jpeg_encode() # type: ignore[attr-defined, no-any-return] - - def decode(self, msg: bytes, topic: Topic) -> DimosMsg: - if topic.lcm_type is None: - raise ValueError( - f"Cannot decode message for topic '{topic.topic}': no lcm_type specified" - ) - return topic.lcm_type.lcm_jpeg_decode(msg) # type: ignore[attr-defined, no-any-return] - - -class LCM( - LCMEncoderMixin, - LCMPubSubBase, -): ... - - -class PickleLCM( - PickleEncoderMixin, # type: ignore[type-arg] - LCMPubSubBase, -): ... - - -class JpegLCM( - JpegEncoderMixin, - LCMPubSubBase, -): ... - - -__all__ = [ - "LCM", - "JpegLCM", - "LCMEncoderMixin", - "LCMPubSubBase", - "PickleLCM", - "autoconf", -] diff --git a/dimos/protocol/pubsub/patterns.py b/dimos/protocol/pubsub/patterns.py new file mode 100644 index 0000000000..b7c24f4b02 --- /dev/null +++ b/dimos/protocol/pubsub/patterns.py @@ -0,0 +1,98 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import re +from typing import TypeVar + +TopicT = TypeVar("TopicT") +MsgT = TypeVar("MsgT") + + +class Glob: + """Glob pattern that compiles to regex + + Supports: + * - matches any characters except / + ** - matches any characters including / + ? - matches single character + + Example: + Topic(topic=Glob("/sensor/*")) # matches /sensor/temp, /sensor/humidity + Topic(topic=Glob("/robot/**")) # matches /robot/arm/joint1, /robot/leg/motor + """ + + def __init__(self, pattern: str) -> None: + self._glob = pattern + self._regex = self._compile(pattern) + + @staticmethod + def _compile(pattern: str) -> str: + """Convert glob pattern to regex.""" + result = [] + i = 0 + while i < len(pattern): + c = pattern[i] + if c == "*": + if i + 1 < len(pattern) and pattern[i + 1] == "*": + result.append(".*") + i += 2 + else: + result.append("[^/]*") + i += 1 + elif c == "?": + result.append(".") + i += 1 + elif c in r"\^$.|+[]{}()": + result.append("\\" + c) + i += 1 + else: + result.append(c) + i += 1 + return "".join(result) + + @property + def pattern(self) -> str: + """Return the regex pattern string.""" + return self._regex + + @property + def glob(self) -> str: + """Return the original glob pattern.""" + return self._glob + + def __repr__(self) -> str: + return f"Glob({self._glob!r})" + + +Pattern = str | re.Pattern[str] | Glob + + +def pattern_matches(pattern: Pattern, topic_str: str) -> bool: + """Check if a topic string matches a pattern. + + Args: + pattern: A string (exact match), compiled regex, or Glob pattern. 
+ topic_str: The topic string to match against. + + Returns: + True if the topic matches the pattern. + """ + if isinstance(pattern, str): + return pattern == topic_str + elif isinstance(pattern, Glob): + return bool(re.fullmatch(pattern.pattern, topic_str)) + else: + return bool(pattern.fullmatch(topic_str)) diff --git a/dimos/protocol/pubsub/spec.py b/dimos/protocol/pubsub/spec.py index b4e82d3993..fe979fce82 100644 --- a/dimos/protocol/pubsub/spec.py +++ b/dimos/protocol/pubsub/spec.py @@ -17,32 +17,28 @@ from collections.abc import AsyncIterator, Callable from contextlib import asynccontextmanager from dataclasses import dataclass -import pickle -from typing import Any, Generic, TypeVar +from typing import Any, Generic, Protocol, TypeVar, runtime_checkable MsgT = TypeVar("MsgT") TopicT = TypeVar("TopicT") -EncodingT = TypeVar("EncodingT") +MsgT_co = TypeVar("MsgT_co", covariant=True) +TopicT_co = TypeVar("TopicT_co", covariant=True) -class PubSub(Generic[TopicT, MsgT], ABC): - """Abstract base class for pub/sub implementations with sugar methods.""" - - @abstractmethod - def publish(self, topic: TopicT, message: MsgT) -> None: - """Publish a message to a topic.""" - ... +class PubSubBaseMixin(Generic[TopicT, MsgT]): + """Mixin class providing sugar methods for PubSub implementations. + Depends on the basic publish and subscribe methods being implemented. + """ - @abstractmethod def subscribe( self, topic: TopicT, callback: Callable[[MsgT, TopicT], None] ) -> Callable[[], None]: - """Subscribe to a topic with a callback. returns unsubscribe function""" - ... + """Subscribe to a topic. 
Implemented by subclasses.""" + raise NotImplementedError @dataclass(slots=True) class _Subscription: - _bus: "PubSub[Any, Any]" + _bus: "PubSubBaseMixin[Any, Any]" _topic: Any _cb: Callable[[Any, Any], None] _unsubscribe_fn: Callable[[], None] @@ -50,19 +46,16 @@ class _Subscription: def unsubscribe(self) -> None: self._unsubscribe_fn() - # context-manager helper - def __enter__(self): # type: ignore[no-untyped-def] + def __enter__(self) -> "PubSubBaseMixin._Subscription": return self - def __exit__(self, *exc) -> None: # type: ignore[no-untyped-def] + def __exit__(self, *exc: Any) -> None: self.unsubscribe() - # public helper: returns disposable object def sub(self, topic: TopicT, cb: Callable[[MsgT, TopicT], None]) -> "_Subscription": unsubscribe_fn = self.subscribe(topic, cb) return self._Subscription(self, topic, cb, unsubscribe_fn) - # async iterator async def aiter(self, topic: TopicT, *, max_pending: int | None = None) -> AsyncIterator[MsgT]: q: asyncio.Queue[MsgT] = asyncio.Queue(maxsize=max_pending or 0) @@ -76,10 +69,10 @@ def _cb(msg: MsgT, topic: TopicT) -> None: finally: unsubscribe_fn() - # async context manager returning a queue - @asynccontextmanager - async def queue(self, topic: TopicT, *, max_pending: int | None = None): # type: ignore[no-untyped-def] + async def queue( + self, topic: TopicT, *, max_pending: int | None = None + ) -> AsyncIterator[asyncio.Queue[MsgT]]: q: asyncio.Queue[MsgT] = asyncio.Queue(maxsize=max_pending or 0) def _queue_cb(msg: MsgT, topic: TopicT) -> None: @@ -92,59 +85,107 @@ def _queue_cb(msg: MsgT, topic: TopicT) -> None: unsubscribe_fn() -class PubSubEncoderMixin(Generic[TopicT, MsgT, EncodingT], ABC): - """Mixin that encodes messages before publishing and decodes them after receiving. 
+class PubSub(PubSubBaseMixin[TopicT, MsgT], ABC): + """Abstract base class for pub/sub implementations with sugar methods.""" + + @abstractmethod + def publish(self, topic: TopicT, message: MsgT) -> None: + """Publish a message to a topic.""" + ... + + @abstractmethod + def subscribe( + self, topic: TopicT, callback: Callable[[MsgT, TopicT], None] + ) -> Callable[[], None]: + """Subscribe to a topic with a callback. returns unsubscribe function""" + ... - Usage: Just specify encoder and decoder as a subclass: - class MyPubSubWithJSON(PubSubEncoderMixin, MyPubSub): - def encoder(msg, topic): - json.dumps(msg).encode('utf-8') - def decoder(msg, topic): - data: json.loads(data.decode('utf-8')) +# AllPubSub and DiscoveryPubSub are complementary mixins: +# +# AllPubsub supports subscribing to all topics (Redis, LCM, MQTT) +# DiscoveryPubSub supports discovering new topics (ROS) +# +# These capabilities are orthogonal but they can implement one another. +# Implementations should subclass whichever matches their native capability. +# The other method will be synthesized automatically. +# +# - AllPubSub: Native support for subscribing to all topics at once. +# Provides a default subscribe_new_topics() by tracking seen topics. +# +# - DiscoveryPubSub: Native support for discovering new topics as they appear. +# Provides a default subscribe_all() by subscribing to each discovered topic. +class AllPubSub(PubSub[TopicT, MsgT], ABC): + """Mixin for PubSub that supports subscribing to all topics. + + Subclass from this if you support native subscribe-all (e.g. MQTT #, Redis *). + Provides a default subscribe_new_topics() implementation. """ @abstractmethod - def encode(self, msg: MsgT, topic: TopicT) -> EncodingT: ... + def subscribe_all(self, callback: Callable[[MsgT, TopicT], Any]) -> Callable[[], None]: + """Subscribe to all topics.""" + ... 
+ + def subscribe_new_topics(self, callback: Callable[[TopicT], Any]) -> Callable[[], None]: + """Discover new topics by tracking seen topics from subscribe_all.""" + import threading + + seen: set[TopicT] = set() + lock = threading.Lock() + + def on_msg(msg: MsgT, topic: TopicT) -> None: + with lock: + if topic not in seen: + seen.add(topic) + callback(topic) + + return self.subscribe_all(on_msg) + + +# This is for ros for now +class DiscoveryPubSub(PubSub[TopicT, MsgT], ABC): + """Mixin for PubSub that supports discovery of topics. + + Subclass from this if you support topic discovery (e.g. MQTT, Redis, NATS, RabbitMQ). + """ @abstractmethod - def decode(self, msg: EncodingT, topic: TopicT) -> MsgT: ... + def subscribe_new_topics(self, callback: Callable[[TopicT], Any]) -> Callable[[], None]: + """Get notified when new topics are discovered.""" + ... - def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def] - super().__init__(*args, **kwargs) - self._encode_callback_map: dict = {} # type: ignore[type-arg] + def subscribe_all(self, callback: Callable[[MsgT, TopicT], Any]) -> Callable[[], None]: + """Subscribe to all topics by subscribing to each discovered topic.""" + import threading - def publish(self, topic: TopicT, message: MsgT) -> None: - """Encode the message and publish it.""" - if getattr(self, "_stop_event", None) is not None and self._stop_event.is_set(): # type: ignore[attr-defined] - return - encoded_message = self.encode(message, topic) - if encoded_message is None: - return - super().publish(topic, encoded_message) # type: ignore[misc] + subscriptions: list[Callable[[], None]] = [] + lock = threading.Lock() - def subscribe( - self, topic: TopicT, callback: Callable[[MsgT, TopicT], None] - ) -> Callable[[], None]: - """Subscribe with automatic decoding.""" + def on_new_topic(topic: TopicT) -> None: + unsub = self.subscribe(topic, callback) + with lock: + subscriptions.append(unsub) - def wrapper_cb(encoded_data: EncodingT, topic: 
TopicT) -> None: - decoded_message = self.decode(encoded_data, topic) - callback(decoded_message, topic) + discovery_unsub = self.subscribe_new_topics(on_new_topic) - return super().subscribe(topic, wrapper_cb) # type: ignore[misc, no-any-return] + def unsubscribe_all() -> None: + discovery_unsub() + with lock: + subs = subscriptions.copy() + for unsub in subs: + unsub() + return unsubscribe_all -class PickleEncoderMixin(PubSubEncoderMixin[TopicT, MsgT, bytes]): - def encode(self, msg: MsgT, *_: TopicT) -> bytes: # type: ignore[return] - try: - return pickle.dumps(msg) - except Exception as e: - print("Pickle encoding error:", e) - import traceback - traceback.print_exc() - print("Tried to pickle:", msg) +@runtime_checkable +class SubscribeAllCapable(Protocol[MsgT_co, TopicT_co]): + """Protocol for pubsubs that support subscribe_all. - def decode(self, msg: bytes, _: TopicT) -> MsgT: - return pickle.loads(msg) # type: ignore[no-any-return] + Both AllPubSub (native) and DiscoveryPubSub (synthesized) satisfy this. + """ + + def subscribe_all(self, callback: Callable[[Any, Any], Any]) -> Callable[[], None]: + """Subscribe to all messages on all topics.""" + ... diff --git a/dimos/protocol/pubsub/test_encoder.py b/dimos/protocol/pubsub/test_encoder.py index 38aac4664d..dec1e42972 100644 --- a/dimos/protocol/pubsub/test_encoder.py +++ b/dimos/protocol/pubsub/test_encoder.py @@ -17,7 +17,7 @@ import json from typing import Any -from dimos.protocol.pubsub.memory import Memory, MemoryWithJSONEncoder +from dimos.protocol.pubsub.impl.memory import Memory, MemoryWithJSONEncoder def test_json_encoded_pubsub() -> None: diff --git a/dimos/protocol/pubsub/test_pattern_sub.py b/dimos/protocol/pubsub/test_pattern_sub.py new file mode 100644 index 0000000000..99aea49b05 --- /dev/null +++ b/dimos/protocol/pubsub/test_pattern_sub.py @@ -0,0 +1,263 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Grid tests for subscribe_all pattern subscriptions.""" + +from collections.abc import Callable, Generator +from contextlib import AbstractContextManager, contextmanager +from dataclasses import dataclass, field +import re +import time +from typing import Any, Generic, TypeVar + +import pytest + +from dimos.msgs.geometry_msgs import Pose, Quaternion, Vector3 +from dimos.protocol.pubsub.impl.lcmpubsub import LCM, LCMPubSubBase, Topic +from dimos.protocol.pubsub.patterns import Glob +from dimos.protocol.pubsub.spec import AllPubSub, PubSub + +TopicT = TypeVar("TopicT") +MsgT = TypeVar("MsgT") + +# Type alias for (publisher, subscriber) tuple +PubSubPair = tuple[PubSub[TopicT, MsgT], AllPubSub[TopicT, MsgT]] + + +@dataclass +class Case(Generic[TopicT, MsgT]): + """Test case for grid testing pubsub implementations.""" + + name: str + pubsub_context: Callable[[], AbstractContextManager[PubSubPair[TopicT, MsgT]]] + topic_values: list[tuple[TopicT, MsgT]] + tags: set[str] = field(default_factory=set) + # Pattern tests: (pattern_topic, {indices of topic_values that should match}) + glob_patterns: list[tuple[TopicT, set[int]]] = field(default_factory=list) + regex_patterns: list[tuple[TopicT, set[int]]] = field(default_factory=list) + + +@contextmanager +def lcm_typed_context() -> Generator[tuple[LCM, LCM], None, None]: + pub = LCM(autoconf=True) + sub = LCM(autoconf=False) + pub.start() + sub.start() + try: + yield pub, sub 
+ finally: + pub.stop() + sub.stop() + + +@contextmanager +def lcm_bytes_context() -> Generator[tuple[LCMPubSubBase, LCMPubSubBase], None, None]: + pub = LCMPubSubBase(autoconf=True) + sub = LCMPubSubBase(autoconf=False) + pub.start() + sub.start() + try: + yield pub, sub + finally: + pub.stop() + sub.stop() + + +testcases: list[Case[Any, Any]] = [ + Case( + name="lcm_typed", + pubsub_context=lcm_typed_context, + topic_values=[ + (Topic("/sensor/position", Vector3), Vector3(1, 2, 3)), + (Topic("/sensor/orientation", Quaternion), Quaternion(0, 0, 0, 1)), + (Topic("/robot/arm", Pose), Pose(Vector3(4, 5, 6), Quaternion(0, 0, 0, 1))), + ], + tags={"all", "glob", "regex"}, + glob_patterns=[ + (Topic(topic=Glob("/sensor/*")), {0, 1}), + (Topic(topic=Glob("/**/arm")), {2}), + (Topic(topic=Glob("/**")), {0, 1, 2}), + ], + regex_patterns=[ + (Topic(re.compile(r"/sensor/.*")), {0, 1}), + (Topic(re.compile(r".*/arm"), Pose), {2}), + (Topic(re.compile(r".*/arm")), {2}), + (Topic(re.compile(r".*/arm#geometry.*")), {2}), + ], + ), + Case( + name="lcm_bytes", + pubsub_context=lcm_bytes_context, + topic_values=[ + (Topic("/sensor/temp"), b"temp"), + (Topic("/sensor/humidity"), b"humidity"), + (Topic("/robot/arm"), b"arm"), + ], + tags={"all", "glob", "regex"}, + glob_patterns=[ + (Topic(topic=Glob("/sensor/*")), {0, 1}), + (Topic(topic=Glob("/**/arm")), {2}), + (Topic(topic=Glob("/**")), {0, 1, 2}), + ], + regex_patterns=[ + (Topic(re.compile(r"/sensor/.*")), {0, 1}), + (Topic(re.compile(r".*/arm")), {2}), + ], + ), +] + +# Build filtered lists for parametrize +all_cases = [c for c in testcases if "all" in c.tags] +glob_cases = [c for c in testcases if "glob" in c.tags] +regex_cases = [c for c in testcases if "regex" in c.tags] + + +def _topic_matches_prefix(topic: Any, prefix: str = "/") -> bool: + """Check if topic string starts with prefix. + + LCM uses UDP multicast, so messages from other tests running in parallel + can leak into subscribe_all callbacks. 
We filter to only our test topics. + """ + topic_str = str(topic.topic if hasattr(topic, "topic") else topic) + return topic_str.startswith(prefix) + + +@pytest.mark.parametrize("tc", all_cases, ids=lambda c: c.name) +def test_subscribe_all_receives_all_topics(tc: Case[Any, Any]) -> None: + """Test that subscribe_all receives messages from all topics.""" + received: list[tuple[Any, Any]] = [] + + with tc.pubsub_context() as (pub, sub): + # Filter to only our test topics (LCM multicast can leak from parallel tests) + sub.subscribe_all(lambda msg, topic: received.append((msg, topic))) + time.sleep(0.01) # Allow subscription to be ready + + for topic, value in tc.topic_values: + pub.publish(topic, value) + + time.sleep(0.01) + + assert len(received) == len(tc.topic_values) + + # Verify all messages were received + received_msgs = [r[0] for r in received] + expected_msgs = [v for _, v in tc.topic_values] + for expected in expected_msgs: + assert expected in received_msgs + + +@pytest.mark.parametrize("tc", all_cases, ids=lambda c: c.name) +def test_subscribe_all_unsubscribe(tc: Case[Any, Any]) -> None: + """Test that unsubscribe stops receiving messages.""" + received: list[tuple[Any, Any]] = [] + topic, value = tc.topic_values[0] + + with tc.pubsub_context() as (pub, sub): + unsub = sub.subscribe_all(lambda msg, topic: received.append((msg, topic))) + time.sleep(0.01) # Allow subscription to be ready + + pub.publish(topic, value) + time.sleep(0.01) + assert len(received) == 1 + + unsub() + + pub.publish(topic, value) + time.sleep(0.01) + assert len(received) == 1 # No new messages + + +@pytest.mark.parametrize("tc", all_cases, ids=lambda c: c.name) +def test_subscribe_all_with_regular_subscribe(tc: Case[Any, Any]) -> None: + """Test that subscribe_all coexists with regular subscriptions.""" + all_received: list[tuple[Any, Any]] = [] + specific_received: list[tuple[Any, Any]] = [] + topic1, value1 = tc.topic_values[0] + topic2, value2 = tc.topic_values[1] + + with 
tc.pubsub_context() as (pub, sub): + sub.subscribe_all( + lambda msg, topic: all_received.append((msg, topic)) + if _topic_matches_prefix(topic) + else None + ) + sub.subscribe(topic1, lambda msg, topic: specific_received.append((msg, topic))) + time.sleep(0.01) # Allow subscriptions to be ready + + pub.publish(topic1, value1) + pub.publish(topic2, value2) + time.sleep(0.01) + + # subscribe_all gets both + assert len(all_received) == 2 + + # specific subscription gets only topic1 + assert len(specific_received) == 1 + assert specific_received[0][0] == value1 + + +@pytest.mark.parametrize("tc", glob_cases, ids=lambda c: c.name) +def test_subscribe_glob(tc: Case[Any, Any]) -> None: + """Test that glob pattern subscriptions receive only matching topics.""" + for pattern_topic, expected_indices in tc.glob_patterns: + received: list[tuple[Any, Any]] = [] + + with tc.pubsub_context() as (pub, sub): + sub.subscribe(pattern_topic, lambda msg, topic, r=received: r.append((msg, topic))) + time.sleep(0.01) # Allow subscription to be ready + + for topic, value in tc.topic_values: + pub.publish(topic, value) + + time.sleep(0.01) + + assert len(received) == len(expected_indices), ( + f"Expected {len(expected_indices)} messages for pattern {pattern_topic}, " + f"got {len(received)}" + ) + + # Verify we received the expected messages + expected_msgs = [tc.topic_values[i][1] for i in expected_indices] + received_msgs = [r[0] for r in received] + for expected in expected_msgs: + assert expected in received_msgs + + +@pytest.mark.parametrize("tc", regex_cases, ids=lambda c: c.name) +def test_subscribe_regex(tc: Case[Any, Any]) -> None: + """Test that regex pattern subscriptions receive only matching topics.""" + for pattern_topic, expected_indices in tc.regex_patterns: + received: list[tuple[Any, Any]] = [] + + with tc.pubsub_context() as (pub, sub): + sub.subscribe(pattern_topic, lambda msg, topic, r=received: r.append((msg, topic))) + + time.sleep(0.01) + + for topic, value in 
tc.topic_values: + pub.publish(topic, value) + + time.sleep(0.01) + + assert len(received) == len(expected_indices), ( + f"Expected {len(expected_indices)} messages for pattern {pattern_topic}, " + f"got {len(received)}" + ) + + # Verify we received the expected messages + expected_msgs = [tc.topic_values[i][1] for i in expected_indices] + received_msgs = [r[0] for r in received] + for expected in expected_msgs: + assert expected in received_msgs diff --git a/dimos/protocol/pubsub/test_patterns.py b/dimos/protocol/pubsub/test_patterns.py new file mode 100644 index 0000000000..6d0ce35016 --- /dev/null +++ b/dimos/protocol/pubsub/test_patterns.py @@ -0,0 +1,132 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for pattern matching utilities.""" + +import re + +from dimos.protocol.pubsub.patterns import Glob, pattern_matches + + +class TestPatternMatchesString: + """Tests for exact string matching.""" + + def test_exact_match(self) -> None: + assert pattern_matches("/sensor/temp", "/sensor/temp") is True + + def test_no_match(self) -> None: + assert pattern_matches("/sensor/temp", "/sensor/humidity") is False + + def test_empty_string(self) -> None: + assert pattern_matches("", "") is True + assert pattern_matches("", "/sensor") is False + + def test_partial_match_fails(self) -> None: + assert pattern_matches("/sensor", "/sensor/temp") is False + assert pattern_matches("/sensor/temp", "/sensor") is False + + +class TestPatternMatchesGlob: + """Tests for Glob pattern matching.""" + + def test_single_wildcard(self) -> None: + glob = Glob("/sensor/*") + assert pattern_matches(glob, "/sensor/temp") is True + assert pattern_matches(glob, "/sensor/humidity") is True + assert pattern_matches(glob, "/sensor/") is True + + def test_single_wildcard_no_slash(self) -> None: + glob = Glob("/sensor/*") + assert pattern_matches(glob, "/sensor/nested/path") is False + + def test_double_wildcard(self) -> None: + glob = Glob("/robot/**") + assert pattern_matches(glob, "/robot/arm") is True + assert pattern_matches(glob, "/robot/arm/joint1") is True + assert pattern_matches(glob, "/robot/leg/motor/encoder") is True + + def test_question_mark(self) -> None: + glob = Glob("/sensor/?") + assert pattern_matches(glob, "/sensor/a") is True + assert pattern_matches(glob, "/sensor/1") is True + assert pattern_matches(glob, "/sensor/ab") is False + + def test_mixed_patterns(self) -> None: + glob = Glob("/robot/*/joint?") + assert pattern_matches(glob, "/robot/arm/joint1") is True + assert pattern_matches(glob, "/robot/leg/joint2") is True + assert pattern_matches(glob, "/robot/arm/joint12") is False + assert pattern_matches(glob, "/robot/arm/nested/joint1") is False + + def 
test_no_wildcards(self) -> None: + glob = Glob("/exact/path") + assert pattern_matches(glob, "/exact/path") is True + assert pattern_matches(glob, "/exact/other") is False + + def test_double_wildcard_middle(self) -> None: + glob = Glob("/start/**/end") + # Note: ** becomes .* so /start/**/end requires a / before end + assert pattern_matches(glob, "/start//end") is True + assert pattern_matches(glob, "/start/middle/end") is True + assert pattern_matches(glob, "/start/a/b/c/end") is True + + +class TestPatternMatchesRegex: + """Tests for compiled regex pattern matching.""" + + def test_simple_regex(self) -> None: + pattern = re.compile(r"/sensor/\w+") + assert pattern_matches(pattern, "/sensor/temp") is True + assert pattern_matches(pattern, "/sensor/123") is True + + def test_regex_anchored(self) -> None: + pattern = re.compile(r"/sensor/temp") + assert pattern_matches(pattern, "/sensor/temp") is True + assert pattern_matches(pattern, "/sensor/temperature") is False + + def test_regex_groups(self) -> None: + pattern = re.compile(r"/robot/(arm|leg)/joint(\d+)") + assert pattern_matches(pattern, "/robot/arm/joint1") is True + assert pattern_matches(pattern, "/robot/leg/joint42") is True + assert pattern_matches(pattern, "/robot/head/joint1") is False + + def test_regex_optional(self) -> None: + pattern = re.compile(r"/sensor/temp/?") + assert pattern_matches(pattern, "/sensor/temp") is True + assert pattern_matches(pattern, "/sensor/temp/") is True + + +class TestGlobClass: + """Tests for the Glob class itself.""" + + def test_pattern_property(self) -> None: + glob = Glob("/sensor/*") + assert glob.pattern == "/sensor/[^/]*" + + def test_glob_property(self) -> None: + glob = Glob("/sensor/*") + assert glob.glob == "/sensor/*" + + def test_repr(self) -> None: + glob = Glob("/sensor/*") + assert repr(glob) == "Glob('/sensor/*')" + + def test_double_star_regex(self) -> None: + glob = Glob("/robot/**") + assert glob.pattern == "/robot/.*" + + def 
test_special_chars_escaped(self) -> None: + glob = Glob("/path.with.dots") + assert pattern_matches(glob, "/path.with.dots") is True + assert pattern_matches(glob, "/pathXwithXdots") is False diff --git a/dimos/protocol/pubsub/test_spec.py b/dimos/protocol/pubsub/test_spec.py index 357e1dfa1e..0bdfa62628 100644 --- a/dimos/protocol/pubsub/test_spec.py +++ b/dimos/protocol/pubsub/test_spec.py @@ -23,8 +23,8 @@ import pytest from dimos.msgs.geometry_msgs import Vector3 -from dimos.protocol.pubsub.lcmpubsub import LCM, Topic -from dimos.protocol.pubsub.memory import Memory +from dimos.protocol.pubsub.impl.lcmpubsub import LCM, Topic +from dimos.protocol.pubsub.impl.memory import Memory @contextmanager @@ -44,7 +44,7 @@ def memory_context() -> Generator[Memory, None, None]: ] try: - from dimos.protocol.pubsub.redispubsub import Redis + from dimos.protocol.pubsub.impl.redispubsub import Redis @contextmanager def redis_context() -> Generator[Redis, None, None]: @@ -70,7 +70,7 @@ def redis_context() -> Generator[Redis, None, None]: QoSReliabilityPolicy, ) - from dimos.protocol.pubsub.rospubsub import RawROS, RawROSTopic + from dimos.protocol.pubsub.impl.rospubsub import RawROS, RawROSTopic # Use RELIABLE QoS with larger depth for testing _test_qos = QoSProfile( @@ -124,7 +124,7 @@ def lcm_context() -> Generator[LCM, None, None]: ) -from dimos.protocol.pubsub.shmpubsub import PickleSharedMemory +from dimos.protocol.pubsub.impl.shmpubsub import PickleSharedMemory @contextmanager diff --git a/dimos/protocol/rpc/pubsubrpc.py b/dimos/protocol/rpc/pubsubrpc.py index 05df80aec0..3b77227218 100644 --- a/dimos/protocol/rpc/pubsubrpc.py +++ b/dimos/protocol/rpc/pubsubrpc.py @@ -28,8 +28,8 @@ ) from dimos.constants import LCM_MAX_CHANNEL_NAME_LENGTH -from dimos.protocol.pubsub.lcmpubsub import PickleLCM, Topic -from dimos.protocol.pubsub.shmpubsub import PickleSharedMemory +from dimos.protocol.pubsub.impl.lcmpubsub import PickleLCM, Topic +from dimos.protocol.pubsub.impl.shmpubsub 
import PickleSharedMemory from dimos.protocol.pubsub.spec import PubSub from dimos.protocol.rpc.rpc_utils import deserialize_exception, serialize_exception from dimos.protocol.rpc.spec import Args, RPCSpec @@ -81,7 +81,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: def __getstate__(self) -> dict[str, Any]: state: dict[str, Any] if hasattr(super(), "__getstate__"): - state = super().__getstate__() # type: ignore[assignment] + state = super().__getstate__() # type: ignore[assignment, misc] else: state = self.__dict__.copy() diff --git a/dimos/protocol/rpc/redisrpc.py b/dimos/protocol/rpc/redisrpc.py index aa8a5b87c5..32c3794bf4 100644 --- a/dimos/protocol/rpc/redisrpc.py +++ b/dimos/protocol/rpc/redisrpc.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from dimos.protocol.pubsub.redispubsub import Redis +from dimos.protocol.pubsub.impl.redispubsub import Redis from dimos.protocol.rpc.pubsubrpc import PubSubRPCMixin diff --git a/dimos/protocol/rpc/spec.py b/dimos/protocol/rpc/spec.py index 1c502abe24..47ad77e825 100644 --- a/dimos/protocol/rpc/spec.py +++ b/dimos/protocol/rpc/spec.py @@ -41,6 +41,8 @@ def call(self, name: str, arguments: Args, cb: Callable[[Any], None]) -> Callabl def call(self, name: str, arguments: Args, cb: Callable | None) -> Callable[[], Any] | None: ... # type: ignore[type-arg] + def call_nowait(self, name: str, arguments: Args) -> None: ... 
+ # we expect to crash if we don't get a return value after 10 seconds # but callers can override this timeout for extra long functions def call_sync( diff --git a/dimos/protocol/service/__init__.py b/dimos/protocol/service/__init__.py index 4726ad5f83..fb9df08ca9 100644 --- a/dimos/protocol/service/__init__.py +++ b/dimos/protocol/service/__init__.py @@ -1,2 +1,8 @@ from dimos.protocol.service.lcmservice import LCMService -from dimos.protocol.service.spec import Configurable, Service +from dimos.protocol.service.spec import Configurable as Configurable, Service as Service + +__all__ = [ + "Configurable", + "LCMService", + "Service", +] diff --git a/dimos/protocol/service/ddsservice.py b/dimos/protocol/service/ddsservice.py new file mode 100644 index 0000000000..6ed04c07ad --- /dev/null +++ b/dimos/protocol/service/ddsservice.py @@ -0,0 +1,80 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from dataclasses import dataclass +import threading +from typing import TYPE_CHECKING, Any + +try: + from cyclonedds.domain import DomainParticipant + + DDS_AVAILABLE = True +except ImportError: + DDS_AVAILABLE = False + DomainParticipant = None # type: ignore[assignment, misc] + +from dimos.protocol.service.spec import Service +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from cyclonedds.qos import Qos + +logger = setup_logger() + +_participants: dict[int, DomainParticipant] = {} +_participants_lock = threading.Lock() + + +@dataclass +class DDSConfig: + """Configuration for DDS service.""" + + domain_id: int = 0 + qos: Qos | None = None + + +class DDSService(Service[DDSConfig]): + default_config = DDSConfig + + def __init__(self, **kwargs: Any) -> None: + super().__init__(**kwargs) + + def start(self) -> None: + """Start the DDS service.""" + domain_id = self.config.domain_id + with _participants_lock: + if domain_id not in _participants: + _participants[domain_id] = DomainParticipant(domain_id) + logger.info(f"DDS service started with Cyclone DDS domain {domain_id}") + super().start() + + def stop(self) -> None: + """Stop the DDS service.""" + super().stop() + + @property + def participant(self) -> DomainParticipant: + """Get the DomainParticipant instance for this service's domain.""" + domain_id = self.config.domain_id + if domain_id not in _participants: + raise RuntimeError(f"DomainParticipant not initialized for domain {domain_id}") + return _participants[domain_id] + + +__all__ = [ + "DDSConfig", + "DDSService", +] diff --git a/dimos/protocol/service/lcmservice.py b/dimos/protocol/service/lcmservice.py index cf0a0647d8..4655780fb3 100644 --- a/dimos/protocol/service/lcmservice.py +++ b/dimos/protocol/service/lcmservice.py @@ -20,7 +20,6 @@ import platform import threading import traceback -from typing import Protocol, runtime_checkable import lcm @@ -79,34 +78,12 @@ def 
__post_init__(self) -> None: self.url = _DEFAULT_LCM_URL -@runtime_checkable -class LCMMsg(Protocol): - msg_name: str - - @classmethod - def lcm_decode(cls, data: bytes) -> LCMMsg: - """Decode bytes into an LCM message instance.""" - ... - - def lcm_encode(self) -> bytes: - """Encode this message instance into bytes.""" - ... - - -@dataclass -class Topic: - topic: str = "" - lcm_type: type[LCMMsg] | None = None - - def __str__(self) -> str: - if self.lcm_type is None: - return self.topic - return f"{self.topic}#{self.lcm_type.msg_name}" - - _LCM_LOOP_TIMEOUT = 50 +# this class just sets up cpp LCM instance +# and runs its handle loop in a thread +# higher order stuff is done by pubsub/impl/lcmpubsub.py class LCMService(Service[LCMConfig]): default_config = LCMConfig l: lcm.LCM | None diff --git a/dimos/protocol/service/test_lcmservice.py b/dimos/protocol/service/test_lcmservice.py index a85462cf31..4231302426 100644 --- a/dimos/protocol/service/test_lcmservice.py +++ b/dimos/protocol/service/test_lcmservice.py @@ -12,19 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import os -import pickle import threading import time from unittest.mock import MagicMock, patch -import pytest - +from dimos.protocol.pubsub.impl.lcmpubsub import Topic from dimos.protocol.service.lcmservice import ( _DEFAULT_LCM_URL, LCMConfig, LCMService, - Topic, autoconf, ) from dimos.protocol.service.system_configurator import ( diff --git a/dimos/protocol/service/test_system_configurator.py b/dimos/protocol/service/test_system_configurator.py index 22bb662044..07f8ede64c 100644 --- a/dimos/protocol/service/test_system_configurator.py +++ b/dimos/protocol/service/test_system_configurator.py @@ -14,7 +14,6 @@ import os import resource -import subprocess from unittest.mock import MagicMock, patch import pytest diff --git a/dimos/protocol/skill/__init__.py b/dimos/protocol/skill/__init__.py deleted file mode 100644 index 15ebf0b59c..0000000000 --- a/dimos/protocol/skill/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from dimos.protocol.skill.skill import SkillContainer, skill diff --git a/dimos/protocol/skill/comms.py b/dimos/protocol/skill/comms.py deleted file mode 100644 index 0720140b79..0000000000 --- a/dimos/protocol/skill/comms.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from __future__ import annotations - -from abc import abstractmethod -from dataclasses import dataclass -from typing import TYPE_CHECKING, Generic, TypeVar - -from dimos.protocol.pubsub.lcmpubsub import PickleLCM -from dimos.protocol.service import Service # type: ignore[attr-defined] -from dimos.protocol.skill.type import SkillMsg - -if TYPE_CHECKING: - from collections.abc import Callable - - from dimos.protocol.pubsub.spec import PubSub - -# defines a protocol for communication between skills and agents -# it has simple requirements of pub/sub semantics capable of sending and receiving SkillMsg objects - - -class SkillCommsSpec: - @abstractmethod - def publish(self, msg: SkillMsg) -> None: ... # type: ignore[type-arg] - - @abstractmethod - def subscribe(self, cb: Callable[[SkillMsg], None]) -> None: ... # type: ignore[type-arg] - - @abstractmethod - def start(self) -> None: ... - - @abstractmethod - def stop(self) -> None: ... - - -MsgT = TypeVar("MsgT") -TopicT = TypeVar("TopicT") - - -@dataclass -class PubSubCommsConfig(Generic[TopicT, MsgT]): - topic: TopicT | None = None - pubsub: type[PubSub[TopicT, MsgT]] | PubSub[TopicT, MsgT] | None = None - autostart: bool = True - - -# implementation of the SkillComms using any standard PubSub mechanism -class PubSubComms(Service[PubSubCommsConfig], SkillCommsSpec): # type: ignore[type-arg] - default_config: type[PubSubCommsConfig] = PubSubCommsConfig # type: ignore[type-arg] - - def __init__(self, **kwargs) -> None: # type: ignore[no-untyped-def] - super().__init__(**kwargs) - pubsub_config = getattr(self.config, "pubsub", None) - if pubsub_config is not None: - if callable(pubsub_config): - self.pubsub = pubsub_config() - else: - self.pubsub = pubsub_config - else: - raise ValueError("PubSub configuration is missing") - - if getattr(self.config, "autostart", True): - self.start() - - def start(self) -> None: - self.pubsub.start() - - def stop(self) -> None: - self.pubsub.stop() - - def publish(self, msg: SkillMsg) 
-> None: # type: ignore[type-arg] - self.pubsub.publish(self.config.topic, msg) - - def subscribe(self, cb: Callable[[SkillMsg], None]) -> None: # type: ignore[type-arg] - self.pubsub.subscribe(self.config.topic, lambda msg, topic: cb(msg)) - - -@dataclass -class LCMCommsConfig(PubSubCommsConfig[str, SkillMsg]): # type: ignore[type-arg] - topic: str = "/skill" - pubsub: type[PubSub] | PubSub | None = PickleLCM # type: ignore[type-arg] - # lcm needs to be started only if receiving - # skill comms are broadcast only in modules so we don't autostart - autostart: bool = False - - -class LCMSkillComms(PubSubComms): - default_config: type[LCMCommsConfig] = LCMCommsConfig diff --git a/dimos/protocol/skill/coordinator.py b/dimos/protocol/skill/coordinator.py deleted file mode 100644 index 95fc8844d4..0000000000 --- a/dimos/protocol/skill/coordinator.py +++ /dev/null @@ -1,637 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import asyncio -from copy import copy -from dataclasses import dataclass -from enum import Enum -import json -import time -from typing import Any, Literal - -from langchain_core.messages import ToolMessage -from langchain_core.tools import tool as langchain_tool -from rich.console import Console -from rich.table import Table -from rich.text import Text - -from dimos.core import rpc -from dimos.core.module import Module, ModuleConfig -from dimos.protocol.skill.comms import LCMSkillComms, SkillCommsSpec -from dimos.protocol.skill.skill import SkillConfig, SkillContainer # type: ignore[attr-defined] -from dimos.protocol.skill.type import MsgType, Output, Reducer, Return, SkillMsg, Stream -from dimos.protocol.skill.utils import interpret_tool_call_args -from dimos.utils.logging_config import setup_logger - -logger = setup_logger() - - -@dataclass -class SkillCoordinatorConfig(ModuleConfig): - skill_transport: type[SkillCommsSpec] = LCMSkillComms - - -class SkillStateEnum(Enum): - pending = 0 - running = 1 - completed = 2 - error = 3 - - def colored_name(self) -> Text: - """Return the state name as a rich Text object with color.""" - colors = { - SkillStateEnum.pending: "yellow", - SkillStateEnum.running: "blue", - SkillStateEnum.completed: "green", - SkillStateEnum.error: "red", - } - return Text(self.name, style=colors.get(self, "white")) - - -# This object maintains the state of a skill run on a caller end -class SkillState: - call_id: str - name: str - state: SkillStateEnum - skill_config: SkillConfig - - msg_count: int = 0 - sent_tool_msg: bool = False - - start_msg: SkillMsg[Literal[MsgType.start]] = None # type: ignore[assignment] - end_msg: SkillMsg[Literal[MsgType.ret]] = None # type: ignore[assignment] - error_msg: SkillMsg[Literal[MsgType.error]] = None # type: ignore[assignment] - ret_msg: SkillMsg[Literal[MsgType.ret]] = None # type: ignore[assignment] - reduced_stream_msg: list[SkillMsg[Literal[MsgType.reduced_stream]]] = None # type: ignore[assignment] 
- - def __init__(self, call_id: str, name: str, skill_config: SkillConfig | None = None) -> None: - super().__init__() - - self.skill_config = skill_config or SkillConfig( - name=name, - stream=Stream.none, - ret=Return.none, - reducer=Reducer.all, - output=Output.standard, - schema={}, - ) - - self.state = SkillStateEnum.pending - self.call_id = call_id - self.name = name - - def duration(self) -> float: - """Calculate the duration of the skill run.""" - if self.start_msg and self.end_msg: - return self.end_msg.ts - self.start_msg.ts - elif self.start_msg: - return time.time() - self.start_msg.ts - else: - return 0.0 - - def content(self) -> dict[str, Any] | str | int | float | None: # type: ignore[return] - if self.state == SkillStateEnum.running: - if self.reduced_stream_msg: - return self.reduced_stream_msg.content # type: ignore[attr-defined, no-any-return] - - if self.state == SkillStateEnum.completed: - if self.reduced_stream_msg: # are we a streaming skill? - return self.reduced_stream_msg.content # type: ignore[attr-defined, no-any-return] - return self.ret_msg.content # type: ignore[return-value] - - if self.state == SkillStateEnum.error: - print("Error msg:", self.error_msg.content) - if self.reduced_stream_msg: - (self.reduced_stream_msg.content + "\n" + self.error_msg.content) # type: ignore[attr-defined] - else: - return self.error_msg.content # type: ignore[return-value] - - def agent_encode(self) -> ToolMessage | str: - # tool call can emit a single ToolMessage - # subsequent messages are considered SituationalAwarenessMessages, - # those are collapsed into a HumanMessage, that's artificially prepended to history - - if not self.sent_tool_msg: - self.sent_tool_msg = True - return ToolMessage( - self.content() or "Querying, please wait, you will receive a response soon.", # type: ignore[arg-type] - name=self.name, - tool_call_id=self.call_id, - ) - else: - return json.dumps( - { - "name": self.name, - "call_id": self.call_id, - "state": 
self.state.name, - "data": self.content(), - "ran_for": self.duration(), - } - ) - - # returns True if the agent should be called for this message - def handle_msg(self, msg: SkillMsg) -> bool: # type: ignore[type-arg] - self.msg_count += 1 - if msg.type == MsgType.stream: - self.state = SkillStateEnum.running - self.reduced_stream_msg = self.skill_config.reducer(self.reduced_stream_msg, msg) # type: ignore[arg-type, assignment] - - if ( - self.skill_config.stream == Stream.none - or self.skill_config.stream == Stream.passive - ): - return False - - if self.skill_config.stream == Stream.call_agent: - return True - - if msg.type == MsgType.ret: - self.state = SkillStateEnum.completed - self.ret_msg = msg - if self.skill_config.ret == Return.call_agent: - return True - return False - - if msg.type == MsgType.error: - self.state = SkillStateEnum.error - self.error_msg = msg - return True - - if msg.type == MsgType.start: - self.state = SkillStateEnum.running - self.start_msg = msg - return False - - return False - - def __len__(self) -> int: - return self.msg_count - - def __str__(self) -> str: - # For standard string representation, we'll use rich's Console to render the colored text - console = Console(force_terminal=True, legacy_windows=False) - colored_state = self.state.colored_name() - - # Build the parts of the string - parts = [Text(f"SkillState({self.name} "), colored_state, Text(f", call_id={self.call_id}")] - - if self.state == SkillStateEnum.completed or self.state == SkillStateEnum.error: - parts.append(Text(", ran for=")) - else: - parts.append(Text(", running for=")) - - parts.append(Text(f"{self.duration():.2f}s")) - - if len(self): - parts.append(Text(f", msg_count={self.msg_count})")) - else: - parts.append(Text(", No Messages)")) - - # Combine all parts into a single Text object - combined = Text() - for part in parts: - combined.append(part) - - # Render to string with console - with console.capture() as capture: - console.print(combined, end="") - 
return capture.get() - - -# subclassed the dict just to have a better string representation -class SkillStateDict(dict[str, SkillState]): - """Custom dict for skill states with better string representation.""" - - def table(self) -> Table: - # Add skill states section - states_table = Table(show_header=True) - states_table.add_column("Call ID", style="dim", width=12) - states_table.add_column("Skill", style="white") - states_table.add_column("State", style="white") - states_table.add_column("Duration", style="yellow") - states_table.add_column("Messages", style="dim") - - for call_id, skill_state in self.items(): - # Get colored state name - state_text = skill_state.state.colored_name() - - # Duration formatting - if ( - skill_state.state == SkillStateEnum.completed - or skill_state.state == SkillStateEnum.error - ): - duration = f"{skill_state.duration():.2f}s" - else: - duration = f"{skill_state.duration():.2f}s..." - - # Messages info - msg_count = str(len(skill_state)) - - states_table.add_row( - call_id[:8] + "...", skill_state.name, state_text, duration, msg_count - ) - - if not self: - states_table.add_row("", "[dim]No active skills[/dim]", "", "", "") - return states_table - - def __str__(self) -> str: - console = Console(force_terminal=True, legacy_windows=False) - - # Render to string with title above - with console.capture() as capture: - console.print(Text(" SkillState", style="bold blue")) - console.print(self.table()) - return capture.get().strip() - - -# This class is responsible for managing the lifecycle of skills, -# handling skill calls, and coordinating communication between the agent and skills. -# -# It aggregates skills from static and dynamic containers, manages skill states, -# and decides when to notify the agent about updates. 
-class SkillCoordinator(Module): - default_config = SkillCoordinatorConfig - empty: bool = True - - _static_containers: list[SkillContainer] - _dynamic_containers: list[SkillContainer] - _skill_state: SkillStateDict # key is call_id, not skill_name - _skills: dict[str, SkillConfig] - _updates_available: asyncio.Event | None - _agent_loop: asyncio.AbstractEventLoop | None - - def __init__(self, **kwargs: Any) -> None: - super().__init__(**kwargs) - self._static_containers = [] - self._dynamic_containers = [] - self._skills = {} - self._skill_state = SkillStateDict() - - # Defer event creation until we're in the correct loop context - self._updates_available = None - self._agent_loop = None - self._pending_notifications = 0 # Count pending notifications - self._closed_coord = False - self._transport_unsub_fn = None - - def _ensure_updates_available(self) -> asyncio.Event: - """Lazily create the updates available event in the correct loop context.""" - if self._updates_available is None: - # Create the event in the current running loop, not the stored loop - try: - loop = asyncio.get_running_loop() - # print(f"[DEBUG] Creating _updates_available event in current loop {id(loop)}") - # Always use the current running loop for the event - # This ensures the event is created in the context where it will be used - self._updates_available = asyncio.Event() - # Store the loop where the event was created - this is the agent's loop - self._agent_loop = loop - # print( - # f"[DEBUG] Created _updates_available event {id(self._updates_available)} in agent loop {id(loop)}" - # ) - except RuntimeError: - # No running loop, defer event creation until we have the proper context - # print(f"[DEBUG] No running loop, deferring event creation") - # Don't create the event yet - wait for the proper loop context - pass - else: - ... 
- # print(f"[DEBUG] Reusing _updates_available event {id(self._updates_available)}") - return self._updates_available # type: ignore[return-value] - - @rpc - def start(self) -> None: - super().start() - self.skill_transport.start() - self._transport_unsub_fn = self.skill_transport.subscribe(self.handle_message) - - @rpc - def stop(self) -> None: - self._close_module() - self._closed_coord = True - self.skill_transport.stop() - if self._transport_unsub_fn: - self._transport_unsub_fn() - - # Stop all registered skill containers - for container in self._static_containers: - container.stop() - for container in self._dynamic_containers: - container.stop() - - super().stop() - - def len(self) -> int: - return len(self._skills) - - def __len__(self) -> int: - return self.len() - - # this can be converted to non-langchain json schema output - # and langchain takes this output as well - # just faster for now - def get_tools(self) -> list[dict]: # type: ignore[type-arg] - return [ - langchain_tool(skill_config.f) # type: ignore[arg-type, misc] - for skill_config in self.skills().values() - if not skill_config.hide_skill - ] - - # internal skill call - def call_skill( - self, call_id: str | Literal[False], skill_name: str, args: dict[str, Any] - ) -> None: - if not call_id: - call_id = str(time.time()) - skill_config = self.get_skill_config(skill_name) - if not skill_config: - logger.error( - f"Skill {skill_name} not found in registered skills, but agent tried to call it (did a dynamic skill expire?)" - ) - return - - self._skill_state[call_id] = SkillState( - call_id=call_id, name=skill_name, skill_config=skill_config - ) - - # TODO agent often calls the skill again if previous response is still loading. - # maybe create a new skill_state linked to a previous one? 
not sure - - arg_keywords = args.get("args") or {} - arg_list = [] - - if isinstance(arg_keywords, list): - arg_list = arg_keywords - arg_keywords = {} - - arg_list, arg_keywords = interpret_tool_call_args(args) - - return skill_config.call( # type: ignore[no-any-return] - call_id, - *arg_list, - **arg_keywords, - ) - - # Receives a message from active skill - # Updates local skill state (appends to streamed data if needed etc) - # - # Checks if agent needs to be notified (if ToolConfig has Return=call_agent or Stream=call_agent) - def handle_message(self, msg: SkillMsg) -> None: # type: ignore[type-arg] - if self._closed_coord: - import traceback - - traceback.print_stack() - return - # logger.info(f"SkillMsg from {msg.skill_name}, {msg.call_id} - {msg}") - - if self._skill_state.get(msg.call_id) is None: - logger.warn( - f"Skill state for {msg.skill_name} (call_id={msg.call_id}) not found, (skill not called by our agent?) initializing. (message received: {msg})" - ) - self._skill_state[msg.call_id] = SkillState(call_id=msg.call_id, name=msg.skill_name) - - should_notify = self._skill_state[msg.call_id].handle_msg(msg) - - if should_notify: - updates_available = self._ensure_updates_available() - if updates_available is None: - print("[DEBUG] Event not created yet, deferring notification") - return - - try: - current_loop = asyncio.get_running_loop() - agent_loop = getattr(self, "_agent_loop", self._loop) - # print( - # f"[DEBUG] handle_message: current_loop={id(current_loop)}, agent_loop={id(agent_loop) if agent_loop else 'None'}, event={id(updates_available)}" - # ) - if agent_loop and agent_loop != current_loop: - # print( - # f"[DEBUG] Calling set() via call_soon_threadsafe from loop {id(current_loop)} to agent loop {id(agent_loop)}" - # ) - agent_loop.call_soon_threadsafe(updates_available.set) - else: - # print(f"[DEBUG] Calling set() directly in current loop {id(current_loop)}") - updates_available.set() - except RuntimeError: - # No running loop, use 
call_soon_threadsafe if we have an agent loop - agent_loop = getattr(self, "_agent_loop", self._loop) - # print( - # f"[DEBUG] No current running loop, agent_loop={id(agent_loop) if agent_loop else 'None'}" - # ) - if agent_loop: - # print( - # f"[DEBUG] Calling set() via call_soon_threadsafe to agent loop {id(agent_loop)}" - # ) - agent_loop.call_soon_threadsafe(updates_available.set) - else: - # print(f"[DEBUG] Event creation was deferred, can't notify") - pass - - def has_active_skills(self) -> bool: - if not self.has_passive_skills(): - return False - for skill_run in self._skill_state.values(): - # check if this skill will notify agent - if skill_run.skill_config.ret == Return.call_agent: - return True - if skill_run.skill_config.stream == Stream.call_agent: - return True - return False - - def has_passive_skills(self) -> bool: - # check if dict is empty - if self._skill_state == {}: - return False - return True - - async def wait_for_updates(self, timeout: float | None = None) -> True: # type: ignore[valid-type] - """Wait for skill updates to become available. - - This method should be called by the agent when it's ready to receive updates. - It will block until updates are available or timeout is reached. 
- - Args: - timeout: Optional timeout in seconds - - Returns: - True if updates are available, False on timeout - """ - updates_available = self._ensure_updates_available() - if updates_available is None: - # Force event creation now that we're in the agent's loop context - # print(f"[DEBUG] wait_for_updates: Creating event in current loop context") - current_loop = asyncio.get_running_loop() - self._updates_available = asyncio.Event() - self._agent_loop = current_loop - updates_available = self._updates_available - # print( - # f"[DEBUG] wait_for_updates: Created event {id(updates_available)} in loop {id(current_loop)}" - # ) - - try: - current_loop = asyncio.get_running_loop() - - # Double-check the loop context before waiting - if self._agent_loop != current_loop: - # print(f"[DEBUG] Loop context changed! Recreating event for loop {id(current_loop)}") - self._updates_available = asyncio.Event() - self._agent_loop = current_loop - updates_available = self._updates_available - - # print( - # f"[DEBUG] wait_for_updates: current_loop={id(current_loop)}, event={id(updates_available)}, is_set={updates_available.is_set()}" - # ) - if timeout: - # print(f"[DEBUG] Waiting for event with timeout {timeout}") - await asyncio.wait_for(updates_available.wait(), timeout=timeout) - else: - print("[DEBUG] Waiting for event without timeout") - await updates_available.wait() - print("[DEBUG] Event was set! 
Returning True") - return True - except asyncio.TimeoutError: - print("[DEBUG] Timeout occurred while waiting for event") - return False - except RuntimeError as e: - if "bound to a different event loop" in str(e): - print( - "[DEBUG] Event loop binding error detected, recreating event and returning False to retry" - ) - # Recreate the event in the current loop - current_loop = asyncio.get_running_loop() - self._updates_available = asyncio.Event() - self._agent_loop = current_loop - return False - else: - raise - - def generate_snapshot(self, clear: bool = True) -> SkillStateDict: - """Generate a fresh snapshot of completed skills and optionally clear them.""" - ret = copy(self._skill_state) - - if clear: - updates_available = self._ensure_updates_available() - if updates_available is not None: - # print(f"[DEBUG] generate_snapshot: clearing event {id(updates_available)}") - updates_available.clear() - else: - ... - # rint(f"[DEBUG] generate_snapshot: event not created yet, nothing to clear") - to_delete = [] - # Since snapshot is being sent to agent, we can clear the finished skill runs - for call_id, skill_run in self._skill_state.items(): - if skill_run.state == SkillStateEnum.completed: - logger.info(f"Skill {skill_run.name} (call_id={call_id}) finished") - to_delete.append(call_id) - if skill_run.state == SkillStateEnum.error: - error_msg = skill_run.error_msg.content.get("msg", "Unknown error") # type: ignore[union-attr] - error_traceback = skill_run.error_msg.content.get( # type: ignore[union-attr] - "traceback", "No traceback available" - ) - - logger.error( - f"Skill error for {skill_run.name} (call_id={call_id}): {error_msg}" - ) - print(error_traceback) - to_delete.append(call_id) - - elif ( - skill_run.state == SkillStateEnum.running - and skill_run.reduced_stream_msg is not None - ): - # preserve ret as a copy - ret[call_id] = copy(skill_run) - logger.debug( - f"Resetting accumulator for skill {skill_run.name} (call_id={call_id})" - ) - 
skill_run.reduced_stream_msg = None # type: ignore[assignment] - - for call_id in to_delete: - logger.debug(f"Call {call_id} finished, removing from state") - del self._skill_state[call_id] - - return ret - - def __str__(self) -> str: - console = Console(force_terminal=True, legacy_windows=False) - - # Create main table without any header - table = Table(show_header=False) - - # Add containers section - containers_table = Table(show_header=True, show_edge=False, box=None) - containers_table.add_column("Type", style="cyan") - containers_table.add_column("Container", style="white") - - # Add static containers - for container in self._static_containers: - containers_table.add_row("Static", str(container)) - - # Add dynamic containers - for container in self._dynamic_containers: - containers_table.add_row("Dynamic", str(container)) - - if not self._static_containers and not self._dynamic_containers: - containers_table.add_row("", "[dim]No containers registered[/dim]") - - # Add skill states section - states_table = self._skill_state.table() - states_table.show_edge = False - states_table.box = None - - # Combine into main table - table.add_column("Section", style="bold") - table.add_column("Details", style="none") - table.add_row("Containers", containers_table) - table.add_row("Skills", states_table) - - # Render to string with title above - with console.capture() as capture: - console.print(Text(" SkillCoordinator", style="bold blue")) - console.print(table) - return capture.get().strip() - - # Given skillcontainers can run remotely, we are - # Caching available skills from static containers - # - # Dynamic containers will be queried at runtime via - # .skills() method - def register_skills(self, container: SkillContainer) -> None: - self.empty = False - if not container.dynamic_skills(): - logger.info(f"Registering static skill container, {container}") - self._static_containers.append(container) - for name, skill_config in container.skills().items(): - 
self._skills[name] = skill_config.bind(getattr(container, name)) - else: - logger.info(f"Registering dynamic skill container, {container}") - self._dynamic_containers.append(container) - - def get_skill_config(self, skill_name: str) -> SkillConfig | None: - skill_config = self._skills.get(skill_name) - if not skill_config: - skill_config = self.skills().get(skill_name) - return skill_config - - def skills(self) -> dict[str, SkillConfig]: - # Static container skilling is already cached - all_skills: dict[str, SkillConfig] = {**self._skills} - - # Then aggregate skills from dynamic containers - for container in self._dynamic_containers: - for skill_name, skill_config in container.skills().items(): - all_skills[skill_name] = skill_config.bind(getattr(container, skill_name)) - - return all_skills diff --git a/dimos/protocol/skill/schema.py b/dimos/protocol/skill/schema.py deleted file mode 100644 index 3b265f9c1b..0000000000 --- a/dimos/protocol/skill/schema.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import inspect -from typing import Union, get_args, get_origin - - -def python_type_to_json_schema(python_type) -> dict: # type: ignore[no-untyped-def, type-arg] - """Convert Python type annotations to JSON Schema format.""" - # Handle None/NoneType - if python_type is type(None) or python_type is None: - return {"type": "null"} - - # Handle Union types (including Optional) - origin = get_origin(python_type) - if origin is Union: - args = get_args(python_type) - # Handle Optional[T] which is Union[T, None] - if len(args) == 2 and type(None) in args: - non_none_type = args[0] if args[1] is type(None) else args[1] - schema = python_type_to_json_schema(non_none_type) - # For OpenAI function calling, we don't use anyOf for optional params - return schema - else: - # For other Union types, use anyOf - return {"anyOf": [python_type_to_json_schema(arg) for arg in args]} - - # Handle List/list types - if origin in (list, list): - args = get_args(python_type) - if args: - return {"type": "array", "items": python_type_to_json_schema(args[0])} - return {"type": "array"} - - # Handle Dict/dict types - if origin in (dict, dict): - return {"type": "object"} - - # Handle basic types - type_map = { - str: {"type": "string"}, - int: {"type": "integer"}, - float: {"type": "number"}, - bool: {"type": "boolean"}, - list: {"type": "array"}, - dict: {"type": "object"}, - } - - return type_map.get(python_type, {"type": "string"}) - - -def function_to_schema(func) -> dict: # type: ignore[no-untyped-def, type-arg] - """Convert a function to OpenAI function schema format.""" - try: - signature = inspect.signature(func) - except ValueError as e: - raise ValueError(f"Failed to get signature for function {func.__name__}: {e!s}") - - properties = {} - required = [] - - for param_name, param in signature.parameters.items(): - # Skip 'self' parameter for methods - if param_name == "self": - continue - - # Get the type annotation - if param.annotation != inspect.Parameter.empty: - param_schema 
= python_type_to_json_schema(param.annotation) - else: - # Default to string if no type annotation - param_schema = {"type": "string"} - - # Add description from docstring if available (would need more sophisticated parsing) - properties[param_name] = param_schema - - # Add to required list if no default value - if param.default == inspect.Parameter.empty: - required.append(param_name) - - return { - "type": "function", - "function": { - "name": func.__name__, - "description": (func.__doc__ or "").strip(), - "parameters": { - "type": "object", - "properties": properties, - "required": required, - }, - }, - } diff --git a/dimos/protocol/skill/skill.py b/dimos/protocol/skill/skill.py deleted file mode 100644 index 373bb463a7..0000000000 --- a/dimos/protocol/skill/skill.py +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import asyncio -from collections.abc import Callable -from concurrent.futures import ThreadPoolExecutor -from dataclasses import dataclass -from typing import Any - -# from dimos.core.core import rpc -from dimos.protocol.skill.comms import LCMSkillComms, SkillCommsSpec -from dimos.protocol.skill.schema import function_to_schema -from dimos.protocol.skill.type import ( - MsgType, - Output, - Reducer, - Return, - SkillConfig, - SkillMsg, - Stream, -) - -# skill is a decorator that allows us to specify a skill behaviour for a function. 
-# -# there are several parameters that can be specified: -# - ret: how to return the value from the skill, can be one of: -# -# Return.none: doesn't return anything to an agent -# Return.passive: doesn't schedule an agent call but -# returns the value to the agent when agent is called -# Return.call_agent: calls the agent with the value, scheduling an agent call -# -# - stream: if the skill streams values, it can behave in several ways: -# -# Stream.none: no streaming, skill doesn't emit any values -# Stream.passive: doesn't schedule an agent call upon emitting a value, -# returns the streamed value to the agent when agent is called -# Stream.call_agent: calls the agent with every value emitted, scheduling an agent call -# -# - reducer: defines an optional strategy for passive streams and how we collapse potential -# multiple values into something meaningful for the agent -# -# Reducer.none: no reduction, every emitted value is returned to the agent -# Reducer.latest: only the latest value is returned to the agent -# Reducer.average: assumes the skill emits a number, -# the average of all values is returned to the agent - - -def rpc(fn: Callable[..., Any]) -> Callable[..., Any]: - fn.__rpc__ = True # type: ignore[attr-defined] - return fn - - -def skill( - reducer: Reducer = Reducer.latest, # type: ignore[assignment] - stream: Stream = Stream.none, - ret: Return = Return.call_agent, - output: Output = Output.standard, - hide_skill: bool = False, -) -> Callable: # type: ignore[type-arg] - def decorator(f: Callable[..., Any]) -> Any: - def wrapper(self, *args, **kwargs): # type: ignore[no-untyped-def] - skill = f"{f.__name__}" - - call_id = kwargs.get("call_id", None) - if call_id: - del kwargs["call_id"] - - return self.call_skill(call_id, skill, args, kwargs) - # def run_function(): - # return self.call_skill(call_id, skill, args, kwargs) - # - # thread = threading.Thread(target=run_function) - # thread.start() - # return None - - return f(self, *args, **kwargs) - 
- # sig = inspect.signature(f) - # params = list(sig.parameters.values()) - # if params and params[0].name == "self": - # params = params[1:] # Remove first parameter 'self' - # wrapper.__signature__ = sig.replace(parameters=params) - - skill_config = SkillConfig( - name=f.__name__, - reducer=reducer, # type: ignore[arg-type] - stream=stream, - # if stream is passive, ret must be passive too - ret=ret.passive if stream == Stream.passive else ret, - output=output, - schema=function_to_schema(f), - hide_skill=hide_skill, - ) - - wrapper.__rpc__ = True # type: ignore[attr-defined] - wrapper._skill_config = skill_config # type: ignore[attr-defined] - wrapper.__name__ = f.__name__ # Preserve original function name - wrapper.__doc__ = f.__doc__ # Preserve original docstring - return wrapper - - return decorator - - -@dataclass -class SkillContainerConfig: - skill_transport: type[SkillCommsSpec] = LCMSkillComms - - -def threaded(f: Callable[..., Any]) -> Callable[..., None]: - """Decorator to run a function in a thread pool.""" - - def wrapper(self, *args, **kwargs): # type: ignore[no-untyped-def] - if self._skill_thread_pool is None: - self._skill_thread_pool = ThreadPoolExecutor( - max_workers=50, thread_name_prefix="skill_worker" - ) - self._skill_thread_pool.submit(f, self, *args, **kwargs) - return None - - return wrapper - - -# Inherited by any class that wants to provide skills -# (This component works standalone but commonly used by DimOS modules) -# -# Hosts the function execution and handles correct publishing of skill messages -# according to the individual skill decorator configuration -# -# - It allows us to specify a communication layer for skills (LCM for now by default) -# - introspection of available skills via the `skills` RPC method -# - ability to provide dynamic context dependant skills with dynamic_skills flag -# for this you'll need to override the `skills` method to return a dynamic set of skills -# SkillCoordinator will call this method to get the 
skills available upon every request to -# the agent - - -class SkillContainer: - skill_transport_class: type[SkillCommsSpec] = LCMSkillComms - _skill_thread_pool: ThreadPoolExecutor | None = None - _skill_transport: SkillCommsSpec | None = None - - @rpc - def dynamic_skills(self) -> bool: - return False - - def __str__(self) -> str: - return f"SkillContainer({self.__class__.__name__})" - - @rpc - def stop(self) -> None: - if self._skill_transport: - self._skill_transport.stop() - self._skill_transport = None - - if self._skill_thread_pool: - self._skill_thread_pool.shutdown(wait=True) - self._skill_thread_pool = None - - # Continue the MRO chain if there's a parent stop() method - if hasattr(super(), "stop"): - super().stop() # type: ignore[misc] - - # TODO: figure out standard args/kwargs passing format, - # use same interface as skill coordinator call_skill method - @threaded - def call_skill( - self, call_id: str, skill_name: str, args: tuple[Any, ...], kwargs: dict[str, Any] - ) -> None: - f = getattr(self, skill_name, None) - - if f is None: - raise ValueError(f"Function '{skill_name}' not found in {self.__class__.__name__}") - - config = getattr(f, "_skill_config", None) - if config is None: - raise ValueError(f"Function '{skill_name}' in {self.__class__.__name__} is not a skill") - - # we notify the skill transport about the start of the skill call - self.skill_transport.publish(SkillMsg(call_id, skill_name, None, type=MsgType.start)) - - try: - val = f(*args, **kwargs) - - # check if the skill returned a coroutine, if it is, block until it resolves - if isinstance(val, asyncio.Future): - val = asyncio.run(val) # type: ignore[arg-type] - - # check if the skill is a generator, if it is, we need to iterate over it - if hasattr(val, "__iter__") and not isinstance(val, str): - last_value = None - for v in val: - last_value = v - self.skill_transport.publish( - SkillMsg(call_id, skill_name, v, type=MsgType.stream) - ) - self.skill_transport.publish( - 
SkillMsg(call_id, skill_name, last_value, type=MsgType.ret) - ) - - else: - self.skill_transport.publish(SkillMsg(call_id, skill_name, val, type=MsgType.ret)) - - except Exception as e: - import traceback - - formatted_traceback = "".join(traceback.TracebackException.from_exception(e).format()) - - self.skill_transport.publish( - SkillMsg( - call_id, - skill_name, - {"msg": str(e), "traceback": formatted_traceback}, - type=MsgType.error, - ) - ) - - @rpc - def skills(self) -> dict[str, SkillConfig]: - # Avoid recursion by excluding this property itself - # Also exclude known properties that shouldn't be accessed - excluded = {"skills", "tf", "rpc", "skill_transport"} - return { - name: getattr(self, name)._skill_config - for name in dir(self) - if not name.startswith("_") - and name not in excluded - and hasattr(getattr(self, name), "_skill_config") - } - - @property - def skill_transport(self) -> SkillCommsSpec: - if self._skill_transport is None: - self._skill_transport = self.skill_transport_class() - return self._skill_transport diff --git a/dimos/protocol/skill/test_coordinator.py b/dimos/protocol/skill/test_coordinator.py deleted file mode 100644 index bd00ea69c2..0000000000 --- a/dimos/protocol/skill/test_coordinator.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import asyncio -from collections.abc import Generator -import datetime -import time - -import pytest # type: ignore[import-not-found] - -from dimos.core import Module, rpc -from dimos.msgs.sensor_msgs import Image -from dimos.protocol.skill.coordinator import SkillCoordinator -from dimos.protocol.skill.skill import skill -from dimos.protocol.skill.type import Output, Reducer, Stream -from dimos.utils.data import get_data - - -class SkillContainerTest(Module): - @rpc - def start(self) -> None: - super().start() - - @rpc - def stop(self) -> None: - super().stop() - - @skill() - def add(self, x: int, y: int) -> int: - """adds x and y.""" - time.sleep(2) - return x + y - - @skill() - def delayadd(self, x: int, y: int) -> int: - """waits 0.3 seconds before adding x and y.""" - time.sleep(0.3) - return x + y - - @skill(stream=Stream.call_agent, reducer=Reducer.all) # type: ignore[arg-type] - def counter(self, count_to: int, delay: float | None = 0.05) -> Generator[int, None, None]: - """Counts from 1 to count_to, with an optional delay between counts.""" - for i in range(1, count_to + 1): - if delay is not None and delay > 0: - time.sleep(delay) - yield i - - @skill(stream=Stream.passive, reducer=Reducer.sum) # type: ignore[arg-type] - def counter_passive_sum( - self, count_to: int, delay: float | None = 0.05 - ) -> Generator[int, None, None]: - """Counts from 1 to count_to, with an optional delay between counts.""" - for i in range(1, count_to + 1): - if delay is not None and delay > 0: - time.sleep(delay) - yield i - - @skill(stream=Stream.passive, reducer=Reducer.latest) # type: ignore[arg-type] - def current_time(self, frequency: float | None = 10) -> Generator[str, None, None]: - """Provides current time.""" - while True: - yield str(datetime.datetime.now()) - if frequency is not None: - time.sleep(1 / frequency) - - @skill(stream=Stream.passive, reducer=Reducer.latest) # type: ignore[arg-type] - def uptime_seconds(self, frequency: float | None = 10) -> 
Generator[float, None, None]: - """Provides current uptime.""" - start_time = datetime.datetime.now() - while True: - yield (datetime.datetime.now() - start_time).total_seconds() - if frequency is not None: - time.sleep(1 / frequency) - - @skill() - def current_date(self, frequency: float | None = 10) -> str: - """Provides current date.""" - return str(datetime.datetime.now()) - - @skill(output=Output.image) - def take_photo(self) -> Image: - """Takes a camera photo""" - print("Taking photo...") - img = Image.from_file(str(get_data("cafe-smol.jpg"))) - print("Photo taken.") - return img - - -@pytest.mark.integration -@pytest.mark.asyncio # type: ignore[untyped-decorator] -async def test_coordinator_parallel_calls() -> None: - container = SkillContainerTest() - skillCoordinator = SkillCoordinator() - skillCoordinator.register_skills(container) - - skillCoordinator.start() - skillCoordinator.call_skill("test-call-0", "add", {"args": [0, 2]}) - - time.sleep(0.1) - - cnt = 0 - while await skillCoordinator.wait_for_updates(1): - print(skillCoordinator) - - skillstates = skillCoordinator.generate_snapshot() - - skill_id = f"test-call-{cnt}" - tool_msg = skillstates[skill_id].agent_encode() - assert tool_msg.content == cnt + 2 # type: ignore[union-attr] - - cnt += 1 - if cnt < 5: - skillCoordinator.call_skill( - f"test-call-{cnt}-delay", - "delayadd", - {"args": [cnt, 2]}, - ) - skillCoordinator.call_skill( - f"test-call-{cnt}", - "add", - {"args": [cnt, 2]}, - ) - - await asyncio.sleep(0.1 * cnt) - - container.stop() - skillCoordinator.stop() - - -@pytest.mark.integration -@pytest.mark.asyncio # type: ignore[untyped-decorator] -async def test_coordinator_generator() -> None: - container = SkillContainerTest() - skillCoordinator = SkillCoordinator() - skillCoordinator.register_skills(container) - skillCoordinator.start() - - # here we call a skill that generates a sequence of messages - skillCoordinator.call_skill("test-gen-0", "counter", {"args": [10]}) - 
skillCoordinator.call_skill("test-gen-1", "counter_passive_sum", {"args": [5]}) - skillCoordinator.call_skill("test-gen-2", "take_photo", {"args": []}) - - # periodically agent is stopping it's thinking cycle and asks for updates - while await skillCoordinator.wait_for_updates(2): - print(skillCoordinator) - agent_update = skillCoordinator.generate_snapshot(clear=True) - print(agent_update) - await asyncio.sleep(0.125) - - print("coordinator loop finished") - print(skillCoordinator) - container.stop() - skillCoordinator.stop() diff --git a/dimos/protocol/skill/test_utils.py b/dimos/protocol/skill/test_utils.py deleted file mode 100644 index d9fe9f6f91..0000000000 --- a/dimos/protocol/skill/test_utils.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from dimos.protocol.skill.utils import interpret_tool_call_args - - -def test_list() -> None: - args, kwargs = interpret_tool_call_args([1, 2, 3]) - assert args == [1, 2, 3] - assert kwargs == {} - - -def test_none() -> None: - args, kwargs = interpret_tool_call_args(None) - assert args == [] - assert kwargs == {} - - -def test_none_nested() -> None: - args, kwargs = interpret_tool_call_args({"args": None}) - assert args == [] - assert kwargs == {} - - -def test_non_dict() -> None: - args, kwargs = interpret_tool_call_args("test") - assert args == ["test"] - assert kwargs == {} - - -def test_dict_with_args_and_kwargs() -> None: - args, kwargs = interpret_tool_call_args({"args": [1, 2], "kwargs": {"key": "value"}}) - assert args == [1, 2] - assert kwargs == {"key": "value"} - - -def test_dict_with_only_kwargs() -> None: - args, kwargs = interpret_tool_call_args({"kwargs": {"a": 1, "b": 2}}) - assert args == [] - assert kwargs == {"a": 1, "b": 2} - - -def test_dict_as_kwargs() -> None: - args, kwargs = interpret_tool_call_args({"x": 10, "y": 20}) - assert args == [] - assert kwargs == {"x": 10, "y": 20} - - -def test_dict_with_only_args_first_pass() -> None: - args, kwargs = interpret_tool_call_args({"args": [5, 6, 7]}) - assert args == [5, 6, 7] - assert kwargs == {} - - -def test_dict_with_only_args_nested() -> None: - args, kwargs = interpret_tool_call_args({"args": {"inner": "value"}}) - assert args == [] - assert kwargs == {"inner": "value"} - - -def test_empty_list() -> None: - args, kwargs = interpret_tool_call_args([]) - assert args == [] - assert kwargs == {} - - -def test_empty_dict() -> None: - args, kwargs = interpret_tool_call_args({}) - assert args == [] - assert kwargs == {} - - -def test_integer() -> None: - args, kwargs = interpret_tool_call_args(42) - assert args == [42] - assert kwargs == {} diff --git a/dimos/protocol/skill/type.py b/dimos/protocol/skill/type.py deleted file mode 100644 index 7881dcd94e..0000000000 --- 
a/dimos/protocol/skill/type.py +++ /dev/null @@ -1,272 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import annotations - -from collections.abc import Callable -from dataclasses import dataclass -from enum import Enum -import time -from typing import Any, Generic, Literal, TypeVar - -from dimos.types.timestamped import Timestamped -from dimos.utils.generic import truncate_display_string - -# This file defines protocol messages used for communication between skills and agents - - -class Output(Enum): - standard = 0 - human = 1 - image = 2 # this is same as separate_message, but maybe clearer for users - - -class Stream(Enum): - # no streaming - none = 0 - # passive stream, doesn't schedule an agent call, but returns the value to the agent - passive = 1 - # calls the agent with every value emitted, schedules an agent call - call_agent = 2 - - -class Return(Enum): - # doesn't return anything to an agent - none = 0 - # returns the value to the agent, but doesn't schedule an agent call - passive = 1 - # calls the agent with the value, scheduling an agent call - call_agent = 2 - # calls the function to get a value, when the agent is being called - callback = 3 # TODO: this is a work in progress, not implemented yet - - -@dataclass -class SkillConfig: - name: str - reducer: ReducerF - stream: Stream - ret: Return - output: Output - schema: dict[str, Any] - f: Callable | None = None # type: ignore[type-arg] - 
autostart: bool = False - hide_skill: bool = False - - def bind(self, f: Callable) -> SkillConfig: # type: ignore[type-arg] - self.f = f - return self - - def call(self, call_id, *args, **kwargs) -> Any: # type: ignore[no-untyped-def] - if self.f is None: - raise ValueError( - "Function is not bound to the SkillConfig. This should be called only within AgentListener." - ) - - return self.f(*args, **kwargs, call_id=call_id) - - def __str__(self) -> str: - parts = [f"name={self.name}"] - - # Only show reducer if stream is not none (streaming is happening) - if self.stream != Stream.none: - parts.append(f"stream={self.stream.name}") - - # Always show return mode - parts.append(f"ret={self.ret.name}") - return f"Skill({', '.join(parts)})" - - -class MsgType(Enum): - pending = 0 - start = 1 - stream = 2 - reduced_stream = 3 - ret = 4 - error = 5 - - -M = TypeVar("M", bound="MsgType") - - -def maybe_encode(something: Any) -> str: - if hasattr(something, "agent_encode"): - return something.agent_encode() # type: ignore[no-any-return] - return something # type: ignore[no-any-return] - - -class SkillMsg(Timestamped, Generic[M]): - ts: float - type: M - call_id: str - skill_name: str - content: str | int | float | dict | list # type: ignore[type-arg] - - def __init__( - self, - call_id: str, - skill_name: str, - content: Any, - type: M, - ) -> None: - self.ts = time.time() - self.call_id = call_id - self.skill_name = skill_name - # any tool output can be a custom type that knows how to encode itself - # like a costmap, path, transform etc could be translatable into strings - - self.content = maybe_encode(content) - self.type = type - - @property - def end(self) -> bool: - return self.type == MsgType.ret or self.type == MsgType.error - - @property - def start(self) -> bool: - return self.type == MsgType.start - - def __str__(self) -> str: # type: ignore[return] - time_ago = time.time() - self.ts - - if self.type == MsgType.start: - return f"Start({time_ago:.1f}s ago)" - if 
self.type == MsgType.ret: - return f"Ret({time_ago:.1f}s ago, val={truncate_display_string(self.content)})" - if self.type == MsgType.error: - return f"Error({time_ago:.1f}s ago, val={truncate_display_string(self.content)})" - if self.type == MsgType.pending: - return f"Pending({time_ago:.1f}s ago)" - if self.type == MsgType.stream: - return f"Stream({time_ago:.1f}s ago, val={truncate_display_string(self.content)})" - if self.type == MsgType.reduced_stream: - return f"Stream({time_ago:.1f}s ago, val={truncate_display_string(self.content)})" - - -# typing looks complex but it's a standard reducer function signature, using SkillMsgs -# (Optional[accumulator], msg) -> accumulator -ReducerF = Callable[ - [SkillMsg[Literal[MsgType.reduced_stream]] | None, SkillMsg[Literal[MsgType.stream]]], - SkillMsg[Literal[MsgType.reduced_stream]], -] - - -C = TypeVar("C") # content type -A = TypeVar("A") # accumulator type -# define a naive reducer function type that's generic in terms of the accumulator type -SimpleReducerF = Callable[[A | None, C], A] - - -def make_reducer(simple_reducer: SimpleReducerF) -> ReducerF: # type: ignore[type-arg] - """ - Converts a naive reducer function into a standard reducer function. - The naive reducer function should accept an accumulator and a message, - and return the updated accumulator. 
- """ - - def reducer( - accumulator: SkillMsg[Literal[MsgType.reduced_stream]] | None, - msg: SkillMsg[Literal[MsgType.stream]], - ) -> SkillMsg[Literal[MsgType.reduced_stream]]: - # Extract the content from the accumulator if it exists - acc_value = accumulator.content if accumulator else None - - # Apply the simple reducer to get the new accumulated value - new_value = simple_reducer(acc_value, msg.content) - - # Wrap the result in a SkillMsg with reduced_stream type - return SkillMsg( - call_id=msg.call_id, - skill_name=msg.skill_name, - content=new_value, - type=MsgType.reduced_stream, - ) - - return reducer - - -# just a convinience class to hold reducer functions -def _make_skill_msg( - msg: SkillMsg[Literal[MsgType.stream]], content: Any -) -> SkillMsg[Literal[MsgType.reduced_stream]]: - """Helper to create a reduced stream message with new content.""" - return SkillMsg( - call_id=msg.call_id, - skill_name=msg.skill_name, - content=content, - type=MsgType.reduced_stream, - ) - - -def sum_reducer( - accumulator: SkillMsg[Literal[MsgType.reduced_stream]] | None, - msg: SkillMsg[Literal[MsgType.stream]], -) -> SkillMsg[Literal[MsgType.reduced_stream]]: - """Sum reducer that adds values together.""" - acc_value = accumulator.content if accumulator else None - new_value = acc_value + msg.content if acc_value else msg.content # type: ignore[operator] - return _make_skill_msg(msg, new_value) - - -def latest_reducer( - accumulator: SkillMsg[Literal[MsgType.reduced_stream]] | None, - msg: SkillMsg[Literal[MsgType.stream]], -) -> SkillMsg[Literal[MsgType.reduced_stream]]: - """Latest reducer that keeps only the most recent value.""" - return _make_skill_msg(msg, msg.content) - - -def all_reducer( - accumulator: SkillMsg[Literal[MsgType.reduced_stream]] | None, - msg: SkillMsg[Literal[MsgType.stream]], -) -> SkillMsg[Literal[MsgType.reduced_stream]]: - """All reducer that collects all values into a list.""" - acc_value = accumulator.content if accumulator else None - 
new_value = [*acc_value, msg.content] if acc_value else [msg.content] # type: ignore[misc] - return _make_skill_msg(msg, new_value) - - -def accumulate_list( - accumulator: SkillMsg[Literal[MsgType.reduced_stream]] | None, - msg: SkillMsg[Literal[MsgType.stream]], -) -> SkillMsg[Literal[MsgType.reduced_stream]]: - """All reducer that collects all values into a list.""" - acc_value = accumulator.content if accumulator else [] - return _make_skill_msg(msg, acc_value + msg.content) # type: ignore[operator] - - -def accumulate_dict( - accumulator: SkillMsg[Literal[MsgType.reduced_stream]] | None, - msg: SkillMsg[Literal[MsgType.stream]], -) -> SkillMsg[Literal[MsgType.reduced_stream]]: - """All reducer that collects all values into a list.""" - acc_value = accumulator.content if accumulator else {} - return _make_skill_msg(msg, {**acc_value, **msg.content}) # type: ignore[dict-item] - - -def accumulate_string( - accumulator: SkillMsg[Literal[MsgType.reduced_stream]] | None, - msg: SkillMsg[Literal[MsgType.stream]], -) -> SkillMsg[Literal[MsgType.reduced_stream]]: - """All reducer that collects all values into a list.""" - acc_value = accumulator.content if accumulator else "" - return _make_skill_msg(msg, acc_value + "\n" + msg.content) # type: ignore[operator] - - -class Reducer: - sum = sum_reducer - latest = latest_reducer - all = all_reducer - accumulate_list = accumulate_list - accumulate_dict = accumulate_dict - string = accumulate_string diff --git a/dimos/protocol/skill/utils.py b/dimos/protocol/skill/utils.py deleted file mode 100644 index 278134c525..0000000000 --- a/dimos/protocol/skill/utils.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Any - - -def interpret_tool_call_args( - args: Any, first_pass: bool = True -) -> tuple[list[Any], dict[str, Any]]: - """ - Agents sometimes produce bizarre calls. This tries to interpret the args better. - """ - - if isinstance(args, list): - return args, {} - if args is None: - return [], {} - if not isinstance(args, dict): - return [args], {} - if args.keys() == {"args", "kwargs"}: - return args["args"], args["kwargs"] - if args.keys() == {"kwargs"}: - return [], args["kwargs"] - if args.keys() != {"args"}: - return [], args - - if first_pass: - return interpret_tool_call_args(args["args"], first_pass=False) - - return [], args diff --git a/dimos/protocol/tf/test_tf.py b/dimos/protocol/tf/test_tf.py index 0b5b332c3d..bdbd808cbb 100644 --- a/dimos/protocol/tf/test_tf.py +++ b/dimos/protocol/tf/test_tf.py @@ -48,6 +48,7 @@ def test_tf_ros_example() -> None: time.sleep(0.2) end_effector_global_pose = tf.get("base_link", "end_effector") + assert end_effector_global_pose is not None assert end_effector_global_pose.translation.x == pytest.approx(1.366, abs=1e-3) assert end_effector_global_pose.translation.y == pytest.approx(0.366, abs=1e-3) @@ -116,6 +117,7 @@ def test_tf_main() -> None: # The chain should compose: world->robot (1,2,3) + robot->sensor (0.5,0,0.2) # Expected translation: (1.5, 2.0, 3.2) + assert chain_transform is not None assert abs(chain_transform.translation.x - 1.5) < 0.001 assert abs(chain_transform.translation.y - 2.0) < 0.001 assert abs(chain_transform.translation.z - 3.2) < 0.001 @@ -163,12 +165,14 @@ def 
test_tf_main() -> None: # if you have "diagon" https://diagon.arthursonzogni.com/ installed you can draw a graph print(broadcaster.graph()) + assert world_object is not None assert abs(world_object.translation.x - 1.5) < 0.001 assert abs(world_object.translation.y - 3.0) < 0.001 assert abs(world_object.translation.z - 3.2) < 0.001 # this doesn't work atm robot_to_charger = broadcaster.get("robot", "charger") + assert robot_to_charger is not None # Expected: robot->world->charger print(f"robot_to_charger translation: {robot_to_charger.translation}") @@ -196,7 +200,7 @@ def test_add_transform(self) -> None: buffer.add(transform) assert len(buffer) == 1 - assert buffer[0] == transform + assert buffer.first() == transform def test_get(self) -> None: buffer = TBuffer() @@ -250,7 +254,9 @@ def test_buffer_pruning(self) -> None: # Old transform should be pruned assert len(buffer) == 1 - assert buffer[0].translation.x == 2.0 + first = buffer.first() + assert first is not None + assert first.translation.x == 2.0 class TestMultiTBuffer: diff --git a/dimos/protocol/tf/tf.py b/dimos/protocol/tf/tf.py index 3688b013cf..825e89fc8c 100644 --- a/dimos/protocol/tf/tf.py +++ b/dimos/protocol/tf/tf.py @@ -20,12 +20,12 @@ from functools import reduce from typing import TypeVar -from dimos.msgs.geometry_msgs import Transform +from dimos.memory.timeseries.inmemory import InMemoryStore +from dimos.msgs.geometry_msgs import PoseStamped, Transform from dimos.msgs.tf2_msgs import TFMessage -from dimos.protocol.pubsub.lcmpubsub import LCM, Topic +from dimos.protocol.pubsub.impl.lcmpubsub import LCM, Topic from dimos.protocol.pubsub.spec import PubSub from dimos.protocol.service.lcmservice import Service # type: ignore[attr-defined] -from dimos.types.timestamped import TimestampedCollection CONFIG = TypeVar("CONFIG") @@ -52,13 +52,13 @@ def get_frames(self) -> set[str]: return set() @abstractmethod - def get( # type: ignore[no-untyped-def] + def get( self, parent_frame: str, child_frame: str, 
time_point: float | None = None, time_tolerance: float | None = None, - ): ... + ) -> Transform | None: ... def receive_transform(self, *args: Transform) -> None: ... @@ -71,64 +71,44 @@ def receive_tfmessage(self, msg: TFMessage) -> None: TopicT = TypeVar("TopicT") -# stores a single transform -class TBuffer(TimestampedCollection[Transform]): +class TBuffer(InMemoryStore[Transform]): def __init__(self, buffer_size: float = 10.0) -> None: super().__init__() self.buffer_size = buffer_size def add(self, transform: Transform) -> None: - super().add(transform) - self._prune_old_transforms(transform.ts) - - def _prune_old_transforms(self, current_time) -> None: # type: ignore[no-untyped-def] - if not self._items: - return - - cutoff_time = current_time - self.buffer_size - - while self._items and self._items[0].ts < cutoff_time: - self._items.pop(0) + self.save(transform) + self.prune_old(transform.ts - self.buffer_size) def get(self, time_point: float | None = None, time_tolerance: float = 1.0) -> Transform | None: """Get transform at specified time or latest if no time given.""" if time_point is None: - # Return the latest transform - return self[-1] if len(self) > 0 else None - + return self.last() return self.find_closest(time_point, time_tolerance) def __str__(self) -> str: - if not self._items: + if len(self) == 0: return "TBuffer(empty)" - # Get unique frame info from the transforms - frame_pairs = set() - if self._items: - frame_pairs.add((self._items[0].frame_id, self._items[0].child_frame_id)) - + first_item = self.first() time_range = self.time_range() - if time_range: + if time_range and first_item: from dimos.types.timestamped import to_human_readable start_time = to_human_readable(time_range[0]) end_time = to_human_readable(time_range[1]) duration = time_range[1] - time_range[0] - frame_str = ( - f"{self._items[0].frame_id} -> {self._items[0].child_frame_id}" - if self._items - else "unknown" - ) + frame_str = f"{first_item.frame_id} -> 
{first_item.child_frame_id}" return ( f"TBuffer(" f"{frame_str}, " - f"{len(self._items)} msgs, " + f"{len(self)} msgs, " f"{duration:.2f}s [{start_time} - {end_time}])" ) - return f"TBuffer({len(self._items)} msgs)" + return f"TBuffer({len(self)} msgs)" # stores multiple transform buffers @@ -334,6 +314,18 @@ def get( ) -> Transform | None: return super().get(parent_frame, child_frame, time_point, time_tolerance) + def get_pose( + self, + parent_frame: str, + child_frame: str, + time_point: float | None = None, + time_tolerance: float | None = None, + ) -> PoseStamped | None: + tf = self.get(parent_frame, child_frame, time_point, time_tolerance) + if not tf: + return None + return tf.to_pose() + def receive_msg(self, msg: TFMessage, topic: Topic) -> None: self.receive_tfmessage(msg) diff --git a/dimos/robot/all_blueprints.py b/dimos/robot/all_blueprints.py index c241485472..19d7e7db29 100644 --- a/dimos/robot/all_blueprints.py +++ b/dimos/robot/all_blueprints.py @@ -12,92 +12,133 @@ # See the License for the specific language governing permissions and # limitations under the License. -from dimos.core.blueprints import ModuleBlueprintSet +# This file is auto-generated. Do not edit manually. +# Run `pytest dimos/robot/test_all_blueprints_generation.py` to regenerate. -# The blueprints are defined as import strings so as not to trigger unnecessary imports. 
all_blueprints = { - "unitree-go2": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:nav", - "unitree-go2-basic": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:basic", - "unitree-go2-nav": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:nav", - "unitree-go2-ros": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:ros", - "unitree-go2-detection": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:detection", - "unitree-go2-spatial": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:spatial", - "unitree-go2-temporal-memory": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:temporal_memory", - "unitree-go2-agentic": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:agentic", - "unitree-go2-agentic-mcp": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:agentic_mcp", - "unitree-go2-agentic-ollama": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:agentic_ollama", - "unitree-go2-agentic-huggingface": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:agentic_huggingface", - "unitree-go2-vlm-stream-test": "dimos.robot.unitree_webrtc.unitree_go2_blueprints:vlm_stream_test", - "unitree-g1": "dimos.robot.unitree_webrtc.unitree_g1_blueprints:standard", - "unitree-g1-sim": "dimos.robot.unitree_webrtc.unitree_g1_blueprints:standard_sim", - "unitree-g1-basic": "dimos.robot.unitree_webrtc.unitree_g1_blueprints:basic_ros", - "unitree-g1-basic-sim": "dimos.robot.unitree_webrtc.unitree_g1_blueprints:basic_sim", - "unitree-g1-shm": "dimos.robot.unitree_webrtc.unitree_g1_blueprints:standard_with_shm", - "unitree-g1-agentic": "dimos.robot.unitree_webrtc.unitree_g1_blueprints:agentic", - "unitree-g1-agentic-sim": "dimos.robot.unitree_webrtc.unitree_g1_blueprints:agentic_sim", - "unitree-g1-joystick": "dimos.robot.unitree_webrtc.unitree_g1_blueprints:with_joystick", - "unitree-g1-full": "dimos.robot.unitree_webrtc.unitree_g1_blueprints:full_featured", - "unitree-g1-detection": "dimos.robot.unitree_webrtc.unitree_g1_blueprints:detection", - # Control orchestrator 
blueprints - "orchestrator-mock": "dimos.control.blueprints:orchestrator_mock", - "orchestrator-xarm7": "dimos.control.blueprints:orchestrator_xarm7", - "orchestrator-xarm6": "dimos.control.blueprints:orchestrator_xarm6", - "orchestrator-piper": "dimos.control.blueprints:orchestrator_piper", - "orchestrator-dual-mock": "dimos.control.blueprints:orchestrator_dual_mock", - "orchestrator-dual-xarm": "dimos.control.blueprints:orchestrator_dual_xarm", - "orchestrator-piper-xarm": "dimos.control.blueprints:orchestrator_piper_xarm", - # Demo blueprints + "arm-teleop": "dimos.teleop.quest.blueprints:arm_teleop", + "arm-teleop-dual": "dimos.teleop.quest.blueprints:arm_teleop_dual", + "arm-teleop-piper": "dimos.teleop.quest.blueprints:arm_teleop_piper", + "arm-teleop-visualizing": "dimos.teleop.quest.blueprints:arm_teleop_visualizing", + "arm-teleop-xarm6": "dimos.teleop.quest.blueprints:arm_teleop_xarm6", + "coordinator-basic": "dimos.control.blueprints:coordinator_basic", + "coordinator-cartesian-ik-mock": "dimos.control.blueprints:coordinator_cartesian_ik_mock", + "coordinator-cartesian-ik-piper": "dimos.control.blueprints:coordinator_cartesian_ik_piper", + "coordinator-combined-xarm6": "dimos.control.blueprints:coordinator_combined_xarm6", + "coordinator-dual-mock": "dimos.control.blueprints:coordinator_dual_mock", + "coordinator-dual-xarm": "dimos.control.blueprints:coordinator_dual_xarm", + "coordinator-mock": "dimos.control.blueprints:coordinator_mock", + "coordinator-piper": "dimos.control.blueprints:coordinator_piper", + "coordinator-piper-xarm": "dimos.control.blueprints:coordinator_piper_xarm", + "coordinator-teleop-dual": "dimos.control.blueprints:coordinator_teleop_dual", + "coordinator-teleop-piper": "dimos.control.blueprints:coordinator_teleop_piper", + "coordinator-teleop-xarm6": "dimos.control.blueprints:coordinator_teleop_xarm6", + "coordinator-velocity-xarm6": "dimos.control.blueprints:coordinator_velocity_xarm6", + "coordinator-xarm6": 
"dimos.control.blueprints:coordinator_xarm6", + "coordinator-xarm7": "dimos.control.blueprints:coordinator_xarm7", + "demo-agent": "dimos.agents.demo_agent:demo_agent", + "demo-agent-camera": "dimos.agents.demo_agent:demo_agent_camera", "demo-camera": "dimos.hardware.sensors.camera.module:demo_camera", - "demo-osm": "dimos.mapping.osm.demo_osm:demo_osm", - "demo-skill": "dimos.agents.skills.demo_skill:demo_skill", - "demo-gps-nav": "dimos.agents.skills.demo_gps_nav:demo_gps_nav_skill", + "demo-error-on-name-conflicts": "dimos.robot.unitree.demo_error_on_name_conflicts:demo_error_on_name_conflicts", "demo-google-maps-skill": "dimos.agents.skills.demo_google_maps_skill:demo_google_maps_skill", + "demo-gps-nav": "dimos.agents.skills.demo_gps_nav:demo_gps_nav", + "demo-grasping": "dimos.manipulation.grasping.demo_grasping:demo_grasping", "demo-object-scene-registration": "dimos.perception.demo_object_scene_registration:demo_object_scene_registration", - "demo-error-on-name-conflicts": "dimos.robot.unitree_webrtc.demo_error_on_name_conflicts:blueprint", + "demo-osm": "dimos.mapping.osm.demo_osm:demo_osm", + "demo-skill": "dimos.agents.skills.demo_skill:demo_skill", + "dual-xarm6-planner": "dimos.manipulation.manipulation_blueprints:dual_xarm6_planner", + "keyboard-teleop-piper": "dimos.robot.manipulators.piper.blueprints:keyboard_teleop_piper", + "keyboard-teleop-xarm6": "dimos.robot.manipulators.xarm.blueprints:keyboard_teleop_xarm6", + "keyboard-teleop-xarm7": "dimos.robot.manipulators.xarm.blueprints:keyboard_teleop_xarm7", + "mid360": "dimos.hardware.sensors.lidar.livox.livox_blueprints:mid360", + "mid360-fastlio": "dimos.hardware.sensors.lidar.fastlio2.fastlio_blueprints:mid360_fastlio", + "mid360-fastlio-voxels": "dimos.hardware.sensors.lidar.fastlio2.fastlio_blueprints:mid360_fastlio_voxels", + "mid360-fastlio-voxels-native": "dimos.hardware.sensors.lidar.fastlio2.fastlio_blueprints:mid360_fastlio_voxels_native", + "phone-go2-teleop": 
"dimos.teleop.phone.blueprints:phone_go2_teleop", + "simple-phone-teleop": "dimos.teleop.phone.blueprints:simple_phone_teleop", + "uintree-g1-primitive-no-nav": "dimos.robot.unitree.g1.blueprints.primitive.uintree_g1_primitive_no_nav:uintree_g1_primitive_no_nav", + "unitree-g1": "dimos.robot.unitree.g1.blueprints.perceptive.unitree_g1:unitree_g1", + "unitree-g1-agentic": "dimos.robot.unitree.g1.blueprints.agentic.unitree_g1_agentic:unitree_g1_agentic", + "unitree-g1-agentic-sim": "dimos.robot.unitree.g1.blueprints.agentic.unitree_g1_agentic_sim:unitree_g1_agentic_sim", + "unitree-g1-basic": "dimos.robot.unitree.g1.blueprints.basic.unitree_g1_basic:unitree_g1_basic", + "unitree-g1-basic-sim": "dimos.robot.unitree.g1.blueprints.basic.unitree_g1_basic_sim:unitree_g1_basic_sim", + "unitree-g1-detection": "dimos.robot.unitree.g1.blueprints.perceptive.unitree_g1_detection:unitree_g1_detection", + "unitree-g1-full": "dimos.robot.unitree.g1.blueprints.agentic.unitree_g1_full:unitree_g1_full", + "unitree-g1-joystick": "dimos.robot.unitree.g1.blueprints.basic.unitree_g1_joystick:unitree_g1_joystick", + "unitree-g1-shm": "dimos.robot.unitree.g1.blueprints.perceptive.unitree_g1_shm:unitree_g1_shm", + "unitree-g1-sim": "dimos.robot.unitree.g1.blueprints.perceptive.unitree_g1_sim:unitree_g1_sim", + "unitree-go2": "dimos.robot.unitree.go2.blueprints.smart.unitree_go2:unitree_go2", + "unitree-go2-agentic": "dimos.robot.unitree.go2.blueprints.agentic.unitree_go2_agentic:unitree_go2_agentic", + "unitree-go2-agentic-huggingface": "dimos.robot.unitree.go2.blueprints.agentic.unitree_go2_agentic_huggingface:unitree_go2_agentic_huggingface", + "unitree-go2-agentic-mcp": "dimos.robot.unitree.go2.blueprints.agentic.unitree_go2_agentic_mcp:unitree_go2_agentic_mcp", + "unitree-go2-agentic-ollama": "dimos.robot.unitree.go2.blueprints.agentic.unitree_go2_agentic_ollama:unitree_go2_agentic_ollama", + "unitree-go2-basic": 
"dimos.robot.unitree.go2.blueprints.basic.unitree_go2_basic:unitree_go2_basic", + "unitree-go2-detection": "dimos.robot.unitree.go2.blueprints.smart.unitree_go2_detection:unitree_go2_detection", + "unitree-go2-ros": "dimos.robot.unitree.go2.blueprints.smart.unitree_go2_ros:unitree_go2_ros", + "unitree-go2-spatial": "dimos.robot.unitree.go2.blueprints.smart.unitree_go2_spatial:unitree_go2_spatial", + "unitree-go2-temporal-memory": "dimos.robot.unitree.go2.blueprints.agentic.unitree_go2_temporal_memory:unitree_go2_temporal_memory", + "unitree-go2-vlm-stream-test": "dimos.robot.unitree.go2.blueprints.smart.unitree_go2_vlm_stream_test:unitree_go2_vlm_stream_test", + "xarm-perception": "dimos.manipulation.manipulation_blueprints:xarm_perception", + "xarm-perception-agent": "dimos.manipulation.manipulation_blueprints:xarm_perception_agent", + "xarm6-planner-only": "dimos.manipulation.manipulation_blueprints:xarm6_planner_only", + "xarm7-planner-coordinator": "dimos.manipulation.manipulation_blueprints:xarm7_planner_coordinator", + "xarm7-trajectory-sim": "dimos.simulation.sim_blueprints:xarm7_trajectory_sim", } all_modules = { - "replanning_a_star_planner": "dimos.navigation.replanning_a_star.module", - "camera_module": "dimos.hardware.camera.module", - "depth_module": "dimos.robot.unitree_webrtc.depth_module", - "detection_2d": "dimos.perception.detection2d.module2D", + "agent": "dimos.agents.agent", + "arm_teleop_module": "dimos.teleop.quest.quest_extensions", + "camera_module": "dimos.hardware.sensors.camera.module", + "cartesian_motion_controller": "dimos.manipulation.control.servo_control.cartesian_motion_controller", + "control_coordinator": "dimos.control.coordinator", + "cost_mapper": "dimos.mapping.costmapper", + "demo_calculator_skill": "dimos.agents.skills.demo_calculator_skill", + "demo_robot": "dimos.agents.skills.demo_robot", + "depth_module": "dimos.robot.unitree.depth_module", + "detection3d_module": "dimos.perception.detection.module3D", + 
"detection_db_module": "dimos.perception.detection.moduleDB", + "fastlio2_module": "dimos.hardware.sensors.lidar.fastlio2.module", "foxglove_bridge": "dimos.robot.foxglove_bridge", - "g1_connection": "dimos.robot.unitree.connection.g1", - "g1_joystick": "dimos.robot.unitree_webrtc.g1_joystick_module", - "g1_skills": "dimos.robot.unitree_webrtc.unitree_g1_skill_container", + "g1_connection": "dimos.robot.unitree.g1.connection", + "g1_sim_connection": "dimos.robot.unitree.g1.sim", + "g1_skills": "dimos.robot.unitree.g1.skill_container", + "go2_connection": "dimos.robot.unitree.go2.connection", "google_maps_skill": "dimos.agents.skills.google_maps_skill_container", "gps_nav_skill": "dimos.agents.skills.gps_nav_skill", - "human_input": "dimos.agents.cli.human", - "keyboard_teleop": "dimos.robot.unitree_webrtc.keyboard_teleop", - "llm_agent": "dimos.agents.agent", - "mapper": "dimos.robot.unitree_webrtc.type.map", + "grasping_module": "dimos.manipulation.grasping.grasping", + "joint_trajectory_controller": "dimos.manipulation.control.trajectory_controller.joint_trajectory_controller", + "keyboard_teleop": "dimos.robot.unitree.keyboard_teleop", + "keyboard_teleop_module": "dimos.teleop.keyboard.keyboard_teleop_module", + "manipulation_module": "dimos.manipulation.manipulation_module", + "mapper": "dimos.robot.unitree.type.map", + "mid360_module": "dimos.hardware.sensors.lidar.livox.module", "navigation_skill": "dimos.agents.skills.navigation", + "object_scene_registration_module": "dimos.perception.object_scene_registration", "object_tracking": "dimos.perception.object_tracker", "osm_skill": "dimos.agents.skills.osm", + "person_follow_skill": "dimos.agents.skills.person_follow", + "person_tracker_module": "dimos.perception.detection.person_tracker", + "phone_teleop_module": "dimos.teleop.phone.phone_teleop_module", + "quest_teleop_module": "dimos.teleop.quest.quest_teleop_module", + "realsense_camera": "dimos.hardware.sensors.camera.realsense.camera", + 
"replanning_a_star_planner": "dimos.navigation.replanning_a_star.module", + "rerun_bridge": "dimos.visualization.rerun.bridge", "ros_nav": "dimos.navigation.rosnav", + "simple_phone_teleop_module": "dimos.teleop.phone.phone_extensions", + "simulation": "dimos.simulation.manipulators.sim_module", "spatial_memory": "dimos.perception.spatial_perception", "speak_skill": "dimos.agents.skills.speak_skill", - "unitree_skills": "dimos.robot.unitree_webrtc.unitree_skill_container", + "temporal_memory": "dimos.perception.experimental.temporal_memory.temporal_memory", + "twist_teleop_module": "dimos.teleop.quest.quest_extensions", + "unitree_skills": "dimos.robot.unitree.unitree_skill_container", "utilization": "dimos.utils.monitoring", + "visualizing_teleop_module": "dimos.teleop.quest.quest_extensions", + "vlm_agent": "dimos.agents.vlm_agent", + "vlm_stream_tester": "dimos.agents.vlm_stream_tester", + "voxel_mapper": "dimos.mapping.voxels", "wavefront_frontier_explorer": "dimos.navigation.frontier_exploration.wavefront_frontier_goal_selector", + "web_input": "dimos.agents.web_human_input", "websocket_vis": "dimos.web.websocket_vis.websocket_vis_module", - "web_input": "dimos.agents.cli.web", - # Control orchestrator module - "control_orchestrator": "dimos.control.orchestrator", + "zed_camera": "dimos.hardware.sensors.camera.zed.camera", } - - -def get_blueprint_by_name(name: str) -> ModuleBlueprintSet: - if name not in all_blueprints: - raise ValueError(f"Unknown blueprint set name: {name}") - module_path, attr = all_blueprints[name].split(":") - module = __import__(module_path, fromlist=[attr]) - return getattr(module, attr) # type: ignore[no-any-return] - - -def get_module_by_name(name: str) -> ModuleBlueprintSet: - if name not in all_modules: - raise ValueError(f"Unknown module name: {name}") - python_module = __import__(all_modules[name], fromlist=[name]) - return getattr(python_module, name)() # type: ignore[no-any-return] diff --git a/dimos/robot/cli/dimos.py 
b/dimos/robot/cli/dimos.py index a000502abc..c390d3b76c 100644 --- a/dimos/robot/cli/dimos.py +++ b/dimos/robot/cli/dimos.py @@ -15,16 +15,13 @@ from enum import Enum import inspect import sys -from typing import Any, Optional, get_args, get_origin +from typing import Any, get_args, get_origin +from dotenv import load_dotenv import typer -from dimos.core.blueprints import autoconnect -from dimos.core.global_config import GlobalConfig -from dimos.protocol import pubsub -from dimos.robot.all_blueprints import all_blueprints, get_blueprint_by_name, get_module_by_name -from dimos.robot.cli.topic import topic_echo, topic_send -from dimos.utils.logging_config import setup_exception_handler +from dimos.core.global_config import GlobalConfig, global_config +from dimos.robot.all_blueprints import all_blueprints RobotType = Enum("RobotType", {key.replace("-", "_").upper(): key for key in all_blueprints.keys()}) # type: ignore[misc] @@ -33,6 +30,8 @@ no_args_is_help=True, ) +load_dotenv() + def create_dynamic_callback(): # type: ignore[no-untyped-def] fields = GlobalConfig.model_fields @@ -48,7 +47,7 @@ def create_dynamic_callback(): # type: ignore[no-untyped-def] # Handle Optional types # Check for Optional/Union with None - if get_origin(field_type) is type(Optional[str]): # noqa: UP045 + if get_origin(field_type) is type(str | None): inner_types = get_args(field_type) if len(inner_types) == 2 and type(None) in inner_types: # It's Optional[T], get the actual type T @@ -72,7 +71,7 @@ def create_dynamic_callback(): # type: ignore[no-untyped-def] f"--{cli_option_name}/--no-{cli_option_name}", help=f"Override {field_name} in GlobalConfig", ), - annotation=Optional[bool], # noqa: UP045 + annotation=bool | None, ) else: # For non-boolean fields, use regular option @@ -84,7 +83,7 @@ def create_dynamic_callback(): # type: ignore[no-untyped-def] f"--{cli_option_name}", help=f"Override {field_name} in GlobalConfig", ), - annotation=Optional[actual_type], # noqa: UP045 + 
annotation=actual_type | None, ) params.append(param) @@ -109,9 +108,15 @@ def run( ), ) -> None: """Start a robot blueprint""" + from dimos.core.blueprints import autoconnect + from dimos.protocol import pubsub + from dimos.robot.get_all_blueprints import get_blueprint_by_name, get_module_by_name + from dimos.utils.logging_config import setup_exception_handler + setup_exception_handler() cli_config_overrides: dict[str, Any] = ctx.obj + global_config.update(**cli_config_overrides) pubsub.lcm.autoconf() # type: ignore[attr-defined] blueprint = get_blueprint_by_name(robot_type.value) @@ -126,16 +131,19 @@ def run( @main.command() def show_config(ctx: typer.Context) -> None: """Show current config settings and their values.""" + cli_config_overrides: dict[str, Any] = ctx.obj - config = GlobalConfig().model_copy(update=cli_config_overrides) + global_config.update(**cli_config_overrides) - for field_name, value in config.model_dump().items(): + for field_name, value in global_config.model_dump().items(): typer.echo(f"{field_name}: {value}") @main.command() def list() -> None: """List all available blueprints.""" + from dimos.robot.all_blueprints import all_blueprints + blueprints = [name for name in all_blueprints.keys() if not name.startswith("demo-")] for blueprint_name in sorted(blueprints): typer.echo(blueprint_name) @@ -150,15 +158,6 @@ def lcmspy(ctx: typer.Context) -> None: lcmspy_main() -@main.command(context_settings={"allow_extra_args": True, "ignore_unknown_options": True}) -def skillspy(ctx: typer.Context) -> None: - """Skills spy tool for monitoring skills.""" - from dimos.utils.cli.skillspy.skillspy import main as skillspy_main - - sys.argv = ["skillspy", *ctx.args] - skillspy_main() - - @main.command(context_settings={"allow_extra_args": True, "ignore_unknown_options": True}) def agentspy(ctx: typer.Context) -> None: """Agent spy tool for monitoring agents.""" @@ -189,6 +188,8 @@ def echo( help="Optional message type (e.g., PoseStamped). 
If omitted, infer from '/topic#pkg.Msg'.", ), ) -> None: + from dimos.robot.cli.topic import topic_echo + topic_echo(topic, type_name) @@ -197,8 +198,25 @@ def send( topic: str = typer.Argument(..., help="Topic name to send to (e.g., /goal_request)"), message_expr: str = typer.Argument(..., help="Python expression for the message"), ) -> None: + from dimos.robot.cli.topic import topic_send + topic_send(topic, message_expr) +@main.command(name="rerun-bridge") +def rerun_bridge_cmd( + viewer_mode: str = typer.Option( + "native", help="Viewer mode: native (desktop), web (browser), none (headless)" + ), + memory_limit: str = typer.Option( + "25%", help="Memory limit for Rerun viewer (e.g., '4GB', '16GB', '25%')" + ), +) -> None: + """Launch the Rerun visualization bridge.""" + from dimos.visualization.rerun.bridge import run_bridge + + run_bridge(viewer_mode=viewer_mode, memory_limit=memory_limit) + + if __name__ == "__main__": main() diff --git a/dimos/robot/cli/topic.py b/dimos/robot/cli/topic.py index 582099c4b6..1f7ada4f28 100644 --- a/dimos/robot/cli/topic.py +++ b/dimos/robot/cli/topic.py @@ -21,7 +21,7 @@ import typer from dimos.core.transport import LCMTransport, pLCMTransport -from dimos.protocol.pubsub.lcmpubsub import LCMPubSubBase +from dimos.protocol.pubsub.impl.lcmpubsub import LCMPubSubBase _modules_to_try = [ "dimos.msgs.geometry_msgs", diff --git a/dimos/robot/drone/README.md b/dimos/robot/drone/README.md index fbd7ddf2ae..6e8ceb4d63 100644 --- a/dimos/robot/drone/README.md +++ b/dimos/robot/drone/README.md @@ -265,9 +265,9 @@ Connect Foxglove Studio to `ws://localhost:8765` to see: ## Development ### Adding New Skills -Add to `connection_module.py` with `@skill()` decorator: +Add to `connection_module.py` with `@skill` decorator: ```python -@skill() +@skill def my_skill(self, param: float) -> str: """Skill description for LLM.""" # Implementation diff --git a/dimos/robot/drone/__init__.py b/dimos/robot/drone/__init__.py index 5d4eed4dae..1ed8521b8b 
100644 --- a/dimos/robot/drone/__init__.py +++ b/dimos/robot/drone/__init__.py @@ -14,9 +14,14 @@ """Generic drone module for MAVLink-based drones.""" -from .camera_module import DroneCameraModule -from .connection_module import DroneConnectionModule -from .drone import Drone -from .mavlink_connection import MavlinkConnection +import lazy_loader as lazy -__all__ = ["Drone", "DroneCameraModule", "DroneConnectionModule", "MavlinkConnection"] +__getattr__, __dir__, __all__ = lazy.attach( + __name__, + submod_attrs={ + "camera_module": ["DroneCameraModule"], + "connection_module": ["DroneConnectionModule"], + "drone": ["Drone"], + "mavlink_connection": ["MavlinkConnection"], + }, +) diff --git a/dimos/robot/drone/camera_module.py b/dimos/robot/drone/camera_module.py index 7806c3eab8..8ba88fd028 100644 --- a/dimos/robot/drone/camera_module.py +++ b/dimos/robot/drone/camera_module.py @@ -98,11 +98,11 @@ def __init__( logger.info(f"DroneCameraModule initialized with intrinsics: {camera_intrinsics}") @rpc - def start(self) -> bool: + def start(self) -> None: """Start the camera module.""" if self._running: logger.warning("Camera module already running") - return True + return # Start processing thread for depth (which will init Metric3D and handle video) self._running = True @@ -111,7 +111,7 @@ def start(self) -> bool: self._processing_thread.start() logger.info("Camera module started") - return True + return def _on_video_frame(self, frame: Image) -> None: """Handle incoming video frame.""" diff --git a/dimos/robot/drone/connection_module.py b/dimos/robot/drone/connection_module.py index 865d98c3d3..db5c4ca4cc 100644 --- a/dimos/robot/drone/connection_module.py +++ b/dimos/robot/drone/connection_module.py @@ -24,12 +24,11 @@ from dimos_lcm.std_msgs import String from reactivex.disposable import CompositeDisposable, Disposable +from dimos.agents.annotation import skill from dimos.core import In, Module, Out, rpc from dimos.mapping.types import LatLon from 
dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Transform, Twist, Vector3 from dimos.msgs.sensor_msgs import Image -from dimos.protocol.skill.skill import skill -from dimos.protocol.skill.type import Output from dimos.robot.drone.dji_video_stream import DJIDroneVideoStream from dimos.robot.drone.mavlink_connection import MavlinkConnection from dimos.utils.logging_config import setup_logger @@ -101,7 +100,7 @@ def __init__( Module.__init__(self, *args, **kwargs) @rpc - def start(self) -> bool: + def start(self) -> None: """Start the connection and subscribe to sensor streams.""" # Check for replay mode if self.connection_string == "replay": @@ -118,7 +117,7 @@ def start(self) -> bool: if not self.connection.connected: logger.error("Failed to connect to drone") - return False + return # Start video stream (already created above) if self.video_stream.start(): @@ -170,7 +169,7 @@ def start(self) -> bool: self._telemetry_thread.start() logger.info("Drone connection module started") - return True + return def _store_and_publish_frame(self, frame: Image) -> None: """Store the latest video frame and publish it.""" @@ -267,7 +266,7 @@ def get_status(self) -> dict[str, Any]: """ return self._status.copy() - @skill() + @skill def move(self, vector: Vector3, duration: float = 0.0) -> None: """Send movement command to drone. @@ -287,7 +286,7 @@ def move(self, vector: Vector3, duration: float = 0.0) -> None: ) self.connection.move(vector, duration) - @skill() + @skill def takeoff(self, altitude: float = 3.0) -> bool: """Takeoff to specified altitude. @@ -301,7 +300,7 @@ def takeoff(self, altitude: float = 3.0) -> bool: return self.connection.takeoff(altitude) return False - @skill() + @skill def land(self) -> bool: """Land the drone. @@ -312,7 +311,7 @@ def land(self) -> bool: return self.connection.land() return False - @skill() + @skill def arm(self) -> bool: """Arm the drone. 
@@ -323,7 +322,7 @@ def arm(self) -> bool: return self.connection.arm() return False - @skill() + @skill def disarm(self) -> bool: """Disarm the drone. @@ -334,7 +333,7 @@ def disarm(self) -> bool: return self.connection.disarm() return False - @skill() + @skill def set_mode(self, mode: str) -> bool: """Set flight mode. @@ -363,7 +362,7 @@ def move_twist(self, twist: Twist, duration: float = 0.0, lock_altitude: bool = return self.connection.move_twist(twist, duration, lock_altitude) return False - @skill() + @skill def is_flying_to_target(self) -> bool: """Check if drone is currently flying to a GPS target. @@ -374,7 +373,7 @@ def is_flying_to_target(self) -> bool: return self.connection.is_flying_to_target return False - @skill() + @skill def fly_to(self, lat: float, lon: float, alt: float) -> str: """Fly drone to GPS coordinates (blocking operation). @@ -390,7 +389,7 @@ def fly_to(self, lat: float, lon: float, alt: float) -> str: return self.connection.fly_to(lat, lon, alt) return "Failed: No connection to drone" - @skill() + @skill def follow_object( self, object_description: str, duration: float = 120.0 ) -> Generator[str, None, None]: @@ -479,7 +478,7 @@ def stop(self) -> None: # Call parent stop to clean up Module infrastructure (event loop, LCM, disposables, etc.) super().stop() - @skill(output=Output.image) + @skill def observe(self) -> Image | None: """Returns the latest video frame from the drone camera. Use this skill for any visual world queries. 
diff --git a/dimos/robot/drone/drone.py b/dimos/robot/drone/drone.py index d2e2f3ee0e..8e72d56ed1 100644 --- a/dimos/robot/drone/drone.py +++ b/dimos/robot/drone/drone.py @@ -20,7 +20,6 @@ import functools import logging import os -import time from typing import Any from dimos_lcm.sensor_msgs import CameraInfo @@ -28,8 +27,11 @@ from reactivex import Observable from dimos import core +from dimos.agents.agent import agent from dimos.agents.skills.google_maps_skill_container import GoogleMapsSkillContainer from dimos.agents.skills.osm import OsmSkill +from dimos.agents.web_human_input import web_input +from dimos.core.blueprints import Blueprint, autoconnect from dimos.mapping.types import LatLon from dimos.msgs.geometry_msgs import PoseStamped, Twist, Vector3 from dimos.msgs.sensor_msgs import Image @@ -271,7 +273,8 @@ def get_odom(self) -> PoseStamped | None: @functools.cached_property def gps_position_stream(self) -> Observable[LatLon]: assert self.connection is not None - return self.connection.gps_location.transport.pure_observable() + result: Observable[LatLon] = self.connection.gps_location.transport.pure_observable() + return result def get_status(self) -> dict[str, Any]: """Get drone status. @@ -394,6 +397,51 @@ def stop(self) -> None: logger.info("Drone system stopped") +DRONE_SYSTEM_PROMPT = """\ +You are controlling a DJI drone with MAVLink interface. +You have access to drone control skills you are already flying so only run move_twist, set_mode, and fly_to. +When the user gives commands, use the appropriate skills to control the drone. +Always confirm actions and report results. Send fly_to commands only at above 200 meters altitude to be safe. 
+Here are some GPS locations to remember +6th and Natoma intersection: 37.78019978319006, -122.40770815020853, +454 Natoma (Office): 37.780967465525244, -122.40688342010769 +5th and mission intersection: 37.782598539339695, -122.40649441875473 +6th and mission intersection: 37.781007204789354, -122.40868447123661""" + + +def drone_agentic( + connection_string: str = "udp:0.0.0.0:14550", + video_port: int = 5600, + outdoor: bool = False, + camera_intrinsics: list[float] | None = None, + system_prompt: str = DRONE_SYSTEM_PROMPT, + model: str = "gpt-4o", +) -> Blueprint: + if camera_intrinsics is None: + camera_intrinsics = [1000.0, 1000.0, 960.0, 540.0] + + return autoconnect( + DroneConnectionModule.blueprint( + connection_string=connection_string, + video_port=video_port, + outdoor=outdoor, + ), + DroneCameraModule.blueprint(camera_intrinsics=camera_intrinsics), + DroneTrackingModule.blueprint(outdoor=outdoor), + WebsocketVisModule.blueprint(), + FoxgloveBridge.blueprint(), + GoogleMapsSkillContainer.blueprint(), + OsmSkill.blueprint(), + agent(system_prompt=system_prompt, model=model), + web_input(), + ).remappings( + [ + (DroneTrackingModule, "video_input", "video"), + (DroneTrackingModule, "cmd_vel", "movecmd_twist"), + ] + ) + + def main() -> None: """Main entry point for drone system.""" import argparse @@ -434,67 +482,13 @@ def main() -> None: pubsub.lcm.autoconf() # type: ignore[attr-defined] - drone = Drone(connection_string=connection, video_port=video_port, outdoor=args.outdoor) - - drone.start() - - print("\n✓ Drone system started successfully!") - print("\nLCM Topics:") - print(" • /drone/odom - Odometry (PoseStamped)") - print(" • /drone/status - Status (String/JSON)") - print(" • /drone/telemetry - Full telemetry (String/JSON)") - print(" • /drone/color_image - RGB Video (Image)") - print(" • /drone/depth_image - Depth estimation (Image)") - print(" • /drone/depth_colorized - Colorized depth (Image)") - print(" • /drone/camera_info - Camera 
calibration") - print(" • /drone/cmd_vel - Movement commands (Vector3)") - print(" • /drone/tracking_overlay - Object tracking visualization (Image)") - print(" • /drone/tracking_status - Tracking status (String/JSON)") - - from dimos.agents import Agent # type: ignore[attr-defined] - from dimos.agents.cli.human import HumanInput - from dimos.agents.spec import Model, Provider - - assert drone.dimos is not None - human_input = drone.dimos.deploy(HumanInput) # type: ignore[attr-defined] - google_maps = drone.dimos.deploy(GoogleMapsSkillContainer) # type: ignore[attr-defined] - osm_skill = drone.dimos.deploy(OsmSkill) # type: ignore[attr-defined] - - google_maps.gps_location.transport = core.pLCMTransport("/gps_location") - osm_skill.gps_location.transport = core.pLCMTransport("/gps_location") - - agent = Agent( - system_prompt="""You are controlling a DJI drone with MAVLink interface. - You have access to drone control skills you are already flying so only run move_twist, set_mode, and fly_to. - When the user gives commands, use the appropriate skills to control the drone. - Always confirm actions and report results. Send fly_to commands only at above 200 meters altitude to be safe. 
- Here are some GPS locations to remember - 6th and Natoma intersection: 37.78019978319006, -122.40770815020853, - 454 Natoma (Office): 37.780967465525244, -122.40688342010769 - 5th and mission intersection: 37.782598539339695, -122.40649441875473 - 6th and mission intersection: 37.781007204789354, -122.40868447123661""", - model=Model.GPT_4O, - provider=Provider.OPENAI, # type: ignore[attr-defined] + blueprint = drone_agentic( + connection_string=connection, + video_port=video_port, + outdoor=args.outdoor, ) - agent.register_skills(drone.connection) - agent.register_skills(human_input) - agent.register_skills(google_maps) - agent.register_skills(osm_skill) - agent.run_implicit_skill("human") - - agent.start() - agent.loop_thread() - - # Testing - # from dimos_lcm.geometry_msgs import Twist,Vector3 - # twist = Twist() - # twist.linear = Vector3(-0.5, 0.5, 0.5) - # drone.connection.move_twist(twist, duration=2.0, lock_altitude=True) - # time.sleep(10) - # drone.tracking.track_object("water bottle") - while True: - time.sleep(1) + blueprint.build().loop() if __name__ == "__main__": diff --git a/dimos/robot/drone/drone_tracking_module.py b/dimos/robot/drone/drone_tracking_module.py index e6560142d1..e1b633a05b 100644 --- a/dimos/robot/drone/drone_tracking_module.py +++ b/dimos/robot/drone/drone_tracking_module.py @@ -23,6 +23,7 @@ import cv2 from dimos_lcm.std_msgs import String import numpy as np +from numpy.typing import NDArray from dimos.core import In, Module, Out, rpc from dimos.models.qwen.video_query import get_bbox_from_qwen_frame @@ -113,7 +114,7 @@ def _get_latest_frame(self) -> np.ndarray[Any, np.dtype[Any]] | None: return data @rpc - def start(self) -> bool: + def start(self) -> None: """Start the tracking module and subscribe to video input.""" if self.video_input.transport: self.video_input.subscribe(self._on_new_frame) @@ -124,7 +125,7 @@ def start(self) -> bool: if self.follow_object_cmd.transport: 
self.follow_object_cmd.subscribe(self._on_follow_object_cmd) - return True + return @rpc def stop(self) -> None: @@ -308,10 +309,10 @@ def _visual_servoing_loop(self, tracker: Any, duration: float) -> None: def _draw_tracking_overlay( self, - frame: np.ndarray[Any, np.dtype[Any]], + frame: NDArray[np.uint8], bbox: tuple[int, int, int, int], center: tuple[int, int], - ) -> np.ndarray[Any, np.dtype[Any]]: + ) -> NDArray[np.uint8]: # type: ignore[type-arg] """Draw tracking visualization overlay. Args: @@ -322,7 +323,7 @@ def _draw_tracking_overlay( Returns: Frame with overlay drawn """ - overlay = frame.copy() + overlay: NDArray[np.uint8] = frame.copy() # type: ignore[type-arg] x, y, w, h = bbox # Draw tracking box (green) diff --git a/dimos/robot/drone/test_drone.py b/dimos/robot/drone/test_drone.py index bfbaa9ed54..d9075beae3 100644 --- a/dimos/robot/drone/test_drone.py +++ b/dimos/robot/drone/test_drone.py @@ -264,9 +264,8 @@ def test_connection_module_replay_mode(self) -> None: try: # Start should use Fake classes - result = module.start() + module.start() - self.assertTrue(result) mock_fake_conn.assert_called_once_with("replay") mock_fake_video.assert_called_once() finally: @@ -380,20 +379,19 @@ def replay_side_effect(store_name: str): try: print("\n[TEST] Starting connection module in replay mode...") - result = module.start() + module.start() # Give time for messages to process import time time.sleep(0.1) - print(f"\n[TEST] Module started: {result}") + print("\n[TEST] Module started") print(f"[TEST] Total odom messages published: {len(published_odom)}") print(f"[TEST] Total video frames published: {len(published_video)}") print(f"[TEST] Total status messages published: {len(published_status)}") # Verify module started and is processing messages - self.assertTrue(result) self.assertIsNotNone(module.connection) self.assertIsNotNone(module.video_stream) @@ -877,8 +875,7 @@ def replay_stream_subscribe(callback) -> None: module.movecmd = MagicMock() # Start module 
- result = module.start() - self.assertTrue(result) + module.start() # Give time for processing time.sleep(0.2) diff --git a/dimos/robot/get_all_blueprints.py b/dimos/robot/get_all_blueprints.py new file mode 100644 index 0000000000..8658e4f4ec --- /dev/null +++ b/dimos/robot/get_all_blueprints.py @@ -0,0 +1,31 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dimos.core.blueprints import Blueprint +from dimos.robot.all_blueprints import all_blueprints, all_modules + + +def get_blueprint_by_name(name: str) -> Blueprint: + if name not in all_blueprints: + raise ValueError(f"Unknown blueprint set name: {name}") + module_path, attr = all_blueprints[name].split(":") + module = __import__(module_path, fromlist=[attr]) + return getattr(module, attr) # type: ignore[no-any-return] + + +def get_module_by_name(name: str) -> Blueprint: + if name not in all_modules: + raise ValueError(f"Unknown module name: {name}") + python_module = __import__(all_modules[name], fromlist=[name]) + return getattr(python_module, name)() # type: ignore[no-any-return] diff --git a/dimos/robot/manipulators/__init__.py b/dimos/robot/manipulators/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dimos/robot/manipulators/piper/__init__.py b/dimos/robot/manipulators/piper/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dimos/robot/manipulators/piper/blueprints.py b/dimos/robot/manipulators/piper/blueprints.py 
new file mode 100644 index 0000000000..68e02fc994 --- /dev/null +++ b/dimos/robot/manipulators/piper/blueprints.py @@ -0,0 +1,97 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Keyboard teleop blueprint for the Piper arm. + +Launches the ControlCoordinator (mock adapter + CartesianIK), the +ManipulationModule (Drake/Meshcat visualization), and a pygame keyboard +teleop UI — all wired together via autoconnect. + +Usage: + dimos run keyboard-teleop-piper +""" + +from dimos.control.components import HardwareComponent, HardwareType, make_joints +from dimos.control.coordinator import TaskConfig, control_coordinator +from dimos.core.blueprints import autoconnect +from dimos.core.transport import LCMTransport +from dimos.manipulation.manipulation_module import manipulation_module +from dimos.manipulation.planning.spec import RobotModelConfig +from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Vector3 +from dimos.msgs.sensor_msgs import JointState +from dimos.teleop.keyboard.keyboard_teleop_module import keyboard_teleop_module +from dimos.utils.data import LfsPath, get_data + +_PIPER_MODEL_PATH = LfsPath("piper_description/mujoco_model/piper_no_gripper_description.xml") +_PIPER_DATA = get_data("piper_description") + +# Piper 6-DOF mock sim + keyboard teleop + Drake visualization +keyboard_teleop_piper = autoconnect( + keyboard_teleop_module(model_path=_PIPER_MODEL_PATH, ee_joint_id=6), + control_coordinator( + 
tick_rate=100.0, + publish_joint_state=True, + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="mock", + ), + ], + tasks=[ + TaskConfig( + name="cartesian_ik_arm", + type="cartesian_ik", + joint_names=[f"arm_joint{i + 1}" for i in range(6)], + priority=10, + model_path=_PIPER_MODEL_PATH, + ee_joint_id=6, + ), + ], + ), + manipulation_module( + robots=[ + RobotModelConfig( + name="arm", + urdf_path=_PIPER_DATA / "urdf" / "piper_description.xacro", + base_pose=PoseStamped( + position=Vector3(x=0.0, y=0.0, z=0.0), + orientation=Quaternion(0.0, 0.0, 0.0, 1.0), + ), + joint_names=["joint1", "joint2", "joint3", "joint4", "joint5", "joint6"], + end_effector_link="gripper_base", + base_link="base_link", + package_paths={ + "piper_description": _PIPER_DATA, + "piper_gazebo": _PIPER_DATA, # xacro refs $(find piper_gazebo); unused by Drake + }, + joint_name_mapping={f"arm_joint{i}": f"joint{i}" for i in range(1, 7)}, + auto_convert_meshes=True, + home_joints=[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), + ], + enable_viz=True, + ), +).transports( + { + ("cartesian_command", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + } +) + +__all__ = ["keyboard_teleop_piper"] diff --git a/dimos/robot/manipulators/xarm/__init__.py b/dimos/robot/manipulators/xarm/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dimos/robot/manipulators/xarm/blueprints.py b/dimos/robot/manipulators/xarm/blueprints.py new file mode 100644 index 0000000000..9043e71e3b --- /dev/null +++ b/dimos/robot/manipulators/xarm/blueprints.py @@ -0,0 +1,121 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Keyboard teleop blueprints for XArm6 and XArm7. + +Launches the ControlCoordinator (mock adapter + CartesianIK), the +ManipulationModule (Drake/Meshcat visualization), and a pygame keyboard +teleop UI — all wired together via autoconnect. + +Usage: + dimos run keyboard-teleop-xarm6 + dimos run keyboard-teleop-xarm7 +""" + +from dimos.control.components import HardwareComponent, HardwareType, make_joints +from dimos.control.coordinator import TaskConfig, control_coordinator +from dimos.core.blueprints import autoconnect +from dimos.core.transport import LCMTransport +from dimos.manipulation.manipulation_blueprints import ( + _make_xarm6_config, + _make_xarm7_config, +) +from dimos.manipulation.manipulation_module import manipulation_module +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.msgs.sensor_msgs import JointState +from dimos.teleop.keyboard.keyboard_teleop_module import keyboard_teleop_module +from dimos.utils.data import LfsPath + +_XARM6_MODEL_PATH = LfsPath("xarm_description/urdf/xarm6/xarm6.urdf") +_XARM7_MODEL_PATH = LfsPath("xarm_description/urdf/xarm7/xarm7.urdf") + +# XArm6 mock sim + keyboard teleop + Drake visualization +keyboard_teleop_xarm6 = autoconnect( + keyboard_teleop_module(model_path=_XARM6_MODEL_PATH, ee_joint_id=6), + control_coordinator( + tick_rate=100.0, + publish_joint_state=True, + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), + adapter_type="mock", + ), + ], + tasks=[ + TaskConfig( + 
name="cartesian_ik_arm", + type="cartesian_ik", + joint_names=[f"arm_joint{i + 1}" for i in range(6)], + priority=10, + model_path=_XARM6_MODEL_PATH, + ee_joint_id=6, + ), + ], + ), + manipulation_module( + robots=[_make_xarm6_config(name="arm", joint_prefix="arm_", add_gripper=False)], + enable_viz=True, + ), +).transports( + { + ("cartesian_command", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + } +) + +# XArm7 mock sim + keyboard teleop + Drake visualization +keyboard_teleop_xarm7 = autoconnect( + keyboard_teleop_module(model_path=_XARM7_MODEL_PATH, ee_joint_id=7), + control_coordinator( + tick_rate=100.0, + publish_joint_state=True, + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="arm", + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 7), + adapter_type="mock", + ), + ], + tasks=[ + TaskConfig( + name="cartesian_ik_arm", + type="cartesian_ik", + joint_names=[f"arm_joint{i + 1}" for i in range(7)], + priority=10, + model_path=_XARM7_MODEL_PATH, + ee_joint_id=7, + ), + ], + ), + manipulation_module( + robots=[_make_xarm7_config(name="arm", joint_prefix="arm_", add_gripper=False)], + enable_viz=True, + ), +).transports( + { + ("cartesian_command", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + } +) + +__all__ = ["keyboard_teleop_xarm6", "keyboard_teleop_xarm7"] diff --git a/dimos/robot/ros_bridge.py b/dimos/robot/ros_bridge.py deleted file mode 100644 index 48d201ca32..0000000000 --- a/dimos/robot/ros_bridge.py +++ /dev/null @@ -1,210 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from enum import Enum -import logging -import threading -from typing import Any - -try: - import rclpy - from rclpy.executors import SingleThreadedExecutor - from rclpy.node import Node - from rclpy.qos import ( - QoSDurabilityPolicy, - QoSHistoryPolicy, - QoSProfile, - QoSReliabilityPolicy, - ) -except ImportError: - rclpy = None # type: ignore[assignment] - SingleThreadedExecutor = None # type: ignore[assignment, misc] - Node = None # type: ignore[assignment, misc] - QoSProfile = None # type: ignore[assignment, misc] - QoSReliabilityPolicy = None # type: ignore[assignment, misc] - QoSHistoryPolicy = None # type: ignore[assignment, misc] - QoSDurabilityPolicy = None # type: ignore[assignment, misc] - -from dimos.core.resource import Resource -from dimos.protocol.pubsub.lcmpubsub import LCM, Topic -from dimos.utils.logging_config import setup_logger - -logger = setup_logger(level=logging.INFO) - - -class BridgeDirection(Enum): - """Direction of message bridging.""" - - ROS_TO_DIMOS = "ros_to_dimos" - DIMOS_TO_ROS = "dimos_to_ros" - - -class ROSBridge(Resource): - """Unidirectional bridge between ROS and DIMOS for message passing.""" - - def __init__(self, node_name: str = "dimos_ros_bridge") -> None: - """Initialize the ROS-DIMOS bridge. 
- - Args: - node_name: Name for the ROS node (default: "dimos_ros_bridge") - """ - if not rclpy.ok(): # type: ignore[attr-defined] - rclpy.init() - - self.node = Node(node_name) - self.lcm = LCM() - self.lcm.start() - - self._executor = SingleThreadedExecutor() - self._executor.add_node(self.node) - - self._spin_thread = threading.Thread(target=self._ros_spin, daemon=True) - self._spin_thread.start() # TODO: don't forget to shut it down - - self._bridges: dict[str, dict[str, Any]] = {} - - self._qos = QoSProfile( # type: ignore[no-untyped-call] - reliability=QoSReliabilityPolicy.RELIABLE, - history=QoSHistoryPolicy.KEEP_LAST, - durability=QoSDurabilityPolicy.VOLATILE, - depth=10, - ) - - logger.info(f"ROSBridge initialized with node name: {node_name}") - - def start(self) -> None: - pass - - def stop(self) -> None: - """Shutdown the bridge and clean up resources.""" - self._executor.shutdown() - self.node.destroy_node() # type: ignore[no-untyped-call] - - if rclpy.ok(): # type: ignore[attr-defined] - rclpy.shutdown() - - logger.info("ROSBridge shutdown complete") - - def _ros_spin(self) -> None: - """Background thread for spinning ROS executor.""" - try: - self._executor.spin() - finally: - self._executor.shutdown() - - def add_topic( - self, - topic_name: str, - dimos_type: type, - ros_type: type, - direction: BridgeDirection, - remap_topic: str | None = None, - ) -> None: - """Add unidirectional bridging for a topic. 
- - Args: - topic_name: Name of the topic (e.g., "/cmd_vel") - dimos_type: DIMOS message type (e.g., dimos.msgs.geometry_msgs.Twist) - ros_type: ROS message type (e.g., geometry_msgs.msg.Twist) - direction: Direction of bridging (ROS_TO_DIMOS or DIMOS_TO_ROS) - remap_topic: Optional remapped topic name for the other side - """ - if topic_name in self._bridges: - logger.warning(f"Topic {topic_name} already bridged") - return - - # Determine actual topic names for each side - ros_topic_name = topic_name - dimos_topic_name = topic_name - - if remap_topic: - if direction == BridgeDirection.ROS_TO_DIMOS: - dimos_topic_name = remap_topic - else: # DIMOS_TO_ROS - ros_topic_name = remap_topic - - # Create DIMOS/LCM topic - dimos_topic = Topic(dimos_topic_name, dimos_type) - - ros_subscription = None - ros_publisher = None - dimos_subscription = None - - if direction == BridgeDirection.ROS_TO_DIMOS: - - def ros_callback(msg) -> None: # type: ignore[no-untyped-def] - self._ros_to_dimos(msg, dimos_topic, dimos_type, topic_name) - - ros_subscription = self.node.create_subscription( - ros_type, ros_topic_name, ros_callback, self._qos - ) - logger.info(f" ROS → DIMOS: Subscribing to ROS topic {ros_topic_name}") - - elif direction == BridgeDirection.DIMOS_TO_ROS: - ros_publisher = self.node.create_publisher(ros_type, ros_topic_name, self._qos) - - def dimos_callback(msg, _topic) -> None: # type: ignore[no-untyped-def] - self._dimos_to_ros(msg, ros_publisher, topic_name) - - dimos_subscription = self.lcm.subscribe(dimos_topic, dimos_callback) - logger.info(f" DIMOS → ROS: Subscribing to DIMOS topic {dimos_topic_name}") - else: - raise ValueError(f"Invalid bridge direction: {direction}") - - self._bridges[topic_name] = { - "dimos_topic": dimos_topic, - "dimos_type": dimos_type, - "ros_type": ros_type, - "ros_subscription": ros_subscription, - "ros_publisher": ros_publisher, - "dimos_subscription": dimos_subscription, - "direction": direction, - "ros_topic_name": ros_topic_name, - 
"dimos_topic_name": dimos_topic_name, - } - - direction_str = { - BridgeDirection.ROS_TO_DIMOS: "ROS → DIMOS", - BridgeDirection.DIMOS_TO_ROS: "DIMOS → ROS", - }[direction] - - logger.info(f"Bridged topic: {topic_name} ({direction_str})") - if remap_topic: - logger.info(f" Remapped: ROS '{ros_topic_name}' ↔ DIMOS '{dimos_topic_name}'") - logger.info(f" DIMOS type: {dimos_type.__name__}, ROS type: {ros_type.__name__}") - - def _ros_to_dimos( - self, ros_msg: Any, dimos_topic: Topic, dimos_type: type, _topic_name: str - ) -> None: - """Convert ROS message to DIMOS and publish. - - Args: - ros_msg: ROS message - dimos_topic: DIMOS topic to publish to - dimos_type: DIMOS message type - topic_name: Name of the topic for tracking - """ - dimos_msg = dimos_type.from_ros_msg(ros_msg) # type: ignore[attr-defined] - self.lcm.publish(dimos_topic, dimos_msg) - - def _dimos_to_ros(self, dimos_msg: Any, ros_publisher, _topic_name: str) -> None: # type: ignore[no-untyped-def] - """Convert DIMOS message to ROS and publish. 
- - Args: - dimos_msg: DIMOS message - ros_publisher: ROS publisher to use - _topic_name: Name of the topic (unused, kept for consistency) - """ - ros_msg = dimos_msg.to_ros_msg() - ros_publisher.publish(ros_msg) diff --git a/dimos/robot/test_all_blueprints.py b/dimos/robot/test_all_blueprints.py index 7e5fa6970c..16f657393b 100644 --- a/dimos/robot/test_all_blueprints.py +++ b/dimos/robot/test_all_blueprints.py @@ -14,11 +14,12 @@ import pytest -from dimos.core.blueprints import ModuleBlueprintSet -from dimos.robot.all_blueprints import all_blueprints, get_blueprint_by_name +from dimos.core.blueprints import Blueprint +from dimos.robot.all_blueprints import all_blueprints +from dimos.robot.get_all_blueprints import get_blueprint_by_name # Optional dependencies that are allowed to be missing -OPTIONAL_DEPENDENCIES = {"pyrealsense2", "geometry_msgs", "turbojpeg"} +OPTIONAL_DEPENDENCIES = {"pyrealsense2", "pyzed", "geometry_msgs", "turbojpeg"} OPTIONAL_ERROR_SUBSTRINGS = { "Unable to locate turbojpeg library automatically", } @@ -27,7 +28,7 @@ @pytest.mark.integration @pytest.mark.parametrize("blueprint_name", all_blueprints.keys()) def test_all_blueprints_are_valid(blueprint_name: str) -> None: - """Test that all blueprints in all_blueprints are valid ModuleBlueprintSet instances.""" + """Test that all blueprints in all_blueprints are valid Blueprint instances.""" try: blueprint = get_blueprint_by_name(blueprint_name) except ModuleNotFoundError as e: @@ -39,6 +40,6 @@ def test_all_blueprints_are_valid(blueprint_name: str) -> None: if any(substring in message for substring in OPTIONAL_ERROR_SUBSTRINGS): pytest.skip(f"Skipping due to missing optional dependency: {message}") raise - assert isinstance(blueprint, ModuleBlueprintSet), ( - f"Blueprint '{blueprint_name}' is not a ModuleBlueprintSet, got {type(blueprint)}" + assert isinstance(blueprint, Blueprint), ( + f"Blueprint '{blueprint_name}' is not a Blueprint, got {type(blueprint)}" ) diff --git 
a/dimos/robot/test_all_blueprints_generation.py b/dimos/robot/test_all_blueprints_generation.py new file mode 100644 index 0000000000..e7ba79a404 --- /dev/null +++ b/dimos/robot/test_all_blueprints_generation.py @@ -0,0 +1,214 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import ast +from collections.abc import Generator +import difflib +import os +from pathlib import Path +import subprocess + +import pytest + +from dimos.constants import DIMOS_PROJECT_ROOT + +IGNORED_FILES: set[str] = { + "dimos/robot/all_blueprints.py", + "dimos/robot/get_all_blueprints.py", + "dimos/robot/test_all_blueprints.py", + "dimos/robot/test_all_blueprints_generation.py", + "dimos/core/blueprints.py", + "dimos/core/test_blueprints.py", +} +BLUEPRINT_METHODS = {"transports", "global_config", "remappings", "requirements"} + + +def test_all_blueprints_is_current() -> None: + root = DIMOS_PROJECT_ROOT / "dimos" + all_blueprints, all_modules = _scan_for_blueprints(root) + generated_content = _generate_all_blueprints_content(all_blueprints, all_modules) + + file_path = root / "robot" / "all_blueprints.py" + + if "CI" in os.environ: + if not file_path.exists(): + pytest.fail(f"all_blueprints.py does not exist at {file_path}") + + current_content = file_path.read_text() + if current_content != generated_content: + diff = difflib.unified_diff( + current_content.splitlines(keepends=True), + generated_content.splitlines(keepends=True), + 
fromfile="all_blueprints.py (current)", + tofile="all_blueprints.py (generated)", + ) + diff_str = "".join(diff) + pytest.fail( + f"all_blueprints.py is out of date. Run " + f"`pytest dimos/robot/test_all_blueprints_generation.py` locally to update.\n\n" + f"Diff:\n{diff_str}" + ) + else: + file_path.write_text(generated_content) + + if _check_for_uncommitted_changes(file_path): + pytest.fail( + "all_blueprints.py was updated and has uncommitted changes. " + "Please commit the changes." + ) + + +def _scan_for_blueprints(root: Path) -> tuple[dict[str, str], dict[str, str]]: + all_blueprints: dict[str, str] = {} + all_modules: dict[str, str] = {} + + for file_path in sorted(_get_all_python_files(root)): + module_name = _path_to_module_name(file_path, root) + blueprint_vars, module_vars = _find_blueprints_in_file(file_path) + + for var_name in blueprint_vars: + full_path = f"{module_name}:{var_name}" + cli_name = var_name.replace("_", "-") + all_blueprints[cli_name] = full_path + + for var_name in module_vars: + full_path = f"{module_name}:{var_name}" + all_modules[var_name] = module_name + + return all_blueprints, all_modules + + +def _generate_all_blueprints_content( + all_blueprints: dict[str, str], + all_modules: dict[str, str], +) -> str: + lines = [ + "# Copyright 2025-2026 Dimensional Inc.", + "#", + '# Licensed under the Apache License, Version 2.0 (the "License");', + "# you may not use this file except in compliance with the License.", + "# You may obtain a copy of the License at", + "#", + "# http://www.apache.org/licenses/LICENSE-2.0", + "#", + "# Unless required by applicable law or agreed to in writing, software", + '# distributed under the License is distributed on an "AS IS" BASIS,', + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", + "# See the License for the specific language governing permissions and", + "# limitations under the License.", + "", + "# This file is auto-generated. 
Do not edit manually.", + "# Run `pytest dimos/robot/test_all_blueprints_generation.py` to regenerate.", + "", + "all_blueprints = {", + ] + + for name in sorted(all_blueprints.keys()): + lines.append(f' "{name}": "{all_blueprints[name]}",') + + lines.append("}\n\n") + lines.append("all_modules = {") + + for name in sorted(all_modules.keys()): + lines.append(f' "{name}": "{all_modules[name]}",') + + lines.append("}\n") + + return "\n".join(lines) + + +def _check_for_uncommitted_changes(file_path: Path) -> bool: + try: + result = subprocess.run( + ["git", "diff", "--quiet", str(file_path)], + capture_output=True, + cwd=file_path.parent, + ) + return result.returncode != 0 + except Exception: + return False + + +def _get_all_python_files(root: Path) -> Generator[Path, None, None]: + for path in root.rglob("*.py"): + rel_path = str(path.relative_to(root.parent)) + if "__pycache__" in str(path) or rel_path in IGNORED_FILES: + continue + yield path + + +def _path_to_module_name(path: Path, root: Path) -> str: + parts = list(path.relative_to(root.parent).parts) + parts[-1] = parts[-1].removesuffix(".py") + return ".".join(parts) + + +def _find_blueprints_in_file(file_path: Path) -> tuple[list[str], list[str]]: + blueprint_vars: list[str] = [] + module_vars: list[str] = [] + + try: + source = file_path.read_text(encoding="utf-8") + tree = ast.parse(source, filename=str(file_path)) + except Exception: + return [], [] + + # Only look at top-level statements (direct children of the Module node) + for node in tree.body: + if not isinstance(node, ast.Assign): + continue + + # Get the variable name(s) + for target in node.targets: + if not isinstance(target, ast.Name): + continue + var_name = target.id + + if var_name.startswith("_"): + continue + + # Check if it's a blueprint (ModuleBlueprintSet instance) + if _is_autoconnect_call(node.value) or _ends_with_blueprint_method(node.value): + blueprint_vars.append(var_name) + # Check if it's a module factory (SomeModule.blueprint) 
+ elif _is_blueprint_factory(node.value): + module_vars.append(var_name) + + return blueprint_vars, module_vars + + +def _is_autoconnect_call(node: ast.expr) -> bool: + if isinstance(node, ast.Call): + func = node.func + # Direct call: autoconnect(...) + if isinstance(func, ast.Name) and func.id == "autoconnect": + return True + # Attribute call: module.autoconnect(...) + if isinstance(func, ast.Attribute) and func.attr == "autoconnect": + return True + return False + + +def _ends_with_blueprint_method(node: ast.expr) -> bool: + if isinstance(node, ast.Call): + func = node.func + if isinstance(func, ast.Attribute) and func.attr in BLUEPRINT_METHODS: + return True + return False + + +def _is_blueprint_factory(node: ast.expr) -> bool: + if isinstance(node, ast.Attribute): + return node.attr == "blueprint" + return False diff --git a/dimos/robot/test_ros_bridge.py b/dimos/robot/test_ros_bridge.py deleted file mode 100644 index cf7b2ac0cf..0000000000 --- a/dimos/robot/test_ros_bridge.py +++ /dev/null @@ -1,442 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import threading -import time -import unittest - -import numpy as np -import pytest - -try: - from geometry_msgs.msg import TransformStamped, TwistStamped as ROSTwistStamped - import rclpy - from rclpy.node import Node - from sensor_msgs.msg import PointCloud2 as ROSPointCloud2, PointField - from tf2_msgs.msg import TFMessage as ROSTFMessage -except ImportError: - rclpy = None - Node = None - ROSTwistStamped = None - ROSPointCloud2 = None - PointField = None - ROSTFMessage = None - TransformStamped = None - -from dimos.msgs.geometry_msgs import TwistStamped -from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.msgs.tf2_msgs import TFMessage -from dimos.protocol.pubsub.lcmpubsub import LCM, Topic -from dimos.robot.ros_bridge import BridgeDirection, ROSBridge - - -@pytest.mark.ros -class TestROSBridge(unittest.TestCase): - """Test suite for ROS-DIMOS bridge.""" - - def setUp(self) -> None: - """Set up test fixtures.""" - # Skip if ROS is not available - if rclpy is None: - self.skipTest("ROS not available") - - # Initialize ROS if not already done - if not rclpy.ok(): - rclpy.init() - - # Create test bridge - self.bridge = ROSBridge("test_ros_bridge") - - # Create test node for publishing/subscribing - self.test_node = Node("test_node") - - # Track received messages - self.ros_messages = [] - self.dimos_messages = [] - self.message_timestamps = {"ros": [], "dimos": []} - - def tearDown(self) -> None: - """Clean up test fixtures.""" - self.test_node.destroy_node() - self.bridge.stop() - if rclpy.ok(): - rclpy.try_shutdown() - - def test_ros_to_dimos_twist(self) -> None: - """Test ROS TwistStamped to DIMOS conversion and transmission.""" - # Set up bridge - self.bridge.add_topic( - "/test_twist", TwistStamped, ROSTwistStamped, BridgeDirection.ROS_TO_DIMOS - ) - - # Subscribe to DIMOS side - lcm = LCM() - lcm.start() - topic = Topic("/test_twist", TwistStamped) - - def dimos_callback(msg, _topic) -> None: - self.dimos_messages.append(msg) - 
self.message_timestamps["dimos"].append(time.time()) - - lcm.subscribe(topic, dimos_callback) - - # Publish from ROS side - ros_pub = self.test_node.create_publisher(ROSTwistStamped, "/test_twist", 10) - - # Send test messages - for i in range(10): - msg = ROSTwistStamped() - msg.header.stamp = self.test_node.get_clock().now().to_msg() - msg.header.frame_id = f"frame_{i}" - msg.twist.linear.x = float(i) - msg.twist.linear.y = float(i * 2) - msg.twist.angular.z = float(i * 0.1) - - ros_pub.publish(msg) - self.message_timestamps["ros"].append(time.time()) - time.sleep(0.01) # 100Hz - - # Allow time for processing - time.sleep(0.5) - - # Verify messages received - self.assertEqual(len(self.dimos_messages), 10, "Should receive all 10 messages") - - # Verify message content - for i, msg in enumerate(self.dimos_messages): - self.assertEqual(msg.frame_id, f"frame_{i}") - self.assertAlmostEqual(msg.linear.x, float(i), places=5) - self.assertAlmostEqual(msg.linear.y, float(i * 2), places=5) - self.assertAlmostEqual(msg.angular.z, float(i * 0.1), places=5) - - lcm.stop() - - def test_dimos_to_ros_twist(self) -> None: - """Test DIMOS TwistStamped to ROS conversion and transmission.""" - # Set up bridge - self.bridge.add_topic( - "/test_twist_reverse", TwistStamped, ROSTwistStamped, BridgeDirection.DIMOS_TO_ROS - ) - - # Subscribe to ROS side - def ros_callback(msg) -> None: - self.ros_messages.append(msg) - self.message_timestamps["ros"].append(time.time()) - - self.test_node.create_subscription(ROSTwistStamped, "/test_twist_reverse", ros_callback, 10) - - # Use the bridge's LCM instance for publishing - topic = Topic("/test_twist_reverse", TwistStamped) - - # Send test messages - for i in range(10): - msg = TwistStamped(ts=time.time(), frame_id=f"dimos_frame_{i}") - msg.linear.x = float(i * 3) - msg.linear.y = float(i * 4) - msg.angular.z = float(i * 0.2) - - self.bridge.lcm.publish(topic, msg) - self.message_timestamps["dimos"].append(time.time()) - time.sleep(0.01) # 100Hz 
- - # Allow time for processing and spin the test node - for _ in range(50): # Spin for 0.5 seconds - rclpy.spin_once(self.test_node, timeout_sec=0.01) - - # Verify messages received - self.assertEqual(len(self.ros_messages), 10, "Should receive all 10 messages") - - # Verify message content - for i, msg in enumerate(self.ros_messages): - self.assertEqual(msg.header.frame_id, f"dimos_frame_{i}") - self.assertAlmostEqual(msg.twist.linear.x, float(i * 3), places=5) - self.assertAlmostEqual(msg.twist.linear.y, float(i * 4), places=5) - self.assertAlmostEqual(msg.twist.angular.z, float(i * 0.2), places=5) - - def test_frequency_preservation(self) -> None: - """Test that message frequencies are preserved through the bridge.""" - # Set up bridge - self.bridge.add_topic( - "/test_freq", TwistStamped, ROSTwistStamped, BridgeDirection.ROS_TO_DIMOS - ) - - # Subscribe to DIMOS side - lcm = LCM() - lcm.start() - topic = Topic("/test_freq", TwistStamped) - - receive_times = [] - - def dimos_callback(_msg, _topic) -> None: - receive_times.append(time.time()) - - lcm.subscribe(topic, dimos_callback) - - # Publish from ROS at specific frequencies - ros_pub = self.test_node.create_publisher(ROSTwistStamped, "/test_freq", 10) - - # Test different frequencies - test_frequencies = [10, 50, 100] # Hz - - for target_freq in test_frequencies: - receive_times.clear() - send_times = [] - period = 1.0 / target_freq - - # Send messages at target frequency - start_time = time.time() - while time.time() - start_time < 1.0: # Run for 1 second - msg = ROSTwistStamped() - msg.header.stamp = self.test_node.get_clock().now().to_msg() - msg.twist.linear.x = 1.0 - - ros_pub.publish(msg) - send_times.append(time.time()) - time.sleep(period) - - # Allow processing time - time.sleep(0.2) - - # Calculate actual frequencies - if len(send_times) > 1: - send_intervals = np.diff(send_times) - send_freq = 1.0 / np.mean(send_intervals) - else: - send_freq = 0 - - if len(receive_times) > 1: - receive_intervals 
= np.diff(receive_times) - receive_freq = 1.0 / np.mean(receive_intervals) - else: - receive_freq = 0 - - # Verify frequency preservation (within 10% tolerance) - self.assertAlmostEqual( - receive_freq, - send_freq, - delta=send_freq * 0.1, - msg=f"Frequency not preserved for {target_freq}Hz: sent={send_freq:.1f}Hz, received={receive_freq:.1f}Hz", - ) - - lcm.stop() - - def test_pointcloud_conversion(self) -> None: - """Test PointCloud2 message conversion with numpy optimization.""" - # Set up bridge - self.bridge.add_topic( - "/test_cloud", PointCloud2, ROSPointCloud2, BridgeDirection.ROS_TO_DIMOS - ) - - # Subscribe to DIMOS side - lcm = LCM() - lcm.start() - topic = Topic("/test_cloud", PointCloud2) - - received_cloud = [] - - def dimos_callback(msg, _topic) -> None: - received_cloud.append(msg) - - lcm.subscribe(topic, dimos_callback) - - # Create test point cloud - ros_pub = self.test_node.create_publisher(ROSPointCloud2, "/test_cloud", 10) - - # Generate test points - num_points = 1000 - points = np.random.randn(num_points, 3).astype(np.float32) - - # Create ROS PointCloud2 message - msg = ROSPointCloud2() - msg.header.stamp = self.test_node.get_clock().now().to_msg() - msg.header.frame_id = "test_frame" - msg.height = 1 - msg.width = num_points - msg.fields = [ - PointField(name="x", offset=0, datatype=PointField.FLOAT32, count=1), - PointField(name="y", offset=4, datatype=PointField.FLOAT32, count=1), - PointField(name="z", offset=8, datatype=PointField.FLOAT32, count=1), - ] - msg.is_bigendian = False - msg.point_step = 12 - msg.row_step = msg.point_step * msg.width - msg.data = points.tobytes() - msg.is_dense = True - - # Send point cloud - ros_pub.publish(msg) - - # Allow processing time - time.sleep(0.5) - - # Verify reception - self.assertEqual(len(received_cloud), 1, "Should receive point cloud") - - # Verify point data - received_points, _ = received_cloud[0].as_numpy() - self.assertEqual(received_points.shape, points.shape) - 
np.testing.assert_array_almost_equal(received_points, points, decimal=5) - - lcm.stop() - - def test_tf_high_frequency(self) -> None: - """Test TF message handling at high frequency.""" - # Set up bridge - self.bridge.add_topic("/test_tf", TFMessage, ROSTFMessage, BridgeDirection.ROS_TO_DIMOS) - - # Subscribe to DIMOS side - lcm = LCM() - lcm.start() - topic = Topic("/test_tf", TFMessage) - - received_tfs = [] - receive_times = [] - - def dimos_callback(msg, _topic) -> None: - received_tfs.append(msg) - receive_times.append(time.time()) - - lcm.subscribe(topic, dimos_callback) - - # Publish TF at high frequency (100Hz) - ros_pub = self.test_node.create_publisher(ROSTFMessage, "/test_tf", 100) - - target_freq = 100 # Hz - period = 1.0 / target_freq - num_messages = 100 # 1 second worth - - send_times = [] - for i in range(num_messages): - msg = ROSTFMessage() - transform = TransformStamped() - transform.header.stamp = self.test_node.get_clock().now().to_msg() - transform.header.frame_id = "world" - transform.child_frame_id = f"link_{i}" - transform.transform.translation.x = float(i) - transform.transform.rotation.w = 1.0 - msg.transforms = [transform] - - ros_pub.publish(msg) - send_times.append(time.time()) - time.sleep(period) - - # Allow processing time - time.sleep(0.5) - - # Check message count (allow 5% loss tolerance) - min_expected = int(num_messages * 0.95) - self.assertGreaterEqual( - len(received_tfs), - min_expected, - f"Should receive at least {min_expected} of {num_messages} TF messages", - ) - - # Check frequency preservation - if len(receive_times) > 1: - receive_intervals = np.diff(receive_times) - receive_freq = 1.0 / np.mean(receive_intervals) - - # For high frequency, allow 20% tolerance - self.assertAlmostEqual( - receive_freq, - target_freq, - delta=target_freq * 0.2, - msg=f"High frequency TF not preserved: expected={target_freq}Hz, got={receive_freq:.1f}Hz", - ) - - lcm.stop() - - def test_bidirectional_bridge(self) -> None: - """Test 
simultaneous bidirectional message flow.""" - # Set up bidirectional bridges for same topic type - self.bridge.add_topic( - "/ros_to_dimos", TwistStamped, ROSTwistStamped, BridgeDirection.ROS_TO_DIMOS - ) - - self.bridge.add_topic( - "/dimos_to_ros", TwistStamped, ROSTwistStamped, BridgeDirection.DIMOS_TO_ROS - ) - - dimos_received = [] - ros_received = [] - - # DIMOS subscriber - use bridge's LCM - topic_r2d = Topic("/ros_to_dimos", TwistStamped) - self.bridge.lcm.subscribe(topic_r2d, lambda msg, _: dimos_received.append(msg)) - - # ROS subscriber - self.test_node.create_subscription( - ROSTwistStamped, "/dimos_to_ros", lambda msg: ros_received.append(msg), 10 - ) - - # Set up publishers - ros_pub = self.test_node.create_publisher(ROSTwistStamped, "/ros_to_dimos", 10) - topic_d2r = Topic("/dimos_to_ros", TwistStamped) - - # Keep track of whether threads should continue - stop_spinning = threading.Event() - - # Spin the test node in background to receive messages - def spin_test_node() -> None: - while not stop_spinning.is_set(): - rclpy.spin_once(self.test_node, timeout_sec=0.01) - - spin_thread = threading.Thread(target=spin_test_node, daemon=True) - spin_thread.start() - - # Send messages in both directions simultaneously - def send_ros_messages() -> None: - for i in range(50): - msg = ROSTwistStamped() - msg.header.stamp = self.test_node.get_clock().now().to_msg() - msg.twist.linear.x = float(i) - ros_pub.publish(msg) - time.sleep(0.02) # 50Hz - - def send_dimos_messages() -> None: - for i in range(50): - msg = TwistStamped(ts=time.time()) - msg.linear.y = float(i * 2) - self.bridge.lcm.publish(topic_d2r, msg) - time.sleep(0.02) # 50Hz - - # Run both senders in parallel - ros_thread = threading.Thread(target=send_ros_messages) - dimos_thread = threading.Thread(target=send_dimos_messages) - - ros_thread.start() - dimos_thread.start() - - ros_thread.join() - dimos_thread.join() - - # Allow processing time - time.sleep(0.5) - stop_spinning.set() - 
spin_thread.join(timeout=1.0) - - # Verify both directions worked - self.assertGreaterEqual(len(dimos_received), 45, "Should receive most ROS->DIMOS messages") - self.assertGreaterEqual(len(ros_received), 45, "Should receive most DIMOS->ROS messages") - - # Verify message integrity - for i, msg in enumerate(dimos_received[:45]): - self.assertAlmostEqual(msg.linear.x, float(i), places=5) - - for i, msg in enumerate(ros_received[:45]): - self.assertAlmostEqual(msg.twist.linear.y, float(i * 2), places=5) - - -if __name__ == "__main__": - unittest.main() diff --git a/dimos/robot/unitree/__init__.py b/dimos/robot/unitree/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dimos/robot/unitree_webrtc/unitree_b1/README.md b/dimos/robot/unitree/b1/README.md similarity index 98% rename from dimos/robot/unitree_webrtc/unitree_b1/README.md rename to dimos/robot/unitree/b1/README.md index f59e6a57ae..1443067a2a 100644 --- a/dimos/robot/unitree_webrtc/unitree_b1/README.md +++ b/dimos/robot/unitree/b1/README.md @@ -112,7 +112,7 @@ pip install -e .[cpu,sim] #### With Joystick Control (Recommended for Testing) ```bash -python -m dimos.robot.unitree_webrtc.unitree_b1.unitree_b1 \ +python -m dimos.robot.unitree.b1.unitree_b1 \ --ip 192.168.12.1 \ --port 9090 \ --joystick @@ -129,7 +129,7 @@ python -m dimos.robot.unitree_webrtc.unitree_b1.unitree_b1 \ #### Test Mode (No Robot Required) ```bash -python -m dimos.robot.unitree_webrtc.unitree_b1.unitree_b1 \ +python -m dimos.robot.unitree.b1.unitree_b1 \ --test \ --joystick ``` diff --git a/dimos/robot/unitree_webrtc/unitree_b1/__init__.py b/dimos/robot/unitree/b1/__init__.py similarity index 100% rename from dimos/robot/unitree_webrtc/unitree_b1/__init__.py rename to dimos/robot/unitree/b1/__init__.py diff --git a/dimos/robot/unitree_webrtc/unitree_b1/b1_command.py b/dimos/robot/unitree/b1/b1_command.py similarity index 100% rename from dimos/robot/unitree_webrtc/unitree_b1/b1_command.py rename to 
dimos/robot/unitree/b1/b1_command.py diff --git a/dimos/robot/unitree_webrtc/unitree_b1/connection.py b/dimos/robot/unitree/b1/connection.py similarity index 93% rename from dimos/robot/unitree_webrtc/unitree_b1/connection.py rename to dimos/robot/unitree/b1/connection.py index f0cb5317e6..bae4bc0844 100644 --- a/dimos/robot/unitree_webrtc/unitree_b1/connection.py +++ b/dimos/robot/unitree/b1/connection.py @@ -28,6 +28,7 @@ from dimos.msgs.geometry_msgs import PoseStamped, Twist, TwistStamped from dimos.msgs.nav_msgs.Odometry import Odometry from dimos.msgs.std_msgs import Int32 +from dimos.msgs.tf2_msgs.TFMessage import TFMessage from dimos.utils.logging_config import setup_logger from .b1_command import B1Command @@ -52,11 +53,17 @@ class B1ConnectionModule(Module): internally converts to B1Command format, and sends UDP packets at 50Hz. """ - cmd_vel: In[TwistStamped] # Timestamped velocity commands from ROS - mode_cmd: In[Int32] # Mode changes - odom_in: In[Odometry] # External odometry from ROS SLAM/lidar + # LCM ports (inter-module communication) + cmd_vel: In[TwistStamped] + mode_cmd: In[Int32] + odom_in: In[Odometry] - odom_pose: Out[PoseStamped] # Converted pose for internal use + odom_pose: Out[PoseStamped] + + # ROS In ports (receiving from ROS via ROSTransport) + ros_cmd_vel: In[TwistStamped] + ros_odom_in: In[Odometry] + ros_tf: In[TFMessage] def __init__( # type: ignore[no-untyped-def] self, ip: str = "192.168.12.1", port: int = 9090, test_mode: bool = False, *args, **kwargs @@ -111,6 +118,17 @@ def start(self) -> None: unsub = self.odom_in.subscribe(self._publish_odom_pose) self._disposables.add(Disposable(unsub)) + # Subscribe to ROS In ports + if self.ros_cmd_vel: + unsub = self.ros_cmd_vel.subscribe(self.handle_twist_stamped) + self._disposables.add(Disposable(unsub)) + if self.ros_odom_in: + unsub = self.ros_odom_in.subscribe(self._publish_odom_pose) + self._disposables.add(Disposable(unsub)) + if self.ros_tf: + unsub = 
self.ros_tf.subscribe(self._on_ros_tf) + self._disposables.add(Disposable(unsub)) + # Start threads self.running = True self.watchdog_running = True @@ -284,6 +302,10 @@ def _publish_odom_pose(self, msg: Odometry) -> None: ) self.odom_pose.publish(pose_stamped) + def _on_ros_tf(self, msg: TFMessage) -> None: + """Forward ROS TF messages to the module's TF tree.""" + self.tf.publish(*msg.transforms) + def _watchdog_loop(self) -> None: """Single watchdog thread that monitors command freshness.""" while self.watchdog_running: diff --git a/dimos/robot/unitree_webrtc/unitree_b1/joystick_module.py b/dimos/robot/unitree/b1/joystick_module.py similarity index 99% rename from dimos/robot/unitree_webrtc/unitree_b1/joystick_module.py rename to dimos/robot/unitree/b1/joystick_module.py index 3aef29122a..bb07094973 100644 --- a/dimos/robot/unitree_webrtc/unitree_b1/joystick_module.py +++ b/dimos/robot/unitree/b1/joystick_module.py @@ -47,16 +47,16 @@ def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def] self.current_mode = 0 # Start in IDLE mode for safety @rpc - def start(self) -> bool: + def start(self) -> None: """Initialize pygame and start control loop.""" super().start() try: - import pygame + import pygame # noqa: F401 except ImportError: print("ERROR: pygame not installed. 
Install with: pip install pygame") - return False + return self.keys_held = set() # type: ignore[var-annotated] self.pygame_ready = True @@ -66,7 +66,7 @@ def start(self) -> bool: self._thread = threading.Thread(target=self._pygame_loop, daemon=True) self._thread.start() - return True + return @rpc def stop(self) -> None: diff --git a/dimos/robot/unitree_webrtc/unitree_b1/joystick_server_udp.cpp b/dimos/robot/unitree/b1/joystick_server_udp.cpp similarity index 100% rename from dimos/robot/unitree_webrtc/unitree_b1/joystick_server_udp.cpp rename to dimos/robot/unitree/b1/joystick_server_udp.cpp diff --git a/dimos/robot/unitree_webrtc/unitree_b1/test_connection.py b/dimos/robot/unitree/b1/test_connection.py similarity index 100% rename from dimos/robot/unitree_webrtc/unitree_b1/test_connection.py rename to dimos/robot/unitree/b1/test_connection.py diff --git a/dimos/robot/unitree_webrtc/unitree_b1/unitree_b1.py b/dimos/robot/unitree/b1/unitree_b1.py similarity index 73% rename from dimos/robot/unitree_webrtc/unitree_b1/unitree_b1.py rename to dimos/robot/unitree/b1/unitree_b1.py index ff608c2b1f..a2dd6c718d 100644 --- a/dimos/robot/unitree_webrtc/unitree_b1/unitree_b1.py +++ b/dimos/robot/unitree/b1/unitree_b1.py @@ -26,13 +26,13 @@ from dimos import core from dimos.core.module_coordinator import ModuleCoordinator from dimos.core.resource import Resource +from dimos.core.transport import ROSTransport from dimos.msgs.geometry_msgs import PoseStamped, TwistStamped from dimos.msgs.nav_msgs.Odometry import Odometry from dimos.msgs.std_msgs import Int32 from dimos.msgs.tf2_msgs.TFMessage import TFMessage from dimos.robot.robot import Robot -from dimos.robot.ros_bridge import BridgeDirection, ROSBridge -from dimos.robot.unitree_webrtc.unitree_b1.connection import ( +from dimos.robot.unitree.b1.connection import ( B1ConnectionModule, MockB1ConnectionModule, ) @@ -40,21 +40,6 @@ from dimos.types.robot_capabilities import RobotCapability from dimos.utils.logging_config import 
setup_logger -# Handle ROS imports for environments where ROS is not available like CI -try: - from geometry_msgs.msg import ( # type: ignore[attr-defined] - TwistStamped as ROSTwistStamped, - ) - from nav_msgs.msg import Odometry as ROSOdometry # type: ignore[attr-defined] - from tf2_msgs.msg import TFMessage as ROSTFMessage # type: ignore[attr-defined] - - ROS_AVAILABLE = True -except ImportError: - ROSTwistStamped = None # type: ignore[assignment, misc] - ROSOdometry = None # type: ignore[assignment, misc] - ROSTFMessage = None # type: ignore[assignment, misc] - ROS_AVAILABLE = False - logger = setup_logger(level=logging.INFO) @@ -74,7 +59,6 @@ def __init__( output_dir: str | None = None, skill_library: SkillLibrary | None = None, enable_joystick: bool = False, - enable_ros_bridge: bool = True, test_mode: bool = False, ) -> None: """Initialize the B1 robot. @@ -85,7 +69,6 @@ def __init__( output_dir: Directory for saving outputs skill_library: Skill library instance (optional) enable_joystick: Enable pygame joystick control module - enable_ros_bridge: Enable ROS bridge for external control test_mode: Test mode - print commands instead of sending UDP """ super().__init__() @@ -93,12 +76,10 @@ def __init__( self.port = port self.output_dir = output_dir or os.path.join(os.getcwd(), "assets", "output") self.enable_joystick = enable_joystick - self.enable_ros_bridge = enable_ros_bridge self.test_mode = test_mode self.capabilities = [RobotCapability.LOCOMOTION] self.connection = None self.joystick = None - self.ros_bridge = None self._dimos = ModuleCoordinator(n=2) os.makedirs(self.output_dir, exist_ok=True) @@ -122,9 +103,14 @@ def start(self) -> None: self.connection.odom_in.transport = core.LCMTransport("/state_estimation", Odometry) # type: ignore[attr-defined] self.connection.odom_pose.transport = core.LCMTransport("/odom", PoseStamped) # type: ignore[attr-defined] + # Configure ROS transports for connection + self.connection.ros_cmd_vel.transport = 
ROSTransport("/cmd_vel", TwistStamped) # type: ignore[attr-defined] + self.connection.ros_odom_in.transport = ROSTransport("/state_estimation", Odometry) # type: ignore[attr-defined] + self.connection.ros_tf.transport = ROSTransport("/tf", TFMessage) # type: ignore[attr-defined] + # Deploy joystick move_vel control if self.enable_joystick: - from dimos.robot.unitree_webrtc.unitree_b1.joystick_module import JoystickModule + from dimos.robot.unitree.b1.joystick_module import JoystickModule self.joystick = self._dimos.deploy(JoystickModule) # type: ignore[assignment] self.joystick.twist_out.transport = core.LCMTransport("/cmd_vel", TwistStamped) # type: ignore[attr-defined] @@ -136,43 +122,12 @@ def start(self) -> None: self.connection.idle() # type: ignore[attr-defined] # Start in IDLE mode for safety logger.info("B1 started in IDLE mode (safety)") - # Deploy ROS bridge if enabled (matching G1 pattern) - if self.enable_ros_bridge: - self._deploy_ros_bridge() - logger.info(f"UnitreeB1 initialized - UDP control to {self.ip}:{self.port}") if self.enable_joystick: logger.info("Pygame joystick module enabled for testing") - if self.enable_ros_bridge: - logger.info("ROS bridge enabled for external control") def stop(self) -> None: self._dimos.stop() - if self.ros_bridge: - self.ros_bridge.stop() - - def _deploy_ros_bridge(self) -> None: - """Deploy and configure ROS bridge (matching G1 implementation).""" - self.ros_bridge = ROSBridge("b1_ros_bridge") # type: ignore[assignment] - - # Add /cmd_vel topic from ROS to DIMOS - self.ros_bridge.add_topic( # type: ignore[attr-defined] - "/cmd_vel", TwistStamped, ROSTwistStamped, direction=BridgeDirection.ROS_TO_DIMOS - ) - - # Add /state_estimation topic from ROS to DIMOS (external odometry) - self.ros_bridge.add_topic( # type: ignore[attr-defined] - "/state_estimation", Odometry, ROSOdometry, direction=BridgeDirection.ROS_TO_DIMOS - ) - - # Add /tf topic from ROS to DIMOS - self.ros_bridge.add_topic( # type: ignore[attr-defined] 
- "/tf", TFMessage, ROSTFMessage, direction=BridgeDirection.ROS_TO_DIMOS - ) - - self.ros_bridge.start() # type: ignore[attr-defined] - - logger.info("ROS bridge deployed: /cmd_vel, /state_estimation, /tf (ROS → DIMOS)") # Robot control methods (standard interface) def move(self, twist_stamped: TwistStamped, duration: float = 0.0) -> None: @@ -212,10 +167,6 @@ def main() -> None: parser.add_argument("--ip", default="192.168.12.1", help="Robot IP address") parser.add_argument("--port", type=int, default=9090, help="UDP port") parser.add_argument("--joystick", action="store_true", help="Enable pygame joystick control") - parser.add_argument("--ros-bridge", action="store_true", default=True, help="Enable ROS bridge") - parser.add_argument( - "--no-ros-bridge", dest="ros_bridge", action="store_false", help="Disable ROS bridge" - ) parser.add_argument("--output-dir", help="Output directory for logs/data") parser.add_argument( "--test", action="store_true", help="Test mode - print commands instead of UDP" @@ -228,7 +179,6 @@ def main() -> None: port=args.port, output_dir=args.output_dir, enable_joystick=args.joystick, - enable_ros_bridge=args.ros_bridge, test_mode=args.test, ) @@ -256,10 +206,7 @@ def main() -> None: # Manual control example print("\nB1 Robot ready for commands") print("Use robot.idle(), robot.stand(), robot.walk() to change modes") - if args.ros_bridge: - print("ROS bridge active - listening for /cmd_vel and /state_estimation") - else: - print("Use robot.move(TwistStamped(...)) to send velocity commands") + print("ROS topics active via ROSTransport: /cmd_vel, /state_estimation, /tf") print("Press Ctrl+C to exit\n") import time diff --git a/dimos/robot/unitree/connection/connection.py b/dimos/robot/unitree/connection.py similarity index 92% rename from dimos/robot/unitree/connection/connection.py rename to dimos/robot/unitree/connection.py index 8f4a138320..f3f8ffaafb 100644 --- a/dimos/robot/unitree/connection/connection.py +++ 
b/dimos/robot/unitree/connection.py @@ -17,7 +17,7 @@ import functools import threading import time -from typing import TypeAlias +from typing import Any, TypeAlias import numpy as np from numpy.typing import NDArray @@ -34,14 +34,13 @@ WebRTCConnectionMethod, ) -from dimos.core import rpc from dimos.core.resource import Resource from dimos.msgs.geometry_msgs import Pose, Transform, Twist from dimos.msgs.sensor_msgs import Image, PointCloud2 -from dimos.msgs.sensor_msgs.image_impls.AbstractImage import ImageFormat -from dimos.robot.unitree_webrtc.type.lidar import RawLidarMsg, pointcloud2_from_webrtc_lidar -from dimos.robot.unitree_webrtc.type.lowstate import LowStateMsg -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.msgs.sensor_msgs.Image import ImageFormat +from dimos.robot.unitree.type.lidar import RawLidarMsg, pointcloud2_from_webrtc_lidar +from dimos.robot.unitree.type.lowstate import LowStateMsg +from dimos.robot.unitree.type.odometry import Odometry from dimos.utils.decorators.decorators import simple_mcache from dimos.utils.reactive import backpressure, callback_to_observable @@ -228,7 +227,7 @@ def run_unsubscription() -> None: ) # Generic sync API call (we jump into the client thread) - def publish_request(self, topic: str, data: dict): # type: ignore[no-untyped-def, type-arg] + def publish_request(self, topic: str, data: dict[Any, Any]) -> Any: future = asyncio.run_coroutine_threadsafe( self.conn.datachannel.pub_sub.publish_request_new(topic, data), self.loop ) @@ -275,25 +274,13 @@ def video_stream(self) -> Observable[Image]: def lowstate_stream(self) -> Observable[LowStateMsg]: return backpressure(self.unitree_sub_stream(RTC_TOPIC["LOW_STATE"])) - def standup_ai(self) -> bool: - return self.publish_request(RTC_TOPIC["SPORT_MOD"], {"api_id": SPORT_CMD["BalanceStand"]}) # type: ignore[no-any-return] - - def standup_normal(self) -> bool: - self.publish_request(RTC_TOPIC["SPORT_MOD"], {"api_id": SPORT_CMD["StandUp"]}) - 
time.sleep(0.5) - self.publish_request(RTC_TOPIC["SPORT_MOD"], {"api_id": SPORT_CMD["RecoveryStand"]}) - return True - - @rpc def standup(self) -> bool: - if self.mode == "ai": - return self.standup_ai() - else: - return self.standup_normal() + return bool(self.publish_request(RTC_TOPIC["SPORT_MOD"], {"api_id": SPORT_CMD["StandUp"]})) - @rpc def liedown(self) -> bool: - return self.publish_request(RTC_TOPIC["SPORT_MOD"], {"api_id": SPORT_CMD["StandDown"]}) # type: ignore[no-any-return] + return bool( + self.publish_request(RTC_TOPIC["SPORT_MOD"], {"api_id": SPORT_CMD["StandDown"]}) + ) async def handstand(self): # type: ignore[no-untyped-def] return self.publish_request( @@ -301,7 +288,6 @@ async def handstand(self): # type: ignore[no-untyped-def] {"api_id": SPORT_CMD["Standup"], "parameter": {"data": True}}, ) - @rpc def color(self, color: VUI_COLOR = VUI_COLOR.RED, colortime: int = 60) -> bool: return self.publish_request( # type: ignore[no-any-return] RTC_TOPIC["VUI"], diff --git a/dimos/robot/unitree/connection/__init__.py b/dimos/robot/unitree/connection/__init__.py deleted file mode 100644 index 5c1dff1922..0000000000 --- a/dimos/robot/unitree/connection/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -import dimos.robot.unitree.connection.g1 as g1 -import dimos.robot.unitree.connection.go2 as go2 - -__all__ = ["g1", "go2"] diff --git a/dimos/robot/unitree_webrtc/demo_error_on_name_conflicts.py b/dimos/robot/unitree/demo_error_on_name_conflicts.py similarity index 93% rename from dimos/robot/unitree_webrtc/demo_error_on_name_conflicts.py rename to dimos/robot/unitree/demo_error_on_name_conflicts.py index b2c2aabccb..63f37ad723 100644 --- a/dimos/robot/unitree_webrtc/demo_error_on_name_conflicts.py +++ b/dimos/robot/unitree/demo_error_on_name_conflicts.py @@ -50,4 +50,4 @@ def stop(self) -> None: super().stop() -blueprint = autoconnect(ModuleA.blueprint(), ModuleB.blueprint()) +demo_error_on_name_conflicts = autoconnect(ModuleA.blueprint(), ModuleB.blueprint()) diff 
--git a/dimos/robot/unitree_webrtc/depth_module.py b/dimos/robot/unitree/depth_module.py similarity index 98% rename from dimos/robot/unitree_webrtc/depth_module.py rename to dimos/robot/unitree/depth_module.py index b040fbb63f..07f065caea 100644 --- a/dimos/robot/unitree_webrtc/depth_module.py +++ b/dimos/robot/unitree/depth_module.py @@ -50,7 +50,7 @@ class DepthModule(Module): def __init__( # type: ignore[no-untyped-def] self, gt_depth_scale: float = 0.5, - global_config: GlobalConfig | None = None, + cfg: GlobalConfig | None = None, **kwargs, ) -> None: """ @@ -78,8 +78,8 @@ def __init__( # type: ignore[no-untyped-def] self._processing_thread: threading.Thread | None = None self._stop_processing = threading.Event() - if global_config: - if global_config.simulation: + if cfg: + if cfg.simulation: self.gt_depth_scale = 1.0 @rpc diff --git a/dimos/robot/unitree/g1/blueprints/__init__.py b/dimos/robot/unitree/g1/blueprints/__init__.py new file mode 100644 index 0000000000..ebc18da8d3 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/__init__.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Cascaded G1 blueprints split into focused modules.""" + +import lazy_loader as lazy + +__getattr__, __dir__, __all__ = lazy.attach( + __name__, + submod_attrs={ + "agentic._agentic_skills": ["_agentic_skills"], + "agentic.unitree_g1_agentic": ["unitree_g1_agentic"], + "agentic.unitree_g1_agentic_sim": ["unitree_g1_agentic_sim"], + "agentic.unitree_g1_full": ["unitree_g1_full"], + "basic.unitree_g1_basic": ["unitree_g1_basic"], + "basic.unitree_g1_basic_sim": ["unitree_g1_basic_sim"], + "basic.unitree_g1_joystick": ["unitree_g1_joystick"], + "perceptive._perception_and_memory": ["_perception_and_memory"], + "perceptive.unitree_g1": ["unitree_g1"], + "perceptive.unitree_g1_detection": ["unitree_g1_detection"], + "perceptive.unitree_g1_shm": ["unitree_g1_shm"], + "perceptive.unitree_g1_sim": ["unitree_g1_sim"], + "primitive.uintree_g1_primitive_no_nav": ["uintree_g1_primitive_no_nav", "basic_no_nav"], + }, +) diff --git a/dimos/robot/unitree/g1/blueprints/agentic/__init__.py b/dimos/robot/unitree/g1/blueprints/agentic/__init__.py new file mode 100644 index 0000000000..5e6db90d91 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/agentic/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Agentic blueprints for Unitree G1.""" diff --git a/dimos/robot/unitree/g1/blueprints/agentic/_agentic_skills.py b/dimos/robot/unitree/g1/blueprints/agentic/_agentic_skills.py new file mode 100644 index 0000000000..74ce41f7f1 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/agentic/_agentic_skills.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Agentic skills used by higher-level G1 blueprints.""" + +from dimos.agents.agent import agent +from dimos.agents.skills.navigation import navigation_skill +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.g1.skill_container import g1_skills + +_agentic_skills = autoconnect( + agent(), + navigation_skill(), + g1_skills(), +) + +__all__ = ["_agentic_skills"] diff --git a/dimos/robot/unitree/g1/blueprints/agentic/unitree_g1_agentic.py b/dimos/robot/unitree/g1/blueprints/agentic/unitree_g1_agentic.py new file mode 100644 index 0000000000..a90c2bfe2c --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/agentic/unitree_g1_agentic.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Full G1 stack with agentic skills.""" + +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.g1.blueprints.agentic._agentic_skills import _agentic_skills +from dimos.robot.unitree.g1.blueprints.perceptive.unitree_g1 import unitree_g1 + +unitree_g1_agentic = autoconnect( + unitree_g1, + _agentic_skills, +) + +__all__ = ["unitree_g1_agentic"] diff --git a/dimos/robot/unitree/g1/blueprints/agentic/unitree_g1_agentic_sim.py b/dimos/robot/unitree/g1/blueprints/agentic/unitree_g1_agentic_sim.py new file mode 100644 index 0000000000..b7371b96b5 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/agentic/unitree_g1_agentic_sim.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Agentic G1 sim stack.""" + +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.g1.blueprints.agentic._agentic_skills import _agentic_skills +from dimos.robot.unitree.g1.blueprints.perceptive.unitree_g1_sim import unitree_g1_sim + +unitree_g1_agentic_sim = autoconnect( + unitree_g1_sim, + _agentic_skills, +) + +__all__ = ["unitree_g1_agentic_sim"] diff --git a/dimos/robot/unitree/g1/blueprints/agentic/unitree_g1_full.py b/dimos/robot/unitree/g1/blueprints/agentic/unitree_g1_full.py new file mode 100644 index 0000000000..7f826f2eec --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/agentic/unitree_g1_full.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Full featured G1 stack with agentic skills and teleop.""" + +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.g1.blueprints.agentic._agentic_skills import _agentic_skills +from dimos.robot.unitree.g1.blueprints.perceptive.unitree_g1_shm import unitree_g1_shm +from dimos.robot.unitree.keyboard_teleop import keyboard_teleop + +unitree_g1_full = autoconnect( + unitree_g1_shm, + _agentic_skills, + keyboard_teleop(), +) + +__all__ = ["unitree_g1_full"] diff --git a/dimos/robot/unitree/g1/blueprints/basic/__init__.py b/dimos/robot/unitree/g1/blueprints/basic/__init__.py new file mode 100644 index 0000000000..87e6586f56 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/basic/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Basic blueprints for Unitree G1.""" diff --git a/dimos/robot/unitree/g1/blueprints/basic/unitree_g1_basic.py b/dimos/robot/unitree/g1/blueprints/basic/unitree_g1_basic.py new file mode 100644 index 0000000000..1fb591e895 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/basic/unitree_g1_basic.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Basic G1 stack: base sensors plus real robot connection and ROS nav.""" + +from dimos.core.blueprints import autoconnect +from dimos.navigation.rosnav import ros_nav +from dimos.robot.unitree.g1.blueprints.primitive.uintree_g1_primitive_no_nav import ( + uintree_g1_primitive_no_nav, +) +from dimos.robot.unitree.g1.connection import g1_connection + +unitree_g1_basic = autoconnect( + uintree_g1_primitive_no_nav, + g1_connection(), + ros_nav(), +) + +__all__ = ["unitree_g1_basic"] diff --git a/dimos/robot/unitree/g1/blueprints/basic/unitree_g1_basic_sim.py b/dimos/robot/unitree/g1/blueprints/basic/unitree_g1_basic_sim.py new file mode 100644 index 0000000000..603a9535ee --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/basic/unitree_g1_basic_sim.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Basic G1 sim stack: base sensors plus sim connection and planner.""" + +from dimos.core.blueprints import autoconnect +from dimos.navigation.replanning_a_star.module import replanning_a_star_planner +from dimos.robot.unitree.g1.blueprints.primitive.uintree_g1_primitive_no_nav import ( + uintree_g1_primitive_no_nav, +) +from dimos.robot.unitree.g1.sim import g1_sim_connection + +unitree_g1_basic_sim = autoconnect( + uintree_g1_primitive_no_nav, + g1_sim_connection(), + replanning_a_star_planner(), +) + +__all__ = ["unitree_g1_basic_sim"] diff --git a/dimos/robot/unitree/g1/blueprints/basic/unitree_g1_joystick.py b/dimos/robot/unitree/g1/blueprints/basic/unitree_g1_joystick.py new file mode 100644 index 0000000000..0242556189 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/basic/unitree_g1_joystick.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""G1 stack with keyboard teleop.""" + +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.g1.blueprints.basic.unitree_g1_basic import unitree_g1_basic +from dimos.robot.unitree.keyboard_teleop import keyboard_teleop + +unitree_g1_joystick = autoconnect( + unitree_g1_basic, + keyboard_teleop(), # Pygame-based joystick control +) + +__all__ = ["unitree_g1_joystick"] diff --git a/dimos/robot/unitree/g1/blueprints/perceptive/__init__.py b/dimos/robot/unitree/g1/blueprints/perceptive/__init__.py new file mode 100644 index 0000000000..9bd838e8b8 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/perceptive/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Perceptive blueprints for Unitree G1.""" diff --git a/dimos/robot/unitree/g1/blueprints/perceptive/_perception_and_memory.py b/dimos/robot/unitree/g1/blueprints/perceptive/_perception_and_memory.py new file mode 100644 index 0000000000..47dc2588b9 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/perceptive/_perception_and_memory.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Perception and memory modules used by higher-level G1 blueprints.""" + +from dimos.core.blueprints import autoconnect +from dimos.perception.object_tracker import object_tracking +from dimos.perception.spatial_perception import spatial_memory +from dimos.utils.monitoring import utilization + +_perception_and_memory = autoconnect( + spatial_memory(), + object_tracking(frame_id="camera_link"), + utilization(), +) + +__all__ = ["_perception_and_memory"] diff --git a/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1.py b/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1.py new file mode 100644 index 0000000000..483928ec54 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""G1 stack with perception and memory.""" + +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.g1.blueprints.basic.unitree_g1_basic import unitree_g1_basic +from dimos.robot.unitree.g1.blueprints.perceptive._perception_and_memory import ( + _perception_and_memory, +) + +unitree_g1 = autoconnect( + unitree_g1_basic, + _perception_and_memory, +).global_config(n_dask_workers=8) + +__all__ = ["unitree_g1"] diff --git a/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_detection.py b/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_detection.py new file mode 100644 index 0000000000..6e2da40a2c --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_detection.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""G1 stack with person tracking and 3D detection.""" + +from dimos_lcm.foxglove_msgs import SceneUpdate +from dimos_lcm.foxglove_msgs.ImageAnnotations import ImageAnnotations + +from dimos.core.blueprints import autoconnect +from dimos.core.transport import LCMTransport +from dimos.hardware.sensors.camera import zed +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.msgs.sensor_msgs import Image, PointCloud2 +from dimos.msgs.vision_msgs import Detection2DArray +from dimos.perception.detection.detectors.person.yolo import YoloPersonDetector +from dimos.perception.detection.module3D import Detection3DModule, detection3d_module +from dimos.perception.detection.moduleDB import ObjectDBModule, detection_db_module +from dimos.perception.detection.person_tracker import PersonTracker, person_tracker_module +from dimos.robot.unitree.g1.blueprints.basic.unitree_g1_basic import unitree_g1_basic + +unitree_g1_detection = ( + autoconnect( + unitree_g1_basic, + # Person detection modules with YOLO + detection3d_module( + camera_info=zed.CameraInfo.SingleWebcam, + detector=YoloPersonDetector, + ), + detection_db_module( + camera_info=zed.CameraInfo.SingleWebcam, + filter=lambda det: det.class_id == 0, # Filter for person class only + ), + person_tracker_module( + cameraInfo=zed.CameraInfo.SingleWebcam, + ), + ) + .global_config(n_dask_workers=8) + .remappings( + [ + # Connect detection modules to camera and lidar + (Detection3DModule, "image", "color_image"), + (Detection3DModule, "pointcloud", "pointcloud"), + (ObjectDBModule, "image", "color_image"), + (ObjectDBModule, "pointcloud", "pointcloud"), + (PersonTracker, "image", "color_image"), + (PersonTracker, "detections", "detections_2d"), + ] + ) + .transports( + { + # Detection 3D module outputs + ("detections", Detection3DModule): LCMTransport( + "/detector3d/detections", Detection2DArray + ), + ("annotations", Detection3DModule): LCMTransport( + "/detector3d/annotations", ImageAnnotations + ), + 
("scene_update", Detection3DModule): LCMTransport( + "/detector3d/scene_update", SceneUpdate + ), + ("detected_pointcloud_0", Detection3DModule): LCMTransport( + "/detector3d/pointcloud/0", PointCloud2 + ), + ("detected_pointcloud_1", Detection3DModule): LCMTransport( + "/detector3d/pointcloud/1", PointCloud2 + ), + ("detected_pointcloud_2", Detection3DModule): LCMTransport( + "/detector3d/pointcloud/2", PointCloud2 + ), + ("detected_image_0", Detection3DModule): LCMTransport("/detector3d/image/0", Image), + ("detected_image_1", Detection3DModule): LCMTransport("/detector3d/image/1", Image), + ("detected_image_2", Detection3DModule): LCMTransport("/detector3d/image/2", Image), + # Detection DB module outputs + ("detections", ObjectDBModule): LCMTransport( + "/detectorDB/detections", Detection2DArray + ), + ("annotations", ObjectDBModule): LCMTransport( + "/detectorDB/annotations", ImageAnnotations + ), + ("scene_update", ObjectDBModule): LCMTransport("/detectorDB/scene_update", SceneUpdate), + ("detected_pointcloud_0", ObjectDBModule): LCMTransport( + "/detectorDB/pointcloud/0", PointCloud2 + ), + ("detected_pointcloud_1", ObjectDBModule): LCMTransport( + "/detectorDB/pointcloud/1", PointCloud2 + ), + ("detected_pointcloud_2", ObjectDBModule): LCMTransport( + "/detectorDB/pointcloud/2", PointCloud2 + ), + ("detected_image_0", ObjectDBModule): LCMTransport("/detectorDB/image/0", Image), + ("detected_image_1", ObjectDBModule): LCMTransport("/detectorDB/image/1", Image), + ("detected_image_2", ObjectDBModule): LCMTransport("/detectorDB/image/2", Image), + # Person tracker outputs + ("target", PersonTracker): LCMTransport("/person_tracker/target", PoseStamped), + } + ) +) + +__all__ = ["unitree_g1_detection"] diff --git a/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_shm.py b/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_shm.py new file mode 100644 index 0000000000..5ee4d4c9d1 --- /dev/null +++ 
b/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_shm.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""G1 stack with shared memory image transport.""" + +from dimos.constants import DEFAULT_CAPACITY_COLOR_IMAGE +from dimos.core.blueprints import autoconnect +from dimos.core.transport import pSHMTransport +from dimos.msgs.sensor_msgs import Image +from dimos.robot.foxglove_bridge import foxglove_bridge +from dimos.robot.unitree.g1.blueprints.perceptive.unitree_g1 import unitree_g1 + +unitree_g1_shm = autoconnect( + unitree_g1.transports( + { + ("color_image", Image): pSHMTransport( + "/color_image", default_capacity=DEFAULT_CAPACITY_COLOR_IMAGE + ), + } + ), + foxglove_bridge( + shm_channels=[ + "/color_image#sensor_msgs.Image", + ] + ), +) + +__all__ = ["unitree_g1_shm"] diff --git a/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_sim.py b/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_sim.py new file mode 100644 index 0000000000..059102c7a5 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/perceptive/unitree_g1_sim.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""G1 sim stack with perception and memory.""" + +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.g1.blueprints.basic.unitree_g1_basic_sim import unitree_g1_basic_sim +from dimos.robot.unitree.g1.blueprints.perceptive._perception_and_memory import ( + _perception_and_memory, +) + +unitree_g1_sim = autoconnect( + unitree_g1_basic_sim, + _perception_and_memory, +).global_config(n_dask_workers=8) + +__all__ = ["unitree_g1_sim"] diff --git a/dimos/robot/unitree/g1/blueprints/primitive/__init__.py b/dimos/robot/unitree/g1/blueprints/primitive/__init__.py new file mode 100644 index 0000000000..833f767728 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/primitive/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Primitive blueprints for Unitree G1.""" diff --git a/dimos/robot/unitree/g1/blueprints/primitive/uintree_g1_primitive_no_nav.py b/dimos/robot/unitree/g1/blueprints/primitive/uintree_g1_primitive_no_nav.py new file mode 100644 index 0000000000..36bf569f72 --- /dev/null +++ b/dimos/robot/unitree/g1/blueprints/primitive/uintree_g1_primitive_no_nav.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Minimal G1 stack without navigation, used as a base for larger blueprints.""" + +from dimos_lcm.sensor_msgs import CameraInfo + +from dimos.core.blueprints import autoconnect +from dimos.core.global_config import global_config +from dimos.core.transport import LCMTransport +from dimos.hardware.sensors.camera import zed +from dimos.hardware.sensors.camera.module import camera_module # type: ignore[attr-defined] +from dimos.hardware.sensors.camera.webcam import Webcam +from dimos.mapping.costmapper import cost_mapper +from dimos.mapping.voxels import voxel_mapper +from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Transform, Twist, Vector3 +from dimos.msgs.nav_msgs import Odometry, Path +from dimos.msgs.sensor_msgs import Image, PointCloud2 +from dimos.msgs.std_msgs import Bool +from dimos.navigation.frontier_exploration import wavefront_frontier_explorer +from dimos.protocol.pubsub.impl.lcmpubsub import LCM +from dimos.web.websocket_vis.websocket_vis_module import 
websocket_vis + +rerun_config = { + "pubsubs": [LCM(autoconf=True)], + "visual_override": { + "world/camera_info": lambda camera_info: camera_info.to_rerun( + image_topic="/world/color_image", + optical_frame="camera_optical", + ), + "world/global_map": lambda grid: grid.to_rerun(voxel_size=0.1, mode="boxes"), + "world/navigation_costmap": lambda grid: grid.to_rerun( + colormap="Accent", + z_offset=0.015, + opacity=0.2, + background="#484981", + ), + }, + "static": { + "world/tf/base_link": lambda rr: [ + rr.Boxes3D( + half_sizes=[0.2, 0.15, 0.75], + colors=[(0, 255, 127)], + fill_mode="MajorWireframe", + ), + rr.Transform3D(parent_frame="tf#/base_link"), + ] + }, +} + +match global_config.viewer_backend: + case "foxglove": + from dimos.robot.foxglove_bridge import foxglove_bridge + + _with_vis = autoconnect(foxglove_bridge()) + case "rerun": + from dimos.visualization.rerun.bridge import rerun_bridge + + _with_vis = autoconnect(rerun_bridge(**rerun_config)) + case "rerun-web": + from dimos.visualization.rerun.bridge import rerun_bridge + + _with_vis = autoconnect(rerun_bridge(viewer_mode="web", **rerun_config)) + case _: + _with_vis = autoconnect() + +_camera = ( + autoconnect( + camera_module( + transform=Transform( + translation=Vector3(0.05, 0.0, 0.6), # height of camera on G1 robot + rotation=Quaternion.from_euler(Vector3(0.0, 0.2, 0.0)), + frame_id="sensor", + child_frame_id="camera_link", + ), + hardware=lambda: Webcam( + camera_index=0, + fps=15, + stereo_slice="left", + camera_info=zed.CameraInfo.SingleWebcam, + ), + ), + ) + if not global_config.simulation + else autoconnect() +) + +uintree_g1_primitive_no_nav = ( + autoconnect( + _with_vis, + _camera, + voxel_mapper(voxel_size=0.1), + cost_mapper(), + wavefront_frontier_explorer(), + # Visualization + websocket_vis(), + ) + .global_config(n_dask_workers=4, robot_model="unitree_g1") + .transports( + { + # G1 uses Twist for movement commands + ("cmd_vel", Twist): LCMTransport("/cmd_vel", Twist), + # State 
estimation from ROS + ("state_estimation", Odometry): LCMTransport("/state_estimation", Odometry), + # Odometry output from ROSNavigationModule + ("odom", PoseStamped): LCMTransport("/odom", PoseStamped), + # Navigation module topics from nav_bot + ("goal_req", PoseStamped): LCMTransport("/goal_req", PoseStamped), + ("goal_active", PoseStamped): LCMTransport("/goal_active", PoseStamped), + ("path_active", Path): LCMTransport("/path_active", Path), + ("pointcloud", PointCloud2): LCMTransport("/lidar", PointCloud2), + ("global_pointcloud", PointCloud2): LCMTransport("/map", PointCloud2), + # Original navigation topics for backwards compatibility + ("goal_pose", PoseStamped): LCMTransport("/goal_pose", PoseStamped), + ("goal_reached", Bool): LCMTransport("/goal_reached", Bool), + ("cancel_goal", Bool): LCMTransport("/cancel_goal", Bool), + # Camera topics + ("color_image", Image): LCMTransport("/color_image", Image), + ("camera_info", CameraInfo): LCMTransport("/camera_info", CameraInfo), + } + ) +) + +__all__ = ["uintree_g1_primitive_no_nav"] diff --git a/dimos/robot/unitree/connection/g1.py b/dimos/robot/unitree/g1/connection.py similarity index 93% rename from dimos/robot/unitree/connection/g1.py rename to dimos/robot/unitree/g1/connection.py index 1e15809146..f12d0ee0e6 100644 --- a/dimos/robot/unitree/connection/g1.py +++ b/dimos/robot/unitree/g1/connection.py @@ -19,9 +19,9 @@ from dimos import spec from dimos.core import DimosCluster, In, Module, rpc -from dimos.core.global_config import GlobalConfig +from dimos.core.global_config import GlobalConfig, global_config from dimos.msgs.geometry_msgs import Twist -from dimos.robot.unitree.connection.connection import UnitreeWebRTCConnection +from dimos.robot.unitree.connection import UnitreeWebRTCConnection from dimos.utils.logging_config import setup_logger logger = setup_logger() @@ -39,11 +39,11 @@ def __init__( self, ip: str | None = None, connection_type: str | None = None, - global_config: GlobalConfig | None = 
None, + cfg: GlobalConfig = global_config, *args: Any, **kwargs: Any, ) -> None: - self._global_config = global_config or GlobalConfig() + self._global_config = cfg self.ip = ip if ip is not None else self._global_config.robot_ip self.connection_type = connection_type or self._global_config.unitree_connection_type self.connection = None diff --git a/dimos/robot/unitree/g1/g1agent.py b/dimos/robot/unitree/g1/g1agent.py deleted file mode 100644 index a95a905b7d..0000000000 --- a/dimos/robot/unitree/g1/g1agent.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from dimos import agents -from dimos.agents.skills.navigation import NavigationSkillContainer -from dimos.core import DimosCluster -from dimos.perception import spatial_perception -from dimos.robot.unitree.g1 import g1detector - - -def deploy(dimos: DimosCluster, ip: str): # type: ignore[no-untyped-def] - g1 = g1detector.deploy(dimos, ip) - - nav = g1.get("nav") - camera = g1.get("camera") - detector3d = g1.get("detector3d") - connection = g1.get("connection") - - spatialmem = spatial_perception.deploy(dimos, camera) - - navskills = dimos.deploy( # type: ignore[attr-defined] - NavigationSkillContainer, - spatialmem, - nav, - detector3d, - ) - navskills.start() - - agent = agents.deploy( # type: ignore[attr-defined] - dimos, - "You are controling a humanoid robot", - skill_containers=[connection, nav, camera, spatialmem, navskills], - ) - agent.run_implicit_skill("current_position") - agent.run_implicit_skill("video_stream") - - return {"agent": agent, "spatialmem": spatialmem, **g1} diff --git a/dimos/robot/unitree/g1/g1detector.py b/dimos/robot/unitree/g1/g1detector.py deleted file mode 100644 index 55986eb087..0000000000 --- a/dimos/robot/unitree/g1/g1detector.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from dimos.core import DimosCluster -from dimos.perception.detection import module3D, moduleDB -from dimos.perception.detection.detectors.person.yolo import YoloPersonDetector -from dimos.robot.unitree.g1 import g1zed - - -def deploy(dimos: DimosCluster, ip: str): # type: ignore[no-untyped-def] - g1 = g1zed.deploy(dimos, ip) - - nav = g1.get("nav") - camera = g1.get("camera") - - person_detector = module3D.deploy( - dimos, - camera=camera, - lidar=nav, - detector=YoloPersonDetector, - ) - - detector3d = moduleDB.deploy( # type: ignore[attr-defined] - dimos, - camera=camera, - lidar=nav, - filter=lambda det: det.class_id != 0, - ) - - return {"person_detector": person_detector, "detector3d": detector3d, **g1} diff --git a/dimos/robot/unitree/g1/g1zed.py b/dimos/robot/unitree/g1/g1zed.py deleted file mode 100644 index cafcbec909..0000000000 --- a/dimos/robot/unitree/g1/g1zed.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import TypedDict, cast - -from dimos.constants import DEFAULT_CAPACITY_COLOR_IMAGE -from dimos.core import DimosCluster, LCMTransport, pSHMTransport -from dimos.hardware.sensors.camera import zed -from dimos.hardware.sensors.camera.module import CameraModule -from dimos.hardware.sensors.camera.webcam import Webcam -from dimos.msgs.geometry_msgs import ( - Quaternion, - Transform, - Vector3, -) -from dimos.msgs.sensor_msgs import CameraInfo -from dimos.navigation import rosnav -from dimos.navigation.rosnav import ROSNav -from dimos.robot import foxglove_bridge -from dimos.robot.unitree.connection import g1 -from dimos.robot.unitree.connection.g1 import G1Connection -from dimos.utils.logging_config import setup_logger - -logger = setup_logger() - - -class G1ZedDeployResult(TypedDict): - nav: ROSNav - connection: G1Connection - camera: CameraModule - camerainfo: CameraInfo - - -def deploy_g1_monozed(dimos: DimosCluster) -> CameraModule: - camera = cast( - "CameraModule", - dimos.deploy( # type: ignore[attr-defined] - CameraModule, - frequency=4.0, - transform=Transform( - translation=Vector3(0.05, 0.0, 0.0), - rotation=Quaternion.from_euler(Vector3(0.0, 0.0, 0.0)), - frame_id="sensor", - child_frame_id="camera_link", - ), - hardware=lambda: Webcam( - camera_index=0, - fps=5, - stereo_slice="left", - camera_info=zed.CameraInfo.SingleWebcam, - ), - ), - ) - - camera.color_image.transport = pSHMTransport( - "/image", default_capacity=DEFAULT_CAPACITY_COLOR_IMAGE - ) - camera.camera_info.transport = LCMTransport("/camera_info", CameraInfo) - camera.start() - return camera - - -def deploy(dimos: DimosCluster, ip: str): # type: ignore[no-untyped-def] - nav = rosnav.deploy( # type: ignore[call-arg] - dimos, - sensor_to_base_link_transform=Transform( - frame_id="sensor", child_frame_id="base_link", translation=Vector3(0.0, 0.0, 1.5) - ), - ) - connection = g1.deploy(dimos, ip, nav) - zedcam = deploy_g1_monozed(dimos) - - foxglove_bridge.deploy(dimos) - - return 
{ - "nav": nav, - "connection": connection, - "camera": zedcam, - } diff --git a/dimos/robot/unitree/connection/g1sim.py b/dimos/robot/unitree/g1/sim.py similarity index 59% rename from dimos/robot/unitree/connection/g1sim.py rename to dimos/robot/unitree/g1/sim.py index cd4c3e4505..6888ae74aa 100644 --- a/dimos/robot/unitree/connection/g1sim.py +++ b/dimos/robot/unitree/g1/sim.py @@ -13,13 +13,15 @@ # limitations under the License. +import threading +from threading import Thread import time from typing import TYPE_CHECKING, Any from reactivex.disposable import Disposable from dimos.core import In, Module, Out, rpc -from dimos.core.global_config import GlobalConfig +from dimos.core.global_config import GlobalConfig, global_config from dimos.msgs.geometry_msgs import ( PoseStamped, Quaternion, @@ -27,40 +29,63 @@ Twist, Vector3, ) -from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.robot.unitree_webrtc.type.odometry import Odometry as SimOdometry +from dimos.msgs.sensor_msgs import CameraInfo, Image, PointCloud2 +from dimos.robot.unitree.type.odometry import Odometry as SimOdometry from dimos.utils.logging_config import setup_logger if TYPE_CHECKING: - from dimos.robot.unitree_webrtc.mujoco_connection import MujocoConnection + from dimos.robot.unitree.mujoco_connection import MujocoConnection logger = setup_logger() +def _camera_info_static() -> CameraInfo: + """Camera intrinsics for rerun visualization (matches Go2 convention).""" + fx, fy, cx, cy = (819.553492, 820.646595, 625.284099, 336.808987) + width, height = (1280, 720) + + return CameraInfo( + frame_id="camera_optical", + height=height, + width=width, + distortion_model="plumb_bob", + D=[0.0, 0.0, 0.0, 0.0, 0.0], + K=[fx, 0.0, cx, 0.0, fy, cy, 0.0, 0.0, 1.0], + R=[1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0], + P=[fx, 0.0, cx, 0.0, 0.0, fy, cy, 0.0, 0.0, 0.0, 1.0, 0.0], + binning_x=0, + binning_y=0, + ) + + class G1SimConnection(Module): cmd_vel: In[Twist] lidar: Out[PointCloud2] odom: 
Out[PoseStamped] + color_image: Out[Image] + camera_info: Out[CameraInfo] ip: str | None _global_config: GlobalConfig + _camera_info_thread: Thread | None = None def __init__( self, ip: str | None = None, - global_config: GlobalConfig | None = None, + cfg: GlobalConfig = global_config, *args: Any, **kwargs: Any, ) -> None: - self._global_config = global_config or GlobalConfig() + self._global_config = cfg self.ip = ip if ip is not None else self._global_config.robot_ip self.connection: MujocoConnection | None = None + self._stop_event = threading.Event() super().__init__(*args, **kwargs) @rpc def start(self) -> None: super().start() - from dimos.robot.unitree_webrtc.mujoco_connection import MujocoConnection + from dimos.robot.unitree.mujoco_connection import MujocoConnection self.connection = MujocoConnection(self._global_config) assert self.connection is not None @@ -69,27 +94,51 @@ def start(self) -> None: self._disposables.add(Disposable(self.cmd_vel.subscribe(self.move))) self._disposables.add(self.connection.odom_stream().subscribe(self._publish_sim_odom)) self._disposables.add(self.connection.lidar_stream().subscribe(self.lidar.publish)) + self._disposables.add(self.connection.video_stream().subscribe(self.color_image.publish)) + + self._camera_info_thread = Thread( + target=self._publish_camera_info_loop, + daemon=True, + ) + self._camera_info_thread.start() @rpc def stop(self) -> None: + self._stop_event.set() assert self.connection is not None self.connection.stop() + if self._camera_info_thread and self._camera_info_thread.is_alive(): + self._camera_info_thread.join(timeout=1.0) super().stop() + def _publish_camera_info_loop(self) -> None: + info = _camera_info_static() + while not self._stop_event.is_set(): + self.camera_info.publish(info) + self._stop_event.wait(1.0) + def _publish_tf(self, msg: PoseStamped) -> None: self.odom.publish(msg) self.tf.publish(Transform.from_pose("base_link", msg)) - # Publish camera_link transform + # Publish camera_link 
and camera_optical transforms camera_link = Transform( - translation=Vector3(0.3, 0.0, 0.0), + translation=Vector3(0.05, 0.0, 0.6), rotation=Quaternion(0.0, 0.0, 0.0, 1.0), frame_id="base_link", child_frame_id="camera_link", ts=time.time(), ) + camera_optical = Transform( + translation=Vector3(0.0, 0.0, 0.0), + rotation=Quaternion(-0.5, 0.5, -0.5, 0.5), + frame_id="camera_link", + child_frame_id="camera_optical", + ts=time.time(), + ) + map_to_world = Transform( translation=Vector3(0.0, 0.0, 0.0), rotation=Quaternion(0.0, 0.0, 0.0, 1.0), @@ -98,7 +147,7 @@ def _publish_tf(self, msg: PoseStamped) -> None: ts=time.time(), ) - self.tf.publish(camera_link, map_to_world) + self.tf.publish(camera_link, camera_optical, map_to_world) def _publish_sim_odom(self, msg: SimOdometry) -> None: self._publish_tf( diff --git a/dimos/robot/unitree_webrtc/unitree_g1_skill_container.py b/dimos/robot/unitree/g1/skill_container.py similarity index 86% rename from dimos/robot/unitree_webrtc/unitree_g1_skill_container.py rename to dimos/robot/unitree/g1/skill_container.py index 99b028b4d9..7ce9730686 100644 --- a/dimos/robot/unitree_webrtc/unitree_g1_skill_container.py +++ b/dimos/robot/unitree/g1/skill_container.py @@ -19,10 +19,10 @@ import difflib +from dimos.agents.annotation import skill from dimos.core.core import rpc -from dimos.core.skill_module import SkillModule +from dimos.core.module import Module from dimos.msgs.geometry_msgs import Twist, Vector3 -from dimos.protocol.skill.skill import skill from dimos.utils.logging_config import setup_logger logger = setup_logger() @@ -61,10 +61,10 @@ } -class UnitreeG1SkillContainer(SkillModule): +class UnitreeG1SkillContainer(Module): rpc_calls: list[str] = [ - "G1ConnectionModule.move", - "G1ConnectionModule.publish_request", + "G1Connection.move", + "G1Connection.publish_request", ] @rpc @@ -75,7 +75,7 @@ def start(self) -> None: def stop(self) -> None: super().stop() - @skill() + @skill def move(self, x: float, y: float = 0.0, yaw: 
float = 0.0, duration: float = 0.0) -> str: """Move the robot using direct velocity commands. Determine duration required based on user distance instructions. @@ -90,23 +90,27 @@ def move(self, x: float, y: float = 0.0, yaw: float = 0.0, duration: float = 0.0 duration: How long to move (seconds) """ - move_rpc = self.get_rpc_calls("G1ConnectionModule.move") + move_rpc = self.get_rpc_calls("G1Connection.move") twist = Twist(linear=Vector3(x, y, 0), angular=Vector3(0, 0, yaw)) move_rpc(twist, duration=duration) return f"Started moving with velocity=({x}, {y}, {yaw}) for {duration} seconds" - @skill() + @skill def execute_arm_command(self, command_name: str) -> str: - return self._execute_g1_command(_ARM_COMMANDS, 7106, command_name) + return self._execute_g1_command(_ARM_COMMANDS, 7106, "rt/api/arm/request", command_name) - @skill() + @skill def execute_mode_command(self, command_name: str) -> str: - return self._execute_g1_command(_MODE_COMMANDS, 7101, command_name) + return self._execute_g1_command(_MODE_COMMANDS, 7101, "rt/api/sport/request", command_name) def _execute_g1_command( - self, command_dict: dict[str, tuple[int, str]], api_id: int, command_name: str + self, + command_dict: dict[str, tuple[int, str]], + api_id: int, + topic: str, + command_name: str, ) -> str: - publish_request_rpc = self.get_rpc_calls("G1ConnectionModule.publish_request") + publish_request_rpc = self.get_rpc_calls("G1Connection.publish_request") if command_name not in command_dict: suggestions = difflib.get_close_matches( @@ -117,9 +121,7 @@ def _execute_g1_command( id_, _ = command_dict[command_name] try: - publish_request_rpc( - "rt/api/sport/request", {"api_id": api_id, "parameter": {"data": id_}} - ) + publish_request_rpc(topic, {"api_id": api_id, "parameter": {"data": id_}}) return f"'{command_name}' command executed successfully." 
except Exception as e: logger.error(f"Failed to execute {command_name}: {e}") diff --git a/dimos/robot/unitree/go2/blueprints/__init__.py b/dimos/robot/unitree/go2/blueprints/__init__.py new file mode 100644 index 0000000000..cbc49694f3 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/__init__.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Cascaded GO2 blueprints split into focused modules.""" + +import lazy_loader as lazy + +__getattr__, __dir__, __all__ = lazy.attach( + __name__, + submod_attrs={ + "agentic._common_agentic": ["_common_agentic"], + "agentic.unitree_go2_agentic": ["unitree_go2_agentic"], + "agentic.unitree_go2_agentic_huggingface": ["unitree_go2_agentic_huggingface"], + "agentic.unitree_go2_agentic_mcp": ["unitree_go2_agentic_mcp"], + "agentic.unitree_go2_agentic_ollama": ["unitree_go2_agentic_ollama"], + "agentic.unitree_go2_temporal_memory": ["unitree_go2_temporal_memory"], + "basic.unitree_go2_basic": ["_linux", "_mac", "unitree_go2_basic"], + "smart._with_jpeg": ["_with_jpeglcm"], + "smart.unitree_go2": ["unitree_go2"], + "smart.unitree_go2_detection": ["unitree_go2_detection"], + "smart.unitree_go2_ros": ["unitree_go2_ros"], + "smart.unitree_go2_spatial": ["unitree_go2_spatial"], + "smart.unitree_go2_vlm_stream_test": ["unitree_go2_vlm_stream_test"], + }, +) diff --git a/dimos/robot/unitree/go2/blueprints/agentic/__init__.py 
b/dimos/robot/unitree/go2/blueprints/agentic/__init__.py new file mode 100644 index 0000000000..84d1b41b23 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/agentic/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Agentic blueprints for Unitree GO2.""" diff --git a/dimos/robot/unitree/go2/blueprints/agentic/_common_agentic.py b/dimos/robot/unitree/go2/blueprints/agentic/_common_agentic.py new file mode 100644 index 0000000000..817d5e3a7d --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/agentic/_common_agentic.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.agents.skills.navigation import navigation_skill +from dimos.agents.skills.person_follow import person_follow_skill +from dimos.agents.skills.speak_skill import speak_skill +from dimos.agents.web_human_input import web_input +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.go2.connection import GO2Connection +from dimos.robot.unitree.unitree_skill_container import unitree_skills + +_common_agentic = autoconnect( + navigation_skill(), + person_follow_skill(camera_info=GO2Connection.camera_info_static), + unitree_skills(), + web_input(), + speak_skill(), +) + +__all__ = ["_common_agentic"] diff --git a/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic.py b/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic.py new file mode 100644 index 0000000000..2fb1a4cb74 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.agents.agent import agent +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.go2.blueprints.agentic._common_agentic import _common_agentic +from dimos.robot.unitree.go2.blueprints.smart.unitree_go2_spatial import unitree_go2_spatial + +unitree_go2_agentic = autoconnect( + unitree_go2_spatial, + agent(), + _common_agentic, +) + +__all__ = ["unitree_go2_agentic"] diff --git a/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic_huggingface.py b/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic_huggingface.py new file mode 100644 index 0000000000..1c998b7495 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic_huggingface.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.agents.agent import agent +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.go2.blueprints.agentic._common_agentic import _common_agentic +from dimos.robot.unitree.go2.blueprints.smart.unitree_go2_spatial import unitree_go2_spatial + +unitree_go2_agentic_huggingface = autoconnect( + unitree_go2_spatial, + agent(model="huggingface:Qwen/Qwen2.5-1.5B-Instruct"), + _common_agentic, +) + +__all__ = ["unitree_go2_agentic_huggingface"] diff --git a/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic_mcp.py b/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic_mcp.py new file mode 100644 index 0000000000..bbc3e4c216 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic_mcp.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.core.blueprints import autoconnect +from dimos.protocol.mcp.mcp import MCPModule +from dimos.robot.unitree.go2.blueprints.agentic.unitree_go2_agentic import unitree_go2_agentic + +unitree_go2_agentic_mcp = autoconnect( + unitree_go2_agentic, + MCPModule.blueprint(), +) + +__all__ = ["unitree_go2_agentic_mcp"] diff --git a/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic_ollama.py b/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic_ollama.py new file mode 100644 index 0000000000..6a518ad831 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_agentic_ollama.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.agents.agent import agent +from dimos.agents.ollama_agent import ollama_installed +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.go2.blueprints.agentic._common_agentic import _common_agentic +from dimos.robot.unitree.go2.blueprints.smart.unitree_go2_spatial import unitree_go2_spatial + +unitree_go2_agentic_ollama = autoconnect( + unitree_go2_spatial, + agent(model="ollama:qwen3:8b"), + _common_agentic, +).requirements( + ollama_installed, +) + +__all__ = ["unitree_go2_agentic_ollama"] diff --git a/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_temporal_memory.py b/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_temporal_memory.py new file mode 100644 index 0000000000..017ccaba2b --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/agentic/unitree_go2_temporal_memory.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.core.blueprints import autoconnect +from dimos.perception.experimental.temporal_memory import temporal_memory +from dimos.robot.unitree.go2.blueprints.agentic.unitree_go2_agentic import unitree_go2_agentic + +unitree_go2_temporal_memory = autoconnect( + unitree_go2_agentic, + temporal_memory(), +) + +__all__ = ["unitree_go2_temporal_memory"] diff --git a/dimos/robot/unitree/go2/blueprints/basic/__init__.py b/dimos/robot/unitree/go2/blueprints/basic/__init__.py new file mode 100644 index 0000000000..79964b0297 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/basic/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Basic blueprints for Unitree GO2.""" diff --git a/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_basic.py b/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_basic.py new file mode 100644 index 0000000000..cfd53abe51 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/basic/unitree_go2_basic.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python3 + +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import platform + +from dimos.constants import DEFAULT_CAPACITY_COLOR_IMAGE +from dimos.core.blueprints import autoconnect +from dimos.core.global_config import global_config +from dimos.core.transport import pSHMTransport +from dimos.msgs.sensor_msgs import Image +from dimos.protocol.pubsub.impl.lcmpubsub import LCM +from dimos.robot.unitree.go2.connection import go2_connection +from dimos.web.websocket_vis.websocket_vis_module import websocket_vis + +# Mac has some issue with high bandwidth UDP, so we use pSHMTransport for color_image +# actually we can use pSHMTransport for all platforms, and for all streams +# TODO need a global transport toggle on blueprints/global config +_mac_transports: dict[tuple[str, type], pSHMTransport[Image]] = { + ("color_image", Image): pSHMTransport( + "color_image", default_capacity=DEFAULT_CAPACITY_COLOR_IMAGE + ), +} + +_transports_base = ( + autoconnect() if platform.system() == "Linux" else autoconnect().transports(_mac_transports) +) + +rerun_config = { + # any pubsub that supports subscribe_all and topic that supports str(topic) + # is acceptable here + "pubsubs": [LCM(autoconf=True)], + # Custom converters for specific rerun entity paths + # Normally all these would be specified in their respectative modules + # Until this is implemented we have central overrides here + # + # This is unsustainable once we move to multi robot etc + "visual_override": { + "world/camera_info": lambda camera_info: camera_info.to_rerun( + image_topic="/world/color_image", + optical_frame="camera_optical", + ), + "world/global_map": 
lambda grid: grid.to_rerun(voxel_size=0.1, mode="boxes"), + "world/navigation_costmap": lambda grid: grid.to_rerun( + colormap="Accent", + z_offset=0.015, + opacity=0.2, + background="#484981", + ), + }, + # slapping a go2 shaped box on top of tf/base_link + "static": { + "world/tf/base_link": lambda rr: [ + rr.Boxes3D( + half_sizes=[0.35, 0.155, 0.2], + colors=[(0, 255, 127)], + fill_mode="wireframe", + ), + rr.Transform3D(parent_frame="tf#/base_link"), + ] + }, +} + + +match global_config.viewer_backend: + case "foxglove": + from dimos.robot.foxglove_bridge import foxglove_bridge + + with_vis = autoconnect( + _transports_base, + foxglove_bridge(shm_channels=["/color_image#sensor_msgs.Image"]), + ) + case "rerun": + from dimos.visualization.rerun.bridge import rerun_bridge + + with_vis = autoconnect(_transports_base, rerun_bridge(**rerun_config)) + case "rerun-web": + from dimos.visualization.rerun.bridge import rerun_bridge + + with_vis = autoconnect(_transports_base, rerun_bridge(viewer_mode="web", **rerun_config)) + case _: + with_vis = _transports_base + +unitree_go2_basic = autoconnect( + with_vis, + go2_connection(), + websocket_vis(), +).global_config(n_dask_workers=4, robot_model="unitree_go2") + +__all__ = [ + "unitree_go2_basic", +] diff --git a/dimos/robot/unitree/go2/blueprints/smart/__init__.py b/dimos/robot/unitree/go2/blueprints/smart/__init__.py new file mode 100644 index 0000000000..7d5bdbc3ab --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/smart/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Smart blueprints for Unitree GO2.""" diff --git a/dimos/robot/unitree/go2/blueprints/smart/_with_jpeg.py b/dimos/robot/unitree/go2/blueprints/smart/_with_jpeg.py new file mode 100644 index 0000000000..9c77d599cf --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/smart/_with_jpeg.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.core.transport import JpegLcmTransport +from dimos.msgs.sensor_msgs import Image +from dimos.robot.unitree.go2.blueprints.smart.unitree_go2 import unitree_go2 + +_with_jpeglcm = unitree_go2.transports( + { + ("color_image", Image): JpegLcmTransport("/color_image", Image), + } +) + +__all__ = ["_with_jpeglcm"] diff --git a/dimos/robot/unitree/go2/blueprints/smart/unitree_go2.py b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2.py new file mode 100644 index 0000000000..5d096444d5 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.core.blueprints import autoconnect +from dimos.mapping.costmapper import cost_mapper +from dimos.mapping.voxels import voxel_mapper +from dimos.navigation.frontier_exploration import wavefront_frontier_explorer +from dimos.navigation.replanning_a_star.module import replanning_a_star_planner +from dimos.robot.unitree.go2.blueprints.basic.unitree_go2_basic import unitree_go2_basic + +unitree_go2 = autoconnect( + unitree_go2_basic, + voxel_mapper(voxel_size=0.1), + cost_mapper(), + replanning_a_star_planner(), + wavefront_frontier_explorer(), +).global_config(n_dask_workers=6, robot_model="unitree_go2") + +__all__ = ["unitree_go2"] diff --git a/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_detection.py b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_detection.py new file mode 100644 index 0000000000..f2edf2cb3b --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_detection.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos_lcm.foxglove_msgs.ImageAnnotations import ( + ImageAnnotations, # type: ignore[import-untyped] +) +from dimos_lcm.foxglove_msgs.SceneUpdate import SceneUpdate # type: ignore[import-untyped] + +from dimos.core.blueprints import autoconnect +from dimos.core.transport import LCMTransport +from dimos.msgs.sensor_msgs import Image, PointCloud2 +from dimos.msgs.vision_msgs import Detection2DArray +from dimos.perception.detection.module3D import Detection3DModule, detection3d_module +from dimos.robot.unitree.go2.blueprints.smart.unitree_go2 import unitree_go2 +from dimos.robot.unitree.go2.connection import GO2Connection + +unitree_go2_detection = ( + autoconnect( + unitree_go2, + detection3d_module( + camera_info=GO2Connection.camera_info_static, + ), + ) + .remappings( + [ + (Detection3DModule, "pointcloud", "global_map"), + ] + ) + .transports( + { + # Detection 3D module outputs + ("detections", Detection3DModule): LCMTransport( + "/detector3d/detections", Detection2DArray + ), + ("annotations", Detection3DModule): LCMTransport( + "/detector3d/annotations", ImageAnnotations + ), + ("scene_update", Detection3DModule): LCMTransport( + "/detector3d/scene_update", SceneUpdate + ), + ("detected_pointcloud_0", Detection3DModule): LCMTransport( + "/detector3d/pointcloud/0", PointCloud2 + ), + ("detected_pointcloud_1", Detection3DModule): LCMTransport( + "/detector3d/pointcloud/1", PointCloud2 + ), + ("detected_pointcloud_2", Detection3DModule): LCMTransport( + "/detector3d/pointcloud/2", PointCloud2 + ), + ("detected_image_0", Detection3DModule): LCMTransport("/detector3d/image/0", Image), + ("detected_image_1", Detection3DModule): LCMTransport("/detector3d/image/1", Image), + ("detected_image_2", Detection3DModule): LCMTransport("/detector3d/image/2", Image), + } + ) +) + +__all__ = ["unitree_go2_detection"] diff --git a/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_ros.py b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_ros.py new file mode 100644 
index 0000000000..a335b1e9af --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_ros.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dimos.core.transport import ROSTransport +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.msgs.sensor_msgs import Image, PointCloud2 +from dimos.robot.unitree.go2.blueprints.smart.unitree_go2 import unitree_go2 + +unitree_go2_ros = unitree_go2.transports( + { + ("lidar", PointCloud2): ROSTransport("lidar", PointCloud2), + ("global_map", PointCloud2): ROSTransport("global_map", PointCloud2), + ("odom", PoseStamped): ROSTransport("odom", PoseStamped), + ("color_image", Image): ROSTransport("color_image", Image), + } +) + +__all__ = ["unitree_go2_ros"] diff --git a/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_spatial.py b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_spatial.py new file mode 100644 index 0000000000..e2695f9bfb --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_spatial.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dimos.core.blueprints import autoconnect +from dimos.perception.spatial_perception import spatial_memory +from dimos.robot.unitree.go2.blueprints.smart.unitree_go2 import unitree_go2 +from dimos.utils.monitoring import utilization + +unitree_go2_spatial = autoconnect( + unitree_go2, + spatial_memory(), + utilization(), +).global_config(n_dask_workers=8) + +__all__ = ["unitree_go2_spatial"] diff --git a/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_vlm_stream_test.py b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_vlm_stream_test.py new file mode 100644 index 0000000000..194d3973c6 --- /dev/null +++ b/dimos/robot/unitree/go2/blueprints/smart/unitree_go2_vlm_stream_test.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from dimos.agents.vlm_agent import vlm_agent +from dimos.agents.vlm_stream_tester import vlm_stream_tester +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.go2.blueprints.basic.unitree_go2_basic import unitree_go2_basic + +unitree_go2_vlm_stream_test = autoconnect( + unitree_go2_basic, + vlm_agent(), + vlm_stream_tester(), +) + +__all__ = ["unitree_go2_vlm_stream_test"] diff --git a/dimos/robot/unitree/connection/go2.py b/dimos/robot/unitree/go2/connection.py similarity index 89% rename from dimos/robot/unitree/connection/go2.py rename to dimos/robot/unitree/go2/connection.py index 96a54117c8..82aa34c97a 100644 --- a/dimos/robot/unitree/connection/go2.py +++ b/dimos/robot/unitree/go2/connection.py @@ -19,13 +19,12 @@ from reactivex.disposable import Disposable from reactivex.observable import Observable -import rerun as rr import rerun.blueprint as rrb from dimos import spec +from dimos.agents.annotation import skill from dimos.core import DimosCluster, In, LCMTransport, Module, Out, pSHMTransport, rpc -from dimos.core.global_config import GlobalConfig -from dimos.dashboard.rerun_init import connect_rerun +from dimos.core.global_config import GlobalConfig, global_config from dimos.msgs.geometry_msgs import ( PoseStamped, Quaternion, @@ -34,11 +33,11 @@ Vector3, ) from dimos.msgs.sensor_msgs import CameraInfo, Image, PointCloud2 -from dimos.msgs.sensor_msgs.image_impls.AbstractImage import ImageFormat -from dimos.robot.unitree.connection.connection import UnitreeWebRTCConnection +from dimos.msgs.sensor_msgs.Image import ImageFormat +from dimos.robot.unitree.connection import UnitreeWebRTCConnection from dimos.utils.data import get_data from dimos.utils.decorators.decorators import simple_mcache -from dimos.utils.testing import TimedSensorReplay, TimedSensorStorage +from dimos.utils.testing.replay import TimedSensorReplay, TimedSensorStorage logger = logging.getLogger(__name__) @@ -153,6 +152,7 @@ class GO2Connection(Module, spec.Camera, 
spec.Pointcloud): camera_info_static: CameraInfo = _camera_info_static() _global_config: GlobalConfig _camera_info_thread: Thread | None = None + _latest_video_frame: Image | None = None @classmethod def rerun_views(cls): # type: ignore[no-untyped-def] @@ -167,11 +167,11 @@ def rerun_views(cls): # type: ignore[no-untyped-def] def __init__( # type: ignore[no-untyped-def] self, ip: str | None = None, - global_config: GlobalConfig | None = None, + cfg: GlobalConfig = global_config, *args, **kwargs, ) -> None: - self._global_config = global_config or GlobalConfig() + self._global_config = cfg ip = ip if ip is not None else self._global_config.robot_ip @@ -180,7 +180,7 @@ def __init__( # type: ignore[no-untyped-def] if ip in ["fake", "mock", "replay"] or connection_type == "replay": self.connection = ReplayConnection() elif ip == "mujoco" or connection_type == "mujoco": - from dimos.robot.unitree_webrtc.mujoco_connection import MujocoConnection + from dimos.robot.unitree.mujoco_connection import MujocoConnection self.connection = MujocoConnection(self._global_config) else: @@ -192,13 +192,13 @@ def __init__( # type: ignore[no-untyped-def] @rpc def record(self, recording_name: str) -> None: lidar_store: TimedSensorStorage = TimedSensorStorage(f"{recording_name}/lidar") # type: ignore[type-arg] - lidar_store.save_stream(self.connection.lidar_stream()).subscribe(lambda x: x) # type: ignore[arg-type] + lidar_store.consume_stream(self.connection.lidar_stream()) odom_store: TimedSensorStorage = TimedSensorStorage(f"{recording_name}/odom") # type: ignore[type-arg] - odom_store.save_stream(self.connection.odom_stream()).subscribe(lambda x: x) # type: ignore[arg-type] + odom_store.consume_stream(self.connection.odom_stream()) video_store: TimedSensorStorage = TimedSensorStorage(f"{recording_name}/video") # type: ignore[type-arg] - video_store.save_stream(self.connection.video_stream()).subscribe(lambda x: x) # type: ignore[arg-type] + 
video_store.consume_stream(self.connection.video_stream()) @rpc def start(self) -> None: @@ -206,14 +206,9 @@ def start(self) -> None: self.connection.start() - # Connect this worker process to Rerun if it will log sensor data. - if self._global_config.viewer_backend.startswith("rerun"): - connect_rerun(global_config=self._global_config) - def onimage(image: Image) -> None: self.color_image.publish(image) - if self._global_config.viewer_backend.startswith("rerun"): - rr.log("world/robot/camera/rgb", image.to_rerun()) + self._latest_video_frame = image self._disposables.add(self.connection.lidar_stream().subscribe(self.lidar.publish)) self._disposables.add(self.connection.odom_stream().subscribe(self._publish_tf)) @@ -302,6 +297,15 @@ def publish_request(self, topic: str, data: dict[str, Any]) -> dict[Any, Any]: """ return self.connection.publish_request(topic, data) + @skill + def observe(self) -> Image | None: + """Returns the latest video frame from the robot camera. Use this skill for any visual world queries. + + This skill provides the current camera view for perception tasks. + Returns None if no frame has been captured yet. 
+ """ + return self._latest_video_frame + go2_connection = GO2Connection.blueprint diff --git a/dimos/robot/unitree_webrtc/keyboard_teleop.py b/dimos/robot/unitree/keyboard_teleop.py similarity index 99% rename from dimos/robot/unitree_webrtc/keyboard_teleop.py rename to dimos/robot/unitree/keyboard_teleop.py index 8e0d987127..3d7d4c263e 100644 --- a/dimos/robot/unitree_webrtc/keyboard_teleop.py +++ b/dimos/robot/unitree/keyboard_teleop.py @@ -45,7 +45,7 @@ def __init__(self) -> None: self._stop_event = threading.Event() @rpc - def start(self) -> bool: + def start(self) -> None: super().start() self._keys_held = set() @@ -54,7 +54,7 @@ def start(self) -> bool: self._thread = threading.Thread(target=self._pygame_loop, daemon=True) self._thread.start() - return True + return @rpc def stop(self) -> None: diff --git a/dimos/robot/unitree_webrtc/modular/detect.py b/dimos/robot/unitree/modular/detect.py similarity index 96% rename from dimos/robot/unitree_webrtc/modular/detect.py rename to dimos/robot/unitree/modular/detect.py index 8f92d15e81..e5999e9fd8 100644 --- a/dimos/robot/unitree_webrtc/modular/detect.py +++ b/dimos/robot/unitree/modular/detect.py @@ -18,8 +18,8 @@ from dimos.msgs.sensor_msgs import Image, PointCloud2 from dimos.msgs.std_msgs import Header -from dimos.robot.unitree_webrtc.type.lidar import pointcloud2_from_webrtc_lidar -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.robot.unitree.type.lidar import pointcloud2_from_webrtc_lidar +from dimos.robot.unitree.type.odometry import Odometry image_resize_factor = 1 originalwidth, originalheight = (1280, 720) @@ -141,7 +141,7 @@ def process_data(): # type: ignore[no-untyped-def] Detection2DModule, build_imageannotations, ) - from dimos.robot.unitree_webrtc.type.odometry import Odometry + from dimos.robot.unitree.type.odometry import Odometry from dimos.utils.data import get_data from dimos.utils.testing import TimedSensorReplay diff --git 
a/dimos/robot/unitree_webrtc/mujoco_connection.py b/dimos/robot/unitree/mujoco_connection.py similarity index 97% rename from dimos/robot/unitree_webrtc/mujoco_connection.py rename to dimos/robot/unitree/mujoco_connection.py index d4a7736fc2..f998ae1dd9 100644 --- a/dimos/robot/unitree_webrtc/mujoco_connection.py +++ b/dimos/robot/unitree/mujoco_connection.py @@ -37,7 +37,7 @@ from dimos.core.global_config import GlobalConfig from dimos.msgs.geometry_msgs import Quaternion, Twist, Vector3 from dimos.msgs.sensor_msgs import CameraInfo, Image, ImageFormat, PointCloud2 -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.robot.unitree.type.odometry import Odometry from dimos.simulation.mujoco.constants import ( LAUNCHER_PATH, LIDAR_FPS, @@ -62,7 +62,7 @@ class MujocoConnection: def __init__(self, global_config: GlobalConfig) -> None: try: - import mujoco + import mujoco # noqa: F401 except ImportError: raise ImportError("'mujoco' is not installed. Use `pip install -e .[sim]`") @@ -147,7 +147,9 @@ def start(self) -> None: # Use weakref to avoid preventing garbage collection weak_self = weakref.ref(self) - def cleanup_on_exit() -> None: + def cleanup_on_exit( + weak_self: "weakref.ReferenceType[MujocoConnection]" = weak_self, + ) -> None: instance = weak_self() if instance is not None: instance.stop() diff --git a/dimos/robot/unitree_webrtc/params/front_camera_720.yaml b/dimos/robot/unitree/params/front_camera_720.yaml similarity index 100% rename from dimos/robot/unitree_webrtc/params/front_camera_720.yaml rename to dimos/robot/unitree/params/front_camera_720.yaml diff --git a/dimos/robot/unitree_webrtc/params/sim_camera.yaml b/dimos/robot/unitree/params/sim_camera.yaml similarity index 100% rename from dimos/robot/unitree_webrtc/params/sim_camera.yaml rename to dimos/robot/unitree/params/sim_camera.yaml diff --git a/dimos/robot/unitree_webrtc/rosnav.py b/dimos/robot/unitree/rosnav.py similarity index 98% rename from 
dimos/robot/unitree_webrtc/rosnav.py rename to dimos/robot/unitree/rosnav.py index 3244ecfd05..7a9b98b678 100644 --- a/dimos/robot/unitree_webrtc/rosnav.py +++ b/dimos/robot/unitree/rosnav.py @@ -119,7 +119,7 @@ def go_to(self, pose: PoseStamped, timeout: float = 60.0) -> bool: return False @rpc - def stop(self) -> bool: + def stop(self) -> None: """ Cancel current navigation by publishing to cancel_goal. @@ -131,6 +131,6 @@ def stop(self) -> bool: if self.cancel_goal: cancel_msg = Bool(data=True) self.cancel_goal.publish(cancel_msg) - return True + return - return False + return diff --git a/dimos/robot/unitree/run.py b/dimos/robot/unitree/run.py deleted file mode 100644 index 5b17ad7a9d..0000000000 --- a/dimos/robot/unitree/run.py +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Centralized runner for modular Unitree robot deployment scripts. 
- -Usage: - python run.py g1agent --ip 192.168.1.100 - python run.py g1/g1zed --ip $ROBOT_IP - python run.py go2/go2.py --ip $ROBOT_IP - python run.py connection/g1.py --ip $ROBOT_IP -""" - -import argparse -import importlib -import os -import sys - -from dotenv import load_dotenv - -from dimos.core import start, wait_exit - - -def main() -> None: - load_dotenv() - - parser = argparse.ArgumentParser(description="Unitree Robot Modular Deployment Runner") - parser.add_argument( - "module", - help="Module name/path to run (e.g., g1agent, g1/g1zed, go2/go2.py)", - ) - parser.add_argument( - "--ip", - default=os.getenv("ROBOT_IP"), - help="Robot IP address (default: ROBOT_IP from .env)", - ) - parser.add_argument( - "--workers", - type=int, - default=8, - help="Number of worker threads for DimosCluster (default: 8)", - ) - - args = parser.parse_args() - - # Validate IP address - if not args.ip: - print("ERROR: Robot IP address not provided") - print("Please provide --ip or set ROBOT_IP in .env") - sys.exit(1) - - # Parse the module path - module_path = args.module - - # Remove .py extension if present - if module_path.endswith(".py"): - module_path = module_path[:-3] - - # Convert path separators to dots for import - module_path = module_path.replace("/", ".") - - # Import the module - try: - # Build the full import path - full_module_path = f"dimos.robot.unitree.{module_path}" - print(f"Importing module: {full_module_path}") - module = importlib.import_module(full_module_path) - except ImportError: - # Try as a relative import from the unitree package - try: - module = importlib.import_module(f".{module_path}", package="dimos.robot.unitree") - except ImportError as e2: - import traceback - - traceback.print_exc() - - print(f"\nERROR: Could not import module '{args.module}'") - print("Tried importing as:") - print(f" 1. {full_module_path}") - print(" 2. 
Relative import from dimos.robot.unitree") - print("Make sure the module exists in dimos/robot/unitree/") - print(f"Import error: {e2}") - - sys.exit(1) - - # Verify deploy function exists - if not hasattr(module, "deploy"): - print(f"ERROR: Module '{args.module}' does not have a 'deploy' function") - sys.exit(1) - - print(f"Running {args.module}.deploy() with IP {args.ip}") - - # Run the standard deployment pattern - dimos = start(args.workers) - try: - module.deploy(dimos, args.ip) - wait_exit() - finally: - dimos.close_all() # type: ignore[attr-defined] - - -if __name__ == "__main__": - main() diff --git a/dimos/robot/unitree/testing/__init__.py b/dimos/robot/unitree/testing/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dimos/robot/unitree_webrtc/testing/helpers.py b/dimos/robot/unitree/testing/helpers.py similarity index 100% rename from dimos/robot/unitree_webrtc/testing/helpers.py rename to dimos/robot/unitree/testing/helpers.py diff --git a/dimos/robot/unitree_webrtc/testing/mock.py b/dimos/robot/unitree/testing/mock.py similarity index 97% rename from dimos/robot/unitree_webrtc/testing/mock.py rename to dimos/robot/unitree/testing/mock.py index 2af1754cb4..26e6a90018 100644 --- a/dimos/robot/unitree_webrtc/testing/mock.py +++ b/dimos/robot/unitree/testing/mock.py @@ -22,7 +22,7 @@ from reactivex.observable import Observable from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.robot.unitree_webrtc.type.lidar import RawLidarMsg, pointcloud2_from_webrtc_lidar +from dimos.robot.unitree.type.lidar import RawLidarMsg, pointcloud2_from_webrtc_lidar class Mock: diff --git a/dimos/robot/unitree_webrtc/testing/test_actors.py b/dimos/robot/unitree/testing/test_actors.py similarity index 90% rename from dimos/robot/unitree_webrtc/testing/test_actors.py rename to dimos/robot/unitree/testing/test_actors.py index def89346e8..9366092eb6 100644 --- a/dimos/robot/unitree_webrtc/testing/test_actors.py +++ 
b/dimos/robot/unitree/testing/test_actors.py @@ -20,7 +20,7 @@ from dimos import core from dimos.core import Module, rpc from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.robot.unitree_webrtc.type.map import Map as Mapper +from dimos.robot.unitree.type.map import Map as Mapper @pytest.fixture @@ -38,6 +38,7 @@ class Consumer: def __init__(self, counter=None) -> None: self.testf = counter + self._tasks: set[asyncio.Task[None]] = set() print("consumer init with", counter) async def waitcall(self, n: int): @@ -48,7 +49,9 @@ async def task() -> None: res = await self.testf(n) print("res is", res) - asyncio.create_task(task()) + background_task = asyncio.create_task(task()) + self._tasks.add(background_task) + background_task.add_done_callback(self._tasks.discard) return n diff --git a/dimos/robot/unitree_webrtc/testing/test_tooling.py b/dimos/robot/unitree/testing/test_tooling.py similarity index 89% rename from dimos/robot/unitree_webrtc/testing/test_tooling.py rename to dimos/robot/unitree/testing/test_tooling.py index 50b689931e..d1f2eeb169 100644 --- a/dimos/robot/unitree_webrtc/testing/test_tooling.py +++ b/dimos/robot/unitree/testing/test_tooling.py @@ -16,8 +16,8 @@ import pytest -from dimos.robot.unitree_webrtc.type.lidar import pointcloud2_from_webrtc_lidar -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.robot.unitree.type.lidar import pointcloud2_from_webrtc_lidar +from dimos.robot.unitree.type.odometry import Odometry from dimos.utils.reactive import backpressure from dimos.utils.testing import TimedSensorReplay diff --git a/dimos/robot/unitree/type/__init__.py b/dimos/robot/unitree/type/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dimos/robot/unitree/type/lidar.py b/dimos/robot/unitree/type/lidar.py new file mode 100644 index 0000000000..df2909dc38 --- /dev/null +++ b/dimos/robot/unitree/type/lidar.py @@ -0,0 +1,74 @@ +# Copyright 2025-2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Unitree WebRTC lidar message parsing utilities.""" + +import time +from typing import TypedDict + +import numpy as np +import open3d as o3d # type: ignore[import-untyped] + +from dimos.msgs.sensor_msgs import PointCloud2 + +# Backwards compatibility alias for pickled data +LidarMessage = PointCloud2 + + +class RawLidarPoints(TypedDict): + points: np.ndarray # type: ignore[type-arg] # Shape (N, 3) array of 3D points [x, y, z] + + +class RawLidarData(TypedDict): + """Data portion of the LIDAR message""" + + frame_id: str + origin: list[float] + resolution: float + src_size: int + stamp: float + width: list[int] + data: RawLidarPoints + + +class RawLidarMsg(TypedDict): + """Static type definition for raw LIDAR message from Unitree WebRTC.""" + + type: str + topic: str + data: RawLidarData + + +def pointcloud2_from_webrtc_lidar(raw_message: RawLidarMsg, ts: float | None = None) -> PointCloud2: + """Convert a raw Unitree WebRTC lidar message to PointCloud2. + + Args: + raw_message: Raw lidar message from Unitree WebRTC API + ts: Optional timestamp override. If None, uses current time. 
+ + Returns: + PointCloud2 message with the lidar points + """ + data = raw_message["data"] + points = data["data"]["points"] + + pointcloud = o3d.geometry.PointCloud() + pointcloud.points = o3d.utility.Vector3dVector(points) + + return PointCloud2( + pointcloud=pointcloud, + # webrtc stamp is broken (e.g., "stamp": 1.758148e+09), use current time + ts=ts if ts is not None else time.time(), + frame_id="world", + ) diff --git a/dimos/robot/unitree/type/lowstate.py b/dimos/robot/unitree/type/lowstate.py new file mode 100644 index 0000000000..3e7926424a --- /dev/null +++ b/dimos/robot/unitree/type/lowstate.py @@ -0,0 +1,93 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Literal, TypedDict + +raw_odom_msg_sample = { + "type": "msg", + "topic": "rt/lf/lowstate", + "data": { + "imu_state": {"rpy": [0.008086, -0.007515, 2.981771]}, + "motor_state": [ + {"q": 0.098092, "temperature": 40, "lost": 0, "reserve": [0, 674]}, + {"q": 0.757921, "temperature": 32, "lost": 0, "reserve": [0, 674]}, + {"q": -1.490911, "temperature": 38, "lost": 6, "reserve": [0, 674]}, + {"q": -0.072477, "temperature": 42, "lost": 0, "reserve": [0, 674]}, + {"q": 1.020276, "temperature": 32, "lost": 5, "reserve": [0, 674]}, + {"q": -2.007172, "temperature": 38, "lost": 5, "reserve": [0, 674]}, + {"q": 0.071382, "temperature": 50, "lost": 5, "reserve": [0, 674]}, + {"q": 0.963379, "temperature": 36, "lost": 6, "reserve": [0, 674]}, + {"q": -1.978311, "temperature": 40, "lost": 5, "reserve": [0, 674]}, + {"q": -0.051066, "temperature": 48, "lost": 0, "reserve": [0, 674]}, + {"q": 0.73103, "temperature": 34, "lost": 10, "reserve": [0, 674]}, + {"q": -1.466473, "temperature": 38, "lost": 6, "reserve": [0, 674]}, + {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, + {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, + {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, + {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, + {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, + {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, + {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, + {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, + ], + "bms_state": { + "version_high": 1, + "version_low": 18, + "soc": 55, + "current": -2481, + "cycle": 56, + "bq_ntc": [30, 29], + "mcu_ntc": [33, 32], + }, + "foot_force": [97, 84, 81, 81], + "temperature_ntc1": 48, + "power_v": 28.331045, + }, +} + + +class MotorState(TypedDict): + q: float + temperature: int + lost: int + reserve: list[int] + + +class ImuState(TypedDict): + rpy: list[float] + + +class BmsState(TypedDict): + version_high: int + version_low: 
int + soc: int + current: int + cycle: int + bq_ntc: list[int] + mcu_ntc: list[int] + + +class LowStateData(TypedDict): + imu_state: ImuState + motor_state: list[MotorState] + bms_state: BmsState + foot_force: list[int] + temperature_ntc1: int + power_v: float + + +class LowStateMsg(TypedDict): + type: Literal["msg"] + topic: str + data: LowStateData diff --git a/dimos/robot/unitree/type/map.py b/dimos/robot/unitree/type/map.py new file mode 100644 index 0000000000..a771467246 --- /dev/null +++ b/dimos/robot/unitree/type/map.py @@ -0,0 +1,128 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from pathlib import Path +import time +from typing import Any + +import open3d as o3d # type: ignore[import-untyped] +from reactivex import interval +from reactivex.disposable import Disposable + +from dimos.core import DimosCluster, In, LCMTransport, Module, Out, rpc +from dimos.core.global_config import GlobalConfig, global_config +from dimos.mapping.pointclouds.accumulators.general import GeneralPointCloudAccumulator +from dimos.mapping.pointclouds.accumulators.protocol import PointCloudAccumulator +from dimos.mapping.pointclouds.occupancy import general_occupancy +from dimos.msgs.nav_msgs import OccupancyGrid +from dimos.msgs.sensor_msgs import PointCloud2 +from dimos.robot.unitree.go2.connection import Go2ConnectionProtocol + + +class Map(Module): + lidar: In[PointCloud2] + global_map: Out[PointCloud2] + global_costmap: Out[OccupancyGrid] + + _point_cloud_accumulator: PointCloudAccumulator + _global_config: GlobalConfig + _preloaded_occupancy: OccupancyGrid | None = None + + def __init__( # type: ignore[no-untyped-def] + self, + voxel_size: float = 0.05, + cost_resolution: float = 0.05, + global_publish_interval: float | None = None, + min_height: float = 0.10, + max_height: float = 0.5, + cfg: GlobalConfig = global_config, + **kwargs, + ) -> None: + self.voxel_size = voxel_size + self.cost_resolution = cost_resolution + self.global_publish_interval = global_publish_interval + self.min_height = min_height + self.max_height = max_height + self._global_config = cfg + self._point_cloud_accumulator = GeneralPointCloudAccumulator( + self.voxel_size, self._global_config + ) + + if self._global_config.simulation: + self.min_height = 0.3 + + super().__init__(**kwargs) + + @rpc + def start(self) -> None: + super().start() + + self._disposables.add(Disposable(self.lidar.subscribe(self.add_frame))) + + if self.global_publish_interval is not None: + unsub = interval(self.global_publish_interval).subscribe(self._publish) + self._disposables.add(unsub) + + @rpc + def 
stop(self) -> None: + super().stop() + + def to_PointCloud2(self) -> PointCloud2: + return PointCloud2( + pointcloud=self._point_cloud_accumulator.get_point_cloud(), + ts=time.time(), + ) + + # TODO: Why is this RPC? + @rpc + def add_frame(self, frame: PointCloud2) -> None: + self._point_cloud_accumulator.add(frame.pointcloud) + + @property + def o3d_geometry(self) -> o3d.geometry.PointCloud: + return self._point_cloud_accumulator.get_point_cloud() + + def _publish(self, _: Any) -> None: + self.global_map.publish(self.to_PointCloud2()) + + occupancygrid = general_occupancy( + self.to_PointCloud2(), + resolution=self.cost_resolution, + min_height=self.min_height, + max_height=self.max_height, + ) + + # When debugging occupancy navigation, load a predefined occupancy grid. + if self._global_config.mujoco_global_costmap_from_occupancy: + if self._preloaded_occupancy is None: + path = Path(self._global_config.mujoco_global_costmap_from_occupancy) + self._preloaded_occupancy = OccupancyGrid.from_path(path) + occupancygrid = self._preloaded_occupancy + + self.global_costmap.publish(occupancygrid) + + +mapper = Map.blueprint + + +def deploy(dimos: DimosCluster, connection: Go2ConnectionProtocol): # type: ignore[no-untyped-def] + mapper = dimos.deploy(Map, global_publish_interval=1.0) # type: ignore[attr-defined] + mapper.global_map.transport = LCMTransport("/global_map", PointCloud2) + mapper.global_costmap.transport = LCMTransport("/global_costmap", OccupancyGrid) + mapper.lidar.connect(connection.pointcloud) # type: ignore[attr-defined] + mapper.start() + return mapper + + +__all__ = ["Map", "mapper"] diff --git a/dimos/robot/unitree/type/odometry.py b/dimos/robot/unitree/type/odometry.py new file mode 100644 index 0000000000..aa664b32ef --- /dev/null +++ b/dimos/robot/unitree/type/odometry.py @@ -0,0 +1,102 @@ +# Copyright 2025-2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Literal, TypedDict + +from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Vector3 +from dimos.robot.unitree.type.timeseries import ( + Timestamped, +) +from dimos.types.timestamped import to_timestamp + +raw_odometry_msg_sample = { + "type": "msg", + "topic": "rt/utlidar/robot_pose", + "data": { + "header": {"stamp": {"sec": 1746565669, "nanosec": 448350564}, "frame_id": "odom"}, + "pose": { + "position": {"x": 5.961965, "y": -2.916958, "z": 0.319509}, + "orientation": {"x": 0.002787, "y": -0.000902, "z": -0.970244, "w": -0.242112}, + }, + }, +} + + +class TimeStamp(TypedDict): + sec: int + nanosec: int + + +class Header(TypedDict): + stamp: TimeStamp + frame_id: str + + +class RawPosition(TypedDict): + x: float + y: float + z: float + + +class Orientation(TypedDict): + x: float + y: float + z: float + w: float + + +class PoseData(TypedDict): + position: RawPosition + orientation: Orientation + + +class OdometryData(TypedDict): + header: Header + pose: PoseData + + +class RawOdometryMessage(TypedDict): + type: Literal["msg"] + topic: str + data: OdometryData + + +class Odometry(PoseStamped, Timestamped): # type: ignore[misc] + name = "geometry_msgs.PoseStamped" + + def __init__(self, frame_id: str = "base_link", *args, **kwargs) -> None: # type: ignore[no-untyped-def] + super().__init__(frame_id=frame_id, *args, **kwargs) # type: ignore[misc] + + @classmethod + def from_msg(cls, msg: 
RawOdometryMessage) -> "Odometry": + pose = msg["data"]["pose"] + + # Extract position + pos = Vector3( + pose["position"].get("x"), + pose["position"].get("y"), + pose["position"].get("z"), + ) + + rot = Quaternion( + pose["orientation"].get("x"), + pose["orientation"].get("y"), + pose["orientation"].get("z"), + pose["orientation"].get("w"), + ) + + ts = to_timestamp(msg["data"]["header"]["stamp"]) + return Odometry(position=pos, orientation=rot, ts=ts, frame_id="world") + + def __repr__(self) -> str: + return f"Odom pos({self.position}), rot({self.orientation})" diff --git a/dimos/robot/unitree_webrtc/type/test_lidar.py b/dimos/robot/unitree/type/test_lidar.py similarity index 83% rename from dimos/robot/unitree_webrtc/type/test_lidar.py rename to dimos/robot/unitree/type/test_lidar.py index 7543fe63a7..719088d77a 100644 --- a/dimos/robot/unitree_webrtc/type/test_lidar.py +++ b/dimos/robot/unitree/type/test_lidar.py @@ -14,9 +14,10 @@ # limitations under the License. import itertools +from typing import cast from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.robot.unitree_webrtc.type.lidar import pointcloud2_from_webrtc_lidar +from dimos.robot.unitree.type.lidar import RawLidarMsg, pointcloud2_from_webrtc_lidar from dimos.utils.testing import SensorReplay @@ -25,5 +26,5 @@ def test_init() -> None: for raw_frame in itertools.islice(lidar.iterate(), 5): assert isinstance(raw_frame, dict) - frame = pointcloud2_from_webrtc_lidar(raw_frame) + frame = pointcloud2_from_webrtc_lidar(cast("RawLidarMsg", raw_frame)) assert isinstance(frame, PointCloud2) diff --git a/dimos/robot/unitree_webrtc/type/test_odometry.py b/dimos/robot/unitree/type/test_odometry.py similarity index 62% rename from dimos/robot/unitree_webrtc/type/test_odometry.py rename to dimos/robot/unitree/type/test_odometry.py index e277455cdd..d0fe2b290e 100644 --- a/dimos/robot/unitree_webrtc/type/test_odometry.py +++ b/dimos/robot/unitree/type/test_odometry.py @@ -14,12 +14,9 @@ from __future__ 
import annotations -from operator import add, sub - import pytest -import reactivex.operators as ops -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.robot.unitree.type.odometry import Odometry from dimos.utils.testing import SensorReplay _EXPECTED_TOTAL_RAD = -4.05212 @@ -38,19 +35,6 @@ def test_odometry_conversion_and_count() -> None: assert isinstance(odom, Odometry) -def test_last_yaw_value() -> None: - """Verify yaw of the final message (regression guard).""" - last_msg = SensorReplay(name="raw_odometry_rotate_walk").stream().pipe(ops.last()).run() - - assert last_msg is not None, "Replay is empty" - assert last_msg["data"]["pose"]["orientation"] == { - "x": 0.01077, - "y": 0.008505, - "z": 0.499171, - "w": -0.866395, - } - - def test_total_rotation_travel_iterate() -> None: total_rad = 0.0 prev_yaw: float | None = None @@ -63,19 +47,3 @@ def test_total_rotation_travel_iterate() -> None: prev_yaw = yaw assert total_rad == pytest.approx(_EXPECTED_TOTAL_RAD, abs=0.001) - - -def test_total_rotation_travel_rxpy() -> None: - total_rad = ( - SensorReplay(name="raw_odometry_rotate_walk", autocast=Odometry.from_msg) - .stream() - .pipe( - ops.map(lambda odom: odom.orientation.radians.z), - ops.pairwise(), # [1,2,3,4] -> [[1,2], [2,3], [3,4]] - ops.starmap(sub), # [sub(1,2), sub(2,3), sub(3,4)] - ops.reduce(add), - ) - .run() - ) - - assert total_rad == pytest.approx(4.05, abs=0.01) diff --git a/dimos/robot/unitree_webrtc/type/test_timeseries.py b/dimos/robot/unitree/type/test_timeseries.py similarity index 95% rename from dimos/robot/unitree_webrtc/type/test_timeseries.py rename to dimos/robot/unitree/type/test_timeseries.py index 2c7606d9f2..5164d91a94 100644 --- a/dimos/robot/unitree_webrtc/type/test_timeseries.py +++ b/dimos/robot/unitree/type/test_timeseries.py @@ -14,7 +14,7 @@ from datetime import datetime, timedelta -from dimos.robot.unitree_webrtc.type.timeseries import TEvent, TList +from dimos.robot.unitree.type.timeseries import 
TEvent, TList fixed_date = datetime(2025, 5, 13, 15, 2, 5).astimezone() start_event = TEvent(fixed_date, 1) diff --git a/dimos/robot/unitree/type/timeseries.py b/dimos/robot/unitree/type/timeseries.py new file mode 100644 index 0000000000..b75a41b932 --- /dev/null +++ b/dimos/robot/unitree/type/timeseries.py @@ -0,0 +1,149 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from abc import ABC, abstractmethod +from datetime import datetime, timedelta, timezone +from typing import TYPE_CHECKING, Generic, TypedDict, TypeVar, Union + +if TYPE_CHECKING: + from collections.abc import Iterable + +PAYLOAD = TypeVar("PAYLOAD") + + +class RosStamp(TypedDict): + sec: int + nanosec: int + + +EpochLike = Union[int, float, datetime, RosStamp] + + +def from_ros_stamp(stamp: dict[str, int], tz: timezone | None = None) -> datetime: + """Convert ROS-style timestamp {'sec': int, 'nanosec': int} to datetime.""" + return datetime.fromtimestamp(stamp["sec"] + stamp["nanosec"] / 1e9, tz=tz) + + +def to_human_readable(ts: EpochLike) -> str: + dt = to_datetime(ts) + return dt.strftime("%Y-%m-%d %H:%M:%S") + + +def to_datetime(ts: EpochLike, tz: timezone | None = None) -> datetime: + if isinstance(ts, datetime): + # if ts.tzinfo is None: + # ts = ts.astimezone(tz) + return ts + if isinstance(ts, int | float): + return datetime.fromtimestamp(ts, tz=tz) + if isinstance(ts, dict) and "sec" in ts and "nanosec" in ts: + 
return datetime.fromtimestamp(ts["sec"] + ts["nanosec"] / 1e9, tz=tz) + raise TypeError("unsupported timestamp type") + + +class Timestamped(ABC): + """Abstract class for an event with a timestamp.""" + + ts: datetime + + def __init__(self, ts: EpochLike) -> None: + self.ts = to_datetime(ts) + + +class TEvent(Timestamped, Generic[PAYLOAD]): + """Concrete class for an event with a timestamp and data.""" + + def __init__(self, timestamp: EpochLike, data: PAYLOAD) -> None: + super().__init__(timestamp) + self.data = data + + def __eq__(self, other: object) -> bool: + if not isinstance(other, TEvent): + return NotImplemented + return self.ts == other.ts and self.data == other.data + + def __repr__(self) -> str: + return f"TEvent(ts={self.ts}, data={self.data})" + + +EVENT = TypeVar("EVENT", bound=Timestamped) # any object that is a subclass of Timestamped + + +class Timeseries(ABC, Generic[EVENT]): + """Abstract class for an iterable of events with timestamps.""" + + @abstractmethod + def __iter__(self) -> Iterable[EVENT]: ... + + @property + def start_time(self) -> datetime: + """Return the timestamp of the earliest event, assuming the data is sorted.""" + return next(iter(self)).ts # type: ignore[call-overload, no-any-return, type-var] + + @property + def end_time(self) -> datetime: + """Return the timestamp of the latest event, assuming the data is sorted.""" + return next(reversed(list(self))).ts # type: ignore[call-overload, no-any-return] + + @property + def frequency(self) -> float: + """Calculate the frequency of events in Hz.""" + return len(list(self)) / (self.duration().total_seconds() or 1) # type: ignore[call-overload] + + def time_range(self) -> tuple[datetime, datetime]: + """Return (earliest_ts, latest_ts). 
Empty input ⇒ ValueError.""" + return self.start_time, self.end_time + + def duration(self) -> timedelta: + """Total time spanned by the iterable (Δ = last - first).""" + return self.end_time - self.start_time + + def closest_to(self, timestamp: EpochLike) -> EVENT: + """Return the event closest to the given timestamp. Assumes timeseries is sorted.""" + print("closest to", timestamp) + target = to_datetime(timestamp) + print("converted to", target) + target_ts = target.timestamp() + + closest = None + min_dist = float("inf") + + for event in self: # type: ignore[attr-defined] + dist = abs(event.ts - target_ts) + if dist > min_dist: + break + + min_dist = dist + closest = event + + print(f"closest: {closest}") + return closest # type: ignore[return-value] + + def __repr__(self) -> str: + """Return a string representation of the Timeseries.""" + return f"Timeseries(date={self.start_time.strftime('%Y-%m-%d')}, start={self.start_time.strftime('%H:%M:%S')}, end={self.end_time.strftime('%H:%M:%S')}, duration={self.duration()}, events={len(list(self))}, freq={self.frequency:.2f}Hz)" # type: ignore[call-overload] + + def __str__(self) -> str: + """Return a string representation of the Timeseries.""" + return self.__repr__() + + +class TList(list[EVENT], Timeseries[EVENT]): + """A test class that inherits from both list and Timeseries.""" + + def __repr__(self) -> str: + """Return a string representation of the TList using Timeseries repr method.""" + return Timeseries.__repr__(self) diff --git a/dimos/robot/unitree/type/vector.py b/dimos/robot/unitree/type/vector.py new file mode 100644 index 0000000000..58438c0a98 --- /dev/null +++ b/dimos/robot/unitree/type/vector.py @@ -0,0 +1,442 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import builtins +from collections.abc import Iterable +from typing import ( + Any, + Protocol, + TypeVar, + Union, + runtime_checkable, +) + +import numpy as np +from numpy.typing import NDArray + +T = TypeVar("T", bound="Vector") + + +class Vector: + """A wrapper around numpy arrays for vector operations with intuitive syntax.""" + + def __init__(self, *args: Any) -> None: + """Initialize a vector from components or another iterable. + + Examples: + Vector(1, 2) # 2D vector + Vector(1, 2, 3) # 3D vector + Vector([1, 2, 3]) # From list + Vector(np.array([1, 2, 3])) # From numpy array + """ + if len(args) == 1 and hasattr(args[0], "__iter__"): + self._data = np.array(args[0], dtype=float) + elif len(args) == 1: + self._data = np.array([args[0].x, args[0].y, args[0].z], dtype=float) + + else: + self._data = np.array(args, dtype=float) + + @property + def yaw(self) -> float: + return self.x + + @property + def tuple(self) -> tuple[float, ...]: + """Tuple representation of the vector.""" + return tuple(self._data) + + @property + def x(self) -> float: + """X component of the vector.""" + return self._data[0] if len(self._data) > 0 else 0.0 + + @property + def y(self) -> float: + """Y component of the vector.""" + return self._data[1] if len(self._data) > 1 else 0.0 + + @property + def z(self) -> float: + """Z component of the vector.""" + return self._data[2] if len(self._data) > 2 else 0.0 + + @property + def dim(self) -> int: + """Dimensionality of the vector.""" + return len(self._data) + + @property + def data(self) -> NDArray[np.float64]: + """Get the 
underlying numpy array.""" + return self._data + + def __len__(self) -> int: + return len(self._data) + + def __getitem__(self, idx: int) -> float: + return float(self._data[idx]) + + def __iter__(self) -> Iterable[float]: + return iter(self._data) # type: ignore[no-any-return] + + def __repr__(self) -> str: + components = ",".join(f"{x:.6g}" for x in self._data) + return f"({components})" + + def __str__(self) -> str: + if self.dim < 2: + return self.__repr__() + + def getArrow() -> str: + repr = ["←", "↖", "↑", "↗", "→", "↘", "↓", "↙"] + + if self.y == 0 and self.x == 0: + return "·" + + # Calculate angle in radians and convert to directional index + angle = np.arctan2(self.y, self.x) + # Map angle to 0-7 index (8 directions) with proper orientation + dir_index = int(((angle + np.pi) * 4 / np.pi) % 8) + # Get directional arrow symbol + return repr[dir_index] + + return f"{getArrow()} Vector {self.__repr__()}" + + def serialize(self) -> dict: # type: ignore[type-arg] + """Serialize the vector to a dictionary.""" + return {"type": "vector", "c": self._data.tolist()} + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Vector): + return np.array_equal(self._data, other._data) + return np.array_equal(self._data, np.array(other, dtype=float)) + + def __add__(self: T, other: Union["Vector", Iterable[float]]) -> T: + if isinstance(other, Vector): + return self.__class__(self._data + other._data) + return self.__class__(self._data + np.array(other, dtype=float)) + + def __sub__(self: T, other: Union["Vector", Iterable[float]]) -> T: + if isinstance(other, Vector): + return self.__class__(self._data - other._data) + return self.__class__(self._data - np.array(other, dtype=float)) + + def __mul__(self: T, scalar: float) -> T: + return self.__class__(self._data * scalar) + + def __rmul__(self: T, scalar: float) -> T: + return self.__mul__(scalar) + + def __truediv__(self: T, scalar: float) -> T: + return self.__class__(self._data / scalar) + + def __neg__(self: 
T) -> T: + return self.__class__(-self._data) + + def dot(self, other: Union["Vector", Iterable[float]]) -> float: + """Compute dot product.""" + if isinstance(other, Vector): + return float(np.dot(self._data, other._data)) + return float(np.dot(self._data, np.array(other, dtype=float))) + + def cross(self: T, other: Union["Vector", Iterable[float]]) -> T: + """Compute cross product (3D vectors only).""" + if self.dim != 3: + raise ValueError("Cross product is only defined for 3D vectors") + + if isinstance(other, Vector): + other_data = other._data + else: + other_data = np.array(other, dtype=float) + + if len(other_data) != 3: + raise ValueError("Cross product requires two 3D vectors") + + return self.__class__(np.cross(self._data, other_data)) + + def length(self) -> float: + """Compute the Euclidean length (magnitude) of the vector.""" + return float(np.linalg.norm(self._data)) + + def length_squared(self) -> float: + """Compute the squared length of the vector (faster than length()).""" + return float(np.sum(self._data * self._data)) + + def normalize(self: T) -> T: + """Return a normalized unit vector in the same direction.""" + length = self.length() + if length < 1e-10: # Avoid division by near-zero + return self.__class__(np.zeros_like(self._data)) + return self.__class__(self._data / length) + + def to_2d(self: T) -> T: + """Convert a vector to a 2D vector by taking only the x and y components.""" + return self.__class__(self._data[:2]) + + def distance(self, other: Union["Vector", Iterable[float]]) -> float: + """Compute Euclidean distance to another vector.""" + if isinstance(other, Vector): + return float(np.linalg.norm(self._data - other._data)) + return float(np.linalg.norm(self._data - np.array(other, dtype=float))) + + def distance_squared(self, other: Union["Vector", Iterable[float]]) -> float: + """Compute squared Euclidean distance to another vector (faster than distance()).""" + if isinstance(other, Vector): + diff = self._data - other._data + 
else: + diff = self._data - np.array(other, dtype=float) + return float(np.sum(diff * diff)) + + def angle(self, other: Union["Vector", Iterable[float]]) -> float: + """Compute the angle (in radians) between this vector and another.""" + if self.length() < 1e-10 or (isinstance(other, Vector) and other.length() < 1e-10): + return 0.0 + + if isinstance(other, Vector): + other_data = other._data + else: + other_data = np.array(other, dtype=float) + + cos_angle = np.clip( + np.dot(self._data, other_data) + / (np.linalg.norm(self._data) * np.linalg.norm(other_data)), + -1.0, + 1.0, + ) + return float(np.arccos(cos_angle)) + + def project(self: T, onto: Union["Vector", Iterable[float]]) -> T: + """Project this vector onto another vector.""" + if isinstance(onto, Vector): + onto_data = onto._data + else: + onto_data = np.array(onto, dtype=float) + + onto_length_sq = np.sum(onto_data * onto_data) + if onto_length_sq < 1e-10: + return self.__class__(np.zeros_like(self._data)) + + scalar_projection = np.dot(self._data, onto_data) / onto_length_sq + return self.__class__(scalar_projection * onto_data) + + @classmethod + def zeros(cls: type[T], dim: int) -> T: + """Create a zero vector of given dimension.""" + return cls(np.zeros(dim)) + + @classmethod + def ones(cls: type[T], dim: int) -> T: + """Create a vector of ones with given dimension.""" + return cls(np.ones(dim)) + + @classmethod + def unit_x(cls: type[T], dim: int = 3) -> T: + """Create a unit vector in the x direction.""" + v = np.zeros(dim) + v[0] = 1.0 + return cls(v) + + @classmethod + def unit_y(cls: type[T], dim: int = 3) -> T: + """Create a unit vector in the y direction.""" + v = np.zeros(dim) + v[1] = 1.0 + return cls(v) + + @classmethod + def unit_z(cls: type[T], dim: int = 3) -> T: + """Create a unit vector in the z direction.""" + v = np.zeros(dim) + if dim > 2: + v[2] = 1.0 + return cls(v) + + def to_list(self) -> list[float]: + """Convert the vector to a list.""" + return [float(x) for x in self._data] 
+ + def to_tuple(self) -> builtins.tuple[float, ...]: + """Convert the vector to a tuple.""" + return tuple(self._data) + + def to_numpy(self) -> NDArray[np.float64]: + """Convert the vector to a numpy array.""" + return self._data + + +# Protocol approach for static type checking +@runtime_checkable +class VectorLike(Protocol): + """Protocol for types that can be treated as vectors.""" + + def __getitem__(self, key: int) -> float: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterable[float]: ... + + +def to_numpy(value: VectorLike) -> NDArray[np.float64]: + """Convert a vector-compatible value to a numpy array. + + Args: + value: Any vector-like object (Vector, numpy array, tuple, list) + + Returns: + Numpy array representation + """ + if isinstance(value, Vector): + return value.data + elif isinstance(value, np.ndarray): + return value + else: + return np.array(value, dtype=float) + + +def to_vector(value: VectorLike) -> Vector: + """Convert a vector-compatible value to a Vector object. + + Args: + value: Any vector-like object (Vector, numpy array, tuple, list) + + Returns: + Vector object + """ + if isinstance(value, Vector): + return value + else: + return Vector(value) + + +def to_tuple(value: VectorLike) -> tuple[float, ...]: + """Convert a vector-compatible value to a tuple. + + Args: + value: Any vector-like object (Vector, numpy array, tuple, list) + + Returns: + Tuple of floats + """ + if isinstance(value, Vector): + return tuple(float(x) for x in value.data) + elif isinstance(value, np.ndarray): + return tuple(float(x) for x in value) + elif isinstance(value, tuple): + return tuple(float(x) for x in value) + else: + # Convert to list first to ensure we have an indexable sequence + data = [value[i] for i in range(len(value))] + return tuple(float(x) for x in data) + + +def to_list(value: VectorLike) -> list[float]: + """Convert a vector-compatible value to a list. 
+
+    Args:
+        value: Any vector-like object (Vector, numpy array, tuple, list)
+
+    Returns:
+        List of floats
+    """
+    if isinstance(value, Vector):
+        return [float(x) for x in value.data]
+    elif isinstance(value, np.ndarray):
+        return [float(x) for x in value]
+    elif isinstance(value, list):
+        return [float(x) for x in value]
+    else:
+        # Convert to list using indexing
+        return [float(value[i]) for i in range(len(value))]
+
+
+# Helper functions to check dimensionality
+def is_2d(value: VectorLike) -> bool:
+    """Check if a vector-compatible value is 2D.
+
+    Args:
+        value: Any vector-like object (Vector, numpy array, tuple, list)
+
+    Returns:
+        True if the value is 2D
+    """
+    if isinstance(value, Vector):
+        return len(value) == 2
+    elif isinstance(value, np.ndarray):
+        return value.shape[-1] == 2 or value.size == 2
+    else:
+        return len(value) == 2
+
+
+def is_3d(value: VectorLike) -> bool:
+    """Check if a vector-compatible value is 3D.
+
+    Args:
+        value: Any vector-like object (Vector, numpy array, tuple, list)
+
+    Returns:
+        True if the value is 3D
+    """
+    if isinstance(value, Vector):
+        return len(value) == 3
+    elif isinstance(value, np.ndarray):
+        return value.shape[-1] == 3 or value.size == 3
+    else:
+        return len(value) == 3
+
+
+# Extraction functions for XYZ components
+def x(value: VectorLike) -> float:
+    """Get the X component of a vector-compatible value.
+
+    Args:
+        value: Any vector-like object (Vector, numpy array, tuple, list)
+
+    Returns:
+        X component as a float (0.0 for empty input, matching y()/z())
+    """
+    if isinstance(value, Vector):
+        return value.x
+    else:
+        arr = to_numpy(value)
+        # Guard against empty input for consistency with y() and z(),
+        # which fall back to 0.0 instead of raising IndexError.
+        return float(arr[0]) if len(arr) > 0 else 0.0
+
+
+def y(value: VectorLike) -> float:
+    """Get the Y component of a vector-compatible value.
+ + Args: + value: Any vector-like object (Vector, numpy array, tuple, list) + + Returns: + Y component as a float + """ + if isinstance(value, Vector): + return value.y + else: + arr = to_numpy(value) + return float(arr[1]) if len(arr) > 1 else 0.0 + + +def z(value: VectorLike) -> float: + """Get the Z component of a vector-compatible value. + + Args: + value: Any vector-like object (Vector, numpy array, tuple, list) + + Returns: + Z component as a float + """ + if isinstance(value, Vector): + return value.z + else: + arr = to_numpy(value) + return float(arr[2]) if len(arr) > 2 else 0.0 diff --git a/dimos/robot/unitree_webrtc/unitree_skill_container.py b/dimos/robot/unitree/unitree_skill_container.py similarity index 52% rename from dimos/robot/unitree_webrtc/unitree_skill_container.py rename to dimos/robot/unitree/unitree_skill_container.py index c3dea43424..d2f15b9efe 100644 --- a/dimos/robot/unitree_webrtc/unitree_skill_container.py +++ b/dimos/robot/unitree/unitree_skill_container.py @@ -21,18 +21,165 @@ from unitree_webrtc_connect.constants import RTC_TOPIC +from dimos.agents.annotation import skill from dimos.core.core import rpc -from dimos.core.skill_module import SkillModule +from dimos.core.module import Module from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Vector3 from dimos.navigation.base import NavigationState -from dimos.protocol.skill.skill import skill -from dimos.protocol.skill.type import Reducer, Stream -from dimos.robot.unitree_webrtc.unitree_skills import UNITREE_WEBRTC_CONTROLS from dimos.utils.logging_config import setup_logger logger = setup_logger() +UNITREE_WEBRTC_CONTROLS: list[tuple[str, int, str]] = [ + # ("Damp", 1001, "Lowers the robot to the ground fully."), + ( + "BalanceStand", + 1002, + "Activates a mode that maintains the robot in a balanced standing position.", + ), + ( + "StandUp", + 1004, + "Commands the robot to transition from a sitting or prone position to a standing posture.", + ), + ( + "StandDown", + 
1005, + "Instructs the robot to move from a standing position to a sitting or prone posture.", + ), + ( + "RecoveryStand", + 1006, + "Recovers the robot to a state from which it can take more commands. Useful to run after multiple dynamic commands like front flips, Must run after skills like sit and jump and standup.", + ), + ("Sit", 1009, "Commands the robot to sit down from a standing or moving stance."), + ( + "RiseSit", + 1010, + "Commands the robot to rise back to a standing position from a sitting posture.", + ), + ( + "SwitchGait", + 1011, + "Switches the robot's walking pattern or style dynamically, suitable for different terrains or speeds.", + ), + ("Trigger", 1012, "Triggers a specific action or custom routine programmed into the robot."), + ( + "BodyHeight", + 1013, + "Adjusts the height of the robot's body from the ground, useful for navigating various obstacles.", + ), + ( + "FootRaiseHeight", + 1014, + "Controls how high the robot lifts its feet during movement, which can be adjusted for different surfaces.", + ), + ( + "SpeedLevel", + 1015, + "Sets or adjusts the speed at which the robot moves, with various levels available for different operational needs.", + ), + ( + "Hello", + 1016, + "Performs a greeting action, which could involve a wave or other friendly gesture.", + ), + ("Stretch", 1017, "Engages the robot in a stretching routine."), + ( + "TrajectoryFollow", + 1018, + "Directs the robot to follow a predefined trajectory, which could involve complex paths or maneuvers.", + ), + ( + "ContinuousGait", + 1019, + "Enables a mode for continuous walking or running, ideal for long-distance travel.", + ), + ("Content", 1020, "To display or trigger when the robot is happy."), + ("Wallow", 1021, "The robot falls onto its back and rolls around."), + ( + "Dance1", + 1022, + "Performs a predefined dance routine 1, programmed for entertainment or demonstration.", + ), + ("Dance2", 1023, "Performs another variant of a predefined dance routine 2."), + 
("GetBodyHeight", 1024, "Retrieves the current height of the robot's body from the ground."), + ( + "GetFootRaiseHeight", + 1025, + "Retrieves the current height at which the robot's feet are being raised during movement.", + ), + ( + "GetSpeedLevel", + 1026, + "Retrieves the current speed level setting of the robot.", + ), + ( + "SwitchJoystick", + 1027, + "Switches the robot's control mode to respond to joystick input for manual operation.", + ), + ( + "Pose", + 1028, + "Commands the robot to assume a specific pose or posture as predefined in its programming.", + ), + ("Scrape", 1029, "The robot performs a scraping motion."), + ( + "FrontFlip", + 1030, + "Commands the robot to perform a front flip, showcasing its agility and dynamic movement capabilities.", + ), + ( + "FrontJump", + 1031, + "Instructs the robot to jump forward, demonstrating its explosive movement capabilities.", + ), + ( + "FrontPounce", + 1032, + "Commands the robot to perform a pouncing motion forward.", + ), + ( + "WiggleHips", + 1033, + "The robot performs a hip wiggling motion, often used for entertainment or demonstration purposes.", + ), + ( + "GetState", + 1034, + "Retrieves the current operational state of the robot, including its mode, position, and status.", + ), + ( + "EconomicGait", + 1035, + "Engages a more energy-efficient walking or running mode to conserve battery life.", + ), + ("FingerHeart", 1036, "Performs a finger heart gesture while on its hind legs."), + ( + "Handstand", + 1301, + "Commands the robot to perform a handstand, demonstrating balance and control.", + ), + ( + "CrossStep", + 1302, + "Commands the robot to perform cross-step movements.", + ), + ( + "OnesidedStep", + 1303, + "Commands the robot to perform one-sided step movements.", + ), + ("Bound", 1304, "Commands the robot to perform bounding movements."), + ("MoonWalk", 1305, "Commands the robot to perform a moonwalk motion."), + ("LeftFlip", 1042, "Executes a flip towards the left side."), + ("RightFlip", 
1043, "Performs a flip towards the right side."), + ("Backflip", 1044, "Executes a backflip, a complex and dynamic maneuver."), +] + + _UNITREE_COMMANDS = { name: (id_, description) for name, id_, description in UNITREE_WEBRTC_CONTROLS @@ -40,7 +187,7 @@ } -class UnitreeSkillContainer(SkillModule): +class UnitreeSkillContainer(Module): """Container for Unitree Go2 robot skills using the new framework.""" rpc_calls: list[str] = [ @@ -61,7 +208,7 @@ def start(self) -> None: def stop(self) -> None: super().stop() - @skill() + @skill def relative_move(self, forward: float = 0.0, left: float = 0.0, degrees: float = 0.0) -> str: """Move the robot relative to its current position. @@ -132,7 +279,7 @@ def _generate_new_goal( return PoseStamped(position=goal_position, orientation=goal_orientation) - @skill() + @skill def wait(self, seconds: float) -> str: """Wait for a specified amount of time. @@ -142,15 +289,12 @@ def wait(self, seconds: float) -> str: time.sleep(seconds) return f"Wait completed with length={seconds}s" - @skill(stream=Stream.passive, reducer=Reducer.latest, hide_skill=True) # type: ignore[arg-type] - def current_time(self): # type: ignore[no-untyped-def] - """Provides current time implicitly, don't call this skill directly.""" - print("Starting current_time skill") - while True: - yield str(datetime.datetime.now()) - time.sleep(1) + @skill + def current_time(self) -> str: + """Provides current time.""" + return str(datetime.datetime.now()) - @skill() + @skill def execute_sport_command(self, command_name: str) -> str: try: publish_request = self.get_rpc_calls("GO2Connection.publish_request") diff --git a/dimos/robot/unitree_webrtc/README.md b/dimos/robot/unitree_webrtc/README.md new file mode 100644 index 0000000000..ce39201c8b --- /dev/null +++ b/dimos/robot/unitree_webrtc/README.md @@ -0,0 +1 @@ +This directory only exists because some of the --replay tests depend on its existence (python pickle uses module names/paths so we would need to redo the pickle 
files). diff --git a/dimos/robot/unitree_webrtc/__init__.py b/dimos/robot/unitree_webrtc/__init__.py index e69de29bb2..4524bba226 100644 --- a/dimos/robot/unitree_webrtc/__init__.py +++ b/dimos/robot/unitree_webrtc/__init__.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Compatibility package for legacy dimos.robot.unitree_webrtc imports.""" + +from importlib import import_module +import sys + +_ALIAS_MODULES = { + "demo_error_on_name_conflicts": "dimos.robot.unitree.demo_error_on_name_conflicts", + "depth_module": "dimos.robot.unitree.depth_module", + "keyboard_teleop": "dimos.robot.unitree.keyboard_teleop", + "mujoco_connection": "dimos.robot.unitree.mujoco_connection", + "type": "dimos.robot.unitree.type", + "unitree_g1_skill_container": "dimos.robot.unitree.g1.skill_container", + "unitree_skill_container": "dimos.robot.unitree.unitree_skill_container", +} + +for alias, target in _ALIAS_MODULES.items(): + sys.modules[f"{__name__}.{alias}"] = import_module(target) diff --git a/dimos/robot/unitree_webrtc/type/__init__.py b/dimos/robot/unitree_webrtc/type/__init__.py index e69de29bb2..03ff4f4563 100644 --- a/dimos/robot/unitree_webrtc/type/__init__.py +++ b/dimos/robot/unitree_webrtc/type/__init__.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Compatibility re-exports for legacy dimos.robot.unitree_webrtc.type.* imports.""" + +import importlib + +__all__ = [] + + +def __getattr__(name: str): # type: ignore[no-untyped-def] + module = importlib.import_module("dimos.robot.unitree.type") + try: + return getattr(module, name) + except AttributeError as exc: + raise AttributeError(f"No {__name__} attribute {name}") from exc + + +def __dir__() -> list[str]: + module = importlib.import_module("dimos.robot.unitree.type") + return [name for name in dir(module) if not name.startswith("_")] diff --git a/dimos/robot/unitree_webrtc/type/lidar.py b/dimos/robot/unitree_webrtc/type/lidar.py index df2909dc38..d8dbe98fd2 100644 --- a/dimos/robot/unitree_webrtc/type/lidar.py +++ b/dimos/robot/unitree_webrtc/type/lidar.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2025-2026 Dimensional Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,63 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Unitree WebRTC lidar message parsing utilities.""" +"""Compatibility re-export for dimos.robot.unitree_webrtc.type.${name}.""" -import time -from typing import TypedDict - -import numpy as np -import open3d as o3d # type: ignore[import-untyped] - -from dimos.msgs.sensor_msgs import PointCloud2 - -# Backwards compatibility alias for pickled data -LidarMessage = PointCloud2 - - -class RawLidarPoints(TypedDict): - points: np.ndarray # type: ignore[type-arg] # Shape (N, 3) array of 3D points [x, y, z] - - -class RawLidarData(TypedDict): - """Data portion of the LIDAR message""" - - frame_id: str - origin: list[float] - resolution: float - src_size: int - stamp: float - width: list[int] - data: RawLidarPoints - - -class RawLidarMsg(TypedDict): - """Static type definition for raw LIDAR message from Unitree WebRTC.""" - - type: str - topic: str - data: RawLidarData - - -def pointcloud2_from_webrtc_lidar(raw_message: RawLidarMsg, ts: float | None = None) -> PointCloud2: - """Convert a raw Unitree WebRTC lidar message to PointCloud2. - - Args: - raw_message: Raw lidar message from Unitree WebRTC API - ts: Optional timestamp override. If None, uses current time. - - Returns: - PointCloud2 message with the lidar points - """ - data = raw_message["data"] - points = data["data"]["points"] - - pointcloud = o3d.geometry.PointCloud() - pointcloud.points = o3d.utility.Vector3dVector(points) - - return PointCloud2( - pointcloud=pointcloud, - # webrtc stamp is broken (e.g., "stamp": 1.758148e+09), use current time - ts=ts if ts is not None else time.time(), - frame_id="world", - ) +from dimos.robot.unitree.type.lidar import * # noqa: F403 diff --git a/dimos/robot/unitree_webrtc/type/lowstate.py b/dimos/robot/unitree_webrtc/type/lowstate.py index 3e7926424a..d92ee4d5b1 100644 --- a/dimos/robot/unitree_webrtc/type/lowstate.py +++ b/dimos/robot/unitree_webrtc/type/lowstate.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2025-2026 Dimensional Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,82 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Literal, TypedDict +"""Compatibility re-export for dimos.robot.unitree_webrtc.type.${name}.""" -raw_odom_msg_sample = { - "type": "msg", - "topic": "rt/lf/lowstate", - "data": { - "imu_state": {"rpy": [0.008086, -0.007515, 2.981771]}, - "motor_state": [ - {"q": 0.098092, "temperature": 40, "lost": 0, "reserve": [0, 674]}, - {"q": 0.757921, "temperature": 32, "lost": 0, "reserve": [0, 674]}, - {"q": -1.490911, "temperature": 38, "lost": 6, "reserve": [0, 674]}, - {"q": -0.072477, "temperature": 42, "lost": 0, "reserve": [0, 674]}, - {"q": 1.020276, "temperature": 32, "lost": 5, "reserve": [0, 674]}, - {"q": -2.007172, "temperature": 38, "lost": 5, "reserve": [0, 674]}, - {"q": 0.071382, "temperature": 50, "lost": 5, "reserve": [0, 674]}, - {"q": 0.963379, "temperature": 36, "lost": 6, "reserve": [0, 674]}, - {"q": -1.978311, "temperature": 40, "lost": 5, "reserve": [0, 674]}, - {"q": -0.051066, "temperature": 48, "lost": 0, "reserve": [0, 674]}, - {"q": 0.73103, "temperature": 34, "lost": 10, "reserve": [0, 674]}, - {"q": -1.466473, "temperature": 38, "lost": 6, "reserve": [0, 674]}, - {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, - {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, - {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, - {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, - {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, - {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, - {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, - {"q": 0, "temperature": 0, "lost": 0, "reserve": [0, 0]}, - ], - "bms_state": { - "version_high": 1, - "version_low": 18, - "soc": 55, - "current": -2481, - "cycle": 56, - "bq_ntc": [30, 29], - "mcu_ntc": [33, 32], - }, - "foot_force": [97, 84, 81, 81], - 
"temperature_ntc1": 48, - "power_v": 28.331045, - }, -} - - -class MotorState(TypedDict): - q: float - temperature: int - lost: int - reserve: list[int] - - -class ImuState(TypedDict): - rpy: list[float] - - -class BmsState(TypedDict): - version_high: int - version_low: int - soc: int - current: int - cycle: int - bq_ntc: list[int] - mcu_ntc: list[int] - - -class LowStateData(TypedDict): - imu_state: ImuState - motor_state: list[MotorState] - bms_state: BmsState - foot_force: list[int] - temperature_ntc1: int - power_v: float - - -class LowStateMsg(TypedDict): - type: Literal["msg"] - topic: str - data: LowStateData +from dimos.robot.unitree.type.lowstate import * # noqa: F403 diff --git a/dimos/robot/unitree_webrtc/type/map.py b/dimos/robot/unitree_webrtc/type/map.py index f9abd96b88..69bbb409c7 100644 --- a/dimos/robot/unitree_webrtc/type/map.py +++ b/dimos/robot/unitree_webrtc/type/map.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2025-2026 Dimensional Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,117 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from pathlib import Path -import time -from typing import Any +"""Compatibility re-export for dimos.robot.unitree_webrtc.type.${name}.""" -import open3d as o3d # type: ignore[import-untyped] -from reactivex import interval -from reactivex.disposable import Disposable - -from dimos.core import DimosCluster, In, LCMTransport, Module, Out, rpc -from dimos.core.global_config import GlobalConfig -from dimos.mapping.pointclouds.accumulators.general import GeneralPointCloudAccumulator -from dimos.mapping.pointclouds.accumulators.protocol import PointCloudAccumulator -from dimos.mapping.pointclouds.occupancy import general_occupancy -from dimos.msgs.nav_msgs import OccupancyGrid -from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.robot.unitree.connection.go2 import Go2ConnectionProtocol - - -class Map(Module): - lidar: In[PointCloud2] - global_map: Out[PointCloud2] - global_costmap: Out[OccupancyGrid] - - _point_cloud_accumulator: PointCloudAccumulator - _global_config: GlobalConfig - _preloaded_occupancy: OccupancyGrid | None = None - - def __init__( # type: ignore[no-untyped-def] - self, - voxel_size: float = 0.05, - cost_resolution: float = 0.05, - global_publish_interval: float | None = None, - min_height: float = 0.10, - max_height: float = 0.5, - global_config: GlobalConfig | None = None, - **kwargs, - ) -> None: - self.voxel_size = voxel_size - self.cost_resolution = cost_resolution - self.global_publish_interval = global_publish_interval - self.min_height = min_height - self.max_height = max_height - self._global_config = global_config or GlobalConfig() - self._point_cloud_accumulator = GeneralPointCloudAccumulator( - self.voxel_size, self._global_config - ) - - if self._global_config.simulation: - self.min_height = 0.3 - - super().__init__(**kwargs) - - @rpc - def start(self) -> None: - super().start() - - self._disposables.add(Disposable(self.lidar.subscribe(self.add_frame))) - - if self.global_publish_interval is not None: - unsub = 
interval(self.global_publish_interval).subscribe(self._publish) - self._disposables.add(unsub) - - @rpc - def stop(self) -> None: - super().stop() - - def to_PointCloud2(self) -> PointCloud2: - return PointCloud2( - pointcloud=self._point_cloud_accumulator.get_point_cloud(), - ts=time.time(), - ) - - # TODO: Why is this RPC? - @rpc - def add_frame(self, frame: PointCloud2) -> None: - self._point_cloud_accumulator.add(frame.pointcloud) - - @property - def o3d_geometry(self) -> o3d.geometry.PointCloud: - return self._point_cloud_accumulator.get_point_cloud() - - def _publish(self, _: Any) -> None: - self.global_map.publish(self.to_PointCloud2()) - - occupancygrid = general_occupancy( - self.to_PointCloud2(), - resolution=self.cost_resolution, - min_height=self.min_height, - max_height=self.max_height, - ) - - # When debugging occupancy navigation, load a predefined occupancy grid. - if self._global_config.mujoco_global_costmap_from_occupancy: - if self._preloaded_occupancy is None: - path = Path(self._global_config.mujoco_global_costmap_from_occupancy) - self._preloaded_occupancy = OccupancyGrid.from_path(path) - occupancygrid = self._preloaded_occupancy - - self.global_costmap.publish(occupancygrid) - - -mapper = Map.blueprint - - -def deploy(dimos: DimosCluster, connection: Go2ConnectionProtocol): # type: ignore[no-untyped-def] - mapper = dimos.deploy(Map, global_publish_interval=1.0) # type: ignore[attr-defined] - mapper.global_map.transport = LCMTransport("/global_map", PointCloud2) - mapper.global_costmap.transport = LCMTransport("/global_costmap", OccupancyGrid) - mapper.lidar.connect(connection.pointcloud) # type: ignore[attr-defined] - mapper.start() - return mapper - - -__all__ = ["Map", "mapper"] +from dimos.robot.unitree.type.map import * # noqa: F403 diff --git a/dimos/robot/unitree_webrtc/type/odometry.py b/dimos/robot/unitree_webrtc/type/odometry.py index 9f0b400691..111ba0b945 100644 --- a/dimos/robot/unitree_webrtc/type/odometry.py +++ 
b/dimos/robot/unitree_webrtc/type/odometry.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2025-2026 Dimensional Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -11,92 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Literal, TypedDict -from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Vector3 -from dimos.robot.unitree_webrtc.type.timeseries import ( - Timestamped, -) -from dimos.types.timestamped import to_timestamp +"""Compatibility re-export for dimos.robot.unitree_webrtc.type.${name}.""" -raw_odometry_msg_sample = { - "type": "msg", - "topic": "rt/utlidar/robot_pose", - "data": { - "header": {"stamp": {"sec": 1746565669, "nanosec": 448350564}, "frame_id": "odom"}, - "pose": { - "position": {"x": 5.961965, "y": -2.916958, "z": 0.319509}, - "orientation": {"x": 0.002787, "y": -0.000902, "z": -0.970244, "w": -0.242112}, - }, - }, -} - - -class TimeStamp(TypedDict): - sec: int - nanosec: int - - -class Header(TypedDict): - stamp: TimeStamp - frame_id: str - - -class RawPosition(TypedDict): - x: float - y: float - z: float - - -class Orientation(TypedDict): - x: float - y: float - z: float - w: float - - -class PoseData(TypedDict): - position: RawPosition - orientation: Orientation - - -class OdometryData(TypedDict): - header: Header - pose: PoseData - - -class RawOdometryMessage(TypedDict): - type: Literal["msg"] - topic: str - data: OdometryData - - -class Odometry(PoseStamped, Timestamped): # type: ignore[misc] - name = "geometry_msgs.PoseStamped" - - def __init__(self, frame_id: str = "base_link", *args, **kwargs) -> None: # type: ignore[no-untyped-def] - super().__init__(frame_id=frame_id, *args, **kwargs) # type: ignore[misc] - - @classmethod - def from_msg(cls, msg: RawOdometryMessage) -> "Odometry": - pose = msg["data"]["pose"] - - # Extract position - pos 
= Vector3( - pose["position"].get("x"), - pose["position"].get("y"), - pose["position"].get("z"), - ) - - rot = Quaternion( - pose["orientation"].get("x"), - pose["orientation"].get("y"), - pose["orientation"].get("z"), - pose["orientation"].get("w"), - ) - - ts = to_timestamp(msg["data"]["header"]["stamp"]) - return Odometry(position=pos, orientation=rot, ts=ts, frame_id="world") - - def __repr__(self) -> str: - return f"Odom pos({self.position}), rot({self.orientation})" +from dimos.robot.unitree.type.odometry import * # noqa: F403 diff --git a/dimos/robot/unitree_webrtc/type/timeseries.py b/dimos/robot/unitree_webrtc/type/timeseries.py index b75a41b932..34f9587ade 100644 --- a/dimos/robot/unitree_webrtc/type/timeseries.py +++ b/dimos/robot/unitree_webrtc/type/timeseries.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2025-2026 Dimensional Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,138 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import annotations +"""Compatibility re-export for dimos.robot.unitree_webrtc.type.${name}.""" -from abc import ABC, abstractmethod -from datetime import datetime, timedelta, timezone -from typing import TYPE_CHECKING, Generic, TypedDict, TypeVar, Union - -if TYPE_CHECKING: - from collections.abc import Iterable - -PAYLOAD = TypeVar("PAYLOAD") - - -class RosStamp(TypedDict): - sec: int - nanosec: int - - -EpochLike = Union[int, float, datetime, RosStamp] - - -def from_ros_stamp(stamp: dict[str, int], tz: timezone | None = None) -> datetime: - """Convert ROS-style timestamp {'sec': int, 'nanosec': int} to datetime.""" - return datetime.fromtimestamp(stamp["sec"] + stamp["nanosec"] / 1e9, tz=tz) - - -def to_human_readable(ts: EpochLike) -> str: - dt = to_datetime(ts) - return dt.strftime("%Y-%m-%d %H:%M:%S") - - -def to_datetime(ts: EpochLike, tz: timezone | None = None) -> datetime: - if isinstance(ts, datetime): - # if ts.tzinfo is None: - # ts = ts.astimezone(tz) - return ts - if isinstance(ts, int | float): - return datetime.fromtimestamp(ts, tz=tz) - if isinstance(ts, dict) and "sec" in ts and "nanosec" in ts: - return datetime.fromtimestamp(ts["sec"] + ts["nanosec"] / 1e9, tz=tz) - raise TypeError("unsupported timestamp type") - - -class Timestamped(ABC): - """Abstract class for an event with a timestamp.""" - - ts: datetime - - def __init__(self, ts: EpochLike) -> None: - self.ts = to_datetime(ts) - - -class TEvent(Timestamped, Generic[PAYLOAD]): - """Concrete class for an event with a timestamp and data.""" - - def __init__(self, timestamp: EpochLike, data: PAYLOAD) -> None: - super().__init__(timestamp) - self.data = data - - def __eq__(self, other: object) -> bool: - if not isinstance(other, TEvent): - return NotImplemented - return self.ts == other.ts and self.data == other.data - - def __repr__(self) -> str: - return f"TEvent(ts={self.ts}, data={self.data})" - - -EVENT = TypeVar("EVENT", bound=Timestamped) # any object that is a subclass 
of Timestamped - - -class Timeseries(ABC, Generic[EVENT]): - """Abstract class for an iterable of events with timestamps.""" - - @abstractmethod - def __iter__(self) -> Iterable[EVENT]: ... - - @property - def start_time(self) -> datetime: - """Return the timestamp of the earliest event, assuming the data is sorted.""" - return next(iter(self)).ts # type: ignore[call-overload, no-any-return, type-var] - - @property - def end_time(self) -> datetime: - """Return the timestamp of the latest event, assuming the data is sorted.""" - return next(reversed(list(self))).ts # type: ignore[call-overload, no-any-return] - - @property - def frequency(self) -> float: - """Calculate the frequency of events in Hz.""" - return len(list(self)) / (self.duration().total_seconds() or 1) # type: ignore[call-overload] - - def time_range(self) -> tuple[datetime, datetime]: - """Return (earliest_ts, latest_ts). Empty input ⇒ ValueError.""" - return self.start_time, self.end_time - - def duration(self) -> timedelta: - """Total time spanned by the iterable (Δ = last - first).""" - return self.end_time - self.start_time - - def closest_to(self, timestamp: EpochLike) -> EVENT: - """Return the event closest to the given timestamp. 
Assumes timeseries is sorted.""" - print("closest to", timestamp) - target = to_datetime(timestamp) - print("converted to", target) - target_ts = target.timestamp() - - closest = None - min_dist = float("inf") - - for event in self: # type: ignore[attr-defined] - dist = abs(event.ts - target_ts) - if dist > min_dist: - break - - min_dist = dist - closest = event - - print(f"closest: {closest}") - return closest # type: ignore[return-value] - - def __repr__(self) -> str: - """Return a string representation of the Timeseries.""" - return f"Timeseries(date={self.start_time.strftime('%Y-%m-%d')}, start={self.start_time.strftime('%H:%M:%S')}, end={self.end_time.strftime('%H:%M:%S')}, duration={self.duration()}, events={len(list(self))}, freq={self.frequency:.2f}Hz)" # type: ignore[call-overload] - - def __str__(self) -> str: - """Return a string representation of the Timeseries.""" - return self.__repr__() - - -class TList(list[EVENT], Timeseries[EVENT]): - """A test class that inherits from both list and Timeseries.""" - - def __repr__(self) -> str: - """Return a string representation of the TList using Timeseries repr method.""" - return Timeseries.__repr__(self) +from dimos.robot.unitree.type.timeseries import * # noqa: F403 diff --git a/dimos/robot/unitree_webrtc/type/vector.py b/dimos/robot/unitree_webrtc/type/vector.py index 58438c0a98..20d07c76e8 100644 --- a/dimos/robot/unitree_webrtc/type/vector.py +++ b/dimos/robot/unitree_webrtc/type/vector.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Copyright 2025-2026 Dimensional Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,431 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import builtins -from collections.abc import Iterable -from typing import ( - Any, - Protocol, - TypeVar, - Union, - runtime_checkable, -) +"""Compatibility re-export for dimos.robot.unitree_webrtc.type.${name}.""" -import numpy as np -from numpy.typing import NDArray - -T = TypeVar("T", bound="Vector") - - -class Vector: - """A wrapper around numpy arrays for vector operations with intuitive syntax.""" - - def __init__(self, *args: Any) -> None: - """Initialize a vector from components or another iterable. - - Examples: - Vector(1, 2) # 2D vector - Vector(1, 2, 3) # 3D vector - Vector([1, 2, 3]) # From list - Vector(np.array([1, 2, 3])) # From numpy array - """ - if len(args) == 1 and hasattr(args[0], "__iter__"): - self._data = np.array(args[0], dtype=float) - elif len(args) == 1: - self._data = np.array([args[0].x, args[0].y, args[0].z], dtype=float) - - else: - self._data = np.array(args, dtype=float) - - @property - def yaw(self) -> float: - return self.x - - @property - def tuple(self) -> tuple[float, ...]: - """Tuple representation of the vector.""" - return tuple(self._data) - - @property - def x(self) -> float: - """X component of the vector.""" - return self._data[0] if len(self._data) > 0 else 0.0 - - @property - def y(self) -> float: - """Y component of the vector.""" - return self._data[1] if len(self._data) > 1 else 0.0 - - @property - def z(self) -> float: - """Z component of the vector.""" - return self._data[2] if len(self._data) > 2 else 0.0 - - @property - def dim(self) -> int: - """Dimensionality of the vector.""" - return len(self._data) - - @property - def data(self) -> NDArray[np.float64]: - """Get the underlying numpy array.""" - return self._data - - def __len__(self) -> int: - return len(self._data) - - def __getitem__(self, idx: int) -> float: - return float(self._data[idx]) - - def __iter__(self) -> Iterable[float]: - return iter(self._data) # type: ignore[no-any-return] - - def __repr__(self) -> str: - components = ",".join(f"{x:.6g}" 
for x in self._data) - return f"({components})" - - def __str__(self) -> str: - if self.dim < 2: - return self.__repr__() - - def getArrow() -> str: - repr = ["←", "↖", "↑", "↗", "→", "↘", "↓", "↙"] - - if self.y == 0 and self.x == 0: - return "·" - - # Calculate angle in radians and convert to directional index - angle = np.arctan2(self.y, self.x) - # Map angle to 0-7 index (8 directions) with proper orientation - dir_index = int(((angle + np.pi) * 4 / np.pi) % 8) - # Get directional arrow symbol - return repr[dir_index] - - return f"{getArrow()} Vector {self.__repr__()}" - - def serialize(self) -> dict: # type: ignore[type-arg] - """Serialize the vector to a dictionary.""" - return {"type": "vector", "c": self._data.tolist()} - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Vector): - return np.array_equal(self._data, other._data) - return np.array_equal(self._data, np.array(other, dtype=float)) - - def __add__(self: T, other: Union["Vector", Iterable[float]]) -> T: - if isinstance(other, Vector): - return self.__class__(self._data + other._data) - return self.__class__(self._data + np.array(other, dtype=float)) - - def __sub__(self: T, other: Union["Vector", Iterable[float]]) -> T: - if isinstance(other, Vector): - return self.__class__(self._data - other._data) - return self.__class__(self._data - np.array(other, dtype=float)) - - def __mul__(self: T, scalar: float) -> T: - return self.__class__(self._data * scalar) - - def __rmul__(self: T, scalar: float) -> T: - return self.__mul__(scalar) - - def __truediv__(self: T, scalar: float) -> T: - return self.__class__(self._data / scalar) - - def __neg__(self: T) -> T: - return self.__class__(-self._data) - - def dot(self, other: Union["Vector", Iterable[float]]) -> float: - """Compute dot product.""" - if isinstance(other, Vector): - return float(np.dot(self._data, other._data)) - return float(np.dot(self._data, np.array(other, dtype=float))) - - def cross(self: T, other: Union["Vector", 
Iterable[float]]) -> T: - """Compute cross product (3D vectors only).""" - if self.dim != 3: - raise ValueError("Cross product is only defined for 3D vectors") - - if isinstance(other, Vector): - other_data = other._data - else: - other_data = np.array(other, dtype=float) - - if len(other_data) != 3: - raise ValueError("Cross product requires two 3D vectors") - - return self.__class__(np.cross(self._data, other_data)) - - def length(self) -> float: - """Compute the Euclidean length (magnitude) of the vector.""" - return float(np.linalg.norm(self._data)) - - def length_squared(self) -> float: - """Compute the squared length of the vector (faster than length()).""" - return float(np.sum(self._data * self._data)) - - def normalize(self: T) -> T: - """Return a normalized unit vector in the same direction.""" - length = self.length() - if length < 1e-10: # Avoid division by near-zero - return self.__class__(np.zeros_like(self._data)) - return self.__class__(self._data / length) - - def to_2d(self: T) -> T: - """Convert a vector to a 2D vector by taking only the x and y components.""" - return self.__class__(self._data[:2]) - - def distance(self, other: Union["Vector", Iterable[float]]) -> float: - """Compute Euclidean distance to another vector.""" - if isinstance(other, Vector): - return float(np.linalg.norm(self._data - other._data)) - return float(np.linalg.norm(self._data - np.array(other, dtype=float))) - - def distance_squared(self, other: Union["Vector", Iterable[float]]) -> float: - """Compute squared Euclidean distance to another vector (faster than distance()).""" - if isinstance(other, Vector): - diff = self._data - other._data - else: - diff = self._data - np.array(other, dtype=float) - return float(np.sum(diff * diff)) - - def angle(self, other: Union["Vector", Iterable[float]]) -> float: - """Compute the angle (in radians) between this vector and another.""" - if self.length() < 1e-10 or (isinstance(other, Vector) and other.length() < 1e-10): - return 0.0 
- - if isinstance(other, Vector): - other_data = other._data - else: - other_data = np.array(other, dtype=float) - - cos_angle = np.clip( - np.dot(self._data, other_data) - / (np.linalg.norm(self._data) * np.linalg.norm(other_data)), - -1.0, - 1.0, - ) - return float(np.arccos(cos_angle)) - - def project(self: T, onto: Union["Vector", Iterable[float]]) -> T: - """Project this vector onto another vector.""" - if isinstance(onto, Vector): - onto_data = onto._data - else: - onto_data = np.array(onto, dtype=float) - - onto_length_sq = np.sum(onto_data * onto_data) - if onto_length_sq < 1e-10: - return self.__class__(np.zeros_like(self._data)) - - scalar_projection = np.dot(self._data, onto_data) / onto_length_sq - return self.__class__(scalar_projection * onto_data) - - @classmethod - def zeros(cls: type[T], dim: int) -> T: - """Create a zero vector of given dimension.""" - return cls(np.zeros(dim)) - - @classmethod - def ones(cls: type[T], dim: int) -> T: - """Create a vector of ones with given dimension.""" - return cls(np.ones(dim)) - - @classmethod - def unit_x(cls: type[T], dim: int = 3) -> T: - """Create a unit vector in the x direction.""" - v = np.zeros(dim) - v[0] = 1.0 - return cls(v) - - @classmethod - def unit_y(cls: type[T], dim: int = 3) -> T: - """Create a unit vector in the y direction.""" - v = np.zeros(dim) - v[1] = 1.0 - return cls(v) - - @classmethod - def unit_z(cls: type[T], dim: int = 3) -> T: - """Create a unit vector in the z direction.""" - v = np.zeros(dim) - if dim > 2: - v[2] = 1.0 - return cls(v) - - def to_list(self) -> list[float]: - """Convert the vector to a list.""" - return [float(x) for x in self._data] - - def to_tuple(self) -> builtins.tuple[float, ...]: - """Convert the vector to a tuple.""" - return tuple(self._data) - - def to_numpy(self) -> NDArray[np.float64]: - """Convert the vector to a numpy array.""" - return self._data - - -# Protocol approach for static type checking -@runtime_checkable -class VectorLike(Protocol): - 
"""Protocol for types that can be treated as vectors.""" - - def __getitem__(self, key: int) -> float: ... - def __len__(self) -> int: ... - def __iter__(self) -> Iterable[float]: ... - - -def to_numpy(value: VectorLike) -> NDArray[np.float64]: - """Convert a vector-compatible value to a numpy array. - - Args: - value: Any vector-like object (Vector, numpy array, tuple, list) - - Returns: - Numpy array representation - """ - if isinstance(value, Vector): - return value.data - elif isinstance(value, np.ndarray): - return value - else: - return np.array(value, dtype=float) - - -def to_vector(value: VectorLike) -> Vector: - """Convert a vector-compatible value to a Vector object. - - Args: - value: Any vector-like object (Vector, numpy array, tuple, list) - - Returns: - Vector object - """ - if isinstance(value, Vector): - return value - else: - return Vector(value) - - -def to_tuple(value: VectorLike) -> tuple[float, ...]: - """Convert a vector-compatible value to a tuple. - - Args: - value: Any vector-like object (Vector, numpy array, tuple, list) - - Returns: - Tuple of floats - """ - if isinstance(value, Vector): - return tuple(float(x) for x in value.data) - elif isinstance(value, np.ndarray): - return tuple(float(x) for x in value) - elif isinstance(value, tuple): - return tuple(float(x) for x in value) - else: - # Convert to list first to ensure we have an indexable sequence - data = [value[i] for i in range(len(value))] - return tuple(float(x) for x in data) - - -def to_list(value: VectorLike) -> list[float]: - """Convert a vector-compatible value to a list. 
- - Args: - value: Any vector-like object (Vector, numpy array, tuple, list) - - Returns: - List of floats - """ - if isinstance(value, Vector): - return [float(x) for x in value.data] - elif isinstance(value, np.ndarray): - return [float(x) for x in value] - elif isinstance(value, list): - return [float(x) for x in value] - else: - # Convert to list using indexing - return [float(value[i]) for i in range(len(value))] - - -# Helper functions to check dimensionality -def is_2d(value: VectorLike) -> bool: - """Check if a vector-compatible value is 2D. - - Args: - value: Any vector-like object (Vector, numpy array, tuple, list) - - Returns: - True if the value is 2D - """ - if isinstance(value, Vector): - return len(value) == 2 - elif isinstance(value, np.ndarray): - return value.shape[-1] == 2 or value.size == 2 - else: - return len(value) == 2 - - -def is_3d(value: VectorLike) -> bool: - """Check if a vector-compatible value is 3D. - - Args: - value: Any vector-like object (Vector, numpy array, tuple, list) - - Returns: - True if the value is 3D - """ - if isinstance(value, Vector): - return len(value) == 3 - elif isinstance(value, np.ndarray): - return value.shape[-1] == 3 or value.size == 3 - else: - return len(value) == 3 - - -# Extraction functions for XYZ components -def x(value: VectorLike) -> float: - """Get the X component of a vector-compatible value. - - Args: - value: Any vector-like object (Vector, numpy array, tuple, list) - - Returns: - X component as a float - """ - if isinstance(value, Vector): - return value.x - else: - return float(to_numpy(value)[0]) - - -def y(value: VectorLike) -> float: - """Get the Y component of a vector-compatible value. 
- - Args: - value: Any vector-like object (Vector, numpy array, tuple, list) - - Returns: - Y component as a float - """ - if isinstance(value, Vector): - return value.y - else: - arr = to_numpy(value) - return float(arr[1]) if len(arr) > 1 else 0.0 - - -def z(value: VectorLike) -> float: - """Get the Z component of a vector-compatible value. - - Args: - value: Any vector-like object (Vector, numpy array, tuple, list) - - Returns: - Z component as a float - """ - if isinstance(value, Vector): - return value.z - else: - arr = to_numpy(value) - return float(arr[2]) if len(arr) > 2 else 0.0 +from dimos.robot.unitree.type.vector import * # noqa: F403 diff --git a/dimos/robot/unitree_webrtc/unitree_g1_blueprints.py b/dimos/robot/unitree_webrtc/unitree_g1_blueprints.py deleted file mode 100644 index c79cee2a18..0000000000 --- a/dimos/robot/unitree_webrtc/unitree_g1_blueprints.py +++ /dev/null @@ -1,268 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Blueprint configurations for Unitree G1 humanoid robot. - -This module provides pre-configured blueprints for various G1 robot setups, -from basic teleoperation to full autonomous agent configurations. 
-""" - -from dimos_lcm.foxglove_msgs import SceneUpdate -from dimos_lcm.foxglove_msgs.ImageAnnotations import ( - ImageAnnotations, -) -from dimos_lcm.sensor_msgs import CameraInfo - -from dimos.agents.agent import llm_agent -from dimos.agents.cli.human import human_input -from dimos.agents.skills.navigation import navigation_skill -from dimos.constants import DEFAULT_CAPACITY_COLOR_IMAGE -from dimos.core.blueprints import autoconnect -from dimos.core.transport import LCMTransport, pSHMTransport -from dimos.hardware.sensors.camera import zed -from dimos.hardware.sensors.camera.module import camera_module # type: ignore[attr-defined] -from dimos.hardware.sensors.camera.webcam import Webcam -from dimos.mapping.costmapper import cost_mapper -from dimos.mapping.voxels import voxel_mapper -from dimos.msgs.geometry_msgs import ( - PoseStamped, - Quaternion, - Transform, - Twist, - Vector3, -) -from dimos.msgs.nav_msgs import Odometry, Path -from dimos.msgs.sensor_msgs import Image, PointCloud2 -from dimos.msgs.std_msgs import Bool -from dimos.msgs.vision_msgs import Detection2DArray -from dimos.navigation.frontier_exploration import wavefront_frontier_explorer -from dimos.navigation.replanning_a_star.module import replanning_a_star_planner -from dimos.navigation.rosnav import ros_nav -from dimos.perception.detection.detectors.person.yolo import YoloPersonDetector -from dimos.perception.detection.module3D import Detection3DModule, detection3d_module -from dimos.perception.detection.moduleDB import ObjectDBModule, detectionDB_module -from dimos.perception.detection.person_tracker import PersonTracker, person_tracker_module -from dimos.perception.object_tracker import object_tracking -from dimos.perception.spatial_perception import spatial_memory -from dimos.robot.foxglove_bridge import foxglove_bridge -from dimos.robot.unitree.connection.g1 import g1_connection -from dimos.robot.unitree.connection.g1sim import g1_sim_connection -from 
dimos.robot.unitree_webrtc.keyboard_teleop import keyboard_teleop -from dimos.robot.unitree_webrtc.unitree_g1_skill_container import g1_skills -from dimos.utils.monitoring import utilization -from dimos.web.websocket_vis.websocket_vis_module import websocket_vis - -_basic_no_nav = ( - autoconnect( - camera_module( - transform=Transform( - translation=Vector3(0.05, 0.0, 0.0), - rotation=Quaternion.from_euler(Vector3(0.0, 0.2, 0.0)), - frame_id="sensor", - child_frame_id="camera_link", - ), - hardware=lambda: Webcam( - camera_index=0, - fps=15, - stereo_slice="left", - camera_info=zed.CameraInfo.SingleWebcam, - ), - ), - voxel_mapper(voxel_size=0.1), - cost_mapper(), - wavefront_frontier_explorer(), - # Visualization - websocket_vis(), - foxglove_bridge(), - ) - .global_config(n_dask_workers=4, robot_model="unitree_g1") - .transports( - { - # G1 uses Twist for movement commands - ("cmd_vel", Twist): LCMTransport("/cmd_vel", Twist), - # State estimation from ROS - ("state_estimation", Odometry): LCMTransport("/state_estimation", Odometry), - # Odometry output from ROSNavigationModule - ("odom", PoseStamped): LCMTransport("/odom", PoseStamped), - # Navigation module topics from nav_bot - ("goal_req", PoseStamped): LCMTransport("/goal_req", PoseStamped), - ("goal_active", PoseStamped): LCMTransport("/goal_active", PoseStamped), - ("path_active", Path): LCMTransport("/path_active", Path), - ("pointcloud", PointCloud2): LCMTransport("/lidar", PointCloud2), - ("global_pointcloud", PointCloud2): LCMTransport("/map", PointCloud2), - # Original navigation topics for backwards compatibility - ("goal_pose", PoseStamped): LCMTransport("/goal_pose", PoseStamped), - ("goal_reached", Bool): LCMTransport("/goal_reached", Bool), - ("cancel_goal", Bool): LCMTransport("/cancel_goal", Bool), - # Camera topics (if camera module is added) - ("color_image", Image): LCMTransport("/g1/color_image", Image), - ("camera_info", CameraInfo): LCMTransport("/g1/camera_info", CameraInfo), - } - ) -) 
- -basic_ros = autoconnect( - _basic_no_nav, - g1_connection(), - ros_nav(), -) - -basic_sim = autoconnect( - _basic_no_nav, - g1_sim_connection(), - replanning_a_star_planner(), -) - -_perception_and_memory = autoconnect( - spatial_memory(), - object_tracking(frame_id="camera_link"), - utilization(), -) - -standard = autoconnect( - basic_ros, - _perception_and_memory, -).global_config(n_dask_workers=8) - -standard_sim = autoconnect( - basic_sim, - _perception_and_memory, -).global_config(n_dask_workers=8) - -# Optimized configuration using shared memory for images -standard_with_shm = autoconnect( - standard.transports( - { - ("color_image", Image): pSHMTransport( - "/g1/color_image", default_capacity=DEFAULT_CAPACITY_COLOR_IMAGE - ), - } - ), - foxglove_bridge( - shm_channels=[ - "/g1/color_image#sensor_msgs.Image", - ] - ), -) - -_agentic_skills = autoconnect( - llm_agent(), - human_input(), - navigation_skill(), - g1_skills(), -) - -# Full agentic configuration with LLM and skills -agentic = autoconnect( - standard, - _agentic_skills, -) - -agentic_sim = autoconnect( - standard_sim, - _agentic_skills, -) - -# Configuration with joystick control for teleoperation -with_joystick = autoconnect( - basic_ros, - keyboard_teleop(), # Pygame-based joystick control -) - -# Detection configuration with person tracking and 3D detection -detection = ( - autoconnect( - basic_ros, - # Person detection modules with YOLO - detection3d_module( - camera_info=zed.CameraInfo.SingleWebcam, - detector=YoloPersonDetector, - ), - detectionDB_module( - camera_info=zed.CameraInfo.SingleWebcam, - filter=lambda det: det.class_id == 0, # Filter for person class only - ), - person_tracker_module( - cameraInfo=zed.CameraInfo.SingleWebcam, - ), - ) - .global_config(n_dask_workers=8) - .remappings( - [ - # Connect detection modules to camera and lidar - (Detection3DModule, "image", "color_image"), - (Detection3DModule, "pointcloud", "pointcloud"), - (ObjectDBModule, "image", "color_image"), - 
(ObjectDBModule, "pointcloud", "pointcloud"), - (PersonTracker, "image", "color_image"), - (PersonTracker, "detections", "detections_2d"), - ] - ) - .transports( - { - # Detection 3D module outputs - ("detections", Detection3DModule): LCMTransport( - "/detector3d/detections", Detection2DArray - ), - ("annotations", Detection3DModule): LCMTransport( - "/detector3d/annotations", ImageAnnotations - ), - ("scene_update", Detection3DModule): LCMTransport( - "/detector3d/scene_update", SceneUpdate - ), - ("detected_pointcloud_0", Detection3DModule): LCMTransport( - "/detector3d/pointcloud/0", PointCloud2 - ), - ("detected_pointcloud_1", Detection3DModule): LCMTransport( - "/detector3d/pointcloud/1", PointCloud2 - ), - ("detected_pointcloud_2", Detection3DModule): LCMTransport( - "/detector3d/pointcloud/2", PointCloud2 - ), - ("detected_image_0", Detection3DModule): LCMTransport("/detector3d/image/0", Image), - ("detected_image_1", Detection3DModule): LCMTransport("/detector3d/image/1", Image), - ("detected_image_2", Detection3DModule): LCMTransport("/detector3d/image/2", Image), - # Detection DB module outputs - ("detections", ObjectDBModule): LCMTransport( - "/detectorDB/detections", Detection2DArray - ), - ("annotations", ObjectDBModule): LCMTransport( - "/detectorDB/annotations", ImageAnnotations - ), - ("scene_update", ObjectDBModule): LCMTransport("/detectorDB/scene_update", SceneUpdate), - ("detected_pointcloud_0", ObjectDBModule): LCMTransport( - "/detectorDB/pointcloud/0", PointCloud2 - ), - ("detected_pointcloud_1", ObjectDBModule): LCMTransport( - "/detectorDB/pointcloud/1", PointCloud2 - ), - ("detected_pointcloud_2", ObjectDBModule): LCMTransport( - "/detectorDB/pointcloud/2", PointCloud2 - ), - ("detected_image_0", ObjectDBModule): LCMTransport("/detectorDB/image/0", Image), - ("detected_image_1", ObjectDBModule): LCMTransport("/detectorDB/image/1", Image), - ("detected_image_2", ObjectDBModule): LCMTransport("/detectorDB/image/2", Image), - # Person tracker 
outputs - ("target", PersonTracker): LCMTransport("/person_tracker/target", PoseStamped), - } - ) -) - -# Full featured configuration with everything -full_featured = autoconnect( - standard_with_shm, - _agentic_skills, - keyboard_teleop(), -) diff --git a/dimos/robot/unitree_webrtc/unitree_go2_blueprints.py b/dimos/robot/unitree_webrtc/unitree_go2_blueprints.py deleted file mode 100644 index b683b76559..0000000000 --- a/dimos/robot/unitree_webrtc/unitree_go2_blueprints.py +++ /dev/null @@ -1,234 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from pathlib import Path -import platform - -from dimos_lcm.foxglove_msgs.ImageAnnotations import ( - ImageAnnotations, # type: ignore[import-untyped] -) -from dimos_lcm.foxglove_msgs.SceneUpdate import SceneUpdate # type: ignore[import-untyped] - -from dimos.agents.agent import llm_agent -from dimos.agents.cli.human import human_input -from dimos.agents.cli.web import web_input -from dimos.agents.ollama_agent import ollama_installed -from dimos.agents.skills.navigation import navigation_skill -from dimos.agents.skills.person_follow import person_follow_skill -from dimos.agents.skills.speak_skill import speak_skill -from dimos.agents.spec import Provider -from dimos.agents.vlm_agent import vlm_agent -from dimos.agents.vlm_stream_tester import vlm_stream_tester -from dimos.constants import DEFAULT_CAPACITY_COLOR_IMAGE -from dimos.core.blueprints import autoconnect -from dimos.core.transport import ( - JpegLcmTransport, - JpegShmTransport, - LCMTransport, - ROSTransport, - pSHMTransport, -) -from dimos.dashboard.tf_rerun_module import tf_rerun -from dimos.mapping.costmapper import cost_mapper -from dimos.mapping.voxels import voxel_mapper -from dimos.msgs.geometry_msgs import PoseStamped -from dimos.msgs.sensor_msgs import Image, PointCloud2 -from dimos.msgs.vision_msgs import Detection2DArray -from dimos.navigation.frontier_exploration import ( - wavefront_frontier_explorer, -) -from dimos.navigation.replanning_a_star.module import ( - replanning_a_star_planner, -) -from dimos.perception.detection.module3D import Detection3DModule, detection3d_module -from dimos.perception.experimental.temporal_memory import temporal_memory -from dimos.perception.spatial_perception import spatial_memory -from dimos.protocol.mcp.mcp import MCPModule -from dimos.robot.foxglove_bridge import foxglove_bridge -import dimos.robot.unitree.connection.go2 as _go2_mod -from dimos.robot.unitree.connection.go2 import GO2Connection, go2_connection -from 
dimos.robot.unitree_webrtc.unitree_skill_container import unitree_skills -from dimos.utils.monitoring import utilization -from dimos.web.websocket_vis.websocket_vis_module import websocket_vis - -_GO2_URDF = Path(_go2_mod.__file__).parent.parent / "go2" / "go2.urdf" - -# Mac has some issue with high bandwidth UDP -# -# so we use pSHMTransport for color_image -# (Could we adress this on the system config layer? Is this fixable on mac?) -mac = autoconnect( - foxglove_bridge( - shm_channels=[ - "/color_image#sensor_msgs.Image", - ] - ), -).transports( - { - ("color_image", Image): pSHMTransport( - "color_image", default_capacity=DEFAULT_CAPACITY_COLOR_IMAGE - ), - } -) - - -linux = autoconnect(foxglove_bridge()) - -basic = autoconnect( - go2_connection(), - linux if platform.system() == "Linux" else mac, - websocket_vis(), - tf_rerun( - urdf_path=str(_GO2_URDF), - cameras=[ - ("world/robot/camera", "camera_optical", GO2Connection.camera_info_static), - ], - ), -).global_config(n_dask_workers=4, robot_model="unitree_go2") - -nav = autoconnect( - basic, - voxel_mapper(voxel_size=0.1), - cost_mapper(), - replanning_a_star_planner(), - wavefront_frontier_explorer(), -).global_config(n_dask_workers=6, robot_model="unitree_go2") - -ros = nav.transports( - { - ("lidar", PointCloud2): ROSTransport("lidar", PointCloud2), - ("global_map", PointCloud2): ROSTransport("global_map", PointCloud2), - ("odom", PoseStamped): ROSTransport("odom", PoseStamped), - ("color_image", Image): ROSTransport("color_image", Image), - } -) - -detection = ( - autoconnect( - nav, - detection3d_module( - camera_info=GO2Connection.camera_info_static, - ), - ) - .remappings( - [ - (Detection3DModule, "pointcloud", "global_map"), - ] - ) - .transports( - { - # Detection 3D module outputs - ("detections", Detection3DModule): LCMTransport( - "/detector3d/detections", Detection2DArray - ), - ("annotations", Detection3DModule): LCMTransport( - "/detector3d/annotations", ImageAnnotations - ), - 
("scene_update", Detection3DModule): LCMTransport( - "/detector3d/scene_update", SceneUpdate - ), - ("detected_pointcloud_0", Detection3DModule): LCMTransport( - "/detector3d/pointcloud/0", PointCloud2 - ), - ("detected_pointcloud_1", Detection3DModule): LCMTransport( - "/detector3d/pointcloud/1", PointCloud2 - ), - ("detected_pointcloud_2", Detection3DModule): LCMTransport( - "/detector3d/pointcloud/2", PointCloud2 - ), - ("detected_image_0", Detection3DModule): LCMTransport("/detector3d/image/0", Image), - ("detected_image_1", Detection3DModule): LCMTransport("/detector3d/image/1", Image), - ("detected_image_2", Detection3DModule): LCMTransport("/detector3d/image/2", Image), - } - ) -) - - -spatial = autoconnect( - nav, - spatial_memory(), - utilization(), -).global_config(n_dask_workers=8) - -with_jpeglcm = nav.transports( - { - ("color_image", Image): JpegLcmTransport("/color_image", Image), - } -) - -with_jpegshm = autoconnect( - nav.transports( - { - ("color_image", Image): JpegShmTransport("/color_image", quality=75), - } - ), - foxglove_bridge( - jpeg_shm_channels=[ - "/color_image#sensor_msgs.Image", - ] - ), -) - -_common_agentic = autoconnect( - human_input(), - navigation_skill(), - person_follow_skill(camera_info=GO2Connection.camera_info_static), - unitree_skills(), - web_input(), - speak_skill(), -) - -agentic = autoconnect( - spatial, - llm_agent(), - _common_agentic, -) - -agentic_mcp = autoconnect( - agentic, - MCPModule.blueprint(), -) - -agentic_ollama = autoconnect( - spatial, - llm_agent( - model="qwen3:8b", - provider=Provider.OLLAMA, # type: ignore[attr-defined] - ), - _common_agentic, -).requirements( - ollama_installed, -) - -agentic_huggingface = autoconnect( - spatial, - llm_agent( - model="Qwen/Qwen2.5-1.5B-Instruct", - provider=Provider.HUGGINGFACE, # type: ignore[attr-defined] - ), - _common_agentic, -) - -vlm_stream_test = autoconnect( - basic, - vlm_agent(), - vlm_stream_tester(), -) - -temporal_memory = autoconnect( - agentic, - 
temporal_memory(), -) diff --git a/dimos/robot/unitree_webrtc/unitree_skills.py b/dimos/robot/unitree_webrtc/unitree_skills.py deleted file mode 100644 index 05e01f63fb..0000000000 --- a/dimos/robot/unitree_webrtc/unitree_skills.py +++ /dev/null @@ -1,357 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import annotations - -import time -from typing import TYPE_CHECKING - -from pydantic import Field - -if TYPE_CHECKING: - from dimos.robot.robot import MockRobot, Robot # type: ignore[attr-defined] -else: - Robot = "Robot" - MockRobot = "MockRobot" - -from unitree_webrtc_connect.constants import RTC_TOPIC - -from dimos.msgs.geometry_msgs import Twist, Vector3 -from dimos.skills.skills import AbstractRobotSkill, AbstractSkill, SkillLibrary -from dimos.types.constants import Colors - -# Module-level constant for Unitree Go2 WebRTC control definitions -UNITREE_WEBRTC_CONTROLS: list[tuple[str, int, str]] = [ - ("Damp", 1001, "Lowers the robot to the ground fully."), - ( - "BalanceStand", - 1002, - "Activates a mode that maintains the robot in a balanced standing position.", - ), - ( - "StandUp", - 1004, - "Commands the robot to transition from a sitting or prone position to a standing posture.", - ), - ( - "StandDown", - 1005, - "Instructs the robot to move from a standing position to a sitting or prone posture.", - ), - ( - "RecoveryStand", - 1006, - "Recovers the robot to a state from which it can take more 
commands. Useful to run after multiple dynamic commands like front flips, Must run after skills like sit and jump and standup.", - ), - ("Sit", 1009, "Commands the robot to sit down from a standing or moving stance."), - ( - "RiseSit", - 1010, - "Commands the robot to rise back to a standing position from a sitting posture.", - ), - ( - "SwitchGait", - 1011, - "Switches the robot's walking pattern or style dynamically, suitable for different terrains or speeds.", - ), - ("Trigger", 1012, "Triggers a specific action or custom routine programmed into the robot."), - ( - "BodyHeight", - 1013, - "Adjusts the height of the robot's body from the ground, useful for navigating various obstacles.", - ), - ( - "FootRaiseHeight", - 1014, - "Controls how high the robot lifts its feet during movement, which can be adjusted for different surfaces.", - ), - ( - "SpeedLevel", - 1015, - "Sets or adjusts the speed at which the robot moves, with various levels available for different operational needs.", - ), - ( - "Hello", - 1016, - "Performs a greeting action, which could involve a wave or other friendly gesture.", - ), - ("Stretch", 1017, "Engages the robot in a stretching routine."), - ( - "TrajectoryFollow", - 1018, - "Directs the robot to follow a predefined trajectory, which could involve complex paths or maneuvers.", - ), - ( - "ContinuousGait", - 1019, - "Enables a mode for continuous walking or running, ideal for long-distance travel.", - ), - ("Content", 1020, "To display or trigger when the robot is happy."), - ("Wallow", 1021, "The robot falls onto its back and rolls around."), - ( - "Dance1", - 1022, - "Performs a predefined dance routine 1, programmed for entertainment or demonstration.", - ), - ("Dance2", 1023, "Performs another variant of a predefined dance routine 2."), - ("GetBodyHeight", 1024, "Retrieves the current height of the robot's body from the ground."), - ( - "GetFootRaiseHeight", - 1025, - "Retrieves the current height at which the robot's feet are being 
raised during movement.", - ), - ( - "GetSpeedLevel", - 1026, - "Retrieves the current speed level setting of the robot.", - ), - ( - "SwitchJoystick", - 1027, - "Switches the robot's control mode to respond to joystick input for manual operation.", - ), - ( - "Pose", - 1028, - "Commands the robot to assume a specific pose or posture as predefined in its programming.", - ), - ("Scrape", 1029, "The robot performs a scraping motion."), - ( - "FrontFlip", - 1030, - "Commands the robot to perform a front flip, showcasing its agility and dynamic movement capabilities.", - ), - ( - "FrontJump", - 1031, - "Instructs the robot to jump forward, demonstrating its explosive movement capabilities.", - ), - ( - "FrontPounce", - 1032, - "Commands the robot to perform a pouncing motion forward.", - ), - ( - "WiggleHips", - 1033, - "The robot performs a hip wiggling motion, often used for entertainment or demonstration purposes.", - ), - ( - "GetState", - 1034, - "Retrieves the current operational state of the robot, including its mode, position, and status.", - ), - ( - "EconomicGait", - 1035, - "Engages a more energy-efficient walking or running mode to conserve battery life.", - ), - ("FingerHeart", 1036, "Performs a finger heart gesture while on its hind legs."), - ( - "Handstand", - 1301, - "Commands the robot to perform a handstand, demonstrating balance and control.", - ), - ( - "CrossStep", - 1302, - "Commands the robot to perform cross-step movements.", - ), - ( - "OnesidedStep", - 1303, - "Commands the robot to perform one-sided step movements.", - ), - ("Bound", 1304, "Commands the robot to perform bounding movements."), - ("MoonWalk", 1305, "Commands the robot to perform a moonwalk motion."), - ("LeftFlip", 1042, "Executes a flip towards the left side."), - ("RightFlip", 1043, "Performs a flip towards the right side."), - ("Backflip", 1044, "Executes a backflip, a complex and dynamic maneuver."), -] - -# Module-level constants for Unitree G1 WebRTC control definitions 
-# G1 Arm Actions - all use api_id 7106 on topic "rt/api/arm/request" -G1_ARM_CONTROLS: list[tuple[str, int, str]] = [ - ("Handshake", 27, "Perform a handshake gesture with the right hand."), - ("HighFive", 18, "Give a high five with the right hand."), - ("Hug", 19, "Perform a hugging gesture with both arms."), - ("HighWave", 26, "Wave with the hand raised high."), - ("Clap", 17, "Clap hands together."), - ("FaceWave", 25, "Wave near the face level."), - ("LeftKiss", 12, "Blow a kiss with the left hand."), - ("ArmHeart", 20, "Make a heart shape with both arms overhead."), - ("RightHeart", 21, "Make a heart gesture with the right hand."), - ("HandsUp", 15, "Raise both hands up in the air."), - ("XRay", 24, "Hold arms in an X-ray pose position."), - ("RightHandUp", 23, "Raise only the right hand up."), - ("Reject", 22, "Make a rejection or 'no' gesture."), - ("CancelAction", 99, "Cancel any current arm action and return hands to neutral position."), -] - -# G1 Movement Modes - all use api_id 7101 on topic "rt/api/sport/request" -G1_MODE_CONTROLS: list[tuple[str, int, str]] = [ - ("WalkMode", 500, "Switch to normal walking mode."), - ("WalkControlWaist", 501, "Switch to walking mode with waist control."), - ("RunMode", 801, "Switch to running mode."), -] - -# region MyUnitreeSkills - - -class MyUnitreeSkills(SkillLibrary): - """My Unitree Skills for WebRTC interface.""" - - def __init__(self, robot: Robot | None = None, robot_type: str = "go2") -> None: - """Initialize Unitree skills library. - - Args: - robot: Optional robot instance - robot_type: Type of robot ("go2" or "g1"), defaults to "go2" - """ - super().__init__() - self._robot: Robot = None # type: ignore[assignment] - self.robot_type = robot_type.lower() - - if self.robot_type not in ["go2", "g1"]: - raise ValueError(f"Unsupported robot type: {robot_type}. 
Must be 'go2' or 'g1'") - - # Add dynamic skills to this class based on robot type - dynamic_skills = self.create_skills_live() - self.register_skills(dynamic_skills) # type: ignore[arg-type] - - @classmethod - def register_skills(cls, skill_classes: AbstractSkill | list[AbstractSkill]) -> None: - """Add multiple skill classes as class attributes. - - Args: - skill_classes: List of skill classes to add - """ - if not isinstance(skill_classes, list): - skill_classes = [skill_classes] - - for skill_class in skill_classes: - # Add to the class as a skill - setattr(cls, skill_class.__name__, skill_class) # type: ignore[attr-defined] - - def initialize_skills(self) -> None: - for skill_class in self.get_class_skills(): - self.create_instance(skill_class.__name__, robot=self._robot) # type: ignore[attr-defined] - - # Refresh the class skills - self.refresh_class_skills() - - def create_skills_live(self) -> list[AbstractRobotSkill]: - # ================================================ - # Procedurally created skills - # ================================================ - class BaseUnitreeSkill(AbstractRobotSkill): - """Base skill for dynamic skill creation.""" - - def __call__(self) -> str: - super().__call__() # type: ignore[no-untyped-call] - - # For Go2: Simple api_id based call - if hasattr(self, "_app_id"): - string = f"{Colors.GREEN_PRINT_COLOR}Executing Go2 skill: {self.__class__.__name__} with api_id={self._app_id}{Colors.RESET_COLOR}" - print(string) - self._robot.connection.publish_request( # type: ignore[attr-defined] - RTC_TOPIC["SPORT_MOD"], {"api_id": self._app_id} - ) - return f"{self.__class__.__name__} executed successfully" - - # For G1: Fixed api_id with parameter data - elif hasattr(self, "_data_value"): - string = f"{Colors.GREEN_PRINT_COLOR}Executing G1 skill: {self.__class__.__name__} with data={self._data_value}{Colors.RESET_COLOR}" - print(string) - self._robot.connection.publish_request( # type: ignore[attr-defined] - self._topic, # type: 
ignore[attr-defined] - {"api_id": self._api_id, "parameter": {"data": self._data_value}}, # type: ignore[attr-defined] - ) - return f"{self.__class__.__name__} executed successfully" - else: - raise RuntimeError( - f"Skill {self.__class__.__name__} missing required attributes" - ) - - skills_classes = [] - - if self.robot_type == "g1": - # Create G1 arm skills - for name, data_value, description in G1_ARM_CONTROLS: - skill_class = type( - name, - (BaseUnitreeSkill,), - { - "__doc__": description, - "_topic": "rt/api/arm/request", - "_api_id": 7106, - "_data_value": data_value, - }, - ) - skills_classes.append(skill_class) - - # Create G1 mode skills - for name, data_value, description in G1_MODE_CONTROLS: - skill_class = type( - name, - (BaseUnitreeSkill,), - { - "__doc__": description, - "_topic": "rt/api/sport/request", - "_api_id": 7101, - "_data_value": data_value, - }, - ) - skills_classes.append(skill_class) - else: - # Go2 skills (existing code) - for name, app_id, description in UNITREE_WEBRTC_CONTROLS: - if name not in ["Reverse", "Spin"]: # Exclude reverse and spin skills - skill_class = type( - name, (BaseUnitreeSkill,), {"__doc__": description, "_app_id": app_id} - ) - skills_classes.append(skill_class) - - return skills_classes # type: ignore[return-value] - - # region Class-based Skills - - class Move(AbstractRobotSkill): - """Move the robot using direct velocity commands. 
Determine duration required based on user distance instructions.""" - - x: float = Field(..., description="Forward velocity (m/s).") - y: float = Field(default=0.0, description="Left/right velocity (m/s)") - yaw: float = Field(default=0.0, description="Rotational velocity (rad/s)") - duration: float = Field(default=0.0, description="How long to move (seconds).") - - def __call__(self) -> str: - self._robot.move( # type: ignore[attr-defined] - Twist(linear=Vector3(self.x, self.y, 0.0), angular=Vector3(0.0, 0.0, self.yaw)), - duration=self.duration, - ) - return f"started moving with velocity={self.x}, {self.y}, {self.yaw} for {self.duration} seconds" - - class Wait(AbstractSkill): - """Wait for a specified amount of time.""" - - seconds: float = Field(..., description="Seconds to wait") - - def __call__(self) -> str: - time.sleep(self.seconds) - return f"Wait completed with length={self.seconds}s" - - # endregion - - -# endregion diff --git a/dimos/simulation/README.md b/dimos/simulation/README.md deleted file mode 100644 index 95d8b4cda1..0000000000 --- a/dimos/simulation/README.md +++ /dev/null @@ -1,98 +0,0 @@ -# Dimensional Streaming Setup - -This guide explains how to set up and run the Isaac Sim and Genesis streaming functionality via Docker. The setup is tested on Ubuntu 22.04 (recommended). - -## Prerequisites - -1. **NVIDIA Driver** - - NVIDIA Driver 535 must be installed - - Check your driver: `nvidia-smi` - - If not installed: - ```bash - sudo apt-get update - sudo apt install build-essential -y - sudo apt-get install -y nvidia-driver-535 - sudo reboot - ``` - -2. **CUDA Toolkit** - ```bash - sudo apt install -y nvidia-cuda-toolkit - ``` - -3. **Docker** - ```bash - # Install Docker - curl -fsSL https://get.docker.com -o get-docker.sh - sudo sh get-docker.sh - - # Post-install steps - sudo groupadd docker - sudo usermod -aG docker $USER - newgrp docker - ``` - -4. 
**NVIDIA Container Toolkit** - ```bash - # Add NVIDIA Container Toolkit repository - curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg - curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list | \ - sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | \ - sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list - sudo apt-get update - - # Install the toolkit - sudo apt-get install -y nvidia-container-toolkit - sudo systemctl restart docker - - # Configure runtime - sudo nvidia-ctk runtime configure --runtime=docker - sudo systemctl restart docker - - # Verify installation - sudo docker run --rm --runtime=nvidia --gpus all ubuntu nvidia-smi - ``` - -5. **Pull Isaac Sim Image** - ```bash - sudo docker pull nvcr.io/nvidia/isaac-sim:4.2.0 - ``` - -6. **TO DO: Add ROS2 websocket server for client-side streaming** - -## Running the Streaming Example - -1. **Navigate to the docker/simulation directory** - ```bash - cd docker/simulation - ``` - -2. **Build and run with docker-compose** - For Isaac Sim: - ```bash - docker compose -f isaac/docker-compose.yml build - docker compose -f isaac/docker-compose.yml up - - ``` - - For Genesis: - ```bash - docker compose -f genesis/docker-compose.yml build - docker compose -f genesis/docker-compose.yml up - - ``` - -This will: -- Build the dimos_simulator image with ROS2 and required dependencies -- Start the MediaMTX RTSP server -- Run the test streaming example from either: - - `/tests/isaacsim/stream_camera.py` for Isaac Sim - - `/tests/genesissim/stream_camera.py` for Genesis - -## Viewing the Stream - -The camera stream will be available at: - -- RTSP: `rtsp://localhost:8554/stream` or `rtsp://:8554/stream` - -You can view it using VLC or any RTSP-capable player. 
diff --git a/dimos/simulation/engines/__init__.py b/dimos/simulation/engines/__init__.py new file mode 100644 index 0000000000..d437f9a7cd --- /dev/null +++ b/dimos/simulation/engines/__init__.py @@ -0,0 +1,25 @@ +"""Simulation engines for manipulator backends.""" + +from __future__ import annotations + +from typing import Literal + +from dimos.simulation.engines.base import SimulationEngine +from dimos.simulation.engines.mujoco_engine import MujocoEngine + +EngineType = Literal["mujoco"] + +_ENGINES: dict[EngineType, type[SimulationEngine]] = { + "mujoco": MujocoEngine, +} + + +def get_engine(engine_name: EngineType) -> type[SimulationEngine]: + return _ENGINES[engine_name] + + +__all__ = [ + "EngineType", + "SimulationEngine", + "get_engine", +] diff --git a/dimos/simulation/engines/base.py b/dimos/simulation/engines/base.py new file mode 100644 index 0000000000..d450614c62 --- /dev/null +++ b/dimos/simulation/engines/base.py @@ -0,0 +1,84 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Base interfaces for simulator engines.""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pathlib import Path + + from dimos.msgs.sensor_msgs import JointState + + +class SimulationEngine(ABC): + """Abstract base class for a simulator engine instance.""" + + def __init__(self, config_path: Path, headless: bool) -> None: + self._config_path = config_path + self._headless = headless + + @property + def config_path(self) -> Path: + return self._config_path + + @property + def headless(self) -> bool: + return self._headless + + @abstractmethod + def connect(self) -> bool: + """Connect to simulation and start the engine.""" + + @abstractmethod + def disconnect(self) -> bool: + """Disconnect from simulation and stop the engine.""" + + @property + @abstractmethod + def connected(self) -> bool: + """Whether the engine is connected.""" + + @property + @abstractmethod + def num_joints(self) -> int: + """Number of joints for the loaded robot.""" + + @property + @abstractmethod + def joint_names(self) -> list[str]: + """Joint names for the loaded robot.""" + + @abstractmethod + def read_joint_positions(self) -> list[float]: + """Read joint positions in radians.""" + + @abstractmethod + def read_joint_velocities(self) -> list[float]: + """Read joint velocities in rad/s.""" + + @abstractmethod + def read_joint_efforts(self) -> list[float]: + """Read joint efforts in Nm.""" + + @abstractmethod + def write_joint_command(self, command: JointState) -> None: + """Command joints using a JointState message.""" + + @abstractmethod + def hold_current_position(self) -> None: + """Hold current joint positions.""" diff --git a/dimos/simulation/engines/mujoco_engine.py b/dimos/simulation/engines/mujoco_engine.py new file mode 100644 index 0000000000..ddaaa25ad3 --- /dev/null +++ b/dimos/simulation/engines/mujoco_engine.py @@ -0,0 +1,300 @@ +# Copyright 2025-2026 Dimensional Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""MuJoCo simulation engine implementation.""" + +from __future__ import annotations + +import threading +import time +from typing import TYPE_CHECKING + +import mujoco +import mujoco.viewer as viewer # type: ignore[import-untyped,import-not-found] + +from dimos.simulation.engines.base import SimulationEngine +from dimos.simulation.utils.xml_parser import JointMapping, build_joint_mappings +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from pathlib import Path + + from dimos.msgs.sensor_msgs import JointState + +logger = setup_logger() + + +class MujocoEngine(SimulationEngine): + """ + MuJoCo simulation engine. 
+ + - starts MuJoCo simulation engine + - loads robot/environment into simulation + - applies control commands + """ + + def __init__(self, config_path: Path, headless: bool) -> None: + super().__init__(config_path=config_path, headless=headless) + + xml_path = self._resolve_xml_path(config_path) + self._model = mujoco.MjModel.from_xml_path(str(xml_path)) + self._xml_path = xml_path + + self._data = mujoco.MjData(self._model) + self._joint_mappings = build_joint_mappings(self._xml_path, self._model) + self._joint_names = [mapping.name for mapping in self._joint_mappings] + self._num_joints = len(self._joint_names) + timestep = float(self._model.opt.timestep) + self._control_frequency = 1.0 / timestep if timestep > 0.0 else 100.0 + + self._connected = False + self._lock = threading.Lock() + self._stop_event = threading.Event() + self._sim_thread: threading.Thread | None = None + + self._joint_positions = [0.0] * self._num_joints + self._joint_velocities = [0.0] * self._num_joints + self._joint_efforts = [0.0] * self._num_joints + + self._joint_position_targets = [0.0] * self._num_joints + self._joint_velocity_targets = [0.0] * self._num_joints + self._joint_effort_targets = [0.0] * self._num_joints + self._command_mode = "position" + for i, mapping in enumerate(self._joint_mappings): + current_pos = self._current_position(mapping) + self._joint_position_targets[i] = current_pos + self._joint_positions[i] = current_pos + + def _resolve_xml_path(self, config_path: Path) -> Path: + if config_path is None: + raise ValueError("config_path is required for MuJoCo simulation loading") + resolved = config_path.expanduser() + xml_path = resolved / "scene.xml" if resolved.is_dir() else resolved + if not xml_path.exists(): + raise FileNotFoundError(f"MuJoCo XML not found: {xml_path}") + return xml_path + + def _current_position(self, mapping: JointMapping) -> float: + if mapping.joint_id is not None and mapping.qpos_adr is not None: + return 
float(self._data.qpos[mapping.qpos_adr]) + if mapping.tendon_qpos_adrs: + return float( + sum(self._data.qpos[adr] for adr in mapping.tendon_qpos_adrs) + / len(mapping.tendon_qpos_adrs) + ) + if mapping.actuator_id is not None: + return float(self._data.actuator_length[mapping.actuator_id]) + return 0.0 + + def _apply_control(self) -> None: + with self._lock: + if self._command_mode == "effort": + targets = list(self._joint_effort_targets) + elif self._command_mode == "velocity": + targets = list(self._joint_velocity_targets) + elif self._command_mode == "position": + targets = list(self._joint_position_targets) + for i, mapping in enumerate(self._joint_mappings): + if mapping.actuator_id is None: + continue + if i < len(targets): + self._data.ctrl[mapping.actuator_id] = targets[i] + + def _update_joint_state(self) -> None: + with self._lock: + for i, mapping in enumerate(self._joint_mappings): + if mapping.joint_id is not None: + if mapping.qpos_adr is not None: + self._joint_positions[i] = float(self._data.qpos[mapping.qpos_adr]) + if mapping.dof_adr is not None: + self._joint_velocities[i] = float(self._data.qvel[mapping.dof_adr]) + self._joint_efforts[i] = float(self._data.qfrc_actuator[mapping.dof_adr]) + continue + + if mapping.tendon_qpos_adrs: + pos_sum = sum(self._data.qpos[adr] for adr in mapping.tendon_qpos_adrs) + count = len(mapping.tendon_qpos_adrs) + self._joint_positions[i] = float(pos_sum / count) + if mapping.tendon_dof_adrs: + vel_sum = sum(self._data.qvel[adr] for adr in mapping.tendon_dof_adrs) + self._joint_velocities[i] = float(vel_sum / len(mapping.tendon_dof_adrs)) + else: + self._joint_velocities[i] = 0.0 + elif mapping.actuator_id is not None: + self._joint_positions[i] = float( + self._data.actuator_length[mapping.actuator_id] + ) + self._joint_velocities[i] = 0.0 + + if mapping.actuator_id is not None: + self._joint_efforts[i] = float(self._data.actuator_force[mapping.actuator_id]) + + def connect(self) -> bool: + try: + 
logger.info(f"{self.__class__.__name__}: connect()") + with self._lock: + self._connected = True + self._stop_event.clear() + + if self._sim_thread is None or not self._sim_thread.is_alive(): + self._sim_thread = threading.Thread( + target=self._sim_loop, + name=f"{self.__class__.__name__}Sim", + daemon=True, + ) + self._sim_thread.start() + return True + except Exception as e: + logger.error(f"{self.__class__.__name__}: connect() failed: {e}") + return False + + def disconnect(self) -> bool: + try: + logger.info(f"{self.__class__.__name__}: disconnect()") + with self._lock: + self._connected = False + self._stop_event.set() + if self._sim_thread and self._sim_thread.is_alive(): + self._sim_thread.join(timeout=2.0) + self._sim_thread = None + return True + except Exception as e: + logger.error(f"{self.__class__.__name__}: disconnect() failed: {e}") + return False + + def _sim_loop(self) -> None: + logger.info(f"{self.__class__.__name__}: sim loop started") + dt = 1.0 / self._control_frequency + + def _step_once(sync_viewer: bool) -> None: + loop_start = time.time() + self._apply_control() + mujoco.mj_step(self._model, self._data) + if sync_viewer: + m_viewer.sync() + self._update_joint_state() + + elapsed = time.time() - loop_start + sleep_time = dt - elapsed + if sleep_time > 0: + time.sleep(sleep_time) + + if self._headless: + while not self._stop_event.is_set(): + _step_once(sync_viewer=False) + else: + with viewer.launch_passive( + self._model, self._data, show_left_ui=False, show_right_ui=False + ) as m_viewer: + while m_viewer.is_running() and not self._stop_event.is_set(): + _step_once(sync_viewer=True) + + logger.info(f"{self.__class__.__name__}: sim loop stopped") + + @property + def connected(self) -> bool: + with self._lock: + return self._connected + + @property + def num_joints(self) -> int: + return self._num_joints + + @property + def joint_names(self) -> list[str]: + return list(self._joint_names) + + @property + def model(self) -> mujoco.MjModel: + 
return self._model + + @property + def joint_positions(self) -> list[float]: + with self._lock: + return list(self._joint_positions) + + @property + def joint_velocities(self) -> list[float]: + with self._lock: + return list(self._joint_velocities) + + @property + def joint_efforts(self) -> list[float]: + with self._lock: + return list(self._joint_efforts) + + @property + def control_frequency(self) -> float: + return self._control_frequency + + def read_joint_positions(self) -> list[float]: + return self.joint_positions + + def read_joint_velocities(self) -> list[float]: + return self.joint_velocities + + def read_joint_efforts(self) -> list[float]: + return self.joint_efforts + + def write_joint_command(self, command: JointState) -> None: + if command.position: + self._command_mode = "position" + self._set_position_targets(command.position) + return + if command.velocity: + self._command_mode = "velocity" + self._set_velocity_targets(command.velocity) + return + if command.effort: + self._command_mode = "effort" + self._set_effort_targets(command.effort) + return + + def _set_position_targets(self, positions: list[float]) -> None: + if len(positions) > self._num_joints: + raise ValueError( + f"Position command has {len(positions)} joints, expected at most {self._num_joints}" + ) + with self._lock: + for i in range(len(positions)): + self._joint_position_targets[i] = float(positions[i]) + + def _set_velocity_targets(self, velocities: list[float]) -> None: + if len(velocities) > self._num_joints: + raise ValueError( + f"Velocity command has {len(velocities)} joints, expected at most {self._num_joints}" + ) + with self._lock: + for i in range(len(velocities)): + self._joint_velocity_targets[i] = float(velocities[i]) + + def _set_effort_targets(self, efforts: list[float]) -> None: + if len(efforts) > self._num_joints: + raise ValueError( + f"Effort command has {len(efforts)} joints, expected at most {self._num_joints}" + ) + with self._lock: + for i in 
range(len(efforts)): + self._joint_effort_targets[i] = float(efforts[i]) + + def hold_current_position(self) -> None: + with self._lock: + self._command_mode = "position" + for i, mapping in enumerate(self._joint_mappings): + self._joint_position_targets[i] = self._current_position(mapping) + + +__all__ = [ + "MujocoEngine", +] diff --git a/dimos/simulation/manipulators/__init__.py b/dimos/simulation/manipulators/__init__.py new file mode 100644 index 0000000000..816de0a18d --- /dev/null +++ b/dimos/simulation/manipulators/__init__.py @@ -0,0 +1,54 @@ +# Copyright 2025 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Simulation manipulator utilities.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dimos.simulation.manipulators.sim_manip_interface import SimManipInterface + from dimos.simulation.manipulators.sim_module import ( + SimulationModule, + SimulationModuleConfig, + simulation, + ) + +__all__ = [ + "SimManipInterface", + "SimulationModule", + "SimulationModuleConfig", + "simulation", +] + + +def __getattr__(name: str): # type: ignore[no-untyped-def] + if name == "SimManipInterface": + from dimos.simulation.manipulators.sim_manip_interface import SimManipInterface + + return SimManipInterface + if name in {"SimulationModule", "SimulationModuleConfig", "simulation"}: + from dimos.simulation.manipulators.sim_module import ( + SimulationModule, + SimulationModuleConfig, + simulation, + ) + + return { + "SimulationModule": SimulationModule, + "SimulationModuleConfig": SimulationModuleConfig, + "simulation": simulation, + }[name] + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/dimos/simulation/manipulators/sim_manip_interface.py b/dimos/simulation/manipulators/sim_manip_interface.py new file mode 100644 index 0000000000..c829f0c864 --- /dev/null +++ b/dimos/simulation/manipulators/sim_manip_interface.py @@ -0,0 +1,200 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Simulation-agnostic manipulator interface.""" + +from __future__ import annotations + +import logging +import math +from typing import TYPE_CHECKING + +from dimos.hardware.manipulators.spec import ControlMode, JointLimits, ManipulatorInfo +from dimos.msgs.sensor_msgs import JointState + +if TYPE_CHECKING: + from dimos.simulation.engines.base import SimulationEngine + + +class SimManipInterface: + """Adapter wrapper around a simulation engine to provide a uniform manipulator API.""" + + def __init__(self, engine: SimulationEngine) -> None: + self.logger = logging.getLogger(self.__class__.__name__) + self._engine = engine + self._joint_names = list(engine.joint_names) + self._dof = len(self._joint_names) + self._connected = False + self._servos_enabled = False + self._control_mode = ControlMode.POSITION + self._error_code = 0 + self._error_message = "" + + def connect(self) -> bool: + """Connect to the simulation engine.""" + try: + self.logger.info("Connecting to simulation engine...") + if not self._engine.connect(): + self.logger.error("Failed to connect to simulation engine") + return False + if self._engine.connected: + self._connected = True + self._servos_enabled = True + self._joint_names = list(self._engine.joint_names) + self._dof = len(self._joint_names) + self.logger.info( + "Successfully connected to simulation", + extra={"dof": self._dof}, + ) + return True + self.logger.error("Failed to connect to simulation engine") + return False + except Exception as exc: + self.logger.error(f"Sim connection failed: {exc}") + return False + + def disconnect(self) -> bool: + """Disconnect from simulation.""" + try: + return self._engine.disconnect() + except Exception as exc: + self._connected = False + self.logger.error(f"Sim disconnection failed: {exc}") + return False + + def is_connected(self) -> bool: + return bool(self._connected and self._engine.connected) + + def get_info(self) -> ManipulatorInfo: + vendor = "Simulation" + model = "Simulation" + dof = 
self._dof + return ManipulatorInfo( + vendor=vendor, + model=model, + dof=dof, + firmware_version=None, + serial_number=None, + ) + + def get_dof(self) -> int: + return self._dof + + def get_joint_names(self) -> list[str]: + return list(self._joint_names) + + def get_limits(self) -> JointLimits: + lower = [-math.pi] * self._dof + upper = [math.pi] * self._dof + max_vel_rad = math.radians(180.0) + return JointLimits( + position_lower=lower, + position_upper=upper, + velocity_max=[max_vel_rad] * self._dof, + ) + + def set_control_mode(self, mode: ControlMode) -> bool: + self._control_mode = mode + return True + + def get_control_mode(self) -> ControlMode: + return self._control_mode + + def read_joint_positions(self) -> list[float]: + positions = self._engine.read_joint_positions() + return positions[: self._dof] + + def read_joint_velocities(self) -> list[float]: + velocities = self._engine.read_joint_velocities() + return velocities[: self._dof] + + def read_joint_efforts(self) -> list[float]: + efforts = self._engine.read_joint_efforts() + return efforts[: self._dof] + + def read_state(self) -> dict[str, int]: + velocities = self.read_joint_velocities() + is_moving = any(abs(v) > 1e-4 for v in velocities) + mode_int = list(ControlMode).index(self._control_mode) + return { + "state": 1 if is_moving else 0, + "mode": mode_int, + } + + def read_error(self) -> tuple[int, str]: + return self._error_code, self._error_message + + def write_joint_positions(self, positions: list[float]) -> bool: + if not self._servos_enabled: + return False + self._control_mode = ControlMode.POSITION + self._engine.write_joint_command(JointState(position=positions[: self._dof])) + return True + + def write_joint_velocities(self, velocities: list[float]) -> bool: + if not self._servos_enabled: + return False + self._control_mode = ControlMode.VELOCITY + self._engine.write_joint_command(JointState(velocity=velocities[: self._dof])) + return True + + def write_joint_efforts(self, efforts: 
list[float]) -> bool: + if not self._servos_enabled: + return False + self._control_mode = ControlMode.TORQUE + self._engine.write_joint_command(JointState(effort=efforts[: self._dof])) + return True + + def write_stop(self) -> bool: + self._engine.hold_current_position() + return True + + def write_enable(self, enable: bool) -> bool: + self._servos_enabled = enable + return True + + def read_enabled(self) -> bool: + return self._servos_enabled + + def write_clear_errors(self) -> bool: + self._error_code = 0 + self._error_message = "" + return True + + def read_cartesian_position(self) -> dict[str, float] | None: + return None + + def write_cartesian_position( + self, + pose: dict[str, float], + velocity: float = 1.0, + ) -> bool: + _pose = pose + _velocity = velocity + return False + + def read_gripper_position(self) -> float | None: + return None + + def write_gripper_position(self, position: float) -> bool: + _ = position + return False + + def read_force_torque(self) -> list[float] | None: + return None + + +__all__ = [ + "SimManipInterface", +] diff --git a/dimos/simulation/manipulators/sim_module.py b/dimos/simulation/manipulators/sim_module.py new file mode 100644 index 0000000000..4f1bb986d3 --- /dev/null +++ b/dimos/simulation/manipulators/sim_module.py @@ -0,0 +1,247 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Simulator-agnostic manipulator simulation module.""" + +from __future__ import annotations + +from dataclasses import dataclass +import threading +import time +from typing import TYPE_CHECKING, Any + +from reactivex.disposable import Disposable + +from dimos.core import In, Module, Out, rpc +from dimos.core.module import ModuleConfig +from dimos.msgs.sensor_msgs import JointCommand, JointState, RobotState +from dimos.simulation.engines import EngineType, get_engine +from dimos.simulation.manipulators.sim_manip_interface import SimManipInterface + +if TYPE_CHECKING: + from collections.abc import Callable + from pathlib import Path + + +@dataclass(kw_only=True) +class SimulationModuleConfig(ModuleConfig): + engine: EngineType + config_path: Path | Callable[[], Path] + headless: bool = False + + +class SimulationModule(Module[SimulationModuleConfig]): + """Module wrapper for manipulator simulation across engines.""" + + default_config = SimulationModuleConfig + config: SimulationModuleConfig + + joint_state: Out[JointState] + robot_state: Out[RobotState] + joint_position_command: In[JointCommand] + joint_velocity_command: In[JointCommand] + + MIN_CONTROL_RATE = 1.0 + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self._backend: SimManipInterface | None = None + self._control_rate = 100.0 + self._monitor_rate = 100.0 + self._joint_prefix = "joint" + self._stop_event = threading.Event() + self._control_thread: threading.Thread | None = None + self._monitor_thread: threading.Thread | None = None + self._command_lock = threading.Lock() + self._pending_positions: list[float] | None = None + self._pending_velocities: list[float] | None = None + + def _create_backend(self) -> SimManipInterface: + engine_cls = get_engine(self.config.engine) + config_path = ( + self.config.config_path() + if callable(self.config.config_path) + else self.config.config_path + ) + engine = engine_cls( + config_path=config_path, + 
headless=self.config.headless, + ) + return SimManipInterface(engine=engine) + + @rpc + def start(self) -> None: + super().start() + if self._backend is None: + self._backend = self._create_backend() + if not self._backend.connect(): + raise RuntimeError("Failed to connect to simulation backend") + self._backend.write_enable(True) + + self._disposables.add( + Disposable(self.joint_position_command.subscribe(self._on_joint_position_command)) + ) + self._disposables.add( + Disposable(self.joint_velocity_command.subscribe(self._on_joint_velocity_command)) + ) + + self._stop_event.clear() + self._control_thread = threading.Thread( + target=self._control_loop, + daemon=True, + name=f"{self.__class__.__name__}-control", + ) + self._monitor_thread = threading.Thread( + target=self._monitor_loop, + daemon=True, + name=f"{self.__class__.__name__}-monitor", + ) + self._control_thread.start() + self._monitor_thread.start() + + @rpc + def stop(self) -> None: + self._stop_event.set() + if self._control_thread and self._control_thread.is_alive(): + self._control_thread.join(timeout=2.0) + if self._monitor_thread and self._monitor_thread.is_alive(): + self._monitor_thread.join(timeout=2.0) + if self._backend: + self._backend.disconnect() + super().stop() + + @rpc + def enable_servos(self) -> bool: + if not self._backend: + return False + return self._backend.write_enable(True) + + @rpc + def disable_servos(self) -> bool: + if not self._backend: + return False + return self._backend.write_enable(False) + + @rpc + def clear_errors(self) -> bool: + if not self._backend: + return False + return self._backend.write_clear_errors() + + @rpc + def emergency_stop(self) -> bool: + if not self._backend: + return False + return self._backend.write_stop() + + def _on_joint_position_command(self, msg: JointCommand) -> None: + with self._command_lock: + self._pending_positions = list(msg.positions) + self._pending_velocities = None + + def _on_joint_velocity_command(self, msg: JointCommand) -> 
None: + with self._command_lock: + self._pending_velocities = list(msg.positions) + self._pending_positions = None + + def _control_loop(self) -> None: + period = 1.0 / max(self._control_rate, self.MIN_CONTROL_RATE) + next_tick = time.monotonic() # monotonic time used to avoid time drift + while not self._stop_event.is_set(): + with self._command_lock: + positions = ( + None if self._pending_positions is None else list(self._pending_positions) + ) + velocities = ( + None if self._pending_velocities is None else list(self._pending_velocities) + ) + + if self._backend: + if positions is not None: + self._backend.write_joint_positions(positions) + elif velocities is not None: + self._backend.write_joint_velocities(velocities) + dof = self._backend.get_dof() + names = self._resolve_joint_names(dof) + positions = self._backend.read_joint_positions() + velocities = self._backend.read_joint_velocities() + efforts = self._backend.read_joint_efforts() + self.joint_state.publish( + JointState( + frame_id=self.frame_id, + name=names, + position=positions, + velocity=velocities, + effort=efforts, + ) + ) + next_tick += period + sleep_for = next_tick - time.monotonic() + if sleep_for > 0: + if self._stop_event.wait(sleep_for): + break + else: + next_tick = time.monotonic() + + def _monitor_loop(self) -> None: + period = 1.0 / max(self._monitor_rate, self.MIN_CONTROL_RATE) + next_tick = time.monotonic() # monotonic time used to avoid time drift + while not self._stop_event.is_set(): + if not self._backend: + pass + else: + dof = self._backend.get_dof() + self._resolve_joint_names(dof) + positions = self._backend.read_joint_positions() + self._backend.read_joint_velocities() + self._backend.read_joint_efforts() + state = self._backend.read_state() + error_code, _ = self._backend.read_error() + self.robot_state.publish( + RobotState( + state=state.get("state", 0), + mode=state.get("mode", 0), + error_code=error_code, + warn_code=0, + cmdnum=0, + mt_brake=0, + mt_able=1 if 
self._backend.read_enabled() else 0, + tcp_pose=[], + tcp_offset=[], + joints=[float(p) for p in positions], + ) + ) + next_tick += period + sleep_for = next_tick - time.monotonic() + if sleep_for > 0: + if self._stop_event.wait(sleep_for): + break + else: + next_tick = time.monotonic() + + def _resolve_joint_names(self, dof: int) -> list[str]: + if self._backend: + names = self._backend.get_joint_names() + if len(names) >= dof: + return list(names[:dof]) + return [f"{self._joint_prefix}{i + 1}" for i in range(dof)] + + +simulation = SimulationModule.blueprint + +__all__ = [ + "SimulationModule", + "SimulationModuleConfig", + "simulation", +] diff --git a/dimos/simulation/manipulators/test_sim_module.py b/dimos/simulation/manipulators/test_sim_module.py new file mode 100644 index 0000000000..334e2ce85f --- /dev/null +++ b/dimos/simulation/manipulators/test_sim_module.py @@ -0,0 +1,123 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from pathlib import Path +import threading + +import pytest + +from dimos.simulation.manipulators.sim_module import SimulationModule + + +class _DummyRPC: + def serve_module_rpc(self, _module) -> None: # type: ignore[no-untyped-def] + return None + + def start(self) -> None: + return None + + def stop(self) -> None: + return None + + +class _FakeBackend: + def __init__(self) -> None: + self._names = ["joint1", "joint2", "joint3"] + + def get_dof(self) -> int: + return len(self._names) + + def get_joint_names(self) -> list[str]: + return list(self._names) + + def read_joint_positions(self) -> list[float]: + return [0.1, 0.2, 0.3] + + def read_joint_velocities(self) -> list[float]: + return [0.0, 0.0, 0.0] + + def read_joint_efforts(self) -> list[float]: + return [0.0, 0.0, 0.0] + + def read_state(self) -> dict[str, int]: + return {"state": 1, "mode": 2} + + def read_error(self) -> tuple[int, str]: + return 0, "" + + def read_enabled(self) -> bool: + return True + + def disconnect(self) -> None: + return None + + +def _run_single_monitor_iteration(module: SimulationModule, monkeypatch) -> None: # type: ignore[no-untyped-def] + def _wait_once(_: float) -> bool: + module._stop_event.set() + raise StopIteration + + monkeypatch.setattr(module._stop_event, "wait", _wait_once) + with pytest.raises(StopIteration): + module._monitor_loop() + + +def _run_single_control_iteration(module: SimulationModule, monkeypatch) -> None: # type: ignore[no-untyped-def] + def _wait_once(_: float) -> bool: + module._stop_event.set() + raise StopIteration + + monkeypatch.setattr(module._stop_event, "wait", _wait_once) + with pytest.raises(StopIteration): + module._control_loop() + + +def test_simulation_module_publishes_joint_state(monkeypatch) -> None: + module = SimulationModule( + engine="mujoco", + config_path=Path("."), + rpc_transport=_DummyRPC, + ) + module._backend = _FakeBackend() # type: ignore[assignment] + module._stop_event = threading.Event() + + joint_states: list[object] = 
[] + module.joint_state.subscribe(joint_states.append) + try: + _run_single_control_iteration(module, monkeypatch) + finally: + module.stop() + + assert len(joint_states) == 1 + assert joint_states[0].name == ["joint1", "joint2", "joint3"] + + +def test_simulation_module_publishes_robot_state(monkeypatch) -> None: + module = SimulationModule( + engine="mujoco", + config_path=Path("."), + rpc_transport=_DummyRPC, + ) + module._backend = _FakeBackend() # type: ignore[assignment] + module._stop_event = threading.Event() + + robot_states: list[object] = [] + module.robot_state.subscribe(robot_states.append) + try: + _run_single_monitor_iteration(module, monkeypatch) + finally: + module.stop() + + assert len(robot_states) == 1 + assert robot_states[0].state == 1 diff --git a/dimos/simulation/mujoco/mujoco_process.py b/dimos/simulation/mujoco/mujoco_process.py index f3e6eba279..8529de976b 100755 --- a/dimos/simulation/mujoco/mujoco_process.py +++ b/dimos/simulation/mujoco/mujoco_process.py @@ -63,7 +63,8 @@ def get_command(self) -> NDArray[Any]: self._command[0] = linear[0] # forward/backward self._command[1] = linear[1] # left/right self._command[2] = angular[2] # rotation - return self._command.copy() + result: NDArray[Any] = self._command.copy() + return result def stop(self) -> None: """Stop method to satisfy InputController protocol.""" diff --git a/dimos/simulation/sim_blueprints.py b/dimos/simulation/sim_blueprints.py new file mode 100644 index 0000000000..8b91ff817a --- /dev/null +++ b/dimos/simulation/sim_blueprints.py @@ -0,0 +1,48 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from dimos.core.transport import LCMTransport +from dimos.msgs.sensor_msgs import ( # type: ignore[attr-defined] + JointCommand, + JointState, + RobotState, +) +from dimos.msgs.trajectory_msgs import JointTrajectory +from dimos.simulation.manipulators.sim_module import simulation +from dimos.utils.data import LfsPath + +xarm7_trajectory_sim = simulation( + engine="mujoco", + config_path=LfsPath("xarm7/scene.xml"), + headless=True, +).transports( + { + ("joint_state", JointState): LCMTransport("/xarm/joint_states", JointState), + ("robot_state", RobotState): LCMTransport("/xarm/robot_state", RobotState), + ("joint_position_command", JointCommand): LCMTransport( + "/xarm/joint_position_command", JointCommand + ), + ("trajectory", JointTrajectory): LCMTransport("/trajectory", JointTrajectory), + } +) + + +__all__ = [ + "simulation", + "xarm7_trajectory_sim", +] + +if __name__ == "__main__": + xarm7_trajectory_sim.build().loop() diff --git a/dimos/simulation/utils/xml_parser.py b/dimos/simulation/utils/xml_parser.py new file mode 100644 index 0000000000..052657ea95 --- /dev/null +++ b/dimos/simulation/utils/xml_parser.py @@ -0,0 +1,277 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""MuJoCo XML parsing helpers for joint/actuator metadata.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING +import xml.etree.ElementTree as ET + +import mujoco + +if TYPE_CHECKING: + from pathlib import Path + + +@dataclass(frozen=True) +class JointMapping: + name: str + joint_id: int | None + actuator_id: int | None + qpos_adr: int | None + dof_adr: int | None + tendon_qpos_adrs: tuple[int, ...] + tendon_dof_adrs: tuple[int, ...] + + +@dataclass(frozen=True) +class _ActuatorSpec: + name: str + joint: str | None + tendon: str | None + + +def build_joint_mappings(xml_path: Path, model: mujoco.MjModel) -> list[JointMapping]: + specs = _parse_actuator_specs(xml_path) + if specs: + return _build_joint_mappings_from_specs(specs, model) + if int(model.nu) > 0: + return _build_joint_mappings_from_actuators(model) + return _build_joint_mappings_from_model(model) + + +def _parse_actuator_specs(xml_path: Path) -> list[_ActuatorSpec]: + return _collect_actuator_specs(xml_path.resolve(), seen=set()) + + +def _collect_actuator_specs(xml_path: Path, seen: set[Path]) -> list[_ActuatorSpec]: + if xml_path in seen: + return [] + seen.add(xml_path) + + root = ET.parse(xml_path).getroot() + base_dir = xml_path.parent + specs: list[_ActuatorSpec] = [] + + def walk(node: ET.Element) -> None: + for child in node: + if child.tag == "include": + include_file = child.attrib.get("file") + if include_file: + include_path = (base_dir / include_file).resolve() + specs.extend(_collect_actuator_specs(include_path, seen)) 
+ continue + if child.tag == "actuator": + specs.extend(_parse_actuator_block(child)) + continue + walk(child) + + walk(root) + return specs + + +def _parse_actuator_block(actuator_elem: ET.Element) -> list[_ActuatorSpec]: + specs: list[_ActuatorSpec] = [] + for child in actuator_elem: + joint = child.attrib.get("joint") + tendon = child.attrib.get("tendon") + if not joint and not tendon: + continue + name = child.attrib.get("name") or joint or tendon or "actuator" + specs.append(_ActuatorSpec(name=name, joint=joint, tendon=tendon)) + return specs + + +def _build_joint_mappings_from_specs( + specs: list[_ActuatorSpec], + model: mujoco.MjModel, +) -> list[JointMapping]: + mappings: list[JointMapping] = [] + for spec in specs: + if spec.joint: + mappings.append(_mapping_for_joint(spec, model)) + elif spec.tendon: + mappings.append(_mapping_for_tendon(spec, model)) + return mappings + + +def _mapping_for_joint(spec: _ActuatorSpec, model: mujoco.MjModel) -> JointMapping: + joint_id = mujoco.mj_name2id(model, mujoco.mjtObj.mjOBJ_JOINT, spec.joint) + if joint_id < 0: + raise ValueError(f"Unknown joint '{spec.joint}' in MuJoCo model") + actuator_id = _find_actuator_id_for_joint(model, joint_id, spec.name) + joint_name = mujoco.mj_id2name(model, mujoco.mjtObj.mjOBJ_JOINT, joint_id) or spec.name + return JointMapping( + name=joint_name, + joint_id=joint_id, + actuator_id=actuator_id, + qpos_adr=int(model.jnt_qposadr[joint_id]), + dof_adr=int(model.jnt_dofadr[joint_id]), + tendon_qpos_adrs=(), + tendon_dof_adrs=(), + ) + + +def _mapping_for_tendon(spec: _ActuatorSpec, model: mujoco.MjModel) -> JointMapping: + name = spec.name or spec.tendon + if not name: + raise ValueError("Tendon actuator is missing a name and tendon reference") + tendon_id = mujoco.mj_name2id(model, mujoco.mjtObj.mjOBJ_TENDON, spec.tendon) + if tendon_id < 0: + raise ValueError(f"Unknown tendon '{spec.tendon}' in MuJoCo model") + actuator_id = _find_actuator_id_for_tendon(model, tendon_id, spec.name) + 
joint_ids = _tendon_joint_ids(model, tendon_id) + return JointMapping( + name=name, + joint_id=None, + actuator_id=actuator_id, + qpos_adr=None, + dof_adr=None, + tendon_qpos_adrs=tuple(int(model.jnt_qposadr[joint_id]) for joint_id in joint_ids), + tendon_dof_adrs=tuple(int(model.jnt_dofadr[joint_id]) for joint_id in joint_ids), + ) + + +def _find_actuator_id_for_joint( + model: mujoco.MjModel, + joint_id: int, + actuator_name: str | None, +) -> int | None: + if actuator_name: + act_id = mujoco.mj_name2id(model, mujoco.mjtObj.mjOBJ_ACTUATOR, actuator_name) + if act_id >= 0: + return int(act_id) + for act_id in range(int(model.nu)): + trn_type = int(model.actuator_trntype[act_id]) + if trn_type != int(mujoco.mjtTrn.mjTRN_JOINT): + continue + if int(model.actuator_trnid[act_id, 0]) == joint_id: + return act_id + return None + + +def _find_actuator_id_for_tendon( + model: mujoco.MjModel, + tendon_id: int, + actuator_name: str | None, +) -> int | None: + if actuator_name: + act_id = mujoco.mj_name2id(model, mujoco.mjtObj.mjOBJ_ACTUATOR, actuator_name) + if act_id >= 0: + return int(act_id) + for act_id in range(int(model.nu)): + trn_type = int(model.actuator_trntype[act_id]) + if trn_type != int(mujoco.mjtTrn.mjTRN_TENDON): + continue + if int(model.actuator_trnid[act_id, 0]) == tendon_id: + return act_id + return None + + +def _tendon_joint_ids(model: mujoco.MjModel, tendon_id: int) -> tuple[int, ...]: + adr = int(model.tendon_adr[tendon_id]) + num = int(model.tendon_num[tendon_id]) + joint_ids: list[int] = [] + for wrap_id in range(adr, adr + num): + wrap_type = int(model.wrap_type[wrap_id]) + if wrap_type == int(mujoco.mjtWrap.mjWRAP_JOINT): + joint_ids.append(int(model.wrap_objid[wrap_id])) + return tuple(joint_ids) + + +def _build_joint_mappings_from_actuators(model: mujoco.MjModel) -> list[JointMapping]: + mappings: list[JointMapping] = [] + for actuator_id in range(int(model.nu)): + actuator_name = mujoco.mj_id2name(model, mujoco.mjtObj.mjOBJ_ACTUATOR, 
actuator_id) + name = actuator_name or f"actuator{actuator_id}" + trn_type = int(model.actuator_trntype[actuator_id]) + if trn_type == int(mujoco.mjtTrn.mjTRN_JOINT): + joint_id = int(model.actuator_trnid[actuator_id, 0]) + joint_name = mujoco.mj_id2name(model, mujoco.mjtObj.mjOBJ_JOINT, joint_id) + mappings.append( + JointMapping( + name=joint_name or name, + joint_id=joint_id, + actuator_id=actuator_id, + qpos_adr=int(model.jnt_qposadr[joint_id]), + dof_adr=int(model.jnt_dofadr[joint_id]), + tendon_qpos_adrs=(), + tendon_dof_adrs=(), + ) + ) + continue + + if trn_type == int(mujoco.mjtTrn.mjTRN_TENDON): + tendon_id = int(model.actuator_trnid[actuator_id, 0]) + tendon_name = mujoco.mj_id2name(model, mujoco.mjtObj.mjOBJ_TENDON, tendon_id) + if not actuator_name and tendon_name: + name = tendon_name + joint_ids = _tendon_joint_ids(model, tendon_id) + mappings.append( + JointMapping( + name=name, + joint_id=None, + actuator_id=actuator_id, + qpos_adr=None, + dof_adr=None, + tendon_qpos_adrs=tuple( + int(model.jnt_qposadr[joint_id]) for joint_id in joint_ids + ), + tendon_dof_adrs=tuple( + int(model.jnt_dofadr[joint_id]) for joint_id in joint_ids + ), + ) + ) + continue + + mappings.append( + JointMapping( + name=name, + joint_id=None, + actuator_id=actuator_id, + qpos_adr=None, + dof_adr=None, + tendon_qpos_adrs=(), + tendon_dof_adrs=(), + ) + ) + + return mappings + + +def _build_joint_mappings_from_model(model: mujoco.MjModel) -> list[JointMapping]: + mappings: list[JointMapping] = [] + for joint_id in range(int(model.njnt)): + jnt_type = int(model.jnt_type[joint_id]) + if jnt_type not in ( + int(mujoco.mjtJoint.mjJNT_HINGE), + int(mujoco.mjtJoint.mjJNT_SLIDE), + ): + continue + joint_name = mujoco.mj_id2name(model, mujoco.mjtObj.mjOBJ_JOINT, joint_id) + name = joint_name or f"joint{joint_id}" + mappings.append( + JointMapping( + name=name, + joint_id=joint_id, + actuator_id=None, + qpos_adr=int(model.jnt_qposadr[joint_id]), + 
dof_adr=int(model.jnt_dofadr[joint_id]), + tendon_qpos_adrs=(), + tendon_dof_adrs=(), + ) + ) + return mappings diff --git a/dimos/spec/__init__.py b/dimos/spec/__init__.py index 03c1024d12..1423bec9a1 100644 --- a/dimos/spec/__init__.py +++ b/dimos/spec/__init__.py @@ -1,13 +1,12 @@ from dimos.spec.control import LocalPlanner -from dimos.spec.map import Global3DMap, GlobalCostmap, GlobalMap +from dimos.spec.mapping import GlobalCostmap, GlobalPointcloud from dimos.spec.nav import Nav from dimos.spec.perception import Camera, Image, Pointcloud __all__ = [ "Camera", - "Global3DMap", "GlobalCostmap", - "GlobalMap", + "GlobalPointcloud", "Image", "LocalPlanner", "Nav", diff --git a/dimos/spec/map.py b/dimos/spec/mapping.py similarity index 85% rename from dimos/spec/map.py rename to dimos/spec/mapping.py index 438b77a7a6..f8e7e1a04f 100644 --- a/dimos/spec/map.py +++ b/dimos/spec/mapping.py @@ -19,12 +19,8 @@ from dimos.msgs.sensor_msgs import PointCloud2 -class Global3DMap(Protocol): - global_pointcloud: Out[PointCloud2] - - -class GlobalMap(Protocol): - global_map: Out[OccupancyGrid] +class GlobalPointcloud(Protocol): + global_map: Out[PointCloud2] class GlobalCostmap(Protocol): diff --git a/dimos/spec/perception.py b/dimos/spec/perception.py index 4dc682523f..1cecdb4d2f 100644 --- a/dimos/spec/perception.py +++ b/dimos/spec/perception.py @@ -15,7 +15,8 @@ from typing import Protocol from dimos.core import Out -from dimos.msgs.sensor_msgs import CameraInfo, Image as ImageMsg, PointCloud2 +from dimos.msgs.nav_msgs.Odometry import Odometry as OdometryMsg +from dimos.msgs.sensor_msgs import CameraInfo, Image as ImageMsg, Imu, PointCloud2 class Image(Protocol): @@ -34,3 +35,17 @@ class DepthCamera(Camera): class Pointcloud(Protocol): pointcloud: Out[PointCloud2] + + +class IMU(Protocol): + imu: Out[Imu] + + +class Odometry(Protocol): + odometry: Out[OdometryMsg] + + +class Lidar(Protocol): + """LiDAR sensor providing point clouds.""" + + lidar: Out[PointCloud2] diff 
--git a/dimos/spec/test_utils.py b/dimos/spec/test_utils.py new file mode 100644 index 0000000000..40fe05facc --- /dev/null +++ b/dimos/spec/test_utils.py @@ -0,0 +1,80 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Protocol + +import pytest + +from dimos.spec.utils import Spec, is_spec, spec_annotation_compliance, spec_structural_compliance + + +class NormalProtocol(Protocol): + def foo(self) -> int: ... + + +class SpecProtocol(Spec, Protocol): + def foo(self) -> int: ... 
+ + +def test_is_spec_recognizes_spec_protocol() -> None: + assert is_spec(SpecProtocol) is True + + +def test_is_spec_rejects_plain_protocol_and_base() -> None: + assert is_spec(NormalProtocol) is False + assert is_spec(Spec) is False + + +def test_is_spec_rejects_non_type() -> None: + assert is_spec(object()) is False + + +class MySpec(Spec, Protocol): + def foo(self) -> int: + return 1 + + +class StructurallyCompliant: + def foo(self) -> str: + return "ok" + + +class FullyCompliant: + def foo(self) -> int: + return 1 + + +class NotCompliant: + pass + + +def test_spec_structural_compliance_matches_by_structure() -> None: + assert spec_structural_compliance(NotCompliant(), MySpec) is False + assert spec_structural_compliance(StructurallyCompliant(), MySpec) is True + assert spec_structural_compliance(FullyCompliant(), MySpec) is True + + +def test_spec_structural_compliance_rejects_non_spec() -> None: + with pytest.raises(TypeError): + spec_structural_compliance(StructurallyCompliant(), NormalProtocol) # type: ignore[arg-type] + + +def test_spec_annotation_compliance_requires_matching_annotations() -> None: + assert spec_annotation_compliance(StructurallyCompliant(), MySpec) is False + assert spec_annotation_compliance(FullyCompliant(), MySpec) is True + + +def test_spec_annotation_compliance_rejects_non_spec() -> None: + with pytest.raises(TypeError): + spec_annotation_compliance(StructurallyCompliant(), NormalProtocol) # type: ignore[arg-type] diff --git a/dimos/spec/utils.py b/dimos/spec/utils.py new file mode 100644 index 0000000000..b9786b91b5 --- /dev/null +++ b/dimos/spec/utils.py @@ -0,0 +1,129 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import inspect +from typing import Any, Protocol, runtime_checkable + +from annotation_protocol import AnnotationProtocol # type: ignore[import-not-found,import-untyped] +from typing_extensions import is_protocol + + +# Allows us to differentiate plain Protocols from Module-Spec Protocols +class Spec(Protocol): + pass + + +def is_spec(cls: Any) -> bool: + """ + Example: + class NormalProtocol(Protocol): + def foo(self) -> int: ... + + class SpecProtocol(Spec, Protocol): + def foo(self) -> int: ... + + is_spec(NormalProtocol) # False + is_spec(SpecProtocol) # True + """ + return inspect.isclass(cls) and is_protocol(cls) and Spec in cls.__mro__ and cls is not Spec + + +def spec_structural_compliance( + obj: Any, + spec: Any, +) -> bool: + """ + Example: + class MySpec(Spec, Protocol): + def foo(self) -> int: ... + + class StructurallyCompliant1: + def foo(self) -> list[list[list[list[list[int]]]]]: ... + class StructurallyCompliant2: + def foo(self) -> str: ... + class FullyCompliant: + def foo(self) -> int: ... + class NotCompliant: + ... 
+ + assert False == spec_structural_compliance(NotCompliant(), MySpec) + assert True == spec_structural_compliance(StructurallyCompliant1(), MySpec) + assert True == spec_structural_compliance(StructurallyCompliant2(), MySpec) + assert True == spec_structural_compliance(FullyCompliant(), MySpec) + """ + if not is_spec(spec): + raise TypeError("Trying to check if `obj` implements `spec` but spec itself was not a Spec") + + # python's built-in protocol check ignores annotations (only structural check) + return isinstance(obj, runtime_checkable(spec)) + + +def spec_annotation_compliance( + obj: Any, + proto: Any, +) -> bool: + """ + Example: + class MySpec(Spec, Protocol): + def foo(self) -> int: ... + + class StructurallyCompliant1: + def foo(self) -> list[list[list[list[list[int]]]]]: ... + class FullyCompliant: + def foo(self) -> int: ... + + assert False == spec_annotation_compliance(StructurallyCompliant1(), MySpec) + assert True == spec_annotation_compliance(FullyCompliant(), MySpec) + """ + if not is_spec(proto): + raise TypeError("Not a Spec") + + # Build a *strict* runtime protocol dynamically + strict_proto = type( + f"Strict{proto.__name__}", + (AnnotationProtocol,), + dict(proto.__dict__), + ) + + return isinstance(obj, strict_proto) + + +def get_protocol_method_signatures(proto: type[object]) -> dict[str, inspect.Signature]: + """ + Return a mapping of method_name -> inspect.Signature + for all methods required by a Protocol. 
+ """ + if not is_protocol(proto): + raise TypeError(f"{proto} is not a Protocol") + + methods: dict[str, inspect.Signature] = {} + + # Walk MRO so inherited protocol methods are included + for cls in reversed(proto.__mro__): + if cls is Protocol: # type: ignore[comparison-overlap] + continue + + for name, value in cls.__dict__.items(): + if name.startswith("_"): + continue + + if callable(value): + try: + sig = inspect.signature(value) + except (TypeError, ValueError): + continue + + methods[name] = sig + + return methods diff --git a/dimos/stream/video_operators.py b/dimos/stream/video_operators.py index 548bba7598..a94b6fa3a1 100644 --- a/dimos/stream/video_operators.py +++ b/dimos/stream/video_operators.py @@ -231,7 +231,7 @@ def _encode_image(image: np.ndarray) -> tuple[str, tuple[int, int]]: # type: ig _, buffer = cv2.imencode(".jpg", image) if buffer is None: raise ValueError("Failed to encode image") - base64_image = base64.b64encode(buffer).decode("utf-8") + base64_image = base64.b64encode(buffer.tobytes()).decode("utf-8") return base64_image, (width, height) except Exception as e: raise e diff --git a/dimos/teleop/README.md b/dimos/teleop/README.md new file mode 100644 index 0000000000..e64a7b43ea --- /dev/null +++ b/dimos/teleop/README.md @@ -0,0 +1,81 @@ +# Teleop Stack + +Teleoperation modules for DimOS. Currently supports Meta Quest 3 VR controllers. + +## Architecture + +``` +Quest Browser (WebXR) + │ + │ PoseStamped + Joy via WebSocket + ▼ +Deno Bridge (teleop_server.ts) + │ + │ LCM topics + ▼ +QuestTeleopModule + │ WebXR → robot frame transform + │ Pose computation + button state packing + ▼ +PoseStamped / TwistStamped / Buttons outputs +``` + +## Modules + +### QuestTeleopModule +Base teleop module. Gets controller data, computes output poses, and publishes them. Default engage: hold primary button (X/A). Subclass to customize. + +### ArmTeleopModule +Toggle-based engage — press primary button once to engage, press again to disengage. 
+ +### TwistTeleopModule +Outputs TwistStamped (linear + angular velocity) instead of PoseStamped. + +### VisualizingTeleopModule +Adds Rerun visualization for debugging. Extends ArmTeleopModule (toggle engage). + +## Subclassing + +`QuestTeleopModule` is designed for extension. Override these methods: + +| Method | Purpose | +|--------|---------| +| `_handle_engage()` | Customize engage/disengage logic | +| `_should_publish()` | Add conditions for when to publish | +| `_get_output_pose()` | Customize pose computation | +| `_publish_msg()` | Change output format | +| `_publish_button_state()` | Change button output | + +### Rules for subclasses + +- **Do not acquire `self._lock` in overrides.** The control loop already holds it. + Access `self._controllers`, `self._current_poses`, `self._is_engaged`, etc. directly. +- **Keep overrides fast** — they run inside the control loop at `control_loop_hz`. + +## File Structure + +``` +teleop/ +├── quest/ +│ ├── quest_teleop_module.py # Base Quest teleop module +│ ├── quest_extensions.py # ArmTeleop, TwistTeleop, VisualizingTeleop +│ ├── quest_types.py # QuestControllerState, Buttons +│ └── web/ # Deno bridge + WebXR client +│ ├── teleop_server.ts +│ └── static/index.html +├── phone/ +│ ├── phone_teleop_module.py # Base Phone teleop module +│ ├── phone_extensions.py # SimplePhoneTeleop +│ ├── blueprints.py # Pre-wired configurations +│ └── web/ # Deno bridge + mobile web app +│ ├── teleop_server.ts +│ └── static/index.html +├── utils/ +│ ├── teleop_transforms.py # WebXR → robot frame math +│ └── teleop_visualization.py # Rerun visualization helpers +└── blueprints.py # Module blueprints for easy instantiation +``` + +## Quick Start + +See [Quest Web README](quest/web/README.md) for running the Deno bridge and connecting the Quest headset. 
diff --git a/dimos/teleop/__init__.py b/dimos/teleop/__init__.py new file mode 100644 index 0000000000..8324113111 --- /dev/null +++ b/dimos/teleop/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Teleoperation modules for DimOS.""" diff --git a/dimos/teleop/keyboard/__init__.py b/dimos/teleop/keyboard/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dimos/teleop/keyboard/keyboard_teleop_module.py b/dimos/teleop/keyboard/keyboard_teleop_module.py new file mode 100644 index 0000000000..ff42ce9a1a --- /dev/null +++ b/dimos/teleop/keyboard/keyboard_teleop_module.py @@ -0,0 +1,219 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Keyboard-based cartesian teleop module for arm teleoperation. + +Wraps a pygame UI as a DimOS Module so it can be composed with coordinator +blueprints via autoconnect. 
+ +Keyboard controls: + W/S: +X/-X (forward/backward) + A/D: -Y/+Y (left/right) + Q/E: +Z/-Z (up/down) + R/F: +Roll/-Roll + T/G: +Pitch/-Pitch + Y/H: +Yaw/-Yaw + SPACE: Reset to home pose + ESC: Quit +""" + +from dataclasses import dataclass +import os +import threading +import time +from typing import Any + +import numpy as np + +try: + import pygame +except ImportError: + pygame = None # type: ignore[assignment] + +from dimos.control.examples.cartesian_ik_jogger import JogState +from dimos.core import Module, Out, rpc +from dimos.core.module import ModuleConfig +from dimos.msgs.geometry_msgs import PoseStamped + +# Force X11 driver to avoid OpenGL threading issues +os.environ["SDL_VIDEODRIVER"] = "x11" + +# Jog speeds +LINEAR_SPEED = 0.05 # m/s +ANGULAR_SPEED = 0.5 # rad/s + +# Workspace bounds +X_LIMITS = (-0.5, 0.5) +Y_LIMITS = (-0.5, 0.5) +Z_LIMITS = (-0.2, 0.6) + + +def _clamp(value: float, min_val: float, max_val: float) -> float: + return max(min_val, min(max_val, value)) + + +@dataclass +class KeyboardTeleopConfig(ModuleConfig): + model_path: str = "" + ee_joint_id: int = 6 + task_name: str = "cartesian_ik_arm" + + +class KeyboardTeleopModule(Module[KeyboardTeleopConfig]): + """Pygame-based cartesian keyboard teleop as a DimOS Module. + + Publishes absolute EE PoseStamped commands for CartesianIKTask. + """ + + default_config = KeyboardTeleopConfig + + cartesian_command: Out[PoseStamped] + + _stop_event: threading.Event + _thread: threading.Thread | None = None + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self._stop_event = threading.Event() + + @rpc + def start(self) -> None: + super().start() + if pygame is None: + raise ImportError("pygame not installed. 
Install with: pip install pygame") + + self._stop_event.clear() + self._thread = threading.Thread(target=self._pygame_loop, daemon=True) + self._thread.start() + + @rpc + def stop(self) -> None: + self._stop_event.set() + if self._thread is not None: + self._thread.join(2) + super().stop() + + def _pygame_loop(self) -> None: + model_path = str(self.config.model_path) + ee_joint_id = self.config.ee_joint_id + task_name = self.config.task_name + + # Initialize pose from forward kinematics at zero configuration + home_pose = JogState.from_fk(model_path, ee_joint_id) + current_pose = home_pose.copy() + + # Publish initial pose + self.cartesian_command.publish(current_pose.to_pose_stamped(task_name)) + + pygame.init() + screen = pygame.display.set_mode((600, 400), pygame.SWSURFACE) + pygame.display.set_caption(f"Keyboard Teleop — {task_name}") + font = pygame.font.Font(None, 28) + clock = pygame.time.Clock() + last_time = time.perf_counter() + + while not self._stop_event.is_set(): + dt = time.perf_counter() - last_time + last_time = time.perf_counter() + + for event in pygame.event.get(): + if event.type == pygame.QUIT: + self._stop_event.set() + elif event.type == pygame.KEYDOWN: + if event.key == pygame.K_ESCAPE: + self._stop_event.set() + elif event.key == pygame.K_SPACE: + current_pose = home_pose.copy() + + keys = pygame.key.get_pressed() + + # Linear motion + if keys[pygame.K_w]: + current_pose.x += LINEAR_SPEED * dt + if keys[pygame.K_s]: + current_pose.x -= LINEAR_SPEED * dt + if keys[pygame.K_a]: + current_pose.y -= LINEAR_SPEED * dt + if keys[pygame.K_d]: + current_pose.y += LINEAR_SPEED * dt + if keys[pygame.K_q]: + current_pose.z += LINEAR_SPEED * dt + if keys[pygame.K_e]: + current_pose.z -= LINEAR_SPEED * dt + + # Angular motion + if keys[pygame.K_r]: + current_pose.roll += ANGULAR_SPEED * dt + if keys[pygame.K_f]: + current_pose.roll -= ANGULAR_SPEED * dt + if keys[pygame.K_t]: + current_pose.pitch += ANGULAR_SPEED * dt + if keys[pygame.K_g]: + 
current_pose.pitch -= ANGULAR_SPEED * dt + if keys[pygame.K_y]: + current_pose.yaw += ANGULAR_SPEED * dt + if keys[pygame.K_h]: + current_pose.yaw -= ANGULAR_SPEED * dt + + # Clamp to workspace limits + current_pose.x = _clamp(current_pose.x, *X_LIMITS) + current_pose.y = _clamp(current_pose.y, *Y_LIMITS) + current_pose.z = _clamp(current_pose.z, *Z_LIMITS) + + # Publish + self.cartesian_command.publish(current_pose.to_pose_stamped(task_name)) + + # Draw UI + screen.fill((30, 30, 30)) + y_pos = 20 + + title = font.render(f"Keyboard Teleop — {task_name}", True, (255, 255, 255)) + screen.blit(title, (20, y_pos)) + y_pos += 40 + + pos_text = ( + f"Position: X={current_pose.x:.3f} Y={current_pose.y:.3f} Z={current_pose.z:.3f}" + ) + screen.blit(font.render(pos_text, True, (100, 255, 100)), (20, y_pos)) + y_pos += 30 + + ori_text = ( + f"Orientation: R={np.degrees(current_pose.roll):.1f}° " + f"P={np.degrees(current_pose.pitch):.1f}° " + f"Y={np.degrees(current_pose.yaw):.1f}°" + ) + screen.blit(font.render(ori_text, True, (100, 200, 255)), (20, y_pos)) + y_pos += 40 + + controls = [ + ("W/S", "+X/-X (forward/back)"), + ("A/D", "-Y/+Y (left/right)"), + ("Q/E", "+Z/-Z (up/down)"), + ("R/F", "+Roll/-Roll"), + ("T/G", "+Pitch/-Pitch"), + ("Y/H", "+Yaw/-Yaw"), + ("SPACE", "Reset to home"), + ("ESC", "Quit"), + ] + for key, desc in controls: + screen.blit(font.render(f"{key}: {desc}", True, (180, 180, 180)), (20, y_pos)) + y_pos += 25 + + pygame.display.flip() + clock.tick(50) + + pygame.quit() + + +keyboard_teleop_module = KeyboardTeleopModule.blueprint diff --git a/dimos/teleop/phone/README.md b/dimos/teleop/phone/README.md new file mode 100644 index 0000000000..dd2af02281 --- /dev/null +++ b/dimos/teleop/phone/README.md @@ -0,0 +1,70 @@ +# Phone Teleop + +Teleoperation via smartphone motion sensors. Tilt to drive. 
+ +## Architecture + +``` +Phone Browser (DeviceOrientation + DeviceMotion) + | + | TwistStamped + Bool via WebSocket + v +Deno Bridge (teleop_server.ts) + | + | LCM topics + v +PhoneTeleopModule + | Orientation delta from home pose + | Gains -> velocity commands + v +TwistStamped / Twist outputs +``` + +## Modules + +### PhoneTeleopModule +Base module. Receives raw sensor data and button state. On engage (button hold), captures home orientation and publishes deltas as TwistStamped. Launches the Deno bridge server automatically. + +### SimplePhoneTeleop +Filters to mobile-base axes (linear.x, linear.y, angular.z) and publishes as `Twist` on `cmd_vel` for direct autoconnect wiring with any module that has `cmd_vel: In[Twist]`. + +## Subclassing + +Override these methods: + +| Method | Purpose | +|--------|---------| +| `_handle_engage()` | Customize engage/disengage logic | +| `_should_publish()` | Add conditions for when to publish | +| `_publish_msg()` | Change output format | + +**Do not acquire `self._lock` in overrides.** The control loop already holds it. + +## LCM Topics + +| Topic | Type | Description | +|-------|------|-------------| +| `/phone_sensors` | TwistStamped | linear=(roll,pitch,yaw) deg, angular=(gyro) deg/s | +| `/phone_button` | Bool | Teleop engage button (1=held) | +| `/teleop/twist` | TwistStamped | Output velocity command | + +## Running + +```bash +dimos run phone-go2-teleop # Go2 +dimos run simple-phone-teleop # Generic ground robot +``` + +Server starts on port `8444`. Open `https://:8444` on phone, accept the self-signed certificate, allow sensor permissions, connect, hold button to drive. 
+ +## File Structure + +``` +phone/ +├── phone_teleop_module.py # Base phone teleop module +├── phone_extensions.py # SimplePhoneTeleop +├── blueprints.py # Pre-wired configurations +└── web/ + ├── teleop_server.ts # Deno WSS-to-LCM bridge + └── static/index.html # Mobile web app +``` diff --git a/dimos/teleop/phone/__init__.py b/dimos/teleop/phone/__init__.py new file mode 100644 index 0000000000..552032a47b --- /dev/null +++ b/dimos/teleop/phone/__init__.py @@ -0,0 +1,33 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Phone teleoperation module for DimOS.""" + +from dimos.teleop.phone.phone_extensions import ( + SimplePhoneTeleop, + simple_phone_teleop_module, +) +from dimos.teleop.phone.phone_teleop_module import ( + PhoneTeleopConfig, + PhoneTeleopModule, + phone_teleop_module, +) + +__all__ = [ + "PhoneTeleopConfig", + "PhoneTeleopModule", + "SimplePhoneTeleop", + "phone_teleop_module", + "simple_phone_teleop_module", +] diff --git a/dimos/teleop/phone/blueprints.py b/dimos/teleop/phone/blueprints.py new file mode 100644 index 0000000000..6328af8612 --- /dev/null +++ b/dimos/teleop/phone/blueprints.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dimos.core.blueprints import autoconnect +from dimos.robot.unitree.go2.blueprints.basic.unitree_go2_basic import unitree_go2_basic +from dimos.teleop.phone.phone_extensions import simple_phone_teleop_module + +# Simple phone teleop (mobile base axis filtering + cmd_vel output) +simple_phone_teleop = autoconnect( + simple_phone_teleop_module(), +) + +# Phone teleop wired to Unitree Go2 +phone_go2_teleop = autoconnect( + simple_phone_teleop_module(), + unitree_go2_basic, +) + + +__all__ = ["phone_go2_teleop", "simple_phone_teleop"] diff --git a/dimos/teleop/phone/phone_extensions.py b/dimos/teleop/phone/phone_extensions.py new file mode 100644 index 0000000000..f0a8fd4d01 --- /dev/null +++ b/dimos/teleop/phone/phone_extensions.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Phone teleop module extensions. 
+ +Available subclasses: + - SimplePhoneTeleop: Filters to ground robot axes and outputs cmd_vel: Out[Twist] +""" + +from dimos.core import Out +from dimos.msgs.geometry_msgs import Twist, TwistStamped, Vector3 +from dimos.teleop.phone.phone_teleop_module import PhoneTeleopModule + + +class SimplePhoneTeleop(PhoneTeleopModule): + """Phone teleop for ground robots. + + Filters the raw 6-axis twist to mobile base axes (linear.x, linear.y, angular.z) + and publishes as Twist on cmd_vel for direct autoconnect wiring with any + module that has cmd_vel: In[Twist]. + """ + + cmd_vel: Out[Twist] + + def _publish_msg(self, output_msg: TwistStamped) -> None: + self.cmd_vel.publish( + Twist( + linear=Vector3(x=output_msg.linear.x, y=output_msg.linear.y, z=0.0), + angular=Vector3(x=0.0, y=0.0, z=output_msg.linear.z), + ) + ) + + +simple_phone_teleop_module = SimplePhoneTeleop.blueprint + +__all__ = [ + "SimplePhoneTeleop", + "simple_phone_teleop_module", +] diff --git a/dimos/teleop/phone/phone_teleop_module.py b/dimos/teleop/phone/phone_teleop_module.py new file mode 100644 index 0000000000..c0da85c27c --- /dev/null +++ b/dimos/teleop/phone/phone_teleop_module.py @@ -0,0 +1,308 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Phone Teleoperation Module. + +Receives raw sensor data (TwistStamped) and button state (Bool) from the +phone web app via the Deno LCM bridge. 
Computes orientation deltas from +an initial orientation captured on engage, converts to TwistStamped velocity +commands via configurable gains, and publishes. + +""" + +from dataclasses import dataclass +from pathlib import Path +import shutil +import signal +import subprocess +import threading +import time +from typing import Any + +from reactivex.disposable import Disposable + +from dimos.core import In, Module, Out, rpc +from dimos.core.module import ModuleConfig +from dimos.msgs.geometry_msgs import Twist, TwistStamped, Vector3 +from dimos.msgs.std_msgs.Bool import Bool +from dimos.utils.logging_config import setup_logger + +logger = setup_logger() + + +@dataclass +class PhoneTeleopConfig(ModuleConfig): + control_loop_hz: float = 50.0 + linear_gain: float = 1.0 / 30.0 # Gain: maps degrees of tilt to m/s. 30 deg -> 1.0 m/s + angular_gain: float = 1.0 / 30.0 # Gain: maps gyro deg/s to rad/s. 30 deg/s -> 1.0 rad/s + + +class PhoneTeleopModule(Module[PhoneTeleopConfig]): + """ + Receives raw sensor data from the phone web app: + - TwistStamped: linear=(roll, pitch, yaw) deg, angular=(gyro) deg/s + - Bool: teleop button state (True = held) + + Outputs: + - twist_output: TwistStamped (velocity command for robot) + """ + + default_config = PhoneTeleopConfig + + # Inputs from Deno bridge + phone_sensors: In[TwistStamped] + phone_button: In[Bool] + # Output: velocity command to robot + twist_output: Out[TwistStamped] + + # ------------------------------------------------------------------------- + # Initialization + # ------------------------------------------------------------------------- + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + self._is_engaged: bool = False + self._teleop_button: bool = False + self._current_sensors: TwistStamped | None = None + self._initial_sensors: TwistStamped | None = None + self._lock = threading.RLock() + + # Control loop + self._control_loop_thread: threading.Thread | None = None + 
self._stop_event = threading.Event() + + # Deno bridge server + self._server_process: subprocess.Popen[bytes] | None = None + self._server_script = Path(__file__).parent / "web" / "teleop_server.ts" + + # ------------------------------------------------------------------------- + # Lifecycle + # ------------------------------------------------------------------------- + + @rpc + def start(self) -> None: + super().start() + for stream, handler in ( + (self.phone_sensors, self._on_sensors), + (self.phone_button, self._on_button), + ): + self._disposables.add(Disposable(stream.subscribe(handler))) # type: ignore[attr-defined] + self._start_server() + self._start_control_loop() + + @rpc + def stop(self) -> None: + self._stop_control_loop() + self._stop_server() + super().stop() + + # ------------------------------------------------------------------------- + # Internal engage / disengage (assumes lock is held) + # ------------------------------------------------------------------------- + + def _engage(self) -> bool: + """Engage: capture current sensors as initial""" + if self._current_sensors is None: + logger.error("Engage failed: no sensor data yet") + return False + self._initial_sensors = self._current_sensors + self._is_engaged = True + logger.info("Phone teleop engaged") + return True + + def _disengage(self) -> None: + """Disengage: stop publishing""" + self._is_engaged = False + self._initial_sensors = None + logger.info("Phone teleop disengaged") + + # ------------------------------------------------------------------------- + # Callbacks + # ------------------------------------------------------------------------- + + def _on_sensors(self, msg: TwistStamped) -> None: + """Callback for raw sensor TwistStamped from the phone""" + with self._lock: + self._current_sensors = msg + + def _on_button(self, msg: Bool) -> None: + """Callback for teleop button state.""" + with self._lock: + self._teleop_button = bool(msg.data) + + # 
------------------------------------------------------------------------- + # Deno Bridge Server + # ------------------------------------------------------------------------- + + def _start_server(self) -> None: + """Launch the Deno WebSocket-to-LCM bridge server as a subprocess.""" + if self._server_process is not None and self._server_process.poll() is None: + logger.warning("Deno bridge already running", pid=self._server_process.pid) + return + + if shutil.which("deno") is None: + logger.error( + "Deno is not installed. Install it with: curl -fsSL https://deno.land/install.sh | sh" + ) + return + + script = str(self._server_script) + cmd = [ + "deno", + "run", + "--allow-net", + "--allow-read", + "--allow-run", + "--allow-write", + "--unstable-net", + script, + ] + try: + self._server_process = subprocess.Popen(cmd) + logger.info(f"Deno bridge server started (pid {self._server_process.pid})") + except OSError as e: + logger.error(f"Failed to start Deno bridge: {e}") + + def _stop_server(self) -> None: + """Terminate the Deno bridge server subprocess.""" + if self._server_process is None or self._server_process.poll() is not None: + self._server_process = None + return + + logger.info("Stopping Deno bridge server", pid=self._server_process.pid) + self._server_process.send_signal(signal.SIGTERM) + try: + self._server_process.wait(timeout=3) + except subprocess.TimeoutExpired: + logger.warning( + "Deno bridge did not exit, sending SIGKILL", pid=self._server_process.pid + ) + self._server_process.kill() + try: + self._server_process.wait(timeout=5) + except subprocess.TimeoutExpired: + logger.error("Deno bridge did not exit after SIGKILL") + logger.info("Deno bridge server stopped") + self._server_process = None + + # ------------------------------------------------------------------------- + # Control Loop + # ------------------------------------------------------------------------- + + def _start_control_loop(self) -> None: + if self._control_loop_thread is not 
None and self._control_loop_thread.is_alive(): + return + + self._stop_event.clear() + self._control_loop_thread = threading.Thread( + target=self._control_loop, + daemon=True, + name="PhoneTeleopControlLoop", + ) + self._control_loop_thread.start() + logger.info(f"Control loop started at {self.config.control_loop_hz} Hz") + + def _stop_control_loop(self) -> None: + self._stop_event.set() + if self._control_loop_thread is not None: + self._control_loop_thread.join(timeout=1.0) + self._control_loop_thread = None + logger.info("Control loop stopped") + + def _control_loop(self) -> None: + period = 1.0 / self.config.control_loop_hz + + while not self._stop_event.is_set(): + loop_start = time.perf_counter() + with self._lock: + self._handle_engage() + + if self._is_engaged: + output_twist = self._get_output_twist() + if output_twist is not None: + self._publish_msg(output_twist) + + elapsed = time.perf_counter() - loop_start + sleep_time = period - elapsed + if sleep_time > 0: + self._stop_event.wait(sleep_time) + + # ------------------------------------------------------------------------- + # Control Loop Internal Methods + # ------------------------------------------------------------------------- + + def _handle_engage(self) -> None: + """ + Override to customize engagement logic. + Default: button hold = engaged, release = disengaged. + """ + if self._teleop_button: + if not self._is_engaged: + self._engage() + else: + if self._is_engaged: + self._disengage() + + def _get_output_twist(self) -> TwistStamped | None: + """Compute twist from orientation delta. + Override to customize twist computation (e.g., apply scaling, filtering). + Default: Computes delta angles from initial orientation, applies gains. 
+ """ + current = self._current_sensors + initial = self._initial_sensors + if current is None or initial is None: + return None + + delta: Twist = Twist(current) - Twist(initial) + + # Handle yaw wraparound (linear.z = yaw, 0-360 degrees) + d_yaw = delta.linear.z + if d_yaw > 180: + d_yaw -= 360 + elif d_yaw < -180: + d_yaw += 360 + + cfg = self.config + return TwistStamped( + ts=current.ts, + frame_id="phone", + linear=Vector3( + x=-delta.linear.y * cfg.linear_gain, # pitch forward -> drive forward + y=-delta.linear.x * cfg.linear_gain, # roll right -> strafe right + z=d_yaw * cfg.linear_gain, # yaw delta + ), + angular=Vector3( + x=current.angular.x * cfg.angular_gain, + y=current.angular.y * cfg.angular_gain, + z=current.angular.z * cfg.angular_gain, + ), + ) + + def _publish_msg(self, output_msg: TwistStamped) -> None: + """ + Override to customize output (e.g., apply limits, remap axes). + """ + self.twist_output.publish(output_msg) + + +phone_teleop_module = PhoneTeleopModule.blueprint + +__all__ = [ + "PhoneTeleopConfig", + "PhoneTeleopModule", + "phone_teleop_module", +] diff --git a/dimos/teleop/phone/web/static/index.html b/dimos/teleop/phone/web/static/index.html new file mode 100644 index 0000000000..6fad23b6c8 --- /dev/null +++ b/dimos/teleop/phone/web/static/index.html @@ -0,0 +1,393 @@ + + + + + + DimOS Phone Teleop + + + +

DimOS Phone Teleop

+ + +
+ Sensors: off + WS: disconnected +
+ + +
+ + +
+ + +
+

Sensors

+
+
+
X
+
Y
+
Z
+ +
Ori
+
0.0°
+
0.0°
+
0.0°
+ +
Gyro
+
0.0
+
0.0
+
0.0
+
+
+ + + + + + + diff --git a/dimos/teleop/phone/web/teleop_server.ts b/dimos/teleop/phone/web/teleop_server.ts new file mode 100755 index 0000000000..26202cf166 --- /dev/null +++ b/dimos/teleop/phone/web/teleop_server.ts @@ -0,0 +1,85 @@ +#!/usr/bin/env -S deno run --allow-net --allow-read --allow-run --allow-write --unstable-net + +// WebSocket to LCM Bridge for Phone Teleop +// Forwards twist data from Phone browser to LCM + +import { LCM } from "jsr:@dimos/lcm"; +import { dirname, fromFileUrl, join } from "jsr:@std/path"; + +const PORT = 8444; + +// Resolve paths relative to script location +const scriptDir = dirname(fromFileUrl(import.meta.url)); +const certsDir = join(scriptDir, "../../../../assets/teleop_certs"); +const certPath = join(certsDir, "cert.pem"); +const keyPath = join(certsDir, "key.pem"); + +// Auto-generate self-signed certificates if they don't exist +async function ensureCerts(): Promise<{ cert: string; key: string }> { + try { + const cert = await Deno.readTextFile(certPath); + const key = await Deno.readTextFile(keyPath); + return { cert, key }; + } catch { + console.log("Generating self-signed certificates..."); + await Deno.mkdir(certsDir, { recursive: true }); + const cmd = new Deno.Command("openssl", { + args: [ + "req", "-x509", "-newkey", "rsa:2048", + "-keyout", keyPath, "-out", certPath, + "-days", "365", "-nodes", "-subj", "/CN=localhost" + ], + }); + const { code } = await cmd.output(); + if (code !== 0) { + throw new Error("Failed to generate certificates. Is openssl installed?"); + } + console.log("Certificates generated in assets/teleop_certs/"); + return { + cert: await Deno.readTextFile(certPath), + key: await Deno.readTextFile(keyPath), + }; + } +} + +const { cert, key } = await ensureCerts(); + +const lcm = new LCM(); +await lcm.start(); + +// Binds to all interfaces so the phone can reach the server over LAN. 
+Deno.serve({ port: PORT, cert, key }, async (req) => { + const url = new URL(req.url); + + if (req.headers.get("upgrade") === "websocket") { + const { socket, response } = Deno.upgradeWebSocket(req); + socket.onopen = () => console.log("Phone client connected"); + socket.onclose = () => console.log("Phone client disconnected"); + + // Forward binary LCM packets from browser directly to UDP + socket.binaryType = "arraybuffer"; + socket.onmessage = async (event) => { + if (event.data instanceof ArrayBuffer) { + const packet = new Uint8Array(event.data); + try { + await lcm.publishPacket(packet); + } catch (e) { + console.error("Forward error:", e); + } + } + }; + + return response; + } + + if (url.pathname === "/" || url.pathname === "/index.html") { + const html = await Deno.readTextFile(new URL("./static/index.html", import.meta.url)); + return new Response(html, { headers: { "content-type": "text/html" } }); + } + + return new Response("Not found", { status: 404 }); +}); + +console.log(`Phone Teleop Server: https://localhost:${PORT}`); + +await lcm.run(); diff --git a/dimos/teleop/quest/__init__.py b/dimos/teleop/quest/__init__.py new file mode 100644 index 0000000000..83daf4347b --- /dev/null +++ b/dimos/teleop/quest/__init__.py @@ -0,0 +1,54 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Quest teleoperation module.""" + +from dimos.teleop.quest.quest_extensions import ( + ArmTeleopModule, + TwistTeleopModule, + VisualizingTeleopModule, + arm_teleop_module, + twist_teleop_module, + visualizing_teleop_module, +) +from dimos.teleop.quest.quest_teleop_module import ( + Hand, + QuestTeleopConfig, + QuestTeleopModule, + QuestTeleopStatus, + quest_teleop_module, +) +from dimos.teleop.quest.quest_types import ( + Buttons, + QuestControllerState, + ThumbstickState, +) + +__all__ = [ + "ArmTeleopModule", + "Buttons", + "Hand", + "QuestControllerState", + "QuestTeleopConfig", + "QuestTeleopModule", + "QuestTeleopStatus", + "ThumbstickState", + "TwistTeleopModule", + "VisualizingTeleopModule", + # Blueprints + "arm_teleop_module", + "quest_teleop_module", + "twist_teleop_module", + "visualizing_teleop_module", +] diff --git a/dimos/teleop/quest/blueprints.py b/dimos/teleop/quest/blueprints.py new file mode 100644 index 0000000000..c46bf657ff --- /dev/null +++ b/dimos/teleop/quest/blueprints.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Teleop blueprints for testing and deployment.""" + +from dimos.control.blueprints import ( + coordinator_teleop_dual, + coordinator_teleop_piper, + coordinator_teleop_xarm6, +) +from dimos.core.blueprints import autoconnect +from dimos.core.transport import LCMTransport +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.teleop.quest.quest_extensions import arm_teleop_module, visualizing_teleop_module +from dimos.teleop.quest.quest_types import Buttons + +# ----------------------------------------------------------------------------- +# Quest Teleop Blueprints +# ----------------------------------------------------------------------------- + +# Arm teleop with press-and-hold engage +arm_teleop = autoconnect( + arm_teleop_module(), +).transports( + { + ("left_controller_output", PoseStamped): LCMTransport("/teleop/left_delta", PoseStamped), + ("right_controller_output", PoseStamped): LCMTransport("/teleop/right_delta", PoseStamped), + ("buttons", Buttons): LCMTransport("/teleop/buttons", Buttons), + } +) + +# Arm teleop with Rerun visualization +arm_teleop_visualizing = autoconnect( + visualizing_teleop_module(), +).transports( + { + ("left_controller_output", PoseStamped): LCMTransport("/teleop/left_delta", PoseStamped), + ("right_controller_output", PoseStamped): LCMTransport("/teleop/right_delta", PoseStamped), + ("buttons", Buttons): LCMTransport("/teleop/buttons", Buttons), + } +) + + +# ----------------------------------------------------------------------------- +# Teleop wired to Coordinator (TeleopIK) +# ----------------------------------------------------------------------------- + +# Single XArm6 teleop: right controller -> xarm6 +# Usage: dimos run arm-teleop-xarm6 + +arm_teleop_xarm6 = autoconnect( + arm_teleop_module(task_names={"right": "teleop_xarm"}), + coordinator_teleop_xarm6, +).transports( + { + ("right_controller_output", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + ("buttons", Buttons): 
LCMTransport("/teleop/buttons", Buttons), + } +) + + +# Single Piper teleop: left controller -> piper arm +# Usage: dimos run arm-teleop-piper +arm_teleop_piper = autoconnect( + arm_teleop_module(task_names={"left": "teleop_piper"}), + coordinator_teleop_piper, +).transports( + { + ("left_controller_output", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + ("buttons", Buttons): LCMTransport("/teleop/buttons", Buttons), + } +) + + +# Dual arm teleop: right -> piper, left -> xarm6 (TeleopIK) +arm_teleop_dual = autoconnect( + arm_teleop_module(task_names={"right": "teleop_piper", "left": "teleop_xarm"}), + coordinator_teleop_dual, +).transports( + { + ("right_controller_output", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + ("left_controller_output", PoseStamped): LCMTransport( + "/coordinator/cartesian_command", PoseStamped + ), + ("buttons", Buttons): LCMTransport("/teleop/buttons", Buttons), + } +) + + +__all__ = [ + "arm_teleop", + "arm_teleop_dual", + "arm_teleop_piper", + "arm_teleop_visualizing", + "arm_teleop_xarm6", +] diff --git a/dimos/teleop/quest/quest_extensions.py b/dimos/teleop/quest/quest_extensions.py new file mode 100644 index 0000000000..b4e38de546 --- /dev/null +++ b/dimos/teleop/quest/quest_extensions.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Quest teleop module extensions and subclasses. + +Available subclasses: + - ArmTeleopModule: Per-hand press-and-hold engage (X/A hold to track), task name routing + - TwistTeleopModule: Outputs Twist instead of PoseStamped + - VisualizingTeleopModule: Adds Rerun visualization (inherits press-and-hold engage) +""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any + +from dimos.msgs.geometry_msgs import PoseStamped, TwistStamped +from dimos.teleop.quest.quest_teleop_module import Hand, QuestTeleopConfig, QuestTeleopModule +from dimos.teleop.utils.teleop_visualization import ( + visualize_buttons, + visualize_pose, +) + +if TYPE_CHECKING: + from dimos.core import Out + + +@dataclass +class TwistTeleopConfig(QuestTeleopConfig): + """Configuration for TwistTeleopModule.""" + + linear_scale: float = 1.0 + angular_scale: float = 1.0 + + +# Example implementation to show how to extend QuestTeleopModule for different teleop behaviors and outputs. +class TwistTeleopModule(QuestTeleopModule): + """Quest teleop that outputs TwistStamped instead of PoseStamped. + + Config: + - linear_scale: Scale factor for linear (position) values. Default 1.0. + - angular_scale: Scale factor for angular (orientation) values. Default 1.0. 
+ + Outputs: + - left_twist: TwistStamped (linear + angular velocity) + - right_twist: TwistStamped (linear + angular velocity) + - buttons: Buttons (inherited) + """ + + default_config = TwistTeleopConfig + config: TwistTeleopConfig + + left_twist: Out[TwistStamped] + right_twist: Out[TwistStamped] + + def _publish_msg(self, hand: Hand, output_msg: PoseStamped) -> None: + """Convert PoseStamped to TwistStamped, apply scaling, and publish.""" + twist = TwistStamped( + ts=output_msg.ts, + frame_id=output_msg.frame_id, + linear=output_msg.position * self.config.linear_scale, + angular=output_msg.orientation.to_euler() * self.config.angular_scale, + ) + if hand == Hand.LEFT: + self.left_twist.publish(twist) + else: + self.right_twist.publish(twist) + + +@dataclass +class ArmTeleopConfig(QuestTeleopConfig): + """Configuration for ArmTeleopModule. + + Attributes: + task_names: Mapping of Hand -> coordinator task name. Used to set + frame_id on output PoseStamped so the coordinator routes each + hand's commands to the correct TeleopIKTask. + """ + + task_names: dict[str, str] = field(default_factory=dict) + + +class ArmTeleopModule(QuestTeleopModule): + """Quest teleop with per-hand press-and-hold engage and task name routing. + + Each controller's primary button (X for left, A for right) + engages that hand while held, disengages on release. + + When task_names is configured, output PoseStamped messages have their + frame_id set to the task name, enabling the coordinator to route + each hand's commands to the correct TeleopIKTask. 
+ + Outputs: + - left_controller_output: PoseStamped (inherited) + - right_controller_output: PoseStamped (inherited) + - buttons: Buttons (inherited) + """ + + default_config = ArmTeleopConfig + config: ArmTeleopConfig + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + self._task_names: dict[Hand, str] = { + Hand[k.upper()]: v for k, v in self.config.task_names.items() + } + + def _publish_msg(self, hand: Hand, output_msg: PoseStamped) -> None: + """Stamp frame_id with task name and publish.""" + task_name = self._task_names.get(hand) + if task_name: + output_msg = PoseStamped( + position=output_msg.position, + orientation=output_msg.orientation, + ts=output_msg.ts, + frame_id=task_name, + ) + super()._publish_msg(hand, output_msg) + + +class VisualizingTeleopModule(ArmTeleopModule): + """Quest teleop with Rerun visualization. + + Adds visualization of controller poses and trigger values to Rerun. + Useful for debugging and development. + + Outputs: + - left_controller_output: PoseStamped (inherited) + - right_controller_output: PoseStamped (inherited) + - buttons: Buttons (inherited) + """ + + def _get_output_pose(self, hand: Hand) -> PoseStamped | None: + """Get output pose and visualize in Rerun.""" + output_pose = super()._get_output_pose(hand) + + if output_pose is not None: + current_pose = self._current_poses.get(hand) + controller = self._controllers.get(hand) + if current_pose is not None: + label = "left" if hand == Hand.LEFT else "right" + visualize_pose(current_pose, label) + + if controller: + visualize_buttons( + label, + primary=controller.primary, + secondary=controller.secondary, + grip=controller.grip, + trigger=controller.trigger, + ) + return output_pose + + +# Module blueprints for easy instantiation +twist_teleop_module = TwistTeleopModule.blueprint +arm_teleop_module = ArmTeleopModule.blueprint +visualizing_teleop_module = VisualizingTeleopModule.blueprint + +__all__ = [ + "ArmTeleopConfig", + 
"ArmTeleopModule", + "TwistTeleopModule", + "VisualizingTeleopModule", + "arm_teleop_module", + "twist_teleop_module", + "visualizing_teleop_module", +] diff --git a/dimos/teleop/quest/quest_teleop_module.py b/dimos/teleop/quest/quest_teleop_module.py new file mode 100644 index 0000000000..ea77bb5fc0 --- /dev/null +++ b/dimos/teleop/quest/quest_teleop_module.py @@ -0,0 +1,407 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Quest Teleoperation Module. + +Receives VR controller tracking data via LCM from Deno bridge, +transforms from WebXR to robot frame, computes deltas, and publishes PoseStamped commands. 
+""" + +from dataclasses import dataclass +from enum import IntEnum +from pathlib import Path +import shutil +import signal +import subprocess +import threading +import time +from typing import Any + +from reactivex.disposable import Disposable + +from dimos.core import In, Module, Out, rpc +from dimos.core.module import ModuleConfig +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.msgs.sensor_msgs import Joy +from dimos.teleop.quest.quest_types import Buttons, QuestControllerState +from dimos.teleop.utils.teleop_transforms import webxr_to_robot +from dimos.utils.logging_config import setup_logger + +logger = setup_logger() + + +class Hand(IntEnum): + """Controller hand index.""" + + LEFT = 0 + RIGHT = 1 + + +@dataclass +class QuestTeleopStatus: + """Current teleoperation status.""" + + left_engaged: bool + right_engaged: bool + left_pose: PoseStamped | None + right_pose: PoseStamped | None + buttons: Buttons + + +@dataclass +class QuestTeleopConfig(ModuleConfig): + """Configuration for Quest Teleoperation Module.""" + + control_loop_hz: float = 50.0 + + +class QuestTeleopModule(Module[QuestTeleopConfig]): + """Quest Teleoperation Module for Meta Quest controllers. + + Gets controller data from Deno bridge, computes output poses, and publishes them. Subclass to customize pose + computation, output format, and engage behavior. 
+ + Outputs: + - left_controller_output: PoseStamped (output pose for left hand) + - right_controller_output: PoseStamped (output pose for right hand) + - buttons: Buttons (button states for both controllers) + """ + + default_config = QuestTeleopConfig + + # Inputs from Deno bridge + vr_left_pose: In[PoseStamped] + vr_right_pose: In[PoseStamped] + vr_left_joy: In[Joy] + vr_right_joy: In[Joy] + + # Outputs: delta poses for each controller + left_controller_output: Out[PoseStamped] + right_controller_output: Out[PoseStamped] + buttons: Out[Buttons] + + # ------------------------------------------------------------------------- + # Initialization + # ------------------------------------------------------------------------- + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + # Engage state (per-hand) + self._is_engaged: dict[Hand, bool] = {Hand.LEFT: False, Hand.RIGHT: False} + self._initial_poses: dict[Hand, PoseStamped | None] = {Hand.LEFT: None, Hand.RIGHT: None} + self._current_poses: dict[Hand, PoseStamped | None] = {Hand.LEFT: None, Hand.RIGHT: None} + self._controllers: dict[Hand, QuestControllerState | None] = { + Hand.LEFT: None, + Hand.RIGHT: None, + } + self._lock = threading.RLock() + + # Control loop + self._control_loop_thread: threading.Thread | None = None + self._stop_event = threading.Event() + + # Deno bridge server + self._server_process: subprocess.Popen[bytes] | None = None + self._server_script = Path(__file__).parent / "web" / "teleop_server.ts" + + # ------------------------------------------------------------------------- + # Lifecycle + # ------------------------------------------------------------------------- + + @rpc + def start(self) -> None: + super().start() + + input_streams = { + "vr_left_pose": (self.vr_left_pose, lambda msg: self._on_pose(Hand.LEFT, msg)), + "vr_right_pose": (self.vr_right_pose, lambda msg: self._on_pose(Hand.RIGHT, msg)), + "vr_left_joy": (self.vr_left_joy, lambda 
msg: self._on_joy(Hand.LEFT, msg)), + "vr_right_joy": (self.vr_right_joy, lambda msg: self._on_joy(Hand.RIGHT, msg)), + } + connected = [] + for name, (stream, handler) in input_streams.items(): + if not (stream and stream.transport): # type: ignore[attr-defined] + logger.warning(f"Stream '{name}' has no transport — skipping") + continue + self._disposables.add(Disposable(stream.subscribe(handler))) # type: ignore[attr-defined] + connected.append(name) + + if connected: + logger.info(f"Subscribed to: {', '.join(connected)}") + + self._start_server() + logger.info("Quest Teleoperation Module started") + + @rpc + def stop(self) -> None: + self._stop_control_loop() + self._stop_server() + super().stop() + + # ------------------------------------------------------------------------- + # Internal engage/disengage (assumes lock is held) + # ------------------------------------------------------------------------- + + def _engage(self, hand: Hand | None = None) -> bool: + """Engage a hand. Assumes self._lock is held.""" + hands = [hand] if hand is not None else list(Hand) + for h in hands: + pose = self._current_poses.get(h) + if pose is None: + logger.error(f"Engage failed: {h.name.lower()} controller has no data") + return False + self._initial_poses[h] = pose + self._is_engaged[h] = True + logger.info(f"{h.name} engaged.") + return True + + def _disengage(self, hand: Hand | None = None) -> None: + """Disengage a hand. 
Assumes self._lock is held.""" + hands = [hand] if hand is not None else list(Hand) + for h in hands: + self._is_engaged[h] = False + logger.info(f"{h.name} disengaged.") + + def get_status(self) -> QuestTeleopStatus: + with self._lock: + left = self._controllers.get(Hand.LEFT) + right = self._controllers.get(Hand.RIGHT) + return QuestTeleopStatus( + left_engaged=self._is_engaged[Hand.LEFT], + right_engaged=self._is_engaged[Hand.RIGHT], + left_pose=self._current_poses.get(Hand.LEFT), + right_pose=self._current_poses.get(Hand.RIGHT), + buttons=Buttons.from_controllers(left, right), + ) + + # ------------------------------------------------------------------------- + # Callbacks and Control Loop + # ------------------------------------------------------------------------- + + def _on_pose(self, hand: Hand, pose_stamped: PoseStamped) -> None: + """Callback for controller pose, converting WebXR to robot frame.""" + is_left = hand == Hand.LEFT + robot_pose_stamped = webxr_to_robot(pose_stamped, is_left_controller=is_left) + with self._lock: + self._current_poses[hand] = robot_pose_stamped + + def _on_joy(self, hand: Hand, joy: Joy) -> None: + """Callback for Joy message, parsing into QuestControllerState.""" + is_left = hand == Hand.LEFT + try: + controller = QuestControllerState.from_joy(joy, is_left=is_left) + except ValueError: + logger.warning( + f"Malformed Joy for {hand.name}: axes={len(joy.axes or [])}, buttons={len(joy.buttons or [])}" + ) + return + with self._lock: + self._controllers[hand] = controller + + # ------------------------------------------------------------------------- + # Deno Bridge Server + # ------------------------------------------------------------------------- + + def _start_server(self) -> None: + """Launch the Deno WebSocket-to-LCM bridge server as a subprocess.""" + if self._server_process is not None and self._server_process.poll() is None: + logger.warning("Deno bridge already running", pid=self._server_process.pid) + return + + if 
shutil.which("deno") is None: + logger.error( + "Deno is not installed. Install it with: curl -fsSL https://deno.land/install.sh | sh" + ) + return + + script = str(self._server_script) + cmd = [ + "deno", + "run", + "--allow-net", + "--allow-read", + "--allow-run", + "--allow-write", + "--unstable-net", + script, + ] + try: + self._server_process = subprocess.Popen(cmd) + logger.info(f"Deno bridge server started (pid {self._server_process.pid})") + except OSError as e: + logger.error(f"Failed to start Deno bridge: {e}") + + def _stop_server(self) -> None: + """Terminate the Deno bridge server subprocess.""" + if self._server_process is None or self._server_process.poll() is not None: + self._server_process = None + return + + logger.info("Stopping Deno bridge server", pid=self._server_process.pid) + self._server_process.send_signal(signal.SIGTERM) + try: + self._server_process.wait(timeout=3) + except subprocess.TimeoutExpired: + logger.warning( + "Deno bridge did not exit, sending SIGKILL", pid=self._server_process.pid + ) + self._server_process.kill() + try: + self._server_process.wait(timeout=5) + except subprocess.TimeoutExpired: + logger.error("Deno bridge did not exit after SIGKILL") + logger.info("Deno bridge server stopped") + self._server_process = None + + def _start_control_loop(self) -> None: + """Start the control loop thread.""" + if self._control_loop_thread is not None and self._control_loop_thread.is_alive(): + return + + self._stop_event.clear() + self._control_loop_thread = threading.Thread( + target=self._control_loop, + daemon=True, + name="QuestTeleopControlLoop", + ) + self._control_loop_thread.start() + logger.info(f"Control loop started at {self.config.control_loop_hz} Hz") + + def _stop_control_loop(self) -> None: + """Stop the control loop thread.""" + self._stop_event.set() + if self._control_loop_thread is not None: + self._control_loop_thread.join(timeout=1.0) + self._control_loop_thread = None + logger.info("Control loop stopped") + 
+ def _control_loop(self) -> None: + """ + Holds self._lock for the entire iteration so overridable methods + don't need to acquire it themselves. + """ + period = 1.0 / self.config.control_loop_hz + + while not self._stop_event.is_set(): + loop_start = time.perf_counter() + try: + with self._lock: + self._handle_engage() + + for hand in Hand: + if not self._should_publish(hand): + continue + output_pose = self._get_output_pose(hand) + if output_pose is not None: + self._publish_msg(hand, output_pose) + + # Always publish buttons regardless of engage state, + # so UI/listeners can react to button presses (e.g., trigger engage). + left = self._controllers.get(Hand.LEFT) + right = self._controllers.get(Hand.RIGHT) + self._publish_button_state(left, right) + except Exception: + logger.exception("Error in teleop control loop") + + elapsed = time.perf_counter() - loop_start + sleep_time = period - elapsed + if sleep_time > 0: + self._stop_event.wait(sleep_time) + + # ------------------------------------------------------------------------- + # Control Loop Internals + # ------------------------------------------------------------------------- + + def _handle_engage(self) -> None: + """Check for engage button press and update per-hand engage state. + + Override to customize which button/action triggers engage. + Default: Each controller's primary button (X/A) hold engages that hand. + """ + for hand in Hand: + controller = self._controllers.get(hand) + if controller is None: + continue + if controller.primary: + if not self._is_engaged[hand]: + self._engage(hand) + else: + if self._is_engaged[hand]: + self._disengage(hand) + + def _should_publish(self, hand: Hand) -> bool: + """Check if we should publish commands for a hand. + + Override to add custom conditions. + Default: Returns True if the hand is engaged. + """ + return self._is_engaged[hand] + + def _get_output_pose(self, hand: Hand) -> PoseStamped | None: + """Get the pose to publish for a controller. 
+ + Override to customize pose computation (e.g., send absolute pose, + apply scaling, add filtering). + Default: Computes delta from initial pose. + """ + current_pose = self._current_poses.get(hand) + initial_pose = self._initial_poses.get(hand) + + if current_pose is None or initial_pose is None: + return None + + delta = current_pose - initial_pose + return PoseStamped( + position=delta.position, + orientation=delta.orientation, + ts=current_pose.ts, + frame_id=current_pose.frame_id, + ) + + def _publish_msg(self, hand: Hand, output_msg: PoseStamped) -> None: + """Publish message for a controller. + + Override to customize output (e.g., convert to Twist, scale values). + """ + if hand == Hand.LEFT: + self.left_controller_output.publish(output_msg) + else: + self.right_controller_output.publish(output_msg) + + def _publish_button_state( + self, + left: QuestControllerState | None, + right: QuestControllerState | None, + ) -> None: + """Publish button states for both controllers. + + Override to customize button output format (e.g., different bit layout, + keep analog values, add extra streams). + """ + buttons = Buttons.from_controllers(left, right) + self.buttons.publish(buttons) + + +quest_teleop_module = QuestTeleopModule.blueprint + +__all__ = [ + "Hand", + "QuestTeleopConfig", + "QuestTeleopModule", + "QuestTeleopStatus", + "quest_teleop_module", +] diff --git a/dimos/teleop/quest/quest_types.py b/dimos/teleop/quest/quest_types.py new file mode 100644 index 0000000000..9e5616101d --- /dev/null +++ b/dimos/teleop/quest/quest_types.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Quest controller types with nice API for parsing Joy messages.""" + +from dataclasses import dataclass, field +from typing import ClassVar + +from dimos.msgs.sensor_msgs import Joy +from dimos.msgs.std_msgs import UInt32 + + +@dataclass +class ThumbstickState: + """State of a thumbstick with X/Y axes.""" + + x: float = 0.0 + y: float = 0.0 + + +@dataclass +class QuestControllerState: + """Parsed Quest controller state from Joy message with no data loss. + + Preserves full-fidelity analog values (trigger, grip as floats, thumbstick axes) + from the raw Joy message in a readable format. Use this when you need analog + precision (e.g., proportional grip control). Subclasses can publish this + alongside Buttons for float access. + + Axes layout: + 0: thumbstick X, 1: thumbstick Y, 2: trigger (analog), 3: grip (analog) + Button indices (digital, 0 or 1): + 0: trigger, 1: grip, 2: touchpad, 3: thumbstick, + 4: X/A, 5: Y/B, 6: menu + """ + + EXPECTED_AXES: ClassVar[int] = 4 + EXPECTED_BUTTONS: ClassVar[int] = 7 + + is_left: bool = True + # Analog values (0.0-1.0) + trigger: float = 0.0 + grip: float = 0.0 + # Digital buttons + touchpad: bool = False + thumbstick_press: bool = False + primary: bool = False # X on left, A on right + secondary: bool = False # Y on left, B on right + menu: bool = False + # Thumbstick axes + thumbstick: ThumbstickState = field(default_factory=ThumbstickState) + + @classmethod + def from_joy(cls, joy: Joy, is_left: bool = True) -> "QuestControllerState": + """Create QuestControllerState from Joy message. 
+ Expected axes: [thumbstick_x, thumbstick_y, trigger_analog, grip_analog] + Expected buttons: [trigger, grip, touchpad, thumbstick, X/A, Y/B, menu] + Raises: + ValueError: If Joy message doesn't have expected Quest controller format. + """ + buttons = joy.buttons or [] + axes = joy.axes or [] + + if len(buttons) < cls.EXPECTED_BUTTONS: + raise ValueError(f"Expected {cls.EXPECTED_BUTTONS} buttons, got {len(buttons)}") + if len(axes) < cls.EXPECTED_AXES: + raise ValueError(f"Expected {cls.EXPECTED_AXES} axes, got {len(axes)}") + + return cls( + is_left=is_left, + trigger=float(axes[2]), + grip=float(axes[3]), + touchpad=buttons[2] > 0.5, + thumbstick_press=buttons[3] > 0.5, + primary=buttons[4] > 0.5, + secondary=buttons[5] > 0.5, + menu=buttons[6] > 0.5, + thumbstick=ThumbstickState(x=float(axes[0]), y=float(axes[1])), + ) + + +class Buttons(UInt32): + """Packed button states for both controllers in a single UInt32. + + All values are collapsed to bools for lightweight transport. Analog values + (trigger, grip) are thresholded at 0.5. If you need the original float + values, access them from QuestControllerState and publish them in a subclass. 
+ + Bit layout: + Left (bits 0-6): trigger, grip, touchpad, thumbstick, primary, secondary, menu + Right (bits 8-14): trigger, grip, touchpad, thumbstick, primary, secondary, menu + """ + + # Bit positions + BITS = { + "left_trigger": 0, + "left_grip": 1, + "left_touchpad": 2, + "left_thumbstick": 3, + "left_primary": 4, + "left_secondary": 5, + "left_menu": 6, + "right_trigger": 8, + "right_grip": 9, + "right_touchpad": 10, + "right_thumbstick": 11, + "right_primary": 12, + "right_secondary": 13, + "right_menu": 14, + } + + def __getattr__(self, name: str) -> bool: + if name in Buttons.BITS: + return bool(self.data & (1 << Buttons.BITS[name])) + raise AttributeError(f"'{type(self).__name__}' has no attribute '{name}'") + + def __setattr__(self, name: str, value: bool) -> None: + if name in Buttons.BITS: + if value: + self.data |= 1 << Buttons.BITS[name] + else: + self.data &= ~(1 << Buttons.BITS[name]) + else: + super().__setattr__(name, value) + + @classmethod + def from_controllers( + cls, + left: "QuestControllerState | None", + right: "QuestControllerState | None", + ) -> "Buttons": + """Create Buttons from two QuestControllerState instances.""" + # Safe: cls() calls UInt32.__init__ which sets self.data = 0 before bit ops. 
+ buttons = cls() + + if left: + buttons.left_trigger = left.trigger > 0.5 + buttons.left_grip = left.grip > 0.5 + buttons.left_touchpad = left.touchpad + buttons.left_thumbstick = left.thumbstick_press + buttons.left_primary = left.primary + buttons.left_secondary = left.secondary + buttons.left_menu = left.menu + + if right: + buttons.right_trigger = right.trigger > 0.5 + buttons.right_grip = right.grip > 0.5 + buttons.right_touchpad = right.touchpad + buttons.right_thumbstick = right.thumbstick_press + buttons.right_primary = right.primary + buttons.right_secondary = right.secondary + buttons.right_menu = right.menu + + return buttons + + +__all__ = ["Buttons", "QuestControllerState", "ThumbstickState"] diff --git a/dimos/teleop/quest/web/README.md b/dimos/teleop/quest/web/README.md new file mode 100644 index 0000000000..9a7afbfe03 --- /dev/null +++ b/dimos/teleop/quest/web/README.md @@ -0,0 +1,69 @@ +# Quest Teleop Web + +WebXR client and server for Quest 3 VR teleoperation. + +## Components + +### teleop_server.ts + +Deno server that bridges WebSocket and LCM: +- Serves WebXR client over HTTPS (required for Quest) +- Forwards controller data from browser to LCM + +### static/index.html + +WebXR client running on Quest 3: +- Captures controller poses at ~80Hz +- Sends PoseStamped and Joy messages via WebSocket +- Requires internet connection (loads `@dimos/msgs` from CDN at runtime) + +## Running + +From the repository root (`dimos/`): + +```bash +./dimos/teleop/quest/web/teleop_server.ts +``` + +Server starts at `https://localhost:8443` + +SSL certificates are generated automatically on first run in `assets/teleop_certs/`. 
+ +## Message Flow + +``` +Quest Browser Deno Server Python + │ │ │ + │── PoseStamped (left) ────────→ │── vr_left_pose ───────────→ │ + │── PoseStamped (right) ───────→ │── vr_right_pose ──────────→ │ + │── Joy (left controller) ─────→ │── vr_left_joy ────────────→ │ + │── Joy (right controller) ────→ │── vr_right_joy ───────────→ │ +``` + +## LCM Topics + +| Topic | Type | Description | +|-------|------|-------------| +| `vr_left_pose` | PoseStamped | Left controller pose (WebXR frame) | +| `vr_right_pose` | PoseStamped | Right controller pose (WebXR frame) | +| `vr_left_joy` | Joy | Left controller buttons/axes | +| `vr_right_joy` | Joy | Right controller buttons/axes | + +## Joy Message Format + +Quest controller data is packed into Joy messages: + +**Axes** (indices 0-3): +- 0: thumbstick X (-1.0 to 1.0) +- 1: thumbstick Y (-1.0 to 1.0) +- 2: trigger (analog 0.0-1.0) +- 3: grip (analog 0.0-1.0) + +**Buttons** (indices 0-6, digital 0 or 1): +- 0: trigger (pressed) +- 1: grip (pressed) +- 2: touchpad +- 3: thumbstick press +- 4: X/A (primary) +- 5: Y/B (secondary) +- 6: menu diff --git a/dimos/teleop/quest/web/static/index.html b/dimos/teleop/quest/web/static/index.html new file mode 100644 index 0000000000..507d493011 --- /dev/null +++ b/dimos/teleop/quest/web/static/index.html @@ -0,0 +1,409 @@ + + + + + + Quest 3 VR Teleop + + + +
+

DimOS Quest-3 Teleop

+
Ready to connect
+ + +
+ + + + + diff --git a/dimos/teleop/quest/web/teleop_server.ts b/dimos/teleop/quest/web/teleop_server.ts new file mode 100755 index 0000000000..2bff24b34f --- /dev/null +++ b/dimos/teleop/quest/web/teleop_server.ts @@ -0,0 +1,84 @@ +#!/usr/bin/env -S deno run --allow-net --allow-read --allow-run --allow-write --unstable-net + +// WebSocket to LCM Bridge for Quest VR Teleop +// Forwards controller data from browser to LCM + +import { LCM } from "jsr:@dimos/lcm"; +import { dirname, fromFileUrl, join } from "jsr:@std/path"; + +const PORT = 8443; + +// Resolve paths relative to script location +const scriptDir = dirname(fromFileUrl(import.meta.url)); +const certsDir = join(scriptDir, "../../../../assets/teleop_certs"); +const certPath = join(certsDir, "cert.pem"); +const keyPath = join(certsDir, "key.pem"); + +// Auto-generate self-signed certificates if they don't exist +async function ensureCerts(): Promise<{ cert: string; key: string }> { + try { + const cert = await Deno.readTextFile(certPath); + const key = await Deno.readTextFile(keyPath); + return { cert, key }; + } catch { + console.log("Generating self-signed certificates..."); + await Deno.mkdir(certsDir, { recursive: true }); + const cmd = new Deno.Command("openssl", { + args: [ + "req", "-x509", "-newkey", "rsa:2048", + "-keyout", keyPath, "-out", certPath, + "-days", "365", "-nodes", "-subj", "/CN=localhost" + ], + }); + const { code } = await cmd.output(); + if (code !== 0) { + throw new Error("Failed to generate certificates. 
Is openssl installed?"); + } + console.log("Certificates generated in assets/teleop_certs/"); + return { + cert: await Deno.readTextFile(certPath), + key: await Deno.readTextFile(keyPath), + }; + } +} + +const { cert, key } = await ensureCerts(); + +const lcm = new LCM(); +await lcm.start(); + +Deno.serve({ port: PORT, cert, key }, async (req) => { + const url = new URL(req.url); + + if (req.headers.get("upgrade") === "websocket") { + const { socket, response } = Deno.upgradeWebSocket(req); + socket.onopen = () => console.log("Client connected"); + socket.onclose = () => console.log("Client disconnected"); + + // Forward binary LCM packets from browser directly to UDP + socket.binaryType = "arraybuffer"; + socket.onmessage = async (event) => { + if (event.data instanceof ArrayBuffer) { + const packet = new Uint8Array(event.data); + try { + await lcm.publishPacket(packet); + } catch (e) { + console.error("Forward error:", e); + } + } + }; + + return response; + } + + if (url.pathname === "/" || url.pathname === "/index.html") { + const html = await Deno.readTextFile(new URL("./static/index.html", import.meta.url)); + return new Response(html, { headers: { "content-type": "text/html" } }); + } + + return new Response("Not found", { status: 404 }); +}); + +console.log(`Server: https://localhost:${PORT}`); + +await lcm.run(); diff --git a/dimos/teleop/utils/__init__.py b/dimos/teleop/utils/__init__.py new file mode 100644 index 0000000000..ae8c375e8f --- /dev/null +++ b/dimos/teleop/utils/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Teleoperation utilities.""" diff --git a/dimos/teleop/utils/teleop_transforms.py b/dimos/teleop/utils/teleop_transforms.py new file mode 100644 index 0000000000..15fd3be120 --- /dev/null +++ b/dimos/teleop/utils/teleop_transforms.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Teleop transform utilities for VR coordinate transforms.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import numpy as np +from scipy.spatial.transform import Rotation as R # type: ignore[import-untyped] + +from dimos.msgs.geometry_msgs import PoseStamped +from dimos.utils.transform_utils import matrix_to_pose, pose_to_matrix + +if TYPE_CHECKING: + from numpy.typing import NDArray + +# Coordinate frame transformation from VR (WebXR) to robot frame +# WebXR: X=right, Y=up, Z=back (towards user) +# Robot: X=forward, Y=left, Z=up +VR_TO_ROBOT_FRAME: NDArray[np.float64] = np.array( + [ + [0, 0, -1, 0], # Robot X = -VR Z (forward) + [-1, 0, 0, 0], # Robot Y = -VR X (left) + [0, 1, 0, 0], # Robot Z = +VR Y (up) + [0, 0, 0, 1], + ], + dtype=np.float64, +) + + +def webxr_to_robot( + pose_stamped: PoseStamped, + is_left_controller: bool = True, +) -> PoseStamped: + """Transform VR controller pose to robot coordinate frame. + + Args: + pose_stamped: PoseStamped from VR controller in WebXR frame. + is_left_controller: True for left controller (+90 deg Z rotation), + False for right controller (-90 deg Z rotation). + + Returns: + PoseStamped in robot frame (preserves original ts and frame_id). 
+ """ + vr_matrix = pose_to_matrix(pose_stamped) + + # Apply controller alignment rotation + # Left controller rotates +90 deg around Z, right rotates -90 deg + direction = 1 if is_left_controller else -1 + z_rotation = R.from_euler("z", 90 * direction, degrees=True).as_matrix() + vr_matrix[:3, :3] = vr_matrix[:3, :3] @ z_rotation + + # Apply VR to robot frame transformation + robot_matrix = VR_TO_ROBOT_FRAME @ vr_matrix + robot_pose = matrix_to_pose(robot_matrix) + + return PoseStamped( + position=robot_pose.position, + orientation=robot_pose.orientation, + ts=pose_stamped.ts, + frame_id=pose_stamped.frame_id, + ) + + +__all__ = ["VR_TO_ROBOT_FRAME", "webxr_to_robot"] diff --git a/dimos/teleop/utils/teleop_visualization.py b/dimos/teleop/utils/teleop_visualization.py new file mode 100644 index 0000000000..a59b0666ef --- /dev/null +++ b/dimos/teleop/utils/teleop_visualization.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Teleop visualization utilities for Rerun.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import rerun as rr + +from dimos.utils.logging_config import setup_logger + +if TYPE_CHECKING: + from dimos.msgs.geometry_msgs import PoseStamped + +logger = setup_logger() + + +def visualize_pose(pose_stamped: PoseStamped, controller_label: str) -> None: + """Visualize controller absolute pose in Rerun.""" + try: + rr.log(f"world/teleop/{controller_label}_controller", pose_stamped.to_rerun()) # type: ignore[no-untyped-call] + rr.log(f"world/teleop/{controller_label}_controller/axes", rr.TransformAxes3D(0.10)) # type: ignore[attr-defined] + except Exception as e: + logger.debug(f"Failed to log {controller_label} controller to Rerun: {e}") + + +def visualize_buttons( + controller_label: str, + primary: bool = False, + secondary: bool = False, + grip: float = 0.0, + trigger: float = 0.0, +) -> None: + """Visualize button states in Rerun as scalar time series.""" + try: + base_path = f"world/teleop/{controller_label}_controller" + rr.log(f"{base_path}/primary", rr.Scalars(float(primary))) # type: ignore[attr-defined] + rr.log(f"{base_path}/secondary", rr.Scalars(float(secondary))) # type: ignore[attr-defined] + rr.log(f"{base_path}/grip", rr.Scalars(grip)) # type: ignore[attr-defined] + rr.log(f"{base_path}/trigger", rr.Scalars(trigger)) # type: ignore[attr-defined] + except Exception as e: + logger.debug(f"Failed to log {controller_label} buttons to Rerun: {e}") + + +__all__ = ["visualize_buttons", "visualize_pose"] diff --git a/dimos/types/test_timestamped.py b/dimos/types/test_timestamped.py index 88a8d65102..7de82e8f9a 100644 --- a/dimos/types/test_timestamped.py +++ b/dimos/types/test_timestamped.py @@ -19,11 +19,11 @@ from reactivex import operators as ops from reactivex.scheduler import ThreadPoolScheduler +from dimos.memory.timeseries.inmemory import InMemoryStore from dimos.msgs.sensor_msgs import Image from dimos.types.timestamped 
import ( Timestamped, TimestampedBufferCollection, - TimestampedCollection, align_timestamped, to_datetime, to_ros_stamp, @@ -133,13 +133,20 @@ def sample_items(): ] +def make_store(items: list[SimpleTimestamped] | None = None) -> InMemoryStore[SimpleTimestamped]: + store: InMemoryStore[SimpleTimestamped] = InMemoryStore() + if items: + store.save(*items) + return store + + @pytest.fixture def collection(sample_items): - return TimestampedCollection(sample_items) + return make_store(sample_items) def test_empty_collection() -> None: - collection = TimestampedCollection() + collection = make_store() assert len(collection) == 0 assert collection.duration() == 0.0 assert collection.time_range() is None @@ -147,16 +154,17 @@ def test_empty_collection() -> None: def test_add_items() -> None: - collection = TimestampedCollection() + collection = make_store() item1 = SimpleTimestamped(2.0, "two") item2 = SimpleTimestamped(1.0, "one") - collection.add(item1) - collection.add(item2) + collection.save(item1) + collection.save(item2) assert len(collection) == 2 - assert collection[0].data == "one" # Should be sorted by timestamp - assert collection[1].data == "two" + items = list(collection) + assert items[0].data == "one" # Should be sorted by timestamp + assert items[1].data == "two" def test_find_closest(collection) -> None: @@ -196,21 +204,13 @@ def test_find_before_after(collection) -> None: assert collection.find_after(7.0) is None # Nothing after last item -def test_merge_collections() -> None: - collection1 = TimestampedCollection( - [ - SimpleTimestamped(1.0, "a"), - SimpleTimestamped(3.0, "c"), - ] - ) - collection2 = TimestampedCollection( - [ - SimpleTimestamped(2.0, "b"), - SimpleTimestamped(4.0, "d"), - ] - ) +def test_save_from_multiple_stores() -> None: + store1 = make_store([SimpleTimestamped(1.0, "a"), SimpleTimestamped(3.0, "c")]) + store2 = make_store([SimpleTimestamped(2.0, "b"), SimpleTimestamped(4.0, "d")]) - merged = collection1.merge(collection2) + 
merged = make_store() + merged.save(*store1) + merged.save(*store2) assert len(merged) == 4 assert [item.data for item in merged] == ["a", "b", "c", "d"] @@ -244,7 +244,7 @@ def test_iteration(collection) -> None: def test_single_item_collection() -> None: - single = TimestampedCollection([SimpleTimestamped(5.0, "only")]) + single = make_store([SimpleTimestamped(5.0, "only")]) assert single.duration() == 0.0 assert single.time_range() == (5.0, 5.0) @@ -264,14 +264,17 @@ def test_time_window_collection() -> None: # Add a message at t=4.0, should keep messages from t=2.0 onwards window.add(SimpleTimestamped(4.0, "msg4")) assert len(window) == 3 # msg1 should be dropped - assert window[0].data == "msg2" # oldest is now msg2 - assert window[-1].data == "msg4" # newest is msg4 + first = window.first() + last = window.last() + assert first is not None and first.data == "msg2" # oldest is now msg2 + assert last is not None and last.data == "msg4" # newest is msg4 # Add a message at t=5.5, should drop msg2 and msg3 window.add(SimpleTimestamped(5.5, "msg5")) assert len(window) == 2 # only msg4 and msg5 remain - assert window[0].data == "msg4" - assert window[1].data == "msg5" + items = list(window) + assert items[0].data == "msg4" + assert items[1].data == "msg5" # Verify time range assert window.start_ts == 4.0 diff --git a/dimos/types/timestamped.py b/dimos/types/timestamped.py index 765b1adbcb..b229a2478e 100644 --- a/dimos/types/timestamped.py +++ b/dimos/types/timestamped.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from collections import defaultdict -from collections.abc import Iterable, Iterator from datetime import datetime, timezone from typing import Generic, TypeVar, Union @@ -22,8 +21,8 @@ # from dimos_lcm.std_msgs import Time as ROSTime from reactivex.observable import Observable -from sortedcontainers import SortedKeyList # type: ignore[import-untyped] +from dimos.memory.timeseries.inmemory import InMemoryStore from dimos.types.weaklist import WeakList from dimos.utils.logging_config import setup_logger @@ -117,152 +116,29 @@ def ros_timestamp(self) -> list[int]: T = TypeVar("T", bound=Timestamped) -class TimestampedCollection(Generic[T]): - """A collection of timestamped objects with efficient time-based operations.""" - - def __init__(self, items: Iterable[T] | None = None) -> None: - self._items = SortedKeyList(items or [], key=lambda x: x.ts) - - def add(self, item: T) -> None: - """Add a timestamped item to the collection.""" - self._items.add(item) - - def find_closest(self, timestamp: float, tolerance: float | None = None) -> T | None: - """Find the timestamped object closest to the given timestamp.""" - if not self._items: - return None - - # Use binary search to find insertion point - idx = self._items.bisect_key_left(timestamp) - - # Check exact match - if idx < len(self._items) and self._items[idx].ts == timestamp: - return self._items[idx] # type: ignore[no-any-return] - - # Find candidates: item before and after - candidates = [] - - # Item before - if idx > 0: - candidates.append((idx - 1, abs(self._items[idx - 1].ts - timestamp))) - - # Item after - if idx < len(self._items): - candidates.append((idx, abs(self._items[idx].ts - timestamp))) - - if not candidates: - return None - - # Find closest - # When distances are equal, prefer the later item (higher index) - closest_idx, closest_distance = min(candidates, key=lambda x: (x[1], -x[0])) - - # Check tolerance if provided - if tolerance is not None and closest_distance > tolerance: - return None - - 
return self._items[closest_idx] # type: ignore[no-any-return] - - def find_before(self, timestamp: float) -> T | None: - """Find the last item before the given timestamp.""" - idx = self._items.bisect_key_left(timestamp) - return self._items[idx - 1] if idx > 0 else None - - def find_after(self, timestamp: float) -> T | None: - """Find the first item after the given timestamp.""" - idx = self._items.bisect_key_right(timestamp) - return self._items[idx] if idx < len(self._items) else None - - def merge(self, other: "TimestampedCollection[T]") -> "TimestampedCollection[T]": - """Merge two timestamped collections into a new one.""" - result = TimestampedCollection[T]() - result._items = SortedKeyList(self._items + other._items, key=lambda x: x.ts) - return result - - def duration(self) -> float: - """Get the duration of the collection in seconds.""" - if len(self._items) < 2: - return 0.0 - return self._items[-1].ts - self._items[0].ts # type: ignore[no-any-return] - - def time_range(self) -> tuple[float, float] | None: - """Get the time range (start, end) of the collection.""" - if not self._items: - return None - return (self._items[0].ts, self._items[-1].ts) - - def slice_by_time(self, start: float, end: float) -> "TimestampedCollection[T]": - """Get a subset of items within the given time range.""" - start_idx = self._items.bisect_key_left(start) - end_idx = self._items.bisect_key_right(end) - return TimestampedCollection(self._items[start_idx:end_idx]) - - @property - def start_ts(self) -> float | None: - """Get the start timestamp of the collection.""" - return self._items[0].ts if self._items else None - - @property - def end_ts(self) -> float | None: - """Get the end timestamp of the collection.""" - return self._items[-1].ts if self._items else None - - def __len__(self) -> int: - return len(self._items) - - def __iter__(self) -> Iterator: # type: ignore[type-arg] - return iter(self._items) - - def __getitem__(self, idx: int) -> T: - return self._items[idx] # 
type: ignore[no-any-return] - - PRIMARY = TypeVar("PRIMARY", bound=Timestamped) SECONDARY = TypeVar("SECONDARY", bound=Timestamped) -class TimestampedBufferCollection(TimestampedCollection[T]): - """A timestamped collection that maintains a sliding time window, dropping old messages.""" - - def __init__(self, window_duration: float, items: Iterable[T] | None = None) -> None: - """ - Initialize with a time window duration in seconds. +class TimestampedBufferCollection(InMemoryStore[T]): + """A sliding time window buffer backed by InMemoryStore.""" - Args: - window_duration: Maximum age of messages to keep in seconds - items: Optional initial items - """ - super().__init__(items) + def __init__(self, window_duration: float) -> None: + super().__init__() self.window_duration = window_duration def add(self, item: T) -> None: - """Add a timestamped item and remove any items outside the time window.""" - super().add(item) - self._prune_old_messages(item.ts) - - def _prune_old_messages(self, current_ts: float) -> None: - """Remove messages older than window_duration from the given timestamp.""" - cutoff_ts = current_ts - self.window_duration - - # Find the index of the first item that should be kept - keep_idx = self._items.bisect_key_left(cutoff_ts) + """Add a timestamped item and prune items outside the time window.""" + self.save(item) + self.prune_old(item.ts - self.window_duration) - # Remove old items - if keep_idx > 0: - del self._items[:keep_idx] + def remove(self, item: T) -> bool: + """Remove a timestamped item. Returns True if found and removed.""" + return self._delete(item.ts) is not None def remove_by_timestamp(self, timestamp: float) -> bool: - """Remove an item with the given timestamp. 
Returns True if item was found and removed.""" - idx = self._items.bisect_key_left(timestamp) - - if idx < len(self._items) and self._items[idx].ts == timestamp: - del self._items[idx] - return True - return False - - def remove(self, item: T) -> bool: - """Remove a timestamped item from the collection. Returns True if item was found and removed.""" - return self.remove_by_timestamp(item.ts) + """Remove an item by timestamp. Returns True if found and removed.""" + return self._delete(timestamp) is not None class MatchContainer(Timestamped, Generic[PRIMARY, SECONDARY]): diff --git a/dimos/utils/cli/agentspy/agentspy.py b/dimos/utils/cli/agentspy/agentspy.py index 52760cb2da..a0ee43a62d 100644 --- a/dimos/utils/cli/agentspy/agentspy.py +++ b/dimos/utils/cli/agentspy/agentspy.py @@ -29,7 +29,7 @@ from textual.binding import Binding from textual.widgets import Footer, RichLog -from dimos.protocol.pubsub.lcmpubsub import PickleLCM +from dimos.protocol.pubsub.impl.lcmpubsub import PickleLCM from dimos.utils.cli import theme # Type alias for all message types we might receive diff --git a/dimos/utils/cli/agentspy/demo_agentspy.py b/dimos/utils/cli/agentspy/demo_agentspy.py index c747ab65f6..5229295038 100755 --- a/dimos/utils/cli/agentspy/demo_agentspy.py +++ b/dimos/utils/cli/agentspy/demo_agentspy.py @@ -25,7 +25,7 @@ ) from dimos.protocol.pubsub import lcm # type: ignore[attr-defined] -from dimos.protocol.pubsub.lcmpubsub import PickleLCM +from dimos.protocol.pubsub.impl.lcmpubsub import PickleLCM def test_publish_messages() -> None: diff --git a/dimos/utils/cli/human/humancli.py b/dimos/utils/cli/human/humancli.py index cd7ca91637..cf7fd8a258 100644 --- a/dimos/utils/cli/human/humancli.py +++ b/dimos/utils/cli/human/humancli.py @@ -18,7 +18,7 @@ import json import textwrap import threading -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolCall, ToolMessage from 
rich.highlighter import JSONHighlighter @@ -26,6 +26,7 @@ from textual.app import App, ComposeResult from textual.binding import Binding from textual.containers import Container +from textual.geometry import Size from textual.widgets import Input, RichLog from dimos.core import pLCMTransport @@ -33,6 +34,8 @@ from dimos.utils.generic import truncate_display_string if TYPE_CHECKING: + from collections.abc import Callable + from textual.events import Key # Custom theme for JSON highlighting @@ -49,6 +52,76 @@ ) +class ThinkingIndicator: + """Manages a throbbing 'thinking...' chat message in a RichLog.""" + + def __init__( + self, + app: App[Any], + chat_log: RichLog, + add_message_fn: Callable[[str, str, str, str], None], + ) -> None: + self._app: App[Any] = app + self._chat_log = chat_log + self._add_message = add_message_fn + self._timer: Any = None + self._strips: list[Any] = [] + self.visible = False + self._throb_dim = False + + def show(self) -> None: + if self.visible: + return + self.visible = True + self._throb_dim = False + self._write_line() + self._timer = self._app.set_interval(0.6, self._toggle_throb) + + def hide(self) -> None: + if not self.visible: + return + self.visible = False + if self._timer is not None: + self._timer.stop() + self._timer = None + self._remove_lines() + + def detach_if_needed(self) -> bool: + if self.visible and self._strips: + self._remove_lines() + return True + return False + + def reattach(self) -> None: + self._write_line() + + def _write_line(self) -> None: + before_count = len(self._chat_log.lines) + color = theme.DIM if self._throb_dim else theme.ACCENT + timestamp = datetime.now().strftime("%H:%M:%S") + self._add_message(timestamp, "", "[italic]thinking...[/italic]", color) + self._strips = list(self._chat_log.lines[before_count:]) + + def _remove_lines(self) -> None: + if not self._strips: + return + strip_ids = {id(s) for s in self._strips} + self._chat_log.lines = [line for line in self._chat_log.lines if id(line) 
not in strip_ids] + self._strips = [] + self._chat_log._line_cache.clear() + self._chat_log.virtual_size = Size( + self._chat_log.virtual_size.width, len(self._chat_log.lines) + ) + self._chat_log.refresh() + + def _toggle_throb(self) -> None: + if not self.visible: + return + self._remove_lines() + self._throb_dim = not self._throb_dim + self._write_line() + + class HumanCLIApp(App): # type: ignore[type-arg] """IRC-like interface for interacting with DimOS agents.""" @@ -70,6 +143,7 @@ class HumanCLIApp(App): # type: ignore[type-arg] Input {{ dock: bottom; }} + """ BINDINGS = [ @@ -80,11 +154,14 @@ class HumanCLIApp(App): # type: ignore[type-arg] def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def] super().__init__(*args, **kwargs) - self.human_transport = pLCMTransport("/human_input") # type: ignore[var-annotated] - self.agent_transport = pLCMTransport("/agent") # type: ignore[var-annotated] + self._human_transport = pLCMTransport("/human_input") # type: ignore[var-annotated] + self._agent_transport = pLCMTransport("/agent") # type: ignore[var-annotated] + self._agent_idle = pLCMTransport("/agent_idle") # type: ignore[var-annotated] self.chat_log: RichLog | None = None self.input_widget: Input | None = None self._subscription_thread: threading.Thread | None = None + self._idle_subscription_thread: threading.Thread | None = None + self._thinking: ThinkingIndicator | None = None self._running = False def compose(self) -> ComposeResult: @@ -106,16 +183,22 @@ def on_mount(self) -> None: # Set custom highlighter for RichLog self.chat_log.highlighter = JSONHighlighter() # type: ignore[union-attr] - # Start subscription thread + assert self.chat_log is not None + self._thinking = ThinkingIndicator(self, self.chat_log, self._add_message) + + # Start subscription threads self._subscription_thread = threading.Thread(target=self._subscribe_to_agent, daemon=True) self._subscription_thread.start() + self._idle_subscription_thread = threading.Thread( + 
target=self._subscribe_to_idle, daemon=True + ) + self._idle_subscription_thread.start() # Focus on input self.input_widget.focus() # type: ignore[union-attr] self.chat_log.write(f"[{theme.ACCENT}]{theme.ascii_logo}[/{theme.ACCENT}]") # type: ignore[union-attr] - # Welcome message self._add_system_message("Connected to DimOS Agent Interface") def on_unmount(self) -> None: @@ -173,7 +256,18 @@ def receive_msg(msg) -> None: # type: ignore[no-untyped-def] self._add_message, timestamp, "human", msg.content, theme.HUMAN ) - self.agent_transport.subscribe(receive_msg) + self._agent_transport.subscribe(receive_msg) + + def _subscribe_to_idle(self) -> None: + def receive_idle(is_idle: bool) -> None: + assert self._thinking is not None + + if not self._running: + return + + self.call_from_thread(self._thinking.hide if is_idle else self._thinking.show) + + self._agent_idle.subscribe(receive_idle) def _format_tool_call(self, tool_call: ToolCall) -> str: """Format a tool call for display.""" @@ -183,7 +277,9 @@ def _format_tool_call(self, tool_call: ToolCall) -> str: return f"▶ {name}({args_str})" def _add_message(self, timestamp: str, sender: str, content: str, color: str) -> None: - """Add a message to the chat log.""" + assert self._thinking is not None + reattach = self._thinking.detach_if_needed() + # Strip leading/trailing whitespace from content content = content.strip() if content else "" @@ -238,6 +334,9 @@ def _add_message(self, timestamp: str, sender: str, content: str, color: str) -> # Empty line self.chat_log.write(indent + "│") # type: ignore[union-attr] + if reattach: + self._thinking.reattach() + def _add_system_message(self, content: str) -> None: """Add a system message to the chat.""" timestamp = datetime.now().strftime("%H:%M:%S") @@ -277,7 +376,7 @@ def on_input_submitted(self, event: Input.Submitted) -> None: return # Send to agent (message will be displayed when received back) - self.human_transport.publish(message) + 
self._human_transport.publish(message) def action_clear(self) -> None: """Clear the chat log.""" diff --git a/dimos/utils/cli/lcmspy/test_lcmspy.py b/dimos/utils/cli/lcmspy/test_lcmspy.py index 3016a723fe..530f081f29 100644 --- a/dimos/utils/cli/lcmspy/test_lcmspy.py +++ b/dimos/utils/cli/lcmspy/test_lcmspy.py @@ -16,7 +16,7 @@ import pytest -from dimos.protocol.pubsub.lcmpubsub import PickleLCM, Topic +from dimos.protocol.pubsub.impl.lcmpubsub import PickleLCM, Topic from dimos.utils.cli.lcmspy.lcmspy import GraphLCMSpy, GraphTopic, LCMSpy, Topic as TopicSpy @@ -175,8 +175,9 @@ def test_lcmspy_global_totals() -> None: spy.msg("/odom", b"odometry data") spy.msg("/imu", b"imu data") - # The spy itself should have accumulated all messages - assert len(spy.message_history) == 3 + # Verify each test topic received exactly one message (ignore LCM discovery packets) + for t in ("/video", "/odom", "/imu"): + assert len(spy.topic[t].message_history) == 1 # Check global statistics global_freq = spy.freq(1.0) diff --git a/dimos/utils/cli/skillspy/demo_skillspy.py b/dimos/utils/cli/skillspy/demo_skillspy.py deleted file mode 100644 index 602381020a..0000000000 --- a/dimos/utils/cli/skillspy/demo_skillspy.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Demo script that runs skills in the background while agentspy monitors them.""" - -import threading -import time - -from dimos.protocol.skill.coordinator import SkillCoordinator -from dimos.protocol.skill.skill import SkillContainer, skill - - -class DemoSkills(SkillContainer): - @skill() - def count_to(self, n: int) -> str: - """Count to n with delays.""" - for _i in range(n): - time.sleep(0.5) - return f"Counted to {n}" - - @skill() - def compute_fibonacci(self, n: int) -> int: - """Compute nth fibonacci number.""" - if n <= 1: - return n - a, b = 0, 1 - for _ in range(2, n + 1): - time.sleep(0.1) # Simulate computation - a, b = b, a + b - return b - - @skill() - def simulate_error(self) -> None: - """Skill that always errors.""" - time.sleep(0.3) - raise RuntimeError("Simulated error for testing") - - @skill() - def quick_task(self, name: str) -> str: - """Quick task that completes fast.""" - time.sleep(0.1) - return f"Quick task '{name}' done!" - - -def run_demo_skills() -> None: - """Run demo skills in background.""" - # Create and start agent interface - agent_interface = SkillCoordinator() - agent_interface.start() - - # Register skills - demo_skills = DemoSkills() - agent_interface.register_skills(demo_skills) - - # Run various skills periodically - def skill_runner() -> None: - counter = 0 - while True: - time.sleep(2) - - # Generate unique call_id for each invocation - call_id = f"demo-{counter}" - - # Run different skills based on counter - if counter % 4 == 0: - # Run multiple count_to in parallel to show parallel execution - agent_interface.call_skill(f"{call_id}-count-1", "count_to", {"args": [3]}) - agent_interface.call_skill(f"{call_id}-count-2", "count_to", {"args": [5]}) - agent_interface.call_skill(f"{call_id}-count-3", "count_to", {"args": [2]}) - elif counter % 4 == 1: - agent_interface.call_skill(f"{call_id}-fib", "compute_fibonacci", {"args": [10]}) - elif counter % 4 == 2: - agent_interface.call_skill( - f"{call_id}-quick", 
"quick_task", {"args": [f"task-{counter}"]} - ) - else: - agent_interface.call_skill(f"{call_id}-error", "simulate_error", {}) - - counter += 1 - - # Start skill runner in background - thread = threading.Thread(target=skill_runner, daemon=True) - thread.start() - - print("Demo skills running in background. Start agentspy in another terminal to monitor.") - print("Run: agentspy") - - # Keep running - try: - while True: - time.sleep(1) - except KeyboardInterrupt: - print("\nDemo stopped.") - - agent_interface.stop() - - -if __name__ == "__main__": - run_demo_skills() diff --git a/dimos/utils/cli/skillspy/skillspy.py b/dimos/utils/cli/skillspy/skillspy.py deleted file mode 100644 index beb2421eec..0000000000 --- a/dimos/utils/cli/skillspy/skillspy.py +++ /dev/null @@ -1,281 +0,0 @@ -# Copyright 2025-2026 Dimensional Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
class AgentSpy:
    """Spy on agent skill executions via LCM messages."""

    def __init__(self) -> None:
        # Coordinator instance used purely for observation of skill traffic.
        self.agent_interface = SkillCoordinator()
        self.message_callbacks: list[Callable[[dict[str, SkillState]], None]] = []
        self._lock = threading.Lock()
        self._latest_state: dict[str, SkillState] = {}
        self._running = False

    def start(self) -> None:
        """Start spying on agent messages."""
        self._running = True
        self.agent_interface.start()
        # Listen to everything flowing over the coordinator's transport.
        self.agent_interface.skill_transport.subscribe(self._handle_message)

    def stop(self) -> None:
        """Stop spying."""
        self._running = False
        # Give in-flight handler threads a moment to finish.
        time.sleep(0.2)
        self.agent_interface.stop()

    def _handle_message(self, msg: "SkillMsg") -> None:  # type: ignore[type-arg]
        """Handle one incoming skill message by scheduling a state refresh."""
        if not self._running:
            return

        def publish_snapshot() -> None:
            # Brief delay so the coordinator has processed the message
            # before we snapshot its state.
            time.sleep(0.1)
            if not self._running:
                return
            # NOTE(review): snapshot and callback fan-out both run under the
            # state lock here — confirm the original lock scope if deadlocks
            # with subscriber callbacks become a concern.
            with self._lock:
                self._latest_state = self.agent_interface.generate_snapshot(clear=False)
                for subscriber in self.message_callbacks:
                    subscriber(self._latest_state)

        # Hand off to a worker thread so the LCM receive path is never blocked.
        threading.Thread(target=publish_snapshot, daemon=True).start()

    def subscribe(self, callback: "Callable[[dict[str, SkillState]], None]") -> None:
        """Subscribe to state updates."""
        self.message_callbacks.append(callback)

    def get_state(self) -> "dict[str, SkillState]":
        """Get current state snapshot."""
        with self._lock:
            return self._latest_state.copy()


def state_color(state: "SkillStateEnum") -> str:
    """Map a skill state to its display color (falls back to FOREGROUND)."""
    palette = {
        SkillStateEnum.pending: theme.WARNING,
        SkillStateEnum.running: theme.AGENT,
        SkillStateEnum.completed: theme.SUCCESS,
        SkillStateEnum.error: theme.ERROR,
    }
    return palette.get(state, theme.FOREGROUND)


def format_duration(duration: float) -> str:
    """Format a duration in seconds as a short human-readable string."""
    if duration < 1:
        return f"{duration * 1000:.0f}ms"
    if duration < 60:
        return f"{duration:.1f}s"
    if duration < 3600:
        return f"{duration / 60:.1f}m"
    return f"{duration / 3600:.1f}h"
self.table.add_column("Duration") - self.table.add_column("Messages") - self.table.add_column("Details") - - yield self.table - yield Footer() - - def on_mount(self) -> None: - """Start the spy when app mounts.""" - self.spy.subscribe(self.update_state) - self.spy.start() - - # Set up periodic refresh to update durations - self.set_interval(1.0, self.refresh_table) - - def on_unmount(self) -> None: - """Stop the spy when app unmounts.""" - self.spy.stop() - - def update_state(self, state: dict[str, SkillState]) -> None: - """Update state from spy callback. State dict is keyed by call_id.""" - # Update history with current state - current_time = time.time() - - # Add new skills or update existing ones - for call_id, skill_state in state.items(): - # Find if this call_id already in history - found = False - for i, (existing_call_id, _old_state, start_time) in enumerate(self.skill_history): - if existing_call_id == call_id: - # Update existing entry - self.skill_history[i] = (call_id, skill_state, start_time) - found = True - break - - if not found: - # Add new entry with current time as start - start_time = current_time - if skill_state.start_msg: - # Use start message timestamp if available - start_time = skill_state.start_msg.ts - self.skill_history.append((call_id, skill_state, start_time)) - - # Schedule UI update - self.call_from_thread(self.refresh_table) - - def refresh_table(self) -> None: - """Refresh the table display.""" - if not self.table: - return - - # Clear table - self.table.clear(columns=False) - - # Sort by start time (newest first) - sorted_history = sorted(self.skill_history, key=lambda x: x[2], reverse=True) - - # Get terminal height and calculate how many rows we can show - height = self.size.height - 6 # Account for header, footer, column headers - max_rows = max(1, height) - - # Show only top N entries - for call_id, skill_state, start_time in sorted_history[:max_rows]: - # Calculate how long ago it started (for progress indicator) - time_ago 
= time.time() - start_time - - # Duration - duration_str = format_duration(skill_state.duration()) - - # Message count - msg_count = len(skill_state) - - # Details based on state and last message - details = "" - if skill_state.state == SkillStateEnum.error and skill_state.error_msg: - # Show error message - error_content = skill_state.error_msg.content - if isinstance(error_content, dict): - details = error_content.get("msg", "Error")[:40] - else: - details = str(error_content)[:40] - elif skill_state.state == SkillStateEnum.completed and skill_state.ret_msg: - # Show return value - details = f"→ {str(skill_state.ret_msg.content)[:37]}" - elif skill_state.state == SkillStateEnum.running: - # Show progress indicator - details = "⋯ " + "▸" * min(int(time_ago), 20) - - # Format call_id for display (truncate if too long) - display_call_id = call_id - if len(call_id) > 16: - display_call_id = call_id[:13] + "..." - - # Add row with colored state - self.table.add_row( - Text(display_call_id, style=theme.BRIGHT_BLUE), - Text(skill_state.name, style=theme.YELLOW), - Text(skill_state.state.name, style=state_color(skill_state.state)), - Text(duration_str, style=theme.WHITE), - Text(str(msg_count), style=theme.YELLOW), - Text(details, style=theme.FOREGROUND), - ) - - def action_clear(self) -> None: - """Clear the skill history.""" - self.skill_history.clear() - self.refresh_table() - - -def main() -> None: - """Main entry point for agentspy CLI.""" - import sys - - # Check if running in web mode - if len(sys.argv) > 1 and sys.argv[1] == "web": - import os - - from textual_serve.server import Server # type: ignore[import-not-found] - - server = Server(f"python {os.path.abspath(__file__)}") - server.serve() - else: - app = AgentSpyApp() - app.run() - - -if __name__ == "__main__": - main() diff --git a/dimos/utils/data.py b/dimos/utils/data.py index 7666343dbb..d14ac04730 100644 --- a/dimos/utils/data.py +++ b/dimos/utils/data.py @@ -105,7 +105,7 @@ def _get_repo_root() -> 
Path: @cache -def _get_data_dir(extra_path: str | None = None) -> Path: +def get_data_dir(extra_path: str | None = None) -> Path: if extra_path: return _get_repo_root() / "data" / extra_path return _get_repo_root() / "data" @@ -113,7 +113,7 @@ def _get_data_dir(extra_path: str | None = None) -> Path: @cache def _get_lfs_dir() -> Path: - return _get_data_dir() / ".lfs" + return get_data_dir() / ".lfs" def _check_git_lfs_available() -> bool: @@ -174,7 +174,7 @@ def _lfs_pull(file_path: Path, repo_root: Path) -> None: def _decompress_archive(filename: str | Path) -> Path: - target_dir = _get_data_dir() + target_dir = get_data_dir() filename_path = Path(filename) with tarfile.open(filename_path, "r:gz") as tar: tar.extractall(target_dir) @@ -211,7 +211,7 @@ def _pull_lfs_archive(filename: str | Path) -> Path: return file_path -def get_data(filename: str | Path) -> Path: +def get_data(name: str | Path) -> Path: """ Get the path to a test data, downloading from LFS if needed. @@ -222,29 +222,115 @@ def get_data(filename: str | Path) -> Path: 4. Download the file from LFS if it's a pointer file 5. Return the Path object to the actual file or dir + Supports nested paths like "dataset/subdir/file.jpg" - will download and + decompress "dataset" archive but return the full nested path. 
+ Args: - filename: Name of the test file (e.g., "lidar_sample.bin") + name: Name of the test file or dir, optionally with nested path + (e.g., "lidar_sample.bin" or "dataset/frames/001.png") Returns: - Path: Path object to the test file + Path: Path object to the test file or dir Raises: RuntimeError: If Git LFS is not available or LFS operations fail FileNotFoundError: If the test file doesn't exist Usage: - # As string path - file_path = str(testFile("sample.bin")) + # Simple file/dir + file_path = get_data("sample.bin") - # As context manager for file operations - with testFile("sample.bin").open('rb') as f: - data = f.read() + # Nested path - downloads "dataset" archive, returns path to nested file + frame = get_data("dataset/frames/001.png") """ - data_dir = _get_data_dir() - file_path = data_dir / filename + data_dir = get_data_dir() + file_path = data_dir / name # already pulled and decompressed, return it directly if file_path.exists(): return file_path - return _decompress_archive(_pull_lfs_archive(filename)) + # extract archive root (first path component) and nested path + path_parts = Path(name).parts + archive_name = path_parts[0] + nested_path = Path(*path_parts[1:]) if len(path_parts) > 1 else None + + # download and decompress the archive root + archive_path = _decompress_archive(_pull_lfs_archive(archive_name)) + + # return full path including nested components + if nested_path: + return archive_path / nested_path + return archive_path + + +class LfsPath(type(Path())): # type: ignore[misc] + """ + A Path subclass that lazily downloads LFS data when accessed. + + This is useful for both lazy loading and differentiating between LFS paths and regular paths. + + This class wraps pathlib.Path and ensures that get_data() is called + before any meaningful filesystem operation, making LFS data lazy-loaded. 
+ + Usage: + path = LfsPath("sample_data") + # No download yet + + with path.open('rb') as f: # Downloads now if needed + data = f.read() + + # Or use any Path operation: + if path.exists(): # Downloads now if needed + files = list(path.iterdir()) + """ + + def __new__(cls, filename: str | Path) -> "LfsPath": + # Create instance with a placeholder path to satisfy Path.__new__ + # We use "." as a dummy path that always exists + instance: LfsPath = super().__new__(cls, ".") # type: ignore[call-arg] + # Store the actual filename as an instance attribute + object.__setattr__(instance, "_lfs_filename", filename) + object.__setattr__(instance, "_lfs_resolved_cache", None) + return instance + + def _ensure_downloaded(self) -> Path: + """Ensure the LFS data is downloaded and return the resolved path.""" + cache: Path | None = object.__getattribute__(self, "_lfs_resolved_cache") + if cache is None: + filename = object.__getattribute__(self, "_lfs_filename") + cache = get_data(filename) + object.__setattr__(self, "_lfs_resolved_cache", cache) + return cache + + def __getattribute__(self, name: str) -> object: + # During Path.__new__(), _lfs_filename hasn't been set yet. + # Fall through to normal Path behavior until construction is complete. 
+ try: + object.__getattribute__(self, "_lfs_filename") + except AttributeError: + return object.__getattribute__(self, name) + + # After construction, allow access to our internal attributes directly + if name in ("_lfs_filename", "_lfs_resolved_cache", "_ensure_downloaded"): + return object.__getattribute__(self, name) + + # For all other attributes, ensure download first then delegate to resolved path + resolved = object.__getattribute__(self, "_ensure_downloaded")() + return getattr(resolved, name) + + def __str__(self) -> str: + """String representation returns resolved path.""" + return str(self._ensure_downloaded()) + + def __fspath__(self) -> str: + """Return filesystem path, downloading from LFS if needed.""" + return str(self._ensure_downloaded()) + + def __truediv__(self, other: object) -> Path: + """Path division operator - returns resolved path.""" + return self._ensure_downloaded() / other # type: ignore[operator, return-value] + + def __rtruediv__(self, other: object) -> Path: + """Reverse path division operator.""" + return other / self._ensure_downloaded() # type: ignore[operator, return-value] diff --git a/dimos/utils/docs/doclinks.py b/dimos/utils/docs/doclinks.py index 3f0af10a7b..67d5897b28 100644 --- a/dimos/utils/docs/doclinks.py +++ b/dimos/utils/docs/doclinks.py @@ -106,8 +106,8 @@ def build_doc_index(root: Path) -> dict[str, list[Path]]: """ Build an index mapping lowercase doc names to .md file paths. 
- For docs/concepts/modules.md, creates entry: - - "modules" -> [Path("docs/concepts/modules.md")] + For docs/usage/modules.md, creates entry: + - "modules" -> [Path("docs/usage/modules.md")] Also indexes directory index files: - "modules" -> [Path("docs/modules/index.md")] (if modules/index.md exists) diff --git a/dimos/utils/docs/test_doclinks.py b/dimos/utils/docs/test_doclinks.py index 7313ec3676..f1303a2245 100644 --- a/dimos/utils/docs/test_doclinks.py +++ b/dimos/utils/docs/test_doclinks.py @@ -256,7 +256,7 @@ def test_github_mode(self, file_index): def test_relative_mode(self, file_index): """Should generate relative paths in relative mode.""" content = "See [`service/spec.py`]()" - doc_path = REPO_ROOT / "docs/concepts/test.md" + doc_path = REPO_ROOT / "docs/usage/test.md" new_content, _changes, _errors = process_markdown( content, @@ -277,7 +277,7 @@ def test_indexes_by_stem(self, doc_index): """Should index docs by lowercase stem.""" assert "configuration" in doc_index assert "modules" in doc_index - assert "development" in doc_index + assert "blueprints" in doc_index def test_case_insensitive(self, doc_index): """Should use lowercase keys.""" @@ -349,8 +349,8 @@ def test_doc_link_github_mode(self, file_index, doc_index): def test_doc_link_relative_mode(self, file_index, doc_index): """Should generate relative paths for doc links.""" - content = "See [Development](.md)" - doc_path = REPO_ROOT / "docs/concepts/test.md" + content = "See [Blueprints](.md)" + doc_path = REPO_ROOT / "docs/usage/test.md" new_content, _changes, errors = process_markdown( content, @@ -364,8 +364,8 @@ def test_doc_link_relative_mode(self, file_index, doc_index): ) assert len(errors) == 0 - # Should be relative path from docs/concepts/ to docs/ - assert "../" in new_content + # Should be relative path from docs/usage/ to target doc + assert "[Blueprints](blueprints.md)" in new_content def test_doc_not_found_error(self, file_index, doc_index): """Should error when doc doesn't 
exist.""" diff --git a/dimos/utils/logging_config.py b/dimos/utils/logging_config.py index ce1494025c..a9bfc5031d 100644 --- a/dimos/utils/logging_config.py +++ b/dimos/utils/logging_config.py @@ -106,6 +106,92 @@ def _configure_structlog() -> Path: return _LOG_FILE_PATH +_CONSOLE_PATH_WIDTH = 30 +_CONSOLE_USE_COLORS = hasattr(sys.stdout, "isatty") and sys.stdout.isatty() + +_CONSOLE_LEVEL_COLORS = { + "dbg": "\033[1;36m", # bold cyan + "inf": "\033[1;32m", # bold green + "war": "\033[1;33m", # bold yellow + "err": "\033[1;31m", # bold red + "cri": "\033[1;31m", # bold red +} +_CONSOLE_RESET = "\033[0m" +_CONSOLE_FIXED = "\033[2m" # dim +_CONSOLE_TEXT = "\033[0;34m" # blue +_CONSOLE_KEY = "\033[0;36m" # cyan +_CONSOLE_VAL = "\033[0;35m" # magenta +_CONSOLE_EQ = "\033[0;37m" # white + + +def _compact_console_processor(logger: Any, method_name: str, event_dict: Mapping[str, Any]) -> str: + """Format log lines as: HH:MM:SS.mmm[lvl][file.py ] Event key=value ...""" + event_dict = dict(event_dict) + + # Time — HH:MM:SS.mmm + timestamp = event_dict.pop("timestamp", "") + if timestamp: + try: + dt = datetime.fromisoformat(timestamp.replace("Z", "+00:00")) + time_str = dt.strftime("%H:%M:%S") + f".{dt.microsecond // 1000:03d}" + except (ValueError, AttributeError): + time_str = str(timestamp)[:12] + else: + now = datetime.now() + time_str = now.strftime("%H:%M:%S") + f".{now.microsecond // 1000:03d}" + + # Level — 3-letter lowercase abbreviation + level = event_dict.pop("level", "???") + level_short = level[:3].lower() + + # File path — fixed width, truncated from the left, padded on the right + file_path = event_dict.pop("logger", "") + if len(file_path) > _CONSOLE_PATH_WIDTH: + file_path = file_path[-_CONSOLE_PATH_WIDTH:] + file_path = f"{file_path:<{_CONSOLE_PATH_WIDTH}s}" + + # Event message + event = event_dict.pop("event", "") + + # Remove internal / callsite / exception fields + for key in ( + "func_name", + "lineno", + "exception", + "exc_info", + "exception_type", 
+ "exception_message", + "traceback_lines", + "_record", + "_from_structlog", + ): + event_dict.pop(key, None) + + # Assemble the line + if _CONSOLE_USE_COLORS: + R = _CONSOLE_RESET + color = _CONSOLE_LEVEL_COLORS.get(level_short, "") + line = ( + f"{_CONSOLE_FIXED}{time_str}{R}" + f"{color}[{level_short}]{R}" + f"{_CONSOLE_FIXED}[{file_path}]{R} " + f"{_CONSOLE_TEXT}{event}{R}" + ) + if event_dict: + kv_parts = " ".join( + f"{_CONSOLE_KEY}{k}{_CONSOLE_EQ}={_CONSOLE_VAL}{v}{R}" + for k, v in sorted(event_dict.items()) + ) + line += " " + kv_parts + else: + kv_str = " ".join(f"{k}={v}" for k, v in sorted(event_dict.items())) + line = f"{time_str} [{level_short}][{file_path}] {event}" + if kv_str: + line += " " + kv_str + + return line + + def setup_logger(*, level: int | None = None) -> Any: """Set up a structured logger using structlog. @@ -140,39 +226,10 @@ def setup_logger(*, level: int | None = None) -> Any: stdlib_logger.setLevel(level) stdlib_logger.propagate = False - # Create console handler with pretty formatting. - # We use exception_formatter=None because we handle exceptions - # separately with Rich in the global exception handler - - console_renderer = structlog.dev.ConsoleRenderer( - colors=True, - pad_event=60, - force_colors=False, - sort_keys=True, - # Don't format exceptions in console logs - exception_formatter=None, # type: ignore[arg-type] - ) - - # Wrapper to remove callsite info and exception details before rendering to console. 
- def console_processor_without_callsite( - logger: Any, method_name: str, event_dict: Mapping[str, Any] - ) -> str: - event_dict = dict(event_dict) - # Remove callsite info - event_dict.pop("func_name", None) - event_dict.pop("lineno", None) - # Remove exception fields since we handle them with Rich - event_dict.pop("exception", None) - event_dict.pop("exc_info", None) - event_dict.pop("exception_type", None) - event_dict.pop("exception_message", None) - event_dict.pop("traceback_lines", None) - return console_renderer(logger, method_name, event_dict) - console_handler = logging.StreamHandler(sys.stdout) console_handler.setLevel(level) console_formatter = structlog.stdlib.ProcessorFormatter( - processor=console_processor_without_callsite, + processor=_compact_console_processor, ) console_handler.setFormatter(console_formatter) stdlib_logger.addHandler(console_handler) diff --git a/dimos/utils/reactive.py b/dimos/utils/reactive.py index bfc9cd0465..4397e0171e 100644 --- a/dimos/utils/reactive.py +++ b/dimos/utils/reactive.py @@ -19,6 +19,7 @@ import reactivex as rx from reactivex import operators as ops +from reactivex.abc import DisposableBase from reactivex.disposable import Disposable from reactivex.observable import Observable from reactivex.scheduler import ThreadPoolScheduler @@ -64,7 +65,7 @@ def _subscribe(observer, sch=None): # type: ignore[no-untyped-def] return rx.defer(lambda *_: per_sub()) # type: ignore[no-untyped-call] -class LatestReader(Generic[T]): +class LatestReader(DisposableBase, Generic[T]): """A callable object that returns the latest value from an observable.""" def __init__(self, initial_value: T, subscription, connection=None) -> None: # type: ignore[no-untyped-def] @@ -193,6 +194,27 @@ def callback_to_observable( start: Callable[[CB[T]], Any], stop: Callable[[CB[T]], Any], ) -> Observable[T]: + """Convert a register/unregister callback API to an Observable. 
def to_observable(
    subscribe: Callable[[Callable[[T], Any]], Callable[[], None]],
) -> Observable[T]:
    """Convert a subscribe-returns-unsub API to an Observable.

    Use this for APIs where subscribe() returns an unsubscribe callable:

        unsub = pubsub.subscribe(callback)  # returns unsubscribe function
        unsub()  # to unsubscribe

    Example:
        obs = to_observable(pubsub.subscribe)
        sub = obs.subscribe(lambda x: print(x))
        # ...
        sub.dispose()  # calls the unsub function returned by pubsub.subscribe

    For APIs with separate register/unregister functions, use
    callback_to_observable() instead.
    """

    def _on_subscribe(observer, _scheduler=None):  # type: ignore[no-untyped-def]
        # Register the observer's on_next directly; the returned teardown
        # becomes the subscription's dispose action.
        teardown = subscribe(observer.on_next)
        return Disposable(teardown)

    return rx.create(_on_subscribe)
+ +from threading import RLock + + +class SequentialIds: + def __init__(self) -> None: + self._value = 0 + self._lock: RLock = RLock() + + def next(self) -> int: + with self._lock: + v = self._value + self._value += 1 + return v diff --git a/dimos/utils/test_data.py b/dimos/utils/test_data.py index 01f145f60c..e5be4307c7 100644 --- a/dimos/utils/test_data.py +++ b/dimos/utils/test_data.py @@ -14,11 +14,13 @@ import hashlib import os +from pathlib import Path import subprocess import pytest from dimos.utils import data +from dimos.utils.data import LfsPath @pytest.mark.heavy @@ -26,7 +28,7 @@ def test_pull_file() -> None: repo_root = data._get_repo_root() test_file_name = "cafe.jpg" test_file_compressed = data._get_lfs_dir() / (test_file_name + ".tar.gz") - test_file_decompressed = data._get_data_dir() / test_file_name + test_file_decompressed = data.get_data_dir() / test_file_name # delete decompressed test file if it exists if test_file_decompressed.exists(): @@ -82,7 +84,7 @@ def test_pull_dir() -> None: repo_root = data._get_repo_root() test_dir_name = "ab_lidar_frames" test_dir_compressed = data._get_lfs_dir() / (test_dir_name + ".tar.gz") - test_dir_decompressed = data._get_data_dir() / test_dir_name + test_dir_decompressed = data.get_data_dir() / test_dir_name # delete decompressed test directory if it exists if test_dir_decompressed.exists(): @@ -128,3 +130,220 @@ def test_pull_dir() -> None: with file.open("rb") as f: sha256 = hashlib.sha256(f.read()).hexdigest() assert sha256 == expected_hash + + +# ============================================================================ +# LfsPath Tests +# ============================================================================ + + +def test_lfs_path_lazy_creation() -> None: + """Test that creating LfsPath doesn't trigger download.""" + lfs_path = LfsPath("test_data_file") + + # Check that the object is created + assert isinstance(lfs_path, LfsPath) + + # Check that cache is None (not downloaded yet) + cache = 
object.__getattribute__(lfs_path, "_lfs_resolved_cache") + assert cache is None + + # Check that filename is stored + filename = object.__getattribute__(lfs_path, "_lfs_filename") + assert filename == "test_data_file" + + +def test_lfs_path_safe_attributes() -> None: + """Test that safe attributes don't trigger download.""" + lfs_path = LfsPath("test_data_file") + + # Access safe attributes directly + filename = object.__getattribute__(lfs_path, "_lfs_filename") + cache = object.__getattribute__(lfs_path, "_lfs_resolved_cache") + ensure_fn = object.__getattribute__(lfs_path, "_ensure_downloaded") + + # Verify they exist and cache is still None + assert filename == "test_data_file" + assert cache is None + assert callable(ensure_fn) + + +def test_lfs_path_no_download_on_creation() -> None: + """Test that LfsPath construction doesn't trigger download. + + Path(lfs_path) extracts internal _raw_paths (\".\") and does NOT + call __fspath__, so it won't trigger download. The correct way to + convert is Path(str(lfs_path)), which triggers __str__ -> download. 
+ """ + lfs_path = LfsPath("nonexistent_file") + + # Construction should not trigger download + cache = object.__getattribute__(lfs_path, "_lfs_resolved_cache") + assert cache is None + + # Accessing internal LfsPath attributes should not trigger download + filename = object.__getattribute__(lfs_path, "_lfs_filename") + assert filename == "nonexistent_file" + assert cache is None + + +@pytest.mark.heavy +def test_lfs_path_with_real_file() -> None: + """Test LfsPath with a real small LFS file.""" + # Use a small existing LFS file + filename = "three_paths.png" + lfs_path = LfsPath(filename) + + # Initially, cache should be None + cache = object.__getattribute__(lfs_path, "_lfs_resolved_cache") + assert cache is None + + # Access a Path method - this should trigger download + exists = lfs_path.exists() + + # Now cache should be populated + cache = object.__getattribute__(lfs_path, "_lfs_resolved_cache") + assert cache is not None + assert isinstance(cache, Path) + + # File should exist after download + assert exists is True + + # Should be able to get file stats + stat_result = lfs_path.stat() + assert stat_result.st_size > 0 + + # Should be able to read the file + content = lfs_path.read_bytes() + assert len(content) > 0 + + # Verify it's a PNG file + assert content.startswith(b"\x89PNG") + + +@pytest.mark.heavy +def test_lfs_path_unload_and_reload() -> None: + """Test unloading and reloading an LFS file.""" + filename = "three_paths.png" + data_dir = data.get_data_dir() + file_path = data_dir / filename + + # Clean up if file already exists + if file_path.exists(): + file_path.unlink() + + # Create LfsPath + lfs_path = LfsPath(filename) + + # Verify file doesn't exist yet + assert not file_path.exists() + + # Access the file - this triggers download + content_first = lfs_path.read_bytes() + assert file_path.exists() + + # Get hash of first download + hash_first = hashlib.sha256(content_first).hexdigest() + + # Now unload (delete the file) + file_path.unlink() + 
assert not file_path.exists() + + # Create a new LfsPath instance for the same file + lfs_path_2 = LfsPath(filename) + + # Access the file again - should re-download + content_second = lfs_path_2.read_bytes() + assert file_path.exists() + + # Get hash of second download + hash_second = hashlib.sha256(content_second).hexdigest() + + # Hashes should match (same file downloaded) + assert hash_first == hash_second + + # Content should be identical + assert content_first == content_second + + +@pytest.mark.heavy +def test_lfs_path_operations() -> None: + """Test various Path operations with LfsPath.""" + filename = "three_paths.png" + lfs_path = LfsPath(filename) + + # Test is_file + assert lfs_path.is_file() is True + assert lfs_path.is_dir() is False + + # Test absolute path + abs_path = lfs_path.absolute() + assert abs_path.is_absolute() + + # Test resolve + resolved = lfs_path.resolve() + assert resolved.is_absolute() + + # Test string conversion + path_str = str(lfs_path) + assert isinstance(path_str, str) + assert filename in path_str + + # Test __fspath__ + fspath_result = os.fspath(lfs_path) + assert isinstance(fspath_result, str) + assert filename in fspath_result + + +@pytest.mark.heavy +def test_lfs_path_division_operator() -> None: + """Test path division operator with LfsPath.""" + # Use a directory for testing + lfs_path = LfsPath("three_paths.png") + + # Test truediv - this should trigger download and return resolved path + result = lfs_path / "subpath" + assert isinstance(result, Path) + + # The result should be the resolved path with subpath appended + assert "three_paths.png" in str(result) + + +@pytest.mark.heavy +def test_lfs_path_multiple_instances() -> None: + """Test that multiple LfsPath instances for same file work correctly.""" + filename = "three_paths.png" + + # Create two separate instances + lfs_path_1 = LfsPath(filename) + lfs_path_2 = LfsPath(filename) + + # Both should start with None cache + cache_1 = object.__getattribute__(lfs_path_1, 
"_lfs_resolved_cache") + cache_2 = object.__getattribute__(lfs_path_2, "_lfs_resolved_cache") + assert cache_1 is None + assert cache_2 is None + + # Access file through first instance + content_1 = lfs_path_1.read_bytes() + + # First instance should have cache + cache_1 = object.__getattribute__(lfs_path_1, "_lfs_resolved_cache") + assert cache_1 is not None + + # Second instance cache should still be None (separate instance) + cache_2 = object.__getattribute__(lfs_path_2, "_lfs_resolved_cache") + assert cache_2 is None + + # Access through second instance + content_2 = lfs_path_2.read_bytes() + + # Now second instance should also have cache + cache_2 = object.__getattribute__(lfs_path_2, "_lfs_resolved_cache") + assert cache_2 is not None + + # Content should be the same + assert content_1 == content_2 + + # Both caches should point to the same file + assert cache_1 == cache_2 diff --git a/dimos/utils/test_foxglove_bridge.py b/dimos/utils/test_foxglove_bridge.py index c45dcde660..cbac324c26 100644 --- a/dimos/utils/test_foxglove_bridge.py +++ b/dimos/utils/test_foxglove_bridge.py @@ -29,6 +29,8 @@ def test_foxglove_bridge_import() -> None: """Test that the foxglove bridge can be imported successfully.""" try: from dimos_lcm.foxglove_bridge import FoxgloveBridge + + assert FoxgloveBridge is not None except ImportError as e: pytest.fail(f"Failed to import foxglove bridge: {e}") diff --git a/dimos/utils/testing/__init__.py b/dimos/utils/testing/__init__.py index ffb640de39..568cd3604f 100644 --- a/dimos/utils/testing/__init__.py +++ b/dimos/utils/testing/__init__.py @@ -1,11 +1,9 @@ -from dimos.utils.testing.moment import Moment, OutputMoment, SensorMoment -from dimos.utils.testing.replay import SensorReplay, TimedSensorReplay, TimedSensorStorage +import lazy_loader as lazy -__all__ = [ - "Moment", - "OutputMoment", - "SensorMoment", - "SensorReplay", - "TimedSensorReplay", - "TimedSensorStorage", -] +__getattr__, __dir__, __all__ = lazy.attach( + __name__, + 
submod_attrs={ + "moment": ["Moment", "OutputMoment", "SensorMoment"], + "replay": ["SensorReplay", "TimedSensorReplay", "TimedSensorStorage"], + }, +) diff --git a/dimos/utils/testing/moment.py b/dimos/utils/testing/moment.py index 436240a48b..e92d771687 100644 --- a/dimos/utils/testing/moment.py +++ b/dimos/utils/testing/moment.py @@ -17,12 +17,13 @@ from typing import TYPE_CHECKING, Any, Generic, TypeVar from dimos.core.resource import Resource +from dimos.types.timestamped import Timestamped from dimos.utils.testing.replay import TimedSensorReplay if TYPE_CHECKING: from dimos.core import Transport -T = TypeVar("T") +T = TypeVar("T", bound=Timestamped) class SensorMoment(Generic[T], Resource): diff --git a/dimos/utils/testing/replay.py b/dimos/utils/testing/replay.py index 89225c322e..588b63e099 100644 --- a/dimos/utils/testing/replay.py +++ b/dimos/utils/testing/replay.py @@ -11,399 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from collections.abc import Callable, Iterator -import functools -import glob -import os -from pathlib import Path -import pickle -import re -import time -from typing import Any, Generic, TypeVar -from reactivex import ( - from_iterable, - interval, - operators as ops, -) -from reactivex.observable import Observable -from reactivex.scheduler import TimeoutScheduler +"""Shim for TimedSensorReplay/TimedSensorStorage.""" -from dimos.utils.data import _get_data_dir, get_data +from dimos.memory.timeseries.legacy import LegacyPickleStore -T = TypeVar("T") - - -class SensorReplay(Generic[T]): - """Generic sensor data replay utility. - - Args: - name: The name of the test dataset - autocast: Optional function that takes unpickled data and returns a processed result. 
- For example: pointcloud2_from_webrtc_lidar - """ - - def __init__(self, name: str, autocast: Callable[[Any], T] | None = None) -> None: - self.root_dir = get_data(name) - self.autocast = autocast - - def load(self, *names: int | str) -> T | Any | list[T] | list[Any]: - if len(names) == 1: - return self.load_one(names[0]) - return list(map(lambda name: self.load_one(name), names)) - - def load_one(self, name: int | str | Path) -> T | Any: - if isinstance(name, int): - full_path = self.root_dir / f"/{name:03d}.pickle" - elif isinstance(name, Path): - full_path = name - else: - full_path = self.root_dir / Path(f"{name}.pickle") - - with open(full_path, "rb") as f: - data = pickle.load(f) - if self.autocast: - return self.autocast(data) - return data - - def first(self) -> T | Any | None: - try: - return next(self.iterate()) - except StopIteration: - return None - - @functools.cached_property - def files(self) -> list[Path]: - def extract_number(filepath): # type: ignore[no-untyped-def] - """Extract last digits before .pickle extension""" - basename = os.path.basename(filepath) - match = re.search(r"(\d+)\.pickle$", basename) - return int(match.group(1)) if match else 0 - - return sorted( - glob.glob(os.path.join(self.root_dir, "*")), # type: ignore[arg-type] - key=extract_number, - ) - - def iterate(self, loop: bool = False) -> Iterator[T | Any]: - while True: - for file_path in self.files: - yield self.load_one(Path(file_path)) - if not loop: - break - - def stream(self, rate_hz: float | None = None, loop: bool = False) -> Observable[T | Any]: - if rate_hz is None: - return from_iterable(self.iterate(loop=loop)) - - sleep_time = 1.0 / rate_hz - - return from_iterable(self.iterate(loop=loop)).pipe( - ops.zip(interval(sleep_time)), - ops.map(lambda x: x[0] if isinstance(x, tuple) else x), - ) - - -class SensorStorage(Generic[T]): - """Generic sensor data storage utility - . - Creates a directory in the test data directory and stores pickled sensor data. 
- - Args: - name: The name of the storage directory - autocast: Optional function that takes data and returns a processed result before storage. - """ - - def __init__(self, name: str, autocast: Callable[[T], Any] | None = None) -> None: - self.name = name - self.autocast = autocast - self.cnt = 0 - - # Create storage directory in the data dir - self.root_dir = _get_data_dir() / name - - # Check if directory exists and is not empty - if self.root_dir.exists(): - existing_files = list(self.root_dir.glob("*.pickle")) - if existing_files: - raise RuntimeError( - f"Storage directory '{name}' already exists and contains {len(existing_files)} files. " - f"Please use a different name or clean the directory first." - ) - else: - # Create the directory - self.root_dir.mkdir(parents=True, exist_ok=True) - - def consume_stream(self, observable: Observable[T | Any]) -> None: - """Consume an observable stream of sensor data without saving.""" - return observable.subscribe(self.save_one) # type: ignore[arg-type, return-value] - - def save_stream(self, observable: Observable[T | Any]) -> Observable[int]: - """Save an observable stream of sensor data to pickle files.""" - return observable.pipe(ops.map(lambda frame: self.save_one(frame))) - - def save(self, *frames) -> int: # type: ignore[no-untyped-def] - """Save one or more frames to pickle files.""" - for frame in frames: - self.save_one(frame) - return self.cnt - - def save_one(self, frame) -> int: # type: ignore[no-untyped-def] - """Save a single frame to a pickle file.""" - file_name = f"{self.cnt:03d}.pickle" - full_path = self.root_dir / file_name - - if full_path.exists(): - raise RuntimeError(f"File {full_path} already exists") - - # Apply autocast if provided - data_to_save = frame - if self.autocast: - data_to_save = self.autocast(frame) - # Convert to raw message if frame has a raw_msg attribute - elif hasattr(frame, "raw_msg"): - data_to_save = frame.raw_msg - - with open(full_path, "wb") as f: - 
pickle.dump(data_to_save, f) - - self.cnt += 1 - return self.cnt - - -class TimedSensorStorage(SensorStorage[T]): - def save_one(self, frame: T) -> int: - return super().save_one((time.time(), frame)) - - -class TimedSensorReplay(SensorReplay[T]): - def load_one(self, name: int | str | Path) -> T | Any: - if isinstance(name, int): - full_path = self.root_dir / f"/{name:03d}.pickle" - elif isinstance(name, Path): - full_path = name - else: - full_path = self.root_dir / Path(f"{name}.pickle") - - with open(full_path, "rb") as f: - data = pickle.load(f) - if self.autocast: - return (data[0], self.autocast(data[1])) - return data - - def find_closest(self, timestamp: float, tolerance: float | None = None) -> T | Any | None: - """Find the frame closest to the given timestamp. - - Args: - timestamp: The target timestamp to search for - tolerance: Optional maximum time difference allowed - - Returns: - The data frame closest to the timestamp, or None if no match within tolerance - """ - closest_data = None - closest_diff = float("inf") - - # Check frames before and after the timestamp - for ts, data in self.iterate_ts(): - diff = abs(ts - timestamp) - - if diff < closest_diff: - closest_diff = diff - closest_data = data - elif diff > closest_diff: - # We're moving away from the target, can stop - break - - if tolerance is not None and closest_diff > tolerance: - return None - - return closest_data - - def find_closest_seek( - self, relative_seconds: float, tolerance: float | None = None - ) -> T | Any | None: - """Find the frame closest to a time relative to the start. 
- - Args: - relative_seconds: Seconds from the start of the dataset - tolerance: Optional maximum time difference allowed - - Returns: - The data frame closest to the relative timestamp, or None if no match within tolerance - """ - # Get the first timestamp - first_ts = self.first_timestamp() - if first_ts is None: - return None - - # Calculate absolute timestamp and use find_closest - target_timestamp = first_ts + relative_seconds - return self.find_closest(target_timestamp, tolerance) - - def first_timestamp(self) -> float | None: - """Get the timestamp of the first item in the dataset. - - Returns: - The first timestamp, or None if dataset is empty - """ - try: - ts, _ = next(self.iterate_ts()) - return ts - except StopIteration: - return None - - def iterate(self, loop: bool = False) -> Iterator[T | Any]: - return (x[1] for x in super().iterate(loop=loop)) # type: ignore[index] - - def iterate_duration(self, **kwargs: Any) -> Iterator[tuple[float, T] | Any]: - """Iterate with timestamps relative to the start of the dataset.""" - first_ts = self.first_timestamp() - if first_ts is None: - return - for ts, data in self.iterate_ts(**kwargs): - yield (ts - first_ts, data) - - def iterate_realtime(self, speed: float = 1.0, **kwargs: Any) -> Iterator[T | Any]: - """Iterate data, sleeping to match original timing. 
- - Args: - speed: Playback speed multiplier (1.0 = realtime, 2.0 = 2x speed) - **kwargs: Passed to iterate_ts (seek, duration, from_timestamp, loop) - """ - iterator = self.iterate_ts(**kwargs) - - try: - first_ts, first_data = next(iterator) - except StopIteration: - return - - start_time = time.time() - start_ts = first_ts - yield first_data - - for ts, data in iterator: - target_time = start_time + (ts - start_ts) / speed - sleep_duration = target_time - time.time() - if sleep_duration > 0: - time.sleep(sleep_duration) - yield data - - def iterate_ts( - self, - seek: float | None = None, - duration: float | None = None, - from_timestamp: float | None = None, - loop: bool = False, - ) -> Iterator[tuple[float, T] | Any]: - """Iterate with absolute timestamps, with optional seek and duration.""" - first_ts = None - if (seek is not None) or (duration is not None): - first_ts = self.first_timestamp() - if first_ts is None: - return - - if seek is not None: - from_timestamp = first_ts + seek # type: ignore[operator] - - end_timestamp = None - if duration is not None: - end_timestamp = (from_timestamp if from_timestamp else first_ts) + duration # type: ignore[operator] - - while True: - for ts, data in super().iterate(): # type: ignore[misc] - if from_timestamp is None or ts >= from_timestamp: - if end_timestamp is not None and ts >= end_timestamp: - break - yield (ts, data) - if not loop: - break - - def stream( # type: ignore[override] - self, - speed: float = 1.0, - seek: float | None = None, - duration: float | None = None, - from_timestamp: float | None = None, - loop: bool = False, - ) -> Observable[T | Any]: - def _subscribe(observer, scheduler=None): # type: ignore[no-untyped-def] - from reactivex.disposable import CompositeDisposable, Disposable - - scheduler = scheduler or TimeoutScheduler() - disp = CompositeDisposable() - is_disposed = False - - iterator = self.iterate_ts( - seek=seek, duration=duration, from_timestamp=from_timestamp, loop=loop - ) - - # 
Get first message - try: - first_ts, first_data = next(iterator) - except StopIteration: - observer.on_completed() - return Disposable() - - # Establish timing reference - start_local_time = time.time() - start_replay_time = first_ts - - # Emit first sample immediately - observer.on_next(first_data) - - # Pre-load next message - try: - next_message = next(iterator) - except StopIteration: - observer.on_completed() - return disp - - def schedule_emission(message) -> None: # type: ignore[no-untyped-def] - nonlocal next_message, is_disposed - - if is_disposed: - return - - ts, data = message - - # Pre-load the following message while we have time - try: - next_message = next(iterator) - except StopIteration: - next_message = None - - # Calculate absolute emission time - target_time = start_local_time + (ts - start_replay_time) / speed - delay = max(0.0, target_time - time.time()) - - def emit() -> None: - if is_disposed: - return - observer.on_next(data) - if next_message is not None: - schedule_emission(next_message) - else: - observer.on_completed() - # Dispose of the scheduler to clean up threads - if hasattr(scheduler, "dispose"): - scheduler.dispose() - - scheduler.schedule_relative(delay, lambda sc, _: emit()) - - schedule_emission(next_message) - - # Create a custom disposable that properly cleans up - def dispose() -> None: - nonlocal is_disposed - is_disposed = True - disp.dispose() - # Ensure scheduler is disposed to clean up any threads - if hasattr(scheduler, "dispose"): - scheduler.dispose() - - return Disposable(dispose) - - from reactivex import create - - return create(_subscribe) +SensorReplay = LegacyPickleStore +SensorStorage = LegacyPickleStore +TimedSensorReplay = LegacyPickleStore +TimedSensorStorage = LegacyPickleStore diff --git a/dimos/utils/testing/test_moment.py b/dimos/utils/testing/test_moment.py index 92b71e59ac..6764610d0e 100644 --- a/dimos/utils/testing/test_moment.py +++ b/dimos/utils/testing/test_moment.py @@ -17,7 +17,7 @@ from 
dimos.msgs.geometry_msgs import PoseStamped, Transform from dimos.msgs.sensor_msgs import CameraInfo, Image, PointCloud2 from dimos.protocol.tf import TF -from dimos.robot.unitree.connection import go2 +from dimos.robot.unitree.go2 import connection from dimos.utils.data import get_data from dimos.utils.testing.moment import Moment, SensorMoment @@ -43,14 +43,14 @@ def transforms(self) -> list[Transform]: # back and forth through time and foxglove doesn't get confused odom = self.odom.value odom.ts = time.time() - return go2.GO2Connection._odom_to_tf(odom) + return connection.GO2Connection._odom_to_tf(odom) def publish(self) -> None: t = TF() t.publish(*self.transforms) t.stop() - camera_info = go2._camera_info_static() + camera_info = connection._camera_info_static() camera_info.ts = time.time() camera_info_transport: LCMTransport[CameraInfo] = LCMTransport("/camera_info", CameraInfo) camera_info_transport.publish(camera_info) diff --git a/dimos/utils/testing/test_replay.py b/dimos/utils/testing/test_replay.py index 640fe92979..e3020777b4 100644 --- a/dimos/utils/testing/test_replay.py +++ b/dimos/utils/testing/test_replay.py @@ -17,33 +17,15 @@ from reactivex import operators as ops from dimos.msgs.sensor_msgs import PointCloud2 -from dimos.robot.unitree_webrtc.type.lidar import pointcloud2_from_webrtc_lidar -from dimos.robot.unitree_webrtc.type.odometry import Odometry +from dimos.robot.unitree.type.lidar import pointcloud2_from_webrtc_lidar +from dimos.robot.unitree.type.odometry import Odometry from dimos.utils.data import get_data from dimos.utils.testing import replay -def test_sensor_replay() -> None: - counter = 0 - for message in replay.SensorReplay(name="office_lidar").iterate(): - counter += 1 - assert isinstance(message, dict) - assert counter == 500 - - -def test_sensor_replay_cast() -> None: - counter = 0 - for message in replay.SensorReplay( - name="office_lidar", autocast=pointcloud2_from_webrtc_lidar - ).iterate(): - counter += 1 - assert 
isinstance(message, PointCloud2) - assert counter == 500 - - def test_timed_sensor_replay() -> None: get_data("unitree_office_walk") - odom_store = replay.TimedSensorReplay("unitree_office_walk/odom", autocast=Odometry.from_msg) + odom_store = replay.TimedSensorReplay("unitree_office_walk/odom") itermsgs = [] for msg in odom_store.iterate(): @@ -87,7 +69,7 @@ def test_iterate_ts_no_seek() -> None: def test_iterate_ts_with_from_timestamp() -> None: """Test iterate_ts with from_timestamp (absolute timestamp)""" - odom_store = replay.TimedSensorReplay("unitree_office_walk/odom", autocast=Odometry.from_msg) + odom_store = replay.TimedSensorReplay("unitree_office_walk/odom") # First get all messages to find a good seek point all_msgs = [] @@ -115,7 +97,7 @@ def test_iterate_ts_with_from_timestamp() -> None: def test_iterate_ts_with_relative_seek() -> None: """Test iterate_ts with seek (relative seconds after first timestamp)""" - odom_store = replay.TimedSensorReplay("unitree_office_walk/odom", autocast=Odometry.from_msg) + odom_store = replay.TimedSensorReplay("unitree_office_walk/odom") # Get first few messages to understand timing all_msgs = [] @@ -144,7 +126,7 @@ def test_iterate_ts_with_relative_seek() -> None: def test_stream_with_seek() -> None: """Test stream method with seek parameters""" - odom_store = replay.TimedSensorReplay("unitree_office_walk/odom", autocast=Odometry.from_msg) + odom_store = replay.TimedSensorReplay("unitree_office_walk/odom") # Test stream with relative seek msgs_with_seek = [] @@ -170,7 +152,7 @@ def test_stream_with_seek() -> None: def test_duration_with_loop() -> None: """Test duration parameter with looping in TimedSensorReplay""" - odom_store = replay.TimedSensorReplay("unitree_office_walk/odom", autocast=Odometry.from_msg) + odom_store = replay.TimedSensorReplay("unitree_office_walk/odom") # Collect timestamps from a small duration window collected_ts = [] diff --git a/dimos/visualization/rerun/bridge.py 
b/dimos/visualization/rerun/bridge.py new file mode 100644 index 0000000000..1dc104f1b4 --- /dev/null +++ b/dimos/visualization/rerun/bridge.py @@ -0,0 +1,346 @@ +# Copyright 2025-2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Rerun bridge for logging pubsub messages with to_rerun() methods.""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from functools import lru_cache +from typing import ( + TYPE_CHECKING, + Any, + Literal, + Protocol, + TypeAlias, + TypeGuard, + cast, + runtime_checkable, +) + +from reactivex.disposable import Disposable +from toolz import pipe # type: ignore[import-untyped] +import typer + +from dimos.core import Module, rpc +from dimos.core.module import ModuleConfig +from dimos.protocol.pubsub.impl.lcmpubsub import LCM +from dimos.protocol.pubsub.patterns import Glob, pattern_matches +from dimos.utils.logging_config import setup_logger + +RERUN_GRPC_PORT = 9876 +RERUN_WEB_PORT = 9090 + +# TODO OUT visual annotations +# +# In the future it would be nice if modules can annotate their individual OUTs with (general or rerun specific) +# hints related to their visualization +# +# so stuff like color, update frequency etc (some Image needs to be rendered on the 3d floor like occupancy grid) +# some other image is an image to be streamed into a specific 2D view etc. +# +# To achieve this we'd feed a full blueprint into the rerun bridge. 
+# +# rerun bridge can then inspect all transports used, all modules with their outs, +# automatically spy on all the transports and read visualization hints +# +# Temporarily we are using these "sideloading" visual_override={} dict on the bridge +# to define custom visualizations for specific topics +# +# as well as pubsubs={} to specify which protocols to listen to. + + +# TODO better TF processing +# +# this is rerun bridge specific, rerun has a specific (better) way of handling TFs +# using entity path conventions, each of these nodes in a path is a TF frame: +# +# /world/robot1/base_link/camera/optical +# +# While here since we are just listening on TFMessage messages which optionally contain +# just a subset of full TF tree we don't know the full tree structure to build full entity +# path for a transform being published +# +# This is easy to reconstruct but a service/tf.py already does this so should be integrated here +# +# we have decoupled entity paths and actual transforms (like ROS TF frames) +# https://rerun.io/docs/concepts/logging-and-ingestion/transforms +# +# tf#/world +# tf#/base_link +# tf#/camera +# +# In order to solve this, bridge needs to own its own tf service +# and render its tf tree into correct rerun entity paths + + +logger = setup_logger() + +if TYPE_CHECKING: + from collections.abc import Callable + + from rerun._baseclasses import Archetype + from rerun.blueprint import Blueprint + + from dimos.protocol.pubsub.spec import SubscribeAllCapable + +BlueprintFactory: TypeAlias = "Callable[[], Blueprint]" + +# to_rerun() can return a single archetype or a list of (entity_path, archetype) tuples +RerunMulti: TypeAlias = "list[tuple[str, Archetype]]" +RerunData: TypeAlias = "Archetype | RerunMulti" + + +def is_rerun_multi(data: Any) -> TypeGuard[RerunMulti]: + """Check if data is a list of (entity_path, archetype) tuples.""" + from rerun._baseclasses import Archetype + + return ( + isinstance(data, list) + and bool(data) + and 
isinstance(data[0], tuple) + and len(data[0]) == 2 + and isinstance(data[0][0], str) + and isinstance(data[0][1], Archetype) + ) + + +@runtime_checkable +class RerunConvertible(Protocol): + """Protocol for messages that can be converted to Rerun data.""" + + def to_rerun(self) -> RerunData: ... + + +ViewerMode = Literal["native", "web", "none"] + + +def _default_blueprint() -> Blueprint: + """Default blueprint with black background and raised grid.""" + import rerun as rr + import rerun.blueprint as rrb + + return rrb.Blueprint( # type: ignore[no-any-return] + rrb.Spatial3DView( + origin="world", + background=rrb.Background(kind="SolidColor", color=[0, 0, 0]), + line_grid=rrb.LineGrid3D( + plane=rr.components.Plane3D.XY.with_distance(0.2), + ), + ), + ) + + +@dataclass +class Config(ModuleConfig): + """Configuration for RerunBridgeModule.""" + + pubsubs: list[SubscribeAllCapable[Any, Any]] = field( + default_factory=lambda: [LCM(autoconf=True)] + ) + + visual_override: dict[Glob | str, Callable[[Any], Archetype]] = field(default_factory=dict) + + # Static items logged once after start. Maps entity_path -> callable(rr) returning Archetype + static: dict[str, Callable[[Any], Archetype]] = field(default_factory=dict) + + entity_prefix: str = "world" + topic_to_entity: Callable[[Any], str] | None = None + viewer_mode: ViewerMode = "native" + memory_limit: str = "25%" + + # Blueprint factory: callable(rrb) -> Blueprint for viewer layout configuration + # Set to None to disable default blueprint + blueprint: BlueprintFactory | None = _default_blueprint + + +class RerunBridgeModule(Module): + """Bridge that logs messages from pubsubs to Rerun. + + Spawns its own Rerun viewer and subscribes to all topics on each provided + pubsub. Any message that has a to_rerun() method is automatically logged. 
+ + Example: + from dimos.protocol.pubsub.impl.lcmpubsub import LCM + + lcm = LCM(autoconf=True) + bridge = RerunBridgeModule(pubsubs=[lcm]) + bridge.start() + # All messages with to_rerun() are now logged to Rerun + bridge.stop() + """ + + default_config = Config + config: Config + + @lru_cache(maxsize=256) + def _visual_override_for_entity_path( + self, entity_path: str + ) -> Callable[[Any], RerunData | None]: + """Return a composed visual override for the entity path. + + Chains matching overrides from config, ending with final_convert + which handles .to_rerun() or passes through Archetypes. + """ + from rerun._baseclasses import Archetype + + # find all matching converters for this entity path + matches = [ + fn + for pattern, fn in self.config.visual_override.items() + if pattern_matches(pattern, entity_path) + ] + + # None means "suppress this topic entirely" + if any(fn is None for fn in matches): + return lambda msg: None + + # final step (ensures we return Archetype or None) + def final_convert(msg: Any) -> RerunData | None: + if isinstance(msg, Archetype): + return msg + if is_rerun_multi(msg): + return msg + if isinstance(msg, RerunConvertible): + return msg.to_rerun() + return None + + # compose all converters + return lambda msg: pipe(msg, *matches, final_convert) + + def _get_entity_path(self, topic: Any) -> str: + """Convert a topic to a Rerun entity path.""" + if self.config.topic_to_entity: + return self.config.topic_to_entity(topic) + + # Default: use topic.name if available (LCM Topic), else str + topic_str = getattr(topic, "name", None) or str(topic) + # Strip everything after # (LCM topic suffix) + topic_str = topic_str.split("#")[0] + return f"{self.config.entity_prefix}{topic_str}" + + def _on_message(self, msg: Any, topic: Any) -> None: + """Handle incoming message - log to rerun.""" + import rerun as rr + + # convert a potentially complex topic object into an str rerun entity path + entity_path: str = self._get_entity_path(topic) + + # 
apply visual overrides (including final_convert which handles .to_rerun()) + rerun_data: RerunData | None = self._visual_override_for_entity_path(entity_path)(msg) + + # converters can also suppress logging by returning None + if not rerun_data: + return + + # TFMessage for example returns list of (entity_path, archetype) tuples + if is_rerun_multi(rerun_data): + for path, archetype in rerun_data: + rr.log(path, archetype) + else: + rr.log(entity_path, cast("Archetype", rerun_data)) + + @rpc + def start(self) -> None: + import rerun as rr + + super().start() + + # Initialize and spawn Rerun viewer + rr.init("dimos") + + if self.config.viewer_mode == "native": + rr.spawn(connect=True, memory_limit=self.config.memory_limit) + elif self.config.viewer_mode == "web": + server_uri = rr.serve_grpc() + rr.serve_web_viewer(connect_to=server_uri, open_browser=False) + # "none" - just init, no viewer (connect externally) + + if self.config.blueprint: + rr.send_blueprint(self.config.blueprint()) + + # Start pubsubs and subscribe to all messages + for pubsub in self.config.pubsubs: + logger.info(f"bridge listening on {pubsub.__class__.__name__}") + if hasattr(pubsub, "start"): + pubsub.start() # type: ignore[union-attr] + unsub = pubsub.subscribe_all(self._on_message) + self._disposables.add(Disposable(unsub)) + + # Add pubsub stop as disposable + for pubsub in self.config.pubsubs: + if hasattr(pubsub, "stop"): + self._disposables.add(Disposable(pubsub.stop)) # type: ignore[union-attr] + + self._log_static() + + def _log_static(self) -> None: + import rerun as rr + + for entity_path, factory in self.config.static.items(): + data = factory(rr) + if isinstance(data, list): + for archetype in data: + rr.log(entity_path, archetype, static=True) + else: + rr.log(entity_path, data, static=True) + + @rpc + def stop(self) -> None: + super().stop() + + +def run_bridge( + viewer_mode: str = "native", + memory_limit: str = "25%", +) -> None: + """Start a RerunBridgeModule with default LCM 
config and block until interrupted.""" + import signal + + bridge = RerunBridgeModule( + viewer_mode=viewer_mode, + memory_limit=memory_limit, + # any pubsub that supports subscribe_all and topic that supports str(topic) + # is acceptable here + pubsubs=[LCM(autoconf=True)], + ) + + bridge.start() + + signal.signal(signal.SIGINT, lambda *_: bridge.stop()) + signal.pause() + + +app = typer.Typer() + + +@app.command() +def cli( + viewer_mode: str = typer.Option( + "native", help="Viewer mode: native (desktop), web (browser), none (headless)" + ), + memory_limit: str = typer.Option( + "25%", help="Memory limit for Rerun viewer (e.g., '4GB', '16GB', '25%')" + ), +) -> None: + """Rerun bridge for LCM messages.""" + run_bridge(viewer_mode=viewer_mode, memory_limit=memory_limit) + + +if __name__ == "__main__": + app() + +# you don't need to include this in your blueprint if you are not creating a +# custom rerun configuration for your deployment, you can also run rerun-bridge standalone +rerun_bridge = RerunBridgeModule.blueprint diff --git a/dimos/web/README.md b/dimos/web/README.md deleted file mode 100644 index 28f418bb55..0000000000 --- a/dimos/web/README.md +++ /dev/null @@ -1,126 +0,0 @@ -# DimOS Robot Web Interface - -A streamlined interface for controlling and interacting with robots through DimOS. - -## Setup - -First, create an `.env` file in the root dimos directory with your configuration: - -```bash -# Example .env file -OPENAI_API_KEY=sk-your-openai-api-key -ROBOT_IP=192.168.x.x -CONN_TYPE=webrtc -WEBRTC_SERVER_HOST=0.0.0.0 -WEBRTC_SERVER_PORT=9991 -DISPLAY=:0 -``` - -## Unitree Go2 Example - -Running a full stack for Unitree Go2 requires three components: - -### 1. Start ROS2 Robot Driver - -```bash -# Source ROS environment -source /opt/ros/humble/setup.bash -source ~/your_ros_workspace/install/setup.bash - -# Launch robot driver -ros2 launch go2_robot_sdk robot.launch.py -``` - -### 2. 
Start DimOS Backend - -```bash -# In a new terminal, source your Python environment -source venv/bin/activate # Or your environment - -# Install requirements -pip install -r requirements.txt - -# Source ROS workspace (needed for robot communication) -source /opt/ros/humble/setup.bash -source ~/your_ros_workspace/install/setup.bash - -# Run the server with Robot() and Agent() initialization -python tests/test_unitree_agent_queries_fastapi.py -``` - -### 3. Start Frontend - -**Install yarn if not already installed** - -```bash -npm install -g yarn -``` - -**Then install dependencies and start the development server** - -```bash -# In a new terminal -cd dimos/web/dimos-interface - -# Install dependencies (first time only) -yarn install - -# Start development server -yarn dev -``` - -The frontend will be available at http://localhost:3000 - -## Using the Interface - -1. Access the web terminal at http://localhost:3000 -2. Type commands to control your robot: - - `unitree command ` - Send a command to the robot - - `unitree status` - Check connection status - - `unitree start_stream` - Start the video stream - - `unitree stop_stream` - Stop the video stream - -## Integrating DimOS with the DimOS-interface - -### Unitree Go2 Example - -```python -from dimos.agents_deprecated.agent import OpenAIAgent -from dimos.robot.unitree.unitree_go2 import UnitreeGo2 -from dimos.robot.unitree.unitree_skills import MyUnitreeSkills -from dimos.web.robot_web_interface import RobotWebInterface - -robot_ip = os.getenv("ROBOT_IP") - -# Initialize robot -logger.info("Initializing Unitree Robot") -robot = UnitreeGo2(ip=robot_ip, - connection_method=connection_method, - output_dir=output_dir) - -# Set up video stream -logger.info("Starting video stream") -video_stream = robot.get_ros_video_stream() - -# Create FastAPI server with video stream -logger.info("Initializing FastAPI server") -streams = {"unitree_video": video_stream} -web_interface = RobotWebInterface(port=5555, **streams) - -# 
Initialize agent with robot skills -skills_instance = MyUnitreeSkills(robot=robot) - -agent = OpenAIAgent( - dev_name="UnitreeQueryPerceptionAgent", - input_query_stream=web_interface.query_stream, - output_dir=output_dir, - skills=skills_instance, -) - -web_interface.run() -``` - -## Architecture - -- **Backend**: FastAPI server runs on port 5555 -- **Frontend**: Web application runs on port 3000 diff --git a/dimos/web/command-center-extension/README.md b/dimos/web/command-center-extension/README.md deleted file mode 100644 index efee4ec11d..0000000000 --- a/dimos/web/command-center-extension/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# command-center-extension - -This is a Foxglove extension for visualizing robot data and controlling the robot. See `dimos/web/websocket_vis/README.md` for how to use the module in your robot. - -## Build and use - -Install the Foxglove Studio desktop application. - -Install the Node dependencies: - - npm install - -Build the package and install it into Foxglove: - - npm run build && npm run local-install - -To add the panel, go to Foxglove Studio, click on the "Add panel" icon on the top right and select "command-center [local]". 
diff --git a/dimos/web/dimos_interface/__init__.py b/dimos/web/dimos_interface/__init__.py index 5ca28b30e5..3bdc622cee 100644 --- a/dimos/web/dimos_interface/__init__.py +++ b/dimos/web/dimos_interface/__init__.py @@ -2,6 +2,11 @@ Dimensional Interface package """ -from .api.server import FastAPIServer +import lazy_loader as lazy -__all__ = ["FastAPIServer"] +__getattr__, __dir__, __all__ = lazy.attach( + __name__, + submod_attrs={ + "api.server": ["FastAPIServer"], + }, +) diff --git a/dimos/web/websocket_vis/websocket_vis_module.py b/dimos/web/websocket_vis/websocket_vis_module.py index e02ee3491a..ad93af5c96 100644 --- a/dimos/web/websocket_vis/websocket_vis_module.py +++ b/dimos/web/websocket_vis/websocket_vis_module.py @@ -46,7 +46,7 @@ ) from dimos.core import In, Module, Out, rpc -from dimos.core.global_config import GlobalConfig +from dimos.core.global_config import GlobalConfig, global_config from dimos.mapping.occupancy.gradient import gradient from dimos.mapping.occupancy.inflation import simple_inflate from dimos.mapping.types import LatLon @@ -98,17 +98,17 @@ class WebsocketVisModule(Module): def __init__( self, port: int = 7779, - global_config: GlobalConfig | None = None, + cfg: GlobalConfig = global_config, **kwargs: Any, ) -> None: """Initialize the WebSocket visualization module. 
Args: port: Port to run the web server on - global_config: Optional global config for viewer backend settings + cfg: Optional global config for viewer backend settings """ super().__init__(**kwargs) - self._global_config = global_config or GlobalConfig() + self._global_config = cfg self.port = port self._uvicorn_server_thread: threading.Thread | None = None @@ -154,7 +154,7 @@ def start(self) -> None: self._uvicorn_server_thread.start() # Auto-open browser only for rerun-web (dashboard with Rerun iframe + command center) - # For rerun-native and foxglove, users access the command center manually if needed + # For rerun and foxglove, users access the command center manually if needed if self._global_config.viewer_backend == "rerun-web": url = f"http://localhost:{self.port}/" logger.info(f"Dimensional Command Center: {url}") @@ -228,8 +228,9 @@ def _create_server(self) -> None: async def serve_index(request): # type: ignore[no-untyped-def] """Serve appropriate HTML based on viewer mode.""" # If running native Rerun, redirect to standalone command center - if self._global_config.viewer_backend == "rerun-native": + if self._global_config.viewer_backend != "rerun-web": return RedirectResponse(url="/command-center") + # Otherwise serve full dashboard with Rerun iframe return FileResponse(_DASHBOARD_HTML, media_type="text/html") diff --git a/docker/navigation/.env.hardware b/docker/navigation/.env.hardware index 05e08bd375..234e58545c 100644 --- a/docker/navigation/.env.hardware +++ b/docker/navigation/.env.hardware @@ -57,6 +57,43 @@ MOTOR_SERIAL_DEVICE=/dev/ttyACM0 # Set to true if using wireless base station ENABLE_WIFI_BUFFER=false +# ============================================ +# Unitree Robot Configuration +# ============================================ +# Enable Unitree WebRTC control (for Go2, G1) +#USE_UNITREE=true + +# Unitree robot IP address +UNITREE_IP=192.168.12.1 + +# Unitree connection method (LocalAP or Ethernet) +UNITREE_CONN=LocalAP + +# 
============================================ +# Navigation Options +# ============================================ +# Enable route planner (FAR planner for goal navigation) +USE_ROUTE_PLANNER=false + +# Enable RViz visualization +USE_RVIZ=false + +# Map path for localization mode (leave empty for SLAM/mapping mode) +# Set to file prefix (no .pcd extension), e.g., /ros2_ws/maps/warehouse +# The system will load: MAP_PATH.pcd for SLAM, MAP_PATH_tomogram.pickle for PCT planner +MAP_PATH= + +# ============================================ +# Device Group IDs +# ============================================ +# Group ID for /dev/input devices (joystick) +# Find with: getent group input | cut -d: -f3 +INPUT_GID=995 + +# Group ID for serial devices +# Find with: getent group dialout | cut -d: -f3 +DIALOUT_GID=20 + # ============================================ # Display Configuration # ============================================ diff --git a/docker/navigation/Dockerfile b/docker/navigation/Dockerfile index 08a24c0128..fa51fd621c 100644 --- a/docker/navigation/Dockerfile +++ b/docker/navigation/Dockerfile @@ -1,28 +1,40 @@ # ============================================================================= -# OPTIMIZED DOCKERFILE - Multi-stage build for reduced image size +# DimOS Navigation Docker Image # ============================================================================= # -# Key optimizations: -# 1. Multi-stage build with ros:desktop-full base image -# 2. Multi-stage build to discard build artifacts -# 3. No Python ML dependencies (~14 GB saved) -# 4. COPY dimos source for editable pip install (volume-mounted at runtime overlays it) -# 5. Clean up build directories after compile (~800 MB saved) -# 6. Minimal apt packages with --no-install-recommends -# 7. DDS configuration for optimized ROS 2 communication +# Multi-stage build for ROS 2 navigation with SLAM support. +# Includes both arise_slam and FASTLIO2 - select at runtime via LOCALIZATION_METHOD. 
# -# Supported ROS distributions: jazzy, humble -# Build with: docker build --build-arg ROS_DISTRO=humble ... +# Supported configurations: +# - ROS distributions: humble, jazzy +# - SLAM methods: arise_slam (default), fastlio (set LOCALIZATION_METHOD=fastlio) +# +# Build: +# ./build.sh --humble # Build for ROS 2 Humble +# ./build.sh --jazzy # Build for ROS 2 Jazzy +# +# Run: +# ./start.sh --hardware --route-planner # Uses arise_slam +# LOCALIZATION_METHOD=fastlio ./start.sh --hardware --route-planner # Uses FASTLIO2 # # ============================================================================= # Build argument for ROS distribution (default: humble) ARG ROS_DISTRO=humble +ARG TARGETARCH + +# ----------------------------------------------------------------------------- +# Platform-specific base images +# - amd64: Use osrf/ros desktop-full (includes Gazebo, full GUI) +# - arm64: Use ros-base (desktop-full not available for ARM) +# ----------------------------------------------------------------------------- +FROM osrf/ros:${ROS_DISTRO}-desktop-full AS base-amd64 +FROM ros:${ROS_DISTRO}-ros-base AS base-arm64 # ----------------------------------------------------------------------------- # STAGE 1: Build Stage - compile all C++ dependencies # ----------------------------------------------------------------------------- -FROM osrf/ros:${ROS_DISTRO}-desktop-full AS builder +FROM base-${TARGETARCH} AS builder ARG ROS_DISTRO ENV DEBIAN_FRONTEND=noninteractive @@ -48,12 +60,103 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ ros-${ROS_DISTRO}-cv-bridge \ && rm -rf /var/lib/apt/lists/* +# On arm64, ros-base doesn't include rviz2 (unlike desktop-full on amd64) +# Install it separately for building rviz plugins +# Note: ARG must be re-declared after FROM; placed here to maximize layer caching above +ARG TARGETARCH +RUN if [ "${TARGETARCH}" = "arm64" ]; then \ + apt-get update && apt-get install -y --no-install-recommends \ + ros-${ROS_DISTRO}-rviz2 \ + 
&& rm -rf /var/lib/apt/lists/*; \ + fi + +# On arm64, build open3d from source (no Linux aarch64 wheels on PyPI) +# Cached as a separate layer; the wheel is copied to the runtime stage +# mkdir runs unconditionally so COPY --from=builder works on all architectures +RUN mkdir -p /opt/open3d-wheel && \ + PYTHON_MINOR=$(python3 -c "import sys; print(sys.version_info.minor)") && \ + if [ "${TARGETARCH}" = "arm64" ] && [ "$PYTHON_MINOR" -ge 12 ]; then \ + echo "Building open3d from source for arm64 + Python 3.${PYTHON_MINOR} (no PyPI wheel)..." && \ + apt-get update && apt-get install -y --no-install-recommends \ + python3-dev \ + python3-pip \ + python3-setuptools \ + python3-wheel \ + libblas-dev \ + liblapack-dev \ + libgl1-mesa-dev \ + libglib2.0-dev \ + libxinerama-dev \ + libxcursor-dev \ + libxrandr-dev \ + libxi-dev \ + gfortran \ + && rm -rf /var/lib/apt/lists/* && \ + cd /tmp && \ + git clone --depth 1 --branch v0.19.0 https://github.com/isl-org/Open3D.git && \ + cd Open3D && \ + util/install_deps_ubuntu.sh assume-yes && \ + mkdir build && cd build && \ + cmake .. \ + -DCMAKE_BUILD_TYPE=Release \ + -DBUILD_CUDA_MODULE=OFF \ + -DBUILD_GUI=OFF \ + -DBUILD_TENSORFLOW_OPS=OFF \ + -DBUILD_PYTORCH_OPS=OFF \ + -DBUILD_UNIT_TESTS=OFF \ + -DBUILD_BENCHMARKS=OFF \ + -DBUILD_EXAMPLES=OFF \ + -DBUILD_WEBRTC=OFF && \ + make -j$(($(nproc) > 4 ? 4 : $(nproc))) && \ + make pip-package -j$(($(nproc) > 4 ? 4 : $(nproc))) && \ + mkdir -p /opt/open3d-wheel && \ + cp lib/python_package/pip_package/open3d*.whl /opt/open3d-wheel/ && \ + cd / && rm -rf /tmp/Open3D; \ + fi + +# On arm64, build or-tools from source (pre-built binaries are x86_64 only) +# This is cached as a separate layer since it takes significant time to build +ENV OR_TOOLS_VERSION=9.8 +RUN if [ "${TARGETARCH}" = "arm64" ]; then \ + echo "Building or-tools v${OR_TOOLS_VERSION} from source for arm64..." 
&& \ + apt-get update && apt-get install -y --no-install-recommends \ + lsb-release \ + wget \ + && rm -rf /var/lib/apt/lists/* && \ + cd /tmp && \ + wget -q https://github.com/google/or-tools/archive/refs/tags/v${OR_TOOLS_VERSION}.tar.gz && \ + tar xzf v${OR_TOOLS_VERSION}.tar.gz && \ + cd or-tools-${OR_TOOLS_VERSION} && \ + cmake -S . -B build \ + -DCMAKE_BUILD_TYPE=Release \ + -DBUILD_DEPS=ON \ + -DBUILD_SAMPLES=OFF \ + -DBUILD_EXAMPLES=OFF \ + -DBUILD_FLATZINC=OFF \ + -DUSE_SCIP=OFF \ + -DUSE_COINOR=OFF && \ + cmake --build build --config Release -j$(($(nproc) > 4 ? 4 : $(nproc))) && \ + cmake --install build --prefix /opt/or-tools && \ + rm -rf /tmp/or-tools-${OR_TOOLS_VERSION} /tmp/v${OR_TOOLS_VERSION}.tar.gz; \ + fi + # Create workspace RUN mkdir -p ${WORKSPACE}/src # Copy autonomy stack source COPY docker/navigation/ros-navigation-autonomy-stack ${WORKSPACE}/src/ros-navigation-autonomy-stack +# On arm64, replace pre-built x86_64 or-tools with arm64 built version +RUN if [ "${TARGETARCH}" = "arm64" ] && [ -d "/opt/or-tools" ]; then \ + echo "Replacing x86_64 or-tools with arm64 build..." 
&& \ + OR_TOOLS_DIR=${WORKSPACE}/src/ros-navigation-autonomy-stack/src/exploration_planner/tare_planner/or-tools && \ + rm -rf ${OR_TOOLS_DIR}/lib/*.so* ${OR_TOOLS_DIR}/lib/*.a && \ + cp -r /opt/or-tools/lib/* ${OR_TOOLS_DIR}/lib/ && \ + rm -rf ${OR_TOOLS_DIR}/include && \ + cp -r /opt/or-tools/include ${OR_TOOLS_DIR}/ && \ + ldconfig; \ + fi + # Compatibility fix: In Humble, cv_bridge uses .h extension, but Jazzy uses .hpp # Create a symlink so code written for Jazzy works on Humble RUN if [ "${ROS_DISTRO}" = "humble" ]; then \ @@ -91,22 +194,26 @@ RUN cd ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/dependency/gtsam make -j$(nproc) && make install && ldconfig && \ rm -rf ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/dependency/gtsam/build -# Build ROS workspace (no --symlink-install for multi-stage build compatibility) +# Build ROS workspace with both SLAM systems (no --symlink-install for multi-stage build compatibility) RUN /bin/bash -c "source /opt/ros/${ROS_DISTRO}/setup.bash && \ cd ${WORKSPACE} && \ + echo 'Building with both arise_slam and FASTLIO2' && \ colcon build --cmake-args -DCMAKE_BUILD_TYPE=Release" # ----------------------------------------------------------------------------- # STAGE 2: Runtime Stage - minimal image for running # ----------------------------------------------------------------------------- ARG ROS_DISTRO -FROM osrf/ros:${ROS_DISTRO}-desktop-full AS runtime +ARG TARGETARCH +FROM base-${TARGETARCH} AS runtime ARG ROS_DISTRO ENV DEBIAN_FRONTEND=noninteractive ENV ROS_DISTRO=${ROS_DISTRO} ENV WORKSPACE=/ros2_ws ENV DIMOS_PATH=/workspace/dimos +# LOCALIZATION_METHOD: arise_slam (default) or fastlio +ENV LOCALIZATION_METHOD=arise_slam # DDS Configuration - Use FastDDS (default ROS 2 middleware) ENV RMW_IMPLEMENTATION=rmw_fastrtps_cpp @@ -120,6 +227,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ ros-${ROS_DISTRO}-foxglove-bridge \ ros-${ROS_DISTRO}-rviz2 \ ros-${ROS_DISTRO}-rqt* \ + 
ros-${ROS_DISTRO}-joy \ # DDS middleware (FastDDS is default, just ensure it's installed) ros-${ROS_DISTRO}-rmw-fastrtps-cpp \ # Runtime libraries @@ -164,9 +272,26 @@ COPY --from=builder ${WORKSPACE}/install ${WORKSPACE}/install COPY --from=builder ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/base_autonomy/vehicle_simulator/rviz ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/base_autonomy/vehicle_simulator/rviz COPY --from=builder ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/route_planner/far_planner/rviz ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/route_planner/far_planner/rviz COPY --from=builder ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/exploration_planner/tare_planner/rviz ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/exploration_planner/tare_planner/rviz -COPY --from=builder ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/arise_slam_mid360/config ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/arise_slam_mid360/config +# Copy SLAM config files based on SLAM_TYPE COPY --from=builder ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/utilities/livox_ros_driver2/config ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/utilities/livox_ros_driver2/config +# Copy config files for both SLAM systems +RUN --mount=from=builder,source=${WORKSPACE}/src/ros-navigation-autonomy-stack/src,target=/tmp/src \ + echo "Copying arise_slam configs" && \ + mkdir -p ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/arise_slam_mid360 && \ + cp -r /tmp/src/slam/arise_slam_mid360/config ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/arise_slam_mid360/ 2>/dev/null || true && \ + echo "Copying FASTLIO2 configs" && \ + mkdir -p ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/FASTLIO2_ROS2 && \ + for pkg in fastlio2 localizer pgo hba; do \ + if [ -d "/tmp/src/slam/FASTLIO2_ROS2/$pkg/config" ]; then \ + mkdir -p ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/FASTLIO2_ROS2/$pkg && \ + cp -r 
/tmp/src/slam/FASTLIO2_ROS2/$pkg/config ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/FASTLIO2_ROS2/$pkg/; \ + fi; \ + if [ -d "/tmp/src/slam/FASTLIO2_ROS2/$pkg/rviz" ]; then \ + cp -r /tmp/src/slam/FASTLIO2_ROS2/$pkg/rviz ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/FASTLIO2_ROS2/$pkg/; \ + fi; \ + done + # Copy simulation shell scripts (real robot mode uses volume mount) COPY --from=builder ${WORKSPACE}/src/ros-navigation-autonomy-stack/system_simulation*.sh ${WORKSPACE}/src/ros-navigation-autonomy-stack/ @@ -235,6 +360,15 @@ RUN python3 -m venv /opt/dimos-venv && \ /opt/dimos-venv/bin/pip install --no-cache-dir \ pyyaml +# On arm64, install open3d wheel built from source in the builder stage +COPY --from=builder /opt/open3d-wheel /opt/open3d-wheel +ARG TARGETARCH +RUN if [ "${TARGETARCH}" = "arm64" ] && ls /opt/open3d-wheel/open3d*.whl 1>/dev/null 2>&1; then \ + echo "Installing open3d from pre-built arm64 wheel..." && \ + /opt/dimos-venv/bin/pip install --no-cache-dir /opt/open3d-wheel/open3d*.whl && \ + rm -rf /opt/open3d-wheel; \ + fi + # Copy dimos source and install as editable package # The volume mount at runtime will overlay /workspace/dimos, but the editable # install creates a link that will use the volume-mounted files diff --git a/docker/navigation/README.md b/docker/navigation/README.md index 3b0bfe1eec..32483b6512 100644 --- a/docker/navigation/README.md +++ b/docker/navigation/README.md @@ -16,17 +16,17 @@ This is an optimistic overview. Use the commands below for an in depth version. 
```bash cd docker/navigation -./build.sh --humble # Build with ROS 2 Humble (default) -# or -./build.sh --jazzy # Build with ROS 2 Jazzy +./build.sh --humble # Build for ROS 2 Humble +./build.sh --jazzy # Build for ROS 2 Jazzy ``` This will: -- Clone the ros-navigation-autonomy-stack repository (matching branch: humble or jazzy) -- Build a Docker image with both ROS and DimOS dependencies -- Set up the environment for both systems +- Clone the ros-navigation-autonomy-stack repository +- Build a Docker image with both arise_slam and FASTLIO2 +- Set up the environment for both ROS and DimOS -The resulting image will be named `dimos_autonomy_stack:humble` or `dimos_autonomy_stack:jazzy` depending on the option used. +The resulting image will be named `dimos_autonomy_stack:{distro}` (e.g., `humble`, `jazzy`). +Select SLAM method at runtime via `--localization arise_slam` or `--localization fastlio`. Note that the build will take a while and produce an image of approximately 24 GB. @@ -35,9 +35,9 @@ Note that the build will take a while and produce an image of approximately 24 G Use the same ROS distribution flag as your build: ```bash -./start.sh --simulation --humble # If built with --humble +./start.sh --simulation --image humble # If built with --humble # or -./start.sh --simulation --jazzy # If built with --jazzy +./start.sh --simulation --image jazzy # If built with --jazzy ```
@@ -114,12 +114,20 @@ ROBOT_IP=192.168.12.1 # For WebRTC local AP mode (optional, need additional wif #### Start with Route Planner automatically -Use --humble or --jazzy matching your build: - ```bash -./start.sh --hardware --humble --route-planner # Run route planner automatically -./start.sh --hardware --humble --route-planner --rviz # Route planner + RViz2 visualization -./start.sh --hardware --humble --dev # Development mode (mount src for config editing) +# arise_slam (default) +./start.sh --hardware --route-planner +./start.sh --hardware --route-planner --rviz + +# FASTLIO2 +./start.sh --hardware --localization fastlio --route-planner +./start.sh --hardware --localization fastlio --route-planner --rviz + +# Jazzy image +./start.sh --hardware --image jazzy --route-planner + +# Development mode (mount src for config editing) +./start.sh --hardware --dev ``` [Foxglove Studio](https://foxglove.dev/download) is the default visualization tool. It's ideal for remote operation - SSH with port forwarding to the robot's mini PC and run commands there: @@ -130,7 +138,7 @@ ssh -L 8765:localhost:8765 user@robot-ip Then on your local machine: 1. Open Foxglove and connect to `ws://localhost:8765` -2. Load the layout from `docker/navigation/Overwatch.json` (Layout menu → Import) +2. Load the layout from `dimos/assets/foxglove_dashboards/Overwatch.json` (Layout menu → Import) 3. Click in the 3D panel to drop a target pose (similar to RViz). The "Autonomy ON" indicator should be green, and "Goal Reached" will show when the robot arrives.
@@ -139,9 +147,9 @@ Then on your local machine: Start the container and leave it open. Use the same ROS distribution flag as your build: ```bash -./start.sh --hardware --humble # If built with --humble +./start.sh --hardware --image humble # If built with --humble # or -./start.sh --hardware --jazzy # If built with --jazzy +./start.sh --hardware --image jazzy # If built with --jazzy ``` It doesn't do anything by default. You have to run commands on it by `exec`-ing: diff --git a/docker/navigation/build.sh b/docker/navigation/build.sh index ea1729fb63..371db08b49 100755 --- a/docker/navigation/build.sh +++ b/docker/navigation/build.sh @@ -29,10 +29,12 @@ while [[ $# -gt 0 ]]; do echo " --jazzy Build with ROS 2 Jazzy" echo " --help, -h Show this help message" echo "" + echo "The image includes both arise_slam and FASTLIO2." + echo "Select SLAM method at runtime via LOCALIZATION_METHOD env var." + echo "" echo "Examples:" - echo " $0 # Build with ROS Humble (default)" + echo " $0 # Build with ROS Humble" echo " $0 --jazzy # Build with ROS Jazzy" - echo " $0 --humble # Build with ROS Humble" exit 0 ;; *) @@ -44,37 +46,54 @@ while [[ $# -gt 0 ]]; do done export ROS_DISTRO +export IMAGE_TAG="${ROS_DISTRO}" echo -e "${GREEN}================================================${NC}" echo -e "${GREEN}Building DimOS + ROS Autonomy Stack Docker Image${NC}" echo -e "${GREEN}ROS Distribution: ${ROS_DISTRO}${NC}" +echo -e "${GREEN}Image Tag: ${IMAGE_TAG}${NC}" +echo -e "${GREEN}SLAM: arise_slam + FASTLIO2 (both included)${NC}" echo -e "${GREEN}================================================${NC}" echo "" SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" cd "$SCRIPT_DIR" -# Clone or checkout ros-navigation-autonomy-stack with dev branch +# Use fastlio2 branch which has both arise_slam and FASTLIO2 +TARGET_BRANCH="fastlio2" +TARGET_REMOTE="origin" +CLONE_URL="https://github.com/dimensionalOS/ros-navigation-autonomy-stack.git" + +# Clone or checkout 
ros-navigation-autonomy-stack if [ ! -d "ros-navigation-autonomy-stack" ]; then - echo -e "${YELLOW}Cloning ros-navigation-autonomy-stack repository (dev branch)...${NC}" - git clone -b dev git@github.com:dimensionalOS/ros-navigation-autonomy-stack.git + echo -e "${YELLOW}Cloning ros-navigation-autonomy-stack repository (${TARGET_BRANCH} branch)...${NC}" + git clone -b ${TARGET_BRANCH} ${CLONE_URL} ros-navigation-autonomy-stack echo -e "${GREEN}Repository cloned successfully!${NC}" else - # Directory exists, ensure we're on the dev branch + # Directory exists, ensure we're on the correct branch cd ros-navigation-autonomy-stack + CURRENT_BRANCH=$(git branch --show-current) - if [ "$CURRENT_BRANCH" != "dev" ]; then - echo -e "${YELLOW}Switching from ${CURRENT_BRANCH} to dev branch...${NC}" + if [ "$CURRENT_BRANCH" != "${TARGET_BRANCH}" ]; then + echo -e "${YELLOW}Switching from ${CURRENT_BRANCH} to ${TARGET_BRANCH} branch...${NC}" # Stash any local changes (e.g., auto-generated config files) if git stash --quiet 2>/dev/null; then echo -e "${YELLOW}Stashed local changes${NC}" fi - git fetch origin dev - git checkout dev - git pull origin dev - echo -e "${GREEN}Switched to dev branch${NC}" + git fetch ${TARGET_REMOTE} ${TARGET_BRANCH} + git checkout -B ${TARGET_BRANCH} ${TARGET_REMOTE}/${TARGET_BRANCH} + echo -e "${GREEN}Switched to ${TARGET_BRANCH} branch${NC}" else - echo -e "${GREEN}Already on dev branch${NC}" + echo -e "${GREEN}Already on ${TARGET_BRANCH} branch${NC}" + # Check for local changes before pulling latest + if ! git diff --quiet || ! git diff --cached --quiet; then + echo -e "${RED}Local changes detected in ros-navigation-autonomy-stack.${NC}" + echo -e "${RED}Please commit or discard them before building.${NC}" + git status --short + exit 1 + fi + git fetch ${TARGET_REMOTE} ${TARGET_BRANCH} + git reset --hard ${TARGET_REMOTE}/${TARGET_BRANCH} fi cd .. 
fi @@ -90,7 +109,7 @@ echo -e "${YELLOW}Building Docker image with docker compose...${NC}" echo "This will take a while as it needs to:" echo " - Download base ROS ${ROS_DISTRO^} image" echo " - Install ROS packages and dependencies" -echo " - Build the autonomy stack" +echo " - Build the autonomy stack (arise_slam + FASTLIO2)" echo " - Build Livox-SDK2 for Mid-360 lidar" echo " - Build SLAM dependencies (Sophus, Ceres, GTSAM)" echo " - Install Python dependencies for DimOS" @@ -103,11 +122,12 @@ docker compose -f docker/navigation/docker-compose.yml build echo "" echo -e "${GREEN}============================================${NC}" echo -e "${GREEN}Docker image built successfully!${NC}" -echo -e "${GREEN}Image: dimos_autonomy_stack:${ROS_DISTRO}${NC}" +echo -e "${GREEN}Image: dimos_autonomy_stack:${IMAGE_TAG}${NC}" +echo -e "${GREEN}SLAM: arise_slam + FASTLIO2 (both included)${NC}" echo -e "${GREEN}============================================${NC}" echo "" echo "To run in SIMULATION mode:" -echo -e "${YELLOW} ./start.sh --${ROS_DISTRO}${NC}" +echo -e "${YELLOW} ./start.sh --simulation --image ${ROS_DISTRO}${NC}" echo "" echo "To run in HARDWARE mode:" echo " 1. Configure your hardware settings in .env file" @@ -115,5 +135,6 @@ echo " (copy from .env.hardware if needed)" echo " 2. Run the hardware container:" echo -e "${YELLOW} ./start.sh --hardware --${ROS_DISTRO}${NC}" echo "" -echo "The script runs in foreground. Press Ctrl+C to stop." 
+echo "To use FASTLIO2 instead of arise_slam, set LOCALIZATION_METHOD:" +echo -e "${YELLOW} LOCALIZATION_METHOD=fastlio ./start.sh --hardware --image ${ROS_DISTRO}${NC}" echo "" diff --git a/docker/navigation/docker-compose.yml b/docker/navigation/docker-compose.yml index d2abf73296..6546968757 100644 --- a/docker/navigation/docker-compose.yml +++ b/docker/navigation/docker-compose.yml @@ -7,7 +7,7 @@ services: network: host args: ROS_DISTRO: ${ROS_DISTRO:-humble} - image: dimos_autonomy_stack:${ROS_DISTRO:-humble} + image: dimos_autonomy_stack:${IMAGE_TAG:-humble} container_name: dimos_simulation_container profiles: ["", "simulation"] # Active by default (empty profile) AND with --profile simulation @@ -41,6 +41,8 @@ services: # DDS Configuration (FastDDS) - RMW_IMPLEMENTATION=rmw_fastrtps_cpp - FASTRTPS_DEFAULT_PROFILES_FILE=/ros2_ws/config/fastdds.xml + # Localization method: arise_slam (default) or fastlio + - LOCALIZATION_METHOD=${LOCALIZATION_METHOD:-arise_slam} # Volume mounts volumes: @@ -79,7 +81,7 @@ services: network: host args: ROS_DISTRO: ${ROS_DISTRO:-humble} - image: dimos_autonomy_stack:${ROS_DISTRO:-humble} + image: dimos_autonomy_stack:${IMAGE_TAG:-humble} container_name: dimos_hardware_container profiles: ["hardware"] @@ -121,6 +123,8 @@ services: # DDS Configuration (FastDDS) - RMW_IMPLEMENTATION=rmw_fastrtps_cpp - FASTRTPS_DEFAULT_PROFILES_FILE=/ros2_ws/config/fastdds.xml + # Localization method: arise_slam (default) or fastlio + - LOCALIZATION_METHOD=${LOCALIZATION_METHOD:-arise_slam} # Mid-360 Lidar configuration - LIDAR_INTERFACE=${LIDAR_INTERFACE:-} - LIDAR_COMPUTER_IP=${LIDAR_COMPUTER_IP:-192.168.1.5} @@ -137,6 +141,8 @@ services: # Unitree robot configuration - UNITREE_IP=${UNITREE_IP:-192.168.12.1} - UNITREE_CONN=${UNITREE_CONN:-LocalAP} + # Map path for localization mode (e.g., /ros2_ws/maps/warehouse) + - MAP_PATH=${MAP_PATH:-} # Volume mounts volumes: @@ -151,6 +157,8 @@ services: - ./bagfiles:/ros2_ws/bagfiles:rw # Mount config files for 
easy editing - ./config:/ros2_ws/config:rw + # Mount maps directory for localization + - ./maps:/ros2_ws/maps:rw # Hardware-specific volumes - ./logs:/ros2_ws/logs:rw - /etc/localtime:/etc/localtime:ro @@ -185,12 +193,25 @@ services: cd /ros2_ws source install/setup.bash source /opt/dimos-venv/bin/activate - if [ "$USE_ROUTE_PLANNER" = "true" ]; then - echo "Starting real robot system WITH route planner..." - ros2 launch vehicle_simulator system_real_robot_with_route_planner.launch.py & + # Launch with SLAM method based on LOCALIZATION_METHOD + if [ "$LOCALIZATION_METHOD" = "fastlio" ]; then + echo "Using FASTLIO2 localization" + if [ "$USE_ROUTE_PLANNER" = "true" ]; then + echo "Starting real robot system WITH route planner..." + ros2 launch vehicle_simulator system_real_robot_with_route_planner.launch.py use_fastlio2:=true & + else + echo "Starting real robot system (base autonomy)..." + ros2 launch vehicle_simulator system_real_robot.launch.py use_fastlio2:=true & + fi else - echo "Starting real robot system (base autonomy)..." - ros2 launch vehicle_simulator system_real_robot.launch.py & + echo "Using arise_slam localization" + if [ "$USE_ROUTE_PLANNER" = "true" ]; then + echo "Starting real robot system WITH route planner..." + ros2 launch vehicle_simulator system_real_robot_with_route_planner.launch.py & + else + echo "Starting real robot system (base autonomy)..." + ros2 launch vehicle_simulator system_real_robot.launch.py & + fi fi sleep 2 if [ "$USE_RVIZ" = "true" ]; then @@ -221,3 +242,112 @@ services: - NET_ADMIN # Network interface configuration - SYS_ADMIN # System operations - SYS_TIME # Time synchronization + + # Bagfile profile - for bagfile playback with use_sim_time=true + dimos_bagfile: + build: + context: ../.. 
+ dockerfile: docker/navigation/Dockerfile + network: host + args: + ROS_DISTRO: ${ROS_DISTRO:-humble} + image: dimos_autonomy_stack:${IMAGE_TAG:-humble} + container_name: dimos_bagfile_container + profiles: ["bagfile"] + + # Shared memory size for ROS 2 FastDDS + shm_size: '8gb' + + # Enable interactive terminal + stdin_open: true + tty: true + + # Network configuration + network_mode: host + + # Use nvidia runtime for GPU acceleration (falls back to runc if not available) + runtime: ${DOCKER_RUNTIME:-nvidia} + + # Environment variables + environment: + - DISPLAY=${DISPLAY} + - QT_X11_NO_MITSHM=1 + - NVIDIA_VISIBLE_DEVICES=${NVIDIA_VISIBLE_DEVICES:-all} + - NVIDIA_DRIVER_CAPABILITIES=${NVIDIA_DRIVER_CAPABILITIES:-all} + - ROS_DOMAIN_ID=${ROS_DOMAIN_ID:-42} + # DDS Configuration (FastDDS) + - RMW_IMPLEMENTATION=rmw_fastrtps_cpp + - FASTRTPS_DEFAULT_PROFILES_FILE=/ros2_ws/config/fastdds.xml + # Localization method: arise_slam (default) or fastlio + - LOCALIZATION_METHOD=${LOCALIZATION_METHOD:-arise_slam} + # Route planner option + - USE_ROUTE_PLANNER=${USE_ROUTE_PLANNER:-false} + # RViz option + - USE_RVIZ=${USE_RVIZ:-false} + # Map path for localization mode (e.g., /ros2_ws/maps/warehouse) + - MAP_PATH=${MAP_PATH:-} + + # Volume mounts + volumes: + # X11 socket for GUI + - /tmp/.X11-unix:/tmp/.X11-unix:rw + - ${HOME}/.Xauthority:/root/.Xauthority:rw + # Mount bagfiles directory + - ./bagfiles:/ros2_ws/bagfiles:rw + # Mount config files for easy editing + - ./config:/ros2_ws/config:rw + # Mount maps directory for localization + - ./maps:/ros2_ws/maps:rw + + # Device access (for joystick controllers) + devices: + - /dev/input:/dev/input + - /dev/dri:/dev/dri + + # Working directory + working_dir: /ros2_ws + + # Command - launch bagfile system (use_sim_time=true by default in launch files) + command: + - bash + - -c + - | + source install/setup.bash + echo "Bagfile playback mode (use_sim_time=true)" + echo "" + echo "Launch files ready. 
Play your bagfile with:" + echo " ros2 bag play --clock /ros2_ws/bagfiles/" + echo "" + # Launch with SLAM method based on LOCALIZATION_METHOD + if [ "$LOCALIZATION_METHOD" = "fastlio" ]; then + echo "Using FASTLIO2 localization" + if [ "$USE_ROUTE_PLANNER" = "true" ]; then + echo "Starting bagfile system WITH route planner..." + ros2 launch vehicle_simulator system_bagfile_with_route_planner.launch.py use_fastlio2:=true & + else + echo "Starting bagfile system (base autonomy)..." + ros2 launch vehicle_simulator system_bagfile.launch.py use_fastlio2:=true & + fi + else + echo "Using arise_slam localization" + if [ "$USE_ROUTE_PLANNER" = "true" ]; then + echo "Starting bagfile system WITH route planner..." + ros2 launch vehicle_simulator system_bagfile_with_route_planner.launch.py & + else + echo "Starting bagfile system (base autonomy)..." + ros2 launch vehicle_simulator system_bagfile.launch.py & + fi + fi + sleep 2 + if [ "$USE_RVIZ" = "true" ]; then + echo "Starting RViz2..." + if [ "$USE_ROUTE_PLANNER" = "true" ]; then + ros2 run rviz2 rviz2 -d /ros2_ws/src/ros-navigation-autonomy-stack/src/route_planner/far_planner/rviz/default.rviz & + else + ros2 run rviz2 rviz2 -d /ros2_ws/src/ros-navigation-autonomy-stack/src/base_autonomy/vehicle_simulator/rviz/vehicle_simulator.rviz & + fi + fi + # Keep container running + echo "" + echo "Container ready. Waiting for bagfile playback..." 
+ wait diff --git a/docker/navigation/start.sh b/docker/navigation/start.sh index 9b27a1a3ce..be45908a33 100755 --- a/docker/navigation/start.sh +++ b/docker/navigation/start.sh @@ -13,6 +13,7 @@ USE_ROUTE_PLANNER="false" USE_RVIZ="false" DEV_MODE="false" ROS_DISTRO="humble" +LOCALIZATION_METHOD="${LOCALIZATION_METHOD:-arise_slam}" while [[ $# -gt 0 ]]; do case $1 in --hardware) @@ -23,6 +24,10 @@ while [[ $# -gt 0 ]]; do MODE="simulation" shift ;; + --bagfile) + MODE="bagfile" + shift + ;; --route-planner) USE_ROUTE_PLANNER="true" shift @@ -35,35 +40,48 @@ while [[ $# -gt 0 ]]; do DEV_MODE="true" shift ;; - --humble) - ROS_DISTRO="humble" - shift + --image) + if [ -z "$2" ] || [[ "$2" == --* ]]; then + echo -e "${RED}--image requires a value (humble or jazzy)${NC}" + exit 1 + fi + ROS_DISTRO="$2" + shift 2 ;; - --jazzy) - ROS_DISTRO="jazzy" - shift + --localization) + if [ -z "$2" ] || [[ "$2" == --* ]]; then + echo -e "${RED}--localization requires a value (arise_slam or fastlio)${NC}" + exit 1 + fi + LOCALIZATION_METHOD="$2" + shift 2 ;; --help|-h) echo "Usage: $0 [OPTIONS]" echo "" - echo "Options:" - echo " --simulation Start simulation container (default)" - echo " --hardware Start hardware container for real robot" - echo " --route-planner Enable FAR route planner (for hardware mode)" - echo " --rviz Launch RViz2 visualization" - echo " --dev Development mode (mount src for config editing)" - echo " --humble Use ROS 2 Humble image (default)" - echo " --jazzy Use ROS 2 Jazzy image" - echo " --help, -h Show this help message" + echo "Mode (mutually exclusive):" + echo " --simulation Start simulation container (default)" + echo " --hardware Start hardware container" + echo " --bagfile Start bagfile playback container (use_sim_time=true)" + echo "" + echo "Image and localization:" + echo " --image ROS 2 distribution: humble (default), jazzy" + echo " --localization SLAM method: arise_slam (default), fastlio" + echo "" + echo "Additional options:" + echo " 
--route-planner Enable FAR route planner (for hardware mode)" + echo " --rviz Launch RViz2 visualization" + echo " --dev Development mode (mount src for config editing)" + echo " --help, -h Show this help message" echo "" echo "Examples:" - echo " $0 # Start simulation (Humble)" - echo " $0 --jazzy # Start simulation (Jazzy)" - echo " $0 --hardware # Start hardware (base autonomy, Humble)" - echo " $0 --hardware --jazzy # Start hardware (Jazzy)" - echo " $0 --hardware --route-planner # Hardware with route planner" - echo " $0 --hardware --route-planner --rviz # Hardware with route planner + RViz" - echo " $0 --hardware --dev # Hardware with src mounted for development" + echo " $0 --simulation # Start simulation" + echo " $0 --hardware --image jazzy # Hardware with Jazzy" + echo " $0 --hardware --localization fastlio # Hardware with FASTLIO2" + echo " $0 --hardware --route-planner --rviz # Hardware with route planner + RViz" + echo " $0 --hardware --dev # Hardware with src mounted" + echo " $0 --bagfile # Bagfile playback" + echo " $0 --bagfile --localization fastlio --route-planner # Bagfile with FASTLIO2 + route planner" echo "" echo "Press Ctrl+C to stop the container" exit 0 @@ -77,6 +95,8 @@ while [[ $# -gt 0 ]]; do done export ROS_DISTRO +export LOCALIZATION_METHOD +export IMAGE_TAG="${ROS_DISTRO}" SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" cd "$SCRIPT_DIR" @@ -85,9 +105,14 @@ echo -e "${GREEN}================================================${NC}" echo -e "${GREEN}Starting DimOS Docker Container${NC}" echo -e "${GREEN}Mode: ${MODE}${NC}" echo -e "${GREEN}ROS Distribution: ${ROS_DISTRO}${NC}" +echo -e "${GREEN}ROS Domain ID: ${ROS_DOMAIN_ID:-42}${NC}" +echo -e "${GREEN}Localization: ${LOCALIZATION_METHOD}${NC}" +echo -e "${GREEN}Image Tag: ${IMAGE_TAG}${NC}" echo -e "${GREEN}================================================${NC}" echo "" +# Pull image option removed - use build.sh to build locally + # Hardware-specific checks if [ "$MODE" = 
"hardware" ]; then # Check if .env file exists @@ -218,10 +243,12 @@ if [ "$MODE" = "hardware" ]; then fi -# Check if the correct ROS distro image exists -if ! docker images | grep -q "dimos_autonomy_stack.*${ROS_DISTRO}"; then - echo -e "${YELLOW}Docker image for ROS ${ROS_DISTRO} not found. Building...${NC}" - ./build.sh --${ROS_DISTRO} +# Check if the image exists +if ! docker images --format '{{.Repository}}:{{.Tag}}' | grep -q "^dimos_autonomy_stack:${IMAGE_TAG}$"; then + echo -e "${RED}Docker image dimos_autonomy_stack:${IMAGE_TAG} not found.${NC}" + echo -e "${YELLOW}Please build it first with:${NC}" + echo -e " ./build.sh --${ROS_DISTRO}" + exit 1 fi # Check for X11 display @@ -268,6 +295,8 @@ fi # Set container name for reference if [ "$MODE" = "hardware" ]; then CONTAINER_NAME="dimos_hardware_container" +elif [ "$MODE" = "bagfile" ]; then + CONTAINER_NAME="dimos_bagfile_container" else CONTAINER_NAME="dimos_simulation_container" fi @@ -303,6 +332,27 @@ if [ "$MODE" = "hardware" ]; then echo "" echo "To enter the container from another terminal:" echo -e " ${YELLOW}docker exec -it ${CONTAINER_NAME} bash${NC}" +elif [ "$MODE" = "bagfile" ]; then + if [ "$USE_ROUTE_PLANNER" = "true" ]; then + echo "Bagfile mode - Starting bagfile playback system WITH route planner" + echo "" + echo "The container will run (use_sim_time=true):" + echo " - ROS navigation stack (system_bagfile_with_route_planner.launch)" + echo " - FAR Planner for goal-based navigation" + else + echo "Bagfile mode - Starting bagfile playback system (base autonomy)" + echo "" + echo "The container will run (use_sim_time=true):" + echo " - ROS navigation stack (system_bagfile.launch)" + fi + if [ "$USE_RVIZ" = "true" ]; then + echo " - RViz2 visualization" + fi + echo "" + echo -e "${YELLOW}Remember to play bagfile with: ros2 bag play --clock ${NC}" + echo "" + echo "To enter the container from another terminal:" + echo -e " ${YELLOW}docker exec -it ${CONTAINER_NAME} bash${NC}" else echo 
"Simulation mode - Auto-starting ROS simulation and DimOS" echo "" @@ -317,7 +367,14 @@ fi # Note: DISPLAY is now passed directly via environment variable # No need to write RUNTIME_DISPLAY to .env for local host running -# Build compose command with optional dev mode +# Create required directories +if [ "$MODE" = "hardware" ]; then + mkdir -p bagfiles config logs maps +elif [ "$MODE" = "bagfile" ]; then + mkdir -p bagfiles config maps +fi + +# Build compose command COMPOSE_CMD="docker compose -f docker-compose.yml" if [ "$DEV_MODE" = "true" ]; then COMPOSE_CMD="$COMPOSE_CMD -f docker-compose.dev.yml" @@ -325,6 +382,8 @@ fi if [ "$MODE" = "hardware" ]; then $COMPOSE_CMD --profile hardware up +elif [ "$MODE" = "bagfile" ]; then + $COMPOSE_CMD --profile bagfile up else $COMPOSE_CMD up fi diff --git a/docker/python/Dockerfile b/docker/python/Dockerfile index b85404f51a..30c9fda8eb 100644 --- a/docker/python/Dockerfile +++ b/docker/python/Dockerfile @@ -2,8 +2,7 @@ ARG FROM_IMAGE=ghcr.io/dimensionalos/ros:dev FROM ${FROM_IMAGE} # Install basic requirements -RUN apt-get update -RUN apt-get install -y \ +RUN apt-get update && apt-get install -y \ python-is-python3 \ curl \ gnupg2 \ @@ -13,7 +12,7 @@ RUN apt-get install -y \ portaudio19-dev \ git \ mesa-utils \ - libgl1-mesa-glx \ + libgl1 \ libgl1-mesa-dri \ software-properties-common \ libxcb1-dev \ @@ -49,4 +48,7 @@ COPY . 
/app/ # Install dependencies with UV (10-100x faster than pip) RUN uv pip install --upgrade 'pip>=24' 'setuptools>=70' 'wheel' 'packaging>=24' && \ - uv pip install '.[misc,cpu,sim,drone,unitree,web,perception,visualization]' + uv pip install '.[misc,cpu,sim,drone,unitree,web,perception,visualization,manipulation]' + +# Remove pydrake .pyi stubs that use Python 3.12 syntax (breaks mypy on 3.10) +RUN find /usr/local/lib/python3.10/dist-packages/pydrake -name '*.pyi' -delete diff --git a/docker/python/module-install.sh b/docker/python/module-install.sh new file mode 100644 index 0000000000..ab0aea1032 --- /dev/null +++ b/docker/python/module-install.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env bash +# DimOS Module Install (generic) +# Converts any Dockerfile into a DimOS module container +# +# Usage in Dockerfile: +# RUN --mount=from=ghcr.io/dimensionalos/ros-python:dev,source=/app,target=/tmp/d \ +# bash /tmp/d/docker/python/module-install.sh /tmp/d +# ENTRYPOINT ["/dimos/entrypoint.sh"] + +set -euo pipefail + +SRC="${1:-/tmp/d}" + +# ---- Copy source into image (skip if already at /dimos/source) ---- +if [ "${SRC}" != "/dimos/source" ]; then + mkdir -p /dimos/source + cp -r "${SRC}/dimos" "${SRC}/pyproject.toml" /dimos/source/ + [ -f "${SRC}/README.md" ] && cp "${SRC}/README.md" /dimos/source/ || true +fi + +# ---- Find Python + Pip (conda env > venv > uv > system) ---- +PYTHON="" +PIP="" + +# 1. Check for Conda environment +if [ -z "$PYTHON" ] && command -v conda >/dev/null 2>&1; then + DIMOS_CONDA_ENV="${DIMOS_CONDA_ENV:-app}" + if conda env list 2>/dev/null | awk '{print $1}' | grep -qx "${DIMOS_CONDA_ENV}"; then + PYTHON="conda run --no-capture-output -n ${DIMOS_CONDA_ENV} python" + PIP="conda run -n ${DIMOS_CONDA_ENV} pip" + echo "Using Conda env: ${DIMOS_CONDA_ENV}" + fi +fi + +# 2. 
Check for venv (including uv's .venv) +if [ -z "$PYTHON" ]; then + for v in /opt/venv /app/venv /venv /app/.venv /.venv; do + if [ -x "${v}/bin/python" ] && [ -x "${v}/bin/pip" ]; then + PYTHON="${v}/bin/python" + PIP="${v}/bin/pip" + echo "Using venv: ${v}" + break + fi + done +fi + +# 3. Check for uv (uses system python but manages deps) +if [ -z "$PYTHON" ] && command -v uv >/dev/null 2>&1; then + PYTHON="python" + PIP="uv pip" + echo "Using uv" +fi + +# 4. Fallback to system Python +if [ -z "$PYTHON" ]; then + PYTHON="python" + PIP="pip" + echo "Using system Python" +fi + +# ---- Install DimOS (deps from pyproject.toml[docker]) ---- +${PIP} install --no-cache-dir -e "/dimos/source[docker]" + +# ---- Create entrypoint ---- +cat > /dimos/entrypoint.sh < /dev/null @@ -87,5 +83,3 @@ RUN rosdep update # Source ROS2 and workspace in bashrc RUN echo "source /opt/ros/${ROS_DISTRO}/setup.bash" >> /root/.bashrc - -# Trigger docker workflow rerun 1 diff --git a/docs/VIEWER_BACKENDS.md b/docs/VIEWER_BACKENDS.md deleted file mode 100644 index 51fa20d655..0000000000 --- a/docs/VIEWER_BACKENDS.md +++ /dev/null @@ -1,321 +0,0 @@ -# Viewer Backends - -Dimos supports three visualization backends: Rerun (web or native) and Foxglove. 
- -## Quick Start - -Choose your viewer backend via the CLI (preferred): - -```bash -# Rerun native viewer (default) - native Rerun window + teleop panel at http://localhost:7779 -dimos run unitree-go2 - -# Explicitly select the viewer backend: -dimos --viewer-backend rerun-native run unitree-go2 -dimos --viewer-backend rerun-web run unitree-go2 -dimos --viewer-backend foxglove run unitree-go2 -``` - -Alternative (environment variable): - -```bash -VIEWER_BACKEND=rerun-native dimos run unitree-go2 - -# Rerun web viewer - Full dashboard in browser -VIEWER_BACKEND=rerun-web dimos run unitree-go2 - -# Foxglove - Use Foxglove Studio instead of Rerun -VIEWER_BACKEND=foxglove dimos run unitree-go2 -``` - -## Viewer Modes Explained - -### Rerun Web (`rerun-web`) - -**What you get:** -- Full dashboard at http://localhost:7779 -- Rerun 3D viewer + command center sidebar in one page -- Works in browser, no display required (headless-friendly) - ---- - -### Rerun Native (`rerun-native`) - -**What you get:** -- Native Rerun application (separate window opens automatically) -- Command center at http://localhost:7779 -- Better performance with larger maps/higher resolution - ---- - -### Foxglove (`foxglove`) - -**What you get:** -- Foxglove bridge on ws://localhost:8765 -- No Rerun (saves resources) -- Better performance with larger maps/higher resolution -- Open layout: `dimos/assets/foxglove_dashboards/go2.json` - ---- - -## Performance Tuning - -### Symptom: Slow Map Updates - -If you notice: -- Robot appears to "walk across empty space" -- Costmap updates lag behind the robot -- Visualization stutters or freezes - -This happens on lower-end hardware (NUC, older laptops) with large maps. 
- -### Increase Voxel Size - -Edit [`dimos/robot/unitree_webrtc/unitree_go2_blueprints.py`](/dimos/robot/unitree_webrtc/unitree_go2_blueprints.py) line 82: - -```python -# Before (high detail, slower on large maps) -voxel_mapper(voxel_size=0.05), # 5cm voxels - -# After (lower detail, 8x faster) -voxel_mapper(voxel_size=0.1), # 10cm voxels -``` - -**Trade-off:** -- Larger voxels = fewer voxels = faster updates -- But slightly less detail in the map - ---- - -## How to use Rerun on `dev` (and the TF/entity nuances) - -Rerun on `dev` is **module-driven**: modules decide what to log, and `ModuleBlueprintSet.build()` sets up the shared viewer + default layout. - -### Rerun lifecycle (what happens automatically vs what modules must do) - -- **Server/viewer startup happens in the build step** - - [`dimos/core/blueprints.py`](/dimos/core/blueprints.py#L26) calls `init_rerun_server()` (from [`dimos/dashboard/rerun_init.py`](/dimos/dashboard/rerun_init.py#L18)) when `GlobalConfig.viewer_backend` starts with `rerun` and `rerun_enabled=True`. - -- **Worker processes must connect before logging** - - If a module is going to call `rr.log(...)`, it should call `connect_rerun(global_config=...)` first (see examples in: - - [`dimos/robot/unitree/connection/go2.py`](/dimos/robot/unitree/connection/go2.py) - - [`dimos/mapping/costmapper.py`](/dimos/mapping/costmapper.py) - - [`dimos/mapping/voxels.py`](/dimos/mapping/voxels.py) - - [`dimos/navigation/replanning_a_star/module.py`](/dimos/navigation/replanning_a_star/module.py) - ). - -### Panels and how to use `rr.log` in DimOS - -On `dev`, the default layout is composed from modules’ `rerun_views()` contributions: - -- Implement `@classmethod rerun_views()` on your module to add panels (2D/3D/time series). -- The build step aggregates them and sends a composed blueprint (see [`dimos/core/blueprints.py`](/dimos/core/blueprints.py)). 
- -If you `rr.log("some/new/entity", ...)` but don’t see it where you expect: -- it may not be included by any existing view panel, so add a `rerun_views()` panel that points at your entity path. - -### TF visualization + scene wiring: the `tf_rerun()` helper - -The intended pattern for TF visualization on `dev` is: - -- **Publish transforms normally** via `self.tf.publish(...)` (TF is available on every module). -- In blueprints, add `tf_rerun(...)` which composes two modules: - 1. **`TFRerunModule`**: Polls `self.tf.buffers` at a configurable rate and logs the latest transform per edge under `world/tf/{child}` using `Transform.to_rerun()`. - 2. **`RerunSceneWiringModule`**: Logs static scene setup (view coordinates, entity-path attachments, optional URDF, axes, camera pinholes). - -The `tf_rerun()` helper accepts scene configuration: - -```python -from dimos.dashboard.tf_rerun_module import tf_rerun - -# Default: TF polling + minimal scene wiring (world + world/robot attachments) -tf_rerun() - -# With URDF and camera: -tf_rerun( - urdf_path=Path("path/to/robot.urdf"), - cameras=[ - ("world/robot/camera", "camera_optical", GO2Connection.camera_info_static), - ], -) - -# Multi-camera robot: -tf_rerun( - cameras=[ - ("world/robot/cameras/front", "cam_front_optical", front_cam_info), - ("world/robot/cameras/left", "cam_left_optical", left_cam_info), - ("world/robot/cameras/right", "cam_right_optical", right_cam_info), - ("world/robot/cameras/rear", "cam_rear_optical", rear_cam_info), - ], -) - -# Disable scene wiring entirely (TF polling only): -tf_rerun(scene=False) -``` - -### Entity paths vs TF frames - -Rerun has two “spaces” you’re always juggling: - -- **Entity paths**: strings like `world/robot/camera/rgb` (how data is organized/browsed). -- **Transform frames**: names like `base_link`, `camera_optical` (how motion is defined). 
- -In DimOS on `dev`: -- TF frames come from the TF system (`Transform.frame_id` / `Transform.child_frame_id`) and are logged via `Transform.to_rerun()` (see [`dimos/msgs/geometry_msgs/Transform.py`](/dimos/msgs/geometry_msgs/Transform.py)). -- Visualization entity paths should be treated as **semantic organization** (`world/**`, `metrics/**`, etc). - -**Rule of thumb**: -- Put geometry and sensor data under semantic paths (e.g. `world/robot/**`, `world/nav/**`). -- Drive motion through TF by emitting transforms via `self.tf.publish(...)`. -- If you need a semantic entity to "live in" a particular TF frame (e.g. camera frustum under `camera_optical`), configure it via `tf_rerun(cameras=[...])` which handles the attachment in [`dimos/dashboard/rerun_scene_wiring.py`](/dimos/dashboard/rerun_scene_wiring.py#L20). - -### Cameras: pinhole projection vs lens distortion - -- The camera frustum/projection comes from `CameraInfo.to_rerun()` → `rr.Pinhole(...)` (see [`dimos/msgs/sensor_msgs/CameraInfo.py`](/dimos/msgs/sensor_msgs/CameraInfo.py)). -- Rerun pinholes model **intrinsics** (focal length / principal point / resolution). Lens distortion coefficients are **not** part of the pinhole archetype. If you need distortion-correct visualization, you must undistort upstream and log the undistorted image. - -## Appendix: Where Rerun is used in the codebase - -This appendix is an **inventory of every current Rerun touchpoint** in the repository (as of this doc), grouped by role (good for reference). - -### Rerun lifecycle (server, viewer, client connections) - -- **`GlobalConfig` flags** - - **File**: [`dimos/core/global_config.py`](/dimos/core/global_config.py) - - **What**: Defines `rerun_enabled`, `viewer_backend` (`rerun-web`, `rerun-native`, `foxglove`), and `rerun_server_addr`. 
- -- **Rerun process lifecycle** - - **File**: [`dimos/dashboard/rerun_init.py`](/dimos/dashboard/rerun_init.py) - - **What**: - - `init_rerun_server()` starts the gRPC server and optionally the native/web viewer (`rr.spawn`, `rr.serve_grpc`, `rr.serve_web_viewer`). - - `connect_rerun()` connects a process to the shared recording (`rr.connect_grpc`). - - `shutdown_rerun()` disconnects (`rr.disconnect`). - -- **Dashboard re-exports** - - **File**: [`dimos/dashboard/__init__.py`](/dimos/dashboard/__init__.py) - - **What**: Re-exports `connect_rerun`, `init_rerun_server`, `shutdown_rerun`. - -### Blueprint/layout composition (Rerun UI) - -- **Blueprint composition and server init during build** - - **File**: [`dimos/core/blueprints.py`](/dimos/core/blueprints.py) - - **What**: - - Calls `init_rerun_server()` during `ModuleBlueprintSet.build()` when backend is Rerun. - - Collects per-module `rerun_views()` panels and composes a default `rrb.Blueprint(...)`. - - Sends the blueprint via `rr.send_blueprint(...)`. - -### TF visualization - -- **TF visualization module (polling snapshot)** - - **File**: [`dimos/dashboard/tf_rerun_module.py`](/dimos/dashboard/tf_rerun_module.py) - - **What**: Polls `self.tf.buffers` at a configurable rate (`poll_hz`) and logs the latest transform per TF edge to `world/tf/{child}` using `Transform.to_rerun()`. - -- **Scene wiring module (static setup)** - - **File**: [`dimos/dashboard/rerun_scene_wiring.py`](/dimos/dashboard/rerun_scene_wiring.py) - - **What**: Logs all static Rerun scene setup once at startup: - - View coordinates (`rr.ViewCoordinates.RIGHT_HAND_Z_UP`) - - Entity-path attachments under named TF frames (`world` → `world`, `world/robot` → `base_link`) - - Optional URDF load under `world/robot` - - Optional axes gizmo at `world/robot/axes` - - Camera entity-path attachments + pinholes (configurable via tuple list) - - **Why**: Keeps robot modules focused on I/O + TF publishing; centralizes visualization scene wiring. 
- -- **`tf_rerun()` helper (composed blueprint)** - - **File**: [`dimos/dashboard/tf_rerun_module.py`](/dimos/dashboard/tf_rerun_module.py) - - **What**: Returns a `ModuleBlueprintSet` that composes `TFRerunModule` + `RerunSceneWiringModule` via `autoconnect(...)`. Blueprints add one line (`tf_rerun(...)`) to get both TF polling and scene wiring. - -- **TF message → Rerun entity mapping** - - **File**: [`dimos/msgs/tf2_msgs/TFMessage.py`](/dimos/msgs/tf2_msgs/TFMessage.py) - - **What**: `TFMessage.to_rerun()` returns `(entity_path, rr.Transform3D)` pairs for each transform, currently under `world/tf/{child_frame_id}`. - -- **Transform → Rerun transform archetype** - - **File**: [`dimos/msgs/geometry_msgs/Transform.py`](/dimos/msgs/geometry_msgs/Transform.py) - - **What**: `Transform.to_rerun()` produces `rr.Transform3D(parent_frame=..., child_frame=...)`. - -### Robot/device visualization (GO2) - -- **GO2 connection: sensor data logging + TF publishing** - - **File**: [`dimos/robot/unitree/connection/go2.py`](/dimos/robot/unitree/connection/go2.py) - - **What**: - - Connects to Rerun via `connect_rerun()` (only to log sensor data). - - Publishes TF transforms via `self.tf.publish(...)` (base_link, camera_link, camera_optical). - - Logs camera images to `world/robot/camera/rgb`. - - Contributes a camera panel via `rerun_views()` (`rrb.Spatial2DView(origin="world/robot/camera/rgb")`). - - **Note**: Static scene setup (view coordinates, URDF, axes, camera entity attachments, pinholes) is handled by `RerunSceneWiringModule` via `tf_rerun(...)` in the blueprint. GO2 no longer does this directly. - -### Mapping/navigation visualization (modules) - -- **Costmap visualization + metrics** - - **File**: [`dimos/mapping/costmapper.py`](/dimos/mapping/costmapper.py) - - **What**: - - Logs 2D costmap image at `world/nav/costmap/image` (`OccupancyGrid.to_rerun(mode="image")`). - - Logs 3D floor overlay at `world/nav/costmap/floor` (`mode="mesh"`). 
- - Logs time series metrics under `metrics/costmap/*` via `rr.Scalars`. - - Contributes a 2D panel (`rrb.Spatial2DView(origin="world/nav/costmap/image")`) and metrics panel via `rerun_views()`. - -- **Voxel map visualization + metrics** - - **File**: [`dimos/mapping/voxels.py`](/dimos/mapping/voxels.py) - - **What**: - - Logs voxel map at `world/map` via `PointCloud2.to_rerun(mode="boxes", ...)`. - - Logs time series metrics under `metrics/voxel_map/*` via `rr.Scalars`. - - Contributes metrics panels via `rerun_views()`. - -- **Planner debugging path logging** - - **File**: [`dimos/navigation/replanning_a_star/module.py`](/dimos/navigation/replanning_a_star/module.py) - - **What**: Logs navigation path at `world/nav/path` using `Path.to_rerun()` when Rerun backend is active. - -### Metrics helpers - -- **Timing decorator (logs to Rerun scalars)** - - **File**: [`dimos/utils/metrics.py`](/dimos/utils/metrics.py) - - **What**: `log_timing_to_rerun(entity_path)` wraps a function and logs its duration to `rr.Scalars(...)` at the given path. - -### Message-level `to_rerun()` implementations (conversion layer) - -These pull `rerun` into the message layer by returning Rerun archetypes. - -- **Camera intrinsics → pinhole** - - **File**: [`dimos/msgs/sensor_msgs/CameraInfo.py`](/dimos/msgs/sensor_msgs/CameraInfo.py) - - **What**: `CameraInfo.to_rerun()` returns `rr.Pinhole(...)` for frustum/projection (intrinsics only). - -- **PointCloud2 → points/boxes** - - **File**: [`dimos/msgs/sensor_msgs/PointCloud2.py`](/dimos/msgs/sensor_msgs/PointCloud2.py) - - **What**: `PointCloud2.to_rerun()` returns `rr.Points3D(...)` or `rr.Boxes3D(...)` depending on mode. - -- **Image/DepthImage formatting** - - **File**: [`dimos/msgs/sensor_msgs/image_impls/AbstractImage.py`](/dimos/msgs/sensor_msgs/image_impls/AbstractImage.py) - - **What**: Helpers that construct `rr.Image(...)` / `rr.DepthImage(...)` with appropriate color model. 
- -- **OccupancyGrid → image/mesh/points** - - **File**: [`dimos/msgs/nav_msgs/OccupancyGrid.py`](/dimos/msgs/nav_msgs/OccupancyGrid.py) - - **What**: `OccupancyGrid.to_rerun(mode="image"|"mesh"|"points")` returns `rr.Image`, `rr.Mesh3D`, or `rr.Points3D`. - -- **Path → line strips** - - **File**: [`dimos/msgs/nav_msgs/Path.py`](/dimos/msgs/nav_msgs/Path.py) - - **What**: `Path.to_rerun()` returns `rr.LineStrips3D(...)`. - -- **PoseStamped → transform (and arrows)** - - **File**: [`dimos/msgs/geometry_msgs/PoseStamped.py`](/dimos/msgs/geometry_msgs/PoseStamped.py) - - **What**: `PoseStamped.to_rerun()` returns `rr.Transform3D(...)` (and includes arrow helpers using `rr.Arrows3D`). - -### Web UI embedding (split-screen dashboard) - -- **Split-screen dashboard HTML** - - **File**: [`dimos/web/templates/rerun_dashboard.html`](/dimos/web/templates/rerun_dashboard.html) - - **What**: Embeds: - - Rerun web viewer iframe (`http://localhost:9090/?url=...9876/proxy`) - - command center iframe (`http://localhost:7779/command-center`) - -- **Websocket visualization server** - - **File**: [`dimos/web/websocket_vis/websocket_vis_module.py`](/dimos/web/websocket_vis/websocket_vis_module.py) - - **What**: Serves either the split-screen dashboard or command-center-only depending on `viewer_backend`. - -- **Command center client** - - **File**: [`dimos/web/command-center-extension/src/Connection.ts`](/dimos/web/command-center-extension/src/Connection.ts) - - **What**: Connects to the websocket server on port `7779` (not Rerun SDK, but part of the Rerun-web dashboard experience). - -### Related documentation - -- **TF and transforms concepts** - - **File**: [`docs/api/transforms.md`](/docs/api/transforms.md) - - **What**: Explains frames/transforms and how `self.tf` is intended to be used. 
diff --git a/docs/agents/docs/doclinks.md b/docs/agents/docs/doclinks.md index 07facdcbe4..d5533c5983 100644 --- a/docs/agents/docs/doclinks.md +++ b/docs/agents/docs/doclinks.md @@ -1,4 +1,4 @@ -When writing or editing markdown documentation, use the `doclinks` tool to resolve file references. +When writing or editing markdown documentation, use `doclinks` tool to resolve file references. Full documentation if needed: [`utils/docs/doclinks.md`](/dimos/utils/docs/doclinks.md) diff --git a/docs/agents/docs/index.md b/docs/agents/docs/index.md index 94dd64b72a..bec2ce79e6 100644 --- a/docs/agents/docs/index.md +++ b/docs/agents/docs/index.md @@ -2,31 +2,27 @@ # Code Blocks **All code blocks must be executable.** -Never write illustrative/pseudocode blocks. +Never write illustrative/pseudo code blocks. If you're showing an API usage pattern, create a minimal working example that actually runs. This ensures documentation stays correct as the codebase evolves. -After writing a code block in your markdown file, you can run it by executing: -```bash -md-babel-py run document.md -``` +After writing a code block in your markdown file, you can run it by executing +`md-babel-py run document.md` -More information on this tool is in [codeblocks](/docs/agents/docs/codeblocks.md). +more information on this tool is in [codeblocks](/docs/agents/docs_agent/codeblocks.md) # Code or Docs Links -After adding a link to a doc, run +After adding a link to a doc run -```bash -doclinks document.md -``` +`doclinks document.md` ### Code file references ```markdown See [`service/spec.py`](/dimos/protocol/service/spec.py) for the implementation. ``` -After running doclinks, it becomes: +After running doclinks, becomes: ```markdown See [`service/spec.py`](/dimos/protocol/service/spec.py) for the implementation. ``` @@ -52,7 +48,7 @@ Becomes: See [Configuration](/docs/concepts/configuration.md) for more details. ``` -More information on this is in [doclinks](/docs/agents/docs/doclinks.md). 
+More information on this in [doclinks](/docs/agents/docs_agent/doclinks.md) # Pikchr diff --git a/docs/agents/index.md b/docs/agents/index.md new file mode 100644 index 0000000000..ec9d66e886 --- /dev/null +++ b/docs/agents/index.md @@ -0,0 +1,19 @@ +# For Agents + +These docs are mostly for coding agents + +```sh +tree . -P '*.md' --prune +``` + + +``` +. +├── docs +│   ├── codeblocks.md +│   ├── doclinks.md +│   └── index.md +└── index.md + +2 directories, 4 files +``` diff --git a/docs/capabilities/agents/readme.md b/docs/capabilities/agents/readme.md new file mode 100644 index 0000000000..57be659e9c --- /dev/null +++ b/docs/capabilities/agents/readme.md @@ -0,0 +1 @@ +# Agents diff --git a/docs/capabilities/manipulation/readme.md b/docs/capabilities/manipulation/readme.md new file mode 100644 index 0000000000..91dada0395 --- /dev/null +++ b/docs/capabilities/manipulation/readme.md @@ -0,0 +1,112 @@ +# Manipulation + +Motion planning and teleoperation for robotic manipulators. Uses Drake for physics simulation and Meshcat for 3D visualization. + +## Quick Start + +### Keyboard Teleop (single command) + +Each blueprint launches the full stack — keyboard UI, mock controller, IK solver, and Drake visualization: + +```bash +dimos run keyboard-teleop-piper # Piper 6-DOF +dimos run keyboard-teleop-xarm6 # XArm6 6-DOF +dimos run keyboard-teleop-xarm7 # XArm7 7-DOF +``` + +Open the Meshcat URL printed in the terminal (default `http://localhost:7000`) to see the robot. 
+ +Keyboard controls: + +| Key | Action | +|-----|--------| +| W/S | +X/-X (forward/back) | +| A/D | -Y/+Y (left/right) | +| Q/E | +Z/-Z (up/down) | +| R/F | +Roll/-Roll | +| T/G | +Pitch/-Pitch | +| Y/H | +Yaw/-Yaw | +| SPACE | Reset to home pose | +| ESC | Quit | + +### Motion Planning (two terminals) + +```bash +# Terminal 1: Mock coordinator +dimos run coordinator-mock + +# Terminal 2: Planner with Drake visualization +dimos run xarm7-planner-coordinator +``` + +Then use the IPython client: + +```bash +python -m dimos.manipulation.planning.examples.manipulation_client +``` + +```python +joints() # Get current joints +plan([0.1] * 7) # Plan to target +preview() # Preview in Meshcat +execute() # Execute via coordinator +``` + +### Perception + Agent + +```bash +# Terminal 1: Coordinator with real xarm7 +dimos run coordinator-xarm7 + +# Terminal 2: Perception + manipulation + LLM agent +dimos run xarm-perception-agent +``` + +## Architecture + +``` +KeyboardTeleopModule ──→ ControlCoordinator ──→ ManipulationModule + (pygame UI) (100Hz tick loop) (Drake + Meshcat) + │ │ │ + PoseStamped CartesianIK task RRT planner + commands (Pinocchio IK) JacobianIK + │ DrakeWorld + JointState ────────────→ (visualization) +``` + +- **KeyboardTeleopModule** — Pygame UI publishing cartesian pose commands +- **ControlCoordinator** — 100Hz control loop with mock or real hardware adapters +- **ManipulationModule** — Drake physics, Meshcat viz, RRT motion planning, obstacle management + +## Blueprints + +| Blueprint | Description | +|-----------|-------------| +| `keyboard-teleop-piper` | Piper 6-DOF keyboard teleop with Drake viz | +| `keyboard-teleop-xarm6` | XArm6 6-DOF keyboard teleop with Drake viz | +| `keyboard-teleop-xarm7` | XArm7 7-DOF keyboard teleop with Drake viz | +| `xarm6-planner-only` | XArm6 standalone planner (no coordinator) | +| `xarm7-planner-coordinator` | XArm7 planner with coordinator integration | +| `dual-xarm6-planner` | Dual XArm6 planning | +| 
`xarm-perception` | XArm7 + RealSense camera for perception | +| `xarm-perception-agent` | XArm7 perception + LLM agent | + +## Supported Robots + +| Robot | DOF | Teleop | Planning | Perception | +|-------|-----|--------|----------|------------| +| Piper | 6 | Y | Y | — | +| XArm6 | 6 | Y | Y | — | +| XArm7 | 7 | Y | Y | Y | + +## Key Files + +| File | Description | +|------|-------------| +| [`manipulation_module.py`](/dimos/manipulation/manipulation_module.py) | Main module (RPC interface, state machine) | +| [`manipulation_blueprints.py`](/dimos/manipulation/manipulation_blueprints.py) | Planner and perception blueprints | +| [`robot/manipulators/piper/blueprints.py`](/dimos/robot/manipulators/piper/blueprints.py) | Piper keyboard teleop blueprint | +| [`robot/manipulators/xarm/blueprints.py`](/dimos/robot/manipulators/xarm/blueprints.py) | XArm keyboard teleop blueprints | +| [`teleop/keyboard/keyboard_teleop_module.py`](/dimos/teleop/keyboard/keyboard_teleop_module.py) | Keyboard teleop module | +| [`planning/world/drake_world.py`](/dimos/manipulation/planning/world/drake_world.py) | Drake physics backend | +| [`planning/planners/rrt_planner.py`](/dimos/manipulation/planning/planners/rrt_planner.py) | RRT-Connect motion planner | diff --git a/docs/capabilities/navigation/native/assets/1-lidar.png b/docs/capabilities/navigation/native/assets/1-lidar.png new file mode 100644 index 0000000000..6584ee90cb --- /dev/null +++ b/docs/capabilities/navigation/native/assets/1-lidar.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d76742ada18d20dc0e3a3be04159d3412e7df6acee8596ff37916f0f269d3e0 +size 597386 diff --git a/docs/capabilities/navigation/native/assets/2-globalmap.png b/docs/capabilities/navigation/native/assets/2-globalmap.png new file mode 100644 index 0000000000..55541a8fcb --- /dev/null +++ b/docs/capabilities/navigation/native/assets/2-globalmap.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:bc2f27ec2dcc4048acde6b53229c7596b3a7f6ed6afad30c4cd062cf5751bd24 +size 1104485 diff --git a/docs/capabilities/navigation/native/assets/3-globalcostmap.png b/docs/capabilities/navigation/native/assets/3-globalcostmap.png new file mode 100644 index 0000000000..907d0b0448 --- /dev/null +++ b/docs/capabilities/navigation/native/assets/3-globalcostmap.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d1f9e6c142b220f1a4be7b08950f628a2d34e26caba8a1f5c100726bec6c88ef +size 793366 diff --git a/docs/capabilities/navigation/native/assets/4-navcostmap.png b/docs/capabilities/navigation/native/assets/4-navcostmap.png new file mode 100644 index 0000000000..6c40bce0e0 --- /dev/null +++ b/docs/capabilities/navigation/native/assets/4-navcostmap.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9ee4332e3d92162ddf41a0137c2ab5b6a885d758aa5a27037e413cdd4d946436 +size 741912 diff --git a/docs/capabilities/navigation/native/assets/5-all.png b/docs/capabilities/navigation/native/assets/5-all.png new file mode 100644 index 0000000000..655be72c1c --- /dev/null +++ b/docs/capabilities/navigation/native/assets/5-all.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a777d315beac6f4773adcb5c27384fd983720083941b4f62060958ddf6c16d2 +size 1209867 diff --git a/docs/capabilities/navigation/native/assets/go2_blueprint.svg b/docs/capabilities/navigation/native/assets/go2_blueprint.svg new file mode 100644 index 0000000000..51b0e7c40f --- /dev/null +++ b/docs/capabilities/navigation/native/assets/go2_blueprint.svg @@ -0,0 +1,188 @@ + + + + + + +modules + +cluster_mapping + +mapping + + +cluster_navigation + +navigation + + +cluster_robot + +robot + + +cluster_visualization + +visualization + + + +CostMapper + +CostMapper + + + +chan_global_costmap_OccupancyGrid + + + +global_costmap:OccupancyGrid + + + +CostMapper->chan_global_costmap_OccupancyGrid + + + + +VoxelGridMapper + +VoxelGridMapper + + + 
+chan_global_map_PointCloud2 + + + +global_map:PointCloud2 + + + +VoxelGridMapper->chan_global_map_PointCloud2 + + + + +ReplanningAStarPlanner + +ReplanningAStarPlanner + + + +chan_cmd_vel_Twist + + + +cmd_vel:Twist + + + +ReplanningAStarPlanner->chan_cmd_vel_Twist + + + + +chan_goal_reached_Bool + + + +goal_reached:Bool + + + +ReplanningAStarPlanner->chan_goal_reached_Bool + + + + +WavefrontFrontierExplorer + +WavefrontFrontierExplorer + + + +chan_goal_request_PoseStamped + + + +goal_request:PoseStamped + + + +WavefrontFrontierExplorer->chan_goal_request_PoseStamped + + + + +GO2Connection + +GO2Connection + + + +chan_lidar_PointCloud2 + + + +lidar:PointCloud2 + + + +GO2Connection->chan_lidar_PointCloud2 + + + + +RerunBridgeModule + +RerunBridgeModule + + + +chan_cmd_vel_Twist->GO2Connection + + + + + +chan_global_costmap_OccupancyGrid->ReplanningAStarPlanner + + + + + +chan_global_costmap_OccupancyGrid->WavefrontFrontierExplorer + + + + + +chan_global_map_PointCloud2->CostMapper + + + + + +chan_goal_reached_Bool->WavefrontFrontierExplorer + + + + + +chan_goal_request_PoseStamped->ReplanningAStarPlanner + + + + + +chan_lidar_PointCloud2->VoxelGridMapper + + + + + diff --git a/docs/capabilities/navigation/native/assets/go2nav_dataflow.svg b/docs/capabilities/navigation/native/assets/go2nav_dataflow.svg new file mode 100644 index 0000000000..94bb3e39ee --- /dev/null +++ b/docs/capabilities/navigation/native/assets/go2nav_dataflow.svg @@ -0,0 +1,22 @@ + + +Go2 + + + +VoxelGridMapper + + + +CostMapper + + + +Navigation +PointCloud2 +PointCloud2 +OccupancyGrid + + +Twist + diff --git a/docs/capabilities/navigation/native/assets/noros_nav.gif b/docs/capabilities/navigation/native/assets/noros_nav.gif new file mode 100644 index 0000000000..ab47bb9cb5 --- /dev/null +++ b/docs/capabilities/navigation/native/assets/noros_nav.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:60f842cd2fda539338443b3c501197fbb875f5c5f3883ba3ffdd17005e9bd786 +size 
612786 diff --git a/docs/capabilities/navigation/native/index.md b/docs/capabilities/navigation/native/index.md new file mode 100644 index 0000000000..115c6f0ee2 --- /dev/null +++ b/docs/capabilities/navigation/native/index.md @@ -0,0 +1,144 @@ +# Go2 Non-ROS Navigation + + + +The Go2 navigation stack runs entirely without ROS. It uses a **column-carving voxel map** strategy: each new LiDAR frame replaces the corresponding region of the global map entirely, ensuring the map always reflects the latest observations. + +## Data Flow + +
+diagram source + +```pikchr fold output=assets/go2nav_dataflow.svg +color = white +fill = none + +Go2: box "Go2" rad 5px fit wid 170% ht 170% +arrow right 0.5in +Vox: box "VoxelGridMapper" rad 5px fit wid 170% ht 170% +arrow right 0.5in +Cost: box "CostMapper" rad 5px fit wid 170% ht 170% +arrow right 0.5in +Nav: box "Navigation" rad 5px fit wid 170% ht 170% + +M1: dot at 1/2 way between Go2.e and Vox.w invisible +text "PointCloud2" italic at (M1.x, Go2.n.y + 0.15in) + +M2: dot at 1/2 way between Vox.e and Cost.w invisible +text "PointCloud2" italic at (M2.x, Vox.n.y + 0.15in) + +M3: dot at 1/2 way between Cost.e and Nav.w invisible +text "OccupancyGrid" italic at (M3.x, Cost.n.y + 0.15in) + +arrow dashed from Nav.s down 0.3in then left until even with Go2.s then to Go2.s +M4: dot at 1/2 way between Go2.s and Nav.s invisible +text "Twist" italic at (M4.x, Nav.s.y - 0.45in) +``` + +
+ + +![output](assets/go2nav_dataflow.svg) +## Pipeline Steps + +### 1. LiDAR Frame — [`GO2Connection`](/dimos/robot/unitree/go2/connection.py) + +We don't connect to the LiDAR directly — instead we use Unitree's WebRTC client (via [legion's webrtc driver](https://github.com/legion1581/unitree_webrtc_connect)), which streams a heavily preprocessed 5cm voxel grid rather than raw point cloud data. This allows us to support stock, unjailbroken Go2 Air and Pro models out of the box. + +![LiDAR frame](assets/1-lidar.png) + +### 2. Global Voxel Map — [`VoxelGridMapper`](/dimos/mapping/voxels.py) + +The [`VoxelGridMapper`](/dimos/mapping/voxels.py) maintains a sparse 3D occupancy grid using Open3D's `VoxelBlockGrid` backed by a hash map. Each voxel is a 5cm cube by default. + +The voxel hash map provides O(1) insert/erase/lookup, so this is efficient even with millions of voxels. The grid runs on **CUDA** by default for speed, with CPU fallback. + +Each incoming LiDAR frame is spliced into the global map via column carving. We consider any previously mapped voxels in the space of a received LiDAR frame stale; by erasing entire Z-columns in the footprint, we guarantee: + +- No ghost obstacles from previous passes +- Dynamic objects (people, doors) get cleared automatically +- The latest observation always wins + +We don't have proper loop closure or stable odometry; we trust the odometry the Go2 reports, which is surprisingly stable but does drift eventually. You will reliably map and navigate through very large spaces (500 sqm in our tests), but you won't go down the street to a supermarket.
+ + +#### Configuration + +| Parameter | Default | Description | +|--------------------|-----------|---------------------------------------------------------| +| `voxel_size` | 0.05 | Voxel cube size in meters | +| `block_count` | 2,000,000 | Max voxels in hash map | +| `device` | `CUDA:0` | Compute device (`CUDA:0` or `CPU:0`) | +| `carve_columns` | `true` | Enable column carving (disable for append-only mapping) | +| `publish_interval` | 0 | Seconds between map publishes (0 = every frame) | + +![Global map](assets/2-globalmap.png) + +### 3. Global Costmap — [`CostMapper`](/dimos/mapping/costmapper.py) + +The [`CostMapper`](/dimos/mapping/costmapper.py) converts the 3D voxel map into a 2D occupancy grid. The default algorithm (`height_cost`) maps rate of change of Z, with some smoothing. + +Algorithm settings are in [`occupancy.py`](/dimos/mapping/pointclouds/occupancy.py) and can be configured per robot. + + +#### Configuration + +```python skip +@dataclass(frozen=True) +class HeightCostConfig(OccupancyConfig): +    """Config for height-cost based occupancy (terrain slope analysis).""" +    can_pass_under: float = 0.6 +    can_climb: float = 0.15 +    ignore_noise: float = 0.05 +    smoothing: float = 1.0 +``` + +| Cost | Meaning | +|------|----------------------------------------------------------| +| 0 | Flat, easy to traverse | +| 50 | Moderate slope (~7.5cm rise per cell in case of go2) | +| 100 | Steep or impassable (≥15cm rise per cell in case of go2) | +| -1 | Unknown (no observations) | + +![Global costmap](assets/3-globalcostmap.png) + +### 4. Navigation Costmap — [`ReplanningAStarPlanner`](/dimos/navigation/replanning_a_star/module.py) + +The planner processes the terrain gradient and computes its own algorithm-specific costmap, prioritizing safe free paths while remaining willing to path aggressively through tight spaces if it has to. + +We run the planner in a constant loop so it will dynamically react to obstacles encountered.
+ +![Navigation costmap with path](assets/4-navcostmap.png) + +### 5. All Layers Combined + +All visualization layers shown together + +![All layers](assets/5-all.png) + +## Blueprint Composition + +The navigation stack is composed in the [`unitree_go2`](/dimos/robot/unitree/go2/blueprints/__init__.py) blueprint: + +```python fold output=assets/go2_blueprint.svg +from dimos.core.blueprints import autoconnect +from dimos.core.introspection import to_svg +from dimos.mapping.costmapper import cost_mapper +from dimos.mapping.voxels import voxel_mapper +from dimos.navigation.frontier_exploration import wavefront_frontier_explorer +from dimos.navigation.replanning_a_star.module import replanning_a_star_planner +from dimos.robot.unitree.go2.blueprints.basic.unitree_go2_basic import unitree_go2_basic + +unitree_go2 = autoconnect( + unitree_go2_basic, # robot connection + visualization + voxel_mapper(voxel_size=0.05), # 3D voxel mapping + cost_mapper(), # 2D costmap generation + replanning_a_star_planner(), # path planning + wavefront_frontier_explorer(), # exploration +).global_config(n_dask_workers=6, robot_model="unitree_go2") + +to_svg(unitree_go2, "assets/go2_blueprint.svg") +``` + + +![output](assets/go2_blueprint.svg) diff --git a/docs/capabilities/navigation/readme.md b/docs/capabilities/navigation/readme.md new file mode 100644 index 0000000000..af26c07f94 --- /dev/null +++ b/docs/capabilities/navigation/readme.md @@ -0,0 +1,10 @@ +# Navigation + + +## Non-ROS + +- [Go2 Navigation](native/index.md) — column-carving voxel mapping + slope-based costmap + +## ROS + +See [ROS Transports](/docs/api/transports.md) for bridging DimOS streams to ROS topics. 
diff --git a/docs/capabilities/perception/readme.md b/docs/capabilities/perception/readme.md new file mode 100644 index 0000000000..5d6e089dbf --- /dev/null +++ b/docs/capabilities/perception/readme.md @@ -0,0 +1,3 @@ +# Perception + +## Detections diff --git a/docs/concepts/transports.md b/docs/concepts/transports.md deleted file mode 100644 index 62279b6baf..0000000000 --- a/docs/concepts/transports.md +++ /dev/null @@ -1,374 +0,0 @@ - -# Dimos Transports - -Transports enable communication between [modules](modules.md) across process boundaries and networks. When modules run in different processes or on different machines, they need a transport layer to exchange messages. - -While the interface is called "PubSub", transports aren't limited to traditional pub-sub services. A topic can be anything that identifies a communication channel: an IP address and port, a shared memory segment name, a file path, or a Redis channel. The abstraction is flexible enough to support any communication pattern that can publish and subscribe to named channels. - -## The PubSub Interface - -At the core of all transports is the `PubSub` abstract class. 
Any transport implementation must provide two methods: - -```python session=pubsub_demo ansi=false -from dimos.protocol.pubsub.spec import PubSub - -# The interface every transport must implement: -import inspect -print(inspect.getsource(PubSub.publish)) -print(inspect.getsource(PubSub.subscribe)) -``` - - -``` -Session process exited unexpectedly: -/home/lesh/coding/dimos/.venv/bin/python3: No module named md_babel_py.session_server - -``` - -Key points: -- `publish(topic, message)` - Send a message to all subscribers on a topic -- `subscribe(topic, callback)` - Register a callback, returns an unsubscribe function - -## Implementing a Simple Transport - -The simplest transport is `Memory`, which works within a single process: - -```python session=memory_demo ansi=false -from dimos.protocol.pubsub.memory import Memory - -# Create a memory transport -bus = Memory() - -# Track received messages -received = [] - -# Subscribe to a topic -unsubscribe = bus.subscribe("sensor/data", lambda msg, topic: received.append(msg)) - -# Publish messages -bus.publish("sensor/data", {"temperature": 22.5}) -bus.publish("sensor/data", {"temperature": 23.0}) - -print(f"Received {len(received)} messages:") -for msg in received: - print(f" {msg}") - -# Unsubscribe when done -unsubscribe() -``` - - -``` -Received 2 messages: - {'temperature': 22.5} - {'temperature': 23.0} -``` - -The full implementation is minimal. See [`memory.py`](/dimos/protocol/pubsub/memory.py) for the complete source. 
- -## Available Transports - -Dimos includes several transport implementations: - -| Transport | Use Case | Process Boundary | Network | -|-----------|----------|------------------|---------| -| `Memory` | Testing, single process | No | No | -| `SharedMemory` | Multi-process on same machine | Yes | No | -| `LCM` | Network communication (UDP multicast) | Yes | Yes | -| `Redis` | Network communication via Redis server | Yes | Yes | - -### SharedMemory Transport - -For inter-process communication on the same machine, `SharedMemory` provides high-performance message passing: - -```python session=shm_demo ansi=false -from dimos.protocol.pubsub.shmpubsub import PickleSharedMemory - -shm = PickleSharedMemory(prefer="cpu") -shm.start() - -received = [] -shm.subscribe("test/topic", lambda msg, topic: received.append(msg)) -shm.publish("test/topic", {"data": [1, 2, 3]}) - -import time -time.sleep(0.1) # Allow message to propagate - -print(f"Received: {received}") -shm.stop() -``` - - -``` -Received: [{'data': [1, 2, 3]}] -``` - -### LCM Transport - -For network communication, LCM uses UDP multicast and supports typed messages: - -```python session=lcm_demo ansi=false -from dimos.protocol.pubsub.lcmpubsub import LCM, Topic -from dimos.msgs.geometry_msgs import Vector3 - -lcm = LCM(autoconf=True) -lcm.start() - -received = [] -topic = Topic(topic="/robot/velocity", lcm_type=Vector3) - -lcm.subscribe(topic, lambda msg, t: received.append(msg)) -lcm.publish(topic, Vector3(1.0, 0.0, 0.5)) - -import time -time.sleep(0.1) - -print(f"Received velocity: x={received[0].x}, y={received[0].y}, z={received[0].z}") -lcm.stop() -``` - - -``` -Received velocity: x=1.0, y=0.0, z=0.5 -``` - -### Inspecting LCM traffic (CLI) - -- `dimos lcmspy` shows topic frequency/bandwidth stats. -- `dimos topic echo /topic` listens on typed channels like `/topic#pkg.Msg` and decodes automatically. -- `dimos topic echo /topic TypeName` is the explicit legacy form. 
- -## Encoder Mixins - -Transports can use encoder mixins to serialize messages. The `PubSubEncoderMixin` pattern wraps publish/subscribe to encode/decode automatically: - -```python session=encoder_demo ansi=false -from dimos.protocol.pubsub.spec import PubSubEncoderMixin, PickleEncoderMixin - -# PickleEncoderMixin provides: -# - encode(msg, topic) -> bytes (uses pickle.dumps) -# - decode(bytes, topic) -> msg (uses pickle.loads) - -# Create a transport with pickle encoding by mixing in: -from dimos.protocol.pubsub.memory import Memory - -class PickleMemory(PickleEncoderMixin, Memory): - pass - -bus = PickleMemory() -received = [] -bus.subscribe("data", lambda msg, t: received.append(msg)) -bus.publish("data", {"complex": [1, 2, 3], "nested": {"key": "value"}}) - -print(f"Received: {received[0]}") -``` - - -``` -Received: {'complex': [1, 2, 3], 'nested': {'key': 'value'}} -``` - -## Using Transports with Modules - -Modules use the `Transport` wrapper class which adapts `PubSub` to the stream interface. 
You can set a transport on any module stream: - -```python session=module_transport ansi=false -from dimos.core.transport import pLCMTransport, pSHMTransport - -# Transport wrappers for module streams: -# - pLCMTransport: Pickle-encoded LCM -# - LCMTransport: Native LCM encoding -# - pSHMTransport: Pickle-encoded SharedMemory -# - SHMTransport: Native SharedMemory -# - JpegShmTransport: JPEG-compressed images via SharedMemory -# - JpegLcmTransport: JPEG-compressed images via LCM - -# Example: Set a transport on a module output -# camera.set_transport("color_image", pSHMTransport("camera/color")) -print("Available transport wrappers in dimos.core.transport:") -from dimos.core import transport -print([name for name in dir(transport) if "Transport" in name]) -``` - - -``` -Available transport wrappers in dimos.core.transport: -['JpegLcmTransport', 'JpegShmTransport', 'LCMTransport', 'PubSubTransport', 'SHMTransport', 'ZenohTransport', 'pLCMTransport', 'pSHMTransport'] -``` - -## Testing Custom Transports - -The test suite in [`pubsub/test_spec.py`](/dimos/protocol/pubsub/test_spec.py) uses pytest parametrization to run the same tests against all transport implementations. 
To add your custom transport to the test grid: - -```python session=test_grid ansi=false -# The test grid pattern from test_spec.py: -test_pattern = """ -from contextlib import contextmanager - -@contextmanager -def my_transport_context(): - transport = MyCustomTransport() - transport.start() - yield transport - transport.stop() - -# Add to testdata list: -testdata.append( - (my_transport_context, "my_topic", ["value1", "value2", "value3"]) -) -""" -print(test_pattern) -``` - - -``` - -from contextlib import contextmanager - -@contextmanager -def my_transport_context(): - transport = MyCustomTransport() - transport.start() - yield transport - transport.stop() - -# Add to testdata list: -testdata.append( - (my_transport_context, "my_topic", ["value1", "value2", "value3"]) -) - -``` - -The test suite validates: -- Basic publish/subscribe -- Multiple subscribers receiving the same message -- Unsubscribe functionality -- Multiple messages in order -- Async iteration -- High-volume message handling (10,000 messages) - -Run the tests with: -```bash -pytest dimos/protocol/pubsub/test_spec.py -v -``` - -## Creating a Custom Transport - -To implement a new transport: - -1. **Subclass `PubSub`** and implement `publish()` and `subscribe()` -2. **Add encoding** if needed via `PubSubEncoderMixin` -3. **Create a `Transport` wrapper** by subclassing `PubSubTransport` -4. 
**Add to the test grid** in `test_spec.py` - -Here's a minimal template: - -```python session=custom_transport ansi=false -template = ''' -from dimos.protocol.pubsub.spec import PubSub, PickleEncoderMixin -from dimos.core.transport import PubSubTransport - -class MyPubSub(PubSub[str, bytes]): - """Custom pub/sub implementation.""" - - def __init__(self): - self._subscribers = {} - - def start(self): - # Initialize connection/resources - pass - - def stop(self): - # Cleanup - pass - - def publish(self, topic: str, message: bytes) -> None: - # Send message to all subscribers on topic - for cb in self._subscribers.get(topic, []): - cb(message, topic) - - def subscribe(self, topic, callback): - # Register callback, return unsubscribe function - if topic not in self._subscribers: - self._subscribers[topic] = [] - self._subscribers[topic].append(callback) - - def unsubscribe(): - self._subscribers[topic].remove(callback) - return unsubscribe - - -# With pickle encoding -class MyPicklePubSub(PickleEncoderMixin, MyPubSub): - pass - - -# Transport wrapper for use with modules -class MyTransport(PubSubTransport): - def __init__(self, topic: str): - super().__init__(topic) - self.pubsub = MyPicklePubSub() - - def broadcast(self, _, msg): - self.pubsub.publish(self.topic, msg) - - def subscribe(self, callback, selfstream=None): - return self.pubsub.subscribe(self.topic, lambda msg, t: callback(msg)) -''' -print(template) -``` - - -``` - -from dimos.protocol.pubsub.spec import PubSub, PickleEncoderMixin -from dimos.core.transport import PubSubTransport - -class MyPubSub(PubSub[str, bytes]): - """Custom pub/sub implementation.""" - - def __init__(self): - self._subscribers = {} - - def start(self): - # Initialize connection/resources - pass - - def stop(self): - # Cleanup - pass - - def publish(self, topic: str, message: bytes) -> None: - # Send message to all subscribers on topic - for cb in self._subscribers.get(topic, []): - cb(message, topic) - - def subscribe(self, topic, 
callback): - # Register callback, return unsubscribe function - if topic not in self._subscribers: - self._subscribers[topic] = [] - self._subscribers[topic].append(callback) - - def unsubscribe(): - self._subscribers[topic].remove(callback) - return unsubscribe - - -# With pickle encoding -class MyPicklePubSub(PickleEncoderMixin, MyPubSub): - pass - - -# Transport wrapper for use with modules -class MyTransport(PubSubTransport): - def __init__(self, topic: str): - super().__init__(topic) - self.pubsub = MyPicklePubSub() - - def broadcast(self, _, msg): - self.pubsub.publish(self.topic, msg) - - def subscribe(self, callback, selfstream=None): - return self.pubsub.subscribe(self.topic, lambda msg, t: callback(msg)) - -``` diff --git a/docs/development.md b/docs/development.md deleted file mode 100644 index 0109e42768..0000000000 --- a/docs/development.md +++ /dev/null @@ -1,180 +0,0 @@ -# Development Environment Guide - -## Approach - -We optimise for flexibility—if your favourite editor is **notepad.exe**, you’re good to go. Everything below is tooling for convenience. - ---- - -## Dev Containers - -Dev containers give us a reproducible, container-based workspace identical to CI. - -### Why use them? - -* Consistent toolchain across all OSs. -* Unified formatting, linting and type-checking. -* Zero host-level dependencies (apart from Docker). - -### IDE quick start - -Install the *Dev Containers* plug-in for VS Code, Cursor, or your IDE of choice (you’ll likely be prompted automatically when you open our repo). - -### Shell only quick start - -The terminal within your IDE should use devcontainer transparently given you installed the plugin, but in case you want to run our shell without an IDE, you can use `./bin/dev`. -(It depends on npm/node being installed.) - -```sh -./bin/dev -devcontainer CLI (https://github.com/devcontainers/cli) not found. Install into repo root? 
(y/n): y - -added 1 package, and audited 2 packages in 8s -found 0 vulnerabilities - -[1 ms] @devcontainers/cli 0.76.0. Node.js v20.19.0. linux 6.12.27-amd64 x64. -[4838 ms] Start: Run: docker start f0355b6574d9bd277d6eb613e1dc32e3bc18e7493e5b170e335d0e403578bcdb -[5299 ms] f0355b6574d9bd277d6eb613e1dc32e3bc18e7493e5b170e335d0e403578bcdb -{"outcome":"success","containerId":"f0355b6574d9bd277d6eb613e1dc32e3bc18e7493e5b170e335d0e403578bcdb","remoteUser":"root","remoteWorkspaceFolder":"/workspaces/dimos"} - - ██████╗ ██╗███╗ ███╗███████╗███╗ ██╗███████╗██╗ ██████╗ ███╗ ██╗ █████╗ ██╗ - ██╔══██╗██║████╗ ████║██╔════╝████╗ ██║██╔════╝██║██╔═══██╗████╗ ██║██╔══██╗██║ - ██║ ██║██║██╔████╔██║█████╗ ██╔██╗ ██║███████╗██║██║ ██║██╔██╗ ██║███████║██║ - ██║ ██║██║██║╚██╔╝██║██╔══╝ ██║╚██╗██║╚════██║██║██║ ██║██║╚██╗██║██╔══██║██║ - ██████╔╝██║██║ ╚═╝ ██║███████╗██║ ╚████║███████║██║╚██████╔╝██║ ╚████║██║ ██║███████╗ - ╚═════╝ ╚═╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═══╝╚══════╝╚═╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚═╝╚══════╝ - - v_unknown:unknown | Wed May 28 09:23:33 PM UTC 2025 - -root@dimos:/workspaces/dimos # -``` - -The script will: - -* Offer to npm install `@devcontainers/cli` locally (if not available globally) on first run. -* Pull `ghcr.io/dimensionalos/dev:dev` if not present (external contributors: we plan to mirror to Docker Hub). - -You’ll land in the workspace as **root** with all project tooling available. - -## Pre-Commit Hooks - -We use [pre-commit](https://pre-commit.com) (config in `.pre-commit-config.yaml`) to enforce formatting, licence headers, EOLs, LFS checks, etc. Hooks run in **milliseconds**. -Hooks also run in CI. Any auto-fixes are committed back to your PR, so local installation is optional — but gives faster feedback. 
- -```sh -CRLF end-lines checker...................................................Passed -CRLF end-lines remover...................................................Passed -Insert license in comments...............................................Passed -ruff format..............................................................Passed -check for case conflicts.................................................Passed -check json...............................................................Passed -check toml...............................................................Passed -check yaml...............................................................Passed -format json..............................................................Passed -LFS data.................................................................Passed - -``` -Given your editor uses ruff via devcontainers (which it should), the auto-commit hook won't ever reformat your code. Your IDE will have already done this. - -### Running hooks manually - -Given your editor uses git via devcontainers (which it should), auto-commit hooks will run automatically. This is in case you want to run them manually. - -Inside the dev container (Your IDE will likely run this transparently for each commit if using devcontainer plugin): - -```sh -pre-commit run --all-files -``` - -### Installing pre-commit on your host - -```sh -apt install pre-commit # or brew install pre-commit -pre-commit install # install git hook -pre-commit run --all-files -``` - - ---- - -## Testing - -All tests run with **pytest** inside the dev container, ensuring local results match CI. - -### Basic usage - -```sh -./bin/dev # start container -pytest # run all tests beneath the current directory -``` - -Depending on which dir you are in, only tests from that directory will run, which is convenient when developing. You can frequently validate your feature tree. 
- -Your vibe coding agent will know to use these tests via the devcontainer so it can validate its work. - - -#### Useful options - -| Purpose | Command | -| -------------------------- | ----------------------- | -| Show `print()` output | `pytest -s` | -| Filter by name substring | `pytest -k ""` | -| Run tests with a given tag | `pytest -m ` | - - -We use tags for special tests, like `vis` or `tool` for things that aren't meant to be ran in CI and when casually developing, something that requires hardware or visual inspection (pointcloud merging vis etc). - -You can enable a tag by selecting `-m `. These are configured in `./pyproject.toml`. - -```sh -root@dimos:/workspaces/dimos/dimos # pytest -sm vis -k my_visualization -... -``` - -Classic development run within a subtree: - -```sh -./bin/dev - -... container init ... - -root@dimos:/workspaces/dimos # cd dimos/robot/unitree_webrtc/ -root@dimos:/workspaces/dimos/dimos/robot/unitree_webrtc # pytest -collected 27 items / 22 deselected / 5 selected - -type/test_map.py::test_robot_mapping PASSED -type/test_timeseries.py::test_repr PASSED -type/test_timeseries.py::test_equals PASSED -type/test_timeseries.py::test_range PASSED -type/test_timeseries.py::test_duration PASSED - -``` - -Showing prints: - -```sh -root@dimos:/workspaces/dimos/dimos/robot/unitree_webrtc/type # pytest -s test_odometry.py -test_odometry.py::test_odometry_conversion_and_count Odom ts(2025-05-30 13:52:03) pos(→ Vector Vector([0.432199 0.108042 0.316589])), rot(↑ Vector Vector([ 7.7200000e-04 -9.1280000e-03 3.006 -8621e+00])) yaw(172.3°) -Odom ts(2025-05-30 13:52:03) pos(→ Vector Vector([0.433629 0.105965 0.316143])), rot(↑ Vector Vector([ 0.003814 -0.006436 2.99591235])) yaw(171.7°) -Odom ts(2025-05-30 13:52:04) pos(→ Vector Vector([0.434459 0.104739 0.314794])), rot(↗ Vector Vector([ 0.005558 -0.004183 3.00068456])) yaw(171.9°) -Odom ts(2025-05-30 13:52:04) pos(→ Vector Vector([0.435621 0.101699 0.315852])), rot(↑ Vector Vector([ 0.005391 
-0.006002 3.00246893])) yaw(172.0°) -Odom ts(2025-05-30 13:52:04) pos(→ Vector Vector([0.436457 0.09857 0.315254])), rot(↑ Vector Vector([ 0.003358 -0.006916 3.00347172])) yaw(172.1°) -Odom ts(2025-05-30 13:52:04) pos(→ Vector Vector([0.435535 0.097022 0.314399])), rot(↑ Vector Vector([ 1.88300000e-03 -8.17800000e-03 3.00573432e+00])) yaw(172.2°) -Odom ts(2025-05-30 13:52:04) pos(→ Vector Vector([0.433739 0.097553 0.313479])), rot(↑ Vector Vector([ 8.10000000e-05 -8.71700000e-03 3.00729616e+00])) yaw(172.3°) -Odom ts(2025-05-30 13:52:04) pos(→ Vector Vector([0.430924 0.09859 0.31322 ])), rot(↑ Vector Vector([ 1.84000000e-04 -9.68700000e-03 3.00945623e+00])) yaw(172.4°) -... etc -``` ---- - -## Cheatsheet - -| Action | Command | -| --------------------------- | ---------------------------- | -| Enter dev container | `./bin/dev` | -| Run all pre-commit hooks | `pre-commit run --all-files` | -| Install hooks in local repo | `pre-commit install` | -| Run tests in current path | `pytest` | -| Filter tests by name | `pytest -k ""` | -| Enable stdout in tests | `pytest -s` | -| Run tagged tests | `pytest -m ` | diff --git a/docs/development/README.md b/docs/development/README.md index 9517fda6a1..130e86fdaa 100644 --- a/docs/development/README.md +++ b/docs/development/README.md @@ -1,6 +1,6 @@ # Development Guide -1. [How to setup your system](#1-setup) (pick one: system install, nix flake + direnv, pure nix flake) +1. [How to set up your system](#1-setup) (pick one: system install, nix flake + direnv, pure nix flake) 2. [How to hack on DimOS](#2-how-to-hack-on-dimos) (which files to edit, debugging help, etc) 3. [How to make a PR](#3-how-to-make-a-pr) (our expectations for a PR) @@ -16,12 +16,12 @@ All the setup options are for your convenience. If you can get DimOS running on ### Why pick this option? 
(pros/cons/when-to-use) -* Downside: mutates your global system, causing (and receiving) side effects causes it to be unreliable +* Downside: mutates your global system, which can create side effects and make it less reliable * Upside: Often good for a quick hack or exploring * Upside: Sometimes easier for CUDA/GPU acceleration * Use when: you understand system package management (arch linux user) or you don't care about making changes to your system -### How to setup DimOS +### How to set up DimOS ```bash # System dependencies @@ -56,26 +56,21 @@ uv pip install -e '.[base,dev,manipulation,misc,unitree,drone]' # setup pre-commit pre-commit install -# test the install (takes about 3 minutes) +# test the install (takes about 1 minute) uv run pytest dimos ``` Note, a few dependencies do not have PyPI packages and need to be installed from their Git repositories. These are only required for specific features: - **CLIP** and **detectron2**: Required for the Detic open-vocabulary object detector -- **contact_graspnet_pytorch**: Required for robotic grasp prediction You can install them with: ```bash uv add git+https://github.com/openai/CLIP.git -uv add git+https://github.com/dimensionalOS/contact_graspnet_pytorch.git uv add git+https://github.com/facebookresearch/detectron2.git ``` - - - ## Setup Option B: Nix Flake + direnv ### Why pick this option? (pros/cons/when-to-use) @@ -228,15 +221,13 @@ This will save the rerun data to `rerun.json` in the current directory. ## Where is `` located? 
(Architecture) - -* If you want to add a `dimos run ` command see [dimos_run.md](/dimos/robot/cli/README.md) -* If you want to add a camera driver see [depth_camera_integration.md](/docs/depth_camera_integration.md) - -* For edits to manipulation see [manipulation.md](/dimos/hardware/manipulators/README.md) and [manipulation base](/dimos/hardware/manipulators/base/component_based_architecture.md) +* If you want to add a `dimos run ` command see [dimos_run.md](/docs/development/dimos_run.md) +* If you want to add a camera driver see [depth_camera_integration.md](/docs/development/depth_camera_integration.md) +* For edits to manipulation see [manipulation](/dimos/hardware/manipulators/README.md) and the related modules under `dimos/manipulation/`. * `dimos/core/`: Is where stuff like `Module`, `In`, `Out`, and `RPC` live. * `dimos/robot/`: Robot-specific modules live here. * `dimos/hardware/`: Are for sensors, end-effectors, and related individual hardware pieces. -* `dimos/msgs/`: If you're trying to find a type to send a type over a stream, look here. +* `dimos/msgs/`: If you're trying to find a message type to send over a stream, look here. * `dimos/dashboard/`: Contains code related to visualization. * `dimos/protocol/`: Defines low level stuff for communication between modules. * See `dimos/` for the remainder @@ -258,7 +249,7 @@ pytest # run all tests at or below the current directory | Enable stdout in tests | `pytest -s` | | Run tagged tests | `pytest -m ` | -We use tags for special tests, like `vis` or `tool` for things that aren't meant to be ran in CI and when casually developing, something that requires hardware or visual inspection (pointcloud merging vis etc) +We use tags for special tests, like `tool` for things that aren't meant to be run in CI and for cases that require hardware or visual inspection (pointcloud merging visualization, etc). 
You can enable a tag by selecting -m - these are configured in `./pyproject.toml` @@ -268,12 +259,12 @@ You can enable a tag by selecting -m - these are configured in `./pyp - Open the PR against the `dev` branch (not `main`). - **No matter what, provide a few-lines that, when run, let a reviewer test the feature you added** (assuming you changed functional python code). - Less changed files = better. -- If you're writing documentation, see [writing docs](/docs/agents/docs/index.md) for how to write code blocks. +- If you're writing documentation, see [writing docs](/docs/development/writing_docs.md) - If you get mypy errors, please fix them. Don't just add # type: ignore. Please first understand why mypy is complaining and try to fix it. It's only okay to ignore if the issue cannot be fixed. - If you made a change that is likely going to involve a debate, open the github UI and add a graphical comment on that code. Justify your choice and explain downsides of alternatives. - We don't require 100% test coverage, but if you're making a PR of notable python changes you should probably either have unit tests or good reason why not (ex: visualization stuff is hard to test so we don't). - Have the name of your PR start with `WIP:` if its not ready to merge but you want to show someone the changes. -- If you have large (>500kb) files, see [large file management](/docs/data.md) for how to store and load them (don't just commit them). +- If you have large (>500kb) files, see [large file management](/docs/development/large_file_management.md) for how to store and load them (don't just commit them). - So long as you don't disable pre-commit hooks the formatting, license headers, EOLs, LFS checks, etc will be handled automatically by [pre-commit](https://pre-commit.com). If something goes wrong with the hooks you can run the step manually with `pre-commit run --all-files`. - If you're a new hire at DimOS: - Did we mention smaller PR's are better? Smaller PR's are better. 
diff --git a/docs/development/adding_a_custom_arm.md b/docs/development/adding_a_custom_arm.md new file mode 100644 index 0000000000..2b435a50fe --- /dev/null +++ b/docs/development/adding_a_custom_arm.md @@ -0,0 +1,730 @@ +# How to Integrate a New Manipulator Arm + +This guide walks through integrating a new robot arm with DimOS, from writing the hardware adapter to creating blueprints for planning and control. + +## Architecture Overview + +DimOS uses a **Protocol-based adapter pattern** — no base class inheritance required. Your adapter wraps the vendor SDK and exposes a standard interface that the rest of the system consumes: + +``` +┌──────────────────────────────────────────────────────────────┐ +│ ManipulationModule (Planning) │ +│ - Plans collision-free trajectories using Drake │ +│ - Sends trajectories to coordinator via RPC │ +└───────────────────────┬──────────────────────────────────────┘ + │ RPC: execute trajectory +┌───────────────────────▼──────────────────────────────────────┐ +│ ControlCoordinator (100Hz control loop) │ +│ - Reads state from all adapters │ +│ - Runs tasks (trajectory, servo, velocity) │ +│ - Arbitrates per-joint conflicts (priority-based) │ +│ - Routes commands to the correct adapter │ +│ - Publishes aggregated joint state │ +└───────────────────────┬──────────────────────────────────────┘ + │ uses +┌───────────────────────▼──────────────────────────────────────┐ +│ Your Adapter (implements Protocol) │ +│ - Wraps vendor SDK (TCP/IP, CAN, serial, etc.) │ +│ - Converts between vendor units and SI units │ +│ - Handles connection lifecycle │ +└──────────────────────────────────────────────────────────────┘ +``` + +> See also: `dimos/hardware/manipulators/README.md` for a quick reference. + +## Prerequisites + +1. **Vendor SDK** — The Python SDK for your robot arm (e.g., `xarm-python-sdk`, `piper-sdk`) +2. **URDF/xacro** — A robot description file (only needed if you want motion planning) +3. 
**Connection info** — IP address, CAN port, serial device, etc. + +## Step 1: Create the Adapter + +Create a new directory for your arm under `dimos/hardware/manipulators/`: + +``` +dimos/hardware/manipulators/ +├── spec.py # ManipulatorAdapter Protocol (don't modify) +├── registry.py # Auto-discovery registry (don't modify) +├── mock/ +├── xarm/ +├── piper/ +└── yourarm/ # ← New directory + ├── __init__.py + └── adapter.py +``` + +### adapter.py — Full Skeleton + +Below is a complete annotated adapter. Implement each method by wrapping your vendor SDK calls. All values crossing the adapter boundary **must use SI units**. + +| Quantity | SI Unit | +|------------------|----------| +| Angles | radians | +| Angular velocity | rad/s | +| Torque | Nm | +| Position | meters | +| Force | Newtons | + +```python +"""YourArm adapter — implements ManipulatorAdapter protocol. + +SDK Units: +DimOS Units: angles=radians, distance=meters, velocity=rad/s +""" + +from __future__ import annotations + +import math +from typing import TYPE_CHECKING + +# Import your vendor SDK +from yourarm_sdk import YourArmSDK + +if TYPE_CHECKING: + from dimos.hardware.manipulators.registry import AdapterRegistry + +from dimos.hardware.manipulators.spec import ( + ControlMode, + JointLimits, + ManipulatorInfo, +) + +# Unit conversion constants (if your SDK doesn't use SI units) +MM_TO_M = 0.001 +M_TO_MM = 1000.0 + + +class YourArmAdapter: + """YourArm hardware adapter. + + Implements ManipulatorAdapter protocol via duck typing. + No inheritance required — just match the method signatures in spec.py. + """ + + def __init__(self, address: str, dof: int = 6) -> None: + """Initialize the adapter. + + Args: + address: Connection address (IP, CAN port, serial device, etc.) + dof: Degrees of freedom. 
+ """ + if not address: + raise ValueError("address is required for YourArmAdapter") + self._address = address + self._dof = dof + self._sdk: YourArmSDK | None = None + self._control_mode: ControlMode = ControlMode.POSITION + + # ========================================================================= + # Connection + # ========================================================================= + + def connect(self) -> bool: + """Connect to hardware. Returns True on success.""" + try: + self._sdk = YourArmSDK(self._address) + self._sdk.connect() + # Verify connection succeeded + if not self._sdk.is_alive(): + print(f"ERROR: Arm at {self._address} not reachable") + return False + return True + except Exception as e: + print(f"ERROR: Failed to connect to arm at {self._address}: {e}") + return False + + def disconnect(self) -> None: + """Disconnect from hardware.""" + if self._sdk: + self._sdk.disconnect() + self._sdk = None + + def is_connected(self) -> bool: + """Check if connected.""" + return self._sdk is not None and self._sdk.is_alive() + + # ========================================================================= + # Info + # ========================================================================= + + def get_info(self) -> ManipulatorInfo: + """Get manipulator info (vendor, model, DOF).""" + return ManipulatorInfo( + vendor="YourVendor", + model="YourModel", + dof=self._dof, + firmware_version=None, # Optional: query from SDK if available + serial_number=None, # Optional: query from SDK if available + ) + + def get_dof(self) -> int: + """Get degrees of freedom.""" + return self._dof + + def get_limits(self) -> JointLimits: + """Get joint position and velocity limits in SI units. + + Either hardcode known limits or query them from the SDK. 
+ """ + return JointLimits( + position_lower=[-math.pi] * self._dof, # radians + position_upper=[math.pi] * self._dof, # radians + velocity_max=[math.pi] * self._dof, # rad/s + ) + + # ========================================================================= + # Control Mode + # ========================================================================= + + def set_control_mode(self, mode: ControlMode) -> bool: + """Set control mode. + + Map DimOS ControlMode enum values to your SDK's mode codes. + Return False for modes your arm doesn't support. + """ + if not self._sdk: + return False + + mode_map = { + ControlMode.POSITION: 0, # Your SDK's position mode code + ControlMode.SERVO_POSITION: 1, # High-frequency servo mode + ControlMode.VELOCITY: 4, # Velocity mode + # Add other supported modes... + } + + sdk_mode = mode_map.get(mode) + if sdk_mode is None: + return False # Unsupported mode + + success = self._sdk.set_mode(sdk_mode) + if success: + self._control_mode = mode + return success + + def get_control_mode(self) -> ControlMode: + """Get current control mode.""" + return self._control_mode + + # ========================================================================= + # State Reading + # ========================================================================= + + def read_joint_positions(self) -> list[float]: + """Read current joint positions in radians. + + Convert from SDK units to radians. + """ + if not self._sdk: + raise RuntimeError("Not connected") + raw_positions = self._sdk.get_joint_positions() + return [math.radians(p) for p in raw_positions[:self._dof]] + + def read_joint_velocities(self) -> list[float]: + """Read current joint velocities in rad/s. + + If your SDK doesn't provide velocity feedback, return zeros. + The coordinator can estimate velocity via finite differences. 
+ """ + if not self._sdk: + return [0.0] * self._dof + # If SDK supports velocity reading: + # raw_velocities = self._sdk.get_joint_velocities() + # return [math.radians(v) for v in raw_velocities[:self._dof]] + return [0.0] * self._dof + + def read_joint_efforts(self) -> list[float]: + """Read current joint torques in Nm. + + If your SDK doesn't provide torque feedback, return zeros. + """ + if not self._sdk: + return [0.0] * self._dof + # If SDK supports torque reading: + # return list(self._sdk.get_joint_torques()[:self._dof]) + return [0.0] * self._dof + + def read_state(self) -> dict[str, int]: + """Read robot state (mode, state code, etc).""" + if not self._sdk: + return {"state": 0, "mode": 0} + return { + "state": self._sdk.get_state(), + "mode": self._sdk.get_mode(), + } + + def read_error(self) -> tuple[int, str]: + """Read error code and message. (0, '') means no error.""" + if not self._sdk: + return 0, "" + code = self._sdk.get_error_code() + if code == 0: + return 0, "" + return code, f"YourArm error {code}" + + # ========================================================================= + # Motion Control (Joint Space) + # ========================================================================= + + def write_joint_positions( + self, + positions: list[float], + velocity: float = 1.0, + ) -> bool: + """Command joint positions in radians. + + Args: + positions: Target positions in radians. + velocity: Speed as fraction of max (0-1). + + Convert from radians to SDK units before sending. + """ + if not self._sdk: + return False + sdk_positions = [math.degrees(p) for p in positions] + return self._sdk.set_joint_positions(sdk_positions) + + def write_joint_velocities(self, velocities: list[float]) -> bool: + """Command joint velocities in rad/s. + + Return False if velocity control is not supported. 
+ """ + if not self._sdk: + return False + sdk_velocities = [math.degrees(v) for v in velocities] + return self._sdk.set_joint_velocities(sdk_velocities) + + def write_stop(self) -> bool: + """Stop all motion immediately.""" + if not self._sdk: + return False + return self._sdk.emergency_stop() + + # ========================================================================= + # Servo Control + # ========================================================================= + + def write_enable(self, enable: bool) -> bool: + """Enable or disable servos.""" + if not self._sdk: + return False + return self._sdk.enable_motors(enable) + + def read_enabled(self) -> bool: + """Check if servos are enabled.""" + if not self._sdk: + return False + return self._sdk.motors_enabled() + + def write_clear_errors(self) -> bool: + """Clear error state.""" + if not self._sdk: + return False + return self._sdk.clear_errors() + + # ========================================================================= + # Optional: Cartesian Control + # Return None/False if not supported by your arm. + # ========================================================================= + + def read_cartesian_position(self) -> dict[str, float] | None: + """Read end-effector pose. + + Returns dict with keys: x, y, z (meters), roll, pitch, yaw (radians). + Return None if not supported. + """ + return None # Or implement if your SDK supports it + + def write_cartesian_position( + self, + pose: dict[str, float], + velocity: float = 1.0, + ) -> bool: + """Command end-effector pose. Return False if not supported.""" + return False + + # ========================================================================= + # Optional: Gripper + # ========================================================================= + + def read_gripper_position(self) -> float | None: + """Read gripper position in meters. 
Return None if no gripper.""" + return None + + def write_gripper_position(self, position: float) -> bool: + """Command gripper position in meters. Return False if no gripper.""" + return False + + # ========================================================================= + # Optional: Force/Torque Sensor + # ========================================================================= + + def read_force_torque(self) -> list[float] | None: + """Read F/T sensor data [fx, fy, fz, tx, ty, tz]. None if no sensor.""" + return None + + +# ── Registry hook (required for auto-discovery) ─────────────────── +def register(registry: AdapterRegistry) -> None: + """Register this adapter with the registry.""" + registry.register("yourarm", YourArmAdapter) + + +__all__ = ["YourArmAdapter"] +``` + +### Key implementation notes + +- **Unsupported features** — Return `None` for reads and `False` for writes. Never raise exceptions for optional features. +- **Velocity/effort feedback** — If your SDK doesn't provide these, return zeros. The coordinator handles this gracefully. +- **Lazy SDK import** — If the vendor SDK is an optional dependency, you can import it inside `connect()` instead of at module level (see Piper adapter for this pattern): + ```python + def connect(self) -> bool: + try: + from yourarm_sdk import YourArmSDK + self._sdk = YourArmSDK(self._address) + ... + except ImportError: + print("ERROR: yourarm-sdk not installed. Run: pip install yourarm-sdk") + return False + ``` + +## Step 2: Create Package Files + +### \_\_init\_\_.py + +```python +"""YourArm manipulator hardware adapter. 
+ +Usage: + >>> from dimos.hardware.manipulators.yourarm import YourArmAdapter + >>> adapter = YourArmAdapter(address="192.168.1.100", dof=6) + >>> adapter.connect() + >>> positions = adapter.read_joint_positions() +""" + +from dimos.hardware.manipulators.yourarm.adapter import YourArmAdapter + +__all__ = ["YourArmAdapter"] +``` + +### How auto-discovery works + +The `AdapterRegistry` in `dimos/hardware/manipulators/registry.py` automatically discovers your adapter at import time: + +1. It iterates over all subpackages under `dimos/hardware/manipulators/` +2. For each subpackage, it tries to import `.adapter` +3. If that module has a `register()` function, it calls it + +This means **no manual registration is needed** — just having the `register()` function in your `adapter.py` is sufficient. + +You can verify discovery works: + +```python +from dimos.hardware.manipulators.registry import adapter_registry +print(adapter_registry.available()) # Should include "yourarm" +``` + +## Step 3: Create Your Robot Folder and Blueprints + +Each robot in DimOS gets its own folder under `dimos/robot/`. This is where you define all blueprints for your arm — coordinator, planning, perception, etc. This follows the same pattern as Unitree robots (`dimos/robot/unitree/`). + +### 3a. Create the robot directory + +``` +dimos/robot/ +├── unitree/ # Unitree robots (reference example) +│ ├── go2/ +│ │ └── blueprints/ +│ └── g1/ +│ └── blueprints/ +└── yourarm/ # ← New directory for your robot + ├── __init__.py + └── blueprints.py +``` + +### 3b. Define your blueprints + +Create `dimos/robot/yourarm/blueprints.py` with your coordinator and (optionally) planning blueprints: + +```python +"""Blueprints for YourArm robot. 
+ +Usage: + # Run via CLI: + dimos run coordinator-yourarm # Start coordinator with real hardware + dimos run yourarm-planner # Start planner (optional, for motion planning) + + # Or programmatically: + from dimos.robot.yourarm.blueprints import coordinator_yourarm + coordinator = coordinator_yourarm.build() + coordinator.loop() +""" + +from __future__ import annotations + +from pathlib import Path + +from dimos.control.components import HardwareComponent, HardwareType, make_joints +from dimos.control.coordinator import TaskConfig, control_coordinator +from dimos.core.transport import LCMTransport +from dimos.msgs.sensor_msgs import JointState + +# ============================================================================= +# Coordinator Blueprints +# ============================================================================= + +# YourArm (6-DOF) — real hardware +coordinator_yourarm = control_coordinator( + tick_rate=100.0, # Control loop frequency (Hz) + publish_joint_state=True, # Publish aggregated joint state + joint_state_frame_id="coordinator", + hardware=[ + HardwareComponent( + hardware_id="arm", # Unique ID for this hardware + hardware_type=HardwareType.MANIPULATOR, + joints=make_joints("arm", 6), # Creates ["arm_joint1", ..., "arm_joint6"] + adapter_type="yourarm", # Must match registry name + address="192.168.1.100", # Passed to adapter __init__ + auto_enable=True, # Auto-enable servos on start + ), + ], + tasks=[ + TaskConfig( + name="traj_arm", # Task name (used by ManipulationModule RPC) + type="trajectory", # Trajectory execution task + joint_names=[f"arm_joint{i+1}" for i in range(6)], + priority=10, # Higher priority wins arbitration + ), + ], +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + } +) + + +``` + +### Blueprint field reference + +| Field | Description | +|-------|-------------| +| `hardware_id` | Unique name for this hardware component. Used to route commands. 
| +| `adapter_type` | Name registered with `adapter_registry` (e.g., `"yourarm"`). | +| `address` | Connection info passed to adapter's `__init__` as `address` kwarg. | +| `joints` | List of joint names. `make_joints("arm", 6)` creates `["arm_joint1", ..., "arm_joint6"]`. | +| `auto_enable` | If `True`, servos are enabled automatically when the coordinator starts. | +| `task.name` | Name used by the ManipulationModule to invoke trajectory execution via RPC. | +| `task.type` | Task type: `"trajectory"`, `"servo"`, `"velocity"`, or `"cartesian_ik"`. | +| `task.priority` | Priority for per-joint arbitration. Higher number wins. | + +## Step 4: Add URDF and Planning Integration (Optional) + +If you want motion planning (collision-free trajectories via Drake), you need a URDF and a planning blueprint. Add these to your robot's own `blueprints.py`. + +### 4a. Add your URDF + +Place your URDF/xacro files under LFS data so they can be resolved via `LfsPath`. `LfsPath` is a `Path` subclass that lazily downloads LFS data on first access — this avoids downloading at import time when the blueprint module is loaded. + +```python +from dimos.utils.data import LfsPath +from dimos.manipulation.manipulation_module import manipulation_module +from dimos.manipulation.planning.spec import RobotModelConfig +from dimos.msgs.geometry_msgs import PoseStamped, Quaternion, Vector3 + +# LfsPath defers download until the path is actually accessed +_YOURARM_URDF_PATH = LfsPath("yourarm_description/urdf/yourarm.urdf") +_YOURARM_PACKAGE_PATH = LfsPath("yourarm_description") + + +def _make_base_pose(x=0.0, y=0.0, z=0.0) -> PoseStamped: + return PoseStamped( + position=Vector3(x=x, y=y, z=z), + orientation=Quaternion(0.0, 0.0, 0.0, 1.0), + ) +``` + +### 4b. 
Create a robot model config helper + +```python +def _make_yourarm_config( + name: str = "arm", + y_offset: float = 0.0, + joint_prefix: str = "", + coordinator_task: str | None = None, +) -> RobotModelConfig: + """Create YourArm robot config for planning. + + Args: + name: Robot name in the Drake planning world. + y_offset: Y-axis offset for multi-arm setups. + joint_prefix: Prefix for joint name mapping to coordinator namespace. + coordinator_task: Coordinator task name for trajectory execution via RPC. + """ + # These must match the joint names in your URDF + joint_names = ["joint1", "joint2", "joint3", "joint4", "joint5", "joint6"] + joint_mapping = {f"{joint_prefix}{j}": j for j in joint_names} if joint_prefix else {} + + return RobotModelConfig( + name=name, + urdf_path=_YOURARM_URDF_PATH, + base_pose=_make_base_pose(y=y_offset), + joint_names=joint_names, + end_effector_link="link6", # Last link in your URDF's kinematic chain + base_link="base_link", # Root link of your URDF + package_paths={"yourarm_description": _YOURARM_PACKAGE_PATH}, + xacro_args={}, # Xacro arguments if using .xacro files + collision_exclusion_pairs=[], # Pairs of links that can touch (e.g., gripper fingers) + auto_convert_meshes=True, # Convert DAE/STL meshes for Drake + max_velocity=1.0, # Max velocity scaling factor + max_acceleration=2.0, # Max acceleration scaling factor + joint_name_mapping=joint_mapping, + coordinator_task_name=coordinator_task, + ) +``` + +### 4c. 
Create a planning blueprint + +Add this to your `dimos/robot/yourarm/blueprints.py` alongside the coordinator blueprint: + +```python +# ============================================================================= +# Planner Blueprints (requires URDF) +# ============================================================================= + +yourarm_planner = manipulation_module( + robots=[_make_yourarm_config("arm", joint_prefix="arm_", coordinator_task="traj_arm")], + planning_timeout=10.0, + enable_viz=True, +).transports( + { + ("joint_state", JointState): LCMTransport("/coordinator/joint_state", JointState), + } +) +``` + +### Key config fields + +| Field | Description | +|-------|-------------| +| `urdf_path` | Path to `.urdf` or `.xacro` file | +| `joint_names` | Ordered list of controlled joints (must match URDF) | +| `end_effector_link` | Link to use as the end-effector for IK | +| `base_link` | Root link of the robot model | +| `package_paths` | Maps `package://` URIs to filesystem paths (for xacro) | +| `joint_name_mapping` | Maps coordinator names (e.g., `"arm_joint1"`) to URDF names (e.g., `"joint1"`) | +| `coordinator_task_name` | Must match the `TaskConfig.name` in your coordinator blueprint | +| `collision_exclusion_pairs` | List of `(link_a, link_b)` tuples for links that may legitimately touch (e.g., gripper fingers) | + +## Step 5: Register Blueprints + +The blueprint registry in `dimos/robot/all_blueprints.py` is **auto-generated** by scanning the codebase for blueprint declarations. After adding your blueprints: + +1. Run the generation test to update the registry: + ```bash + pytest dimos/robot/test_all_blueprints_generation.py + ``` +2.
Now you can run your arm via CLI: + ```bash + dimos run coordinator-yourarm + dimos run yourarm-planner # If you added a planning blueprint + ``` + +## Step 6: Testing + +### Verify adapter registration + +```python +from dimos.hardware.manipulators.registry import adapter_registry + +# Check your adapter shows up +assert "yourarm" in adapter_registry.available() + +# Create an instance via registry (same path the coordinator uses) +adapter = adapter_registry.create("yourarm", address="192.168.1.100", dof=6) +``` + +### Unit test with mock + +You can test coordinator logic without hardware by using `unittest.mock`: + +```python +import pytest +from unittest.mock import MagicMock +from dimos.hardware.manipulators.spec import ManipulatorAdapter + +@pytest.fixture +def mock_adapter(): + adapter = MagicMock(spec=ManipulatorAdapter) + adapter.get_dof.return_value = 6 + adapter.read_joint_positions.return_value = [0.0] * 6 + adapter.read_joint_velocities.return_value = [0.0] * 6 + adapter.read_joint_efforts.return_value = [0.0] * 6 + adapter.write_joint_positions.return_value = True + adapter.read_enabled.return_value = True + adapter.is_connected.return_value = True + return adapter + +def test_read_positions(mock_adapter): + assert mock_adapter.read_joint_positions() == [0.0] * 6 + +def test_write_positions(mock_adapter): + target = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] + assert mock_adapter.write_joint_positions(target) is True +``` + +### Integration test with coordinator + +```python +from dimos.control.blueprints import coordinator_mock + +# Build and start coordinator with mock hardware +coordinator = coordinator_mock.build() +coordinator.start() + +# Your adapter is tested through the same coordinator interface +# Just swap adapter_type="mock" to adapter_type="yourarm" in a blueprint +``` + +### Test the real adapter standalone + +```python +from dimos.hardware.manipulators.yourarm import YourArmAdapter + +adapter = YourArmAdapter(address="192.168.1.100", dof=6) +assert 
adapter.connect() is True +assert adapter.is_connected() is True + +# Read state +positions = adapter.read_joint_positions() +assert len(positions) == 6 +print(f"Joint positions (rad): {positions}") + +# Enable and move +adapter.write_enable(True) +adapter.write_joint_positions([0.0] * 6) + +# Cleanup +adapter.write_stop() +adapter.disconnect() +``` + +## Quick Reference Checklist + +Files to create: + +- [ ] `dimos/hardware/manipulators/yourarm/__init__.py` +- [ ] `dimos/hardware/manipulators/yourarm/adapter.py` (implements Protocol + `register()`) +- [ ] `dimos/robot/yourarm/__init__.py` +- [ ] `dimos/robot/yourarm/blueprints.py` (coordinator + planning blueprints) + +Files to modify: + +- [ ] `pyproject.toml` — Add vendor SDK to optional dependencies *(if applicable)* + +Verification: + +- [ ] `adapter_registry.available()` includes `"yourarm"` +- [ ] `pytest dimos/robot/test_all_blueprints_generation.py` passes (regenerates `all_blueprints.py`) +- [ ] `dimos run coordinator-yourarm` starts successfully diff --git a/docs/development/assets/docker-hierarchy.svg b/docs/development/assets/docker-hierarchy.svg new file mode 100644 index 0000000000..7c84d6aa9f --- /dev/null +++ b/docs/development/assets/docker-hierarchy.svg @@ -0,0 +1,31 @@ + + +ubuntu:22.04 + +ubuntu:22.04 +Non-ROS Track +ROS Track + + + +python + + + +dev + + + +ros + + + +ros-python + + + +ros-dev + + +same dockerfiles + diff --git a/docs/assets/get_data_flow.svg b/docs/development/assets/get_data_flow.svg similarity index 100% rename from docs/assets/get_data_flow.svg rename to docs/development/assets/get_data_flow.svg diff --git a/docs/depth_camera_integration.md b/docs/development/depth_camera_integration.md similarity index 92% rename from docs/depth_camera_integration.md rename to docs/development/depth_camera_integration.md index 4fca10da4e..e152394262 100644 --- a/docs/depth_camera_integration.md +++ b/docs/development/depth_camera_integration.md @@ -6,7 +6,7 @@ Use this guide to add a new 
depth camera, wire TF correctly, and publish the req ## Add a New Depth Camera 1) **Create a new driver module** - - Path: `dimos/dimos/hardware/sensors/camera//camera.py` + - Path: `dimos/hardware/sensors/camera/<camera_name>/camera.py` - Export a blueprint in `/__init__.py` (match the `realsense` / `zed` pattern). 2) **Define config** @@ -57,7 +57,7 @@ Use this guide to add a new depth camera, wire TF correctly, and publish the req ## TF: Required Frames and Transforms -Frame names are defined by the abstract depth camera spec (`dimos/dimos/hardware/sensors/camera/spec.py`). +Frame names are defined by the abstract depth camera spec (`dimos/hardware/sensors/camera/spec.py`). Use the properties below to ensure consistent naming: - `_camera_link`: base link for the camera module (usually `{camera_name}_link`) @@ -111,8 +111,8 @@ For `ObjectSceneRegistrationModule`, the required inputs are: - Overlay annotations and aggregated pointclouds See: -- `dimos/dimos/perception/object_scene_registration.py` -- `dimos/dimos/perception/demo_object_scene_registration.py` +- `dimos/perception/object_scene_registration.py` +- `dimos/perception/demo_object_scene_registration.py` Quick wiring example: @@ -140,8 +140,8 @@ Install Foxglove from: ## Modules and Skills (Short Intro) - **Modules** are typed components with `In[...]` / `Out[...]` streams and `start()` / `stop()` lifecycles. -- **Skills** are callable methods (decorated with `@skill`) exposed by `SkillModule` for agents. +- **Skills** are callable methods (decorated with `@skill`) on any `Module`, automatically discovered by agents.
Reference: -- Modules overview: `dimos/docs/concepts/modules.md` -- TF fundamentals: `dimos/docs/api/transforms.md` +- Modules overview: `/docs/usage/modules.md` +- TF fundamentals: `/docs/usage/transforms.md` diff --git a/dimos/robot/cli/README.md b/docs/development/dimos_run.md similarity index 79% rename from dimos/robot/cli/README.md rename to docs/development/dimos_run.md index 63087f48b8..3e6bee65e6 100644 --- a/dimos/robot/cli/README.md +++ b/docs/development/dimos_run.md @@ -1,6 +1,20 @@ -# Robot CLI +# DimOS Run -To avoid having so many runfiles, I created a common script to run any blueprint. +#### Warning: If you just want to run a blueprint you don't need to add it to `dimos run`: + +`your_code.py` +```python +from dimos.robot.unitree_webrtc.unitree_go2_blueprints import basic as example_blueprint + +if __name__ == "__main__": + example_blueprint.build().loop() +``` + +```sh +python ./your_code.py +``` + +## Usage For example, to run the standard Unitree Go2 blueprint run: @@ -17,10 +31,10 @@ dimos run unitree-go2-agentic You can dynamically connect additional modules. For example: ```bash -dimos run unitree-go2 --extra-module llm_agent --extra-module human_input --extra-module navigation_skill +dimos run unitree-go2 --extra-module agent --extra-module navigation_skill ``` -## Definitions +## Adding your own Blueprints can be defined anywhere, but they're all linked together in `dimos/robot/all_blueprints.py`. E.g.: diff --git a/docs/development/docker.md b/docs/development/docker.md new file mode 100644 index 0000000000..2f3f4a98ec --- /dev/null +++ b/docs/development/docker.md @@ -0,0 +1,162 @@ +# Docker Images + +Dimos uses parallel Docker image hierarchies for ROS and non-ROS builds, allowing you to choose the environment that fits your use case. + +## Image Hierarchy + +
Pikchr + +```pikchr fold output=assets/docker-hierarchy.svg +color = white +fill = none + +# Base images +U1: box "ubuntu:22.04" rad 5px fit wid 170% ht 170% +U2: box "ubuntu:22.04" rad 5px fit wid 170% ht 170% at (U1.x + 2.5in, U1.y) + +# Labels +text "Non-ROS Track" at (U1.x, U1.y + 0.5in) +text "ROS Track" at (U2.x, U2.y + 0.5in) + +# Non-ROS track +arrow from U1.s down 0.4in +P: box "python" rad 5px fit wid 170% ht 170% +arrow from P.s down 0.4in +D: box "dev" rad 5px fit wid 170% ht 170% + +# ROS track +arrow from U2.s down 0.4in +R: box "ros" rad 5px fit wid 170% ht 170% +arrow from R.s down 0.4in +RP: box "ros-python" rad 5px fit wid 170% ht 170% +arrow from RP.s down 0.4in +RD: box "ros-dev" rad 5px fit wid 170% ht 170% + +# Cross-reference: same dockerfiles reused +line dashed from P.e right 0.3in then down until even with RP then right to RP.w +line dashed from D.e right 0.3in then down until even with RD then right to RD.w +text "same dockerfiles" at (D.e.x + 1.2in, D.e.y + 0.4in) +``` + +
+ + +![output](assets/docker-hierarchy.svg) + + +## Images + +All images are published to `ghcr.io/dimensionalos/`. + +| Image | Base | Purpose | +|--------------|-----------------------------|----------------------------------------------------| +| `python` | ubuntu:22.04 | Core dimos with Python dependencies, no ROS | +| `dev` | python | Development environment (editors, git, pre-commit) | +| `ros` | ubuntu:22.04 | ROS2 Humble with navigation packages | +| `ros-python` | ros | ROS + dimos Python dependencies | +| `ros-dev` | ros-python | Full ROS development environment | + +## Tags + +Images are tagged based on the git branch: + +| Branch | Tag | +|------------------|-------------------------------------------------| +| `main` | `latest` | +| `dev` | `dev` | +| feature branches | sanitized branch name (e.g., `feature_foo_bar`) | + +## When to Use Each Image + +### Non-ROS Track (`python` → `dev`) + +```sh skip +docker run -it ghcr.io/dimensionalos/dev:latest bash +``` + +### ROS Track (`ros` → `ros-python` → `ros-dev`) + +Use when you need ROS2 integration: +- Robot hardware control via ROS topics +- Navigation stack integration +- ROS message passing between components +- Running ROS tests (`pytest -m ros`) + +```sh skip +docker run -it ghcr.io/dimensionalos/ros-dev:latest bash +``` + +## Local Development + +### Building Images Locally + +Use the helper script: + +```sh skip +./bin/dockerbuild python # Build python image +./bin/dockerbuild dev # Build dev image +./bin/dockerbuild ros # Build ros image +``` + +## CI/CD Pipeline + +The workflow in [`.github/workflows/docker.yml`](/.github/workflows/docker.yml) handles: + +1. **Change detection** - Only rebuilds images when relevant files change +2. **Parallel builds** - ROS and non-ROS tracks build independently +3. **Cascade rebuilds** - Changes to base images trigger downstream rebuilds +4. 
**Test execution** - Tests run in the freshly built images + +### Trigger Paths + +| Image | Triggers on changes to | +|----------|------------------------------------------------------| +| `ros` | `docker/ros/**`, workflow files | +| `python` | `docker/python/**`, `pyproject.toml`, workflow files | +| `dev` | `docker/dev/**` | + +### Test Jobs + +After images build, tests run in parallel: + +| Job | Image | Command | +|-------------------------|---------|---------------------------| +| `run-tests` | dev | `pytest` | +| `run-ros-tests` | ros-dev | `pytest && pytest -m ros` | +| `run-heavy-tests` | dev | `pytest -m heavy` | +| `run-lcm-tests` | dev | `pytest -m lcm` | +| `run-integration-tests` | dev | `pytest -m integration` | +| `run-mypy` | ros-dev | `mypy dimos` | + +## Dockerfile Structure + +### Common Patterns + +All Dockerfiles accept a `FROM_IMAGE` build arg for flexibility: + +```dockerfile skip +ARG FROM_IMAGE=ubuntu:22.04 +FROM ${FROM_IMAGE} +``` + +This allows the same Dockerfile (e.g., `python`) to build on different bases. + +### Python Package Installation + +Images use [uv](https://github.com/astral-sh/uv) for fast dependency installation: + +```dockerfile skip +ENV UV_SYSTEM_PYTHON=1 +RUN curl -LsSf https://astral.sh/uv/install.sh | sh +RUN uv pip install '.[misc,cpu,sim,drone,unitree,web,perception,visualization]' +``` + +### Dev Image Features + +The dev image ([`docker/dev/Dockerfile`](/docker/dev/Dockerfile)) adds: +- Git, git-lfs, pre-commit +- Editors (nano, vim) +- tmux with custom config +- Node.js (via nvm) +- Custom bash prompt with version info +- Entrypoint script that sources ROS setup diff --git a/docs/development/grid_testing.md b/docs/development/grid_testing.md new file mode 100644 index 0000000000..e5daab7b32 --- /dev/null +++ b/docs/development/grid_testing.md @@ -0,0 +1,116 @@ +# Grid Testing Strategy + +Grid tests run the same test logic across multiple implementations or configurations using pytest's parametrize feature. 
+ +## Case Type Pattern + +Define a `Case` dataclass that holds everything needed to run tests against a specific implementation: + +```python +from collections.abc import Callable, Iterator +from contextlib import AbstractContextManager +from dataclasses import dataclass, field +from typing import Any, Generic + +@dataclass +class Case(Generic[TopicT, MsgT]): + name: str # For pytest id + pubsub_context: Callable[[], AbstractContextManager[...]] # Context manager factory + topic_values: list[tuple[TopicT, MsgT]] # Pre-generated test data (always 3 pairs) + tags: set[str] = field(default_factory=set) # Capability tags for filtering + + def __iter__(self) -> Iterator[Any]: + """Makes Case work with pytest.parametrize unpacking.""" + return iter((self.pubsub_context, self.topic_values)) +``` + +## Capability Tags + +Use tags to indicate what features each implementation supports: + +```python +testcases = [ + Case( + name="lcm_typed", + pubsub_context=lcm_typed_context, + topic_values=[...], + tags={"all", "glob", "regex"}, # LCM supports all pattern types + ), + Case( + name="shm_pickle", + pubsub_context=shm_context, + topic_values=[...], + tags={"all"}, # SharedMemory only supports subscribe_all + ), +] +``` + +## Filtered Test Lists + +Build separate lists for each capability to use with parametrize: + +```python +all_cases = [c for c in testcases if "all" in c.tags] +glob_cases = [c for c in testcases if "glob" in c.tags] +regex_cases = [c for c in testcases if "regex" in c.tags] +``` + +## Test Functions + +Use the filtered lists in parametrize decorators: + +```python +@pytest.mark.parametrize("case", all_cases, ids=lambda c: c.name) +def test_subscribe_all(case: Case) -> None: + with case.pubsub_context() as pubsub: + # Test logic using case.topic_values + ... 
+ +@pytest.mark.parametrize("case", glob_cases, ids=lambda c: c.name) +def test_subscribe_glob(case: Case) -> None: + if not glob_cases: + pytest.skip("no implementations support glob") + with case.pubsub_context() as pubsub: + ... +``` + +## Context Managers + +Each implementation provides a context manager factory: + +```python +@contextmanager +def lcm_typed_context() -> Generator[LCM, None, None]: + lcm = LCM(autoconf=True) + lcm.start() + try: + yield lcm + finally: + lcm.stop() +``` + +## Test Data Guidelines + +- Always provide exactly 3 topic/value pairs for consistency +- For typed implementations, use different types per topic to verify type handling +- For bytes implementations, use simple distinguishable byte strings + +```python +# Typed test data - different types per topic +typed_topic_values = [ + (Topic("/sensor/position", Vector3), Vector3(1, 2, 3)), + (Topic("/sensor/orientation", Quaternion), Quaternion(0, 0, 0, 1)), + (Topic("/robot/pose", Pose), Pose(...)), +] + +# Bytes test data +bytes_topic_values = [ + (Topic("/topic1"), b"msg1"), + (Topic("/topic2"), b"msg2"), + (Topic("/topic3"), b"msg3"), +] +``` + +## Examples + +- `dimos/protocol/pubsub/test_spec.py` - Basic pubsub operations +- `dimos/protocol/pubsub/test_subscribe_all.py` - Pattern subscriptions +- `dimos/protocol/pubsub/benchmark/testdata.py` - Benchmark cases diff --git a/docs/data.md b/docs/development/large_file_management.md similarity index 100% rename from docs/data.md rename to docs/development/large_file_management.md diff --git a/docs/development/testing.md b/docs/development/testing.md new file mode 100644 index 0000000000..c27a8c5dec --- /dev/null +++ b/docs/development/testing.md @@ -0,0 +1,122 @@ +# Testing + +For development, you should install all dependencies so that tests have access to them. 
+ +```bash +uv sync --all-extras --no-extra dds +``` + +## Types of tests + +There are different types of tests based on what their goal is: + +| Type | Description | Mocking | Speed | +|------|-------------|---------|-------| +| Unit | Test a small individual piece of code | All dependencies | Very fast | +| Integration | Test the integration between multiple units of code | Most dependencies | Some fast, some slow | +| Functional | Test a particular desired functionality | Some dependencies | Some fast, some slow | +| End-to-end | Test the entire system as a whole from the perspective of the user | None | Very slow | + +The distinction between unit, integration, and functional tests is often debated and rarely productive. + +Rather than waste time on classifying tests, it's better to separate tests by how they are used: + +* **fast tests**: tests which you can run after each code change (people often run them with filesystem watchers: whenever a file is saved, automatically run the tests) +* **slow tests**: tests which you run every once in a while to make sure you haven't broken anything (maybe every commit, but definitely before publishing a PR) + +The purpose of running tests in a loop is to get immediate feedback. The faster the loop, the easier it is to identify a problem since the source is the tiny bit of code you changed. + +## Usage + +### Fast tests + +Run the fast tests: + +```bash +./bin/pytest-fast +``` + +This is the same as: + +```bash +pytest dimos +``` + +The default `addopts` in `pyproject.toml` includes a `-m` filter that excludes slow markers (like `integration`, `heavy`, `e2e`, etc.), so plain `pytest dimos` only runs fast tests. + +### Slow tests + +Run the slow tests: + +```bash +./bin/pytest-slow +``` + +This overrides the default `-m` filter to include most markers. 
When writing or debugging a specific slow test, override `-m` yourself: + +```bash +pytest -m integration dimos/path/to/test_something.py +``` + +Note: passing `-m` on the command line overrides the default from `addopts`, so you get exactly the marker set you asked for. + +## Writing tests + +Test files live next to the code they test. If you have `dimos/core/pubsub.py`, its tests go in `dimos/core/test_pubsub.py`. + +When writing tests you probably want to limit the run to whatever tests you're writing: + +```bash +pytest -sv dimos/core/test_my_code.py +``` + +### Fixtures + +Pytest fixtures are very useful for making sure test failures don't affect other tests. + +Whenever you have something that needs to be cleaned up when the test is over (disconnect, close, delete temp files, etc.), you should use a fixture. + +Simple example code: + +```python +@pytest.fixture +def arm(): + arm = RobotArm(device="/dev/ttyUSB0") + arm.connect() + yield arm + arm.disconnect() + +def test_arm_moves_to_position(arm): + arm.move_to(x=0.5, y=0.3, z=0.1) + assert arm.position == (0.5, 0.3, 0.1) +``` + +The `yield` is key: everything before it is setup, everything after is teardown. The teardown runs even if the test fails, so you never leak resources between tests. + +### Mocking + +It's easier to use the `mocker` fixture instead of `unittest.mock`. It automatically undoes all patches when the test ends, so you don't need `with` blocks. + +Patching a method: + +```python +def test_uses_cached_position(mocker): + mocker.patch("dimos.hardware.RobotArm.get_position", return_value=(0.0, 0.0, 0.0)) + arm = RobotArm() + assert arm.get_position() == (0.0, 0.0, 0.0) +``` + +There are other useful things in `mocker`, like `mocker.MagicMock()` for creating fake objects. 
+ +## Useful pytest options + +| Option | Description | +|--------|-------------| +| `-s` | Show stdout/stderr output | +| `-v` | More verbose test names | +| `-x` | Stop on first failure | +| `-k foo` | Only run tests matching `foo` | +| `--lf` | Rerun only the tests that failed last time | +| `--pdb` | Drop into the debugger when a test fails | +| `--tb=short` | Shorter tracebacks | +| `--durations=0` | Measure the speed of each test | diff --git a/docs/development/writing_docs.md b/docs/development/writing_docs.md new file mode 100644 index 0000000000..58466d6592 --- /dev/null +++ b/docs/development/writing_docs.md @@ -0,0 +1,7 @@ +# Writing Docs + +1. Where to put your docs: + - If it only matters to people who contribute to dimos (like this doc), put them in `docs/development` + - Otherwise put them in `docs/usage` +2. Run `bin/gen_diagrams` to generate the SVGs for your diagrams. We use [pikchr](https://pikchr.org/home/doc/trunk/doc/userman.md) as a diagram language. +3. Use [md-babel-py](https://github.com/leshy/md-babel-py/) (`md-babel-py run thing.md`) to make sure your code examples work. diff --git a/docs/hardware/integration_guide.md b/docs/hardware/integration_guide.md new file mode 100644 index 0000000000..805e6ad418 --- /dev/null +++ b/docs/hardware/integration_guide.md @@ -0,0 +1,3 @@ +# New Hardware Integration Guide + +TODO: Document how to add support for new hardware platforms. 
diff --git a/docs/installation/nix.md b/docs/installation/nix.md new file mode 100644 index 0000000000..557bb6608a --- /dev/null +++ b/docs/installation/nix.md @@ -0,0 +1,57 @@ +# Nix install (required for nix managed dimos) + +You need to have [nix](https://nixos.org/) installed and [flakes](https://nixos.wiki/wiki/Flakes) enabled. + +The [official install docs](https://nixos.org/download/) are recommended, but here is a quickstart: + +```sh +# Install Nix https://nixos.org/download/ +curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | sh -s -- install +. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + +# make sure nix-flakes are enabled +mkdir -p "$HOME/.config/nix"; echo "experimental-features = nix-command flakes" >> "$HOME/.config/nix/nix.conf" +``` + +# Using DimOS as a library + +```sh +mkdir myproject && cd myproject + +# pull the flake (needed for nix develop outside the repo) +wget https://raw.githubusercontent.com/dimensionalOS/dimos/refs/heads/main/flake.nix +wget https://raw.githubusercontent.com/dimensionalOS/dimos/refs/heads/main/flake.lock + +# enter the nix development shell (provides system deps) +nix develop + +python3 -m venv .venv +source .venv/bin/activate + +# install everything (depending on your use case you might not need all extras, +# check your respective platform guides) +pip install "dimos[misc,sim,visualization,agents,web,perception,unitree,manipulation,cpu,dev]" +``` + +# Developing on DimOS + +```sh +# this allows getting large files on-demand (and not pulling all immediately) +export GIT_LFS_SKIP_SMUDGE=1 +git clone -b dev https://github.com/dimensionalOS/dimos.git +cd dimos + +# enter the nix development shell (provides system deps) +nix develop + +python3 -m venv .venv +source .venv/bin/activate + +pip install -e ".[misc,sim,visualization,agents,web,perception,unitree,manipulation,cpu,dev]" + +# type check +mypy dimos + +# tests (around a minute to run) +pytest dimos +``` diff --git 
a/docs/installation/osx.md b/docs/installation/osx.md new file mode 100644 index 0000000000..da8916dd1a --- /dev/null +++ b/docs/installation/osx.md @@ -0,0 +1,41 @@ +# macOS Install (12.6 or newer) + +```sh +# install homebrew +/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" +# install dependencies +brew install gnu-sed gcc portaudio git-lfs libjpeg-turbo python pre-commit + +# install uv +curl -LsSf https://astral.sh/uv/install.sh | sh && export PATH="$HOME/.local/bin:$PATH" +``` + +# Using DimOS as a library + +```sh +mkdir myproject && cd myproject + +uv venv --python 3.12 +source .venv/bin/activate + +# install everything (depending on your use case you might not need all extras, +# check your respective platform guides) +uv pip install dimos[misc,sim,visualization,agents,web,perception,unitree,manipulation,cpu,dev] +``` + +# Developing on DimOS + +```sh +# this allows getting large files on-demand (and not pulling all immediately) +export GIT_LFS_SKIP_SMUDGE=1 +git clone -b dev https://github.com/dimensionalOS/dimos.git +cd dimos + +uv sync --all-extras --no-extra dds + +# type check +uv run mypy dimos + +# tests (around a minute to run) +uv run pytest dimos +``` diff --git a/docs/installation/ubuntu.md b/docs/installation/ubuntu.md new file mode 100644 index 0000000000..8ff47329d5 --- /dev/null +++ b/docs/installation/ubuntu.md @@ -0,0 +1,39 @@ +# System Dependencies Install (Ubuntu 22.04 or 24.04) + +```sh +sudo apt-get update +sudo apt-get install -y curl g++ portaudio19-dev git-lfs libturbojpeg python3-dev pre-commit + +# install uv +curl -LsSf https://astral.sh/uv/install.sh | sh && export PATH="$HOME/.local/bin:$PATH" +``` + +# Using DimOS as a library + +```sh +mkdir myproject && cd myproject + +uv venv --python 3.12 +source .venv/bin/activate + +# install everything (depending on your use case you might not need all extras, +# check your respective platform guides) +uv pip install 
dimos[misc,sim,visualization,agents,web,perception,unitree,manipulation,cpu,dev] +``` + +# Developing on DimOS + +```sh +# this allows getting large files on-demand (and not pulling all immediately) +export GIT_LFS_SKIP_SMUDGE=1 +git clone -b dev https://github.com/dimensionalOS/dimos.git +cd dimos + +uv sync --all-extras --no-extra dds + +# type check +uv run mypy dimos + +# tests (around a minute to run) +uv run pytest dimos +``` diff --git a/docs/old/ci.md b/docs/old/ci.md deleted file mode 100644 index ac9b11115a..0000000000 --- a/docs/old/ci.md +++ /dev/null @@ -1,146 +0,0 @@ -# Continuous Integration Guide - -> *If you are ******not****** editing CI-related files, you can safely ignore this document.* - -Our GitHub Actions pipeline lives in **`.github/workflows/`** and is split into three top-level workflows: - -| Workflow | File | Purpose | -| ----------- | ------------- | -------------------------------------------------------------------- | -| **cleanup** | `cleanup.yml` | Auto-formats code with *pre-commit* and pushes fixes to your branch. | -| **docker** | `docker.yml` | Builds (and caches) our Docker image hierarchy. | -| **tests** | `tests.yml` | Pulls the *dev* image and runs the test suite. | - ---- - -## `cleanup.yml` - -* Checks out the branch. -* Executes **pre-commit** hooks. -* If hooks modify files, commits and pushes the changes back to the same branch. - -> This guarantees consistent formatting even if the developer has not installed pre-commit locally. - ---- - -## `tests.yml` - -* Pulls the pre-built **dev** container image. -* Executes: - -```bash -pytest -``` - -That’s it—making the job trivial to reproduce locally via: - -```bash -./bin/dev # enter container -pytest # run tests -``` - ---- - -## `docker.yml` - -### Objectives - -1. **Layered images**: each image builds on its parent, enabling parallel builds once dependencies are ready. -2. **Speed**: build children as soon as parents finish; leverage aggressive caching. -3. 
**Minimal work**: skip images whose context hasn’t changed. - -### Current hierarchy - - -``` - ┌──────┐ - │ubuntu│ - └┬────┬┘ - ┌▽──┐┌▽───────┐ - │ros││python │ - └┬──┘└───────┬┘ - ┌▽─────────┐┌▽──┐ - │ros-python││dev│ - └┬─────────┘└───┘ - ┌▽──────┐ - │ros-dev│ - └───────┘ -``` - -* ghcr.io/dimensionalos/ros:dev -* ghcr.io/dimensionalos/python:dev -* ghcr.io/dimensionalos/ros-python:dev -* ghcr.io/dimensionalos/ros-dev:dev -* ghcr.io/dimensionalos/dev:dev - -> **Note**: The diagram shows only currently active images; the system is extensible—new combinations are possible, builds can be run per branch and as parallel as possible - - -``` - ┌──────┐ - │ubuntu│ - └┬────┬┘ - ┌▽──┐┌▽────────────────────────┐ - │ros││python │ - └┬──┘└───────────────────┬────┬┘ - ┌▽─────────────────────┐┌▽──┐┌▽──────┐ - │ros-python ││dev││unitree│ - └┬────────┬───────────┬┘└───┘└───────┘ - ┌▽──────┐┌▽─────────┐┌▽──────────┐ - │ros-dev││ros-jetson││ros-unitree│ - └───────┘└──────────┘└───────────┘ -``` - -### Branch-aware tagging - -When a branch triggers a build: - -* Only images whose context changed are rebuilt. -* New images receive the tag `:`. -* Unchanged parents are pulled from the registry, e.g. - -given we made python requirements.txt changes, but no ros changes, image dep graph would look like this: - -``` -ghcr.io/dimensionalos/ros:dev → ghcr.io/dimensionalos/ros-python:my_branch → ghcr.io/dimensionalos/dev:my_branch -``` - -### Job matrix & the **check-changes** step - -To decide what to build we run a `check-changes` job that compares the diff against path filters: - -```yaml -filters: | - ros: - - .github/workflows/_docker-build-template.yml - - .github/workflows/docker.yml - - docker/base-ros/** - - python: - - docker/base-python/** - - requirements*.txt - - dev: - - docker/dev/** -``` - -This populates a build matrix (ros, python, dev) with `true/false` flags. - -### The dependency execution issue - -Ideally a child job (e.g. 
**ros-python**) should depend on both: - -* **check-changes** (to know if it *should* run) -* Its **parent image job** (to wait for the artifact) - -GitHub Actions can’t express “run only if *both* conditions are true *and* the parent job wasn’t skipped”. - -We are using `needs: [check-changes, ros]` to ensure the job runs after the ros build, but if ros build has been skipped we need `if: always()` to ensure that the build runs anyway. -Adding `always` for some reason completely breaks the conditional check, we cannot have OR, AND operators, it just makes the job _always_ run, which means we build python even if we don't need to. - -This is unfortunate as the build takes ~30 min first time (a few minutes afterwards thanks to caching) and I've spent a lot of time on this, lots of viable seeming options didn't pan out and probably we need to completely rewrite and own the actions runner and not depend on github structure at all. Single job called `CI` or something, within our custom docker image. - ---- - -## `run-tests` (job inside `docker.yml`) - -After all requested images are built, this job triggers **tests.yml**, passing the freshly created *dev* image tag so the suite runs against the branch-specific environment. 
diff --git a/docs/old/jetson.MD b/docs/old/jetson.MD deleted file mode 100644 index a4d06e3255..0000000000 --- a/docs/old/jetson.MD +++ /dev/null @@ -1,72 +0,0 @@ -# DimOS Jetson Setup Instructions -Tested on Jetpack 6.2, CUDA 12.6 - -## Required system dependencies -`sudo apt install portaudio19-dev python3-pyaudio` - -## Installing cuSPARSELt -https://ninjalabo.ai/blogs/jetson_pytorch.html - -```bash -wget https://developer.download.nvidia.com/compute/cusparselt/0.7.0/local_installers/cusparselt-local-tegra-repo-ubuntu2204-0.7.0_1.0-1_arm64.deb -sudo dpkg -i cusparselt-local-tegra-repo-ubuntu2204-0.7.0_1.0-1_arm64.deb -sudo cp /var/cusparselt-local-tegra-repo-ubuntu2204-0.7.0/cusparselt-*-keyring.gpg /usr/share/keyrings/ -sudo apt-get update -sudo apt-get install libcusparselt0 libcusparselt-dev -ldconfig -``` -## Install Torch and Torchvision wheels - -Enter virtualenv -```bash -python3 -m venv venv -source venv/bin/activate -``` - -Wheels for jp6/cu126 -https://pypi.jetson-ai-lab.io/jp6/cu126 - -Check compatibility: -https://docs.nvidia.com/deeplearning/frameworks/install-pytorch-jetson-platform-release-notes/pytorch-jetson-rel.html - -### Working torch wheel tested on Jetpack 6.2, CUDA 12.6 -`pip install --no-cache https://developer.download.nvidia.com/compute/redist/jp/v61/pytorch/torch-2.5.0a0+872d972e41.nv24.08.17622132-cp310-cp310-linux_aarch64.whl` - -### Install torchvision from source: -```bash -# Set version by checking above torchvision<-->torch compatibility - -# We use 0.20.0 -export VERSION=20 - -sudo apt-get install libjpeg-dev zlib1g-dev libpython3-dev libopenblas-dev libavcodec-dev libavformat-dev libswscale-dev -git clone --branch release/0.$VERSION https://github.com/pytorch/vision torchvision -cd torchvision -export BUILD_VERSION=0.$VERSION.0 -python3 setup.py install --user # remove --user if installing in virtualenv -``` - -### Verify success: -```bash -$ python3 -import torch -print(torch.__version__) -print('CUDA available: ' + 
str(torch.cuda.is_available())) # Should be True -print('cuDNN version: ' + str(torch.backends.cudnn.version())) -a = torch.cuda.FloatTensor(2).zero_() -print('Tensor a = ' + str(a)) -b = torch.randn(2).cuda() -print('Tensor b = ' + str(b)) -c = a + b -print('Tensor c = ' + str(c)) - -$ python3 -import torchvision -print(torchvision.__version__) -``` - -## Install Onnxruntime-gpu - -Find pre-build wheels here for your specific JP/CUDA version: https://pypi.jetson-ai-lab.io/jp6 - -`pip install https://pypi.jetson-ai-lab.io/jp6/cu126/+f/4eb/e6a8902dc7708/onnxruntime_gpu-1.23.0-cp310-cp310-linux_aarch64.whl#sha256=4ebe6a8902dc7708434b2e1541b3fe629ebf434e16ab5537d1d6a622b42c622b` diff --git a/docs/old/modules.md b/docs/old/modules.md deleted file mode 100644 index 9cdbf586ac..0000000000 --- a/docs/old/modules.md +++ /dev/null @@ -1,165 +0,0 @@ -# Dimensional Modules - -The DimOS Module system enables distributed, multiprocess robotics applications using Dask for compute distribution and LCM (Lightweight Communications and Marshalling) for high-performance IPC. - -## Core Concepts - -### 1. Module Definition -Modules are Python classes that inherit from `dimos.core.Module` and define inputs, outputs, and RPC methods: - -```python -from dimos.core import Module, In, Out, rpc -from dimos.msgs.geometry_msgs import Vector3 - -class MyModule(Module): - # Declare inputs/outputs as class attributes initialized to None - data_in: In[Vector3] = None - data_out: Out[Vector3] = None - - def __init__(): - # Call parent Module init - super().__init__() - - @rpc - def remote_method(self, param): - """Methods decorated with @rpc can be called remotely""" - return param * 2 -``` - -### 2. Module Deployment -Modules are deployed across Dask workers using the `dimos.deploy()` method: - -```python -from dimos import core - -# Start Dask cluster with N workers -dimos = core.start(4) - -# Deploying modules allows for passing initialization parameters. 
-# In this case param1 and param2 are passed into Module init -module = dimos.deploy(Module, param1="value1", param2=123) -``` - -### 3. Stream Connections -Modules communicate via reactive streams using LCM transport: - -```python -# Configure LCM transport for outputs -module1.data_out.transport = core.LCMTransport("/topic_name", MessageType) - -# Connect module inputs to outputs -module2.data_in.connect(module1.data_out) - -# Access the underlying Observable stream -stream = module1.data_out.observable() -stream.subscribe(lambda msg: print(f"Received: {msg}")) -``` - -### 4. Module Lifecycle -```python -# Start modules to begin processing -module.start() # Calls the @rpc start() method if defined - -# Inspect module I/O configuration -print(module.io().result()) # Shows inputs, outputs, and RPC methods - -# Clean shutdown -dimos.shutdown() -``` - -## Real-World Example: Robot Control System - -```python -# Connection module wraps robot hardware/simulation -connection = dimos.deploy(ConnectionModule, ip=robot_ip) -connection.lidar.transport = core.LCMTransport("/lidar", LidarMessage) -connection.video.transport = core.LCMTransport("/video", Image) - -# Perception module processes sensor data -perception = dimos.deploy(PersonTrackingStream, camera_intrinsics=[...]) -perception.video.connect(connection.video) -perception.tracking_data.transport = core.pLCMTransport("/person_tracking") - -# Start processing -connection.start() -perception.start() - -# Enable tracking via RPC -perception.enable_tracking() - -# Get latest tracking data -data = perception.get_tracking_data() -``` - -## LCM Transport Configuration - -```python -# Standard LCM transport for simple types like lidar -connection.lidar.transport = core.LCMTransport("/lidar", LidarMessage) - -# Pickle-based transport for complex Python objects / dictionaries -connection.tracking_data.transport = core.pLCMTransport("/person_tracking") - -# Auto-configure LCM system buffers (required in containers) -from 
dimos.protocol import pubsub -pubsub.lcm.autoconf() -``` - -This architecture enables building complex robotic systems as composable, distributed modules that communicate efficiently via streams and RPC, scaling from single machines to clusters. - -# Dimensional Install -## Python Installation (Ubuntu 22.04) - -```bash -sudo apt install python3-venv - -# Clone the repository (dev branch, no submodules) -git clone -b dev https://github.com/dimensionalOS/dimos.git -cd dimos - -# Create and activate virtual environment -python3 -m venv venv -source venv/bin/activate - -sudo apt install portaudio19-dev python3-pyaudio - -# Install torch and torchvision if not already installed -# Example CUDA 11.7, Pytorch 2.0.1 (replace with your required pytorch version if different) -pip install torch==2.0.1 torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118 -``` - -### Install dependencies -```bash -# CPU only (reccomended to attempt first) -pip install .[cpu,dev] - -# CUDA install -pip install .[cuda,dev] - -# Copy and configure environment variables -cp default.env .env -``` - -### Test install -```bash -# Run standard tests -pytest -s dimos/ - -# Test modules functionality -pytest -s -m module dimos/ - -# Test LCM communication -pytest -s -m lcm dimos/ -``` - -# Unitree Go2 Quickstart - -To quickly test the modules system, you can run the Unitree Go2 multiprocess example directly: - -```bash -# Make sure you have the required environment variables set -export ROBOT_IP= - -# Run the multiprocess Unitree Go2 example -python dimos/robot/unitree_webrtc/multiprocess/unitree_go2.py -``` diff --git a/docs/old/modules_CN.md b/docs/old/modules_CN.md deleted file mode 100644 index 89e16c7112..0000000000 --- a/docs/old/modules_CN.md +++ /dev/null @@ -1,188 +0,0 @@ -# Dimensional 模块系统 - -DimOS 模块系统使用 Dask 进行计算分布和 LCM(轻量级通信和编组)进行高性能进程间通信,实现分布式、多进程的机器人应用。 - -## 核心概念 - -### 1. 
模块定义 -模块是继承自 `dimos.core.Module` 的 Python 类,定义输入、输出和 RPC 方法: - -```python -from dimos.core import Module, In, Out, rpc -from dimos.msgs.geometry_msgs import Vector3 - -class MyModule(Module): # ROS Node - # 将输入/输出声明为初始化为 None 的类属性 - data_in: In[Vector3] = None # ROS Subscriber - data_out: Out[Vector3] = None # ROS Publisher - - def __init__(): - # 调用父类 Module 初始化 - super().__init__() - - @rpc - def remote_method(self, param): - """使用 @rpc 装饰的方法可以远程调用""" - return param * 2 -``` - -### 2. 模块部署 -使用 `dimos.deploy()` 方法在 Dask 工作进程中部署模块: - -```python -from dimos import core - -# 启动具有 N 个工作进程的 Dask 集群 -dimos = core.start(4) - -# 部署模块时可以传递初始化参数 -# 在这种情况下,param1 和 param2 被传递到模块初始化中 -module = dimos.deploy(Module, param1="value1", param2=123) -``` - -### 3. 流连接 -模块通过使用 LCM 传输的响应式流进行通信: - -```python -# 为输出配置 LCM 传输 -module1.data_out.transport = core.LCMTransport("/topic_name", MessageType) - -# 将模块输入连接到输出 -module2.data_in.connect(module1.data_out) - -# 访问底层的 Observable 流 -stream = module1.data_out.observable() -stream.subscribe(lambda msg: print(f"接收到: {msg}")) -``` - -### 4. 
模块生命周期 -```python -# 启动模块以开始处理 -module.start() # 如果定义了 @rpc start() 方法,则调用它 - -# 检查模块 I/O 配置 -print(module.io().result()) # 显示输入、输出和 RPC 方法 - -# 优雅关闭 -dimos.shutdown() -``` - -## 实际示例:机器人控制系统 - -```python -# 连接模块封装机器人硬件/仿真 -connection = dimos.deploy(ConnectionModule, ip=robot_ip) -connection.lidar.transport = core.LCMTransport("/lidar", LidarMessage) -connection.video.transport = core.LCMTransport("/video", Image) - -# 感知模块处理传感器数据 -perception = dimos.deploy(PersonTrackingStream, camera_intrinsics=[...]) -perception.video.connect(connection.video) -perception.tracking_data.transport = core.pLCMTransport("/person_tracking") - -# 开始处理 -connection.start() -perception.start() - -# 通过 RPC 启用跟踪 -perception.enable_tracking() - -# 获取最新的跟踪数据 -data = perception.get_tracking_data() -``` - -## LCM 传输配置 - -```python -# 用于简单类型(如激光雷达)的标准 LCM 传输 -connection.lidar.transport = core.LCMTransport("/lidar", LidarMessage) - -# 用于复杂 Python 对象/字典的基于 pickle 的传输 -connection.tracking_data.transport = core.pLCMTransport("/person_tracking") - -# 自动配置 LCM 系统缓冲区(在容器中必需) -from dimos.protocol import pubsub -pubsub.lcm.autoconf() -``` - -这种架构使得能够将复杂的机器人系统构建为可组合的分布式模块,这些模块通过流和 RPC 高效通信,从单机扩展到集群。 - -# Dimensional 安装指南 -## Python 安装(Ubuntu 22.04) - -```bash -sudo apt install python3-venv - -# 克隆仓库(dev 分支,无子模块) -git clone -b dev https://github.com/dimensionalOS/dimos.git -cd dimos - -# 创建并激活虚拟环境 -python3 -m venv venv -source venv/bin/activate - -sudo apt install portaudio19-dev python3-pyaudio - -# 如果尚未安装,请安装 torch 和 torchvision -# 示例 CUDA 11.7,Pytorch 2.0.1(如果需要不同的 pytorch 版本,请替换) -pip install torch==2.0.1 torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118 -``` - -### 安装依赖 -```bash -# 仅 CPU(建议首先尝试) -pip install .[cpu,dev] - -# CUDA 安装 -pip install .[cuda,dev] - -# 复制并配置环境变量 -cp default.env .env -``` - -### 测试安装 -```bash -# 运行标准测试 -pytest -s dimos/ - -# 测试模块功能 -pytest -s -m module dimos/ - -# 测试 LCM 通信 -pytest -s -m lcm dimos/ -``` - -# Unitree Go2 快速开始 - -要快速测试模块系统,您可以直接运行 
Unitree Go2 多进程示例: - -```bash -# 确保设置了所需的环境变量 -export ROBOT_IP= - -# 运行多进程 Unitree Go2 示例 -python dimos/robot/unitree_webrtc/multiprocess/unitree_go2.py -``` - -## 模块系统的高级特性 - -### 分布式计算 -DimOS 模块系统建立在 Dask 之上,提供了强大的分布式计算能力: - -- **自动负载均衡**:模块自动分布在可用的工作进程中 -- **容错性**:如果工作进程失败,模块可以在其他工作进程上重新启动 -- **可扩展性**:从单机到集群的无缝扩展 - -### 响应式编程模型 -使用 RxPY 实现的响应式流提供了: - -- **异步处理**:非阻塞的数据流处理 -- **背压处理**:自动管理快速生产者和慢速消费者 -- **操作符链**:使用 map、filter、merge 等操作符进行流转换 - -### 性能优化 -LCM 传输针对机器人应用进行了优化: - -- **零拷贝**:大型消息的高效内存使用 -- **低延迟**:微秒级的消息传递 -- **多播支持**:一对多的高效通信 diff --git a/docs/old/ros_navigation.md b/docs/old/ros_navigation.md deleted file mode 100644 index 4a74500b2f..0000000000 --- a/docs/old/ros_navigation.md +++ /dev/null @@ -1,284 +0,0 @@ -# Autonomy Stack API Documentation - -## Prerequisites - -- Ubuntu 24.04 -- [ROS 2 Jazzy Installation](https://docs.ros.org/en/jazzy/Installation.html) - -Add the following line to your `~/.bashrc` to source the ROS 2 Jazzy setup script automatically: - -``` echo "source /opt/ros/jazzy/setup.bash" >> ~/.bashrc``` - -## MID360 Ethernet Configuration (skip for sim) - -### Step 1: Configure Network Interface - -1. Open Network Settings in Ubuntu -2. Find your Ethernet connection to the MID360 -3. Click the gear icon to edit settings -4. Go to IPv4 tab -5. Change Method from "Automatic (DHCP)" to "Manual" -6. Add the following settings: - - **Address**: 192.168.1.5 - - **Netmask**: 255.255.255.0 - - **Gateway**: 192.168.1.1 -7. Click "Apply" - -### Step 2: Configure MID360 IP in JSON - -1. Find your MID360 serial number (on sticker under QR code) -2. Note the last 2 digits (e.g., if serial ends in 89, use 189) -3. Edit the configuration file: - -```bash -cd ~/autonomy_stack_mecanum_wheel_platform -nano src/utilities/livox_ros_driver2/config/MID360_config.json -``` - -4. Update line 28 with your IP (192.168.1.1xx where xx = last 2 digits): - -```json -"ip" : "192.168.1.1xx", -``` - -5. 
Save and exit - -### Step 3: Verify Connection - -```bash -ping 192.168.1.1xx # Replace xx with your last 2 digits -``` - -## Robot Configuration - -### Setting Robot Type - -The system supports different robot configurations. Set the `ROBOT_CONFIG_PATH` environment variable to specify which robot configuration to use: - -```bash -# For Unitree G1 (default if not set) -export ROBOT_CONFIG_PATH="unitree/unitree_g1" - -# Add to ~/.bashrc to make permanent -echo 'export ROBOT_CONFIG_PATH="unitree/unitree_g1"' >> ~/.bashrc -``` - -Available robot configurations: -- `unitree/unitree_g1` - Unitree G1 robot (default) -- Add your custom robot configs in `src/base_autonomy/local_planner/config/` - -## Build the system - -You must do this every you make a code change, this is not Python - -```colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release``` - -## System Launch - -### Simulation Mode - -```bash -cd ~/autonomy_stack_mecanum_wheel_platform - -# Base autonomy only -./system_simulation.sh - -# With route planner -./system_simulation_with_route_planner.sh - -# With exploration planner -./system_simulation_with_exploration_planner.sh -``` - -### Real Robot Mode - -```bash -cd ~/autonomy_stack_mecanum_wheel_platform - -# Base autonomy only -./system_real_robot.sh - -# With route planner -./system_real_robot_with_route_planner.sh - -# With exploration planner -./system_real_robot_with_exploration_planner.sh -``` - -## Quick Troubleshooting - -- **Cannot ping MID360**: Check Ethernet cable and network settings -- **SLAM drift**: Press clear-terrain-map button on joystick controller -- **Joystick not recognized**: Unplug and replug USB dongle - - -## ROS Topics - -### Input Topics (Commands) - -| Topic | Type | Description | -|-------|------|-------------| -| `/way_point` | `geometry_msgs/PointStamped` | Send navigation goal (position only) | -| `/goal_pose` | `geometry_msgs/PoseStamped` | Send goal with orientation | -| `/cancel_goal` | `std_msgs/Bool` | 
Cancel current goal (data: true) | -| `/joy` | `sensor_msgs/Joy` | Joystick input | -| `/stop` | `std_msgs/Int8` | Soft Stop (2=stop all commmand, 0 = release) | -| `/navigation_boundary` | `geometry_msgs/PolygonStamped` | Set navigation boundaries | -| `/added_obstacles` | `sensor_msgs/PointCloud2` | Virtual obstacles | - -### Output Topics (Status) - -| Topic | Type | Description | -|-------|------|-------------| -| `/state_estimation` | `nav_msgs/Odometry` | Robot pose from SLAM | -| `/registered_scan` | `sensor_msgs/PointCloud2` | Aligned lidar point cloud | -| `/terrain_map` | `sensor_msgs/PointCloud2` | Local terrain map | -| `/terrain_map_ext` | `sensor_msgs/PointCloud2` | Extended terrain map | -| `/path` | `nav_msgs/Path` | Local path being followed | -| `/cmd_vel` | `geometry_msgs/Twist` | Velocity commands to motors | -| `/goal_reached` | `std_msgs/Bool` | True when goal reached, false when cancelled/new goal | - -### Map Topics - -| Topic | Type | Description | -|-------|------|-------------| -| `/overall_map` | `sensor_msgs/PointCloud2` | Global map (only in sim)| -| `/registered_scan` | `sensor_msgs/PointCloud2` | Current scan in map frame | -| `/terrain_map` | `sensor_msgs/PointCloud2` | Local obstacle map | - -## Usage Examples - -### Send Goal -```bash -ros2 topic pub /way_point geometry_msgs/msg/PointStamped "{ - header: {frame_id: 'map'}, - point: {x: 5.0, y: 3.0, z: 0.0} -}" --once -``` - -### Cancel Goal -```bash -ros2 topic pub /cancel_goal std_msgs/msg/Bool "data: true" --once -``` - -### Monitor Robot State -```bash -ros2 topic echo /state_estimation -``` - -## Configuration Parameters - -### Vehicle Parameters (`localPlanner`) - -| Parameter | Default | Description | -|-----------|---------|-------------| -| `vehicleLength` | 0.5 | Robot length (m) | -| `vehicleWidth` | 0.5 | Robot width (m) | -| `maxSpeed` | 0.875 | Maximum speed (m/s) | -| `autonomySpeed` | 0.875 | Autonomous mode speed (m/s) | - -### Goal Tolerance Parameters - -| 
Parameter | Default | Description | -|-----------|---------|-------------| -| `goalReachedThreshold` | 0.3-0.5 | Distance to consider goal reached (m) | -| `goalClearRange` | 0.35-0.6 | Extra clearance around goal (m) | -| `goalBehindRange` | 0.35-0.8 | Stop pursuing if goal behind within this distance (m) | -| `omniDirGoalThre` | 1.0 | Distance for omnidirectional approach (m) | - -### Obstacle Avoidance - -| Parameter | Default | Description | -|-----------|---------|-------------| -| `obstacleHeightThre` | 0.1-0.2 | Height threshold for obstacles (m) | -| `adjacentRange` | 3.5 | Sensor range for planning (m) | -| `minRelZ` | -0.4 | Minimum relative height to consider (m) | -| `maxRelZ` | 0.3 | Maximum relative height to consider (m) | - -### Path Planning - -| Parameter | Default | Description | -|-----------|---------|-------------| -| `pathScale` | 0.875 | Path resolution scale | -| `minPathScale` | 0.675 | Minimum path scale when blocked | -| `minPathRange` | 0.8 | Minimum planning range (m) | -| `dirThre` | 90.0 | Direction threshold (degrees) | - -### Control Parameters (`pathFollower`) - -| Parameter | Default | Description | -|-----------|---------|-------------| -| `lookAheadDis` | 0.5 | Look-ahead distance (m) | -| `maxAccel` | 2.0 | Maximum acceleration (m/s²) | -| `slowDwnDisThre` | 0.875 | Slow down distance threshold (m) | - -### SLAM Blind Zones (`feature_extraction_node`) - -| Parameter | Mecanum | Description | -|-----------|---------|-------------| -| `blindFront` | 0.1 | Front blind zone (m) | -| `blindBack` | -0.2 | Back blind zone (m) | -| `blindLeft` | 0.1 | Left blind zone (m) | -| `blindRight` | -0.1 | Right blind zone (m) | -| `blindDiskRadius` | 0.4 | Cylindrical blind zone radius (m) | - -## Operating Modes - -### Mode Control -- **Joystick L2**: Hold for autonomy mode -- **Joystick R2**: Hold to disable obstacle checking - -### Speed Control -The robot automatically adjusts speed based on: -1. Obstacle proximity -2. Path complexity -3. 
Goal distance - -## Tuning Guide - -### For Tighter Navigation -- Decrease `goalReachedThreshold` (e.g., 0.2) -- Decrease `goalClearRange` (e.g., 0.3) -- Decrease `vehicleLength/Width` slightly - -### For Smoother Navigation -- Increase `goalReachedThreshold` (e.g., 0.5) -- Increase `lookAheadDis` (e.g., 0.7) -- Decrease `maxAccel` (e.g., 1.5) - -### For Aggressive Obstacle Avoidance -- Increase `obstacleHeightThre` (e.g., 0.15) -- Increase `adjacentRange` (e.g., 4.0) -- Increase blind zone parameters - -## Common Issues - -### Robot Oscillates at Goal -- Increase `goalReachedThreshold` -- Increase `goalBehindRange` - -### Robot Stops Too Far from Goal -- Decrease `goalReachedThreshold` -- Decrease `goalClearRange` - -### Robot Hits Low Obstacles -- Decrease `obstacleHeightThre` -- Adjust `minRelZ` to include lower points - -## SLAM Configuration - -### Localization Mode -Set in `livox_mid360.yaml`: -```yaml -local_mode: true -init_x: 0.0 -init_y: 0.0 -init_yaw: 0.0 -``` - -### Mapping Performance -```yaml -mapping_line_resolution: 0.1 # Decrease for higher quality -mapping_plane_resolution: 0.2 # Decrease for higher quality -max_iterations: 5 # Increase for better accuracy -``` diff --git a/docs/old/running_without_devcontainer.md b/docs/old/running_without_devcontainer.md deleted file mode 100644 index d06785e359..0000000000 --- a/docs/old/running_without_devcontainer.md +++ /dev/null @@ -1,21 +0,0 @@ -install nix, - -https://nixos.wiki/wiki/Nix_Installation_Guide -```sh -sudo install -d -m755 -o $(id -u) -g $(id -g) /nix -curl -L https://nixos.org/nix/install | sh -``` - -install direnv -https://direnv.net/ -```sh -apt-get install direnv -echo 'eval "$(direnv hook bash)"' >> ~/.bashrc -``` - -allow direnv in dimos will take a bit to pull the packages, -from that point on your env is standardized -```sh -cd dimos -direnv allow -``` diff --git a/docs/old/testing_stream_reply.md b/docs/old/testing_stream_reply.md deleted file mode 100644 index 
e3189bb5e8..0000000000 --- a/docs/old/testing_stream_reply.md +++ /dev/null @@ -1,174 +0,0 @@ -# Sensor Replay & Storage Toolkit - -A lightweight framework for **recording, storing, and replaying binary data streams for automated tests**. It keeps your repository small (data lives in Git LFS) while giving you Python‑first ergonomics for working with RxPY streams, point‑clouds, videos, command logs—anything you can pickle. - ---- - -## 1 At a Glance - -| Need | One liner | -| ------------------------------ | ------------------------------------------------------------- | -| **Iterate over every message** | `SensorReplay("raw_odometry_rotate_walk").iterate(print)` | -| **RxPY stream for piping** | `SensorReplay("raw_odometry_rotate_walk").stream().pipe(...)` | -| **Throttle replay rate** | `SensorReplay("raw_odometry_rotate_walk").stream(rate_hz=10)` | -| **Raw path to a blob/dir** | `path = testData("raw_odometry_rotate_walk")` | -| **Store a new stream** | see [`SensorStorage`](#5-storing-new-streams) | - -> If the requested blob is missing locally, it is transparently downloaded from Git LFS, extracted to `tests/data//`, and cached for subsequent runs. - ---- - -## 2 Goals - -* **Zero setup for CI & collaborators** – data is fetched on demand. -* **No repo bloat** – binaries live in Git LFS; the working tree stays trim. -* **Symmetric API** – `SensorReplay` ↔︎ `SensorStorage`; same name, different direction. -* **Format agnostic** – replay *anything* you can pickle (protobuf, numpy, JPEG, …). 
-* **Data type agnostic** – with testData("raw_odometry_rotate_walk") you get a Path object back, can be a raw video file, whole codebase, ML model etc - - ---- - -## 3 Replaying Data - -### 3.1 Iterating Messages - -```python -from sensor_tools import SensorReplay - -# Print every stored Odometry message -SensorReplay(name="raw_odometry_rotate_walk").iterate(print) -``` - -### 3.2 RxPY Streaming - -```python -from rx import operators as ops -from operator import sub, add -from dimos.utils.testing import SensorReplay, SensorStorage -from dimos.robot.unitree_webrtc.type.odometry import Odometry - -# Compute total yaw rotation (radians) - -total_rad = ( - SensorReplay("raw_odometry_rotate_walk", autocast=Odometry.from_msg) - .stream() - .pipe( - ops.map(lambda odom: odom.rot.z), - ops.pairwise(), # [1,2,3,4] -> [[1,2], [2,3], [3,4]] - ops.starmap(sub), # [sub(1,2), sub(2,3), sub(3,4)] - ops.reduce(add), - ) - .run() -) - -assert total_rad == pytest.approx(4.05, abs=0.01) -``` - -### 3.3 Lidar Mapping Example (200MB blob) - -```python -from dimos.utils.testing import SensorReplay, SensorStorage -from dimos.robot.unitree_webrtc.type.map import Map - -lidar_stream = SensorReplay("office_lidar", autocast=LidarMessage.from_msg) -map_ = Map(voxel_size=0.5) - -# Blocks until the stream is consumed -map_.consume(lidar_stream.stream()).run() - -assert map_.costmap.grid.shape == (404, 276) -``` - ---- - -## 4 Low Level Access - -If you want complete control, call **`testData(name)`** to get a `Path` to the extracted file or directory — no pickling assumptions: - -```python -absolute_path: Path = testData("some_name") -``` - -Do whatever you like: open a video file, load a model checkpoint, etc. - ---- - -## 5 Storing New Streams - -1. **Write a test marked `@pytest.mark.tool`** so CI skips it by default. -2. Use `SensorStorage` to persist the stream into `tests/data//*.pickle`. 
- -```python -@pytest.mark.tool -def test_store_odometry_stream(): - load_dotenv() - - robot = UnitreeGo2(ip=os.getenv("ROBOT_IP"), mode="ai") - robot.standup() - - storage = SensorStorage("raw_odometry_rotate_walk2") - storage.save_stream(robot.raw_odom_stream()) # ← records until interrupted - - try: - while True: - time.sleep(0.1) - except KeyboardInterrupt: - robot.liedown() -``` - -### 5.1 Behind the Scenes - -* Any new file/dir under `tests/data/` is treated as a **data blob**. -* `./bin/lfs_push` compresses it into `tests/data/.lfs/.tar.gz` *and* uploads it to Git LFS. -* Only the `.lfs/` archive is committed; raw binaries remain `.gitignored`. - ---- - -## 6 Storing Arbitrary Binary Data - -Just copy to `tests/data/whatever` -* `./bin/lfs_push` compresses it into `tests/data/.lfs/.tar.gz` *and* uploads it to Git LFS. - ---- - -## 7 Developer Workflow Checklist - -1. **Drop new data** into `tests/data/`. -2. Run your new tests that use SensorReplay or testData calls, make sure all works -3. Run `./bin/lfs_push` (or let the pre commit hook nag you). -4. Commit the resulting `tests/data/.lfs/.tar.gz`. -5. Optional - you can delete `tests/data/your_new_stuff` and re-run the test to ensure it gets downloaded from LFS correclty -6. Push/PR - -### 7.1 Pre commit Setup (optional but recommended) - -```sh -sudo apt install pre-commit -pre-commit install # inside repo root -``` - -Now each commit checks formatting, linting, *and* whether you forgot to push new blobs: - -``` -$ echo test > tests/data/foo.txt -$ git add tests/data/foo.txt && git commit -m "demo" -LFS data ......................................................... 
Failed -✗ New test data detected at /tests/data: - foo.txt -Either delete or run ./bin/lfs_push -``` - ---- - -## 8 Future Work - -- A replay rate that mirrors the **original message timestamps** can be implemented downstream (e.g., an RxPY operator) -- Likely this same system should be used for production binary data delivery as well (Models etc) - ---- - -## 9 Existing Examples - -* `dimos/robot/unitree_webrtc/type/test_odometry.py` -* `dimos/robot/unitree_webrtc/type/test_map.py` diff --git a/docs/package_usage.md b/docs/package_usage.md deleted file mode 100644 index 328708122e..0000000000 --- a/docs/package_usage.md +++ /dev/null @@ -1,62 +0,0 @@ -# Package Usage - -## With `uv` - -Init your repo if not already done: - -```bash -uv init -``` - -Install: - -```bash -uv add dimos[base,dev,unitree] -``` - -Test the Unitree Go2 robot in the simulator: - -```bash -uv run dimos --simulation run unitree-go2 -``` - -Run your actual robot: - -```bash -uv run dimos --robot-ip=192.168.X.XXX run unitree-go2 -``` - -### Without installing - -With `uv` you can run tools without having to explicitly install: - -```bash -uvx --from dimos[base,unitree] dimos --robot-ip=192.168.X.XXX run unitree-go2 -``` - -## With `pip` - -Create an environment if not already done: - -```bash -python -m venv .venv -. 
.venv/bin/activate -``` - -Install: - -```bash -pip install dimos[base,dev,unitree] -``` - -Test the Unitree Go2 robot in the simulator: - -```bash -dimos --simulation run unitree-go2 -``` - -Run your actual robot: - -```bash -dimos --robot-ip=192.168.X.XXX run unitree-go2 -``` diff --git a/docs/platforms/quadruped/go2/index.md b/docs/platforms/quadruped/go2/index.md new file mode 100644 index 0000000000..3ce7c425de --- /dev/null +++ b/docs/platforms/quadruped/go2/index.md @@ -0,0 +1 @@ +# GO2 Getting started diff --git a/docs/todo.md b/docs/todo.md new file mode 100644 index 0000000000..464090415c --- /dev/null +++ b/docs/todo.md @@ -0,0 +1 @@ +# TODO diff --git a/docs/usage/README.md b/docs/usage/README.md new file mode 100644 index 0000000000..071b6fc0b2 --- /dev/null +++ b/docs/usage/README.md @@ -0,0 +1,12 @@ +# Concepts + +This page explains general concepts. + +## Table of Contents + +- [Modules](/docs/usage/modules.md): The primary units of deployment in DimOS, modules run in parallel and are python classes. +- [Streams](/docs/usage/data_streams/README.md): How modules communicate, a Pub / Sub system. +- [Blueprints](/docs/usage/blueprints.md): a way to group modules together and define their connections to each other. +- [RPC](/docs/usage/blueprints.md#calling-the-methods-of-other-modules): how one module can call a method on another module (arguments get serialized to JSON-like binary data). +- [Skills](/docs/usage/blueprints.md#defining-skills): An RPC function, except it can be called by an AI agent (a tool for an AI). +- Agents: AI that has an objective, access to stream data, and is capable of calling skills as tools.
diff --git a/docs/usage/assets/abstraction_layers.svg b/docs/usage/assets/abstraction_layers.svg new file mode 100644 index 0000000000..0903cfbf27 --- /dev/null +++ b/docs/usage/assets/abstraction_layers.svg @@ -0,0 +1,20 @@ + + +Blueprints + + + +Modules + + + +Transports + + + +PubSub +robot configs +camera, nav +LCM, SHM, ROS +pub/sub API + diff --git a/docs/concepts/assets/camera_module.svg b/docs/usage/assets/camera_module.svg similarity index 100% rename from docs/concepts/assets/camera_module.svg rename to docs/usage/assets/camera_module.svg diff --git a/docs/concepts/assets/go2_agentic.svg b/docs/usage/assets/go2_agentic.svg similarity index 100% rename from docs/concepts/assets/go2_agentic.svg rename to docs/usage/assets/go2_agentic.svg diff --git a/docs/concepts/assets/go2_nav.svg b/docs/usage/assets/go2_nav.svg similarity index 100% rename from docs/concepts/assets/go2_nav.svg rename to docs/usage/assets/go2_nav.svg diff --git a/docs/usage/assets/lcmspy.png b/docs/usage/assets/lcmspy.png new file mode 100644 index 0000000000..6e68fde03a --- /dev/null +++ b/docs/usage/assets/lcmspy.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91da9ef9f7797cce332da448739e28591f7ecfc0fd674e8b4be973cf28331438 +size 7118 diff --git a/docs/usage/assets/pubsub_benchmark.png b/docs/usage/assets/pubsub_benchmark.png new file mode 100644 index 0000000000..759a8b3977 --- /dev/null +++ b/docs/usage/assets/pubsub_benchmark.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:728484a4358df18ced7b5763a88a962701c2b02b5d319eb9a8b28c6c72d009fe +size 23946 diff --git a/docs/api/assets/transforms.png b/docs/usage/assets/transforms.png similarity index 100% rename from docs/api/assets/transforms.png rename to docs/usage/assets/transforms.png diff --git a/docs/api/assets/transforms_chain.svg b/docs/usage/assets/transforms_chain.svg similarity index 100% rename from docs/api/assets/transforms_chain.svg rename to 
docs/usage/assets/transforms_chain.svg diff --git a/docs/api/assets/transforms_modules.svg b/docs/usage/assets/transforms_modules.svg similarity index 100% rename from docs/api/assets/transforms_modules.svg rename to docs/usage/assets/transforms_modules.svg diff --git a/docs/api/assets/transforms_tree.svg b/docs/usage/assets/transforms_tree.svg similarity index 100% rename from docs/api/assets/transforms_tree.svg rename to docs/usage/assets/transforms_tree.svg diff --git a/docs/usage/blueprints.md b/docs/usage/blueprints.md new file mode 100644 index 0000000000..54b52ba3c0 --- /dev/null +++ b/docs/usage/blueprints.md @@ -0,0 +1,316 @@ +# Blueprints + +Blueprints (`_BlueprintAtom`) are instructions for how to initialize a `Module`. + +You don't typically want to run a single module, so multiple blueprints are handled together in `Blueprint`. + +You create a `Blueprint` from a single module (say `ConnectionModule`) with: + +```python session=blueprint-ex1 +from dimos.core.blueprints import Blueprint +from dimos.core import Module, rpc + +class ConnectionModule(Module): + def __init__(self, arg1, arg2, kwarg='value') -> None: + super().__init__() + +blueprint = Blueprint.create(ConnectionModule, 'arg1', 'arg2', kwarg='value') +``` + +But the same thing can be accomplished more succinctly as: + +```python session=blueprint-ex1 +connection = ConnectionModule.blueprint +``` + +Now you can create the blueprint with: + +```python session=blueprint-ex1 +blueprint = connection('arg1', 'arg2', kwarg='value') +``` + +## Linking blueprints + +You can link multiple blueprints together with `autoconnect`: + +```python session=blueprint-ex1 +from dimos.core.blueprints import autoconnect + +class Module1(Module): + def __init__(self, arg1) -> None: + super().__init__() + +class Module2(Module): + ... + +class Module3(Module): + ... 
+ +module1 = Module1.blueprint +module2 = Module2.blueprint +module3 = Module3.blueprint + +blueprint = autoconnect( + module1(), + module2(), + module3(), +) +``` + +`blueprint` itself is a `Blueprint` so you can link it with other modules: + +```python session=blueprint-ex1 +class Module4(Module): + ... + +class Module5(Module): + ... + +module4 = Module4.blueprint +module5 = Module5.blueprint + +expanded_blueprint = autoconnect( + blueprint, + module4(), + module5(), +) +``` + +Blueprints are frozen data classes, and `autoconnect()` always constructs an expanded blueprint so you never have to worry about changes in one affecting the other. + +### Duplicate module handling + +If the same module appears multiple times in `autoconnect`, the **later blueprint wins** and overrides earlier ones: + +```python session=blueprint-ex1 +blueprint = autoconnect( + module1(arg1=1), + module2(), + module1(arg1=2), # This one is used, the first is discarded +) +``` + +This is so you can "inherit" from one blueprint but override something you need to change. + +## How transports are linked + +Imagine you have this code: + +```python session=blueprint-ex1 +from functools import partial + +from dimos.core.blueprints import Blueprint, autoconnect +from dimos.core import Module, rpc, Out, In +from dimos.msgs.sensor_msgs import Image + +class ModuleA(Module): + image: Out[Image] + start_explore: Out[bool] + +class ModuleB(Module): + image: In[Image] + begin_explore: In[bool] + +module_a = partial(Blueprint.create, ModuleA) +module_b = partial(Blueprint.create, ModuleB) + +autoconnect(module_a(), module_b()) +``` + +Connections are linked based on `(property_name, object_type)`. In this case `('image', Image)` will be connected between the two modules, but `begin_explore` will not be linked to `start_explore`. + +## Topic names + +By default, the name of the property is used to generate the topic name. So for `image`, the topic will be `/image`. 
+ +The property name is used only if it's unique. If two modules have the same property name with different types, then both get a random topic such as `/SGVsbG8sIFdvcmxkI`. + +If you don't like the name you can always override it like in the next section. + +## Which transport is used? + +By default `LCMTransport` is used if the object supports `lcm_encode`. If it doesn't `pLCMTransport` is used (meaning "pickled LCM"). + +You can override transports with the `transports` method. It returns a new blueprint in which the override is set. + +```python session=blueprint-ex1 +from dimos.core.transport import pSHMTransport, pLCMTransport + +base_blueprint = autoconnect( + module1(arg1=1), + module2(), +) +expanded_blueprint = autoconnect( + base_blueprint, + module4(), + module5(), +) +base_blueprint = base_blueprint.transports({ + ("image", Image): pSHMTransport( + "/go2/color_image", default_capacity=1920 * 1080 * 3, # 1920x1080 frame x 3 (RGB) x uint8 + ), + ("start_explore", bool): pLCMTransport("/start_explore"), +}) +``` + +Note: `expanded_blueprint` does not get the transport overrides because it's created from the initial value of `base_blueprint`, not the second. + +## Remapping connections + +Sometimes you need to rename a connection to match what other modules expect. 
You can use `remappings` to rename module connections: + +```python session=blueprint-ex2 +from dimos.core.blueprints import autoconnect +from dimos.core import Module, rpc, Out, In +from dimos.msgs.sensor_msgs import Image + +class ConnectionModule(Module): + color_image: Out[Image] # Outputs on 'color_image' + +class ProcessingModule(Module): + rgb_image: In[Image] # Expects input on 'rgb_image' + +# Without remapping, these wouldn't connect automatically +# With remapping, color_image is renamed to rgb_image +blueprint = ( + autoconnect( + ConnectionModule.blueprint(), + ProcessingModule.blueprint(), + ) + .remappings([ + (ConnectionModule, 'color_image', 'rgb_image'), + ]) +) +``` + +After remapping: +- The `color_image` output from `ConnectionModule` is treated as `rgb_image` +- It automatically connects to any module with an `rgb_image` input of type `Image` +- The topic name becomes `/rgb_image` instead of `/color_image` + +If you want to override the topic, you still have to do it manually: + +```python session=blueprint-ex2 +from dimos.core.transport import LCMTransport +blueprint.remappings([ + (ConnectionModule, 'color_image', 'rgb_image'), +]).transports({ + ("rgb_image", Image): LCMTransport("/custom/rgb/image", Image), +}) +``` + +## Overriding global configuration. + +Each module can optionally take global config as a `cfg` option in `__init__`. E.g.: + +```python session=blueprint-ex3 +from dimos.core import Module, rpc +from dimos.core.global_config import GlobalConfig + +class ModuleA(Module): + + def __init__(self, cfg: GlobalConfig | None = None): + self._global_config: GlobalConfig = cfg + ... +``` + +The config is normally taken from .env or from environment variables. 
But you can specifically override the values for a specific blueprint: + +```python session=blueprint-ex3 +blueprint = ModuleA.blueprint().global_config(n_dask_workers=8) +``` + +## Calling the methods of other modules + +Imagine you have this code: + +```python session=blueprint-ex3 +from dimos.core import Module, rpc + +class Drone(Module): + + @rpc + def get_time(self) -> str: + ... + +class HelperModule(Module): + def set_alarm_clock(self) -> None: + ... +``` + +And you want to call `Drone.get_time` in `HelperModule.set_alarm_clock`. + +To do this, you can request a module reference. + +```python session=blueprint-ex3 +from dimos.core import Module, rpc + +class HelperModule(Module): + drone_module: Drone + + def set_alarm_clock(self) -> None: + print(self.drone_module.get_time_rpc()) +``` + +But what if we want `HelperModule` to work for more than just `Drone`? For that we can use a spec. + +```python session=blueprint-ex3 +from dimos.spec.utils import Spec +from typing import Protocol + +class Drone(Module): + def get_time(self) -> str: + return "1:00 PM" + +class Car(Module): + def get_time(self) -> str: + return "2:00 PM" + +# Your Spec +class AnyModuleWithGetTime(Spec, Protocol): + def get_time(self) -> str: ... + +class ModuleB(Module): + device: AnyModuleWithGetTime + + def request_the_time(self) -> None: + # autoconnect() will automatically find whatever module has a get_time() method + print(self.device.get_time()) +``` + +## Defining skills + +Skills are methods on a `Module` decorated with `@skill`. The agent automatically discovers all skills from launched modules at startup.
+ +```python session=blueprint-ex4 +from dimos.core import Module, rpc +from dimos.agents.annotation import skill +from dimos.core.global_config import GlobalConfig + +class SomeSkill(Module): + + @skill + def some_skill(self) -> str: + """Description of the skill for the LLM.""" + return "result" +``` + +## Building + +All you have to do to build a blueprint is call: + +```python session=blueprint-ex4 +module_coordinator = SomeSkill.blueprint().build(global_config=GlobalConfig()) +``` + +This returns a `ModuleCoordinator` instance that manages all deployed modules. + +### Running and shutting down + +You can block the thread until it exits with: + +```python session=blueprint-ex4 +module_coordinator.loop() +``` + +This will wait for Ctrl+C and then automatically stop all modules and clean up resources. diff --git a/docs/api/configuration.md b/docs/usage/configuration.md similarity index 87% rename from docs/api/configuration.md rename to docs/usage/configuration.md index a9c8de0268..eaad4a9271 100644 --- a/docs/api/configuration.md +++ b/docs/usage/configuration.md @@ -45,7 +45,7 @@ Error: Config.__init__() got an unexpected keyword argument 'something' # Configurable Modules -[Modules]() inherit from `Configurable`, so all of the above applies. Module configs should inherit from `ModuleConfig` ([`core/module.py`](/dimos/core/module.py#L40)), which includes shared configuration for all modules like transport protocols, frame_ids, etc. +[Modules](/docs/usage/modules.md) inherit from `Configurable`, so all of the above applies. Module configs should inherit from `ModuleConfig` ([`core/module.py`](/dimos/core/module.py#L40)), which includes shared configuration for all modules like transport protocols, frame IDs, etc. 
```python from dataclasses import dataclass diff --git a/docs/api/sensor_streams/index.md b/docs/usage/data_streams/README.md similarity index 100% rename from docs/api/sensor_streams/index.md rename to docs/usage/data_streams/README.md diff --git a/docs/api/sensor_streams/advanced_streams.md b/docs/usage/data_streams/advanced_streams.md similarity index 100% rename from docs/api/sensor_streams/advanced_streams.md rename to docs/usage/data_streams/advanced_streams.md diff --git a/docs/api/sensor_streams/assets/alignment_flow.svg b/docs/usage/data_streams/assets/alignment_flow.svg similarity index 100% rename from docs/api/sensor_streams/assets/alignment_flow.svg rename to docs/usage/data_streams/assets/alignment_flow.svg diff --git a/docs/api/sensor_streams/assets/alignment_overview.svg b/docs/usage/data_streams/assets/alignment_overview.svg similarity index 100% rename from docs/api/sensor_streams/assets/alignment_overview.svg rename to docs/usage/data_streams/assets/alignment_overview.svg diff --git a/docs/api/sensor_streams/assets/alignment_timeline.png b/docs/usage/data_streams/assets/alignment_timeline.png similarity index 100% rename from docs/api/sensor_streams/assets/alignment_timeline.png rename to docs/usage/data_streams/assets/alignment_timeline.png diff --git a/docs/api/sensor_streams/assets/alignment_timeline2.png b/docs/usage/data_streams/assets/alignment_timeline2.png similarity index 100% rename from docs/api/sensor_streams/assets/alignment_timeline2.png rename to docs/usage/data_streams/assets/alignment_timeline2.png diff --git a/docs/api/sensor_streams/assets/alignment_timeline3.png b/docs/usage/data_streams/assets/alignment_timeline3.png similarity index 100% rename from docs/api/sensor_streams/assets/alignment_timeline3.png rename to docs/usage/data_streams/assets/alignment_timeline3.png diff --git a/docs/api/sensor_streams/assets/backpressure.svg b/docs/usage/data_streams/assets/backpressure.svg similarity index 100% rename from 
docs/api/sensor_streams/assets/backpressure.svg rename to docs/usage/data_streams/assets/backpressure.svg diff --git a/docs/api/sensor_streams/assets/backpressure_solution.svg b/docs/usage/data_streams/assets/backpressure_solution.svg similarity index 100% rename from docs/api/sensor_streams/assets/backpressure_solution.svg rename to docs/usage/data_streams/assets/backpressure_solution.svg diff --git a/docs/api/sensor_streams/assets/frame_mosaic.jpg b/docs/usage/data_streams/assets/frame_mosaic.jpg similarity index 100% rename from docs/api/sensor_streams/assets/frame_mosaic.jpg rename to docs/usage/data_streams/assets/frame_mosaic.jpg diff --git a/docs/api/sensor_streams/assets/frame_mosaic2.jpg b/docs/usage/data_streams/assets/frame_mosaic2.jpg similarity index 100% rename from docs/api/sensor_streams/assets/frame_mosaic2.jpg rename to docs/usage/data_streams/assets/frame_mosaic2.jpg diff --git a/docs/api/sensor_streams/assets/getter_hot_cold.svg b/docs/usage/data_streams/assets/getter_hot_cold.svg similarity index 100% rename from docs/api/sensor_streams/assets/getter_hot_cold.svg rename to docs/usage/data_streams/assets/getter_hot_cold.svg diff --git a/docs/api/sensor_streams/assets/observable_flow.svg b/docs/usage/data_streams/assets/observable_flow.svg similarity index 100% rename from docs/api/sensor_streams/assets/observable_flow.svg rename to docs/usage/data_streams/assets/observable_flow.svg diff --git a/docs/api/sensor_streams/assets/sharpness_graph.svg b/docs/usage/data_streams/assets/sharpness_graph.svg similarity index 100% rename from docs/api/sensor_streams/assets/sharpness_graph.svg rename to docs/usage/data_streams/assets/sharpness_graph.svg diff --git a/docs/api/sensor_streams/assets/sharpness_graph2.svg b/docs/usage/data_streams/assets/sharpness_graph2.svg similarity index 100% rename from docs/api/sensor_streams/assets/sharpness_graph2.svg rename to docs/usage/data_streams/assets/sharpness_graph2.svg diff --git 
a/docs/api/sensor_streams/quality_filter.md b/docs/usage/data_streams/quality_filter.md similarity index 97% rename from docs/api/sensor_streams/quality_filter.md rename to docs/usage/data_streams/quality_filter.md index 26d40733fd..db21da9c54 100644 --- a/docs/api/sensor_streams/quality_filter.md +++ b/docs/usage/data_streams/quality_filter.md @@ -50,7 +50,7 @@ Qualities: [0.9] For camera streams, we provide `sharpness_barrier` which uses the image's sharpness score. -Let's use real camera data from the Unitree Go2 robot to demonstrate. We use the [Sensor Replay](/docs/old/testing_stream_reply.md) toolkit, which provides access to recorded robot data: +Let's use real camera data from the Unitree Go2 robot to demonstrate. We use the [Sensor Storage & Replay](/docs/usage/data_streams/storage_replay.md) toolkit, which provides access to recorded robot data: ```python session=qb from dimos.utils.testing import TimedSensorReplay @@ -243,7 +243,7 @@ class CameraModule(Module): The sharpness score (0.0 to 1.0) is computed using Sobel edge detection: -from [`NumpyImage.py`](/dimos/msgs/sensor_msgs/image_impls/NumpyImage.py) +from [`Image.py`](/dimos/msgs/sensor_msgs/Image.py) ```python session=qb import cv2 diff --git a/docs/api/sensor_streams/reactivex.md b/docs/usage/data_streams/reactivex.md similarity index 89% rename from docs/api/sensor_streams/reactivex.md rename to docs/usage/data_streams/reactivex.md index a80318e02d..45873b471b 100644 --- a/docs/api/sensor_streams/reactivex.md +++ b/docs/usage/data_streams/reactivex.md @@ -311,7 +311,16 @@ got 4 ## Creating Observables -### From callback-based APIs +There are two common callback patterns in APIs.
Use the appropriate helper: + +| Pattern | Example | Helper | +|---------|---------|--------| +| Register/unregister with same callback | `sensor.register(cb)` / `sensor.unregister(cb)` | `callback_to_observable` | +| Subscribe returns unsub function | `unsub = pubsub.subscribe(cb)` | `to_observable` | + +### From register/unregister APIs + +Use `callback_to_observable` when the API has separate register and unregister functions that take the same callback reference: ```python session=create import reactivex as rx @@ -353,6 +362,44 @@ received: ['reading_1', 'reading_2'] callbacks after dispose: 0 ``` +### From subscribe-returns-unsub APIs + +Use `to_observable` when the subscribe function returns an unsubscribe callable: + +```python session=create +from dimos.utils.reactive import to_observable + +class MockPubSub: + def __init__(self): + self._callbacks = [] + def subscribe(self, cb): + self._callbacks.append(cb) + return lambda: self._callbacks.remove(cb) # returns unsub function + def publish(self, value): + for cb in self._callbacks: + cb(value) + +pubsub = MockPubSub() + +obs = to_observable(pubsub.subscribe) + +received = [] +sub = obs.subscribe(lambda x: received.append(x)) + +pubsub.publish("msg_1") +pubsub.publish("msg_2") +print("received:", received) + +sub.dispose() +print("callbacks after dispose:", len(pubsub._callbacks)) +``` + + +``` +received: ['msg_1', 'msg_2'] +callbacks after dispose: 0 +``` + ### From scratch with `rx.create` ```python session=create diff --git a/docs/api/sensor_streams/storage_replay.md b/docs/usage/data_streams/storage_replay.md similarity index 96% rename from docs/api/sensor_streams/storage_replay.md rename to docs/usage/data_streams/storage_replay.md index 66e913b197..c5cbe306a8 100644 --- a/docs/api/sensor_streams/storage_replay.md +++ b/docs/usage/data_streams/storage_replay.md @@ -159,7 +159,7 @@ replay.stream( ## Usage: Stub Connections for Testing -A common pattern is creating replay-based connection stubs for 
testing without hardware. From [`robot/unitree/connection/go2.py`](/dimos/robot/unitree/connection/go2.py#L83): +A common pattern is creating replay-based connection stubs for testing without hardware. From [`robot/unitree/go2/connection.py`](/dimos/robot/unitree/go2/connection.py#L83): This is a bit primitive. We'd like to write a higher-order API for recording full module I/O for any module, but this is a work in progress at the moment. @@ -202,7 +202,7 @@ Each pickle file contains a tuple `(timestamp, data)`: Files are numbered sequentially: `000.pickle`, `001.pickle`, etc. -Recordings are stored in the `data/` directory. See [Data Loading](/docs/data.md) for how data storage works, including Git LFS handling for large datasets. +Recordings are stored in the `data/` directory. See [Data Loading](/docs/development/large_file_management.md) for how data storage works, including Git LFS handling for large datasets. ## API Reference diff --git a/docs/api/sensor_streams/temporal_alignment.md b/docs/usage/data_streams/temporal_alignment.md similarity index 98% rename from docs/api/sensor_streams/temporal_alignment.md rename to docs/usage/data_streams/temporal_alignment.md index b552ac54cc..66230c9d54 100644 --- a/docs/api/sensor_streams/temporal_alignment.md +++ b/docs/usage/data_streams/temporal_alignment.md @@ -34,7 +34,7 @@ Below we set up replay of real camera and lidar data from the Unitree Go2 robot.
Stream Setup -You can read more about [sensor storage here](storage_replay.md) and [LFS data store here](/docs/data.md). +You can read more about [sensor storage here](storage_replay.md) and [LFS data storage here](/docs/development/large_file_management.md). ```python session=align no-result from reactivex import Subject @@ -70,7 +70,7 @@ lidar_stream = lidar_replay.stream(from_timestamp=seek_ts, duration=2.0).pipe(
-Streams would normally come from an actual robot into your module via `IN` inputs. [`detection/module3D.py`](/dimos/perception/detection/module3D.py#L11) is a good example of this. +Streams would normally come from an actual robot into your module via `In` inputs. [`detection/module3D.py`](/dimos/perception/detection/module3D.py#L11) is a good example of this. Assume we have them. Let's align them. diff --git a/docs/concepts/lcm.md b/docs/usage/lcm.md similarity index 70% rename from docs/concepts/lcm.md rename to docs/usage/lcm.md index f1b41cfc9f..99437a2458 100644 --- a/docs/concepts/lcm.md +++ b/docs/usage/lcm.md @@ -1,9 +1,29 @@ - # LCM Messages -[LCM (Lightweight Communications and Marshalling)](https://github.com/lcm-proj/lcm) is a message-passing system with bindings for many languages (C, C++, Python, Java, Lua, Go). While LCM includes a UDP multicast transport, its real power is the message definition format - classes that can encode themselves to a compact binary representation. +DimOS uses [LCM (Lightweight Communications and Marshalling)](https://github.com/lcm-proj/lcm) for inter-process communication on a local machine (similar to how ROS uses DDS). LCM is a simple [UDP multicast](https://lcm-proj.github.io/lcm/content/udp-multicast-protocol.html#lcm-udp-multicast-protocol-description) pubsub protocol with a straightforward [message definition language](https://lcm-proj.github.io/lcm/content/lcm-type-ref.html#lcm-type-specification-language). + +The LCM project provides pubsub clients and code generators for many languages. For us the power of LCM is its message definition format, multi-language classes that encode themselves to a compact binary format. This means LCM messages can be sent over any transport (WebSocket, SSH, shared memory, etc.) between different programming languages. 
+ +Our messages are ported from ROS (they are structurally compatible in order to facilitate easy communication to ROS if needed). +The repo that hosts our message definitions and autogenerators is [dimos-lcm](https://github.com/dimensionalOS/dimos-lcm/). + +Our LCM implementation significantly [outperforms ROS for local communication](/docs/usage/transports.md#benchmarks). + +## Supported languages + +Apart from Python, we have examples of LCM integrations for: +- [**C++**](/examples/language-interop/cpp/README.md) +- [**TypeScript**](/examples/language-interop/ts/README.md) +- [**Lua**](/examples/language-interop/lua/README.md) + +These examples live in our [/examples/language-interop/](/examples/language-interop/) directory. + +Types generated (but no examples yet) for: +[**C#**](https://github.com/dimensionalOS/dimos-lcm/tree/main/generated/csharp) and [**Java**](https://github.com/dimensionalOS/dimos-lcm/tree/main/generated/java) + +### Native Modules -Dimos uses LCM message definitions for all inter-module communication. Because messages serialize to binary, they can be sent over any transport - not just LCM's UDP multicast, but also shared memory, Redis, WebSockets, or any other channel. +Given LCM is so portable, we can easily run DimOS [Modules](/docs/usage/modules.md) written in [third-party languages](/docs/usage/native_modules.md). ## dimos-lcm Package diff --git a/docs/concepts/modules.md b/docs/usage/modules.md similarity index 92% rename from docs/concepts/modules.md rename to docs/usage/modules.md index ee7fbaf2c9..e2f91b58ee 100644 --- a/docs/concepts/modules.md +++ b/docs/usage/modules.md @@ -1,5 +1,5 @@ -# Dimos Modules +# DimOS Modules Modules are subsystems on a robot that operate autonomously and communicate with other subsystems using standardized messages. 
@@ -47,10 +47,10 @@ print(CameraModule.io()) ├─ color_image: Image ├─ camera_info: CameraInfo │ - ├─ RPC set_transport(stream_name: str, transport: Transport) -> bool ├─ RPC start() + ├─ RPC stop() │ - ├─ Skill video_stream (stream=passive, reducer=latest_reducer, output=image) + ├─ Skill take_a_picture ``` We can see that the camera module outputs two streams: @@ -58,9 +58,9 @@ We can see that the camera module outputs two streams: - `color_image` with [sensor_msgs.Image](https://docs.ros.org/en/melodic/api/sensor_msgs/html/msg/Image.html) type - `camera_info` with [sensor_msgs.CameraInfo](https://docs.ros.org/en/melodic/api/sensor_msgs/html/msg/CameraInfo.html) type -It offers two RPC calls: `start()` and `stop()`. +It offers two RPC calls: `start()` and `stop()` (lifecycle methods). -As well as an agentic [Skill](skills.md) called `video_stream` (more about this later, in [Skills Tutorial](skills.md)). +It also exposes an agentic [skill](/docs/usage/blueprints.md#defining-skills) called `take_a_picture` (more on skills in the Blueprints guide). We can start this module and explore the output of its streams in real time (this will use your webcam). @@ -120,7 +120,7 @@ print(Detection2DModule.io()) ├─ RPC stop() -> None ``` -TODO: add easy way to print config + Looks like the detector just needs an image input and outputs some sort of detection and annotation messages. Let's connect it to a camera. @@ -174,3 +174,6 @@ to_svg(agentic, "assets/go2_agentic.svg") ![output](assets/go2_agentic.svg) + + +To see more information on how to use Blueprints, see [Blueprints](/docs/usage/blueprints.md). diff --git a/docs/usage/native_modules.md b/docs/usage/native_modules.md new file mode 100644 index 0000000000..5a9362839f --- /dev/null +++ b/docs/usage/native_modules.md @@ -0,0 +1,282 @@ +# Native Modules + +Prerequisite for this is to understand dimos [Modules](/docs/usage/modules.md) and [Blueprints](/docs/usage/blueprints.md). 
+ +Native modules let you wrap **any executable** as a first-class DimOS module, given it speaks LCM. + +Python will handle blueprint wiring, lifecycle, and logging. The native binary handles the actual computation, publishing and subscribing directly on LCM. + +The Python module **never touches the pubsub data**. It just passes configuration and the LCM topics to use via CLI args to your executable. + +To learn how to speak LCM with the rest of DimOS, read our [LCM intro](/docs/usage/lcm.md). + +## Defining a native module + +On the Python side, a native module is just a definition of a **config** dataclass and a **module** class specifying pubsub I/O. + +Both the config dataclass and pubsub topics get converted to CLI args passed down to your executable once the module is started. + +```python no-result session=nativemodule +from dataclasses import dataclass +from dimos.core import Out, LCMTransport +from dimos.core.native_module import NativeModule, NativeModuleConfig +from dimos.msgs.sensor_msgs.PointCloud2 import PointCloud2 +from dimos.msgs.sensor_msgs.Imu import Imu +import time + +@dataclass(kw_only=True) +class MyLidarConfig(NativeModuleConfig): + executable: str = "./build/my_lidar" + host_ip: str = "192.168.1.5" + frequency: float = 10.0 + +class MyLidar(NativeModule): + default_config = MyLidarConfig + pointcloud: Out[PointCloud2] + imu: Out[Imu] + + +``` + +That's it. `MyLidar` is a full DimOS module. You can use it with `autoconnect`, blueprints, transport overrides, and specs. Once this module is started, your `./build/my_lidar` will get called with specific CLI args. + + +## How it works + +When `start()` is called, NativeModule: + +1. **Builds the executable** if it doesn't exist and `build_command` is set. +2. **Collects topics** from blueprint-assigned transports on each declared port. +3. **Builds the command line**: `<executable> --<port> <topic> ... --<config_field> <value> ...` +4. **Launches the subprocess** with `Popen`, piping stdout/stderr. +5. 
**Starts a watchdog** thread that calls `stop()` if the process crashes. + +For the example above, the launched command would look like: + +```sh +./build/my_lidar \ + --pointcloud '/pointcloud#sensor_msgs.PointCloud2' \ + --imu '/imu#sensor_msgs.Imu' \ + --host_ip 192.168.1.5 \ + --frequency 10.0 +``` + +```python ansi=false session=nativemodule skip +mylidar = MyLidar() +mylidar.pointcloud.transport = LCMTransport("/lidar", PointCloud2) +mylidar.imu.transport = LCMTransport("/imu", Imu) +mylidar.start() +``` + + +``` +2026-02-14T11:22:12.123963Z [info ] Starting native process [dimos/core/native_module.py] cmd='./build/my_lidar --pointcloud /lidar#sensor_msgs.PointCloud2 --imu /imu#sensor_msgs.Imu --host_ip 192.168.1.5 --frequency 10.0' cwd=/home/lesh/coding/dimos/docs/usage/build +``` + +Topic strings use the format `/<topic>#<msg_type>`, which is the LCM channel name that Python `LCMTransport` subscribers use. The native binary publishes on these exact channels. + +When `stop()` is called, the process receives SIGTERM. If it doesn't exit within `shutdown_timeout` seconds (default 10), it gets SIGKILL. + +## Config + +`NativeModuleConfig` extends `ModuleConfig` with subprocess fields: + +| Field | Type | Default | Description | +|--------------------|------------------|---------------|-------------------------------------------------------------| +| `executable` | `str` | *(required)* | Path to the native binary (relative to `cwd` if set) | +| `build_command` | `str \| None` | `None` | Shell command to run if executable is missing (auto-build) | +| `cwd` | `str \| None` | `None` | Working directory for build and runtime. 
Relative paths are resolved against the Python file defining the module | +| `extra_args` | `list[str]` | `[]` | Additional CLI arguments appended after auto-generated ones | +| `extra_env` | `dict[str, str]` | `{}` | Extra environment variables for the subprocess | +| `shutdown_timeout` | `float` | `10.0` | Seconds to wait for SIGTERM before SIGKILL | +| `log_format` | `LogFormat` | `TEXT` | How to parse subprocess output (`TEXT` or `JSON`) | +| `cli_exclude` | `frozenset[str]` | `frozenset()` | Config fields to skip when generating CLI args | + +### Auto CLI arg generation + +Any field you add to your config subclass automatically becomes a `--name value` CLI arg. Fields from `NativeModuleConfig` itself (like `executable`, `extra_args`, `cwd`) are **not** passed — they're for Python-side orchestration only. + +```python skip + +class LogFormat(enum.Enum): + TEXT = "text" + JSON = "json" + +@dataclass(kw_only=True) +class MyConfig(NativeModuleConfig): + executable: str = "./build/my_module" # relative or absolute path to your executable + host_ip: str = "192.168.1.5" # becomes --host_ip 192.168.1.5 + frequency: float = 10.0 # becomes --frequency 10.0 + enable_imu: bool = True # becomes --enable_imu true + filters: list[str] = field(default_factory=lambda: ["a", "b"]) # becomes --filters a,b +``` + +- `None` values are skipped. +- Booleans are lowercased (`true`/`false`). +- Lists are comma-joined. 
+ +### Excluding fields + +If a config field shouldn't be a CLI arg, add it to `cli_exclude`: + +```python skip +@dataclass(kw_only=True) +class FastLio2Config(NativeModuleConfig): + executable: str = "./build/fastlio2" + config: str = "mid360.yaml" # human-friendly name + config_path: str | None = None # resolved absolute path + cli_exclude: frozenset[str] = frozenset({"config"}) # only config_path is passed + + def __post_init__(self) -> None: + if self.config_path is None: + self.config_path = str(Path(self.config).resolve()) +``` + +## Using with blueprints + +Native modules work with `autoconnect` exactly like Python modules: + +```python skip +from dimos.core.blueprints import autoconnect + +class PointCloudConsumer(Module): + pointcloud: In[PointCloud2] + imu: In[Imu] + +autoconnect( + MyLidar.blueprint(host_ip="192.168.1.10"), + PointCloudConsumer.blueprint(), +).build().loop() +``` + +`autoconnect` matches ports by `(name, type)`, assigns LCM topics, and passes them to the native binary as CLI args. You can override transports as usual: + +```python skip +blueprint = autoconnect( + MyLidar.blueprint(), + PointCloudConsumer.blueprint(), +).transports({ + ("pointcloud", PointCloud2): LCMTransport("/my/custom/lidar", PointCloud2), +}) +``` + +## Logging + +NativeModule pipes subprocess stdout and stderr through structlog: + +- **stdout** is logged at `info` level. +- **stderr** is logged at `warning` level. + +### JSON log format + +If your native binary outputs structured JSON lines, set `log_format=LogFormat.JSON`: + +```python skip +@dataclass(kw_only=True) +class MyConfig(NativeModuleConfig): + executable: str = "./build/my_module" + log_format: LogFormat = LogFormat.JSON +``` + +The module will parse each line as JSON and feed the key-value pairs into structlog. The `event` key becomes the log message: + +```json +{"event": "sensor initialized", "device": "/dev/ttyUSB0", "baud": 115200} +``` + +Malformed lines fall back to plain text logging. 
+ +## Writing the C++ side + +A header-only helper is provided at [`dimos/hardware/sensors/lidar/common/dimos_native_module.hpp`](/dimos/hardware/sensors/lidar/common/dimos_native_module.hpp): + +```cpp +#include "dimos_native_module.hpp" +#include "sensor_msgs/PointCloud2.hpp" + +int main(int argc, char** argv) { + dimos::NativeModule mod(argc, argv); + + // Get the LCM channel for a declared port + std::string pc_topic = mod.topic("pointcloud"); + + // Get config values + float freq = mod.arg_float("frequency", 10.0); + std::string ip = mod.arg("host_ip", "192.168.1.5"); + + // Set up LCM publisher and publish on pc_topic... +} +``` + +The helper provides: + +| Method | Description | +|---------------------------|----------------------------------------------------------------| +| `topic(port)` | Get the full LCM channel string (`/topic#msg_type`) for a port | +| `arg(key, default)` | Get a string config value | +| `arg_float(key, default)` | Get a float config value | +| `arg_int(key, default)` | Get an int config value | +| `has(key)` | Check if a port/arg was provided | + +It also includes `make_header()` and `time_from_seconds()` for building ROS-compatible stamped messages. + +## Examples + +For language interop examples (subscribing to DimOS topics from C++, TypeScript, Lua), see [/examples/language-interop/](/examples/language-interop/README.md). 
+ +### Livox Mid-360 Module + +The Livox Mid-360 LiDAR driver is a complete example at [`dimos/hardware/sensors/lidar/livox/module.py`](/dimos/hardware/sensors/lidar/livox/module.py): + +```python skip +from dimos.core import Out +from dimos.core.native_module import NativeModule, NativeModuleConfig +from dimos.msgs.sensor_msgs.PointCloud2 import PointCloud2 +from dimos.msgs.sensor_msgs.Imu import Imu +from dimos.spec import perception + +@dataclass(kw_only=True) +class Mid360Config(NativeModuleConfig): + cwd: str | None = "cpp" + executable: str = "result/bin/mid360_native" + build_command: str | None = "nix build .#mid360_native" + host_ip: str = "192.168.1.5" + lidar_ip: str = "192.168.1.155" + frequency: float = 10.0 + enable_imu: bool = True + frame_id: str = "lidar_link" + # ... SDK port configuration + +class Mid360(NativeModule, perception.Lidar, perception.IMU): + default_config = Mid360Config + lidar: Out[PointCloud2] + imu: Out[Imu] +``` + +Usage: + +```python skip +from dimos.hardware.sensors.lidar.livox.module import Mid360 + +autoconnect( + Mid360.blueprint(host_ip="192.168.1.5"), + SomeConsumer.blueprint(), +) +``` + +## Auto Building + +If `build_command` is set in the module config, and the executable doesn't exist when `start()` is called, NativeModule runs the build command automatically. +Build output is piped through structlog (stdout at `info`, stderr at `warning`). + +```python skip +@dataclass(kw_only=True) +class MyLidarConfig(NativeModuleConfig): + cwd: str | None = "cpp" + executable: str = "result/bin/my_lidar" + build_command: str | None = "nix build .#my_lidar" +``` + +`cwd` is used for both the build command and the runtime subprocess. Relative paths are resolved against the directory of the Python file that defines the module + +If the executable already exists, the build step is skipped entirely. 
diff --git a/docs/usage/sensor_streams/README.md b/docs/usage/sensor_streams/README.md new file mode 100644 index 0000000000..dc2ce6c91d --- /dev/null +++ b/docs/usage/sensor_streams/README.md @@ -0,0 +1,41 @@ +# Sensor Streams + +Dimos uses reactive streams (RxPY) to handle sensor data. This approach naturally fits robotics where multiple sensors emit data asynchronously at different rates, and downstream processors may be slower than the data sources. + +## Guides + +| Guide | Description | +|----------------------------------------------|---------------------------------------------------------------| +| [ReactiveX Fundamentals](reactivex.md) | Observables, subscriptions, and disposables | +| [Advanced Streams](advanced_streams.md) | Backpressure, parallel subscribers, synchronous getters | +| [Quality-Based Filtering](quality_filter.md) | Select highest quality frames when downsampling streams | +| [Temporal Alignment](temporal_alignment.md) | Match messages from multiple sensors by timestamp | +| [Storage & Replay](storage_replay.md) | Record sensor streams to disk and replay with original timing | + +## Quick Example + +```python +from reactivex import operators as ops +from dimos.utils.reactive import backpressure +from dimos.types.timestamped import align_timestamped +from dimos.msgs.sensor_msgs.Image import sharpness_barrier + +# Camera at 30fps, lidar at 10Hz +camera_stream = camera.observable() +lidar_stream = lidar.observable() + +# Pipeline: filter blurry frames -> align with lidar -> handle slow consumers +processed = ( + camera_stream.pipe( + sharpness_barrier(10.0), # Keep sharpest frame per 100ms window (10Hz) + ) +) + +aligned = align_timestamped( + backpressure(processed), # Camera as primary + lidar_stream, # Lidar as secondary + match_tolerance=0.1, +) + +aligned.subscribe(lambda pair: process_frame_with_pointcloud(*pair)) +``` diff --git a/docs/usage/sensor_streams/advanced_streams.md b/docs/usage/sensor_streams/advanced_streams.md new file 
mode 100644 index 0000000000..187d432af2 --- /dev/null +++ b/docs/usage/sensor_streams/advanced_streams.md @@ -0,0 +1,295 @@ +# Advanced Stream Handling + +> **Prerequisite:** Read [ReactiveX Fundamentals](reactivex.md) first for Observable basics. + +## Backpressure and Parallel Subscribers to Hardware + +In robotics, we deal with hardware that produces data at its own pace - a camera outputs 30fps whether you're ready or not. We can't tell the camera to slow down. And we often have multiple consumers: one module wants every frame for recording, another runs slow ML inference and only needs the latest frame. + +**The problem:** A fast producer can overwhelm a slow consumer, causing memory buildup or dropped frames. We might have multiple subscribers to the same hardware that operate at different speeds. + + +
Pikchr + +```pikchr fold output=assets/backpressure.svg +color = white +fill = none + +Fast: box "Camera" "60 fps" rad 5px fit wid 130% ht 130% +arrow right 0.4in +Queue: box "queue" rad 5px fit wid 170% ht 170% +arrow right 0.4in +Slow: box "ML Model" "2 fps" rad 5px fit wid 130% ht 130% + +text "items pile up!" at (Queue.x, Queue.y - 0.45in) +``` + +
+ + +![output](assets/backpressure.svg) + + +**The solution:** The `backpressure()` wrapper handles this by: + +1. **Sharing the source** - Camera runs once, all subscribers share the stream +2. **Per-subscriber speed** - Fast subscribers get every frame, slow ones get the latest when ready +3. **No blocking** - Slow subscribers never block the source or each other + +```python session=bp +import time +import reactivex as rx +from reactivex import operators as ops +from reactivex.scheduler import ThreadPoolScheduler +from dimos.utils.reactive import backpressure + +# We need this scaffolding here. Normally DimOS handles this. +scheduler = ThreadPoolScheduler(max_workers=4) + +# Simulate fast source +source = rx.interval(0.05).pipe(ops.take(20)) +safe = backpressure(source, scheduler=scheduler) + +fast_results = [] +slow_results = [] + +safe.subscribe(lambda x: fast_results.append(x)) + +def slow_handler(x): + time.sleep(0.15) + slow_results.append(x) + +safe.subscribe(slow_handler) + +time.sleep(1.5) +print(f"fast got {len(fast_results)} items: {fast_results[:5]}...") +print(f"slow got {len(slow_results)} items (skipped {len(fast_results) - len(slow_results)})") +scheduler.executor.shutdown(wait=True) +``` + + +``` +fast got 20 items: [0, 1, 2, 3, 4]... +slow got 7 items (skipped 13) +``` + +### How it works + + +
Pikchr + +```pikchr fold output=assets/backpressure_solution.svg +color = white +fill = none +linewid = 0.3in + +Source: box "Camera" "60 fps" rad 5px fit wid 170% ht 170% +arrow +Core: box "backpressure" rad 5px fit wid 170% ht 170% +arrow from Core.e right 0.3in then up 0.35in then right 0.3in +Fast: box "Fast Sub" rad 5px fit wid 170% ht 170% +arrow from Core.e right 0.3in then down 0.35in then right 0.3in +SlowPre: box "LATEST" rad 5px fit wid 170% ht 170% +arrow +Slow: box "Slow Sub" rad 5px fit wid 170% ht 170% +``` + +
+ + +![output](assets/backpressure_solution.svg) + +The `LATEST` strategy means: when the slow subscriber finishes processing, it gets whatever the most recent value is, skipping any values that arrived while it was busy. + +### Usage in modules + +Most module streams offer backpressured observables. + +```python session=bp +from dimos.core import Module, In +from dimos.msgs.sensor_msgs import Image + +class MLModel(Module): + color_image: In[Image] + def start(self): + # no reactivex, simple callback + self.color_image.subscribe(...) + # backpressured + self.color_image.observable().subscribe(...) + # non-backpressured - will pile up queue + self.color_image.pure_observable().subscribe(...) + + +``` + +## Getting Values Synchronously + +Sometimes you don't want a stream, you just want to call a function and get the latest value. + +If you are doing this periodically as a part of a processing loop, it is very likely that your code will be much cleaner and safer using actual reactivex pipeline. 
So bias towards checking our [reactivex quick guide](reactivex.md) and [official docs](https://rxpy.readthedocs.io/) + +(TODO we should actually make this example actually executable) + +```python skip + self.color_image.observable().pipe( + # takes the best image from a stream every 200ms, + # ensuring we are feeding our detector with highest quality frames + quality_barrier(lambda x: x["quality"], target_frequency=0.2), + + # converts Image into Person detections + ops.map(detect_person), + + # converts Detection2D to Twist pointing in the direction of a detection + ops.map(detection2d_to_twist), + + # emits the latest value every 50ms making our control loop run at 20hz + # despite detections running at 200ms + ops.sample(0.05), + ).subscribe(self.twist.publish) # shoots off the Twist out of the module +``` + + +If you'd still like to switch to synchronous fetching, we provide two approaches, `getter_hot()` and `getter_cold()` + +| | `getter_hot()` | `getter_cold()` | +|------------------|--------------------------------|----------------------------------| +| **Subscription** | Stays active in background | Fresh subscription each call | +| **Read speed** | Instant (value already cached) | Slower (waits for value) | +| **Resources** | Keeps connection open | Opens/closes each call | +| **Use when** | Frequent reads, need latest | Occasional reads, save resources | + +
+diagram source + +```pikchr fold output=assets/getter_hot_cold.svg +color = white +fill = none + +H_Title: box "getter_hot()" rad 5px fit wid 170% ht 170% + +Sub: box "subscribe" rad 5px fit wid 170% ht 170% with .n at H_Title.s + (0, -0.5in) +arrow from H_Title.s to Sub.n +arrow right from Sub.e +Cache: box "Cache" rad 5px fit wid 170% ht 170% + +# blocking box around subscribe->cache (one-time setup) +Blk0: box dashed color 0x5c9ff0 with .nw at Sub.nw + (-0.1in, 0.25in) wid (Cache.e.x - Sub.w.x + 0.2in) ht 0.7in rad 5px +text "blocking" italic with .n at Blk0.n + (0, -0.05in) + +arrow right from Cache.e +Getter: box "getter" rad 5px fit wid 170% ht 170% + +arrow from Getter.e right 0.3in then down 0.25in then right 0.2in +G1: box invis "call()" color 0x8cbdf2 fit wid 150% +arrow right 0.4in from G1.e +box invis "instant" fit wid 150% + +arrow from Getter.e right 0.3in then down 0.7in then right 0.2in +G2: box invis "call()" color 0x8cbdf2 fit wid 150% +arrow right 0.4in from G2.e +box invis "instant" fit wid 150% + +text "always subscribed" italic with .n at Blk0.s + (0, -0.1in) + + +# === getter_cold section === +C_Title: box "getter_cold()" rad 5px fit wid 170% ht 170% with .nw at H_Title.sw + (0, -1.6in) + +arrow down 0.3in from C_Title.s +ColdGetter: box "getter" rad 5px fit wid 170% ht 170% + +# Branch to first call +arrow from ColdGetter.e right 0.3in then down 0.3in then right 0.2in +Cold1: box invis "call()" color 0x8cbdf2 fit wid 150% +arrow right 0.4in from Cold1.e +Sub1: box invis "subscribe" fit wid 150% +arrow right 0.4in from Sub1.e +Wait1: box invis "wait" fit wid 150% +arrow right 0.4in from Wait1.e +Val1: box invis "value" fit wid 150% +arrow right 0.4in from Val1.e +Disp1: box invis "dispose " fit wid 150% + +# blocking box around first row +Blk1: box dashed color 0x5c9ff0 with .nw at Cold1.nw + (-0.1in, 0.25in) wid (Disp1.e.x - Cold1.w.x + 0.2in) ht 0.7in rad 5px +text "blocking" italic with .n at Blk1.n + (0, -0.05in) + +# Branch to second 
call +arrow from ColdGetter.e right 0.3in then down 1.2in then right 0.2in +Cold2: box invis "call()" color 0x8cbdf2 fit wid 150% +arrow right 0.4in from Cold2.e +Sub2: box invis "subscribe" fit wid 150% +arrow right 0.4in from Sub2.e +Wait2: box invis "wait" fit wid 150% +arrow right 0.4in from Wait2.e +Val2: box invis "value" fit wid 150% +arrow right 0.4in from Val2.e +Disp2: box invis "dispose " fit wid 150% + +# blocking box around second row +Blk2: box dashed color 0x5c9ff0 with .nw at Cold2.nw + (-0.1in, 0.25in) wid (Disp2.e.x - Cold2.w.x + 0.2in) ht 0.7in rad 5px +text "blocking" italic with .n at Blk2.n + (0, -0.05in) +``` + +
+ + +![output](assets/getter_hot_cold.svg) + + +**Prefer `getter_cold()`** when you can afford to wait and warmup isn't expensive. It's simpler (no cleanup needed) and doesn't hold resources. Only use `getter_hot()` when you need instant reads or the source is expensive to start. + +### `getter_hot()` - Background subscription, instant reads + +Subscribes immediately and keeps updating in the background. Each call returns the cached latest value instantly. + +```python session=sync +import time +import reactivex as rx +from reactivex import operators as ops +from dimos.utils.reactive import getter_hot + +source = rx.interval(0.1).pipe(ops.take(10)) + +get_val = getter_hot(source, timeout=5.0) # blocks until first message, with 5s timeout +# alternatively not to block (but get_val() might return None) +# get_val = getter_hot(source, nonblocking=True) + +print("first call:", get_val()) # instant - value already there +time.sleep(0.35) +print("after 350ms:", get_val()) # instant - returns cached latest +time.sleep(0.35) +print("after 700ms:", get_val()) + +get_val.dispose() # Don't forget to clean up! +``` + + +``` +first call: 0 +after 350ms: 3 +after 700ms: 6 +``` + +### `getter_cold()` - Fresh subscription each call + +Each call creates a new subscription, waits for one value, and cleans up. 
Slower but doesn't hold resources: + +```python session=sync +from dimos.utils.reactive import getter_cold + +source = rx.of(0, 1, 2, 3, 4) +get_val = getter_cold(source, timeout=5.0) + +# Each call creates fresh subscription, gets first value +print("call 1:", get_val()) # subscribes, gets 0, disposes +print("call 2:", get_val()) # subscribes again, gets 0, disposes +print("call 3:", get_val()) # subscribes again, gets 0, disposes +``` + + +``` +call 1: 0 +call 2: 0 +call 3: 0 +``` diff --git a/docs/usage/sensor_streams/assets/alignment_flow.svg b/docs/usage/sensor_streams/assets/alignment_flow.svg new file mode 100644 index 0000000000..72aeb337f3 --- /dev/null +++ b/docs/usage/sensor_streams/assets/alignment_flow.svg @@ -0,0 +1,22 @@ + + +Primary +arrives + + + +Check +secondaries + + + +Emit +match +all found + + + +Buffer +primary +waiting... + diff --git a/docs/usage/sensor_streams/assets/alignment_overview.svg b/docs/usage/sensor_streams/assets/alignment_overview.svg new file mode 100644 index 0000000000..8abada6d02 --- /dev/null +++ b/docs/usage/sensor_streams/assets/alignment_overview.svg @@ -0,0 +1,18 @@ + + +Camera +30 fps + + + +align_timestamped + +Lidar +10 Hz + + + + + +(image, pointcloud) + diff --git a/docs/usage/sensor_streams/assets/alignment_timeline.png b/docs/usage/sensor_streams/assets/alignment_timeline.png new file mode 100644 index 0000000000..235ddd7be0 --- /dev/null +++ b/docs/usage/sensor_streams/assets/alignment_timeline.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cfea5a6aac40182b25decb9ddaeb387ed97a7708e2c51a48f47453c8df7adf57 +size 16136 diff --git a/docs/usage/sensor_streams/assets/alignment_timeline2.png b/docs/usage/sensor_streams/assets/alignment_timeline2.png new file mode 100644 index 0000000000..2bf8ec5eef --- /dev/null +++ b/docs/usage/sensor_streams/assets/alignment_timeline2.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:22b64923637d05f8f40c9f7c0f0597ee894dc4f31a0f10674aeb809101b54765 +size 23471 diff --git a/docs/usage/sensor_streams/assets/alignment_timeline3.png b/docs/usage/sensor_streams/assets/alignment_timeline3.png new file mode 100644 index 0000000000..61ddc3b54b --- /dev/null +++ b/docs/usage/sensor_streams/assets/alignment_timeline3.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b8e9589dcd5308f511a2ec7d41bd36978204ccfe1441907bd139029b0489d605 +size 9969 diff --git a/docs/usage/sensor_streams/assets/backpressure.svg b/docs/usage/sensor_streams/assets/backpressure.svg new file mode 100644 index 0000000000..b3d69af6fb --- /dev/null +++ b/docs/usage/sensor_streams/assets/backpressure.svg @@ -0,0 +1,15 @@ + + +Camera +60 fps + + + +queue + + + +ML Model +2 fps +items pile up! + diff --git a/docs/usage/sensor_streams/assets/backpressure_solution.svg b/docs/usage/sensor_streams/assets/backpressure_solution.svg new file mode 100644 index 0000000000..454a8f460b --- /dev/null +++ b/docs/usage/sensor_streams/assets/backpressure_solution.svg @@ -0,0 +1,21 @@ + + +Camera +60 fps + + + +backpressure + + + +Fast Sub + + + +LATEST + + + +Slow Sub + diff --git a/docs/usage/sensor_streams/assets/frame_mosaic.jpg b/docs/usage/sensor_streams/assets/frame_mosaic.jpg new file mode 100644 index 0000000000..5c3fbf8350 --- /dev/null +++ b/docs/usage/sensor_streams/assets/frame_mosaic.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e83934e1179651fbca6c9b62cceb7425d1b2f0e8da18a63d4d95bcb4e6ac33ca +size 88206 diff --git a/docs/usage/sensor_streams/assets/frame_mosaic2.jpg b/docs/usage/sensor_streams/assets/frame_mosaic2.jpg new file mode 100644 index 0000000000..5e3032acf2 --- /dev/null +++ b/docs/usage/sensor_streams/assets/frame_mosaic2.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d73f683e92fda39bac9d1bb840f1fc375c821b4099714829e81f3e739f4d602 +size 91036 diff --git 
a/docs/usage/sensor_streams/assets/getter_hot_cold.svg b/docs/usage/sensor_streams/assets/getter_hot_cold.svg new file mode 100644 index 0000000000..d2f336459c --- /dev/null +++ b/docs/usage/sensor_streams/assets/getter_hot_cold.svg @@ -0,0 +1,71 @@ + + +getter_hot() + +subscribe + + + + + +Cache + +blocking + + + +getter + + +call() + + +instant + + +call() + + +instant +always subscribed + +getter_cold() + + + +getter + + +call() + + +subscribe + + +wait + + +value + + +dispose   + +blocking + + +call() + + +subscribe + + +wait + + +value + + +dispose   + +blocking + diff --git a/docs/usage/sensor_streams/assets/observable_flow.svg b/docs/usage/sensor_streams/assets/observable_flow.svg new file mode 100644 index 0000000000..d7e0e021d6 --- /dev/null +++ b/docs/usage/sensor_streams/assets/observable_flow.svg @@ -0,0 +1,16 @@ + + +observable + + + +.pipe(ops) + + + +.subscribe() + + + +callback + diff --git a/docs/usage/sensor_streams/assets/sharpness_graph.svg b/docs/usage/sensor_streams/assets/sharpness_graph.svg new file mode 100644 index 0000000000..3d61d12d7c --- /dev/null +++ b/docs/usage/sensor_streams/assets/sharpness_graph.svg @@ -0,0 +1,1414 @@ + + + + + + + + 1980-01-01T00:00:00+00:00 + image/svg+xml + + + Matplotlib v3.10.8, https://matplotlib.org/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/usage/sensor_streams/assets/sharpness_graph2.svg b/docs/usage/sensor_streams/assets/sharpness_graph2.svg new file mode 100644 index 0000000000..37c1032de0 --- /dev/null +++ b/docs/usage/sensor_streams/assets/sharpness_graph2.svg @@ -0,0 +1,1429 @@ + + + + + + + + 1980-01-01T00:00:00+00:00 + image/svg+xml + + + Matplotlib v3.10.8, https://matplotlib.org/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/usage/sensor_streams/quality_filter.md b/docs/usage/sensor_streams/quality_filter.md new file mode 100644 index 0000000000..db21da9c54 --- /dev/null +++ b/docs/usage/sensor_streams/quality_filter.md @@ -0,0 +1,316 @@ +# Quality-Based Stream Filtering + +When processing sensor streams, you often want to reduce frequency while keeping the best quality data. For discrete data like images that can't be averaged or merged, instead of blindly dropping frames, `quality_barrier` selects the highest quality item within each time window. + +## The Problem + +A camera outputs 30fps, but your ML model only needs 2fps. Simple approaches: + +- **`sample(0.5)`** - Takes whatever frame happens to land on the interval tick +- **`throttle_first(0.5)`** - Takes the first frame, ignores the rest + +Both ignore quality. You might get a blurry frame when a sharp one was available. 
+ +## The Solution: `quality_barrier` + +```python session=qb +import reactivex as rx +from reactivex import operators as ops +from dimos.utils.reactive import quality_barrier + +# Simulated sensor data with quality scores +data = [ + {"id": 1, "quality": 0.3}, + {"id": 2, "quality": 0.9}, # best in first window + {"id": 3, "quality": 0.5}, + {"id": 4, "quality": 0.2}, + {"id": 5, "quality": 0.8}, # best in second window + {"id": 6, "quality": 0.4}, +] + +source = rx.of(*data) + +# Select best quality item per window (2 items per second = 0.5s windows) +result = source.pipe( + quality_barrier(lambda x: x["quality"], target_frequency=2.0), + ops.to_list(), +).run() + +print("Selected:", [r["id"] for r in result]) +print("Qualities:", [r["quality"] for r in result]) +``` + + +``` +Selected: [2] +Qualities: [0.9] +``` + +## Image Sharpness Filtering + +For camera streams, we provide `sharpness_barrier` which uses the image's sharpness score. + +Let's use real camera data from the Unitree Go2 robot to demonstrate. 
We use the [Sensor Storage & Replay](/docs/usage/sensor_streams/storage_replay.md) toolkit, which provides access to recorded robot data: + +```python session=qb +from dimos.utils.testing import TimedSensorReplay +from dimos.msgs.sensor_msgs.Image import Image, sharpness_barrier + +# Load recorded Go2 camera frames +video_replay = TimedSensorReplay("unitree_go2_bigoffice/video") + +# Use stream() with seek to skip blank frames, speed=10x to collect faster +input_frames = video_replay.stream(seek=5.0, duration=1.4, speed=10.0).pipe( + ops.to_list() +).run() + +def show_frames(frames): + for i, frame in enumerate(frames[:10]): + print(f" Frame {i}: {frame.sharpness:.3f}") + +print(f"Loaded {len(input_frames)} frames from Go2 camera") +print(f"Frame resolution: {input_frames[0].width}x{input_frames[0].height}") +print("Sharpness scores:") +show_frames(input_frames) +``` + + +``` +Loaded 20 frames from Go2 camera +Frame resolution: 1280x720 +Sharpness scores: + Frame 0: 0.351 + Frame 1: 0.227 + Frame 2: 0.223 + Frame 3: 0.267 + Frame 4: 0.295 + Frame 5: 0.307 + Frame 6: 0.328 + Frame 7: 0.348 + Frame 8: 0.346 + Frame 9: 0.322 +``` + +Using `sharpness_barrier` to select the sharpest frames: + +```python session=qb +# Create a stream from the recorded frames + +sharp_frames = video_replay.stream(seek=5.0, duration=1.5, speed=1.0).pipe( + sharpness_barrier(2.0), + ops.to_list() +).run() + +print(f"Output: {len(sharp_frames)} frame(s) (selected sharpest per window)") +show_frames(sharp_frames) +``` + + +``` +Output: 3 frame(s) (selected sharpest per window) + Frame 0: 0.351 + Frame 1: 0.352 + Frame 2: 0.360 +``` + +
+Visualization helpers + +```python session=qb fold no-result +import matplotlib +import matplotlib.pyplot as plt +import math + +def plot_mosaic(frames, selected, path, cols=5): + matplotlib.use('Agg') + rows = math.ceil(len(frames) / cols) + aspect = frames[0].width / frames[0].height + fig_w, fig_h = 12, 12 * rows / (cols * aspect) + + fig, axes = plt.subplots(rows, cols, figsize=(fig_w, fig_h)) + fig.patch.set_facecolor('black') + for i, ax in enumerate(axes.flat): + if i < len(frames): + ax.imshow(frames[i].data) + for spine in ax.spines.values(): + spine.set_color('lime' if frames[i] in selected else 'black') + spine.set_linewidth(4 if frames[i] in selected else 0) + ax.set_xticks([]); ax.set_yticks([]) + else: + ax.axis('off') + plt.subplots_adjust(wspace=0.02, hspace=0.02, left=0, right=1, top=1, bottom=0) + plt.savefig(path, facecolor='black', dpi=100, bbox_inches='tight', pad_inches=0) + plt.close() + +def plot_sharpness(frames, selected, path): + matplotlib.use('svg') + plt.style.use('dark_background') + sharpness = [f.sharpness for f in frames] + selected_idx = [i for i, f in enumerate(frames) if f in selected] + + plt.figure(figsize=(10, 3)) + plt.plot(sharpness, 'o-', label='All frames', color='#b5e4f4', alpha=0.7) + for i, idx in enumerate(selected_idx): + plt.axvline(x=idx, color='lime', linestyle='--', label='Selected' if i == 0 else None) + plt.xlabel('Frame'); plt.ylabel('Sharpness') + plt.xticks(range(len(sharpness))) + plt.legend(); plt.grid(alpha=0.3); plt.tight_layout() + plt.savefig(path, transparent=True) + plt.close() +``` + +
+ +Visualizing which frames were selected (green border = selected as sharpest in window): + +```python session=qb output=assets/frame_mosaic.jpg +plot_mosaic(input_frames, sharp_frames, '{output}') +``` + + +![output](assets/frame_mosaic.jpg) + +```python session=qb output=assets/sharpness_graph.svg +plot_sharpness(input_frames, sharp_frames, '{output}') +``` + + +![output](assets/sharpness_graph.svg) + +Let's request a higher frequency. + +```python session=qb +sharp_frames = video_replay.stream(seek=5.0, duration=1.5, speed=1.0).pipe( + sharpness_barrier(4.0), + ops.to_list() +).run() + +print(f"Output: {len(sharp_frames)} frame(s) (selected sharpest per window)") +show_frames(sharp_frames) +``` + + +``` +Output: 6 frame(s) (selected sharpest per window) + Frame 0: 0.351 + Frame 1: 0.348 + Frame 2: 0.346 + Frame 3: 0.352 + Frame 4: 0.360 + Frame 5: 0.329 +``` + +```python session=qb output=assets/frame_mosaic2.jpg +plot_mosaic(input_frames, sharp_frames, '{output}') +``` + + +![output](assets/frame_mosaic2.jpg) + + +```python session=qb output=assets/sharpness_graph2.svg +plot_sharpness(input_frames, sharp_frames, '{output}') +``` + + +![output](assets/sharpness_graph2.svg) + +As we can see the system is trying to strike a balance between requested frequency and quality that's available + +### Usage in Camera Module + +Here's how it's used in the actual camera module: + +```python skip +from dimos.core.module import Module + +class CameraModule(Module): + frequency: float = 2.0 # Target output frequency + @rpc + def start(self) -> None: + stream = self.hardware.image_stream() + + if self.config.frequency > 0: + stream = stream.pipe(sharpness_barrier(self.config.frequency)) + + self._disposables.add( + stream.subscribe(self.color_image.publish), + ) + +``` + +### How Sharpness is Calculated + +The sharpness score (0.0 to 1.0) is computed using Sobel edge detection: + +from [`Image.py`](/dimos/msgs/sensor_msgs/Image.py) + +```python session=qb +import cv2 + +# Get 
a frame and show the calculation +img = input_frames[10] +gray = img.to_grayscale() + +# Sobel gradients - use .data to get the underlying numpy array +sx = cv2.Sobel(gray.data, cv2.CV_32F, 1, 0, ksize=5) +sy = cv2.Sobel(gray.data, cv2.CV_32F, 0, 1, ksize=5) +magnitude = cv2.magnitude(sx, sy) + +print(f"Mean gradient magnitude: {magnitude.mean():.2f}") +print(f"Normalized sharpness: {img.sharpness:.3f}") +``` + + +``` +Mean gradient magnitude: 230.00 +Normalized sharpness: 0.332 +``` + +## Custom Quality Functions + +You can use `quality_barrier` with any quality metric: + +```python session=qb +# Example: select by "confidence" field +detections = [ + {"name": "cat", "confidence": 0.7}, + {"name": "dog", "confidence": 0.95}, # best + {"name": "bird", "confidence": 0.6}, +] + +result = rx.of(*detections).pipe( + quality_barrier(lambda d: d["confidence"], target_frequency=2.0), + ops.to_list(), +).run() + +print(f"Selected: {result[0]['name']} (conf: {result[0]['confidence']})") +``` + + +``` +Selected: dog (conf: 0.95) +``` + +## API Reference + +### `quality_barrier(quality_func, target_frequency)` + +RxPY pipe operator that selects the highest quality item within each time window. + +| Parameter | Type | Description | +|--------------------|------------------------|------------------------------------------------------| +| `quality_func` | `Callable[[T], float]` | Function that returns a quality score for each item | +| `target_frequency` | `float` | Output frequency in Hz (e.g., 2.0 for 2 items/second)| + +**Returns:** A pipe operator for use with `.pipe()` + +### `sharpness_barrier(target_frequency)` + +Convenience wrapper for images that uses `image.sharpness` as the quality function. 
+ +| Parameter | Type | Description | +|--------------------|---------|--------------------------| +| `target_frequency` | `float` | Output frequency in Hz | + +**Returns:** A pipe operator for use with `.pipe()` diff --git a/docs/usage/sensor_streams/reactivex.md b/docs/usage/sensor_streams/reactivex.md new file mode 100644 index 0000000000..45873b471b --- /dev/null +++ b/docs/usage/sensor_streams/reactivex.md @@ -0,0 +1,494 @@ +# ReactiveX (RxPY) Quick Reference + +RxPY provides composable asynchronous data streams. This is a practical guide focused on common patterns in this codebase. + +## Quick Start: Using an Observable + +Given a function that returns an `Observable`, here's how to use it: + +```python session=rx +import reactivex as rx +from reactivex import operators as ops + +# Create an observable that emits 0,1,2,3,4 +source = rx.of(0, 1, 2, 3, 4) + +# Subscribe and print each value +received = [] +source.subscribe(lambda x: received.append(x)) +print("received:", received) +``` + + +``` +received: [0, 1, 2, 3, 4] +``` + +## The `.pipe()` Pattern + +Chain operators using `.pipe()`: + +```python session=rx +# Transform values: multiply by 2, then filter > 4 +result = [] + +# We build another observable. It's passive until `subscribe` is called. 
+observable = source.pipe( + ops.map(lambda x: x * 2), + ops.filter(lambda x: x > 4), +) + +observable.subscribe(lambda x: result.append(x)) + +print("transformed:", result) +``` + + +``` +transformed: [6, 8] +``` + +## Common Operators + +### Transform: `map` + +```python session=rx +rx.of(1, 2, 3).pipe( + ops.map(lambda x: f"item_{x}") +).subscribe(print) +``` + + +``` +item_1 +item_2 +item_3 + +``` + +### Filter: `filter` + +```python session=rx +rx.of(1, 2, 3, 4, 5).pipe( + ops.filter(lambda x: x % 2 == 0) +).subscribe(print) +``` + + +``` +2 +4 + +``` + +### Limit emissions: `take` + +```python session=rx +rx.of(1, 2, 3, 4, 5).pipe( + ops.take(3) +).subscribe(print) +``` + + +``` +1 +2 +3 + +``` + +### Flatten nested observables: `flat_map` + +```python session=rx +# For each input, emit multiple values +rx.of(1, 2).pipe( + ops.flat_map(lambda x: rx.of(x, x * 10, x * 100)) +).subscribe(print) +``` + + +``` +1 +10 +100 +2 +20 +200 + +``` + +## Rate Limiting + +### `sample(interval)` - Emit latest value every N seconds + +Takes the most recent value at each interval. Good for continuous streams where you want the freshest data. + +```python session=rx +# Use blocking .run() to collect results properly +results = rx.interval(0.05).pipe( + ops.take(10), + ops.sample(0.2), + ops.to_list(), +).run() +print("sample() got:", results) +``` + + +``` +sample() got: [2, 6, 9] +``` + +### `throttle_first(interval)` - Emit first, then block for N seconds + +Takes the first value then ignores subsequent values for the interval. Good for user input debouncing. 
+ +```python session=rx +results = rx.interval(0.05).pipe( + ops.take(10), + ops.throttle_first(0.15), + ops.to_list(), +).run() +print("throttle_first() got:", results) +``` + + +``` +throttle_first() got: [0, 3, 6, 9] +``` + +### Difference Between `sample` and `throttle_first` + +```python session=rx +# sample: takes LATEST value at each interval tick +# throttle_first: takes FIRST value then blocks + +# With fast emissions (0,1,2,3,4,5,6,7,8,9) every 50ms: +# sample(0.2s) -> gets value at 200ms, 400ms marks -> [2, 6, 9] +# throttle_first(0.15s) -> gets 0, blocks, then 3, blocks, then 6... -> [0,3,6,9] +print("sample: latest value at each tick") +print("throttle_first: first value, then block") +``` + + +``` +sample: latest value at each tick +throttle_first: first value, then block +``` + + +## What is an Observable? + +An Observable is like a list, but instead of holding all values at once, it produces values over time. + +| | List | Iterator | Observable | +|-------------|-----------------------|-----------------------|------------------| +| **Values** | All exist now | Generated on demand | Arrive over time | +| **Control** | You pull (`for x in`) | You pull (`next()`) | Pushed to you | +| **Size** | Finite | Can be infinite | Can be infinite | +| **Async** | No | Yes (with asyncio) | Yes | +| **Cancel** | N/A | Stop calling `next()` | `.dispose()` | + +The key difference from iterators: with an Observable, **you don't control when values arrive**. A camera produces frames at 30fps whether you're ready or not. An iterator waits for you to call `next()`. + +**Observables are lazy.** An Observable is just a description of work to be done - it sits there doing nothing until you call `.subscribe()`. That's when it "wakes up" and starts producing values. + +This means you can build complex pipelines, pass them around, and nothing happens until someone subscribes. + +**The three things an Observable can tell you:** + +1. 
**"Here's a value"** (`on_next`) - A new value arrived +2. **"Something went wrong"** (`on_error`) - An error occurred, stream stops +3. **"I'm done"** (`on_completed`) - No more values coming + +**The basic pattern:** + +``` +observable.subscribe(what_to_do_with_each_value) +``` + +That's it. You create or receive an Observable, then subscribe to start receiving values. + +When you subscribe, data flows through a pipeline: + +
+diagram source + +```pikchr fold output=assets/observable_flow.svg +color = white +fill = none + +Obs: box "observable" rad 5px fit wid 170% ht 170% +arrow right 0.3in +Pipe: box ".pipe(ops)" rad 5px fit wid 170% ht 170% +arrow right 0.3in +Sub: box ".subscribe()" rad 5px fit wid 170% ht 170% +arrow right 0.3in +Handler: box "callback" rad 5px fit wid 170% ht 170% +``` + +
+ + +![output](assets/observable_flow.svg) + + +**Key property: Observables are lazy.** Nothing happens until you call `.subscribe()`. This means you can build up complex pipelines without any work being done, then start the flow when ready. + +Here's the full subscribe signature with all three callbacks: + +```python session=rx +rx.of(1, 2, 3).subscribe( + on_next=lambda x: print(f"value: {x}"), + on_error=lambda e: print(f"error: {e}"), + on_completed=lambda: print("done") +) +``` + + +``` +value: 1 +value: 2 +value: 3 +done + +``` + +## Disposables: Cancelling Subscriptions + +When you subscribe, you get back a `Disposable`. This is your "cancel button": + +```python session=rx +import reactivex as rx + +source = rx.interval(0.1) # emits 0, 1, 2, ... every 100ms forever +subscription = source.subscribe(lambda x: print(x)) + +# Later, when you're done: +subscription.dispose() # Stop receiving values, clean up resources +print("disposed") +``` + + +``` +disposed +``` + +**Why does this matter?** + +- Observables can be infinite (sensor feeds, websockets, timers) +- Without disposing, you leak memory and keep processing values forever +- Disposing also cleans up any resources the Observable opened (connections, file handles, etc.) + +**Rule of thumb:** Whenever you subscribe, save the disposable because you have to unsubscribe at some point by calling `disposable.dispose()`. 
+ +**In dimos modules:** Every `Module` has a `self._disposables` (a `CompositeDisposable`) that automatically disposes everything when the module closes: + +```python session=rx +import time +from dimos.core import Module + +class MyModule(Module): + def start(self): + source = rx.interval(0.05) + self._disposables.add(source.subscribe(lambda x: print(f"got {x}"))) + +module = MyModule() +module.start() +time.sleep(0.25) + +# unsubscribes disposables +module.stop() +``` + + +``` +got 0 +got 1 +got 2 +got 3 +got 4 +``` + +## Creating Observables + +There are two common callback patterns in APIs. Use the appropriate helper: + +| Pattern | Example | Helper | +|---------|---------|--------| +| Register/unregister with same callback | `sensor.register(cb)` / `sensor.unregister(cb)` | `callback_to_observable` | +| Subscribe returns unsub function | `unsub = pubsub.subscribe(cb)` | `to_observable` | + +### From register/unregister APIs + +Use `callback_to_observable` when the API has separate register and unregister functions that take the same callback reference: + +```python session=create +import reactivex as rx +from reactivex import operators as ops +from dimos.utils.reactive import callback_to_observable + +class MockSensor: + def __init__(self): + self._callbacks = [] + def register(self, cb): + self._callbacks.append(cb) + def unregister(self, cb): + self._callbacks.remove(cb) + def emit(self, value): + for cb in self._callbacks: + cb(value) + +sensor = MockSensor() + +obs = callback_to_observable( + start=sensor.register, + stop=sensor.unregister +) + +received = [] +sub = obs.subscribe(lambda x: received.append(x)) + +sensor.emit("reading_1") +sensor.emit("reading_2") +print("received:", received) + +sub.dispose() +print("callbacks after dispose:", len(sensor._callbacks)) +``` + + +``` +received: ['reading_1', 'reading_2'] +callbacks after dispose: 0 +``` + +### From subscribe-returns-unsub APIs + +Use `to_observable` when the subscribe function returns an 
unsubscribe callable: + +```python session=create +from dimos.utils.reactive import to_observable + +class MockPubSub: + def __init__(self): + self._callbacks = [] + def subscribe(self, cb): + self._callbacks.append(cb) + return lambda: self._callbacks.remove(cb) # returns unsub function + def publish(self, value): + for cb in self._callbacks: + cb(value) + +pubsub = MockPubSub() + +obs = to_observable(pubsub.subscribe) + +received = [] +sub = obs.subscribe(lambda x: received.append(x)) + +pubsub.publish("msg_1") +pubsub.publish("msg_2") +print("received:", received) + +sub.dispose() +print("callbacks after dispose:", len(pubsub._callbacks)) +``` + + +``` +received: ['msg_1', 'msg_2'] +callbacks after dispose: 0 +``` + +### From scratch with `rx.create` + +```python session=create +from reactivex.disposable import Disposable + +def custom_subscribe(observer, scheduler=None): + observer.on_next("first") + observer.on_next("second") + observer.on_completed() + return Disposable(lambda: print("cleaned up")) + +obs = rx.create(custom_subscribe) + +results = [] +obs.subscribe( + on_next=lambda x: results.append(x), + on_completed=lambda: results.append("DONE") +) +print("results:", results) +``` + + +``` +cleaned up +results: ['first', 'second', 'DONE'] +``` + +## CompositeDisposable + +As we know we can always dispose subscriptions when done to prevent leaks: + +```python session=dispose +import time +import reactivex as rx +from reactivex import operators as ops + +source = rx.interval(0.1).pipe(ops.take(100)) +received = [] + +subscription = source.subscribe(lambda x: received.append(x)) +time.sleep(0.25) +subscription.dispose() +time.sleep(0.2) + +print(f"received {len(received)} items before dispose") +``` + + +``` +received 2 items before dispose +``` + +For multiple subscriptions, use `CompositeDisposable`: + +```python session=dispose +from reactivex.disposable import CompositeDisposable + +disposables = CompositeDisposable() + +s1 = 
rx.of(1,2,3).subscribe(lambda x: None) +s2 = rx.of(4,5,6).subscribe(lambda x: None) + +disposables.add(s1) +disposables.add(s2) + +print("subscriptions:", len(disposables)) +disposables.dispose() +print("after dispose:", disposables.is_disposed) +``` + + +``` +subscriptions: 2 +after dispose: True +``` + +## Reference + +| Operator | Purpose | Example | +|-----------------------|------------------------------------------|---------------------------------------| +| `map(fn)` | Transform each value | `ops.map(lambda x: x * 2)` | +| `filter(pred)` | Keep values matching predicate | `ops.filter(lambda x: x > 0)` | +| `take(n)` | Take first n values | `ops.take(10)` | +| `first()` | Take first value only | `ops.first()` | +| `sample(sec)` | Emit latest every interval | `ops.sample(0.5)` | +| `throttle_first(sec)` | Emit first, block for interval | `ops.throttle_first(0.5)` | +| `flat_map(fn)` | Map + flatten nested observables | `ops.flat_map(lambda x: rx.of(x, x))` | +| `observe_on(sched)` | Switch scheduler | `ops.observe_on(pool_scheduler)` | +| `replay(n)` | Cache last n values for late subscribers | `ops.replay(buffer_size=1)` | +| `timeout(sec)` | Error if no value within timeout | `ops.timeout(5.0)` | + +See [RxPY documentation](https://rxpy.readthedocs.io/) for complete operator reference. diff --git a/docs/usage/sensor_streams/storage_replay.md b/docs/usage/sensor_streams/storage_replay.md new file mode 100644 index 0000000000..c5cbe306a8 --- /dev/null +++ b/docs/usage/sensor_streams/storage_replay.md @@ -0,0 +1,231 @@ +# Sensor Storage and Replay + +Record sensor streams to disk and replay them with original timing. Useful for testing, debugging, and creating reproducible datasets. 
+ +## Quick Start + +### Recording + +```python skip +from dimos.utils.testing.replay import TimedSensorStorage + +# Create storage (directory in data folder) +storage = TimedSensorStorage("my_recording") + +# Save frames from a stream +camera_stream.subscribe(storage.save_one) + +# Or save manually +storage.save(frame1, frame2, frame3) +``` + +### Replaying + +```python skip +from dimos.utils.testing.replay import TimedSensorReplay + +# Load recording +replay = TimedSensorReplay("my_recording") + +# Iterate at original speed +for frame in replay.iterate_realtime(): + process(frame) + +# Or as an Observable stream +replay.stream(speed=1.0).subscribe(process) +``` + +## TimedSensorStorage + +Stores sensor data with timestamps as pickle files. Each frame is saved as `000.pickle`, `001.pickle`, etc. + +```python skip +from dimos.utils.testing.replay import TimedSensorStorage + +storage = TimedSensorStorage("lidar_capture") + +# Save individual frames +storage.save_one(lidar_msg) # Returns frame count + +# Save multiple frames +storage.save(frame1, frame2, frame3) + +# Subscribe to a stream +lidar_stream.subscribe(storage.save_one) + +# Or pipe through (emits frame count) +lidar_stream.pipe( + ops.flat_map(storage.save_stream) +).subscribe() +``` + +**Storage location:** Files are saved to the data directory under the given name. The directory must not already contain pickle files (prevents accidental overwrites). + +**What gets stored:** By default, if a frame has a `.raw_msg` attribute, that's pickled instead of the full object. You can customize with the `autocast` parameter: + +```python skip +# Custom serialization +storage = TimedSensorStorage( + "custom_capture", + autocast=lambda frame: frame.to_dict() +) +``` + +## TimedSensorReplay + +Replays stored sensor data with timestamp-aware iteration and seeking. 
+ +### Basic Iteration + +```python skip +from dimos.utils.testing.replay import TimedSensorReplay + +replay = TimedSensorReplay("lidar_capture") + +# Iterate all frames (ignores timing) +for frame in replay.iterate(): + process(frame) + +# Iterate with timestamps +for ts, frame in replay.iterate_ts(): + print(f"Frame at {ts}: {frame}") + +# Iterate with relative timestamps (from start) +for relative_ts, frame in replay.iterate_duration(): + print(f"At {relative_ts:.2f}s: {frame}") +``` + +### Realtime Playback + +```python skip +# Play at original speed (blocks between frames) +for frame in replay.iterate_realtime(): + process(frame) + +# Play at 2x speed +for frame in replay.iterate_realtime(speed=2.0): + process(frame) + +# Play at half speed +for frame in replay.iterate_realtime(speed=0.5): + process(frame) +``` + +### Seeking and Slicing + +```python skip +# Start 10 seconds into the recording +for ts, frame in replay.iterate_ts(seek=10.0): + process(frame) + +# Play only 5 seconds starting at 10s +for ts, frame in replay.iterate_ts(seek=10.0, duration=5.0): + process(frame) + +# Loop forever +for frame in replay.iterate(loop=True): + process(frame) +``` + +### Finding Specific Frames + +```python skip +# Find frame closest to absolute timestamp +frame = replay.find_closest(1704067200.0) + +# Find frame closest to relative time (30s from start) +frame = replay.find_closest_seek(30.0) + +# With tolerance (returns None if no match within 0.1s) +frame = replay.find_closest(timestamp, tolerance=0.1) +``` + +### Observable Stream + +The `.stream()` method returns an Observable that emits frames with original timing: + +```python skip +# Stream at original speed +replay.stream(speed=1.0).subscribe(process) + +# Stream at 2x with seeking +replay.stream( + speed=2.0, + seek=10.0, # Start 10s in + duration=30.0, # Play for 30s + loop=True # Loop forever +).subscribe(process) +``` + +## Usage: Stub Connections for Testing + +A common pattern is creating replay-based 
connection stubs for testing without hardware. From [`robot/unitree/go2/connection.py`](/dimos/robot/unitree/go2/connection.py#L83): + +This is a bit primitive. We'd like to write a higher-order API for recording full module I/O for any module, but this is a work in progress at the moment. + + +```python skip +class ReplayConnection(UnitreeWebRTCConnection): + dir_name = "unitree_go2_bigoffice" + + def __init__(self, **kwargs) -> None: + get_data(self.dir_name) + self.replay_config = { + "loop": kwargs.get("loop"), + "seek": kwargs.get("seek"), + "duration": kwargs.get("duration"), + } + + def lidar_stream(self): + lidar_store = TimedSensorReplay(f"{self.dir_name}/lidar") + return lidar_store.stream(**self.replay_config) + + def video_stream(self): + video_store = TimedSensorReplay(f"{self.dir_name}/video") + return video_store.stream(**self.replay_config) +``` + +This allows running the full perception pipeline against recorded data: + +```python skip +# Use replay connection instead of real hardware +connection = ReplayConnection(loop=True, seek=5.0) +robot = GO2Connection(connection=connection) +``` + +## Data Format + +Each pickle file contains a tuple `(timestamp, data)`: + +- **timestamp**: Unix timestamp (float) when the frame was captured +- **data**: The sensor data (or result of `autocast` if provided) + +Files are numbered sequentially: `000.pickle`, `001.pickle`, etc. + +Recordings are stored in the `data/` directory. See [Data Loading](/docs/development/large_file_management.md) for how data storage works, including Git LFS handling for large datasets. 
+ +## API Reference + +### TimedSensorStorage + +| Method | Description | +|------------------------------|------------------------------------------| +| `save_one(frame)` | Save a single frame, returns frame count | +| `save(*frames)` | Save multiple frames | +| `save_stream(observable)` | Pipe an observable through storage | +| `consume_stream(observable)` | Subscribe and save without returning | + +### TimedSensorReplay + +| Method | Description | +|--------------------------------------------------|---------------------------------------| +| `iterate(loop=False)` | Iterate frames (no timing) | +| `iterate_ts(seek, duration, loop)` | Iterate with absolute timestamps | +| `iterate_duration(...)` | Iterate with relative timestamps | +| `iterate_realtime(speed, ...)` | Iterate with blocking to match timing | +| `stream(speed, seek, duration, loop)` | Observable with original timing | +| `find_closest(timestamp, tolerance)` | Find frame by absolute timestamp | +| `find_closest_seek(relative_seconds, tolerance)` | Find frame by relative time | +| `first()` | Get first frame | +| `first_timestamp()` | Get first timestamp | +| `load(name)` | Load specific frame by name/index | diff --git a/docs/usage/sensor_streams/temporal_alignment.md b/docs/usage/sensor_streams/temporal_alignment.md new file mode 100644 index 0000000000..66230c9d54 --- /dev/null +++ b/docs/usage/sensor_streams/temporal_alignment.md @@ -0,0 +1,313 @@ +# Temporal Message Alignment + +Robots have multiple sensors emitting data at different rates and latencies. A camera might run at 30fps, while lidar scans at 10Hz, and each has different processing delays. For perception tasks like projecting 2D detections into 3D pointclouds, we need to match data from these streams by timestamp. + +`align_timestamped` solves this by buffering messages and matching them within a time tolerance. + +
Pikchr + +```pikchr fold output=assets/alignment_overview.svg +color = white +fill = none + +Cam: box "Camera" "30 fps" rad 5px fit wid 170% ht 170% +arrow from Cam.e right 0.4in then down 0.35in then right 0.4in +Align: box "align_timestamped" rad 5px fit wid 170% ht 170% + +Lidar: box "Lidar" "10 Hz" rad 5px fit wid 170% ht 170% with .s at (Cam.s.x, Cam.s.y - 0.7in) +arrow from Lidar.e right 0.4in then up 0.35in then right 0.4in + +arrow from Align.e right 0.4in +Out: box "(image, pointcloud)" rad 5px fit wid 170% ht 170% +``` + +
+ + +![output](assets/alignment_overview.svg) + + +## Basic Usage + +Below we set up replay of real camera and lidar data from the Unitree Go2 robot. You can check it if you're interested. + +
+Stream Setup + +You can read more about [sensor storage here](storage_replay.md) and [LFS data storage here](/docs/development/large_file_management.md). + +```python session=align no-result +from reactivex import Subject +from dimos.utils.testing import TimedSensorReplay +from dimos.types.timestamped import Timestamped, align_timestamped +from reactivex import operators as ops +import reactivex as rx + +# Load recorded Go2 sensor streams +video_replay = TimedSensorReplay("unitree_go2_bigoffice/video") +lidar_replay = TimedSensorReplay("unitree_go2_bigoffice/lidar") + +# This is a bit tricky. We find the first video frame timestamp, then add 2 seconds to it. +seek_ts = video_replay.first_timestamp() + 2 + +# Lists to collect items as they flow through streams +video_frames = [] +lidar_scans = [] + +# We are using from_timestamp=... and not seek=... because seek seeks through recording +# timestamps, from_timestamp matches actual message timestamp. +# It's possible for sensor data to come in late, but with correct capture time timestamps +video_stream = video_replay.stream(from_timestamp=seek_ts, duration=2.0).pipe( + ops.do_action(lambda x: video_frames.append(x)) +) + +lidar_stream = lidar_replay.stream(from_timestamp=seek_ts, duration=2.0).pipe( + ops.do_action(lambda x: lidar_scans.append(x)) +) + +``` + + +
+ +Streams would normally come from an actual robot into your module via `In` inputs. [`detection/module3D.py`](/dimos/perception/detection/module3D.py#L11) is a good example of this. + +Assume we have them. Let's align them. + +```python session=align +# Align video (primary) with lidar (secondary) +# match_tolerance: max time difference for a match (seconds) +# buffer_size: how long to keep messages waiting for matches (seconds) +aligned_pairs = align_timestamped( + video_stream, + lidar_stream, + match_tolerance=0.025, # 25ms tolerance + buffer_size=5.0, # how long to wait for match +).pipe(ops.to_list()).run() + +print(f"Video: {len(video_frames)} frames, Lidar: {len(lidar_scans)} scans") +print(f"Aligned pairs: {len(aligned_pairs)} out of {len(video_frames)} video frames") + +# Show a matched pair +if aligned_pairs: + img, pc = aligned_pairs[0] + dt = abs(img.ts - pc.ts) + print(f"\nFirst matched pair: Δ{dt*1000:.1f}ms") +``` + + +``` +Video: 29 frames, Lidar: 15 scans +Aligned pairs: 11 out of 29 video frames + +First matched pair: Δ11.3ms +``` + +
+Visualization helper + +```python session=align fold no-result +import matplotlib +import matplotlib.pyplot as plt + +def plot_alignment_timeline(video_frames, lidar_scans, aligned_pairs, path): + """Single timeline: video above axis, lidar below, green lines for matches.""" + matplotlib.use('Agg') + plt.style.use('dark_background') + + # Get base timestamp for relative times (frames have .ts attribute) + base_ts = video_frames[0].ts + video_ts = [f.ts - base_ts for f in video_frames] + lidar_ts = [s.ts - base_ts for s in lidar_scans] + + # Find matched timestamps + matched_video_ts = set(img.ts for img, _ in aligned_pairs) + matched_lidar_ts = set(pc.ts for _, pc in aligned_pairs) + + fig, ax = plt.subplots(figsize=(12, 2.5)) + + # Video markers above axis (y=0.3) - circles, cyan when matched + for frame in video_frames: + rel_ts = frame.ts - base_ts + matched = frame.ts in matched_video_ts + ax.plot(rel_ts, 0.3, 'o', color='cyan' if matched else '#688', markersize=8) + + # Lidar markers below axis (y=-0.3) - squares, orange when matched + for scan in lidar_scans: + rel_ts = scan.ts - base_ts + matched = scan.ts in matched_lidar_ts + ax.plot(rel_ts, -0.3, 's', color='orange' if matched else '#a86', markersize=8) + + # Green lines connecting matched pairs + for img, pc in aligned_pairs: + img_rel = img.ts - base_ts + pc_rel = pc.ts - base_ts + ax.plot([img_rel, pc_rel], [0.3, -0.3], '-', color='lime', alpha=0.6, linewidth=1) + + # Axis styling + ax.axhline(y=0, color='white', linewidth=0.5, alpha=0.3) + ax.set_xlim(-0.1, max(video_ts + lidar_ts) + 0.1) + ax.set_ylim(-0.6, 0.6) + ax.set_xlabel('Time (s)') + ax.set_yticks([0.3, -0.3]) + ax.set_yticklabels(['Video', 'Lidar']) + ax.set_title(f'{len(aligned_pairs)} matched from {len(video_frames)} video + {len(lidar_scans)} lidar') + plt.tight_layout() + plt.savefig(path, transparent=True) + plt.close() +``` + +
+ +```python session=align output=assets/alignment_timeline.png +plot_alignment_timeline(video_frames, lidar_scans, aligned_pairs, '{output}') +``` + + +![output](assets/alignment_timeline.png) + +If we loosen up our match tolerance, we might get multiple pairs matching the same lidar frame. + +```python session=align +aligned_pairs = align_timestamped( + video_stream, + lidar_stream, + match_tolerance=0.05, # 50ms tolerance + buffer_size=5.0, # how long to wait for match +).pipe(ops.to_list()).run() + +print(f"Video: {len(video_frames)} frames, Lidar: {len(lidar_scans)} scans") +print(f"Aligned pairs: {len(aligned_pairs)} out of {len(video_frames)} video frames") +``` + + +``` +Video: 58 frames, Lidar: 30 scans +Aligned pairs: 23 out of 58 video frames +``` + + +```python session=align output=assets/alignment_timeline2.png +plot_alignment_timeline(video_frames, lidar_scans, aligned_pairs, '{output}') +``` + + +![output](assets/alignment_timeline2.png) + +## Combine Frame Alignment with a Quality Filter + +More on [quality filtering here](quality_filter.md). 
+ +```python session=align +from dimos.msgs.sensor_msgs.Image import Image, sharpness_barrier + +# Lists to collect items as they flow through streams +video_frames = [] +lidar_scans = [] + +video_stream = video_replay.stream(from_timestamp=seek_ts, duration=2.0).pipe( + sharpness_barrier(3.0), + ops.do_action(lambda x: video_frames.append(x)) +) + +lidar_stream = lidar_replay.stream(from_timestamp=seek_ts, duration=2.0).pipe( + ops.do_action(lambda x: lidar_scans.append(x)) +) + +aligned_pairs = align_timestamped( + video_stream, + lidar_stream, + match_tolerance=0.025, # 25ms tolerance + buffer_size=5.0, # how long to wait for match +).pipe(ops.to_list()).run() + +print(f"Video: {len(video_frames)} frames, Lidar: {len(lidar_scans)} scans") +print(f"Aligned pairs: {len(aligned_pairs)} out of {len(video_frames)} video frames") + +``` + + +``` +Video: 6 frames, Lidar: 15 scans +Aligned pairs: 1 out of 6 video frames +``` + +```python session=align output=assets/alignment_timeline3.png +plot_alignment_timeline(video_frames, lidar_scans, aligned_pairs, '{output}') +``` + + +![output](assets/alignment_timeline3.png) + +We are very picky but data is high quality. Best frame, with closest lidar match in this window. + +## How It Works + +The primary stream (first argument) drives emissions. When a primary message arrives: + +1. **Immediate match**: If matching secondaries already exist in buffers, emit immediately +2. **Deferred match**: If secondaries are missing, buffer the primary and wait + +When secondary messages arrive: +1. Add to buffer for future primary matches +2. Check buffered primaries - if this completes a match, emit + +
+diagram source + +```pikchr fold output=assets/alignment_flow.svg +color = white +fill = none +linewid = 0.35in + +Primary: box "Primary" "arrives" rad 5px fit wid 170% ht 170% +arrow +Check: box "Check" "secondaries" rad 5px fit wid 170% ht 170% + +arrow from Check.e right 0.35in then up 0.4in then right 0.35in +Emit: box "Emit" "match" rad 5px fit wid 170% ht 170% +text "all found" at (Emit.w.x - 0.4in, Emit.w.y + 0.15in) + +arrow from Check.e right 0.35in then down 0.4in then right 0.35in +Buffer: box "Buffer" "primary" rad 5px fit wid 170% ht 170% +text "waiting..." at (Buffer.w.x - 0.4in, Buffer.w.y - 0.15in) +``` + +
+ + +![output](assets/alignment_flow.svg) + +## Parameters + +| Parameter | Type | Default | Description | +|--------------------------|--------------------|----------|-------------------------------------------------| +| `primary_observable` | `Observable[T]` | required | Primary stream that drives output timing | +| `*secondary_observables` | `Observable[S]...` | required | One or more secondary streams to align | +| `match_tolerance` | `float` | 0.1 | Max time difference for a match (seconds) | +| `buffer_size` | `float` | 1.0 | How long to buffer unmatched messages (seconds) | + + + +## Usage in Modules + +Every module `In` port exposes an `.observable()` method that returns a backpressured stream of incoming messages. This makes it easy to align inputs from multiple sensors. + +From [`detection/module3D.py`](/dimos/perception/detection/module3D.py), projecting 2D detections into 3D pointclouds: + +```python skip +class Detection3DModule(Detection2DModule): + color_image: In[Image] + pointcloud: In[PointCloud2] + + def start(self): + # Align 2D detections with pointcloud data + self.detection_stream_3d = align_timestamped( + backpressure(self.detection_stream_2d()), + self.pointcloud.observable(), + match_tolerance=0.25, + buffer_size=20.0, + ).pipe(ops.map(detection2d_to_3d)) +``` + +The 2D detection stream (camera + ML model) is the primary, matched with raw pointcloud data from lidar. The longer `buffer_size=20.0` accounts for variable ML inference times. 
diff --git a/docs/api/transforms.md b/docs/usage/transforms.md similarity index 98% rename from docs/api/transforms.md rename to docs/usage/transforms.md index b64b133dea..2435839feb 100644 --- a/docs/api/transforms.md +++ b/docs/usage/transforms.md @@ -465,5 +465,5 @@ For the mathematical foundations, the ROS documentation provides detailed backgr - [ROS REP 105 - Coordinate Frames for Mobile Platforms](https://www.ros.org/reps/rep-0105.html) See also: -- [Modules](/docs/concepts/modules/index.md) for understanding the module system -- [Configuration](/docs/concepts/configuration.md) for module configuration patterns +- [Modules](/docs/usage/modules.md) for understanding the module system +- [Configuration](/docs/usage/configuration.md) for module configuration patterns diff --git a/docs/usage/transports.md b/docs/usage/transports.md new file mode 100644 index 0000000000..4c80776531 --- /dev/null +++ b/docs/usage/transports.md @@ -0,0 +1,437 @@ +# Transports + +Transports connect **module streams** across **process boundaries** and/or **networks**. + +* **Module**: a running component (e.g., camera, mapping, nav). +* **Stream**: a unidirectional flow of messages owned by a module (one broadcaster → many receivers). +* **Topic**: the name/identifier used by a transport or pubsub backend. +* **Message**: payload carried on a stream (often `dimos.msgs.*`, but can be bytes / images / pointclouds / etc.). + +Each edge in the graph is a **transported stream** (potentially different protocols). Each node is a **module**: + +![go2_nav](assets/go2_nav.svg) + +## What the transport layer guarantees (and what it doesn’t) + +Modules **don’t** know or care *how* data moves. They just: + +* emit messages (broadcast) +* subscribe to messages (receive) + +A transport is responsible for the mechanics of delivery (IPC, sockets, Redis, ROS 2, etc.). + +**Important:** delivery semantics depend on the backend: + +* Some are **best-effort** (e.g., UDP multicast / LCM): loss can happen. 
+* Some can be **reliable** (e.g., TCP-backed, Redis, some DDS configs) but may add latency/backpressure. + +So: treat the API as uniform, but pick a backend whose semantics match the task. + +--- + +## Benchmarks + +Quick view on performance of our pubsub backends: + +```sh skip +python -m pytest -svm tool -k "not bytes" dimos/protocol/pubsub/benchmark/test_benchmark.py +``` + +![Benchmark results](assets/pubsub_benchmark.png) + +--- + +## Abstraction layers + +
Pikchr + +```pikchr output=assets/abstraction_layers.svg fold +color = white +fill = none +linewid = 0.5in +boxwid = 1.0in +boxht = 0.4in + +# Boxes with labels +B: box "Blueprints" rad 10px +arrow +M: box "Modules" rad 5px +arrow +T: box "Transports" rad 5px +arrow +P: box "PubSub" rad 5px + +# Descriptions below +text "robot configs" at B.s + (0.1, -0.2in) +text "camera, nav" at M.s + (0, -0.2in) +text "LCM, SHM, ROS" at T.s + (0, -0.2in) +text "pub/sub API" at P.s + (0, -0.2in) +``` + +
+ + +![output](assets/abstraction_layers.svg) + +We’ll go through these layers top-down. + +--- + +## Using transports with blueprints + +See [Blueprints](blueprints.md) for the blueprint API. + +From [`unitree/go2/blueprints/__init__.py`](/dimos/robot/unitree/go2/blueprints/__init__.py). + +Example: rebind a few streams from the default `LCMTransport` to `ROSTransport` (defined at [`transport.py`](/dimos/core/transport.py#L226)) so you can visualize in **rviz2**. + +```python skip +nav = autoconnect( + basic, + voxel_mapper(voxel_size=0.1), + cost_mapper(), + replanning_a_star_planner(), + wavefront_frontier_explorer(), +).global_config(n_dask_workers=6, robot_model="unitree_go2") + +ros = nav.transports( + { + ("lidar", PointCloud2): ROSTransport("lidar", PointCloud2), + ("global_map", PointCloud2): ROSTransport("global_map", PointCloud2), + ("odom", PoseStamped): ROSTransport("odom", PoseStamped), + ("color_image", Image): ROSTransport("color_image", Image), + } +) +``` + +--- + +## Using transports with modules + +Each **stream** on a module can use a different transport. Set `.transport` on the stream **before starting** modules. 
+ +```python ansi=false +import time + +from dimos.core import In, Module, start +from dimos.core.transport import LCMTransport +from dimos.hardware.sensors.camera.module import CameraModule +from dimos.msgs.sensor_msgs import Image + + +class ImageListener(Module): + image: In[Image] + + def start(self): + super().start() + self.image.subscribe(lambda img: print(f"Received: {img.shape}")) + + +if __name__ == "__main__": + # Start local cluster and deploy modules to separate processes + dimos = start(2) + + camera = dimos.deploy(CameraModule, frequency=2.0) + listener = dimos.deploy(ImageListener) + + # Choose a transport for the stream (example: LCM typed channel) + camera.color_image.transport = LCMTransport("/camera/rgb", Image) + + # Connect listener input to camera output + listener.image.connect(camera.color_image) + + camera.start() + listener.start() + + time.sleep(2) + dimos.stop() +``` + + + +``` +Initialized dimos local cluster with 2 workers, memory limit: auto +2026-01-24T13:17:50.190559Z [info ] Deploying module. [dimos/core/__init__.py] module=CameraModule +2026-01-24T13:17:50.218466Z [info ] Deployed module. [dimos/core/__init__.py] module=CameraModule worker_id=1 +2026-01-24T13:17:50.229474Z [info ] Deploying module. [dimos/core/__init__.py] module=ImageListener +2026-01-24T13:17:50.250199Z [info ] Deployed module. [dimos/core/__init__.py] module=ImageListener worker_id=0 +Received: (480, 640, 3) +Received: (480, 640, 3) +Received: (480, 640, 3) +``` + +See [Modules](modules.md) for more on module architecture. + +--- + +## Inspecting LCM traffic (CLI) + +`lcmspy` shows topic frequency/bandwidth stats: + +![lcmspy](assets/lcmspy.png) + +`dimos topic echo /topic` listens on typed channels like `/topic#pkg.Msg` and decodes automatically: + +```sh skip +Listening on /camera/rgb (inferring from typed LCM channels like '/camera/rgb#pkg.Msg')... 
(Ctrl+C to stop) +Image(shape=(480, 640, 3), format=RGB, dtype=uint8, dev=cpu, ts=2026-01-24 20:28:59) +``` + +--- + +## Implementing a transport + +At the stream layer, a transport is implemented by subclassing `Transport` (see [`core/stream.py`](/dimos/core/stream.py#L83)) and implementing: + +* `broadcast(...)` +* `subscribe(...)` + +Your `Transport.__init__` args can be anything meaningful for your backend: + +* `(ip, port)` +* a shared-memory segment name +* a filesystem path +* a Redis channel + +Encoding is an implementation detail, but we encourage using LCM-compatible message types when possible. + +### Encoding helpers + +Many of our message types provide `lcm_encode` / `lcm_decode` for compact, language-agnostic binary encoding (often faster than pickle). For details, see [LCM](/docs/usage/lcm.md). + +--- + +## PubSub transports + +Even though transport can be anything (TCP connection, unix socket) for now all our transport backends implement the `PubSub` interface. + +* `publish(topic, message)` +* `subscribe(topic, callback) -> unsubscribe` + +```python +from dimos.protocol.pubsub.spec import PubSub +import inspect + +print(inspect.getsource(PubSub.publish)) +print(inspect.getsource(PubSub.subscribe)) +``` + + +```python + @abstractmethod + def publish(self, topic: TopicT, message: MsgT) -> None: + """Publish a message to a topic.""" + ... + + @abstractmethod + def subscribe( + self, topic: TopicT, callback: Callable[[MsgT, TopicT], None] + ) -> Callable[[], None]: + """Subscribe to a topic with a callback. returns unsubscribe function""" + ... +``` + +Topic/message types are flexible: bytes, JSON, or our ROS-compatible [LCM](/docs/usage/lcm.md) types. We also have pickle-based transports for arbitrary Python objects. + +### LCM (UDP multicast) + +LCM is UDP multicast. It’s very fast on a robot LAN, but it’s **best-effort** (packets can drop). 
+For local emission, it autoconfigures the system to be more robust and faster than more common protocols such as ROS or DDS.
+
+```python
+from dimos.protocol.pubsub.lcmpubsub import LCM, Topic
+from dimos.msgs.geometry_msgs import Vector3
+
+lcm = LCM(autoconf=True)
+lcm.start()
+
+received = []
+topic = Topic("/robot/velocity", Vector3)
+
+lcm.subscribe(topic, lambda msg, t: received.append(msg))
+lcm.publish(topic, Vector3(1.0, 0.0, 0.5))
+
+import time
+time.sleep(0.1)
+
+print(f"Received velocity: x={received[0].x}, y={received[0].y}, z={received[0].z}")
+lcm.stop()
+```
+
+
+```
+Received velocity: x=1.0, y=0.0, z=0.5
+```
+
+### Shared memory (IPC)
+
+Shared memory is highest performance, but only works on the **same machine**.
+
+```python
+from dimos.protocol.pubsub.shmpubsub import PickleSharedMemory
+
+shm = PickleSharedMemory(prefer="cpu")
+shm.start()
+
+received = []
+shm.subscribe("test/topic", lambda msg, topic: received.append(msg))
+shm.publish("test/topic", {"data": [1, 2, 3]})
+
+import time
+time.sleep(0.1)
+
+print(f"Received: {received}")
+shm.stop()
+```
+
+
+```
+Received: [{'data': [1, 2, 3]}]
+```
+
+### DDS Transport
+
+For network communication, DDS uses the Data Distribution Service (DDS) protocol:
+
+```python session=dds_demo ansi=false
+from dataclasses import dataclass
+from cyclonedds.idl import IdlStruct
+
+from dimos.protocol.pubsub.impl.ddspubsub import DDS, Topic
+
+@dataclass
+class SensorReading(IdlStruct):
+    value: float
+
+dds = DDS()
+dds.start()
+
+received = []
+sensor_topic = Topic(name="sensors/temperature", data_type=SensorReading)
+
+dds.subscribe(sensor_topic, lambda msg, t: received.append(msg))
+dds.publish(sensor_topic, SensorReading(value=22.5))
+
+import time
+time.sleep(0.1)
+
+print(f"Received: {received}")
+dds.stop()
+```
+
+
+```
+Received: [SensorReading(value=22.5)]
+```
+
+---
+
+## A minimal transport: `Memory`
+
+The simplest toy backend is `Memory` (single process). 
Start from there when implementing a new pubsub backend. + +```python +from dimos.protocol.pubsub.memory import Memory + +bus = Memory() +received = [] + +unsubscribe = bus.subscribe("sensor/data", lambda msg, topic: received.append(msg)) + +bus.publish("sensor/data", {"temperature": 22.5}) +bus.publish("sensor/data", {"temperature": 23.0}) + +print(f"Received {len(received)} messages:") +for msg in received: + print(f" {msg}") + +unsubscribe() +``` + + +``` +Received 2 messages: + {'temperature': 22.5} + {'temperature': 23.0} +``` + +See [`memory.py`](/dimos/protocol/pubsub/impl/memory.py) for the complete source. + +--- + +## Encode/decode mixins + +Transports often need to serialize messages before sending and deserialize after receiving. + +`PubSubEncoderMixin` at [`pubsub/spec.py`](/dimos/protocol/pubsub/spec.py#L95) provides a clean way to add encoding/decoding to any pubsub implementation. + +### Available mixins + +| Mixin | Encoding | Use case | +|----------------------|-----------------|------------------------------------| +| `PickleEncoderMixin` | Python pickle | Any Python object, Python-only | +| `LCMEncoderMixin` | LCM binary | Cross-language (C/C++/Python/Go/…) | +| `JpegEncoderMixin` | JPEG compressed | Image data, reduces bandwidth | + +`LCMEncoderMixin` is especially useful: you can use LCM message definitions with *any* transport (not just UDP multicast). See [LCM](/docs/usage/lcm.md) for details. 
+ +### Creating a custom mixin + +```python session=jsonencoder no-result +from dimos.protocol.pubsub.spec import PubSubEncoderMixin +import json + +class JsonEncoderMixin(PubSubEncoderMixin[str, dict, bytes]): + def encode(self, msg: dict, topic: str) -> bytes: + return json.dumps(msg).encode("utf-8") + + def decode(self, msg: bytes, topic: str) -> dict: + return json.loads(msg.decode("utf-8")) +``` + +Combine with a pubsub implementation via multiple inheritance: + +```python session=jsonencoder no-result +from dimos.protocol.pubsub.memory import Memory + +class MyJsonPubSub(JsonEncoderMixin, Memory): + pass +``` + +Swap serialization by changing the mixin: + +```python session=jsonencoder no-result +from dimos.protocol.pubsub.spec import PickleEncoderMixin + +class MyPicklePubSub(PickleEncoderMixin, Memory): + pass +``` + +--- + +## Testing and benchmarks + +### Spec tests + +See [`pubsub/test_spec.py`](/dimos/protocol/pubsub/test_spec.py) for the grid tests your new backend should pass. 
+ +### Benchmarks + +Add your backend to benchmarks to compare in context: + +```sh skip +python -m pytest -svm tool -k "not bytes" dimos/protocol/pubsub/benchmark/test_benchmark.py +``` + +--- + +# Available transports + +| Transport | Use case | Cross-process | Network | Notes | +|----------------|-------------------------------------|---------------|---------|--------------------------------------| +| `Memory` | Testing only, single process | No | No | Minimal reference impl | +| `SharedMemory` | Multi-process on same machine | Yes | No | Highest throughput (IPC) | +| `LCM` | Robot LAN broadcast (UDP multicast) | Yes | Yes | Best-effort; can drop packets on LAN | +| `Redis` | Network pubsub via Redis server | Yes | Yes | Central broker; adds hop | +| `ROS` | ROS 2 topic communication | Yes | Yes | Integrates with RViz/ROS tools | +| `DDS` | Cyclone DDS without ROS (WIP) | Yes | Yes | WIP | diff --git a/docs/usage/transports/dds.md b/docs/usage/transports/dds.md new file mode 100644 index 0000000000..1aec0bafe5 --- /dev/null +++ b/docs/usage/transports/dds.md @@ -0,0 +1,26 @@ +# Installing DDS Transport Libs on Ubuntu + +The `dds` extra provides DDS (Data Distribution Service) transport support via [Eclipse Cyclone DDS](https://cyclonedds.io/docs/cyclonedds-python/latest/). This requires installing system libraries before the Python package can be built. 
+ +```bash +# Install the CycloneDDS development library +sudo apt install cyclonedds-dev + +# Create a compatibility directory structure +# (required because Ubuntu's multiarch layout doesn't match the expected CMake layout) +sudo mkdir -p /opt/cyclonedds/{lib,bin,include} +sudo ln -sf /usr/lib/x86_64-linux-gnu/libddsc.so* /opt/cyclonedds/lib/ +sudo ln -sf /usr/lib/x86_64-linux-gnu/libcycloneddsidl.so* /opt/cyclonedds/lib/ +sudo ln -sf /usr/bin/idlc /opt/cyclonedds/bin/ +sudo ln -sf /usr/bin/ddsperf /opt/cyclonedds/bin/ +sudo ln -sf /usr/include/dds /opt/cyclonedds/include/ + +# Install with the dds extra +CYCLONEDDS_HOME=/opt/cyclonedds uv pip install -e '.[dds]' +``` + +To install all extras including DDS: + +```bash +CYCLONEDDS_HOME=/opt/cyclonedds uv sync --extra dds +``` diff --git a/docs/usage/transports/index.md b/docs/usage/transports/index.md new file mode 100644 index 0000000000..748cf03aa1 --- /dev/null +++ b/docs/usage/transports/index.md @@ -0,0 +1,437 @@ +# Transports + +Transports connect **module streams** across **process boundaries** and/or **networks**. + +* **Module**: a running component (e.g., camera, mapping, nav). +* **Stream**: a unidirectional flow of messages owned by a module (one broadcaster → many receivers). +* **Topic**: the name/identifier used by a transport or pubsub backend. +* **Message**: payload carried on a stream (often `dimos.msgs.*`, but can be bytes / images / pointclouds / etc.). + +Each edge in the graph is a **transported stream** (potentially different protocols). Each node is a **module**: + +![go2_nav](../assets/go2_nav.svg) + +## What the transport layer guarantees (and what it doesn’t) + +Modules **don’t** know or care *how* data moves. They just: + +* emit messages (broadcast) +* subscribe to messages (receive) + +A transport is responsible for the mechanics of delivery (IPC, sockets, Redis, ROS 2, etc.). 
+ +**Important:** delivery semantics depend on the backend: + +* Some are **best-effort** (e.g., UDP multicast / LCM): loss can happen. +* Some can be **reliable** (e.g., TCP-backed, Redis, some DDS configs) but may add latency/backpressure. + +So: treat the API as uniform, but pick a backend whose semantics match the task. + +--- + +## Benchmarks + +Quick view on performance of our pubsub backends: + +```sh skip +python -m pytest -svm tool -k "not bytes" dimos/protocol/pubsub/benchmark/test_benchmark.py +``` + +![Benchmark results](../assets/pubsub_benchmark.png) + +--- + +## Abstraction layers + +
Pikchr + +```pikchr output=../assets/abstraction_layers.svg fold +color = white +fill = none +linewid = 0.5in +boxwid = 1.0in +boxht = 0.4in + +# Boxes with labels +B: box "Blueprints" rad 10px +arrow +M: box "Modules" rad 5px +arrow +T: box "Transports" rad 5px +arrow +P: box "PubSub" rad 5px + +# Descriptions below +text "robot configs" at B.s + (0.1, -0.2in) +text "camera, nav" at M.s + (0, -0.2in) +text "LCM, SHM, ROS" at T.s + (0, -0.2in) +text "pub/sub API" at P.s + (0, -0.2in) +``` + +
+ + +![output](../assets/abstraction_layers.svg) + +We’ll go through these layers top-down. + +--- + +## Using transports with blueprints + +See [Blueprints](blueprints.md) for the blueprint API. + +From [`unitree/go2/blueprints/__init__.py`](/dimos/robot/unitree/go2/blueprints/__init__.py). + +Example: rebind a few streams from the default `LCMTransport` to `ROSTransport` (defined at [`transport.py`](/dimos/core/transport.py#L226)) so you can visualize in **rviz2**. + +```python skip +nav = autoconnect( + basic, + voxel_mapper(voxel_size=0.1), + cost_mapper(), + replanning_a_star_planner(), + wavefront_frontier_explorer(), +).global_config(n_dask_workers=6, robot_model="unitree_go2") + +ros = nav.transports( + { + ("lidar", PointCloud2): ROSTransport("lidar", PointCloud2), + ("global_map", PointCloud2): ROSTransport("global_map", PointCloud2), + ("odom", PoseStamped): ROSTransport("odom", PoseStamped), + ("color_image", Image): ROSTransport("color_image", Image), + } +) +``` + +--- + +## Using transports with modules + +Each **stream** on a module can use a different transport. Set `.transport` on the stream **before starting** modules. 
+ +```python ansi=false +import time + +from dimos.core import In, Module, start +from dimos.core.transport import LCMTransport +from dimos.hardware.sensors.camera.module import CameraModule +from dimos.msgs.sensor_msgs import Image + + +class ImageListener(Module): + image: In[Image] + + def start(self): + super().start() + self.image.subscribe(lambda img: print(f"Received: {img.shape}")) + + +if __name__ == "__main__": + # Start local cluster and deploy modules to separate processes + dimos = start(2) + + camera = dimos.deploy(CameraModule, frequency=2.0) + listener = dimos.deploy(ImageListener) + + # Choose a transport for the stream (example: LCM typed channel) + camera.color_image.transport = LCMTransport("/camera/rgb", Image) + + # Connect listener input to camera output + listener.image.connect(camera.color_image) + + camera.start() + listener.start() + + time.sleep(2) + dimos.stop() +``` + + + +``` +Initialized dimos local cluster with 2 workers, memory limit: auto +2026-01-24T13:17:50.190559Z [info ] Deploying module. [dimos/core/__init__.py] module=CameraModule +2026-01-24T13:17:50.218466Z [info ] Deployed module. [dimos/core/__init__.py] module=CameraModule worker_id=1 +2026-01-24T13:17:50.229474Z [info ] Deploying module. [dimos/core/__init__.py] module=ImageListener +2026-01-24T13:17:50.250199Z [info ] Deployed module. [dimos/core/__init__.py] module=ImageListener worker_id=0 +Received: (480, 640, 3) +Received: (480, 640, 3) +Received: (480, 640, 3) +``` + +See [Modules](modules.md) for more on module architecture. + +--- + +## Inspecting LCM traffic (CLI) + +`lcmspy` shows topic frequency/bandwidth stats: + +![lcmspy](../assets/lcmspy.png) + +`dimos topic echo /topic` listens on typed channels like `/topic#pkg.Msg` and decodes automatically: + +```sh skip +Listening on /camera/rgb (inferring from typed LCM channels like '/camera/rgb#pkg.Msg')... 
(Ctrl+C to stop) +Image(shape=(480, 640, 3), format=RGB, dtype=uint8, dev=cpu, ts=2026-01-24 20:28:59) +``` + +--- + +## Implementing a transport + +At the stream layer, a transport is implemented by subclassing `Transport` (see [`core/stream.py`](/dimos/core/stream.py#L83)) and implementing: + +* `broadcast(...)` +* `subscribe(...)` + +Your `Transport.__init__` args can be anything meaningful for your backend: + +* `(ip, port)` +* a shared-memory segment name +* a filesystem path +* a Redis channel + +Encoding is an implementation detail, but we encourage using LCM-compatible message types when possible. + +### Encoding helpers + +Many of our message types provide `lcm_encode` / `lcm_decode` for compact, language-agnostic binary encoding (often faster than pickle). For details, see [LCM](/docs/usage/lcm.md). + +--- + +## PubSub transports + +Even though transport can be anything (TCP connection, unix socket) for now all our transport backends implement the `PubSub` interface. + +* `publish(topic, message)` +* `subscribe(topic, callback) -> unsubscribe` + +```python +from dimos.protocol.pubsub.spec import PubSub +import inspect + +print(inspect.getsource(PubSub.publish)) +print(inspect.getsource(PubSub.subscribe)) +``` + + +```python + @abstractmethod + def publish(self, topic: TopicT, message: MsgT) -> None: + """Publish a message to a topic.""" + ... + + @abstractmethod + def subscribe( + self, topic: TopicT, callback: Callable[[MsgT, TopicT], None] + ) -> Callable[[], None]: + """Subscribe to a topic with a callback. returns unsubscribe function""" + ... +``` + +Topic/message types are flexible: bytes, JSON, or our ROS-compatible [LCM](/docs/usage/lcm.md) types. We also have pickle-based transports for arbitrary Python objects. + +### LCM (UDP multicast) + +LCM is UDP multicast. It’s very fast on a robot LAN, but it’s **best-effort** (packets can drop). 
+For local emission, it autoconfigures the system to be more robust and faster than more common protocols such as ROS or DDS.
+
+```python
+from dimos.protocol.pubsub.lcmpubsub import LCM, Topic
+from dimos.msgs.geometry_msgs import Vector3
+
+lcm = LCM(autoconf=True)
+lcm.start()
+
+received = []
+topic = Topic("/robot/velocity", Vector3)
+
+lcm.subscribe(topic, lambda msg, t: received.append(msg))
+lcm.publish(topic, Vector3(1.0, 0.0, 0.5))
+
+import time
+time.sleep(0.1)
+
+print(f"Received velocity: x={received[0].x}, y={received[0].y}, z={received[0].z}")
+lcm.stop()
+```
+
+
+```
+Received velocity: x=1.0, y=0.0, z=0.5
+```
+
+### Shared memory (IPC)
+
+Shared memory is highest performance, but only works on the **same machine**.
+
+```python
+from dimos.protocol.pubsub.shmpubsub import PickleSharedMemory
+
+shm = PickleSharedMemory(prefer="cpu")
+shm.start()
+
+received = []
+shm.subscribe("test/topic", lambda msg, topic: received.append(msg))
+shm.publish("test/topic", {"data": [1, 2, 3]})
+
+import time
+time.sleep(0.1)
+
+print(f"Received: {received}")
+shm.stop()
+```
+
+
+```
+Received: [{'data': [1, 2, 3]}]
+```
+
+### DDS Transport
+
+For network communication, DDS uses the Data Distribution Service (DDS) protocol:
+
+```python session=dds_demo ansi=false
+from dataclasses import dataclass
+from cyclonedds.idl import IdlStruct
+
+from dimos.protocol.pubsub.impl.ddspubsub import DDS, Topic
+
+@dataclass
+class SensorReading(IdlStruct):
+    value: float
+
+dds = DDS()
+dds.start()
+
+received = []
+sensor_topic = Topic(name="sensors/temperature", data_type=SensorReading)
+
+dds.subscribe(sensor_topic, lambda msg, t: received.append(msg))
+dds.publish(sensor_topic, SensorReading(value=22.5))
+
+import time
+time.sleep(0.1)
+
+print(f"Received: {received}")
+dds.stop()
+```
+
+
+```
+Received: [SensorReading(value=22.5)]
+```
+
+---
+
+## A minimal transport: `Memory`
+
+The simplest toy backend is `Memory` (single process). 
Start from there when implementing a new pubsub backend. + +```python +from dimos.protocol.pubsub.memory import Memory + +bus = Memory() +received = [] + +unsubscribe = bus.subscribe("sensor/data", lambda msg, topic: received.append(msg)) + +bus.publish("sensor/data", {"temperature": 22.5}) +bus.publish("sensor/data", {"temperature": 23.0}) + +print(f"Received {len(received)} messages:") +for msg in received: + print(f" {msg}") + +unsubscribe() +``` + + +``` +Received 2 messages: + {'temperature': 22.5} + {'temperature': 23.0} +``` + +See [`memory.py`](/dimos/protocol/pubsub/impl/memory.py) for the complete source. + +--- + +## Encode/decode mixins + +Transports often need to serialize messages before sending and deserialize after receiving. + +`PubSubEncoderMixin` at [`pubsub/spec.py`](/dimos/protocol/pubsub/spec.py#L95) provides a clean way to add encoding/decoding to any pubsub implementation. + +### Available mixins + +| Mixin | Encoding | Use case | +|----------------------|-----------------|------------------------------------| +| `PickleEncoderMixin` | Python pickle | Any Python object, Python-only | +| `LCMEncoderMixin` | LCM binary | Cross-language (C/C++/Python/Go/…) | +| `JpegEncoderMixin` | JPEG compressed | Image data, reduces bandwidth | + +`LCMEncoderMixin` is especially useful: you can use LCM message definitions with *any* transport (not just UDP multicast). See [LCM](/docs/usage/lcm.md) for details. 
+ +### Creating a custom mixin + +```python session=jsonencoder no-result +from dimos.protocol.pubsub.spec import PubSubEncoderMixin +import json + +class JsonEncoderMixin(PubSubEncoderMixin[str, dict, bytes]): + def encode(self, msg: dict, topic: str) -> bytes: + return json.dumps(msg).encode("utf-8") + + def decode(self, msg: bytes, topic: str) -> dict: + return json.loads(msg.decode("utf-8")) +``` + +Combine with a pubsub implementation via multiple inheritance: + +```python session=jsonencoder no-result +from dimos.protocol.pubsub.memory import Memory + +class MyJsonPubSub(JsonEncoderMixin, Memory): + pass +``` + +Swap serialization by changing the mixin: + +```python session=jsonencoder no-result +from dimos.protocol.pubsub.spec import PickleEncoderMixin + +class MyPicklePubSub(PickleEncoderMixin, Memory): + pass +``` + +--- + +## Testing and benchmarks + +### Spec tests + +See [`pubsub/test_spec.py`](/dimos/protocol/pubsub/test_spec.py) for the grid tests your new backend should pass. 
+ +### Benchmarks + +Add your backend to benchmarks to compare in context: + +```sh skip +python -m pytest -svm tool -k "not bytes" dimos/protocol/pubsub/benchmark/test_benchmark.py +``` + +--- + +# Available transports + +| Transport | Use case | Cross-process | Network | Notes | +|----------------|-------------------------------------|---------------|---------|--------------------------------------| +| `Memory` | Testing only, single process | No | No | Minimal reference impl | +| `SharedMemory` | Multi-process on same machine | Yes | No | Highest throughput (IPC) | +| `LCM` | Robot LAN broadcast (UDP multicast) | Yes | Yes | Best-effort; can drop packets on LAN | +| `Redis` | Network pubsub via Redis server | Yes | Yes | Central broker; adds hop | +| `ROS` | ROS 2 topic communication | Yes | Yes | Integrates with RViz/ROS tools | +| `DDS` | Cyclone DDS without ROS (WIP) | Yes | Yes | WIP | diff --git a/docs/usage/visualization.md b/docs/usage/visualization.md new file mode 100644 index 0000000000..56d0f006f0 --- /dev/null +++ b/docs/usage/visualization.md @@ -0,0 +1,115 @@ +# Viewer Backends + +Dimos supports three visualization backends: Rerun (web or native) and Foxglove. 
+ +## Quick Start + +Choose your viewer backend via the CLI (preferred): + +```bash +# Rerun web viewer (default) - Full vis dashboard and teleop panel in browser +dimos run unitree-go2 + +# Explicitly select the viewer backend: +dimos --viewer-backend rerun run unitree-go2 +dimos --viewer-backend rerun-web run unitree-go2 +dimos --viewer-backend foxglove run unitree-go2 +``` + +Alternative (environment variable): + +```bash +# Rerun web viewer - Full dashboard in browser +VIEWER_BACKEND=rerun-web dimos run unitree-go2 + +# Rerun native viewer - native Rerun window + teleop panel at http://localhost:7779 +VIEWER_BACKEND=rerun dimos run unitree-go2 + +# Foxglove - Use Foxglove Studio instead of Rerun +VIEWER_BACKEND=foxglove dimos run unitree-go2 +``` + +## Viewer Modes Explained + +### Rerun Web (`rerun-web`) + +**What you get:** +- Full dashboard at http://localhost:7779 +- Rerun 3D viewer + command center sidebar in one page +- Works in browser, no display required (headless-friendly) + +--- + +### Rerun Native (`rerun`) + +**What you get:** +- Native Rerun application (separate window opens automatically) +- Command center at http://localhost:7779 +- Better performance with larger maps/higher resolution + +--- + +### Foxglove (`foxglove`) + +**What you get:** +- Foxglove bridge on ws://localhost:8765 +- No Rerun (saves resources) +- Better performance with larger maps/higher resolution +- Open layout: `assets/foxglove_dashboards/old/foxglove_unitree_lcm_dashboard.json` + +--- + +## Rendering with Custom Blueprints + +To enable rerun within your own blueprint simply include `RerunBridgeModule`: + +```python +from dimos.visualization.rerun.bridge import RerunBridgeModule +from dimos.hardware.sensors.camera.module import CameraModule +from dimos.protocol.pubsub.impl.lcmpubsub import LCM + +camera_demo = autoconnect( + CameraModule.blueprint(), + RerunBridgeModule.blueprint( + viewer_mode="native", # native (desktop), web (browser), none (headless) + ), +) + +if 
__name__ == "__main__": + camera_demo.build().loop() +``` + +Every LCM stream, such as `color_image` (output by CameraModule), that uses a data type (like `Image`) that has a `.to_rerun` method will get rendered (`rr.log`) using the LCM topic as the rerun entity path. In other words: to render something, simply log it to a stream and it will automatically be available in rerun. + +## Performance Tuning + +### Symptom: Slow Map Updates + +If you notice: +- Robot appears to "walk across empty space" +- Costmap updates lag behind the robot +- Visualization stutters or freezes + +This happens on lower-end hardware (NUC, older laptops) with large maps. + +### Increase Voxel Size + +Edit [`dimos/robot/unitree/go2/blueprints/__init__.py`](/dimos/robot/unitree/go2/blueprints/__init__.py) line 82: + +```python +# Before (high detail, slower on large maps) +voxel_mapper(voxel_size=0.05), # 5cm voxels + +# After (lower detail, 8x faster) +voxel_mapper(voxel_size=0.1), # 10cm voxels +``` + +**Trade-off:** +- Larger voxels = fewer voxels = faster updates +- But slightly less detail in the map + +--- + +## How to use Rerun on `dev` (and the TF/entity nuances) + +Rerun on `dev` is **module-driven**: modules decide what to log, and `Blueprint.build()` sets up the shared viewer + default layout. diff --git a/examples/camera_grayscale.py b/examples/camera_grayscale.py new file mode 100644 index 0000000000..0d21a0449f --- /dev/null +++ b/examples/camera_grayscale.py @@ -0,0 +1,38 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from dimos.core import In, Module, Out, autoconnect, rpc +from dimos.hardware.sensors.camera.module import CameraModule +from dimos.msgs.sensor_msgs.Image import Image +from dimos.visualization.rerun.bridge import RerunBridgeModule + + +class Grayscale(Module): + color_image: In[Image] + gray_image: Out[Image] + + @rpc + def start(self): + self.color_image.subscribe(self._publish_grayscale) + + def _publish_grayscale(self, image: Image): + self.gray_image.publish(image.to_grayscale()) + + +if __name__ == "__main__": + autoconnect( + CameraModule.blueprint(), + Grayscale.blueprint(), + RerunBridgeModule.blueprint(), + ).build().loop() diff --git a/examples/rpc_calls.py b/examples/rpc_calls.py new file mode 100644 index 0000000000..c39ff8d4e8 --- /dev/null +++ b/examples/rpc_calls.py @@ -0,0 +1,56 @@ +# Copyright 2026 Dimensional Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Protocol + +from dimos.core import Module, rpc +from dimos.core.blueprints import autoconnect +from dimos.spec.utils import Spec + + +# this would be defined in some other file (this could be imported from a library) +class Calculator(Module): + @rpc + def compute1(self, a: int, b: int) -> int: + return a + b + + @rpc + def compute2(self, a: float, b: float) -> float: + return a + b + + +# what your module needs/expects +class ComputeSpec(Spec, Protocol): + @rpc + def compute1(self, a: int, b: int) -> int: ... + + @rpc + def compute2(self, a: float, b: float) -> float: ... + + +class Client(Module): + # this says: "hey dimos, give me access to a module that has a compute1 and compute2 method" + calc: ComputeSpec + + @rpc + def start(self) -> None: + print("compute1:", self.calc.compute1(2, 3)) + print("compute2:", self.calc.compute2(1.5, 2.5)) + + +if __name__ == "__main__": + autoconnect( + Calculator.blueprint(), + Client.blueprint(), + ).build().loop() diff --git a/examples/simplerobot/README.md b/examples/simplerobot/README.md index bc6686f77c..8c7b6dfbc7 100644 --- a/examples/simplerobot/README.md +++ b/examples/simplerobot/README.md @@ -11,7 +11,7 @@ A minimal virtual robot for testing and development. It implements some of the s Physical robots typically publish multiple poses in a relationship as `TransformStamped` in a TF tree, while SimpleRobot publishes `PoseStamped` directly for simplicity. 
-For details on this check [Transforms](/docs/api/transforms.md) +For details on this check [Transforms](/docs/usage/transforms.md) ## Usage diff --git a/flake.nix b/flake.nix index 3a70a0bf2f..68dbf0ee8c 100644 --- a/flake.nix +++ b/flake.nix @@ -28,10 +28,11 @@ { vals.pkg=pkgs.gh; flags={}; } { vals.pkg=pkgs.stdenv.cc.cc.lib; flags.ldLibraryGroup=true; } { vals.pkg=pkgs.stdenv.cc; flags.ldLibraryGroup=true; } + { vals.pkg=pkgs.gfortran.cc.lib; flags.ldLibraryGroup=true; } { vals.pkg=pkgs.cctools; flags={}; onlyIf=pkgs.stdenv.isDarwin; } # for pip install opencv-python { vals.pkg=pkgs.pcre2; flags={ ldLibraryGroup=pkgs.stdenv.isDarwin; packageConfGroup=pkgs.stdenv.isDarwin; }; } { vals.pkg=pkgs.libsysprof-capture; flags.packageConfGroup=true; onlyIf=pkgs.stdenv.isDarwin; } - { vals.pkg=pkgs.xcbuild; flags={}; } + { vals.pkg=pkgs.xcbuild; flags={}; onlyIf=pkgs.stdenv.isDarwin; } { vals.pkg=pkgs.git-lfs; flags={}; } { vals.pkg=pkgs.gnugrep; flags={}; } { vals.pkg=pkgs.gnused; flags={}; } @@ -128,6 +129,7 @@ { vals.pkg=pkgs.libjpeg; flags.ldLibraryGroup=true; } { vals.pkg=pkgs.libjpeg_turbo; flags.ldLibraryGroup=true; } { vals.pkg=pkgs.libpng; flags={}; } + { vals.pkg=pkgs.libidn2; flags.ldLibraryGroup=true; } ### Docs generators { vals.pkg=pkgs.pikchr; flags={}; } @@ -165,6 +167,7 @@ } ); } + { vals.pkg=pkgs.cyclonedds; flags.ldLibraryGroup=true; flags.packageConfGroup=true; } ]; # ------------------------------------------------------------ @@ -211,6 +214,8 @@ export GI_TYPELIB_PATH="${giTypelibPackagesString}:$GI_TYPELIB_PATH" export PKG_CONFIG_PATH=${lib.escapeShellArg packageConfPackagesString} export PYTHONPATH="$PYTHONPATH:"${lib.escapeShellArg manualPythonPackages} + export CYCLONEDDS_HOME="${pkgs.cyclonedds}" + export CMAKE_PREFIX_PATH="${pkgs.cyclonedds}:$CMAKE_PREFIX_PATH" # CC, CFLAGS, and LDFLAGS are bascially all for `pip install pyaudio` export CFLAGS="$(pkg-config --cflags portaudio-2.0) $CFLAGS" export LDFLAGS="-L$(pkg-config --variable=libdir 
portaudio-2.0) $LDFLAGS" @@ -224,7 +229,7 @@ fi [ -f "$PROJECT_ROOT/motd" ] && cat "$PROJECT_ROOT/motd" - [ -f "$PROJECT_ROOT/.pre-commit-config.yaml" ] && pre-commit install --install-hooks + [ -f "$PROJECT_ROOT/.pre-commit-config.yaml" ] && [ ! -f "$PROJECT_ROOT/.git/hooks/pre-commit" ] && pre-commit install --install-hooks ''; devShells = { # basic shell (blends with your current environment) diff --git a/pyproject.toml b/pyproject.toml index 2dcf5d548e..9dea7e1921 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ name = "dimos" authors = [ {name = "Dimensional Team", email = "build@dimensionalOS.com"}, ] -version = "0.0.9" +version = "0.0.10.post1" description = "Powering agentive generalist robotics" requires-python = ">=3.10" readme = "README.md" @@ -32,52 +32,52 @@ dependencies = [ # Transport Protocols "dimos-lcm", "PyTurboJPEG==1.8.2", - # Core "numpy>=1.26.4", "scipy>=1.15.1", + "pin>=3.3.0", # Pinocchio IK library "reactivex", "asyncio==3.4.3", "sortedcontainers==2.4.0", "pydantic", "python-dotenv", + "annotation-protocol>=1.4.0", + "lazy_loader", # Multiprocess "dask[complete]==2025.5.1", "plum-dispatch==2.5.7", - # Logging "structlog>=25.5.0,<26", "colorlog==6.9.0", - # Core Msgs "opencv-python", - "open3d", - + "open3d-unofficial-arm; platform_system == 'Linux' and platform_machine == 'aarch64'", + "open3d>=0.18.0; platform_system != 'Linux' or platform_machine != 'aarch64'", # CLI "pydantic-settings>=2.11.0,<3", "textual==3.7.1", "terminaltexteffects==0.12.2", "typer>=0.19.2,<1", "plotext==5.3.2", - # Used for calculating the occupancy map. 
"numba>=0.60.0", # First version supporting Python 3.12 "llvmlite>=0.42.0", # Required by numba 0.60+ - # TODO: rerun shouldn't be required but rn its in core (there is NO WAY to use dimos without rerun rn) # remove this once rerun is optional in core "rerun-sdk>=0.20.0", + "toolz>=1.1.0", ] [project.scripts] lcmspy = "dimos.utils.cli.lcmspy.run_lcmspy:main" foxglove-bridge = "dimos.utils.cli.foxglove_bridge.run_foxglove_bridge:main" -skillspy = "dimos.utils.cli.skillspy.skillspy:main" agentspy = "dimos.utils.cli.agentspy.agentspy:main" humancli = "dimos.utils.cli.human.humanclianim:main" dimos = "dimos.robot.cli.dimos:main" +rerun-bridge = "dimos.visualization.rerun.bridge:app" +doclinks = "dimos.utils.docs.doclinks:main" [project.optional-dependencies] misc = [ @@ -126,9 +126,9 @@ visualization = [ ] agents = [ - "langchain>=1,<2", + "langchain==1.2.3", "langchain-chroma>=1,<2", - "langchain-core>=1,<2", + "langchain-core==1.2.3", "langchain-openai>=1,<2", "langchain-text-splitters>=1,<2", "langchain-huggingface>=1,<2", @@ -171,25 +171,22 @@ unitree = [ ] manipulation = [ - # Contact Graspnet Dependencies - "h5py>=3.7.0", - "pyrender>=0.1.45", - "trimesh>=3.22.0", - "python-fcl>=0.7.0.4", - "pyquaternion>=0.9.9", - "matplotlib>=3.7.1", - "rtree", - "pandas>=1.5.2", - "tqdm>=4.65.0", - "pyyaml>=6.0", - "contact-graspnet-pytorch", + # Planning (Drake) + "drake==1.45.0; sys_platform == 'darwin' and platform_machine != 'aarch64'", + "drake>=1.40.0; sys_platform != 'darwin' and platform_machine != 'aarch64'", - # piper arm + # Hardware SDKs "piper-sdk", + "xarm-python-sdk>=1.17.0", # Visualization (Optional) "kaleido>=0.2.1", "plotly>=5.9.0", + "xacro", + + # Other + "matplotlib>=3.7.1", + "pyyaml>=6.0", ] @@ -200,13 +197,11 @@ cpu = [ ] cuda = [ - "cupy-cuda12x==13.6.0", - "nvidia-nvimgcodec-cu12[all]", - "onnxruntime-gpu>=1.17.1", # Only versions supporting both cuda11 and cuda12 + "cupy-cuda12x==13.6.0; platform_machine == 'x86_64'", + "nvidia-nvimgcodec-cu12[all]; 
platform_machine == 'x86_64'", + "onnxruntime-gpu>=1.17.1; platform_machine == 'x86_64'", # Only versions supporting both cuda11 and cuda12 "ctransformers[cuda]==0.2.27", - "mmengine>=0.10.3", - "mmcv>=2.1.0", - "xformers>=0.0.20", + "xformers>=0.0.20; platform_machine == 'x86_64'", ] dev = [ @@ -226,6 +221,10 @@ dev = [ # docs "md-babel-py==1.1.1", + # LSP + "python-lsp-server[all]==1.14.0", + "python-lsp-ruff==2.3.0", + # Types "lxml-stubs>=0.5.1,<1", "pandas-stubs>=2.3.2.250926,<3", @@ -245,6 +244,14 @@ dev = [ "types-tabulate>=0.9.0.20241207,<1", "types-tensorflow>=2.18.0.20251008,<3", "types-tqdm>=4.67.0.20250809,<5", + "types-psycopg2>=2.9.21.20251012", + + # Tools + "py-spy", +] + +psql = [ + "psycopg2-binary>=2.9.11" ] sim = [ @@ -268,6 +275,32 @@ drone = [ "pymavlink" ] +dds = [ + "dimos[dev]", + "cyclonedds>=0.10.5", +] + +# Minimal dependencies for Docker modules that communicate with the DimOS host +docker = [ + "dimos-lcm", + "numpy>=1.26.4", + "scipy>=1.15.1", + "reactivex", + "dask[distributed]==2025.5.1", + "plum-dispatch==2.5.7", + "structlog>=25.5.0,<26", + "pydantic", + "pydantic-settings>=2.11.0,<3", + "typer>=0.19.2,<1", + "opencv-python-headless", + "lcm", + "sortedcontainers", + "PyTurboJPEG", + "rerun-sdk", + "open3d-unofficial-arm; platform_system == 'Linux' and platform_machine == 'aarch64'", + "open3d>=0.18.0; platform_system != 'Linux' or platform_machine != 'aarch64'", +] + base = [ "dimos[agents,web,perception,visualization,sim]", ] @@ -295,7 +328,7 @@ exclude = [ [tool.ruff.lint] extend-select = ["E", "W", "F", "B", "UP", "N", "I", "C90", "A", "RUF", "TCH"] # TODO: All of these should be fixed, but it's easier commit autofixes first -ignore = ["A001", "A002", "B008", "B017", "B019", "B023", "B024", "B026", "B904", "C901", "E402", "E501", "E721", "E722", "E741", "F401", "F403", "F811", "F821", "F821", "F821", "N801", "N802", "N803", "N806", "N812", "N813", "N813", "N816", "N817", "N999", "RUF002", "RUF003", "RUF006", "RUF009", 
"RUF012", "RUF034", "RUF043", "RUF059", "UP007"] +ignore = ["A001", "A002", "B008", "B017", "B019", "B024", "B026", "B904", "C901", "E402", "E501", "E721", "E722", "E741", "F811", "F821", "F821", "F821", "N801", "N802", "N803", "N806", "N817", "N999", "RUF003", "RUF009", "RUF012", "RUF034", "RUF043", "RUF059", "UP007"] [tool.ruff.lint.per-file-ignores] "dimos/models/Detic/*" = ["ALL"] @@ -314,27 +347,36 @@ exclude = "^dimos/models/Detic(/|$)|^dimos/rxpy_backpressure(/|$)|.*/test_.|.*/c [[tool.mypy.overrides]] module = [ - "rclpy.*", - "std_msgs.*", + "annotation_protocol", + "cyclonedds", + "cyclonedds.*", + "dimos_lcm.*", + "etils", "geometry_msgs.*", - "sensor_msgs.*", - "nav_msgs.*", - "tf2_msgs.*", + "lazy_loader", "mujoco", "mujoco_playground.*", - "etils", - "xarm.*", - "dimos_lcm.*", + "nav_msgs.*", + "open_clip", + "pinocchio", "piper_sdk.*", + "plotext", + "pydrake", + "pydrake.*", "plum.*", - "pycuda.*", "pycuda", - "plotext", - "torchreid", - "open_clip", - "pyzed.*", + "pycuda.*", "pyzed", + "pyzed.*", + "rclpy.*", + "sam2.*", + "sensor_msgs.*", + "std_msgs.*", + "tf2_msgs.*", + "torchreid", + "ultralytics.*", "unitree_webrtc_connect.*", + "xarm.*", ] ignore_missing_imports = true @@ -342,6 +384,10 @@ ignore_missing_imports = true module = ["dimos.rxpy_backpressure", "dimos.rxpy_backpressure.*"] follow_imports = "skip" +[[tool.mypy.overrides]] +module = ["pydrake", "pydrake.*"] +follow_imports = "skip" + [tool.pytest.ini_options] testpaths = ["dimos"] markers = [ @@ -351,7 +397,7 @@ markers = [ "lcm: tests that run actual LCM bus (can't execute in CI)", "module: tests that need to run directly as modules", "gpu: tests that require GPU", - "tofix: temporarily disabled test", + "cuda: tests which require CUDA (specifically CUDA not just GPU acceleration)", "e2e: end to end tests", "integration: slower integration tests", "neverending: they don't finish", @@ -361,7 +407,7 @@ env = [ "GOOGLE_MAPS_API_KEY=AIzafake_google_key", 
"PYTHONWARNINGS=ignore:cupyx.jit.rawkernel is experimental:FutureWarning", ] -addopts = "-v -p no:warnings -ra --color=yes -m 'not (vis or exclude or tool or lcm or ros or heavy or gpu or module or tofix or e2e or integration or neverending or mujoco)'" +addopts = "-v -s -p no:warnings -ra --color=yes -m 'not (vis or exclude or tool or lcm or ros or heavy or gpu or module or e2e or integration or neverending or mujoco)'" asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function" @@ -372,13 +418,5 @@ ignore = [ "*/package-lock.json", "dimos/dashboard/dimos.rbl", "dimos/web/dimos_interface/themes.json", + "dimos/manipulation/manipulation_module.py", ] - -[tool.uv] -# Build dependencies for packages that don't declare them properly -extra-build-dependencies = { contact-graspnet-pytorch = ["numpy"] } - -default-groups = [] - -[tool.uv.sources] -contact-graspnet-pytorch = { git = "https://github.com/dimensionalOS/contact_graspnet_pytorch.git" } diff --git a/setup.py b/setup.py index 013ff731a8..50ac0a37ab 100644 --- a/setup.py +++ b/setup.py @@ -13,20 +13,63 @@ # limitations under the License. import os +from pathlib import Path +import struct +import sys from pybind11.setup_helpers import Pybind11Extension, build_ext from setuptools import find_packages, setup + +def python_is_macos_universal_binary(executable: str | None = None) -> bool: + """ + Returns True if the given executable is a macOS universal (fat) binary. 
+ """ + FAT_MAGIC = 0xCAFEBABE # big-endian fat + FAT_CIGAM = 0xBEBAFECA # little-endian fat + FAT_MAGIC_64 = 0xCAFEBABF # big-endian fat 64 + FAT_CIGAM_64 = 0xBFBAFECA # little-endian fat 64 + + if executable is None: + executable = sys.executable + + path = Path(executable) + if not path.exists(): + return False + + try: + with path.open("rb") as f: + header = f.read(4) + if len(header) < 4: + return False + + magic = struct.unpack(">I", header)[0] + return magic in { + FAT_MAGIC, + FAT_CIGAM, + FAT_MAGIC_64, + FAT_CIGAM_64, + } + except OSError: + return False + + +extra_compile_args = [ + "-O3", # Maximum optimization + "-ffast-math", # Fast floating point +] +# when the python exe is a universal binary, this option fails because the compiler +# call tries to build a matching (e.g. universal) binary, clang doesn't support this option for universal binaries +# if the user is using an arm64 specific binary (ex: nix build) then the optimization exists and is useful +if not python_is_macos_universal_binary(): + extra_compile_args.append("-march=native") + # C++ extensions ext_modules = [ Pybind11Extension( "dimos.navigation.replanning_a_star.min_cost_astar_ext", [os.path.join("dimos", "navigation", "replanning_a_star", "min_cost_astar_cpp.cpp")], - extra_compile_args=[ - "-O3", # Maximum optimization - "-march=native", # Optimize for current CPU - "-ffast-math", # Fast floating point - ], + extra_compile_args=extra_compile_args, define_macros=[ ("NDEBUG", "1"), ], diff --git a/uv.lock b/uv.lock index 7380a40780..d971dcfeaa 100644 --- a/uv.lock +++ b/uv.lock @@ -2,12 +2,16 @@ version = 1 revision = 3 requires-python = ">=3.10" resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 
'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -22,11 +26,11 @@ resolution-markers = [ [[package]] name = "absl-py" -version = "2.3.1" +version = "2.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/10/2a/c93173ffa1b39c1d0395b7e842bbdc62e556ca9d8d3b5572926f3e4ca752/absl_py-2.3.1.tar.gz", hash = "sha256:a97820526f7fbfd2ec1bce83f3f25e3a14840dac0d8e02a0b71cd75db3f77fc9", size = 116588, upload-time = "2025-07-03T09:31:44.05Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/64/c7/8de93764ad66968d19329a7e0c147a2bb3c7054c554d4a119111b8f9440f/absl_py-2.4.0.tar.gz", hash = "sha256:8c6af82722b35cf71e0f4d1d47dcaebfff286e27110a99fc359349b247dfb5d4", size = 116543, upload-time = "2026-01-28T10:17:05.322Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/aa/ba0014cc4659328dc818a28827be78e6d97312ab0cb98105a770924dc11e/absl_py-2.3.1-py3-none-any.whl", hash = "sha256:eeecf07f0c2a93ace0772c92e596ace6d3d3996c042b2128459aaae2a76de11d", size = 135811, upload-time = "2025-07-03T09:31:42.253Z" }, + { url = "https://files.pythonhosted.org/packages/18/a6/907a406bb7d359e6a63f99c313846d9eec4f7e6f7437809e03aa00fa3074/absl_py-2.4.0-py3-none-any.whl", hash = "sha256:88476fd881ca8aab94ffa78b7b6c632a782ab3ba1cd19c9bd423abc4fb4cd28d", size = 135750, upload-time = "2026-01-28T10:17:04.19Z" }, ] [[package]] @@ -115,9 +119,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] +[[package]] +name = "annotation-protocol" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/fd/612c96531b1c1d1c06e5d79547faea3f805785d67481b350f3f6a9cf6dc5/annotation_protocol-1.4.0.tar.gz", hash = "sha256:15d846a4984339bab6cbf80a44623219b8cb06b4f4fee0f22c31a255d16900f8", size = 8470, upload-time = "2026-01-19T08:48:27.051Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/8b/71a5e1392dd3aca7ffeef0c3b10ea9b0e62959b5f39889702a06e11eda96/annotation_protocol-1.4.0-py3-none-any.whl", hash = "sha256:6fc66f1506f015db16fdd50fad18520cbb126a7902b27257c9fa521eb5efec60", size = 7834, upload-time = "2026-01-19T08:48:25.848Z" }, +] + [[package]] name = "anthropic" -version = "0.75.0" +version = 
"0.79.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -129,9 +142,9 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/1f/08e95f4b7e2d35205ae5dcbb4ae97e7d477fc521c275c02609e2931ece2d/anthropic-0.75.0.tar.gz", hash = "sha256:e8607422f4ab616db2ea5baacc215dd5f028da99ce2f022e33c7c535b29f3dfb", size = 439565, upload-time = "2025-11-24T20:41:45.28Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/b1/91aea3f8fd180d01d133d931a167a78a3737b3fd39ccef2ae8d6619c24fd/anthropic-0.79.0.tar.gz", hash = "sha256:8707aafb3b1176ed6c13e2b1c9fb3efddce90d17aee5d8b83a86c70dcdcca871", size = 509825, upload-time = "2026-02-07T18:06:18.388Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/1c/1cd02b7ae64302a6e06724bf80a96401d5313708651d277b1458504a1730/anthropic-0.75.0-py3-none-any.whl", hash = "sha256:ea8317271b6c15d80225a9f3c670152746e88805a7a61e14d4a374577164965b", size = 388164, upload-time = "2025-11-24T20:41:43.587Z" }, + { url = "https://files.pythonhosted.org/packages/95/b2/cc0b8e874a18d7da50b0fda8c99e4ac123f23bf47b471827c5f6f3e4a767/anthropic-0.79.0-py3-none-any.whl", hash = "sha256:04cbd473b6bbda4ca2e41dd670fe2f829a911530f01697d0a1e37321eb75f3cf", size = 405918, upload-time = "2026-02-07T18:06:20.246Z" }, ] [[package]] @@ -163,6 +176,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" }, ] +[[package]] +name = "astroid" +version = "4.0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/07/63/0adf26577da5eff6eb7a177876c1cfa213856be9926a000f65c4add9692b/astroid-4.0.4.tar.gz", hash = "sha256:986fed8bcf79fb82c78b18a53352a0b287a73817d6dbcfba3162da36667c49a0", size = 406358, upload-time = "2026-02-07T23:35:07.509Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/cf/1c5f42b110e57bc5502eb80dbc3b03d256926062519224835ef08134f1f9/astroid-4.0.4-py3-none-any.whl", hash = "sha256:52f39653876c7dec3e3afd4c2696920e05c83832b9737afc21928f2d2eb7a753", size = 276445, upload-time = "2026-02-07T23:35:05.344Z" }, +] + [[package]] name = "asttokens" version = "3.0.1" @@ -190,61 +215,74 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] +[[package]] +name = "autopep8" +version = "2.0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycodestyle" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e0/8a/9be661f5400867a09706e29f5ab99a59987fd3a4c337757365e7491fa90b/autopep8-2.0.4.tar.gz", hash = "sha256:2913064abd97b3419d1cc83ea71f042cb821f87e45b9c88cad5ad3c4ea87fe0c", size = 116472, upload-time = "2023-08-26T13:49:59.375Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/f2/e63c9f9c485cd90df8e4e7ae90fa3be2469c9641888558c7b45fa98a76f8/autopep8-2.0.4-py2.py3-none-any.whl", hash = "sha256:067959ca4a07b24dbd5345efa8325f5f58da4298dab0dde0443d5ed765de80cb", size = 45340, upload-time = "2023-08-26T13:49:56.111Z" }, +] + [[package]] name = "av" -version = "16.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/15/c3/fd72a0315bc6c943ced1105aaac6e0ec1be57c70d8a616bd05acaa21ffee/av-16.0.1.tar.gz", hash = "sha256:dd2ce779fa0b5f5889a6d9e00fbbbc39f58e247e52d31044272648fe16ff1dbf", size = 3904030, upload-time = "2025-10-13T12:28:51.082Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/3c/eefa29b7d0f5afdf7af9197bbecad8ec2ad06bcb5ac7e909c05a624b00a6/av-16.0.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:8b141aaa29a3afc96a1d467d106790782c1914628b57309eaadb8c10c299c9c0", size = 27206679, upload-time = "2025-10-13T12:24:41.145Z" }, - { url = "https://files.pythonhosted.org/packages/ac/89/a474feb07d5b94aa5af3771b0fe328056e2e0a840039b329f4fa2a1fd13a/av-16.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:4b8a08a59a5be0082af063d3f4b216e3950340121c6ea95b505a3f5f5cc8f21d", size = 21774556, upload-time = "2025-10-13T12:24:44.332Z" }, - { url = "https://files.pythonhosted.org/packages/be/e5/4361010dcac398bc224823e4b2a47803845e159af9f95164662c523770dc/av-16.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:792e7fc3c08eae005ff36486983966476e553cbb55aaeb0ec99adc4909377320", size = 38176763, upload-time = "2025-10-13T12:24:46.98Z" }, - { url = "https://files.pythonhosted.org/packages/d4/db/b27bdd20c9dc80de5b8792dae16dd6f4edf16408c0c7b28070c6228a8057/av-16.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:4e8ef5df76d8d0ee56139789f80bb90ad1a82a7e6df6e080e2e95c06fa22aea7", size = 39696277, upload-time = "2025-10-13T12:24:50.951Z" }, - { url = "https://files.pythonhosted.org/packages/4e/c8/dd48e6a3ac1e922c141475a0dc30e2b6dfdef9751b3274829889a9281cce/av-16.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4f7a6985784a7464f078e419c71f5528c3e550ee5d605e7149b4a37a111eb136", size = 39576660, upload-time = "2025-10-13T12:24:55.773Z" }, - { url = "https://files.pythonhosted.org/packages/b9/f0/223d047e2e60672a2fb5e51e28913de8d52195199f3e949cbfda1e6cd64b/av-16.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash 
= "sha256:3f45c8d7b803b6faa2a25a26de5964a0a897de68298d9c9672c7af9d65d8b48a", size = 40752775, upload-time = "2025-10-13T12:25:00.827Z" }, - { url = "https://files.pythonhosted.org/packages/18/73/73acad21c9203bc63d806e8baf42fe705eb5d36dafd1996b71ab5861a933/av-16.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:58e6faf1d9328d8cc6be14c5aadacb7d2965ed6d6ae1af32696993096543ff00", size = 32302328, upload-time = "2025-10-13T12:25:06.042Z" }, - { url = "https://files.pythonhosted.org/packages/49/d3/f2a483c5273fccd556dfa1fce14fab3b5d6d213b46e28e54e254465a2255/av-16.0.1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:e310d1fb42879df9bad2152a8db6d2ff8bf332c8c36349a09d62cc122f5070fb", size = 27191982, upload-time = "2025-10-13T12:25:10.622Z" }, - { url = "https://files.pythonhosted.org/packages/e0/39/dff28bd252131b3befd09d8587992fe18c09d5125eaefc83a6434d5f56ff/av-16.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:2f4b357e5615457a84e6b6290916b22864b76b43d5079e1a73bc27581a5b9bac", size = 21760305, upload-time = "2025-10-13T12:25:14.882Z" }, - { url = "https://files.pythonhosted.org/packages/4a/4d/2312d50a09c84a9b4269f7fea5de84f05dd2b7c7113dd961d31fad6c64c4/av-16.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:286665c77034c3a98080169b8b5586d5568a15da81fbcdaf8099252f2d232d7c", size = 38691616, upload-time = "2025-10-13T12:25:20.063Z" }, - { url = "https://files.pythonhosted.org/packages/15/9a/3d2d30b56252f998e53fced13720e2ce809c4db477110f944034e0fa4c9f/av-16.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f88de8e5b8ea29e41af4d8d61df108323d050ccfbc90f15b13ec1f99ce0e841e", size = 40216464, upload-time = "2025-10-13T12:25:24.848Z" }, - { url = "https://files.pythonhosted.org/packages/98/cb/3860054794a47715b4be0006105158c7119a57be58d9e8882b72e4d4e1dd/av-16.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0cdb71ebe4d1b241cf700f8f0c44a7d2a6602b921e16547dd68c0842113736e1", size = 40094077, upload-time = "2025-10-13T12:25:30.238Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/58/79830fb8af0a89c015250f7864bbd427dff09c70575c97847055f8a302f7/av-16.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:28c27a65d40e8cf82b6db2543f8feeb8b56d36c1938f50773494cd3b073c7223", size = 41279948, upload-time = "2025-10-13T12:25:35.24Z" }, - { url = "https://files.pythonhosted.org/packages/83/79/6e1463b04382f379f857113b851cf5f9d580a2f7bd794211cd75352f4e04/av-16.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:ffea39ac7574f234f5168f9b9602e8d4ecdd81853238ec4d661001f03a6d3f64", size = 32297586, upload-time = "2025-10-13T12:25:39.826Z" }, - { url = "https://files.pythonhosted.org/packages/44/78/12a11d7a44fdd8b26a65e2efa1d8a5826733c8887a989a78306ec4785956/av-16.0.1-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:e41a8fef85dfb2c717349f9ff74f92f9560122a9f1a94b1c6c9a8a9c9462ba71", size = 27206375, upload-time = "2025-10-13T12:25:44.423Z" }, - { url = "https://files.pythonhosted.org/packages/27/19/3a4d3882852a0ee136121979ce46f6d2867b974eb217a2c9a070939f55ad/av-16.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:6352a64b25c9f985d4f279c2902db9a92424e6f2c972161e67119616f0796cb9", size = 21752603, upload-time = "2025-10-13T12:25:49.122Z" }, - { url = "https://files.pythonhosted.org/packages/cb/6e/f7abefba6e008e2f69bebb9a17ba38ce1df240c79b36a5b5fcacf8c8fcfd/av-16.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:5201f7b4b5ed2128118cb90c2a6d64feedb0586ca7c783176896c78ffb4bbd5c", size = 38931978, upload-time = "2025-10-13T12:25:55.021Z" }, - { url = "https://files.pythonhosted.org/packages/b2/7a/1305243ab47f724fdd99ddef7309a594e669af7f0e655e11bdd2c325dfae/av-16.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:daecc2072b82b6a942acbdaa9a2e00c05234c61fef976b22713983c020b07992", size = 40549383, upload-time = "2025-10-13T12:26:00.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/32/b2/357cc063185043eb757b4a48782bff780826103bcad1eb40c3ddfc050b7e/av-16.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6573da96e8bebc3536860a7def108d7dbe1875c86517072431ced702447e6aea", size = 40241993, upload-time = "2025-10-13T12:26:06.993Z" }, - { url = "https://files.pythonhosted.org/packages/20/bb/ced42a4588ba168bf0ef1e9d016982e3ba09fde6992f1dda586fd20dcf71/av-16.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4bc064e48a8de6c087b97dd27cf4ef8c13073f0793108fbce3ecd721201b2502", size = 41532235, upload-time = "2025-10-13T12:26:12.488Z" }, - { url = "https://files.pythonhosted.org/packages/15/37/c7811eca0f318d5fd3212f7e8c3d8335f75a54907c97a89213dc580b8056/av-16.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0c669b6b6668c8ae74451c15ec6d6d8a36e4c3803dc5d9910f607a174dd18f17", size = 32296912, upload-time = "2025-10-13T12:26:19.187Z" }, - { url = "https://files.pythonhosted.org/packages/86/59/972f199ccc4f8c9e51f59e0f8962a09407396b3f6d11355e2c697ba555f9/av-16.0.1-cp313-cp313-macosx_11_0_x86_64.whl", hash = "sha256:4c61c6c120f5c5d95c711caf54e2c4a9fb2f1e613ac0a9c273d895f6b2602e44", size = 27170433, upload-time = "2025-10-13T12:26:24.673Z" }, - { url = "https://files.pythonhosted.org/packages/53/9d/0514cbc185fb20353ab25da54197fbd169a233e39efcbb26533c36a9dbb9/av-16.0.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:7ecc2e41320c69095f44aff93470a0d32c30892b2dbad0a08040441c81efa379", size = 21717654, upload-time = "2025-10-13T12:26:29.12Z" }, - { url = "https://files.pythonhosted.org/packages/32/8c/881409dd124b4e07d909d2b70568acb21126fc747656390840a2238651c9/av-16.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:036f0554d6faef3f4a94acaeb0cedd388e3ab96eb0eb5a14ec27c17369c466c9", size = 38651601, upload-time = "2025-10-13T12:26:33.919Z" }, - { url = 
"https://files.pythonhosted.org/packages/35/fd/867ba4cc3ab504442dc89b0c117e6a994fc62782eb634c8f31304586f93e/av-16.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:876415470a62e4a3550cc38db2fc0094c25e64eea34d7293b7454125d5958190", size = 40278604, upload-time = "2025-10-13T12:26:39.2Z" }, - { url = "https://files.pythonhosted.org/packages/b3/87/63cde866c0af09a1fa9727b4f40b34d71b0535785f5665c27894306f1fbc/av-16.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:56902a06bd0828d13f13352874c370670882048267191ff5829534b611ba3956", size = 39984854, upload-time = "2025-10-13T12:26:44.581Z" }, - { url = "https://files.pythonhosted.org/packages/71/3b/8f40a708bff0e6b0f957836e2ef1f4d4429041cf8d99a415a77ead8ac8a3/av-16.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fe988c2bf0fc2d952858f791f18377ea4ae4e19ba3504793799cd6c2a2562edf", size = 41270352, upload-time = "2025-10-13T12:26:50.817Z" }, - { url = "https://files.pythonhosted.org/packages/1e/b5/c114292cb58a7269405ae13b7ba48c7d7bfeebbb2e4e66c8073c065a4430/av-16.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:708a66c248848029bf518f0482b81c5803846f1b597ef8013b19c014470b620f", size = 32273242, upload-time = "2025-10-13T12:26:55.788Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e9/a5b714bc078fdcca8b46c8a0b38484ae5c24cd81d9c1703d3e8ae2b57259/av-16.0.1-cp313-cp313t-macosx_11_0_x86_64.whl", hash = "sha256:79a77ee452537030c21a0b41139bedaf16629636bf764b634e93b99c9d5f4558", size = 27248984, upload-time = "2025-10-13T12:27:00.564Z" }, - { url = "https://files.pythonhosted.org/packages/06/ef/ff777aaf1f88e3f6ce94aca4c5806a0c360e68d48f9d9f0214e42650f740/av-16.0.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:080823a6ff712f81e7089ae9756fb1512ca1742a138556a852ce50f58e457213", size = 21828098, upload-time = "2025-10-13T12:27:05.433Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/d7/a484358d24a42bedde97f61f5d6ee568a7dd866d9df6e33731378db92d9e/av-16.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:04e00124afa8b46a850ed48951ddda61de874407fb8307d6a875bba659d5727e", size = 40051697, upload-time = "2025-10-13T12:27:10.525Z" }, - { url = "https://files.pythonhosted.org/packages/73/87/6772d6080837da5d5c810a98a95bde6977e1f5a6e2e759e8c9292af9ec69/av-16.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:bc098c1c6dc4e7080629a7e9560e67bd4b5654951e17e5ddfd2b1515cfcd37db", size = 41352596, upload-time = "2025-10-13T12:27:16.217Z" }, - { url = "https://files.pythonhosted.org/packages/bd/58/fe448c60cf7f85640a0ed8936f16bac874846aa35e1baa521028949c1ea3/av-16.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e6ffd3559a72c46a76aa622630751a821499ba5a780b0047ecc75105d43a6b61", size = 41183156, upload-time = "2025-10-13T12:27:21.574Z" }, - { url = "https://files.pythonhosted.org/packages/85/c6/a039a0979d0c278e1bed6758d5a6186416c3ccb8081970df893fdf9a0d99/av-16.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7a3f1a36b550adadd7513f4f5ee956f9e06b01a88e59f3150ef5fec6879d6f79", size = 42302331, upload-time = "2025-10-13T12:27:26.953Z" }, - { url = "https://files.pythonhosted.org/packages/18/7b/2ca4a9e3609ff155436dac384e360f530919cb1e328491f7df294be0f0dc/av-16.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:c6de794abe52b8c0be55d8bb09ade05905efa74b1a5ab4860b4b9c2bfb6578bf", size = 32462194, upload-time = "2025-10-13T12:27:32.942Z" }, - { url = "https://files.pythonhosted.org/packages/14/9a/6d17e379906cf53a7a44dfac9cf7e4b2e7df2082ba2dbf07126055effcc1/av-16.0.1-cp314-cp314-macosx_11_0_x86_64.whl", hash = "sha256:4b55ba69a943ae592ad7900da67129422954789de9dc384685d6b529925f542e", size = 27167101, upload-time = "2025-10-13T12:27:38.886Z" }, - { url = 
"https://files.pythonhosted.org/packages/6c/34/891816cd82d5646cb5a51d201d20be0a578232536d083b7d939734258067/av-16.0.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:d4a0c47b6c9bbadad8909b82847f5fe64a608ad392f0b01704e427349bcd9a47", size = 21722708, upload-time = "2025-10-13T12:27:43.29Z" }, - { url = "https://files.pythonhosted.org/packages/1d/20/c24ad34038423ab8c9728cef3301e0861727c188442dcfd70a4a10834c63/av-16.0.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:8bba52f3035708456f6b1994d10b0371b45cfd8f917b5e84ff81aef4ec2f08bf", size = 38638842, upload-time = "2025-10-13T12:27:49.776Z" }, - { url = "https://files.pythonhosted.org/packages/d7/32/034412309572ba3ad713079d07a3ffc13739263321aece54a3055d7a4f1f/av-16.0.1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:08e34c7e7b5e55e29931180bbe21095e1874ac120992bf6b8615d39574487617", size = 40197789, upload-time = "2025-10-13T12:27:55.688Z" }, - { url = "https://files.pythonhosted.org/packages/fb/9c/40496298c32f9094e7df28641c5c58aa6fb07554dc232a9ac98a9894376f/av-16.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0d6250ab9db80c641b299987027c987f14935ea837ea4c02c5f5182f6b69d9e5", size = 39980829, upload-time = "2025-10-13T12:28:01.507Z" }, - { url = "https://files.pythonhosted.org/packages/4a/7e/5c38268ac1d424f309b13b2de4597ad28daea6039ee5af061e62918b12a8/av-16.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7b621f28d8bcbb07cdcd7b18943ddc040739ad304545715ae733873b6e1b739d", size = 41205928, upload-time = "2025-10-13T12:28:08.431Z" }, - { url = "https://files.pythonhosted.org/packages/e3/07/3176e02692d8753a6c4606021c60e4031341afb56292178eee633b6760a4/av-16.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:92101f49082392580c9dba4ba2fe5b931b3bb0fb75a1a848bfb9a11ded68be91", size = 32272836, upload-time = "2025-10-13T12:28:13.405Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/47/10e03b88de097385d1550cbb6d8de96159131705c13adb92bd9b7e677425/av-16.0.1-cp314-cp314t-macosx_11_0_x86_64.whl", hash = "sha256:07c464bf2bc362a154eccc82e235ef64fd3aaf8d76fc8ed63d0ae520943c6d3f", size = 27248864, upload-time = "2025-10-13T12:28:17.467Z" }, - { url = "https://files.pythonhosted.org/packages/b1/60/7447f206bec3e55e81371f1989098baa2fe9adb7b46c149e6937b7e7c1ca/av-16.0.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:750da0673864b669c95882c7b25768cd93ece0e47010d74ebcc29dbb14d611f8", size = 21828185, upload-time = "2025-10-13T12:28:21.461Z" }, - { url = "https://files.pythonhosted.org/packages/68/48/ee2680e7a01bc4911bbe902b814346911fa2528697a44f3043ee68e0f07e/av-16.0.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:0b7c0d060863b2e341d07cd26851cb9057b7979814148b028fb7ee5d5eb8772d", size = 40040572, upload-time = "2025-10-13T12:28:26.585Z" }, - { url = "https://files.pythonhosted.org/packages/da/68/2c43d28871721ae07cde432d6e36ae2f7035197cbadb43764cc5bf3d4b33/av-16.0.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:e67c2eca6023ca7d76b0709c5f392b23a5defba499f4c262411f8155b1482cbd", size = 41344288, upload-time = "2025-10-13T12:28:32.512Z" }, - { url = "https://files.pythonhosted.org/packages/ec/7f/1d801bff43ae1af4758c45eee2eaae64f303bbb460e79f352f08587fd179/av-16.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e3243d54d84986e8fbdc1946db634b0c41fe69b6de35a99fa8b763e18503d040", size = 41175142, upload-time = "2025-10-13T12:28:38.356Z" }, - { url = "https://files.pythonhosted.org/packages/e4/06/bb363138687066bbf8997c1433dbd9c81762bae120955ea431fb72d69d26/av-16.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bcf73efab5379601e6510abd7afe5f397d0f6defe69b1610c2f37a4a17996b", size = 42293932, upload-time = "2025-10-13T12:28:43.442Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/15/5e713098a085f970ccf88550194d277d244464d7b3a7365ad92acb4b6dc1/av-16.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6368d4ff153d75469d2a3217bc403630dc870a72fe0a014d9135de550d731a86", size = 32460624, upload-time = "2025-10-13T12:28:48.767Z" }, +version = "16.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/78/cd/3a83ffbc3cc25b39721d174487fb0d51a76582f4a1703f98e46170ce83d4/av-16.1.0.tar.gz", hash = "sha256:a094b4fd87a3721dacf02794d3d2c82b8d712c85b9534437e82a8a978c175ffd", size = 4285203, upload-time = "2026-01-11T07:31:33.772Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/51/2217a9249409d2e88e16e3f16f7c0def9fd3e7ffc4238b2ec211f9935bdb/av-16.1.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:2395748b0c34fe3a150a1721e4f3d4487b939520991b13e7b36f8926b3b12295", size = 26942590, upload-time = "2026-01-09T20:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/bf/cd/a7070f4febc76a327c38808e01e2ff6b94531fe0b321af54ea3915165338/av-16.1.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:72d7ac832710a158eeb7a93242370aa024a7646516291c562ee7f14a7ea881fd", size = 21507910, upload-time = "2026-01-09T20:18:02.309Z" }, + { url = "https://files.pythonhosted.org/packages/ae/30/ec812418cd9b297f0238fe20eb0747d8a8b68d82c5f73c56fe519a274143/av-16.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6cbac833092e66b6b0ac4d81ab077970b8ca874951e9c3974d41d922aaa653ed", size = 38738309, upload-time = "2026-01-09T20:18:04.701Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b8/6c5795bf1f05f45c5261f8bce6154e0e5e86b158a6676650ddd77c28805e/av-16.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:eb990672d97c18f99c02f31c8d5750236f770ffe354b5a52c5f4d16c5e65f619", size = 40293006, upload-time = "2026-01-09T20:18:07.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/44/5e183bcb9333fc3372ee6e683be8b0c9b515a506894b2d32ff465430c074/av-16.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:05ad70933ac3b8ef896a820ea64b33b6cca91a5fac5259cb9ba7fa010435be15", size = 40123516, upload-time = "2026-01-09T20:18:09.955Z" }, + { url = "https://files.pythonhosted.org/packages/12/1d/b5346d582a3c3d958b4d26a2cc63ce607233582d956121eb20d2bbe55c2e/av-16.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d831a1062a3c47520bf99de6ec682bd1d64a40dfa958e5457bb613c5270e7ce3", size = 41463289, upload-time = "2026-01-09T20:18:12.459Z" }, + { url = "https://files.pythonhosted.org/packages/fa/31/acc946c0545f72b8d0d74584cb2a0ade9b7dfe2190af3ef9aa52a2e3c0b1/av-16.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:358ab910fef3c5a806c55176f2b27e5663b33c4d0a692dafeb049c6ed71f8aff", size = 31754959, upload-time = "2026-01-09T20:18:14.718Z" }, + { url = "https://files.pythonhosted.org/packages/48/d0/b71b65d1b36520dcb8291a2307d98b7fc12329a45614a303ff92ada4d723/av-16.1.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:e88ad64ee9d2b9c4c5d891f16c22ae78e725188b8926eb88187538d9dd0b232f", size = 26927747, upload-time = "2026-01-09T20:18:16.976Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/720a5a6ccdee06eafa211b945b0a450e3a0b8fc3d12922f0f3c454d870d2/av-16.1.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:cb296073fa6935724de72593800ba86ae49ed48af03960a4aee34f8a611f442b", size = 21492232, upload-time = "2026-01-09T20:18:19.266Z" }, + { url = "https://files.pythonhosted.org/packages/8e/4f/a1ba8d922f2f6d1a3d52419463ef26dd6c4d43ee364164a71b424b5ae204/av-16.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:720edd4d25aa73723c1532bb0597806d7b9af5ee34fc02358782c358cfe2f879", size = 39291737, upload-time = "2026-01-09T20:18:21.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/31/fc62b9fe8738d2693e18d99f040b219e26e8df894c10d065f27c6b4f07e3/av-16.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c7f2bc703d0df260a1fdf4de4253c7f5500ca9fc57772ea241b0cb241bcf972e", size = 40846822, upload-time = "2026-01-09T20:18:24.275Z" }, + { url = "https://files.pythonhosted.org/packages/53/10/ab446583dbce730000e8e6beec6ec3c2753e628c7f78f334a35cad0317f4/av-16.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d69c393809babada7d54964d56099e4b30a3e1f8b5736ca5e27bd7be0e0f3c83", size = 40675604, upload-time = "2026-01-09T20:18:26.866Z" }, + { url = "https://files.pythonhosted.org/packages/31/d7/1003be685277005f6d63fd9e64904ee222fe1f7a0ea70af313468bb597db/av-16.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:441892be28582356d53f282873c5a951592daaf71642c7f20165e3ddcb0b4c63", size = 42015955, upload-time = "2026-01-09T20:18:29.461Z" }, + { url = "https://files.pythonhosted.org/packages/2f/4a/fa2a38ee9306bf4579f556f94ecbc757520652eb91294d2a99c7cf7623b9/av-16.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:273a3e32de64819e4a1cd96341824299fe06f70c46f2288b5dc4173944f0fd62", size = 31750339, upload-time = "2026-01-09T20:18:32.249Z" }, + { url = "https://files.pythonhosted.org/packages/9c/84/2535f55edcd426cebec02eb37b811b1b0c163f26b8d3f53b059e2ec32665/av-16.1.0-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:640f57b93f927fba8689f6966c956737ee95388a91bd0b8c8b5e0481f73513d6", size = 26945785, upload-time = "2026-01-09T20:18:34.486Z" }, + { url = "https://files.pythonhosted.org/packages/b6/17/ffb940c9e490bf42e86db4db1ff426ee1559cd355a69609ec1efe4d3a9eb/av-16.1.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ae3fb658eec00852ebd7412fdc141f17f3ddce8afee2d2e1cf366263ad2a3b35", size = 21481147, upload-time = "2026-01-09T20:18:36.716Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/c1/e0d58003d2d83c3921887d5c8c9b8f5f7de9b58dc2194356a2656a45cfdc/av-16.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:27ee558d9c02a142eebcbe55578a6d817fedfde42ff5676275504e16d07a7f86", size = 39517197, upload-time = "2026-01-11T09:57:31.937Z" }, + { url = "https://files.pythonhosted.org/packages/32/77/787797b43475d1b90626af76f80bfb0c12cfec5e11eafcfc4151b8c80218/av-16.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7ae547f6d5fa31763f73900d43901e8c5fa6367bb9a9840978d57b5a7ae14ed2", size = 41174337, upload-time = "2026-01-11T09:57:35.792Z" }, + { url = "https://files.pythonhosted.org/packages/8e/ac/d90df7f1e3b97fc5554cf45076df5045f1e0a6adf13899e10121229b826c/av-16.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8cf065f9d438e1921dc31fc7aa045790b58aee71736897866420d80b5450f62a", size = 40817720, upload-time = "2026-01-11T09:57:39.039Z" }, + { url = "https://files.pythonhosted.org/packages/80/6f/13c3a35f9dbcebafd03fe0c4cbd075d71ac8968ec849a3cfce406c35a9d2/av-16.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a345877a9d3cc0f08e2bc4ec163ee83176864b92587afb9d08dff50f37a9a829", size = 42267396, upload-time = "2026-01-11T09:57:42.115Z" }, + { url = "https://files.pythonhosted.org/packages/c8/b9/275df9607f7fb44317ccb1d4be74827185c0d410f52b6e2cd770fe209118/av-16.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:f49243b1d27c91cd8c66fdba90a674e344eb8eb917264f36117bf2b6879118fd", size = 31752045, upload-time = "2026-01-11T09:57:45.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/2a/63797a4dde34283dd8054219fcb29294ba1c25d68ba8c8c8a6ae53c62c45/av-16.1.0-cp313-cp313-macosx_11_0_x86_64.whl", hash = "sha256:ce2a1b3d8bf619f6c47a9f28cfa7518ff75ddd516c234a4ee351037b05e6a587", size = 26916715, upload-time = "2026-01-11T09:57:47.682Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/c4/0b49cf730d0ae8cda925402f18ae814aef351f5772d14da72dd87ff66448/av-16.1.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:408dbe6a2573ca58a855eb8cd854112b33ea598651902c36709f5f84c991ed8e", size = 21452167, upload-time = "2026-01-11T09:57:50.606Z" }, + { url = "https://files.pythonhosted.org/packages/51/23/408806503e8d5d840975aad5699b153aaa21eb6de41ade75248a79b7a37f/av-16.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:57f657f86652a160a8a01887aaab82282f9e629abf94c780bbdbb01595d6f0f7", size = 39215659, upload-time = "2026-01-11T09:57:53.757Z" }, + { url = "https://files.pythonhosted.org/packages/c4/19/a8528d5bba592b3903f44c28dab9cc653c95fcf7393f382d2751a1d1523e/av-16.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:adbad2b355c2ee4552cac59762809d791bda90586d134a33c6f13727fb86cb3a", size = 40874970, upload-time = "2026-01-11T09:57:56.802Z" }, + { url = "https://files.pythonhosted.org/packages/e8/24/2dbcdf0e929ad56b7df078e514e7bd4ca0d45cba798aff3c8caac097d2f7/av-16.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f42e1a68ec2aebd21f7eb6895be69efa6aa27eec1670536876399725bbda4b99", size = 40530345, upload-time = "2026-01-11T09:58:00.421Z" }, + { url = "https://files.pythonhosted.org/packages/54/27/ae91b41207f34e99602d1c72ab6ffd9c51d7c67e3fbcd4e3a6c0e54f882c/av-16.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58fe47aeaef0f100c40ec8a5de9abbd37f118d3ca03829a1009cf288e9aef67c", size = 41972163, upload-time = "2026-01-11T09:58:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/fc/7a/22158fb923b2a9a00dfab0e96ef2e8a1763a94dd89e666a5858412383d46/av-16.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:565093ebc93b2f4b76782589564869dadfa83af5b852edebedd8fee746457d06", size = 31729230, upload-time = "2026-01-11T09:58:07.254Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/f1/878f8687d801d6c4565d57ebec08449c46f75126ebca8e0fed6986599627/av-16.1.0-cp313-cp313t-macosx_11_0_x86_64.whl", hash = "sha256:574081a24edb98343fd9f473e21ae155bf61443d4ec9d7708987fa597d6b04b2", size = 27008769, upload-time = "2026-01-11T09:58:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/30/f1/bd4ce8c8b5cbf1d43e27048e436cbc9de628d48ede088a1d0a993768eb86/av-16.1.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:9ab00ea29c25ebf2ea1d1e928d7babb3532d562481c5d96c0829212b70756ad0", size = 21590588, upload-time = "2026-01-11T09:58:12.629Z" }, + { url = "https://files.pythonhosted.org/packages/1d/dd/c81f6f9209201ff0b5d5bed6da6c6e641eef52d8fbc930d738c3f4f6f75d/av-16.1.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:a84a91188c1071f238a9523fd42dbe567fb2e2607b22b779851b2ce0eac1b560", size = 40638029, upload-time = "2026-01-11T09:58:15.399Z" }, + { url = "https://files.pythonhosted.org/packages/15/4d/07edff82b78d0459a6e807e01cd280d3180ce832efc1543de80d77676722/av-16.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c2cd0de4dd022a7225ff224fde8e7971496d700be41c50adaaa26c07bb50bf97", size = 41970776, upload-time = "2026-01-11T09:58:19.075Z" }, + { url = "https://files.pythonhosted.org/packages/da/9d/1f48b354b82fa135d388477cd1b11b81bdd4384bd6a42a60808e2ec2d66b/av-16.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0816143530624a5a93bc5494f8c6eeaf77549b9366709c2ac8566c1e9bff6df5", size = 41764751, upload-time = "2026-01-11T09:58:22.788Z" }, + { url = "https://files.pythonhosted.org/packages/2f/c7/a509801e98db35ec552dd79da7bdbcff7104044bfeb4c7d196c1ce121593/av-16.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e3a28053af29644696d0c007e897d19b1197585834660a54773e12a40b16974c", size = 43034355, upload-time = "2026-01-11T09:58:26.125Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/8b/e5f530d9e8f640da5f5c5f681a424c65f9dd171c871cd255d8a861785a6e/av-16.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e3e67144a202b95ed299d165232533989390a9ea3119d37eccec697dc6dbb0c", size = 31947047, upload-time = "2026-01-11T09:58:31.867Z" }, + { url = "https://files.pythonhosted.org/packages/df/18/8812221108c27d19f7e5f486a82c827923061edf55f906824ee0fcaadf50/av-16.1.0-cp314-cp314-macosx_11_0_x86_64.whl", hash = "sha256:39a634d8e5a87e78ea80772774bfd20c0721f0d633837ff185f36c9d14ffede4", size = 26916179, upload-time = "2026-01-11T09:58:36.506Z" }, + { url = "https://files.pythonhosted.org/packages/38/ef/49d128a9ddce42a2766fe2b6595bd9c49e067ad8937a560f7838a541464e/av-16.1.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:0ba32fb9e9300948a7fa9f8a3fc686e6f7f77599a665c71eb2118fdfd2c743f9", size = 21460168, upload-time = "2026-01-11T09:58:39.231Z" }, + { url = "https://files.pythonhosted.org/packages/e6/a9/b310d390844656fa74eeb8c2750e98030877c75b97551a23a77d3f982741/av-16.1.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:ca04d17815182d34ce3edc53cbda78a4f36e956c0fd73e3bab249872a831c4d7", size = 39210194, upload-time = "2026-01-11T09:58:42.138Z" }, + { url = "https://files.pythonhosted.org/packages/0c/7b/e65aae179929d0f173af6e474ad1489b5b5ad4c968a62c42758d619e54cf/av-16.1.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:ee0e8de2e124a9ef53c955fe2add6ee7c56cc8fd83318265549e44057db77142", size = 40811675, upload-time = "2026-01-11T09:58:45.871Z" }, + { url = "https://files.pythonhosted.org/packages/54/3f/5d7edefd26b6a5187d6fac0f5065ee286109934f3dea607ef05e53f05b31/av-16.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:22bf77a2f658827043a1e184b479c3bf25c4c43ab32353677df2d119f080e28f", size = 40543942, upload-time = "2026-01-11T09:58:49.759Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/24/f8b17897b67be0900a211142f5646a99d896168f54d57c81f3e018853796/av-16.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2dd419d262e6a71cab206d80bbf28e0a10d0f227b671cdf5e854c028faa2d043", size = 41924336, upload-time = "2026-01-11T09:58:53.344Z" }, + { url = "https://files.pythonhosted.org/packages/1c/cf/d32bc6bbbcf60b65f6510c54690ed3ae1c4ca5d9fafbce835b6056858686/av-16.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:53585986fd431cd436f290fba662cfb44d9494fbc2949a183de00acc5b33fa88", size = 31735077, upload-time = "2026-01-11T09:58:56.684Z" }, + { url = "https://files.pythonhosted.org/packages/53/f4/9b63dc70af8636399bd933e9df4f3025a0294609510239782c1b746fc796/av-16.1.0-cp314-cp314t-macosx_11_0_x86_64.whl", hash = "sha256:76f5ed8495cf41e1209a5775d3699dc63fdc1740b94a095e2485f13586593205", size = 27014423, upload-time = "2026-01-11T09:58:59.703Z" }, + { url = "https://files.pythonhosted.org/packages/d1/da/787a07a0d6ed35a0888d7e5cfb8c2ffa202f38b7ad2c657299fac08eb046/av-16.1.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:8d55397190f12a1a3ae7538be58c356cceb2bf50df1b33523817587748ce89e5", size = 21595536, upload-time = "2026-01-11T09:59:02.508Z" }, + { url = "https://files.pythonhosted.org/packages/d8/f4/9a7d8651a611be6e7e3ab7b30bb43779899c8cac5f7293b9fb634c44a3f3/av-16.1.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:9d51d9037437218261b4bbf9df78a95e216f83d7774fbfe8d289230b5b2e28e2", size = 40642490, upload-time = "2026-01-11T09:59:05.842Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e4/eb79bc538a94b4ff93cd4237d00939cba797579f3272490dd0144c165a21/av-16.1.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:0ce07a89c15644407f49d942111ca046e323bbab0a9078ff43ee57c9b4a50dad", size = 41976905, upload-time = "2026-01-11T09:59:09.169Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/f5/f6db0dd86b70167a4d55ee0d9d9640983c570d25504f2bde42599f38241e/av-16.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:cac0c074892ea97113b53556ff41c99562db7b9f09f098adac1f08318c2acad5", size = 41770481, upload-time = "2026-01-11T09:59:12.74Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/33651d658e45e16ab7671ea5fcf3d20980ea7983234f4d8d0c63c65581a5/av-16.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7dec3dcbc35a187ce450f65a2e0dda820d5a9e6553eea8344a1459af11c98649", size = 43036824, upload-time = "2026-01-11T09:59:16.507Z" }, + { url = "https://files.pythonhosted.org/packages/83/41/7f13361db54d7e02f11552575c0384dadaf0918138f4eaa82ea03a9f9580/av-16.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:6f90dc082ff2068ddbe77618400b44d698d25d9c4edac57459e250c16b33d700", size = 31948164, upload-time = "2026-01-11T09:59:19.501Z" }, ] [[package]] @@ -372,6 +410,50 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1d/4f/02d3cb62a1b0b5a1ca7ff03dce3606be1bf3ead4744f47eb762dbf471069/bitsandbytes-0.49.1-py3-none-manylinux_2_24_x86_64.whl", hash = "sha256:e7940bf32457dc2e553685285b2a86e82f5ec10b2ae39776c408714f9ae6983c", size = 59054193, upload-time = "2026-01-08T14:31:31.743Z" }, ] +[[package]] +name = "black" +version = "26.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "pytokens" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/13/88/560b11e521c522440af991d46848a2bde64b5f7202ec14e1f46f9509d328/black-26.1.0.tar.gz", hash = "sha256:d294ac3340eef9c9eb5d29288e96dc719ff269a88e27b396340459dd85da4c58", size = 658785, upload-time = "2026-01-18T04:50:11.993Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/51/1b/523329e713f965ad0ea2b7a047eeb003007792a0353622ac7a8cb2ee6fef/black-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ca699710dece84e3ebf6e92ee15f5b8f72870ef984bf944a57a777a48357c168", size = 1849661, upload-time = "2026-01-18T04:59:12.425Z" }, + { url = "https://files.pythonhosted.org/packages/14/82/94c0640f7285fa71c2f32879f23e609dd2aa39ba2641f395487f24a578e7/black-26.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e8e75dabb6eb83d064b0db46392b25cabb6e784ea624219736e8985a6b3675d", size = 1689065, upload-time = "2026-01-18T04:59:13.993Z" }, + { url = "https://files.pythonhosted.org/packages/f0/78/474373cbd798f9291ed8f7107056e343fd39fef42de4a51c7fd0d360840c/black-26.1.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb07665d9a907a1a645ee41a0df8a25ffac8ad9c26cdb557b7b88eeeeec934e0", size = 1751502, upload-time = "2026-01-18T04:59:15.971Z" }, + { url = "https://files.pythonhosted.org/packages/29/89/59d0e350123f97bc32c27c4d79563432d7f3530dca2bff64d855c178af8b/black-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:7ed300200918147c963c87700ccf9966dceaefbbb7277450a8d646fc5646bf24", size = 1400102, upload-time = "2026-01-18T04:59:17.8Z" }, + { url = "https://files.pythonhosted.org/packages/e1/bc/5d866c7ae1c9d67d308f83af5462ca7046760158bbf142502bad8f22b3a1/black-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:c5b7713daea9bf943f79f8c3b46f361cc5229e0e604dcef6a8bb6d1c37d9df89", size = 1207038, upload-time = "2026-01-18T04:59:19.543Z" }, + { url = "https://files.pythonhosted.org/packages/30/83/f05f22ff13756e1a8ce7891db517dbc06200796a16326258268f4658a745/black-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3cee1487a9e4c640dc7467aaa543d6c0097c391dc8ac74eb313f2fbf9d7a7cb5", size = 1831956, upload-time = "2026-01-18T04:59:21.38Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/f2/b2c570550e39bedc157715e43927360312d6dd677eed2cc149a802577491/black-26.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d62d14ca31c92adf561ebb2e5f2741bf8dea28aef6deb400d49cca011d186c68", size = 1672499, upload-time = "2026-01-18T04:59:23.257Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d7/990d6a94dc9e169f61374b1c3d4f4dd3037e93c2cc12b6f3b12bc663aa7b/black-26.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb1dafbbaa3b1ee8b4550a84425aac8874e5f390200f5502cf3aee4a2acb2f14", size = 1735431, upload-time = "2026-01-18T04:59:24.729Z" }, + { url = "https://files.pythonhosted.org/packages/36/1c/cbd7bae7dd3cb315dfe6eeca802bb56662cc92b89af272e014d98c1f2286/black-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:101540cb2a77c680f4f80e628ae98bd2bd8812fb9d72ade4f8995c5ff019e82c", size = 1400468, upload-time = "2026-01-18T04:59:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/59/b1/9fe6132bb2d0d1f7094613320b56297a108ae19ecf3041d9678aec381b37/black-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:6f3977a16e347f1b115662be07daa93137259c711e526402aa444d7a88fdc9d4", size = 1207332, upload-time = "2026-01-18T04:59:28.711Z" }, + { url = "https://files.pythonhosted.org/packages/f5/13/710298938a61f0f54cdb4d1c0baeb672c01ff0358712eddaf29f76d32a0b/black-26.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6eeca41e70b5f5c84f2f913af857cf2ce17410847e1d54642e658e078da6544f", size = 1878189, upload-time = "2026-01-18T04:59:30.682Z" }, + { url = "https://files.pythonhosted.org/packages/79/a6/5179beaa57e5dbd2ec9f1c64016214057b4265647c62125aa6aeffb05392/black-26.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dd39eef053e58e60204f2cdf059e2442e2eb08f15989eefe259870f89614c8b6", size = 1700178, upload-time = "2026-01-18T04:59:32.387Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/04/c96f79d7b93e8f09d9298b333ca0d31cd9b2ee6c46c274fd0f531de9dc61/black-26.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9459ad0d6cd483eacad4c6566b0f8e42af5e8b583cee917d90ffaa3778420a0a", size = 1777029, upload-time = "2026-01-18T04:59:33.767Z" }, + { url = "https://files.pythonhosted.org/packages/49/f9/71c161c4c7aa18bdda3776b66ac2dc07aed62053c7c0ff8bbda8c2624fe2/black-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a19915ec61f3a8746e8b10adbac4a577c6ba9851fa4a9e9fbfbcf319887a5791", size = 1406466, upload-time = "2026-01-18T04:59:35.177Z" }, + { url = "https://files.pythonhosted.org/packages/4a/8b/a7b0f974e473b159d0ac1b6bcefffeb6bec465898a516ee5cc989503cbc7/black-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:643d27fb5facc167c0b1b59d0315f2674a6e950341aed0fc05cf307d22bf4954", size = 1216393, upload-time = "2026-01-18T04:59:37.18Z" }, + { url = "https://files.pythonhosted.org/packages/79/04/fa2f4784f7237279332aa735cdfd5ae2e7730db0072fb2041dadda9ae551/black-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ba1d768fbfb6930fc93b0ecc32a43d8861ded16f47a40f14afa9bb04ab93d304", size = 1877781, upload-time = "2026-01-18T04:59:39.054Z" }, + { url = "https://files.pythonhosted.org/packages/cf/ad/5a131b01acc0e5336740a039628c0ab69d60cf09a2c87a4ec49f5826acda/black-26.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b807c240b64609cb0e80d2200a35b23c7df82259f80bef1b2c96eb422b4aac9", size = 1699670, upload-time = "2026-01-18T04:59:41.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/7c/b05f22964316a52ab6b4265bcd52c0ad2c30d7ca6bd3d0637e438fc32d6e/black-26.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1de0f7d01cc894066a1153b738145b194414cc6eeaad8ef4397ac9abacf40f6b", size = 1775212, upload-time = "2026-01-18T04:59:42.545Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/a3/e8d1526bea0446e040193185353920a9506eab60a7d8beb062029129c7d2/black-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:91a68ae46bf07868963671e4d05611b179c2313301bd756a89ad4e3b3db2325b", size = 1409953, upload-time = "2026-01-18T04:59:44.357Z" }, + { url = "https://files.pythonhosted.org/packages/c7/5a/d62ebf4d8f5e3a1daa54adaab94c107b57be1b1a2f115a0249b41931e188/black-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:be5e2fe860b9bd9edbf676d5b60a9282994c03fbbd40fe8f5e75d194f96064ca", size = 1217707, upload-time = "2026-01-18T04:59:45.719Z" }, + { url = "https://files.pythonhosted.org/packages/6a/83/be35a175aacfce4b05584ac415fd317dd6c24e93a0af2dcedce0f686f5d8/black-26.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:9dc8c71656a79ca49b8d3e2ce8103210c9481c57798b48deeb3a8bb02db5f115", size = 1871864, upload-time = "2026-01-18T04:59:47.586Z" }, + { url = "https://files.pythonhosted.org/packages/a5/f5/d33696c099450b1274d925a42b7a030cd3ea1f56d72e5ca8bbed5f52759c/black-26.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b22b3810451abe359a964cc88121d57f7bce482b53a066de0f1584988ca36e79", size = 1701009, upload-time = "2026-01-18T04:59:49.443Z" }, + { url = "https://files.pythonhosted.org/packages/1b/87/670dd888c537acb53a863bc15abbd85b22b429237d9de1b77c0ed6b79c42/black-26.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:53c62883b3f999f14e5d30b5a79bd437236658ad45b2f853906c7cbe79de00af", size = 1767806, upload-time = "2026-01-18T04:59:50.769Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9c/cd3deb79bfec5bcf30f9d2100ffeec63eecce826eb63e3961708b9431ff1/black-26.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:f016baaadc423dc960cdddf9acae679e71ee02c4c341f78f3179d7e4819c095f", size = 1433217, upload-time = "2026-01-18T04:59:52.218Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/29/f3be41a1cf502a283506f40f5d27203249d181f7a1a2abce1c6ce188035a/black-26.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:66912475200b67ef5a0ab665011964bf924745103f51977a78b4fb92a9fc1bf0", size = 1245773, upload-time = "2026-01-18T04:59:54.457Z" }, + { url = "https://files.pythonhosted.org/packages/e4/3d/51bdb3ecbfadfaf825ec0c75e1de6077422b4afa2091c6c9ba34fbfc0c2d/black-26.1.0-py3-none-any.whl", hash = "sha256:1054e8e47ebd686e078c0bb0eaf31e6ce69c966058d122f2c0c950311f9f3ede", size = 204010, upload-time = "2026-01-18T04:50:09.978Z" }, +] + [[package]] name = "blinker" version = "1.9.0" @@ -393,7 +475,8 @@ dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "packaging" }, - { name = "pandas" }, + { name = "pandas", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "pandas", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pillow" }, { name = "pyyaml" }, { name = "tornado", marker = "sys_platform != 'emscripten'" }, @@ -406,7 +489,7 @@ wheels = [ [[package]] name = "brax" -version = "0.14.0" +version = "0.14.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "absl-py" }, @@ -414,11 +497,11 @@ dependencies = [ { name = "flask" }, { name = "flask-cors" }, { name = "flax", version = "0.10.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "flax", version = "0.12.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "flax", version = "0.12.4", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version >= '3.11'" }, { name = "jax", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "jax", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jax", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "jaxlib", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "jaxlib", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jaxlib", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "jaxopt" }, { name = "jinja2" }, { name = "ml-collections" }, @@ -430,14 +513,14 @@ dependencies = [ { name = "orbax-checkpoint" }, { name = "pillow" }, { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "tensorboardx" }, { name = "trimesh" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/2d/2ea28b8c445730452a019118e667416309f217e130fe004e378e6575a15b/brax-0.14.0.tar.gz", hash = "sha256:1102e890040493263e21163f962dd5b850e199726dfd62dc9075657c7d3371b3", size = 205787, upload-time = "2025-12-16T21:04:19.901Z" } +sdist = { url = "https://files.pythonhosted.org/packages/39/8f/480ec7af5570dd8e8f03e226eea3f26e11c1053d3fdc319c4d5fbd6af248/brax-0.14.1.tar.gz", hash = "sha256:e2641b2a0ac151da4bb2bae69443a8e8080a0a85907431ec49b42ce72e3097df", size 
= 206577, upload-time = "2026-02-12T23:21:51.164Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/6e/831f7903b21c2ffa61dc15e5703bd148084651e4aa2c354b140a3ae44dab/brax-0.14.0-py3-none-any.whl", hash = "sha256:4306b41d7f2f70726657426754c43367e572dca01199a7f1a96d115c13f4352f", size = 350172, upload-time = "2025-12-16T21:04:18.477Z" }, + { url = "https://files.pythonhosted.org/packages/f4/8f/ff354be75b3b0142e3a890cb8312b46fc5853b85e87432a146803f654935/brax-0.14.1-py3-none-any.whl", hash = "sha256:2cd82259a9857f3280d422c1c5103725429904295d22685b4f60c27996933ca9", size = 351008, upload-time = "2026-02-12T23:21:49.99Z" }, ] [[package]] @@ -472,21 +555,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/99/1b/50316bd6f95c50686b35799abebb6168d90ee18b7c03e3065f587f010f7c/catkin_pkg-1.1.0-py3-none-any.whl", hash = "sha256:7f5486b4f5681b5f043316ce10fc638c8d0ba8127146e797c85f4024e4356027", size = 76369, upload-time = "2025-09-10T17:34:35.639Z" }, ] +[[package]] +name = "cattrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6e/00/2432bb2d445b39b5407f0a90e01b9a271475eea7caf913d7a86bcb956385/cattrs-25.3.0.tar.gz", hash = "sha256:1ac88d9e5eda10436c4517e390a4142d88638fe682c436c93db7ce4a277b884a", size = 509321, upload-time = "2025-10-07T12:26:08.737Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/2b/a40e1488fdfa02d3f9a653a61a5935ea08b3c2225ee818db6a76c7ba9695/cattrs-25.3.0-py3-none-any.whl", hash = "sha256:9896e84e0a5bf723bc7b4b68f4481785367ce07a8a02e7e9ee6eb2819bc306ff", size = 70738, upload-time = "2025-10-07T12:26:06.603Z" }, +] + [[package]] name = "cerebras-cloud-sdk" -version = "1.64.1" +version = "1.67.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { 
name = "distro" }, - { name = "httpx" }, + { name = "httpx", extra = ["http2"] }, { name = "pydantic" }, { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/98/96fa7704e101b5b33c7030a61d081f07c6c117f96d742852d7e06da56345/cerebras_cloud_sdk-1.64.1.tar.gz", hash = "sha256:7200b0cbafaff32e4e6bcb88d5342fc19295435a7a7681fa10156018d79ae67a", size = 131448, upload-time = "2025-12-31T17:54:55.402Z" } +sdist = { url = "https://files.pythonhosted.org/packages/92/12/c201f07582068141e88f9a523ab02fdc97de58f2f7c0df775c6c52b9d8dd/cerebras_cloud_sdk-1.67.0.tar.gz", hash = "sha256:3aed6f86c6c7a83ee9d4cfb08a2acea089cebf2af5b8aed116ef79995a4f4813", size = 131536, upload-time = "2026-01-29T23:31:27.306Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/96/138ed6ecf48f3b17f68ca06bb5c2b506d34c84d73502c9e3a620da7d44a9/cerebras_cloud_sdk-1.64.1-py3-none-any.whl", hash = "sha256:c9c049586e89dcc4e4a8d726d4725bdba28acb9e8b0f2a9640138b24b92444a0", size = 97798, upload-time = "2025-12-31T17:54:54.072Z" }, + { url = "https://files.pythonhosted.org/packages/7a/5e/36a364f3d1bab4073454b75e7c91dc7ec6879b960063d1a9c929f1c7ea71/cerebras_cloud_sdk-1.67.0-py3-none-any.whl", hash = "sha256:658b79ca2e9c16f75cc6b4e5d523ee014c9e54a88bd39f88905c28ecb33daae1", size = 97807, upload-time = "2026-01-29T23:31:25.77Z" }, ] [[package]] @@ -706,12 +803,16 @@ name = "chex" version = "0.1.91" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == 
'3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -721,8 +822,8 @@ resolution-markers = [ ] dependencies = [ { name = "absl-py", marker = "python_full_version >= '3.11'" }, - { name = "jax", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "jaxlib", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jax", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jaxlib", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version >= '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "toolz", marker = "python_full_version >= '3.11'" }, { name = "typing-extensions", marker = "python_full_version >= '3.11'" }, @@ -747,7 +848,7 @@ wheels = [ [[package]] name = "chromadb" -version = "1.4.0" +version = "1.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "bcrypt" }, @@ -779,13 +880,13 @@ dependencies = [ { name = "typing-extensions" }, { name = "uvicorn", extra = ["standard"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/54/2bc73eac5d8fd7ffc41f8e6e4dd13ad0fd916f8973f85b1411011ba1e05b/chromadb-1.4.0.tar.gz", hash = "sha256:5b4e6d1ede4faaaf12ec772c3c603ea19f39b255ef0795855b40dd79f00a4183", size = 2001752, upload-time = "2025-12-24T02:58:18.326Z" } +sdist = { url = "https://files.pythonhosted.org/packages/10/a9/88d14ec43948ba164c45a2b8a80df26f68b69d963b4fbdf6e777c7ee6ab9/chromadb-1.5.0.tar.gz", hash = "sha256:357c5516ede08305db65f078d1dd4e001b8ecca80a13fd0db0b45bc473554ecb", size = 2343898, upload-time = "2026-02-09T08:46:05.077Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/d5/7ce34021304bdf1a5eefaaf434d2be078828dd71aa3871d89eeeecedfb19/chromadb-1.4.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ab4ad96c21d0038f6d8d84b9cac2010ce1f448926e9a2ee35251552f2e85da07", size = 20882057, upload-time = "2025-12-24T02:58:15.916Z" }, - { url = "https://files.pythonhosted.org/packages/76/6d/9fbf794f3672bfaf227b0e8642b1af6e1ef7d5f5b20f7505ac684ff0b155/chromadb-1.4.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:4d3c8abd762f092f73482e3eb1dae560a8a1c2674575d11eaac0dddf35e9cc6d", size = 20148106, upload-time = "2025-12-24T02:58:12.915Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/cc/d33e24258027c6a14a49a5abf94c75dd6f82e5ab5ed44fe622c0de303420/chromadb-1.4.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29fe47563c460a6cadbdc481b503c520ab4e424730c97d6a85d488a13009b6ce", size = 20759866, upload-time = "2025-12-24T02:58:06.987Z" }, - { url = "https://files.pythonhosted.org/packages/96/da/048ea86c7cb04a873aaab912be62d90b403a8b15a98ae7781ea777371373/chromadb-1.4.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1942e1ee074c7d1e421ea04391a1fccfd18a4b3b94a8e61e853d88dc6924abfa", size = 21666411, upload-time = "2025-12-24T02:58:10.044Z" }, - { url = "https://files.pythonhosted.org/packages/a0/49/933091cf12ee4ce4527a8e99b778f768f63df67e7d3ed9c20eecc0385169/chromadb-1.4.0-cp39-abi3-win_amd64.whl", hash = "sha256:2ec0485e715357a41078c20ebed65d5d5b941bf2fff418c6f1c64176dc36f837", size = 21930010, upload-time = "2025-12-24T02:58:20.138Z" }, + { url = "https://files.pythonhosted.org/packages/11/8c/22b8c965551ce41646d6d0c2b30ce6868b5471e04611d30180823226f273/chromadb-1.5.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:4dc035ed075ddf80dfcdcd6bbedf6cd7c81052132333f03e6a71cdeac5ea0899", size = 20609722, upload-time = "2026-02-09T08:46:00.376Z" }, + { url = "https://files.pythonhosted.org/packages/13/75/b1354faa6e55ff1cfc916884da1b78629e689a3ddf57871000a62644e583/chromadb-1.5.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:3ae46c642c0bf3b86319b3883456ce8bb4a097a1d0552e7ce8cd4836a0cd1f22", size = 19850671, upload-time = "2026-02-09T08:45:57.065Z" }, + { url = "https://files.pythonhosted.org/packages/ba/6e/c9a9be7b3ca3fbcb59561464fe713637a475e39fc72e2dd7c60b2f360480/chromadb-1.5.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20dbfcd178cb93159891e3a0ff085659b8b3e4cbeef3dae311091c325791f4cc", size = 20498323, upload-time = "2026-02-09T08:45:49.941Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/2d/d9faa17c38f49212ed66ed8f7923ee327a9d5a218dd9b7565f28f538bfa7/chromadb-1.5.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5258d5b578c48b7c78effb6b582050ee13b1ac2e9eade4c83cd66de1a78c33", size = 21402789, upload-time = "2026-02-09T08:45:54.005Z" }, + { url = "https://files.pythonhosted.org/packages/b8/7c/791d03e23ebcfaff35db5b1e6e7eb5c572046d2a562932305de63d0898fc/chromadb-1.5.0-cp39-abi3-win_amd64.whl", hash = "sha256:8298cde5ffe448ca5a9794450c8b9700393e824ef8951be425ba2691330e78e6", size = 21724723, upload-time = "2026-02-09T08:46:07.76Z" }, ] [[package]] @@ -810,24 +911,254 @@ wheels = [ ] [[package]] -name = "colorama" -version = "0.4.6" +name = "cmeel" +version = "0.59.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +dependencies = [ + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/58/2448af92b3761a1b321014a653f79d322026681728f96ebe9f419ae0d6b8/cmeel-0.59.0.tar.gz", hash = "sha256:d9871f96ad0499c1cf8671e69622c805265a6be4383a1abfd18f20b4a33e3e3a", size = 14890, upload-time = "2026-01-19T11:48:25.431Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/90/c7/f7a2ea2e88cba4828c9b5bba5b8448ad6e6cbd652d782cc97bb14a54e6a6/cmeel-0.59.0-py3-none-any.whl", hash = 
"sha256:04a24b960e602484306721ce148610ddda4cbc83b8c5f27ef915366a86901e06", size = 20991, upload-time = "2026-01-19T11:48:24.259Z" }, +] + +[[package]] +name = "cmeel-assimp" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, + { name = "cmeel-zlib" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/24/960751adf9ae9725d1fc9642919b6f5a7ab54df2321f04b54d25f658e5f7/cmeel_assimp-6.0.2-0-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:f5bebbb5f9aba6825421f07bd41a02297c51589e26bfa171a8f1f442fd1614cd", size = 9970438, upload-time = "2025-10-15T20:19:18.168Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2a/0ada16dae4638b0b9f31688ed3756f903f2bd390e45c1bbc6ca815b43b38/cmeel_assimp-6.0.2-0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7ca055b64aff80ada91bfca21282484b60aff507609c06957af128322d74d7a8", size = 9164044, upload-time = "2025-10-15T20:19:20.835Z" }, + { url = "https://files.pythonhosted.org/packages/dd/f2/f343a56b4627fbd31fda09055f112f8ec78fd5bd7184be5c5a9b39fba1ec/cmeel_assimp-6.0.2-0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:d8de7cba2bb47b9d59dfe6e5c62e6ca327835690c574f4891dee91d8f522eb21", size = 13672921, upload-time = "2025-10-15T20:19:23.465Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d1/bd37525a1c1835a5e089e27e2b2160ff5d1a214b3ce6fdab97751cfe6772/cmeel_assimp-6.0.2-0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:0cfe56553cfa3dfbcd71d420a154e1e3ac34bbd3341bb7c6c8730eca047866e9", size = 14807515, upload-time = "2025-10-15T20:19:25.996Z" }, + { url = "https://files.pythonhosted.org/packages/2a/14/c09ec9e0cd6343d9bb5f394350d4346a532ad29254d26764b9f3765c717a/cmeel_assimp-6.0.2-0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3683ad2ae72bc4b0f1fe4397bb3c65c20123a159820d5289a7100e6ba27ac55a", size = 14085316, upload-time = "2025-10-15T20:19:27.995Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/24/dee37d3e1a8eb5256412b538bd3fb68827a878d8cc89172cbf5fcc463f37/cmeel_assimp-6.0.2-0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9c8a7071d0f3b5ab3f613caf9e1faa969b4a081d1c4565bbfa88f2208734317c", size = 15086416, upload-time = "2025-10-15T20:19:30.472Z" }, +] + +[[package]] +name = "cmeel-boost" +version = "1.89.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/7c/4d9bbc00d4f9286a48d38ffdcf030fe50c99fe00d3601303270740f22424/cmeel_boost-1.89.0.tar.gz", hash = "sha256:e28d4aa61f4b8dbcb6cb83e732e1076fe4f5a3a0d338d73d1c0821944b37a332", size = 4158, upload-time = "2025-08-22T22:29:36.37Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/6b/36e51770bfa546bb182eacc0a9c88cfb9817aa2914305cfd8d31ff7d5ae5/cmeel_boost-1.89.0-0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:509729c9a3549753df5b219773fef76b1be90e052089e40a6193d1fea4861f80", size = 30666822, upload-time = "2025-08-22T22:28:19.58Z" }, + { url = "https://files.pythonhosted.org/packages/40/35/89b58a680189f511d7543a89a7e647943d0058e81639badde448b8deaa23/cmeel_boost-1.89.0-0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad20fdf62d41eb74c3cba4c0f8d32d49f40bc2bf0b1fb508d97f63f911f7cece", size = 30565392, upload-time = "2025-08-22T22:28:23.378Z" }, + { url = "https://files.pythonhosted.org/packages/45/10/f59c72182391176fc18f76cab503df57bf10234d5b13856dd1f44237679c/cmeel_boost-1.89.0-0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:82aaaed1bb6703664f87bc1ec666703ef917ff67baa61491c8796e13853bbd91", size = 35676696, upload-time = "2025-08-22T22:28:27.258Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/85/7f61c694bf55a239a3821c65767e5d104adaf0faa890c9c63d8ed4dc44b1/cmeel_boost-1.89.0-0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:78fd13be11d7c570e67400f5e734cf00787fe268cee7fd6bdde466ffd13e12be", size = 36013432, upload-time = "2025-08-22T22:28:32.225Z" }, + { url = "https://files.pythonhosted.org/packages/01/63/3069c1f50ebf7054226b9788cdf7cd950ac5823aa24dea3906e2f3e68262/cmeel_boost-1.89.0-0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a7b8a4ef8ccf9e9efb04b0b0b44edf3019d68ed986332fcc4e6918ef87b1b4", size = 30666832, upload-time = "2025-08-22T22:28:36.161Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ad/0354d4d2b2635b6cdd766b2eb1cceb595b1e1d3d103317267c5ab3821161/cmeel_boost-1.89.0-0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2c4c70dee56fd9caf40459267dcdcbdea891e5ad2e7c431ff07719ea55ba2872", size = 30565408, upload-time = "2025-08-22T22:28:39.979Z" }, + { url = "https://files.pythonhosted.org/packages/4e/37/f09082881d49d86b9e18fd42a5862a96746cb90be72b56107aa9ce02b38a/cmeel_boost-1.89.0-0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:ab744d059a78df3df17b8d412b19c0d13f94b5c8b044e32a7f8be293f7c168df", size = 35675964, upload-time = "2025-08-22T22:28:43.241Z" }, + { url = "https://files.pythonhosted.org/packages/9f/83/03861623b94c94dfbdcf41131bdf77eb7b863525a138cdc44a04323519f7/cmeel_boost-1.89.0-0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2a42ba5886762f273a118dab1399934e217b4429bff0a0c51e8bf3479edd37d2", size = 36013073, upload-time = "2025-08-22T22:28:47.944Z" }, + { url = "https://files.pythonhosted.org/packages/97/59/15b74f016279299ec3aef3d36fbe20a5838a122b528b0f7be07f66b1d423/cmeel_boost-1.89.0-0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7b65289d73476e8a28665faa3bfd9c3567053787945395e0ae14796e9ddc6e3f", size = 30669752, upload-time = "2025-08-22T22:28:51.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/65/d5dec74ed4a64a95fe694c5580b862a4da44abc2925e0660a3d7e6ea8ecf/cmeel_boost-1.89.0-0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2f628b40a6c73be41fd76b8be9ae4ca3ad6010fb496e991a1dee2bb30cdcfbc", size = 30566986, upload-time = "2025-08-22T22:28:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/04/9f/b32b6ab06fb5840f19df4d01cd1a836cda54bb53d1d05b54dfb18ff68d1b/cmeel_boost-1.89.0-0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:7ef14b1e2904059e70260ffeb6e295a9df94d4092c0c623d5b06092d45ccb427", size = 35677140, upload-time = "2025-08-22T22:28:58.848Z" }, + { url = "https://files.pythonhosted.org/packages/80/f7/08849f3ca2254daff09d9f0cbc9ad34e5d9552f8eb902f650294944ccc16/cmeel_boost-1.89.0-0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:624ff4401f054fd777f89ae81fc1bf2e16a44ccdff0f2a19ecd3ae74a42c8ba9", size = 36017953, upload-time = "2025-08-22T22:29:02.823Z" }, + { url = "https://files.pythonhosted.org/packages/6d/71/270925d5a51ae348db3b3cd6e45956b7a54bdacd90fa3aa4e4e9cc27ec85/cmeel_boost-1.89.0-0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:5fc5ec4e11086e5766c57c91a03021b3c1d7a2ea82d8fcf8873af0c027eab563", size = 30669798, upload-time = "2025-08-22T22:29:08.619Z" }, + { url = "https://files.pythonhosted.org/packages/07/26/2161982a3b3ab46658a8d3eda4525b9164218b3ff3a8f49a0cfe199666dc/cmeel_boost-1.89.0-0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:00603ac77c1875bae2a6d265dabec46436240d7c2e91d0736e615b58611fc8d3", size = 30567033, upload-time = "2025-08-22T22:29:12.349Z" }, + { url = "https://files.pythonhosted.org/packages/03/c6/42f60dbd47f20214ee554d08bf0ff73f4758f37aaad0bcaa7fcc5848aef7/cmeel_boost-1.89.0-0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:b109d58fb3c352e496d3abad409c42547886c58ce362fd488bdddc94752f9f25", size = 35677367, upload-time = "2025-08-22T22:29:16.179Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/73/ad4e7ca9ef30b077160d78a8c00c7ebca03dc00cbb7c65296383376f1d20/cmeel_boost-1.89.0-0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:73f308ed3d4ba94beb4bd271d3b1f8c0afd9fe050d24e080a392eb6599109004", size = 36018012, upload-time = "2025-08-22T22:29:19.711Z" }, + { url = "https://files.pythonhosted.org/packages/82/39/4ff562a699082dfc93a521418655653fe6223a61480cc58aab4333093864/cmeel_boost-1.89.0-1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b7f0c174c7a9216cf0221b36bb0bac81e62f9cc1c25a6082474b1dedee9c62e", size = 30658750, upload-time = "2025-10-15T23:59:22.997Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e6/e468de28bcb1140d8a61d45f6eb88c8cd79b1fa3ccdd538f58204dbae681/cmeel_boost-1.89.0-1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e76274a984c3f69bd57bbb8923e8acd68b1e65fe7d73df69393e2a5bca601094", size = 30551561, upload-time = "2025-10-15T23:59:26.451Z" }, + { url = "https://files.pythonhosted.org/packages/a8/d8/129cd530587cbcf032120f80176d67f06592e9dc62dddd7dde5f8ce80edc/cmeel_boost-1.89.0-1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:849308a26b07c268f4b2b68a2a8730ffa82e3dc8569bb016088c542c89117580", size = 35676698, upload-time = "2025-10-15T23:59:29.852Z" }, + { url = "https://files.pythonhosted.org/packages/74/96/3b6a784577ff6e69a32b65c970d27ad1a8705645c7cdef181fa475fafd0d/cmeel_boost-1.89.0-1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:2d35dd5a8bc06f75d3c0c1d2db38e700693bf87c7f40a7cbd52af29a51df64c0", size = 36013428, upload-time = "2025-10-15T23:59:33.402Z" }, + { url = "https://files.pythonhosted.org/packages/24/fb/2b34be1e74e2b32fe7915cd7b61f85a68583738a8ef0601669aafc444592/cmeel_boost-1.89.0-1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f5c10faf11f1c679a8f756a1b7332dbd7d3d31251e0a36bcdd1b04102632ab57", size = 30658753, upload-time = "2025-10-15T23:59:36.577Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/0a/8af1c2b3f36b21b39052e70c4ef3dcaed981956cda08d0cb45e85890e111/cmeel_boost-1.89.0-1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f06143f546983c190957ffc9bdca970661c2d20576d4b4b290f7541bb60923c", size = 30551569, upload-time = "2025-10-15T23:59:39.674Z" }, + { url = "https://files.pythonhosted.org/packages/96/12/6ac1d6d292e5f135c4187fb0e5dd4f18c4fd20aa52ae7434f48f547958bb/cmeel_boost-1.89.0-1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b6d8b6e3f3de8373b5b174d47b8cd95a013a17d3d226837752b725b27c26e567", size = 35675961, upload-time = "2025-10-15T23:59:43.064Z" }, + { url = "https://files.pythonhosted.org/packages/5a/db/f36899392766281352ac3c6935ebf8fb4465cef00cc44a221a18c1e3894c/cmeel_boost-1.89.0-1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:7f714d604c9aabe5e8e65324d2051a34c683be260a957ca31f17a16f36575ef9", size = 36013075, upload-time = "2025-10-15T23:59:46.114Z" }, + { url = "https://files.pythonhosted.org/packages/cf/40/e033edcc740a559d417698348e0c39f99acc22e76863db3344399c735fb3/cmeel_boost-1.89.0-1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d1b1bcb47c946c70bf9d06ee6a1d48d32993a929d69fd615ea0b7bdef14e7a33", size = 30661931, upload-time = "2025-10-15T23:59:49.829Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2d/1b3f417d82aaedc84d51cd625b44f7a7e832fcf2a2b23e3712dacd3642b8/cmeel_boost-1.89.0-1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e2aaec12844c197fcbfb88cd2e2318592308b4c7393cff581582a7ff50a48d7", size = 30553328, upload-time = "2025-10-15T23:59:53.174Z" }, + { url = "https://files.pythonhosted.org/packages/8c/fe/b8cfa242d9a3f4a77c1646976c27ce3085f96918266d72dc1072452a73e0/cmeel_boost-1.89.0-1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:727392d2e98654f08775cd9aca11292fa9006ce6579b6a5180edadf40558ff78", size = 35677139, upload-time = "2025-10-15T23:59:56.774Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/60/a7d1d88a4af68db913707385d5ba3abf2419fa03dacff443cc8ddf128a22/cmeel_boost-1.89.0-1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:5602a74998ed6d89466a78e6f684418846c0a90b114431f80efd5327a6aa6564", size = 36017957, upload-time = "2025-10-15T23:59:59.761Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d0/a496896682c670626df92a409e623fb37b4f4592f35f6d1e9b39840fd188/cmeel_boost-1.89.0-1-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:cab231c4c7c9919a8b14548079aa92903d55a677f6c1c4377704a3928ea671a3", size = 30661948, upload-time = "2025-10-16T00:00:03.158Z" }, + { url = "https://files.pythonhosted.org/packages/77/75/105c192e54ad7c28f11eb8df316be5e1b4c5c8de69d9de2948c50170630f/cmeel_boost-1.89.0-1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:210bd45616ae335cea14f5f61b8b1285e72f45429b2e0f6049ccd8239d71b966", size = 30553320, upload-time = "2025-10-16T00:00:06.641Z" }, + { url = "https://files.pythonhosted.org/packages/ab/11/7d6b28639a9c3fb4c4da36b8f99b35c5c74089f6b7d97a6fdee097da2c07/cmeel_boost-1.89.0-1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:2c7840865b31f087a6d011d6f14f5633bcd4792692129f1f001c5e1f196100dc", size = 35677361, upload-time = "2025-10-16T00:00:10.173Z" }, + { url = "https://files.pythonhosted.org/packages/95/ad/ee74e0505a4d9271a886f65fed6c3128e88f97d570f81ae3756eb93eb427/cmeel_boost-1.89.0-1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4fc5d368240d1055c63d22514b33f8746a453dcd65a25437ad1ac778140885ee", size = 36018012, upload-time = "2025-10-16T00:00:13.695Z" }, + { url = "https://files.pythonhosted.org/packages/68/e6/98b28e0eb55948c44b81bba866327bad3f7506adaf1bbb5de549189be0be/cmeel_boost-1.89.0-1-cp314-cp314-macosx_10_9_x86_64.whl", hash = "sha256:8b8193d5a07fd5157ac121dbe6d07fedea5200ccac436df891143a74bee6fbc9", size = 30664665, upload-time = "2025-10-16T00:00:16.667Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/7d/a1afced6a647497c2d7db79ade56edd530f447d12131d5bd200b9c4a21eb/cmeel_boost-1.89.0-1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4a57c710fcbc433475d84a57922d350f76545c077c31cf8ae5629f1ad2711bad", size = 30553850, upload-time = "2025-10-16T00:00:20.162Z" }, + { url = "https://files.pythonhosted.org/packages/41/3a/395e727e0256473fccf58d9e2badad005491ff90a4de79dabc2e70fd1274/cmeel_boost-1.89.0-1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:60b890495c38e3ce98d2bb68b03da005c3f0fc5e28965848f2ff6b1bac5d57e2", size = 35683786, upload-time = "2025-10-16T00:00:23.786Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f8/6341b1b5c2a5edf724a59ce97594b73388e282a13daf2e1dd068340dba90/cmeel_boost-1.89.0-1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:ced81a026b2c4a6b38ae15ddd9059fc63a0e5d41d4abd0927da52f24d9aefc94", size = 36024423, upload-time = "2025-10-16T00:00:27.291Z" }, +] + +[[package]] +name = "cmeel-console-bridge" +version = "1.0.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/83/13/2e9e9d23db8548aef975564055bdb4fb6da8a397a1e7df8cb61f5afebefb/cmeel_console_bridge-1.0.2.3.tar.gz", hash = "sha256:3b2837da7ab408e9d1a775c83c0a7772356062b3a3672e4ce247f2da71a8ecd9", size = 262061, upload-time = "2025-03-19T18:22:06.845Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/a7/527fa060e5881acb3b0a07bf1d803ccb831cb87739abb62b6bcd14f5aed3/cmeel_console_bridge-1.0.2.3-0-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:7aa19b2d006073a1fad55d32968c7d0c7136749e06f98405f4f73a71038a5c41", size = 21341, upload-time = "2025-03-19T18:21:56.834Z" }, + { url = "https://files.pythonhosted.org/packages/bb/db/f8643a8766e8909e0dbfcda6191ca92454cf9a3fadd89be417db261601a1/cmeel_console_bridge-1.0.2.3-0-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:c47d8c97cb120feed1c01f30845d16c67e4e8205941e3977951018972b9b8721", size = 21286, upload-time = "2025-03-19T18:21:57.984Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b4/9c79177152a220ab2e4ffa0140722165035f6a5c2abbed2912352bd7e7b9/cmeel_console_bridge-1.0.2.3-0-py3-none-manylinux_2_17_i686.whl", hash = "sha256:cad9723ac44ab563cd23bf361b604733623d11847c4edf2a2b4ebd1d984ade09", size = 23740, upload-time = "2025-03-19T18:21:59.683Z" }, + { url = "https://files.pythonhosted.org/packages/92/65/5741de6f550fe701d0780546d97b283306676315a3e1f379a6038e8c0ab0/cmeel_console_bridge-1.0.2.3-0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:372942e9c44f681bfff377fba25b348801283aa6f3826a00e4195089bda9737a", size = 25762, upload-time = "2025-03-19T18:22:01.055Z" }, + { url = "https://files.pythonhosted.org/packages/50/a5/70e23c5570506bb39b56aa4d0f3a4a414e38082ddb33e86a48b546620121/cmeel_console_bridge-1.0.2.3-0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:5bb1115ed38441b2396e732e10ec63d1e68445674f9f5d321f7985eb10e9aeef", size = 24477, upload-time = "2025-03-19T18:22:02.091Z" }, + { url = "https://files.pythonhosted.org/packages/47/36/bfd5a255348902e39243ccc6eba693bce714b891cd3be5603a9bd50c6de5/cmeel_console_bridge-1.0.2.3-0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2b8d084b797f592942208c2040b08e06b82f8832aa6c5e582ba6f1a4a653505b", size = 24970, upload-time = "2025-03-19T18:22:03.075Z" }, + { url = "https://files.pythonhosted.org/packages/b3/02/3ae074e9acb9e150a4d5d97f341c2064573cd5fe9e5af20ab58bf8c0020a/cmeel_console_bridge-1.0.2.3-0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:fb6753a9864217d969c4965389d66a476ac978136c03eadf1063b1619c359220", size = 24689, upload-time = "2025-03-19T18:22:04.084Z" }, + { url = "https://files.pythonhosted.org/packages/69/d0/321f74b7d4167a6c59bb7714a6899ba402d9fad611f62573b9d646107320/cmeel_console_bridge-1.0.2.3-0-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:9d446c0fc541413d8d2ceea3c1cfb9cbfd57938d6659c113121eca6c245caafe", size = 24404, upload-time = "2025-03-19T18:22:05.232Z" }, +] + +[[package]] +name = "cmeel-octomap" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/ab/2fed2dbee13e4b39949591685419f1dbb691295e32a6bbbaf87edc005922/cmeel_octomap-1.10.0.tar.gz", hash = "sha256:bd79d1d17adede534de242e42e13ef0d9f04bdd27daf7d56c57f7c43670c9b05", size = 1694189, upload-time = "2025-01-06T17:57:05.477Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/22/ea67d35df31ec4bb2ed6e594b173c572c72dbd2a87e96906eac67b4af930/cmeel_octomap-1.10.0-4-py3-none-macosx_14_0_arm64.whl", hash = "sha256:c116eb151920d26ee2b2c1f656cd7526862006739817205f11f9366ab0ef6cb4", size = 639956, upload-time = "2025-01-06T17:56:56.826Z" }, + { url = "https://files.pythonhosted.org/packages/b2/da/07725a8c11224881f536ad252e97a3d9801b48e5e776017d5f00fb39b17f/cmeel_octomap-1.10.0-4-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:76cc42553f54bae97584aaf0c7bc33753ff287e2738aa2ecac4820121101dd46", size = 1044402, upload-time = "2025-01-06T17:56:58.553Z" }, + { url = "https://files.pythonhosted.org/packages/a2/15/9617b7039afd6d17d3148f6f970d953f5e265d7736f8fdbca09c86e976a0/cmeel_octomap-1.10.0-4-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:5fdb04546fff3accac5f8626c3fc15c3b99e94ab887793565e0b92cedaf96468", size = 1105037, upload-time = "2025-01-06T17:57:01.287Z" }, + { url = "https://files.pythonhosted.org/packages/51/69/88c1d1eca1abf2387ee8263ac7e12708c8b1b5b70b46a0bd9f43b485165b/cmeel_octomap-1.10.0-4-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:84a7376cfced954bb7e3e347afbd02bdc1c83066b995afbdd0fb1e2d9f57ebec", size = 1108359, upload-time = "2025-01-06T17:57:04.085Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/59/57b3b38cf7a382855902b9d24266c283c29d977706438e6b7af62df74e2b/cmeel_octomap-1.10.0-5-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:042b4a21b5e5e19ee78a9a7db78e1b06fb8a287c832031788aec0d3fcabbfecd", size = 748832, upload-time = "2025-02-12T11:57:34.252Z" }, + { url = "https://files.pythonhosted.org/packages/55/8b/f5ec7676808a48c0185e216c0da700e34cb13ba233f13a4557a5ec56324a/cmeel_octomap-1.10.0-5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:79c15a0ece5ca3746170088ef2a377dfb3df8326fafde9bdba688852219758b9", size = 706924, upload-time = "2025-02-12T11:57:35.846Z" }, + { url = "https://files.pythonhosted.org/packages/09/50/56de5a4d9f8ca58100146f16f42c4e2fbb49c0957bfe40d3fd2bc910afe4/cmeel_octomap-1.10.0-5-py3-none-manylinux_2_17_i686.whl", hash = "sha256:e2923bf593ebdafed86b6f3890a122c62fbd9cc9f325d60dbecb72b6b60d78fb", size = 1073973, upload-time = "2025-02-12T11:57:37.914Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f5/8dddf5cdd31176288acd85cc8bf0262b7c3de81d5cb2cb33aa6646f44eb1/cmeel_octomap-1.10.0-5-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:d9e6f9c826905e8de632e9df8cc20e59ce2eb5d1e0b368d8d4abbbc5c0829c1a", size = 1044533, upload-time = "2025-02-12T11:57:39.672Z" }, + { url = "https://files.pythonhosted.org/packages/d6/14/b85bd33bb05c9bb7e87b9ac8401793c12a80a6d594b3ca4bcb5e971a24b7/cmeel_octomap-1.10.0-5-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:b0b54fac180dce4f483afe7029c29cc55f6f2b21be8413e8e2275845b0c204d7", size = 1105199, upload-time = "2025-02-12T11:57:41.286Z" }, + { url = "https://files.pythonhosted.org/packages/99/ee/fe3360441159974ebdbb4c013a92ad0425d5f8bf414868d5161060e40660/cmeel_octomap-1.10.0-5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c8691e665bab7c12b6f51e6c5fbbb83ee6f91dce9d15d9d0387553950e7fb5ee", size = 1092962, upload-time = "2025-02-12T11:57:43.297Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/a6/074166544cc0ce3a5d7844f97dfd13d1b3ec7bff6a6e2cfb18d66a671a7f/cmeel_octomap-1.10.0-5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:735c0ad84dacbbcc8c4237f127c57244c236b7d6c7500b5c45a4c225e19daac1", size = 1083321, upload-time = "2025-02-12T11:57:46.121Z" }, + { url = "https://files.pythonhosted.org/packages/c7/a3/b19ea0d30837369091141b248936b0757ee17f58b809007399bad0b398e4/cmeel_octomap-1.10.0-5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5f86a83f6bd60de290cd327f0374d525328369e76591e3ab2ad1bc0b183678c4", size = 1109207, upload-time = "2025-02-12T11:57:48.709Z" }, +] + +[[package]] +name = "cmeel-qhull" +version = "8.0.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/51/dd/8d0bcfb18771b2ea02bf85dfbbc587c97b274496fb5419b72134eb69430b/cmeel_qhull-8.0.2.1.tar.gz", hash = "sha256:68e8d41d95f61830f2d460af1e4d760f0dbe4d46413d7c736f0ed701153ebe52", size = 1308055, upload-time = "2023-11-17T14:21:06.003Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/b4/d72ebd5e9ee711b68ad466e7bd4c0edcb45b0c2c8a358fdcdb64b092666a/cmeel_qhull-8.0.2.1-0-py3-none-macosx_12_0_arm64.whl", hash = "sha256:39f5183a6e026754c3c043239bac005bf1825240d72e1d8fdf090a0f3ea27307", size = 2804225, upload-time = "2023-11-17T14:15:39.958Z" }, + { url = "https://files.pythonhosted.org/packages/29/dc/4bfb8d51a09401cf740e66d10bdb388eacd7c73bae12ef78149cbbc93e83/cmeel_qhull-8.0.2.1-0-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:f135c5a4f4c8ed53f061bc86b794aaca2c0c34761c9269c06b71329c9da56f82", size = 2972481, upload-time = "2023-11-17T14:20:58.418Z" }, + { url = "https://files.pythonhosted.org/packages/0a/7c/74b5c781cbfc8e4a9bb73b71659cc595bc0163223fd700b18133dbcf2831/cmeel_qhull-8.0.2.1-0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:17f519106df79aed9fc5ec92833d4958d132d23021f02a78a9564cdf83a36c7c", size = 3078962, 
upload-time = "2023-11-17T14:21:00.183Z" }, + { url = "https://files.pythonhosted.org/packages/b4/16/ef7b6201835ba2492753c9c91b266d047b6664507be42ec858e2b24673b5/cmeel_qhull-8.0.2.1-0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:c513abafa40e2b8eb7cd3640e3f92d5391fbd6ec0f4182dbf9536934d8a8ea3e", size = 3194917, upload-time = "2023-11-17T14:21:01.879Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ae/200bdf257507e2c95d0656bf02278cd666d49f0a9e2e6d281ea76d7d085c/cmeel_qhull-8.0.2.1-0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:20a69cb34b6250aee1f018412989734c9ddcad6ff66717a7c5516fc19f55d5ff", size = 3290068, upload-time = "2023-11-17T14:21:03.828Z" }, + { url = "https://files.pythonhosted.org/packages/01/1b/de3fa6091ef58ab40f02653e777c8943acf7cec486184d6007885123571d/cmeel_qhull-8.0.2.1-1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:b5d47b113c1cb8f519bc813cf015d0d01f8ce5b08912733a24a6018f7caa6e96", size = 2902499, upload-time = "2025-02-12T11:51:16.999Z" }, + { url = "https://files.pythonhosted.org/packages/05/0c/5e5d9a033c683eb272508ccf560c03ac6bf5d397b038fe05f896a2283eaf/cmeel_qhull-8.0.2.1-1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:33a0169f4ee37d093c450195b0ef73d4fe0d9d62abb7899ebe79f778b36e1f36", size = 2773563, upload-time = "2025-02-12T11:51:19.893Z" }, + { url = "https://files.pythonhosted.org/packages/52/9b/00c73069348e60fbbdf6a5a10de046083f7d1ad36844958bbf12163ac688/cmeel_qhull-8.0.2.1-1-py3-none-manylinux_2_17_i686.whl", hash = "sha256:a577e76ac94d128f2966b137ead9f088749513df63749728e2b588f4564b7fdf", size = 3228684, upload-time = "2025-02-12T11:51:21.888Z" }, + { url = "https://files.pythonhosted.org/packages/c0/4a/81b8c88b444935a64d8c83b41e662f696c36dd5937c3ca687113ac4778d0/cmeel_qhull-8.0.2.1-1-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:fd0b2d4ce749b102c3cdead4588249befd34f1a660628f6bfc090ce942925aac", size = 3156051, upload-time = "2025-02-12T11:51:24.594Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/c1/44874cd8bfc1e3f7cb15678c836c7a1d5537f34f5a727a0207e01f395598/cmeel_qhull-8.0.2.1-1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:2371a7c80a14f3e874876359ae3e3094861f081fcdd7a03987c3e880d14e07b9", size = 3262508, upload-time = "2025-02-12T11:51:27.147Z" }, + { url = "https://files.pythonhosted.org/packages/54/0e/425d9ce1f2a831025d39fa5b6479b856bd4d73614c9caa690ac72bbfca04/cmeel_qhull-8.0.2.1-1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:197c14c2006dbeba8f5a5771700a7afea72c1a441aab7cdeaaf10b4ed8c1137d", size = 3172646, upload-time = "2025-02-12T11:51:28.967Z" }, + { url = "https://files.pythonhosted.org/packages/00/c1/e973e287a7d793911b8e6497b17586e601a678f2379ba2c615f72bd76480/cmeel_qhull-8.0.2.1-1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:886d1be24b31842286ae42755af5c312a43a4199632826e4110185ec36dc5c6a", size = 3530837, upload-time = "2025-02-12T11:51:31.651Z" }, + { url = "https://files.pythonhosted.org/packages/fd/65/c6cd54f04b5fcaa4ec52f5b57692c1dcef812ff9ee86545e5607369d365e/cmeel_qhull-8.0.2.1-1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1a49ce7f8492c9a8b49f930e34cce75b5e9b9843b015033dd0a25421441159fc", size = 3301908, upload-time = "2025-02-12T11:51:34.53Z" }, +] + +[[package]] +name = "cmeel-tinyxml2" +version = "10.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/9f/030eca702c485f7a641f975f167fa93164911b3329f005fb0730ff5e793f/cmeel_tinyxml2-10.0.0.tar.gz", hash = "sha256:00252aefc1c94a55b89f25ad08ee79fda2da8d1d94703e051598ddb52a9088fe", size = 645297, upload-time = "2025-02-06T10:29:00.106Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/5d/bc3a932eb7996a0a789979426a9bb8a3948bf57f3f17bab87dddbef62433/cmeel_tinyxml2-10.0.0-0-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:924499bb1b60b9a17bd001d12a9af88ddbee4ca888638ae684ba7f0f3ce49e87", size = 111913, 
upload-time = "2025-02-06T10:28:45.723Z" }, + { url = "https://files.pythonhosted.org/packages/92/bf/67d11e123313c034712896e94038291fe506bb099bdb75a136392002ffd0/cmeel_tinyxml2-10.0.0-0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:26a1eb30c2a00bfc172e89ed015a18b8efb2b383546252ca8859574aed684686", size = 109487, upload-time = "2025-02-06T10:28:47.546Z" }, + { url = "https://files.pythonhosted.org/packages/ca/48/d8c81ce19b4b278ed0e8f81f93ae8670209bf3a9ac20141b9c386bb40cc7/cmeel_tinyxml2-10.0.0-0-py3-none-manylinux_2_17_i686.whl", hash = "sha256:53d86e02864c712f51f9a9adfcd8b6046b2ed51d44a0c34a8438d93b72b48325", size = 160118, upload-time = "2025-02-06T10:28:49.627Z" }, + { url = "https://files.pythonhosted.org/packages/87/4e/62193e27c9581f8ba7aeaeca7805632a64f2f4a824b1db37ad02ee953e8a/cmeel_tinyxml2-10.0.0-0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:74112e2e9473afbf6ee2d25c9942553e9f6a40465e714533db72db48bc7658e1", size = 158477, upload-time = "2025-02-06T10:28:51.667Z" }, + { url = "https://files.pythonhosted.org/packages/14/f9/d0420c39e9ade99beeec61cd3abc68880fe6e14d85e9df292af8fabe65c8/cmeel_tinyxml2-10.0.0-0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:ecd6e99caa2a06ac0d4b333b740c20fca526d0ca426f99eb5c0a0039117afdb6", size = 147025, upload-time = "2025-02-06T10:28:53.944Z" }, + { url = "https://files.pythonhosted.org/packages/66/9e/df63147fc162ab487217fa5596778ab7a81a82d9b3ce4236fd3a1e48cecb/cmeel_tinyxml2-10.0.0-0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:30993fffb7032a45d5d3b1e5670cb879dad667a13144cd68c8f4e0371a8a3d2e", size = 150958, upload-time = "2025-02-06T10:28:55.301Z" }, + { url = "https://files.pythonhosted.org/packages/0e/a8/b03567275fd83f5af33ddb61de942689dec72c5b21bec01e6a5b11101aa5/cmeel_tinyxml2-10.0.0-0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8c09ede51784af54211a6225884dc7ddbb02ea1681656d173060c7ad2a5b9a3c", size = 160300, upload-time = "2025-02-06T10:28:57.189Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/ec/2781635b66c1059ca1243ae0f5a0410e171a5d8b8a71be3e34cb172f9f2d/cmeel_tinyxml2-10.0.0-0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3bd511d6d0758224efdebc23d3ead6e94f0755b04141ebf7d5493377829e8332", size = 149184, upload-time = "2025-02-06T10:28:58.734Z" }, +] + +[[package]] +name = "cmeel-urdfdom" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, + { name = "cmeel-console-bridge" }, + { name = "cmeel-tinyxml2" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/31/09/be81a5e7db56f34b6ccdbe7afe855c95a18c8439e173519e0146e9276a8c/cmeel_urdfdom-4.0.1.tar.gz", hash = "sha256:2e3f41e8483889e195b574acb326a4464cf11a3c0a8724031ac28bcda2223efc", size = 291511, upload-time = "2025-02-12T12:07:09.699Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/d0/20147dd6bb723afc44a58d89ea624df2bad1bed7b898a2df112aaca4a479/cmeel_urdfdom-4.0.1-0-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:2fe56939c6b47f6ec57021aac154123da47ecdcd79a217f3a5e3c4b705a07dee", size = 300860, upload-time = "2025-02-12T12:06:58.536Z" }, + { url = "https://files.pythonhosted.org/packages/8e/98/f832bca347e2d987c6b0ebb6930caf7b2c402535324aeed466b6aa2c4513/cmeel_urdfdom-4.0.1-0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:00a0aba78b68c428b27abeed1db58d73e65319ed966911a0e97b37367442e756", size = 300616, upload-time = "2025-02-12T12:07:00.556Z" }, + { url = "https://files.pythonhosted.org/packages/cf/10/bf5765b6f388037cff166a754a0958ac2fee34ca3c0975ef64d0324e4647/cmeel_urdfdom-4.0.1-0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:a701a8f9671331f11b18ecf37a6537db546a21e6a0e5d0ff53341fea0693ed7f", size = 385951, upload-time = "2025-02-12T12:07:02.556Z" }, + { url = "https://files.pythonhosted.org/packages/c3/82/cb3f8f587d293a17bdbea15b50cdaa4a1e28e04583eb4cb4821685b89466/cmeel_urdfdom-4.0.1-0-py3-none-manylinux_2_28_x86_64.whl", hash = 
"sha256:12e39fc388c077d79fc9b3841d3d972a1da90b90de754d3363194c1540e18abf", size = 399619, upload-time = "2025-02-12T12:07:04.388Z" }, + { url = "https://files.pythonhosted.org/packages/24/77/322d7ac92c692d8dfaeda9de2d937087d15e2b564dc457d656e5fde3991d/cmeel_urdfdom-4.0.1-0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c4a83925df1d5923c4485c3eb2b80b3a61b14f119ab724fb5bd04cec494690ee", size = 373969, upload-time = "2025-02-12T12:07:06.222Z" }, + { url = "https://files.pythonhosted.org/packages/9f/63/bdc6b55cc8bd99bb9dce6be801b30feffaa1c3841ecb7f4fe4d137424518/cmeel_urdfdom-4.0.1-0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4c4f44270971b3d05c45a4e21b1fb2df7e05a750363ae918f59532bff0bfe0e1", size = 388237, upload-time = "2025-02-12T12:07:08.326Z" }, + { url = "https://files.pythonhosted.org/packages/1d/2d/8463fc23230612daf4da1e31d3229f47708381f3ae4d1500f0f007ac0f92/cmeel_urdfdom-4.0.1-1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:f7535158f45992eb2ba79e90d9db1bf9adc3846d9c7ed3e7a8c1c4d5343afa37", size = 301006, upload-time = "2025-02-13T11:42:08.8Z" }, + { url = "https://files.pythonhosted.org/packages/0f/d5/c8cdf500e49300d85624cbc3ef804107ddcdc9c541b1d3f726bfb58a9fc1/cmeel_urdfdom-4.0.1-1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fef2a01a00d61d41b3d35dd4958bba973e9025c26eea1d3c9880932f4dba89a5", size = 300758, upload-time = "2025-02-13T11:42:10.449Z" }, + { url = "https://files.pythonhosted.org/packages/cf/b3/2f7bac1544113a7f8e0f6d8b1fab5e75c6a3d27ffbb584b03267251b2165/cmeel_urdfdom-4.0.1-1-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:7a52eb36950ce982014d99a55717ca29985da056e3705f20746f15d3244c1f7a", size = 386043, upload-time = "2025-02-13T11:42:11.923Z" }, + { url = "https://files.pythonhosted.org/packages/86/03/8bdeb36ba6a3e8125d523ecfc010403049e463fe589f9896858d4bdcaf1e/cmeel_urdfdom-4.0.1-1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:9f3b9c80b10d7246821ff61c2573f799e3da23d483e6f7367ddcad8a48baf58f", size = 399719, 
upload-time = "2025-02-13T11:42:14.325Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ed/43f99e7512460294cd8acc5753ba25f8a20bdf28d62e143eaf3ec7a28bb6/cmeel_urdfdom-4.0.1-1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2de69f47e8312cc09157624802d5bdaad6406443f863fb4b9ec62a19b4de3c72", size = 374073, upload-time = "2025-02-13T11:42:17.907Z" }, + { url = "https://files.pythonhosted.org/packages/17/c6/2e9bde6d7c02c1cf203ea896f8ce1afd441412f09b44830f1ee4a96d77de/cmeel_urdfdom-4.0.1-1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7708c1402de450fbeab21f7ca264a9a4676ed4c1cdf8d84d840bc5d057aac920", size = 388337, upload-time = "2025-02-13T11:42:19.657Z" }, +] + +[[package]] +name = "cmeel-zlib" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/74/b458f2fbfb652479c06400937cd67022e50d312033221602a9eca75022bc/cmeel_zlib-1.3.1.tar.gz", hash = "sha256:ebb34c54d1b7921dee5e7cd7003c9203b3297a5ba9d93983f1b7d3bb04976c3a", size = 3051, upload-time = "2025-02-11T12:20:39.574Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/dd/1bc2bc50c4ea217a993b2c9d3a7dd5959f839bc2b941556326b1ce71b961/cmeel_zlib-1.3.1-0-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:810779922c64d8074a3d12fcc471b1f62255e4402a1ca5f91f5749cc89214b93", size = 268796, upload-time = "2025-02-11T12:20:26.953Z" }, + { url = "https://files.pythonhosted.org/packages/a1/94/cf7e4554b7e2e4348da3f456be3c495774d1972a8dba384b6558b8f0e66b/cmeel_zlib-1.3.1-0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2ccfac8fc80c6ee94ac61a9991f2ac18a5ea3a6cc2e753c221eb7c82729e839d", size = 191024, upload-time = "2025-02-11T12:20:28.737Z" }, + { url = "https://files.pythonhosted.org/packages/a2/cf/92d5a06071326ce3208f6cabc6d07d6c285b415df67e7ea9b87f0b46d44b/cmeel_zlib-1.3.1-0-py3-none-manylinux_2_28_aarch64.whl", hash = 
"sha256:f59862cde12d0dcd51fc8f35c408a51e0f279f9d8d9103d5497fe82572e194e4", size = 286338, upload-time = "2025-02-11T12:20:30.784Z" }, + { url = "https://files.pythonhosted.org/packages/21/10/13b53ce0f693085cbad31be9fceb1b6a2b4e3bae5851c1f114c3e7b3c447/cmeel_zlib-1.3.1-0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:7f95b4ed5090fb0fef195f52485f3719dd60213e67a4c07ac4718660bd24da25", size = 282556, upload-time = "2025-02-11T12:20:32.337Z" }, + { url = "https://files.pythonhosted.org/packages/2b/2e/58b295975403b147e5df681e3e3470ba1802feed06a836843f02386d6506/cmeel_zlib-1.3.1-0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2864a55ab1dad1d86749c8410693f3bca6e866cbb5ac16286be686aedb781f6e", size = 287625, upload-time = "2025-02-11T12:20:34.471Z" }, + { url = "https://files.pythonhosted.org/packages/56/f3/4da9d5c5308ef2019ab65a8a9f519ac95004446902d01e859f9ac6b8cdd6/cmeel_zlib-1.3.1-0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1e36ac8dccca22ff1f6e4df428ae5597f6288d9e6f85b08c9b767dc63e90fb55", size = 285662, upload-time = "2025-02-11T12:20:37.298Z" }, ] [[package]] -name = "coloredlogs" -version = "15.0.1" +name = "coal" +version = "3.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "humanfriendly" }, + { name = "cmeel" }, + { name = "cmeel-assimp" }, + { name = "cmeel-boost" }, + { name = "cmeel-octomap" }, + { name = "cmeel-qhull" }, + { name = "eigenpy" }, + { name = "libcoal" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520, upload-time = "2021-06-11T10:22:45.202Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/4f/9b1f2cb921827aa877c09f6e727215fb633e4e3671682bd2a6559cd42d09/coal-3.0.2.tar.gz", hash = "sha256:7ca3f961fe72962b543894492efb33ee71bdc1091d93b87dc6988cdf0d4dedca", size = 1463955, upload-time 
= "2025-10-16T00:52:33.982Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, + { url = "https://files.pythonhosted.org/packages/51/da/8b4758f8183d6808e542f97b5719b191ceda8f23e5958a1c3324535b9049/coal-3.0.2-1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eab5b68f1e25babd10a5d788bdce2ae61196c3e548c900ff8d060462e60e5194", size = 1612332, upload-time = "2025-10-16T00:51:56.269Z" }, + { url = "https://files.pythonhosted.org/packages/5e/14/21ba9435ce088452f903cc54312e04fd337d00f63f1a5cc90ceb37511dba/coal-3.0.2-1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bb21e74d9071f87629026c1177b3c346145630869dc136cc4704899f3dfbf9db", size = 1505686, upload-time = "2025-10-16T00:51:57.948Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/68c42fe06b1ee8c5962edb4c9cecd9e8a042ebc5f850510d76dcb5beea0b/coal-3.0.2-1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6d97c0137b22a41e03090d044824596f76ccc065407f6fc538af7aedb2995306", size = 2218478, upload-time = "2025-10-16T00:51:59.583Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7e/6977e63ca97451b6888f69531d26513b64ce94235aa06ea49b24b0e2bb12/coal-3.0.2-1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:89af2fcc4f74474487e8e42ced4a2222db81ec50d27f0f9482fec9ca6309cad4", size = 2216338, upload-time = "2025-10-16T00:52:01.211Z" }, + { url = "https://files.pythonhosted.org/packages/14/ac/cee49d27d602e49c92b920414fa38d2c8ba0c245bfe840d5f0fc42893eeb/coal-3.0.2-1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1b76ab4101a779482dd25cf90ac66f81d6b90941ef5a559a6227053ff9d65f60", size = 1612334, upload-time = "2025-10-16T00:52:02.942Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/86/1f16a0227aa77b6539fe8056f4ac539238e5148aff6d29b86f5cdf1878e1/coal-3.0.2-1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:59ebb247b091dd7e97035d860e5a929ab04d4a3449d1cb30ed0a0c24aad3e705", size = 1505700, upload-time = "2025-10-16T00:52:04.697Z" }, + { url = "https://files.pythonhosted.org/packages/0b/84/e4185042b73f1e6f99fa1a32dd09dede94e8c4f7c2876649b650ffacf4d7/coal-3.0.2-1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e00e0ab0306c6db3ca5cd9ee70287fe8e457dba63439318d108806da67761213", size = 2218114, upload-time = "2025-10-16T00:52:06.739Z" }, + { url = "https://files.pythonhosted.org/packages/bd/bf/a2b18c35608f031d14ada9ff2217c421ba4459f1a87de914322a076798e1/coal-3.0.2-1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8d43e3c61bc96068e561a924af0f2190490292e5b8b9af99ce5bb6e417a0b6c3", size = 2215822, upload-time = "2025-10-16T00:52:08.365Z" }, + { url = "https://files.pythonhosted.org/packages/06/09/522c4023c8871c70b32960709fde7f14d91ee4e0b1bbf5058ed7da106784/coal-3.0.2-1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ee24e2118bae43ec5abee45e1d228da3355dde10050db574be6f5b9eb9834bab", size = 1626929, upload-time = "2025-10-16T00:52:10.024Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ad/c5c2de5acf88c87596e1fdf0480e1ff369348b80dbcee63c3c0261b1356e/coal-3.0.2-1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1027e59bf17a0c4264e4fd2a87a1b7415e93fbbda94375e1ab7c001195dc1400", size = 1516707, upload-time = "2025-10-16T00:52:11.31Z" }, + { url = "https://files.pythonhosted.org/packages/9a/37/18811f130072d612ef32933b51fe8e090f93fcb2d55ef5a543ba2d155476/coal-3.0.2-1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:69b028b281fb0417a0dbeaa5a59c916ba5b04e037b717b0861da50f60ee81ad7", size = 2189981, upload-time = "2025-10-16T00:52:12.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/f3/b895cb74d85b3e39c7f4d41976381f2006f370d15d6e83f5e5c8121b559f/coal-3.0.2-1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4a840b976f445455dde40f68e0e808daee9a3343dacf9a95ba98ea5c1f8c5995", size = 2201654, upload-time = "2025-10-16T00:52:14.329Z" }, + { url = "https://files.pythonhosted.org/packages/fa/be/e45f18c63e0ff84630a3fc00fbd572eb610b4b6cfc0dbdc952d87ba6c784/coal-3.0.2-1-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:e9358b17ea61c1041bd9b4498eed0864192be3b15c572a48760107f027ea9ac5", size = 1626929, upload-time = "2025-10-16T00:52:16.147Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/3d49e31d934530458279d3689edd54306b517d8f87fdeb061ddc4abe1f3e/coal-3.0.2-1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d279c77926838cb5d60c4fb96dcd96d5773462c86ace43705a9d872d000650e3", size = 1516710, upload-time = "2025-10-16T00:52:17.718Z" }, + { url = "https://files.pythonhosted.org/packages/7e/f0/53833a83e74cf34592cdf2fd7aecdbc9684997fe5c0b8fd3ddfb22030e4b/coal-3.0.2-1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:df9bb9ea76f6df5dbaabb3b07dd82437e295c13e420f063238bb9fc058059dc3", size = 2189977, upload-time = "2025-10-16T00:52:18.989Z" }, + { url = "https://files.pythonhosted.org/packages/29/f1/96fb0b8e98b8ce873cba5b0e9237d3cb3c0c750974df990f3e9182e2902f/coal-3.0.2-1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:d6ab1c6961df4a5064b51bc8c76db05a16b03fcd1977f7d2fffe1c8dc5f4d3c3", size = 2201659, upload-time = "2025-10-16T00:52:20.826Z" }, + { url = "https://files.pythonhosted.org/packages/90/a9/8436d58720bd08d4039f5cef557f524612fb15448419982a7a3145d4c498/coal-3.0.2-1-cp314-cp314-macosx_10_9_x86_64.whl", hash = "sha256:38a10d82120768bd618227c102b958bf3d3d647269e3e5736d947285027a1449", size = 1628018, upload-time = "2025-10-16T00:52:22.838Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/c8/c381f70f19c1d16e50e37cc5b8d8d48d5bd0815f148543f4b6de6eb822d9/coal-3.0.2-1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ae569844c064863ff0e84c338c4a32f4993a5a1ee3d6d76304369a7e47d2b4a0", size = 1517130, upload-time = "2025-10-16T00:52:24.504Z" }, + { url = "https://files.pythonhosted.org/packages/a7/cd/a99d4c84b6e7ed422c411a9c5b966ea0e5f535dfd641ebaf51cb6ff8c7d4/coal-3.0.2-1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:936161b2bb5096af101b51aaebdf3deeb21876e7d4c42db3cd029a692812e333", size = 2197006, upload-time = "2025-10-16T00:52:25.965Z" }, + { url = "https://files.pythonhosted.org/packages/1e/7f/3f358742302090aa3064b2873084d833e8c67568d655c4c8e013a6d68cdf/coal-3.0.2-1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:41c1e84d3b6050892250287aa750d0f1d791abf0819b0a30a4eeb24f141b6741", size = 2204039, upload-time = "2025-10-16T00:52:27.287Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] [[package]] @@ -860,11 +1191,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/31/28/d28211d29bcc3620b1fece85a65ce5bb22f18670a03cd28ea4b75ede270c/configargparse-1.7.1-py3-none-any.whl", hash = "sha256:8b586a31f9d873abd1ca527ffbe58863c99f36d896e2829779803125e83be4b6", size = 25607, upload-time = "2025-05-23T14:26:15.923Z" }, ] -[[package]] -name = 
"contact-graspnet-pytorch" -version = "0.0.0" -source = { git = "https://github.com/dimensionalOS/contact_graspnet_pytorch.git#9f9c7d5df5e8bbf3757fe9c786c67a921809336b" } - [[package]] name = "contourpy" version = "1.3.2" @@ -943,12 +1269,16 @@ name = "contourpy" version = "1.3.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and 
sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -1036,166 +1366,175 @@ wheels = [ [[package]] name = "coverage" -version = "7.13.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2d/9a/3742e58fd04b233df95c012ee9f3dfe04708a5e1d32613bd2d47d4e1be0d/coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147", size = 218633, upload-time = "2025-12-28T15:40:10.165Z" }, - { url = "https://files.pythonhosted.org/packages/7e/45/7e6bdc94d89cd7c8017ce735cf50478ddfe765d4fbf0c24d71d30ea33d7a/coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d", size = 219147, upload-time = "2025-12-28T15:40:12.069Z" }, - { url = "https://files.pythonhosted.org/packages/f7/38/0d6a258625fd7f10773fe94097dc16937a5f0e3e0cdf3adef67d3ac6baef/coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0", size = 245894, upload-time = "2025-12-28T15:40:13.556Z" }, - { url = "https://files.pythonhosted.org/packages/27/58/409d15ea487986994cbd4d06376e9860e9b157cfbfd402b1236770ab8dd2/coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90", size = 247721, upload-time = "2025-12-28T15:40:15.37Z" }, - { url = 
"https://files.pythonhosted.org/packages/da/bf/6e8056a83fd7a96c93341f1ffe10df636dd89f26d5e7b9ca511ce3bcf0df/coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d", size = 249585, upload-time = "2025-12-28T15:40:17.226Z" }, - { url = "https://files.pythonhosted.org/packages/f4/15/e1daff723f9f5959acb63cbe35b11203a9df77ee4b95b45fffd38b318390/coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b", size = 246597, upload-time = "2025-12-28T15:40:19.028Z" }, - { url = "https://files.pythonhosted.org/packages/74/a6/1efd31c5433743a6ddbc9d37ac30c196bb07c7eab3d74fbb99b924c93174/coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6", size = 247626, upload-time = "2025-12-28T15:40:20.846Z" }, - { url = "https://files.pythonhosted.org/packages/6d/9f/1609267dd3e749f57fdd66ca6752567d1c13b58a20a809dc409b263d0b5f/coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e", size = 245629, upload-time = "2025-12-28T15:40:22.397Z" }, - { url = "https://files.pythonhosted.org/packages/e2/f6/6815a220d5ec2466383d7cc36131b9fa6ecbe95c50ec52a631ba733f306a/coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae", size = 245901, upload-time = "2025-12-28T15:40:23.836Z" }, - { url = "https://files.pythonhosted.org/packages/ac/58/40576554cd12e0872faf6d2c0eb3bc85f71d78427946ddd19ad65201e2c0/coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29", size = 246505, upload-time = "2025-12-28T15:40:25.421Z" }, - { url = 
"https://files.pythonhosted.org/packages/3b/77/9233a90253fba576b0eee81707b5781d0e21d97478e5377b226c5b096c0f/coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f", size = 221257, upload-time = "2025-12-28T15:40:27.217Z" }, - { url = "https://files.pythonhosted.org/packages/e0/43/e842ff30c1a0a623ec80db89befb84a3a7aad7bfe44a6ea77d5a3e61fedd/coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1", size = 222191, upload-time = "2025-12-28T15:40:28.916Z" }, - { url = "https://files.pythonhosted.org/packages/b4/9b/77baf488516e9ced25fc215a6f75d803493fc3f6a1a1227ac35697910c2a/coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88", size = 218755, upload-time = "2025-12-28T15:40:30.812Z" }, - { url = "https://files.pythonhosted.org/packages/d7/cd/7ab01154e6eb79ee2fab76bf4d89e94c6648116557307ee4ebbb85e5c1bf/coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3", size = 219257, upload-time = "2025-12-28T15:40:32.333Z" }, - { url = "https://files.pythonhosted.org/packages/01/d5/b11ef7863ffbbdb509da0023fad1e9eda1c0eaea61a6d2ea5b17d4ac706e/coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9", size = 249657, upload-time = "2025-12-28T15:40:34.1Z" }, - { url = "https://files.pythonhosted.org/packages/f7/7c/347280982982383621d29b8c544cf497ae07ac41e44b1ca4903024131f55/coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee", size = 251581, upload-time = "2025-12-28T15:40:36.131Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/f6/ebcfed11036ade4c0d75fa4453a6282bdd225bc073862766eec184a4c643/coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf", size = 253691, upload-time = "2025-12-28T15:40:37.626Z" }, - { url = "https://files.pythonhosted.org/packages/02/92/af8f5582787f5d1a8b130b2dcba785fa5e9a7a8e121a0bb2220a6fdbdb8a/coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3", size = 249799, upload-time = "2025-12-28T15:40:39.47Z" }, - { url = "https://files.pythonhosted.org/packages/24/aa/0e39a2a3b16eebf7f193863323edbff38b6daba711abaaf807d4290cf61a/coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef", size = 251389, upload-time = "2025-12-28T15:40:40.954Z" }, - { url = "https://files.pythonhosted.org/packages/73/46/7f0c13111154dc5b978900c0ccee2e2ca239b910890e674a77f1363d483e/coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851", size = 249450, upload-time = "2025-12-28T15:40:42.489Z" }, - { url = "https://files.pythonhosted.org/packages/ac/ca/e80da6769e8b669ec3695598c58eef7ad98b0e26e66333996aee6316db23/coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb", size = 249170, upload-time = "2025-12-28T15:40:44.279Z" }, - { url = "https://files.pythonhosted.org/packages/af/18/9e29baabdec1a8644157f572541079b4658199cfd372a578f84228e860de/coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba", size = 250081, upload-time = "2025-12-28T15:40:45.748Z" }, - { url = 
"https://files.pythonhosted.org/packages/00/f8/c3021625a71c3b2f516464d322e41636aea381018319050a8114105872ee/coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19", size = 221281, upload-time = "2025-12-28T15:40:47.232Z" }, - { url = "https://files.pythonhosted.org/packages/27/56/c216625f453df6e0559ed666d246fcbaaa93f3aa99eaa5080cea1229aa3d/coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a", size = 222215, upload-time = "2025-12-28T15:40:49.19Z" }, - { url = "https://files.pythonhosted.org/packages/5c/9a/be342e76f6e531cae6406dc46af0d350586f24d9b67fdfa6daee02df71af/coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c", size = 220886, upload-time = "2025-12-28T15:40:51.067Z" }, - { url = "https://files.pythonhosted.org/packages/ce/8a/87af46cccdfa78f53db747b09f5f9a21d5fc38d796834adac09b30a8ce74/coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3", size = 218927, upload-time = "2025-12-28T15:40:52.814Z" }, - { url = "https://files.pythonhosted.org/packages/82/a8/6e22fdc67242a4a5a153f9438d05944553121c8f4ba70cb072af4c41362e/coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e", size = 219288, upload-time = "2025-12-28T15:40:54.262Z" }, - { url = "https://files.pythonhosted.org/packages/d0/0a/853a76e03b0f7c4375e2ca025df45c918beb367f3e20a0a8e91967f6e96c/coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c", size = 250786, upload-time = "2025-12-28T15:40:56.059Z" }, - { url = 
"https://files.pythonhosted.org/packages/ea/b4/694159c15c52b9f7ec7adf49d50e5f8ee71d3e9ef38adb4445d13dd56c20/coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62", size = 253543, upload-time = "2025-12-28T15:40:57.585Z" }, - { url = "https://files.pythonhosted.org/packages/96/b2/7f1f0437a5c855f87e17cf5d0dc35920b6440ff2b58b1ba9788c059c26c8/coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968", size = 254635, upload-time = "2025-12-28T15:40:59.443Z" }, - { url = "https://files.pythonhosted.org/packages/e9/d1/73c3fdb8d7d3bddd9473c9c6a2e0682f09fc3dfbcb9c3f36412a7368bcab/coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e", size = 251202, upload-time = "2025-12-28T15:41:01.328Z" }, - { url = "https://files.pythonhosted.org/packages/66/3c/f0edf75dcc152f145d5598329e864bbbe04ab78660fe3e8e395f9fff010f/coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f", size = 252566, upload-time = "2025-12-28T15:41:03.319Z" }, - { url = "https://files.pythonhosted.org/packages/17/b3/e64206d3c5f7dcbceafd14941345a754d3dbc78a823a6ed526e23b9cdaab/coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee", size = 250711, upload-time = "2025-12-28T15:41:06.411Z" }, - { url = "https://files.pythonhosted.org/packages/dc/ad/28a3eb970a8ef5b479ee7f0c484a19c34e277479a5b70269dc652b730733/coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf", size = 250278, upload-time = "2025-12-28T15:41:08.285Z" }, - { url = 
"https://files.pythonhosted.org/packages/54/e3/c8f0f1a93133e3e1291ca76cbb63565bd4b5c5df63b141f539d747fff348/coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c", size = 252154, upload-time = "2025-12-28T15:41:09.969Z" }, - { url = "https://files.pythonhosted.org/packages/d0/bf/9939c5d6859c380e405b19e736321f1c7d402728792f4c752ad1adcce005/coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7", size = 221487, upload-time = "2025-12-28T15:41:11.468Z" }, - { url = "https://files.pythonhosted.org/packages/fa/dc/7282856a407c621c2aad74021680a01b23010bb8ebf427cf5eacda2e876f/coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6", size = 222299, upload-time = "2025-12-28T15:41:13.386Z" }, - { url = "https://files.pythonhosted.org/packages/10/79/176a11203412c350b3e9578620013af35bcdb79b651eb976f4a4b32044fa/coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c", size = 220941, upload-time = "2025-12-28T15:41:14.975Z" }, - { url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" }, - { url = "https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" }, - { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" }, - { url = "https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" }, - { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" }, - { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = "2025-12-28T15:41:28.459Z" }, - { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = 
"2025-12-28T15:41:30.02Z" }, - { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" }, - { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" }, - { url = "https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" }, - { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" }, - { url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" }, - { url = "https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" }, - { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" }, - { url = "https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" }, - { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" }, - { url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = "2025-12-28T15:41:51.035Z" }, - { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = 
"2025-12-28T15:41:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" }, - { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" }, - { url = "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" }, - { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" }, - { url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" }, - { url = "https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" }, - { url = "https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" }, - { url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" }, - { url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" }, - { url = "https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = 
"2025-12-28T15:42:14.21Z" }, - { url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" }, - { url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = "2025-12-28T15:42:18.54Z" }, - { url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" }, - { url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" }, - { url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" }, - { url = "https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" }, - { url = 
"https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" }, - { url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" }, - { url = "https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" }, - { url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" }, - { url = "https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" }, - { url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = "2025-12-28T15:42:40.788Z" }, - { url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" }, - { url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" }, - { url = "https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" }, - { url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" }, +version = "7.13.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/d4/7827d9ffa34d5d4d752eec907022aa417120936282fc488306f5da08c292/coverage-7.13.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fc31c787a84f8cd6027eba44010517020e0d18487064cd3d8968941856d1415", size = 219152, upload-time = "2026-02-09T12:56:11.974Z" }, + { url = "https://files.pythonhosted.org/packages/35/b0/d69df26607c64043292644dbb9dc54b0856fabaa2cbb1eeee3331cc9e280/coverage-7.13.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a32ebc02a1805adf637fc8dec324b5cdacd2e493515424f70ee33799573d661b", size = 219667, upload-time = "2026-02-09T12:56:13.33Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/a4/c1523f7c9e47b2271dbf8c2a097e7a1f89ef0d66f5840bb59b7e8814157b/coverage-7.13.4-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e24f9156097ff9dc286f2f913df3a7f63c0e333dcafa3c196f2c18b4175ca09a", size = 246425, upload-time = "2026-02-09T12:56:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/f8/02/aa7ec01d1a5023c4b680ab7257f9bfde9defe8fdddfe40be096ac19e8177/coverage-7.13.4-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8041b6c5bfdc03257666e9881d33b1abc88daccaf73f7b6340fb7946655cd10f", size = 248229, upload-time = "2026-02-09T12:56:16.31Z" }, + { url = "https://files.pythonhosted.org/packages/35/98/85aba0aed5126d896162087ef3f0e789a225697245256fc6181b95f47207/coverage-7.13.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2a09cfa6a5862bc2fc6ca7c3def5b2926194a56b8ab78ffcf617d28911123012", size = 250106, upload-time = "2026-02-09T12:56:18.024Z" }, + { url = "https://files.pythonhosted.org/packages/96/72/1db59bd67494bc162e3e4cd5fbc7edba2c7026b22f7c8ef1496d58c2b94c/coverage-7.13.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:296f8b0af861d3970c2a4d8c91d48eb4dd4771bcef9baedec6a9b515d7de3def", size = 252021, upload-time = "2026-02-09T12:56:19.272Z" }, + { url = "https://files.pythonhosted.org/packages/9d/97/72899c59c7066961de6e3daa142d459d47d104956db43e057e034f015c8a/coverage-7.13.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e101609bcbbfb04605ea1027b10dc3735c094d12d40826a60f897b98b1c30256", size = 247114, upload-time = "2026-02-09T12:56:21.051Z" }, + { url = "https://files.pythonhosted.org/packages/39/1f/f1885573b5970235e908da4389176936c8933e86cb316b9620aab1585fa2/coverage-7.13.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:aa3feb8db2e87ff5e6d00d7e1480ae241876286691265657b500886c98f38bda", size = 248143, upload-time = "2026-02-09T12:56:22.585Z" }, + { url = "https://files.pythonhosted.org/packages/a8/cf/e80390c5b7480b722fa3e994f8202807799b85bc562aa4f1dde209fbb7be/coverage-7.13.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4fc7fa81bbaf5a02801b65346c8b3e657f1d93763e58c0abdf7c992addd81a92", size = 246152, upload-time = "2026-02-09T12:56:23.748Z" }, + { url = "https://files.pythonhosted.org/packages/44/bf/f89a8350d85572f95412debb0fb9bb4795b1d5b5232bd652923c759e787b/coverage-7.13.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:33901f604424145c6e9c2398684b92e176c0b12df77d52db81c20abd48c3794c", size = 249959, upload-time = "2026-02-09T12:56:25.209Z" }, + { url = "https://files.pythonhosted.org/packages/f7/6e/612a02aece8178c818df273e8d1642190c4875402ca2ba74514394b27aba/coverage-7.13.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:bb28c0f2cf2782508a40cec377935829d5fcc3ad9a3681375af4e84eb34b6b58", size = 246416, upload-time = "2026-02-09T12:56:26.475Z" }, + { url = "https://files.pythonhosted.org/packages/cb/98/b5afc39af67c2fa6786b03c3a7091fc300947387ce8914b096db8a73d67a/coverage-7.13.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d107aff57a83222ddbd8d9ee705ede2af2cc926608b57abed8ef96b50b7e8f9", size = 247025, upload-time = "2026-02-09T12:56:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/51/30/2bba8ef0682d5bd210c38fe497e12a06c9f8d663f7025e9f5c2c31ce847d/coverage-7.13.4-cp310-cp310-win32.whl", hash = "sha256:a6f94a7d00eb18f1b6d403c91a88fd58cfc92d4b16080dfdb774afc8294469bf", size = 221758, upload-time = "2026-02-09T12:56:29.051Z" }, + { url = "https://files.pythonhosted.org/packages/78/13/331f94934cf6c092b8ea59ff868eb587bc8fe0893f02c55bc6c0183a192e/coverage-7.13.4-cp310-cp310-win_amd64.whl", hash = "sha256:2cb0f1e000ebc419632bbe04366a8990b6e32c4e0b51543a6484ffe15eaeda95", size = 222693, upload-time = "2026-02-09T12:56:30.366Z" }, + { url 
= "https://files.pythonhosted.org/packages/b4/ad/b59e5b451cf7172b8d1043dc0fa718f23aab379bc1521ee13d4bd9bfa960/coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053", size = 219278, upload-time = "2026-02-09T12:56:31.673Z" }, + { url = "https://files.pythonhosted.org/packages/f1/17/0cb7ca3de72e5f4ef2ec2fa0089beafbcaaaead1844e8b8a63d35173d77d/coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11", size = 219783, upload-time = "2026-02-09T12:56:33.104Z" }, + { url = "https://files.pythonhosted.org/packages/ab/63/325d8e5b11e0eaf6d0f6a44fad444ae58820929a9b0de943fa377fe73e85/coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa", size = 250200, upload-time = "2026-02-09T12:56:34.474Z" }, + { url = "https://files.pythonhosted.org/packages/76/53/c16972708cbb79f2942922571a687c52bd109a7bd51175aeb7558dff2236/coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7", size = 252114, upload-time = "2026-02-09T12:56:35.749Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c2/7ab36d8b8cc412bec9ea2d07c83c48930eb4ba649634ba00cb7e4e0f9017/coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00", size = 254220, upload-time = "2026-02-09T12:56:37.796Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4d/cf52c9a3322c89a0e6febdfbc83bb45c0ed3c64ad14081b9503adee702e7/coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef", size = 256164, 
upload-time = "2026-02-09T12:56:39.016Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/eb1dd17bd6de8289df3580e967e78294f352a5df8a57ff4671ee5fc3dcd0/coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903", size = 250325, upload-time = "2026-02-09T12:56:40.668Z" }, + { url = "https://files.pythonhosted.org/packages/71/07/8c1542aa873728f72267c07278c5cc0ec91356daf974df21335ccdb46368/coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f", size = 251913, upload-time = "2026-02-09T12:56:41.97Z" }, + { url = "https://files.pythonhosted.org/packages/74/d7/c62e2c5e4483a748e27868e4c32ad3daa9bdddbba58e1bc7a15e252baa74/coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299", size = 249974, upload-time = "2026-02-09T12:56:43.323Z" }, + { url = "https://files.pythonhosted.org/packages/98/9f/4c5c015a6e98ced54efd0f5cf8d31b88e5504ecb6857585fc0161bb1e600/coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505", size = 253741, upload-time = "2026-02-09T12:56:45.155Z" }, + { url = "https://files.pythonhosted.org/packages/bd/59/0f4eef89b9f0fcd9633b5d350016f54126ab49426a70ff4c4e87446cabdc/coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6", size = 249695, upload-time = "2026-02-09T12:56:46.636Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2c/b7476f938deb07166f3eb281a385c262675d688ff4659ad56c6c6b8e2e70/coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9", size = 250599, upload-time = "2026-02-09T12:56:48.13Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/34/c3420709d9846ee3785b9f2831b4d94f276f38884032dca1457fa83f7476/coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9", size = 221780, upload-time = "2026-02-09T12:56:50.479Z" }, + { url = "https://files.pythonhosted.org/packages/61/08/3d9c8613079d2b11c185b865de9a4c1a68850cfda2b357fae365cf609f29/coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f", size = 222715, upload-time = "2026-02-09T12:56:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/18/1a/54c3c80b2f056164cc0a6cdcb040733760c7c4be9d780fe655f356f433e4/coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f", size = 221385, upload-time = "2026-02-09T12:56:53.194Z" }, + { url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, + { url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, + { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" }, + { url = "https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" }, + { url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" }, + { url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time 
= "2026-02-09T12:57:06.879Z" }, + { url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" }, + { url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" }, + { url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = "2026-02-09T12:57:14.085Z" }, + { url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" }, + { url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/23/aad45061a31677d68e47499197a131eea55da4875d16c1f42021ab963503/coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9", size = 219474, upload-time = "2026-02-09T12:57:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/a5/70/9b8b67a0945f3dfec1fd896c5cefb7c19d5a3a6d74630b99a895170999ae/coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac", size = 219844, upload-time = "2026-02-09T12:57:20.66Z" }, + { url = "https://files.pythonhosted.org/packages/97/fd/7e859f8fab324cef6c4ad7cff156ca7c489fef9179d5749b0c8d321281c2/coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea", size = 250832, upload-time = "2026-02-09T12:57:22.007Z" }, + { url = "https://files.pythonhosted.org/packages/e4/dc/b2442d10020c2f52617828862d8b6ee337859cd8f3a1f13d607dddda9cf7/coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b", size = 253434, upload-time = "2026-02-09T12:57:23.339Z" }, + { url = "https://files.pythonhosted.org/packages/5a/88/6728a7ad17428b18d836540630487231f5470fb82454871149502f5e5aa2/coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525", size = 254676, upload-time = "2026-02-09T12:57:24.774Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bc/21244b1b8cedf0dff0a2b53b208015fe798d5f2a8d5348dbfece04224fff/coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242", size = 256807, 
upload-time = "2026-02-09T12:57:26.125Z" }, + { url = "https://files.pythonhosted.org/packages/97/a0/ddba7ed3251cff51006737a727d84e05b61517d1784a9988a846ba508877/coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148", size = 251058, upload-time = "2026-02-09T12:57:27.614Z" }, + { url = "https://files.pythonhosted.org/packages/9b/55/e289addf7ff54d3a540526f33751951bf0878f3809b47f6dfb3def69c6f7/coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a", size = 252805, upload-time = "2026-02-09T12:57:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/13/4e/cc276b1fa4a59be56d96f1dabddbdc30f4ba22e3b1cd42504c37b3313255/coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23", size = 250766, upload-time = "2026-02-09T12:57:30.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/1093b8f93018f8b41a8cf29636c9292502f05e4a113d4d107d14a3acd044/coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80", size = 254923, upload-time = "2026-02-09T12:57:31.946Z" }, + { url = "https://files.pythonhosted.org/packages/8b/55/ea2796da2d42257f37dbea1aab239ba9263b31bd91d5527cdd6db5efe174/coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea", size = 250591, upload-time = "2026-02-09T12:57:33.842Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/7c4bb72aacf8af5020675aa633e59c1fbe296d22aed191b6a5b711eb2bc7/coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a", size = 252364, upload-time = "2026-02-09T12:57:35.743Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/38/a8d2ec0146479c20bbaa7181b5b455a0c41101eed57f10dd19a78ab44c80/coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d", size = 222010, upload-time = "2026-02-09T12:57:37.25Z" }, + { url = "https://files.pythonhosted.org/packages/e2/0c/dbfafbe90a185943dcfbc766fe0e1909f658811492d79b741523a414a6cc/coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd", size = 222818, upload-time = "2026-02-09T12:57:38.734Z" }, + { url = "https://files.pythonhosted.org/packages/04/d1/934918a138c932c90d78301f45f677fb05c39a3112b96fd2c8e60503cdc7/coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af", size = 221438, upload-time = "2026-02-09T12:57:40.223Z" }, + { url = "https://files.pythonhosted.org/packages/52/57/ee93ced533bcb3e6df961c0c6e42da2fc6addae53fb95b94a89b1e33ebd7/coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d", size = 220165, upload-time = "2026-02-09T12:57:41.639Z" }, + { url = "https://files.pythonhosted.org/packages/c5/e0/969fc285a6fbdda49d91af278488d904dcd7651b2693872f0ff94e40e84a/coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12", size = 220516, upload-time = "2026-02-09T12:57:44.215Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b8/9531944e16267e2735a30a9641ff49671f07e8138ecf1ca13db9fd2560c7/coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b", size = 261804, upload-time = "2026-02-09T12:57:45.989Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/f3/e63df6d500314a2a60390d1989240d5f27318a7a68fa30ad3806e2a9323e/coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9", size = 263885, upload-time = "2026-02-09T12:57:47.42Z" }, + { url = "https://files.pythonhosted.org/packages/f3/67/7654810de580e14b37670b60a09c599fa348e48312db5b216d730857ffe6/coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092", size = 266308, upload-time = "2026-02-09T12:57:49.345Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/39d41eca0eab3cc82115953ad41c4e77935286c930e8fad15eaed1389d83/coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9", size = 267452, upload-time = "2026-02-09T12:57:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/50/6d/39c0fbb8fc5cd4d2090811e553c2108cf5112e882f82505ee7495349a6bf/coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26", size = 261057, upload-time = "2026-02-09T12:57:52.447Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/60010c669df5fa603bb5a97fb75407e191a846510da70ac657eb696b7fce/coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2", size = 263875, upload-time = "2026-02-09T12:57:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/63b22a6bdbd17f1f96e9ed58604c2a6b0e72a9133e37d663bef185877cf6/coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940", size = 261500, 
upload-time = "2026-02-09T12:57:56.012Z" }, + { url = "https://files.pythonhosted.org/packages/70/bf/69f86ba1ad85bc3ad240e4c0e57a2e620fbc0e1645a47b5c62f0e941ad7f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c", size = 265212, upload-time = "2026-02-09T12:57:57.5Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f2/5f65a278a8c2148731831574c73e42f57204243d33bedaaf18fa79c5958f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0", size = 260398, upload-time = "2026-02-09T12:57:59.027Z" }, + { url = "https://files.pythonhosted.org/packages/ef/80/6e8280a350ee9fea92f14b8357448a242dcaa243cb2c72ab0ca591f66c8c/coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b", size = 262584, upload-time = "2026-02-09T12:58:01.129Z" }, + { url = "https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = "2026-02-09T12:58:02.736Z" }, + { url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" }, + { url = "https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" }, + { url = "https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" }, + { url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" }, + { url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" }, + { url = "https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" }, + { url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, 
upload-time = "2026-02-09T12:58:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" }, + { url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" }, + { url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" }, + { url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = "2026-02-09T12:58:29.441Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" }, + { url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" }, + { url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" }, + { url = "https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" }, + { url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" }, + { url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" }, + { url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, 
upload-time = "2026-02-09T12:58:48.995Z" }, + { url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" }, + { url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" }, + { url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" }, + { url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = "2026-02-09T12:58:56.936Z" }, + { url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, ] [[package]] name = "cryptography" -version = "46.0.3" +version = "46.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, - { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, - { url = 
"https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, - { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, - { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, - { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, - { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, - { url = 
"https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, - { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, - { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, - { url = 
"https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, - { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, - { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, - { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, - { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, - { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, - { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, - { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, - { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, - { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, - { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, - { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, - { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, - { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, - { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, - { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, - { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, - { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, - { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, - { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, - { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, - { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, - { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, - { url = 
"https://files.pythonhosted.org/packages/d9/cd/1a8633802d766a0fa46f382a77e096d7e209e0817892929655fe0586ae32/cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32", size = 3689163, upload-time = "2025-10-15T23:18:13.821Z" }, - { url = "https://files.pythonhosted.org/packages/4c/59/6b26512964ace6480c3e54681a9859c974172fb141c38df11eadd8416947/cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c", size = 3429474, upload-time = "2025-10-15T23:18:15.477Z" }, - { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, - { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, - { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, - { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, - { url = 
"https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, + { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, + { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, + { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, + { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, + { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, + { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, + { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, + { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, + { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, + { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, + { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, + { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, + { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, + { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, + { url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" }, + { url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, ] [[package]] @@ -1213,10 +1552,35 @@ wheels = [ [package.optional-dependencies] cuda = [ - { name = "nvidia-cublas-cu12", version = "12.8.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, - { name = "nvidia-cublas-cu12", version = "12.9.1.4", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or sys_platform == 'darwin'" }, - { name = "nvidia-cuda-runtime-cu12", version = "12.8.90", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, - { name = "nvidia-cuda-runtime-cu12", version = "12.9.79", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or sys_platform == 'darwin'" }, + { name = "nvidia-cublas-cu12", version = 
"12.8.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')" }, + { name = "nvidia-cublas-cu12", version = "12.9.1.4", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or sys_platform == 'darwin' or sys_platform == 'win32'" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.8.90", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.9.79", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or sys_platform == 'darwin' or sys_platform == 'win32'" }, +] + +[[package]] +name = "cuda-bindings" +version = "12.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cuda-pathfinder", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/d8/b546104b8da3f562c1ff8ab36d130c8fe1dd6a045ced80b4f6ad74f7d4e1/cuda_bindings-12.9.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d3c842c2a4303b2a580fe955018e31aea30278be19795ae05226235268032e5", size = 12148218, upload-time = "2025-10-21T14:51:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/45/e7/b47792cc2d01c7e1d37c32402182524774dadd2d26339bd224e0e913832e/cuda_bindings-12.9.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c912a3d9e6b6651853eed8eed96d6800d69c08e94052c292fec3f282c5a817c9", size = 12210593, upload-time = "2025-10-21T14:51:36.574Z" }, + { 
url = "https://files.pythonhosted.org/packages/a9/c1/dabe88f52c3e3760d861401bb994df08f672ec893b8f7592dc91626adcf3/cuda_bindings-12.9.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fda147a344e8eaeca0c6ff113d2851ffca8f7dfc0a6c932374ee5c47caa649c8", size = 12151019, upload-time = "2025-10-21T14:51:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/63/56/e465c31dc9111be3441a9ba7df1941fe98f4aa6e71e8788a3fb4534ce24d/cuda_bindings-12.9.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:32bdc5a76906be4c61eb98f546a6786c5773a881f3b166486449b5d141e4a39f", size = 11906628, upload-time = "2025-10-21T14:51:49.905Z" }, + { url = "https://files.pythonhosted.org/packages/a3/84/1e6be415e37478070aeeee5884c2022713c1ecc735e6d82d744de0252eee/cuda_bindings-12.9.4-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56e0043c457a99ac473ddc926fe0dc4046694d99caef633e92601ab52cbe17eb", size = 11925991, upload-time = "2025-10-21T14:51:56.535Z" }, + { url = "https://files.pythonhosted.org/packages/d1/af/6dfd8f2ed90b1d4719bc053ff8940e494640fe4212dc3dd72f383e4992da/cuda_bindings-12.9.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8b72ee72a9cc1b531db31eebaaee5c69a8ec3500e32c6933f2d3b15297b53686", size = 11922703, upload-time = "2025-10-21T14:52:03.585Z" }, + { url = "https://files.pythonhosted.org/packages/6c/19/90ac264acc00f6df8a49378eedec9fd2db3061bf9263bf9f39fd3d8377c3/cuda_bindings-12.9.4-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d80bffc357df9988dca279734bc9674c3934a654cab10cadeed27ce17d8635ee", size = 11924658, upload-time = "2025-10-21T14:52:10.411Z" }, +] + +[[package]] +name = "cuda-pathfinder" +version = "1.3.4" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b8/5e/db279a3bfbd18d59d0598922a3b3c1454908d0969e8372260afec9736376/cuda_pathfinder-1.3.4-py3-none-any.whl", hash = "sha256:fb983f6e0d43af27ef486e14d5989b5f904ef45cedf40538bfdcbffa6bb01fb2", size = 30878, upload-time = "2026-02-11T18:50:31.008Z" }, ] [[package]] @@ -1224,21 +1588,17 @@ name = "cupy-cuda12x" version = "13.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "fastrlock" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "fastrlock", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'aarch64') or (python_full_version < '3.11' and sys_platform != 'linux')" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64') or (python_full_version >= '3.11' and sys_platform != 'linux')" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/2e/db22c5148884e4e384f6ebbc7971fa3710f3ba67ca492798890a0fdebc45/cupy_cuda12x-13.6.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9e37f60f27ff9625dfdccc4688a09852707ec613e32ea9404f425dd22a386d14", size = 126341714, upload-time = "2025-08-18T08:24:08.335Z" }, { url = "https://files.pythonhosted.org/packages/53/2b/8064d94a6ab6b5c4e643d8535ab6af6cabe5455765540931f0ef60a0bc3b/cupy_cuda12x-13.6.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:e78409ea72f5ac7d6b6f3d33d99426a94005254fa57e10617f430f9fd7c3a0a1", size = 112238589, upload-time = "2025-08-18T08:24:15.541Z" }, { url = 
"https://files.pythonhosted.org/packages/de/7b/bac3ca73e164d2b51c6298620261637c7286e06d373f597b036fc45f5563/cupy_cuda12x-13.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f33c9c975782ef7a42c79b6b4fb3d5b043498f9b947126d792592372b432d393", size = 89874119, upload-time = "2025-08-18T08:24:20.628Z" }, - { url = "https://files.pythonhosted.org/packages/54/64/71c6e08f76c06639e5112f69ee3bc1129be00054ad5f906d7fd3138af579/cupy_cuda12x-13.6.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:c790d012fd4d86872b9c89af9f5f15d91c30b8e3a4aa4dd04c2610f45f06ac44", size = 128016458, upload-time = "2025-08-18T08:24:26.394Z" }, { url = "https://files.pythonhosted.org/packages/fc/d9/5c5077243cd92368c3eccecdbf91d76db15db338169042ffd1647533c6b1/cupy_cuda12x-13.6.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:77ba6745a130d880c962e687e4e146ebbb9014f290b0a80dbc4e4634eb5c3b48", size = 113039337, upload-time = "2025-08-18T08:24:31.814Z" }, { url = "https://files.pythonhosted.org/packages/88/f5/02bea5cdf108e2a66f98e7d107b4c9a6709e5dbfedf663340e5c11719d83/cupy_cuda12x-13.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:a20b7acdc583643a623c8d8e3efbe0db616fbcf5916e9c99eedf73859b6133af", size = 89885526, upload-time = "2025-08-18T08:24:37.258Z" }, - { url = "https://files.pythonhosted.org/packages/12/c5/7e7fc4816d0de0154e5d9053242c3a08a0ca8b43ee656a6f7b3b95055a7b/cupy_cuda12x-13.6.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:a6970ceefe40f9acbede41d7fe17416bd277b1bd2093adcde457b23b578c5a59", size = 127334633, upload-time = "2025-08-18T08:24:43.065Z" }, { url = "https://files.pythonhosted.org/packages/e0/95/d7e1295141e7d530674a3cc567e13ed0eb6b81524cb122d797ed996b5bea/cupy_cuda12x-13.6.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:79b0cacb5e8b190ef409f9e03f06ac8de1b021b0c0dda47674d446f5557e0eb1", size = 112886268, upload-time = "2025-08-18T08:24:49.294Z" }, { url = 
"https://files.pythonhosted.org/packages/ae/8c/14555b63fd78cfac7b88af0094cea0a3cb845d243661ec7da69f7b3ea0de/cupy_cuda12x-13.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca06fede7b8b83ca9ad80062544ef2e5bb8d4762d1c4fc3ac8349376de9c8a5e", size = 89785108, upload-time = "2025-08-18T08:24:54.527Z" }, - { url = "https://files.pythonhosted.org/packages/19/ec/f62cb991f11fb41291c4c15b6936d7b67ffa71ddb344ad6e8894e06ce58d/cupy_cuda12x-13.6.0-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e5426ae3b1b9cf59927481e457a89e3f0b50a35b114a8034ec9110e7a833434c", size = 126904601, upload-time = "2025-08-18T08:24:59.951Z" }, { url = "https://files.pythonhosted.org/packages/f8/b8/30127bcdac53a25f94ee201bf4802fcd8d012145567d77c54174d6d01c01/cupy_cuda12x-13.6.0-cp313-cp313-manylinux2014_x86_64.whl", hash = "sha256:52d9e7f83d920da7d81ec2e791c2c2c747fdaa1d7b811971b34865ce6371e98a", size = 112654824, upload-time = "2025-08-18T08:25:05.944Z" }, { url = "https://files.pythonhosted.org/packages/72/36/c9e24acb19f039f814faea880b3704a3661edaa6739456b73b27540663e3/cupy_cuda12x-13.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:297b4268f839de67ef7865c2202d3f5a0fb8d20bd43360bc51b6e60cb4406447", size = 89750580, upload-time = "2025-08-18T08:25:10.972Z" }, ] @@ -1253,46 +1613,24 @@ wheels = [ ] [[package]] -name = "cython" -version = "3.2.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/91/85/7574c9cd44b69a27210444b6650f6477f56c75fee1b70d7672d3e4166167/cython-3.2.4.tar.gz", hash = "sha256:84226ecd313b233da27dc2eb3601b4f222b8209c3a7216d8733b031da1dc64e6", size = 3280291, upload-time = "2026-01-04T14:14:14.473Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/10/720e0fb84eab4c927c4dd6b61eb7993f7732dd83d29ba6d73083874eade9/cython-3.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02cb0cc0f23b9874ad262d7d2b9560aed9c7e2df07b49b920bda6f2cc9cb505e", size = 2960836, upload-time = "2026-01-04T14:14:51.103Z" }, - { 
url = "https://files.pythonhosted.org/packages/7d/3d/b26f29092c71c36e0462752885bdfb18c23c176af4de953fdae2772a8941/cython-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f136f379a4a54246facd0eb6f1ee15c3837cb314ce87b677582ec014db4c6845", size = 3370134, upload-time = "2026-01-04T14:14:53.627Z" }, - { url = "https://files.pythonhosted.org/packages/56/9e/539fb0d09e4f5251b5b14f8daf77e71fee021527f1013791038234618b6b/cython-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:35ab0632186057406ec729374c737c37051d2eacad9d515d94e5a3b3e58a9b02", size = 3537552, upload-time = "2026-01-04T14:14:56.852Z" }, - { url = "https://files.pythonhosted.org/packages/10/c6/82d19a451c050d1be0f05b1a3302267463d391db548f013ee88b5348a8e9/cython-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:ca2399dc75796b785f74fb85c938254fa10c80272004d573c455f9123eceed86", size = 2766191, upload-time = "2026-01-04T14:14:58.709Z" }, - { url = "https://files.pythonhosted.org/packages/85/cc/8f06145ec3efa121c8b1b67f06a640386ddacd77ee3e574da582a21b14ee/cython-3.2.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff9af2134c05e3734064808db95b4dd7341a39af06e8945d05ea358e1741aaed", size = 2953769, upload-time = "2026-01-04T14:15:00.361Z" }, - { url = "https://files.pythonhosted.org/packages/55/b0/706cf830eddd831666208af1b3058c2e0758ae157590909c1f634b53bed9/cython-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67922c9de058a0bfb72d2e75222c52d09395614108c68a76d9800f150296ddb3", size = 3243841, upload-time = "2026-01-04T14:15:02.066Z" }, - { url = "https://files.pythonhosted.org/packages/ac/25/58893afd4ef45f79e3d4db82742fa4ff874b936d67a83c92939053920ccd/cython-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b362819d155fff1482575e804e43e3a8825332d32baa15245f4642022664a3f4", size = 3378083, 
upload-time = "2026-01-04T14:15:04.248Z" }, - { url = "https://files.pythonhosted.org/packages/32/e4/424a004d7c0d8a4050c81846ebbd22272ececfa9a498cb340aa44fccbec2/cython-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:1a64a112a34ec719b47c01395647e54fb4cf088a511613f9a3a5196694e8e382", size = 2769990, upload-time = "2026-01-04T14:15:06.53Z" }, - { url = "https://files.pythonhosted.org/packages/91/4d/1eb0c7c196a136b1926f4d7f0492a96c6fabd604d77e6cd43b56a3a16d83/cython-3.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:64d7f71be3dd6d6d4a4c575bb3a4674ea06d1e1e5e4cd1b9882a2bc40ed3c4c9", size = 2970064, upload-time = "2026-01-04T14:15:08.567Z" }, - { url = "https://files.pythonhosted.org/packages/03/1c/46e34b08bea19a1cdd1e938a4c123e6299241074642db9d81983cef95e9f/cython-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:869487ea41d004f8b92171f42271fbfadb1ec03bede3158705d16cd570d6b891", size = 3226757, upload-time = "2026-01-04T14:15:10.812Z" }, - { url = "https://files.pythonhosted.org/packages/12/33/3298a44d201c45bcf0d769659725ae70e9c6c42adf8032f6d89c8241098d/cython-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:55b6c44cd30821f0b25220ceba6fe636ede48981d2a41b9bbfe3c7902ce44ea7", size = 3388969, upload-time = "2026-01-04T14:15:12.45Z" }, - { url = "https://files.pythonhosted.org/packages/bb/f3/4275cd3ea0a4cf4606f9b92e7f8766478192010b95a7f516d1b7cf22cb10/cython-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:767b143704bdd08a563153448955935844e53b852e54afdc552b43902ed1e235", size = 2756457, upload-time = "2026-01-04T14:15:14.67Z" }, - { url = "https://files.pythonhosted.org/packages/18/b5/1cfca43b7d20a0fdb1eac67313d6bb6b18d18897f82dd0f17436bdd2ba7f/cython-3.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:28e8075087a59756f2d059273184b8b639fe0f16cf17470bd91c39921bc154e0", size = 2960506, upload-time = "2026-01-04T14:15:16.733Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/bb/8f28c39c342621047fea349a82fac712a5e2b37546d2f737bbde48d5143d/cython-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:03893c88299a2c868bb741ba6513357acd104e7c42265809fd58dce1456a36fc", size = 3213148, upload-time = "2026-01-04T14:15:18.804Z" }, - { url = "https://files.pythonhosted.org/packages/7a/d2/16fa02f129ed2b627e88d9d9ebd5ade3eeb66392ae5ba85b259d2d52b047/cython-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f81eda419b5ada7b197bbc3c5f4494090e3884521ffd75a3876c93fbf66c9ca8", size = 3375764, upload-time = "2026-01-04T14:15:20.817Z" }, - { url = "https://files.pythonhosted.org/packages/91/3f/deb8f023a5c10c0649eb81332a58c180fad27c7533bb4aae138b5bc34d92/cython-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:83266c356c13c68ffe658b4905279c993d8a5337bb0160fa90c8a3e297ea9a2e", size = 2754238, upload-time = "2026-01-04T14:15:23.001Z" }, - { url = "https://files.pythonhosted.org/packages/ee/d7/3bda3efce0c5c6ce79cc21285dbe6f60369c20364e112f5a506ee8a1b067/cython-3.2.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d4b4fd5332ab093131fa6172e8362f16adef3eac3179fd24bbdc392531cb82fa", size = 2971496, upload-time = "2026-01-04T14:15:25.038Z" }, - { url = "https://files.pythonhosted.org/packages/89/ed/1021ffc80b9c4720b7ba869aea8422c82c84245ef117ebe47a556bdc00c3/cython-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3b5ac54e95f034bc7fb07313996d27cbf71abc17b229b186c1540942d2dc28e", size = 3256146, upload-time = "2026-01-04T14:15:26.741Z" }, - { url = "https://files.pythonhosted.org/packages/0c/51/ca221ec7e94b3c5dc4138dcdcbd41178df1729c1e88c5dfb25f9d30ba3da/cython-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90f43be4eaa6afd58ce20d970bb1657a3627c44e1760630b82aa256ba74b4acb", size = 3383458, upload-time = 
"2026-01-04T14:15:28.425Z" }, - { url = "https://files.pythonhosted.org/packages/79/2e/1388fc0243240cd54994bb74f26aaaf3b2e22f89d3a2cf8da06d75d46ca2/cython-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:983f9d2bb8a896e16fa68f2b37866ded35fa980195eefe62f764ddc5f9f5ef8e", size = 2791241, upload-time = "2026-01-04T14:15:30.448Z" }, - { url = "https://files.pythonhosted.org/packages/0a/8b/fd393f0923c82be4ec0db712fffb2ff0a7a131707b842c99bf24b549274d/cython-3.2.4-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:36bf3f5eb56d5281aafabecbaa6ed288bc11db87547bba4e1e52943ae6961ccf", size = 2875622, upload-time = "2026-01-04T14:15:39.749Z" }, - { url = "https://files.pythonhosted.org/packages/73/48/48530d9b9d64ec11dbe0dd3178a5fe1e0b27977c1054ecffb82be81e9b6a/cython-3.2.4-cp39-abi3-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6d5267f22b6451eb1e2e1b88f6f78a2c9c8733a6ddefd4520d3968d26b824581", size = 3210669, upload-time = "2026-01-04T14:15:41.911Z" }, - { url = "https://files.pythonhosted.org/packages/5e/91/4865fbfef1f6bb4f21d79c46104a53d1a3fa4348286237e15eafb26e0828/cython-3.2.4-cp39-abi3-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3b6e58f73a69230218d5381817850ce6d0da5bb7e87eb7d528c7027cbba40b06", size = 2856835, upload-time = "2026-01-04T14:15:43.815Z" }, - { url = "https://files.pythonhosted.org/packages/fa/39/60317957dbef179572398253f29d28f75f94ab82d6d39ea3237fb6c89268/cython-3.2.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e71efb20048358a6b8ec604a0532961c50c067b5e63e345e2e359fff72feaee8", size = 2994408, upload-time = "2026-01-04T14:15:45.422Z" }, - { url = "https://files.pythonhosted.org/packages/8d/30/7c24d9292650db4abebce98abc9b49c820d40fa7c87921c0a84c32f4efe7/cython-3.2.4-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:28b1e363b024c4b8dcf52ff68125e635cb9cb4b0ba997d628f25e32543a71103", size = 2891478, upload-time = "2026-01-04T14:15:47.394Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/70/03dc3c962cde9da37a93cca8360e576f904d5f9beecfc9d70b1f820d2e5f/cython-3.2.4-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:31a90b4a2c47bb6d56baeb926948348ec968e932c1ae2c53239164e3e8880ccf", size = 3225663, upload-time = "2026-01-04T14:15:49.446Z" }, - { url = "https://files.pythonhosted.org/packages/b1/97/10b50c38313c37b1300325e2e53f48ea9a2c078a85c0c9572057135e31d5/cython-3.2.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e65e4773021f8dc8532010b4fbebe782c77f9a0817e93886e518c93bd6a44e9d", size = 3115628, upload-time = "2026-01-04T14:15:51.323Z" }, - { url = "https://files.pythonhosted.org/packages/8f/b1/d6a353c9b147848122a0db370863601fdf56de2d983b5c4a6a11e6ee3cd7/cython-3.2.4-cp39-abi3-win32.whl", hash = "sha256:2b1f12c0e4798293d2754e73cd6f35fa5bbdf072bdc14bc6fc442c059ef2d290", size = 2437463, upload-time = "2026-01-04T14:15:53.787Z" }, - { url = "https://files.pythonhosted.org/packages/2d/d8/319a1263b9c33b71343adfd407e5daffd453daef47ebc7b642820a8b68ed/cython-3.2.4-cp39-abi3-win_arm64.whl", hash = "sha256:3b8e62049afef9da931d55de82d8f46c9a147313b69d5ff6af6e9121d545ce7a", size = 2442754, upload-time = "2026-01-04T14:15:55.382Z" }, - { url = "https://files.pythonhosted.org/packages/ff/fa/d3c15189f7c52aaefbaea76fb012119b04b9013f4bf446cb4eb4c26c4e6b/cython-3.2.4-py3-none-any.whl", hash = "sha256:732fc93bc33ae4b14f6afaca663b916c2fdd5dcbfad7114e17fb2434eeaea45c", size = 1257078, upload-time = "2026-01-04T14:14:12.373Z" }, +name = "cyclonedds" +version = "0.10.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "rich-click" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/cf/28eb9c823dfc245c540f5286d71b44aeee2a51021fc85b25bb9562be78cc/cyclonedds-0.10.5.tar.gz", hash = "sha256:63fc4d6fdb2fd35181c40f4e90757149f2def5f570ef19fb71edc4f568755f8a", size = 156919, upload-time = "2024-06-05T18:50:42.999Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cb/c3/69ba063a51c06ba24fa4fd463157d4cc2bc54ab1a2ab8ebdf88e8f3dde25/cyclonedds-0.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:03644e406d0c1cac45887b378d35054a0033c48f2e29d9aab3bfc1ee6c4b9aa6", size = 864591, upload-time = "2024-06-05T18:50:46.563Z" }, + { url = "https://files.pythonhosted.org/packages/cf/98/08508aff65c87bcef473e23a51506a100fb35bf70450c40eb227a576a018/cyclonedds-0.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a0d9fa8747827dc9bd678d73ed6f12b0ab9853b2cb7ebadbf3d8d89625f0e34", size = 799626, upload-time = "2024-06-05T18:50:48.17Z" }, + { url = "https://files.pythonhosted.org/packages/99/0d/02da52ffd27b92b85b64997cc449106479456648da17aa44a09124e8ebe5/cyclonedds-0.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:861d2ffd9513126d6a62ad9f842e85122518a7db1fb0a11d6e4fa86e3cacf61c", size = 6631487, upload-time = "2024-06-05T18:50:50.747Z" }, + { url = "https://files.pythonhosted.org/packages/e4/2b/d8fff5008c2c62882c2ffc185bdb0d4d1c9caf7bc5aaaef77bd9739bdc12/cyclonedds-0.10.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8276b2bc347540e3ca892adf976421dbce4c6d2672934a32409db121a1431b86", size = 6653044, upload-time = "2024-06-05T18:50:52.786Z" }, + { url = "https://files.pythonhosted.org/packages/07/ab/acaa119f552019bdb2b06478553cf712967672f5970be80ecc9b4ca805f4/cyclonedds-0.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:103a681e9490229f12c151a125e00c4db8fdb344c8e12e35ee515cd9d5d1ecd7", size = 1200672, upload-time = "2024-06-05T18:50:54.303Z" }, ] [[package]] name = "dash" -version = "3.3.0" +version = "4.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "flask" }, @@ -1305,9 +1643,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e2/f9/516671861cf190bda37f6afa696d8a6a6ac593f23d8cf198e16faca044f5/dash-3.3.0.tar.gz", hash = 
"sha256:eaaa7a671540b5e1db8066f4966d0277d21edc2c7acdaec2fd6d198366a8b0df", size = 7579436, upload-time = "2025-11-12T15:51:54.919Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/dd/3aed9bfd81dfd8f44b3a5db0583080ac9470d5e92ee134982bd5c69e286e/dash-4.0.0.tar.gz", hash = "sha256:c5f2bca497af288f552aea3ae208f6a0cca472559003dac84ac21187a1c3a142", size = 6943263, upload-time = "2026-02-03T19:42:27.92Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/cf/a4853e5b2b2bea55ae909095a8720b3ed50d07bdd40cbeafcedb5a6c47da/dash-3.3.0-py3-none-any.whl", hash = "sha256:8f52415977f7490492dd8a3872279160be8ff253ca9f4d49a4e3ba747fa4bd91", size = 7919707, upload-time = "2025-11-12T15:51:47.432Z" }, + { url = "https://files.pythonhosted.org/packages/0b/8c/dd63d210b28a7589f4bc1e84880525368147425c717d12834ab562f52d14/dash-4.0.0-py3-none-any.whl", hash = "sha256:e36b4b4eae9e1fa4136bf4f1450ed14ef76063bc5da0b10f8ab07bd57a7cb1ab", size = 7247521, upload-time = "2026-02-03T19:42:25.01Z" }, ] [[package]] @@ -1337,37 +1675,54 @@ complete = [ { name = "lz4" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "pandas" }, + { name = "pandas", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "pandas", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pyarrow" }, ] +distributed = [ + { name = "distributed" }, +] + +[[package]] +name = "dataclasses-json" +version = "0.6.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "marshmallow", marker = "python_full_version >= '3.11'" }, + { name = "typing-inspect", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227, upload-time = "2024-06-09T16:20:19.103Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686, upload-time = "2024-06-09T16:20:16.715Z" }, +] [[package]] name = "debugpy" -version = "1.8.19" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/73/75/9e12d4d42349b817cd545b89247696c67917aab907012ae5b64bbfea3199/debugpy-1.8.19.tar.gz", hash = "sha256:eea7e5987445ab0b5ed258093722d5ecb8bb72217c5c9b1e21f64efe23ddebdb", size = 1644590, upload-time = "2025-12-15T21:53:28.044Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/98/d57054371887f37d3c959a7a8dc3c76b763acb65f5e78d849d7db7cadc5b/debugpy-1.8.19-cp310-cp310-macosx_15_0_x86_64.whl", hash = "sha256:fce6da15d73be5935b4438435c53adb512326a3e11e4f90793ea87cd9f018254", size = 2098493, upload-time = "2025-12-15T21:53:30.149Z" }, - { url = "https://files.pythonhosted.org/packages/ee/dd/c517b9aa3500157a30e4f4c4f5149f880026bd039d2b940acd2383a85d8e/debugpy-1.8.19-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:e24b1652a1df1ab04d81e7ead446a91c226de704ff5dde6bd0a0dbaab07aa3f2", size = 3087875, upload-time = "2025-12-15T21:53:31.511Z" }, - { url = "https://files.pythonhosted.org/packages/d8/57/3d5a5b0da9b63445253107ead151eff29190c6ad7440c68d1a59d56613aa/debugpy-1.8.19-cp310-cp310-win32.whl", hash = "sha256:327cb28c3ad9e17bc925efc7f7018195fd4787c2fe4b7af1eec11f1d19bdec62", size = 5239378, upload-time = "2025-12-15T21:53:32.979Z" }, - { url = 
"https://files.pythonhosted.org/packages/a6/36/7f9053c4c549160c87ae7e43800138f2695578c8b65947114c97250983b6/debugpy-1.8.19-cp310-cp310-win_amd64.whl", hash = "sha256:b7dd275cf2c99e53adb9654f5ae015f70415bbe2bacbe24cfee30d54b6aa03c5", size = 5271129, upload-time = "2025-12-15T21:53:35.085Z" }, - { url = "https://files.pythonhosted.org/packages/80/e2/48531a609b5a2aa94c6b6853afdfec8da05630ab9aaa96f1349e772119e9/debugpy-1.8.19-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:c5dcfa21de1f735a4f7ced4556339a109aa0f618d366ede9da0a3600f2516d8b", size = 2207620, upload-time = "2025-12-15T21:53:37.1Z" }, - { url = "https://files.pythonhosted.org/packages/1b/d4/97775c01d56071969f57d93928899e5616a4cfbbf4c8cc75390d3a51c4a4/debugpy-1.8.19-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:806d6800246244004625d5222d7765874ab2d22f3ba5f615416cf1342d61c488", size = 3170796, upload-time = "2025-12-15T21:53:38.513Z" }, - { url = "https://files.pythonhosted.org/packages/8d/7e/8c7681bdb05be9ec972bbb1245eb7c4c7b0679bb6a9e6408d808bc876d3d/debugpy-1.8.19-cp311-cp311-win32.whl", hash = "sha256:783a519e6dfb1f3cd773a9bda592f4887a65040cb0c7bd38dde410f4e53c40d4", size = 5164287, upload-time = "2025-12-15T21:53:40.857Z" }, - { url = "https://files.pythonhosted.org/packages/f2/a8/aaac7ff12ddf5d68a39e13a423a8490426f5f661384f5ad8d9062761bd8e/debugpy-1.8.19-cp311-cp311-win_amd64.whl", hash = "sha256:14035cbdbb1fe4b642babcdcb5935c2da3b1067ac211c5c5a8fdc0bb31adbcaa", size = 5188269, upload-time = "2025-12-15T21:53:42.359Z" }, - { url = "https://files.pythonhosted.org/packages/4a/15/d762e5263d9e25b763b78be72dc084c7a32113a0bac119e2f7acae7700ed/debugpy-1.8.19-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:bccb1540a49cde77edc7ce7d9d075c1dbeb2414751bc0048c7a11e1b597a4c2e", size = 2549995, upload-time = "2025-12-15T21:53:43.773Z" }, - { url = 
"https://files.pythonhosted.org/packages/a7/88/f7d25c68b18873b7c53d7c156ca7a7ffd8e77073aa0eac170a9b679cf786/debugpy-1.8.19-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:e9c68d9a382ec754dc05ed1d1b4ed5bd824b9f7c1a8cd1083adb84b3c93501de", size = 4309891, upload-time = "2025-12-15T21:53:45.26Z" }, - { url = "https://files.pythonhosted.org/packages/c5/4f/a65e973aba3865794da65f71971dca01ae66666132c7b2647182d5be0c5f/debugpy-1.8.19-cp312-cp312-win32.whl", hash = "sha256:6599cab8a783d1496ae9984c52cb13b7c4a3bd06a8e6c33446832a5d97ce0bee", size = 5286355, upload-time = "2025-12-15T21:53:46.763Z" }, - { url = "https://files.pythonhosted.org/packages/d8/3a/d3d8b48fec96e3d824e404bf428276fb8419dfa766f78f10b08da1cb2986/debugpy-1.8.19-cp312-cp312-win_amd64.whl", hash = "sha256:66e3d2fd8f2035a8f111eb127fa508469dfa40928a89b460b41fd988684dc83d", size = 5328239, upload-time = "2025-12-15T21:53:48.868Z" }, - { url = "https://files.pythonhosted.org/packages/71/3d/388035a31a59c26f1ecc8d86af607d0c42e20ef80074147cd07b180c4349/debugpy-1.8.19-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:91e35db2672a0abaf325f4868fcac9c1674a0d9ad9bb8a8c849c03a5ebba3e6d", size = 2538859, upload-time = "2025-12-15T21:53:50.478Z" }, - { url = "https://files.pythonhosted.org/packages/4a/19/c93a0772d0962294f083dbdb113af1a7427bb632d36e5314297068f55db7/debugpy-1.8.19-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:85016a73ab84dea1c1f1dcd88ec692993bcbe4532d1b49ecb5f3c688ae50c606", size = 4292575, upload-time = "2025-12-15T21:53:51.821Z" }, - { url = "https://files.pythonhosted.org/packages/5c/56/09e48ab796b0a77e3d7dc250f95251832b8bf6838c9632f6100c98bdf426/debugpy-1.8.19-cp313-cp313-win32.whl", hash = "sha256:b605f17e89ba0ecee994391194285fada89cee111cfcd29d6f2ee11cbdc40976", size = 5286209, upload-time = "2025-12-15T21:53:53.602Z" }, - { url = "https://files.pythonhosted.org/packages/fb/4e/931480b9552c7d0feebe40c73725dd7703dcc578ba9efc14fe0e6d31cfd1/debugpy-1.8.19-cp313-cp313-win_amd64.whl", 
hash = "sha256:c30639998a9f9cd9699b4b621942c0179a6527f083c72351f95c6ab1728d5b73", size = 5328206, upload-time = "2025-12-15T21:53:55.433Z" }, - { url = "https://files.pythonhosted.org/packages/f6/b9/cbec520c3a00508327476c7fce26fbafef98f412707e511eb9d19a2ef467/debugpy-1.8.19-cp314-cp314-macosx_15_0_universal2.whl", hash = "sha256:1e8c4d1bd230067bf1bbcdbd6032e5a57068638eb28b9153d008ecde288152af", size = 2537372, upload-time = "2025-12-15T21:53:57.318Z" }, - { url = "https://files.pythonhosted.org/packages/88/5e/cf4e4dc712a141e10d58405c58c8268554aec3c35c09cdcda7535ff13f76/debugpy-1.8.19-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:d40c016c1f538dbf1762936e3aeb43a89b965069d9f60f9e39d35d9d25e6b809", size = 4268729, upload-time = "2025-12-15T21:53:58.712Z" }, - { url = "https://files.pythonhosted.org/packages/82/a3/c91a087ab21f1047db328c1d3eb5d1ff0e52de9e74f9f6f6fa14cdd93d58/debugpy-1.8.19-cp314-cp314-win32.whl", hash = "sha256:0601708223fe1cd0e27c6cce67a899d92c7d68e73690211e6788a4b0e1903f5b", size = 5286388, upload-time = "2025-12-15T21:54:00.687Z" }, - { url = "https://files.pythonhosted.org/packages/17/b8/bfdc30b6e94f1eff09f2dc9cc1f9cd1c6cde3d996bcbd36ce2d9a4956e99/debugpy-1.8.19-cp314-cp314-win_amd64.whl", hash = "sha256:8e19a725f5d486f20e53a1dde2ab8bb2c9607c40c00a42ab646def962b41125f", size = 5327741, upload-time = "2025-12-15T21:54:02.148Z" }, - { url = "https://files.pythonhosted.org/packages/25/3e/e27078370414ef35fafad2c06d182110073daaeb5d3bf734b0b1eeefe452/debugpy-1.8.19-py2.py3-none-any.whl", hash = "sha256:360ffd231a780abbc414ba0f005dad409e71c78637efe8f2bd75837132a41d38", size = 5292321, upload-time = "2025-12-15T21:54:16.024Z" }, +version = "1.8.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/b7/cd8080344452e4874aae67c40d8940e2b4d47b01601a8fd9f44786c757c7/debugpy-1.8.20.tar.gz", hash = "sha256:55bc8701714969f1ab89a6d5f2f3d40c36f91b2cbe2f65d98bf8196f6a6a2c33", size = 1645207, 
upload-time = "2026-01-29T23:03:28.199Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/be/8bd693a0b9d53d48c8978fa5d889e06f3b5b03e45fd1ea1e78267b4887cb/debugpy-1.8.20-cp310-cp310-macosx_15_0_x86_64.whl", hash = "sha256:157e96ffb7f80b3ad36d808646198c90acb46fdcfd8bb1999838f0b6f2b59c64", size = 2099192, upload-time = "2026-01-29T23:03:29.707Z" }, + { url = "https://files.pythonhosted.org/packages/77/1b/85326d07432086a06361d493d2743edd0c4fc2ef62162be7f8618441ac37/debugpy-1.8.20-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:c1178ae571aff42e61801a38b007af504ec8e05fde1c5c12e5a7efef21009642", size = 3088568, upload-time = "2026-01-29T23:03:31.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/60/3e08462ee3eccd10998853eb35947c416e446bfe2bc37dbb886b9044586c/debugpy-1.8.20-cp310-cp310-win32.whl", hash = "sha256:c29dd9d656c0fbd77906a6e6a82ae4881514aa3294b94c903ff99303e789b4a2", size = 5284399, upload-time = "2026-01-29T23:03:33.678Z" }, + { url = "https://files.pythonhosted.org/packages/72/43/09d49106e770fe558ced5e80df2e3c2ebee10e576eda155dcc5670473663/debugpy-1.8.20-cp310-cp310-win_amd64.whl", hash = "sha256:3ca85463f63b5dd0aa7aaa933d97cbc47c174896dcae8431695872969f981893", size = 5316388, upload-time = "2026-01-29T23:03:35.095Z" }, + { url = "https://files.pythonhosted.org/packages/51/56/c3baf5cbe4dd77427fd9aef99fcdade259ad128feeb8a786c246adb838e5/debugpy-1.8.20-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:eada6042ad88fa1571b74bd5402ee8b86eded7a8f7b827849761700aff171f1b", size = 2208318, upload-time = "2026-01-29T23:03:36.481Z" }, + { url = "https://files.pythonhosted.org/packages/9a/7d/4fa79a57a8e69fe0d9763e98d1110320f9ecd7f1f362572e3aafd7417c9d/debugpy-1.8.20-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:7de0b7dfeedc504421032afba845ae2a7bcc32ddfb07dae2c3ca5442f821c344", size = 3171493, upload-time = "2026-01-29T23:03:37.775Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/f2/1e8f8affe51e12a26f3a8a8a4277d6e60aa89d0a66512f63b1e799d424a4/debugpy-1.8.20-cp311-cp311-win32.whl", hash = "sha256:773e839380cf459caf73cc533ea45ec2737a5cc184cf1b3b796cd4fd98504fec", size = 5209240, upload-time = "2026-01-29T23:03:39.109Z" }, + { url = "https://files.pythonhosted.org/packages/d5/92/1cb532e88560cbee973396254b21bece8c5d7c2ece958a67afa08c9f10dc/debugpy-1.8.20-cp311-cp311-win_amd64.whl", hash = "sha256:1f7650546e0eded1902d0f6af28f787fa1f1dbdbc97ddabaf1cd963a405930cb", size = 5233481, upload-time = "2026-01-29T23:03:40.659Z" }, + { url = "https://files.pythonhosted.org/packages/14/57/7f34f4736bfb6e00f2e4c96351b07805d83c9a7b33d28580ae01374430f7/debugpy-1.8.20-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:4ae3135e2089905a916909ef31922b2d733d756f66d87345b3e5e52b7a55f13d", size = 2550686, upload-time = "2026-01-29T23:03:42.023Z" }, + { url = "https://files.pythonhosted.org/packages/ab/78/b193a3975ca34458f6f0e24aaf5c3e3da72f5401f6054c0dfd004b41726f/debugpy-1.8.20-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:88f47850a4284b88bd2bfee1f26132147d5d504e4e86c22485dfa44b97e19b4b", size = 4310588, upload-time = "2026-01-29T23:03:43.314Z" }, + { url = "https://files.pythonhosted.org/packages/c1/55/f14deb95eaf4f30f07ef4b90a8590fc05d9e04df85ee379712f6fb6736d7/debugpy-1.8.20-cp312-cp312-win32.whl", hash = "sha256:4057ac68f892064e5f98209ab582abfee3b543fb55d2e87610ddc133a954d390", size = 5331372, upload-time = "2026-01-29T23:03:45.526Z" }, + { url = "https://files.pythonhosted.org/packages/a1/39/2bef246368bd42f9bd7cba99844542b74b84dacbdbea0833e610f384fee8/debugpy-1.8.20-cp312-cp312-win_amd64.whl", hash = "sha256:a1a8f851e7cf171330679ef6997e9c579ef6dd33c9098458bd9986a0f4ca52e3", size = 5372835, upload-time = "2026-01-29T23:03:47.245Z" }, + { url = "https://files.pythonhosted.org/packages/15/e2/fc500524cc6f104a9d049abc85a0a8b3f0d14c0a39b9c140511c61e5b40b/debugpy-1.8.20-cp313-cp313-macosx_15_0_universal2.whl", 
hash = "sha256:5dff4bb27027821fdfcc9e8f87309a28988231165147c31730128b1c983e282a", size = 2539560, upload-time = "2026-01-29T23:03:48.738Z" }, + { url = "https://files.pythonhosted.org/packages/90/83/fb33dcea789ed6018f8da20c5a9bc9d82adc65c0c990faed43f7c955da46/debugpy-1.8.20-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:84562982dd7cf5ebebfdea667ca20a064e096099997b175fe204e86817f64eaf", size = 4293272, upload-time = "2026-01-29T23:03:50.169Z" }, + { url = "https://files.pythonhosted.org/packages/a6/25/b1e4a01bfb824d79a6af24b99ef291e24189080c93576dfd9b1a2815cd0f/debugpy-1.8.20-cp313-cp313-win32.whl", hash = "sha256:da11dea6447b2cadbf8ce2bec59ecea87cc18d2c574980f643f2d2dfe4862393", size = 5331208, upload-time = "2026-01-29T23:03:51.547Z" }, + { url = "https://files.pythonhosted.org/packages/13/f7/a0b368ce54ffff9e9028c098bd2d28cfc5b54f9f6c186929083d4c60ba58/debugpy-1.8.20-cp313-cp313-win_amd64.whl", hash = "sha256:eb506e45943cab2efb7c6eafdd65b842f3ae779f020c82221f55aca9de135ed7", size = 5372930, upload-time = "2026-01-29T23:03:53.585Z" }, + { url = "https://files.pythonhosted.org/packages/33/2e/f6cb9a8a13f5058f0a20fe09711a7b726232cd5a78c6a7c05b2ec726cff9/debugpy-1.8.20-cp314-cp314-macosx_15_0_universal2.whl", hash = "sha256:9c74df62fc064cd5e5eaca1353a3ef5a5d50da5eb8058fcef63106f7bebe6173", size = 2538066, upload-time = "2026-01-29T23:03:54.999Z" }, + { url = "https://files.pythonhosted.org/packages/c5/56/6ddca50b53624e1ca3ce1d1e49ff22db46c47ea5fb4c0cc5c9b90a616364/debugpy-1.8.20-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:077a7447589ee9bc1ff0cdf443566d0ecf540ac8aa7333b775ebcb8ce9f4ecad", size = 4269425, upload-time = "2026-01-29T23:03:56.518Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d9/d64199c14a0d4c476df46c82470a3ce45c8d183a6796cfb5e66533b3663c/debugpy-1.8.20-cp314-cp314-win32.whl", hash = "sha256:352036a99dd35053b37b7803f748efc456076f929c6a895556932eaf2d23b07f", size = 5331407, upload-time = "2026-01-29T23:03:58.481Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/d9/1f07395b54413432624d61524dfd98c1a7c7827d2abfdb8829ac92638205/debugpy-1.8.20-cp314-cp314-win_amd64.whl", hash = "sha256:a98eec61135465b062846112e5ecf2eebb855305acc1dfbae43b72903b8ab5be", size = 5372521, upload-time = "2026-01-29T23:03:59.864Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c3/7f67dea8ccf8fdcb9c99033bbe3e90b9e7395415843accb81428c441be2d/debugpy-1.8.20-py2.py3-none-any.whl", hash = "sha256:5be9bed9ae3be00665a06acaa48f8329d2b9632f15fd09f6a9a8c8d9907e54d7", size = 5337658, upload-time = "2026-01-29T23:04:17.404Z" }, ] [[package]] @@ -1379,21 +1734,34 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, ] +[[package]] +name = "dill" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/81/e1/56027a71e31b02ddc53c7d65b01e68edf64dea2932122fe7746a516f75d5/dill-0.4.1.tar.gz", hash = "sha256:423092df4182177d4d8ba8290c8a5b640c66ab35ec7da59ccfa00f6fa3eea5fa", size = 187315, upload-time = "2026-01-19T02:36:56.85Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/77/dc8c558f7593132cf8fefec57c4f60c83b16941c574ac5f619abb3ae7933/dill-0.4.1-py3-none-any.whl", hash = "sha256:1e1ce33e978ae97fcfcff5638477032b801c46c7c65cf717f95fbc2248f79a9d", size = 120019, upload-time = "2026-01-19T02:36:55.663Z" }, +] + [[package]] name = "dimos" -version = "0.0.9" +version = "0.0.10.post1" source = { editable = "." 
} dependencies = [ + { name = "annotation-protocol" }, { name = "asyncio" }, { name = "colorlog" }, { name = "dask", extra = ["complete"] }, { name = "dimos-lcm" }, + { name = "lazy-loader" }, { name = "llvmlite" }, { name = "numba" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "open3d" }, + { name = "open3d", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "open3d-unofficial-arm", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" }, { name = "opencv-python" }, + { name = "pin" }, { name = "plotext" }, { name = "plum-dispatch" }, { name = "pydantic" }, @@ -1403,11 +1771,12 @@ dependencies = [ { name = "reactivex" }, { name = "rerun-sdk" }, { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sortedcontainers" }, { name = "structlog" }, { name = "terminaltexteffects" }, { name = "textual" }, + { name = "toolz" }, { name = "typer" }, ] @@ -1467,12 +1836,48 @@ cpu = [ ] cuda = [ { name = "ctransformers", extra = ["cuda"] }, - { name = "cupy-cuda12x" }, - { name = "mmcv" }, - { name = "mmengine" }, - { name = "nvidia-nvimgcodec-cu12", extra = ["all"] }, - { name = "onnxruntime-gpu" }, - { name = "xformers" }, + { name = "cupy-cuda12x", marker = "platform_machine == 'x86_64'" }, + { name = "nvidia-nvimgcodec-cu12", extra = ["all"], marker = "platform_machine == 'x86_64'" }, + { name = "onnxruntime-gpu", marker = "platform_machine == 
'x86_64'" }, + { name = "xformers", marker = "platform_machine == 'x86_64'" }, +] +dds = [ + { name = "coverage" }, + { name = "cyclonedds" }, + { name = "lxml-stubs" }, + { name = "md-babel-py" }, + { name = "mypy" }, + { name = "pandas-stubs" }, + { name = "pre-commit" }, + { name = "py-spy" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-env" }, + { name = "pytest-mock" }, + { name = "pytest-timeout" }, + { name = "python-lsp-ruff" }, + { name = "python-lsp-server", extra = ["all"] }, + { name = "requests-mock" }, + { name = "ruff" }, + { name = "terminaltexteffects" }, + { name = "types-colorama" }, + { name = "types-defusedxml" }, + { name = "types-gevent" }, + { name = "types-greenlet" }, + { name = "types-jmespath" }, + { name = "types-jsonschema" }, + { name = "types-networkx" }, + { name = "types-protobuf" }, + { name = "types-psutil" }, + { name = "types-psycopg2" }, + { name = "types-pysocks" }, + { name = "types-pytz" }, + { name = "types-pyyaml" }, + { name = "types-simplejson" }, + { name = "types-tabulate" }, + { name = "types-tensorflow" }, + { name = "types-tqdm" }, + { name = "watchdog" }, ] dev = [ { name = "coverage" }, @@ -1481,11 +1886,14 @@ dev = [ { name = "mypy" }, { name = "pandas-stubs" }, { name = "pre-commit" }, + { name = "py-spy" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-env" }, { name = "pytest-mock" }, { name = "pytest-timeout" }, + { name = "python-lsp-ruff" }, + { name = "python-lsp-server", extra = ["all"] }, { name = "requests-mock" }, { name = "ruff" }, { name = "terminaltexteffects" }, @@ -1498,6 +1906,7 @@ dev = [ { name = "types-networkx" }, { name = "types-protobuf" }, { name = "types-psutil" }, + { name = "types-psycopg2" }, { name = "types-pysocks" }, { name = "types-pytz" }, { name = "types-pyyaml" }, @@ -1507,24 +1916,40 @@ dev = [ { name = "types-tqdm" }, { name = "watchdog" }, ] +docker = [ + { name = "dask", extra = ["distributed"] }, + { name = "dimos-lcm" }, + 
{ name = "lcm" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "open3d", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "open3d-unofficial-arm", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" }, + { name = "opencv-python-headless" }, + { name = "plum-dispatch" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "pyturbojpeg" }, + { name = "reactivex" }, + { name = "rerun-sdk" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sortedcontainers" }, + { name = "structlog" }, + { name = "typer" }, +] drone = [ { name = "pymavlink" }, ] manipulation = [ - { name = "contact-graspnet-pytorch" }, - { name = "h5py" }, + { name = "drake", version = "1.45.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine != 'aarch64' and sys_platform == 'darwin'" }, + { name = "drake", version = "1.49.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine != 'aarch64' and sys_platform != 'darwin'" }, { name = "kaleido" }, { name = "matplotlib" }, - { name = "pandas" }, { name = "piper-sdk" }, { name = "plotly" }, - { name = "pyquaternion" }, - { name = "pyrender" }, - { name = "python-fcl" }, { name = "pyyaml" }, - { name = "rtree" }, - { name = "tqdm" }, - { name = "trimesh" }, + { name = "xacro" }, + { name = "xarm-python-sdk" }, ] misc = [ { name = "catkin-pkg" }, @@ -1562,6 +1987,9 @@ perception = [ { name = "transformers", extra = ["torch"] }, { name = "ultralytics" }, ] +psql = [ + { name = 
"psycopg2-binary" }, +] sim = [ { name = "mujoco" }, { name = "playground" }, @@ -1614,21 +2042,27 @@ web = [ [package.metadata] requires-dist = [ + { name = "annotation-protocol", specifier = ">=1.4.0" }, { name = "anthropic", marker = "extra == 'agents'", specifier = ">=0.19.0" }, { name = "asyncio", specifier = "==3.4.3" }, { name = "bitsandbytes", marker = "sys_platform == 'linux' and extra == 'agents'", specifier = ">=0.48.2,<1.0" }, { name = "catkin-pkg", marker = "extra == 'misc'" }, { name = "cerebras-cloud-sdk", marker = "extra == 'misc'" }, { name = "colorlog", specifier = "==6.9.0" }, - { name = "contact-graspnet-pytorch", marker = "extra == 'manipulation'", git = "https://github.com/dimensionalOS/contact_graspnet_pytorch.git" }, { name = "coverage", marker = "extra == 'dev'", specifier = ">=7.0" }, { name = "ctransformers", marker = "extra == 'cpu'", specifier = "==0.2.27" }, { name = "ctransformers", extras = ["cuda"], marker = "extra == 'cuda'", specifier = "==0.2.27" }, - { name = "cupy-cuda12x", marker = "extra == 'cuda'", specifier = "==13.6.0" }, + { name = "cupy-cuda12x", marker = "platform_machine == 'x86_64' and extra == 'cuda'", specifier = "==13.6.0" }, + { name = "cyclonedds", marker = "extra == 'dds'", specifier = ">=0.10.5" }, { name = "dask", extras = ["complete"], specifier = "==2025.5.1" }, + { name = "dask", extras = ["distributed"], marker = "extra == 'docker'", specifier = "==2025.5.1" }, { name = "dimos", extras = ["agents", "web", "perception", "visualization", "sim"], marker = "extra == 'base'" }, { name = "dimos", extras = ["base"], marker = "extra == 'unitree'" }, + { name = "dimos", extras = ["dev"], marker = "extra == 'dds'" }, { name = "dimos-lcm" }, + { name = "dimos-lcm", marker = "extra == 'docker'" }, + { name = "drake", marker = "platform_machine != 'aarch64' and sys_platform == 'darwin' and extra == 'manipulation'", specifier = "==1.45.0" }, + { name = "drake", marker = "platform_machine != 'aarch64' and sys_platform != 
'darwin' and extra == 'manipulation'", specifier = ">=1.40.0" }, { name = "edgetam-dimos", marker = "extra == 'misc'" }, { name = "einops", marker = "extra == 'misc'", specifier = "==0.8.1" }, { name = "empy", marker = "extra == 'misc'", specifier = "==3.3.4" }, @@ -1637,82 +2071,95 @@ requires-dist = [ { name = "filterpy", marker = "extra == 'perception'", specifier = ">=1.4.5" }, { name = "gdown", marker = "extra == 'misc'", specifier = "==5.2.0" }, { name = "googlemaps", marker = "extra == 'misc'", specifier = ">=4.10.0" }, - { name = "h5py", marker = "extra == 'manipulation'", specifier = ">=3.7.0" }, { name = "hydra-core", marker = "extra == 'perception'", specifier = ">=1.3.0" }, { name = "ipykernel", marker = "extra == 'misc'" }, { name = "kaleido", marker = "extra == 'manipulation'", specifier = ">=0.2.1" }, - { name = "langchain", marker = "extra == 'agents'", specifier = ">=1,<2" }, + { name = "langchain", marker = "extra == 'agents'", specifier = "==1.2.3" }, { name = "langchain-chroma", marker = "extra == 'agents'", specifier = ">=1,<2" }, - { name = "langchain-core", marker = "extra == 'agents'", specifier = ">=1,<2" }, + { name = "langchain-core", marker = "extra == 'agents'", specifier = "==1.2.3" }, { name = "langchain-huggingface", marker = "extra == 'agents'", specifier = ">=1,<2" }, { name = "langchain-ollama", marker = "extra == 'agents'", specifier = ">=1,<2" }, { name = "langchain-openai", marker = "extra == 'agents'", specifier = ">=1,<2" }, { name = "langchain-text-splitters", marker = "extra == 'agents'", specifier = ">=1,<2" }, { name = "lap", marker = "extra == 'perception'", specifier = ">=0.5.12" }, { name = "lark", marker = "extra == 'misc'" }, + { name = "lazy-loader" }, + { name = "lcm", marker = "extra == 'docker'" }, { name = "llvmlite", specifier = ">=0.42.0" }, { name = "lxml-stubs", marker = "extra == 'dev'", specifier = ">=0.5.1,<1" }, { name = "matplotlib", marker = "extra == 'manipulation'", specifier = ">=3.7.1" }, { name = 
"mcp", marker = "extra == 'agents'", specifier = ">=1.0.0" }, { name = "md-babel-py", marker = "extra == 'dev'", specifier = "==1.1.1" }, - { name = "mmcv", marker = "extra == 'cuda'", specifier = ">=2.1.0" }, - { name = "mmengine", marker = "extra == 'cuda'", specifier = ">=0.10.3" }, { name = "moondream", marker = "extra == 'perception'" }, { name = "mujoco", marker = "extra == 'sim'", specifier = ">=3.3.4" }, { name = "mypy", marker = "extra == 'dev'", specifier = "==1.19.0" }, { name = "numba", specifier = ">=0.60.0" }, { name = "numpy", specifier = ">=1.26.4" }, - { name = "nvidia-nvimgcodec-cu12", extras = ["all"], marker = "extra == 'cuda'" }, + { name = "numpy", marker = "extra == 'docker'", specifier = ">=1.26.4" }, + { name = "nvidia-nvimgcodec-cu12", extras = ["all"], marker = "platform_machine == 'x86_64' and extra == 'cuda'" }, { name = "ollama", marker = "extra == 'agents'", specifier = ">=0.6.0" }, { name = "omegaconf", marker = "extra == 'perception'", specifier = ">=2.3.0" }, { name = "onnx", marker = "extra == 'misc'" }, { name = "onnxruntime", marker = "extra == 'cpu'" }, - { name = "onnxruntime-gpu", marker = "extra == 'cuda'", specifier = ">=1.17.1" }, + { name = "onnxruntime-gpu", marker = "platform_machine == 'x86_64' and extra == 'cuda'", specifier = ">=1.17.1" }, { name = "open-clip-torch", marker = "extra == 'misc'", specifier = "==3.2.0" }, - { name = "open3d" }, + { name = "open3d", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'", specifier = ">=0.18.0" }, + { name = "open3d", marker = "(platform_machine != 'aarch64' and extra == 'docker') or (sys_platform != 'linux' and extra == 'docker')", specifier = ">=0.18.0" }, + { name = "open3d-unofficial-arm", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" }, + { name = "open3d-unofficial-arm", marker = "platform_machine == 'aarch64' and sys_platform == 'linux' and extra == 'docker'" }, { name = "openai", marker = "extra == 'agents'" }, { name = 
"openai-whisper", marker = "extra == 'agents'" }, { name = "opencv-contrib-python", marker = "extra == 'misc'", specifier = "==4.10.0.84" }, { name = "opencv-python" }, - { name = "pandas", marker = "extra == 'manipulation'", specifier = ">=1.5.2" }, + { name = "opencv-python-headless", marker = "extra == 'docker'" }, { name = "pandas-stubs", marker = "extra == 'dev'", specifier = ">=2.3.2.250926,<3" }, { name = "pillow", marker = "extra == 'perception'" }, + { name = "pin", specifier = ">=3.3.0" }, { name = "piper-sdk", marker = "extra == 'manipulation'" }, { name = "playground", marker = "extra == 'sim'", specifier = ">=0.0.5" }, { name = "plotext", specifier = "==5.3.2" }, { name = "plotly", marker = "extra == 'manipulation'", specifier = ">=5.9.0" }, { name = "plum-dispatch", specifier = "==2.5.7" }, + { name = "plum-dispatch", marker = "extra == 'docker'", specifier = "==2.5.7" }, { name = "pre-commit", marker = "extra == 'dev'", specifier = "==4.2.0" }, + { name = "psycopg2-binary", marker = "extra == 'psql'", specifier = ">=2.9.11" }, + { name = "py-spy", marker = "extra == 'dev'" }, { name = "pydantic" }, + { name = "pydantic", marker = "extra == 'docker'" }, { name = "pydantic-settings", specifier = ">=2.11.0,<3" }, + { name = "pydantic-settings", marker = "extra == 'docker'", specifier = ">=2.11.0,<3" }, { name = "pygame", marker = "extra == 'sim'", specifier = ">=2.6.1" }, { name = "pymavlink", marker = "extra == 'drone'" }, - { name = "pyquaternion", marker = "extra == 'manipulation'", specifier = ">=0.9.9" }, - { name = "pyrender", marker = "extra == 'manipulation'", specifier = ">=0.1.45" }, { name = "pytest", marker = "extra == 'dev'", specifier = "==8.3.5" }, { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = "==0.26.0" }, { name = "pytest-env", marker = "extra == 'dev'", specifier = "==1.1.5" }, { name = "pytest-mock", marker = "extra == 'dev'", specifier = "==3.15.0" }, { name = "pytest-timeout", marker = "extra == 'dev'", specifier 
= "==2.4.0" }, { name = "python-dotenv" }, - { name = "python-fcl", marker = "extra == 'manipulation'", specifier = ">=0.7.0.4" }, + { name = "python-lsp-ruff", marker = "extra == 'dev'", specifier = "==2.3.0" }, + { name = "python-lsp-server", extras = ["all"], marker = "extra == 'dev'", specifier = "==1.14.0" }, { name = "python-multipart", marker = "extra == 'misc'", specifier = "==0.0.20" }, { name = "pyturbojpeg", specifier = "==1.8.2" }, + { name = "pyturbojpeg", marker = "extra == 'docker'" }, { name = "pyyaml", marker = "extra == 'manipulation'", specifier = ">=6.0" }, { name = "reactivex" }, + { name = "reactivex", marker = "extra == 'docker'" }, { name = "requests-mock", marker = "extra == 'dev'", specifier = "==1.12.1" }, { name = "rerun-sdk", specifier = ">=0.20.0" }, + { name = "rerun-sdk", marker = "extra == 'docker'" }, { name = "rerun-sdk", marker = "extra == 'visualization'", specifier = ">=0.20.0" }, - { name = "rtree", marker = "extra == 'manipulation'" }, { name = "ruff", marker = "extra == 'dev'", specifier = "==0.14.3" }, { name = "scikit-learn", marker = "extra == 'misc'" }, { name = "scipy", specifier = ">=1.15.1" }, + { name = "scipy", marker = "extra == 'docker'", specifier = ">=1.15.1" }, { name = "sentence-transformers", marker = "extra == 'misc'" }, { name = "sortedcontainers", specifier = "==2.4.0" }, + { name = "sortedcontainers", marker = "extra == 'docker'" }, { name = "sounddevice", marker = "extra == 'agents'" }, { name = "soundfile", marker = "extra == 'web'" }, { name = "sse-starlette", marker = "extra == 'web'", specifier = ">=2.2.1" }, { name = "structlog", specifier = ">=25.5.0,<26" }, + { name = "structlog", marker = "extra == 'docker'", specifier = ">=25.5.0,<26" }, { name = "tensorboard", marker = "extra == 'misc'", specifier = "==2.20.0" }, { name = "tensorzero", marker = "extra == 'misc'", specifier = "==2025.7.5" }, { name = "terminaltexteffects", specifier = "==0.12.2" }, @@ -1720,12 +2167,12 @@ requires-dist = [ { 
name = "textual", specifier = "==3.7.1" }, { name = "tiktoken", marker = "extra == 'misc'", specifier = ">=0.8.0" }, { name = "timm", marker = "extra == 'misc'", specifier = ">=1.0.15" }, + { name = "toolz", specifier = ">=1.1.0" }, { name = "torchreid", marker = "extra == 'misc'", specifier = "==0.2.5" }, - { name = "tqdm", marker = "extra == 'manipulation'", specifier = ">=4.65.0" }, { name = "transformers", extras = ["torch"], marker = "extra == 'perception'", specifier = "==4.49.0" }, - { name = "trimesh", marker = "extra == 'manipulation'", specifier = ">=3.22.0" }, { name = "typeguard", marker = "extra == 'misc'" }, { name = "typer", specifier = ">=0.19.2,<1" }, + { name = "typer", marker = "extra == 'docker'", specifier = ">=0.19.2,<1" }, { name = "types-colorama", marker = "extra == 'dev'", specifier = ">=0.4.15.20250801,<1" }, { name = "types-defusedxml", marker = "extra == 'dev'", specifier = ">=0.7.0.20250822,<1" }, { name = "types-gevent", marker = "extra == 'dev'", specifier = ">=25.4.0.20250915,<26" }, @@ -1735,6 +2182,7 @@ requires-dist = [ { name = "types-networkx", marker = "extra == 'dev'", specifier = ">=3.5.0.20251001,<4" }, { name = "types-protobuf", marker = "extra == 'dev'", specifier = ">=6.32.1.20250918,<7" }, { name = "types-psutil", marker = "extra == 'dev'", specifier = ">=7.0.0.20251001,<8" }, + { name = "types-psycopg2", marker = "extra == 'dev'", specifier = ">=2.9.21.20251012" }, { name = "types-pysocks", marker = "extra == 'dev'", specifier = ">=1.7.1.20251001,<2" }, { name = "types-pytz", marker = "extra == 'dev'", specifier = ">=2025.2.0.20250809,<2026" }, { name = "types-pyyaml", marker = "extra == 'dev'", specifier = ">=6.0.12.20250915,<7" }, @@ -1746,15 +2194,17 @@ requires-dist = [ { name = "unitree-webrtc-connect-leshy", marker = "extra == 'unitree'", specifier = ">=2.0.7" }, { name = "uvicorn", marker = "extra == 'web'", specifier = ">=0.34.0" }, { name = "watchdog", marker = "extra == 'dev'", specifier = ">=3.0.0" }, + { 
name = "xacro", marker = "extra == 'manipulation'" }, + { name = "xarm-python-sdk", marker = "extra == 'manipulation'", specifier = ">=1.17.0" }, { name = "xarm-python-sdk", marker = "extra == 'misc'", specifier = ">=1.17.0" }, - { name = "xformers", marker = "extra == 'cuda'", specifier = ">=0.0.20" }, + { name = "xformers", marker = "platform_machine == 'x86_64' and extra == 'cuda'", specifier = ">=0.0.20" }, { name = "yapf", marker = "extra == 'misc'", specifier = "==0.40.2" }, ] -provides-extras = ["misc", "visualization", "agents", "web", "perception", "unitree", "manipulation", "cpu", "cuda", "dev", "sim", "drone", "base"] +provides-extras = ["misc", "visualization", "agents", "web", "perception", "unitree", "manipulation", "cpu", "cuda", "dev", "psql", "sim", "drone", "dds", "docker", "base"] [[package]] name = "dimos-lcm" -version = "0.1.1" +version = "0.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "foxglove-websocket" }, @@ -1762,9 +2212,9 @@ dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/51/2f/c24d06fc33f0042b3caa4cb2177493a13661f587c5e26788f42c25aed530/dimos_lcm-0.1.1.tar.gz", hash = "sha256:7ef035c3b0bae8a422dc3b38669757982626a7efbd366907be4f8b47700d8289", size = 83474, upload-time = "2026-01-03T13:20:05.194Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/d8/6e366f73f54733872d8c487a5ebd0ffd2eae2f0242d65b3552cdf71f5771/dimos_lcm-0.1.2.tar.gz", hash = "sha256:a0e193f974afdf07907be427a639e695ddd68c160e4737f847a53a1902674c30", size = 122337, upload-time = "2026-01-30T15:44:38.458Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/28/01/00065b713b1b9c23371b67292383e1f4ca83766fb258b9efb612a330188b/dimos_lcm-0.1.1-py3-none-any.whl", hash = "sha256:4e0906fa98ce57be6015b26f3e5e1e7a701219f65805540e4f6ff1edcb527453", size = 497467, upload-time = "2026-01-03T13:20:03.865Z" }, + { url = "https://files.pythonhosted.org/packages/a1/a9/9d938d6a84c873e3ea4765541a0babd216dfe730fc6c1044a63b4ab1097e/dimos_lcm-0.1.2-py3-none-any.whl", hash = "sha256:fb65258388e8658d0ff94577d6cb5e7c3d657070556a4289ad1b322939503552", size = 588426, upload-time = "2026-01-30T15:44:37.093Z" }, ] [[package]] @@ -1829,6 +2279,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, ] +[[package]] +name = "docstring-to-markdown" +version = "0.17" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/d8/8abe80d62c5dce1075578031bcfde07e735bcf0afe2886dd48b470162ab4/docstring_to_markdown-0.17.tar.gz", hash = "sha256:df72a112294c7492487c9da2451cae0faeee06e86008245c188c5761c9590ca3", size = 32260, upload-time = "2025-05-02T15:09:07.932Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/7b/af3d0da15bed3a8665419bb3a630585756920f4ad67abfdfef26240ebcc0/docstring_to_markdown-0.17-py3-none-any.whl", hash = "sha256:fd7d5094aa83943bf5f9e1a13701866b7c452eac19765380dead666e36d3711c", size = 23479, upload-time = "2025-05-02T15:09:06.676Z" }, +] + [[package]] name = "docutils" version = "0.22.4" @@ -1838,6 +2301,62 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/02/10/5da547df7a391dcde17f59520a231527b8571e6f46fc8efb02ccb370ab12/docutils-0.22.4-py3-none-any.whl", hash = 
"sha256:d0013f540772d1420576855455d050a2180186c91c15779301ac2ccb3eeb68de", size = 633196, upload-time = "2025-12-18T19:00:18.077Z" }, ] +[[package]] +name = "drake" +version = "1.45.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", + "python_full_version == '3.12.*' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "python_full_version < '3.11' and sys_platform == 'darwin'", +] +dependencies = [ + { name = "matplotlib", marker = "sys_platform == 'darwin'" }, + { name = "mosek", version = "11.0.24", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'darwin'" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' and sys_platform == 'darwin'" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' and sys_platform == 'darwin'" }, + { name = "pydot", marker = "sys_platform == 'darwin'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/31/aa4f1f5523381539e1028354cc535d5a3307d28fd33872f2b403454d8391/drake-1.45.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b0d9bd6196dc6d3b0e660fc6351fcf236727a45ef6a7123f8dc96f85b8662ac3", size = 57314509, upload-time = "2025-09-16T19:02:10.195Z" }, + { url = "https://files.pythonhosted.org/packages/97/cc/a4e1909d8f69f6aaa2d572b6695a942395205f140c16cc2352b880670325/drake-1.45.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:a1d429e95c43b3fe1af156489381d3129c8ef4dd95b80d8c2a2a51a74a2adb24", size = 57315511, upload-time = "2025-09-16T19:02:16.937Z" }, +] + +[[package]] +name = "drake" +version = "1.49.0" +source = { registry = "https://pypi.org/simple" } 
+resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version == '3.12.*' and sys_platform == 'win32'", + "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version == '3.11.*' and sys_platform == 'win32'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version < '3.11' and sys_platform == 'win32'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", +] +dependencies = [ + { name = "matplotlib", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "mosek", version = "11.1.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.15' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.15' and sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "numpy", version = "2.2.6", source = { registry = 
"https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "pydot", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "pyyaml", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/26/2ce3a9caf431f24e39f8b1fc7b3ebba4faafef1d61c849db3194e8d2e21d/drake-1.49.0-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:6c73dbd061fcb442e82b7b5a94dadcfbf4c44949035d03394df29412114647b2", size = 41482505, upload-time = "2026-01-15T19:44:08.313Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2c/b147eaeee97986d970c0618144b28049cf078c20ba73209f4db14cf9a531/drake-1.49.0-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:b897f5f1516d13627ef18a8395b15f56413016d3c91c902cada76860b5cbb12c", size = 41516482, upload-time = "2026-01-15T19:44:11.342Z" }, + { url = "https://files.pythonhosted.org/packages/84/dc/c55dc5678a61e5befd3694b28e0dc5737a8422334b774a4174b517c67c22/drake-1.49.0-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:b9a5b528d430764ce1670918b8679cabbb209c8daa2440824ac3a9832c686591", size = 41432263, upload-time = "2026-01-15T19:44:14.486Z" }, + { url = "https://files.pythonhosted.org/packages/3f/a8/1a46831f5f802088df9cd92c204b888aef4e3659d9702128533aa4e5ebaa/drake-1.49.0-cp313-cp313-manylinux_2_34_x86_64.whl", hash = 
"sha256:0a51abf867d534cef1343381ce79883acc606d52fc56debf2dd9e306982e8910", size = 41438880, upload-time = "2026-01-15T19:44:20.265Z" }, + { url = "https://files.pythonhosted.org/packages/d8/60/cdbc3101bb2bd57706a6b6c5a7fc68a03270f002af1d448da875f3eff5df/drake-1.49.0-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:775740e9500ab8cb2e0af0e69ab162018ac03f7553b6fe03fc6b4f03c4b01092", size = 41509337, upload-time = "2026-01-15T19:44:25.879Z" }, +] + [[package]] name = "durationpy" version = "0.10" @@ -1863,6 +2382,54 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/7a/ea/bec55e18e19b6e43ed5f18bfcb699933ab82744fa8a52209ac6e94a6d6d8/edgetam_dimos-1.0.tar.gz", hash = "sha256:4fea5fd5a5aa17f9145dc4f35abc41de9426acaa0d59cae9b467cf26e657d4a7", size = 74935, upload-time = "2026-01-19T22:53:39.159Z" } +[[package]] +name = "eigenpy" +version = "3.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, + { name = "cmeel-boost" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1e/a5/7ec1dc873df269332c84e5b79b033fe53d55c5fd6517bd6d8bb5fb24e707/eigenpy-3.12.0.tar.gz", hash = "sha256:e9d07219df1e61e45db6e42001697c5637743b3ad3e0bfcf069fc94c5fab218d", size = 6556548, upload-time = "2025-08-23T17:54:34.041Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/6d/25e69e262ec336c3b51eebfae9da536f57e93970b434dc7d8506ed3ee3f7/eigenpy-3.12.0-0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:27525792572d6d2cdc7dc407b253280b7f52b9b4dda900ad9cc4b27879e251f2", size = 5635279, upload-time = "2025-08-23T17:53:58.956Z" }, + { url = "https://files.pythonhosted.org/packages/05/cf/fc8729917859ce949d7ab07f0853fe4df45555322e56765f98152e5f2bf1/eigenpy-3.12.0-0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a3e993c2adc4029673d0578dadec20c5a683c3de5b7768abf98129767e57c40a", size = 4865885, upload-time = "2025-08-23T17:54:01.012Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/b0/651af0c2db36f1ad62b2a40d439b65c13da8b38bd0e0a1bf67f3af6d0034/eigenpy-3.12.0-0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:291c21c38a1faeb1823e6f821be1910b7751e0fecf12047217f98c3ab748183b", size = 6200013, upload-time = "2025-08-23T17:54:03.107Z" }, + { url = "https://files.pythonhosted.org/packages/c1/90/c0fc4227cf3f02b60cbdcf36946eb1239683273057d242b97e9af7a44ee3/eigenpy-3.12.0-0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:5d94b52d087d9f317e1029a48cdf170bfca7f0e1c10581b4ce79b1d067fc5e1a", size = 6004382, upload-time = "2025-08-23T17:54:04.667Z" }, + { url = "https://files.pythonhosted.org/packages/58/4c/eed4d4ab07fe3e8a05c599c4955e8053de25ba23c51a8083dabd73714835/eigenpy-3.12.0-0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e6f43554f88ff02f29480dcb278093494f4701f9a823820b98fa20c82a3d963", size = 5635357, upload-time = "2025-08-23T17:54:06.584Z" }, + { url = "https://files.pythonhosted.org/packages/02/75/a35cb968b524d05173027b9841b3ebd209265c4f8e4040aa27d53dcc8574/eigenpy-3.12.0-0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ed6ff1ab3c77932a5619db33c4b34945608692111a48cf7ed12e844ea495218", size = 4865912, upload-time = "2025-08-23T17:54:08.405Z" }, + { url = "https://files.pythonhosted.org/packages/6a/df/ee81fc527c3f056190e848a7741938af90293b0e6f71bab5e89ad1cc540f/eigenpy-3.12.0-0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9b842736a5d554692827ff02429a5f927bfbb84df9cf7de86eab86ae7733d315", size = 6199469, upload-time = "2025-08-23T17:54:09.858Z" }, + { url = "https://files.pythonhosted.org/packages/a6/76/569c7fec07d4dd62fd34ed47c1c048f4f32acb0a2b95d593b743a79d7872/eigenpy-3.12.0-0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a90025bb6986860e6cdb50e5dbc05b55aa7e71bff503e214a3d24842ff15da02", size = 6004355, upload-time = "2025-08-23T17:54:12.658Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/57/e024b4644b4a5c48cfa527c7f56064efee6286260b479d1bf2d06f616509/eigenpy-3.12.0-0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e10c04912483bb43cb3c49cf51d73138417383475b113cc4e96c426ba6925d11", size = 5626038, upload-time = "2025-08-23T17:54:14.113Z" }, + { url = "https://files.pythonhosted.org/packages/10/d8/2bf4bf06cf89b95b924482e0cd632a9c4c41043c0b8b53b58b5615239b32/eigenpy-3.12.0-0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b2a5cab1c2b7cb6cc0c798170687e168644d44988492e65b0f0ab522bda7f6a8", size = 4877878, upload-time = "2025-08-23T17:54:16.002Z" }, + { url = "https://files.pythonhosted.org/packages/fa/d7/6b9a39ef606002f9cf20f54dd8741deb9d884d576b4997162671227cdacb/eigenpy-3.12.0-0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:1b27737ffceee5915c88b408821ec3e64d1d409abf1c02ed3617cd949773cafa", size = 6193411, upload-time = "2025-08-23T17:54:17.841Z" }, + { url = "https://files.pythonhosted.org/packages/12/af/3942b89ea486bbd9bee160353b028bf98547e9bfdcb5562c975e683f8c2b/eigenpy-3.12.0-0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:15e7f3f7b4d099fc942fc5a14157022d2314aebf146fd63b927d5c339a7b0d01", size = 6012102, upload-time = "2025-08-23T17:54:19.739Z" }, + { url = "https://files.pythonhosted.org/packages/80/aa/50b418bc747273c2e7c83dfeaabbdbeaab809dc20c935e52f0bbc1c779e8/eigenpy-3.12.0-0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:e88ee2013e4f81adcb041850e60dc63bd85a2bcdefd3dbf8168f6141dfdc3174", size = 5626045, upload-time = "2025-08-23T17:54:21.308Z" }, + { url = "https://files.pythonhosted.org/packages/4a/bb/bac62e442d1727e7b9797e57f99ba9d8040c2296a979881dc9c1bab7dbc4/eigenpy-3.12.0-0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:560886481b2e38a0a7796f3e93c6e6e707c677f504e8cc723773e4a9f22299dd", size = 4877879, upload-time = "2025-08-23T17:54:22.786Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/95/f9a4286f6f9139ba33196d5c79449981abc3767c7ab3fcdc05551f01485c/eigenpy-3.12.0-0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:bd50b2a708201d439987cf55e224eb0a9656f7d1383a06f7175926aa0a8b1971", size = 6193411, upload-time = "2025-08-23T17:54:24.338Z" }, + { url = "https://files.pythonhosted.org/packages/18/c8/086b66d5310e4c62db564dbe76556f0bfa7aa0d969765de2ad554a75e6ae/eigenpy-3.12.0-0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:9bd338f32475af374c55e1803cc5e7936cc3fccf172cbb01623b846d270d8294", size = 6012109, upload-time = "2025-08-23T17:54:25.957Z" }, + { url = "https://files.pythonhosted.org/packages/f4/53/d6c7ef75acd8ac099b1ae9e2209936fb0ada9c675dfd6762c4392be19df5/eigenpy-3.12.0-1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6d660cd9ebdff808f4e9d49027e9ae5621d19de069deef67ef226e5d48cdcfb2", size = 5415254, upload-time = "2025-10-15T20:13:13.051Z" }, + { url = "https://files.pythonhosted.org/packages/5d/31/a7358942489a31edbded67e00771ff18261c67efddb9f37bdb353633e493/eigenpy-3.12.0-1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37c05f8431f9edbd5db2ab04a01d004b894929b70c6faea0f80004f97ddb2e1f", size = 4226332, upload-time = "2025-10-15T20:13:15.092Z" }, + { url = "https://files.pythonhosted.org/packages/2e/38/4d06a01b1fe3efb9c9bc10521d9718d5a263b316306429220bd2da8e134e/eigenpy-3.12.0-1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c5cbbee1f043baed9900a99918fb4326151be0145f793f8129fd1b0b938f2974", size = 6200413, upload-time = "2025-10-15T20:13:16.952Z" }, + { url = "https://files.pythonhosted.org/packages/fe/38/5aff8d72ebaf891a07f1368176508f75cc9e7bf8967d07f7fa1baedc6ac7/eigenpy-3.12.0-1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f579f3b23754f2156b43f27906481bc00fd46030c4e061e8e770cd02aacc3d88", size = 6037835, upload-time = "2025-10-15T20:13:18.826Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/40/653fc67abc9fd2fe9adcddd8d61a5f4e219e03ae952f6f4d3fd365df3671/eigenpy-3.12.0-1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd337ed66182e9afd5deb847edda7e0b0cb173b6a28d3123c375634f10888b3a", size = 5415262, upload-time = "2025-10-15T20:13:20.719Z" }, + { url = "https://files.pythonhosted.org/packages/fb/7b/2eb71b204e78d656c04037a8d31c5e57fc984323f7e5617647609f1e4cc6/eigenpy-3.12.0-1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:42e2bfd63338e4357cd0edff072b0c612fcdb8dbd89b811feba701a9cf909d0a", size = 4226325, upload-time = "2025-10-15T20:13:22.549Z" }, + { url = "https://files.pythonhosted.org/packages/fb/ce/9d1e4b7a6dd16380893ce8252c4809c04688b530c21570ac7fc8f5a588b8/eigenpy-3.12.0-1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:54cb321401766e0e8df5870f9a497d1a74e23963d74c49df569b6cd6728ceb14", size = 6200017, upload-time = "2025-10-15T20:13:24.042Z" }, + { url = "https://files.pythonhosted.org/packages/ee/c9/c97ee17b2cdae3b1b9ce3a048469d9d5ca03ab8baf2692392cea610c7323/eigenpy-3.12.0-1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a59d2b8e3e2cc01370cb7ceff76102972b0c8369d4cdcb8b5f0c95d8860d4dcf", size = 6037983, upload-time = "2025-10-15T20:13:25.661Z" }, + { url = "https://files.pythonhosted.org/packages/6e/e5/f0f4da14c543f9d91e15b835e48f708c839ae51e186ecebf0950ac0503f2/eigenpy-3.12.0-1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:96f5cf42a3fa538a0b7672c36e295c8f63a022bcbf5acdc9a127cc1189f151aa", size = 5460186, upload-time = "2025-10-15T20:13:27.174Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6b/b6ee7ad7f54c6f17c07f4de09686a100ef02a480ac4bc2a1ab2f2f211105/eigenpy-3.12.0-1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:165323fea20b17d5fc2d81c606a32ae85b4af0cd4b08e0f860efdb8cefd1766c", size = 4247646, upload-time = "2025-10-15T20:13:29.229Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/26/c7aded82e75e655fdfc879e25309b6e42f46f903f341d193f0cca1c53d19/eigenpy-3.12.0-1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f7d4fd7db04df9513fbdfff443aa4fe11a83b14dbef1d03992d94cffb9a905fe", size = 6202696, upload-time = "2025-10-15T20:13:31.403Z" }, + { url = "https://files.pythonhosted.org/packages/c4/97/a39ac2226f82cfbd4df632d2d4a9f480fbfea51fccbc5b2d5809443b47ca/eigenpy-3.12.0-1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:8c1a82275bf1b478b7c201ac260e2d48c8da14ff568deeafcb8d37ef8bf798ec", size = 6047814, upload-time = "2025-10-15T20:13:33.44Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/99bb78a32f3bf3a0e3502f5e6c33887404b6ad7a77ca2b692730239fdad8/eigenpy-3.12.0-1-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:8f43adceaf0f53767d974b21f822d8c9d2854e7c868e3dcfb0e5ca25c54e2281", size = 5460188, upload-time = "2025-10-15T20:13:35.15Z" }, + { url = "https://files.pythonhosted.org/packages/52/f6/89d161787fb83e4dcf54c01b8a3e43f4b4346f4249819add6efa03ec74d6/eigenpy-3.12.0-1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f9f53a5df469acbcf7055a36d5c5fe3581faa1b0468e3c78919dcc92233fcde4", size = 4247653, upload-time = "2025-10-15T20:13:37.126Z" }, + { url = "https://files.pythonhosted.org/packages/17/7c/74b63227d3bb1a2cfdd6f5ac0ddb6b359b24f1165466d9741e8c0c7624d7/eigenpy-3.12.0-1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:6e419f03d8a6e2f86a72c22950d16514c065fdfeca909afe70467c0728271162", size = 6202697, upload-time = "2025-10-15T20:13:38.813Z" }, + { url = "https://files.pythonhosted.org/packages/55/f5/a2d18432641c570618da23ae661aeb75b58f1e4db76352c43eae2d3c7794/eigenpy-3.12.0-1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:fda5e71f972af7790a8a0935ec0efafb386ab8293b9cb187e05b89f68cc964d3", size = 6047816, upload-time = "2025-10-15T20:13:40.573Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/3e/66f1b28849863a52c291e941f88e341a17de59ad0d64c406c195d8214c94/eigenpy-3.12.0-1-cp314-cp314-macosx_10_9_x86_64.whl", hash = "sha256:9a032ccee69712b4c0697034441960a2853f53d0f05bf5b5e29484d7c670d472", size = 5464486, upload-time = "2025-10-15T20:13:42.299Z" }, + { url = "https://files.pythonhosted.org/packages/1f/3d/5d23552a6233e2cab6b6de182f6f838af1451feed96df9bb00ca63e068ee/eigenpy-3.12.0-1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:17c3df16edd597f0cb32616af9bd7331b82772b376d559d948e57dec85ac1edd", size = 4252388, upload-time = "2025-10-15T20:13:43.912Z" }, + { url = "https://files.pythonhosted.org/packages/07/a6/515a94095e325bb92f300353214a683395f01d14c6cded9dac9f7669d70b/eigenpy-3.12.0-1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:1af9e6a14a5ddeeff19698b0c47db029d19601ce8246dd68f6038271801a2d3e", size = 6216613, upload-time = "2025-10-15T20:13:45.746Z" }, + { url = "https://files.pythonhosted.org/packages/52/66/95f253a4d2a684fc90d3aa1bef3a10fb1772277310d432f87ae852723195/eigenpy-3.12.0-1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:4537fcc048f45e8a5ada205d0cc8798c558f34500153dc0e33862669150cd0f7", size = 6062999, upload-time = "2025-10-15T20:13:47.507Z" }, +] + [[package]] name = "einops" version = "0.8.1" @@ -1921,17 +2488,18 @@ wheels = [ [[package]] name = "fastapi" -version = "0.128.0" +version = "0.129.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc" }, { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, + { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/48/47/75f6bea02e797abff1bca968d5997793898032d9923c1935ae2efdece642/fastapi-0.129.0.tar.gz", hash = "sha256:61315cebd2e65df5f97ec298c888f9de30430dd0612d59d6480beafbc10655af", size = 375450, upload-time = "2026-02-12T13:54:52.541Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" }, + { url = "https://files.pythonhosted.org/packages/9e/dd/d0ee25348ac58245ee9f90b6f3cbb666bf01f69be7e0911f9851bddbda16/fastapi-0.129.0-py3-none-any.whl", hash = "sha256:b4946880e48f462692b31c083be0432275cbfb6e2274566b1be91479cc1a84ec", size = 102950, upload-time = "2026-02-12T13:54:54.528Z" }, ] [[package]] @@ -2065,31 +2633,21 @@ source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/73/b1/1c3d635d955f2b4bf34d45abf8f35492e04dbd7804e94ce65d9f928ef3ec/fastrlock-0.8.3.tar.gz", hash = "sha256:4af6734d92eaa3ab4373e6c9a1dd0d5ad1304e172b1521733c6c3b3d73c8fa5d", size = 79327, upload-time = "2024-12-17T11:03:39.638Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/e7/02/3f771177380d8690812d5b2b7736dc6b6c8cd1c317e4572e65f823eede08/fastrlock-0.8.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:cc5fa9166e05409f64a804d5b6d01af670979cdb12cd2594f555cb33cdc155bd", size = 55094, upload-time = "2024-12-17T11:01:49.721Z" }, - { url = "https://files.pythonhosted.org/packages/be/b4/aae7ed94b8122c325d89eb91336084596cebc505dc629b795fcc9629606d/fastrlock-0.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7a77ebb0a24535ef4f167da2c5ee35d9be1e96ae192137e9dc3ff75b8dfc08a5", size = 48220, upload-time = "2024-12-17T11:01:51.071Z" }, - { url = 
"https://files.pythonhosted.org/packages/96/87/9807af47617fdd65c68b0fcd1e714542c1d4d3a1f1381f591f1aa7383a53/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:d51f7fb0db8dab341b7f03a39a3031678cf4a98b18533b176c533c122bfce47d", size = 49551, upload-time = "2024-12-17T11:01:52.316Z" }, { url = "https://files.pythonhosted.org/packages/9d/12/e201634810ac9aee59f93e3953cb39f98157d17c3fc9d44900f1209054e9/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:767ec79b7f6ed9b9a00eb9ff62f2a51f56fdb221c5092ab2dadec34a9ccbfc6e", size = 49398, upload-time = "2024-12-17T11:01:53.514Z" }, { url = "https://files.pythonhosted.org/packages/15/a1/439962ed439ff6f00b7dce14927e7830e02618f26f4653424220a646cd1c/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d6a77b3f396f7d41094ef09606f65ae57feeb713f4285e8e417f4021617ca62", size = 53334, upload-time = "2024-12-17T11:01:55.518Z" }, - { url = "https://files.pythonhosted.org/packages/b5/9e/1ae90829dd40559ab104e97ebe74217d9da794c4bb43016da8367ca7a596/fastrlock-0.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:92577ff82ef4a94c5667d6d2841f017820932bc59f31ffd83e4a2c56c1738f90", size = 52495, upload-time = "2024-12-17T11:01:57.76Z" }, { url = "https://files.pythonhosted.org/packages/e5/8c/5e746ee6f3d7afbfbb0d794c16c71bfd5259a4e3fb1dda48baf31e46956c/fastrlock-0.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3df8514086e16bb7c66169156a8066dc152f3be892c7817e85bf09a27fa2ada2", size = 51972, upload-time = "2024-12-17T11:02:01.384Z" }, { url = "https://files.pythonhosted.org/packages/76/a7/8b91068f00400931da950f143fa0f9018bd447f8ed4e34bed3fe65ed55d2/fastrlock-0.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:001fd86bcac78c79658bac496e8a17472d64d558cd2227fdc768aa77f877fe40", size = 30946, upload-time = "2024-12-17T11:02:03.491Z" }, { url = 
"https://files.pythonhosted.org/packages/90/9e/647951c579ef74b6541493d5ca786d21a0b2d330c9514ba2c39f0b0b0046/fastrlock-0.8.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:f68c551cf8a34b6460a3a0eba44bd7897ebfc820854e19970c52a76bf064a59f", size = 55233, upload-time = "2024-12-17T11:02:04.795Z" }, - { url = "https://files.pythonhosted.org/packages/be/91/5f3afba7d14b8b7d60ac651375f50fff9220d6ccc3bef233d2bd74b73ec7/fastrlock-0.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:55d42f6286b9d867370af4c27bc70d04ce2d342fe450c4a4fcce14440514e695", size = 48911, upload-time = "2024-12-17T11:02:06.173Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/e37bd72d7d70a8a551b3b4610d028bd73ff5d6253201d5d3cf6296468bee/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:bbc3bf96dcbd68392366c477f78c9d5c47e5d9290cb115feea19f20a43ef6d05", size = 50357, upload-time = "2024-12-17T11:02:07.418Z" }, { url = "https://files.pythonhosted.org/packages/0d/ef/a13b8bab8266840bf38831d7bf5970518c02603d00a548a678763322d5bf/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:77ab8a98417a1f467dafcd2226718f7ca0cf18d4b64732f838b8c2b3e4b55cb5", size = 50222, upload-time = "2024-12-17T11:02:08.745Z" }, { url = "https://files.pythonhosted.org/packages/01/e2/5e5515562b2e9a56d84659377176aef7345da2c3c22909a1897fe27e14dd/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:04bb5eef8f460d13b8c0084ea5a9d3aab2c0573991c880c0a34a56bb14951d30", size = 54553, upload-time = "2024-12-17T11:02:10.925Z" }, - { url = "https://files.pythonhosted.org/packages/c0/8f/65907405a8cdb2fc8beaf7d09a9a07bb58deff478ff391ca95be4f130b70/fastrlock-0.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c9d459ce344c21ff03268212a1845aa37feab634d242131bc16c2a2355d5f65", size = 53362, upload-time = 
"2024-12-17T11:02:12.476Z" }, { url = "https://files.pythonhosted.org/packages/ec/b9/ae6511e52738ba4e3a6adb7c6a20158573fbc98aab448992ece25abb0b07/fastrlock-0.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33e6fa4af4f3af3e9c747ec72d1eadc0b7ba2035456c2afb51c24d9e8a56f8fd", size = 52836, upload-time = "2024-12-17T11:02:13.74Z" }, { url = "https://files.pythonhosted.org/packages/88/3e/c26f8192c93e8e43b426787cec04bb46ac36e72b1033b7fe5a9267155fdf/fastrlock-0.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:5e5f1665d8e70f4c5b4a67f2db202f354abc80a321ce5a26ac1493f055e3ae2c", size = 31046, upload-time = "2024-12-17T11:02:15.033Z" }, { url = "https://files.pythonhosted.org/packages/00/df/56270f2e10c1428855c990e7a7e5baafa9e1262b8e789200bd1d047eb501/fastrlock-0.8.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8cb2cf04352ea8575d496f31b3b88c42c7976e8e58cdd7d1550dfba80ca039da", size = 55727, upload-time = "2024-12-17T11:02:17.26Z" }, - { url = "https://files.pythonhosted.org/packages/57/21/ea1511b0ef0d5457efca3bf1823effb9c5cad4fc9dca86ce08e4d65330ce/fastrlock-0.8.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:85a49a1f1e020097d087e1963e42cea6f307897d5ebe2cb6daf4af47ffdd3eed", size = 52201, upload-time = "2024-12-17T11:02:19.512Z" }, { url = "https://files.pythonhosted.org/packages/80/07/cdecb7aa976f34328372f1c4efd6c9dc1b039b3cc8d3f38787d640009a25/fastrlock-0.8.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5f13ec08f1adb1aa916c384b05ecb7dbebb8df9ea81abd045f60941c6283a670", size = 53924, upload-time = "2024-12-17T11:02:20.85Z" }, - { url = "https://files.pythonhosted.org/packages/88/6d/59c497f8db9a125066dd3a7442fab6aecbe90d6fec344c54645eaf311666/fastrlock-0.8.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0ea4e53a04980d646def0f5e4b5e8bd8c7884288464acab0b37ca0c65c482bfe", size = 52140, upload-time = "2024-12-17T11:02:22.263Z" }, { url = 
"https://files.pythonhosted.org/packages/62/04/9138943c2ee803d62a48a3c17b69de2f6fa27677a6896c300369e839a550/fastrlock-0.8.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:38340f6635bd4ee2a4fb02a3a725759fe921f2ca846cb9ca44531ba739cc17b4", size = 53261, upload-time = "2024-12-17T11:02:24.418Z" }, { url = "https://files.pythonhosted.org/packages/e2/4b/db35a52589764c7745a613b6943bbd018f128d42177ab92ee7dde88444f6/fastrlock-0.8.3-cp312-cp312-win_amd64.whl", hash = "sha256:da06d43e1625e2ffddd303edcd6d2cd068e1c486f5fd0102b3f079c44eb13e2c", size = 31235, upload-time = "2024-12-17T11:02:25.708Z" }, { url = "https://files.pythonhosted.org/packages/92/74/7b13d836c3f221cff69d6f418f46c2a30c4b1fe09a8ce7db02eecb593185/fastrlock-0.8.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5264088185ca8e6bc83181dff521eee94d078c269c7d557cc8d9ed5952b7be45", size = 54157, upload-time = "2024-12-17T11:02:29.196Z" }, - { url = "https://files.pythonhosted.org/packages/06/77/f06a907f9a07d26d0cca24a4385944cfe70d549a2c9f1c3e3217332f4f12/fastrlock-0.8.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a98ba46b3e14927550c4baa36b752d0d2f7387b8534864a8767f83cce75c160", size = 50954, upload-time = "2024-12-17T11:02:32.12Z" }, { url = "https://files.pythonhosted.org/packages/f9/4e/94480fb3fd93991dd6f4e658b77698edc343f57caa2870d77b38c89c2e3b/fastrlock-0.8.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbdea6deeccea1917c6017d353987231c4e46c93d5338ca3e66d6cd88fbce259", size = 52535, upload-time = "2024-12-17T11:02:33.402Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a7/ee82bb55b6c0ca30286dac1e19ee9417a17d2d1de3b13bb0f20cefb86086/fastrlock-0.8.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c6e5bfecbc0d72ff07e43fed81671747914d6794e0926700677ed26d894d4f4f", size = 50942, upload-time = "2024-12-17T11:02:34.688Z" }, { url = 
"https://files.pythonhosted.org/packages/63/1d/d4b7782ef59e57dd9dde69468cc245adafc3674281905e42fa98aac30a79/fastrlock-0.8.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:2a83d558470c520ed21462d304e77a12639859b205759221c8144dd2896b958a", size = 52044, upload-time = "2024-12-17T11:02:36.613Z" }, { url = "https://files.pythonhosted.org/packages/28/a3/2ad0a0a69662fd4cf556ab8074f0de978ee9b56bff6ddb4e656df4aa9e8e/fastrlock-0.8.3-cp313-cp313-win_amd64.whl", hash = "sha256:8d1d6a28291b4ace2a66bd7b49a9ed9c762467617febdd9ab356b867ed901af8", size = 30472, upload-time = "2024-12-17T11:02:37.983Z" }, ] @@ -2108,11 +2666,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.20.2" +version = "3.23.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c1/e0/a75dbe4bca1e7d41307323dad5ea2efdd95408f74ab2de8bd7dba9b51a1a/filelock-3.20.2.tar.gz", hash = "sha256:a2241ff4ddde2a7cebddf78e39832509cb045d18ec1a09d7248d6bfc6bfbbe64", size = 19510, upload-time = "2026-01-02T15:33:32.582Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/f7/5e0dec5165ca52203d9f2c248db0a72dd31d6f15aad0b1e4a874f2187452/filelock-3.23.0.tar.gz", hash = "sha256:f64442f6f4707b9385049bb490be0bc48e3ab8e74ad27d4063435252917f4d4b", size = 32798, upload-time = "2026-02-14T02:53:58.703Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/30/ab407e2ec752aa541704ed8f93c11e2a5d92c168b8a755d818b74a3c5c2d/filelock-3.20.2-py3-none-any.whl", hash = "sha256:fbba7237d6ea277175a32c54bb71ef814a8546d8601269e1bfc388de333974e8", size = 16697, upload-time = "2026-01-02T15:33:31.133Z" }, + { url = "https://files.pythonhosted.org/packages/0b/10/da216e25ef2f3c9dfa75574aa27f5f4c7e5fb5540308f04e4d8c4d834ecb/filelock-3.23.0-py3-none-any.whl", hash = "sha256:4203c3f43983c7c95e4bbb68786f184f6acb7300899bf99d686bb82d526bdf62", size = 22227, upload-time = "2026-02-14T02:53:56.122Z" }, ] [[package]] @@ -2124,10 +2682,24 @@ dependencies = [ { name = 
"numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f6/1d/ac8914360460fafa1990890259b7fa5ef7ba4cd59014e782e4ab3ab144d8/filterpy-1.4.5.zip", hash = "sha256:4f2a4d39e4ea601b9ab42b2db08b5918a9538c168cff1c6895ae26646f3d73b1", size = 177985, upload-time = "2018-10-10T22:38:24.63Z" } +[[package]] +name = "flake8" +version = "7.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mccabe" }, + { name = "pycodestyle" }, + { name = "pyflakes" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/16/3f2a0bb700ad65ac9663262905a025917c020a3f92f014d2ba8964b4602c/flake8-7.1.2.tar.gz", hash = "sha256:c586ffd0b41540951ae41af572e6790dbd49fc12b3aa2541685d253d9bd504bd", size = 48119, upload-time = "2025-02-16T18:45:44.296Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/f8/08d37b2cd89da306e3520bd27f8a85692122b42b56c0c2c3784ff09c022f/flake8-7.1.2-py2.py3-none-any.whl", hash = "sha256:1cbc62e65536f65e6d754dfe6f1bada7f5cf392d6f5db3c2b85892466c3e7c1a", size = 57745, upload-time = "2025-02-16T18:45:42.351Z" }, +] + [[package]] name = "flask" version = "3.1.2" @@ -2207,15 +2779,19 @@ wheels = [ [[package]] name = "flax" -version = "0.12.2" +version = "0.12.4" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and 
sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -2224,20 +2800,21 @@ resolution-markers = [ "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and 
sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", ] dependencies = [ - { name = "jax", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jax", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "msgpack", marker = "python_full_version >= '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "optax", marker = "python_full_version >= '3.11'" }, { name = "orbax-checkpoint", marker = "python_full_version >= '3.11'" }, + { name = "orbax-export", marker = "python_full_version >= '3.11'" }, { name = "pyyaml", marker = "python_full_version >= '3.11'" }, { name = "rich", marker = "python_full_version >= '3.11'" }, - { name = "tensorstore", version = "0.1.80", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "tensorstore", version = "0.1.81", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "treescope", marker = "python_full_version >= '3.11'" }, { name = "typing-extensions", marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6b/7e/c4c66ab9b41149cf7a1961907d9a844832af1e76b121b35235a618c92825/flax-0.12.2.tar.gz", hash = "sha256:e9723b0881e571abe61885bb8770f53fdb3c383b6b3f5a923dcf6f1e9a687905", size = 5008370, upload-time = "2025-12-18T22:36:19.988Z" } +sdist = { url = "https://files.pythonhosted.org/packages/48/81/802fd686d3f47d7560a83f73b23efff03de7e3a0342e4f0fc41680136709/flax-0.12.4.tar.gz", hash = "sha256:5e924734a0595ddfa06a824568617e5440c7948e744772cbe6101b7ae06d66a9", size = 5070824, upload-time = "2026-02-12T19:10:17.048Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b7/6b/7b75508251f4220df8f68e7718b476ee3d614a2a51f9eace97393ee91b46/flax-0.12.2-py3-none-any.whl", hash = "sha256:912fdd8a7c623ec8b2694b28d2827608e7fc82a3a6f8fff17ec5038f2bca66f4", size = 488031, upload-time = "2025-12-18T22:36:18.01Z" }, + { url = "https://files.pythonhosted.org/packages/16/e9/bf4bbcf9d3a5634531cb0bcbec96db13353a9113fdc424464223234780fb/flax-0.12.4-py3-none-any.whl", hash = "sha256:cf90707923cb8a6d1a542039dd61e470c94bb11d7cac2349941a07f66605b19e", size = 493441, upload-time = "2026-02-12T19:10:14.847Z" }, ] [[package]] @@ -2309,27 +2886,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c4/73/3a3e6cb864ddf98800a9236ad497d32e5b50eb1682ac659f7d669d92faec/foxglove_websocket-0.1.4-py3-none-any.whl", hash = "sha256:772e24e2c98bdfc704df53f7177c8ff5bab0abc4dac59a91463aca16debdd83a", size = 14392, upload-time = "2025-07-14T20:26:26.899Z" }, ] -[[package]] -name = "freetype-py" -version = "2.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/9c/61ba17f846b922c2d6d101cc886b0e8fb597c109cedfcb39b8c5d2304b54/freetype-py-2.5.1.zip", hash = "sha256:cfe2686a174d0dd3d71a9d8ee9bf6a2c23f5872385cf8ce9f24af83d076e2fbd", size = 851738, upload-time = "2024-08-29T18:32:26.37Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/a8/258dd138ebe60c79cd8cfaa6d021599208a33f0175a5e29b01f60c9ab2c7/freetype_py-2.5.1-py3-none-macosx_10_9_universal2.whl", hash = "sha256:d01ded2557694f06aa0413f3400c0c0b2b5ebcaabeef7aaf3d756be44f51e90b", size = 1747885, upload-time = "2024-08-29T18:32:17.604Z" }, - { url = "https://files.pythonhosted.org/packages/a2/93/280ad06dc944e40789b0a641492321a2792db82edda485369cbc59d14366/freetype_py-2.5.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d2f6b3d68496797da23204b3b9c4e77e67559c80390fc0dc8b3f454ae1cd819", size = 1051055, upload-time = "2024-08-29T18:32:19.153Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/36/853cad240ec63e21a37a512ee19c896b655ce1772d803a3dd80fccfe63fe/freetype_py-2.5.1-py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:289b443547e03a4f85302e3ac91376838e0d11636050166662a4f75e3087ed0b", size = 1043856, upload-time = "2024-08-29T18:32:20.565Z" }, - { url = "https://files.pythonhosted.org/packages/93/6f/fcc1789e42b8c6617c3112196d68e87bfe7d957d80812d3c24d639782dcb/freetype_py-2.5.1-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:cd3bfdbb7e1a84818cfbc8025fca3096f4f2afcd5d4641184bf0a3a2e6f97bbf", size = 1108180, upload-time = "2024-08-29T18:32:21.871Z" }, - { url = "https://files.pythonhosted.org/packages/2a/1b/161d3a6244b8a820aef188e4397a750d4a8196316809576d015f26594296/freetype_py-2.5.1-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:3c1aefc4f0d5b7425f014daccc5fdc7c6f914fb7d6a695cc684f1c09cd8c1660", size = 1106792, upload-time = "2024-08-29T18:32:23.134Z" }, - { url = "https://files.pythonhosted.org/packages/93/6e/bd7fbfacca077bc6f34f1a1109800a2c41ab50f4704d3a0507ba41009915/freetype_py-2.5.1-py3-none-win_amd64.whl", hash = "sha256:0b7f8e0342779f65ca13ef8bc103938366fecade23e6bb37cb671c2b8ad7f124", size = 814608, upload-time = "2024-08-29T18:32:24.648Z" }, -] - [[package]] name = "fsspec" -version = "2025.12.0" +version = "2026.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/27/954057b0d1f53f086f681755207dda6de6c660ce133c829158e8e8fe7895/fsspec-2025.12.0.tar.gz", hash = "sha256:c505de011584597b1060ff778bb664c1bc022e87921b0e4f10cc9c44f9635973", size = 309748, upload-time = "2025-12-03T15:23:42.687Z" } +sdist = { url = "https://files.pythonhosted.org/packages/51/7c/f60c259dcbf4f0c47cc4ddb8f7720d2dcdc8888c8e5ad84c73ea4531cc5b/fsspec-2026.2.0.tar.gz", hash = "sha256:6544e34b16869f5aacd5b90bdf1a71acb37792ea3ddf6125ee69a22a53fb8bff", size = 313441, upload-time = 
"2026-02-05T21:50:53.743Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/c7/b64cae5dba3a1b138d7123ec36bb5ccd39d39939f18454407e5468f4763f/fsspec-2025.12.0-py3-none-any.whl", hash = "sha256:8bf1fe301b7d8acfa6e8571e3b1c3d158f909666642431cc78a1b7b4dbc5ec5b", size = 201422, upload-time = "2025-12-03T15:23:41.434Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437", size = 202505, upload-time = "2026-02-05T21:50:51.819Z" }, ] [[package]] @@ -2382,19 +2945,14 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/02/6e639e90f181dc9127046e00d0528f9f7ad12d428972e3a5378b9aefdb0b/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.p39.p310.p311.p312.p313-none-manylinux_2_28_x86_64.whl", hash = "sha256:7916034efa867927892635733a3b6af8cd95ceb10566fd7f1e0d2763c2ee8b12", size = 243525, upload-time = "2025-09-12T08:54:34.006Z" }, { url = "https://files.pythonhosted.org/packages/84/06/cb588ca65561defe0fc48d1df4c2ac12569b81231ae4f2b52ab37007d0bd/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.p39.p310.p311.p312.p313-none-win32.whl", hash = "sha256:6c9549da71b93e367b4d71438798daae1da2592039fd14204a80a1a2348ae127", size = 552685, upload-time = "2025-09-12T08:54:35.723Z" }, { url = "https://files.pythonhosted.org/packages/86/27/00c9c96af18ac0a5eac2ff61cbe306551a2d770d7173f396d0792ee1a59e/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.p39.p310.p311.p312.p313-none-win_amd64.whl", hash = "sha256:6292d5d6634d668cd23d337e6089491d3945a9aa4ac6e1667b0003520d7caa51", size = 559466, upload-time = "2025-09-12T08:54:37.661Z" }, -] - -[[package]] -name = "google-auth" -version = "2.47.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyasn1-modules" }, - { name = "rsa" }, -] -sdist = { url 
= "https://files.pythonhosted.org/packages/60/3c/ec64b9a275ca22fa1cd3b6e77fefcf837b0732c890aa32d2bd21313d9b33/google_auth-2.47.0.tar.gz", hash = "sha256:833229070a9dfee1a353ae9877dcd2dec069a8281a4e72e72f77d4a70ff945da", size = 323719, upload-time = "2026-01-06T21:55:31.045Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/db/18/79e9008530b79527e0d5f79e7eef08d3b179b7f851cfd3a2f27822fbdfa9/google_auth-2.47.0-py3-none-any.whl", hash = "sha256:c516d68336bfde7cf0da26aab674a36fedcf04b37ac4edd59c597178760c3498", size = 234867, upload-time = "2026-01-06T21:55:28.6Z" }, + { url = "https://files.pythonhosted.org/packages/b3/87/de0b33f6f00687499ca1371f22aa73396341b85bf88f1a284f9da8842493/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.py39.py310.py311.py312.py313.py314-none-macosx_10_6_intel.whl", hash = "sha256:2aab89d2d9535635ba011fc7303390685169a1aa6731ad580d08d043524b8899", size = 105326, upload-time = "2026-01-28T05:57:56.083Z" }, + { url = "https://files.pythonhosted.org/packages/b6/a6/6ea2f73ad4474896d9e38b3ffbe6ffd5a802c738392269e99e8c6621a461/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.py39.py310.py311.py312.py313.py314-none-macosx_11_0_arm64.whl", hash = "sha256:23936202a107039b5372f0b88ae1d11080746aa1c78910a45d4a0c4cf408cfaa", size = 102180, upload-time = "2026-01-28T05:57:57.787Z" }, + { url = "https://files.pythonhosted.org/packages/58/19/d81b19e8261b9cb51b81d1402167791fef81088dfe91f0c4e9d136fdc5ca/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.py39.py310.py311.py312.py313.py314-none-manylinux2014_aarch64.whl", hash = "sha256:7be06d0838f61df67bd54cb6266a6193d54083acb3624ff3c3812a6358406fa4", size = 230038, upload-time = "2026-01-28T05:57:59.105Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/fa/b035636cd82198b97b51a93efe9cfc4343d6b15cefbd336a3f2be871d848/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.py39.py310.py311.py312.py313.py314-none-manylinux2014_x86_64.whl", hash = "sha256:91d36b3582a766512eff8e3b5dcc2d3ffcbf10b7cf448551085a08a10f1b8244", size = 241983, upload-time = "2026-01-28T05:58:00.352Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b4/f7b6cc022dd7c68b6c702d19da5d591f978f89c958b9bd3090615db0c739/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.py39.py310.py311.py312.py313.py314-none-manylinux_2_28_aarch64.whl", hash = "sha256:27c9e9a2d5e1dc3c9e3996171d844d9df9a5a101e797cb94cce217b7afcf8fd9", size = 231053, upload-time = "2026-01-28T05:58:01.683Z" }, + { url = "https://files.pythonhosted.org/packages/5a/3f/efeb7c6801c46e11bd666a5180f0d615f74f72264212f74f39586c6fda9d/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.py39.py310.py311.py312.py313.py314-none-manylinux_2_28_x86_64.whl", hash = "sha256:ce6724bb7cb3d0543dcba17206dce909f94176e68220b8eafee72e9f92bcf542", size = 243522, upload-time = "2026-01-28T05:58:03.517Z" }, + { url = "https://files.pythonhosted.org/packages/cf/b9/b04c3aa0aad2870cfe799f32f8b59789c98e1816bbce9e83f4823c5b840b/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.py39.py310.py311.py312.py313.py314-none-win32.whl", hash = "sha256:fca724a21a372731edb290841edd28a9fb1ee490f833392752844ac807c0086a", size = 552682, upload-time = "2026-01-28T05:58:05.649Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e1/6d6816b296a529ac9b897ad228b1e084eb1f92319e96371880eebdc874a6/glfw-2.10.0-py2.py27.py3.py30.py31.py32.py33.py34.py35.py36.py37.py38.py39.py310.py311.py312.py313.py314-none-win_amd64.whl", hash = "sha256:823c0bd7770977d4b10e0ed0aef2f3682276b7c88b8b65cfc540afce5951392f", size = 559464, upload-time = "2026-01-28T05:58:07.261Z" }, ] [[package]] @@ -2455,63 +3013,63 @@ sdist = { url 
= "https://files.pythonhosted.org/packages/fe/26/bca4d737a9acea25e [[package]] name = "grpcio" -version = "1.76.0" +version = "1.78.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/17/ff4795dc9a34b6aee6ec379f1b66438a3789cd1315aac0cbab60d92f74b3/grpcio-1.76.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:65a20de41e85648e00305c1bb09a3598f840422e522277641145a32d42dcefcc", size = 5840037, upload-time = "2025-10-21T16:20:25.069Z" }, - { url = "https://files.pythonhosted.org/packages/4e/ff/35f9b96e3fa2f12e1dcd58a4513a2e2294a001d64dec81677361b7040c9a/grpcio-1.76.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:40ad3afe81676fd9ec6d9d406eda00933f218038433980aa19d401490e46ecde", size = 11836482, upload-time = "2025-10-21T16:20:30.113Z" }, - { url = "https://files.pythonhosted.org/packages/3e/1c/8374990f9545e99462caacea5413ed783014b3b66ace49e35c533f07507b/grpcio-1.76.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:035d90bc79eaa4bed83f524331d55e35820725c9fbb00ffa1904d5550ed7ede3", size = 6407178, upload-time = "2025-10-21T16:20:32.733Z" }, - { url = "https://files.pythonhosted.org/packages/1e/77/36fd7d7c75a6c12542c90a6d647a27935a1ecaad03e0ffdb7c42db6b04d2/grpcio-1.76.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4215d3a102bd95e2e11b5395c78562967959824156af11fa93d18fdd18050990", size = 7075684, upload-time = "2025-10-21T16:20:35.435Z" }, - { url = 
"https://files.pythonhosted.org/packages/38/f7/e3cdb252492278e004722306c5a8935eae91e64ea11f0af3437a7de2e2b7/grpcio-1.76.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:49ce47231818806067aea3324d4bf13825b658ad662d3b25fada0bdad9b8a6af", size = 6611133, upload-time = "2025-10-21T16:20:37.541Z" }, - { url = "https://files.pythonhosted.org/packages/7e/20/340db7af162ccd20a0893b5f3c4a5d676af7b71105517e62279b5b61d95a/grpcio-1.76.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8cc3309d8e08fd79089e13ed4819d0af72aa935dd8f435a195fd152796752ff2", size = 7195507, upload-time = "2025-10-21T16:20:39.643Z" }, - { url = "https://files.pythonhosted.org/packages/10/f0/b2160addc1487bd8fa4810857a27132fb4ce35c1b330c2f3ac45d697b106/grpcio-1.76.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:971fd5a1d6e62e00d945423a567e42eb1fa678ba89072832185ca836a94daaa6", size = 8160651, upload-time = "2025-10-21T16:20:42.492Z" }, - { url = "https://files.pythonhosted.org/packages/2c/2c/ac6f98aa113c6ef111b3f347854e99ebb7fb9d8f7bb3af1491d438f62af4/grpcio-1.76.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d9adda641db7207e800a7f089068f6f645959f2df27e870ee81d44701dd9db3", size = 7620568, upload-time = "2025-10-21T16:20:45.995Z" }, - { url = "https://files.pythonhosted.org/packages/90/84/7852f7e087285e3ac17a2703bc4129fafee52d77c6c82af97d905566857e/grpcio-1.76.0-cp310-cp310-win32.whl", hash = "sha256:063065249d9e7e0782d03d2bca50787f53bd0fb89a67de9a7b521c4a01f1989b", size = 3998879, upload-time = "2025-10-21T16:20:48.592Z" }, - { url = "https://files.pythonhosted.org/packages/10/30/d3d2adcbb6dd3ff59d6ac3df6ef830e02b437fb5c90990429fd180e52f30/grpcio-1.76.0-cp310-cp310-win_amd64.whl", hash = "sha256:a6ae758eb08088d36812dd5d9af7a9859c05b1e0f714470ea243694b49278e7b", size = 4706892, upload-time = "2025-10-21T16:20:50.697Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/00/8163a1beeb6971f66b4bbe6ac9457b97948beba8dd2fc8e1281dce7f79ec/grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a", size = 5843567, upload-time = "2025-10-21T16:20:52.829Z" }, - { url = "https://files.pythonhosted.org/packages/10/c1/934202f5cf335e6d852530ce14ddb0fef21be612ba9ecbbcbd4d748ca32d/grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c", size = 11848017, upload-time = "2025-10-21T16:20:56.705Z" }, - { url = "https://files.pythonhosted.org/packages/11/0b/8dec16b1863d74af6eb3543928600ec2195af49ca58b16334972f6775663/grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465", size = 6412027, upload-time = "2025-10-21T16:20:59.3Z" }, - { url = "https://files.pythonhosted.org/packages/d7/64/7b9e6e7ab910bea9d46f2c090380bab274a0b91fb0a2fe9b0cd399fffa12/grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48", size = 7075913, upload-time = "2025-10-21T16:21:01.645Z" }, - { url = "https://files.pythonhosted.org/packages/68/86/093c46e9546073cefa789bd76d44c5cb2abc824ca62af0c18be590ff13ba/grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da", size = 6615417, upload-time = "2025-10-21T16:21:03.844Z" }, - { url = "https://files.pythonhosted.org/packages/f7/b6/5709a3a68500a9c03da6fb71740dcdd5ef245e39266461a03f31a57036d8/grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397", size = 7199683, upload-time = "2025-10-21T16:21:06.195Z" }, - { url = 
"https://files.pythonhosted.org/packages/91/d3/4b1f2bf16ed52ce0b508161df3a2d186e4935379a159a834cb4a7d687429/grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749", size = 8163109, upload-time = "2025-10-21T16:21:08.498Z" }, - { url = "https://files.pythonhosted.org/packages/5c/61/d9043f95f5f4cf085ac5dd6137b469d41befb04bd80280952ffa2a4c3f12/grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00", size = 7626676, upload-time = "2025-10-21T16:21:10.693Z" }, - { url = "https://files.pythonhosted.org/packages/36/95/fd9a5152ca02d8881e4dd419cdd790e11805979f499a2e5b96488b85cf27/grpcio-1.76.0-cp311-cp311-win32.whl", hash = "sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054", size = 3997688, upload-time = "2025-10-21T16:21:12.746Z" }, - { url = "https://files.pythonhosted.org/packages/60/9c/5c359c8d4c9176cfa3c61ecd4efe5affe1f38d9bae81e81ac7186b4c9cc8/grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d", size = 4709315, upload-time = "2025-10-21T16:21:15.26Z" }, - { url = "https://files.pythonhosted.org/packages/bf/05/8e29121994b8d959ffa0afd28996d452f291b48cfc0875619de0bde2c50c/grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8", size = 5799718, upload-time = "2025-10-21T16:21:17.939Z" }, - { url = "https://files.pythonhosted.org/packages/d9/75/11d0e66b3cdf998c996489581bdad8900db79ebd83513e45c19548f1cba4/grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280", size = 11825627, upload-time = "2025-10-21T16:21:20.466Z" }, - { url = 
"https://files.pythonhosted.org/packages/28/50/2f0aa0498bc188048f5d9504dcc5c2c24f2eb1a9337cd0fa09a61a2e75f0/grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4", size = 6359167, upload-time = "2025-10-21T16:21:23.122Z" }, - { url = "https://files.pythonhosted.org/packages/66/e5/bbf0bb97d29ede1d59d6588af40018cfc345b17ce979b7b45424628dc8bb/grpcio-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11", size = 7044267, upload-time = "2025-10-21T16:21:25.995Z" }, - { url = "https://files.pythonhosted.org/packages/f5/86/f6ec2164f743d9609691115ae8ece098c76b894ebe4f7c94a655c6b03e98/grpcio-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6", size = 6573963, upload-time = "2025-10-21T16:21:28.631Z" }, - { url = "https://files.pythonhosted.org/packages/60/bc/8d9d0d8505feccfdf38a766d262c71e73639c165b311c9457208b56d92ae/grpcio-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8", size = 7164484, upload-time = "2025-10-21T16:21:30.837Z" }, - { url = "https://files.pythonhosted.org/packages/67/e6/5d6c2fc10b95edf6df9b8f19cf10a34263b7fd48493936fffd5085521292/grpcio-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980", size = 8127777, upload-time = "2025-10-21T16:21:33.577Z" }, - { url = "https://files.pythonhosted.org/packages/3f/c8/dce8ff21c86abe025efe304d9e31fdb0deaaa3b502b6a78141080f206da0/grpcio-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882", size = 7594014, upload-time = "2025-10-21T16:21:41.882Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/42/ad28191ebf983a5d0ecef90bab66baa5a6b18f2bfdef9d0a63b1973d9f75/grpcio-1.76.0-cp312-cp312-win32.whl", hash = "sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958", size = 3984750, upload-time = "2025-10-21T16:21:44.006Z" }, - { url = "https://files.pythonhosted.org/packages/9e/00/7bd478cbb851c04a48baccaa49b75abaa8e4122f7d86da797500cccdd771/grpcio-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347", size = 4704003, upload-time = "2025-10-21T16:21:46.244Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ed/71467ab770effc9e8cef5f2e7388beb2be26ed642d567697bb103a790c72/grpcio-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:26ef06c73eb53267c2b319f43e6634c7556ea37672029241a056629af27c10e2", size = 5807716, upload-time = "2025-10-21T16:21:48.475Z" }, - { url = "https://files.pythonhosted.org/packages/2c/85/c6ed56f9817fab03fa8a111ca91469941fb514e3e3ce6d793cb8f1e1347b/grpcio-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:45e0111e73f43f735d70786557dc38141185072d7ff8dc1829d6a77ac1471468", size = 11821522, upload-time = "2025-10-21T16:21:51.142Z" }, - { url = "https://files.pythonhosted.org/packages/ac/31/2b8a235ab40c39cbc141ef647f8a6eb7b0028f023015a4842933bc0d6831/grpcio-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83d57312a58dcfe2a3a0f9d1389b299438909a02db60e2f2ea2ae2d8034909d3", size = 6362558, upload-time = "2025-10-21T16:21:54.213Z" }, - { url = "https://files.pythonhosted.org/packages/bd/64/9784eab483358e08847498ee56faf8ff6ea8e0a4592568d9f68edc97e9e9/grpcio-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3e2a27c89eb9ac3d81ec8835e12414d73536c6e620355d65102503064a4ed6eb", size = 7049990, upload-time = "2025-10-21T16:21:56.476Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/94/8c12319a6369434e7a184b987e8e9f3b49a114c489b8315f029e24de4837/grpcio-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61f69297cba3950a524f61c7c8ee12e55c486cb5f7db47ff9dcee33da6f0d3ae", size = 6575387, upload-time = "2025-10-21T16:21:59.051Z" }, - { url = "https://files.pythonhosted.org/packages/15/0f/f12c32b03f731f4a6242f771f63039df182c8b8e2cf8075b245b409259d4/grpcio-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6a15c17af8839b6801d554263c546c69c4d7718ad4321e3166175b37eaacca77", size = 7166668, upload-time = "2025-10-21T16:22:02.049Z" }, - { url = "https://files.pythonhosted.org/packages/ff/2d/3ec9ce0c2b1d92dd59d1c3264aaec9f0f7c817d6e8ac683b97198a36ed5a/grpcio-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:25a18e9810fbc7e7f03ec2516addc116a957f8cbb8cbc95ccc80faa072743d03", size = 8124928, upload-time = "2025-10-21T16:22:04.984Z" }, - { url = "https://files.pythonhosted.org/packages/1a/74/fd3317be5672f4856bcdd1a9e7b5e17554692d3db9a3b273879dc02d657d/grpcio-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:931091142fd8cc14edccc0845a79248bc155425eee9a98b2db2ea4f00a235a42", size = 7589983, upload-time = "2025-10-21T16:22:07.881Z" }, - { url = "https://files.pythonhosted.org/packages/45/bb/ca038cf420f405971f19821c8c15bcbc875505f6ffadafe9ffd77871dc4c/grpcio-1.76.0-cp313-cp313-win32.whl", hash = "sha256:5e8571632780e08526f118f74170ad8d50fb0a48c23a746bef2a6ebade3abd6f", size = 3984727, upload-time = "2025-10-21T16:22:10.032Z" }, - { url = "https://files.pythonhosted.org/packages/41/80/84087dc56437ced7cdd4b13d7875e7439a52a261e3ab4e06488ba6173b0a/grpcio-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:f9f7bd5faab55f47231ad8dba7787866b69f5e93bc306e3915606779bbfb4ba8", size = 4702799, upload-time = "2025-10-21T16:22:12.709Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/46/39adac80de49d678e6e073b70204091e76631e03e94928b9ea4ecf0f6e0e/grpcio-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:ff8a59ea85a1f2191a0ffcc61298c571bc566332f82e5f5be1b83c9d8e668a62", size = 5808417, upload-time = "2025-10-21T16:22:15.02Z" }, - { url = "https://files.pythonhosted.org/packages/9c/f5/a4531f7fb8b4e2a60b94e39d5d924469b7a6988176b3422487be61fe2998/grpcio-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06c3d6b076e7b593905d04fdba6a0525711b3466f43b3400266f04ff735de0cd", size = 11828219, upload-time = "2025-10-21T16:22:17.954Z" }, - { url = "https://files.pythonhosted.org/packages/4b/1c/de55d868ed7a8bd6acc6b1d6ddc4aa36d07a9f31d33c912c804adb1b971b/grpcio-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fd5ef5932f6475c436c4a55e4336ebbe47bd3272be04964a03d316bbf4afbcbc", size = 6367826, upload-time = "2025-10-21T16:22:20.721Z" }, - { url = "https://files.pythonhosted.org/packages/59/64/99e44c02b5adb0ad13ab3adc89cb33cb54bfa90c74770f2607eea629b86f/grpcio-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b331680e46239e090f5b3cead313cc772f6caa7d0fc8de349337563125361a4a", size = 7049550, upload-time = "2025-10-21T16:22:23.637Z" }, - { url = "https://files.pythonhosted.org/packages/43/28/40a5be3f9a86949b83e7d6a2ad6011d993cbe9b6bd27bea881f61c7788b6/grpcio-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2229ae655ec4e8999599469559e97630185fdd53ae1e8997d147b7c9b2b72cba", size = 6575564, upload-time = "2025-10-21T16:22:26.016Z" }, - { url = "https://files.pythonhosted.org/packages/4b/a9/1be18e6055b64467440208a8559afac243c66a8b904213af6f392dc2212f/grpcio-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:490fa6d203992c47c7b9e4a9d39003a0c2bcc1c9aa3c058730884bbbb0ee9f09", size = 7176236, upload-time = "2025-10-21T16:22:28.362Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/55/dba05d3fcc151ce6e81327541d2cc8394f442f6b350fead67401661bf041/grpcio-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:479496325ce554792dba6548fae3df31a72cef7bad71ca2e12b0e58f9b336bfc", size = 8125795, upload-time = "2025-10-21T16:22:31.075Z" }, - { url = "https://files.pythonhosted.org/packages/4a/45/122df922d05655f63930cf42c9e3f72ba20aadb26c100ee105cad4ce4257/grpcio-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1c9b93f79f48b03ada57ea24725d83a30284a012ec27eab2cf7e50a550cbbbcc", size = 7592214, upload-time = "2025-10-21T16:22:33.831Z" }, - { url = "https://files.pythonhosted.org/packages/4a/6e/0b899b7f6b66e5af39e377055fb4a6675c9ee28431df5708139df2e93233/grpcio-1.76.0-cp314-cp314-win32.whl", hash = "sha256:747fa73efa9b8b1488a95d0ba1039c8e2dca0f741612d80415b1e1c560febf4e", size = 4062961, upload-time = "2025-10-21T16:22:36.468Z" }, - { url = "https://files.pythonhosted.org/packages/19/41/0b430b01a2eb38ee887f88c1f07644a1df8e289353b78e82b37ef988fb64/grpcio-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:922fa70ba549fce362d2e2871ab542082d66e2aaf0c19480ea453905b01f384e", size = 4834462, upload-time = "2025-10-21T16:22:39.772Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/06/8a/3d098f35c143a89520e568e6539cc098fcd294495910e359889ce8741c84/grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5", size = 12852416, upload-time = "2026-02-06T09:57:18.093Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/a8/690a085b4d1fe066130de97a87de32c45062cf2ecd218df9675add895550/grpcio-1.78.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:7cc47943d524ee0096f973e1081cb8f4f17a4615f2116882a5f1416e4cfe92b5", size = 5946986, upload-time = "2026-02-06T09:54:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/c7/1b/e5213c5c0ced9d2d92778d30529ad5bb2dcfb6c48c4e2d01b1f302d33d64/grpcio-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash 
= "sha256:c3f293fdc675ccba4db5a561048cca627b5e7bd1c8a6973ffedabe7d116e22e2", size = 11816533, upload-time = "2026-02-06T09:54:37.04Z" }, + { url = "https://files.pythonhosted.org/packages/18/37/1ba32dccf0a324cc5ace744c44331e300b000a924bf14840f948c559ede7/grpcio-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:10a9a644b5dd5aec3b82b5b0b90d41c0fa94c85ef42cb42cf78a23291ddb5e7d", size = 6519964, upload-time = "2026-02-06T09:54:40.268Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f5/c0e178721b818072f2e8b6fde13faaba942406c634009caf065121ce246b/grpcio-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4c5533d03a6cbd7f56acfc9cfb44ea64f63d29091e40e44010d34178d392d7eb", size = 7198058, upload-time = "2026-02-06T09:54:42.389Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b2/40d43c91ae9cd667edc960135f9f08e58faa1576dc95af29f66ec912985f/grpcio-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ff870aebe9a93a85283837801d35cd5f8814fe2ad01e606861a7fb47c762a2b7", size = 6727212, upload-time = "2026-02-06T09:54:44.91Z" }, + { url = "https://files.pythonhosted.org/packages/ed/88/9da42eed498f0efcfcd9156e48ae63c0cde3bea398a16c99fb5198c885b6/grpcio-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:391e93548644e6b2726f1bb84ed60048d4bcc424ce5e4af0843d28ca0b754fec", size = 7300845, upload-time = "2026-02-06T09:54:47.562Z" }, + { url = "https://files.pythonhosted.org/packages/23/3f/1c66b7b1b19a8828890e37868411a6e6925df5a9030bfa87ab318f34095d/grpcio-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:df2c8f3141f7cbd112a6ebbd760290b5849cda01884554f7c67acc14e7b1758a", size = 8284605, upload-time = "2026-02-06T09:54:50.475Z" }, + { url = "https://files.pythonhosted.org/packages/94/c4/ca1bd87394f7b033e88525384b4d1e269e8424ab441ea2fba1a0c5b50986/grpcio-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:bd8cb8026e5f5b50498a3c4f196f57f9db344dad829ffae16b82e4fdbaea2813", size = 7726672, upload-time = "2026-02-06T09:54:53.11Z" }, + { url = "https://files.pythonhosted.org/packages/41/09/f16e487d4cc65ccaf670f6ebdd1a17566b965c74fc3d93999d3b2821e052/grpcio-1.78.0-cp310-cp310-win32.whl", hash = "sha256:f8dff3d9777e5d2703a962ee5c286c239bf0ba173877cc68dc02c17d042e29de", size = 4076715, upload-time = "2026-02-06T09:54:55.549Z" }, + { url = "https://files.pythonhosted.org/packages/2a/32/4ce60d94e242725fd3bcc5673c04502c82a8e87b21ea411a63992dc39f8f/grpcio-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:94f95cf5d532d0e717eed4fc1810e8e6eded04621342ec54c89a7c2f14b581bf", size = 4799157, upload-time = "2026-02-06T09:54:59.838Z" }, + { url = "https://files.pythonhosted.org/packages/86/c7/d0b780a29b0837bf4ca9580904dfb275c1fc321ded7897d620af7047ec57/grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6", size = 5951525, upload-time = "2026-02-06T09:55:01.989Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b1/96920bf2ee61df85a9503cb6f733fe711c0ff321a5a697d791b075673281/grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e", size = 11830418, upload-time = "2026-02-06T09:55:04.462Z" }, + { url = "https://files.pythonhosted.org/packages/83/0c/7c1528f098aeb75a97de2bae18c530f56959fb7ad6c882db45d9884d6edc/grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911", size = 6524477, upload-time = "2026-02-06T09:55:07.111Z" }, + { url = "https://files.pythonhosted.org/packages/8d/52/e7c1f3688f949058e19a011c4e0dec973da3d0ae5e033909677f967ae1f4/grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e", size = 7198266, upload-time = 
"2026-02-06T09:55:10.016Z" }, + { url = "https://files.pythonhosted.org/packages/e5/61/8ac32517c1e856677282c34f2e7812d6c328fa02b8f4067ab80e77fdc9c9/grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303", size = 6730552, upload-time = "2026-02-06T09:55:12.207Z" }, + { url = "https://files.pythonhosted.org/packages/bd/98/b8ee0158199250220734f620b12e4a345955ac7329cfd908d0bf0fda77f0/grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04", size = 7304296, upload-time = "2026-02-06T09:55:15.044Z" }, + { url = "https://files.pythonhosted.org/packages/bd/0f/7b72762e0d8840b58032a56fdbd02b78fc645b9fa993d71abf04edbc54f4/grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec", size = 8288298, upload-time = "2026-02-06T09:55:17.276Z" }, + { url = "https://files.pythonhosted.org/packages/24/ae/ae4ce56bc5bb5caa3a486d60f5f6083ac3469228faa734362487176c15c5/grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074", size = 7730953, upload-time = "2026-02-06T09:55:19.545Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6e/8052e3a28eb6a820c372b2eb4b5e32d195c661e137d3eca94d534a4cfd8a/grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856", size = 4076503, upload-time = "2026-02-06T09:55:21.521Z" }, + { url = "https://files.pythonhosted.org/packages/08/62/f22c98c5265dfad327251fa2f840b591b1df5f5e15d88b19c18c86965b27/grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558", size = 4799767, upload-time = "2026-02-06T09:55:24.107Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/f4/7384ed0178203d6074446b3c4f46c90a22ddf7ae0b3aee521627f54cfc2a/grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97", size = 5913985, upload-time = "2026-02-06T09:55:26.832Z" }, + { url = "https://files.pythonhosted.org/packages/81/ed/be1caa25f06594463f685b3790b320f18aea49b33166f4141bfdc2bfb236/grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e", size = 11811853, upload-time = "2026-02-06T09:55:29.224Z" }, + { url = "https://files.pythonhosted.org/packages/24/a7/f06d151afc4e64b7e3cc3e872d331d011c279aaab02831e40a81c691fb65/grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996", size = 6475766, upload-time = "2026-02-06T09:55:31.825Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a8/4482922da832ec0082d0f2cc3a10976d84a7424707f25780b82814aafc0a/grpcio-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b2342d87af32790f934a79c3112641e7b27d63c261b8b4395350dad43eff1dc7", size = 7170027, upload-time = "2026-02-06T09:55:34.7Z" }, + { url = "https://files.pythonhosted.org/packages/54/bf/f4a3b9693e35d25b24b0b39fa46d7d8a3c439e0a3036c3451764678fec20/grpcio-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12a771591ae40bc65ba67048fa52ef4f0e6db8279e595fd349f9dfddeef571f9", size = 6690766, upload-time = "2026-02-06T09:55:36.902Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b9/521875265cc99fe5ad4c5a17010018085cae2810a928bf15ebe7d8bcd9cc/grpcio-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:185dea0d5260cbb2d224c507bf2a5444d5abbb1fa3594c1ed7e4c709d5eb8383", size = 7266161, upload-time = "2026-02-06T09:55:39.824Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/86/296a82844fd40a4ad4a95f100b55044b4f817dece732bf686aea1a284147/grpcio-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51b13f9aed9d59ee389ad666b8c2214cc87b5de258fa712f9ab05f922e3896c6", size = 8253303, upload-time = "2026-02-06T09:55:42.353Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e4/ea3c0caf5468537f27ad5aab92b681ed7cc0ef5f8c9196d3fd42c8c2286b/grpcio-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fd5f135b1bd58ab088930b3c613455796dfa0393626a6972663ccdda5b4ac6ce", size = 7698222, upload-time = "2026-02-06T09:55:44.629Z" }, + { url = "https://files.pythonhosted.org/packages/d7/47/7f05f81e4bb6b831e93271fb12fd52ba7b319b5402cbc101d588f435df00/grpcio-1.78.0-cp312-cp312-win32.whl", hash = "sha256:94309f498bcc07e5a7d16089ab984d42ad96af1d94b5a4eb966a266d9fcabf68", size = 4066123, upload-time = "2026-02-06T09:55:47.644Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e7/d6914822c88aa2974dbbd10903d801a28a19ce9cd8bad7e694cbbcf61528/grpcio-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:9566fe4ababbb2610c39190791e5b829869351d14369603702e890ef3ad2d06e", size = 4797657, upload-time = "2026-02-06T09:55:49.86Z" }, + { url = "https://files.pythonhosted.org/packages/05/a9/8f75894993895f361ed8636cd9237f4ab39ef87fd30db17467235ed1c045/grpcio-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ce3a90455492bf8bfa38e56fbbe1dbd4f872a3d8eeaf7337dc3b1c8aa28c271b", size = 5920143, upload-time = "2026-02-06T09:55:52.035Z" }, + { url = "https://files.pythonhosted.org/packages/55/06/0b78408e938ac424100100fd081189451b472236e8a3a1f6500390dc4954/grpcio-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2bf5e2e163b356978b23652c4818ce4759d40f4712ee9ec5a83c4be6f8c23a3a", size = 11803926, upload-time = "2026-02-06T09:55:55.494Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/93/b59fe7832ff6ae3c78b813ea43dac60e295fa03606d14d89d2e0ec29f4f3/grpcio-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8f2ac84905d12918e4e55a16da17939eb63e433dc11b677267c35568aa63fc84", size = 6478628, upload-time = "2026-02-06T09:55:58.533Z" }, + { url = "https://files.pythonhosted.org/packages/ed/df/e67e3734527f9926b7d9c0dde6cd998d1d26850c3ed8eeec81297967ac67/grpcio-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b58f37edab4a3881bc6c9bca52670610e0c9ca14e2ea3cf9debf185b870457fb", size = 7173574, upload-time = "2026-02-06T09:56:01.786Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/cc03fffb07bfba982a9ec097b164e8835546980aec25ecfa5f9c1a47e022/grpcio-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:735e38e176a88ce41840c21bb49098ab66177c64c82426e24e0082500cc68af5", size = 6692639, upload-time = "2026-02-06T09:56:04.529Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/289c32e301b85bdb67d7ec68b752155e674ee3ba2173a1858f118e399ef3/grpcio-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2045397e63a7a0ee7957c25f7dbb36ddc110e0cfb418403d110c0a7a68a844e9", size = 7268838, upload-time = "2026-02-06T09:56:08.397Z" }, + { url = "https://files.pythonhosted.org/packages/0e/79/1be93f32add280461fa4773880196572563e9c8510861ac2da0ea0f892b6/grpcio-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9f136fbafe7ccf4ac7e8e0c28b31066e810be52d6e344ef954a3a70234e1702", size = 8251878, upload-time = "2026-02-06T09:56:10.914Z" }, + { url = "https://files.pythonhosted.org/packages/65/65/793f8e95296ab92e4164593674ae6291b204bb5f67f9d4a711489cd30ffa/grpcio-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:748b6138585379c737adc08aeffd21222abbda1a86a0dca2a39682feb9196c20", size = 7695412, upload-time = "2026-02-06T09:56:13.593Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/9f/1e233fe697ecc82845942c2822ed06bb522e70d6771c28d5528e4c50f6a4/grpcio-1.78.0-cp313-cp313-win32.whl", hash = "sha256:271c73e6e5676afe4fc52907686670c7cea22ab2310b76a59b678403ed40d670", size = 4064899, upload-time = "2026-02-06T09:56:15.601Z" }, + { url = "https://files.pythonhosted.org/packages/4d/27/d86b89e36de8a951501fb06a0f38df19853210f341d0b28f83f4aa0ffa08/grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4", size = 4797393, upload-time = "2026-02-06T09:56:17.882Z" }, + { url = "https://files.pythonhosted.org/packages/29/f2/b56e43e3c968bfe822fa6ce5bca10d5c723aa40875b48791ce1029bb78c7/grpcio-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:e87cbc002b6f440482b3519e36e1313eb5443e9e9e73d6a52d43bd2004fcfd8e", size = 5920591, upload-time = "2026-02-06T09:56:20.758Z" }, + { url = "https://files.pythonhosted.org/packages/5d/81/1f3b65bd30c334167bfa8b0d23300a44e2725ce39bba5b76a2460d85f745/grpcio-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:c41bc64626db62e72afec66b0c8a0da76491510015417c127bfc53b2fe6d7f7f", size = 11813685, upload-time = "2026-02-06T09:56:24.315Z" }, + { url = "https://files.pythonhosted.org/packages/0e/1c/bbe2f8216a5bd3036119c544d63c2e592bdf4a8ec6e4a1867592f4586b26/grpcio-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8dfffba826efcf366b1e3ccc37e67afe676f290e13a3b48d31a46739f80a8724", size = 6487803, upload-time = "2026-02-06T09:56:27.367Z" }, + { url = "https://files.pythonhosted.org/packages/16/5c/a6b2419723ea7ddce6308259a55e8e7593d88464ce8db9f4aa857aba96fa/grpcio-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74be1268d1439eaaf552c698cdb11cd594f0c49295ae6bb72c34ee31abbe611b", size = 7173206, upload-time = "2026-02-06T09:56:29.876Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/1e/b8801345629a415ea7e26c83d75eb5dbe91b07ffe5210cc517348a8d4218/grpcio-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be63c88b32e6c0f1429f1398ca5c09bc64b0d80950c8bb7807d7d7fb36fb84c7", size = 6693826, upload-time = "2026-02-06T09:56:32.305Z" }, + { url = "https://files.pythonhosted.org/packages/34/84/0de28eac0377742679a510784f049738a80424b17287739fc47d63c2439e/grpcio-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3c586ac70e855c721bda8f548d38c3ca66ac791dc49b66a8281a1f99db85e452", size = 7277897, upload-time = "2026-02-06T09:56:34.915Z" }, + { url = "https://files.pythonhosted.org/packages/ca/9c/ad8685cfe20559a9edb66f735afdcb2b7d3de69b13666fdfc542e1916ebd/grpcio-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:35eb275bf1751d2ffbd8f57cdbc46058e857cf3971041521b78b7db94bdaf127", size = 8252404, upload-time = "2026-02-06T09:56:37.553Z" }, + { url = "https://files.pythonhosted.org/packages/3c/05/33a7a4985586f27e1de4803887c417ec7ced145ebd069bc38a9607059e2b/grpcio-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:207db540302c884b8848036b80db352a832b99dfdf41db1eb554c2c2c7800f65", size = 7696837, upload-time = "2026-02-06T09:56:40.173Z" }, + { url = "https://files.pythonhosted.org/packages/73/77/7382241caf88729b106e49e7d18e3116216c778e6a7e833826eb96de22f7/grpcio-1.78.0-cp314-cp314-win32.whl", hash = "sha256:57bab6deef2f4f1ca76cc04565df38dc5713ae6c17de690721bdf30cb1e0545c", size = 4142439, upload-time = "2026-02-06T09:56:43.258Z" }, + { url = "https://files.pythonhosted.org/packages/48/b2/b096ccce418882fbfda4f7496f9357aaa9a5af1896a9a7f60d9f2b275a06/grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb", size = 4929852, upload-time = "2026-02-06T09:56:45.885Z" }, ] [[package]] @@ -2524,54 +3082,16 @@ wheels = [ ] [[package]] -name = "h5py" -version = "3.15.1" +name = "h2" +version = "4.3.0" source 
= { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "hpack" }, + { name = "hyperframe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4d/6a/0d79de0b025aa85dc8864de8e97659c94cf3d23148394a954dc5ca52f8c8/h5py-3.15.1.tar.gz", hash = "sha256:c86e3ed45c4473564de55aa83b6fc9e5ead86578773dfbd93047380042e26b69", size = 426236, upload-time = "2025-10-16T10:35:27.404Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/30/8fa61698b438dd751fa46a359792e801191dadab560d0a5f1c709443ef8e/h5py-3.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67e59f6c2f19a32973a40f43d9a088ae324fe228c8366e25ebc57ceebf093a6b", size = 3414477, upload-time = "2025-10-16T10:33:24.201Z" }, - { url = "https://files.pythonhosted.org/packages/16/16/db2f63302937337c4e9e51d97a5984b769bdb7488e3d37632a6ac297f8ef/h5py-3.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e2f471688402c3404fa4e13466e373e622fd4b74b47b56cfdff7cc688209422", size = 2850298, upload-time = "2025-10-16T10:33:27.747Z" }, - { url = "https://files.pythonhosted.org/packages/fc/2e/f1bb7de9b05112bfd14d5206090f0f92f1e75bbb412fbec5d4653c3d44dd/h5py-3.15.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c45802bcb711e128a6839cb6c01e9ac648dc55df045c9542a675c771f15c8d5", size = 4523605, upload-time = "2025-10-16T10:33:31.168Z" }, - { url = "https://files.pythonhosted.org/packages/05/8a/63f4b08f3628171ce8da1a04681a65ee7ac338fde3cb3e9e3c9f7818e4da/h5py-3.15.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64ce3f6470adb87c06e3a8dd1b90e973699f1759ad79bfa70c230939bff356c9", size = 4735346, upload-time = "2025-10-16T10:33:34.759Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/48/f16d12d9de22277605bcc11c0dcab5e35f06a54be4798faa2636b5d44b3c/h5py-3.15.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4411c1867b9899a25e983fff56d820a66f52ac326bbe10c7cdf7d832c9dcd883", size = 4175305, upload-time = "2025-10-16T10:33:38.83Z" }, - { url = "https://files.pythonhosted.org/packages/d6/2f/47cdbff65b2ce53c27458c6df63a232d7bb1644b97df37b2342442342c84/h5py-3.15.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2cbc4104d3d4aca9d6db8c0c694555e255805bfeacf9eb1349bda871e26cacbe", size = 4653602, upload-time = "2025-10-16T10:33:42.188Z" }, - { url = "https://files.pythonhosted.org/packages/c3/28/dc08de359c2f43a67baa529cb70d7f9599848750031975eed92d6ae78e1d/h5py-3.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:01f55111ca516f5568ae7a7fc8247dfce607de331b4467ee8a9a6ed14e5422c7", size = 2873601, upload-time = "2025-10-16T10:33:45.323Z" }, - { url = "https://files.pythonhosted.org/packages/41/fd/8349b48b15b47768042cff06ad6e1c229f0a4bd89225bf6b6894fea27e6d/h5py-3.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aaa330bcbf2830150c50897ea5dcbed30b5b6d56897289846ac5b9e529ec243", size = 3434135, upload-time = "2025-10-16T10:33:47.954Z" }, - { url = "https://files.pythonhosted.org/packages/c1/b0/1c628e26a0b95858f54aba17e1599e7f6cd241727596cc2580b72cb0a9bf/h5py-3.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c970fb80001fffabb0109eaf95116c8e7c0d3ca2de854e0901e8a04c1f098509", size = 2870958, upload-time = "2025-10-16T10:33:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/f9/e3/c255cafc9b85e6ea04e2ad1bba1416baa1d7f57fc98a214be1144087690c/h5py-3.15.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80e5bb5b9508d5d9da09f81fd00abbb3f85da8143e56b1585d59bc8ceb1dba8b", size = 4504770, upload-time = "2025-10-16T10:33:54.357Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/23/4ab1108e87851ccc69694b03b817d92e142966a6c4abd99e17db77f2c066/h5py-3.15.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b849ba619a066196169763c33f9f0f02e381156d61c03e000bb0100f9950faf", size = 4700329, upload-time = "2025-10-16T10:33:57.616Z" }, - { url = "https://files.pythonhosted.org/packages/a4/e4/932a3a8516e4e475b90969bf250b1924dbe3612a02b897e426613aed68f4/h5py-3.15.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e7f6c841efd4e6e5b7e82222eaf90819927b6d256ab0f3aca29675601f654f3c", size = 4152456, upload-time = "2025-10-16T10:34:00.843Z" }, - { url = "https://files.pythonhosted.org/packages/2a/0a/f74d589883b13737021b2049ac796328f188dbb60c2ed35b101f5b95a3fc/h5py-3.15.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ca8a3a22458956ee7b40d8e39c9a9dc01f82933e4c030c964f8b875592f4d831", size = 4617295, upload-time = "2025-10-16T10:34:04.154Z" }, - { url = "https://files.pythonhosted.org/packages/23/95/499b4e56452ef8b6c95a271af0dde08dac4ddb70515a75f346d4f400579b/h5py-3.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:550e51131376889656feec4aff2170efc054a7fe79eb1da3bb92e1625d1ac878", size = 2882129, upload-time = "2025-10-16T10:34:06.886Z" }, - { url = "https://files.pythonhosted.org/packages/ce/bb/cfcc70b8a42222ba3ad4478bcef1791181ea908e2adbd7d53c66395edad5/h5py-3.15.1-cp311-cp311-win_arm64.whl", hash = "sha256:b39239947cb36a819147fc19e86b618dcb0953d1cd969f5ed71fc0de60392427", size = 2477121, upload-time = "2025-10-16T10:34:09.579Z" }, - { url = "https://files.pythonhosted.org/packages/62/b8/c0d9aa013ecfa8b7057946c080c0c07f6fa41e231d2e9bd306a2f8110bdc/h5py-3.15.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:316dd0f119734f324ca7ed10b5627a2de4ea42cc4dfbcedbee026aaa361c238c", size = 3399089, upload-time = "2025-10-16T10:34:12.135Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/5e/3c6f6e0430813c7aefe784d00c6711166f46225f5d229546eb53032c3707/h5py-3.15.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51469890e58e85d5242e43aab29f5e9c7e526b951caab354f3ded4ac88e7b76", size = 2847803, upload-time = "2025-10-16T10:34:14.564Z" }, - { url = "https://files.pythonhosted.org/packages/00/69/ba36273b888a4a48d78f9268d2aee05787e4438557450a8442946ab8f3ec/h5py-3.15.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a33bfd5dfcea037196f7778534b1ff7e36a7f40a89e648c8f2967292eb6898e", size = 4914884, upload-time = "2025-10-16T10:34:18.452Z" }, - { url = "https://files.pythonhosted.org/packages/3a/30/d1c94066343a98bb2cea40120873193a4fed68c4ad7f8935c11caf74c681/h5py-3.15.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:25c8843fec43b2cc368aa15afa1cdf83fc5e17b1c4e10cd3771ef6c39b72e5ce", size = 5109965, upload-time = "2025-10-16T10:34:21.853Z" }, - { url = "https://files.pythonhosted.org/packages/81/3d/d28172116eafc3bc9f5991b3cb3fd2c8a95f5984f50880adfdf991de9087/h5py-3.15.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a308fd8681a864c04423c0324527237a0484e2611e3441f8089fd00ed56a8171", size = 4561870, upload-time = "2025-10-16T10:34:26.69Z" }, - { url = "https://files.pythonhosted.org/packages/a5/83/393a7226024238b0f51965a7156004eaae1fcf84aa4bfecf7e582676271b/h5py-3.15.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f4a016df3f4a8a14d573b496e4d1964deb380e26031fc85fb40e417e9131888a", size = 5037161, upload-time = "2025-10-16T10:34:30.383Z" }, - { url = "https://files.pythonhosted.org/packages/cf/51/329e7436bf87ca6b0fe06dd0a3795c34bebe4ed8d6c44450a20565d57832/h5py-3.15.1-cp312-cp312-win_amd64.whl", hash = "sha256:59b25cf02411bf12e14f803fef0b80886444c7fe21a5ad17c6a28d3f08098a1e", size = 2874165, upload-time = "2025-10-16T10:34:33.461Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/a8/2d02b10a66747c54446e932171dd89b8b4126c0111b440e6bc05a7c852ec/h5py-3.15.1-cp312-cp312-win_arm64.whl", hash = "sha256:61d5a58a9851e01ee61c932bbbb1c98fe20aba0a5674776600fb9a361c0aa652", size = 2458214, upload-time = "2025-10-16T10:34:35.733Z" }, - { url = "https://files.pythonhosted.org/packages/88/b3/40207e0192415cbff7ea1d37b9f24b33f6d38a5a2f5d18a678de78f967ae/h5py-3.15.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8440fd8bee9500c235ecb7aa1917a0389a2adb80c209fa1cc485bd70e0d94a5", size = 3376511, upload-time = "2025-10-16T10:34:38.596Z" }, - { url = "https://files.pythonhosted.org/packages/31/96/ba99a003c763998035b0de4c299598125df5fc6c9ccf834f152ddd60e0fb/h5py-3.15.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ab2219dbc6fcdb6932f76b548e2b16f34a1f52b7666e998157a4dfc02e2c4123", size = 2826143, upload-time = "2025-10-16T10:34:41.342Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c2/fc6375d07ea3962df7afad7d863fe4bde18bb88530678c20d4c90c18de1d/h5py-3.15.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8cb02c3a96255149ed3ac811eeea25b655d959c6dd5ce702c9a95ff11859eb5", size = 4908316, upload-time = "2025-10-16T10:34:44.619Z" }, - { url = "https://files.pythonhosted.org/packages/d9/69/4402ea66272dacc10b298cca18ed73e1c0791ff2ae9ed218d3859f9698ac/h5py-3.15.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:121b2b7a4c1915d63737483b7bff14ef253020f617c2fb2811f67a4bed9ac5e8", size = 5103710, upload-time = "2025-10-16T10:34:48.639Z" }, - { url = "https://files.pythonhosted.org/packages/e0/f6/11f1e2432d57d71322c02a97a5567829a75f223a8c821764a0e71a65cde8/h5py-3.15.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59b0d63b318bf3cc06687def2b45afd75926bbc006f7b8cd2b1a231299fc8599", size = 4556042, upload-time = "2025-10-16T10:34:51.841Z" }, - { url = 
"https://files.pythonhosted.org/packages/18/88/3eda3ef16bfe7a7dbc3d8d6836bbaa7986feb5ff091395e140dc13927bcc/h5py-3.15.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e02fe77a03f652500d8bff288cbf3675f742fc0411f5a628fa37116507dc7cc0", size = 5030639, upload-time = "2025-10-16T10:34:55.257Z" }, - { url = "https://files.pythonhosted.org/packages/e5/ea/fbb258a98863f99befb10ed727152b4ae659f322e1d9c0576f8a62754e81/h5py-3.15.1-cp313-cp313-win_amd64.whl", hash = "sha256:dea78b092fd80a083563ed79a3171258d4a4d307492e7cf8b2313d464c82ba52", size = 2864363, upload-time = "2025-10-16T10:34:58.099Z" }, - { url = "https://files.pythonhosted.org/packages/5d/c9/35021cc9cd2b2915a7da3026e3d77a05bed1144a414ff840953b33937fb9/h5py-3.15.1-cp313-cp313-win_arm64.whl", hash = "sha256:c256254a8a81e2bddc0d376e23e2a6d2dc8a1e8a2261835ed8c1281a0744cd97", size = 2449570, upload-time = "2025-10-16T10:35:00.473Z" }, - { url = "https://files.pythonhosted.org/packages/a0/2c/926eba1514e4d2e47d0e9eb16c784e717d8b066398ccfca9b283917b1bfb/h5py-3.15.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:5f4fb0567eb8517c3ecd6b3c02c4f4e9da220c8932604960fd04e24ee1254763", size = 3380368, upload-time = "2025-10-16T10:35:03.117Z" }, - { url = "https://files.pythonhosted.org/packages/65/4b/d715ed454d3baa5f6ae1d30b7eca4c7a1c1084f6a2edead9e801a1541d62/h5py-3.15.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:954e480433e82d3872503104f9b285d369048c3a788b2b1a00e53d1c47c98dd2", size = 2833793, upload-time = "2025-10-16T10:35:05.623Z" }, - { url = "https://files.pythonhosted.org/packages/ef/d4/ef386c28e4579314610a8bffebbee3b69295b0237bc967340b7c653c6c10/h5py-3.15.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fd125c131889ebbef0849f4a0e29cf363b48aba42f228d08b4079913b576bb3a", size = 4903199, upload-time = "2025-10-16T10:35:08.972Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/5d/65c619e195e0b5e54ea5a95c1bb600c8ff8715e0d09676e4cce56d89f492/h5py-3.15.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28a20e1a4082a479b3d7db2169f3a5034af010b90842e75ebbf2e9e49eb4183e", size = 5097224, upload-time = "2025-10-16T10:35:12.808Z" }, - { url = "https://files.pythonhosted.org/packages/30/30/5273218400bf2da01609e1292f562c94b461fcb73c7a9e27fdadd43abc0a/h5py-3.15.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa8df5267f545b4946df8ca0d93d23382191018e4cda2deda4c2cedf9a010e13", size = 4551207, upload-time = "2025-10-16T10:35:16.24Z" }, - { url = "https://files.pythonhosted.org/packages/d3/39/a7ef948ddf4d1c556b0b2b9559534777bccc318543b3f5a1efdf6b556c9c/h5py-3.15.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99d374a21f7321a4c6ab327c4ab23bd925ad69821aeb53a1e75dd809d19f67fa", size = 5025426, upload-time = "2025-10-16T10:35:19.831Z" }, - { url = "https://files.pythonhosted.org/packages/b6/d8/7368679b8df6925b8415f9dcc9ab1dab01ddc384d2b2c24aac9191bd9ceb/h5py-3.15.1-cp314-cp314-win_amd64.whl", hash = "sha256:9c73d1d7cdb97d5b17ae385153472ce118bed607e43be11e9a9deefaa54e0734", size = 2865704, upload-time = "2025-10-16T10:35:22.658Z" }, - { url = "https://files.pythonhosted.org/packages/d3/b7/4a806f85d62c20157e62e58e03b27513dc9c55499768530acc4f4c5ce4be/h5py-3.15.1-cp314-cp314-win_arm64.whl", hash = "sha256:a6d8c5a05a76aca9a494b4c53ce8a9c29023b7f64f625c6ce1841e92a362ccdf", size = 2465544, upload-time = "2025-10-16T10:35:25.695Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = 
"sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" }, ] [[package]] @@ -2603,6 +3123,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/44/870d44b30e1dcfb6a65932e3e1506c103a8a5aea9103c337e7a53180322c/hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69", size = 2905735, upload-time = "2025-10-24T19:04:35.928Z" }, ] +[[package]] +name = "hpack" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" }, +] + [[package]] name = "httpcore" version = "1.0.9" @@ -2674,6 +3203,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] +[package.optional-dependencies] +http2 = [ + { name = "h2" }, +] + [[package]] name = "httpx-sse" version = "0.4.3" @@ -2685,7 +3219,7 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "0.36.0" +version = "0.36.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -2697,21 +3231,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/98/63/4910c5fa9128fdadf6a9c5ac138e8b1b6cee4ca44bf7915bbfbce4e355ee/huggingface_hub-0.36.0.tar.gz", hash = "sha256:47b3f0e2539c39bf5cde015d63b72ec49baff67b6931c3d97f3f84532e2b8d25", size = 463358, upload-time = "2025-10-23T12:12:01.413Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/bd/1a875e0d592d447cbc02805fd3fe0f497714d6a2583f59d14fa9ebad96eb/huggingface_hub-0.36.0-py3-none-any.whl", hash = "sha256:7bcc9ad17d5b3f07b57c78e79d527102d08313caa278a641993acddcb894548d", size = 566094, upload-time = "2025-10-23T12:11:59.557Z" }, -] - -[[package]] -name = "humanfriendly" -version = "10.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyreadline3", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702, upload-time = "2021-09-17T21:40:43.31Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/b7/8cb61d2eece5fb05a83271da168186721c450eb74e3c31f7ef3169fa475b/huggingface_hub-0.36.2.tar.gz", hash = "sha256:1934304d2fb224f8afa3b87007d58501acfda9215b334eed53072dd5e815ff7a", size = 649782, upload-time = "2026-02-06T09:24:13.098Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, + { url = "https://files.pythonhosted.org/packages/a8/af/48ac8483240de756d2438c380746e7130d1c6f75802ef22f3c6d49982787/huggingface_hub-0.36.2-py3-none-any.whl", hash = "sha256:48f0c8eac16145dfce371e9d2d7772854a4f591bcb56c9cf548accf531d54270", size = 566395, upload-time = "2026-02-06T09:24:11.133Z" }, ] 
[[package]] @@ -2737,13 +3259,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c6/50/e0edd38dcd63fb26a8547f13d28f7a008bc4a3fd4eb4ff030673f22ad41a/hydra_core-1.3.2-py3-none-any.whl", hash = "sha256:fa0238a9e31df3373b35b0bfb672c34cc92718d21f81311d8996a16de1141d8b", size = 154547, upload-time = "2023-02-23T18:33:40.801Z" }, ] +[[package]] +name = "hyperframe" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, +] + [[package]] name = "identify" -version = "2.6.15" +version = "2.6.16" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size 
= 99183, upload-time = "2025-10-02T17:43:39.137Z" }, + { url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" }, ] [[package]] @@ -2764,20 +3295,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9c/1f/19ebc343cc71a7ffa78f17018535adc5cbdd87afb31d7c34874680148b32/ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748", size = 12314, upload-time = "2022-06-15T21:40:25.756Z" }, ] -[[package]] -name = "imageio" -version = "2.37.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "pillow" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a3/6f/606be632e37bf8d05b253e8626c2291d74c691ddc7bcdf7d6aaf33b32f6a/imageio-2.37.2.tar.gz", hash = "sha256:0212ef2727ac9caa5ca4b2c75ae89454312f440a756fcfc8ef1993e718f50f8a", size = 389600, upload-time = "2025-11-04T14:29:39.898Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/fe/301e0936b79bcab4cacc7548bf2853fc28dced0a578bab1f7ef53c9aa75b/imageio-2.37.2-py3-none-any.whl", hash = "sha256:ad9adfb20335d718c03de457358ed69f141021a333c40a53e57273d8a5bd0b9b", size = 317646, upload-time = "2025-11-04T14:29:37.948Z" }, -] - [[package]] name = "importlib-metadata" version = "8.7.1" @@ -2821,14 +3338,14 @@ sdist = { url = "https://files.pythonhosted.org/packages/72/73/b3d451dfc523756cf [[package]] name = "ipykernel" -version = "7.1.0" +version = "7.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ 
{ name = "appnope", marker = "sys_platform == 'darwin'" }, { name = "comm" }, { name = "debugpy" }, { name = "ipython", version = "8.38.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "ipython", version = "9.9.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "ipython", version = "9.10.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "jupyter-client" }, { name = "jupyter-core" }, { name = "matplotlib-inline" }, @@ -2839,9 +3356,9 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/a4/4948be6eb88628505b83a1f2f40d90254cab66abf2043b3c40fa07dfce0f/ipykernel-7.1.0.tar.gz", hash = "sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db", size = 174579, upload-time = "2025-10-27T09:46:39.471Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/8d/b68b728e2d06b9e0051019640a40a9eb7a88fcd82c2e1b5ce70bef5ff044/ipykernel-7.2.0.tar.gz", hash = "sha256:18ed160b6dee2cbb16e5f3575858bc19d8f1fe6046a9a680c708494ce31d909e", size = 176046, upload-time = "2026-02-06T16:43:27.403Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/17/20c2552266728ceba271967b87919664ecc0e33efca29c3efc6baf88c5f9/ipykernel-7.1.0-py3-none-any.whl", hash = "sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c", size = 117968, upload-time = "2025-10-27T09:46:37.805Z" }, + { url = "https://files.pythonhosted.org/packages/82/b9/e73d5d9f405cba7706c539aa8b311b49d4c2f3d698d9c12f815231169c71/ipykernel-7.2.0-py3-none-any.whl", hash = "sha256:3bbd4420d2b3cc105cbdf3756bfc04500b1e52f090a90716851f3916c62e1661", size = 118788, upload-time = "2026-02-06T16:43:25.149Z" }, ] [[package]] @@ -2874,15 +3391,19 @@ wheels = [ [[package]] name = "ipython" -version = "9.9.0" +version = "9.10.0" source = { registry = 
"https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -2903,9 +3424,9 @@ dependencies = [ { name = "traitlets", marker = "python_full_version >= 
'3.11'" }, { name = "typing-extensions", marker = "python_full_version == '3.11.*'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/46/dd/fb08d22ec0c27e73c8bc8f71810709870d51cadaf27b7ddd3f011236c100/ipython-9.9.0.tar.gz", hash = "sha256:48fbed1b2de5e2c7177eefa144aba7fcb82dac514f09b57e2ac9da34ddb54220", size = 4425043, upload-time = "2026-01-05T12:36:46.233Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/60/2111715ea11f39b1535bed6024b7dec7918b71e5e5d30855a5b503056b50/ipython-9.10.0.tar.gz", hash = "sha256:cd9e656be97618a0676d058134cd44e6dc7012c0e5cb36a9ce96a8c904adaf77", size = 4426526, upload-time = "2026-02-02T10:00:33.594Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/92/162cfaee4ccf370465c5af1ce36a9eacec1becb552f2033bb3584e6f640a/ipython-9.9.0-py3-none-any.whl", hash = "sha256:b457fe9165df2b84e8ec909a97abcf2ed88f565970efba16b1f7229c283d252b", size = 621431, upload-time = "2026-01-05T12:36:44.669Z" }, + { url = "https://files.pythonhosted.org/packages/3d/aa/898dec789a05731cd5a9f50605b7b44a72bd198fd0d4528e11fc610177cc/ipython-9.10.0-py3-none-any.whl", hash = "sha256:c6ab68cc23bba8c7e18e9b932797014cc61ea7fd6f19de180ab9ba73e65ee58d", size = 622774, upload-time = "2026-02-02T10:00:31.503Z" }, ] [[package]] @@ -2920,6 +3441,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, ] +[[package]] +name = "isort" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/63/53/4f3c058e3bace40282876f9b553343376ee687f3c35a525dc79dbd450f88/isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187", size = 805049, upload-time = 
"2025-10-11T13:30:59.107Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/ed/e3705d6d02b4f7aea715a353c8ce193efd0b5db13e204df895d38734c244/isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1", size = 94672, upload-time = "2025-10-11T13:30:57.665Z" }, +] + [[package]] name = "itsdangerous" version = "2.2.0" @@ -2953,15 +3483,19 @@ wheels = [ [[package]] name = "jax" -version = "0.8.2" +version = "0.9.0.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 
'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -2970,15 +3504,15 @@ resolution-markers = [ "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", ] dependencies = [ - { name = "jaxlib", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jaxlib", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "ml-dtypes", marker = "python_full_version >= '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "opt-einsum", marker = "python_full_version >= '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e6/25/5efb46e5492076622d9150ed394da97ef9aad393aa52f7dd7e980f836e1f/jax-0.8.2.tar.gz", hash = "sha256:1a685ded06a8223a7b52e45e668e406049dbbead02873f2b5a4d881ba7b421ae", size = 2505776, upload-time = "2025-12-18T18:41:59.274Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/40/f85d1feadd8f793fc1bfab726272523ef34b27302b55861ea872ec774019/jax-0.9.0.1.tar.gz", hash = 
"sha256:e395253449d74354fa813ff9e245acb6e42287431d8a01ff33d92e9ee57d36bd", size = 2534795, upload-time = "2026-02-05T18:47:33.088Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/f7/ae4ecf183d9693cd5fcce7ee063c5e54f173b66dc80a8a79951861e1b557/jax-0.8.2-py3-none-any.whl", hash = "sha256:d0478c5dc74406441efcd25731166a65ee782f13c352fa72dc7d734351909355", size = 2925344, upload-time = "2025-12-18T18:39:38.645Z" }, + { url = "https://files.pythonhosted.org/packages/57/1e/63ac22ec535e08129e16cb71b7eeeb8816c01d627ea1bc9105e925a71da0/jax-0.9.0.1-py3-none-any.whl", hash = "sha256:3baeaec6dc853394c272eb38a35ffba1972d67cf55d07a76bdb913bcd867e2ca", size = 2955477, upload-time = "2026-02-05T18:45:22.885Z" }, ] [[package]] @@ -3019,15 +3553,19 @@ wheels = [ [[package]] name = "jaxlib" -version = "0.8.2" +version = "0.9.0.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= 
'3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -3038,31 +3576,31 @@ resolution-markers = [ dependencies = [ { name = "ml-dtypes", marker = "python_full_version >= '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/87/0a44b1a5c558e6d8e4fd796d4f9efe5c8cac2b3013ab7349968c65931fa4/jaxlib-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:490bf0cb029c73c65c9431124b86cdc95082dbc1fb76fc549d24d75da33e5454", size = 55929353, upload-time = "2025-12-18T18:40:35.844Z" }, - { url = "https://files.pythonhosted.org/packages/d1/d2/b37c86ee35d9ea7ee67c81e9166b31e18aa3784e1b96e8a60f52bbb8c9c0/jaxlib-0.8.2-cp311-cp311-manylinux_2_27_aarch64.whl", hash = "sha256:bb89be452b1b808d3f88fc01c415b364a260be4cc7ac120c038009f6150a32dc", size = 74548611, upload-time = "2025-12-18T18:40:39.67Z" }, - { url = "https://files.pythonhosted.org/packages/65/7d/9bb1cd620d8093098203b17d227a902939afec00da1c63cb719a9fe89525/jaxlib-0.8.2-cp311-cp311-manylinux_2_27_x86_64.whl", hash = 
"sha256:ccf77da917a20935247c990691decfcbdd06c25ef0ac94d914a04aadb22f714c", size = 80127195, upload-time = "2025-12-18T18:40:43.795Z" }, - { url = "https://files.pythonhosted.org/packages/e7/f1/56d830c7fcf1736cbfb11d8cf79c1932f826f319d2467becb02933df3ba9/jaxlib-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:dffc22b5b732b9556d92c918b251c61bcc046617c4dbb51e1f7a656587fddffb", size = 60338464, upload-time = "2025-12-18T18:40:47.427Z" }, - { url = "https://files.pythonhosted.org/packages/c1/77/18ac0ac08c76bf12ed47b0c2d7d35f3fc3d065bd105b36937901eab1455c/jaxlib-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:023de6f3f56da2af7037970996500586331fdb50b530ecbb54b9666da633bd00", size = 55938204, upload-time = "2025-12-18T18:40:50.859Z" }, - { url = "https://files.pythonhosted.org/packages/33/c5/fa809591cbddc0d7bbef9c95962a0b521ae4a168b0ff375cadf37840b97d/jaxlib-0.8.2-cp312-cp312-manylinux_2_27_aarch64.whl", hash = "sha256:3b16e50c5b730c9dd0a49e55f1acfaa722b00b1af0522a591558dcc0464252f2", size = 74550881, upload-time = "2025-12-18T18:40:54.491Z" }, - { url = "https://files.pythonhosted.org/packages/e0/bf/e386c4bbfda3fb326a01594cc46c8ac90cdeeeacee4c553d9e3848f75893/jaxlib-0.8.2-cp312-cp312-manylinux_2_27_x86_64.whl", hash = "sha256:2b9789bd08f8b0cc5a5c12ae896fe432d5942e32e417091b8b5a96a9a6fd5cf1", size = 80135127, upload-time = "2025-12-18T18:40:58.808Z" }, - { url = "https://files.pythonhosted.org/packages/bf/4c/0c90b1e2b47fdf34cd352a01c42c2628d115a6f015d4a3230060bb0d97af/jaxlib-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:f472cc72e3058e50b5f0230b236d5a1183bf6c3d5423d2a52eff07bcf34908de", size = 60361039, upload-time = "2025-12-18T18:41:02.367Z" }, - { url = "https://files.pythonhosted.org/packages/c5/22/c0ec75e43a13b2457d78d509f49b49a57fa302ffced4f4a2778e428cb0a6/jaxlib-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4d006db96be020c8165212a1216372f8acac4ff4f8fb067743d694ef2b301ace", size = 55939058, upload-time = "2025-12-18T18:41:06.199Z" }, - { url = 
"https://files.pythonhosted.org/packages/c5/e2/2d3eff7a49ca37ef6929bf67b8ab4c933ab53a115060e60c239702028568/jaxlib-0.8.2-cp313-cp313-manylinux_2_27_aarch64.whl", hash = "sha256:7c304f3a016965b9d1f5239a8a0399a73925f5604fe914c5ca66ecf734bf6422", size = 74550207, upload-time = "2025-12-18T18:41:09.79Z" }, - { url = "https://files.pythonhosted.org/packages/6b/e0/91e5762a7ddb6351b07c742ca407cd28e26043d6945d6228b6c1b0881a45/jaxlib-0.8.2-cp313-cp313-manylinux_2_27_x86_64.whl", hash = "sha256:1bfbcf6c3de221784fa4cdb6765a09d71cb4298b15626b3d0409b3dfcd8a8667", size = 80133534, upload-time = "2025-12-18T18:41:14.193Z" }, - { url = "https://files.pythonhosted.org/packages/85/68/25b38673b07a808616ce7b6efb3eed491f983f3373a09cbbd03f67178563/jaxlib-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:f205e91c3a152a2a76c0bc59a6a2de03e87ec261b91e8812922777185e7b08f5", size = 60358239, upload-time = "2025-12-18T18:41:17.661Z" }, - { url = "https://files.pythonhosted.org/packages/bc/da/753c4b16297576e33cb41bf605d27fefd016867d365861c43c505afd1579/jaxlib-0.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f28edac8c226fc07fa3e8af6f9defede8ac2c307429e3291edce8739d39becc9", size = 56035453, upload-time = "2025-12-18T18:41:21.004Z" }, - { url = "https://files.pythonhosted.org/packages/4c/3d/891f967b01a60de1dbcb8c40b6fee28cc39c670c27c919756c41d8c89ebe/jaxlib-0.8.2-cp313-cp313t-manylinux_2_27_aarch64.whl", hash = "sha256:7da8127557c786264049ae55460d1b8d04cc3cdf0403a087f2fc1e6d313ec722", size = 74661142, upload-time = "2025-12-18T18:41:24.454Z" }, - { url = "https://files.pythonhosted.org/packages/e2/5c/3f1476cd6cbc0e2aa661cb750489739aeda500473d91dc79837b5bc9247f/jaxlib-0.8.2-cp313-cp313t-manylinux_2_27_x86_64.whl", hash = "sha256:28eec1a4e0639a0d8702cea3cb70dd3663053dbfa344452994ea48dc6ceadaa5", size = 80238500, upload-time = "2025-12-18T18:41:28.647Z" }, - { url = 
"https://files.pythonhosted.org/packages/d8/9d/dca93d916bf8664d7a2bb73ea3d219028dabbe382c31774348963287356a/jaxlib-0.8.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:beffb004e7eeb5c9afb24439e2b2cf45a4ee3e3e8adf45e355edf2af62acf8b8", size = 55943240, upload-time = "2025-12-18T18:41:32.095Z" }, - { url = "https://files.pythonhosted.org/packages/f0/47/7407d010db7f5ec1c25a8b8d379defc0c8b4daaaa829c88355e03c0ad314/jaxlib-0.8.2-cp314-cp314-manylinux_2_27_aarch64.whl", hash = "sha256:68108dff0de74adc468016be9a19f80efe48c660c0d5a122287094b44b092afc", size = 74560018, upload-time = "2025-12-18T18:41:36.154Z" }, - { url = "https://files.pythonhosted.org/packages/5e/27/2e6032727e41ce74914277478021140947af59127d68aa9e6f3776b428fd/jaxlib-0.8.2-cp314-cp314-manylinux_2_27_x86_64.whl", hash = "sha256:e6a97dfb0232eed9a2bb6e3828e4f682dbac1a7fea840bfda574cae2dbf5faf9", size = 80156235, upload-time = "2025-12-18T18:41:40.227Z" }, - { url = "https://files.pythonhosted.org/packages/b3/8c/af5a00b07a446414edf6b84a7397eab02cf01ba44b6ae1fce7798ce4c127/jaxlib-0.8.2-cp314-cp314-win_amd64.whl", hash = "sha256:05b958f497e49824c432e734bb059723b7dfe69e2ad696a9f9c8ad82fff7c3f8", size = 62673493, upload-time = "2025-12-18T18:41:43.991Z" }, - { url = "https://files.pythonhosted.org/packages/4d/eb/ad70fe97fda465d536625bef39ee381a7f8fed1f1bf0bc296510bac32ec5/jaxlib-0.8.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964626f581beab31ee6826b228fcc2ec5181b05cecf94a528dff97921c145dbc", size = 56037334, upload-time = "2025-12-18T18:41:47.407Z" }, - { url = "https://files.pythonhosted.org/packages/34/97/0741440c66a49ec3702f6c28a5608c7543243b1728c3f465505ed5bfe7d2/jaxlib-0.8.2-cp314-cp314t-manylinux_2_27_aarch64.whl", hash = "sha256:a397ea7dcb37d689ce79173eeb99b2f1347637a36be9a27f20ae6848bfc58bfc", size = 74661591, upload-time = "2025-12-18T18:41:51.285Z" }, - { url = 
"https://files.pythonhosted.org/packages/7c/c4/388797324c201830ac414562eb6697fa38837f40852bdc4d0f464d65889c/jaxlib-0.8.2-cp314-cp314t-manylinux_2_27_x86_64.whl", hash = "sha256:aa8701b6356f098e8452c3cec762fb5f706fcb8f67ffd65964f63982479aa23b", size = 80236629, upload-time = "2025-12-18T18:41:56.05Z" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/fd/040321b0f4303ec7b558d69488c6130b1697c33d88dab0a0d2ccd2e0817c/jaxlib-0.9.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff2c550dab210278ed3a3b96454b19108a02e0795625be56dca5a181c9833c9", size = 56092920, upload-time = "2026-02-05T18:46:20.873Z" }, + { url = "https://files.pythonhosted.org/packages/e9/76/a558cd5e2ac8a2c16fe7f7e429dd5749cef48bc1a89941bb5b72bd3d7de3/jaxlib-0.9.0.1-cp311-cp311-manylinux_2_27_aarch64.whl", hash = "sha256:c4ac3cfd7aaacc37f37a6a332ee009dee39e3b5081bb4b473f410583436be553", size = 74767780, upload-time = "2026-02-05T18:46:23.917Z" }, + { url = "https://files.pythonhosted.org/packages/87/49/f72fb26e2feb100fd84d297a17111364b15d5979843f62b7539cd120f9bb/jaxlib-0.9.0.1-cp311-cp311-manylinux_2_27_x86_64.whl", hash = "sha256:dc95ee32ae2bd4ed947ad0218fd6576b50a60ce45b60714d7ff2fd9fa195ed9e", size = 80323754, upload-time = "2026-02-05T18:46:27.405Z" }, + { url = "https://files.pythonhosted.org/packages/55/fc/fa3c07d833a60cfb928f7a727fef25059e2e9af1dbc5d09821ad3a728292/jaxlib-0.9.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ed35e3300caa228c42897d8fbe961d6e03b797717e44eccbd3a788b5ac5c623", size = 60483840, upload-time = "2026-02-05T18:46:30.606Z" }, + { url = "https://files.pythonhosted.org/packages/c8/76/e89fd547f292663d8ce11b3247cd653a220e0d3cedbdbd094f0a8460d735/jaxlib-0.9.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3707bf0a58410da7c053c15ec6efee1fe12e70361416e055e4109b8041f4119b", size = 56104032, upload-time = 
"2026-02-05T18:46:33.606Z" }, + { url = "https://files.pythonhosted.org/packages/c1/92/40d4f0acecb3d6f7078b9eb468e524778a3497d0882c7ecf80509c10b7d3/jaxlib-0.9.0.1-cp312-cp312-manylinux_2_27_aarch64.whl", hash = "sha256:5ea8ebd62165b6f18f89b02fab749e02f5c584c2a1c703f04592d4d803f9e981", size = 74769175, upload-time = "2026-02-05T18:46:36.767Z" }, + { url = "https://files.pythonhosted.org/packages/1d/89/0dd938e6ed65ee994a49351a13aceaea46235ffbc1db5444d9ba3a279814/jaxlib-0.9.0.1-cp312-cp312-manylinux_2_27_x86_64.whl", hash = "sha256:e0e4a0a24ef98ec021b913991fbda09aeb96481b1bc0e5300a0339aad216b226", size = 80339748, upload-time = "2026-02-05T18:46:40.148Z" }, + { url = "https://files.pythonhosted.org/packages/bb/02/265e5ccadd65fee2f0716431573d9e512e5c6aecb23f478a7a92053cf219/jaxlib-0.9.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:08733d1431238a7cf9108338ab7be898b97181cba0eef53f2f9fd3de17d20adb", size = 60508788, upload-time = "2026-02-05T18:46:43.209Z" }, + { url = "https://files.pythonhosted.org/packages/f0/8d/f5a78b4d2a08e2d358e01527a3617af2df67c70231029ce1bdbb814219ff/jaxlib-0.9.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e857cafdd12e18493d96d4a290ed31aa9d99a0dc3056b4b42974c0f342c9bb0c", size = 56103168, upload-time = "2026-02-05T18:46:46.481Z" }, + { url = "https://files.pythonhosted.org/packages/47/c3/fd3a9e2f02c1a04a1a00ff74adb6dd09e34040587bbb1b51b0176151dfa1/jaxlib-0.9.0.1-cp313-cp313-manylinux_2_27_aarch64.whl", hash = "sha256:b73b85f927d9b006f07622d5676092eab916645c4804fed6568da5fb4a541dfc", size = 74768692, upload-time = "2026-02-05T18:46:49.571Z" }, + { url = "https://files.pythonhosted.org/packages/d9/48/34923a6add7dda5fb8f30409a98b638f0dbd2d9571dbbf73db958eaec44a/jaxlib-0.9.0.1-cp313-cp313-manylinux_2_27_x86_64.whl", hash = "sha256:54dd2d34c6bec4f099f888a2f7895069a47c3ba86aaa77b0b78e9c3f9ef948f1", size = 80337646, upload-time = "2026-02-05T18:46:53.299Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/a9/629bed81406902653973d57de5af92842c7da63dfa8fcd84ee490c62ee94/jaxlib-0.9.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:27db7fbc49938f819f2a93fefef0bdc25bd523b499ab4d8a71ed8915c037c0b4", size = 60508306, upload-time = "2026-02-05T18:46:56.441Z" }, + { url = "https://files.pythonhosted.org/packages/45/e3/6943589aaa58d9934838e00c6149dd1fc81e0c8555e9fcc9f527648faf5c/jaxlib-0.9.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9312fcfb4c5586802c08bc1b3b2419e48aa2a4cd1356251fe791ad71edc2da2a", size = 56210697, upload-time = "2026-02-05T18:46:59.642Z" }, + { url = "https://files.pythonhosted.org/packages/7e/ff/39479759b71f1d281b77050184759ac76dfd23a3ae75132ef92d168099c5/jaxlib-0.9.0.1-cp313-cp313t-manylinux_2_27_aarch64.whl", hash = "sha256:b536512cf84a0cb031196d6d5233f7093745e87eb416e45ad96fbb764b2befed", size = 74882879, upload-time = "2026-02-05T18:47:02.708Z" }, + { url = "https://files.pythonhosted.org/packages/87/0d/e41eeddd761110d733688d6493defe776440c8f3d114419a8ecaef55601f/jaxlib-0.9.0.1-cp313-cp313t-manylinux_2_27_x86_64.whl", hash = "sha256:c4dc8828bb236532033717061d132906075452556b12d1ff6ccc10e569435dfe", size = 80438424, upload-time = "2026-02-05T18:47:06.437Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ec/54b1251cea5c74a2f0d22106f5d1c7dc9e7b6a000d6a81a88deffa34c6fe/jaxlib-0.9.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:43272e52e5c89dbc4f02c7ccb6ffa5d587a09ac8db5163cb0c43e125b7075129", size = 56101484, upload-time = "2026-02-05T18:47:09.46Z" }, + { url = "https://files.pythonhosted.org/packages/29/ce/91ba780439aa1e6bae964ea641169e8b9c9349c175fcb1a723b96ba54313/jaxlib-0.9.0.1-cp314-cp314-manylinux_2_27_aarch64.whl", hash = "sha256:82348cee1521d6123038c4c3beeafa2076c8f4ae29a233b8abff9d6dc8b44145", size = 74789558, upload-time = "2026-02-05T18:47:12.394Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/9b/3d7baca233c378b01fa445c9f63b260f592249ff69950baf893cea631b10/jaxlib-0.9.0.1-cp314-cp314-manylinux_2_27_x86_64.whl", hash = "sha256:e61e88032eeb31339c72ead9ed60c6153cd2222512624caadea67c350c78432e", size = 80343053, upload-time = "2026-02-05T18:47:16.042Z" }, + { url = "https://files.pythonhosted.org/packages/92/5d/80efe5295133d5114fb7b0f27bdf82bc7a2308356dde6ba77c2afbaa3a36/jaxlib-0.9.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:abd9f127d23705105683448781914f17898b2b6591a051b259e6b947d4dcb93f", size = 62826248, upload-time = "2026-02-05T18:47:19.986Z" }, + { url = "https://files.pythonhosted.org/packages/f9/a9/f72578daa6af9bed9bda75b842c97581b31a577d7b2072daf8ba3d5a8156/jaxlib-0.9.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b01a75fbac8098cc985f6f1690bfb62f98b0785c84199287e0baaae50fa4238", size = 56209722, upload-time = "2026-02-05T18:47:23.193Z" }, + { url = "https://files.pythonhosted.org/packages/95/ea/eefb118305dd5e1b0ad8d942f2bf43616c964d89fe491bec8628173da24d/jaxlib-0.9.0.1-cp314-cp314t-manylinux_2_27_aarch64.whl", hash = "sha256:76f23cbb109e673ea7a90781aca3e02a0c72464410c019fe14fba3c044f2b778", size = 74881382, upload-time = "2026-02-05T18:47:26.703Z" }, + { url = "https://files.pythonhosted.org/packages/0a/aa/a42fb912fd1f9c83e22dc2577cdfbf1a1b07d6660532cb44724db7a7c479/jaxlib-0.9.0.1-cp314-cp314t-manylinux_2_27_x86_64.whl", hash = "sha256:f80d30dedce96c73a7f5dcb79c4c827a1bde2304f502a56ce7e7f723df2a5398", size = 80438052, upload-time = "2026-02-05T18:47:30.039Z" }, ] [[package]] @@ -3071,19 +3609,31 @@ version = "0.8.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jax", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "jax", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jax", version = "0.9.0.1", source = { 
registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "jaxlib", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "jaxlib", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jaxlib", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/3a/da/ff7d7fbd13b8ed5e8458e80308d075fc649062b9f8676d3fc56f2dc99a82/jaxopt-0.8.5.tar.gz", hash = "sha256:2790bd68ef132b216c083a8bc7a2704eceb35a92c0fc0a1e652e79dfb1e9e9ab", size = 121709, upload-time = "2025-04-14T17:59:01.618Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/45/d8/55e0901103c93d57bab3b932294c216f0cbd49054187ce29f8f13808d530/jaxopt-0.8.5-py3-none-any.whl", hash = "sha256:ff221d1a86908ec759eb1e219ee1d12bf208a70707e961bf7401076fe7cf4d5e", size = 172434, upload-time = "2025-04-14T17:59:00.342Z" }, ] +[[package]] +name = "jaxtyping" +version = "0.3.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wadler-lindig", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/38/40/a2ea3ce0e3e5f540eb970de7792c90fa58fef1b27d34c83f9fa94fea4729/jaxtyping-0.3.7.tar.gz", hash = "sha256:3bd7d9beb7d3cb01a89f93f90581c6f4fff3e5c5dc3c9307e8f8687a040d10c4", size = 45721, upload-time = "2026-01-30T14:18:47.409Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/42/caf65e9a0576a3abadc537e2f831701ba9081f21317fb3be87d64451587a/jaxtyping-0.3.7-py3-none-any.whl", hash = "sha256:303ab8599edf412eeb40bf06c863e3168fa186cf0e7334703fa741ddd7046e66", size = 56101, upload-time = "2026-01-30T14:18:45.954Z" }, +] + [[package]] name = "jedi" version = "0.19.2" @@ -3110,99 +3660,99 @@ wheels = [ [[package]] name = "jiter" -version = "0.12.0" +version = "0.13.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = "2025-11-09T20:49:23.302Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/91/13cb9505f7be74a933f37da3af22e029f6ba64f5669416cb8b2774bc9682/jiter-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e7acbaba9703d5de82a2c98ae6a0f59ab9770ab5af5fa35e43a303aee962cf65", size = 316652, upload-time = "2025-11-09T20:46:41.021Z" }, - { url = "https://files.pythonhosted.org/packages/4e/76/4e9185e5d9bb4e482cf6dec6410d5f78dfeb374cfcecbbe9888d07c52daa/jiter-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:364f1a7294c91281260364222f535bc427f56d4de1d8ffd718162d21fbbd602e", size = 319829, upload-time = "2025-11-09T20:46:43.281Z" }, - { url = "https://files.pythonhosted.org/packages/86/af/727de50995d3a153138139f259baae2379d8cb0522c0c00419957bc478a6/jiter-0.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ee4d25805d4fb23f0a5167a962ef8e002dbfb29c0989378488e32cf2744b62", size = 350568, upload-time 
= "2025-11-09T20:46:45.075Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c1/d6e9f4b7a3d5ac63bcbdfddeb50b2dcfbdc512c86cffc008584fdc350233/jiter-0.12.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:796f466b7942107eb889c08433b6e31b9a7ed31daceaecf8af1be26fb26c0ca8", size = 369052, upload-time = "2025-11-09T20:46:46.818Z" }, - { url = "https://files.pythonhosted.org/packages/eb/be/00824cd530f30ed73fa8a4f9f3890a705519e31ccb9e929f1e22062e7c76/jiter-0.12.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35506cb71f47dba416694e67af996bbdefb8e3608f1f78799c2e1f9058b01ceb", size = 481585, upload-time = "2025-11-09T20:46:48.319Z" }, - { url = "https://files.pythonhosted.org/packages/74/b6/2ad7990dff9504d4b5052eef64aa9574bd03d722dc7edced97aad0d47be7/jiter-0.12.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:726c764a90c9218ec9e4f99a33d6bf5ec169163f2ca0fc21b654e88c2abc0abc", size = 380541, upload-time = "2025-11-09T20:46:49.643Z" }, - { url = "https://files.pythonhosted.org/packages/b5/c7/f3c26ecbc1adbf1db0d6bba99192143d8fe8504729d9594542ecc4445784/jiter-0.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa47810c5565274810b726b0dc86d18dce5fd17b190ebdc3890851d7b2a0e74", size = 364423, upload-time = "2025-11-09T20:46:51.731Z" }, - { url = "https://files.pythonhosted.org/packages/18/51/eac547bf3a2d7f7e556927278e14c56a0604b8cddae75815d5739f65f81d/jiter-0.12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ec0259d3f26c62aed4d73b198c53e316ae11f0f69c8fbe6682c6dcfa0fcce2", size = 389958, upload-time = "2025-11-09T20:46:53.432Z" }, - { url = "https://files.pythonhosted.org/packages/2c/1f/9ca592e67175f2db156cff035e0d817d6004e293ee0c1d73692d38fcb596/jiter-0.12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:79307d74ea83465b0152fa23e5e297149506435535282f979f18b9033c0bb025", size = 522084, upload-time = "2025-11-09T20:46:54.848Z" 
}, - { url = "https://files.pythonhosted.org/packages/83/ff/597d9cdc3028f28224f53e1a9d063628e28b7a5601433e3196edda578cdd/jiter-0.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cf6e6dd18927121fec86739f1a8906944703941d000f0639f3eb6281cc601dca", size = 513054, upload-time = "2025-11-09T20:46:56.487Z" }, - { url = "https://files.pythonhosted.org/packages/24/6d/1970bce1351bd02e3afcc5f49e4f7ef3dabd7fb688f42be7e8091a5b809a/jiter-0.12.0-cp310-cp310-win32.whl", hash = "sha256:b6ae2aec8217327d872cbfb2c1694489057b9433afce447955763e6ab015b4c4", size = 206368, upload-time = "2025-11-09T20:46:58.638Z" }, - { url = "https://files.pythonhosted.org/packages/e3/6b/eb1eb505b2d86709b59ec06681a2b14a94d0941db091f044b9f0e16badc0/jiter-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7f49ce90a71e44f7e1aa9e7ec415b9686bbc6a5961e57eab511015e6759bc11", size = 204847, upload-time = "2025-11-09T20:47:00.295Z" }, - { url = "https://files.pythonhosted.org/packages/32/f9/eaca4633486b527ebe7e681c431f529b63fe2709e7c5242fc0f43f77ce63/jiter-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8f8a7e317190b2c2d60eb2e8aa835270b008139562d70fe732e1c0020ec53c9", size = 316435, upload-time = "2025-11-09T20:47:02.087Z" }, - { url = "https://files.pythonhosted.org/packages/10/c1/40c9f7c22f5e6ff715f28113ebaba27ab85f9af2660ad6e1dd6425d14c19/jiter-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2218228a077e784c6c8f1a8e5d6b8cb1dea62ce25811c356364848554b2056cd", size = 320548, upload-time = "2025-11-09T20:47:03.409Z" }, - { url = "https://files.pythonhosted.org/packages/6b/1b/efbb68fe87e7711b00d2cfd1f26bb4bfc25a10539aefeaa7727329ffb9cb/jiter-0.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9354ccaa2982bf2188fd5f57f79f800ef622ec67beb8329903abf6b10da7d423", size = 351915, upload-time = "2025-11-09T20:47:05.171Z" }, - { url = 
"https://files.pythonhosted.org/packages/15/2d/c06e659888c128ad1e838123d0638f0efad90cc30860cb5f74dd3f2fc0b3/jiter-0.12.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f2607185ea89b4af9a604d4c7ec40e45d3ad03ee66998b031134bc510232bb7", size = 368966, upload-time = "2025-11-09T20:47:06.508Z" }, - { url = "https://files.pythonhosted.org/packages/6b/20/058db4ae5fb07cf6a4ab2e9b9294416f606d8e467fb74c2184b2a1eeacba/jiter-0.12.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a585a5e42d25f2e71db5f10b171f5e5ea641d3aa44f7df745aa965606111cc2", size = 482047, upload-time = "2025-11-09T20:47:08.382Z" }, - { url = "https://files.pythonhosted.org/packages/49/bb/dc2b1c122275e1de2eb12905015d61e8316b2f888bdaac34221c301495d6/jiter-0.12.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd9e21d34edff5a663c631f850edcb786719c960ce887a5661e9c828a53a95d9", size = 380835, upload-time = "2025-11-09T20:47:09.81Z" }, - { url = "https://files.pythonhosted.org/packages/23/7d/38f9cd337575349de16da575ee57ddb2d5a64d425c9367f5ef9e4612e32e/jiter-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a612534770470686cd5431478dc5a1b660eceb410abade6b1b74e320ca98de6", size = 364587, upload-time = "2025-11-09T20:47:11.529Z" }, - { url = "https://files.pythonhosted.org/packages/f0/a3/b13e8e61e70f0bb06085099c4e2462647f53cc2ca97614f7fedcaa2bb9f3/jiter-0.12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3985aea37d40a908f887b34d05111e0aae822943796ebf8338877fee2ab67725", size = 390492, upload-time = "2025-11-09T20:47:12.993Z" }, - { url = "https://files.pythonhosted.org/packages/07/71/e0d11422ed027e21422f7bc1883c61deba2d9752b720538430c1deadfbca/jiter-0.12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b1207af186495f48f72529f8d86671903c8c10127cac6381b11dddc4aaa52df6", size = 522046, upload-time = "2025-11-09T20:47:14.6Z" }, - { url = 
"https://files.pythonhosted.org/packages/9f/59/b968a9aa7102a8375dbbdfbd2aeebe563c7e5dddf0f47c9ef1588a97e224/jiter-0.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef2fb241de583934c9915a33120ecc06d94aa3381a134570f59eed784e87001e", size = 513392, upload-time = "2025-11-09T20:47:16.011Z" }, - { url = "https://files.pythonhosted.org/packages/ca/e4/7df62002499080dbd61b505c5cb351aa09e9959d176cac2aa8da6f93b13b/jiter-0.12.0-cp311-cp311-win32.whl", hash = "sha256:453b6035672fecce8007465896a25b28a6b59cfe8fbc974b2563a92f5a92a67c", size = 206096, upload-time = "2025-11-09T20:47:17.344Z" }, - { url = "https://files.pythonhosted.org/packages/bb/60/1032b30ae0572196b0de0e87dce3b6c26a1eff71aad5fe43dee3082d32e0/jiter-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:ca264b9603973c2ad9435c71a8ec8b49f8f715ab5ba421c85a51cde9887e421f", size = 204899, upload-time = "2025-11-09T20:47:19.365Z" }, - { url = "https://files.pythonhosted.org/packages/49/d5/c145e526fccdb834063fb45c071df78b0cc426bbaf6de38b0781f45d956f/jiter-0.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:cb00ef392e7d684f2754598c02c409f376ddcef857aae796d559e6cacc2d78a5", size = 188070, upload-time = "2025-11-09T20:47:20.75Z" }, - { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" }, - { url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" }, - { url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590, upload-time = "2025-11-09T20:47:27.918Z" }, - { url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462, upload-time = "2025-11-09T20:47:29.654Z" }, - { url = "https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983, upload-time = "2025-11-09T20:47:31.026Z" }, - { url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328, upload-time = "2025-11-09T20:47:33.286Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740, upload-time = "2025-11-09T20:47:34.703Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875, upload-time = "2025-11-09T20:47:36.058Z" }, - { url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457, upload-time = "2025-11-09T20:47:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" }, - { url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" }, - { url = "https://files.pythonhosted.org/packages/3d/a6/97209693b177716e22576ee1161674d1d58029eb178e01866a0422b69224/jiter-0.12.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6cc49d5130a14b732e0612bc76ae8db3b49898732223ef8b7599aa8d9810683e", size = 313658, upload-time = "2025-11-09T20:47:44.424Z" }, - { url = "https://files.pythonhosted.org/packages/06/4d/125c5c1537c7d8ee73ad3d530a442d6c619714b95027143f1b61c0b4dfe0/jiter-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:37f27a32ce36364d2fa4f7fdc507279db604d27d239ea2e044c8f148410defe1", size = 318605, upload-time = "2025-11-09T20:47:45.973Z" }, - { url = "https://files.pythonhosted.org/packages/99/bf/a840b89847885064c41a5f52de6e312e91fa84a520848ee56c97e4fa0205/jiter-0.12.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc0944aa3d4b4773e348cda635252824a78f4ba44328e042ef1ff3f6080d1cf", size = 349803, upload-time = "2025-11-09T20:47:47.535Z" }, - { url = "https://files.pythonhosted.org/packages/8a/88/e63441c28e0db50e305ae23e19c1d8fae012d78ed55365da392c1f34b09c/jiter-0.12.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da25c62d4ee1ffbacb97fac6dfe4dcd6759ebdc9015991e92a6eae5816287f44", size = 365120, upload-time = "2025-11-09T20:47:49.284Z" }, - { url = "https://files.pythonhosted.org/packages/0a/7c/49b02714af4343970eb8aca63396bc1c82fa01197dbb1e9b0d274b550d4e/jiter-0.12.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:048485c654b838140b007390b8182ba9774621103bd4d77c9c3f6f117474ba45", size = 479918, upload-time = "2025-11-09T20:47:50.807Z" }, - { url = "https://files.pythonhosted.org/packages/69/ba/0a809817fdd5a1db80490b9150645f3aae16afad166960bcd562be194f3b/jiter-0.12.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:635e737fbb7315bef0037c19b88b799143d2d7d3507e61a76751025226b3ac87", size = 379008, upload-time = "2025-11-09T20:47:52.211Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c3/c9fc0232e736c8877d9e6d83d6eeb0ba4e90c6c073835cc2e8f73fdeef51/jiter-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e017c417b1ebda911bd13b1e40612704b1f5420e30695112efdbed8a4b389ed", size = 361785, upload-time = "2025-11-09T20:47:53.512Z" }, - { url = "https://files.pythonhosted.org/packages/96/61/61f69b7e442e97ca6cd53086ddc1cf59fb830549bc72c0a293713a60c525/jiter-0.12.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:89b0bfb8b2bf2351fba36bb211ef8bfceba73ef58e7f0c68fb67b5a2795ca2f9", size = 386108, upload-time = "2025-11-09T20:47:54.893Z" }, - { url = "https://files.pythonhosted.org/packages/e9/2e/76bb3332f28550c8f1eba3bf6e5efe211efda0ddbbaf24976bc7078d42a5/jiter-0.12.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f5aa5427a629a824a543672778c9ce0c5e556550d1569bb6ea28a85015287626", size = 519937, upload-time = "2025-11-09T20:47:56.253Z" }, - { url = "https://files.pythonhosted.org/packages/84/d6/fa96efa87dc8bff2094fb947f51f66368fa56d8d4fc9e77b25d7fbb23375/jiter-0.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed53b3d6acbcb0fd0b90f20c7cb3b24c357fe82a3518934d4edfa8c6898e498c", size = 510853, upload-time = "2025-11-09T20:47:58.32Z" }, - { url = "https://files.pythonhosted.org/packages/8a/28/93f67fdb4d5904a708119a6ab58a8f1ec226ff10a94a282e0215402a8462/jiter-0.12.0-cp313-cp313-win32.whl", hash = "sha256:4747de73d6b8c78f2e253a2787930f4fffc68da7fa319739f57437f95963c4de", size = 204699, upload-time = "2025-11-09T20:47:59.686Z" }, - { url = "https://files.pythonhosted.org/packages/c4/1f/30b0eb087045a0abe2a5c9c0c0c8da110875a1d3be83afd4a9a4e548be3c/jiter-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:e25012eb0c456fcc13354255d0338cd5397cce26c77b2832b3c4e2e255ea5d9a", size = 204258, upload-time = "2025-11-09T20:48:01.01Z" }, - { url = "https://files.pythonhosted.org/packages/2c/f4/2b4daf99b96bce6fc47971890b14b2a36aef88d7beb9f057fafa032c6141/jiter-0.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:c97b92c54fe6110138c872add030a1f99aea2401ddcdaa21edf74705a646dd60", size = 185503, upload-time = "2025-11-09T20:48:02.35Z" }, - { url = "https://files.pythonhosted.org/packages/39/ca/67bb15a7061d6fe20b9b2a2fd783e296a1e0f93468252c093481a2f00efa/jiter-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53839b35a38f56b8be26a7851a48b89bc47e5d88e900929df10ed93b95fea3d6", size = 317965, upload-time = "2025-11-09T20:48:03.783Z" }, - { url = 
"https://files.pythonhosted.org/packages/18/af/1788031cd22e29c3b14bc6ca80b16a39a0b10e611367ffd480c06a259831/jiter-0.12.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94f669548e55c91ab47fef8bddd9c954dab1938644e715ea49d7e117015110a4", size = 345831, upload-time = "2025-11-09T20:48:05.55Z" }, - { url = "https://files.pythonhosted.org/packages/05/17/710bf8472d1dff0d3caf4ced6031060091c1320f84ee7d5dcbed1f352417/jiter-0.12.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:351d54f2b09a41600ffea43d081522d792e81dcfb915f6d2d242744c1cc48beb", size = 361272, upload-time = "2025-11-09T20:48:06.951Z" }, - { url = "https://files.pythonhosted.org/packages/fb/f1/1dcc4618b59761fef92d10bcbb0b038b5160be653b003651566a185f1a5c/jiter-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2a5e90604620f94bf62264e7c2c038704d38217b7465b863896c6d7c902b06c7", size = 204604, upload-time = "2025-11-09T20:48:08.328Z" }, - { url = "https://files.pythonhosted.org/packages/d9/32/63cb1d9f1c5c6632a783c0052cde9ef7ba82688f7065e2f0d5f10a7e3edb/jiter-0.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:88ef757017e78d2860f96250f9393b7b577b06a956ad102c29c8237554380db3", size = 185628, upload-time = "2025-11-09T20:48:09.572Z" }, - { url = "https://files.pythonhosted.org/packages/a8/99/45c9f0dbe4a1416b2b9a8a6d1236459540f43d7fb8883cff769a8db0612d/jiter-0.12.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:c46d927acd09c67a9fb1416df45c5a04c27e83aae969267e98fba35b74e99525", size = 312478, upload-time = "2025-11-09T20:48:10.898Z" }, - { url = "https://files.pythonhosted.org/packages/4c/a7/54ae75613ba9e0f55fcb0bc5d1f807823b5167cc944e9333ff322e9f07dd/jiter-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:774ff60b27a84a85b27b88cd5583899c59940bcc126caca97eb2a9df6aa00c49", size = 318706, upload-time = "2025-11-09T20:48:12.266Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/31/2aa241ad2c10774baf6c37f8b8e1f39c07db358f1329f4eb40eba179c2a2/jiter-0.12.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5433fab222fb072237df3f637d01b81f040a07dcac1cb4a5c75c7aa9ed0bef1", size = 351894, upload-time = "2025-11-09T20:48:13.673Z" }, - { url = "https://files.pythonhosted.org/packages/54/4f/0f2759522719133a9042781b18cc94e335b6d290f5e2d3e6899d6af933e3/jiter-0.12.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f8c593c6e71c07866ec6bfb790e202a833eeec885022296aff6b9e0b92d6a70e", size = 365714, upload-time = "2025-11-09T20:48:15.083Z" }, - { url = "https://files.pythonhosted.org/packages/dc/6f/806b895f476582c62a2f52c453151edd8a0fde5411b0497baaa41018e878/jiter-0.12.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90d32894d4c6877a87ae00c6b915b609406819dce8bc0d4e962e4de2784e567e", size = 478989, upload-time = "2025-11-09T20:48:16.706Z" }, - { url = "https://files.pythonhosted.org/packages/86/6c/012d894dc6e1033acd8db2b8346add33e413ec1c7c002598915278a37f79/jiter-0.12.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:798e46eed9eb10c3adbbacbd3bdb5ecd4cf7064e453d00dbef08802dae6937ff", size = 378615, upload-time = "2025-11-09T20:48:18.614Z" }, - { url = "https://files.pythonhosted.org/packages/87/30/d718d599f6700163e28e2c71c0bbaf6dace692e7df2592fd793ac9276717/jiter-0.12.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3f1368f0a6719ea80013a4eb90ba72e75d7ea67cfc7846db2ca504f3df0169a", size = 364745, upload-time = "2025-11-09T20:48:20.117Z" }, - { url = "https://files.pythonhosted.org/packages/8f/85/315b45ce4b6ddc7d7fceca24068543b02bdc8782942f4ee49d652e2cc89f/jiter-0.12.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65f04a9d0b4406f7e51279710b27484af411896246200e461d80d3ba0caa901a", size = 386502, upload-time = "2025-11-09T20:48:21.543Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/0b/ce0434fb40c5b24b368fe81b17074d2840748b4952256bab451b72290a49/jiter-0.12.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:fd990541982a24281d12b67a335e44f117e4c6cbad3c3b75c7dea68bf4ce3a67", size = 519845, upload-time = "2025-11-09T20:48:22.964Z" }, - { url = "https://files.pythonhosted.org/packages/e8/a3/7a7a4488ba052767846b9c916d208b3ed114e3eb670ee984e4c565b9cf0d/jiter-0.12.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:b111b0e9152fa7df870ecaebb0bd30240d9f7fff1f2003bcb4ed0f519941820b", size = 510701, upload-time = "2025-11-09T20:48:24.483Z" }, - { url = "https://files.pythonhosted.org/packages/c3/16/052ffbf9d0467b70af24e30f91e0579e13ded0c17bb4a8eb2aed3cb60131/jiter-0.12.0-cp314-cp314-win32.whl", hash = "sha256:a78befb9cc0a45b5a5a0d537b06f8544c2ebb60d19d02c41ff15da28a9e22d42", size = 205029, upload-time = "2025-11-09T20:48:25.749Z" }, - { url = "https://files.pythonhosted.org/packages/e4/18/3cf1f3f0ccc789f76b9a754bdb7a6977e5d1d671ee97a9e14f7eb728d80e/jiter-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:e1fe01c082f6aafbe5c8faf0ff074f38dfb911d53f07ec333ca03f8f6226debf", size = 204960, upload-time = "2025-11-09T20:48:27.415Z" }, - { url = "https://files.pythonhosted.org/packages/02/68/736821e52ecfdeeb0f024b8ab01b5a229f6b9293bbdb444c27efade50b0f/jiter-0.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:d72f3b5a432a4c546ea4bedc84cce0c3404874f1d1676260b9c7f048a9855451", size = 185529, upload-time = "2025-11-09T20:48:29.125Z" }, - { url = "https://files.pythonhosted.org/packages/30/61/12ed8ee7a643cce29ac97c2281f9ce3956eb76b037e88d290f4ed0d41480/jiter-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e6ded41aeba3603f9728ed2b6196e4df875348ab97b28fc8afff115ed42ba7a7", size = 318974, upload-time = "2025-11-09T20:48:30.87Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/c6/f3041ede6d0ed5e0e79ff0de4c8f14f401bbf196f2ef3971cdbe5fd08d1d/jiter-0.12.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a947920902420a6ada6ad51892082521978e9dd44a802663b001436e4b771684", size = 345932, upload-time = "2025-11-09T20:48:32.658Z" }, - { url = "https://files.pythonhosted.org/packages/d5/5d/4d94835889edd01ad0e2dbfc05f7bdfaed46292e7b504a6ac7839aa00edb/jiter-0.12.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:add5e227e0554d3a52cf390a7635edaffdf4f8fce4fdbcef3cc2055bb396a30c", size = 367243, upload-time = "2025-11-09T20:48:34.093Z" }, - { url = "https://files.pythonhosted.org/packages/fd/76/0051b0ac2816253a99d27baf3dda198663aff882fa6ea7deeb94046da24e/jiter-0.12.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9b1cda8fcb736250d7e8711d4580ebf004a46771432be0ae4796944b5dfa5d", size = 479315, upload-time = "2025-11-09T20:48:35.507Z" }, - { url = "https://files.pythonhosted.org/packages/70/ae/83f793acd68e5cb24e483f44f482a1a15601848b9b6f199dacb970098f77/jiter-0.12.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:deeb12a2223fe0135c7ff1356a143d57f95bbf1f4a66584f1fc74df21d86b993", size = 380714, upload-time = "2025-11-09T20:48:40.014Z" }, - { url = "https://files.pythonhosted.org/packages/b1/5e/4808a88338ad2c228b1126b93fcd8ba145e919e886fe910d578230dabe3b/jiter-0.12.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c596cc0f4cb574877550ce4ecd51f8037469146addd676d7c1a30ebe6391923f", size = 365168, upload-time = "2025-11-09T20:48:41.462Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d4/04619a9e8095b42aef436b5aeb4c0282b4ff1b27d1db1508df9f5dc82750/jiter-0.12.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ab4c823b216a4aeab3fdbf579c5843165756bd9ad87cc6b1c65919c4715f783", size = 387893, upload-time = "2025-11-09T20:48:42.921Z" }, - { url = 
"https://files.pythonhosted.org/packages/17/ea/d3c7e62e4546fdc39197fa4a4315a563a89b95b6d54c0d25373842a59cbe/jiter-0.12.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e427eee51149edf962203ff8db75a7514ab89be5cb623fb9cea1f20b54f1107b", size = 520828, upload-time = "2025-11-09T20:48:44.278Z" }, - { url = "https://files.pythonhosted.org/packages/cc/0b/c6d3562a03fd767e31cb119d9041ea7958c3c80cb3d753eafb19b3b18349/jiter-0.12.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:edb868841f84c111255ba5e80339d386d937ec1fdce419518ce1bd9370fac5b6", size = 511009, upload-time = "2025-11-09T20:48:45.726Z" }, - { url = "https://files.pythonhosted.org/packages/aa/51/2cb4468b3448a8385ebcd15059d325c9ce67df4e2758d133ab9442b19834/jiter-0.12.0-cp314-cp314t-win32.whl", hash = "sha256:8bbcfe2791dfdb7c5e48baf646d37a6a3dcb5a97a032017741dea9f817dca183", size = 205110, upload-time = "2025-11-09T20:48:47.033Z" }, - { url = "https://files.pythonhosted.org/packages/b2/c5/ae5ec83dec9c2d1af805fd5fe8f74ebded9c8670c5210ec7820ce0dbeb1e/jiter-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2fa940963bf02e1d8226027ef461e36af472dea85d36054ff835aeed944dd873", size = 205223, upload-time = "2025-11-09T20:48:49.076Z" }, - { url = "https://files.pythonhosted.org/packages/97/9a/3c5391907277f0e55195550cf3fa8e293ae9ee0c00fb402fec1e38c0c82f/jiter-0.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:506c9708dd29b27288f9f8f1140c3cb0e3d8ddb045956d7757b1fa0e0f39a473", size = 185564, upload-time = "2025-11-09T20:48:50.376Z" }, - { url = "https://files.pythonhosted.org/packages/fe/54/5339ef1ecaa881c6948669956567a64d2670941925f245c434f494ffb0e5/jiter-0.12.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:4739a4657179ebf08f85914ce50332495811004cc1747852e8b2041ed2aab9b8", size = 311144, upload-time = "2025-11-09T20:49:10.503Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/74/3446c652bffbd5e81ab354e388b1b5fc1d20daac34ee0ed11ff096b1b01a/jiter-0.12.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:41da8def934bf7bec16cb24bd33c0ca62126d2d45d81d17b864bd5ad721393c3", size = 305877, upload-time = "2025-11-09T20:49:12.269Z" }, - { url = "https://files.pythonhosted.org/packages/a1/f4/ed76ef9043450f57aac2d4fbeb27175aa0eb9c38f833be6ef6379b3b9a86/jiter-0.12.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c44ee814f499c082e69872d426b624987dbc5943ab06e9bbaa4f81989fdb79e", size = 340419, upload-time = "2025-11-09T20:49:13.803Z" }, - { url = "https://files.pythonhosted.org/packages/21/01/857d4608f5edb0664aa791a3d45702e1a5bcfff9934da74035e7b9803846/jiter-0.12.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd2097de91cf03eaa27b3cbdb969addf83f0179c6afc41bbc4513705e013c65d", size = 347212, upload-time = "2025-11-09T20:49:15.643Z" }, - { url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" }, - { url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" }, - { url = "https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 
338537, upload-time = "2025-11-09T20:49:20.317Z" }, - { url = "https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110, upload-time = "2025-11-09T20:49:21.817Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, upload-time = "2026-02-02T12:37:56.441Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/5a/41da76c5ea07bec1b0472b6b2fdb1b651074d504b19374d7e130e0cdfb25/jiter-0.13.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2ffc63785fd6c7977defe49b9824ae6ce2b2e2b77ce539bdaf006c26da06342e", size = 311164, upload-time = "2026-02-02T12:35:17.688Z" }, + { url = "https://files.pythonhosted.org/packages/40/cb/4a1bf994a3e869f0d39d10e11efb471b76d0ad70ecbfb591427a46c880c2/jiter-0.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a638816427006c1e3f0013eb66d391d7a3acda99a7b0cf091eff4497ccea33a", size = 320296, upload-time = "2026-02-02T12:35:19.828Z" }, + { url = "https://files.pythonhosted.org/packages/09/82/acd71ca9b50ecebadc3979c541cd717cce2fe2bc86236f4fa597565d8f1a/jiter-0.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19928b5d1ce0ff8c1ee1b9bdef3b5bfc19e8304f1b904e436caf30bc15dc6cf5", size = 352742, upload-time = "2026-02-02T12:35:21.258Z" }, + { url = "https://files.pythonhosted.org/packages/71/03/d1fc996f3aecfd42eb70922edecfb6dd26421c874503e241153ad41df94f/jiter-0.13.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:309549b778b949d731a2f0e1594a3f805716be704a73bf3ad9a807eed5eb5721", size = 363145, upload-time = "2026-02-02T12:35:24.653Z" }, + { url 
= "https://files.pythonhosted.org/packages/f1/61/a30492366378cc7a93088858f8991acd7d959759fe6138c12a4644e58e81/jiter-0.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcdabaea26cb04e25df3103ce47f97466627999260290349a88c8136ecae0060", size = 487683, upload-time = "2026-02-02T12:35:26.162Z" }, + { url = "https://files.pythonhosted.org/packages/20/4e/4223cffa9dbbbc96ed821c5aeb6bca510848c72c02086d1ed3f1da3d58a7/jiter-0.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a3a377af27b236abbf665a69b2bdd680e3b5a0bd2af825cd3b81245279a7606c", size = 373579, upload-time = "2026-02-02T12:35:27.582Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c9/b0489a01329ab07a83812d9ebcffe7820a38163c6d9e7da644f926ff877c/jiter-0.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe49d3ff6db74321f144dff9addd4a5874d3105ac5ba7c5b77fac099cfae31ae", size = 362904, upload-time = "2026-02-02T12:35:28.925Z" }, + { url = "https://files.pythonhosted.org/packages/05/af/53e561352a44afcba9a9bc67ee1d320b05a370aed8df54eafe714c4e454d/jiter-0.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2113c17c9a67071b0f820733c0893ed1d467b5fcf4414068169e5c2cabddb1e2", size = 392380, upload-time = "2026-02-02T12:35:30.385Z" }, + { url = "https://files.pythonhosted.org/packages/76/2a/dd805c3afb8ed5b326c5ae49e725d1b1255b9754b1b77dbecdc621b20773/jiter-0.13.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ab1185ca5c8b9491b55ebf6c1e8866b8f68258612899693e24a92c5fdb9455d5", size = 517939, upload-time = "2026-02-02T12:35:31.865Z" }, + { url = "https://files.pythonhosted.org/packages/20/2a/7b67d76f55b8fe14c937e7640389612f05f9a4145fc28ae128aaa5e62257/jiter-0.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9621ca242547edc16400981ca3231e0c91c0c4c1ab8573a596cd9bb3575d5c2b", size = 551696, upload-time = "2026-02-02T12:35:33.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/9c/57cdd64dac8f4c6ab8f994fe0eb04dc9fd1db102856a4458fcf8a99dfa62/jiter-0.13.0-cp310-cp310-win32.whl", hash = "sha256:a7637d92b1c9d7a771e8c56f445c7f84396d48f2e756e5978840ecba2fac0894", size = 204592, upload-time = "2026-02-02T12:35:34.58Z" }, + { url = "https://files.pythonhosted.org/packages/a7/38/f4f3ea5788b8a5bae7510a678cdc747eda0c45ffe534f9878ff37e7cf3b3/jiter-0.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c1b609e5cbd2f52bb74fb721515745b407df26d7b800458bd97cb3b972c29e7d", size = 206016, upload-time = "2026-02-02T12:35:36.435Z" }, + { url = "https://files.pythonhosted.org/packages/71/29/499f8c9eaa8a16751b1c0e45e6f5f1761d180da873d417996cc7bddc8eef/jiter-0.13.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ea026e70a9a28ebbdddcbcf0f1323128a8db66898a06eaad3a4e62d2f554d096", size = 311157, upload-time = "2026-02-02T12:35:37.758Z" }, + { url = "https://files.pythonhosted.org/packages/50/f6/566364c777d2ab450b92100bea11333c64c38d32caf8dc378b48e5b20c46/jiter-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66aa3e663840152d18cc8ff1e4faad3dd181373491b9cfdc6004b92198d67911", size = 319729, upload-time = "2026-02-02T12:35:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/73/dd/560f13ec5e4f116d8ad2658781646cca91b617ae3b8758d4a5076b278f70/jiter-0.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3524798e70655ff19aec58c7d05adb1f074fecff62da857ea9be2b908b6d701", size = 354766, upload-time = "2026-02-02T12:35:40.662Z" }, + { url = "https://files.pythonhosted.org/packages/7c/0d/061faffcfe94608cbc28a0d42a77a74222bdf5055ccdbe5fd2292b94f510/jiter-0.13.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec7e287d7fbd02cb6e22f9a00dd9c9cd504c40a61f2c61e7e1f9690a82726b4c", size = 362587, upload-time = "2026-02-02T12:35:42.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/c9/c66a7864982fd38a9773ec6e932e0398d1262677b8c60faecd02ffb67bf3/jiter-0.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47455245307e4debf2ce6c6e65a717550a0244231240dcf3b8f7d64e4c2f22f4", size = 487537, upload-time = "2026-02-02T12:35:43.459Z" }, + { url = "https://files.pythonhosted.org/packages/6c/86/84eb4352cd3668f16d1a88929b5888a3fe0418ea8c1dfc2ad4e7bf6e069a/jiter-0.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee9da221dca6e0429c2704c1b3655fe7b025204a71d4d9b73390c759d776d165", size = 373717, upload-time = "2026-02-02T12:35:44.928Z" }, + { url = "https://files.pythonhosted.org/packages/6e/09/9fe4c159358176f82d4390407a03f506a8659ed13ca3ac93a843402acecf/jiter-0.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24ab43126d5e05f3d53a36a8e11eb2f23304c6c1117844aaaf9a0aa5e40b5018", size = 362683, upload-time = "2026-02-02T12:35:46.636Z" }, + { url = "https://files.pythonhosted.org/packages/c9/5e/85f3ab9caca0c1d0897937d378b4a515cae9e119730563572361ea0c48ae/jiter-0.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9da38b4fedde4fb528c740c2564628fbab737166a0e73d6d46cb4bb5463ff411", size = 392345, upload-time = "2026-02-02T12:35:48.088Z" }, + { url = "https://files.pythonhosted.org/packages/12/4c/05b8629ad546191939e6f0c2f17e29f542a398f4a52fb987bc70b6d1eb8b/jiter-0.13.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b34c519e17658ed88d5047999a93547f8889f3c1824120c26ad6be5f27b6cf5", size = 517775, upload-time = "2026-02-02T12:35:49.482Z" }, + { url = "https://files.pythonhosted.org/packages/4d/88/367ea2eb6bc582c7052e4baf5ddf57ebe5ab924a88e0e09830dfb585c02d/jiter-0.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2a6394e6af690d462310a86b53c47ad75ac8c21dc79f120714ea449979cb1d3", size = 551325, upload-time = "2026-02-02T12:35:51.104Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/12/fa377ffb94a2f28c41afaed093e0d70cfe512035d5ecb0cad0ae4792d35e/jiter-0.13.0-cp311-cp311-win32.whl", hash = "sha256:0f0c065695f616a27c920a56ad0d4fc46415ef8b806bf8fc1cacf25002bd24e1", size = 204709, upload-time = "2026-02-02T12:35:52.467Z" }, + { url = "https://files.pythonhosted.org/packages/cb/16/8e8203ce92f844dfcd3d9d6a5a7322c77077248dbb12da52d23193a839cd/jiter-0.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0733312953b909688ae3c2d58d043aa040f9f1a6a75693defed7bc2cc4bf2654", size = 204560, upload-time = "2026-02-02T12:35:53.925Z" }, + { url = "https://files.pythonhosted.org/packages/44/26/97cc40663deb17b9e13c3a5cf29251788c271b18ee4d262c8f94798b8336/jiter-0.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:5d9b34ad56761b3bf0fbe8f7e55468704107608512350962d3317ffd7a4382d5", size = 189608, upload-time = "2026-02-02T12:35:55.304Z" }, + { url = "https://files.pythonhosted.org/packages/2e/30/7687e4f87086829955013ca12a9233523349767f69653ebc27036313def9/jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663", size = 307958, upload-time = "2026-02-02T12:35:57.165Z" }, + { url = "https://files.pythonhosted.org/packages/c3/27/e57f9a783246ed95481e6749cc5002a8a767a73177a83c63ea71f0528b90/jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505", size = 318597, upload-time = "2026-02-02T12:35:58.591Z" }, + { url = "https://files.pythonhosted.org/packages/cf/52/e5719a60ac5d4d7c5995461a94ad5ef962a37c8bf5b088390e6fad59b2ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152", size = 348821, upload-time = "2026-02-02T12:36:00.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/db/c1efc32b8ba4c740ab3fc2d037d8753f67685f475e26b9d6536a4322bcdd/jiter-0.13.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04670992b576fa65bd056dbac0c39fe8bd67681c380cb2b48efa885711d9d726", size = 364163, upload-time = "2026-02-02T12:36:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/55/8a/fb75556236047c8806995671a18e4a0ad646ed255276f51a20f32dceaeec/jiter-0.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1aff1fbdb803a376d4d22a8f63f8e7ccbce0b4890c26cc7af9e501ab339ef0", size = 483709, upload-time = "2026-02-02T12:36:03.41Z" }, + { url = "https://files.pythonhosted.org/packages/7e/16/43512e6ee863875693a8e6f6d532e19d650779d6ba9a81593ae40a9088ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b3fb8c2053acaef8580809ac1d1f7481a0a0bdc012fd7f5d8b18fb696a5a089", size = 370480, upload-time = "2026-02-02T12:36:04.791Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4c/09b93e30e984a187bc8aaa3510e1ec8dcbdcd71ca05d2f56aac0492453aa/jiter-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdaba7d87e66f26a2c45d8cbadcbfc4bf7884182317907baf39cfe9775bb4d93", size = 360735, upload-time = "2026-02-02T12:36:06.994Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1b/46c5e349019874ec5dfa508c14c37e29864ea108d376ae26d90bee238cd7/jiter-0.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b88d649135aca526da172e48083da915ec086b54e8e73a425ba50999468cc08", size = 391814, upload-time = "2026-02-02T12:36:08.368Z" }, + { url = "https://files.pythonhosted.org/packages/15/9e/26184760e85baee7162ad37b7912797d2077718476bf91517641c92b3639/jiter-0.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e404ea551d35438013c64b4f357b0474c7abf9f781c06d44fcaf7a14c69ff9e2", size = 513990, upload-time = "2026-02-02T12:36:09.993Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/34/2c9355247d6debad57a0a15e76ab1566ab799388042743656e566b3b7de1/jiter-0.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f4748aad1b4a93c8bdd70f604d0f748cdc0e8744c5547798acfa52f10e79228", size = 548021, upload-time = "2026-02-02T12:36:11.376Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4a/9f2c23255d04a834398b9c2e0e665382116911dc4d06b795710503cdad25/jiter-0.13.0-cp312-cp312-win32.whl", hash = "sha256:0bf670e3b1445fc4d31612199f1744f67f889ee1bbae703c4b54dc097e5dd394", size = 203024, upload-time = "2026-02-02T12:36:12.682Z" }, + { url = "https://files.pythonhosted.org/packages/09/ee/f0ae675a957ae5a8f160be3e87acea6b11dc7b89f6b7ab057e77b2d2b13a/jiter-0.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:15db60e121e11fe186c0b15236bd5d18381b9ddacdcf4e659feb96fc6c969c92", size = 205424, upload-time = "2026-02-02T12:36:13.93Z" }, + { url = "https://files.pythonhosted.org/packages/1b/02/ae611edf913d3cbf02c97cdb90374af2082c48d7190d74c1111dde08bcdd/jiter-0.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:41f92313d17989102f3cb5dd533a02787cdb99454d494344b0361355da52fcb9", size = 186818, upload-time = "2026-02-02T12:36:15.308Z" }, + { url = "https://files.pythonhosted.org/packages/91/9c/7ee5a6ff4b9991e1a45263bfc46731634c4a2bde27dfda6c8251df2d958c/jiter-0.13.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1f8a55b848cbabf97d861495cd65f1e5c590246fabca8b48e1747c4dfc8f85bf", size = 306897, upload-time = "2026-02-02T12:36:16.748Z" }, + { url = "https://files.pythonhosted.org/packages/7c/02/be5b870d1d2be5dd6a91bdfb90f248fbb7dcbd21338f092c6b89817c3dbf/jiter-0.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f556aa591c00f2c45eb1b89f68f52441a016034d18b65da60e2d2875bbbf344a", size = 317507, upload-time = "2026-02-02T12:36:18.351Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/92/b25d2ec333615f5f284f3a4024f7ce68cfa0604c322c6808b2344c7f5d2b/jiter-0.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e1d61da332ec412350463891923f960c3073cf1aae93b538f0bb4c8cd46efb", size = 350560, upload-time = "2026-02-02T12:36:19.746Z" }, + { url = "https://files.pythonhosted.org/packages/be/ec/74dcb99fef0aca9fbe56b303bf79f6bd839010cb18ad41000bf6cc71eec0/jiter-0.13.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3097d665a27bc96fd9bbf7f86178037db139f319f785e4757ce7ccbf390db6c2", size = 363232, upload-time = "2026-02-02T12:36:21.243Z" }, + { url = "https://files.pythonhosted.org/packages/1b/37/f17375e0bb2f6a812d4dd92d7616e41917f740f3e71343627da9db2824ce/jiter-0.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d01ecc3a8cbdb6f25a37bd500510550b64ddf9f7d64a107d92f3ccb25035d0f", size = 483727, upload-time = "2026-02-02T12:36:22.688Z" }, + { url = "https://files.pythonhosted.org/packages/77/d2/a71160a5ae1a1e66c1395b37ef77da67513b0adba73b993a27fbe47eb048/jiter-0.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bbc30f5d60a3bdf63ae76beb3f9db280d7f195dfcfa61af792d6ce912d159", size = 370799, upload-time = "2026-02-02T12:36:24.106Z" }, + { url = "https://files.pythonhosted.org/packages/01/99/ed5e478ff0eb4e8aa5fd998f9d69603c9fd3f32de3bd16c2b1194f68361c/jiter-0.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fbafb6e88256f4454de33c1f40203d09fc33ed19162a68b3b257b29ca7f663", size = 359120, upload-time = "2026-02-02T12:36:25.519Z" }, + { url = "https://files.pythonhosted.org/packages/16/be/7ffd08203277a813f732ba897352797fa9493faf8dc7995b31f3d9cb9488/jiter-0.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5467696f6b827f1116556cb0db620440380434591e93ecee7fd14d1a491b6daa", size = 390664, upload-time = "2026-02-02T12:36:26.866Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/84/e0787856196d6d346264d6dcccb01f741e5f0bd014c1d9a2ebe149caf4f3/jiter-0.13.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2d08c9475d48b92892583df9da592a0e2ac49bcd41fae1fec4f39ba6cf107820", size = 513543, upload-time = "2026-02-02T12:36:28.217Z" }, + { url = "https://files.pythonhosted.org/packages/65/50/ecbd258181c4313cf79bca6c88fb63207d04d5bf5e4f65174114d072aa55/jiter-0.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:aed40e099404721d7fcaf5b89bd3b4568a4666358bcac7b6b15c09fb6252ab68", size = 547262, upload-time = "2026-02-02T12:36:29.678Z" }, + { url = "https://files.pythonhosted.org/packages/27/da/68f38d12e7111d2016cd198161b36e1f042bd115c169255bcb7ec823a3bf/jiter-0.13.0-cp313-cp313-win32.whl", hash = "sha256:36ebfbcffafb146d0e6ffb3e74d51e03d9c35ce7c625c8066cdbfc7b953bdc72", size = 200630, upload-time = "2026-02-02T12:36:31.808Z" }, + { url = "https://files.pythonhosted.org/packages/25/65/3bd1a972c9a08ecd22eb3b08a95d1941ebe6938aea620c246cf426ae09c2/jiter-0.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:8d76029f077379374cf0dbc78dbe45b38dec4a2eb78b08b5194ce836b2517afc", size = 202602, upload-time = "2026-02-02T12:36:33.679Z" }, + { url = "https://files.pythonhosted.org/packages/15/fe/13bd3678a311aa67686bb303654792c48206a112068f8b0b21426eb6851e/jiter-0.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:bb7613e1a427cfcb6ea4544f9ac566b93d5bf67e0d48c787eca673ff9c9dff2b", size = 185939, upload-time = "2026-02-02T12:36:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/49/19/a929ec002ad3228bc97ca01dbb14f7632fffdc84a95ec92ceaf4145688ae/jiter-0.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fa476ab5dd49f3bf3a168e05f89358c75a17608dbabb080ef65f96b27c19ab10", size = 316616, upload-time = "2026-02-02T12:36:36.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/56/d19a9a194afa37c1728831e5fb81b7722c3de18a3109e8f282bfc23e587a/jiter-0.13.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade8cb6ff5632a62b7dbd4757d8c5573f7a2e9ae285d6b5b841707d8363205ef", size = 346850, upload-time = "2026-02-02T12:36:38.058Z" }, + { url = "https://files.pythonhosted.org/packages/36/4a/94e831c6bf287754a8a019cb966ed39ff8be6ab78cadecf08df3bb02d505/jiter-0.13.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9950290340acc1adaded363edd94baebcee7dabdfa8bee4790794cd5cfad2af6", size = 358551, upload-time = "2026-02-02T12:36:39.417Z" }, + { url = "https://files.pythonhosted.org/packages/a2/ec/a4c72c822695fa80e55d2b4142b73f0012035d9fcf90eccc56bc060db37c/jiter-0.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2b4972c6df33731aac0742b64fd0d18e0a69bc7d6e03108ce7d40c85fd9e3e6d", size = 201950, upload-time = "2026-02-02T12:36:40.791Z" }, + { url = "https://files.pythonhosted.org/packages/b6/00/393553ec27b824fbc29047e9c7cd4a3951d7fbe4a76743f17e44034fa4e4/jiter-0.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:701a1e77d1e593c1b435315ff625fd071f0998c5f02792038a5ca98899261b7d", size = 185852, upload-time = "2026-02-02T12:36:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f5/f1997e987211f6f9bd71b8083047b316208b4aca0b529bb5f8c96c89ef3e/jiter-0.13.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cc5223ab19fe25e2f0bf2643204ad7318896fe3729bf12fde41b77bfc4fafff0", size = 308804, upload-time = "2026-02-02T12:36:43.496Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8f/5482a7677731fd44881f0204981ce2d7175db271f82cba2085dd2212e095/jiter-0.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9776ebe51713acf438fd9b4405fcd86893ae5d03487546dae7f34993217f8a91", size = 318787, upload-time = "2026-02-02T12:36:45.071Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/b9/7257ac59778f1cd025b26a23c5520a36a424f7f1b068f2442a5b499b7464/jiter-0.13.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879e768938e7b49b5e90b7e3fecc0dbec01b8cb89595861fb39a8967c5220d09", size = 353880, upload-time = "2026-02-02T12:36:47.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/87/719eec4a3f0841dad99e3d3604ee4cba36af4419a76f3cb0b8e2e691ad67/jiter-0.13.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:682161a67adea11e3aae9038c06c8b4a9a71023228767477d683f69903ebc607", size = 366702, upload-time = "2026-02-02T12:36:48.871Z" }, + { url = "https://files.pythonhosted.org/packages/d2/65/415f0a75cf6921e43365a1bc227c565cb949caca8b7532776e430cbaa530/jiter-0.13.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a13b68cd1cd8cc9de8f244ebae18ccb3e4067ad205220ef324c39181e23bbf66", size = 486319, upload-time = "2026-02-02T12:36:53.006Z" }, + { url = "https://files.pythonhosted.org/packages/54/a2/9e12b48e82c6bbc6081fd81abf915e1443add1b13d8fc586e1d90bb02bb8/jiter-0.13.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87ce0f14c6c08892b610686ae8be350bf368467b6acd5085a5b65441e2bf36d2", size = 372289, upload-time = "2026-02-02T12:36:54.593Z" }, + { url = "https://files.pythonhosted.org/packages/4e/c1/e4693f107a1789a239c759a432e9afc592366f04e901470c2af89cfd28e1/jiter-0.13.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c365005b05505a90d1c47856420980d0237adf82f70c4aff7aebd3c1cc143ad", size = 360165, upload-time = "2026-02-02T12:36:56.112Z" }, + { url = "https://files.pythonhosted.org/packages/17/08/91b9ea976c1c758240614bd88442681a87672eebc3d9a6dde476874e706b/jiter-0.13.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1317fdffd16f5873e46ce27d0e0f7f4f90f0cdf1d86bf6abeaea9f63ca2c401d", size = 389634, upload-time = "2026-02-02T12:36:57.495Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/23/58325ef99390d6d40427ed6005bf1ad54f2577866594bcf13ce55675f87d/jiter-0.13.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c05b450d37ba0c9e21c77fef1f205f56bcee2330bddca68d344baebfc55ae0df", size = 514933, upload-time = "2026-02-02T12:36:58.909Z" }, + { url = "https://files.pythonhosted.org/packages/5b/25/69f1120c7c395fd276c3996bb8adefa9c6b84c12bb7111e5c6ccdcd8526d/jiter-0.13.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:775e10de3849d0631a97c603f996f518159272db00fdda0a780f81752255ee9d", size = 548842, upload-time = "2026-02-02T12:37:00.433Z" }, + { url = "https://files.pythonhosted.org/packages/18/05/981c9669d86850c5fbb0d9e62bba144787f9fba84546ba43d624ee27ef29/jiter-0.13.0-cp314-cp314-win32.whl", hash = "sha256:632bf7c1d28421c00dd8bbb8a3bac5663e1f57d5cd5ed962bce3c73bf62608e6", size = 202108, upload-time = "2026-02-02T12:37:01.718Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/cdcf54dd0b0341db7d25413229888a346c7130bd20820530905fdb65727b/jiter-0.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:f22ef501c3f87ede88f23f9b11e608581c14f04db59b6a801f354397ae13739f", size = 204027, upload-time = "2026-02-02T12:37:03.075Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f9/724bcaaab7a3cd727031fe4f6995cb86c4bd344909177c186699c8dec51a/jiter-0.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:07b75fe09a4ee8e0c606200622e571e44943f47254f95e2436c8bdcaceb36d7d", size = 187199, upload-time = "2026-02-02T12:37:04.414Z" }, + { url = "https://files.pythonhosted.org/packages/62/92/1661d8b9fd6a3d7a2d89831db26fe3c1509a287d83ad7838831c7b7a5c7e/jiter-0.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964538479359059a35fb400e769295d4b315ae61e4105396d355a12f7fef09f0", size = 318423, upload-time = "2026-02-02T12:37:05.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/3b/f77d342a54d4ebcd128e520fc58ec2f5b30a423b0fd26acdfc0c6fef8e26/jiter-0.13.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e104da1db1c0991b3eaed391ccd650ae8d947eab1480c733e5a3fb28d4313e40", size = 351438, upload-time = "2026-02-02T12:37:07.189Z" }, + { url = "https://files.pythonhosted.org/packages/76/b3/ba9a69f0e4209bd3331470c723c2f5509e6f0482e416b612431a5061ed71/jiter-0.13.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e3a5f0cde8ff433b8e88e41aa40131455420fb3649a3c7abdda6145f8cb7202", size = 364774, upload-time = "2026-02-02T12:37:08.579Z" }, + { url = "https://files.pythonhosted.org/packages/b3/16/6cdb31fa342932602458dbb631bfbd47f601e03d2e4950740e0b2100b570/jiter-0.13.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57aab48f40be1db920a582b30b116fe2435d184f77f0e4226f546794cedd9cf0", size = 487238, upload-time = "2026-02-02T12:37:10.066Z" }, + { url = "https://files.pythonhosted.org/packages/ed/b1/956cc7abaca8d95c13aa8d6c9b3f3797241c246cd6e792934cc4c8b250d2/jiter-0.13.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7772115877c53f62beeb8fd853cab692dbc04374ef623b30f997959a4c0e7e95", size = 372892, upload-time = "2026-02-02T12:37:11.656Z" }, + { url = "https://files.pythonhosted.org/packages/26/c4/97ecde8b1e74f67b8598c57c6fccf6df86ea7861ed29da84629cdbba76c4/jiter-0.13.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1211427574b17b633cfceba5040de8081e5abf114f7a7602f73d2e16f9fdaa59", size = 360309, upload-time = "2026-02-02T12:37:13.244Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d7/eabe3cf46715854ccc80be2cd78dd4c36aedeb30751dbf85a1d08c14373c/jiter-0.13.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7beae3a3d3b5212d3a55d2961db3c292e02e302feb43fce6a3f7a31b90ea6dfe", size = 389607, upload-time = "2026-02-02T12:37:14.881Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/2d/03963fc0804e6109b82decfb9974eb92df3797fe7222428cae12f8ccaa0c/jiter-0.13.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e5562a0f0e90a6223b704163ea28e831bd3a9faa3512a711f031611e6b06c939", size = 514986, upload-time = "2026-02-02T12:37:16.326Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/8c83b45eb3eb1c1e18d841fe30b4b5bc5619d781267ca9bc03e005d8fd0a/jiter-0.13.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:6c26a424569a59140fb51160a56df13f438a2b0967365e987889186d5fc2f6f9", size = 548756, upload-time = "2026-02-02T12:37:17.736Z" }, + { url = "https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" }, + { url = "https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" }, + { url = "https://files.pythonhosted.org/packages/79/b3/3c29819a27178d0e461a8571fb63c6ae38be6dc36b78b3ec2876bbd6a910/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b1cbfa133241d0e6bdab48dcdc2604e8ba81512f6bbd68ec3e8e1357dd3c316c", size = 307016, upload-time = "2026-02-02T12:37:42.755Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/ae/60993e4b07b1ac5ebe46da7aa99fdbb802eb986c38d26e3883ac0125c4e0/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:db367d8be9fad6e8ebbac4a7578b7af562e506211036cba2c06c3b998603c3d2", size = 305024, upload-time = "2026-02-02T12:37:44.774Z" }, + { url = "https://files.pythonhosted.org/packages/77/fa/2227e590e9cf98803db2811f172b2d6460a21539ab73006f251c66f44b14/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45f6f8efb2f3b0603092401dc2df79fa89ccbc027aaba4174d2d4133ed661434", size = 339337, upload-time = "2026-02-02T12:37:46.668Z" }, + { url = "https://files.pythonhosted.org/packages/2d/92/015173281f7eb96c0ef580c997da8ef50870d4f7f4c9e03c845a1d62ae04/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:597245258e6ad085d064780abfb23a284d418d3e61c57362d9449c6c7317ee2d", size = 346395, upload-time = "2026-02-02T12:37:48.09Z" }, + { url = "https://files.pythonhosted.org/packages/80/60/e50fa45dd7e2eae049f0ce964663849e897300433921198aef94b6ffa23a/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a", size = 305169, upload-time = "2026-02-02T12:37:50.376Z" }, + { url = "https://files.pythonhosted.org/packages/d2/73/a009f41c5eed71c49bec53036c4b33555afcdee70682a18c6f66e396c039/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f", size = 303808, upload-time = "2026-02-02T12:37:52.092Z" }, + { url = "https://files.pythonhosted.org/packages/c4/10/528b439290763bff3d939268085d03382471b442f212dca4ff5f12802d43/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59", size = 337384, 
upload-time = "2026-02-02T12:37:53.582Z" }, + { url = "https://files.pythonhosted.org/packages/67/8a/a342b2f0251f3dac4ca17618265d93bf244a2a4d089126e81e4c1056ac50/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb00b6d26db67a05fe3e12c76edc75f32077fb51deed13822dc648fa373bc19", size = 343768, upload-time = "2026-02-02T12:37:55.055Z" }, ] [[package]] @@ -3417,13 +3967,11 @@ wheels = [ [[package]] name = "kubernetes" -version = "33.1.0" +version = "35.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "durationpy" }, - { name = "google-auth" }, - { name = "oauthlib" }, { name = "python-dateutil" }, { name = "pyyaml" }, { name = "requests" }, @@ -3432,9 +3980,9 @@ dependencies = [ { name = "urllib3" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/52/19ebe8004c243fdfa78268a96727c71e08f00ff6fe69a301d0b7fcbce3c2/kubernetes-33.1.0.tar.gz", hash = "sha256:f64d829843a54c251061a8e7a14523b521f2dc5c896cf6d65ccf348648a88993", size = 1036779, upload-time = "2025-06-09T21:57:58.521Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/8f/85bf51ad4150f64e8c665daf0d9dfe9787ae92005efb9a4d1cba592bd79d/kubernetes-35.0.0.tar.gz", hash = "sha256:3d00d344944239821458b9efd484d6df9f011da367ecb155dadf9513f05f09ee", size = 1094642, upload-time = "2026-01-16T01:05:27.76Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/43/d9bebfc3db7dea6ec80df5cb2aad8d274dd18ec2edd6c4f21f32c237cbbb/kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5", size = 1941335, upload-time = "2025-06-09T21:57:56.327Z" }, + { url = "https://files.pythonhosted.org/packages/0c/70/05b685ea2dffcb2adbf3cdcea5d8865b7bc66f67249084cf845012a0ff13/kubernetes-35.0.0-py2.py3-none-any.whl", hash = "sha256:39e2b33b46e5834ef6c3985ebfe2047ab39135d41de51ce7641a7ca5b372a13d", size = 
2017602, upload-time = "2026-01-16T01:05:25.991Z" }, ] [[package]] @@ -3468,7 +4016,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.2.6" +version = "1.2.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jsonpatch" }, @@ -3480,9 +4028,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "uuid-utils" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/ce/ba5ed5ea6df22965b2893c2ed28ebb456204962723d408904c4acfa5e942/langchain_core-1.2.6.tar.gz", hash = "sha256:b4e7841dd7f8690375aa07c54739178dc2c635147d475e0c2955bf82a1afa498", size = 833343, upload-time = "2026-01-02T21:35:44.749Z" } +sdist = { url = "https://files.pythonhosted.org/packages/70/ea/8380184b287da43d3d2556475b985cf3e27569e9d8bbe33195600a98cabb/langchain_core-1.2.3.tar.gz", hash = "sha256:61f5197aa101cd5605879ef37f2b0ac56c079974d94d347849b8d4fe18949746", size = 803567, upload-time = "2025-12-18T20:13:10.574Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/40/0655892c245d8fbe6bca6d673ab5927e5c3ab7be143de40b52289a0663bc/langchain_core-1.2.6-py3-none-any.whl", hash = "sha256:aa6ed954b4b1f4504937fe75fdf674317027e9a91ba7a97558b0de3dc8004e34", size = 489096, upload-time = "2026-01-02T21:35:43.391Z" }, + { url = "https://files.pythonhosted.org/packages/f0/57/cfc1d12e273d33d16bab7ce9a135244e6f5677a92a5a99e69a61b22b7d93/langchain_core-1.2.3-py3-none-any.whl", hash = "sha256:c3501cf0219daf67a0ae23f6d6bdf3b41ab695efd8f0f3070a566e368b8c3dc7", size = 476384, upload-time = "2025-12-18T20:13:08.998Z" }, ] [[package]] @@ -3514,16 +4062,16 @@ wheels = [ [[package]] name = "langchain-openai" -version = "1.1.7" +version = "1.1.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "openai" }, { name = "tiktoken" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/b7/30bfc4d1b658a9ee524bcce3b0b2ec9c45a11c853a13c4f0c9da9882784b/langchain_openai-1.1.7.tar.gz", hash = 
"sha256:f5ec31961ed24777548b63a5fe313548bc6e0eb9730d6552b8c6418765254c81", size = 1039134, upload-time = "2026-01-07T19:44:59.728Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/67/228dc28b4498ea16422577013b5bb4ba35a1b99f8be975d6747c7a9f7e6a/langchain_openai-1.1.6.tar.gz", hash = "sha256:e306612654330ae36fb6bbe36db91c98534312afade19e140c3061fe4208dac8", size = 1038310, upload-time = "2025-12-18T17:58:52.84Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/a1/50e7596aca775d8c3883eceeaf47489fac26c57c1abe243c00174f715a8a/langchain_openai-1.1.7-py3-none-any.whl", hash = "sha256:34e9cd686aac1a120d6472804422792bf8080a2103b5d21ee450c9e42d053815", size = 84753, upload-time = "2026-01-07T19:44:58.629Z" }, + { url = "https://files.pythonhosted.org/packages/db/5b/1f6521df83c1a8e8d3f52351883b59683e179c0aa1bec75d0a77a394c9e7/langchain_openai-1.1.6-py3-none-any.whl", hash = "sha256:c42d04a67a85cee1d994afe400800d2b09ebf714721345f0b651eb06a02c3948", size = 84701, upload-time = "2025-12-18T17:58:51.527Z" }, ] [[package]] @@ -3540,7 +4088,7 @@ wheels = [ [[package]] name = "langgraph" -version = "1.0.5" +version = "1.0.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, @@ -3550,53 +4098,53 @@ dependencies = [ { name = "pydantic" }, { name = "xxhash" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/47/28f4d4d33d88f69de26f7a54065961ac0c662cec2479b36a2db081ef5cb6/langgraph-1.0.5.tar.gz", hash = "sha256:7f6ae59622386b60fe9fa0ad4c53f42016b668455ed604329e7dc7904adbf3f8", size = 493969, upload-time = "2025-12-12T23:05:48.224Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/49/e9551965d8a44dd9afdc55cbcdc5a9bd18bee6918cc2395b225d40adb77c/langgraph-1.0.8.tar.gz", hash = "sha256:2630fc578846995114fd659f8b14df9eff5a4e78c49413f67718725e88ceb544", size = 498708, upload-time = "2026-02-06T12:31:13.776Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/23/1b/e318ee76e42d28f515d87356ac5bd7a7acc8bad3b8f54ee377bef62e1cbf/langgraph-1.0.5-py3-none-any.whl", hash = "sha256:b4cfd173dca3c389735b47228ad8b295e6f7b3df779aba3a1e0c23871f81281e", size = 157056, upload-time = "2025-12-12T23:05:46.499Z" }, + { url = "https://files.pythonhosted.org/packages/9a/72/b0d7fc1007821a08dfc03ce232f39f209aa4aa46414ea3d125b24e35093a/langgraph-1.0.8-py3-none-any.whl", hash = "sha256:da737177c024caad7e5262642bece4f54edf4cba2c905a1d1338963f41cf0904", size = 158144, upload-time = "2026-02-06T12:31:12.489Z" }, ] [[package]] name = "langgraph-checkpoint" -version = "3.0.1" +version = "4.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "ormsgpack" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0f/07/2b1c042fa87d40cf2db5ca27dc4e8dd86f9a0436a10aa4361a8982718ae7/langgraph_checkpoint-3.0.1.tar.gz", hash = "sha256:59222f875f85186a22c494aedc65c4e985a3df27e696e5016ba0b98a5ed2cee0", size = 137785, upload-time = "2025-11-04T21:55:47.774Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/76/55a18c59dedf39688d72c4b06af73a5e3ea0d1a01bc867b88fbf0659f203/langgraph_checkpoint-4.0.0.tar.gz", hash = "sha256:814d1bd050fac029476558d8e68d87bce9009a0262d04a2c14b918255954a624", size = 137320, upload-time = "2026-01-12T20:30:26.38Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/e3/616e3a7ff737d98c1bbb5700dd62278914e2a9ded09a79a1fa93cf24ce12/langgraph_checkpoint-3.0.1-py3-none-any.whl", hash = "sha256:9b04a8d0edc0474ce4eaf30c5d731cee38f11ddff50a6177eead95b5c4e4220b", size = 46249, upload-time = "2025-11-04T21:55:46.472Z" }, + { url = "https://files.pythonhosted.org/packages/4a/de/ddd53b7032e623f3c7bcdab2b44e8bf635e468f62e10e5ff1946f62c9356/langgraph_checkpoint-4.0.0-py3-none-any.whl", hash = "sha256:3fa9b2635a7c5ac28b338f631abf6a030c3b508b7b9ce17c22611513b589c784", size = 46329, upload-time = "2026-01-12T20:30:25.2Z" }, 
] [[package]] name = "langgraph-prebuilt" -version = "1.0.5" +version = "1.0.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "langgraph-checkpoint" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/46/f9/54f8891b32159e4542236817aea2ee83de0de18bce28e9bdba08c7f93001/langgraph_prebuilt-1.0.5.tar.gz", hash = "sha256:85802675ad778cc7240fd02d47db1e0b59c0c86d8369447d77ce47623845db2d", size = 144453, upload-time = "2025-11-20T16:47:39.23Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/59/711aecd1a50999456850dc328f3cad72b4372d8218838d8d5326f80cb76f/langgraph_prebuilt-1.0.7.tar.gz", hash = "sha256:38e097e06de810de4d0e028ffc0e432bb56d1fb417620fb1dfdc76c5e03e4bf9", size = 163692, upload-time = "2026-01-22T16:45:22.801Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/5e/aeba4a5b39fe6e874e0dd003a82da71c7153e671312671a8dacc5cb7c1af/langgraph_prebuilt-1.0.5-py3-none-any.whl", hash = "sha256:22369563e1848862ace53fbc11b027c28dd04a9ac39314633bb95f2a7e258496", size = 35072, upload-time = "2025-11-20T16:47:38.187Z" }, + { url = "https://files.pythonhosted.org/packages/47/49/5e37abb3f38a17a3487634abc2a5da87c208cc1d14577eb8d7184b25c886/langgraph_prebuilt-1.0.7-py3-none-any.whl", hash = "sha256:e14923516504405bb5edc3977085bc9622c35476b50c1808544490e13871fe7c", size = 35324, upload-time = "2026-01-22T16:45:21.784Z" }, ] [[package]] name = "langgraph-sdk" -version = "0.3.1" +version = "0.3.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, { name = "orjson" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a9/d3/b6be0b0aba2a53a8920a2b0b4328a83121ec03eea9952e576d06a4182f6f/langgraph_sdk-0.3.1.tar.gz", hash = "sha256:f6dadfd2444eeff3e01405a9005c95fb3a028d4bd954ebec80ea6150084f92bb", size = 130312, upload-time = "2025-12-18T22:11:47.42Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/60/2b/2dae368ac76e315197f07ab58077aadf20833c226fbfd450d71745850314/langgraph_sdk-0.3.5.tar.gz", hash = "sha256:64669e9885a908578eed921ef9a8e52b8d0cd38db1e3e5d6d299d4e6f8830ac0", size = 177470, upload-time = "2026-02-10T16:56:09.18Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/fe/0c1c9c01a154eba62b20b02fabe811fd94a2b810061ae9e4d8462b8cf85a/langgraph_sdk-0.3.1-py3-none-any.whl", hash = "sha256:0b856923bfd20bf3441ce9d03bef488aa333fb610e972618799a9d584436acad", size = 66517, upload-time = "2025-12-18T22:11:46.625Z" }, + { url = "https://files.pythonhosted.org/packages/84/d5/a14d957c515ba7a9713bf0f03f2b9277979c403bc50f829bdfd54ae7dc9e/langgraph_sdk-0.3.5-py3-none-any.whl", hash = "sha256:bcfa1dcbddadb604076ce46f5e08969538735e5ac47fa863d4fac5a512dab5c9", size = 70851, upload-time = "2026-02-10T16:56:07.983Z" }, ] [[package]] name = "langsmith" -version = "0.6.2" +version = "0.7.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -3606,11 +4154,12 @@ dependencies = [ { name = "requests" }, { name = "requests-toolbelt" }, { name = "uuid-utils" }, + { name = "xxhash" }, { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/8e/3ea7a8e9ce8c530204964207af7f7778597f5a548dc1a489c0c0940561f3/langsmith-0.6.2.tar.gz", hash = "sha256:c2efd7ed61eed3b6fdbf158ea2e9862bc2636f2edc95e90d2faad9462773d097", size = 1739277, upload-time = "2026-01-08T23:17:40.504Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/bc/8172fefad4f2da888a6d564a27d1fb7d4dbf3c640899c2b40c46235cbe98/langsmith-0.7.3.tar.gz", hash = "sha256:0223b97021af62d2cf53c8a378a27bd22e90a7327e45b353e0069ae60d5d6f9e", size = 988575, upload-time = "2026-02-13T23:25:32.916Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/e0/9d173dd2fa7f85d9ec4989f6f5a1a057d281daa8dada0ff8db0de0cb68aa/langsmith-0.6.2-py3-none-any.whl", hash = 
"sha256:1ea1a591f52683a5aeebdaa2b58458d72ce9598105dd8b29e16f7373631a6434", size = 282918, upload-time = "2026-01-08T23:17:38.858Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9d/5a68b6b5e313ffabbb9725d18a71edb48177fd6d3ad329c07801d2a8e862/langsmith-0.7.3-py3-none-any.whl", hash = "sha256:03659bf9274e6efcead361c9c31a7849ea565ae0d6c0d73e1d8b239029eff3be", size = 325718, upload-time = "2026-02-13T23:25:31.52Z" }, ] [[package]] @@ -3666,6 +4215,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/82/3d/14ce75ef66813643812f3093ab17e46d3a206942ce7376d31ec2d36229e7/lark-1.3.1-py3-none-any.whl", hash = "sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12", size = 113151, upload-time = "2025-10-27T18:25:54.882Z" }, ] +[[package]] +name = "lazy-loader" +version = "0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6b/c875b30a1ba490860c93da4cabf479e03f584eba06fe5963f6f6644653d8/lazy_loader-0.4.tar.gz", hash = "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1", size = 15431, upload-time = "2024-04-05T13:03:12.261Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/60/d497a310bde3f01cb805196ac61b7ad6dc5dcf8dce66634dc34364b20b4f/lazy_loader-0.4-py3-none-any.whl", hash = "sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc", size = 12097, upload-time = "2024-04-05T13:03:10.514Z" }, +] + [[package]] name = "lcm" version = "1.5.2" @@ -3722,77 +4283,126 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/80/73/623eb9f29fe54ef2109cc9ed6d49dbdd1845c625463390c067fb2ea9c7a8/lcm-1.5.2-cp314-cp314-win_amd64.whl", hash = "sha256:6ba84f4e97f61ea55bb09e8201b0bd47380332118e7199674ec9f85cb1175de3", size = 4467113, upload-time = "2025-10-23T20:39:35.195Z" }, ] +[[package]] +name = "libcoal" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "cmeel" }, + { name = "cmeel-assimp" }, + { name = "cmeel-boost" }, + { name = "cmeel-octomap" }, + { name = "cmeel-qhull" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/51/cb68b16abd786e3ebb5e7e64036894a6f69ea8fe45c04a433e6d5462d60e/libcoal-3.0.2.tar.gz", hash = "sha256:1d48cfdce1157d4b89cf6a7215fc1b1e120d54c4a8d975cd9f45f2c8cedec275", size = 1464086, upload-time = "2025-10-15T22:53:34.811Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/ea/6aa65497d00ec494bf1c5e121b59ad8cf6da308e0cf01271a9d7c614752c/libcoal-3.0.2-0-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:16702fdd13942080c42c9565eb1b692618ce4456192a5bc497c585f9142138e5", size = 1683950, upload-time = "2025-10-15T22:53:28.361Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b0/3480197ba40cf9c6de71ca7f7a81a7504a40ca77a2b8604cbcc068f8f7ca/libcoal-3.0.2-0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:527c710c6215936f1a4b99ca1d01b4bb15c6b52980fa96cfa5f1fd1a7ef12393", size = 1484168, upload-time = "2025-10-15T22:53:29.933Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e5/5b9605496e48a0437152196e5f200433d3904e59c899cab799a3c27bcd4f/libcoal-3.0.2-0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:ed45722c07a3d23a346211f837856549dce11167928743eb6c73bcf17a369dd6", size = 2257523, upload-time = "2025-10-15T22:53:31.214Z" }, + { url = "https://files.pythonhosted.org/packages/3c/49/c3bec783144c226b5ef3728ed66d7fc2d08c553922a3892591958284801a/libcoal-3.0.2-0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:c4ca3fec02386e5c8ccc81030c44e74a546b06059886ce04bb6c16fe4628e9ba", size = 2285635, upload-time = "2025-10-15T22:53:33.142Z" }, +] + +[[package]] +name = "libpinocchio" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, + { name = "cmeel-boost" }, + { name = "cmeel-urdfdom" }, + { name = "libcoal" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/0b/9f/b4431f1acdce04300d798a87b98b064c1bb56061848abd9476c7b7e9dac2/libpinocchio-3.8.0.tar.gz", hash = "sha256:687442a8316d03cbe1a5c66e20499bf3fadb59439d6207e36118eef34f73d8c8", size = 4001141, upload-time = "2025-10-16T06:34:02.405Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/71/b17ca7f4c0cb0f216441222e22c3fb8d905ba038ec5ac7c120790340da95/libpinocchio-3.8.0-0-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:b8266d37482c35b5aa27240f3a0274447cd038aa219bdd6413c0bafcad822e2b", size = 4663536, upload-time = "2025-10-16T06:33:55.707Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a9/a4842e056d3f7d07c3f96f90c8f7fe7ef7e543c725f1c9498e5f4d58c47c/libpinocchio-3.8.0-0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:0320c471bd4e78226cc266ad7927432f884709104fa8a253e565adbed7da8aac", size = 3781718, upload-time = "2025-10-16T06:33:57.483Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f5/950cd3be129766d6f847cb0702f73ad5f6ed2d2b5775e073f9f017d923b4/libpinocchio-3.8.0-0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:b70bc23fb9f53d0a65929c92bac8c0df836bef064225a54d009214cdd778bdb7", size = 4582702, upload-time = "2025-10-16T06:33:58.887Z" }, + { url = "https://files.pythonhosted.org/packages/28/0d/5deebded1fa71a381c9efd3ea69103a38f64d804da704148e92f4886762d/libpinocchio-3.8.0-0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:b52ca3520635f551ab2c8c9bf5e8e555b54e92c4bb948020eb4e4dc1b3f9eb0b", size = 4803646, upload-time = "2025-10-16T06:34:00.662Z" }, +] + [[package]] name = "librt" -version = "0.7.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b7/29/47f29026ca17f35cf299290292d5f8331f5077364974b7675a353179afa2/librt-0.7.7.tar.gz", hash = "sha256:81d957b069fed1890953c3b9c3895c7689960f233eea9a1d9607f71ce7f00b2c", size = 145910, upload-time = "2026-01-01T23:52:22.87Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c6/84/2cfb1f3b9b60bab52e16a220c931223fc8e963d0d7bb9132bef012aafc3f/librt-0.7.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4836c5645f40fbdc275e5670819bde5ab5f2e882290d304e3c6ddab1576a6d0", size = 54709, upload-time = "2026-01-01T23:50:48.326Z" }, - { url = "https://files.pythonhosted.org/packages/19/a1/3127b277e9d3784a8040a54e8396d9ae5c64d6684dc6db4b4089b0eedcfb/librt-0.7.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae8aec43117a645a31e5f60e9e3a0797492e747823b9bda6972d521b436b4e8", size = 56658, upload-time = "2026-01-01T23:50:49.74Z" }, - { url = "https://files.pythonhosted.org/packages/3a/e9/b91b093a5c42eb218120445f3fef82e0b977fa2225f4d6fc133d25cdf86a/librt-0.7.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:aea05f701ccd2a76b34f0daf47ca5068176ff553510b614770c90d76ac88df06", size = 161026, upload-time = "2026-01-01T23:50:50.853Z" }, - { url = "https://files.pythonhosted.org/packages/c7/cb/1ded77d5976a79d7057af4a010d577ce4f473ff280984e68f4974a3281e5/librt-0.7.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b16ccaeff0ed4355dfb76fe1ea7a5d6d03b5ad27f295f77ee0557bc20a72495", size = 169529, upload-time = "2026-01-01T23:50:52.24Z" }, - { url = "https://files.pythonhosted.org/packages/da/6e/6ca5bdaa701e15f05000ac1a4c5d1475c422d3484bd3d1ca9e8c2f5be167/librt-0.7.7-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c48c7e150c095d5e3cea7452347ba26094be905d6099d24f9319a8b475fcd3e0", size = 183271, upload-time = "2026-01-01T23:50:55.287Z" }, - { url = "https://files.pythonhosted.org/packages/e7/2d/55c0e38073997b4bbb5ddff25b6d1bbba8c2f76f50afe5bb9c844b702f34/librt-0.7.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4dcee2f921a8632636d1c37f1bbdb8841d15666d119aa61e5399c5268e7ce02e", size = 179039, upload-time = "2026-01-01T23:50:56.807Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/4e/3662a41ae8bb81b226f3968426293517b271d34d4e9fd4b59fc511f1ae40/librt-0.7.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:14ef0f4ac3728ffd85bfc58e2f2f48fb4ef4fa871876f13a73a7381d10a9f77c", size = 173505, upload-time = "2026-01-01T23:50:58.291Z" }, - { url = "https://files.pythonhosted.org/packages/f8/5d/cf768deb8bdcbac5f8c21fcb32dd483d038d88c529fd351bbe50590b945d/librt-0.7.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e4ab69fa37f8090f2d971a5d2bc606c7401170dbdae083c393d6cbf439cb45b8", size = 193570, upload-time = "2026-01-01T23:50:59.546Z" }, - { url = "https://files.pythonhosted.org/packages/a1/ea/ee70effd13f1d651976d83a2812391f6203971740705e3c0900db75d4bce/librt-0.7.7-cp310-cp310-win32.whl", hash = "sha256:4bf3cc46d553693382d2abf5f5bd493d71bb0f50a7c0beab18aa13a5545c8900", size = 42600, upload-time = "2026-01-01T23:51:00.694Z" }, - { url = "https://files.pythonhosted.org/packages/f0/eb/dc098730f281cba76c279b71783f5de2edcba3b880c1ab84a093ef826062/librt-0.7.7-cp310-cp310-win_amd64.whl", hash = "sha256:f0c8fe5aeadd8a0e5b0598f8a6ee3533135ca50fd3f20f130f9d72baf5c6ac58", size = 48977, upload-time = "2026-01-01T23:51:01.726Z" }, - { url = "https://files.pythonhosted.org/packages/f0/56/30b5c342518005546df78841cb0820ae85a17e7d07d521c10ef367306d0d/librt-0.7.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a487b71fbf8a9edb72a8c7a456dda0184642d99cd007bc819c0b7ab93676a8ee", size = 54709, upload-time = "2026-01-01T23:51:02.774Z" }, - { url = "https://files.pythonhosted.org/packages/72/78/9f120e3920b22504d4f3835e28b55acc2cc47c9586d2e1b6ba04c3c1bf01/librt-0.7.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f4d4efb218264ecf0f8516196c9e2d1a0679d9fb3bb15df1155a35220062eba8", size = 56663, upload-time = "2026-01-01T23:51:03.838Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/ea/7d7a1ee7dfc1151836028eba25629afcf45b56bbc721293e41aa2e9b8934/librt-0.7.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b8bb331aad734b059c4b450cd0a225652f16889e286b2345af5e2c3c625c3d85", size = 161705, upload-time = "2026-01-01T23:51:04.917Z" }, - { url = "https://files.pythonhosted.org/packages/45/a5/952bc840ac8917fbcefd6bc5f51ad02b89721729814f3e2bfcc1337a76d6/librt-0.7.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:467dbd7443bda08338fc8ad701ed38cef48194017554f4c798b0a237904b3f99", size = 171029, upload-time = "2026-01-01T23:51:06.09Z" }, - { url = "https://files.pythonhosted.org/packages/fa/bf/c017ff7da82dc9192cf40d5e802a48a25d00e7639b6465cfdcee5893a22c/librt-0.7.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50d1d1ee813d2d1a3baf2873634ba506b263032418d16287c92ec1cc9c1a00cb", size = 184704, upload-time = "2026-01-01T23:51:07.549Z" }, - { url = "https://files.pythonhosted.org/packages/77/ec/72f3dd39d2cdfd6402ab10836dc9cbf854d145226062a185b419c4f1624a/librt-0.7.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c7e5070cf3ec92d98f57574da0224f8c73faf1ddd6d8afa0b8c9f6e86997bc74", size = 180719, upload-time = "2026-01-01T23:51:09.062Z" }, - { url = "https://files.pythonhosted.org/packages/78/86/06e7a1a81b246f3313bf515dd9613a1c81583e6fd7843a9f4d625c4e926d/librt-0.7.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bdb9f3d865b2dafe7f9ad7f30ef563c80d0ddd2fdc8cc9b8e4f242f475e34d75", size = 174537, upload-time = "2026-01-01T23:51:10.611Z" }, - { url = "https://files.pythonhosted.org/packages/83/08/f9fb2edc9c7a76e95b2924ce81d545673f5b034e8c5dd92159d1c7dae0c6/librt-0.7.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8185c8497d45164e256376f9da5aed2bb26ff636c798c9dabe313b90e9f25b28", size = 195238, upload-time = "2026-01-01T23:51:11.762Z" }, - { url = 
"https://files.pythonhosted.org/packages/ba/56/ea2d2489d3ea1f47b301120e03a099e22de7b32c93df9a211e6ff4f9bf38/librt-0.7.7-cp311-cp311-win32.whl", hash = "sha256:44d63ce643f34a903f09ff7ca355aae019a3730c7afd6a3c037d569beeb5d151", size = 42939, upload-time = "2026-01-01T23:51:13.192Z" }, - { url = "https://files.pythonhosted.org/packages/58/7b/c288f417e42ba2a037f1c0753219e277b33090ed4f72f292fb6fe175db4c/librt-0.7.7-cp311-cp311-win_amd64.whl", hash = "sha256:7d13cc340b3b82134f8038a2bfe7137093693dcad8ba5773da18f95ad6b77a8a", size = 49240, upload-time = "2026-01-01T23:51:14.264Z" }, - { url = "https://files.pythonhosted.org/packages/7c/24/738eb33a6c1516fdb2dfd2a35db6e5300f7616679b573585be0409bc6890/librt-0.7.7-cp311-cp311-win_arm64.whl", hash = "sha256:983de36b5a83fe9222f4f7dcd071f9b1ac6f3f17c0af0238dadfb8229588f890", size = 42613, upload-time = "2026-01-01T23:51:15.268Z" }, - { url = "https://files.pythonhosted.org/packages/56/72/1cd9d752070011641e8aee046c851912d5f196ecd726fffa7aed2070f3e0/librt-0.7.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a85a1fc4ed11ea0eb0a632459ce004a2d14afc085a50ae3463cd3dfe1ce43fc", size = 55687, upload-time = "2026-01-01T23:51:16.291Z" }, - { url = "https://files.pythonhosted.org/packages/50/aa/d5a1d4221c4fe7e76ae1459d24d6037783cb83c7645164c07d7daf1576ec/librt-0.7.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c87654e29a35938baead1c4559858f346f4a2a7588574a14d784f300ffba0efd", size = 57136, upload-time = "2026-01-01T23:51:17.363Z" }, - { url = "https://files.pythonhosted.org/packages/23/6f/0c86b5cb5e7ef63208c8cc22534df10ecc5278efc0d47fb8815577f3ca2f/librt-0.7.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c9faaebb1c6212c20afd8043cd6ed9de0a47d77f91a6b5b48f4e46ed470703fe", size = 165320, upload-time = "2026-01-01T23:51:18.455Z" }, - { url = 
"https://files.pythonhosted.org/packages/16/37/df4652690c29f645ffe405b58285a4109e9fe855c5bb56e817e3e75840b3/librt-0.7.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1908c3e5a5ef86b23391448b47759298f87f997c3bd153a770828f58c2bb4630", size = 174216, upload-time = "2026-01-01T23:51:19.599Z" }, - { url = "https://files.pythonhosted.org/packages/9a/d6/d3afe071910a43133ec9c0f3e4ce99ee6df0d4e44e4bddf4b9e1c6ed41cc/librt-0.7.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbc4900e95a98fc0729523be9d93a8fedebb026f32ed9ffc08acd82e3e181503", size = 189005, upload-time = "2026-01-01T23:51:21.052Z" }, - { url = "https://files.pythonhosted.org/packages/d5/18/74060a870fe2d9fd9f47824eba6717ce7ce03124a0d1e85498e0e7efc1b2/librt-0.7.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7ea4e1fbd253e5c68ea0fe63d08577f9d288a73f17d82f652ebc61fa48d878d", size = 183961, upload-time = "2026-01-01T23:51:22.493Z" }, - { url = "https://files.pythonhosted.org/packages/7c/5e/918a86c66304af66a3c1d46d54df1b2d0b8894babc42a14fb6f25511497f/librt-0.7.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ef7699b7a5a244b1119f85c5bbc13f152cd38240cbb2baa19b769433bae98e50", size = 177610, upload-time = "2026-01-01T23:51:23.874Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d7/b5e58dc2d570f162e99201b8c0151acf40a03a39c32ab824dd4febf12736/librt-0.7.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:955c62571de0b181d9e9e0a0303c8bc90d47670a5eff54cf71bf5da61d1899cf", size = 199272, upload-time = "2026-01-01T23:51:25.341Z" }, - { url = "https://files.pythonhosted.org/packages/18/87/8202c9bd0968bdddc188ec3811985f47f58ed161b3749299f2c0dd0f63fb/librt-0.7.7-cp312-cp312-win32.whl", hash = "sha256:1bcd79be209313b270b0e1a51c67ae1af28adad0e0c7e84c3ad4b5cb57aaa75b", size = 43189, upload-time = "2026-01-01T23:51:26.799Z" }, - { url = 
"https://files.pythonhosted.org/packages/61/8d/80244b267b585e7aa79ffdac19f66c4861effc3a24598e77909ecdd0850e/librt-0.7.7-cp312-cp312-win_amd64.whl", hash = "sha256:4353ee891a1834567e0302d4bd5e60f531912179578c36f3d0430f8c5e16b456", size = 49462, upload-time = "2026-01-01T23:51:27.813Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1f/75db802d6a4992d95e8a889682601af9b49d5a13bbfa246d414eede1b56c/librt-0.7.7-cp312-cp312-win_arm64.whl", hash = "sha256:a76f1d679beccccdf8c1958e732a1dfcd6e749f8821ee59d7bec009ac308c029", size = 42828, upload-time = "2026-01-01T23:51:28.804Z" }, - { url = "https://files.pythonhosted.org/packages/8d/5e/d979ccb0a81407ec47c14ea68fb217ff4315521730033e1dd9faa4f3e2c1/librt-0.7.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f4a0b0a3c86ba9193a8e23bb18f100d647bf192390ae195d84dfa0a10fb6244", size = 55746, upload-time = "2026-01-01T23:51:29.828Z" }, - { url = "https://files.pythonhosted.org/packages/f5/2c/3b65861fb32f802c3783d6ac66fc5589564d07452a47a8cf9980d531cad3/librt-0.7.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5335890fea9f9e6c4fdf8683061b9ccdcbe47c6dc03ab8e9b68c10acf78be78d", size = 57174, upload-time = "2026-01-01T23:51:31.226Z" }, - { url = "https://files.pythonhosted.org/packages/50/df/030b50614b29e443607220097ebaf438531ea218c7a9a3e21ea862a919cd/librt-0.7.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9b4346b1225be26def3ccc6c965751c74868f0578cbcba293c8ae9168483d811", size = 165834, upload-time = "2026-01-01T23:51:32.278Z" }, - { url = "https://files.pythonhosted.org/packages/5d/e1/bd8d1eacacb24be26a47f157719553bbd1b3fe812c30dddf121c0436fd0b/librt-0.7.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a10b8eebdaca6e9fdbaf88b5aefc0e324b763a5f40b1266532590d5afb268a4c", size = 174819, upload-time = "2026-01-01T23:51:33.461Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/7d/91d6c3372acf54a019c1ad8da4c9ecf4fc27d039708880bf95f48dbe426a/librt-0.7.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:067be973d90d9e319e6eb4ee2a9b9307f0ecd648b8a9002fa237289a4a07a9e7", size = 189607, upload-time = "2026-01-01T23:51:34.604Z" }, - { url = "https://files.pythonhosted.org/packages/fa/ac/44604d6d3886f791fbd1c6ae12d5a782a8f4aca927484731979f5e92c200/librt-0.7.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:23d2299ed007812cccc1ecef018db7d922733382561230de1f3954db28433977", size = 184586, upload-time = "2026-01-01T23:51:35.845Z" }, - { url = "https://files.pythonhosted.org/packages/5c/26/d8a6e4c17117b7f9b83301319d9a9de862ae56b133efb4bad8b3aa0808c9/librt-0.7.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6b6f8ea465524aa4c7420c7cc4ca7d46fe00981de8debc67b1cc2e9957bb5b9d", size = 178251, upload-time = "2026-01-01T23:51:37.018Z" }, - { url = "https://files.pythonhosted.org/packages/99/ab/98d857e254376f8e2f668e807daccc1f445e4b4fc2f6f9c1cc08866b0227/librt-0.7.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8df32a99cc46eb0ee90afd9ada113ae2cafe7e8d673686cf03ec53e49635439", size = 199853, upload-time = "2026-01-01T23:51:38.195Z" }, - { url = "https://files.pythonhosted.org/packages/7c/55/4523210d6ae5134a5da959900be43ad8bab2e4206687b6620befddb5b5fd/librt-0.7.7-cp313-cp313-win32.whl", hash = "sha256:86f86b3b785487c7760247bcdac0b11aa8bf13245a13ed05206286135877564b", size = 43247, upload-time = "2026-01-01T23:51:39.629Z" }, - { url = "https://files.pythonhosted.org/packages/25/40/3ec0fed5e8e9297b1cf1a3836fb589d3de55f9930e3aba988d379e8ef67c/librt-0.7.7-cp313-cp313-win_amd64.whl", hash = "sha256:4862cb2c702b1f905c0503b72d9d4daf65a7fdf5a9e84560e563471e57a56949", size = 49419, upload-time = "2026-01-01T23:51:40.674Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/7a/aab5f0fb122822e2acbc776addf8b9abfb4944a9056c00c393e46e543177/librt-0.7.7-cp313-cp313-win_arm64.whl", hash = "sha256:0996c83b1cb43c00e8c87835a284f9057bc647abd42b5871e5f941d30010c832", size = 42828, upload-time = "2026-01-01T23:51:41.731Z" }, - { url = "https://files.pythonhosted.org/packages/69/9c/228a5c1224bd23809a635490a162e9cbdc68d99f0eeb4a696f07886b8206/librt-0.7.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:23daa1ab0512bafdd677eb1bfc9611d8ffbe2e328895671e64cb34166bc1b8c8", size = 55188, upload-time = "2026-01-01T23:51:43.14Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c2/0e7c6067e2b32a156308205e5728f4ed6478c501947e9142f525afbc6bd2/librt-0.7.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:558a9e5a6f3cc1e20b3168fb1dc802d0d8fa40731f6e9932dcc52bbcfbd37111", size = 56895, upload-time = "2026-01-01T23:51:44.534Z" }, - { url = "https://files.pythonhosted.org/packages/0e/77/de50ff70c80855eb79d1d74035ef06f664dd073fb7fb9d9fb4429651b8eb/librt-0.7.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2567cb48dc03e5b246927ab35cbb343376e24501260a9b5e30b8e255dca0d1d2", size = 163724, upload-time = "2026-01-01T23:51:45.571Z" }, - { url = "https://files.pythonhosted.org/packages/6e/19/f8e4bf537899bdef9e0bb9f0e4b18912c2d0f858ad02091b6019864c9a6d/librt-0.7.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6066c638cdf85ff92fc6f932d2d73c93a0e03492cdfa8778e6d58c489a3d7259", size = 172470, upload-time = "2026-01-01T23:51:46.823Z" }, - { url = "https://files.pythonhosted.org/packages/42/4c/dcc575b69d99076768e8dd6141d9aecd4234cba7f0e09217937f52edb6ed/librt-0.7.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a609849aca463074c17de9cda173c276eb8fee9e441053529e7b9e249dc8b8ee", size = 186806, upload-time = "2026-01-01T23:51:48.009Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/f8/4094a2b7816c88de81239a83ede6e87f1138477d7ee956c30f136009eb29/librt-0.7.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:add4e0a000858fe9bb39ed55f31085506a5c38363e6eb4a1e5943a10c2bfc3d1", size = 181809, upload-time = "2026-01-01T23:51:49.35Z" }, - { url = "https://files.pythonhosted.org/packages/1b/ac/821b7c0ab1b5a6cd9aee7ace8309c91545a2607185101827f79122219a7e/librt-0.7.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a3bfe73a32bd0bdb9a87d586b05a23c0a1729205d79df66dee65bb2e40d671ba", size = 175597, upload-time = "2026-01-01T23:51:50.636Z" }, - { url = "https://files.pythonhosted.org/packages/71/f9/27f6bfbcc764805864c04211c6ed636fe1d58f57a7b68d1f4ae5ed74e0e0/librt-0.7.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:0ecce0544d3db91a40f8b57ae26928c02130a997b540f908cefd4d279d6c5848", size = 196506, upload-time = "2026-01-01T23:51:52.535Z" }, - { url = "https://files.pythonhosted.org/packages/46/ba/c9b9c6fc931dd7ea856c573174ccaf48714905b1a7499904db2552e3bbaf/librt-0.7.7-cp314-cp314-win32.whl", hash = "sha256:8f7a74cf3a80f0c3b0ec75b0c650b2f0a894a2cec57ef75f6f72c1e82cdac61d", size = 39747, upload-time = "2026-01-01T23:51:53.683Z" }, - { url = "https://files.pythonhosted.org/packages/c5/69/cd1269337c4cde3ee70176ee611ab0058aa42fc8ce5c9dce55f48facfcd8/librt-0.7.7-cp314-cp314-win_amd64.whl", hash = "sha256:3d1fe2e8df3268dd6734dba33ededae72ad5c3a859b9577bc00b715759c5aaab", size = 45971, upload-time = "2026-01-01T23:51:54.697Z" }, - { url = "https://files.pythonhosted.org/packages/79/fd/e0844794423f5583108c5991313c15e2b400995f44f6ec6871f8aaf8243c/librt-0.7.7-cp314-cp314-win_arm64.whl", hash = "sha256:2987cf827011907d3dfd109f1be0d61e173d68b1270107bb0e89f2fca7f2ed6b", size = 39075, upload-time = "2026-01-01T23:51:55.726Z" }, - { url = "https://files.pythonhosted.org/packages/42/02/211fd8f7c381e7b2a11d0fdfcd410f409e89967be2e705983f7c6342209a/librt-0.7.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:8e92c8de62b40bfce91d5e12c6e8b15434da268979b1af1a6589463549d491e6", size = 57368, upload-time = "2026-01-01T23:51:56.706Z" }, - { url = "https://files.pythonhosted.org/packages/4c/b6/aca257affae73ece26041ae76032153266d110453173f67d7603058e708c/librt-0.7.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f683dcd49e2494a7535e30f779aa1ad6e3732a019d80abe1309ea91ccd3230e3", size = 59238, upload-time = "2026-01-01T23:51:58.066Z" }, - { url = "https://files.pythonhosted.org/packages/96/47/7383a507d8e0c11c78ca34c9d36eab9000db5989d446a2f05dc40e76c64f/librt-0.7.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9b15e5d17812d4d629ff576699954f74e2cc24a02a4fc401882dd94f81daba45", size = 183870, upload-time = "2026-01-01T23:51:59.204Z" }, - { url = "https://files.pythonhosted.org/packages/a4/b8/50f3d8eec8efdaf79443963624175c92cec0ba84827a66b7fcfa78598e51/librt-0.7.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c084841b879c4d9b9fa34e5d5263994f21aea7fd9c6add29194dbb41a6210536", size = 194608, upload-time = "2026-01-01T23:52:00.419Z" }, - { url = "https://files.pythonhosted.org/packages/23/d9/1b6520793aadb59d891e3b98ee057a75de7f737e4a8b4b37fdbecb10d60f/librt-0.7.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10c8fb9966f84737115513fecbaf257f9553d067a7dd45a69c2c7e5339e6a8dc", size = 206776, upload-time = "2026-01-01T23:52:01.705Z" }, - { url = "https://files.pythonhosted.org/packages/ff/db/331edc3bba929d2756fa335bfcf736f36eff4efcb4f2600b545a35c2ae58/librt-0.7.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9b5fb1ecb2c35362eab2dbd354fd1efa5a8440d3e73a68be11921042a0edc0ff", size = 203206, upload-time = "2026-01-01T23:52:03.315Z" }, - { url = "https://files.pythonhosted.org/packages/b2/e1/6af79ec77204e85f6f2294fc171a30a91bb0e35d78493532ed680f5d98be/librt-0.7.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = 
"sha256:d1454899909d63cc9199a89fcc4f81bdd9004aef577d4ffc022e600c412d57f3", size = 196697, upload-time = "2026-01-01T23:52:04.857Z" }, - { url = "https://files.pythonhosted.org/packages/f3/46/de55ecce4b2796d6d243295c221082ca3a944dc2fb3a52dcc8660ce7727d/librt-0.7.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7ef28f2e7a016b29792fe0a2dd04dec75725b32a1264e390c366103f834a9c3a", size = 217193, upload-time = "2026-01-01T23:52:06.159Z" }, - { url = "https://files.pythonhosted.org/packages/41/61/33063e271949787a2f8dd33c5260357e3d512a114fc82ca7890b65a76e2d/librt-0.7.7-cp314-cp314t-win32.whl", hash = "sha256:5e419e0db70991b6ba037b70c1d5bbe92b20ddf82f31ad01d77a347ed9781398", size = 40277, upload-time = "2026-01-01T23:52:07.625Z" }, - { url = "https://files.pythonhosted.org/packages/06/21/1abd972349f83a696ea73159ac964e63e2d14086fdd9bc7ca878c25fced4/librt-0.7.7-cp314-cp314t-win_amd64.whl", hash = "sha256:d6b7d93657332c817b8d674ef6bf1ab7796b4f7ce05e420fd45bd258a72ac804", size = 46765, upload-time = "2026-01-01T23:52:08.647Z" }, - { url = "https://files.pythonhosted.org/packages/51/0e/b756c7708143a63fca65a51ca07990fa647db2cc8fcd65177b9e96680255/librt-0.7.7-cp314-cp314t-win_arm64.whl", hash = "sha256:142c2cd91794b79fd0ce113bd658993b7ede0fe93057668c2f98a45ca00b7e91", size = 39724, upload-time = "2026-01-01T23:52:09.745Z" }, +version = "0.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/3f/4ca7dd7819bf8ff303aca39c3c60e5320e46e766ab7f7dd627d3b9c11bdf/librt-0.8.0.tar.gz", hash = "sha256:cb74cdcbc0103fc988e04e5c58b0b31e8e5dd2babb9182b6f9490488eb36324b", size = 177306, upload-time = "2026-02-12T14:53:54.743Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/e9/018cfd60629e0404e6917943789800aa2231defbea540a17b90cc4547b97/librt-0.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db63cf3586a24241e89ca1ce0b56baaec9d371a328bd186c529b27c914c9a1ef", size = 65690, upload-time = 
"2026-02-12T14:51:57.761Z" }, + { url = "https://files.pythonhosted.org/packages/b5/80/8d39980860e4d1c9497ee50e5cd7c4766d8cfd90d105578eae418e8ffcbc/librt-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ba9d9e60651615bc614be5e21a82cdb7b1769a029369cf4b4d861e4f19686fb6", size = 68373, upload-time = "2026-02-12T14:51:59.013Z" }, + { url = "https://files.pythonhosted.org/packages/2d/76/6e6f7a443af63977e421bd542551fec4072d9eaba02e671b05b238fe73bc/librt-0.8.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb4b3ad543084ed79f186741470b251b9d269cd8b03556f15a8d1a99a64b7de5", size = 197091, upload-time = "2026-02-12T14:52:00.642Z" }, + { url = "https://files.pythonhosted.org/packages/14/40/fa064181c231334c9f4cb69eb338132d39510c8928e84beba34b861d0a71/librt-0.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d2720335020219197380ccfa5c895f079ac364b4c429e96952cd6509934d8eb", size = 207350, upload-time = "2026-02-12T14:52:02.32Z" }, + { url = "https://files.pythonhosted.org/packages/50/49/e7f8438dd226305e3e5955d495114ad01448e6a6ffc0303289b4153b5fc5/librt-0.8.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9726305d3e53419d27fc8cdfcd3f9571f0ceae22fa6b5ea1b3662c2e538f833e", size = 219962, upload-time = "2026-02-12T14:52:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/1f/2c/74086fc5d52e77107a3cc80a9a3209be6ad1c9b6bc99969d8d9bbf9fdfe4/librt-0.8.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cc3d107f603b5ee7a79b6aa6f166551b99b32fb4a5303c4dfcb4222fc6a0335e", size = 212939, upload-time = "2026-02-12T14:52:05.537Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ae/d6917c0ebec9bc2e0293903d6a5ccc7cdb64c228e529e96520b277318f25/librt-0.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:41064a0c07b4cc7a81355ccc305cb097d6027002209ffca51306e65ee8293630", size = 221393, 
upload-time = "2026-02-12T14:52:07.164Z" }, + { url = "https://files.pythonhosted.org/packages/04/97/15df8270f524ce09ad5c19cbbe0e8f95067582507149a6c90594e7795370/librt-0.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c6e4c10761ddbc0d67d2f6e2753daf99908db85d8b901729bf2bf5eaa60e0567", size = 216721, upload-time = "2026-02-12T14:52:08.857Z" }, + { url = "https://files.pythonhosted.org/packages/c4/52/17cbcf9b7a1bae5016d9d3561bc7169b32c3bd216c47d934d3f270602c0c/librt-0.8.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:ba581acad5ac8f33e2ff1746e8a57e001b47c6721873121bf8bbcf7ba8bd3aa4", size = 214790, upload-time = "2026-02-12T14:52:10.033Z" }, + { url = "https://files.pythonhosted.org/packages/2a/2d/010a236e8dc4d717dd545c46fd036dcced2c7ede71ef85cf55325809ff92/librt-0.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bdab762e2c0b48bab76f1a08acb3f4c77afd2123bedac59446aeaaeed3d086cf", size = 237384, upload-time = "2026-02-12T14:52:11.244Z" }, + { url = "https://files.pythonhosted.org/packages/38/14/f1c0eff3df8760dee761029efb72991c554d9f3282f1048e8c3d0eb60997/librt-0.8.0-cp310-cp310-win32.whl", hash = "sha256:6a3146c63220d814c4a2c7d6a1eacc8d5c14aed0ff85115c1dfea868080cd18f", size = 54289, upload-time = "2026-02-12T14:52:12.798Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0b/2684d473e64890882729f91866ed97ccc0a751a0afc3b4bf1a7b57094dbb/librt-0.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:bbebd2bba5c6ae02907df49150e55870fdd7440d727b6192c46b6f754723dde9", size = 61347, upload-time = "2026-02-12T14:52:13.793Z" }, + { url = "https://files.pythonhosted.org/packages/51/e9/42af181c89b65abfd557c1b017cba5b82098eef7bf26d1649d82ce93ccc7/librt-0.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ce33a9778e294507f3a0e3468eccb6a698b5166df7db85661543eca1cfc5369", size = 65314, upload-time = "2026-02-12T14:52:14.778Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/4a/15a847fca119dc0334a4b8012b1e15fdc5fc19d505b71e227eaf1bcdba09/librt-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8070aa3368559de81061ef752770d03ca1f5fc9467d4d512d405bd0483bfffe6", size = 68015, upload-time = "2026-02-12T14:52:15.797Z" }, + { url = "https://files.pythonhosted.org/packages/e1/87/ffc8dbd6ab68dd91b736c88529411a6729649d2b74b887f91f3aaff8d992/librt-0.8.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:20f73d4fecba969efc15cdefd030e382502d56bb6f1fc66b580cce582836c9fa", size = 194508, upload-time = "2026-02-12T14:52:16.835Z" }, + { url = "https://files.pythonhosted.org/packages/89/92/a7355cea28d6c48ff6ff5083ac4a2a866fb9b07b786aa70d1f1116680cd5/librt-0.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a512c88900bdb1d448882f5623a0b1ad27ba81a9bd75dacfe17080b72272ca1f", size = 205630, upload-time = "2026-02-12T14:52:18.58Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5e/54509038d7ac527828db95b8ba1c8f5d2649bc32fd8f39b1718ec9957dce/librt-0.8.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:015e2dde6e096d27c10238bf9f6492ba6c65822dfb69d2bf74c41a8e88b7ddef", size = 218289, upload-time = "2026-02-12T14:52:20.134Z" }, + { url = "https://files.pythonhosted.org/packages/6d/17/0ee0d13685cefee6d6f2d47bb643ddad3c62387e2882139794e6a5f1288a/librt-0.8.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1c25a131013eadd3c600686a0c0333eb2896483cbc7f65baa6a7ee761017aef9", size = 211508, upload-time = "2026-02-12T14:52:21.413Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a8/1714ef6e9325582e3727de3be27e4c1b2f428ea411d09f1396374180f130/librt-0.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:21b14464bee0b604d80a638cf1ee3148d84ca4cc163dcdcecb46060c1b3605e4", size = 219129, upload-time = "2026-02-12T14:52:22.61Z" }, + { 
url = "https://files.pythonhosted.org/packages/89/d3/2d9fe353edff91cdc0ece179348054a6fa61f3de992c44b9477cb973509b/librt-0.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:05a3dd3f116747f7e1a2b475ccdc6fb637fd4987126d109e03013a79d40bf9e6", size = 213126, upload-time = "2026-02-12T14:52:23.819Z" }, + { url = "https://files.pythonhosted.org/packages/ad/8e/9f5c60444880f6ad50e3ff7475e5529e787797e7f3ad5432241633733b92/librt-0.8.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:fa37f99bff354ff191c6bcdffbc9d7cdd4fc37faccfc9be0ef3a4fd5613977da", size = 212279, upload-time = "2026-02-12T14:52:25.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/eb/d4a2cfa647da3022ae977f50d7eda1d91f70d7d1883cf958a4b6ef689eab/librt-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1566dbb9d1eb0987264c9b9460d212e809ba908d2f4a3999383a84d765f2f3f1", size = 234654, upload-time = "2026-02-12T14:52:26.204Z" }, + { url = "https://files.pythonhosted.org/packages/6a/31/26b978861c7983b036a3aea08bdbb2ec32bbaab1ad1d57c5e022be59afc1/librt-0.8.0-cp311-cp311-win32.whl", hash = "sha256:70defb797c4d5402166787a6b3c66dfb3fa7f93d118c0509ffafa35a392f4258", size = 54603, upload-time = "2026-02-12T14:52:27.342Z" }, + { url = "https://files.pythonhosted.org/packages/d0/78/f194ed7c48dacf875677e749c5d0d1d69a9daa7c994314a39466237fb1be/librt-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:db953b675079884ffda33d1dca7189fb961b6d372153750beb81880384300817", size = 61730, upload-time = "2026-02-12T14:52:28.31Z" }, + { url = "https://files.pythonhosted.org/packages/97/ee/ad71095478d02137b6f49469dc808c595cfe89b50985f6b39c5345f0faab/librt-0.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:75d1a8cab20b2043f03f7aab730551e9e440adc034d776f15f6f8d582b0a5ad4", size = 52274, upload-time = "2026-02-12T14:52:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fb/53/f3bc0c4921adb0d4a5afa0656f2c0fbe20e18e3e0295e12985b9a5dc3f55/librt-0.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:17269dd2745dbe8e42475acb28e419ad92dfa38214224b1b01020b8cac70b645", size = 66511, upload-time = "2026-02-12T14:52:30.34Z" }, + { url = "https://files.pythonhosted.org/packages/89/4b/4c96357432007c25a1b5e363045373a6c39481e49f6ba05234bb59a839c1/librt-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f4617cef654fca552f00ce5ffdf4f4b68770f18950e4246ce94629b789b92467", size = 68628, upload-time = "2026-02-12T14:52:31.491Z" }, + { url = "https://files.pythonhosted.org/packages/47/16/52d75374d1012e8fc709216b5eaa25f471370e2a2331b8be00f18670a6c7/librt-0.8.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5cb11061a736a9db45e3c1293cfcb1e3caf205912dfa085734ba750f2197ff9a", size = 198941, upload-time = "2026-02-12T14:52:32.489Z" }, + { url = "https://files.pythonhosted.org/packages/fc/11/d5dd89e5a2228567b1228d8602d896736247424484db086eea6b8010bcba/librt-0.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4bb00bd71b448f16749909b08a0ff16f58b079e2261c2e1000f2bbb2a4f0a45", size = 210009, upload-time = "2026-02-12T14:52:33.634Z" }, + { url = "https://files.pythonhosted.org/packages/49/d8/fc1a92a77c3020ee08ce2dc48aed4b42ab7c30fb43ce488d388673b0f164/librt-0.8.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95a719a049f0eefaf1952673223cf00d442952273cbd20cf2ed7ec423a0ef58d", size = 224461, upload-time = "2026-02-12T14:52:34.868Z" }, + { url = "https://files.pythonhosted.org/packages/7f/98/eb923e8b028cece924c246104aa800cf72e02d023a8ad4ca87135b05a2fe/librt-0.8.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bd32add59b58fba3439d48d6f36ac695830388e3da3e92e4fc26d2d02670d19c", size = 217538, upload-time = "2026-02-12T14:52:36.078Z" }, + { url = "https://files.pythonhosted.org/packages/fd/67/24e80ab170674a1d8ee9f9a83081dca4635519dbd0473b8321deecddb5be/librt-0.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", 
hash = "sha256:4f764b2424cb04524ff7a486b9c391e93f93dc1bd8305b2136d25e582e99aa2f", size = 225110, upload-time = "2026-02-12T14:52:37.301Z" }, + { url = "https://files.pythonhosted.org/packages/d8/c7/6fbdcbd1a6e5243c7989c21d68ab967c153b391351174b4729e359d9977f/librt-0.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f04ca50e847abc486fa8f4107250566441e693779a5374ba211e96e238f298b9", size = 217758, upload-time = "2026-02-12T14:52:38.89Z" }, + { url = "https://files.pythonhosted.org/packages/4b/bd/4d6b36669db086e3d747434430073e14def032dd58ad97959bf7e2d06c67/librt-0.8.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9ab3a3475a55b89b87ffd7e6665838e8458e0b596c22e0177e0f961434ec474a", size = 218384, upload-time = "2026-02-12T14:52:40.637Z" }, + { url = "https://files.pythonhosted.org/packages/50/2d/afe966beb0a8f179b132f3e95c8dd90738a23e9ebdba10f89a3f192f9366/librt-0.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e36a8da17134ffc29373775d88c04832f9ecfab1880470661813e6c7991ef79", size = 241187, upload-time = "2026-02-12T14:52:43.55Z" }, + { url = "https://files.pythonhosted.org/packages/02/d0/6172ea4af2b538462785ab1a68e52d5c99cfb9866a7caf00fdf388299734/librt-0.8.0-cp312-cp312-win32.whl", hash = "sha256:4eb5e06ebcc668677ed6389164f52f13f71737fc8be471101fa8b4ce77baeb0c", size = 54914, upload-time = "2026-02-12T14:52:44.676Z" }, + { url = "https://files.pythonhosted.org/packages/d4/cb/ceb6ed6175612a4337ad49fb01ef594712b934b4bc88ce8a63554832eb44/librt-0.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a33335eb59921e77c9acc05d0e654e4e32e45b014a4d61517897c11591094f8", size = 62020, upload-time = "2026-02-12T14:52:45.676Z" }, + { url = "https://files.pythonhosted.org/packages/f1/7e/61701acbc67da74ce06ddc7ba9483e81c70f44236b2d00f6a4bfee1aacbf/librt-0.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:24a01c13a2a9bdad20997a4443ebe6e329df063d1978bbe2ebbf637878a46d1e", size = 52443, upload-time = "2026-02-12T14:52:47.218Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/32/3edb0bcb4113a9c8bdcd1750663a54565d255027657a5df9d90f13ee07fa/librt-0.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7f820210e21e3a8bf8fde2ae3c3d10106d4de9ead28cbfdf6d0f0f41f5b12fa1", size = 66522, upload-time = "2026-02-12T14:52:48.219Z" }, + { url = "https://files.pythonhosted.org/packages/30/ab/e8c3d05e281f5d405ebdcc5bc8ab36df23e1a4b40ac9da8c3eb9928b72b9/librt-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4831c44b8919e75ca0dfb52052897c1ef59fdae19d3589893fbd068f1e41afbf", size = 68658, upload-time = "2026-02-12T14:52:50.351Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d3/74a206c47b7748bbc8c43942de3ed67de4c231156e148b4f9250869593df/librt-0.8.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:88c6e75540f1f10f5e0fc5e87b4b6c290f0e90d1db8c6734f670840494764af8", size = 199287, upload-time = "2026-02-12T14:52:51.938Z" }, + { url = "https://files.pythonhosted.org/packages/fa/29/ef98a9131cf12cb95771d24e4c411fda96c89dc78b09c2de4704877ebee4/librt-0.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9646178cd794704d722306c2c920c221abbf080fede3ba539d5afdec16c46dad", size = 210293, upload-time = "2026-02-12T14:52:53.128Z" }, + { url = "https://files.pythonhosted.org/packages/5b/3e/89b4968cb08c53d4c2d8b02517081dfe4b9e07a959ec143d333d76899f6c/librt-0.8.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e1af31a710e17891d9adf0dbd9a5fcd94901a3922a96499abdbf7ce658f4e01", size = 224801, upload-time = "2026-02-12T14:52:54.367Z" }, + { url = "https://files.pythonhosted.org/packages/6d/28/f38526d501f9513f8b48d78e6be4a241e15dd4b000056dc8b3f06ee9ce5d/librt-0.8.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:507e94f4bec00b2f590fbe55f48cd518a208e2474a3b90a60aa8f29136ddbada", size = 218090, upload-time = "2026-02-12T14:52:55.758Z" }, + { 
url = "https://files.pythonhosted.org/packages/02/ec/64e29887c5009c24dc9c397116c680caffc50286f62bd99c39e3875a2854/librt-0.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f1178e0de0c271231a660fbef9be6acdfa1d596803464706862bef6644cc1cae", size = 225483, upload-time = "2026-02-12T14:52:57.375Z" }, + { url = "https://files.pythonhosted.org/packages/ee/16/7850bdbc9f1a32d3feff2708d90c56fc0490b13f1012e438532781aa598c/librt-0.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:71fc517efc14f75c2f74b1f0a5d5eb4a8e06aa135c34d18eaf3522f4a53cd62d", size = 218226, upload-time = "2026-02-12T14:52:58.534Z" }, + { url = "https://files.pythonhosted.org/packages/1c/4a/166bffc992d65ddefa7c47052010a87c059b44a458ebaf8f5eba384b0533/librt-0.8.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:0583aef7e9a720dd40f26a2ad5a1bf2ccbb90059dac2b32ac516df232c701db3", size = 218755, upload-time = "2026-02-12T14:52:59.701Z" }, + { url = "https://files.pythonhosted.org/packages/da/5d/9aeee038bcc72a9cfaaee934463fe9280a73c5440d36bd3175069d2cb97b/librt-0.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5d0f76fc73480d42285c609c0ea74d79856c160fa828ff9aceab574ea4ecfd7b", size = 241617, upload-time = "2026-02-12T14:53:00.966Z" }, + { url = "https://files.pythonhosted.org/packages/64/ff/2bec6b0296b9d0402aa6ec8540aa19ebcb875d669c37800cb43d10d9c3a3/librt-0.8.0-cp313-cp313-win32.whl", hash = "sha256:e79dbc8f57de360f0ed987dc7de7be814b4803ef0e8fc6d3ff86e16798c99935", size = 54966, upload-time = "2026-02-12T14:53:02.042Z" }, + { url = "https://files.pythonhosted.org/packages/08/8d/bf44633b0182996b2c7ea69a03a5c529683fa1f6b8e45c03fe874ff40d56/librt-0.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:25b3e667cbfc9000c4740b282df599ebd91dbdcc1aa6785050e4c1d6be5329ab", size = 62000, upload-time = "2026-02-12T14:53:03.822Z" }, + { url = "https://files.pythonhosted.org/packages/5c/fd/c6472b8e0eac0925001f75e366cf5500bcb975357a65ef1f6b5749389d3a/librt-0.8.0-cp313-cp313-win_arm64.whl", hash = 
"sha256:e9a3a38eb4134ad33122a6d575e6324831f930a771d951a15ce232e0237412c2", size = 52496, upload-time = "2026-02-12T14:53:04.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/13/79ebfe30cd273d7c0ce37a5f14dc489c5fb8b722a008983db2cfd57270bb/librt-0.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:421765e8c6b18e64d21c8ead315708a56fc24f44075059702e421d164575fdda", size = 66078, upload-time = "2026-02-12T14:53:06.085Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8f/d11eca40b62a8d5e759239a80636386ef88adecb10d1a050b38cc0da9f9e/librt-0.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:48f84830a8f8ad7918afd743fd7c4eb558728bceab7b0e38fd5a5cf78206a556", size = 68309, upload-time = "2026-02-12T14:53:07.121Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b4/f12ee70a3596db40ff3c88ec9eaa4e323f3b92f77505b4d900746706ec6a/librt-0.8.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9f09d4884f882baa39a7e36bbf3eae124c4ca2a223efb91e567381d1c55c6b06", size = 196804, upload-time = "2026-02-12T14:53:08.164Z" }, + { url = "https://files.pythonhosted.org/packages/8b/7e/70dbbdc0271fd626abe1671ad117bcd61a9a88cdc6a10ccfbfc703db1873/librt-0.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:693697133c3b32aa9b27f040e3691be210e9ac4d905061859a9ed519b1d5a376", size = 206915, upload-time = "2026-02-12T14:53:09.333Z" }, + { url = "https://files.pythonhosted.org/packages/79/13/6b9e05a635d4327608d06b3c1702166e3b3e78315846373446cf90d7b0bf/librt-0.8.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5512aae4648152abaf4d48b59890503fcbe86e85abc12fb9b096fe948bdd816", size = 221200, upload-time = "2026-02-12T14:53:10.68Z" }, + { url = "https://files.pythonhosted.org/packages/35/6c/e19a3ac53e9414de43a73d7507d2d766cd22d8ca763d29a4e072d628db42/librt-0.8.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash 
= "sha256:995d24caa6bbb34bcdd4a41df98ac6d1af637cfa8975cb0790e47d6623e70e3e", size = 214640, upload-time = "2026-02-12T14:53:12.342Z" }, + { url = "https://files.pythonhosted.org/packages/30/f0/23a78464788619e8c70f090cfd099cce4973eed142c4dccb99fc322283fd/librt-0.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b9aef96d7593584e31ef6ac1eb9775355b0099fee7651fae3a15bc8657b67b52", size = 221980, upload-time = "2026-02-12T14:53:13.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/32/38e21420c5d7aa8a8bd2c7a7d5252ab174a5a8aaec8b5551968979b747bf/librt-0.8.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:4f6e975377fbc4c9567cb33ea9ab826031b6c7ec0515bfae66a4fb110d40d6da", size = 215146, upload-time = "2026-02-12T14:53:14.8Z" }, + { url = "https://files.pythonhosted.org/packages/bb/00/bd9ecf38b1824c25240b3ad982fb62c80f0a969e6679091ba2b3afb2b510/librt-0.8.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:daae5e955764be8fd70a93e9e5133c75297f8bce1e802e1d3683b98f77e1c5ab", size = 215203, upload-time = "2026-02-12T14:53:16.087Z" }, + { url = "https://files.pythonhosted.org/packages/b9/60/7559bcc5279d37810b98d4a52616febd7b8eef04391714fd6bdf629598b1/librt-0.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7bd68cebf3131bb920d5984f75fe302d758db33264e44b45ad139385662d7bc3", size = 237937, upload-time = "2026-02-12T14:53:17.236Z" }, + { url = "https://files.pythonhosted.org/packages/41/cc/be3e7da88f1abbe2642672af1dc00a0bccece11ca60241b1883f3018d8d5/librt-0.8.0-cp314-cp314-win32.whl", hash = "sha256:1e6811cac1dcb27ca4c74e0ca4a5917a8e06db0d8408d30daee3a41724bfde7a", size = 50685, upload-time = "2026-02-12T14:53:18.888Z" }, + { url = "https://files.pythonhosted.org/packages/38/27/e381d0df182a8f61ef1f6025d8b138b3318cc9d18ad4d5f47c3bf7492523/librt-0.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:178707cda89d910c3b28bf5aa5f69d3d4734e0f6ae102f753ad79edef83a83c7", size = 57872, upload-time = "2026-02-12T14:53:19.942Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/0c/ca9dfdf00554a44dea7d555001248269a4bab569e1590a91391feb863fa4/librt-0.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:3e8b77b5f54d0937b26512774916041756c9eb3e66f1031971e626eea49d0bf4", size = 48056, upload-time = "2026-02-12T14:53:21.473Z" }, + { url = "https://files.pythonhosted.org/packages/f2/ed/6cc9c4ad24f90c8e782193c7b4a857408fd49540800613d1356c63567d7b/librt-0.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:789911e8fa40a2e82f41120c936b1965f3213c67f5a483fc5a41f5839a05dcbb", size = 68307, upload-time = "2026-02-12T14:53:22.498Z" }, + { url = "https://files.pythonhosted.org/packages/84/d8/0e94292c6b3e00b6eeea39dd44d5703d1ec29b6dafce7eea19dc8f1aedbd/librt-0.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2b37437e7e4ef5e15a297b36ba9e577f73e29564131d86dd75875705e97402b5", size = 70999, upload-time = "2026-02-12T14:53:23.603Z" }, + { url = "https://files.pythonhosted.org/packages/0e/f4/6be1afcbdeedbdbbf54a7c9d73ad43e1bf36897cebf3978308cd64922e02/librt-0.8.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:671a6152edf3b924d98a5ed5e6982ec9cb30894085482acadce0975f031d4c5c", size = 220782, upload-time = "2026-02-12T14:53:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f0/8d/f306e8caa93cfaf5c6c9e0d940908d75dc6af4fd856baa5535c922ee02b1/librt-0.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8992ca186a1678107b0af3d0c9303d8c7305981b9914989b9788319ed4d89546", size = 235420, upload-time = "2026-02-12T14:53:27.047Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f2/65d86bd462e9c351326564ca805e8457442149f348496e25ccd94583ffa2/librt-0.8.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:001e5330093d887b8b9165823eca6c5c4db183fe4edea4fdc0680bbac5f46944", size = 246452, upload-time = "2026-02-12T14:53:28.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/94/39c88b503b4cb3fcbdeb3caa29672b6b44ebee8dcc8a54d49839ac280f3f/librt-0.8.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d920789eca7ef71df7f31fd547ec0d3002e04d77f30ba6881e08a630e7b2c30e", size = 238891, upload-time = "2026-02-12T14:53:29.625Z" }, + { url = "https://files.pythonhosted.org/packages/e3/c6/6c0d68190893d01b71b9569b07a1c811e280c0065a791249921c83dc0290/librt-0.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:82fb4602d1b3e303a58bfe6165992b5a78d823ec646445356c332cd5f5bbaa61", size = 250249, upload-time = "2026-02-12T14:53:30.93Z" }, + { url = "https://files.pythonhosted.org/packages/52/7a/f715ed9e039035d0ea637579c3c0155ab3709a7046bc408c0fb05d337121/librt-0.8.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:4d3e38797eb482485b486898f89415a6ab163bc291476bd95712e42cf4383c05", size = 240642, upload-time = "2026-02-12T14:53:32.174Z" }, + { url = "https://files.pythonhosted.org/packages/c2/3c/609000a333debf5992efe087edc6467c1fdbdddca5b610355569bbea9589/librt-0.8.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a905091a13e0884701226860836d0386b88c72ce5c2fdfba6618e14c72be9f25", size = 239621, upload-time = "2026-02-12T14:53:33.39Z" }, + { url = "https://files.pythonhosted.org/packages/b9/df/87b0673d5c395a8f34f38569c116c93142d4dc7e04af2510620772d6bd4f/librt-0.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:375eda7acfce1f15f5ed56cfc960669eefa1ec8732e3e9087c3c4c3f2066759c", size = 262986, upload-time = "2026-02-12T14:53:34.617Z" }, + { url = "https://files.pythonhosted.org/packages/09/7f/6bbbe9dcda649684773aaea78b87fff4d7e59550fbc2877faa83612087a3/librt-0.8.0-cp314-cp314t-win32.whl", hash = "sha256:2ccdd20d9a72c562ffb73098ac411de351b53a6fbb3390903b2d33078ef90447", size = 51328, upload-time = "2026-02-12T14:53:36.15Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/f3/e1981ab6fa9b41be0396648b5850267888a752d025313a9e929c4856208e/librt-0.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:25e82d920d4d62ad741592fcf8d0f3bda0e3fc388a184cb7d2f566c681c5f7b9", size = 58719, upload-time = "2026-02-12T14:53:37.183Z" }, + { url = "https://files.pythonhosted.org/packages/94/d1/433b3c06e78f23486fe4fdd19bc134657eb30997d2054b0dbf52bbf3382e/librt-0.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:92249938ab744a5890580d3cb2b22042f0dce71cdaa7c1369823df62bedf7cbc", size = 48753, upload-time = "2026-02-12T14:53:38.539Z" }, ] [[package]] @@ -3853,6 +4463,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/6aa79ba3570bddd1bf7e951c6123f806751e58e8cce736bad77b2cf348d7/logistro-2.0.1-py3-none-any.whl", hash = "sha256:06ffa127b9fb4ac8b1972ae6b2a9d7fde57598bf5939cd708f43ec5bba2d31eb", size = 8555, upload-time = "2025-11-01T02:41:17.587Z" }, ] +[[package]] +name = "lsprotocol" +version = "2025.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "cattrs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/26/67b84e6ec1402f0e6764ef3d2a0aaf9a79522cc1d37738f4e5bb0b21521a/lsprotocol-2025.0.0.tar.gz", hash = "sha256:e879da2b9301e82cfc3e60d805630487ac2f7ab17492f4f5ba5aaba94fe56c29", size = 74896, upload-time = "2025-06-17T21:30:18.156Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/f0/92f2d609d6642b5f30cb50a885d2bf1483301c69d5786286500d15651ef2/lsprotocol-2025.0.0-py3-none-any.whl", hash = "sha256:f9d78f25221f2a60eaa4a96d3b4ffae011b107537facee61d3da3313880995c7", size = 76250, upload-time = "2025-06-17T21:30:19.455Z" }, +] + [[package]] name = "lxml" version = "6.0.2" @@ -4044,11 +4667,11 @@ wheels = [ [[package]] name = "markdown" -version = "3.10" +version = "3.10.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/7d/ab/7dd27d9d863b3376fcf23a5a13cb5d024aed1db46f963f1b5735ae43b3be/markdown-3.10.tar.gz", hash = "sha256:37062d4f2aa4b2b6b32aefb80faa300f82cc790cb949a35b8caede34f2b68c0e", size = 364931, upload-time = "2025-11-03T19:51:15.007Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2b/f4/69fa6ed85ae003c2378ffa8f6d2e3234662abd02c10d216c0ba96081a238/markdown-3.10.2.tar.gz", hash = "sha256:994d51325d25ad8aa7ce4ebaec003febcce822c3f8c911e3b17c52f7f589f950", size = 368805, upload-time = "2026-02-09T14:57:26.942Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/81/54e3ce63502cd085a0c556652a4e1b919c45a446bd1e5300e10c44c8c521/markdown-3.10-py3-none-any.whl", hash = "sha256:b5b99d6951e2e4948d939255596523444c0e677c669700b1d17aa4a8a464cb7c", size = 107678, upload-time = "2025-11-03T19:51:13.887Z" }, + { url = "https://files.pythonhosted.org/packages/de/1f/77fa3081e4f66ca3576c896ae5d31c3002ac6607f9747d2e3aa49227e464/markdown-3.10.2-py3-none-any.whl", hash = "sha256:e91464b71ae3ee7afd3017d9f358ef0baf158fd9a298db92f1d4761133824c36", size = 108180, upload-time = "2026-02-09T14:57:25.787Z" }, ] [[package]] @@ -4156,6 +4779,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, ] +[[package]] +name = "marshmallow" +version = "3.26.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/79/de6c16cc902f4fc372236926b0ce2ab7845268dcc30fb2fbb7f71b418631/marshmallow-3.26.2.tar.gz", hash = "sha256:bbe2adb5a03e6e3571b573f42527c6fe926e17467833660bebd11593ab8dfd57", size = 222095, upload-time = "2025-12-22T06:53:53.309Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/be/2f/5108cb3ee4ba6501748c4908b908e55f42a5b66245b4cfe0c99326e1ef6e/marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73", size = 50964, upload-time = "2025-12-22T06:53:51.801Z" }, +] + [[package]] name = "matplotlib" version = "3.10.8" @@ -4243,9 +4878,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516, upload-time = "2025-10-23T09:00:20.675Z" }, ] +[[package]] +name = "mccabe" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658, upload-time = "2022-01-24T01:14:51.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, +] + [[package]] name = "mcp" -version = "1.25.0" +version = "1.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -4263,9 +4907,9 @@ dependencies = [ { name = "typing-inspection" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" }, ] [[package]] @@ -4300,19 +4944,19 @@ wheels = [ [[package]] name = "mediapy" -version = "1.2.5" +version = "1.2.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ipython", version = "8.38.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "ipython", version = "9.9.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "ipython", version = "9.10.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "matplotlib" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pillow" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a5/65/1f69540942fcae6df1df14fcd87100827f079cf94e7d6dd4bc89c4602e1c/mediapy-1.2.5.tar.gz", hash = "sha256:2c7a4c51704b26642737190a6e5fbea8260b027fde9d99c644a670255aae860f", size = 
28076, upload-time = "2025-12-17T17:37:03.123Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/eb/8a0499fb1a2f373f97e2b4df91797507c3971c42c59f1610bed090c57ddc/mediapy-1.2.6.tar.gz", hash = "sha256:2c866cfa0a170213f771b1dd5584a2e82d8d0dc0fa94982f83e29aae27e49c83", size = 28143, upload-time = "2026-02-03T10:29:31.104Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/eb/acb955d5d24a70bbdfc365475364d54acd10feedab0121ebe04f312ba828/mediapy-1.2.5-py3-none-any.whl", hash = "sha256:517a7c7411742a68790671a12977d5d54a5a5f85d8b77720031fe15d5a278893", size = 27431, upload-time = "2025-12-17T17:37:01.947Z" }, + { url = "https://files.pythonhosted.org/packages/37/8c/52f0299f1675cdfa1ab39a6028a2e5adf9032ae1118c9895c84b08af162b/mediapy-1.2.6-py3-none-any.whl", hash = "sha256:0a0ea00eb0da83c3c54d588b49c49a41ba456174aa33e530ffe13e17269c9072", size = 27494, upload-time = "2026-02-03T10:29:30.245Z" }, ] [[package]] @@ -4374,45 +5018,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ad/3f/3d42e9a78fe5edf792a83c074b13b9b770092a4fbf3462872f4303135f09/ml_dtypes-0.5.4-cp314-cp314t-win_arm64.whl", hash = "sha256:11942cbf2cf92157db91e5022633c0d9474d4dfd813a909383bd23ce828a4b7d", size = 168825, upload-time = "2025-11-17T22:32:23.766Z" }, ] -[[package]] -name = "mmcv" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "addict" }, - { name = "mmengine" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "opencv-python" }, - { name = "packaging" }, - { name = "pillow" }, - { name = "pyyaml" }, - { name = "regex", marker = "sys_platform == 'win32'" }, - { name = "yapf" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/e9/a2/57a733e7e84985a8a0e3101dfb8170fc9db92435c16afad253069ae3f9df/mmcv-2.2.0.tar.gz", hash = "sha256:ac479247e808d8802f89eadf04d4118de86bdfe81361ec5aed0cc1bf731c67c9", size = 479121, upload-time = "2024-04-24T14:24:28.064Z" } - -[[package]] -name = "mmengine" -version = "0.10.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "addict" }, - { name = "matplotlib" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "opencv-python" }, - { name = "pyyaml" }, - { name = "regex", marker = "sys_platform == 'win32'" }, - { name = "rich" }, - { name = "termcolor" }, - { name = "yapf" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/17/14/959360bbd8374e23fc1b720906999add16a3ac071a501636db12c5861ff5/mmengine-0.10.7.tar.gz", hash = "sha256:d20ffcc31127567e53dceff132612a87f0081de06cbb7ab2bdb7439125a69225", size = 378090, upload-time = "2025-03-04T12:23:09.568Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/98/8e/f98332248aad102511bea4ae19c0ddacd2f0a994f3ca4c82b7a369e0af8b/mmengine-0.10.7-py3-none-any.whl", hash = "sha256:262ac976a925562f78cd5fd14dd1bc9b680ed0aa81f0d85b723ef782f99c54ee", size = 452720, upload-time = "2025-03-04T12:23:06.339Z" }, -] - [[package]] name = "mmh3" version = "5.2.0" @@ -4546,6 +5151,51 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, ] +[[package]] +name = "mosek" +version = "11.0.24" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + 
"python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", + "python_full_version == '3.12.*' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "python_full_version < '3.11' and sys_platform == 'darwin'", +] +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' and sys_platform == 'darwin'" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' and sys_platform == 'darwin'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/e7/d04ea5c587fd8b491fbe9377fafa5feb063bb28a3a6949fb393a62230d9d/mosek-11.0.24-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7f2ab70ad3357f9187c96237d0c49187f82f5885250a5e211b6aa20cb0a7207f", size = 8345311, upload-time = "2025-06-25T10:51:51.777Z" }, +] + +[[package]] +name = "mosek" +version = "11.1.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version == '3.12.*' and sys_platform == 'win32'", + "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + 
"python_full_version == '3.11.*' and sys_platform == 'win32'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version < '3.11' and sys_platform == 'win32'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", +] +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/e9/253e759e6e00b9cfbb4e95e7fe079b0e971b3c81c75f059bf2c2be3216e9/mosek-11.1.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:5c3566d2a603d94a1773bcd27097c8390dba1d9a1543534f3527deb56f1d0a55", size = 15359313, upload-time = "2026-01-07T08:22:00.805Z" }, + { url = "https://files.pythonhosted.org/packages/41/ea/17bb932e0d307c31de685ba817a3cba822e2757a9810e7cc516778c2baa3/mosek-11.1.2-cp39-abi3-manylinux_2_27_aarch64.whl", hash = "sha256:67c13d56a9b7adf2670e4ed6fb62aa92560ae2ff1050f6e756d0d3f82c42c19f", size = 11073007, upload-time = "2026-01-07T08:22:03.118Z" }, + { url = "https://files.pythonhosted.org/packages/f2/67/6f2b6e544cf5e284c7f0baebffbc82b55e7db5b7ed5d711b621fa965d4df/mosek-11.1.2-cp39-abi3-win_amd64.whl", hash = 
"sha256:ad81cfd53af508db89241c7869ddce7ceaae13ef057f7b98007d57dccbb63c92", size = 11191977, upload-time = "2026-01-07T08:22:05.845Z" }, +] + [[package]] name = "mpmath" version = "1.3.0" @@ -4618,7 +5268,7 @@ wheels = [ [[package]] name = "mujoco" -version = "3.4.0" +version = "3.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "absl-py" }, @@ -4628,49 +5278,49 @@ dependencies = [ { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pyopengl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/75/3b/f688fbe34eb609ffdc9dc0f53f7acd3327588f970752780d05a0762d3511/mujoco-3.4.0.tar.gz", hash = "sha256:5a6dc6b7db41eb0ab8724cd477bd0316ba4b53debfc2d80a2d6f444a116fb8d2", size = 826806, upload-time = "2025-12-05T23:13:46.833Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/a0/ff8c20b923675ee803580bb8a33a2781e48c007a2845607f15184cf7fc32/mujoco-3.4.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:b7ae8a534ecf6afab3abab3dc0718ea47f89a2e2f096905870cbf5faf23076c3", size = 6905902, upload-time = "2025-12-05T23:12:59.76Z" }, - { url = "https://files.pythonhosted.org/packages/03/dd/2875a57cdb423d98bdcb359f34af5eb8a24e48a903f4bb7110d75e478dac/mujoco-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7960cf47b4ed274955280200a812e6d780f03707d5258b42c3afb249051216ee", size = 6861873, upload-time = "2025-12-05T23:13:02.42Z" }, - { url = "https://files.pythonhosted.org/packages/67/05/736c180caf0b051ec5ba26ab024e609fb18545b212dd0ee96ec84458f184/mujoco-3.4.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0918ff57a92ba00a95538bf6f0c67973044953bb090b9fd811f9ee6cda4ffcb8", size = 6487647, upload-time = "2025-12-05T23:13:05.097Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/95/ba02262c7a7a786a64b8d77315e7e4d3c77598ff63d8cd605ba2b96ec349/mujoco-3.4.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1b2b8a75ea191ae577bfa2385ca6ecd6328f37f6f46bc3cfb41835b2653f716a", size = 6911042, upload-time = "2025-12-05T23:13:07.727Z" }, - { url = "https://files.pythonhosted.org/packages/22/d5/f94edc884c11b63f0d7ba9322ec4ca98bc7cad57c5c034c2e3332287d9ad/mujoco-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffe79a1476767806318b7dfbbb642e428b873385fb2d2f06e69d461459d01ed1", size = 5399174, upload-time = "2025-12-05T23:13:10.271Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a1/5f92234b0e2f2b8c5b392fd71be3cfb5363bdccded4cac0b5889d07da6cb/mujoco-3.4.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:b456e1d6c3ca7010480d52f7645e49f4b564952063c5a52af1524e226ea72920", size = 6919759, upload-time = "2025-12-05T23:13:12.444Z" }, - { url = "https://files.pythonhosted.org/packages/09/d4/99acbb782cc2c2ab5e5dacdf48b5d2850b641e34a91715d262d14103b764/mujoco-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98d3ab7b02d99ac2866bb111807797549853d6ddf485ddc072cec3c1d33dfad8", size = 6874776, upload-time = "2025-12-05T23:13:14.552Z" }, - { url = "https://files.pythonhosted.org/packages/57/92/1e10be7922508a307017e2a62852555ab4f61148d9791a7bcdf03d902a9a/mujoco-3.4.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:00088b3879cff675b0bad2ddaf27f1088d7d35f020903c25088c43360adcc311", size = 6501363, upload-time = "2025-12-05T23:13:17.313Z" }, - { url = "https://files.pythonhosted.org/packages/c3/c1/37c07061be2b410de33624093fc06fc49ec13888252f1a6c30fcd40633bd/mujoco-3.4.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0b3dce70abdd0ac6475bb8040d7d97a4e3ccf5f6a00cc06086b7a169e246522", size = 6925780, upload-time = "2025-12-05T23:13:19.426Z" }, - { url = 
"https://files.pythonhosted.org/packages/04/0a/c1055f2329761c87edcdc18a480ab43ba942ed10f156888b4744d69c1deb/mujoco-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:e0ab4a450826ca04db11608325b37adf77dee4cde2a9dd9d43e6fa46c44545a9", size = 5422000, upload-time = "2025-12-05T23:13:21.679Z" }, - { url = "https://files.pythonhosted.org/packages/7c/c3/858d2e6fd3bf986a64bb5f0f157b601c2b9604d2b43bbc0469fe1b44d61b/mujoco-3.4.0-cp312-cp312-macosx_10_16_x86_64.whl", hash = "sha256:96bccc995fc561078b5cac1e53f8ba2ba8619348bc0c6cff15bbf6f9a441d220", size = 6922335, upload-time = "2025-12-05T23:13:23.759Z" }, - { url = "https://files.pythonhosted.org/packages/ed/fe/10dbaef500af18866a41d842f2f95cf113aacf0f1eb91677c4817cff3495/mujoco-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:339800c695166c8041cdef95ea5384fc607ad1b86c19528c785a17ed742c3a5e", size = 6793849, upload-time = "2025-12-05T23:13:25.714Z" }, - { url = "https://files.pythonhosted.org/packages/f8/6b/522696a7413f33596b8d18bca52dee2f7c0ecf23a5f08097f346e5a5b656/mujoco-3.4.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1efdab2b146cdbcef4560606b6cbc74abe80f2b94a8593a5cdc469172085a2b3", size = 6520021, upload-time = "2025-12-05T23:13:28.355Z" }, - { url = "https://files.pythonhosted.org/packages/c6/10/fa6b8762efbb02bd349503a39fb9dbbcf9e12041b0c5b29d484cafc09355/mujoco-3.4.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e5a36e61495be2a855f61194813e1277ab4b330cc180e50c8e3c7a459dc40b6", size = 6995663, upload-time = "2025-12-05T23:13:30.436Z" }, - { url = "https://files.pythonhosted.org/packages/3f/ec/5336e8e86e429a7f301dac63cae6ae75ff9e91bfb01502a55f60d7305eca/mujoco-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:10314213c395aedeaf2778596e78dd9ae01d74dc92d4f75c696707781d59826a", size = 5497926, upload-time = "2025-12-05T23:13:32.382Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/ba/9fa63c63728d9ac0982b77af650229f14b1aa53e3331dba7ef6829fdda56/mujoco-3.4.0-cp313-cp313-macosx_10_16_x86_64.whl", hash = "sha256:345fb5adb4e9c1c108aab2ff8418280edf61cec5b705c483eae680c4cf350898", size = 6922502, upload-time = "2025-12-05T23:13:34.941Z" }, - { url = "https://files.pythonhosted.org/packages/e8/51/c3e5c3b199b1b74c85f0cb02dc0ef80363bf91ea245ee8a932804768d5e8/mujoco-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:87a68d063d06d261e83755093e79371901b6a8171b0b8b88dcb020f966d4e463", size = 6794329, upload-time = "2025-12-05T23:13:37.557Z" }, - { url = "https://files.pythonhosted.org/packages/b6/2f/b2f531ae6e8fbbf095dfbb614b7d1130d3d6791920a9b075861a13f5a97b/mujoco-3.4.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:06cd3366b9548b251c3170c9b073e41a9f4a621b4f7e59ceb5fee8c46f6165ca", size = 6520414, upload-time = "2025-12-05T23:13:39.778Z" }, - { url = "https://files.pythonhosted.org/packages/df/72/0c47350ec39611ff8defe9e8af10c23c9ad0235974f1999a523fcd1c3e68/mujoco-3.4.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7edaffb223cf1343ed980030c466170eb8f9d624cf69c9a99925cbce371f22db", size = 6996132, upload-time = "2025-12-05T23:13:41.996Z" }, - { url = "https://files.pythonhosted.org/packages/dc/25/bbf8c01758d619c86bcbe58db6a7d61ca423e7c76791f323b8ea2e92c2bd/mujoco-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:c01a842a17c0229dce2fb65a051a8e6ee5f5307c50c825c850e3702dda4344e6", size = 5497055, upload-time = "2025-12-05T23:13:44.795Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/56/0d/005f0d49ad5878f0611a7c018550b8504d480a7a17ad7e6773ff47d8627a/mujoco-3.5.0.tar.gz", hash = "sha256:5c85a6fc7560ab5fa4534f35ff459e12dc3609681f307e457dbb49b6217f4d73", size = 912543, upload-time = "2026-02-13T01:02:51.554Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e8/20/9e0595e653543df3e4233bc3ad7e50b371b81dbe48d45ffbc867ed7c379d/mujoco-3.5.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:c4324161cb4f334dd984fbb4a4f7d7db9f914f40d06174b02dcf05463d8275e4", size = 7088320, upload-time = "2026-02-13T01:02:06.745Z" }, + { url = "https://files.pythonhosted.org/packages/8d/6b/fdac8ed97086e12ac930fb44e419eda1626e339010df73678cb1f22527d7/mujoco-3.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f3803ff0dd7bc04d6c47d53a794343843bde06f0aeefeac28bb62b4cf2baab3", size = 7093261, upload-time = "2026-02-13T01:02:09.857Z" }, + { url = "https://files.pythonhosted.org/packages/19/ce/abcd9cc6ee7802f97c729ae0ccd517c68f04882f5db755b178e199511dc2/mujoco-3.5.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e13560991c779a139b53151733a0a6f3420ef09459b32d90302c2661c1b20992", size = 6637850, upload-time = "2026-02-13T01:02:11.808Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d6/a5a7b615b257867b7c97db6b3ce07dec9351d5d9d5a5aca881cbb583d7a3/mujoco-3.5.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01b12896ae906f157e18d8b1b7c24a8b72d2576fffa09869047150f186e92b33", size = 7079429, upload-time = "2026-02-13T01:02:13.738Z" }, + { url = "https://files.pythonhosted.org/packages/7e/91/d82dd3c16892e1b0e27a2f537eec8aad54d91d939cb3cd37db2e8c09ecc2/mujoco-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:2328358d2f0031175897092560dd6d04b14bab1cc22caa145ce99b843c17daa2", size = 5624454, upload-time = "2026-02-13T01:02:15.714Z" }, + { url = "https://files.pythonhosted.org/packages/8b/47/e923589301c197c3ea0776b60cc0d57383b3cc51639ca75e4e4b6c5334d6/mujoco-3.5.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:6b3ae97c3f84d093e84dc445a093c893d9f4b6f6bbb1a441e56d77074c450553", size = 7100854, upload-time = "2026-02-13T01:02:17.649Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/02/aa6057ac4c50fb36558208005d6da19518f9a7857ef9b5fd2ed8f9262fe2/mujoco-3.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fbb00809de98e8a65f2002745c5bca39076f8118b0fe08e973e7a99603c92b", size = 7105779, upload-time = "2026-02-13T01:02:19.621Z" }, + { url = "https://files.pythonhosted.org/packages/94/8a/8d87db2cf09a95ff4dcac1bd8eb6ccb95680804eff8f2f70f1d7a11e1980/mujoco-3.5.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2a8d48990172d3b1eb51f20cd08f537c488686b2bc370c504333c07c04595f5d", size = 6651006, upload-time = "2026-02-13T01:02:22.197Z" }, + { url = "https://files.pythonhosted.org/packages/47/14/d5bf98385354318ec2e6c466a8c7cf7fd76f8b711ed6d11d155e2baa81fb/mujoco-3.5.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba54826121c6857fc4ca82df642d9a89174ce5537677c6ead34844bb692437e3", size = 7094833, upload-time = "2026-02-13T01:02:24.517Z" }, + { url = "https://files.pythonhosted.org/packages/b8/98/c1fac334cc764068e6c5d7eb01d6ed2a3392bab51952c816888b2dfe78c2/mujoco-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ec0e35678773b34ee8b15741c34a745e027db062efcae790315aa83a5581c505", size = 5649612, upload-time = "2026-02-13T01:02:26.45Z" }, + { url = "https://files.pythonhosted.org/packages/f9/f0/4772421643f1c5aaf46d9e500a8716f59b02c8bf30bfa92cb8a763159efb/mujoco-3.5.0-cp312-cp312-macosx_10_16_x86_64.whl", hash = "sha256:ec0587cc423385a8d45343a981df58511cb69758ba99164a71567af2d41be3c9", size = 7100581, upload-time = "2026-02-13T01:02:29.182Z" }, + { url = "https://files.pythonhosted.org/packages/e1/d4/d0032323f58a9b8080b8464c6aade8d5ac2e101dbed1de64a38b3913b446/mujoco-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:94cf4285b46bc2d74fbe86e39a93ecfb3b0e584477fff7e38d293d47b88576e7", size = 7046132, upload-time = "2026-02-13T01:02:31.606Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/7b/c1612ec68d98e5f3dbc5b8a21ff5d40ab52409fcc89ea7afc8a197983297/mujoco-3.5.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:12bfb2bb70f760e0d51fd59f3c43b2906c7660a23954fd717321da52ba85a617", size = 6677917, upload-time = "2026-02-13T01:02:34.13Z" }, + { url = "https://files.pythonhosted.org/packages/c8/8a/229e4db3692be55532e155e2ca6a1363752243ee79df0e7e22ba00f716cf/mujoco-3.5.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:66fe37276644c28fab497929c55580725de81afc6d511a40cc27525a8dd99efa", size = 7170882, upload-time = "2026-02-13T01:02:36.086Z" }, + { url = "https://files.pythonhosted.org/packages/02/37/527d83610b878f27c01dd762e0e41aaa62f095c607f0500ac7f724a2c7a5/mujoco-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:4b3a62af174ab59b9b6d816dca0786b7fd85ac081d6c2a931a2b22dd6e821f50", size = 5721886, upload-time = "2026-02-13T01:02:39.544Z" }, + { url = "https://files.pythonhosted.org/packages/87/2a/371033684e4ddcda47c97661fb6e9617c0e5e3749af082a9b4d5d1bf9f27/mujoco-3.5.0-cp313-cp313-macosx_10_16_x86_64.whl", hash = "sha256:74b05ec4a6a3d728b2da6944d2ae17cac4af9b7a9293f2c2e9e7332fa7535714", size = 7100778, upload-time = "2026-02-13T01:02:41.456Z" }, + { url = "https://files.pythonhosted.org/packages/6f/c9/26bd4979d503d03f7a6ded851c3094a5708cb534cf0dc80b4db6672da2b0/mujoco-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82416804ae96c69ed779330bd4f4af0a43632e2bbbcc60e5b193642db48e84ca", size = 7046419, upload-time = "2026-02-13T01:02:43.397Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/34b49e5cfcc6a25ad8af669e170c00b77cfaae99fca12c6586ed4e6cedb7/mujoco-3.5.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b591ed76e800713cd485dd38ec681b3065bde253b25350cfbe708e43a8a7bda", size = 6678488, upload-time = "2026-02-13T01:02:45.702Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/47/93c7ac3a9630b49c55d76b0d02aa565543e2f62cecd885f8f574f5c745e7/mujoco-3.5.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a956520adb275ce8e878da29e2586eac3affc7b7ac772065ef01f2380a9e8784", size = 7171277, upload-time = "2026-02-13T01:02:47.59Z" }, + { url = "https://files.pythonhosted.org/packages/ab/53/54a0815d43c83e1074cfc7da98a3dea88d7dda48c03edfd225a387a3767b/mujoco-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:646b26f545cfdd60ae65ee90d44f63f50fc7ea5b8242777964ef0148830e72df", size = 5721537, upload-time = "2026-02-13T01:02:49.636Z" }, ] [[package]] name = "mujoco-mjx" -version = "3.4.0" +version = "3.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "absl-py" }, { name = "etils", extra = ["epath"] }, { name = "jax", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "jax", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jax", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "jaxlib", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "jaxlib", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jaxlib", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "mujoco" }, { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" 
}, marker = "python_full_version >= '3.11'" }, { name = "trimesh" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/57/2cebcde17bdad9c575c71a301ea1524eb9dba76a974e24f07abf714050be/mujoco_mjx-3.4.0.tar.gz", hash = "sha256:10fa51a92c22affd27c9205c5fb965c14c256729ab58fd2021dc9e4df9bedec6", size = 6872370, upload-time = "2025-12-05T23:14:13.734Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/3c/fc471adb5c83bb657c3634cf37c8c5cb5bb37c204d02192a4ee215132d1e/mujoco_mjx-3.5.0.tar.gz", hash = "sha256:42bdf3e80c0c4dfcfc78af97034f836d5292742e450a43a0dd9d44ada1e4bdc0", size = 6907429, upload-time = "2026-02-13T01:04:23.208Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/2f/8c2f734a0c4762416895ae3a7a9f46b10a3a8f3d72c0457d72b31d329c34/mujoco_mjx-3.4.0-py3-none-any.whl", hash = "sha256:b64e0e33e367027e912893701cce905efd1397e2234bca171c73990b9f088171", size = 6953534, upload-time = "2025-12-05T23:14:11.191Z" }, + { url = "https://files.pythonhosted.org/packages/dd/ec/ba408121d07200f4d588ae83033a99dcd197bba47e35e50165d260f2ef6c/mujoco_mjx-3.5.0-py3-none-any.whl", hash = "sha256:633aa801f84fa2becc17ea124d95ad3e34f59fdfaa3720b7ec18b427f3c5bf46", size = 6992318, upload-time = "2026-02-13T01:04:21.21Z" }, ] [[package]] @@ -4730,11 +5380,11 @@ wheels = [ [[package]] name = "narwhals" -version = "2.15.0" +version = "2.16.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/47/6d/b57c64e5038a8cf071bce391bb11551657a74558877ac961e7fa905ece27/narwhals-2.15.0.tar.gz", hash = "sha256:a9585975b99d95084268445a1fdd881311fa26ef1caa18020d959d5b2ff9a965", size = 603479, upload-time = "2026-01-06T08:10:13.27Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/6f/713be67779028d482c6e0f2dde5bc430021b2578a4808c1c9f6d7ad48257/narwhals-2.16.0.tar.gz", hash = "sha256:155bb45132b370941ba0396d123cf9ed192bf25f39c4cea726f2da422ca4e145", size = 618268, upload-time = 
"2026-02-02T10:31:00.545Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/2e/cf2ffeb386ac3763526151163ad7da9f1b586aac96d2b4f7de1eaebf0c61/narwhals-2.15.0-py3-none-any.whl", hash = "sha256:cbfe21ca19d260d9fd67f995ec75c44592d1f106933b03ddd375df7ac841f9d6", size = 432856, upload-time = "2026-01-06T08:10:11.511Z" }, + { url = "https://files.pythonhosted.org/packages/03/cc/7cb74758e6df95e0c4e1253f203b6dd7f348bf2f29cf89e9210a2416d535/narwhals-2.16.0-py3-none-any.whl", hash = "sha256:846f1fd7093ac69d63526e50732033e86c30ea0026a44d9b23991010c7d1485d", size = 443951, upload-time = "2026-02-02T10:30:58.635Z" }, ] [[package]] @@ -4781,12 +5431,16 @@ name = "networkx" version = "3.6.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and 
sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -4914,12 +5568,16 @@ name = "numpy" version = "2.3.5" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 
'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -5009,19 +5667,15 @@ name = "nvidia-cublas-cu12" version = "12.8.4.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", - "python_full_version == '3.12.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", - "python_full_version == '3.11.*' and sys_platform == 'win32'", "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and 
sys_platform != 'linux' and sys_platform != 'win32')", - "python_full_version < '3.11' and sys_platform == 'win32'", "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/29/99/db44d685f0e257ff0e213ade1964fc459b4a690a73293220e98feb3307cf/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:b86f6dd8935884615a0683b663891d43781b819ac4f2ba2b0c9604676af346d0", size = 590537124, upload-time = "2025-03-07T01:43:53.556Z" }, { url = "https://files.pythonhosted.org/packages/dc/61/e24b560ab2e2eaeb3c839129175fb330dfcfc29e5203196e5541a4c44682/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142", size = 594346921, upload-time = "2025-03-07T01:44:31.254Z" }, - { url = "https://files.pythonhosted.org/packages/70/61/7d7b3c70186fb651d0fbd35b01dbfc8e755f69fd58f817f3d0f642df20c3/nvidia_cublas_cu12-12.8.4.1-py3-none-win_amd64.whl", hash = "sha256:47e9b82132fa8d2b4944e708049229601448aaad7e6f296f630f2d1a32de35af", size = 567544208, upload-time = "2025-03-07T01:53:30.535Z" }, ] [[package]] @@ -5029,17 +5683,25 @@ name = "nvidia-cublas-cu12" version = "12.9.1.4" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and 
sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'win32'", "python_full_version == '3.11.*' and sys_platform == 'darwin'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.11.*' and sys_platform == 'win32'", "python_full_version < '3.11' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version < '3.11' and sys_platform == 'win32'", ] wheels = [ { url = "https://files.pythonhosted.org/packages/82/6c/90d3f532f608a03a13c1d6c16c266ffa3828e8011b1549d3b61db2ad59f5/nvidia_cublas_cu12-12.9.1.4-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:7a950dae01add3b415a5a5cdc4ec818fb5858263e9cca59004bb99fdbbd3a5d6", size = 575006342, upload-time = "2025-06-05T20:04:16.902Z" }, + { url = "https://files.pythonhosted.org/packages/45/a1/a17fade6567c57452cfc8f967a40d1035bb9301db52f27808167fbb2be2f/nvidia_cublas_cu12-12.9.1.4-py3-none-win_amd64.whl", hash = "sha256:1e5fee10662e6e52bd71dec533fbbd4971bb70a5f24f3bc3793e5c2e9dc640bf", size = 553153899, upload-time = "2025-06-05T20:13:35.556Z" }, ] [[package]] @@ -5063,19 +5725,15 @@ name = "nvidia-cuda-runtime-cu12" version = "12.8.90" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", - "python_full_version == '3.12.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and 
sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", - "python_full_version == '3.11.*' and sys_platform == 'win32'", "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", - "python_full_version < '3.11' and sys_platform == 'win32'", "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", ] wheels = [ { url = "https://files.pythonhosted.org/packages/7c/75/f865a3b236e4647605ea34cc450900854ba123834a5f1598e160b9530c3a/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:52bf7bbee900262ffefe5e9d5a2a69a30d97e2bc5bb6cc866688caa976966e3d", size = 965265, upload-time = "2025-03-07T01:39:43.533Z" }, { url = "https://files.pythonhosted.org/packages/0d/9b/a997b638fcd068ad6e4d53b8551a7d30fe8b404d6f1804abf1df69838932/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90", size = 954765, upload-time = "2025-03-07T01:40:01.615Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/a5/a515b7600ad361ea14bfa13fb4d6687abf500adc270f19e89849c0590492/nvidia_cuda_runtime_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:c0c6027f01505bfed6c3b21ec546f69c687689aad5f1a377554bc6ca4aa993a8", size = 944318, upload-time = "2025-03-07T01:51:01.794Z" }, ] [[package]] @@ -5083,17 +5741,25 @@ name = "nvidia-cuda-runtime-cu12" version = "12.9.79" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'win32'", "python_full_version == '3.11.*' and sys_platform == 'darwin'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.11.*' and sys_platform == 'win32'", "python_full_version < '3.11' and sys_platform == 'darwin'", "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version < '3.11' and sys_platform == 'win32'", ] wheels = [ { url = "https://files.pythonhosted.org/packages/bc/e0/0279bd94539fda525e0c8538db29b72a5a8495b0c12173113471d28bce78/nvidia_cuda_runtime_cu12-12.9.79-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:83469a846206f2a733db0c42e223589ab62fd2fabac4432d2f8802de4bded0a4", size = 3515012, upload-time = "2025-06-05T20:00:35.519Z" }, + { url = "https://files.pythonhosted.org/packages/59/df/e7c3a360be4f7b93cee39271b792669baeb3846c58a4df6dfcf187a7ffab/nvidia_cuda_runtime_cu12-12.9.79-py3-none-win_amd64.whl", hash = "sha256:8e018af8fa02363876860388bd10ccb89eb9ab8fb0aa749aaf58430a9f7c4891", size = 3591604, upload-time = "2025-06-05T20:11:17.036Z" }, ] [[package]] @@ -5171,7 +5837,6 @@ name = "nvidia-libnvcomp-cu12" version = "5.1.0.21" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/23/b20f2381c7e92c704386428fe79736a13c50f452376453fdc60fcc0ec1b0/nvidia_libnvcomp_cu12-5.1.0.21-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:77dfb3cb8c8995dfa0279ba99b0501e03cbe77e876aab44f4693abdcfac549ce", size = 28802614, upload-time = "2025-12-02T19:05:08.101Z" }, { url = "https://files.pythonhosted.org/packages/08/ab/844fcbaa46cc1242632b4b94b4ffc210ec3d8d8f30ad8f7f1c27767389a9/nvidia_libnvcomp_cu12-5.1.0.21-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:68de61183edb9a870c9a608273a2b5da97dea18e3552096c61fafd9bb2689db0", size = 28958714, upload-time = "2025-12-02T19:01:40.466Z" }, { url = "https://files.pythonhosted.org/packages/c4/cc/c6e92d9587b9ad63c08b1b94c5ae2216319491d0bd4f40f2a9a431d4841f/nvidia_libnvcomp_cu12-5.1.0.21-py3-none-win_amd64.whl", hash = "sha256:1352c7c4264ee5357f8f20e4a8da7f2f91debe21d8968f44576a7f4b51f91533", size = 28490640, upload-time = "2025-12-02T19:07:28.096Z" }, ] @@ -5189,17 +5854,16 @@ name = "nvidia-nvimgcodec-cu12" version = "0.7.0.11" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/63/48/74d33dd126f84a4212480e2cf07504f457b5bae5acd33c0f6bf839ea17d4/nvidia_nvimgcodec_cu12-0.7.0.11-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:52d834be8122bb5b8fc3151cc3bedb95368b3e7ac76af0c4561772ab2a847b2b", size = 
27409358, upload-time = "2025-12-02T09:28:16.358Z" }, { url = "https://files.pythonhosted.org/packages/73/b4/f06528ebcb82da84f4a96efe7a210c277767cb86ad2f61f8b1a17d17f251/nvidia_nvimgcodec_cu12-0.7.0.11-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:32d3457859c5784e4c0f6a2f56b6a9afec8fe646cec1cbe4bb5c320948d92dfe", size = 33735220, upload-time = "2025-12-02T09:30:02.546Z" }, { url = "https://files.pythonhosted.org/packages/be/79/95b36049a9504d59d79929e9f3bec001b270f29aec8486e5fb9783a9502c/nvidia_nvimgcodec_cu12-0.7.0.11-py3-none-win_amd64.whl", hash = "sha256:495e07e071fcb2115f7f1948a04f6c51f96d61b83c614af753f7cc1bf369a46c", size = 18448810, upload-time = "2025-12-02T09:20:33.838Z" }, ] [package.optional-dependencies] all = [ - { name = "nvidia-libnvcomp-cu12" }, - { name = "nvidia-nvjpeg-cu12" }, - { name = "nvidia-nvjpeg2k-cu12" }, - { name = "nvidia-nvtiff-cu12" }, + { name = "nvidia-libnvcomp-cu12", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "nvidia-nvjpeg-cu12", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "nvidia-nvjpeg2k-cu12", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "nvidia-nvtiff-cu12", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] [[package]] @@ -5215,7 +5879,6 @@ name = "nvidia-nvjpeg-cu12" version = "12.4.0.76" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/48/5c12a3e6afe070ff563375cc72b42e9c7400bd0b44c734591049410be7fd/nvidia_nvjpeg_cu12-12.4.0.76-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f52c5ef7cf56e8bffac8903a59f14494017a52e4fe89d5a1d16c1e88d7bbf194", size = 5273693, upload-time = "2025-06-05T20:10:35.162Z" }, { url = 
"https://files.pythonhosted.org/packages/57/68/d3526394584134a23f2500833c62d3352e1feda7547041f4612b1a183aa3/nvidia_nvjpeg_cu12-12.4.0.76-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3888f10b32fbd58e80166c48e01073732d752fa5f167b7cb5b9615f1c6375a20", size = 5313609, upload-time = "2025-06-05T20:10:43.92Z" }, { url = "https://files.pythonhosted.org/packages/bc/28/e05bb8e6cdb98e79c6822f8bbd7154a26d8102412b3a0bfd5e4c7c52db8c/nvidia_nvjpeg_cu12-12.4.0.76-py3-none-win_amd64.whl", hash = "sha256:21923726db667bd53050d0de88320983ff423322b7f376057dd943e487c40abc", size = 4741398, upload-time = "2025-06-05T20:16:19.152Z" }, ] @@ -5225,17 +5888,16 @@ name = "nvidia-nvjpeg2k-cu12" version = "0.9.1.47" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/0b/421625f754862b893c2f487090b4b6b86337801451f0623cda9d21d111b4/nvidia_nvjpeg2k_cu12-0.9.1.47-py3-none-manylinux2014_aarch64.whl", hash = "sha256:f6787aed8f9d0c839ea4e0ae190af90bcc71a9a6b4e3965d5b67c22a00f58714", size = 7344958, upload-time = "2025-11-13T18:17:15.127Z" }, { url = "https://files.pythonhosted.org/packages/85/91/41abf44089ceb8b29479cdef2ca952277cc6667d40affedd39c3f1744d7e/nvidia_nvjpeg2k_cu12-0.9.1.47-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6672c85e47ab61ffe3d19da8a41fd597155852e6e219ddc90a133623b54f7818", size = 7402941, upload-time = "2025-11-13T18:13:28.977Z" }, { url = "https://files.pythonhosted.org/packages/01/b2/ab62e6c008f3080743477de31da22eb83b374c37fe5d387e7435e507914f/nvidia_nvjpeg2k_cu12-0.9.1.47-py3-none-win_amd64.whl", hash = "sha256:ebb5d34d68beb70c2718c769996d9d8e49a2d9acacc79f6235c07649a4045e97", size = 6973975, upload-time = "2025-11-13T18:25:26.611Z" }, ] [[package]] name = "nvidia-nvshmem-cu12" -version = "3.3.20" +version = "3.4.5" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/3b/6c/99acb2f9eb85c29fc6f3a7ac4dccfd992e22666dd08a642b303311326a97/nvidia_nvshmem_cu12-3.3.20-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d00f26d3f9b2e3c3065be895e3059d6479ea5c638a3f38c9fec49b1b9dd7c1e5", size = 124657145, upload-time = "2025-08-04T20:25:19.995Z" }, + { url = "https://files.pythonhosted.org/packages/b5/09/6ea3ea725f82e1e76684f0708bbedd871fc96da89945adeba65c3835a64c/nvidia_nvshmem_cu12-3.4.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:042f2500f24c021db8a06c5eec2539027d57460e1c1a762055a6554f72c369bd", size = 139103095, upload-time = "2025-09-06T00:32:31.266Z" }, ] [[package]] @@ -5243,7 +5905,6 @@ name = "nvidia-nvtiff-cu12" version = "0.6.0.78" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/41/19/9529fbda1e7a24b45649c9bc86cf6490d5b53f63e6b17d851f1528ff8380/nvidia_nvtiff_cu12-0.6.0.78-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9193a46eaef2d52a92178c34e2404f621b581d651d2c7ab2d83c24fee6fcc136", size = 2478534, upload-time = "2025-11-13T18:26:02.492Z" }, { url = "https://files.pythonhosted.org/packages/62/4b/24805e9c56936dd57a1830b65b53234853f429cea5edbcbfdf853ceebdcf/nvidia_nvtiff_cu12-0.6.0.78-py3-none-manylinux2014_x86_64.whl", hash = "sha256:b48517578de6f1a6e806e00ef0da6d673036957560efbe9fa2934707d5d18c00", size = 2518414, upload-time = "2025-11-13T18:16:55.401Z" }, { url = "https://files.pythonhosted.org/packages/45/48/1d818455e6c6182354fb5b17a6c9d7dcfb002e64e258554fe3410ea44510/nvidia_nvtiff_cu12-0.6.0.78-py3-none-win_amd64.whl", hash = "sha256:daf9035b5efc315ef904b449564d1d9d9a502f38e115cf5757d98f9c52a284d0", size = 2055719, upload-time = "2025-11-13T18:29:01.023Z" }, ] @@ -5293,7 +5954,7 @@ wheels = [ [[package]] name = "onnx" -version = "1.20.0" +version = "1.20.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ml-dtypes" }, @@ -5302,38 +5963,37 
@@ dependencies = [ { name = "protobuf" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bd/bf/824b13b7ea14c2d374b48a296cfa412442e5559326fbab5441a4fcb68924/onnx-1.20.0.tar.gz", hash = "sha256:1a93ec69996b4556062d552ed1aa0671978cfd3c17a40bf4c89a1ae169c6a4ad", size = 12049527, upload-time = "2025-12-01T18:14:34.679Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/23/18/8fd768f715a990d3b5786c9bffa6f158934cc1935f2774dd15b26c62f99f/onnx-1.20.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7e706470f8b731af6d0347c4f01b8e0e1810855d0c71c467066a5bd7fa21704b", size = 18341375, upload-time = "2025-12-01T18:13:29.481Z" }, - { url = "https://files.pythonhosted.org/packages/cf/47/9fdb6e8bde5f77f8bdcf7e584ad88ffa7a189338b92658351518c192bde0/onnx-1.20.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3e941d0f3edd57e1d63e2562c74aec2803ead5b965e76ccc3d2b2bd4ae0ea054", size = 17899075, upload-time = "2025-12-01T18:13:32.375Z" }, - { url = "https://files.pythonhosted.org/packages/b2/17/7bb16372f95a8a8251c202018952a747ac7f796a9e6d5720ed7b36680834/onnx-1.20.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6930ed7795912c4298ec8642b33c99c51c026a57edf17788b8451fe22d11e674", size = 18118826, upload-time = "2025-12-01T18:13:35.077Z" }, - { url = "https://files.pythonhosted.org/packages/19/d8/19e3f599601195b1d8ff0bf9e9469065ebeefd9b5e5ec090344f031c38cb/onnx-1.20.0-cp310-cp310-win32.whl", hash = "sha256:f8424c95491de38ecc280f7d467b298cb0b7cdeb1cd892eb9b4b9541c00a600e", size = 16364286, upload-time = "2025-12-01T18:13:38.304Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f9/11d2db50a6c56092bd2e22515fe6998309c7b2389ed67f8ffd27285c33b5/onnx-1.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:1ecca1f963d69e002c03000f15844f8cac3b6d7b6639a934e73571ee02d59c35", size = 16487791, upload-time = "2025-12-01T18:13:41.062Z" }, - { url = 
"https://files.pythonhosted.org/packages/9e/9a/125ad5ed919d1782b26b0b4404e51adc44afd029be30d5a81b446dccd9c5/onnx-1.20.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:00dc8ae2c7b283f79623961f450b5515bd2c4b47a7027e7a1374ba49cef27768", size = 18341929, upload-time = "2025-12-01T18:13:43.79Z" }, - { url = "https://files.pythonhosted.org/packages/4d/3c/85280dd05396493f3e1b4feb7a3426715e344b36083229437f31d9788a01/onnx-1.20.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f62978ecfb8f320faba6704abd20253a5a79aacc4e5d39a9c061dd63d3b7574f", size = 17899362, upload-time = "2025-12-01T18:13:46.496Z" }, - { url = "https://files.pythonhosted.org/packages/26/db/e11cf9aaa6ccbcd27ea94d321020fef3207cba388bff96111e6431f97d1a/onnx-1.20.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:71177f8fd5c0dd90697bc281f5035f73707bdac83257a5c54d74403a1100ace9", size = 18119129, upload-time = "2025-12-01T18:13:49.662Z" }, - { url = "https://files.pythonhosted.org/packages/ef/0b/1b99e7ba5ccfa8ecb3509ec579c8520098d09b903ccd520026d60faa7c75/onnx-1.20.0-cp311-cp311-win32.whl", hash = "sha256:1d3d0308e2c194f4b782f51e78461b567fac8ce6871c0cf5452ede261683cc8f", size = 16364604, upload-time = "2025-12-01T18:13:52.691Z" }, - { url = "https://files.pythonhosted.org/packages/51/ab/7399817821d0d18ff67292ac183383e41f4f4ddff2047902f1b7b51d2d40/onnx-1.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a6de7dda77926c323b0e5a830dc9c2866ce350c1901229e193be1003a076c25", size = 16488019, upload-time = "2025-12-01T18:13:55.776Z" }, - { url = "https://files.pythonhosted.org/packages/fd/e0/23059c11d9c0fb1951acec504a5cc86e1dd03d2eef3a98cf1941839f5322/onnx-1.20.0-cp311-cp311-win_arm64.whl", hash = "sha256:afc4cf83ce5d547ebfbb276dae8eb0ec836254a8698d462b4ba5f51e717fd1ae", size = 16446841, upload-time = "2025-12-01T18:13:58.091Z" }, - { url = 
"https://files.pythonhosted.org/packages/5e/19/2caa972a31014a8cb4525f715f2a75d93caef9d4b9da2809cc05d0489e43/onnx-1.20.0-cp312-abi3-macosx_12_0_universal2.whl", hash = "sha256:31efe37d7d1d659091f34ddd6a31780334acf7c624176832db9a0a8ececa8fb5", size = 18340913, upload-time = "2025-12-01T18:14:00.477Z" }, - { url = "https://files.pythonhosted.org/packages/78/bb/b98732309f2f6beb4cdcf7b955d7bbfd75a191185370ee21233373db381e/onnx-1.20.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d75da05e743eb9a11ff155a775cae5745e71f1cd0ca26402881b8f20e8d6e449", size = 17896118, upload-time = "2025-12-01T18:14:03.239Z" }, - { url = "https://files.pythonhosted.org/packages/84/a7/38aa564871d062c11538d65c575af9c7e057be880c09ecbd899dd1abfa83/onnx-1.20.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02e0d72ab09a983fce46686b155a5049898558d9f3bc6e8515120d6c40666318", size = 18115415, upload-time = "2025-12-01T18:14:06.261Z" }, - { url = "https://files.pythonhosted.org/packages/3b/17/a600b62cf4ad72976c66f83ce9e324205af434706ad5ec0e35129e125aef/onnx-1.20.0-cp312-abi3-win32.whl", hash = "sha256:392ca68b34b97e172d33b507e1e7bfdf2eea96603e6e7ff109895b82ff009dc7", size = 16363019, upload-time = "2025-12-01T18:14:09.16Z" }, - { url = "https://files.pythonhosted.org/packages/9c/3b/5146ba0a89f73c026bb468c49612bab8d005aa28155ebf06cf5f2eb8d36c/onnx-1.20.0-cp312-abi3-win_amd64.whl", hash = "sha256:259b05758d41645f5545c09f887187662b350d40db8d707c33c94a4f398e1733", size = 16485934, upload-time = "2025-12-01T18:14:13.046Z" }, - { url = "https://files.pythonhosted.org/packages/f3/bc/d251b97395e721b3034e9578d4d4d9fb33aac4197ae16ce8c7ed79a26dce/onnx-1.20.0-cp312-abi3-win_arm64.whl", hash = "sha256:2d25a9e1fde44bc69988e50e2211f62d6afcd01b0fd6dfd23429fd978a35d32f", size = 16444946, upload-time = "2025-12-01T18:14:15.801Z" }, - { url = 
"https://files.pythonhosted.org/packages/8d/11/4d47409e257013951a17d08c31988e7c2e8638c91d4d5ce18cc57c6ea9d9/onnx-1.20.0-cp313-cp313t-macosx_12_0_universal2.whl", hash = "sha256:7646e700c0a53770a86d5a9a582999a625a3173c4323635960aec3cba8441c6a", size = 18348524, upload-time = "2025-12-01T18:14:18.102Z" }, - { url = "https://files.pythonhosted.org/packages/67/60/774d29a0f00f84a4ec624fe35e0c59e1dbd7f424adaab751977a45b60e05/onnx-1.20.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0bdfd22fe92b87bf98424335ec1191ed79b08cd0f57fe396fab558b83b2c868", size = 17900987, upload-time = "2025-12-01T18:14:20.835Z" }, - { url = "https://files.pythonhosted.org/packages/9c/7c/6bd82b81b85b2680e3de8cf7b6cc49a7380674b121265bb6e1e2ff3bb0aa/onnx-1.20.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1a4e02148b2a7a4b82796d0ecdb6e49ba7abd34bb5a9de22af86aad556fb76", size = 18121332, upload-time = "2025-12-01T18:14:24.558Z" }, - { url = "https://files.pythonhosted.org/packages/d1/42/d2cd00c84def4e17b471e24d82a1d2e3c5be202e2c163420b0353ddf34df/onnx-1.20.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2241c85fdaa25a66565fcd1d327c7bcd8f55165420ebaee1e9563c3b9bf961c9", size = 16492660, upload-time = "2025-12-01T18:14:27.456Z" }, - { url = "https://files.pythonhosted.org/packages/42/cd/1106de50a17f2a2dfbb4c8bb3cf2f99be2c7ac2e19abbbf9e07ab47b1b35/onnx-1.20.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ee46cdc5abd851a007a4be81ee53e0e303cf9a0e46d74231d5d361333a1c9411", size = 16448588, upload-time = "2025-12-01T18:14:32.277Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/3b/8a/335c03a8683a88a32f9a6bb98899ea6df241a41df64b37b9696772414794/onnx-1.20.1.tar.gz", hash = "sha256:ded16de1df563d51fbc1ad885f2a426f814039d8b5f4feb77febe09c0295ad67", size = 12048980, upload-time = "2026-01-10T01:40:03.043Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/79/cc/4ba3c80cfaffdb541dc5a23eaccb045a627361e94ecaeba30496270f15b3/onnx-1.20.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3fe243e83ad737637af6512708454e720d4b0864def2b28e6b0ee587b80a50be", size = 17904206, upload-time = "2026-01-10T01:38:58.574Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fc/3a1c4ae2cd5cfab2d0ebc1842769b04b417fe13946144a7c8ce470dd9c85/onnx-1.20.1-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e24e96b48f27e4d6b44cb0b195b367a2665da2d819621eec51903d575fc49d38", size = 17414849, upload-time = "2026-01-10T01:39:01.494Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ab/5017945291b981f2681fb620f2d5b6070e02170c648770711ef1eac79d56/onnx-1.20.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0903e6088ed5e8f59ebd381ab2a6e9b2a60b4c898f79aa2fe76bb79cf38a5031", size = 17513600, upload-time = "2026-01-10T01:39:04.348Z" }, + { url = "https://files.pythonhosted.org/packages/2e/b0/063e79dc365972af876d786bacc6acd8909691af2b9296615ff74ad182f3/onnx-1.20.1-cp310-cp310-win32.whl", hash = "sha256:17483e59082b2ca6cadd2b48fd8dce937e5b2c985ed5583fefc38af928be1826", size = 16239159, upload-time = "2026-01-10T01:39:07.254Z" }, + { url = "https://files.pythonhosted.org/packages/2a/73/a992271eb3683e676239d71b5a78ad3cf4d06d2223c387e701bf305da199/onnx-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:e2b0cf797faedfd3b83491dc168ab5f1542511448c65ceb482f20f04420cbf3a", size = 16391718, upload-time = "2026-01-10T01:39:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/0c/38/1a0e74d586c08833404100f5c052f92732fb5be417c0b2d7cb0838443bfe/onnx-1.20.1-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:53426e1b458641e7a537e9f176330012ff59d90206cac1c1a9d03cdd73ed3095", size = 17904965, upload-time = "2026-01-10T01:39:13.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/25/64b076e9684d17335f80b15b3bf502f7a8e1a89f08a6b208d4f2861b3011/onnx-1.20.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ca7281f8c576adf396c338cf43fff26faee8d4d2e2577b8e73738f37ceccf945", size = 17415179, upload-time = "2026-01-10T01:39:16.516Z" }, + { url = "https://files.pythonhosted.org/packages/ac/d5/6743b409421ced20ad5af1b3a7b4c4e568689ffaca86db431692fca409a6/onnx-1.20.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2297f428c51c7fc6d8fad0cf34384284dfeff3f86799f8e83ef905451348ade0", size = 17513672, upload-time = "2026-01-10T01:39:19.35Z" }, + { url = "https://files.pythonhosted.org/packages/9a/6b/dae82e6fdb2043302f29adca37522312ea2be55b75907b59be06fbdffe87/onnx-1.20.1-cp311-cp311-win32.whl", hash = "sha256:63d9cbcab8c96841eadeb7c930e07bfab4dde8081eb76fb68e0dfb222706b81e", size = 16239336, upload-time = "2026-01-10T01:39:22.506Z" }, + { url = "https://files.pythonhosted.org/packages/8e/17/a0d7863390c1f2067d7c02dcc1477034965c32aaa1407bfcf775305ffee4/onnx-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:d78cde72d7ca8356a2d99c5dc0dbf67264254828cae2c5780184486c0cd7b3bf", size = 16392120, upload-time = "2026-01-10T01:39:25.106Z" }, + { url = "https://files.pythonhosted.org/packages/aa/72/9b879a46eb7a3322223791f36bf9c25d95da9ed93779eabb75a560f22e5b/onnx-1.20.1-cp311-cp311-win_arm64.whl", hash = "sha256:0104bb2d4394c179bcea3df7599a45a2932b80f4633840896fcf0d7d8daecea2", size = 16346923, upload-time = "2026-01-10T01:39:27.782Z" }, + { url = "https://files.pythonhosted.org/packages/7c/4c/4b17e82f91ab9aa07ff595771e935ca73547b035030dc5f5a76e63fbfea9/onnx-1.20.1-cp312-abi3-macosx_12_0_universal2.whl", hash = "sha256:1d923bb4f0ce1b24c6859222a7e6b2f123e7bfe7623683662805f2e7b9e95af2", size = 17903547, upload-time = "2026-01-10T01:39:31.015Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/5e/1bfa100a9cb3f2d3d5f2f05f52f7e60323b0e20bb0abace1ae64dbc88f25/onnx-1.20.1-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ddc0b7d8b5a94627dc86c533d5e415af94cbfd103019a582669dad1f56d30281", size = 17412021, upload-time = "2026-01-10T01:39:33.885Z" }, + { url = "https://files.pythonhosted.org/packages/fb/71/d3fec0dcf9a7a99e7368112d9c765154e81da70fcba1e3121131a45c245b/onnx-1.20.1-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9336b6b8e6efcf5c490a845f6afd7e041c89a56199aeda384ed7d58fb953b080", size = 17510450, upload-time = "2026-01-10T01:39:36.589Z" }, + { url = "https://files.pythonhosted.org/packages/74/a7/edce1403e05a46e59b502fae8e3350ceeac5841f8e8f1561e98562ed9b09/onnx-1.20.1-cp312-abi3-win32.whl", hash = "sha256:564c35a94811979808ab5800d9eb4f3f32c12daedba7e33ed0845f7c61ef2431", size = 16238216, upload-time = "2026-01-10T01:39:39.46Z" }, + { url = "https://files.pythonhosted.org/packages/8b/c7/8690c81200ae652ac550c1df52f89d7795e6cc941f3cb38c9ef821419e80/onnx-1.20.1-cp312-abi3-win_amd64.whl", hash = "sha256:9fe7f9a633979d50984b94bda8ceb7807403f59a341d09d19342dc544d0ca1d5", size = 16389207, upload-time = "2026-01-10T01:39:41.955Z" }, + { url = "https://files.pythonhosted.org/packages/01/a0/4fb0e6d36eaf079af366b2c1f68bafe92df6db963e2295da84388af64abc/onnx-1.20.1-cp312-abi3-win_arm64.whl", hash = "sha256:21d747348b1c8207406fa2f3e12b82f53e0d5bb3958bcd0288bd27d3cb6ebb00", size = 16344155, upload-time = "2026-01-10T01:39:45.536Z" }, + { url = "https://files.pythonhosted.org/packages/ea/bb/715fad292b255664f0e603f1b2ef7bf2b386281775f37406beb99fa05957/onnx-1.20.1-cp313-cp313t-macosx_12_0_universal2.whl", hash = "sha256:29197b768f5acdd1568ddeb0a376407a2817844f6ac1ef8c8dd2d974c9ab27c3", size = 17912296, upload-time = "2026-01-10T01:39:48.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/c3/541af12c3d45e159a94ee701100ba9e94b7bd8b7a8ac5ca6838569f894f8/onnx-1.20.1-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f0371aa67f51917a09cc829ada0f9a79a58f833449e03d748f7f7f53787c43c", size = 17416925, upload-time = "2026-01-10T01:39:50.82Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3b/d5660a7d2ddf14f531ca66d409239f543bb290277c3f14f4b4b78e32efa3/onnx-1.20.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be1e5522200b203b34327b2cf132ddec20ab063469476e1f5b02bb7bd259a489", size = 17515602, upload-time = "2026-01-10T01:39:54.132Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b4/47225ab2a92562eff87ba9a1a028e3535d659a7157d7cde659003998b8e3/onnx-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:15c815313bbc4b2fdc7e4daeb6e26b6012012adc4d850f4e3b09ed327a7ea92a", size = 16395729, upload-time = "2026-01-10T01:39:57.577Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7d/1bbe626ff6b192c844d3ad34356840cc60fca02e2dea0db95e01645758b1/onnx-1.20.1-cp313-cp313t-win_arm64.whl", hash = "sha256:eb335d7bcf9abac82a0d6a0fda0363531ae0b22cfd0fc6304bff32ee29905def", size = 16348968, upload-time = "2026-01-10T01:40:00.491Z" }, ] [[package]] name = "onnxruntime" -version = "1.23.2" +version = "1.24.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "coloredlogs" }, { name = "flatbuffers" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, @@ -5342,53 +6002,51 @@ dependencies = [ { name = "sympy" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/35/d6/311b1afea060015b56c742f3531168c1644650767f27ef40062569960587/onnxruntime-1.23.2-cp310-cp310-macosx_13_0_arm64.whl", hash = 
"sha256:a7730122afe186a784660f6ec5807138bf9d792fa1df76556b27307ea9ebcbe3", size = 17195934, upload-time = "2025-10-27T23:06:14.143Z" }, - { url = "https://files.pythonhosted.org/packages/db/db/81bf3d7cecfbfed9092b6b4052e857a769d62ed90561b410014e0aae18db/onnxruntime-1.23.2-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:b28740f4ecef1738ea8f807461dd541b8287d5650b5be33bca7b474e3cbd1f36", size = 19153079, upload-time = "2025-10-27T23:05:57.686Z" }, - { url = "https://files.pythonhosted.org/packages/2e/4d/a382452b17cf70a2313153c520ea4c96ab670c996cb3a95cc5d5ac7bfdac/onnxruntime-1.23.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f7d1fe034090a1e371b7f3ca9d3ccae2fabae8c1d8844fb7371d1ea38e8e8d2", size = 15219883, upload-time = "2025-10-22T03:46:21.66Z" }, - { url = "https://files.pythonhosted.org/packages/fb/56/179bf90679984c85b417664c26aae4f427cba7514bd2d65c43b181b7b08b/onnxruntime-1.23.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4ca88747e708e5c67337b0f65eed4b7d0dd70d22ac332038c9fc4635760018f7", size = 17370357, upload-time = "2025-10-22T03:46:57.968Z" }, - { url = "https://files.pythonhosted.org/packages/cd/6d/738e50c47c2fd285b1e6c8083f15dac1a5f6199213378a5f14092497296d/onnxruntime-1.23.2-cp310-cp310-win_amd64.whl", hash = "sha256:0be6a37a45e6719db5120e9986fcd30ea205ac8103fd1fb74b6c33348327a0cc", size = 13467651, upload-time = "2025-10-27T23:06:11.904Z" }, - { url = "https://files.pythonhosted.org/packages/44/be/467b00f09061572f022ffd17e49e49e5a7a789056bad95b54dfd3bee73ff/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:6f91d2c9b0965e86827a5ba01531d5b669770b01775b23199565d6c1f136616c", size = 17196113, upload-time = "2025-10-22T03:47:33.526Z" }, - { url = "https://files.pythonhosted.org/packages/9f/a8/3c23a8f75f93122d2b3410bfb74d06d0f8da4ac663185f91866b03f7da1b/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_x86_64.whl", hash = 
"sha256:87d8b6eaf0fbeb6835a60a4265fde7a3b60157cf1b2764773ac47237b4d48612", size = 19153857, upload-time = "2025-10-22T03:46:37.578Z" }, - { url = "https://files.pythonhosted.org/packages/3f/d8/506eed9af03d86f8db4880a4c47cd0dffee973ef7e4f4cff9f1d4bcf7d22/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbfd2fca76c855317568c1b36a885ddea2272c13cb0e395002c402f2360429a6", size = 15220095, upload-time = "2025-10-22T03:46:24.769Z" }, - { url = "https://files.pythonhosted.org/packages/e9/80/113381ba832d5e777accedc6cb41d10f9eca82321ae31ebb6bcede530cea/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da44b99206e77734c5819aa2142c69e64f3b46edc3bd314f6a45a932defc0b3e", size = 17372080, upload-time = "2025-10-22T03:47:00.265Z" }, - { url = "https://files.pythonhosted.org/packages/3a/db/1b4a62e23183a0c3fe441782462c0ede9a2a65c6bbffb9582fab7c7a0d38/onnxruntime-1.23.2-cp311-cp311-win_amd64.whl", hash = "sha256:902c756d8b633ce0dedd889b7c08459433fbcf35e9c38d1c03ddc020f0648c6e", size = 13468349, upload-time = "2025-10-22T03:47:25.783Z" }, - { url = "https://files.pythonhosted.org/packages/1b/9e/f748cd64161213adeef83d0cb16cb8ace1e62fa501033acdd9f9341fff57/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:b8f029a6b98d3cf5be564d52802bb50a8489ab73409fa9db0bf583eabb7c2321", size = 17195929, upload-time = "2025-10-22T03:47:36.24Z" }, - { url = "https://files.pythonhosted.org/packages/91/9d/a81aafd899b900101988ead7fb14974c8a58695338ab6a0f3d6b0100f30b/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:218295a8acae83905f6f1aed8cacb8e3eb3bd7513a13fe4ba3b2664a19fc4a6b", size = 19157705, upload-time = "2025-10-22T03:46:40.415Z" }, - { url = "https://files.pythonhosted.org/packages/3c/35/4e40f2fba272a6698d62be2cd21ddc3675edfc1a4b9ddefcc4648f115315/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:76ff670550dc23e58ea9bc53b5149b99a44e63b34b524f7b8547469aaa0dcb8c", size = 15226915, upload-time = "2025-10-22T03:46:27.773Z" }, - { url = "https://files.pythonhosted.org/packages/ef/88/9cc25d2bafe6bc0d4d3c1db3ade98196d5b355c0b273e6a5dc09c5d5d0d5/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f9b4ae77f8e3c9bee50c27bc1beede83f786fe1d52e99ac85aa8d65a01e9b77", size = 17382649, upload-time = "2025-10-22T03:47:02.782Z" }, - { url = "https://files.pythonhosted.org/packages/c0/b4/569d298f9fc4d286c11c45e85d9ffa9e877af12ace98af8cab52396e8f46/onnxruntime-1.23.2-cp312-cp312-win_amd64.whl", hash = "sha256:25de5214923ce941a3523739d34a520aac30f21e631de53bba9174dc9c004435", size = 13470528, upload-time = "2025-10-22T03:47:28.106Z" }, - { url = "https://files.pythonhosted.org/packages/3d/41/fba0cabccecefe4a1b5fc8020c44febb334637f133acefc7ec492029dd2c/onnxruntime-1.23.2-cp313-cp313-macosx_13_0_arm64.whl", hash = "sha256:2ff531ad8496281b4297f32b83b01cdd719617e2351ffe0dba5684fb283afa1f", size = 17196337, upload-time = "2025-10-22T03:46:35.168Z" }, - { url = "https://files.pythonhosted.org/packages/fe/f9/2d49ca491c6a986acce9f1d1d5fc2099108958cc1710c28e89a032c9cfe9/onnxruntime-1.23.2-cp313-cp313-macosx_13_0_x86_64.whl", hash = "sha256:162f4ca894ec3de1a6fd53589e511e06ecdc3ff646849b62a9da7489dee9ce95", size = 19157691, upload-time = "2025-10-22T03:46:43.518Z" }, - { url = "https://files.pythonhosted.org/packages/1c/a1/428ee29c6eaf09a6f6be56f836213f104618fb35ac6cc586ff0f477263eb/onnxruntime-1.23.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45d127d6e1e9b99d1ebeae9bcd8f98617a812f53f46699eafeb976275744826b", size = 15226898, upload-time = "2025-10-22T03:46:30.039Z" }, - { url = "https://files.pythonhosted.org/packages/f2/2b/b57c8a2466a3126dbe0a792f56ad7290949b02f47b86216cd47d857e4b77/onnxruntime-1.23.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:8bace4e0d46480fbeeb7bbe1ffe1f080e6663a42d1086ff95c1551f2d39e7872", size = 17382518, upload-time = "2025-10-22T03:47:05.407Z" }, - { url = "https://files.pythonhosted.org/packages/4a/93/aba75358133b3a941d736816dd392f687e7eab77215a6e429879080b76b6/onnxruntime-1.23.2-cp313-cp313-win_amd64.whl", hash = "sha256:1f9cc0a55349c584f083c1c076e611a7c35d5b867d5d6e6d6c823bf821978088", size = 13470276, upload-time = "2025-10-22T03:47:31.193Z" }, - { url = "https://files.pythonhosted.org/packages/7c/3d/6830fa61c69ca8e905f237001dbfc01689a4e4ab06147020a4518318881f/onnxruntime-1.23.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9d2385e774f46ac38f02b3a91a91e30263d41b2f1f4f26ae34805b2a9ddef466", size = 15229610, upload-time = "2025-10-22T03:46:32.239Z" }, - { url = "https://files.pythonhosted.org/packages/b6/ca/862b1e7a639460f0ca25fd5b6135fb42cf9deea86d398a92e44dfda2279d/onnxruntime-1.23.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2b9233c4947907fd1818d0e581c049c41ccc39b2856cc942ff6d26317cee145", size = 17394184, upload-time = "2025-10-22T03:47:08.127Z" }, + { url = "https://files.pythonhosted.org/packages/d2/88/d9757c62a0f96b5193f8d447a141eefd14498c404cc5caf1a6f3233cf102/onnxruntime-1.24.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:79b3119ab9f4f3817062e6dbe7f4a44937de93905e3a31ba34313d18cb49e7be", size = 17212018, upload-time = "2026-02-05T17:32:13.986Z" }, + { url = "https://files.pythonhosted.org/packages/7b/61/b3305c39144e19dbe8791802076b29b4b592b09de03d0e340c1314bfd408/onnxruntime-1.24.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:86bc43e922b1f581b3de26a3dc402149c70e5542fceb5bec6b3a85542dbeb164", size = 15018703, upload-time = "2026-02-05T17:30:53.846Z" }, + { url = "https://files.pythonhosted.org/packages/94/d6/d273b75fe7825ea3feed321dd540aef33d8a1380ddd8ac3bb70a8ed000fe/onnxruntime-1.24.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:1cabe71ca14dcfbf812d312aab0a704507ac909c137ee6e89e4908755d0fc60e", size = 17096352, upload-time = "2026-02-05T17:31:29.057Z" }, + { url = "https://files.pythonhosted.org/packages/21/3f/0616101a3938bfe2918ea60b581a9bbba61ffc255c63388abb0885f7ce18/onnxruntime-1.24.1-cp311-cp311-win_amd64.whl", hash = "sha256:3273c330f5802b64b4103e87b5bbc334c0355fff1b8935d8910b0004ce2f20c8", size = 12493235, upload-time = "2026-02-05T17:32:04.451Z" }, + { url = "https://files.pythonhosted.org/packages/c8/30/437de870e4e1c6d237a2ca5e11f54153531270cb5c745c475d6e3d5c5dcf/onnxruntime-1.24.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:7307aab9e2e879c0171f37e0eb2808a5b4aec7ba899bb17c5f0cedfc301a8ac2", size = 17211043, upload-time = "2026-02-05T17:32:16.909Z" }, + { url = "https://files.pythonhosted.org/packages/21/60/004401cd86525101ad8aa9eec301327426555d7a77fac89fd991c3c7aae6/onnxruntime-1.24.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:780add442ce2d4175fafb6f3102cdc94243acffa3ab16eacc03dd627cc7b1b54", size = 15016224, upload-time = "2026-02-05T17:30:56.791Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a1/43ad01b806a1821d1d6f98725edffcdbad54856775643718e9124a09bfbe/onnxruntime-1.24.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6119526eda12613f0d0498e2ae59563c247c370c9cef74c2fc93133dde157", size = 17098191, upload-time = "2026-02-05T17:31:31.87Z" }, + { url = "https://files.pythonhosted.org/packages/ff/37/5beb65270864037d5c8fb25cfe6b23c48b618d1f4d06022d425cbf29bd9c/onnxruntime-1.24.1-cp312-cp312-win_amd64.whl", hash = "sha256:df0af2f1cfcfff9094971c7eb1d1dfae7ccf81af197493c4dc4643e4342c0946", size = 12493108, upload-time = "2026-02-05T17:32:07.076Z" }, + { url = "https://files.pythonhosted.org/packages/95/77/7172ecfcbdabd92f338e694f38c325f6fab29a38fa0a8c3d1c85b9f4617c/onnxruntime-1.24.1-cp313-cp313-macosx_14_0_arm64.whl", hash = 
"sha256:82e367770e8fba8a87ba9f4c04bb527e6d4d7204540f1390f202c27a3b759fb4", size = 17211381, upload-time = "2026-02-05T17:31:09.601Z" }, + { url = "https://files.pythonhosted.org/packages/79/5b/532a0d75b93bbd0da0e108b986097ebe164b84fbecfdf2ddbf7c8a3a2e83/onnxruntime-1.24.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1099f3629832580fedf415cfce2462a56cc9ca2b560d6300c24558e2ac049134", size = 15016000, upload-time = "2026-02-05T17:31:00.116Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b5/40606c7bce0702975a077bc6668cd072cd77695fc5c0b3fcf59bdb1fe65e/onnxruntime-1.24.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6361dda4270f3939a625670bd67ae0982a49b7f923207450e28433abc9c3a83b", size = 17097637, upload-time = "2026-02-05T17:31:34.787Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a0/9e8f7933796b466241b934585723c700d8fb6bde2de856e65335193d7c93/onnxruntime-1.24.1-cp313-cp313-win_amd64.whl", hash = "sha256:bd1e4aefe73b6b99aa303cd72562ab6de3cccb09088100f8ad1c974be13079c7", size = 12492467, upload-time = "2026-02-05T17:32:09.834Z" }, + { url = "https://files.pythonhosted.org/packages/fb/8a/ee07d86e35035f9fed42497af76435f5a613d4e8b6c537ea0f8ef9fa85da/onnxruntime-1.24.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:88a2b54dca00c90fca6303eedf13d49b5b4191d031372c2e85f5cffe4d86b79e", size = 15025407, upload-time = "2026-02-05T17:31:02.251Z" }, + { url = "https://files.pythonhosted.org/packages/fd/9e/ab3e1dda4b126313d240e1aaa87792ddb1f5ba6d03ca2f093a7c4af8c323/onnxruntime-1.24.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2dfbba602da840615ed5b431facda4b3a43b5d8276cf9e0dbf13d842df105838", size = 17099810, upload-time = "2026-02-05T17:31:37.537Z" }, + { url = "https://files.pythonhosted.org/packages/87/23/167d964414cee2af9c72af323b28d2c4cb35beed855c830a23f198265c79/onnxruntime-1.24.1-cp314-cp314-macosx_14_0_arm64.whl", hash = 
"sha256:890c503ca187bc883c3aa72c53f2a604ec8e8444bdd1bf6ac243ec6d5e085202", size = 17214004, upload-time = "2026-02-05T17:31:11.917Z" }, + { url = "https://files.pythonhosted.org/packages/b4/24/6e5558fdd51027d6830cf411bc003ae12c64054826382e2fab89e99486a0/onnxruntime-1.24.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da1b84b3bdeec543120df169e5e62a1445bf732fc2c7fb036c2f8a4090455e8", size = 15017034, upload-time = "2026-02-05T17:31:04.331Z" }, + { url = "https://files.pythonhosted.org/packages/91/d4/3cb1c9eaae1103265ed7eb00a3eaeb0d9ba51dc88edc398b7071c9553bed/onnxruntime-1.24.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:557753ec345efa227c6a65139f3d29c76330fcbd54cc10dd1b64232ebb939c13", size = 17097531, upload-time = "2026-02-05T17:31:40.303Z" }, + { url = "https://files.pythonhosted.org/packages/0f/da/4522b199c12db7c5b46aaf265ee0d741abe65ea912f6c0aaa2cc18a4654d/onnxruntime-1.24.1-cp314-cp314-win_amd64.whl", hash = "sha256:ea4942104805e868f3ddddfa1fbb58b04503a534d489ab2d1452bbfa345c78c2", size = 12795556, upload-time = "2026-02-05T17:32:11.886Z" }, + { url = "https://files.pythonhosted.org/packages/a1/53/3b8969417276b061ff04502ccdca9db4652d397abbeb06c9f6ae05cec9ca/onnxruntime-1.24.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea8963a99e0f10489acdf00ef3383c3232b7e44aa497b063c63be140530d9f85", size = 15025434, upload-time = "2026-02-05T17:31:06.942Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a2/cfcf009eb38d90cc628c087b6506b3dfe1263387f3cbbf8d272af4fef957/onnxruntime-1.24.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34488aa760fb5c2e6d06a7ca9241124eb914a6a06f70936a14c669d1b3df9598", size = 17099815, upload-time = "2026-02-05T17:31:43.092Z" }, ] [[package]] name = "onnxruntime-gpu" -version = "1.23.2" +version = "1.24.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "coloredlogs" }, - { 
name = "flatbuffers" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "packaging" }, - { name = "protobuf" }, - { name = "sympy" }, + { name = "flatbuffers", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'aarch64') or (python_full_version < '3.11' and sys_platform != 'linux')" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64') or (python_full_version >= '3.11' and sys_platform != 'linux')" }, + { name = "packaging", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "protobuf", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "sympy", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/ae/39283748c68a96be4f5f8a9561e0e3ca92af1eae6c2b1c07fb1da5f65cd1/onnxruntime_gpu-1.23.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18de50c6c8eea50acc405ea13d299aec593e46478d7a22cd32cdbbdf7c42899d", size = 300525411, upload-time = "2025-10-22T16:56:08.415Z" }, - { url = "https://files.pythonhosted.org/packages/21/c9/47abd3ec1f34498224d2a8f5cc4d1445eb5cc7dee8e3644b1a972619c0d2/onnxruntime_gpu-1.23.2-cp310-cp310-win_amd64.whl", hash = "sha256:deba091e15357355aa836fd64c6c4ac97dd0c4609c38b08a69675073ea46b321", size = 244505340, upload-time = "2025-10-27T22:47:43.215Z" }, - { url = 
"https://files.pythonhosted.org/packages/43/a4/e3d7fbe32b44e814ae24ed642f05fac5d96d120efd82db7a7cac936e85a9/onnxruntime_gpu-1.23.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d76d1ac7a479ecc3ac54482eea4ba3b10d68e888a0f8b5f420f0bdf82c5eec59", size = 300525715, upload-time = "2025-10-22T16:56:19.928Z" }, - { url = "https://files.pythonhosted.org/packages/a9/5c/dba7c009e73dcce02e7f714574345b5e607c5c75510eb8d7bef682b45e5d/onnxruntime_gpu-1.23.2-cp311-cp311-win_amd64.whl", hash = "sha256:054282614c2fc9a4a27d74242afbae706a410f1f63cc35bc72f99709029a5ba4", size = 244506823, upload-time = "2025-10-22T16:55:09.526Z" }, - { url = "https://files.pythonhosted.org/packages/6c/d9/b7140a4f1615195938c7e358c0804bb84271f0d6886b5cbf105c6cb58aae/onnxruntime_gpu-1.23.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f2d1f720685d729b5258ec1b36dee1de381b8898189908c98cbeecdb2f2b5c2", size = 300509596, upload-time = "2025-10-22T16:56:31.728Z" }, - { url = "https://files.pythonhosted.org/packages/87/da/2685c79e5ea587beddebe083601fead0bdf3620bc2f92d18756e7de8a636/onnxruntime_gpu-1.23.2-cp312-cp312-win_amd64.whl", hash = "sha256:fe925a84b00e291e0ad3fac29bfd8f8e06112abc760cdc82cb711b4f3935bd95", size = 244508327, upload-time = "2025-10-22T16:55:19.397Z" }, - { url = "https://files.pythonhosted.org/packages/03/05/40d561636e4114b54aa06d2371bfbca2d03e12cfdf5d4b85814802f18a75/onnxruntime_gpu-1.23.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e8f75af5da07329d0c3a5006087f4051d8abd133b4be7c9bae8cdab7bea4c26", size = 300515567, upload-time = "2025-10-22T16:56:43.794Z" }, - { url = "https://files.pythonhosted.org/packages/b6/3b/418300438063d403384c79eaef1cb13c97627042f2247b35a887276a355a/onnxruntime_gpu-1.23.2-cp313-cp313-win_amd64.whl", hash = "sha256:7f1b3f49e5e126b99e23ec86b4203db41c2a911f6165f7624f2bc8267aaca767", size = 244507535, upload-time = "2025-10-22T16:55:28.532Z" }, - { url = 
"https://files.pythonhosted.org/packages/b8/dc/80b145e3134d7eba31309b3299a2836e37c76e4c419a261ad9796f8f8d65/onnxruntime_gpu-1.23.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20959cd4ae358aab6579ab9123284a7b1498f7d51ec291d429a5edc26511306f", size = 300525759, upload-time = "2025-10-22T16:56:56.925Z" }, + { url = "https://files.pythonhosted.org/packages/ca/c7/07d06175f1124fc89e8b7da30d70eb8e0e1400d90961ae1cbea9da69e69b/onnxruntime_gpu-1.24.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac4bfc90c376516b13d709764ab257e4e3d78639bf6a2ccfc826e9db4a5c7ddf", size = 252616647, upload-time = "2026-02-05T17:24:02.993Z" }, + { url = "https://files.pythonhosted.org/packages/8c/9a/47c2a873bf5fc307cda696e8a8cb54b7c709f5a4b3f9e2b4a636066a63c2/onnxruntime_gpu-1.24.1-cp311-cp311-win_amd64.whl", hash = "sha256:ccd800875cb6c04ce623154c7fa312da21631ef89a9543c9a21593817cfa3473", size = 207089749, upload-time = "2026-02-05T17:23:59.5Z" }, + { url = "https://files.pythonhosted.org/packages/db/a8/fb1a36a052321a839cc9973f6cfd630709412a24afff2d7315feb3efc4b8/onnxruntime_gpu-1.24.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:710bf83751e6761584ad071102af3cbffd4b42bb77b2e3caacfb54ffbaa0666b", size = 252628733, upload-time = "2026-02-05T17:24:12.926Z" }, + { url = "https://files.pythonhosted.org/packages/52/65/48f694b81a963f3ee575041d5f2879b15268f5e7e14d90c3e671836c9646/onnxruntime_gpu-1.24.1-cp312-cp312-win_amd64.whl", hash = "sha256:b128a42b3fa098647765ba60c2af9d4bf839181307cfac27da649364feb37f7b", size = 207089008, upload-time = "2026-02-05T17:24:07.126Z" }, + { url = "https://files.pythonhosted.org/packages/7a/e7/4e19062e95d3701c0d32c228aa848ba4a1cc97651e53628d978dba8e1267/onnxruntime_gpu-1.24.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:db9acb0d0e59d93b4fa6b7fd44284ece4408d0acee73235d43ed343f8cee7ee5", size = 252629216, upload-time = "2026-02-05T17:24:24.604Z" 
}, + { url = "https://files.pythonhosted.org/packages/c4/82/223d7120d8a98b07c104ddecfb0cc2536188e566a4e9c2dee7572453f89c/onnxruntime_gpu-1.24.1-cp313-cp313-win_amd64.whl", hash = "sha256:59fdb40743f0722f3b859209f649ea160ca6bb42799e43f49b70a3ec5fc8c4ad", size = 207089285, upload-time = "2026-02-05T17:24:18.497Z" }, + { url = "https://files.pythonhosted.org/packages/ac/82/3159e57f09d7e6c8ad47d8ba8d5bd7494f383bc1071481cf38c9c8142bf9/onnxruntime_gpu-1.24.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:88ca04e1dffea2d4c3c79cf4de7f429e99059d085f21b3e775a8d36380cd5186", size = 252633977, upload-time = "2026-02-05T17:24:33.568Z" }, + { url = "https://files.pythonhosted.org/packages/c1/b4/51ad0ab878ff1456a831a0566b4db982a904e22f138e4b2c5f021bac517f/onnxruntime_gpu-1.24.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ced66900b1f48bddb62b5233925c3b56f8e008e2c34ebf8c060b20cae5842bcf", size = 252629039, upload-time = "2026-02-05T17:24:43.551Z" }, + { url = "https://files.pythonhosted.org/packages/9c/46/336d4e09a6af66532eedde5c8f03a73eaa91a046b408522259ab6a604363/onnxruntime_gpu-1.24.1-cp314-cp314-win_amd64.whl", hash = "sha256:129f6ae8b331a6507759597cd317b23e94aed6ead1da951f803c3328f2990b0c", size = 209487551, upload-time = "2026-02-05T17:24:26.373Z" }, + { url = "https://files.pythonhosted.org/packages/6a/94/a3b20276261f5e64dbd72bda656af988282cff01f18c2685953600e2f810/onnxruntime_gpu-1.24.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2cee7e12b0f4813c62f9a48df83fd01d066cc970400c832252cf3c155a6957", size = 252633096, upload-time = "2026-02-05T17:24:53.248Z" }, ] [[package]] @@ -5415,22 +6073,23 @@ name = "open3d" version = "0.19.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "addict" }, - { name = "configargparse" }, - { name = "dash" }, - { name = "flask" }, - { name = "matplotlib" }, - { name = "nbformat" }, - { name = "numpy", version = "2.2.6", 
source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "pandas" }, - { name = "pillow" }, - { name = "pyquaternion" }, - { name = "pyyaml" }, - { name = "scikit-learn", version = "1.7.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scikit-learn", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "tqdm" }, - { name = "werkzeug" }, + { name = "addict", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "configargparse", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "dash", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "flask", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "matplotlib", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "nbformat", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'aarch64') or (python_full_version < '3.11' and sys_platform != 'linux')" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64') or (python_full_version >= '3.11' and sys_platform != 'linux')" }, + { name = "pandas", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'aarch64') or (python_full_version < '3.11' and sys_platform != 'linux')" }, + { name = "pandas", version = "3.0.0", source = { registry = "https://pypi.org/simple" 
}, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64') or (python_full_version >= '3.11' and sys_platform != 'linux')" }, + { name = "pillow", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "pyquaternion", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "pyyaml", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "scikit-learn", version = "1.7.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'aarch64') or (python_full_version < '3.11' and sys_platform != 'linux')" }, + { name = "scikit-learn", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64') or (python_full_version >= '3.11' and sys_platform != 'linux')" }, + { name = "tqdm", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, + { name = "werkzeug", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/5c/4b/91e8a4100adf0ccd2f7ad21dd24c2e3d8f12925396528d0462cfb1735e5a/open3d-0.19.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f7128ded206e07987cc29d0917195fb64033dea31e0d60dead3629b33d3c175f", size = 103086005, upload-time = "2025-01-08T07:25:56.755Z" }, @@ -5444,9 +6103,29 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9f/c4/35a6e0a35aa72420e75dc28d54b24beaff79bcad150423e47c67d2ad8773/open3d-0.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:665839837e1d3a62524804c31031462c3b548a2b6ed55214e6deb91522844f97", size = 69169961, upload-time = "2025-01-08T07:27:35.392Z" }, ] +[[package]] +name = "open3d-unofficial-arm" +version = "0.19.0.post5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "configargparse", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" }, + 
{ name = "dash", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" }, + { name = "flask", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" }, + { name = "nbformat", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'" }, + { name = "werkzeug", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/87/95d3cf9017a0e89a708e611d003abeb66c88d7947fa7238962971cc8b0cb/open3d_unofficial_arm-0.19.0.post5-cp310-cp310-manylinux_2_35_aarch64.whl", hash = "sha256:26bc160f3326a74b232f026d741a576bf0d1fa7b1d5128c5e979d7b4d2d1b983", size = 48230542, upload-time = "2026-02-10T08:37:33.928Z" }, + { url = "https://files.pythonhosted.org/packages/b7/98/e5f803c0ccc23ff68eee12d4b43aa48514dca604e3805f243f399050bd64/open3d_unofficial_arm-0.19.0.post5-cp311-cp311-manylinux_2_35_aarch64.whl", hash = "sha256:003db3e400cd8053e9428c6082af72e73082a28b3e69e9c49f69f83cf5205bb4", size = 48233477, upload-time = "2026-02-10T08:37:47.281Z" }, + { url = "https://files.pythonhosted.org/packages/36/36/df78b304227d7249f3cdeaf2444da17d5826a2c7a679e71084b3aa0d1b9a/open3d_unofficial_arm-0.19.0.post5-cp312-cp312-manylinux_2_35_aarch64.whl", hash = "sha256:984d7f5757e9cb2f849ce43f43046a30a82c221be0778149642cdfe450bd3664", size = 48221813, upload-time = "2026-02-10T08:37:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/fa/93/25b667f4dea742d870cce76b404aab46ebd47bd66a3efc162bc86e4c81fc/open3d_unofficial_arm-0.19.0.post5-cp313-cp313-manylinux_2_35_aarch64.whl", hash = 
"sha256:ced1653305fa052015fea3c9d1d7672ce2ebb8f2251dfe0258ee7073e5932da7", size = 48223510, upload-time = "2026-02-10T08:38:00.654Z" }, +] + [[package]] name = "openai" -version = "2.14.0" +version = "2.21.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -5458,9 +6137,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d8/b1/12fe1c196bea326261718eb037307c1c1fe1dedc2d2d4de777df822e6238/openai-2.14.0.tar.gz", hash = "sha256:419357bedde9402d23bf8f2ee372fca1985a73348debba94bddff06f19459952", size = 626938, upload-time = "2025-12-19T03:28:45.742Z" } +sdist = { url = "https://files.pythonhosted.org/packages/92/e5/3d197a0947a166649f566706d7a4c8f7fe38f1fa7b24c9bcffe4c7591d44/openai-2.21.0.tar.gz", hash = "sha256:81b48ce4b8bbb2cc3af02047ceb19561f7b1dc0d4e52d1de7f02abfd15aa59b7", size = 644374, upload-time = "2026-02-14T00:12:01.577Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/4b/7c1a00c2c3fbd004253937f7520f692a9650767aa73894d7a34f0d65d3f4/openai-2.14.0-py3-none-any.whl", hash = "sha256:7ea40aca4ffc4c4a776e77679021b47eec1160e341f42ae086ba949c9dcc9183", size = 1067558, upload-time = "2025-12-19T03:28:43.727Z" }, + { url = "https://files.pythonhosted.org/packages/cc/56/0a89092a453bb2c676d66abee44f863e742b2110d4dbb1dbcca3f7e5fc33/openai-2.21.0-py3-none-any.whl", hash = "sha256:0bc1c775e5b1536c294eded39ee08f8407656537ccc71b1004104fe1602e267c", size = 1103065, upload-time = "2026-02-14T00:11:59.603Z" }, ] [[package]] @@ -5499,20 +6178,40 @@ wheels = [ [[package]] name = "opencv-python" -version = "4.11.0.86" +version = "4.13.0.92" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/17/06/68c27a523103dad5837dc5b87e71285280c4f098c60e4fe8a8db6486ab09/opencv-python-4.11.0.86.tar.gz", hash = "sha256:03d60ccae62304860d232272e4a4fda93c39d595780cb40b161b310244b736a4", size = 95171956, upload-time = "2025-01-16T13:52:24.737Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/4d/53b30a2a3ac1f75f65a59eb29cf2ee7207ce64867db47036ad61743d5a23/opencv_python-4.11.0.86-cp37-abi3-macosx_13_0_arm64.whl", hash = "sha256:432f67c223f1dc2824f5e73cdfcd9db0efc8710647d4e813012195dc9122a52a", size = 37326322, upload-time = "2025-01-16T13:52:25.887Z" }, - { url = "https://files.pythonhosted.org/packages/3b/84/0a67490741867eacdfa37bc18df96e08a9d579583b419010d7f3da8ff503/opencv_python-4.11.0.86-cp37-abi3-macosx_13_0_x86_64.whl", hash = "sha256:9d05ef13d23fe97f575153558653e2d6e87103995d54e6a35db3f282fe1f9c66", size = 56723197, upload-time = "2025-01-16T13:55:21.222Z" }, - { url = "https://files.pythonhosted.org/packages/f3/bd/29c126788da65c1fb2b5fb621b7fed0ed5f9122aa22a0868c5e2c15c6d23/opencv_python-4.11.0.86-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b92ae2c8852208817e6776ba1ea0d6b1e0a1b5431e971a2a0ddd2a8cc398202", size = 42230439, upload-time = "2025-01-16T13:51:35.822Z" }, - { url = "https://files.pythonhosted.org/packages/2c/8b/90eb44a40476fa0e71e05a0283947cfd74a5d36121a11d926ad6f3193cc4/opencv_python-4.11.0.86-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b02611523803495003bd87362db3e1d2a0454a6a63025dc6658a9830570aa0d", size = 62986597, upload-time = "2025-01-16T13:52:08.836Z" }, - { url = "https://files.pythonhosted.org/packages/fb/d7/1d5941a9dde095468b288d989ff6539dd69cd429dbf1b9e839013d21b6f0/opencv_python-4.11.0.86-cp37-abi3-win32.whl", hash = "sha256:810549cb2a4aedaa84ad9a1c92fbfdfc14090e2749cedf2c1589ad8359aa169b", size = 29384337, upload-time = "2025-01-16T13:52:13.549Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/7d/f1c30a92854540bf789e9cd5dde7ef49bbe63f855b85a2e6b3db8135c591/opencv_python-4.11.0.86-cp37-abi3-win_amd64.whl", hash = "sha256:085ad9b77c18853ea66283e98affefe2de8cc4c1f43eda4c100cf9b2721142ec", size = 39488044, upload-time = "2025-01-16T13:52:21.928Z" }, + { url = "https://files.pythonhosted.org/packages/fc/6f/5a28fef4c4a382be06afe3938c64cc168223016fa520c5abaf37e8862aa5/opencv_python-4.13.0.92-cp37-abi3-macosx_13_0_arm64.whl", hash = "sha256:caf60c071ec391ba51ed00a4a920f996d0b64e3e46068aac1f646b5de0326a19", size = 46247052, upload-time = "2026-02-05T07:01:25.046Z" }, + { url = "https://files.pythonhosted.org/packages/08/ac/6c98c44c650b8114a0fb901691351cfb3956d502e8e9b5cd27f4ee7fbf2f/opencv_python-4.13.0.92-cp37-abi3-macosx_14_0_x86_64.whl", hash = "sha256:5868a8c028a0b37561579bfb8ac1875babdc69546d236249fff296a8c010ccf9", size = 32568781, upload-time = "2026-02-05T07:01:41.379Z" }, + { url = "https://files.pythonhosted.org/packages/3e/51/82fed528b45173bf629fa44effb76dff8bc9f4eeaee759038362dfa60237/opencv_python-4.13.0.92-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bc2596e68f972ca452d80f444bc404e08807d021fbba40df26b61b18e01838a", size = 47685527, upload-time = "2026-02-05T06:59:11.24Z" }, + { url = "https://files.pythonhosted.org/packages/db/07/90b34a8e2cf9c50fe8ed25cac9011cde0676b4d9d9c973751ac7616223a2/opencv_python-4.13.0.92-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:402033cddf9d294693094de5ef532339f14ce821da3ad7df7c9f6e8316da32cf", size = 70460872, upload-time = "2026-02-05T06:59:19.162Z" }, + { url = "https://files.pythonhosted.org/packages/02/6d/7a9cc719b3eaf4377b9c2e3edeb7ed3a81de41f96421510c0a169ca3cfd4/opencv_python-4.13.0.92-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:bccaabf9eb7f897ca61880ce2869dcd9b25b72129c28478e7f2a5e8dee945616", size = 46708208, upload-time = "2026-02-05T06:59:15.419Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/55/b3b49a1b97aabcfbbd6c7326df9cb0b6fa0c0aefa8e89d500939e04aa229/opencv_python-4.13.0.92-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:620d602b8f7d8b8dab5f4b99c6eb353e78d3fb8b0f53db1bd258bb1aa001c1d5", size = 72927042, upload-time = "2026-02-05T06:59:23.389Z" }, + { url = "https://files.pythonhosted.org/packages/fb/17/de5458312bcb07ddf434d7bfcb24bb52c59635ad58c6e7c751b48949b009/opencv_python-4.13.0.92-cp37-abi3-win32.whl", hash = "sha256:372fe164a3148ac1ca51e5f3ad0541a4a276452273f503441d718fab9c5e5f59", size = 30932638, upload-time = "2026-02-05T07:02:14.98Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a5/1be1516390333ff9be3a9cb648c9f33df79d5096e5884b5df71a588af463/opencv_python-4.13.0.92-cp37-abi3-win_amd64.whl", hash = "sha256:423d934c9fafb91aad38edf26efb46da91ffbc05f3f59c4b0c72e699720706f5", size = 40212062, upload-time = "2026-02-05T07:02:12.724Z" }, +] + +[[package]] +name = "opencv-python-headless" +version = "4.13.0.92" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/42/2310883be3b8826ac58c3f2787b9358a2d46923d61f88fedf930bc59c60c/opencv_python_headless-4.13.0.92-cp37-abi3-macosx_13_0_arm64.whl", hash = "sha256:1a7d040ac656c11b8c38677cc8cccdc149f98535089dbe5b081e80a4e5903209", size = 46247192, upload-time = "2026-02-05T07:01:35.187Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1e/6f9e38005a6f7f22af785df42a43139d0e20f169eb5787ce8be37ee7fcc9/opencv_python_headless-4.13.0.92-cp37-abi3-macosx_14_0_x86_64.whl", hash = "sha256:3e0a6f0a37994ec6ce5f59e936be21d5d6384a4556f2d2da9c2f9c5dc948394c", size = 32568914, upload-time = 
"2026-02-05T07:01:51.989Z" }, + { url = "https://files.pythonhosted.org/packages/21/76/9417a6aef9def70e467a5bf560579f816148a4c658b7d525581b356eda9e/opencv_python_headless-4.13.0.92-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5c8cfc8e87ed452b5cecb9419473ee5560a989859fe1d10d1ce11ae87b09a2cb", size = 33703709, upload-time = "2026-02-05T10:24:46.469Z" }, + { url = "https://files.pythonhosted.org/packages/92/ce/bd17ff5772938267fd49716e94ca24f616ff4cb1ff4c6be13085108037be/opencv_python_headless-4.13.0.92-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0525a3d2c0b46c611e2130b5fdebc94cf404845d8fa64d2f3a3b679572a5bd22", size = 56016764, upload-time = "2026-02-05T10:26:48.904Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b4/b7bcbf7c874665825a8c8e1097e93ea25d1f1d210a3e20d4451d01da30aa/opencv_python_headless-4.13.0.92-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb60e36b237b1ebd40a912da5384b348df8ed534f6f644d8e0b4f103e272ba7d", size = 35010236, upload-time = "2026-02-05T10:28:11.031Z" }, + { url = "https://files.pythonhosted.org/packages/4b/33/b5db29a6c00eb8f50708110d8d453747ca125c8b805bc437b289dbdcc057/opencv_python_headless-4.13.0.92-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0bd48544f77c68b2941392fcdf9bcd2b9cdf00e98cb8c29b2455d194763cf99e", size = 60391106, upload-time = "2026-02-05T10:30:14.236Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c3/52cfea47cd33e53e8c0fbd6e7c800b457245c1fda7d61660b4ffe9596a7f/opencv_python_headless-4.13.0.92-cp37-abi3-win32.whl", hash = "sha256:a7cf08e5b191f4ebb530791acc0825a7986e0d0dee2a3c491184bd8599848a4b", size = 30812232, upload-time = "2026-02-05T07:02:29.594Z" }, + { url = "https://files.pythonhosted.org/packages/4a/90/b338326131ccb2aaa3c2c85d00f41822c0050139a4bfe723cfd95455bd2d/opencv_python_headless-4.13.0.92-cp37-abi3-win_amd64.whl", hash = "sha256:77a82fe35ddcec0f62c15f2ba8a12ecc2ed4207c17b0902c7a3151ae29f37fb6", size = 40070414, 
upload-time = "2026-02-05T07:02:26.448Z" }, ] [[package]] @@ -5615,9 +6314,9 @@ dependencies = [ { name = "chex", version = "0.1.90", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "chex", version = "0.1.91", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "jax", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "jax", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jax", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "jaxlib", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "jaxlib", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jaxlib", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] @@ -5628,7 +6327,7 @@ wheels = [ [[package]] name = "orbax-checkpoint" -version = "0.11.31" +version = "0.11.32" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "absl-py" }, @@ -5636,7 +6335,7 @@ dependencies = [ { name = "etils", extra = ["epath", "epy"] }, { name = "humanize" }, { name = "jax", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "jax", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { 
name = "jax", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "msgpack" }, { name = "nest-asyncio" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, @@ -5646,148 +6345,169 @@ dependencies = [ { name = "pyyaml" }, { name = "simplejson" }, { name = "tensorstore", version = "0.1.78", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "tensorstore", version = "0.1.80", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "tensorstore", version = "0.1.81", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/1e/c65e35ab5ef9d380f4a4ce7f983a0dd360d229eec22204aacb80d2b91aca/orbax_checkpoint-0.11.31.tar.gz", hash = "sha256:f021193a619782655798bc4a285f40612f6fe647ddeb303d1f49cdbc5645e319", size = 406137, upload-time = "2025-12-11T18:09:17.181Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/5f/1733e1143696319f311bc4de48da2e306a1f62f0925f9fe9d797b8ba8abe/orbax_checkpoint-0.11.32.tar.gz", hash = "sha256:523dcf61e93c7187c6b80fd50f3177114c0b957ea62cbb5c869c0b3e3d1a7dfc", size = 431601, upload-time = "2026-01-20T16:46:06.307Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/24/17/aae3144258f30920741ec91dbff0ff54665e572da50e6445ef437e08ec32/orbax_checkpoint-0.11.32-py3-none-any.whl", hash = "sha256:f0bfe9f9b1ce2c32c8f5dfab63393e51de525d41352abc17c7e21f9cc731d7a9", size = 634424, upload-time = "2026-01-20T16:46:04.382Z" }, +] + +[[package]] +name = "orbax-export" +version = "0.0.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "absl-py", marker = "python_full_version >= '3.11'" }, + { name = "dataclasses-json", marker 
= "python_full_version >= '3.11'" }, + { name = "etils", marker = "python_full_version >= '3.11'" }, + { name = "jax", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jaxlib", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jaxtyping", marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "orbax-checkpoint", marker = "python_full_version >= '3.11'" }, + { name = "protobuf", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/c8/ed7ac3c3c687bf129d7469b016c2b3d8777379f4ea453474e50ee41ce5cb/orbax_export-0.0.8.tar.gz", hash = "sha256:544eef564e2a6f17cd11b1167febe348b7b7cf56d9575de994a33d5613dd568a", size = 124980, upload-time = "2025-09-17T15:41:14.264Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/da/3a/abbc3c5cac2e082e88cfec2161bf837f18fef786caaa3f007594c839fc8c/orbax_checkpoint-0.11.31-py3-none-any.whl", hash = "sha256:b00e39cd61cbd6c7c78b091ccac0ed1bbf3cf7788e761618e7070761195bfcc0", size = 602358, upload-time = "2025-12-11T18:09:15.667Z" }, + { url = "https://files.pythonhosted.org/packages/7f/a9/3a755a58c8b6a36fe7e9e66bb6b93967ff49cdbc77cca8eacb2cf66435e9/orbax_export-0.0.8-py3-none-any.whl", hash = "sha256:f8037e1666ad28411cdb08d0668a2737b1281a32902c623ceda12109a089bc36", size = 180487, upload-time = "2025-09-17T15:41:12.928Z" }, ] [[package]] name = "orjson" -version = "3.11.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/04/b8/333fdb27840f3bf04022d21b654a35f58e15407183aeb16f3b41aa053446/orjson-3.11.5.tar.gz", hash = "sha256:82393ab47b4fe44ffd0a7659fa9cfaacc717eb617c93cde83795f14af5c2e9d5", size = 5972347, upload-time = 
"2025-12-06T15:55:39.458Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/79/19/b22cf9dad4db20c8737041046054cbd4f38bb5a2d0e4bb60487832ce3d76/orjson-3.11.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:df9eadb2a6386d5ea2bfd81309c505e125cfc9ba2b1b99a97e60985b0b3665d1", size = 245719, upload-time = "2025-12-06T15:53:43.877Z" }, - { url = "https://files.pythonhosted.org/packages/03/2e/b136dd6bf30ef5143fbe76a4c142828b55ccc618be490201e9073ad954a1/orjson-3.11.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc70da619744467d8f1f49a8cadae5ec7bbe054e5232d95f92ed8737f8c5870", size = 132467, upload-time = "2025-12-06T15:53:45.379Z" }, - { url = "https://files.pythonhosted.org/packages/ae/fc/ae99bfc1e1887d20a0268f0e2686eb5b13d0ea7bbe01de2b566febcd2130/orjson-3.11.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:073aab025294c2f6fc0807201c76fdaed86f8fc4be52c440fb78fbb759a1ac09", size = 130702, upload-time = "2025-12-06T15:53:46.659Z" }, - { url = "https://files.pythonhosted.org/packages/6e/43/ef7912144097765997170aca59249725c3ab8ef6079f93f9d708dd058df5/orjson-3.11.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:835f26fa24ba0bb8c53ae2a9328d1706135b74ec653ed933869b74b6909e63fd", size = 135907, upload-time = "2025-12-06T15:53:48.487Z" }, - { url = "https://files.pythonhosted.org/packages/3f/da/24d50e2d7f4092ddd4d784e37a3fa41f22ce8ed97abc9edd222901a96e74/orjson-3.11.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667c132f1f3651c14522a119e4dd631fad98761fa960c55e8e7430bb2a1ba4ac", size = 139935, upload-time = "2025-12-06T15:53:49.88Z" }, - { url = "https://files.pythonhosted.org/packages/02/4a/b4cb6fcbfff5b95a3a019a8648255a0fac9b221fbf6b6e72be8df2361feb/orjson-3.11.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:42e8961196af655bb5e63ce6c60d25e8798cd4dfbc04f4203457fa3869322c2e", size = 137541, upload-time = "2025-12-06T15:53:51.226Z" }, - { url = "https://files.pythonhosted.org/packages/a5/99/a11bd129f18c2377c27b2846a9d9be04acec981f770d711ba0aaea563984/orjson-3.11.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75412ca06e20904c19170f8a24486c4e6c7887dea591ba18a1ab572f1300ee9f", size = 139031, upload-time = "2025-12-06T15:53:52.309Z" }, - { url = "https://files.pythonhosted.org/packages/64/29/d7b77d7911574733a036bb3e8ad7053ceb2b7d6ea42208b9dbc55b23b9ed/orjson-3.11.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6af8680328c69e15324b5af3ae38abbfcf9cbec37b5346ebfd52339c3d7e8a18", size = 141622, upload-time = "2025-12-06T15:53:53.606Z" }, - { url = "https://files.pythonhosted.org/packages/93/41/332db96c1de76b2feda4f453e91c27202cd092835936ce2b70828212f726/orjson-3.11.5-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a86fe4ff4ea523eac8f4b57fdac319faf037d3c1be12405e6a7e86b3fbc4756a", size = 413800, upload-time = "2025-12-06T15:53:54.866Z" }, - { url = "https://files.pythonhosted.org/packages/76/e1/5a0d148dd1f89ad2f9651df67835b209ab7fcb1118658cf353425d7563e9/orjson-3.11.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e607b49b1a106ee2086633167033afbd63f76f2999e9236f638b06b112b24ea7", size = 151198, upload-time = "2025-12-06T15:53:56.383Z" }, - { url = "https://files.pythonhosted.org/packages/0d/96/8db67430d317a01ae5cf7971914f6775affdcfe99f5bff9ef3da32492ecc/orjson-3.11.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7339f41c244d0eea251637727f016b3d20050636695bc78345cce9029b189401", size = 141984, upload-time = "2025-12-06T15:53:57.746Z" }, - { url = "https://files.pythonhosted.org/packages/71/49/40d21e1aa1ac569e521069228bb29c9b5a350344ccf922a0227d93c2ed44/orjson-3.11.5-cp310-cp310-win32.whl", hash = "sha256:8be318da8413cdbbce77b8c5fac8d13f6eb0f0db41b30bb598631412619572e8", size = 135272, upload-time = 
"2025-12-06T15:53:59.769Z" }, - { url = "https://files.pythonhosted.org/packages/c4/7e/d0e31e78be0c100e08be64f48d2850b23bcb4d4c70d114f4e43b39f6895a/orjson-3.11.5-cp310-cp310-win_amd64.whl", hash = "sha256:b9f86d69ae822cabc2a0f6c099b43e8733dda788405cba2665595b7e8dd8d167", size = 133360, upload-time = "2025-12-06T15:54:01.25Z" }, - { url = "https://files.pythonhosted.org/packages/fd/68/6b3659daec3a81aed5ab47700adb1a577c76a5452d35b91c88efee89987f/orjson-3.11.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9c8494625ad60a923af6b2b0bd74107146efe9b55099e20d7740d995f338fcd8", size = 245318, upload-time = "2025-12-06T15:54:02.355Z" }, - { url = "https://files.pythonhosted.org/packages/e9/00/92db122261425f61803ccf0830699ea5567439d966cbc35856fe711bfe6b/orjson-3.11.5-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:7bb2ce0b82bc9fd1168a513ddae7a857994b780b2945a8c51db4ab1c4b751ebc", size = 129491, upload-time = "2025-12-06T15:54:03.877Z" }, - { url = "https://files.pythonhosted.org/packages/94/4f/ffdcb18356518809d944e1e1f77589845c278a1ebbb5a8297dfefcc4b4cb/orjson-3.11.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67394d3becd50b954c4ecd24ac90b5051ee7c903d167459f93e77fc6f5b4c968", size = 132167, upload-time = "2025-12-06T15:54:04.944Z" }, - { url = "https://files.pythonhosted.org/packages/97/c6/0a8caff96f4503f4f7dd44e40e90f4d14acf80d3b7a97cb88747bb712d3e/orjson-3.11.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:298d2451f375e5f17b897794bcc3e7b821c0f32b4788b9bcae47ada24d7f3cf7", size = 130516, upload-time = "2025-12-06T15:54:06.274Z" }, - { url = "https://files.pythonhosted.org/packages/4d/63/43d4dc9bd9954bff7052f700fdb501067f6fb134a003ddcea2a0bb3854ed/orjson-3.11.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa5e4244063db8e1d87e0f54c3f7522f14b2dc937e65d5241ef0076a096409fd", size = 135695, upload-time = "2025-12-06T15:54:07.702Z" }, - { url 
= "https://files.pythonhosted.org/packages/87/6f/27e2e76d110919cb7fcb72b26166ee676480a701bcf8fc53ac5d0edce32f/orjson-3.11.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1db2088b490761976c1b2e956d5d4e6409f3732e9d79cfa69f876c5248d1baf9", size = 139664, upload-time = "2025-12-06T15:54:08.828Z" }, - { url = "https://files.pythonhosted.org/packages/d4/f8/5966153a5f1be49b5fbb8ca619a529fde7bc71aa0a376f2bb83fed248bcd/orjson-3.11.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2ed66358f32c24e10ceea518e16eb3549e34f33a9d51f99ce23b0251776a1ef", size = 137289, upload-time = "2025-12-06T15:54:09.898Z" }, - { url = "https://files.pythonhosted.org/packages/a7/34/8acb12ff0299385c8bbcbb19fbe40030f23f15a6de57a9c587ebf71483fb/orjson-3.11.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2021afda46c1ed64d74b555065dbd4c2558d510d8cec5ea6a53001b3e5e82a9", size = 138784, upload-time = "2025-12-06T15:54:11.022Z" }, - { url = "https://files.pythonhosted.org/packages/ee/27/910421ea6e34a527f73d8f4ee7bdffa48357ff79c7b8d6eb6f7b82dd1176/orjson-3.11.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b42ffbed9128e547a1647a3e50bc88ab28ae9daa61713962e0d3dd35e820c125", size = 141322, upload-time = "2025-12-06T15:54:12.427Z" }, - { url = "https://files.pythonhosted.org/packages/87/a3/4b703edd1a05555d4bb1753d6ce44e1a05b7a6d7c164d5b332c795c63d70/orjson-3.11.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8d5f16195bb671a5dd3d1dbea758918bada8f6cc27de72bd64adfbd748770814", size = 413612, upload-time = "2025-12-06T15:54:13.858Z" }, - { url = "https://files.pythonhosted.org/packages/1b/36/034177f11d7eeea16d3d2c42a1883b0373978e08bc9dad387f5074c786d8/orjson-3.11.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c0e5d9f7a0227df2927d343a6e3859bebf9208b427c79bd31949abcc2fa32fa5", size = 150993, upload-time = "2025-12-06T15:54:15.189Z" }, - { url = 
"https://files.pythonhosted.org/packages/44/2f/ea8b24ee046a50a7d141c0227c4496b1180b215e728e3b640684f0ea448d/orjson-3.11.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23d04c4543e78f724c4dfe656b3791b5f98e4c9253e13b2636f1af5d90e4a880", size = 141774, upload-time = "2025-12-06T15:54:16.451Z" }, - { url = "https://files.pythonhosted.org/packages/8a/12/cc440554bf8200eb23348a5744a575a342497b65261cd65ef3b28332510a/orjson-3.11.5-cp311-cp311-win32.whl", hash = "sha256:c404603df4865f8e0afe981aa3c4b62b406e6d06049564d58934860b62b7f91d", size = 135109, upload-time = "2025-12-06T15:54:17.73Z" }, - { url = "https://files.pythonhosted.org/packages/a3/83/e0c5aa06ba73a6760134b169f11fb970caa1525fa4461f94d76e692299d9/orjson-3.11.5-cp311-cp311-win_amd64.whl", hash = "sha256:9645ef655735a74da4990c24ffbd6894828fbfa117bc97c1edd98c282ecb52e1", size = 133193, upload-time = "2025-12-06T15:54:19.426Z" }, - { url = "https://files.pythonhosted.org/packages/cb/35/5b77eaebc60d735e832c5b1a20b155667645d123f09d471db0a78280fb49/orjson-3.11.5-cp311-cp311-win_arm64.whl", hash = "sha256:1cbf2735722623fcdee8e712cbaaab9e372bbcb0c7924ad711b261c2eccf4a5c", size = 126830, upload-time = "2025-12-06T15:54:20.836Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a4/8052a029029b096a78955eadd68ab594ce2197e24ec50e6b6d2ab3f4e33b/orjson-3.11.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:334e5b4bff9ad101237c2d799d9fd45737752929753bf4faf4b207335a416b7d", size = 245347, upload-time = "2025-12-06T15:54:22.061Z" }, - { url = "https://files.pythonhosted.org/packages/64/67/574a7732bd9d9d79ac620c8790b4cfe0717a3d5a6eb2b539e6e8995e24a0/orjson-3.11.5-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:ff770589960a86eae279f5d8aa536196ebda8273a2a07db2a54e82b93bc86626", size = 129435, upload-time = "2025-12-06T15:54:23.615Z" }, - { url = 
"https://files.pythonhosted.org/packages/52/8d/544e77d7a29d90cf4d9eecd0ae801c688e7f3d1adfa2ebae5e1e94d38ab9/orjson-3.11.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed24250e55efbcb0b35bed7caaec8cedf858ab2f9f2201f17b8938c618c8ca6f", size = 132074, upload-time = "2025-12-06T15:54:24.694Z" }, - { url = "https://files.pythonhosted.org/packages/6e/57/b9f5b5b6fbff9c26f77e785baf56ae8460ef74acdb3eae4931c25b8f5ba9/orjson-3.11.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a66d7769e98a08a12a139049aac2f0ca3adae989817f8c43337455fbc7669b85", size = 130520, upload-time = "2025-12-06T15:54:26.185Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6d/d34970bf9eb33f9ec7c979a262cad86076814859e54eb9a059a52f6dc13d/orjson-3.11.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86cfc555bfd5794d24c6a1903e558b50644e5e68e6471d66502ce5cb5fdef3f9", size = 136209, upload-time = "2025-12-06T15:54:27.264Z" }, - { url = "https://files.pythonhosted.org/packages/e7/39/bc373b63cc0e117a105ea12e57280f83ae52fdee426890d57412432d63b3/orjson-3.11.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a230065027bc2a025e944f9d4714976a81e7ecfa940923283bca7bbc1f10f626", size = 139837, upload-time = "2025-12-06T15:54:28.75Z" }, - { url = "https://files.pythonhosted.org/packages/cb/aa/7c4818c8d7d324da220f4f1af55c343956003aa4d1ce1857bdc1d396ba69/orjson-3.11.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b29d36b60e606df01959c4b982729c8845c69d1963f88686608be9ced96dbfaa", size = 137307, upload-time = "2025-12-06T15:54:29.856Z" }, - { url = "https://files.pythonhosted.org/packages/46/bf/0993b5a056759ba65145effe3a79dd5a939d4a070eaa5da2ee3180fbb13f/orjson-3.11.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c74099c6b230d4261fdc3169d50efc09abf38ace1a42ea2f9994b1d79153d477", size = 139020, upload-time = "2025-12-06T15:54:31.024Z" }, - { url 
= "https://files.pythonhosted.org/packages/65/e8/83a6c95db3039e504eda60fc388f9faedbb4f6472f5aba7084e06552d9aa/orjson-3.11.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e697d06ad57dd0c7a737771d470eedc18e68dfdefcdd3b7de7f33dfda5b6212e", size = 141099, upload-time = "2025-12-06T15:54:32.196Z" }, - { url = "https://files.pythonhosted.org/packages/b9/b4/24fdc024abfce31c2f6812973b0a693688037ece5dc64b7a60c1ce69e2f2/orjson-3.11.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e08ca8a6c851e95aaecc32bc44a5aa75d0ad26af8cdac7c77e4ed93acf3d5b69", size = 413540, upload-time = "2025-12-06T15:54:33.361Z" }, - { url = "https://files.pythonhosted.org/packages/d9/37/01c0ec95d55ed0c11e4cae3e10427e479bba40c77312b63e1f9665e0737d/orjson-3.11.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e8b5f96c05fce7d0218df3fdfeb962d6b8cfff7e3e20264306b46dd8b217c0f3", size = 151530, upload-time = "2025-12-06T15:54:34.6Z" }, - { url = "https://files.pythonhosted.org/packages/f9/d4/f9ebc57182705bb4bbe63f5bbe14af43722a2533135e1d2fb7affa0c355d/orjson-3.11.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ddbfdb5099b3e6ba6d6ea818f61997bb66de14b411357d24c4612cf1ebad08ca", size = 141863, upload-time = "2025-12-06T15:54:35.801Z" }, - { url = "https://files.pythonhosted.org/packages/0d/04/02102b8d19fdcb009d72d622bb5781e8f3fae1646bf3e18c53d1bc8115b5/orjson-3.11.5-cp312-cp312-win32.whl", hash = "sha256:9172578c4eb09dbfcf1657d43198de59b6cef4054de385365060ed50c458ac98", size = 135255, upload-time = "2025-12-06T15:54:37.209Z" }, - { url = "https://files.pythonhosted.org/packages/d4/fb/f05646c43d5450492cb387de5549f6de90a71001682c17882d9f66476af5/orjson-3.11.5-cp312-cp312-win_amd64.whl", hash = "sha256:2b91126e7b470ff2e75746f6f6ee32b9ab67b7a93c8ba1d15d3a0caaf16ec875", size = 133252, upload-time = "2025-12-06T15:54:38.401Z" }, - { url = "https://files.pythonhosted.org/packages/dc/a6/7b8c0b26ba18c793533ac1cd145e131e46fcf43952aa94c109b5b913c1f0/orjson-3.11.5-cp312-cp312-win_arm64.whl", hash 
= "sha256:acbc5fac7e06777555b0722b8ad5f574739e99ffe99467ed63da98f97f9ca0fe", size = 126777, upload-time = "2025-12-06T15:54:39.515Z" }, - { url = "https://files.pythonhosted.org/packages/10/43/61a77040ce59f1569edf38f0b9faadc90c8cf7e9bec2e0df51d0132c6bb7/orjson-3.11.5-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:3b01799262081a4c47c035dd77c1301d40f568f77cc7ec1bb7db5d63b0a01629", size = 245271, upload-time = "2025-12-06T15:54:40.878Z" }, - { url = "https://files.pythonhosted.org/packages/55/f9/0f79be617388227866d50edd2fd320cb8fb94dc1501184bb1620981a0aba/orjson-3.11.5-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:61de247948108484779f57a9f406e4c84d636fa5a59e411e6352484985e8a7c3", size = 129422, upload-time = "2025-12-06T15:54:42.403Z" }, - { url = "https://files.pythonhosted.org/packages/77/42/f1bf1549b432d4a78bfa95735b79b5dac75b65b5bb815bba86ad406ead0a/orjson-3.11.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:894aea2e63d4f24a7f04a1908307c738d0dce992e9249e744b8f4e8dd9197f39", size = 132060, upload-time = "2025-12-06T15:54:43.531Z" }, - { url = "https://files.pythonhosted.org/packages/25/49/825aa6b929f1a6ed244c78acd7b22c1481fd7e5fda047dc8bf4c1a807eb6/orjson-3.11.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ddc21521598dbe369d83d4d40338e23d4101dad21dae0e79fa20465dbace019f", size = 130391, upload-time = "2025-12-06T15:54:45.059Z" }, - { url = "https://files.pythonhosted.org/packages/42/ec/de55391858b49e16e1aa8f0bbbb7e5997b7345d8e984a2dec3746d13065b/orjson-3.11.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cce16ae2f5fb2c53c3eafdd1706cb7b6530a67cc1c17abe8ec747f5cd7c0c51", size = 135964, upload-time = "2025-12-06T15:54:46.576Z" }, - { url = "https://files.pythonhosted.org/packages/1c/40/820bc63121d2d28818556a2d0a09384a9f0262407cf9fa305e091a8048df/orjson-3.11.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e46c762d9f0e1cfb4ccc8515de7f349abbc95b59cb5a2bd68df5973fdef913f8", size = 139817, upload-time = "2025-12-06T15:54:48.084Z" }, - { url = "https://files.pythonhosted.org/packages/09/c7/3a445ca9a84a0d59d26365fd8898ff52bdfcdcb825bcc6519830371d2364/orjson-3.11.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7345c759276b798ccd6d77a87136029e71e66a8bbf2d2755cbdde1d82e78706", size = 137336, upload-time = "2025-12-06T15:54:49.426Z" }, - { url = "https://files.pythonhosted.org/packages/9a/b3/dc0d3771f2e5d1f13368f56b339c6782f955c6a20b50465a91acb79fe961/orjson-3.11.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75bc2e59e6a2ac1dd28901d07115abdebc4563b5b07dd612bf64260a201b1c7f", size = 138993, upload-time = "2025-12-06T15:54:50.939Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a2/65267e959de6abe23444659b6e19c888f242bf7725ff927e2292776f6b89/orjson-3.11.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:54aae9b654554c3b4edd61896b978568c6daa16af96fa4681c9b5babd469f863", size = 141070, upload-time = "2025-12-06T15:54:52.414Z" }, - { url = "https://files.pythonhosted.org/packages/63/c9/da44a321b288727a322c6ab17e1754195708786a04f4f9d2220a5076a649/orjson-3.11.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4bdd8d164a871c4ec773f9de0f6fe8769c2d6727879c37a9666ba4183b7f8228", size = 413505, upload-time = "2025-12-06T15:54:53.67Z" }, - { url = "https://files.pythonhosted.org/packages/7f/17/68dc14fa7000eefb3d4d6d7326a190c99bb65e319f02747ef3ebf2452f12/orjson-3.11.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a261fef929bcf98a60713bf5e95ad067cea16ae345d9a35034e73c3990e927d2", size = 151342, upload-time = "2025-12-06T15:54:55.113Z" }, - { url = "https://files.pythonhosted.org/packages/c4/c5/ccee774b67225bed630a57478529fc026eda33d94fe4c0eac8fe58d4aa52/orjson-3.11.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c028a394c766693c5c9909dec76b24f37e6a1b91999e8d0c0d5feecbe93c3e05", size = 
141823, upload-time = "2025-12-06T15:54:56.331Z" }, - { url = "https://files.pythonhosted.org/packages/67/80/5d00e4155d0cd7390ae2087130637671da713959bb558db9bac5e6f6b042/orjson-3.11.5-cp313-cp313-win32.whl", hash = "sha256:2cc79aaad1dfabe1bd2d50ee09814a1253164b3da4c00a78c458d82d04b3bdef", size = 135236, upload-time = "2025-12-06T15:54:57.507Z" }, - { url = "https://files.pythonhosted.org/packages/95/fe/792cc06a84808dbdc20ac6eab6811c53091b42f8e51ecebf14b540e9cfe4/orjson-3.11.5-cp313-cp313-win_amd64.whl", hash = "sha256:ff7877d376add4e16b274e35a3f58b7f37b362abf4aa31863dadacdd20e3a583", size = 133167, upload-time = "2025-12-06T15:54:58.71Z" }, - { url = "https://files.pythonhosted.org/packages/46/2c/d158bd8b50e3b1cfdcf406a7e463f6ffe3f0d167b99634717acdaf5e299f/orjson-3.11.5-cp313-cp313-win_arm64.whl", hash = "sha256:59ac72ea775c88b163ba8d21b0177628bd015c5dd060647bbab6e22da3aad287", size = 126712, upload-time = "2025-12-06T15:54:59.892Z" }, - { url = "https://files.pythonhosted.org/packages/c2/60/77d7b839e317ead7bb225d55bb50f7ea75f47afc489c81199befc5435b50/orjson-3.11.5-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e446a8ea0a4c366ceafc7d97067bfd55292969143b57e3c846d87fc701e797a0", size = 245252, upload-time = "2025-12-06T15:55:01.127Z" }, - { url = "https://files.pythonhosted.org/packages/f1/aa/d4639163b400f8044cef0fb9aa51b0337be0da3a27187a20d1166e742370/orjson-3.11.5-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:53deb5addae9c22bbe3739298f5f2196afa881ea75944e7720681c7080909a81", size = 129419, upload-time = "2025-12-06T15:55:02.723Z" }, - { url = "https://files.pythonhosted.org/packages/30/94/9eabf94f2e11c671111139edf5ec410d2f21e6feee717804f7e8872d883f/orjson-3.11.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cd00d49d6063d2b8791da5d4f9d20539c5951f965e45ccf4e96d33505ce68f", size = 132050, upload-time = "2025-12-06T15:55:03.918Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/c8/ca10f5c5322f341ea9a9f1097e140be17a88f88d1cfdd29df522970d9744/orjson-3.11.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3fd15f9fc8c203aeceff4fda211157fad114dde66e92e24097b3647a08f4ee9e", size = 130370, upload-time = "2025-12-06T15:55:05.173Z" }, - { url = "https://files.pythonhosted.org/packages/25/d4/e96824476d361ee2edd5c6290ceb8d7edf88d81148a6ce172fc00278ca7f/orjson-3.11.5-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9df95000fbe6777bf9820ae82ab7578e8662051bb5f83d71a28992f539d2cda7", size = 136012, upload-time = "2025-12-06T15:55:06.402Z" }, - { url = "https://files.pythonhosted.org/packages/85/8e/9bc3423308c425c588903f2d103cfcfe2539e07a25d6522900645a6f257f/orjson-3.11.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a8d676748fca47ade5bc3da7430ed7767afe51b2f8100e3cd65e151c0eaceb", size = 139809, upload-time = "2025-12-06T15:55:07.656Z" }, - { url = "https://files.pythonhosted.org/packages/e9/3c/b404e94e0b02a232b957c54643ce68d0268dacb67ac33ffdee24008c8b27/orjson-3.11.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa0f513be38b40234c77975e68805506cad5d57b3dfd8fe3baa7f4f4051e15b4", size = 137332, upload-time = "2025-12-06T15:55:08.961Z" }, - { url = "https://files.pythonhosted.org/packages/51/30/cc2d69d5ce0ad9b84811cdf4a0cd5362ac27205a921da524ff42f26d65e0/orjson-3.11.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1863e75b92891f553b7922ce4ee10ed06db061e104f2b7815de80cdcb135ad", size = 138983, upload-time = "2025-12-06T15:55:10.595Z" }, - { url = "https://files.pythonhosted.org/packages/0e/87/de3223944a3e297d4707d2fe3b1ffb71437550e165eaf0ca8bbe43ccbcb1/orjson-3.11.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d4be86b58e9ea262617b8ca6251a2f0d63cc132a6da4b5fcc8e0a4128782c829", size = 141069, upload-time = "2025-12-06T15:55:11.832Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/30/81d5087ae74be33bcae3ff2d80f5ccaa4a8fedc6d39bf65a427a95b8977f/orjson-3.11.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:b923c1c13fa02084eb38c9c065afd860a5cff58026813319a06949c3af5732ac", size = 413491, upload-time = "2025-12-06T15:55:13.314Z" }, - { url = "https://files.pythonhosted.org/packages/d0/6f/f6058c21e2fc1efaf918986dbc2da5cd38044f1a2d4b7b91ad17c4acf786/orjson-3.11.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:1b6bd351202b2cd987f35a13b5e16471cf4d952b42a73c391cc537974c43ef6d", size = 151375, upload-time = "2025-12-06T15:55:14.715Z" }, - { url = "https://files.pythonhosted.org/packages/54/92/c6921f17d45e110892899a7a563a925b2273d929959ce2ad89e2525b885b/orjson-3.11.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bb150d529637d541e6af06bbe3d02f5498d628b7f98267ff87647584293ab439", size = 141850, upload-time = "2025-12-06T15:55:15.94Z" }, - { url = "https://files.pythonhosted.org/packages/88/86/cdecb0140a05e1a477b81f24739da93b25070ee01ce7f7242f44a6437594/orjson-3.11.5-cp314-cp314-win32.whl", hash = "sha256:9cc1e55c884921434a84a0c3dd2699eb9f92e7b441d7f53f3941079ec6ce7499", size = 135278, upload-time = "2025-12-06T15:55:17.202Z" }, - { url = "https://files.pythonhosted.org/packages/e4/97/b638d69b1e947d24f6109216997e38922d54dcdcdb1b11c18d7efd2d3c59/orjson-3.11.5-cp314-cp314-win_amd64.whl", hash = "sha256:a4f3cb2d874e03bc7767c8f88adaa1a9a05cecea3712649c3b58589ec7317310", size = 133170, upload-time = "2025-12-06T15:55:18.468Z" }, - { url = "https://files.pythonhosted.org/packages/8f/dd/f4fff4a6fe601b4f8f3ba3aa6da8ac33d17d124491a3b804c662a70e1636/orjson-3.11.5-cp314-cp314-win_arm64.whl", hash = "sha256:38b22f476c351f9a1c43e5b07d8b5a02eb24a6ab8e75f700f7d479d4568346a5", size = 126713, upload-time = "2025-12-06T15:55:19.738Z" }, +version = "3.11.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/53/45/b268004f745ede84e5798b48ee12b05129d19235d0e15267aa57dcdb400b/orjson-3.11.7.tar.gz", hash = "sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49", size = 6144992, upload-time = "2026-02-02T15:38:49.29Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/1a/a373746fa6d0e116dd9e54371a7b54622c44d12296d5d0f3ad5e3ff33490/orjson-3.11.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174", size = 229140, upload-time = "2026-02-02T15:37:06.082Z" }, + { url = "https://files.pythonhosted.org/packages/52/a2/fa129e749d500f9b183e8a3446a193818a25f60261e9ce143ad61e975208/orjson-3.11.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67", size = 128670, upload-time = "2026-02-02T15:37:08.002Z" }, + { url = "https://files.pythonhosted.org/packages/08/93/1e82011cd1e0bd051ef9d35bed1aa7fb4ea1f0a055dc2c841b46b43a9ebd/orjson-3.11.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11", size = 123832, upload-time = "2026-02-02T15:37:09.191Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d8/a26b431ef962c7d55736674dddade876822f3e33223c1f47a36879350d04/orjson-3.11.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc", size = 129171, upload-time = "2026-02-02T15:37:11.112Z" }, + { url = "https://files.pythonhosted.org/packages/a7/19/f47819b84a580f490da260c3ee9ade214cf4cf78ac9ce8c1c758f80fdfc9/orjson-3.11.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16", size = 141967, upload-time = "2026-02-02T15:37:12.282Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/cd/37ece39a0777ba077fdcdbe4cccae3be8ed00290c14bf8afdc548befc260/orjson-3.11.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222", size = 130991, upload-time = "2026-02-02T15:37:13.465Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ed/f2b5d66aa9b6b5c02ff5f120efc7b38c7c4962b21e6be0f00fd99a5c348e/orjson-3.11.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa", size = 133674, upload-time = "2026-02-02T15:37:14.694Z" }, + { url = "https://files.pythonhosted.org/packages/c4/6e/baa83e68d1aa09fa8c3e5b2c087d01d0a0bd45256de719ed7bc22c07052d/orjson-3.11.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e", size = 138722, upload-time = "2026-02-02T15:37:16.501Z" }, + { url = "https://files.pythonhosted.org/packages/0c/47/7f8ef4963b772cd56999b535e553f7eb5cd27e9dd6c049baee6f18bfa05d/orjson-3.11.7-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2", size = 409056, upload-time = "2026-02-02T15:37:17.895Z" }, + { url = "https://files.pythonhosted.org/packages/38/eb/2df104dd2244b3618f25325a656f85cc3277f74bbd91224752410a78f3c7/orjson-3.11.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c", size = 144196, upload-time = "2026-02-02T15:37:19.349Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2a/ee41de0aa3a6686598661eae2b4ebdff1340c65bfb17fcff8b87138aab21/orjson-3.11.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f", size = 134979, upload-time = "2026-02-02T15:37:20.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/fa/92fc5d3d402b87a8b28277a9ed35386218a6a5287c7fe5ee9b9f02c53fb2/orjson-3.11.7-cp310-cp310-win32.whl", hash = "sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de", size = 127968, upload-time = "2026-02-02T15:37:23.178Z" }, + { url = "https://files.pythonhosted.org/packages/07/29/a576bf36d73d60df06904d3844a9df08e25d59eba64363aaf8ec2f9bff41/orjson-3.11.7-cp310-cp310-win_amd64.whl", hash = "sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993", size = 125128, upload-time = "2026-02-02T15:37:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/37/02/da6cb01fc6087048d7f61522c327edf4250f1683a58a839fdcc435746dd5/orjson-3.11.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c", size = 228664, upload-time = "2026-02-02T15:37:25.542Z" }, + { url = "https://files.pythonhosted.org/packages/c1/c2/5885e7a5881dba9a9af51bc564e8967225a642b3e03d089289a35054e749/orjson-3.11.7-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b", size = 125344, upload-time = "2026-02-02T15:37:26.92Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/4e7688de0a92d1caf600dfd5fb70b4c5bfff51dfa61ac555072ef2d0d32a/orjson-3.11.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e", size = 128404, upload-time = "2026-02-02T15:37:28.108Z" }, + { url = "https://files.pythonhosted.org/packages/2f/b2/ec04b74ae03a125db7bd69cffd014b227b7f341e3261bf75b5eb88a1aa92/orjson-3.11.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5", size = 123677, upload-time = "2026-02-02T15:37:30.287Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/69/f95bdf960605f08f827f6e3291fe243d8aa9c5c9ff017a8d7232209184c3/orjson-3.11.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62", size = 128950, upload-time = "2026-02-02T15:37:31.595Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1b/de59c57bae1d148ef298852abd31909ac3089cff370dfd4cd84cc99cbc42/orjson-3.11.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910", size = 141756, upload-time = "2026-02-02T15:37:32.985Z" }, + { url = "https://files.pythonhosted.org/packages/ee/9e/9decc59f4499f695f65c650f6cfa6cd4c37a3fbe8fa235a0a3614cb54386/orjson-3.11.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b", size = 130812, upload-time = "2026-02-02T15:37:34.204Z" }, + { url = "https://files.pythonhosted.org/packages/28/e6/59f932bcabd1eac44e334fe8e3281a92eacfcb450586e1f4bde0423728d8/orjson-3.11.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960", size = 133444, upload-time = "2026-02-02T15:37:35.446Z" }, + { url = "https://files.pythonhosted.org/packages/f1/36/b0f05c0eaa7ca30bc965e37e6a2956b0d67adb87a9872942d3568da846ae/orjson-3.11.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8", size = 138609, upload-time = "2026-02-02T15:37:36.657Z" }, + { url = "https://files.pythonhosted.org/packages/b8/03/58ec7d302b8d86944c60c7b4b82975d5161fcce4c9bc8c6cb1d6741b6115/orjson-3.11.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504", size = 408918, upload-time = "2026-02-02T15:37:38.076Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/3a/868d65ef9a8b99be723bd510de491349618abd9f62c826cf206d962db295/orjson-3.11.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e", size = 143998, upload-time = "2026-02-02T15:37:39.706Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c7/1e18e1c83afe3349f4f6dc9e14910f0ae5f82eac756d1412ea4018938535/orjson-3.11.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561", size = 134802, upload-time = "2026-02-02T15:37:41.002Z" }, + { url = "https://files.pythonhosted.org/packages/d4/0b/ccb7ee1a65b37e8eeb8b267dc953561d72370e85185e459616d4345bab34/orjson-3.11.7-cp311-cp311-win32.whl", hash = "sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d", size = 127828, upload-time = "2026-02-02T15:37:42.241Z" }, + { url = "https://files.pythonhosted.org/packages/af/9e/55c776dffda3f381e0f07d010a4f5f3902bf48eaba1bb7684d301acd4924/orjson-3.11.7-cp311-cp311-win_amd64.whl", hash = "sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471", size = 124941, upload-time = "2026-02-02T15:37:43.444Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8e/424a620fa7d263b880162505fb107ef5e0afaa765b5b06a88312ac291560/orjson-3.11.7-cp311-cp311-win_arm64.whl", hash = "sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d", size = 126245, upload-time = "2026-02-02T15:37:45.18Z" }, + { url = "https://files.pythonhosted.org/packages/80/bf/76f4f1665f6983385938f0e2a5d7efa12a58171b8456c252f3bae8a4cf75/orjson-3.11.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f", size = 228545, upload-time = "2026-02-02T15:37:46.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/53/6c72c002cb13b5a978a068add59b25a8bdf2800ac1c9c8ecdb26d6d97064/orjson-3.11.7-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b", size = 125224, upload-time = "2026-02-02T15:37:47.697Z" }, + { url = "https://files.pythonhosted.org/packages/2c/83/10e48852865e5dd151bdfe652c06f7da484578ed02c5fca938e3632cb0b8/orjson-3.11.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a", size = 128154, upload-time = "2026-02-02T15:37:48.954Z" }, + { url = "https://files.pythonhosted.org/packages/6e/52/a66e22a2b9abaa374b4a081d410edab6d1e30024707b87eab7c734afe28d/orjson-3.11.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10", size = 123548, upload-time = "2026-02-02T15:37:50.187Z" }, + { url = "https://files.pythonhosted.org/packages/de/38/605d371417021359f4910c496f764c48ceb8997605f8c25bf1dfe58c0ebe/orjson-3.11.7-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa", size = 129000, upload-time = "2026-02-02T15:37:51.426Z" }, + { url = "https://files.pythonhosted.org/packages/44/98/af32e842b0ffd2335c89714d48ca4e3917b42f5d6ee5537832e069a4b3ac/orjson-3.11.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8", size = 141686, upload-time = "2026-02-02T15:37:52.607Z" }, + { url = "https://files.pythonhosted.org/packages/96/0b/fc793858dfa54be6feee940c1463370ece34b3c39c1ca0aa3845f5ba9892/orjson-3.11.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f", size = 130812, upload-time = "2026-02-02T15:37:53.944Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/91/98a52415059db3f374757d0b7f0f16e3b5cd5976c90d1c2b56acaea039e6/orjson-3.11.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad", size = 133440, upload-time = "2026-02-02T15:37:55.615Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b6/cb540117bda61791f46381f8c26c8f93e802892830a6055748d3bb1925ab/orjson-3.11.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867", size = 138386, upload-time = "2026-02-02T15:37:56.814Z" }, + { url = "https://files.pythonhosted.org/packages/63/1a/50a3201c334a7f17c231eee5f841342190723794e3b06293f26e7cf87d31/orjson-3.11.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d", size = 408853, upload-time = "2026-02-02T15:37:58.291Z" }, + { url = "https://files.pythonhosted.org/packages/87/cd/8de1c67d0be44fdc22701e5989c0d015a2adf391498ad42c4dc589cd3013/orjson-3.11.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab", size = 144130, upload-time = "2026-02-02T15:38:00.163Z" }, + { url = "https://files.pythonhosted.org/packages/0f/fe/d605d700c35dd55f51710d159fc54516a280923cd1b7e47508982fbb387d/orjson-3.11.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2", size = 134818, upload-time = "2026-02-02T15:38:01.507Z" }, + { url = "https://files.pythonhosted.org/packages/e4/e4/15ecc67edb3ddb3e2f46ae04475f2d294e8b60c1825fbe28a428b93b3fbd/orjson-3.11.7-cp312-cp312-win32.whl", hash = "sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f", size = 127923, upload-time = "2026-02-02T15:38:02.75Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/70/2e0855361f76198a3965273048c8e50a9695d88cd75811a5b46444895845/orjson-3.11.7-cp312-cp312-win_amd64.whl", hash = "sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74", size = 125007, upload-time = "2026-02-02T15:38:04.032Z" }, + { url = "https://files.pythonhosted.org/packages/68/40/c2051bd19fc467610fed469dc29e43ac65891571138f476834ca192bc290/orjson-3.11.7-cp312-cp312-win_arm64.whl", hash = "sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5", size = 126089, upload-time = "2026-02-02T15:38:05.297Z" }, + { url = "https://files.pythonhosted.org/packages/89/25/6e0e52cac5aab51d7b6dcd257e855e1dec1c2060f6b28566c509b4665f62/orjson-3.11.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733", size = 228390, upload-time = "2026-02-02T15:38:06.8Z" }, + { url = "https://files.pythonhosted.org/packages/a5/29/a77f48d2fc8a05bbc529e5ff481fb43d914f9e383ea2469d4f3d51df3d00/orjson-3.11.7-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4", size = 125189, upload-time = "2026-02-02T15:38:08.181Z" }, + { url = "https://files.pythonhosted.org/packages/89/25/0a16e0729a0e6a1504f9d1a13cdd365f030068aab64cec6958396b9969d7/orjson-3.11.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785", size = 128106, upload-time = "2026-02-02T15:38:09.41Z" }, + { url = "https://files.pythonhosted.org/packages/66/da/a2e505469d60666a05ab373f1a6322eb671cb2ba3a0ccfc7d4bc97196787/orjson-3.11.7-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539", size = 123363, upload-time = "2026-02-02T15:38:10.73Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/bf/ed73f88396ea35c71b38961734ea4a4746f7ca0768bf28fd551d37e48dd0/orjson-3.11.7-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1", size = 129007, upload-time = "2026-02-02T15:38:12.138Z" }, + { url = "https://files.pythonhosted.org/packages/73/3c/b05d80716f0225fc9008fbf8ab22841dcc268a626aa550561743714ce3bf/orjson-3.11.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1", size = 141667, upload-time = "2026-02-02T15:38:13.398Z" }, + { url = "https://files.pythonhosted.org/packages/61/e8/0be9b0addd9bf86abfc938e97441dcd0375d494594b1c8ad10fe57479617/orjson-3.11.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705", size = 130832, upload-time = "2026-02-02T15:38:14.698Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ec/c68e3b9021a31d9ec15a94931db1410136af862955854ed5dd7e7e4f5bff/orjson-3.11.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace", size = 133373, upload-time = "2026-02-02T15:38:16.109Z" }, + { url = "https://files.pythonhosted.org/packages/d2/45/f3466739aaafa570cc8e77c6dbb853c48bf56e3b43738020e2661e08b0ac/orjson-3.11.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b", size = 138307, upload-time = "2026-02-02T15:38:17.453Z" }, + { url = "https://files.pythonhosted.org/packages/e1/84/9f7f02288da1ffb31405c1be07657afd1eecbcb4b64ee2817b6fe0f785fa/orjson-3.11.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157", size = 408695, upload-time = "2026-02-02T15:38:18.831Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/07/9dd2f0c0104f1a0295ffbe912bc8d63307a539b900dd9e2c48ef7810d971/orjson-3.11.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3", size = 144099, upload-time = "2026-02-02T15:38:20.28Z" }, + { url = "https://files.pythonhosted.org/packages/a5/66/857a8e4a3292e1f7b1b202883bcdeb43a91566cf59a93f97c53b44bd6801/orjson-3.11.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223", size = 134806, upload-time = "2026-02-02T15:38:22.186Z" }, + { url = "https://files.pythonhosted.org/packages/0a/5b/6ebcf3defc1aab3a338ca777214966851e92efb1f30dc7fc8285216e6d1b/orjson-3.11.7-cp313-cp313-win32.whl", hash = "sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3", size = 127914, upload-time = "2026-02-02T15:38:23.511Z" }, + { url = "https://files.pythonhosted.org/packages/00/04/c6f72daca5092e3117840a1b1e88dfc809cc1470cf0734890d0366b684a1/orjson-3.11.7-cp313-cp313-win_amd64.whl", hash = "sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757", size = 124986, upload-time = "2026-02-02T15:38:24.836Z" }, + { url = "https://files.pythonhosted.org/packages/03/ba/077a0f6f1085d6b806937246860fafbd5b17f3919c70ee3f3d8d9c713f38/orjson-3.11.7-cp313-cp313-win_arm64.whl", hash = "sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539", size = 126045, upload-time = "2026-02-02T15:38:26.216Z" }, + { url = "https://files.pythonhosted.org/packages/e9/1e/745565dca749813db9a093c5ebc4bac1a9475c64d54b95654336ac3ed961/orjson-3.11.7-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0", size = 228391, upload-time = "2026-02-02T15:38:27.757Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/19/e40f6225da4d3aa0c8dc6e5219c5e87c2063a560fe0d72a88deb59776794/orjson-3.11.7-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0", size = 125188, upload-time = "2026-02-02T15:38:29.241Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7e/c4de2babef2c0817fd1f048fd176aa48c37bec8aef53d2fa932983032cce/orjson-3.11.7-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6", size = 128097, upload-time = "2026-02-02T15:38:30.618Z" }, + { url = "https://files.pythonhosted.org/packages/eb/74/233d360632bafd2197f217eee7fb9c9d0229eac0c18128aee5b35b0014fe/orjson-3.11.7-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf", size = 123364, upload-time = "2026-02-02T15:38:32.363Z" }, + { url = "https://files.pythonhosted.org/packages/79/51/af79504981dd31efe20a9e360eb49c15f06df2b40e7f25a0a52d9ae888e8/orjson-3.11.7-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5", size = 129076, upload-time = "2026-02-02T15:38:33.68Z" }, + { url = "https://files.pythonhosted.org/packages/67/e2/da898eb68b72304f8de05ca6715870d09d603ee98d30a27e8a9629abc64b/orjson-3.11.7-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892", size = 141705, upload-time = "2026-02-02T15:38:34.989Z" }, + { url = "https://files.pythonhosted.org/packages/c5/89/15364d92acb3d903b029e28d834edb8780c2b97404cbf7929aa6b9abdb24/orjson-3.11.7-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e", size = 130855, upload-time = "2026-02-02T15:38:36.379Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/8b/ecdad52d0b38d4b8f514be603e69ccd5eacf4e7241f972e37e79792212ec/orjson-3.11.7-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1", size = 133386, upload-time = "2026-02-02T15:38:37.704Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0e/45e1dcf10e17d0924b7c9162f87ec7b4ca79e28a0548acf6a71788d3e108/orjson-3.11.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183", size = 138295, upload-time = "2026-02-02T15:38:39.096Z" }, + { url = "https://files.pythonhosted.org/packages/63/d7/4d2e8b03561257af0450f2845b91fbd111d7e526ccdf737267108075e0ba/orjson-3.11.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650", size = 408720, upload-time = "2026-02-02T15:38:40.634Z" }, + { url = "https://files.pythonhosted.org/packages/78/cf/d45343518282108b29c12a65892445fc51f9319dc3c552ceb51bb5905ed2/orjson-3.11.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141", size = 144152, upload-time = "2026-02-02T15:38:42.262Z" }, + { url = "https://files.pythonhosted.org/packages/a9/3a/d6001f51a7275aacd342e77b735c71fa04125a3f93c36fee4526bc8c654e/orjson-3.11.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2", size = 134814, upload-time = "2026-02-02T15:38:43.627Z" }, + { url = "https://files.pythonhosted.org/packages/1d/d3/f19b47ce16820cc2c480f7f1723e17f6d411b3a295c60c8ad3aa9ff1c96a/orjson-3.11.7-cp314-cp314-win32.whl", hash = "sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576", size = 127997, upload-time = "2026-02-02T15:38:45.06Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/df/172771902943af54bf661a8d102bdf2e7f932127968080632bda6054b62c/orjson-3.11.7-cp314-cp314-win_amd64.whl", hash = "sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1", size = 124985, upload-time = "2026-02-02T15:38:46.388Z" }, + { url = "https://files.pythonhosted.org/packages/6f/1c/f2a8d8a1b17514660a614ce5f7aac74b934e69f5abc2700cc7ced882a009/orjson-3.11.7-cp314-cp314-win_arm64.whl", hash = "sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d", size = 126038, upload-time = "2026-02-02T15:38:47.703Z" }, ] [[package]] name = "ormsgpack" -version = "1.12.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/96/34c40d621996c2f377a18decbd3c59f031dde73c3ba47d1e1e8f29a05aaa/ormsgpack-1.12.1.tar.gz", hash = "sha256:a3877fde1e4f27a39f92681a0aab6385af3a41d0c25375d33590ae20410ea2ac", size = 39476, upload-time = "2025-12-14T07:57:43.248Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/da/caf25cc54d6870089a0b5614c4c5914dd3fae45f9f7f84a32445ad0612e3/ormsgpack-1.12.1-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:62e3614cab63fa5aa42f5f0ca3cd12899f0bfc5eb8a5a0ebab09d571c89d427d", size = 376182, upload-time = "2025-12-14T07:56:46.094Z" }, - { url = "https://files.pythonhosted.org/packages/fc/02/ccc9170c6bee86f428707f15b5ad68d42c71d43856e1b8e37cdfea50af5b/ormsgpack-1.12.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86d9fbf85c05c69c33c229d2eba7c8c3500a56596cd8348131c918acd040d6af", size = 202339, upload-time = "2025-12-14T07:56:47.609Z" }, - { url = "https://files.pythonhosted.org/packages/86/c7/10309a5a6421adaedab710a72470143d664bb0a043cc095c1311878325a0/ormsgpack-1.12.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d246e66f09d8e0f96e770829149ee83206e90ed12f5987998bb7be84aec99fe", size = 210720, upload-time = 
"2025-12-14T07:56:48.66Z" }, - { url = "https://files.pythonhosted.org/packages/1b/b4/92a0f7a00c5f0c71b51dc3112e53b1ca937b9891a08979d06524db11b799/ormsgpack-1.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfc2c830a1ed2d00de713d08c9e62efa699e8fd29beafa626aaebe466f583ebb", size = 211264, upload-time = "2025-12-14T07:56:49.976Z" }, - { url = "https://files.pythonhosted.org/packages/33/fa/5cce85c8e58fcaa048c75fbbe37816a1b3fb58ba4289a7dedc4f4ed9ce82/ormsgpack-1.12.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc892757d8f9eea5208268a527cf93c98409802f6a9f7c8d71a7b8f9ba5cb944", size = 386076, upload-time = "2025-12-14T07:56:51.74Z" }, - { url = "https://files.pythonhosted.org/packages/88/d0/f18d258c733eb22eadad748659f7984d0b6a851fb3deefcb33f50e9a947a/ormsgpack-1.12.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:0de1dbcf11ea739ac4a882b43d5c2055e6d99ce64e8d6502e25d6d881700c017", size = 479570, upload-time = "2025-12-14T07:56:52.912Z" }, - { url = "https://files.pythonhosted.org/packages/3f/3a/b362dff090f4740090fe51d512f24b1e320d1f96497ebf9248e2a04ac88f/ormsgpack-1.12.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d5065dfb9ec4db93241c60847624d9aeef4ccb449c26a018c216b55c69be83c0", size = 387859, upload-time = "2025-12-14T07:56:53.968Z" }, - { url = "https://files.pythonhosted.org/packages/7c/8a/d948965598b2b7872800076da5c02573aa72f716be57a3d4fe60490b2a2a/ormsgpack-1.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d17103c4726181d7000c61b751c881f1b6f401d146df12da028fc730227df19", size = 115906, upload-time = "2025-12-14T07:56:55.068Z" }, - { url = "https://files.pythonhosted.org/packages/57/e2/f5b89365c8dc8025c27d31316038f1c103758ddbf87dc0fa8e3f78f66907/ormsgpack-1.12.1-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:4038f59ae0e19dac5e5d9aae4ec17ff84a79e046342ee73ccdecf3547ecf0d34", size = 376180, upload-time = "2025-12-14T07:56:56.521Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/87/3f694e06f5e32c6d65066f53b4a025282a5072b6b336c17560b00e04606d/ormsgpack-1.12.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16c63b0c5a3eec467e4bb33a14dabba076b7d934dff62898297b5c0b5f7c3cb3", size = 202338, upload-time = "2025-12-14T07:56:57.585Z" }, - { url = "https://files.pythonhosted.org/packages/e5/f5/6d95d7b7c11f97a92522082fc7e5d1ab34537929f1e13f4c369f392f19d0/ormsgpack-1.12.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:74fd6a8e037eb310dda865298e8d122540af00fe5658ec18b97a1d34f4012e4d", size = 210720, upload-time = "2025-12-14T07:56:58.968Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9d/9a49a2686f8b7165dcb2342b8554951263c30c0f0825f1fcc2d56e736a6b/ormsgpack-1.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58ad60308e233dd824a1859eabb5fe092e123e885eafa4ad5789322329c80fb5", size = 211264, upload-time = "2025-12-14T07:57:00.099Z" }, - { url = "https://files.pythonhosted.org/packages/02/31/2fdc36eaeca2182900b96fc7b19755f293283fe681750e3d295733d62f0e/ormsgpack-1.12.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:35127464c941c1219acbe1a220e48d55e7933373d12257202f4042f7044b4c90", size = 386081, upload-time = "2025-12-14T07:57:01.177Z" }, - { url = "https://files.pythonhosted.org/packages/f0/65/0a765432f08ae26b4013c6a9aed97be17a9ef85f1600948a474b518e27dd/ormsgpack-1.12.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c48d1c50794692d1e6e3f8c3bb65f5c3acfaae9347e506484a65d60b3d91fb50", size = 479572, upload-time = "2025-12-14T07:57:02.738Z" }, - { url = "https://files.pythonhosted.org/packages/4e/4f/f2f15ebef786ad71cea420bf8692448fbddf04d1bf3feaa68bd5ee3172e6/ormsgpack-1.12.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b512b2ad6feaaefdc26e05431ed2843e42483041e354e167c53401afaa83d919", size = 387862, upload-time = "2025-12-14T07:57:03.842Z" }, - { url = 
"https://files.pythonhosted.org/packages/15/eb/86fbef1d605fa91ecef077f93f9d0e34fc39b23475dfe3ffb92f6c8db28d/ormsgpack-1.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:93f30db95e101a9616323bfc50807ad00e7f6197cea2216d2d24af42afc77d88", size = 115900, upload-time = "2025-12-14T07:57:05.137Z" }, - { url = "https://files.pythonhosted.org/packages/5b/67/7ba1a46e6a6e263fc42a4fafc24afc1ab21a66116553cad670426f0bd9ef/ormsgpack-1.12.1-cp311-cp311-win_arm64.whl", hash = "sha256:d75b5fa14f6abffce2c392ee03b4731199d8a964c81ee8645c4c79af0e80fd50", size = 109868, upload-time = "2025-12-14T07:57:06.834Z" }, - { url = "https://files.pythonhosted.org/packages/17/fe/ab9167ca037406b5703add24049cf3e18021a3b16133ea20615b1f160ea4/ormsgpack-1.12.1-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:4d7fb0e1b6fbc701d75269f7405a4f79230a6ce0063fb1092e4f6577e312f86d", size = 376725, upload-time = "2025-12-14T07:57:07.894Z" }, - { url = "https://files.pythonhosted.org/packages/c7/ea/2820e65f506894c459b840d1091ae6e327fde3d5a3f3b002a11a1b9bdf7d/ormsgpack-1.12.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43a9353e2db5b024c91a47d864ef15eaa62d81824cfc7740fed4cef7db738694", size = 202466, upload-time = "2025-12-14T07:57:09.049Z" }, - { url = "https://files.pythonhosted.org/packages/45/8b/def01c13339c5bbec2ee1469ef53e7fadd66c8d775df974ee4def1572515/ormsgpack-1.12.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc8fe866b7706fc25af0adf1f600bc06ece5b15ca44e34641327198b821e5c3c", size = 210748, upload-time = "2025-12-14T07:57:10.074Z" }, - { url = "https://files.pythonhosted.org/packages/5d/d2/bf350c92f7f067dd9484499705f2d8366d8d9008a670e3d1d0add1908f85/ormsgpack-1.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:813755b5f598a78242042e05dfd1ada4e769e94b98c9ab82554550f97ff4d641", size = 211510, upload-time = "2025-12-14T07:57:11.165Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/92/9d689bcb95304a6da26c4d59439c350940c25d1b35f146d402ccc6344c51/ormsgpack-1.12.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8eea2a13536fae45d78f93f2cc846c9765c7160c85f19cfefecc20873c137cdd", size = 386237, upload-time = "2025-12-14T07:57:12.306Z" }, - { url = "https://files.pythonhosted.org/packages/17/fe/bd3107547f8b6129265dd957f40b9cd547d2445db2292aacb13335a7ea89/ormsgpack-1.12.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7a02ebda1a863cbc604740e76faca8eee1add322db2dcbe6cf32669fffdff65c", size = 479589, upload-time = "2025-12-14T07:57:13.475Z" }, - { url = "https://files.pythonhosted.org/packages/c1/7c/e8e5cc9edb967d44f6f85e9ebdad440b59af3fae00b137a4327dc5aed9bb/ormsgpack-1.12.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3c0bd63897c439931cdf29348e5e6e8c330d529830e848d10767615c0f3d1b82", size = 388077, upload-time = "2025-12-14T07:57:14.551Z" }, - { url = "https://files.pythonhosted.org/packages/35/6b/5031797e43b58506f28a8760b26dc23f2620fb4f2200c4c1b3045603e67e/ormsgpack-1.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:362f2e812f8d7035dc25a009171e09d7cc97cb30d3c9e75a16aeae00ca3c1dcf", size = 116190, upload-time = "2025-12-14T07:57:15.575Z" }, - { url = "https://files.pythonhosted.org/packages/1e/fd/9f43ea6425e383a6b2dbfafebb06fd60e8d68c700ef715adfbcdb499f75d/ormsgpack-1.12.1-cp312-cp312-win_arm64.whl", hash = "sha256:6190281e381db2ed0045052208f47a995ccf61eed48f1215ae3cce3fbccd59c5", size = 109990, upload-time = "2025-12-14T07:57:16.419Z" }, - { url = "https://files.pythonhosted.org/packages/11/42/f110dfe7cf23a52a82e23eb23d9a6a76ae495447d474686dfa758f3d71d6/ormsgpack-1.12.1-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:9663d6b3ecc917c063d61a99169ce196a80f3852e541ae404206836749459279", size = 376746, upload-time = "2025-12-14T07:57:17.699Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/76/b386e508a8ae207daec240201a81adb26467bf99b163560724e86bd9ff33/ormsgpack-1.12.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32e85cfbaf01a94a92520e7fe7851cfcfe21a5698299c28ab86194895f9b9233", size = 202489, upload-time = "2025-12-14T07:57:18.807Z" }, - { url = "https://files.pythonhosted.org/packages/ea/0e/5db7a63f387149024572daa3d9512fe8fb14bf4efa0722d6d491bed280e7/ormsgpack-1.12.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabfd2c24b59c7c69870a5ecee480dfae914a42a0c2e7c9d971cf531e2ba471a", size = 210757, upload-time = "2025-12-14T07:57:19.893Z" }, - { url = "https://files.pythonhosted.org/packages/64/79/3a9899e57cb57430bd766fc1b4c9ad410cb2ba6070bc8cf6301e7d385768/ormsgpack-1.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51bbf2b64afeded34ccd8e25402e4bca038757913931fa0d693078d75563f6f9", size = 211518, upload-time = "2025-12-14T07:57:20.972Z" }, - { url = "https://files.pythonhosted.org/packages/d7/cd/4f41710ae9fe50d7fcbe476793b3c487746d0e1cc194cc0fee42ff6d989b/ormsgpack-1.12.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9959a71dde1bd0ced84af17facc06a8afada495a34e9cb1bad8e9b20d4c59cef", size = 386251, upload-time = "2025-12-14T07:57:22.099Z" }, - { url = "https://files.pythonhosted.org/packages/bf/54/ba0c97d6231b1f01daafaa520c8cce1e1b7fceaae6fdc1c763925874a7de/ormsgpack-1.12.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:e9be0e3b62d758f21f5b20e0e06b3a240ec546c4a327bf771f5825462aa74714", size = 479607, upload-time = "2025-12-14T07:57:23.525Z" }, - { url = "https://files.pythonhosted.org/packages/18/75/19a9a97a462776d525baf41cfb7072734528775f0a3d5fbfab3aa7756b9b/ormsgpack-1.12.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a29d49ab7fdd77ea787818e60cb4ef491708105b9c4c9b0f919201625eb036b5", size = 388062, upload-time = "2025-12-14T07:57:24.616Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/6a/ec26e3f44e9632ecd2f43638b7b37b500eaea5d79cab984ad0b94be14f82/ormsgpack-1.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:c418390b47a1d367e803f6c187f77e4d67c7ae07ba962e3a4a019001f4b0291a", size = 116195, upload-time = "2025-12-14T07:57:25.626Z" }, - { url = "https://files.pythonhosted.org/packages/7d/64/bfa5f4a34d0f15c6aba1b73e73f7441a66d635bd03249d334a4796b7a924/ormsgpack-1.12.1-cp313-cp313-win_arm64.whl", hash = "sha256:cfa22c91cffc10a7fbd43729baff2de7d9c28cef2509085a704168ae31f02568", size = 109986, upload-time = "2025-12-14T07:57:26.569Z" }, - { url = "https://files.pythonhosted.org/packages/87/0e/78e5697164e3223b9b216c13e99f1acbc1ee9833490d68842b13da8ba883/ormsgpack-1.12.1-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:b93c91efb1a70751a1902a5b43b27bd8fd38e0ca0365cf2cde2716423c15c3a6", size = 376758, upload-time = "2025-12-14T07:57:27.641Z" }, - { url = "https://files.pythonhosted.org/packages/2c/0e/3a3cbb64703263d7bbaed7effa3ce78cb9add360a60aa7c544d7df28b641/ormsgpack-1.12.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf0ea0389167b5fa8d2933dd3f33e887ec4ba68f89c25214d7eec4afd746d22", size = 202487, upload-time = "2025-12-14T07:57:29.051Z" }, - { url = "https://files.pythonhosted.org/packages/d7/2c/807ebe2b77995599bbb1dec8c3f450d5d7dddee14ce3e1e71dc60e2e2a74/ormsgpack-1.12.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4c29af837f35af3375070689e781161e7cf019eb2f7cd641734ae45cd001c0d", size = 210853, upload-time = "2025-12-14T07:57:30.508Z" }, - { url = "https://files.pythonhosted.org/packages/25/57/2cdfc354e3ad8e847628f511f4d238799d90e9e090941e50b9d5ba955ae2/ormsgpack-1.12.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336fc65aa0fe65896a3dabaae31e332a0a98b4a00ad7b0afde21a7505fd23ff3", size = 211545, upload-time = "2025-12-14T07:57:31.585Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/1d/c6fda560e4a8ff865b3aec8a86f7c95ab53f4532193a6ae4ab9db35f85aa/ormsgpack-1.12.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:940f60aabfefe71dd6b82cb33f4ff10b2e7f5fcfa5f103cdb0a23b6aae4c713c", size = 386333, upload-time = "2025-12-14T07:57:32.957Z" }, - { url = "https://files.pythonhosted.org/packages/fc/3e/715081b36fceb8b497c68b87d384e1cc6d9c9c130ce3b435634d3d785b86/ormsgpack-1.12.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:596ad9e1b6d4c95595c54aaf49b1392609ca68f562ce06f4f74a5bc4053bcda4", size = 479701, upload-time = "2025-12-14T07:57:34.686Z" }, - { url = "https://files.pythonhosted.org/packages/6d/cf/01ad04def42b3970fc1a302c07f4b46339edf62ef9650247097260471f40/ormsgpack-1.12.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:575210e8fcbc7b0375026ba040a5eef223e9f66a4453d9623fc23282ae09c3c8", size = 388148, upload-time = "2025-12-14T07:57:35.771Z" }, - { url = "https://files.pythonhosted.org/packages/15/91/1fff2fc2b5943c740028f339154e7103c8f2edf1a881d9fbba2ce11c3b1d/ormsgpack-1.12.1-cp314-cp314-win_amd64.whl", hash = "sha256:647daa3718572280893456be44c60aea6690b7f2edc54c55648ee66e8f06550f", size = 116201, upload-time = "2025-12-14T07:57:36.763Z" }, - { url = "https://files.pythonhosted.org/packages/ed/66/142b542aed3f96002c7d1c33507ca6e1e0d0a42b9253ab27ef7ed5793bd9/ormsgpack-1.12.1-cp314-cp314-win_arm64.whl", hash = "sha256:a8b3ab762a6deaf1b6490ab46dda0c51528cf8037e0246c40875c6fe9e37b699", size = 110029, upload-time = "2025-12-14T07:57:37.703Z" }, - { url = "https://files.pythonhosted.org/packages/38/b3/ef4494438c90359e1547eaed3c5ec46e2c431d59a3de2af4e70ebd594c49/ormsgpack-1.12.1-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:12087214e436c1f6c28491949571abea759a63111908c4f7266586d78144d7a8", size = 376777, upload-time = "2025-12-14T07:57:38.795Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/a0/1149a7163f8b0dfbc64bf9099b6f16d102ad3b03bcc11afee198d751da2d/ormsgpack-1.12.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6d54c14cf86ef13f10ccade94d1e7de146aa9b17d371e18b16e95f329393b7", size = 202490, upload-time = "2025-12-14T07:57:40.168Z" }, - { url = "https://files.pythonhosted.org/packages/68/82/f2ec5e758d6a7106645cca9bb7137d98bce5d363789fa94075be6572057c/ormsgpack-1.12.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f3584d07882b7ea2a1a589f795a3af97fe4c2932b739408e6d1d9d286cad862", size = 211733, upload-time = "2025-12-14T07:57:42.253Z" }, +version = "1.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/12/0c/f1761e21486942ab9bb6feaebc610fa074f7c5e496e6962dea5873348077/ormsgpack-1.12.2.tar.gz", hash = "sha256:944a2233640273bee67521795a73cf1e959538e0dfb7ac635505010455e53b33", size = 39031, upload-time = "2026-01-18T20:55:28.023Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/fa/a91f70829ebccf6387c4946e0a1a109f6ba0d6a28d65f628bedfad94b890/ormsgpack-1.12.2-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c1429217f8f4d7fcb053523bbbac6bed5e981af0b85ba616e6df7cce53c19657", size = 378262, upload-time = "2026-01-18T20:55:22.284Z" }, + { url = "https://files.pythonhosted.org/packages/5f/62/3698a9a0c487252b5c6a91926e5654e79e665708ea61f67a8bdeceb022bf/ormsgpack-1.12.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f13034dc6c84a6280c6c33db7ac420253852ea233fc3ee27c8875f8dd651163", size = 203034, upload-time = "2026-01-18T20:55:53.324Z" }, + { url = "https://files.pythonhosted.org/packages/66/3a/f716f64edc4aec2744e817660b317e2f9bb8de372338a95a96198efa1ac1/ormsgpack-1.12.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:59f5da97000c12bc2d50e988bdc8576b21f6ab4e608489879d35b2c07a8ab51a", size = 210538, upload-time = "2026-01-18T20:55:20.097Z" }, + { url = "https://files.pythonhosted.org/packages/72/30/a436be9ce27d693d4e19fa94900028067133779f09fc45776db3f689c822/ormsgpack-1.12.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e4459c3f27066beadb2b81ea48a076a417aafffff7df1d3c11c519190ed44f2", size = 212401, upload-time = "2026-01-18T20:55:46.447Z" }, + { url = "https://files.pythonhosted.org/packages/10/c5/cde98300fd33fee84ca71de4751b19aeeca675f0cf3c0ec4b043f40f3b76/ormsgpack-1.12.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a1c460655d7288407ffa09065e322a7231997c0d62ce914bf3a96ad2dc6dedd", size = 387080, upload-time = "2026-01-18T20:56:00.884Z" }, + { url = "https://files.pythonhosted.org/packages/6a/31/30bf445ef827546747c10889dd254b3d84f92b591300efe4979d792f4c41/ormsgpack-1.12.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:458e4568be13d311ef7d8877275e7ccbe06c0e01b39baaac874caaa0f46d826c", size = 482346, upload-time = "2026-01-18T20:55:39.831Z" }, + { url = "https://files.pythonhosted.org/packages/2e/f5/e1745ddf4fa246c921b5ca253636c4c700ff768d78032f79171289159f6e/ormsgpack-1.12.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8cde5eaa6c6cbc8622db71e4a23de56828e3d876aeb6460ffbcb5b8aff91093b", size = 425178, upload-time = "2026-01-18T20:55:27.106Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a2/e6532ed7716aed03dede8df2d0d0d4150710c2122647d94b474147ccd891/ormsgpack-1.12.2-cp310-cp310-win_amd64.whl", hash = "sha256:dc7a33be14c347893edbb1ceda89afbf14c467d593a5ee92c11de4f1666b4d4f", size = 117183, upload-time = "2026-01-18T20:55:55.52Z" }, + { url = "https://files.pythonhosted.org/packages/4b/08/8b68f24b18e69d92238aa8f258218e6dfeacf4381d9d07ab8df303f524a9/ormsgpack-1.12.2-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = 
"sha256:bd5f4bf04c37888e864f08e740c5a573c4017f6fd6e99fa944c5c935fabf2dd9", size = 378266, upload-time = "2026-01-18T20:55:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/29fc13044ecb7c153523ae0a1972269fcd613650d1fa1a9cec1044c6b666/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d5b28b3570e9fed9a5a76528fc7230c3c76333bc214798958e58e9b79cc18a", size = 203035, upload-time = "2026-01-18T20:55:30.59Z" }, + { url = "https://files.pythonhosted.org/packages/ad/c2/00169fb25dd8f9213f5e8a549dfb73e4d592009ebc85fbbcd3e1dcac575b/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3708693412c28f3538fb5a65da93787b6bbab3484f6bc6e935bfb77a62400ae5", size = 210539, upload-time = "2026-01-18T20:55:48.569Z" }, + { url = "https://files.pythonhosted.org/packages/1b/33/543627f323ff3c73091f51d6a20db28a1a33531af30873ea90c5ac95a9b5/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43013a3f3e2e902e1d05e72c0f1aeb5bedbb8e09240b51e26792a3c89267e181", size = 212401, upload-time = "2026-01-18T20:56:10.101Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5d/f70e2c3da414f46186659d24745483757bcc9adccb481a6eb93e2b729301/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7c8b1667a72cbba74f0ae7ecf3105a5e01304620ed14528b2cb4320679d2869b", size = 387082, upload-time = "2026-01-18T20:56:12.047Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d6/06e8dc920c7903e051f30934d874d4afccc9bb1c09dcaf0bc03a7de4b343/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:df6961442140193e517303d0b5d7bc2e20e69a879c2d774316125350c4a76b92", size = 482346, upload-time = "2026-01-18T20:56:05.152Z" }, + { url = "https://files.pythonhosted.org/packages/66/c4/f337ac0905eed9c393ef990c54565cd33644918e0a8031fe48c098c71dbf/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:c6a4c34ddef109647c769d69be65fa1de7a6022b02ad45546a69b3216573eb4a", size = 425181, upload-time = "2026-01-18T20:55:37.83Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/6d5758fabef3babdf4bbbc453738cc7de9cd3334e4c38dd5737e27b85653/ormsgpack-1.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:73670ed0375ecc303858e3613f407628dd1fca18fe6ac57b7b7ce66cc7bb006c", size = 117182, upload-time = "2026-01-18T20:55:31.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/57/17a15549233c37e7fd054c48fe9207492e06b026dbd872b826a0b5f833b6/ormsgpack-1.12.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2be829954434e33601ae5da328cccce3266b098927ca7a30246a0baec2ce7bd", size = 111464, upload-time = "2026-01-18T20:55:38.811Z" }, + { url = "https://files.pythonhosted.org/packages/4c/36/16c4b1921c308a92cef3bf6663226ae283395aa0ff6e154f925c32e91ff5/ormsgpack-1.12.2-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7a29d09b64b9694b588ff2f80e9826bdceb3a2b91523c5beae1fab27d5c940e7", size = 378618, upload-time = "2026-01-18T20:55:50.835Z" }, + { url = "https://files.pythonhosted.org/packages/c0/68/468de634079615abf66ed13bb5c34ff71da237213f29294363beeeca5306/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b39e629fd2e1c5b2f46f99778450b59454d1f901bc507963168985e79f09c5d", size = 203186, upload-time = "2026-01-18T20:56:11.163Z" }, + { url = "https://files.pythonhosted.org/packages/73/a9/d756e01961442688b7939bacd87ce13bfad7d26ce24f910f6028178b2cc8/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:958dcb270d30a7cb633a45ee62b9444433fa571a752d2ca484efdac07480876e", size = 210738, upload-time = "2026-01-18T20:56:09.181Z" }, + { url = "https://files.pythonhosted.org/packages/7b/ba/795b1036888542c9113269a3f5690ab53dd2258c6fb17676ac4bd44fcf94/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:58d379d72b6c5e964851c77cfedfb386e474adee4fd39791c2c5d9efb53505cc", size = 212569, upload-time = "2026-01-18T20:56:06.135Z" }, + { url = "https://files.pythonhosted.org/packages/6c/aa/bff73c57497b9e0cba8837c7e4bcab584b1a6dbc91a5dd5526784a5030c8/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8463a3fc5f09832e67bdb0e2fda6d518dc4281b133166146a67f54c08496442e", size = 387166, upload-time = "2026-01-18T20:55:36.738Z" }, + { url = "https://files.pythonhosted.org/packages/d3/cf/f8283cba44bcb7b14f97b6274d449db276b3a86589bdb363169b51bc12de/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:eddffb77eff0bad4e67547d67a130604e7e2dfbb7b0cde0796045be4090f35c6", size = 482498, upload-time = "2026-01-18T20:55:29.626Z" }, + { url = "https://files.pythonhosted.org/packages/05/be/71e37b852d723dfcbe952ad04178c030df60d6b78eba26bfd14c9a40575e/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fcd55e5f6ba0dbce624942adf9f152062135f991a0126064889f68eb850de0dd", size = 425518, upload-time = "2026-01-18T20:55:49.556Z" }, + { url = "https://files.pythonhosted.org/packages/7a/0c/9803aa883d18c7ef197213cd2cbf73ba76472a11fe100fb7dab2884edf48/ormsgpack-1.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:d024b40828f1dde5654faebd0d824f9cc29ad46891f626272dd5bfd7af2333a4", size = 117462, upload-time = "2026-01-18T20:55:47.726Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9e/029e898298b2cc662f10d7a15652a53e3b525b1e7f07e21fef8536a09bb8/ormsgpack-1.12.2-cp312-cp312-win_arm64.whl", hash = "sha256:da538c542bac7d1c8f3f2a937863dba36f013108ce63e55745941dda4b75dbb6", size = 111559, upload-time = "2026-01-18T20:55:54.273Z" }, + { url = "https://files.pythonhosted.org/packages/eb/29/bb0eba3288c0449efbb013e9c6f58aea79cf5cb9ee1921f8865f04c1a9d7/ormsgpack-1.12.2-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5ea60cb5f210b1cfbad8c002948d73447508e629ec375acb82910e3efa8ff355", size = 378661, 
upload-time = "2026-01-18T20:55:57.765Z" }, + { url = "https://files.pythonhosted.org/packages/6e/31/5efa31346affdac489acade2926989e019e8ca98129658a183e3add7af5e/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3601f19afdbea273ed70b06495e5794606a8b690a568d6c996a90d7255e51c1", size = 203194, upload-time = "2026-01-18T20:56:08.252Z" }, + { url = "https://files.pythonhosted.org/packages/eb/56/d0087278beef833187e0167f8527235ebe6f6ffc2a143e9de12a98b1ce87/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29a9f17a3dac6054c0dce7925e0f4995c727f7c41859adf9b5572180f640d172", size = 210778, upload-time = "2026-01-18T20:55:17.694Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a2/072343e1413d9443e5a252a8eb591c2d5b1bffbe5e7bfc78c069361b92eb/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39c1bd2092880e413902910388be8715f70b9f15f20779d44e673033a6146f2d", size = 212592, upload-time = "2026-01-18T20:55:32.747Z" }, + { url = "https://files.pythonhosted.org/packages/a2/8b/a0da3b98a91d41187a63b02dda14267eefc2a74fcb43cc2701066cf1510e/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:50b7249244382209877deedeee838aef1542f3d0fc28b8fe71ca9d7e1896a0d7", size = 387164, upload-time = "2026-01-18T20:55:40.853Z" }, + { url = "https://files.pythonhosted.org/packages/19/bb/6d226bc4cf9fc20d8eb1d976d027a3f7c3491e8f08289a2e76abe96a65f3/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:5af04800d844451cf102a59c74a841324868d3f1625c296a06cc655c542a6685", size = 482516, upload-time = "2026-01-18T20:55:42.033Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/bb2c7223398543dedb3dbf8bb93aaa737b387de61c5feaad6f908841b782/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cec70477d4371cd524534cd16472d8b9cc187e0e3043a8790545a9a9b296c258", size = 425539, upload-time = "2026-01-18T20:55:24.727Z" 
}, + { url = "https://files.pythonhosted.org/packages/7b/e8/0fb45f57a2ada1fed374f7494c8cd55e2f88ccd0ab0a669aa3468716bf5f/ormsgpack-1.12.2-cp313-cp313-win_amd64.whl", hash = "sha256:21f4276caca5c03a818041d637e4019bc84f9d6ca8baa5ea03e5cc8bf56140e9", size = 117459, upload-time = "2026-01-18T20:55:56.876Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d4/0cfeea1e960d550a131001a7f38a5132c7ae3ebde4c82af1f364ccc5d904/ormsgpack-1.12.2-cp313-cp313-win_arm64.whl", hash = "sha256:baca4b6773d20a82e36d6fd25f341064244f9f86a13dead95dd7d7f996f51709", size = 111577, upload-time = "2026-01-18T20:55:43.605Z" }, + { url = "https://files.pythonhosted.org/packages/94/16/24d18851334be09c25e87f74307c84950f18c324a4d3c0b41dabdbf19c29/ormsgpack-1.12.2-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:bc68dd5915f4acf66ff2010ee47c8906dc1cf07399b16f4089f8c71733f6e36c", size = 378717, upload-time = "2026-01-18T20:55:26.164Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a2/88b9b56f83adae8032ac6a6fa7f080c65b3baf9b6b64fd3d37bd202991d4/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46d084427b4132553940070ad95107266656cb646ea9da4975f85cb1a6676553", size = 203183, upload-time = "2026-01-18T20:55:18.815Z" }, + { url = "https://files.pythonhosted.org/packages/a9/80/43e4555963bf602e5bdc79cbc8debd8b6d5456c00d2504df9775e74b450b/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c010da16235806cf1d7bc4c96bf286bfa91c686853395a299b3ddb49499a3e13", size = 210814, upload-time = "2026-01-18T20:55:33.973Z" }, + { url = "https://files.pythonhosted.org/packages/78/e1/7cfbf28de8bca6efe7e525b329c31277d1b64ce08dcba723971c241a9d60/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18867233df592c997154ff942a6503df274b5ac1765215bceba7a231bea2745d", size = 212634, upload-time = "2026-01-18T20:55:28.634Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/f8/30ae5716e88d792a4e879debee195653c26ddd3964c968594ddef0a3cc7e/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b009049086ddc6b8f80c76b3955df1aa22a5fbd7673c525cd63bf91f23122ede", size = 387139, upload-time = "2026-01-18T20:56:02.013Z" }, + { url = "https://files.pythonhosted.org/packages/dc/81/aee5b18a3e3a0e52f718b37ab4b8af6fae0d9d6a65103036a90c2a8ffb5d/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:1dcc17d92b6390d4f18f937cf0b99054824a7815818012ddca925d6e01c2e49e", size = 482578, upload-time = "2026-01-18T20:55:35.117Z" }, + { url = "https://files.pythonhosted.org/packages/bd/17/71c9ba472d5d45f7546317f467a5fc941929cd68fb32796ca3d13dcbaec2/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f04b5e896d510b07c0ad733d7fce2d44b260c5e6c402d272128f8941984e4285", size = 425539, upload-time = "2026-01-18T20:56:04.009Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a6/ac99cd7fe77e822fed5250ff4b86fa66dd4238937dd178d2299f10b69816/ormsgpack-1.12.2-cp314-cp314-win_amd64.whl", hash = "sha256:ae3aba7eed4ca7cb79fd3436eddd29140f17ea254b91604aa1eb19bfcedb990f", size = 117493, upload-time = "2026-01-18T20:56:07.343Z" }, + { url = "https://files.pythonhosted.org/packages/3a/67/339872846a1ae4592535385a1c1f93614138566d7af094200c9c3b45d1e5/ormsgpack-1.12.2-cp314-cp314-win_arm64.whl", hash = "sha256:118576ea6006893aea811b17429bfc561b4778fad393f5f538c84af70b01260c", size = 111579, upload-time = "2026-01-18T20:55:21.161Z" }, + { url = "https://files.pythonhosted.org/packages/49/c2/6feb972dc87285ad381749d3882d8aecbde9f6ecf908dd717d33d66df095/ormsgpack-1.12.2-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7121b3d355d3858781dc40dafe25a32ff8a8242b9d80c692fd548a4b1f7fd3c8", size = 378721, upload-time = "2026-01-18T20:55:52.12Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/9a/900a6b9b413e0f8a471cf07830f9cf65939af039a362204b36bd5b581d8b/ormsgpack-1.12.2-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ee766d2e78251b7a63daf1cddfac36a73562d3ddef68cacfb41b2af64698033", size = 203170, upload-time = "2026-01-18T20:55:44.469Z" }, + { url = "https://files.pythonhosted.org/packages/87/4c/27a95466354606b256f24fad464d7c97ab62bce6cc529dd4673e1179b8fb/ormsgpack-1.12.2-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292410a7d23de9b40444636b9b8f1e4e4b814af7f1ef476e44887e52a123f09d", size = 212816, upload-time = "2026-01-18T20:55:23.501Z" }, + { url = "https://files.pythonhosted.org/packages/73/cd/29cee6007bddf7a834e6cd6f536754c0535fcb939d384f0f37a38b1cddb8/ormsgpack-1.12.2-cp314-cp314t-win_amd64.whl", hash = "sha256:837dd316584485b72ef451d08dd3e96c4a11d12e4963aedb40e08f89685d8ec2", size = 117232, upload-time = "2026-01-18T20:55:45.448Z" }, ] [[package]] @@ -5812,12 +6532,17 @@ wheels = [ name = "pandas" version = "2.3.3" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version < '3.11' and sys_platform == 'win32'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", +] dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "python-dateutil" }, - { name = "pytz" }, - { name = "tzdata" }, + { name = "python-dateutil", marker = "python_full_version < '3.11'" 
}, + { name = "pytz", marker = "python_full_version < '3.11'" }, + { name = "tzdata", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } wheels = [ @@ -5870,27 +6595,105 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" }, ] +[[package]] +name = "pandas" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", + "python_full_version == '3.12.*' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version == '3.12.*' 
and sys_platform == 'win32'", + "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.11.*' and sys_platform == 'win32'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", +] +dependencies = [ + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "python-dateutil", marker = "python_full_version >= '3.11'" }, + { name = "tzdata", marker = "(python_full_version >= '3.11' and sys_platform == 'emscripten') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/da/b1dc0481ab8d55d0f46e343cfe67d4551a0e14fcee52bd38ca1bd73258d8/pandas-3.0.0.tar.gz", hash = "sha256:0facf7e87d38f721f0af46fe70d97373a37701b1c09f7ed7aeeb292ade5c050f", size = 4633005, upload-time = "2026-01-21T15:52:04.726Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/1e/b184654a856e75e975a6ee95d6577b51c271cd92cb2b020c9378f53e0032/pandas-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d64ce01eb9cdca96a15266aa679ae50212ec52757c79204dbc7701a222401850", size = 10313247, upload-time = "2026-01-21T15:50:15.775Z" }, + { url = "https://files.pythonhosted.org/packages/dd/5e/e04a547ad0f0183bf151fd7c7a477468e3b85ff2ad231c566389e6cc9587/pandas-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:613e13426069793aa1ec53bdcc3b86e8d32071daea138bbcf4fa959c9cdaa2e2", size = 9913131, upload-time = 
"2026-01-21T15:50:18.611Z" }, + { url = "https://files.pythonhosted.org/packages/a2/93/bb77bfa9fc2aba9f7204db807d5d3fb69832ed2854c60ba91b4c65ba9219/pandas-3.0.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0192fee1f1a8e743b464a6607858ee4b071deb0b118eb143d71c2a1d170996d5", size = 10741925, upload-time = "2026-01-21T15:50:21.058Z" }, + { url = "https://files.pythonhosted.org/packages/62/fb/89319812eb1d714bfc04b7f177895caeba8ab4a37ef6712db75ed786e2e0/pandas-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f0b853319dec8d5e0c8b875374c078ef17f2269986a78168d9bd57e49bf650ae", size = 11245979, upload-time = "2026-01-21T15:50:23.413Z" }, + { url = "https://files.pythonhosted.org/packages/a9/63/684120486f541fc88da3862ed31165b3b3e12b6a1c7b93be4597bc84e26c/pandas-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:707a9a877a876c326ae2cb640fbdc4ef63b0a7b9e2ef55c6df9942dcee8e2af9", size = 11756337, upload-time = "2026-01-21T15:50:25.932Z" }, + { url = "https://files.pythonhosted.org/packages/39/92/7eb0ad232312b59aec61550c3c81ad0743898d10af5df7f80bc5e5065416/pandas-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:afd0aa3d0b5cda6e0b8ffc10dbcca3b09ef3cbcd3fe2b27364f85fdc04e1989d", size = 12325517, upload-time = "2026-01-21T15:50:27.952Z" }, + { url = "https://files.pythonhosted.org/packages/51/27/bf9436dd0a4fc3130acec0828951c7ef96a0631969613a9a35744baf27f6/pandas-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:113b4cca2614ff7e5b9fee9b6f066618fe73c5a83e99d721ffc41217b2bf57dd", size = 9881576, upload-time = "2026-01-21T15:50:30.149Z" }, + { url = "https://files.pythonhosted.org/packages/e7/2b/c618b871fce0159fd107516336e82891b404e3f340821853c2fc28c7830f/pandas-3.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c14837eba8e99a8da1527c0280bba29b0eb842f64aa94982c5e21227966e164b", size = 9140807, upload-time = "2026-01-21T15:50:32.308Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/38/db33686f4b5fa64d7af40d96361f6a4615b8c6c8f1b3d334eee46ae6160e/pandas-3.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9803b31f5039b3c3b10cc858c5e40054adb4b29b4d81cb2fd789f4121c8efbcd", size = 10334013, upload-time = "2026-01-21T15:50:34.771Z" }, + { url = "https://files.pythonhosted.org/packages/a5/7b/9254310594e9774906bacdd4e732415e1f86ab7dbb4b377ef9ede58cd8ec/pandas-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14c2a4099cd38a1d18ff108168ea417909b2dea3bd1ebff2ccf28ddb6a74d740", size = 9874154, upload-time = "2026-01-21T15:50:36.67Z" }, + { url = "https://files.pythonhosted.org/packages/63/d4/726c5a67a13bc66643e66d2e9ff115cead482a44fc56991d0c4014f15aaf/pandas-3.0.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d257699b9a9960e6125686098d5714ac59d05222bef7a5e6af7a7fd87c650801", size = 10384433, upload-time = "2026-01-21T15:50:39.132Z" }, + { url = "https://files.pythonhosted.org/packages/bf/2e/9211f09bedb04f9832122942de8b051804b31a39cfbad199a819bb88d9f3/pandas-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:69780c98f286076dcafca38d8b8eee1676adf220199c0a39f0ecbf976b68151a", size = 10864519, upload-time = "2026-01-21T15:50:41.043Z" }, + { url = "https://files.pythonhosted.org/packages/00/8d/50858522cdc46ac88b9afdc3015e298959a70a08cd21e008a44e9520180c/pandas-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4a66384f017240f3858a4c8a7cf21b0591c3ac885cddb7758a589f0f71e87ebb", size = 11394124, upload-time = "2026-01-21T15:50:43.377Z" }, + { url = "https://files.pythonhosted.org/packages/86/3f/83b2577db02503cd93d8e95b0f794ad9d4be0ba7cb6c8bcdcac964a34a42/pandas-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be8c515c9bc33989d97b89db66ea0cececb0f6e3c2a87fcc8b69443a6923e95f", size = 11920444, upload-time = "2026-01-21T15:50:45.932Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/2d/4f8a2f192ed12c90a0aab47f5557ece0e56b0370c49de9454a09de7381b2/pandas-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a453aad8c4f4e9f166436994a33884442ea62aa8b27d007311e87521b97246e1", size = 9730970, upload-time = "2026-01-21T15:50:47.962Z" }, + { url = "https://files.pythonhosted.org/packages/d4/64/ff571be435cf1e643ca98d0945d76732c0b4e9c37191a89c8550b105eed1/pandas-3.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:da768007b5a33057f6d9053563d6b74dd6d029c337d93c6d0d22a763a5c2ecc0", size = 9041950, upload-time = "2026-01-21T15:50:50.422Z" }, + { url = "https://files.pythonhosted.org/packages/6f/fa/7f0ac4ca8877c57537aaff2a842f8760e630d8e824b730eb2e859ffe96ca/pandas-3.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b78d646249b9a2bc191040988c7bb524c92fa8534fb0898a0741d7e6f2ffafa6", size = 10307129, upload-time = "2026-01-21T15:50:52.877Z" }, + { url = "https://files.pythonhosted.org/packages/6f/11/28a221815dcea4c0c9414dfc845e34a84a6a7dabc6da3194498ed5ba4361/pandas-3.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bc9cba7b355cb4162442a88ce495e01cb605f17ac1e27d6596ac963504e0305f", size = 9850201, upload-time = "2026-01-21T15:50:54.807Z" }, + { url = "https://files.pythonhosted.org/packages/ba/da/53bbc8c5363b7e5bd10f9ae59ab250fc7a382ea6ba08e4d06d8694370354/pandas-3.0.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c9a1a149aed3b6c9bf246033ff91e1b02d529546c5d6fb6b74a28fea0cf4c70", size = 10354031, upload-time = "2026-01-21T15:50:57.463Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a3/51e02ebc2a14974170d51e2410dfdab58870ea9bcd37cda15bd553d24dc4/pandas-3.0.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95683af6175d884ee89471842acfca29172a85031fccdabc35e50c0984470a0e", size = 10861165, upload-time = "2026-01-21T15:50:59.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/fe/05a51e3cac11d161472b8297bd41723ea98013384dd6d76d115ce3482f9b/pandas-3.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1fbbb5a7288719e36b76b4f18d46ede46e7f916b6c8d9915b756b0a6c3f792b3", size = 11359359, upload-time = "2026-01-21T15:51:02.014Z" }, + { url = "https://files.pythonhosted.org/packages/ee/56/ba620583225f9b85a4d3e69c01df3e3870659cc525f67929b60e9f21dcd1/pandas-3.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e8b9808590fa364416b49b2a35c1f4cf2785a6c156935879e57f826df22038e", size = 11912907, upload-time = "2026-01-21T15:51:05.175Z" }, + { url = "https://files.pythonhosted.org/packages/c9/8c/c6638d9f67e45e07656b3826405c5cc5f57f6fd07c8b2572ade328c86e22/pandas-3.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:98212a38a709feb90ae658cb6227ea3657c22ba8157d4b8f913cd4c950de5e7e", size = 9732138, upload-time = "2026-01-21T15:51:07.569Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bf/bd1335c3bf1770b6d8fed2799993b11c4971af93bb1b729b9ebbc02ca2ec/pandas-3.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:177d9df10b3f43b70307a149d7ec49a1229a653f907aa60a48f1877d0e6be3be", size = 9033568, upload-time = "2026-01-21T15:51:09.484Z" }, + { url = "https://files.pythonhosted.org/packages/8e/c6/f5e2171914d5e29b9171d495344097d54e3ffe41d2d85d8115baba4dc483/pandas-3.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2713810ad3806767b89ad3b7b69ba153e1c6ff6d9c20f9c2140379b2a98b6c98", size = 10741936, upload-time = "2026-01-21T15:51:11.693Z" }, + { url = "https://files.pythonhosted.org/packages/51/88/9a0164f99510a1acb9f548691f022c756c2314aad0d8330a24616c14c462/pandas-3.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:15d59f885ee5011daf8335dff47dcb8a912a27b4ad7826dc6cbe809fd145d327", size = 10393884, upload-time = "2026-01-21T15:51:14.197Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/53/b34d78084d88d8ae2b848591229da8826d1e65aacf00b3abe34023467648/pandas-3.0.0-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24e6547fb64d2c92665dd2adbfa4e85fa4fd70a9c070e7cfb03b629a0bbab5eb", size = 10310740, upload-time = "2026-01-21T15:51:16.093Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d3/bee792e7c3d6930b74468d990604325701412e55d7aaf47460a22311d1a5/pandas-3.0.0-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48ee04b90e2505c693d3f8e8f524dab8cb8aaf7ddcab52c92afa535e717c4812", size = 10700014, upload-time = "2026-01-21T15:51:18.818Z" }, + { url = "https://files.pythonhosted.org/packages/55/db/2570bc40fb13aaed1cbc3fbd725c3a60ee162477982123c3adc8971e7ac1/pandas-3.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66f72fb172959af42a459e27a8d8d2c7e311ff4c1f7db6deb3b643dbc382ae08", size = 11323737, upload-time = "2026-01-21T15:51:20.784Z" }, + { url = "https://files.pythonhosted.org/packages/bc/2e/297ac7f21c8181b62a4cccebad0a70caf679adf3ae5e83cb676194c8acc3/pandas-3.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4a4a400ca18230976724a5066f20878af785f36c6756e498e94c2a5e5d57779c", size = 11771558, upload-time = "2026-01-21T15:51:22.977Z" }, + { url = "https://files.pythonhosted.org/packages/0a/46/e1c6876d71c14332be70239acce9ad435975a80541086e5ffba2f249bcf6/pandas-3.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:940eebffe55528074341a5a36515f3e4c5e25e958ebbc764c9502cfc35ba3faa", size = 10473771, upload-time = "2026-01-21T15:51:25.285Z" }, + { url = "https://files.pythonhosted.org/packages/c0/db/0270ad9d13c344b7a36fa77f5f8344a46501abf413803e885d22864d10bf/pandas-3.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:597c08fb9fef0edf1e4fa2f9828dd27f3d78f9b8c9b4a748d435ffc55732310b", size = 10312075, upload-time = "2026-01-21T15:51:28.5Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/9f/c176f5e9717f7c91becfe0f55a52ae445d3f7326b4a2cf355978c51b7913/pandas-3.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:447b2d68ac5edcbf94655fe909113a6dba6ef09ad7f9f60c80477825b6c489fe", size = 9900213, upload-time = "2026-01-21T15:51:30.955Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e7/63ad4cc10b257b143e0a5ebb04304ad806b4e1a61c5da25f55896d2ca0f4/pandas-3.0.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:debb95c77ff3ed3ba0d9aa20c3a2f19165cc7956362f9873fce1ba0a53819d70", size = 10428768, upload-time = "2026-01-21T15:51:33.018Z" }, + { url = "https://files.pythonhosted.org/packages/9e/0e/4e4c2d8210f20149fd2248ef3fff26623604922bd564d915f935a06dd63d/pandas-3.0.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fedabf175e7cd82b69b74c30adbaa616de301291a5231138d7242596fc296a8d", size = 10882954, upload-time = "2026-01-21T15:51:35.287Z" }, + { url = "https://files.pythonhosted.org/packages/c6/60/c9de8ac906ba1f4d2250f8a951abe5135b404227a55858a75ad26f84db47/pandas-3.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:412d1a89aab46889f3033a386912efcdfa0f1131c5705ff5b668dda88305e986", size = 11430293, upload-time = "2026-01-21T15:51:37.57Z" }, + { url = "https://files.pythonhosted.org/packages/a1/69/806e6637c70920e5787a6d6896fd707f8134c2c55cd761e7249a97b7dc5a/pandas-3.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e979d22316f9350c516479dd3a92252be2937a9531ed3a26ec324198a99cdd49", size = 11952452, upload-time = "2026-01-21T15:51:39.618Z" }, + { url = "https://files.pythonhosted.org/packages/cb/de/918621e46af55164c400ab0ef389c9d969ab85a43d59ad1207d4ddbe30a5/pandas-3.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:083b11415b9970b6e7888800c43c82e81a06cd6b06755d84804444f0007d6bb7", size = 9851081, upload-time = "2026-01-21T15:51:41.758Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/a1/3562a18dd0bd8c73344bfa26ff90c53c72f827df119d6d6b1dacc84d13e3/pandas-3.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:5db1e62cb99e739fa78a28047e861b256d17f88463c76b8dafc7c1338086dca8", size = 9174610, upload-time = "2026-01-21T15:51:44.312Z" }, + { url = "https://files.pythonhosted.org/packages/ce/26/430d91257eaf366f1737d7a1c158677caaf6267f338ec74e3a1ec444111c/pandas-3.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:697b8f7d346c68274b1b93a170a70974cdc7d7354429894d5927c1effdcccd73", size = 10761999, upload-time = "2026-01-21T15:51:46.899Z" }, + { url = "https://files.pythonhosted.org/packages/ec/1a/954eb47736c2b7f7fe6a9d56b0cb6987773c00faa3c6451a43db4beb3254/pandas-3.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8cb3120f0d9467ed95e77f67a75e030b67545bcfa08964e349252d674171def2", size = 10410279, upload-time = "2026-01-21T15:51:48.89Z" }, + { url = "https://files.pythonhosted.org/packages/20/fc/b96f3a5a28b250cd1b366eb0108df2501c0f38314a00847242abab71bb3a/pandas-3.0.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33fd3e6baa72899746b820c31e4b9688c8e1b7864d7aec2de7ab5035c285277a", size = 10330198, upload-time = "2026-01-21T15:51:51.015Z" }, + { url = "https://files.pythonhosted.org/packages/90/b3/d0e2952f103b4fbef1ef22d0c2e314e74fc9064b51cee30890b5e3286ee6/pandas-3.0.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8942e333dc67ceda1095227ad0febb05a3b36535e520154085db632c40ad084", size = 10728513, upload-time = "2026-01-21T15:51:53.387Z" }, + { url = "https://files.pythonhosted.org/packages/76/81/832894f286df828993dc5fd61c63b231b0fb73377e99f6c6c369174cf97e/pandas-3.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:783ac35c4d0fe0effdb0d67161859078618b1b6587a1af15928137525217a721", size = 11345550, upload-time = "2026-01-21T15:51:55.329Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/a0/ed160a00fb4f37d806406bc0a79a8b62fe67f29d00950f8d16203ff3409b/pandas-3.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:125eb901e233f155b268bbef9abd9afb5819db74f0e677e89a61b246228c71ac", size = 11799386, upload-time = "2026-01-21T15:51:57.457Z" }, + { url = "https://files.pythonhosted.org/packages/36/c8/2ac00d7255252c5e3cf61b35ca92ca25704b0188f7454ca4aec08a33cece/pandas-3.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b86d113b6c109df3ce0ad5abbc259fe86a1bd4adfd4a31a89da42f84f65509bb", size = 10873041, upload-time = "2026-01-21T15:52:00.034Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3f/a80ac00acbc6b35166b42850e98a4f466e2c0d9c64054161ba9620f95680/pandas-3.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1c39eab3ad38f2d7a249095f0a3d8f8c22cc0f847e98ccf5bbe732b272e2d9fa", size = 9441003, upload-time = "2026-01-21T15:52:02.281Z" }, +] + [[package]] name = "pandas-stubs" -version = "2.3.3.251219" +version = "2.3.3.260113" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "types-pytz" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/ee/5407e9e63d22a47774f9246ca80b24f82c36f26efd39f9e3c5b584b915aa/pandas_stubs-2.3.3.251219.tar.gz", hash = "sha256:dc2883e6daff49d380d1b5a2e864983ab9be8cd9a661fa861e3dea37559a5af4", size = 106899, upload-time = "2025-12-19T15:49:53.766Z" } +sdist = { url = "https://files.pythonhosted.org/packages/92/5d/be23854a73fda69f1dbdda7bc10fbd6f930bd1fa87aaec389f00c901c1e8/pandas_stubs-2.3.3.260113.tar.gz", hash = "sha256:076e3724bcaa73de78932b012ec64b3010463d377fa63116f4e6850643d93800", size = 116131, upload-time = "2026-01-13T22:30:16.704Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/64/20/69f2a39792a653fd64d916cd563ed79ec6e5dcfa6408c4674021d810afcf/pandas_stubs-2.3.3.251219-py3-none-any.whl", hash = "sha256:ccc6337febb51d6d8a08e4c96b479478a0da0ef704b5e08bd212423fe1cb549c", size = 163667, upload-time = "2025-12-19T15:49:52.072Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c6/df1fe324248424f77b89371116dab5243db7f052c32cc9fe7442ad9c5f75/pandas_stubs-2.3.3.260113-py3-none-any.whl", hash = "sha256:ec070b5c576e1badf12544ae50385872f0631fc35d99d00dc598c2954ec564d3", size = 168246, upload-time = "2026-01-13T22:30:15.244Z" }, ] [[package]] name = "parso" -version = "0.8.5" +version = "0.8.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" } +sdist = { url = "https://files.pythonhosted.org/packages/81/76/a1e769043c0c0c9fe391b702539d594731a4362334cdf4dc25d0c09761e7/parso-0.8.6.tar.gz", hash = "sha256:2b9a0332696df97d454fa67b81618fd69c35a7b90327cbe6ba5c92d2c68a7bfd", size = 401621, upload-time = "2026-02-09T15:45:24.425Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" }, + { url = "https://files.pythonhosted.org/packages/b6/61/fae042894f4296ec49e3f193aff5d7c18440da9e48102c3315e1bc4519a7/parso-0.8.6-py2.py3-none-any.whl", hash = "sha256:2c549f800b70a5c4952197248825584cb00f033b29c692671d3bf08bf380baff", size = 106894, upload-time = "2026-02-09T15:45:21.391Z" }, ] [[package]] @@ -5908,11 +6711,11 @@ wheels = [ [[package]] name = "pathspec" -version = "0.12.1" +version = 
"1.0.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, ] [[package]] @@ -5986,6 +6789,41 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/52/3b/ce7a01026a7cf46e5452afa86f97a5e88ca97f562cafa76570178ab56d8d/pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5", size = 2554661, upload-time = "2024-07-01T09:48:20.293Z" }, ] +[[package]] +name = "pin" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cmeel" }, + { name = "cmeel-boost" }, + { name = "cmeel-urdfdom" }, + { name = "coal" }, + { name = "libpinocchio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3b/99/4e7393e8035985405e89bc61dc0037f9bd1792c7a0295192aa3791bf4844/pin-3.8.0.tar.gz", hash = 
"sha256:f3889867d6fb968299696e94974138d6668600663b8650723a59fe062356fece", size = 4000900, upload-time = "2025-10-16T14:04:29.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/6b/0d280cc9753acb1bca1ffad8138f1c3939a797a336b9b058a051267b4aea/pin-3.8.0-0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:92046a8b0599d2396e0f5303f81f76ad306315d7a45cc44bb1ad8afacc59760c", size = 5634231, upload-time = "2025-10-16T14:03:49.108Z" }, + { url = "https://files.pythonhosted.org/packages/c1/df/b7c9cbb484a0c096e7b4beb22fed4c5bf77c5bb042fe22702ce9c3757bb7/pin-3.8.0-0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2565eebc9dd2f84181cddc66c356f2896a64162ed1eadc7d3a60e6a034d6a5ae", size = 5420549, upload-time = "2025-10-16T14:03:51.642Z" }, + { url = "https://files.pythonhosted.org/packages/c3/2e/1cb2fc19cd5ee830a9bc992956d9ef83a3dcee347edbb56d8c35d069b374/pin-3.8.0-0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4b2e0ae3f5b06538f78f84e385c9d5d2a8470828b108520a1cf0657f658521e8", size = 7242690, upload-time = "2025-10-16T14:03:53.369Z" }, + { url = "https://files.pythonhosted.org/packages/78/9a/8f93ca590dab6058283d0cc3ee776ba3a72f6d8662e3c7e3b6b9424faee0/pin-3.8.0-0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d9a48b99f8d3b085575f88944f1537a048fcd262da3efc52fed732b220e1422f", size = 7402696, upload-time = "2025-10-16T14:03:55.133Z" }, + { url = "https://files.pythonhosted.org/packages/59/36/921da84d53048ab2cc443da6d745e03494a447a5f41dfe65f8c948b26cfa/pin-3.8.0-0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d6ef1dc90aa2af1cbe616fd29671bfab60b860def2d7f4fc8fd9ffe5f95033a8", size = 5634235, upload-time = "2025-10-16T14:03:56.692Z" }, + { url = "https://files.pythonhosted.org/packages/d4/aa/a2dbe963f20ebc89ab8f1adc6ac4a6bbe8d82383f056edc478607b349021/pin-3.8.0-0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e5c34d3b5f1307d94a94ca86e6563b5cc3c0a92bfbe17d63f408ea6e98d5befe", size = 5420564, upload-time = 
"2025-10-16T14:03:58.027Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b1/5bc1f519b56f2c546e8035cf1dc42451d40d86d5d1f693c2786fbb57ae8a/pin-3.8.0-0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:80eba5dd6e8eb91211b98170e15c25eb2927eb6c3bd561b755b33185b1ce301e", size = 7241049, upload-time = "2025-10-16T14:03:59.978Z" }, + { url = "https://files.pythonhosted.org/packages/d1/35/14336eca99c7403e011fb3d6e20d51494ba8e1b03689f63ecea0e17f4beb/pin-3.8.0-0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:9b22136ddc544d13ee56e1fcf7ffc57b16f2e28ff5484b01241e16268e19afa4", size = 7402020, upload-time = "2025-10-16T14:04:01.512Z" }, + { url = "https://files.pythonhosted.org/packages/d5/1e/620cd7711ee033ada46f0efefc5b59587aed8ece33dbb5701954990f0a47/pin-3.8.0-0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:93fd4b3c11b450f448120a5f5891dd1f810612cb3624fa9aee9795f1efc95427", size = 5682834, upload-time = "2025-10-16T14:04:03.389Z" }, + { url = "https://files.pythonhosted.org/packages/74/7e/036ccc91f29e406ed102f4189508881f78d859d70d5ba0b553e35d72db3b/pin-3.8.0-0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fd074b97d8045cbadc6774983152bbae90347c42b8b478fe9b077f7261b2807d", size = 5451808, upload-time = "2025-10-16T14:04:05.046Z" }, + { url = "https://files.pythonhosted.org/packages/b7/67/85bf2cc80697a50e74fd2c58cc28038f557632c3ca6caef2779797dbfd6c/pin-3.8.0-0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:cfccb5d4e6a8b8a337a091762d6c09f1a6945fb6feb37968d076fd01c5631e6d", size = 7166942, upload-time = "2025-10-16T14:04:06.46Z" }, + { url = "https://files.pythonhosted.org/packages/38/a4/17aa94538ddd552767abacf29c271a7b29a4659c89a7eda140fea9507e39/pin-3.8.0-0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4d00f51d24464c61975073ee9dfbb02b0ac92c9393454c0c61086f919024f635", size = 7336970, upload-time = "2025-10-16T14:04:07.921Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/bb/adec2172e3bce5f42539a910f4c619ffad43fe206e40e21ad02093a08cb6/pin-3.8.0-0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:735e0d4db389048cf23ae5e38d2d6991393ad42b8f0b226bfb21b44ffd29a3b0", size = 5682835, upload-time = "2025-10-16T14:04:09.554Z" }, + { url = "https://files.pythonhosted.org/packages/b0/de/4a93ee6a684057507eedfefa0f0e63240cca25d9053836e5e01ff045a2e0/pin-3.8.0-0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3a5c403821f450298ec235362ff006daa7963d7c618a34c8693fd8660573961c", size = 5451811, upload-time = "2025-10-16T14:04:11.253Z" }, + { url = "https://files.pythonhosted.org/packages/92/ef/670dd481925f4805a22138993f6e8bd08a4c717939a60a2efb554b54a6a6/pin-3.8.0-0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:b2b2b07d14194ae0178f7ea2a1427599ea57b700ee2e30e6703594f9ad055831", size = 7166941, upload-time = "2025-10-16T14:04:13.751Z" }, + { url = "https://files.pythonhosted.org/packages/39/5e/96c3b0b4480b09f44582ad79c51d3bc644cefaf9961433ea396e8da29590/pin-3.8.0-0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:976970948a3c5bcdd2807239cf072e232c88e29d0db3a49ed7a73bf18b7c59e3", size = 7336973, upload-time = "2025-10-16T14:04:15.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/53/3828fe93db30851cc03ada6d6f6f2b93493940e6b43afcad247342c0d20e/pin-3.8.0-0-cp314-cp314-macosx_10_9_x86_64.whl", hash = "sha256:a7e7b277087f80bd16a3e03c6d6b8f7000bcb5cf58bc871085c3fd4db0384078", size = 5698064, upload-time = "2025-10-16T14:04:16.811Z" }, + { url = "https://files.pythonhosted.org/packages/eb/e4/477dfc034f337b94ad11cb3e48d9301abdf142b83568371c07abb27a3069/pin-3.8.0-0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f8261ac76c0a474e1bc40cfe04a67e24dfbc33f26cd84dae6c65cd35509f3127", size = 5467147, upload-time = "2025-10-16T14:04:18.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/eb/23030667d13743a532de3bbdfcf73213c1516ede1b41198fc836675963ab/pin-3.8.0-0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:ba8d2d1e9c8faa2d8ffbb58b0cb57f79fdb9e6750d139ec5030525e67a30fd47", size = 7193153, upload-time = "2025-10-16T14:04:20.095Z" }, + { url = "https://files.pythonhosted.org/packages/51/aa/3ed32e4204194ee171ce1259ba6c86eb28373ffb139465ba0bd3b5796191/pin-3.8.0-0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:07a3b2f3bacd9510fc9a6dc8aefba286f89ded2ebbf398d4a55671de32aa76d9", size = 7350015, upload-time = "2025-10-16T14:04:21.65Z" }, +] + [[package]] name = "piper-sdk" version = "0.6.1" @@ -6000,11 +6838,11 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.5.1" +version = "4.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } +sdist = { url = "https://files.pythonhosted.org/packages/41/9b/20c8288dc591129bf9dd7be2c91aec6ef23e450605c3403716bd6c74833e/platformdirs-4.8.0.tar.gz", hash = "sha256:c1d4a51ab04087041dd602707fbe7ee8b62b64e590f30e336e5c99c2d0c542d2", size = 27607, upload-time = "2026-02-14T01:52:03.451Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f0/227a7d1b8d80ae55c4b47f271c0870dd7a153aa65353bf71921265df2300/platformdirs-4.8.0-py3-none-any.whl", hash = "sha256:1c1328b4d2ea997bbcb904175a9bde14e824a3fa79f751ea3888d63d7d727557", size = 20647, upload-time = 
"2026-02-14T01:52:01.915Z" }, ] [[package]] @@ -6016,9 +6854,9 @@ dependencies = [ { name = "brax" }, { name = "etils" }, { name = "flax", version = "0.10.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "flax", version = "0.12.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "flax", version = "0.12.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "jax", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "jax", version = "0.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jax", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "lxml" }, { name = "mediapy" }, { name = "ml-collections" }, @@ -6044,15 +6882,15 @@ wheels = [ [[package]] name = "plotly" -version = "6.5.1" +version = "6.5.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "narwhals" }, { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d6/ff/a4938b75e95114451efdb34db6b41930253e67efc8dc737bd592ef2e419d/plotly-6.5.1.tar.gz", hash = "sha256:b0478c8d5ada0c8756bce15315bcbfec7d3ab8d24614e34af9aff7bfcfea9281", size = 7014606, upload-time = "2026-01-07T20:11:41.644Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/4f/8a10a9b9f5192cb6fdef62f1d77fa7d834190b2c50c0cd256bd62879212b/plotly-6.5.2.tar.gz", hash = "sha256:7478555be0198562d1435dee4c308268187553cc15516a2f4dd034453699e393", size = 7015695, upload-time = "2026-01-14T21:26:51.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/8e/24e0bb90b2d75af84820693260c5534e9ed351afdda67ed6f393a141a0e2/plotly-6.5.1-py3-none-any.whl", hash = 
"sha256:5adad4f58c360612b6c5ce11a308cdbc4fd38ceb1d40594a614f0062e227abe1", size = 9894981, upload-time = "2026-01-07T20:11:38.124Z" }, + { url = "https://files.pythonhosted.org/packages/8a/67/f95b5460f127840310d2187f916cf0023b5875c0717fdf893f71e1325e87/plotly-6.5.2-py3-none-any.whl", hash = "sha256:91757653bd9c550eeea2fa2404dba6b85d1e366d54804c340b2c874e5a7eb4a4", size = 9895973, upload-time = "2026-01-14T21:26:47.135Z" }, ] [[package]] @@ -6080,28 +6918,30 @@ wheels = [ [[package]] name = "polars" -version = "1.36.1" +version = "1.38.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "polars-runtime-32" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/dc/56f2a90c79a2cb13f9e956eab6385effe54216ae7a2068b3a6406bae4345/polars-1.36.1.tar.gz", hash = "sha256:12c7616a2305559144711ab73eaa18814f7aa898c522e7645014b68f1432d54c", size = 711993, upload-time = "2025-12-10T01:14:53.033Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/5e/208a24471a433bcd0e9a6889ac49025fd4daad2815c8220c5bd2576e5f1b/polars-1.38.1.tar.gz", hash = "sha256:803a2be5344ef880ad625addfb8f641995cfd777413b08a10de0897345778239", size = 717667, upload-time = "2026-02-06T18:13:23.013Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/c6/36a1b874036b49893ecae0ac44a2f63d1a76e6212631a5b2f50a86e0e8af/polars-1.36.1-py3-none-any.whl", hash = "sha256:853c1bbb237add6a5f6d133c15094a9b727d66dd6a4eb91dbb07cdb056b2b8ef", size = 802429, upload-time = "2025-12-10T01:13:53.838Z" }, + { url = "https://files.pythonhosted.org/packages/0a/49/737c1a6273c585719858261753da0b688454d1b634438ccba8a9c4eb5aab/polars-1.38.1-py3-none-any.whl", hash = "sha256:a29479c48fed4984d88b656486d221f638cba45d3e961631a50ee5fdde38cb2c", size = 810368, upload-time = "2026-02-06T18:11:55.819Z" }, ] [[package]] name = "polars-runtime-32" -version = "1.36.1" +version = "1.38.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/31/df/597c0ef5eb8d761a16d72327846599b57c5d40d7f9e74306fc154aba8c37/polars_runtime_32-1.36.1.tar.gz", hash = "sha256:201c2cfd80ceb5d5cd7b63085b5fd08d6ae6554f922bcb941035e39638528a09", size = 2788751, upload-time = "2025-12-10T01:14:54.172Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/4b/04d6b3fb7cf336fbe12fbc4b43f36d1783e11bb0f2b1e3980ec44878df06/polars_runtime_32-1.38.1.tar.gz", hash = "sha256:04f20ed1f5c58771f34296a27029dc755a9e4b1390caeaef8f317e06fdfce2ec", size = 2812631, upload-time = "2026-02-06T18:13:25.206Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/ea/871129a2d296966c0925b078a9a93c6c5e7facb1c5eebfcd3d5811aeddc1/polars_runtime_32-1.36.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:327b621ca82594f277751f7e23d4b939ebd1be18d54b4cdf7a2f8406cecc18b2", size = 43494311, upload-time = "2025-12-10T01:13:56.096Z" }, - { url = "https://files.pythonhosted.org/packages/d8/76/0038210ad1e526ce5bb2933b13760d6b986b3045eccc1338e661bd656f77/polars_runtime_32-1.36.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:ab0d1f23084afee2b97de8c37aa3e02ec3569749ae39571bd89e7a8b11ae9e83", size = 39300602, upload-time = "2025-12-10T01:13:59.366Z" }, - { url = "https://files.pythonhosted.org/packages/54/1e/2707bee75a780a953a77a2c59829ee90ef55708f02fc4add761c579bf76e/polars_runtime_32-1.36.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:899b9ad2e47ceb31eb157f27a09dbc2047efbf4969a923a6b1ba7f0412c3e64c", size = 44511780, upload-time = "2025-12-10T01:14:02.285Z" }, - { url = "https://files.pythonhosted.org/packages/11/b2/3fede95feee441be64b4bcb32444679a8fbb7a453a10251583053f6efe52/polars_runtime_32-1.36.1-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:d9d077bb9df711bc635a86540df48242bb91975b353e53ef261c6fae6cb0948f", size = 40688448, upload-time = "2025-12-10T01:14:05.131Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/0f/e629713a72999939b7b4bfdbf030a32794db588b04fdf3dc977dd8ea6c53/polars_runtime_32-1.36.1-cp39-abi3-win_amd64.whl", hash = "sha256:cc17101f28c9a169ff8b5b8d4977a3683cd403621841623825525f440b564cf0", size = 44464898, upload-time = "2025-12-10T01:14:08.296Z" }, - { url = "https://files.pythonhosted.org/packages/d1/d8/a12e6aa14f63784cead437083319ec7cece0d5bb9a5bfe7678cc6578b52a/polars_runtime_32-1.36.1-cp39-abi3-win_arm64.whl", hash = "sha256:809e73857be71250141225ddd5d2b30c97e6340aeaa0d445f930e01bef6888dc", size = 39798896, upload-time = "2025-12-10T01:14:11.568Z" }, + { url = "https://files.pythonhosted.org/packages/ae/a2/a00defbddadd8cf1042f52380dcba6b6592b03bac8e3b34c436b62d12d3b/polars_runtime_32-1.38.1-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:18154e96044724a0ac38ce155cf63aa03c02dd70500efbbf1a61b08cadd269ef", size = 44108001, upload-time = "2026-02-06T18:11:58.127Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/599ff3709e6a303024efd7edfd08cf8de55c6ac39527d8f41cbc4399385f/polars_runtime_32-1.38.1-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:c49acac34cc4049ed188f1eb67d6ff3971a39b4af7f7b734b367119970f313ac", size = 40230140, upload-time = "2026-02-06T18:12:01.181Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8c/3ac18d6f89dc05fe2c7c0ee1dc5b81f77a5c85ad59898232c2500fe2ebbf/polars_runtime_32-1.38.1-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fef2ef2626a954e010e006cc8e4de467ecf32d08008f130cea1c78911f545323", size = 41994039, upload-time = "2026-02-06T18:12:04.332Z" }, + { url = "https://files.pythonhosted.org/packages/f2/5a/61d60ec5cc0ab37cbd5a699edb2f9af2875b7fdfdfb2a4608ca3cc5f0448/polars_runtime_32-1.38.1-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a5f7a8125e2d50e2e060296551c929aec09be23a9edcb2b12ca923f555a5ba", size = 45755804, upload-time = "2026-02-06T18:12:07.846Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/54/02cd4074c98c361ccd3fec3bcb0bd68dbc639c0550c42a4436b0ff0f3ccf/polars_runtime_32-1.38.1-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:10d19cd9863e129273b18b7fcaab625b5c8143c2d22b3e549067b78efa32e4fa", size = 42159605, upload-time = "2026-02-06T18:12:10.919Z" }, + { url = "https://files.pythonhosted.org/packages/8e/f3/b2a5e720cc56eaa38b4518e63aa577b4bbd60e8b05a00fe43ca051be5879/polars_runtime_32-1.38.1-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61e8d73c614b46a00d2f853625a7569a2e4a0999333e876354ac81d1bf1bb5e2", size = 45336615, upload-time = "2026-02-06T18:12:14.074Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8d/ee2e4b7de948090cfb3df37d401c521233daf97bfc54ddec5d61d1d31618/polars_runtime_32-1.38.1-cp310-abi3-win_amd64.whl", hash = "sha256:08c2b3b93509c1141ac97891294ff5c5b0c548a373f583eaaea873a4bf506437", size = 45680732, upload-time = "2026-02-06T18:12:19.097Z" }, + { url = "https://files.pythonhosted.org/packages/bf/18/72c216f4ab0c82b907009668f79183ae029116ff0dd245d56ef58aac48e7/polars_runtime_32-1.38.1-cp310-abi3-win_arm64.whl", hash = "sha256:6d07d0cc832bfe4fb54b6e04218c2c27afcfa6b9498f9f6bbf262a00d58cc7c4", size = 41639413, upload-time = "2026-02-06T18:12:22.044Z" }, ] [[package]] @@ -6162,45 +7002,108 @@ wheels = [ [[package]] name = "protobuf" -version = "6.33.2" +version = "6.33.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296, upload-time = "2025-12-06T00:17:53.311Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = 
"2026-01-29T21:51:33.494Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/91/1e3a34881a88697a7354ffd177e8746e97a722e5e8db101544b47e84afb1/protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d", size = 425603, upload-time = "2025-12-06T00:17:41.114Z" }, - { url = "https://files.pythonhosted.org/packages/64/20/4d50191997e917ae13ad0a235c8b42d8c1ab9c3e6fd455ca16d416944355/protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4", size = 436930, upload-time = "2025-12-06T00:17:43.278Z" }, - { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621, upload-time = "2025-12-06T00:17:44.445Z" }, - { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460, upload-time = "2025-12-06T00:17:45.678Z" }, - { url = "https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size = 339168, upload-time = "2025-12-06T00:17:46.813Z" }, - { url = "https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270, upload-time = "2025-12-06T00:17:48.253Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501, upload-time = "2025-12-06T00:17:52.211Z" }, + { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, + { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, + { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = 
"sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, + { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, ] [[package]] name = "psutil" -version = "7.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/73/cb/09e5184fb5fc0358d110fc3ca7f6b1d033800734d34cac10f4136cfac10e/psutil-7.2.1.tar.gz", hash = "sha256:f7583aec590485b43ca601dd9cea0dcd65bd7bb21d30ef4ddbf4ea6b5ed1bdd3", size = 490253, upload-time = "2025-12-29T08:26:00.169Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/77/8e/f0c242053a368c2aa89584ecd1b054a18683f13d6e5a318fc9ec36582c94/psutil-7.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9f33bb525b14c3ea563b2fd521a84d2fa214ec59e3e6a2858f78d0844dd60d", size = 129624, upload-time = "2025-12-29T08:26:04.255Z" }, - { url = "https://files.pythonhosted.org/packages/26/97/a58a4968f8990617decee234258a2b4fc7cd9e35668387646c1963e69f26/psutil-7.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:81442dac7abfc2f4f4385ea9e12ddf5a796721c0f6133260687fec5c3780fa49", size = 130132, upload-time = "2025-12-29T08:26:06.228Z" }, - { url = "https://files.pythonhosted.org/packages/db/6d/ed44901e830739af5f72a85fa7ec5ff1edea7f81bfbf4875e409007149bd/psutil-7.2.1-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ea46c0d060491051d39f0d2cff4f98d5c72b288289f57a21556cc7d504db37fc", size = 180612, upload-time = "2025-12-29T08:26:08.276Z" }, - { url = 
"https://files.pythonhosted.org/packages/c7/65/b628f8459bca4efbfae50d4bf3feaab803de9a160b9d5f3bd9295a33f0c2/psutil-7.2.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35630d5af80d5d0d49cfc4d64c1c13838baf6717a13effb35869a5919b854cdf", size = 183201, upload-time = "2025-12-29T08:26:10.622Z" }, - { url = "https://files.pythonhosted.org/packages/fb/23/851cadc9764edcc18f0effe7d0bf69f727d4cf2442deb4a9f78d4e4f30f2/psutil-7.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:923f8653416604e356073e6e0bccbe7c09990acef442def2f5640dd0faa9689f", size = 139081, upload-time = "2025-12-29T08:26:12.483Z" }, - { url = "https://files.pythonhosted.org/packages/59/82/d63e8494ec5758029f31c6cb06d7d161175d8281e91d011a4a441c8a43b5/psutil-7.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cfbe6b40ca48019a51827f20d830887b3107a74a79b01ceb8cc8de4ccb17b672", size = 134767, upload-time = "2025-12-29T08:26:14.528Z" }, - { url = "https://files.pythonhosted.org/packages/05/c2/5fb764bd61e40e1fe756a44bd4c21827228394c17414ade348e28f83cd79/psutil-7.2.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:494c513ccc53225ae23eec7fe6e1482f1b8a44674241b54561f755a898650679", size = 129716, upload-time = "2025-12-29T08:26:16.017Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d2/935039c20e06f615d9ca6ca0ab756cf8408a19d298ffaa08666bc18dc805/psutil-7.2.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3fce5f92c22b00cdefd1645aa58ab4877a01679e901555067b1bd77039aa589f", size = 130133, upload-time = "2025-12-29T08:26:18.009Z" }, - { url = "https://files.pythonhosted.org/packages/77/69/19f1eb0e01d24c2b3eacbc2f78d3b5add8a89bf0bb69465bc8d563cc33de/psutil-7.2.1-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93f3f7b0bb07711b49626e7940d6fe52aa9940ad86e8f7e74842e73189712129", size = 181518, upload-time = "2025-12-29T08:26:20.241Z" }, - { url = 
"https://files.pythonhosted.org/packages/e1/6d/7e18b1b4fa13ad370787626c95887b027656ad4829c156bb6569d02f3262/psutil-7.2.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d34d2ca888208eea2b5c68186841336a7f5e0b990edec929be909353a202768a", size = 184348, upload-time = "2025-12-29T08:26:22.215Z" }, - { url = "https://files.pythonhosted.org/packages/98/60/1672114392dd879586d60dd97896325df47d9a130ac7401318005aab28ec/psutil-7.2.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2ceae842a78d1603753561132d5ad1b2f8a7979cb0c283f5b52fb4e6e14b1a79", size = 140400, upload-time = "2025-12-29T08:26:23.993Z" }, - { url = "https://files.pythonhosted.org/packages/fb/7b/d0e9d4513c46e46897b46bcfc410d51fc65735837ea57a25170f298326e6/psutil-7.2.1-cp314-cp314t-win_arm64.whl", hash = "sha256:08a2f175e48a898c8eb8eace45ce01777f4785bc744c90aa2cc7f2fa5462a266", size = 135430, upload-time = "2025-12-29T08:26:25.999Z" }, - { url = "https://files.pythonhosted.org/packages/c5/cf/5180eb8c8bdf6a503c6919f1da28328bd1e6b3b1b5b9d5b01ae64f019616/psutil-7.2.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b2e953fcfaedcfbc952b44744f22d16575d3aa78eb4f51ae74165b4e96e55f42", size = 128137, upload-time = "2025-12-29T08:26:27.759Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2c/78e4a789306a92ade5000da4f5de3255202c534acdadc3aac7b5458fadef/psutil-7.2.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:05cc68dbb8c174828624062e73078e7e35406f4ca2d0866c272c2410d8ef06d1", size = 128947, upload-time = "2025-12-29T08:26:29.548Z" }, - { url = "https://files.pythonhosted.org/packages/29/f8/40e01c350ad9a2b3cb4e6adbcc8a83b17ee50dd5792102b6142385937db5/psutil-7.2.1-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e38404ca2bb30ed7267a46c02f06ff842e92da3bb8c5bfdadbd35a5722314d8", size = 154694, upload-time = "2025-12-29T08:26:32.147Z" }, - { url = 
"https://files.pythonhosted.org/packages/06/e4/b751cdf839c011a9714a783f120e6a86b7494eb70044d7d81a25a5cd295f/psutil-7.2.1-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab2b98c9fc19f13f59628d94df5cc4cc4844bc572467d113a8b517d634e362c6", size = 156136, upload-time = "2025-12-29T08:26:34.079Z" }, - { url = "https://files.pythonhosted.org/packages/44/ad/bbf6595a8134ee1e94a4487af3f132cef7fce43aef4a93b49912a48c3af7/psutil-7.2.1-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f78baafb38436d5a128f837fab2d92c276dfb48af01a240b861ae02b2413ada8", size = 148108, upload-time = "2025-12-29T08:26:36.225Z" }, - { url = "https://files.pythonhosted.org/packages/1c/15/dd6fd869753ce82ff64dcbc18356093471a5a5adf4f77ed1f805d473d859/psutil-7.2.1-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:99a4cd17a5fdd1f3d014396502daa70b5ec21bf4ffe38393e152f8e449757d67", size = 147402, upload-time = "2025-12-29T08:26:39.21Z" }, - { url = "https://files.pythonhosted.org/packages/34/68/d9317542e3f2b180c4306e3f45d3c922d7e86d8ce39f941bb9e2e9d8599e/psutil-7.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:b1b0671619343aa71c20ff9767eced0483e4fc9e1f489d50923738caf6a03c17", size = 136938, upload-time = "2025-12-29T08:26:41.036Z" }, - { url = "https://files.pythonhosted.org/packages/3e/73/2ce007f4198c80fcf2cb24c169884f833fe93fbc03d55d302627b094ee91/psutil-7.2.1-cp37-abi3-win_arm64.whl", hash = "sha256:0d67c1822c355aa6f7314d92018fb4268a76668a536f133599b91edd48759442", size = 133836, upload-time = "2025-12-29T08:26:43.086Z" }, +version = "7.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/51/08/510cbdb69c25a96f4ae523f733cdc963ae654904e8db864c07585ef99875/psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b", size = 130595, upload-time = "2026-01-28T18:14:57.293Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f5/97baea3fe7a5a9af7436301f85490905379b1c6f2dd51fe3ecf24b4c5fbf/psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea", size = 131082, upload-time = "2026-01-28T18:14:59.732Z" }, + { url = "https://files.pythonhosted.org/packages/37/d6/246513fbf9fa174af531f28412297dd05241d97a75911ac8febefa1a53c6/psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63", size = 181476, upload-time = "2026-01-28T18:15:01.884Z" }, + { url = "https://files.pythonhosted.org/packages/b8/b5/9182c9af3836cca61696dabe4fd1304e17bc56cb62f17439e1154f225dd3/psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312", size = 184062, upload-time = "2026-01-28T18:15:04.436Z" }, + { url = "https://files.pythonhosted.org/packages/16/ba/0756dca669f5a9300d0cbcbfae9a4c30e446dfc7440ffe43ded5724bfd93/psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b", size = 139893, upload-time = "2026-01-28T18:15:06.378Z" }, + { url = "https://files.pythonhosted.org/packages/1c/61/8fa0e26f33623b49949346de05ec1ddaad02ed8ba64af45f40a147dbfa97/psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9", size = 135589, upload-time = "2026-01-28T18:15:08.03Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/69/ef179ab5ca24f32acc1dac0c247fd6a13b501fd5534dbae0e05a1c48b66d/psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00", size = 130664, upload-time = "2026-01-28T18:15:09.469Z" }, + { url = "https://files.pythonhosted.org/packages/7b/64/665248b557a236d3fa9efc378d60d95ef56dd0a490c2cd37dafc7660d4a9/psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9", size = 131087, upload-time = "2026-01-28T18:15:11.724Z" }, + { url = "https://files.pythonhosted.org/packages/d5/2e/e6782744700d6759ebce3043dcfa661fb61e2fb752b91cdeae9af12c2178/psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a", size = 182383, upload-time = "2026-01-28T18:15:13.445Z" }, + { url = "https://files.pythonhosted.org/packages/57/49/0a41cefd10cb7505cdc04dab3eacf24c0c2cb158a998b8c7b1d27ee2c1f5/psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf", size = 185210, upload-time = "2026-01-28T18:15:16.002Z" }, + { url = "https://files.pythonhosted.org/packages/dd/2c/ff9bfb544f283ba5f83ba725a3c5fec6d6b10b8f27ac1dc641c473dc390d/psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1", size = 141228, upload-time = "2026-01-28T18:15:18.385Z" }, + { url = "https://files.pythonhosted.org/packages/f2/fc/f8d9c31db14fcec13748d373e668bc3bed94d9077dbc17fb0eebc073233c/psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841", size = 136284, upload-time = "2026-01-28T18:15:19.912Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" }, + { url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, upload-time = "2026-01-28T18:15:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" }, + { url = "https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" }, + { url = "https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" }, + { url = "https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/f2/8e377d29c2ecf99f6062d35ea606b036e8800720eccfec5fe3dd672c2b24/psycopg2_binary-2.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6fe6b47d0b42ce1c9f1fa3e35bb365011ca22e39db37074458f27921dca40f2", size = 3756506, upload-time = "2025-10-10T11:10:30.144Z" }, + { url = "https://files.pythonhosted.org/packages/24/cc/dc143ea88e4ec9d386106cac05023b69668bd0be20794c613446eaefafe5/psycopg2_binary-2.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c0e4262e089516603a09474ee13eabf09cb65c332277e39af68f6233911087", size = 3863943, upload-time = "2025-10-10T11:10:34.586Z" }, + { url = "https://files.pythonhosted.org/packages/8c/df/16848771155e7c419c60afeb24950b8aaa3ab09c0a091ec3ccca26a574d0/psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c47676e5b485393f069b4d7a811267d3168ce46f988fa602658b8bb901e9e64d", size = 4410873, upload-time = "2025-10-10T11:10:38.951Z" }, + { 
url = "https://files.pythonhosted.org/packages/43/79/5ef5f32621abd5a541b89b04231fe959a9b327c874a1d41156041c75494b/psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a28d8c01a7b27a1e3265b11250ba7557e5f72b5ee9e5f3a2fa8d2949c29bf5d2", size = 4468016, upload-time = "2025-10-10T11:10:43.319Z" }, + { url = "https://files.pythonhosted.org/packages/f0/9b/d7542d0f7ad78f57385971f426704776d7b310f5219ed58da5d605b1892e/psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f3f2732cf504a1aa9e9609d02f79bea1067d99edf844ab92c247bbca143303b", size = 4164996, upload-time = "2025-10-10T11:10:46.705Z" }, + { url = "https://files.pythonhosted.org/packages/14/ed/e409388b537fa7414330687936917c522f6a77a13474e4238219fcfd9a84/psycopg2_binary-2.9.11-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:865f9945ed1b3950d968ec4690ce68c55019d79e4497366d36e090327ce7db14", size = 3981881, upload-time = "2025-10-30T02:54:57.182Z" }, + { url = "https://files.pythonhosted.org/packages/bf/30/50e330e63bb05efc6fa7c1447df3e08954894025ca3dcb396ecc6739bc26/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91537a8df2bde69b1c1db01d6d944c831ca793952e4f57892600e96cee95f2cd", size = 3650857, upload-time = "2025-10-10T11:10:50.112Z" }, + { url = "https://files.pythonhosted.org/packages/f0/e0/4026e4c12bb49dd028756c5b0bc4c572319f2d8f1c9008e0dad8cc9addd7/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4dca1f356a67ecb68c81a7bc7809f1569ad9e152ce7fd02c2f2036862ca9f66b", size = 3296063, upload-time = "2025-10-10T11:10:54.089Z" }, + { url = "https://files.pythonhosted.org/packages/2c/34/eb172be293c886fef5299fe5c3fcf180a05478be89856067881007934a7c/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0da4de5c1ac69d94ed4364b6cbe7190c1a70d325f112ba783d83f8440285f152", size = 3043464, upload-time = "2025-10-30T02:55:02.483Z" 
}, + { url = "https://files.pythonhosted.org/packages/18/1c/532c5d2cb11986372f14b798a95f2eaafe5779334f6a80589a68b5fcf769/psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37d8412565a7267f7d79e29ab66876e55cb5e8e7b3bbf94f8206f6795f8f7e7e", size = 3345378, upload-time = "2025-10-10T11:11:01.039Z" }, + { url = "https://files.pythonhosted.org/packages/70/e7/de420e1cf16f838e1fa17b1120e83afff374c7c0130d088dba6286fcf8ea/psycopg2_binary-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:c665f01ec8ab273a61c62beeb8cce3014c214429ced8a308ca1fc410ecac3a39", size = 2713904, upload-time = "2025-10-10T11:11:04.81Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" }, + { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, + { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = "2025-10-30T02:55:07.332Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, + { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" }, + { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" }, + { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/a8/a2709681b3ac11b0b1786def10006b8995125ba268c9a54bea6f5ae8bd3e/psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c", size = 3756572, upload-time = "2025-10-10T11:12:32.873Z" }, + { url = "https://files.pythonhosted.org/packages/62/e1/c2b38d256d0dafd32713e9f31982a5b028f4a3651f446be70785f484f472/psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee", size = 3864529, upload-time = "2025-10-10T11:12:36.791Z" }, + { url = "https://files.pythonhosted.org/packages/11/32/b2ffe8f3853c181e88f0a157c5fb4e383102238d73c52ac6d93a5c8bffe6/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0", size = 4411242, upload-time = "2025-10-10T11:12:42.388Z" }, + { url = "https://files.pythonhosted.org/packages/10/04/6ca7477e6160ae258dc96f67c371157776564679aefd247b66f4661501a2/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766", size = 4468258, upload-time = "2025-10-10T11:12:48.654Z" }, + { url = "https://files.pythonhosted.org/packages/3c/7e/6a1a38f86412df101435809f225d57c1a021307dd0689f7a5e7fe83588b1/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3", size = 4166295, upload-time = "2025-10-10T11:12:52.525Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7d/c07374c501b45f3579a9eb761cbf2604ddef3d96ad48679112c2c5aa9c25/psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f", size = 3983133, upload-time = 
"2025-10-30T02:55:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/82/56/993b7104cb8345ad7d4516538ccf8f0d0ac640b1ebd8c754a7b024e76878/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4", size = 3652383, upload-time = "2025-10-10T11:12:56.387Z" }, + { url = "https://files.pythonhosted.org/packages/2d/ac/eaeb6029362fd8d454a27374d84c6866c82c33bfc24587b4face5a8e43ef/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c", size = 3298168, upload-time = "2025-10-10T11:13:00.403Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/50c3facc66bded9ada5cbc0de867499a703dc6bca6be03070b4e3b65da6c/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60", size = 3044712, upload-time = "2025-10-30T02:55:27.975Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8e/b7de019a1f562f72ada81081a12823d3c1590bedc48d7d2559410a2763fe/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1", size = 3347549, upload-time = "2025-10-10T11:13:03.971Z" }, + { url = "https://files.pythonhosted.org/packages/80/2d/1bb683f64737bbb1f86c82b7359db1eb2be4e2c0c13b947f80efefa7d3e5/psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa", size = 2714215, upload-time = "2025-10-10T11:13:07.14Z" }, + { url = "https://files.pythonhosted.org/packages/64/12/93ef0098590cf51d9732b4f139533732565704f45bdc1ffa741b7c95fb54/psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1", size = 3756567, upload-time = "2025-10-10T11:13:11.885Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/a9/9d55c614a891288f15ca4b5209b09f0f01e3124056924e17b81b9fa054cc/psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f", size = 3864755, upload-time = "2025-10-10T11:13:17.727Z" }, + { url = "https://files.pythonhosted.org/packages/13/1e/98874ce72fd29cbde93209977b196a2edae03f8490d1bd8158e7f1daf3a0/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5", size = 4411646, upload-time = "2025-10-10T11:13:24.432Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bd/a335ce6645334fb8d758cc358810defca14a1d19ffbc8a10bd38a2328565/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8", size = 4468701, upload-time = "2025-10-10T11:13:29.266Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/c8b4f53f34e295e45709b7568bf9b9407a612ea30387d35eb9fa84f269b4/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c", size = 4166293, upload-time = "2025-10-10T11:13:33.336Z" }, + { url = "https://files.pythonhosted.org/packages/4b/e0/f8cc36eadd1b716ab36bb290618a3292e009867e5c97ce4aba908cb99644/psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f", size = 3983184, upload-time = "2025-10-30T02:55:32.483Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/2a8fe18a4e61cfb3417da67b6318e12691772c0696d79434184a511906dc/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747", size = 3652650, upload-time = 
"2025-10-10T11:13:38.181Z" }, + { url = "https://files.pythonhosted.org/packages/76/36/03801461b31b29fe58d228c24388f999fe814dfc302856e0d17f97d7c54d/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f", size = 3298663, upload-time = "2025-10-10T11:13:44.878Z" }, + { url = "https://files.pythonhosted.org/packages/97/77/21b0ea2e1a73aa5fa9222b2a6b8ba325c43c3a8d54272839c991f2345656/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b", size = 3044737, upload-time = "2025-10-30T02:55:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/67/69/f36abe5f118c1dca6d3726ceae164b9356985805480731ac6712a63f24f0/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d", size = 3347643, upload-time = "2025-10-10T11:13:53.499Z" }, + { url = "https://files.pythonhosted.org/packages/e1/36/9c0c326fe3a4227953dfb29f5d0c8ae3b8eb8c1cd2967aa569f50cb3c61f/psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316", size = 2803913, upload-time = "2025-10-10T11:13:57.058Z" }, ] [[package]] @@ -6231,81 +7134,75 @@ wheels = [ ] [[package]] -name = "pyarrow" -version = "22.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151, upload-time = "2025-10-24T12:30:00.762Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/9b/cb3f7e0a345353def531ca879053e9ef6b9f38ed91aebcf68b09ba54dec0/pyarrow-22.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = 
"sha256:77718810bd3066158db1e95a63c160ad7ce08c6b0710bc656055033e39cdad88", size = 34223968, upload-time = "2025-10-24T10:03:31.21Z" }, - { url = "https://files.pythonhosted.org/packages/6c/41/3184b8192a120306270c5307f105b70320fdaa592c99843c5ef78aaefdcf/pyarrow-22.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:44d2d26cda26d18f7af7db71453b7b783788322d756e81730acb98f24eb90ace", size = 35942085, upload-time = "2025-10-24T10:03:38.146Z" }, - { url = "https://files.pythonhosted.org/packages/d9/3d/a1eab2f6f08001f9fb714b8ed5cfb045e2fe3e3e3c0c221f2c9ed1e6d67d/pyarrow-22.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b9d71701ce97c95480fecb0039ec5bb889e75f110da72005743451339262f4ce", size = 44964613, upload-time = "2025-10-24T10:03:46.516Z" }, - { url = "https://files.pythonhosted.org/packages/46/46/a1d9c24baf21cfd9ce994ac820a24608decf2710521b29223d4334985127/pyarrow-22.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:710624ab925dc2b05a6229d47f6f0dac1c1155e6ed559be7109f684eba048a48", size = 47627059, upload-time = "2025-10-24T10:03:55.353Z" }, - { url = "https://files.pythonhosted.org/packages/3a/4c/f711acb13075c1391fd54bc17e078587672c575f8de2a6e62509af026dcf/pyarrow-22.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f963ba8c3b0199f9d6b794c90ec77545e05eadc83973897a4523c9e8d84e9340", size = 47947043, upload-time = "2025-10-24T10:04:05.408Z" }, - { url = "https://files.pythonhosted.org/packages/4e/70/1f3180dd7c2eab35c2aca2b29ace6c519f827dcd4cfeb8e0dca41612cf7a/pyarrow-22.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd0d42297ace400d8febe55f13fdf46e86754842b860c978dfec16f081e5c653", size = 50206505, upload-time = "2025-10-24T10:04:15.786Z" }, - { url = "https://files.pythonhosted.org/packages/80/07/fea6578112c8c60ffde55883a571e4c4c6bc7049f119d6b09333b5cc6f73/pyarrow-22.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:00626d9dc0f5ef3a75fe63fd68b9c7c8302d2b5bbc7f74ecaedba83447a24f84", size = 28101641, upload-time = 
"2025-10-24T10:04:22.57Z" }, - { url = "https://files.pythonhosted.org/packages/2e/b7/18f611a8cdc43417f9394a3ccd3eace2f32183c08b9eddc3d17681819f37/pyarrow-22.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:3e294c5eadfb93d78b0763e859a0c16d4051fc1c5231ae8956d61cb0b5666f5a", size = 34272022, upload-time = "2025-10-24T10:04:28.973Z" }, - { url = "https://files.pythonhosted.org/packages/26/5c/f259e2526c67eb4b9e511741b19870a02363a47a35edbebc55c3178db22d/pyarrow-22.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:69763ab2445f632d90b504a815a2a033f74332997052b721002298ed6de40f2e", size = 35995834, upload-time = "2025-10-24T10:04:35.467Z" }, - { url = "https://files.pythonhosted.org/packages/50/8d/281f0f9b9376d4b7f146913b26fac0aa2829cd1ee7e997f53a27411bbb92/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b41f37cabfe2463232684de44bad753d6be08a7a072f6a83447eeaf0e4d2a215", size = 45030348, upload-time = "2025-10-24T10:04:43.366Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e5/53c0a1c428f0976bf22f513d79c73000926cb00b9c138d8e02daf2102e18/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:35ad0f0378c9359b3f297299c3309778bb03b8612f987399a0333a560b43862d", size = 47699480, upload-time = "2025-10-24T10:04:51.486Z" }, - { url = "https://files.pythonhosted.org/packages/95/e1/9dbe4c465c3365959d183e6345d0a8d1dc5b02ca3f8db4760b3bc834cf25/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8382ad21458075c2e66a82a29d650f963ce51c7708c7c0ff313a8c206c4fd5e8", size = 48011148, upload-time = "2025-10-24T10:04:59.585Z" }, - { url = "https://files.pythonhosted.org/packages/c5/b4/7caf5d21930061444c3cf4fa7535c82faf5263e22ce43af7c2759ceb5b8b/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1a812a5b727bc09c3d7ea072c4eebf657c2f7066155506ba31ebf4792f88f016", size = 50276964, upload-time = "2025-10-24T10:05:08.175Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/f3/cec89bd99fa3abf826f14d4e53d3d11340ce6f6af4d14bdcd54cd83b6576/pyarrow-22.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ec5d40dd494882704fb876c16fa7261a69791e784ae34e6b5992e977bd2e238c", size = 28106517, upload-time = "2025-10-24T10:05:14.314Z" }, - { url = "https://files.pythonhosted.org/packages/af/63/ba23862d69652f85b615ca14ad14f3bcfc5bf1b99ef3f0cd04ff93fdad5a/pyarrow-22.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bea79263d55c24a32b0d79c00a1c58bb2ee5f0757ed95656b01c0fb310c5af3d", size = 34211578, upload-time = "2025-10-24T10:05:21.583Z" }, - { url = "https://files.pythonhosted.org/packages/b1/d0/f9ad86fe809efd2bcc8be32032fa72e8b0d112b01ae56a053006376c5930/pyarrow-22.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:12fe549c9b10ac98c91cf791d2945e878875d95508e1a5d14091a7aaa66d9cf8", size = 35989906, upload-time = "2025-10-24T10:05:29.485Z" }, - { url = "https://files.pythonhosted.org/packages/b4/a8/f910afcb14630e64d673f15904ec27dd31f1e009b77033c365c84e8c1e1d/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:334f900ff08ce0423407af97e6c26ad5d4e3b0763645559ece6fbf3747d6a8f5", size = 45021677, upload-time = "2025-10-24T10:05:38.274Z" }, - { url = "https://files.pythonhosted.org/packages/13/95/aec81f781c75cd10554dc17a25849c720d54feafb6f7847690478dcf5ef8/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c6c791b09c57ed76a18b03f2631753a4960eefbbca80f846da8baefc6491fcfe", size = 47726315, upload-time = "2025-10-24T10:05:47.314Z" }, - { url = "https://files.pythonhosted.org/packages/bb/d4/74ac9f7a54cfde12ee42734ea25d5a3c9a45db78f9def949307a92720d37/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c3200cb41cdbc65156e5f8c908d739b0dfed57e890329413da2748d1a2cd1a4e", size = 47990906, upload-time = "2025-10-24T10:05:58.254Z" }, - { url = 
"https://files.pythonhosted.org/packages/2e/71/fedf2499bf7a95062eafc989ace56572f3343432570e1c54e6599d5b88da/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ac93252226cf288753d8b46280f4edf3433bf9508b6977f8dd8526b521a1bbb9", size = 50306783, upload-time = "2025-10-24T10:06:08.08Z" }, - { url = "https://files.pythonhosted.org/packages/68/ed/b202abd5a5b78f519722f3d29063dda03c114711093c1995a33b8e2e0f4b/pyarrow-22.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:44729980b6c50a5f2bfcc2668d36c569ce17f8b17bccaf470c4313dcbbf13c9d", size = 27972883, upload-time = "2025-10-24T10:06:14.204Z" }, - { url = "https://files.pythonhosted.org/packages/a6/d6/d0fac16a2963002fc22c8fa75180a838737203d558f0ed3b564c4a54eef5/pyarrow-22.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e6e95176209257803a8b3d0394f21604e796dadb643d2f7ca21b66c9c0b30c9a", size = 34204629, upload-time = "2025-10-24T10:06:20.274Z" }, - { url = "https://files.pythonhosted.org/packages/c6/9c/1d6357347fbae062ad3f17082f9ebc29cc733321e892c0d2085f42a2212b/pyarrow-22.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:001ea83a58024818826a9e3f89bf9310a114f7e26dfe404a4c32686f97bd7901", size = 35985783, upload-time = "2025-10-24T10:06:27.301Z" }, - { url = "https://files.pythonhosted.org/packages/ff/c0/782344c2ce58afbea010150df07e3a2f5fdad299cd631697ae7bd3bac6e3/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ce20fe000754f477c8a9125543f1936ea5b8867c5406757c224d745ed033e691", size = 45020999, upload-time = "2025-10-24T10:06:35.387Z" }, - { url = "https://files.pythonhosted.org/packages/1b/8b/5362443737a5307a7b67c1017c42cd104213189b4970bf607e05faf9c525/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e0a15757fccb38c410947df156f9749ae4a3c89b2393741a50521f39a8cf202a", size = 47724601, upload-time = "2025-10-24T10:06:43.551Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/4d/76e567a4fc2e190ee6072967cb4672b7d9249ac59ae65af2d7e3047afa3b/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cedb9dd9358e4ea1d9bce3665ce0797f6adf97ff142c8e25b46ba9cdd508e9b6", size = 48001050, upload-time = "2025-10-24T10:06:52.284Z" }, - { url = "https://files.pythonhosted.org/packages/01/5e/5653f0535d2a1aef8223cee9d92944cb6bccfee5cf1cd3f462d7cb022790/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:252be4a05f9d9185bb8c18e83764ebcfea7185076c07a7a662253af3a8c07941", size = 50307877, upload-time = "2025-10-24T10:07:02.405Z" }, - { url = "https://files.pythonhosted.org/packages/2d/f8/1d0bd75bf9328a3b826e24a16e5517cd7f9fbf8d34a3184a4566ef5a7f29/pyarrow-22.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:a4893d31e5ef780b6edcaf63122df0f8d321088bb0dee4c8c06eccb1ca28d145", size = 27977099, upload-time = "2025-10-24T10:08:07.259Z" }, - { url = "https://files.pythonhosted.org/packages/90/81/db56870c997805bf2b0f6eeeb2d68458bf4654652dccdcf1bf7a42d80903/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:f7fe3dbe871294ba70d789be16b6e7e52b418311e166e0e3cba9522f0f437fb1", size = 34336685, upload-time = "2025-10-24T10:07:11.47Z" }, - { url = "https://files.pythonhosted.org/packages/1c/98/0727947f199aba8a120f47dfc229eeb05df15bcd7a6f1b669e9f882afc58/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ba95112d15fd4f1105fb2402c4eab9068f0554435e9b7085924bcfaac2cc306f", size = 36032158, upload-time = "2025-10-24T10:07:18.626Z" }, - { url = "https://files.pythonhosted.org/packages/96/b4/9babdef9c01720a0785945c7cf550e4acd0ebcd7bdd2e6f0aa7981fa85e2/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c064e28361c05d72eed8e744c9605cbd6d2bb7481a511c74071fd9b24bc65d7d", size = 44892060, upload-time = "2025-10-24T10:07:26.002Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/ca/2f8804edd6279f78a37062d813de3f16f29183874447ef6d1aadbb4efa0f/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6f9762274496c244d951c819348afbcf212714902742225f649cf02823a6a10f", size = 47504395, upload-time = "2025-10-24T10:07:34.09Z" }, - { url = "https://files.pythonhosted.org/packages/b9/f0/77aa5198fd3943682b2e4faaf179a674f0edea0d55d326d83cb2277d9363/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a9d9ffdc2ab696f6b15b4d1f7cec6658e1d788124418cb30030afbae31c64746", size = 48066216, upload-time = "2025-10-24T10:07:43.528Z" }, - { url = "https://files.pythonhosted.org/packages/79/87/a1937b6e78b2aff18b706d738c9e46ade5bfcf11b294e39c87706a0089ac/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ec1a15968a9d80da01e1d30349b2b0d7cc91e96588ee324ce1b5228175043e95", size = 50288552, upload-time = "2025-10-24T10:07:53.519Z" }, - { url = "https://files.pythonhosted.org/packages/60/ae/b5a5811e11f25788ccfdaa8f26b6791c9807119dffcf80514505527c384c/pyarrow-22.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bba208d9c7decf9961998edf5c65e3ea4355d5818dd6cd0f6809bec1afb951cc", size = 28262504, upload-time = "2025-10-24T10:08:00.932Z" }, - { url = "https://files.pythonhosted.org/packages/bd/b0/0fa4d28a8edb42b0a7144edd20befd04173ac79819547216f8a9f36f9e50/pyarrow-22.0.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:9bddc2cade6561f6820d4cd73f99a0243532ad506bc510a75a5a65a522b2d74d", size = 34224062, upload-time = "2025-10-24T10:08:14.101Z" }, - { url = "https://files.pythonhosted.org/packages/0f/a8/7a719076b3c1be0acef56a07220c586f25cd24de0e3f3102b438d18ae5df/pyarrow-22.0.0-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:e70ff90c64419709d38c8932ea9fe1cc98415c4f87ea8da81719e43f02534bc9", size = 35990057, upload-time = "2025-10-24T10:08:21.842Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/3c/359ed54c93b47fb6fe30ed16cdf50e3f0e8b9ccfb11b86218c3619ae50a8/pyarrow-22.0.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:92843c305330aa94a36e706c16209cd4df274693e777ca47112617db7d0ef3d7", size = 45068002, upload-time = "2025-10-24T10:08:29.034Z" }, - { url = "https://files.pythonhosted.org/packages/55/fc/4945896cc8638536ee787a3bd6ce7cec8ec9acf452d78ec39ab328efa0a1/pyarrow-22.0.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:6dda1ddac033d27421c20d7a7943eec60be44e0db4e079f33cc5af3b8280ccde", size = 47737765, upload-time = "2025-10-24T10:08:38.559Z" }, - { url = "https://files.pythonhosted.org/packages/cd/5e/7cb7edeb2abfaa1f79b5d5eb89432356155c8426f75d3753cbcb9592c0fd/pyarrow-22.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:84378110dd9a6c06323b41b56e129c504d157d1a983ce8f5443761eb5256bafc", size = 48048139, upload-time = "2025-10-24T10:08:46.784Z" }, - { url = "https://files.pythonhosted.org/packages/88/c6/546baa7c48185f5e9d6e59277c4b19f30f48c94d9dd938c2a80d4d6b067c/pyarrow-22.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:854794239111d2b88b40b6ef92aa478024d1e5074f364033e73e21e3f76b25e0", size = 50314244, upload-time = "2025-10-24T10:08:55.771Z" }, - { url = "https://files.pythonhosted.org/packages/3c/79/755ff2d145aafec8d347bf18f95e4e81c00127f06d080135dfc86aea417c/pyarrow-22.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:b883fe6fd85adad7932b3271c38ac289c65b7337c2c132e9569f9d3940620730", size = 28757501, upload-time = "2025-10-24T10:09:59.891Z" }, - { url = "https://files.pythonhosted.org/packages/0e/d2/237d75ac28ced3147912954e3c1a174df43a95f4f88e467809118a8165e0/pyarrow-22.0.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:7a820d8ae11facf32585507c11f04e3f38343c1e784c9b5a8b1da5c930547fe2", size = 34355506, upload-time = "2025-10-24T10:09:02.953Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/2c/733dfffe6d3069740f98e57ff81007809067d68626c5faef293434d11bd6/pyarrow-22.0.0-cp314-cp314t-macosx_12_0_x86_64.whl", hash = "sha256:c6ec3675d98915bf1ec8b3c7986422682f7232ea76cad276f4c8abd5b7319b70", size = 36047312, upload-time = "2025-10-24T10:09:10.334Z" }, - { url = "https://files.pythonhosted.org/packages/7c/2b/29d6e3782dc1f299727462c1543af357a0f2c1d3c160ce199950d9ca51eb/pyarrow-22.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3e739edd001b04f654b166204fc7a9de896cf6007eaff33409ee9e50ceaff754", size = 45081609, upload-time = "2025-10-24T10:09:18.61Z" }, - { url = "https://files.pythonhosted.org/packages/8d/42/aa9355ecc05997915af1b7b947a7f66c02dcaa927f3203b87871c114ba10/pyarrow-22.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7388ac685cab5b279a41dfe0a6ccd99e4dbf322edfb63e02fc0443bf24134e91", size = 47703663, upload-time = "2025-10-24T10:09:27.369Z" }, - { url = "https://files.pythonhosted.org/packages/ee/62/45abedde480168e83a1de005b7b7043fd553321c1e8c5a9a114425f64842/pyarrow-22.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f633074f36dbc33d5c05b5dc75371e5660f1dbf9c8b1d95669def05e5425989c", size = 48066543, upload-time = "2025-10-24T10:09:34.908Z" }, - { url = "https://files.pythonhosted.org/packages/84/e9/7878940a5b072e4f3bf998770acafeae13b267f9893af5f6d4ab3904b67e/pyarrow-22.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4c19236ae2402a8663a2c8f21f1870a03cc57f0bef7e4b6eb3238cc82944de80", size = 50288838, upload-time = "2025-10-24T10:09:44.394Z" }, - { url = "https://files.pythonhosted.org/packages/7b/03/f335d6c52b4a4761bcc83499789a1e2e16d9d201a58c327a9b5cc9a41bd9/pyarrow-22.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0c34fe18094686194f204a3b1787a27456897d8a2d62caf84b61e8dfbc0252ae", size = 29185594, upload-time = "2025-10-24T10:09:53.111Z" }, -] - -[[package]] -name = "pyasn1" -version = "0.6.1" +name = "py-spy" +version = "0.4.1" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/e2/ff811a367028b87e86714945bb9ecb5c1cc69114a8039a67b3a862cef921/py_spy-0.4.1.tar.gz", hash = "sha256:e53aa53daa2e47c2eef97dd2455b47bb3a7e7f962796a86cc3e7dbde8e6f4db4", size = 244726, upload-time = "2025-07-31T19:33:25.172Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, + { url = "https://files.pythonhosted.org/packages/14/e3/3a32500d845bdd94f6a2b4ed6244982f42ec2bc64602ea8fcfe900678ae7/py_spy-0.4.1-py2.py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:809094208c6256c8f4ccadd31e9a513fe2429253f48e20066879239ba12cd8cc", size = 3682508, upload-time = "2025-07-31T19:33:13.753Z" }, + { url = "https://files.pythonhosted.org/packages/4f/bf/e4d280e9e0bec71d39fc646654097027d4bbe8e04af18fb68e49afcff404/py_spy-0.4.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:1fb8bf71ab8df95a95cc387deed6552934c50feef2cf6456bc06692a5508fd0c", size = 1796395, upload-time = "2025-07-31T19:33:15.325Z" }, + { url = "https://files.pythonhosted.org/packages/df/79/9ed50bb0a9de63ed023aa2db8b6265b04a7760d98c61eb54def6a5fddb68/py_spy-0.4.1-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee776b9d512a011d1ad3907ed53ae32ce2f3d9ff3e1782236554e22103b5c084", size = 2034938, upload-time = "2025-07-31T19:33:17.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/a5/36862e3eea59f729dfb70ee6f9e14b051d8ddce1aa7e70e0b81d9fe18536/py_spy-0.4.1-py2.py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:532d3525538254d1859b49de1fbe9744df6b8865657c9f0e444bf36ce3f19226", size = 2658968, upload-time = "2025-07-31T19:33:18.916Z" }, + { url = "https://files.pythonhosted.org/packages/08/f8/9ea0b586b065a623f591e5e7961282ec944b5fbbdca33186c7c0296645b3/py_spy-0.4.1-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4972c21890b6814017e39ac233c22572c4a61fd874524ebc5ccab0f2237aee0a", size = 2147541, upload-time = "2025-07-31T19:33:20.565Z" }, + { url = "https://files.pythonhosted.org/packages/68/fb/bc7f639aed026bca6e7beb1e33f6951e16b7d315594e7635a4f7d21d63f4/py_spy-0.4.1-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6a80ec05eb8a6883863a367c6a4d4f2d57de68466f7956b6367d4edd5c61bb29", size = 2763338, upload-time = "2025-07-31T19:33:22.202Z" }, + { url = "https://files.pythonhosted.org/packages/e1/da/fcc9a9fcd4ca946ff402cff20348e838b051d69f50f5d1f5dca4cd3c5eb8/py_spy-0.4.1-py2.py3-none-win_amd64.whl", hash = "sha256:d92e522bd40e9bf7d87c204033ce5bb5c828fca45fa28d970f58d71128069fdc", size = 1818784, upload-time = "2025-07-31T19:33:23.802Z" }, ] [[package]] -name = "pyasn1-modules" -version = "0.4.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyasn1" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, 
upload-time = "2025-03-28T02:41:19.028Z" }, +name = "pyarrow" +version = "23.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/33/ffd9c3eb087fa41dd79c3cf20c4c0ae3cdb877c4f8e1107a446006344924/pyarrow-23.0.0.tar.gz", hash = "sha256:180e3150e7edfcd182d3d9afba72f7cf19839a497cc76555a8dce998a8f67615", size = 1167185, upload-time = "2026-01-18T16:19:42.218Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/2f/23e042a5aa99bcb15e794e14030e8d065e00827e846e53a66faec73c7cd6/pyarrow-23.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:cbdc2bf5947aa4d462adcf8453cf04aee2f7932653cb67a27acd96e5e8528a67", size = 34281861, upload-time = "2026-01-18T16:13:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/8b/65/1651933f504b335ec9cd8f99463718421eb08d883ed84f0abd2835a16cad/pyarrow-23.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:4d38c836930ce15cd31dce20114b21ba082da231c884bdc0a7b53e1477fe7f07", size = 35825067, upload-time = "2026-01-18T16:13:42.549Z" }, + { url = "https://files.pythonhosted.org/packages/84/ec/d6fceaec050c893f4e35c0556b77d4cc9973fcc24b0a358a5781b1234582/pyarrow-23.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4222ff8f76919ecf6c716175a0e5fddb5599faeed4c56d9ea41a2c42be4998b2", size = 44458539, upload-time = "2026-01-18T16:13:52.975Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d9/369f134d652b21db62fe3ec1c5c2357e695f79eb67394b8a93f3a2b2cffa/pyarrow-23.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:87f06159cbe38125852657716889296c83c37b4d09a5e58f3d10245fd1f69795", size = 47535889, upload-time = "2026-01-18T16:14:03.693Z" }, + { url = "https://files.pythonhosted.org/packages/a3/95/f37b6a252fdbf247a67a78fb3f61a529fe0600e304c4d07741763d3522b1/pyarrow-23.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1675c374570d8b91ea6d4edd4608fa55951acd44e0c31bd146e091b4005de24f", size = 48157777, upload-time = 
"2026-01-18T16:14:12.483Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ab/fb94923108c9c6415dab677cf1f066d3307798eafc03f9a65ab4abc61056/pyarrow-23.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:247374428fde4f668f138b04031a7e7077ba5fa0b5b1722fdf89a017bf0b7ee0", size = 50580441, upload-time = "2026-01-18T16:14:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/ae/78/897ba6337b517fc8e914891e1bd918da1c4eb8e936a553e95862e67b80f6/pyarrow-23.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:de53b1bd3b88a2ee93c9af412c903e57e738c083be4f6392288294513cd8b2c1", size = 27530028, upload-time = "2026-01-18T16:14:27.353Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c0/57fe251102ca834fee0ef69a84ad33cc0ff9d5dfc50f50b466846356ecd7/pyarrow-23.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5574d541923efcbfdf1294a2746ae3b8c2498a2dc6cd477882f6f4e7b1ac08d3", size = 34276762, upload-time = "2026-01-18T16:14:34.128Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4e/24130286548a5bc250cbed0b6bbf289a2775378a6e0e6f086ae8c68fc098/pyarrow-23.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:2ef0075c2488932e9d3c2eb3482f9459c4be629aa673b725d5e3cf18f777f8e4", size = 35821420, upload-time = "2026-01-18T16:14:40.699Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/a869e8529d487aa2e842d6c8865eb1e2c9ec33ce2786eb91104d2c3e3f10/pyarrow-23.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:65666fc269669af1ef1c14478c52222a2aa5c907f28b68fb50a203c777e4f60c", size = 44457412, upload-time = "2026-01-18T16:14:49.051Z" }, + { url = "https://files.pythonhosted.org/packages/36/81/1de4f0edfa9a483bbdf0082a05790bd6a20ed2169ea12a65039753be3a01/pyarrow-23.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:4d85cb6177198f3812db4788e394b757223f60d9a9f5ad6634b3e32be1525803", size = 47534285, upload-time = "2026-01-18T16:14:56.748Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/04/464a052d673b5ece074518f27377861662449f3c1fdb39ce740d646fd098/pyarrow-23.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1a9ff6fa4141c24a03a1a434c63c8fa97ce70f8f36bccabc18ebba905ddf0f17", size = 48157913, upload-time = "2026-01-18T16:15:05.114Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1b/32a4de9856ee6688c670ca2def588382e573cce45241a965af04c2f61687/pyarrow-23.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:84839d060a54ae734eb60a756aeacb62885244aaa282f3c968f5972ecc7b1ecc", size = 50582529, upload-time = "2026-01-18T16:15:12.846Z" }, + { url = "https://files.pythonhosted.org/packages/db/c7/d6581f03e9b9e44ea60b52d1750ee1a7678c484c06f939f45365a45f7eef/pyarrow-23.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a149a647dbfe928ce8830a713612aa0b16e22c64feac9d1761529778e4d4eaa5", size = 27542646, upload-time = "2026-01-18T16:15:18.89Z" }, + { url = "https://files.pythonhosted.org/packages/3d/bd/c861d020831ee57609b73ea721a617985ece817684dc82415b0bc3e03ac3/pyarrow-23.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:5961a9f646c232697c24f54d3419e69b4261ba8a8b66b0ac54a1851faffcbab8", size = 34189116, upload-time = "2026-01-18T16:15:28.054Z" }, + { url = "https://files.pythonhosted.org/packages/8c/23/7725ad6cdcbaf6346221391e7b3eecd113684c805b0a95f32014e6fa0736/pyarrow-23.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:632b3e7c3d232f41d64e1a4a043fb82d44f8a349f339a1188c6a0dd9d2d47d8a", size = 35803831, upload-time = "2026-01-18T16:15:33.798Z" }, + { url = "https://files.pythonhosted.org/packages/57/06/684a421543455cdc2944d6a0c2cc3425b028a4c6b90e34b35580c4899743/pyarrow-23.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:76242c846db1411f1d6c2cc3823be6b86b40567ee24493344f8226ba34a81333", size = 44436452, upload-time = "2026-01-18T16:15:41.598Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/6f/8f9eb40c2328d66e8b097777ddcf38494115ff9f1b5bc9754ba46991191e/pyarrow-23.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b73519f8b52ae28127000986bf228fda781e81d3095cd2d3ece76eb5cf760e1b", size = 47557396, upload-time = "2026-01-18T16:15:51.252Z" }, + { url = "https://files.pythonhosted.org/packages/10/6e/f08075f1472e5159553501fde2cc7bc6700944bdabe49a03f8a035ee6ccd/pyarrow-23.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:068701f6823449b1b6469120f399a1239766b117d211c5d2519d4ed5861f75de", size = 48147129, upload-time = "2026-01-18T16:16:00.299Z" }, + { url = "https://files.pythonhosted.org/packages/7d/82/d5a680cd507deed62d141cc7f07f7944a6766fc51019f7f118e4d8ad0fb8/pyarrow-23.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1801ba947015d10e23bca9dd6ef5d0e9064a81569a89b6e9a63b59224fd060df", size = 50596642, upload-time = "2026-01-18T16:16:08.502Z" }, + { url = "https://files.pythonhosted.org/packages/a9/26/4f29c61b3dce9fa7780303b86895ec6a0917c9af927101daaaf118fbe462/pyarrow-23.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:52265266201ec25b6839bf6bd4ea918ca6d50f31d13e1cf200b4261cd11dc25c", size = 27660628, upload-time = "2026-01-18T16:16:15.28Z" }, + { url = "https://files.pythonhosted.org/packages/66/34/564db447d083ec7ff93e0a883a597d2f214e552823bfc178a2d0b1f2c257/pyarrow-23.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:ad96a597547af7827342ffb3c503c8316e5043bb09b47a84885ce39394c96e00", size = 34184630, upload-time = "2026-01-18T16:16:22.141Z" }, + { url = "https://files.pythonhosted.org/packages/aa/3a/3999daebcb5e6119690c92a621c4d78eef2ffba7a0a1b56386d2875fcd77/pyarrow-23.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:b9edf990df77c2901e79608f08c13fbde60202334a4fcadb15c1f57bf7afee43", size = 35796820, upload-time = "2026-01-18T16:16:29.441Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/ee/39195233056c6a8d0976d7d1ac1cd4fe21fb0ec534eca76bc23ef3f60e11/pyarrow-23.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:36d1b5bc6ddcaff0083ceec7e2561ed61a51f49cce8be079ee8ed406acb6fdef", size = 44438735, upload-time = "2026-01-18T16:16:38.79Z" }, + { url = "https://files.pythonhosted.org/packages/2c/41/6a7328ee493527e7afc0c88d105ecca69a3580e29f2faaeac29308369fd7/pyarrow-23.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4292b889cd224f403304ddda8b63a36e60f92911f89927ec8d98021845ea21be", size = 47557263, upload-time = "2026-01-18T16:16:46.248Z" }, + { url = "https://files.pythonhosted.org/packages/c6/ee/34e95b21ee84db494eae60083ddb4383477b31fb1fd19fd866d794881696/pyarrow-23.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dfd9e133e60eaa847fd80530a1b89a052f09f695d0b9c34c235ea6b2e0924cf7", size = 48153529, upload-time = "2026-01-18T16:16:53.412Z" }, + { url = "https://files.pythonhosted.org/packages/52/88/8a8d83cea30f4563efa1b7bf51d241331ee5cd1b185a7e063f5634eca415/pyarrow-23.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832141cc09fac6aab1cd3719951d23301396968de87080c57c9a7634e0ecd068", size = 50598851, upload-time = "2026-01-18T16:17:01.133Z" }, + { url = "https://files.pythonhosted.org/packages/c6/4c/2929c4be88723ba025e7b3453047dc67e491c9422965c141d24bab6b5962/pyarrow-23.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:7a7d067c9a88faca655c71bcc30ee2782038d59c802d57950826a07f60d83c4c", size = 27577747, upload-time = "2026-01-18T16:18:02.413Z" }, + { url = "https://files.pythonhosted.org/packages/64/52/564a61b0b82d72bd68ec3aef1adda1e3eba776f89134b9ebcb5af4b13cb6/pyarrow-23.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:ce9486e0535a843cf85d990e2ec5820a47918235183a5c7b8b97ed7e92c2d47d", size = 34446038, upload-time = "2026-01-18T16:17:07.861Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/c9/232d4f9855fd1de0067c8a7808a363230d223c83aeee75e0fe6eab851ba9/pyarrow-23.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:075c29aeaa685fd1182992a9ed2499c66f084ee54eea47da3eb76e125e06064c", size = 35921142, upload-time = "2026-01-18T16:17:15.401Z" }, + { url = "https://files.pythonhosted.org/packages/96/f2/60af606a3748367b906bb82d41f0032e059f075444445d47e32a7ff1df62/pyarrow-23.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:799965a5379589510d888be3094c2296efd186a17ca1cef5b77703d4d5121f53", size = 44490374, upload-time = "2026-01-18T16:17:23.93Z" }, + { url = "https://files.pythonhosted.org/packages/ff/2d/7731543050a678ea3a413955a2d5d80d2a642f270aa57a3cb7d5a86e3f46/pyarrow-23.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ef7cac8fe6fccd8b9e7617bfac785b0371a7fe26af59463074e4882747145d40", size = 47527896, upload-time = "2026-01-18T16:17:33.393Z" }, + { url = "https://files.pythonhosted.org/packages/5a/90/f3342553b7ac9879413aed46500f1637296f3c8222107523a43a1c08b42a/pyarrow-23.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15a414f710dc927132dd67c361f78c194447479555af57317066ee5116b90e9e", size = 48210401, upload-time = "2026-01-18T16:17:42.012Z" }, + { url = "https://files.pythonhosted.org/packages/f3/da/9862ade205ecc46c172b6ce5038a74b5151c7401e36255f15975a45878b2/pyarrow-23.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e0d2e6915eca7d786be6a77bf227fbc06d825a75b5b5fe9bcbef121dec32685", size = 50579677, upload-time = "2026-01-18T16:17:50.241Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4c/f11f371f5d4740a5dafc2e11c76bcf42d03dfdb2d68696da97de420b6963/pyarrow-23.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4b317ea6e800b5704e5e5929acb6e2dc13e9276b708ea97a39eb8b345aa2658b", size = 27631889, upload-time = "2026-01-18T16:17:56.55Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/bb/15aec78bcf43a0c004067bd33eb5352836a29a49db8581fc56f2b6ca88b7/pyarrow-23.0.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:20b187ed9550d233a872074159f765f52f9d92973191cd4b93f293a19efbe377", size = 34213265, upload-time = "2026-01-18T16:18:07.904Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/deb2c594bbba41c37c5d9aa82f510376998352aa69dfcb886cb4b18ad80f/pyarrow-23.0.0-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:18ec84e839b493c3886b9b5e06861962ab4adfaeb79b81c76afbd8d84c7d5fda", size = 35819211, upload-time = "2026-01-18T16:18:13.94Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e5/ee82af693cb7b5b2b74f6524cdfede0e6ace779d7720ebca24d68b57c36b/pyarrow-23.0.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:e438dd3f33894e34fd02b26bd12a32d30d006f5852315f611aa4add6c7fab4bc", size = 44502313, upload-time = "2026-01-18T16:18:20.367Z" }, + { url = "https://files.pythonhosted.org/packages/9c/86/95c61ad82236495f3c31987e85135926ba3ec7f3819296b70a68d8066b49/pyarrow-23.0.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:a244279f240c81f135631be91146d7fa0e9e840e1dfed2aba8483eba25cd98e6", size = 47585886, upload-time = "2026-01-18T16:18:27.544Z" }, + { url = "https://files.pythonhosted.org/packages/bb/6e/a72d901f305201802f016d015de1e05def7706fff68a1dedefef5dc7eff7/pyarrow-23.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c4692e83e42438dba512a570c6eaa42be2f8b6c0f492aea27dec54bdc495103a", size = 48207055, upload-time = "2026-01-18T16:18:35.425Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/5de029c537630ca18828db45c30e2a78da03675a70ac6c3528203c416fe3/pyarrow-23.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ae7f30f898dfe44ea69654a35c93e8da4cef6606dc4c72394068fd95f8e9f54a", size = 50619812, upload-time = "2026-01-18T16:18:43.553Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/8d/2af846cd2412e67a087f5bda4a8e23dfd4ebd570f777db2e8686615dafc1/pyarrow-23.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:5b86bb649e4112fb0614294b7d0a175c7513738876b89655605ebb87c804f861", size = 28263851, upload-time = "2026-01-18T16:19:38.567Z" }, + { url = "https://files.pythonhosted.org/packages/7b/7f/caab863e587041156f6786c52e64151b7386742c8c27140f637176e9230e/pyarrow-23.0.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:ebc017d765d71d80a3f8584ca0566b53e40464586585ac64176115baa0ada7d3", size = 34463240, upload-time = "2026-01-18T16:18:49.755Z" }, + { url = "https://files.pythonhosted.org/packages/c9/fa/3a5b8c86c958e83622b40865e11af0857c48ec763c11d472c87cd518283d/pyarrow-23.0.0-cp314-cp314t-macosx_12_0_x86_64.whl", hash = "sha256:0800cc58a6d17d159df823f87ad66cefebf105b982493d4bad03ee7fab84b993", size = 35935712, upload-time = "2026-01-18T16:18:55.626Z" }, + { url = "https://files.pythonhosted.org/packages/c5/08/17a62078fc1a53decb34a9aa79cf9009efc74d63d2422e5ade9fed2f99e3/pyarrow-23.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3a7c68c722da9bb5b0f8c10e3eae71d9825a4b429b40b32709df5d1fa55beb3d", size = 44503523, upload-time = "2026-01-18T16:19:03.958Z" }, + { url = "https://files.pythonhosted.org/packages/cc/70/84d45c74341e798aae0323d33b7c39194e23b1abc439ceaf60a68a7a969a/pyarrow-23.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:bd5556c24622df90551063ea41f559b714aa63ca953db884cfb958559087a14e", size = 47542490, upload-time = "2026-01-18T16:19:11.208Z" }, + { url = "https://files.pythonhosted.org/packages/61/d9/d1274b0e6f19e235de17441e53224f4716574b2ca837022d55702f24d71d/pyarrow-23.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54810f6e6afc4ffee7c2e0051b61722fbea9a4961b46192dcfae8ea12fa09059", size = 48233605, upload-time = "2026-01-18T16:19:19.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/07/e4e2d568cb57543d84482f61e510732820cddb0f47c4bb7df629abfed852/pyarrow-23.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:14de7d48052cf4b0ed174533eafa3cfe0711b8076ad70bede32cf59f744f0d7c", size = 50603979, upload-time = "2026-01-18T16:19:26.717Z" }, + { url = "https://files.pythonhosted.org/packages/72/9c/47693463894b610f8439b2e970b82ef81e9599c757bf2049365e40ff963c/pyarrow-23.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:427deac1f535830a744a4f04a6ac183a64fcac4341b3f618e693c41b7b98d2b0", size = 28338905, upload-time = "2026-01-18T16:19:32.93Z" }, ] [[package]] @@ -6497,13 +7394,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/2b/e18ee7c5ee508a82897f021c1981533eca2940b5f072fc6ed0906c03a7a7/pybase64-1.4.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:debf737e09b8bf832ba86f5ecc3d3dbd0e3021d6cd86ba4abe962d6a5a77adb3", size = 36134, upload-time = "2025-12-06T13:26:47.35Z" }, ] +[[package]] +name = "pycodestyle" +version = "2.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/aa/210b2c9aedd8c1cbeea31a50e42050ad56187754b34eb214c46709445801/pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521", size = 39232, upload-time = "2024-08-04T20:26:54.576Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/d8/a211b3f85e99a0daa2ddec96c949cac6824bd305b040571b82a03dd62636/pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3", size = 31284, upload-time = "2024-08-04T20:26:53.173Z" }, +] + [[package]] name = "pycparser" -version = "2.23" +version = "3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = 
"sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, ] [[package]] @@ -6688,6 +7594,30 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, ] +[[package]] +name = "pydocstyle" +version = "6.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "snowballstemmer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/d5385ca59fd065e3c6a5fe19f9bc9d5ea7f2509fa8c9c22fb6b2031dd953/pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1", size = 36796, upload-time = "2023-01-17T20:29:19.838Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/ea/99ddefac41971acad68f14114f38261c1f27dac0b3ec529824ebc739bdaa/pydocstyle-6.3.0-py3-none-any.whl", hash = 
"sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019", size = 38038, upload-time = "2023-01-17T20:29:18.094Z" }, +] + +[[package]] +name = "pydot" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/35/b17cb89ff865484c6a20ef46bf9d95a5f07328292578de0b295f4a6beec2/pydot-4.0.1.tar.gz", hash = "sha256:c2148f681c4a33e08bf0e26a9e5f8e4099a82e0e2a068098f32ce86577364ad5", size = 162594, upload-time = "2025-06-17T20:09:56.454Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/32/a7125fb28c4261a627f999d5fb4afff25b523800faed2c30979949d6facd/pydot-4.0.1-py3-none-any.whl", hash = "sha256:869c0efadd2708c0be1f916eb669f3d664ca684bc57ffb7ecc08e70d5e93fee6", size = 37087, upload-time = "2025-06-17T20:09:55.25Z" }, +] + [[package]] name = "pydub" version = "0.25.1" @@ -6709,6 +7639,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9b/4d/b9add7c84060d4c1906abe9a7e5359f2a60f7a9a4f67268b2766673427d8/pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498", size = 15730, upload-time = "2025-03-17T18:53:14.532Z" }, ] +[[package]] +name = "pyflakes" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/f9/669d8c9c86613c9d568757c7f5824bd3197d7b1c6c27553bc5618a27cce2/pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f", size = 63788, upload-time = "2024-01-05T00:28:47.703Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/d7/f1b7db88d8e4417c5d47adad627a93547f44bdc9028372dbd2313f34a855/pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a", size = 62725, upload-time = 
"2024-01-05T00:28:45.903Z" }, +] + [[package]] name = "pygame" version = "2.6.1" @@ -6745,15 +7684,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7e/11/17f7f319ca91824b86557e9303e3b7a71991ef17fd45286bf47d7f0a38e6/pygame-2.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:813af4fba5d0b2cb8e58f5d95f7910295c34067dcc290d34f1be59c48bd1ea6a", size = 10620084, upload-time = "2024-09-29T11:48:51.587Z" }, ] -[[package]] -name = "pyglet" -version = "2.1.12" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/07/6c/4bf476a1522d8293565f801ef305f2932148950b552df866a771c884ddaf/pyglet-2.1.12.tar.gz", hash = "sha256:bd7a750b2a5beaf0d2dd4bf4052d96e711ecd00ad29dada889b1f8374285b5f6", size = 6594600, upload-time = "2026-01-07T11:45:23.453Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/73/eb/872c18852bc1b9f39e7a14e992ebdc0bb6535227b2828a2bb737f6aa81b3/pyglet-2.1.12-py3-none-any.whl", hash = "sha256:875052fcfe1fbdd32272b0f57c4b3da908e727da7c98cf29485f10672607d327", size = 1032686, upload-time = "2026-01-07T11:45:18.585Z" }, -] - [[package]] name = "pygments" version = "2.19.2" @@ -6765,11 +7695,11 @@ wheels = [ [[package]] name = "pyjwt" -version = "2.10.1" +version = "2.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5a/b46fa56bf322901eee5b0454a34343cdbdae202cd421775a8ee4e42fd519/pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623", size = 98019, upload-time = "2026-01-30T19:59:55.694Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, + { url = "https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469", size = 28224, upload-time = "2026-01-30T19:59:54.539Z" }, ] [package.optional-dependencies] @@ -6799,6 +7729,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8e/ec/6e02b2561d056ea5b33046e3cad21238e6a9097b97d6ccc0fbe52b50c858/pylibsrtp-1.0.0-cp310-abi3-win_arm64.whl", hash = "sha256:2696bdb2180d53ac55d0eb7b58048a2aa30cd4836dd2ca683669889137a94d2a", size = 1159246, upload-time = "2025-10-13T16:12:30.285Z" }, ] +[[package]] +name = "pylint" +version = "4.0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "astroid" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "dill" }, + { name = "isort" }, + { name = "mccabe" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tomlkit" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/d2/b081da1a8930d00e3fc06352a1d449aaf815d4982319fab5d8cdb2e9ab35/pylint-4.0.4.tar.gz", hash = "sha256:d9b71674e19b1c36d79265b5887bf8e55278cbe236c9e95d22dc82cf044fdbd2", size = 1571735, upload-time = "2025-11-30T13:29:04.315Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/92/d40f5d937517cc489ad848fc4414ecccc7592e4686b9071e09e64f5e378e/pylint-4.0.4-py3-none-any.whl", hash = "sha256:63e06a37d5922555ee2c20963eb42559918c20bd2b21244e4ef426e7c43b92e0", size = 536425, upload-time = "2025-11-30T13:29:02.53Z" }, +] + [[package]] name = "pymavlink" version = "2.4.49" @@ -6861,9 +7810,12 @@ 
wheels = [ [[package]] name = "pyopengl" -version = "3.1.0" +version = "3.1.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9c/1d/4544708aaa89f26c97cc09450bb333a23724a320923e74d73e028b3560f9/PyOpenGL-3.1.0.tar.gz", hash = "sha256:9b47c5c3a094fa518ca88aeed35ae75834d53e4285512c61879f67a48c94ddaf", size = 1172688, upload-time = "2014-06-26T14:51:25.571Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/16/912b7225d56284859cd9a672827f18be43f8012f8b7b932bc4bd959a298e/pyopengl-3.1.10.tar.gz", hash = "sha256:c4a02d6866b54eb119c8e9b3fb04fa835a95ab802dd96607ab4cdb0012df8335", size = 1915580, upload-time = "2025-08-18T02:33:01.76Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/e4/1ba6f44e491c4eece978685230dde56b14d51a0365bc1b774ddaa94d14cd/pyopengl-3.1.10-py3-none-any.whl", hash = "sha256:794a943daced39300879e4e47bd94525280685f42dbb5a998d336cfff151d74f", size = 3194996, upload-time = "2025-08-18T02:32:59.902Z" }, +] [[package]] name = "pyopenssl" @@ -6880,18 +7832,24 @@ wheels = [ [[package]] name = "pyparsing" -version = "3.3.1" +version = "3.3.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/33/c1/1d9de9aeaa1b89b0186e5fe23294ff6517fce1bc69149185577cd31016b2/pyparsing-3.3.1.tar.gz", hash = "sha256:47fad0f17ac1e2cad3de3b458570fbc9b03560aa029ed5e16ee5554da9a2251c", size = 1550512, upload-time = "2025-12-23T03:14:04.391Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/40/2614036cdd416452f5bf98ec037f38a1afb17f327cb8e6b652d4729e0af8/pyparsing-3.3.1-py3-none-any.whl", hash = 
"sha256:023b5e7e5520ad96642e2c6db4cb683d3970bd640cdf7115049a6e9c3682df82", size = 121793, upload-time = "2025-12-23T03:14:02.103Z" }, + { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" }, ] [[package]] name = "pypika" -version = "0.48.9" +version = "0.51.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/2c/94ed7b91db81d61d7096ac8f2d325ec562fc75e35f3baea8749c85b28784/PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378", size = 67259, upload-time = "2022-03-15T11:22:57.066Z" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/78/cbaebba88e05e2dcda13ca203131b38d3640219f20ebb49676d26714861b/pypika-0.51.1.tar.gz", hash = "sha256:c30c7c1048fbf056fd3920c5a2b88b0c29dd190a9b2bee971fd17e4abe4d0ebe", size = 80919, upload-time = "2026-02-04T11:27:48.304Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/57/83/c77dfeed04022e8930b08eedca2b6e5efed256ab3321396fde90066efb65/pypika-0.51.1-py2.py3-none-any.whl", hash = "sha256:77985b4d7ce71b9905255bf12468cf598349e98837c037541cfc240e528aec46", size = 60585, upload-time = "2026-02-04T11:27:46.251Z" }, +] [[package]] name = "pyproject-hooks" @@ -6907,47 +7865,14 @@ name = "pyquaternion" version = "0.9.9" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = 
"2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'aarch64') or (python_full_version < '3.11' and sys_platform != 'linux')" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64') or (python_full_version >= '3.11' and sys_platform != 'linux')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/3d092aa20efaedacb89c3221a92c6491be5b28f618a2c36b52b53e7446c2/pyquaternion-0.9.9.tar.gz", hash = "sha256:b1f61af219cb2fe966b5fb79a192124f2e63a3f7a777ac3cadf2957b1a81bea8", size = 15530, upload-time = "2020-10-05T01:31:30.327Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/49/b3/d8482e8cacc8ea15a356efea13d22ce1c5914a9ee36622ba250523240bf2/pyquaternion-0.9.9-py3-none-any.whl", hash = "sha256:e65f6e3f7b1fdf1a9e23f82434334a1ae84f14223eee835190cd2e841f8172ec", size = 14361, upload-time = "2020-10-05T01:31:37.575Z" }, ] -[[package]] -name = "pyreadline3" -version = "3.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839, upload-time = "2024-09-19T02:40:10.062Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" }, -] - -[[package]] -name = "pyrender" -version = "0.1.45" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "freetype-py" }, - { name = "imageio" }, - { name = "networkx", version = "3.4.2", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "pillow" }, - { name = "pyglet" }, - { name = "pyopengl" }, - { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "six" }, - { name = "trimesh" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4d/5a/2a3e5bfd83071a81e02291288391e0fa2c85d1c6765357f4de2dbc27bca6/pyrender-0.1.45.tar.gz", hash = "sha256:284b2432bf6832f05c5216c4b979ceb514ea78163bf53b8ce2bdf0069cb3b92e", size = 1202386, upload-time = "2021-02-18T18:56:28.82Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/28/88/174c28b9d3d03cf6d8edb6f637458f30f1cf1a2bd7a617cbd9dadb1740f6/pyrender-0.1.45-py3-none-any.whl", hash = "sha256:5cf751d1f21fba4640e830cef3a0b5a95ed0f05677bf92c6b8330056b4023aeb", size = 1214061, upload-time = "2021-02-18T18:56:27.275Z" }, -] - [[package]] name = "pysocks" version = "1.7.1" @@ -7060,50 +7985,73 @@ wheels = [ [[package]] name = "python-engineio" -version = "4.13.0" +version = "4.13.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "simple-websocket" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/5a/349caac055e03ef9e56ed29fa304846063b1771ee54ab8132bf98b29491e/python_engineio-4.13.0.tar.gz", hash = "sha256:f9c51a8754d2742ba832c24b46ed425fdd3064356914edd5a1e8ffde76ab7709", size = 92194, upload-time = 
"2025-12-24T22:38:05.111Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/12/bdef9dbeedbe2cdeba2a2056ad27b1fb081557d34b69a97f574843462cae/python_engineio-4.13.1.tar.gz", hash = "sha256:0a853fcef52f5b345425d8c2b921ac85023a04dfcf75d7b74696c61e940fd066", size = 92348, upload-time = "2026-02-06T23:38:06.12Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/50/74/c655a6eda0fd188d490c14142a0f0380655ac7099604e1fbf8fa1a97f0a1/python_engineio-4.13.0-py3-none-any.whl", hash = "sha256:57b94eac094fa07b050c6da59f48b12250ab1cd920765f4849963e3d89ad9de3", size = 59676, upload-time = "2025-12-24T22:38:03.56Z" }, + { url = "https://files.pythonhosted.org/packages/aa/54/0cce26da03a981f949bb8449c9778537f75f5917c172e1d2992ff25cb57d/python_engineio-4.13.1-py3-none-any.whl", hash = "sha256:f32ad10589859c11053ad7d9bb3c9695cdf862113bfb0d20bc4d890198287399", size = 59847, upload-time = "2026-02-06T23:38:04.861Z" }, ] [[package]] -name = "python-fcl" -version = "0.7.0.10" +name = "python-lsp-jsonrpc" +version = "1.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cython" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "ujson" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/b6/fd92e2ea4635d88966bb42c20198df1a981340f07843b5e3c6694ba3557b/python-lsp-jsonrpc-1.1.2.tar.gz", hash = "sha256:4688e453eef55cd952bff762c705cedefa12055c0aec17a06f595bcc002cc912", size = 15298, upload-time = "2023-09-23T17:48:30.451Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/d9/656659d5b5d5f402b2b174cd0ba9bc827e07ce3c0bf88da65424baf64af8/python_lsp_jsonrpc-1.1.2-py3-none-any.whl", hash = "sha256:7339c2e9630ae98903fdaea1ace8c47fba0484983794d6aafd0bd8989be2b03c", size = 8805, upload-time 
= "2023-09-23T17:48:28.804Z" }, +] + +[[package]] +name = "python-lsp-ruff" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cattrs" }, + { name = "lsprotocol" }, + { name = "python-lsp-server" }, + { name = "ruff" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/af/79/2f6322c47bd2956447e0a6787084b4110b4473e3d2501b86aa47c802e6a0/python_lsp_ruff-2.3.0.tar.gz", hash = "sha256:647745b7f3010ac101e3c53a797b8f9deb1f52228b608d70ad0e8e056978c3b7", size = 17268, upload-time = "2025-09-29T20:14:02.994Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/c0/761e359e255fce641c263a3c3e43f7685d1667139e9d35a376c1cc9f6f70/python_lsp_ruff-2.3.0-py3-none-any.whl", hash = "sha256:b858b698fbaff5670f6d5e6c66afc632908f78639d73dc85dedd33ae5fdd204f", size = 12039, upload-time = "2025-09-29T20:14:01.56Z" }, +] + +[[package]] +name = "python-lsp-server" +version = "1.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "black" }, + { name = "docstring-to-markdown" }, + { name = "jedi" }, + { name = "pluggy" }, + { name = "python-lsp-jsonrpc" }, + { name = "ujson" }, ] +sdist = { url = "https://files.pythonhosted.org/packages/b4/b5/b989d41c63390dfc2bf63275ab543b82fed076723d912055e77ccbae1422/python_lsp_server-1.14.0.tar.gz", hash = "sha256:509c445fc667f41ffd3191cb7512a497bf7dd76c14ceb1ee2f6c13ebe71f9a6b", size = 121536, upload-time = "2025-12-06T16:12:20.86Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/ff/5f095a3f8a4ba918b14f61d6566fd50dcad0beb0f8f8e7f9569f4fc70469/python_fcl-0.7.0.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9a2428768f6d1d3dab1e3f7ccbae3cd5e36287e74e1006773fdc5c1fc908b375", size = 2004230, upload-time = "2025-10-22T06:28:08.625Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/e4/0e3a47dba337c66f68468a5dcc4737a83b055347783de25bf2f1cee8d3f6/python_fcl-0.7.0.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9877c731ad80971afa89d87e8adb2edcd80c2804ad214dc7767661c33a40c5c0", size = 1568886, upload-time = "2025-10-22T06:28:10.605Z" }, - { url = "https://files.pythonhosted.org/packages/b1/84/a13e09672d86eb12d6614537a30c649feedd143b56a2ce659723e64a3068/python_fcl-0.7.0.10-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c3252c0e8420857e6a08a703be46a65b97d089bab8a571f4ddc3d3c9b604f665", size = 4626302, upload-time = "2025-10-22T06:28:12.488Z" }, - { url = "https://files.pythonhosted.org/packages/9b/6f/4fc417d2e2ed7c2cc826ab992e06ce7297fec8966343be8d2f4ce74c4147/python_fcl-0.7.0.10-cp310-cp310-win_amd64.whl", hash = "sha256:a25ffd460c1bfdcd296ad97bccff5bd7696cf5311e73260c1dcf46262cc84113", size = 1095153, upload-time = "2025-10-22T06:28:14.232Z" }, - { url = "https://files.pythonhosted.org/packages/68/a6/62d3426e438991c1c97c6483045da8c22fd037972b9299fcf3e6e80b7c9e/python_fcl-0.7.0.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81eaf143c5fe478928c7012b26ab98f75deda2b01abff4f633b54a75bfa35eae", size = 2006706, upload-time = "2025-10-22T06:28:15.563Z" }, - { url = "https://files.pythonhosted.org/packages/32/16/c468f3b2a5bef5ae0662b4a44ec1baf660c383b229a7836e636d83568d02/python_fcl-0.7.0.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4a4218ca935ca2306ac0f43600ca159919470e0702532dff14bace3e06ef98c2", size = 1571152, upload-time = "2025-10-22T06:28:17.281Z" }, - { url = "https://files.pythonhosted.org/packages/c6/71/cfe8928d36463972a011afb127dfdf18f903dab4184f2cffdf818e592514/python_fcl-0.7.0.10-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f942fe9307287f9c4dd6288881883afe29c80730e670cd0ad83851d2d0e27fcd", size = 4690394, upload-time = "2025-10-22T06:28:19.035Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/63/d4b8b2735806710835e94614c57551564218c25900eb47f62652b8250ff3/python_fcl-0.7.0.10-cp311-cp311-win_amd64.whl", hash = "sha256:b569acd01fc9e86f83b2c185a299301ab494143fdb46b0c57c81aa657696a6a5", size = 1095360, upload-time = "2025-10-22T06:28:20.708Z" }, - { url = "https://files.pythonhosted.org/packages/0d/19/9453f061ef50746c8e1bc0b15b3549d8ec599e8d1b13413d0b44b4307775/python_fcl-0.7.0.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:151112db1ab2cd9245046054cd632e6a6441a178704c69e40f2b4d040093be2c", size = 2004820, upload-time = "2025-10-22T06:28:21.938Z" }, - { url = "https://files.pythonhosted.org/packages/3d/71/9761bd7f2d89e45afc199c797a6e70c556134b56827983fb874231f0affb/python_fcl-0.7.0.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:471f7f5c2cac5397835f009bb7d8f4efed86ab5ac232cf85f35f99d15bbab832", size = 1571792, upload-time = "2025-10-22T06:28:23.353Z" }, - { url = "https://files.pythonhosted.org/packages/97/46/bda2b85d827b7c05effbac3563d8cd7635baa7e939fc8c183a0455ab973a/python_fcl-0.7.0.10-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ad9b66dedd1f267bb1cc27a8c27fdefb180e9f782b8e670ef3a7e59bc6aec8d", size = 4679032, upload-time = "2025-10-22T06:28:24.924Z" }, - { url = "https://files.pythonhosted.org/packages/03/db/324bba54308477bac0c9b3c6b5b91cbb0c2b4c65c4dc08c9cabc8adb215a/python_fcl-0.7.0.10-cp312-cp312-win_amd64.whl", hash = "sha256:10ef439be61b591928ae0081f749b34aa68ca5a60504f60adbcbe19106d4b2bd", size = 1095678, upload-time = "2025-10-22T06:28:26.637Z" }, - { url = "https://files.pythonhosted.org/packages/d7/af/28dd814aeeea6ca7ae7c6ceee3e8d44a9006158cec1b1b7da40cd68d562f/python_fcl-0.7.0.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bb37579eea7043cf8a2aa2ab7ee705e8438a08d22dcf7ebaf0c8ec6dfcf89b2", size = 2003577, upload-time = "2025-10-22T06:28:28.045Z" }, - { url = 
"https://files.pythonhosted.org/packages/00/b6/609147335621a9244bca472da4938a6145429803f67d1eb75722797058a7/python_fcl-0.7.0.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb9bf7e768a433c3eabd0cae73c1a9c411f590b49d73eb38c91bb88f44b782cc", size = 1570854, upload-time = "2025-10-22T06:28:29.493Z" }, - { url = "https://files.pythonhosted.org/packages/57/6d/344c46667901b4b0c64a44fa0f73ef4d9ce1757d86129083b820a27971b3/python_fcl-0.7.0.10-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c905bc8b6ebf86524d958b3f17e28f5f033331aed9dd8610c6896243a0003e4c", size = 4679836, upload-time = "2025-10-22T06:28:30.991Z" }, - { url = "https://files.pythonhosted.org/packages/08/14/405b88ce34e2d05d4765b58b7f1f99b9afd91eef9bf4807ef6310669fed0/python_fcl-0.7.0.10-cp313-cp313-win_amd64.whl", hash = "sha256:7f2014f29a7ba65c9c4be2bd1ad1c80d91079b1e94f06fb59abbe4595b73d3a2", size = 1095886, upload-time = "2025-10-22T06:28:32.672Z" }, - { url = "https://files.pythonhosted.org/packages/45/db/220c122653624901fdd50bfdb4f4103f326b2d5438d208af286ae4b6bf26/python_fcl-0.7.0.10-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3791d32c35b50f7b8b3941ecf3b6f8435ede3db16cf9255ef5577a78291dd954", size = 2003205, upload-time = "2025-10-22T06:28:33.893Z" }, - { url = "https://files.pythonhosted.org/packages/b2/f5/4964d80affcf581b2e55a068737448f46ca48ad07281913e450e55d793a3/python_fcl-0.7.0.10-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:288e60098004f41c458ac6835f00a87241ddcb2364476156f23cd040963c4e32", size = 1571231, upload-time = "2025-10-22T06:28:35.578Z" }, - { url = "https://files.pythonhosted.org/packages/c8/c7/c3f9832eabdfbe597691f43e59ee50af024a2152f8ff8fa7b12d9fd1e15f/python_fcl-0.7.0.10-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:518391eee8033fdbae0e5de12644978c3ffe7c7c9ec0b2712fe9e501372022db", size = 4666617, upload-time = "2025-10-22T06:28:36.98Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/77/68cd8914605a5d6657ba13c21d1d8c16000c4e8acc49237866c94a0a63ad/python_fcl-0.7.0.10-cp314-cp314-win_amd64.whl", hash = "sha256:978f4f187ed04dcacb2ed975c081899a587bcbd053eafffc40abc6d0aefd2269", size = 1116098, upload-time = "2025-10-22T06:28:38.627Z" }, - { url = "https://files.pythonhosted.org/packages/a6/fc/8c29bcbf7a0dc8419cec46e1081e4e5e981a018fce0669cc9cd5df824ee6/python_fcl-0.7.0.10-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:e8f20a2e76c3728b4dc6741cb99dd2b0fdcb26e77bd24d3036b2d78fae398743", size = 2009882, upload-time = "2025-10-22T06:28:39.953Z" }, - { url = "https://files.pythonhosted.org/packages/6e/3f/c664eb49a2370b0bdf6e98ec3927b45c2ded45b20db4bb325c606089bfbd/python_fcl-0.7.0.10-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c6dd8534085f48d41b5171ae0f397b6d34ca046194826ff4dfa17a2139f323fa", size = 1579024, upload-time = "2025-10-22T06:28:41.531Z" }, - { url = "https://files.pythonhosted.org/packages/03/27/59296f3280169d3e39d29cfe8170e8edeaecb38270dacf467571c2ee85d0/python_fcl-0.7.0.10-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bce8f0823bdf040a2b0668599afdcb7405ac7e6a272ffedf0c6acd6756d082e", size = 4653964, upload-time = "2025-10-22T06:28:43.337Z" }, - { url = "https://files.pythonhosted.org/packages/b9/06/a4ddfd46794c7d6e175c34e8c10554949d1c17aeb78c188050b4746d4b48/python_fcl-0.7.0.10-cp314-cp314t-win_amd64.whl", hash = "sha256:6ab961f459c294695385d518f7a6eb3a2577029ca008698045dac2b7253fa3f7", size = 1140958, upload-time = "2025-10-22T06:28:44.586Z" }, + { url = "https://files.pythonhosted.org/packages/08/cf/587f913335e3855e0ddca2aee7c3f9d5de2d75a1e23434891e9f74783bcd/python_lsp_server-1.14.0-py3-none-any.whl", hash = "sha256:a71a917464effc48f4c70363f90b8520e5e3ba8201428da80b97a7ceb259e32a", size = 77060, upload-time = "2025-12-06T16:12:19.46Z" }, +] + +[package.optional-dependencies] +all = [ + { name = "autopep8" }, + { name = "flake8" }, + { name = 
"mccabe" }, + { name = "pycodestyle" }, + { name = "pydocstyle" }, + { name = "pyflakes" }, + { name = "pylint" }, + { name = "rope" }, + { name = "whatthepatch" }, + { name = "yapf" }, ] [[package]] @@ -7117,15 +8065,72 @@ wheels = [ [[package]] name = "python-socketio" -version = "5.16.0" +version = "5.16.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "bidict" }, { name = "python-engineio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b8/55/5d8af5884283b58e4405580bcd84af1d898c457173c708736e065f10ca4a/python_socketio-5.16.0.tar.gz", hash = "sha256:f79403c7f1ba8b84460aa8fe4c671414c8145b21a501b46b676f3740286356fd", size = 127120, upload-time = "2025-12-24T23:51:48.826Z" } +sdist = { url = "https://files.pythonhosted.org/packages/59/81/cf8284f45e32efa18d3848ed82cdd4dcc1b657b082458fbe01ad3e1f2f8d/python_socketio-5.16.1.tar.gz", hash = "sha256:f863f98eacce81ceea2e742f6388e10ca3cdd0764be21d30d5196470edf5ea89", size = 128508, upload-time = "2026-02-06T23:42:07Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/c7/deb8c5e604404dbf10a3808a858946ca3547692ff6316b698945bb72177e/python_socketio-5.16.1-py3-none-any.whl", hash = "sha256:a3eb1702e92aa2f2b5d3ba00261b61f062cce51f1cfb6900bf3ab4d1934d2d35", size = 82054, upload-time = "2026-02-06T23:42:05.772Z" }, +] + +[[package]] +name = "pytokens" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/34/b4e015b99031667a7b960f888889c5bd34ef585c85e1cb56a594b92836ac/pytokens-0.4.1.tar.gz", hash = "sha256:292052fe80923aae2260c073f822ceba21f3872ced9a68bb7953b348e561179a", size = 23015, upload-time = "2026-01-30T01:03:45.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/24/f206113e05cb8ef51b3850e7ef88f20da6f4bf932190ceb48bd3da103e10/pytokens-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a44ed93ea23415c54f3face3b65ef2b844d96aeb3455b8a69b3df6beab6acc5", size = 
161522, upload-time = "2026-01-30T01:02:50.393Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e9/06a6bf1b90c2ed81a9c7d2544232fe5d2891d1cd480e8a1809ca354a8eb2/pytokens-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:add8bf86b71a5d9fb5b89f023a80b791e04fba57960aa790cc6125f7f1d39dfe", size = 246945, upload-time = "2026-01-30T01:02:52.399Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/f6fb1007a4c3d8b682d5d65b7c1fb33257587a5f782647091e3408abe0b8/pytokens-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:670d286910b531c7b7e3c0b453fd8156f250adb140146d234a82219459b9640c", size = 259525, upload-time = "2026-01-30T01:02:53.737Z" }, + { url = "https://files.pythonhosted.org/packages/04/92/086f89b4d622a18418bac74ab5db7f68cf0c21cf7cc92de6c7b919d76c88/pytokens-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4e691d7f5186bd2842c14813f79f8884bb03f5995f0575272009982c5ac6c0f7", size = 262693, upload-time = "2026-01-30T01:02:54.871Z" }, + { url = "https://files.pythonhosted.org/packages/b4/7b/8b31c347cf94a3f900bdde750b2e9131575a61fdb620d3d3c75832262137/pytokens-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:27b83ad28825978742beef057bfe406ad6ed524b2d28c252c5de7b4a6dd48fa2", size = 103567, upload-time = "2026-01-30T01:02:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/3d/92/790ebe03f07b57e53b10884c329b9a1a308648fc083a6d4a39a10a28c8fc/pytokens-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d70e77c55ae8380c91c0c18dea05951482e263982911fc7410b1ffd1dadd3440", size = 160864, upload-time = "2026-01-30T01:02:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/13/25/a4f555281d975bfdd1eba731450e2fe3a95870274da73fb12c40aeae7625/pytokens-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a58d057208cb9075c144950d789511220b07636dd2e4708d5645d24de666bdc", size 
= 248565, upload-time = "2026-01-30T01:02:59.912Z" }, + { url = "https://files.pythonhosted.org/packages/17/50/bc0394b4ad5b1601be22fa43652173d47e4c9efbf0044c62e9a59b747c56/pytokens-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b49750419d300e2b5a3813cf229d4e5a4c728dae470bcc89867a9ad6f25a722d", size = 260824, upload-time = "2026-01-30T01:03:01.471Z" }, + { url = "https://files.pythonhosted.org/packages/4e/54/3e04f9d92a4be4fc6c80016bc396b923d2a6933ae94b5f557c939c460ee0/pytokens-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9907d61f15bf7261d7e775bd5d7ee4d2930e04424bab1972591918497623a16", size = 264075, upload-time = "2026-01-30T01:03:04.143Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1b/44b0326cb5470a4375f37988aea5d61b5cc52407143303015ebee94abfd6/pytokens-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee44d0f85b803321710f9239f335aafe16553b39106384cef8e6de40cb4ef2f6", size = 103323, upload-time = "2026-01-30T01:03:05.412Z" }, + { url = "https://files.pythonhosted.org/packages/41/5d/e44573011401fb82e9d51e97f1290ceb377800fb4eed650b96f4753b499c/pytokens-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:140709331e846b728475786df8aeb27d24f48cbcf7bcd449f8de75cae7a45083", size = 160663, upload-time = "2026-01-30T01:03:06.473Z" }, + { url = "https://files.pythonhosted.org/packages/f0/e6/5bbc3019f8e6f21d09c41f8b8654536117e5e211a85d89212d59cbdab381/pytokens-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d6c4268598f762bc8e91f5dbf2ab2f61f7b95bdc07953b602db879b3c8c18e1", size = 255626, upload-time = "2026-01-30T01:03:08.177Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3c/2d5297d82286f6f3d92770289fd439956b201c0a4fc7e72efb9b2293758e/pytokens-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24afde1f53d95348b5a0eb19488661147285ca4dd7ed752bbc3e1c6242a304d1", size 
= 269779, upload-time = "2026-01-30T01:03:09.756Z" }, + { url = "https://files.pythonhosted.org/packages/20/01/7436e9ad693cebda0551203e0bf28f7669976c60ad07d6402098208476de/pytokens-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5ad948d085ed6c16413eb5fec6b3e02fa00dc29a2534f088d3302c47eb59adf9", size = 268076, upload-time = "2026-01-30T01:03:10.957Z" }, + { url = "https://files.pythonhosted.org/packages/2e/df/533c82a3c752ba13ae7ef238b7f8cdd272cf1475f03c63ac6cf3fcfb00b6/pytokens-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:3f901fe783e06e48e8cbdc82d631fca8f118333798193e026a50ce1b3757ea68", size = 103552, upload-time = "2026-01-30T01:03:12.066Z" }, + { url = "https://files.pythonhosted.org/packages/cb/dc/08b1a080372afda3cceb4f3c0a7ba2bde9d6a5241f1edb02a22a019ee147/pytokens-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8bdb9d0ce90cbf99c525e75a2fa415144fd570a1ba987380190e8b786bc6ef9b", size = 160720, upload-time = "2026-01-30T01:03:13.843Z" }, + { url = "https://files.pythonhosted.org/packages/64/0c/41ea22205da480837a700e395507e6a24425151dfb7ead73343d6e2d7ffe/pytokens-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5502408cab1cb18e128570f8d598981c68a50d0cbd7c61312a90507cd3a1276f", size = 254204, upload-time = "2026-01-30T01:03:14.886Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d2/afe5c7f8607018beb99971489dbb846508f1b8f351fcefc225fcf4b2adc0/pytokens-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29d1d8fb1030af4d231789959f21821ab6325e463f0503a61d204343c9b355d1", size = 268423, upload-time = "2026-01-30T01:03:15.936Z" }, + { url = "https://files.pythonhosted.org/packages/68/d4/00ffdbd370410c04e9591da9220a68dc1693ef7499173eb3e30d06e05ed1/pytokens-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:970b08dd6b86058b6dc07efe9e98414f5102974716232d10f32ff39701e841c4", size = 266859, upload-time = 
"2026-01-30T01:03:17.458Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c9/c3161313b4ca0c601eeefabd3d3b576edaa9afdefd32da97210700e47652/pytokens-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:9bd7d7f544d362576be74f9d5901a22f317efc20046efe2034dced238cbbfe78", size = 103520, upload-time = "2026-01-30T01:03:18.652Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a7/b470f672e6fc5fee0a01d9e75005a0e617e162381974213a945fcd274843/pytokens-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4a14d5f5fc78ce85e426aa159489e2d5961acf0e47575e08f35584009178e321", size = 160821, upload-time = "2026-01-30T01:03:19.684Z" }, + { url = "https://files.pythonhosted.org/packages/80/98/e83a36fe8d170c911f864bfded690d2542bfcfacb9c649d11a9e6eb9dc41/pytokens-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f50fd18543be72da51dd505e2ed20d2228c74e0464e4262e4899797803d7fa", size = 254263, upload-time = "2026-01-30T01:03:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/0f/95/70d7041273890f9f97a24234c00b746e8da86df462620194cef1d411ddeb/pytokens-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc74c035f9bfca0255c1af77ddd2d6ae8419012805453e4b0e7513e17904545d", size = 268071, upload-time = "2026-01-30T01:03:21.888Z" }, + { url = "https://files.pythonhosted.org/packages/da/79/76e6d09ae19c99404656d7db9c35dfd20f2086f3eb6ecb496b5b31163bad/pytokens-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f66a6bbe741bd431f6d741e617e0f39ec7257ca1f89089593479347cc4d13324", size = 271716, upload-time = "2026-01-30T01:03:23.633Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/482e55fa1602e0a7ff012661d8c946bafdc05e480ea5a32f4f7e336d4aa9/pytokens-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:b35d7e5ad269804f6697727702da3c517bb8a5228afa450ab0fa787732055fc9", size = 104539, upload-time = "2026-01-30T01:03:24.788Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/e8/20e7db907c23f3d63b0be3b8a4fd1927f6da2395f5bcc7f72242bb963dfe/pytokens-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8fcb9ba3709ff77e77f1c7022ff11d13553f3c30299a9fe246a166903e9091eb", size = 168474, upload-time = "2026-01-30T01:03:26.428Z" }, + { url = "https://files.pythonhosted.org/packages/d6/81/88a95ee9fafdd8f5f3452107748fd04c24930d500b9aba9738f3ade642cc/pytokens-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79fc6b8699564e1f9b521582c35435f1bd32dd06822322ec44afdeba666d8cb3", size = 290473, upload-time = "2026-01-30T01:03:27.415Z" }, + { url = "https://files.pythonhosted.org/packages/cf/35/3aa899645e29b6375b4aed9f8d21df219e7c958c4c186b465e42ee0a06bf/pytokens-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d31b97b3de0f61571a124a00ffe9a81fb9939146c122c11060725bd5aea79975", size = 303485, upload-time = "2026-01-30T01:03:28.558Z" }, + { url = "https://files.pythonhosted.org/packages/52/a0/07907b6ff512674d9b201859f7d212298c44933633c946703a20c25e9d81/pytokens-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:967cf6e3fd4adf7de8fc73cd3043754ae79c36475c1c11d514fc72cf5490094a", size = 306698, upload-time = "2026-01-30T01:03:29.653Z" }, + { url = "https://files.pythonhosted.org/packages/39/2a/cbbf9250020a4a8dd53ba83a46c097b69e5eb49dd14e708f496f548c6612/pytokens-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:584c80c24b078eec1e227079d56dc22ff755e0ba8654d8383b2c549107528918", size = 116287, upload-time = "2026-01-30T01:03:30.912Z" }, + { url = "https://files.pythonhosted.org/packages/c6/78/397db326746f0a342855b81216ae1f0a32965deccfd7c830a2dbc66d2483/pytokens-0.4.1-py3-none-any.whl", hash = "sha256:26cef14744a8385f35d0e095dc8b3a7583f6c953c2e3d269c7f82484bf5ad2de", size = 13729, upload-time = "2026-01-30T01:03:45.029Z" }, +] + +[[package]] +name = "pytoolconfig" +version = "1.3.1" +source = 
{ registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/dc/abf70d2c2bcac20e8c71a7cdf6d44e4ddba4edf65acb179248d554d743db/pytoolconfig-1.3.1.tar.gz", hash = "sha256:51e6bd1a6f108238ae6aab6a65e5eed5e75d456be1c2bf29b04e5c1e7d7adbae", size = 16655, upload-time = "2024-01-11T16:25:11.914Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/d2/2ccc2b69a187b80fda3152745670cfba936704f296a9fa54c6c8ac694d12/python_socketio-5.16.0-py3-none-any.whl", hash = "sha256:d95802961e15c7bd54ecf884c6e7644f81be8460f0a02ee66b473df58088ee8a", size = 79607, upload-time = "2025-12-24T23:51:47.2Z" }, + { url = "https://files.pythonhosted.org/packages/92/44/da239917f5711ca7105f7d7f9e2765716dd883b241529beafc0f28504725/pytoolconfig-1.3.1-py3-none-any.whl", hash = "sha256:5d8cea8ae1996938ec3eaf44567bbc5ef1bc900742190c439a44a704d6e1b62b", size = 17022, upload-time = "2024-01-11T16:25:10.589Z" }, +] + +[package.optional-dependencies] +global = [ + { name = "platformdirs" }, ] [[package]] @@ -7334,109 +8339,123 @@ wheels = [ [[package]] name = "regex" -version = "2025.11.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/d6/d788d52da01280a30a3f6268aef2aa71043bff359c618fea4c5b536654d5/regex-2025.11.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2b441a4ae2c8049106e8b39973bfbddfb25a179dda2bdb99b0eeb60c40a6a3af", size = 488087, upload-time = "2025-11-03T21:30:47.317Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/39/abec3bd688ec9bbea3562de0fd764ff802976185f5ff22807bf0a2697992/regex-2025.11.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2fa2eed3f76677777345d2f81ee89f5de2f5745910e805f7af7386a920fa7313", size = 290544, upload-time = "2025-11-03T21:30:49.912Z" }, - { url = "https://files.pythonhosted.org/packages/39/b3/9a231475d5653e60002508f41205c61684bb2ffbf2401351ae2186897fc4/regex-2025.11.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8b4a27eebd684319bdf473d39f1d79eed36bf2cd34bd4465cdb4618d82b3d56", size = 288408, upload-time = "2025-11-03T21:30:51.344Z" }, - { url = "https://files.pythonhosted.org/packages/c3/c5/1929a0491bd5ac2d1539a866768b88965fa8c405f3e16a8cef84313098d6/regex-2025.11.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5cf77eac15bd264986c4a2c63353212c095b40f3affb2bc6b4ef80c4776c1a28", size = 781584, upload-time = "2025-11-03T21:30:52.596Z" }, - { url = "https://files.pythonhosted.org/packages/ce/fd/16aa16cf5d497ef727ec966f74164fbe75d6516d3d58ac9aa989bc9cdaad/regex-2025.11.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b7f9ee819f94c6abfa56ec7b1dbab586f41ebbdc0a57e6524bd5e7f487a878c7", size = 850733, upload-time = "2025-11-03T21:30:53.825Z" }, - { url = "https://files.pythonhosted.org/packages/e6/49/3294b988855a221cb6565189edf5dc43239957427df2d81d4a6b15244f64/regex-2025.11.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:838441333bc90b829406d4a03cb4b8bf7656231b84358628b0406d803931ef32", size = 898691, upload-time = "2025-11-03T21:30:55.575Z" }, - { url = "https://files.pythonhosted.org/packages/14/62/b56d29e70b03666193369bdbdedfdc23946dbe9f81dd78ce262c74d988ab/regex-2025.11.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfe6d3f0c9e3b7e8c0c694b24d25e677776f5ca26dce46fd6b0489f9c8339391", size = 
791662, upload-time = "2025-11-03T21:30:57.262Z" }, - { url = "https://files.pythonhosted.org/packages/15/fc/e4c31d061eced63fbf1ce9d853975f912c61a7d406ea14eda2dd355f48e7/regex-2025.11.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2ab815eb8a96379a27c3b6157fcb127c8f59c36f043c1678110cea492868f1d5", size = 782587, upload-time = "2025-11-03T21:30:58.788Z" }, - { url = "https://files.pythonhosted.org/packages/b2/bb/5e30c7394bcf63f0537121c23e796be67b55a8847c3956ae6068f4c70702/regex-2025.11.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:728a9d2d173a65b62bdc380b7932dd8e74ed4295279a8fe1021204ce210803e7", size = 774709, upload-time = "2025-11-03T21:31:00.081Z" }, - { url = "https://files.pythonhosted.org/packages/c5/c4/fce773710af81b0cb37cb4ff0947e75d5d17dee304b93d940b87a67fc2f4/regex-2025.11.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:509dc827f89c15c66a0c216331260d777dd6c81e9a4e4f830e662b0bb296c313", size = 845773, upload-time = "2025-11-03T21:31:01.583Z" }, - { url = "https://files.pythonhosted.org/packages/7b/5e/9466a7ec4b8ec282077095c6eb50a12a389d2e036581134d4919e8ca518c/regex-2025.11.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:849202cd789e5f3cf5dcc7822c34b502181b4824a65ff20ce82da5524e45e8e9", size = 836164, upload-time = "2025-11-03T21:31:03.244Z" }, - { url = "https://files.pythonhosted.org/packages/95/18/82980a60e8ed1594eb3c89eb814fb276ef51b9af7caeab1340bfd8564af6/regex-2025.11.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b6f78f98741dcc89607c16b1e9426ee46ce4bf31ac5e6b0d40e81c89f3481ea5", size = 779832, upload-time = "2025-11-03T21:31:04.876Z" }, - { url = "https://files.pythonhosted.org/packages/03/cc/90ab0fdbe6dce064a42015433f9152710139fb04a8b81b4fb57a1cb63ffa/regex-2025.11.3-cp310-cp310-win32.whl", hash = "sha256:149eb0bba95231fb4f6d37c8f760ec9fa6fabf65bab555e128dde5f2475193ec", size = 265802, upload-time = "2025-11-03T21:31:06.581Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/9d/e9e8493a85f3b1ddc4a5014465f5c2b78c3ea1cbf238dcfde78956378041/regex-2025.11.3-cp310-cp310-win_amd64.whl", hash = "sha256:ee3a83ce492074c35a74cc76cf8235d49e77b757193a5365ff86e3f2f93db9fd", size = 277722, upload-time = "2025-11-03T21:31:08.144Z" }, - { url = "https://files.pythonhosted.org/packages/15/c4/b54b24f553966564506dbf873a3e080aef47b356a3b39b5d5aba992b50db/regex-2025.11.3-cp310-cp310-win_arm64.whl", hash = "sha256:38af559ad934a7b35147716655d4a2f79fcef2d695ddfe06a06ba40ae631fa7e", size = 270289, upload-time = "2025-11-03T21:31:10.267Z" }, - { url = "https://files.pythonhosted.org/packages/f7/90/4fb5056e5f03a7048abd2b11f598d464f0c167de4f2a51aa868c376b8c70/regex-2025.11.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eadade04221641516fa25139273505a1c19f9bf97589a05bc4cfcd8b4a618031", size = 488081, upload-time = "2025-11-03T21:31:11.946Z" }, - { url = "https://files.pythonhosted.org/packages/85/23/63e481293fac8b069d84fba0299b6666df720d875110efd0338406b5d360/regex-2025.11.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:feff9e54ec0dd3833d659257f5c3f5322a12eee58ffa360984b716f8b92983f4", size = 290554, upload-time = "2025-11-03T21:31:13.387Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9d/b101d0262ea293a0066b4522dfb722eb6a8785a8c3e084396a5f2c431a46/regex-2025.11.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b30bc921d50365775c09a7ed446359e5c0179e9e2512beec4a60cbcef6ddd50", size = 288407, upload-time = "2025-11-03T21:31:14.809Z" }, - { url = "https://files.pythonhosted.org/packages/0c/64/79241c8209d5b7e00577ec9dca35cd493cc6be35b7d147eda367d6179f6d/regex-2025.11.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f99be08cfead2020c7ca6e396c13543baea32343b7a9a5780c462e323bd8872f", size = 793418, upload-time = "2025-11-03T21:31:16.556Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/e2/23cd5d3573901ce8f9757c92ca4db4d09600b865919b6d3e7f69f03b1afd/regex-2025.11.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6dd329a1b61c0ee95ba95385fb0c07ea0d3fe1a21e1349fa2bec272636217118", size = 860448, upload-time = "2025-11-03T21:31:18.12Z" }, - { url = "https://files.pythonhosted.org/packages/2a/4c/aecf31beeaa416d0ae4ecb852148d38db35391aac19c687b5d56aedf3a8b/regex-2025.11.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c5238d32f3c5269d9e87be0cf096437b7622b6920f5eac4fd202468aaeb34d2", size = 907139, upload-time = "2025-11-03T21:31:20.753Z" }, - { url = "https://files.pythonhosted.org/packages/61/22/b8cb00df7d2b5e0875f60628594d44dba283e951b1ae17c12f99e332cc0a/regex-2025.11.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10483eefbfb0adb18ee9474498c9a32fcf4e594fbca0543bb94c48bac6183e2e", size = 800439, upload-time = "2025-11-03T21:31:22.069Z" }, - { url = "https://files.pythonhosted.org/packages/02/a8/c4b20330a5cdc7a8eb265f9ce593f389a6a88a0c5f280cf4d978f33966bc/regex-2025.11.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:78c2d02bb6e1da0720eedc0bad578049cad3f71050ef8cd065ecc87691bed2b0", size = 782965, upload-time = "2025-11-03T21:31:23.598Z" }, - { url = "https://files.pythonhosted.org/packages/b4/4c/ae3e52988ae74af4b04d2af32fee4e8077f26e51b62ec2d12d246876bea2/regex-2025.11.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e6b49cd2aad93a1790ce9cffb18964f6d3a4b0b3dbdbd5de094b65296fce6e58", size = 854398, upload-time = "2025-11-03T21:31:25.008Z" }, - { url = "https://files.pythonhosted.org/packages/06/d1/a8b9cf45874eda14b2e275157ce3b304c87e10fb38d9fc26a6e14eb18227/regex-2025.11.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:885b26aa3ee56433b630502dc3d36ba78d186a00cc535d3806e6bfd9ed3c70ab", size = 845897, upload-time = 
"2025-11-03T21:31:26.427Z" }, - { url = "https://files.pythonhosted.org/packages/ea/fe/1830eb0236be93d9b145e0bd8ab499f31602fe0999b1f19e99955aa8fe20/regex-2025.11.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd76a9f58e6a00f8772e72cff8ebcff78e022be95edf018766707c730593e1e", size = 788906, upload-time = "2025-11-03T21:31:28.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/47/dc2577c1f95f188c1e13e2e69d8825a5ac582ac709942f8a03af42ed6e93/regex-2025.11.3-cp311-cp311-win32.whl", hash = "sha256:3e816cc9aac1cd3cc9a4ec4d860f06d40f994b5c7b4d03b93345f44e08cc68bf", size = 265812, upload-time = "2025-11-03T21:31:29.72Z" }, - { url = "https://files.pythonhosted.org/packages/50/1e/15f08b2f82a9bbb510621ec9042547b54d11e83cb620643ebb54e4eb7d71/regex-2025.11.3-cp311-cp311-win_amd64.whl", hash = "sha256:087511f5c8b7dfbe3a03f5d5ad0c2a33861b1fc387f21f6f60825a44865a385a", size = 277737, upload-time = "2025-11-03T21:31:31.422Z" }, - { url = "https://files.pythonhosted.org/packages/f4/fc/6500eb39f5f76c5e47a398df82e6b535a5e345f839581012a418b16f9cc3/regex-2025.11.3-cp311-cp311-win_arm64.whl", hash = "sha256:1ff0d190c7f68ae7769cd0313fe45820ba07ffebfddfaa89cc1eb70827ba0ddc", size = 270290, upload-time = "2025-11-03T21:31:33.041Z" }, - { url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312, upload-time = "2025-11-03T21:31:34.343Z" }, - { url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256, upload-time = "2025-11-03T21:31:35.675Z" }, - { url = 
"https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921, upload-time = "2025-11-03T21:31:37.07Z" }, - { url = "https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568, upload-time = "2025-11-03T21:31:38.784Z" }, - { url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165, upload-time = "2025-11-03T21:31:40.559Z" }, - { url = "https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182, upload-time = "2025-11-03T21:31:42.002Z" }, - { url = "https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501, upload-time = "2025-11-03T21:31:43.815Z" }, - { url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 
787842, upload-time = "2025-11-03T21:31:45.353Z" }, - { url = "https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", size = 858519, upload-time = "2025-11-03T21:31:46.814Z" }, - { url = "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611, upload-time = "2025-11-03T21:31:48.289Z" }, - { url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759, upload-time = "2025-11-03T21:31:49.759Z" }, - { url = "https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194, upload-time = "2025-11-03T21:31:51.53Z" }, - { url = "https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069, upload-time = "2025-11-03T21:31:53.151Z" }, - { url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330, upload-time = "2025-11-03T21:31:54.514Z" }, - { url = 
"https://files.pythonhosted.org/packages/e1/a7/dda24ebd49da46a197436ad96378f17df30ceb40e52e859fc42cac45b850/regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4", size = 489081, upload-time = "2025-11-03T21:31:55.9Z" }, - { url = "https://files.pythonhosted.org/packages/19/22/af2dc751aacf88089836aa088a1a11c4f21a04707eb1b0478e8e8fb32847/regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76", size = 291123, upload-time = "2025-11-03T21:31:57.758Z" }, - { url = "https://files.pythonhosted.org/packages/a3/88/1a3ea5672f4b0a84802ee9891b86743438e7c04eb0b8f8c4e16a42375327/regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a", size = 288814, upload-time = "2025-11-03T21:32:01.12Z" }, - { url = "https://files.pythonhosted.org/packages/fb/8c/f5987895bf42b8ddeea1b315c9fedcfe07cadee28b9c98cf50d00adcb14d/regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361", size = 798592, upload-time = "2025-11-03T21:32:03.006Z" }, - { url = "https://files.pythonhosted.org/packages/99/2a/6591ebeede78203fa77ee46a1c36649e02df9eaa77a033d1ccdf2fcd5d4e/regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160", size = 864122, upload-time = "2025-11-03T21:32:04.553Z" }, - { url = "https://files.pythonhosted.org/packages/94/d6/be32a87cf28cf8ed064ff281cfbd49aefd90242a83e4b08b5a86b38e8eb4/regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe", size = 912272, upload-time = 
"2025-11-03T21:32:06.148Z" }, - { url = "https://files.pythonhosted.org/packages/62/11/9bcef2d1445665b180ac7f230406ad80671f0fc2a6ffb93493b5dd8cd64c/regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850", size = 803497, upload-time = "2025-11-03T21:32:08.162Z" }, - { url = "https://files.pythonhosted.org/packages/e5/a7/da0dc273d57f560399aa16d8a68ae7f9b57679476fc7ace46501d455fe84/regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc", size = 787892, upload-time = "2025-11-03T21:32:09.769Z" }, - { url = "https://files.pythonhosted.org/packages/da/4b/732a0c5a9736a0b8d6d720d4945a2f1e6f38f87f48f3173559f53e8d5d82/regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9", size = 858462, upload-time = "2025-11-03T21:32:11.769Z" }, - { url = "https://files.pythonhosted.org/packages/0c/f5/a2a03df27dc4c2d0c769220f5110ba8c4084b0bfa9ab0f9b4fcfa3d2b0fc/regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b", size = 850528, upload-time = "2025-11-03T21:32:13.906Z" }, - { url = "https://files.pythonhosted.org/packages/d6/09/e1cd5bee3841c7f6eb37d95ca91cdee7100b8f88b81e41c2ef426910891a/regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7", size = 789866, upload-time = "2025-11-03T21:32:15.748Z" }, - { url = "https://files.pythonhosted.org/packages/eb/51/702f5ea74e2a9c13d855a6a85b7f80c30f9e72a95493260193c07f3f8d74/regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c", size = 266189, upload-time = "2025-11-03T21:32:17.493Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/00/6e29bb314e271a743170e53649db0fdb8e8ff0b64b4f425f5602f4eb9014/regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5", size = 277054, upload-time = "2025-11-03T21:32:19.042Z" }, - { url = "https://files.pythonhosted.org/packages/25/f1/b156ff9f2ec9ac441710764dda95e4edaf5f36aca48246d1eea3f1fd96ec/regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467", size = 270325, upload-time = "2025-11-03T21:32:21.338Z" }, - { url = "https://files.pythonhosted.org/packages/20/28/fd0c63357caefe5680b8ea052131acbd7f456893b69cc2a90cc3e0dc90d4/regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281", size = 491984, upload-time = "2025-11-03T21:32:23.466Z" }, - { url = "https://files.pythonhosted.org/packages/df/ec/7014c15626ab46b902b3bcc4b28a7bae46d8f281fc7ea9c95e22fcaaa917/regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39", size = 292673, upload-time = "2025-11-03T21:32:25.034Z" }, - { url = "https://files.pythonhosted.org/packages/23/ab/3b952ff7239f20d05f1f99e9e20188513905f218c81d52fb5e78d2bf7634/regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7", size = 291029, upload-time = "2025-11-03T21:32:26.528Z" }, - { url = "https://files.pythonhosted.org/packages/21/7e/3dc2749fc684f455f162dcafb8a187b559e2614f3826877d3844a131f37b/regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed", size = 807437, upload-time = "2025-11-03T21:32:28.363Z" }, - { url = 
"https://files.pythonhosted.org/packages/1b/0b/d529a85ab349c6a25d1ca783235b6e3eedf187247eab536797021f7126c6/regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19", size = 873368, upload-time = "2025-11-03T21:32:30.4Z" }, - { url = "https://files.pythonhosted.org/packages/7d/18/2d868155f8c9e3e9d8f9e10c64e9a9f496bb8f7e037a88a8bed26b435af6/regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b", size = 914921, upload-time = "2025-11-03T21:32:32.123Z" }, - { url = "https://files.pythonhosted.org/packages/2d/71/9d72ff0f354fa783fe2ba913c8734c3b433b86406117a8db4ea2bf1c7a2f/regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a", size = 812708, upload-time = "2025-11-03T21:32:34.305Z" }, - { url = "https://files.pythonhosted.org/packages/e7/19/ce4bf7f5575c97f82b6e804ffb5c4e940c62609ab2a0d9538d47a7fdf7d4/regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6", size = 795472, upload-time = "2025-11-03T21:32:36.364Z" }, - { url = "https://files.pythonhosted.org/packages/03/86/fd1063a176ffb7b2315f9a1b08d17b18118b28d9df163132615b835a26ee/regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce", size = 868341, upload-time = "2025-11-03T21:32:38.042Z" }, - { url = "https://files.pythonhosted.org/packages/12/43/103fb2e9811205e7386366501bc866a164a0430c79dd59eac886a2822950/regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd", size = 854666, upload-time = 
"2025-11-03T21:32:40.079Z" }, - { url = "https://files.pythonhosted.org/packages/7d/22/e392e53f3869b75804762c7c848bd2dd2abf2b70fb0e526f58724638bd35/regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2", size = 799473, upload-time = "2025-11-03T21:32:42.148Z" }, - { url = "https://files.pythonhosted.org/packages/4f/f9/8bd6b656592f925b6845fcbb4d57603a3ac2fb2373344ffa1ed70aa6820a/regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a", size = 268792, upload-time = "2025-11-03T21:32:44.13Z" }, - { url = "https://files.pythonhosted.org/packages/e5/87/0e7d603467775ff65cd2aeabf1b5b50cc1c3708556a8b849a2fa4dd1542b/regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c", size = 280214, upload-time = "2025-11-03T21:32:45.853Z" }, - { url = "https://files.pythonhosted.org/packages/8d/d0/2afc6f8e94e2b64bfb738a7c2b6387ac1699f09f032d363ed9447fd2bb57/regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e", size = 271469, upload-time = "2025-11-03T21:32:48.026Z" }, - { url = "https://files.pythonhosted.org/packages/31/e9/f6e13de7e0983837f7b6d238ad9458800a874bf37c264f7923e63409944c/regex-2025.11.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9697a52e57576c83139d7c6f213d64485d3df5bf84807c35fa409e6c970801c6", size = 489089, upload-time = "2025-11-03T21:32:50.027Z" }, - { url = "https://files.pythonhosted.org/packages/a3/5c/261f4a262f1fa65141c1b74b255988bd2fa020cc599e53b080667d591cfc/regex-2025.11.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e18bc3f73bd41243c9b38a6d9f2366cd0e0137a9aebe2d8ff76c5b67d4c0a3f4", size = 291059, upload-time = "2025-11-03T21:32:51.682Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/57/f14eeb7f072b0e9a5a090d1712741fd8f214ec193dba773cf5410108bb7d/regex-2025.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:61a08bcb0ec14ff4e0ed2044aad948d0659604f824cbd50b55e30b0ec6f09c73", size = 288900, upload-time = "2025-11-03T21:32:53.569Z" }, - { url = "https://files.pythonhosted.org/packages/3c/6b/1d650c45e99a9b327586739d926a1cd4e94666b1bd4af90428b36af66dc7/regex-2025.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9c30003b9347c24bcc210958c5d167b9e4f9be786cb380a7d32f14f9b84674f", size = 799010, upload-time = "2025-11-03T21:32:55.222Z" }, - { url = "https://files.pythonhosted.org/packages/99/ee/d66dcbc6b628ce4e3f7f0cbbb84603aa2fc0ffc878babc857726b8aab2e9/regex-2025.11.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4e1e592789704459900728d88d41a46fe3969b82ab62945560a31732ffc19a6d", size = 864893, upload-time = "2025-11-03T21:32:57.239Z" }, - { url = "https://files.pythonhosted.org/packages/bf/2d/f238229f1caba7ac87a6c4153d79947fb0261415827ae0f77c304260c7d3/regex-2025.11.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6538241f45eb5a25aa575dbba1069ad786f68a4f2773a29a2bd3dd1f9de787be", size = 911522, upload-time = "2025-11-03T21:32:59.274Z" }, - { url = "https://files.pythonhosted.org/packages/bd/3d/22a4eaba214a917c80e04f6025d26143690f0419511e0116508e24b11c9b/regex-2025.11.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce22519c989bb72a7e6b36a199384c53db7722fe669ba891da75907fe3587db", size = 803272, upload-time = "2025-11-03T21:33:01.393Z" }, - { url = "https://files.pythonhosted.org/packages/84/b1/03188f634a409353a84b5ef49754b97dbcc0c0f6fd6c8ede505a8960a0a4/regex-2025.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:66d559b21d3640203ab9075797a55165d79017520685fb407b9234d72ab63c62", size = 
787958, upload-time = "2025-11-03T21:33:03.379Z" }, - { url = "https://files.pythonhosted.org/packages/99/6a/27d072f7fbf6fadd59c64d210305e1ff865cc3b78b526fd147db768c553b/regex-2025.11.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:669dcfb2e38f9e8c69507bace46f4889e3abbfd9b0c29719202883c0a603598f", size = 859289, upload-time = "2025-11-03T21:33:05.374Z" }, - { url = "https://files.pythonhosted.org/packages/9a/70/1b3878f648e0b6abe023172dacb02157e685564853cc363d9961bcccde4e/regex-2025.11.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:32f74f35ff0f25a5021373ac61442edcb150731fbaa28286bbc8bb1582c89d02", size = 850026, upload-time = "2025-11-03T21:33:07.131Z" }, - { url = "https://files.pythonhosted.org/packages/dd/d5/68e25559b526b8baab8e66839304ede68ff6727237a47727d240006bd0ff/regex-2025.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e6c7a21dffba883234baefe91bc3388e629779582038f75d2a5be918e250f0ed", size = 789499, upload-time = "2025-11-03T21:33:09.141Z" }, - { url = "https://files.pythonhosted.org/packages/fc/df/43971264857140a350910d4e33df725e8c94dd9dee8d2e4729fa0d63d49e/regex-2025.11.3-cp314-cp314-win32.whl", hash = "sha256:795ea137b1d809eb6836b43748b12634291c0ed55ad50a7d72d21edf1cd565c4", size = 271604, upload-time = "2025-11-03T21:33:10.9Z" }, - { url = "https://files.pythonhosted.org/packages/01/6f/9711b57dc6894a55faf80a4c1b5aa4f8649805cb9c7aef46f7d27e2b9206/regex-2025.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f95fbaa0ee1610ec0fc6b26668e9917a582ba80c52cc6d9ada15e30aa9ab9ad", size = 280320, upload-time = "2025-11-03T21:33:12.572Z" }, - { url = "https://files.pythonhosted.org/packages/f1/7e/f6eaa207d4377481f5e1775cdeb5a443b5a59b392d0065f3417d31d80f87/regex-2025.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:dfec44d532be4c07088c3de2876130ff0fbeeacaa89a137decbbb5f665855a0f", size = 273372, upload-time = "2025-11-03T21:33:14.219Z" }, - { url = 
"https://files.pythonhosted.org/packages/c3/06/49b198550ee0f5e4184271cee87ba4dfd9692c91ec55289e6282f0f86ccf/regex-2025.11.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ba0d8a5d7f04f73ee7d01d974d47c5834f8a1b0224390e4fe7c12a3a92a78ecc", size = 491985, upload-time = "2025-11-03T21:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/ce/bf/abdafade008f0b1c9da10d934034cb670432d6cf6cbe38bbb53a1cfd6cf8/regex-2025.11.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:442d86cf1cfe4faabf97db7d901ef58347efd004934da045c745e7b5bd57ac49", size = 292669, upload-time = "2025-11-03T21:33:18.32Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ef/0c357bb8edbd2ad8e273fcb9e1761bc37b8acbc6e1be050bebd6475f19c1/regex-2025.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fd0a5e563c756de210bb964789b5abe4f114dacae9104a47e1a649b910361536", size = 291030, upload-time = "2025-11-03T21:33:20.048Z" }, - { url = "https://files.pythonhosted.org/packages/79/06/edbb67257596649b8fb088d6aeacbcb248ac195714b18a65e018bf4c0b50/regex-2025.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf3490bcbb985a1ae97b2ce9ad1c0f06a852d5b19dde9b07bdf25bf224248c95", size = 807674, upload-time = "2025-11-03T21:33:21.797Z" }, - { url = "https://files.pythonhosted.org/packages/f4/d9/ad4deccfce0ea336296bd087f1a191543bb99ee1c53093dcd4c64d951d00/regex-2025.11.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3809988f0a8b8c9dcc0f92478d6501fac7200b9ec56aecf0ec21f4a2ec4b6009", size = 873451, upload-time = "2025-11-03T21:33:23.741Z" }, - { url = "https://files.pythonhosted.org/packages/13/75/a55a4724c56ef13e3e04acaab29df26582f6978c000ac9cd6810ad1f341f/regex-2025.11.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f4ff94e58e84aedb9c9fce66d4ef9f27a190285b451420f297c9a09f2b9abee9", size = 914980, upload-time = 
"2025-11-03T21:33:25.999Z" }, - { url = "https://files.pythonhosted.org/packages/67/1e/a1657ee15bd9116f70d4a530c736983eed997b361e20ecd8f5ca3759d5c5/regex-2025.11.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eb542fd347ce61e1321b0a6b945d5701528dca0cd9759c2e3bb8bd57e47964d", size = 812852, upload-time = "2025-11-03T21:33:27.852Z" }, - { url = "https://files.pythonhosted.org/packages/b8/6f/f7516dde5506a588a561d296b2d0044839de06035bb486b326065b4c101e/regex-2025.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2d5919075a1f2e413c00b056ea0c2f065b3f5fe83c3d07d325ab92dce51d6", size = 795566, upload-time = "2025-11-03T21:33:32.364Z" }, - { url = "https://files.pythonhosted.org/packages/d9/dd/3d10b9e170cc16fb34cb2cef91513cf3df65f440b3366030631b2984a264/regex-2025.11.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3f8bf11a4827cc7ce5a53d4ef6cddd5ad25595d3c1435ef08f76825851343154", size = 868463, upload-time = "2025-11-03T21:33:34.459Z" }, - { url = "https://files.pythonhosted.org/packages/f5/8e/935e6beff1695aa9085ff83195daccd72acc82c81793df480f34569330de/regex-2025.11.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:22c12d837298651e5550ac1d964e4ff57c3f56965fc1812c90c9fb2028eaf267", size = 854694, upload-time = "2025-11-03T21:33:36.793Z" }, - { url = "https://files.pythonhosted.org/packages/92/12/10650181a040978b2f5720a6a74d44f841371a3d984c2083fc1752e4acf6/regex-2025.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:62ba394a3dda9ad41c7c780f60f6e4a70988741415ae96f6d1bf6c239cf01379", size = 799691, upload-time = "2025-11-03T21:33:39.079Z" }, - { url = "https://files.pythonhosted.org/packages/67/90/8f37138181c9a7690e7e4cb388debbd389342db3c7381d636d2875940752/regex-2025.11.3-cp314-cp314t-win32.whl", hash = "sha256:4bf146dca15cdd53224a1bf46d628bd7590e4a07fbb69e720d561aea43a32b38", size = 274583, upload-time = "2025-11-03T21:33:41.302Z" }, - { url = 
"https://files.pythonhosted.org/packages/8f/cd/867f5ec442d56beb56f5f854f40abcfc75e11d10b11fdb1869dd39c63aaf/regex-2025.11.3-cp314-cp314t-win_amd64.whl", hash = "sha256:adad1a1bcf1c9e76346e091d22d23ac54ef28e1365117d99521631078dfec9de", size = 284286, upload-time = "2025-11-03T21:33:43.324Z" }, - { url = "https://files.pythonhosted.org/packages/20/31/32c0c4610cbc070362bf1d2e4ea86d1ea29014d400a6d6c2486fcfd57766/regex-2025.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:c54f768482cef41e219720013cd05933b6f971d9562544d691c68699bf2b6801", size = 274741, upload-time = "2025-11-03T21:33:45.557Z" }, +version = "2026.1.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/d2/e6ee96b7dff201a83f650241c52db8e5bd080967cb93211f57aa448dc9d6/regex-2026.1.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4e3dd93c8f9abe8aa4b6c652016da9a3afa190df5ad822907efe6b206c09896e", size = 488166, upload-time = "2026-01-14T23:13:46.408Z" }, + { url = "https://files.pythonhosted.org/packages/23/8a/819e9ce14c9f87af026d0690901b3931f3101160833e5d4c8061fa3a1b67/regex-2026.1.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:97499ff7862e868b1977107873dd1a06e151467129159a6ffd07b66706ba3a9f", size = 290632, upload-time = "2026-01-14T23:13:48.688Z" }, + { url = "https://files.pythonhosted.org/packages/d5/c3/23dfe15af25d1d45b07dfd4caa6003ad710dcdcb4c4b279909bdfe7a2de8/regex-2026.1.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0bda75ebcac38d884240914c6c43d8ab5fb82e74cde6da94b43b17c411aa4c2b", size = 288500, upload-time = "2026-01-14T23:13:50.503Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/31/1adc33e2f717df30d2f4d973f8776d2ba6ecf939301efab29fca57505c95/regex-2026.1.15-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7dcc02368585334f5bc81fc73a2a6a0bbade60e7d83da21cead622faf408f32c", size = 781670, upload-time = "2026-01-14T23:13:52.453Z" }, + { url = "https://files.pythonhosted.org/packages/23/ce/21a8a22d13bc4adcb927c27b840c948f15fc973e21ed2346c1bd0eae22dc/regex-2026.1.15-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:693b465171707bbe882a7a05de5e866f33c76aa449750bee94a8d90463533cc9", size = 850820, upload-time = "2026-01-14T23:13:54.894Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/3eeacdf587a4705a44484cd0b30e9230a0e602811fb3e2cc32268c70d509/regex-2026.1.15-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b0d190e6f013ea938623a58706d1469a62103fb2a241ce2873a9906e0386582c", size = 898777, upload-time = "2026-01-14T23:13:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/79/a9/1898a077e2965c35fc22796488141a22676eed2d73701e37c73ad7c0b459/regex-2026.1.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ff818702440a5878a81886f127b80127f5d50563753a28211482867f8318106", size = 791750, upload-time = "2026-01-14T23:13:58.527Z" }, + { url = "https://files.pythonhosted.org/packages/4c/84/e31f9d149a178889b3817212827f5e0e8c827a049ff31b4b381e76b26e2d/regex-2026.1.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f052d1be37ef35a54e394de66136e30fa1191fab64f71fc06ac7bc98c9a84618", size = 782674, upload-time = "2026-01-14T23:13:59.874Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ff/adf60063db24532add6a1676943754a5654dcac8237af024ede38244fd12/regex-2026.1.15-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:6bfc31a37fd1592f0c4fc4bfc674b5c42e52efe45b4b7a6a14f334cca4bcebe4", size = 767906, upload-time = "2026-01-14T23:14:01.298Z" }, + { url = "https://files.pythonhosted.org/packages/af/3e/e6a216cee1e2780fec11afe7fc47b6f3925d7264e8149c607ac389fd9b1a/regex-2026.1.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3d6ce5ae80066b319ae3bc62fd55a557c9491baa5efd0d355f0de08c4ba54e79", size = 774798, upload-time = "2026-01-14T23:14:02.715Z" }, + { url = "https://files.pythonhosted.org/packages/0f/98/23a4a8378a9208514ed3efc7e7850c27fa01e00ed8557c958df0335edc4a/regex-2026.1.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1704d204bd42b6bb80167df0e4554f35c255b579ba99616def38f69e14a5ccb9", size = 845861, upload-time = "2026-01-14T23:14:04.824Z" }, + { url = "https://files.pythonhosted.org/packages/f8/57/d7605a9d53bd07421a8785d349cd29677fe660e13674fa4c6cbd624ae354/regex-2026.1.15-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:e3174a5ed4171570dc8318afada56373aa9289eb6dc0d96cceb48e7358b0e220", size = 755648, upload-time = "2026-01-14T23:14:06.371Z" }, + { url = "https://files.pythonhosted.org/packages/6f/76/6f2e24aa192da1e299cc1101674a60579d3912391867ce0b946ba83e2194/regex-2026.1.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:87adf5bd6d72e3e17c9cb59ac4096b1faaf84b7eb3037a5ffa61c4b4370f0f13", size = 836250, upload-time = "2026-01-14T23:14:08.343Z" }, + { url = "https://files.pythonhosted.org/packages/11/3a/1f2a1d29453299a7858eab7759045fc3d9d1b429b088dec2dc85b6fa16a2/regex-2026.1.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e85dc94595f4d766bd7d872a9de5ede1ca8d3063f3bdf1e2c725f5eb411159e3", size = 779919, upload-time = "2026-01-14T23:14:09.954Z" }, + { url = "https://files.pythonhosted.org/packages/c0/67/eab9bc955c9dcc58e9b222c801e39cff7ca0b04261792a2149166ce7e792/regex-2026.1.15-cp310-cp310-win32.whl", hash = "sha256:21ca32c28c30d5d65fc9886ff576fc9b59bbca08933e844fa2363e530f4c8218", size = 265888, upload-time = 
"2026-01-14T23:14:11.35Z" }, + { url = "https://files.pythonhosted.org/packages/1d/62/31d16ae24e1f8803bddb0885508acecaec997fcdcde9c243787103119ae4/regex-2026.1.15-cp310-cp310-win_amd64.whl", hash = "sha256:3038a62fc7d6e5547b8915a3d927a0fbeef84cdbe0b1deb8c99bbd4a8961b52a", size = 277830, upload-time = "2026-01-14T23:14:12.908Z" }, + { url = "https://files.pythonhosted.org/packages/e5/36/5d9972bccd6417ecd5a8be319cebfd80b296875e7f116c37fb2a2deecebf/regex-2026.1.15-cp310-cp310-win_arm64.whl", hash = "sha256:505831646c945e3e63552cc1b1b9b514f0e93232972a2d5bedbcc32f15bc82e3", size = 270376, upload-time = "2026-01-14T23:14:14.782Z" }, + { url = "https://files.pythonhosted.org/packages/d0/c9/0c80c96eab96948363d270143138d671d5731c3a692b417629bf3492a9d6/regex-2026.1.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ae6020fb311f68d753b7efa9d4b9a5d47a5d6466ea0d5e3b5a471a960ea6e4a", size = 488168, upload-time = "2026-01-14T23:14:16.129Z" }, + { url = "https://files.pythonhosted.org/packages/17/f0/271c92f5389a552494c429e5cc38d76d1322eb142fb5db3c8ccc47751468/regex-2026.1.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eddf73f41225942c1f994914742afa53dc0d01a6e20fe14b878a1b1edc74151f", size = 290636, upload-time = "2026-01-14T23:14:17.715Z" }, + { url = "https://files.pythonhosted.org/packages/a0/f9/5f1fd077d106ca5655a0f9ff8f25a1ab55b92128b5713a91ed7134ff688e/regex-2026.1.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e8cd52557603f5c66a548f69421310886b28b7066853089e1a71ee710e1cdc1", size = 288496, upload-time = "2026-01-14T23:14:19.326Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e1/8f43b03a4968c748858ec77f746c286d81f896c2e437ccf050ebc5d3128c/regex-2026.1.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5170907244b14303edc5978f522f16c974f32d3aa92109fabc2af52411c9433b", size = 793503, upload-time = "2026-01-14T23:14:20.922Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/4e/a39a5e8edc5377a46a7c875c2f9a626ed3338cb3bb06931be461c3e1a34a/regex-2026.1.15-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2748c1ec0663580b4510bd89941a31560b4b439a0b428b49472a3d9944d11cd8", size = 860535, upload-time = "2026-01-14T23:14:22.405Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1c/9dce667a32a9477f7a2869c1c767dc00727284a9fa3ff5c09a5c6c03575e/regex-2026.1.15-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2f2775843ca49360508d080eaa87f94fa248e2c946bbcd963bb3aae14f333413", size = 907225, upload-time = "2026-01-14T23:14:23.897Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3c/87ca0a02736d16b6262921425e84b48984e77d8e4e572c9072ce96e66c30/regex-2026.1.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9ea2604370efc9a174c1b5dcc81784fb040044232150f7f33756049edfc9026", size = 800526, upload-time = "2026-01-14T23:14:26.039Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/647d5715aeea7c87bdcbd2f578f47b415f55c24e361e639fe8c0cc88878f/regex-2026.1.15-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0dcd31594264029b57bf16f37fd7248a70b3b764ed9e0839a8f271b2d22c0785", size = 773446, upload-time = "2026-01-14T23:14:28.109Z" }, + { url = "https://files.pythonhosted.org/packages/af/89/bf22cac25cb4ba0fe6bff52ebedbb65b77a179052a9d6037136ae93f42f4/regex-2026.1.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c08c1f3e34338256732bd6938747daa3c0d5b251e04b6e43b5813e94d503076e", size = 783051, upload-time = "2026-01-14T23:14:29.929Z" }, + { url = "https://files.pythonhosted.org/packages/1e/f4/6ed03e71dca6348a5188363a34f5e26ffd5db1404780288ff0d79513bce4/regex-2026.1.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e43a55f378df1e7a4fa3547c88d9a5a9b7113f653a66821bcea4718fe6c58763", size = 854485, 
upload-time = "2026-01-14T23:14:31.366Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9a/8e8560bd78caded8eb137e3e47612430a05b9a772caf60876435192d670a/regex-2026.1.15-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:f82110ab962a541737bd0ce87978d4c658f06e7591ba899192e2712a517badbb", size = 762195, upload-time = "2026-01-14T23:14:32.802Z" }, + { url = "https://files.pythonhosted.org/packages/38/6b/61fc710f9aa8dfcd764fe27d37edfaa023b1a23305a0d84fccd5adb346ea/regex-2026.1.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:27618391db7bdaf87ac6c92b31e8f0dfb83a9de0075855152b720140bda177a2", size = 845986, upload-time = "2026-01-14T23:14:34.898Z" }, + { url = "https://files.pythonhosted.org/packages/fd/2e/fbee4cb93f9d686901a7ca8d94285b80405e8c34fe4107f63ffcbfb56379/regex-2026.1.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bfb0d6be01fbae8d6655c8ca21b3b72458606c4aec9bbc932db758d47aba6db1", size = 788992, upload-time = "2026-01-14T23:14:37.116Z" }, + { url = "https://files.pythonhosted.org/packages/ed/14/3076348f3f586de64b1ab75a3fbabdaab7684af7f308ad43be7ef1849e55/regex-2026.1.15-cp311-cp311-win32.whl", hash = "sha256:b10e42a6de0e32559a92f2f8dc908478cc0fa02838d7dbe764c44dca3fa13569", size = 265893, upload-time = "2026-01-14T23:14:38.426Z" }, + { url = "https://files.pythonhosted.org/packages/0f/19/772cf8b5fc803f5c89ba85d8b1870a1ca580dc482aa030383a9289c82e44/regex-2026.1.15-cp311-cp311-win_amd64.whl", hash = "sha256:e9bf3f0bbdb56633c07d7116ae60a576f846efdd86a8848f8d62b749e1209ca7", size = 277840, upload-time = "2026-01-14T23:14:39.785Z" }, + { url = "https://files.pythonhosted.org/packages/78/84/d05f61142709474da3c0853222d91086d3e1372bcdab516c6fd8d80f3297/regex-2026.1.15-cp311-cp311-win_arm64.whl", hash = "sha256:41aef6f953283291c4e4e6850607bd71502be67779586a61472beacb315c97ec", size = 270374, upload-time = "2026-01-14T23:14:41.592Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/81/10d8cf43c807d0326efe874c1b79f22bfb0fb226027b0b19ebc26d301408/regex-2026.1.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1", size = 489398, upload-time = "2026-01-14T23:14:43.741Z" }, + { url = "https://files.pythonhosted.org/packages/90/b0/7c2a74e74ef2a7c32de724658a69a862880e3e4155cba992ba04d1c70400/regex-2026.1.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681", size = 291339, upload-time = "2026-01-14T23:14:45.183Z" }, + { url = "https://files.pythonhosted.org/packages/19/4d/16d0773d0c818417f4cc20aa0da90064b966d22cd62a8c46765b5bd2d643/regex-2026.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f", size = 289003, upload-time = "2026-01-14T23:14:47.25Z" }, + { url = "https://files.pythonhosted.org/packages/c6/e4/1fc4599450c9f0863d9406e944592d968b8d6dfd0d552a7d569e43bceada/regex-2026.1.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8a154cf6537ebbc110e24dabe53095e714245c272da9c1be05734bdad4a61aa", size = 798656, upload-time = "2026-01-14T23:14:48.77Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e6/59650d73a73fa8a60b3a590545bfcf1172b4384a7df2e7fe7b9aab4e2da9/regex-2026.1.15-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8050ba2e3ea1d8731a549e83c18d2f0999fbc99a5f6bd06b4c91449f55291804", size = 864252, upload-time = "2026-01-14T23:14:50.528Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ab/1d0f4d50a1638849a97d731364c9a80fa304fec46325e48330c170ee8e80/regex-2026.1.15-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf065240704cb8951cc04972cf107063917022511273e0969bdb34fc173456c", size = 912268, upload-time = 
"2026-01-14T23:14:52.952Z" }, + { url = "https://files.pythonhosted.org/packages/dd/df/0d722c030c82faa1d331d1921ee268a4e8fb55ca8b9042c9341c352f17fa/regex-2026.1.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c32bef3e7aeee75746748643667668ef941d28b003bfc89994ecf09a10f7a1b5", size = 803589, upload-time = "2026-01-14T23:14:55.182Z" }, + { url = "https://files.pythonhosted.org/packages/66/23/33289beba7ccb8b805c6610a8913d0131f834928afc555b241caabd422a9/regex-2026.1.15-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d5eaa4a4c5b1906bd0d2508d68927f15b81821f85092e06f1a34a4254b0e1af3", size = 775700, upload-time = "2026-01-14T23:14:56.707Z" }, + { url = "https://files.pythonhosted.org/packages/e7/65/bf3a42fa6897a0d3afa81acb25c42f4b71c274f698ceabd75523259f6688/regex-2026.1.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:86c1077a3cc60d453d4084d5b9649065f3bf1184e22992bd322e1f081d3117fb", size = 787928, upload-time = "2026-01-14T23:14:58.312Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f5/13bf65864fc314f68cdd6d8ca94adcab064d4d39dbd0b10fef29a9da48fc/regex-2026.1.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2b091aefc05c78d286657cd4db95f2e6313375ff65dcf085e42e4c04d9c8d410", size = 858607, upload-time = "2026-01-14T23:15:00.657Z" }, + { url = "https://files.pythonhosted.org/packages/a3/31/040e589834d7a439ee43fb0e1e902bc81bd58a5ba81acffe586bb3321d35/regex-2026.1.15-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:57e7d17f59f9ebfa9667e6e5a1c0127b96b87cb9cede8335482451ed00788ba4", size = 763729, upload-time = "2026-01-14T23:15:02.248Z" }, + { url = "https://files.pythonhosted.org/packages/9b/84/6921e8129687a427edf25a34a5594b588b6d88f491320b9de5b6339a4fcb/regex-2026.1.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c6c4dcdfff2c08509faa15d36ba7e5ef5fcfab25f1e8f85a0c8f45bc3a30725d", size = 850697, upload-time = "2026-01-14T23:15:03.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/87/3d06143d4b128f4229158f2de5de6c8f2485170c7221e61bf381313314b2/regex-2026.1.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf8ff04c642716a7f2048713ddc6278c5fd41faa3b9cab12607c7abecd012c22", size = 789849, upload-time = "2026-01-14T23:15:06.102Z" }, + { url = "https://files.pythonhosted.org/packages/77/69/c50a63842b6bd48850ebc7ab22d46e7a2a32d824ad6c605b218441814639/regex-2026.1.15-cp312-cp312-win32.whl", hash = "sha256:82345326b1d8d56afbe41d881fdf62f1926d7264b2fc1537f99ae5da9aad7913", size = 266279, upload-time = "2026-01-14T23:15:07.678Z" }, + { url = "https://files.pythonhosted.org/packages/f2/36/39d0b29d087e2b11fd8191e15e81cce1b635fcc845297c67f11d0d19274d/regex-2026.1.15-cp312-cp312-win_amd64.whl", hash = "sha256:4def140aa6156bc64ee9912383d4038f3fdd18fee03a6f222abd4de6357ce42a", size = 277166, upload-time = "2026-01-14T23:15:09.257Z" }, + { url = "https://files.pythonhosted.org/packages/28/32/5b8e476a12262748851fa8ab1b0be540360692325975b094e594dfebbb52/regex-2026.1.15-cp312-cp312-win_arm64.whl", hash = "sha256:c6c565d9a6e1a8d783c1948937ffc377dd5771e83bd56de8317c450a954d2056", size = 270415, upload-time = "2026-01-14T23:15:10.743Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2e/6870bb16e982669b674cce3ee9ff2d1d46ab80528ee6bcc20fb2292efb60/regex-2026.1.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e69d0deeb977ffe7ed3d2e4439360089f9c3f217ada608f0f88ebd67afb6385e", size = 489164, upload-time = "2026-01-14T23:15:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/dc/67/9774542e203849b0286badf67199970a44ebdb0cc5fb739f06e47ada72f8/regex-2026.1.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3601ffb5375de85a16f407854d11cca8fe3f5febbe3ac78fb2866bb220c74d10", size = 291218, upload-time = "2026-01-14T23:15:15.647Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/87/b0cda79f22b8dee05f774922a214da109f9a4c0eca5da2c9d72d77ea062c/regex-2026.1.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4c5ef43b5c2d4114eb8ea424bb8c9cec01d5d17f242af88b2448f5ee81caadbc", size = 288895, upload-time = "2026-01-14T23:15:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/3b/6a/0041f0a2170d32be01ab981d6346c83a8934277d82c780d60b127331f264/regex-2026.1.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:968c14d4f03e10b2fd960f1d5168c1f0ac969381d3c1fcc973bc45fb06346599", size = 798680, upload-time = "2026-01-14T23:15:19.342Z" }, + { url = "https://files.pythonhosted.org/packages/58/de/30e1cfcdbe3e891324aa7568b7c968771f82190df5524fabc1138cb2d45a/regex-2026.1.15-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56a5595d0f892f214609c9f76b41b7428bed439d98dc961efafdd1354d42baae", size = 864210, upload-time = "2026-01-14T23:15:22.005Z" }, + { url = "https://files.pythonhosted.org/packages/64/44/4db2f5c5ca0ccd40ff052ae7b1e9731352fcdad946c2b812285a7505ca75/regex-2026.1.15-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf650f26087363434c4e560011f8e4e738f6f3e029b85d4904c50135b86cfa5", size = 912358, upload-time = "2026-01-14T23:15:24.569Z" }, + { url = "https://files.pythonhosted.org/packages/79/b6/e6a5665d43a7c42467138c8a2549be432bad22cbd206f5ec87162de74bd7/regex-2026.1.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18388a62989c72ac24de75f1449d0fb0b04dfccd0a1a7c1c43af5eb503d890f6", size = 803583, upload-time = "2026-01-14T23:15:26.526Z" }, + { url = "https://files.pythonhosted.org/packages/e7/53/7cd478222169d85d74d7437e74750005e993f52f335f7c04ff7adfda3310/regex-2026.1.15-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:6d220a2517f5893f55daac983bfa9fe998a7dbcaee4f5d27a88500f8b7873788", size = 775782, upload-time = "2026-01-14T23:15:29.352Z" }, + { url = "https://files.pythonhosted.org/packages/ca/b5/75f9a9ee4b03a7c009fe60500fe550b45df94f0955ca29af16333ef557c5/regex-2026.1.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9c08c2fbc6120e70abff5d7f28ffb4d969e14294fb2143b4b5c7d20e46d1714", size = 787978, upload-time = "2026-01-14T23:15:31.295Z" }, + { url = "https://files.pythonhosted.org/packages/72/b3/79821c826245bbe9ccbb54f6eadb7879c722fd3e0248c17bfc90bf54e123/regex-2026.1.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7ef7d5d4bd49ec7364315167a4134a015f61e8266c6d446fc116a9ac4456e10d", size = 858550, upload-time = "2026-01-14T23:15:33.558Z" }, + { url = "https://files.pythonhosted.org/packages/4a/85/2ab5f77a1c465745bfbfcb3ad63178a58337ae8d5274315e2cc623a822fa/regex-2026.1.15-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6e42844ad64194fa08d5ccb75fe6a459b9b08e6d7296bd704460168d58a388f3", size = 763747, upload-time = "2026-01-14T23:15:35.206Z" }, + { url = "https://files.pythonhosted.org/packages/6d/84/c27df502d4bfe2873a3e3a7cf1bdb2b9cc10284d1a44797cf38bed790470/regex-2026.1.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cfecdaa4b19f9ca534746eb3b55a5195d5c95b88cac32a205e981ec0a22b7d31", size = 850615, upload-time = "2026-01-14T23:15:37.523Z" }, + { url = "https://files.pythonhosted.org/packages/7d/b7/658a9782fb253680aa8ecb5ccbb51f69e088ed48142c46d9f0c99b46c575/regex-2026.1.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:08df9722d9b87834a3d701f3fca570b2be115654dbfd30179f30ab2f39d606d3", size = 789951, upload-time = "2026-01-14T23:15:39.582Z" }, + { url = "https://files.pythonhosted.org/packages/fc/2a/5928af114441e059f15b2f63e188bd00c6529b3051c974ade7444b85fcda/regex-2026.1.15-cp313-cp313-win32.whl", hash = "sha256:d426616dae0967ca225ab12c22274eb816558f2f99ccb4a1d52ca92e8baf180f", size = 266275, upload-time = 
"2026-01-14T23:15:42.108Z" }, + { url = "https://files.pythonhosted.org/packages/4f/16/5bfbb89e435897bff28cf0352a992ca719d9e55ebf8b629203c96b6ce4f7/regex-2026.1.15-cp313-cp313-win_amd64.whl", hash = "sha256:febd38857b09867d3ed3f4f1af7d241c5c50362e25ef43034995b77a50df494e", size = 277145, upload-time = "2026-01-14T23:15:44.244Z" }, + { url = "https://files.pythonhosted.org/packages/56/c1/a09ff7392ef4233296e821aec5f78c51be5e91ffde0d163059e50fd75835/regex-2026.1.15-cp313-cp313-win_arm64.whl", hash = "sha256:8e32f7896f83774f91499d239e24cebfadbc07639c1494bb7213983842348337", size = 270411, upload-time = "2026-01-14T23:15:45.858Z" }, + { url = "https://files.pythonhosted.org/packages/3c/38/0cfd5a78e5c6db00e6782fdae70458f89850ce95baa5e8694ab91d89744f/regex-2026.1.15-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ec94c04149b6a7b8120f9f44565722c7ae31b7a6d2275569d2eefa76b83da3be", size = 492068, upload-time = "2026-01-14T23:15:47.616Z" }, + { url = "https://files.pythonhosted.org/packages/50/72/6c86acff16cb7c959c4355826bbf06aad670682d07c8f3998d9ef4fee7cd/regex-2026.1.15-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40c86d8046915bb9aeb15d3f3f15b6fd500b8ea4485b30e1bbc799dab3fe29f8", size = 292756, upload-time = "2026-01-14T23:15:49.307Z" }, + { url = "https://files.pythonhosted.org/packages/4e/58/df7fb69eadfe76526ddfce28abdc0af09ffe65f20c2c90932e89d705153f/regex-2026.1.15-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:726ea4e727aba21643205edad8f2187ec682d3305d790f73b7a51c7587b64bdd", size = 291114, upload-time = "2026-01-14T23:15:51.484Z" }, + { url = "https://files.pythonhosted.org/packages/ed/6c/a4011cd1cf96b90d2cdc7e156f91efbd26531e822a7fbb82a43c1016678e/regex-2026.1.15-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb740d044aff31898804e7bf1181cc72c03d11dfd19932b9911ffc19a79070a", size = 807524, upload-time = "2026-01-14T23:15:53.102Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/25/a53ffb73183f69c3e9f4355c4922b76d2840aee160af6af5fac229b6201d/regex-2026.1.15-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05d75a668e9ea16f832390d22131fe1e8acc8389a694c8febc3e340b0f810b93", size = 873455, upload-time = "2026-01-14T23:15:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/66/0b/8b47fc2e8f97d9b4a851736f3890a5f786443aa8901061c55f24c955f45b/regex-2026.1.15-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d991483606f3dbec93287b9f35596f41aa2e92b7c2ebbb935b63f409e243c9af", size = 915007, upload-time = "2026-01-14T23:15:57.041Z" }, + { url = "https://files.pythonhosted.org/packages/c2/fa/97de0d681e6d26fabe71968dbee06dd52819e9a22fdce5dac7256c31ed84/regex-2026.1.15-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:194312a14819d3e44628a44ed6fea6898fdbecb0550089d84c403475138d0a09", size = 812794, upload-time = "2026-01-14T23:15:58.916Z" }, + { url = "https://files.pythonhosted.org/packages/22/38/e752f94e860d429654aa2b1c51880bff8dfe8f084268258adf9151cf1f53/regex-2026.1.15-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe2fda4110a3d0bc163c2e0664be44657431440722c5c5315c65155cab92f9e5", size = 781159, upload-time = "2026-01-14T23:16:00.817Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a7/d739ffaef33c378fc888302a018d7f81080393d96c476b058b8c64fd2b0d/regex-2026.1.15-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:124dc36c85d34ef2d9164da41a53c1c8c122cfb1f6e1ec377a1f27ee81deb794", size = 795558, upload-time = "2026-01-14T23:16:03.267Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c4/542876f9a0ac576100fc73e9c75b779f5c31e3527576cfc9cb3009dcc58a/regex-2026.1.15-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1774cd1981cd212506a23a14dba7fdeaee259f5deba2df6229966d9911e767a", size = 868427, 
upload-time = "2026-01-14T23:16:05.646Z" }, + { url = "https://files.pythonhosted.org/packages/fc/0f/d5655bea5b22069e32ae85a947aa564912f23758e112cdb74212848a1a1b/regex-2026.1.15-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:b5f7d8d2867152cdb625e72a530d2ccb48a3d199159144cbdd63870882fb6f80", size = 769939, upload-time = "2026-01-14T23:16:07.542Z" }, + { url = "https://files.pythonhosted.org/packages/20/06/7e18a4fa9d326daeda46d471a44ef94201c46eaa26dbbb780b5d92cbfdda/regex-2026.1.15-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:492534a0ab925d1db998defc3c302dae3616a2fc3fe2e08db1472348f096ddf2", size = 854753, upload-time = "2026-01-14T23:16:10.395Z" }, + { url = "https://files.pythonhosted.org/packages/3b/67/dc8946ef3965e166f558ef3b47f492bc364e96a265eb4a2bb3ca765c8e46/regex-2026.1.15-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c661fc820cfb33e166bf2450d3dadbda47c8d8981898adb9b6fe24e5e582ba60", size = 799559, upload-time = "2026-01-14T23:16:12.347Z" }, + { url = "https://files.pythonhosted.org/packages/a5/61/1bba81ff6d50c86c65d9fd84ce9699dd106438ee4cdb105bf60374ee8412/regex-2026.1.15-cp313-cp313t-win32.whl", hash = "sha256:99ad739c3686085e614bf77a508e26954ff1b8f14da0e3765ff7abbf7799f952", size = 268879, upload-time = "2026-01-14T23:16:14.049Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/cef7d4c5fb0ea3ac5c775fd37db5747f7378b29526cc83f572198924ff47/regex-2026.1.15-cp313-cp313t-win_amd64.whl", hash = "sha256:32655d17905e7ff8ba5c764c43cb124e34a9245e45b83c22e81041e1071aee10", size = 280317, upload-time = "2026-01-14T23:16:15.718Z" }, + { url = "https://files.pythonhosted.org/packages/b4/52/4317f7a5988544e34ab57b4bde0f04944c4786128c933fb09825924d3e82/regex-2026.1.15-cp313-cp313t-win_arm64.whl", hash = "sha256:b2a13dd6a95e95a489ca242319d18fc02e07ceb28fa9ad146385194d95b3c829", size = 271551, upload-time = "2026-01-14T23:16:17.533Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/0a/47fa888ec7cbbc7d62c5f2a6a888878e76169170ead271a35239edd8f0e8/regex-2026.1.15-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:d920392a6b1f353f4aa54328c867fec3320fa50657e25f64abf17af054fc97ac", size = 489170, upload-time = "2026-01-14T23:16:19.835Z" }, + { url = "https://files.pythonhosted.org/packages/ac/c4/d000e9b7296c15737c9301708e9e7fbdea009f8e93541b6b43bdb8219646/regex-2026.1.15-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b5a28980a926fa810dbbed059547b02783952e2efd9c636412345232ddb87ff6", size = 291146, upload-time = "2026-01-14T23:16:21.541Z" }, + { url = "https://files.pythonhosted.org/packages/f9/b6/921cc61982e538682bdf3bdf5b2c6ab6b34368da1f8e98a6c1ddc503c9cf/regex-2026.1.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:621f73a07595d83f28952d7bd1e91e9d1ed7625fb7af0064d3516674ec93a2a2", size = 288986, upload-time = "2026-01-14T23:16:23.381Z" }, + { url = "https://files.pythonhosted.org/packages/ca/33/eb7383dde0bbc93f4fb9d03453aab97e18ad4024ac7e26cef8d1f0a2cff0/regex-2026.1.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d7d92495f47567a9b1669c51fc8d6d809821849063d168121ef801bbc213846", size = 799098, upload-time = "2026-01-14T23:16:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/27/56/b664dccae898fc8d8b4c23accd853f723bde0f026c747b6f6262b688029c/regex-2026.1.15-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8dd16fba2758db7a3780a051f245539c4451ca20910f5a5e6ea1c08d06d4a76b", size = 864980, upload-time = "2026-01-14T23:16:27.297Z" }, + { url = "https://files.pythonhosted.org/packages/16/40/0999e064a170eddd237bae9ccfcd8f28b3aa98a38bf727a086425542a4fc/regex-2026.1.15-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1e1808471fbe44c1a63e5f577a1d5f02fe5d66031dcbdf12f093ffc1305a858e", size = 911607, upload-time = 
"2026-01-14T23:16:29.235Z" }, + { url = "https://files.pythonhosted.org/packages/07/78/c77f644b68ab054e5a674fb4da40ff7bffb2c88df58afa82dbf86573092d/regex-2026.1.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0751a26ad39d4f2ade8fe16c59b2bf5cb19eb3d2cd543e709e583d559bd9efde", size = 803358, upload-time = "2026-01-14T23:16:31.369Z" }, + { url = "https://files.pythonhosted.org/packages/27/31/d4292ea8566eaa551fafc07797961c5963cf5235c797cc2ae19b85dfd04d/regex-2026.1.15-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0f0c7684c7f9ca241344ff95a1de964f257a5251968484270e91c25a755532c5", size = 775833, upload-time = "2026-01-14T23:16:33.141Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b2/cff3bf2fea4133aa6fb0d1e370b37544d18c8350a2fa118c7e11d1db0e14/regex-2026.1.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:74f45d170a21df41508cb67165456538425185baaf686281fa210d7e729abc34", size = 788045, upload-time = "2026-01-14T23:16:35.005Z" }, + { url = "https://files.pythonhosted.org/packages/8d/99/2cb9b69045372ec877b6f5124bda4eb4253bc58b8fe5848c973f752bc52c/regex-2026.1.15-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f1862739a1ffb50615c0fde6bae6569b5efbe08d98e59ce009f68a336f64da75", size = 859374, upload-time = "2026-01-14T23:16:36.919Z" }, + { url = "https://files.pythonhosted.org/packages/09/16/710b0a5abe8e077b1729a562d2f297224ad079f3a66dce46844c193416c8/regex-2026.1.15-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:453078802f1b9e2b7303fb79222c054cb18e76f7bdc220f7530fdc85d319f99e", size = 763940, upload-time = "2026-01-14T23:16:38.685Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/7585c8e744e40eb3d32f119191969b91de04c073fca98ec14299041f6e7e/regex-2026.1.15-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:a30a68e89e5a218b8b23a52292924c1f4b245cb0c68d1cce9aec9bbda6e2c160", size = 850112, upload-time = "2026-01-14T23:16:40.646Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/d6/43e1dd85df86c49a347aa57c1f69d12c652c7b60e37ec162e3096194a278/regex-2026.1.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9479cae874c81bf610d72b85bb681a94c95722c127b55445285fb0e2c82db8e1", size = 789586, upload-time = "2026-01-14T23:16:42.799Z" }, + { url = "https://files.pythonhosted.org/packages/93/38/77142422f631e013f316aaae83234c629555729a9fbc952b8a63ac91462a/regex-2026.1.15-cp314-cp314-win32.whl", hash = "sha256:d639a750223132afbfb8f429c60d9d318aeba03281a5f1ab49f877456448dcf1", size = 271691, upload-time = "2026-01-14T23:16:44.671Z" }, + { url = "https://files.pythonhosted.org/packages/4a/a9/ab16b4649524ca9e05213c1cdbb7faa85cc2aa90a0230d2f796cbaf22736/regex-2026.1.15-cp314-cp314-win_amd64.whl", hash = "sha256:4161d87f85fa831e31469bfd82c186923070fc970b9de75339b68f0c75b51903", size = 280422, upload-time = "2026-01-14T23:16:46.607Z" }, + { url = "https://files.pythonhosted.org/packages/be/2a/20fd057bf3521cb4791f69f869635f73e0aaf2b9ad2d260f728144f9047c/regex-2026.1.15-cp314-cp314-win_arm64.whl", hash = "sha256:91c5036ebb62663a6b3999bdd2e559fd8456d17e2b485bf509784cd31a8b1705", size = 273467, upload-time = "2026-01-14T23:16:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/ad/77/0b1e81857060b92b9cad239104c46507dd481b3ff1fa79f8e7f865aae38a/regex-2026.1.15-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ee6854c9000a10938c79238de2379bea30c82e4925a371711af45387df35cab8", size = 492073, upload-time = "2026-01-14T23:16:51.154Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/f8302b0c208b22c1e4f423147e1913fd475ddd6230565b299925353de644/regex-2026.1.15-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c2b80399a422348ce5de4fe40c418d6299a0fa2803dd61dc0b1a2f28e280fcf", size = 292757, upload-time = "2026-01-14T23:16:53.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/f0/ef55de2460f3b4a6da9d9e7daacd0cb79d4ef75c64a2af316e68447f0df0/regex-2026.1.15-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:dca3582bca82596609959ac39e12b7dad98385b4fefccb1151b937383cec547d", size = 291122, upload-time = "2026-01-14T23:16:55.383Z" }, + { url = "https://files.pythonhosted.org/packages/cf/55/bb8ccbacabbc3a11d863ee62a9f18b160a83084ea95cdfc5d207bfc3dd75/regex-2026.1.15-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71d476caa6692eea743ae5ea23cde3260677f70122c4d258ca952e5c2d4e84", size = 807761, upload-time = "2026-01-14T23:16:57.251Z" }, + { url = "https://files.pythonhosted.org/packages/8f/84/f75d937f17f81e55679a0509e86176e29caa7298c38bd1db7ce9c0bf6075/regex-2026.1.15-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c243da3436354f4af6c3058a3f81a97d47ea52c9bd874b52fd30274853a1d5df", size = 873538, upload-time = "2026-01-14T23:16:59.349Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d9/0da86327df70349aa8d86390da91171bd3ca4f0e7c1d1d453a9c10344da3/regex-2026.1.15-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8355ad842a7c7e9e5e55653eade3b7d1885ba86f124dd8ab1f722f9be6627434", size = 915066, upload-time = "2026-01-14T23:17:01.607Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5e/f660fb23fc77baa2a61aa1f1fe3a4eea2bbb8a286ddec148030672e18834/regex-2026.1.15-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f192a831d9575271a22d804ff1a5355355723f94f31d9eef25f0d45a152fdc1a", size = 812938, upload-time = "2026-01-14T23:17:04.366Z" }, + { url = "https://files.pythonhosted.org/packages/69/33/a47a29bfecebbbfd1e5cd3f26b28020a97e4820f1c5148e66e3b7d4b4992/regex-2026.1.15-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:166551807ec20d47ceaeec380081f843e88c8949780cd42c40f18d16168bed10", size = 781314, upload-time = "2026-01-14T23:17:06.378Z" }, + { url = "https://files.pythonhosted.org/packages/65/ec/7ec2bbfd4c3f4e494a24dec4c6943a668e2030426b1b8b949a6462d2c17b/regex-2026.1.15-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9ca1cbdc0fbfe5e6e6f8221ef2309988db5bcede52443aeaee9a4ad555e0dac", size = 795652, upload-time = "2026-01-14T23:17:08.521Z" }, + { url = "https://files.pythonhosted.org/packages/46/79/a5d8651ae131fe27d7c521ad300aa7f1c7be1dbeee4d446498af5411b8a9/regex-2026.1.15-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b30bcbd1e1221783c721483953d9e4f3ab9c5d165aa709693d3f3946747b1aea", size = 868550, upload-time = "2026-01-14T23:17:10.573Z" }, + { url = "https://files.pythonhosted.org/packages/06/b7/25635d2809664b79f183070786a5552dd4e627e5aedb0065f4e3cf8ee37d/regex-2026.1.15-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2a8d7b50c34578d0d3bf7ad58cde9652b7d683691876f83aedc002862a35dc5e", size = 769981, upload-time = "2026-01-14T23:17:12.871Z" }, + { url = "https://files.pythonhosted.org/packages/16/8b/fc3fcbb2393dcfa4a6c5ffad92dc498e842df4581ea9d14309fcd3c55fb9/regex-2026.1.15-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9d787e3310c6a6425eb346be4ff2ccf6eece63017916fd77fe8328c57be83521", size = 854780, upload-time = "2026-01-14T23:17:14.837Z" }, + { url = "https://files.pythonhosted.org/packages/d0/38/dde117c76c624713c8a2842530be9c93ca8b606c0f6102d86e8cd1ce8bea/regex-2026.1.15-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:619843841e220adca114118533a574a9cd183ed8a28b85627d2844c500a2b0db", size = 799778, upload-time = "2026-01-14T23:17:17.369Z" }, + { url = "https://files.pythonhosted.org/packages/e3/0d/3a6cfa9ae99606afb612d8fb7a66b245a9d5ff0f29bb347c8a30b6ad561b/regex-2026.1.15-cp314-cp314t-win32.whl", hash = "sha256:e90b8db97f6f2c97eb045b51a6b2c5ed69cedd8392459e0642d4199b94fabd7e", size = 274667, upload-time = 
"2026-01-14T23:17:19.301Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b2/297293bb0742fd06b8d8e2572db41a855cdf1cae0bf009b1cb74fe07e196/regex-2026.1.15-cp314-cp314t-win_amd64.whl", hash = "sha256:5ef19071f4ac9f0834793af85bd04a920b4407715624e40cb7a0631a11137cdf", size = 284386, upload-time = "2026-01-14T23:17:21.231Z" }, + { url = "https://files.pythonhosted.org/packages/95/e4/a3b9480c78cf8ee86626cb06f8d931d74d775897d44201ccb813097ae697/regex-2026.1.15-cp314-cp314t-win_arm64.whl", hash = "sha256:ca89c5e596fc05b015f27561b3793dc2fa0917ea0d7507eebb448efd35274a70", size = 274837, upload-time = "2026-01-14T23:17:23.146Z" }, ] [[package]] @@ -7498,7 +8517,7 @@ wheels = [ [[package]] name = "rerun-sdk" -version = "0.28.2" +version = "0.29.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -7509,10 +8528,10 @@ dependencies = [ { name = "typing-extensions" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/7d/a86b9a2cf182942f15ce92aa2ebb30c55ccd9eb1f7d8ab6eecea5b929495/rerun_sdk-0.28.2-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:bbc573a74863f71d1f64dc386af8f7b5d92745828421a8968194ee805b9793f5", size = 108607081, upload-time = "2026-01-08T13:48:21.955Z" }, - { url = "https://files.pythonhosted.org/packages/92/f5/721f0aae6c071eb6ea7b04917041e81301cd76741d6e18bbc97ffe94cbf1/rerun_sdk-0.28.2-cp310-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:968006d0b7d401910aca5c8f3d913c626e6d878739df4d95817f9523f5a9e356", size = 116446459, upload-time = "2026-01-08T13:48:29.794Z" }, - { url = "https://files.pythonhosted.org/packages/11/6f/433ed77f07be45be33295df3f749239304eef2007869701541fc57dbb8c0/rerun_sdk-0.28.2-cp310-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0422373e1b8650472225d5dabdd633393305dc871227cd61ddf3115c2d45c797", size = 124332311, upload-time = "2026-01-08T13:48:36.892Z" }, - { url = 
"https://files.pythonhosted.org/packages/e9/71/403e9fafa345b5de7f12584984801b781adac622e902e9c99122f6c27c4c/rerun_sdk-0.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:b1ed6fa51baa7c9608bc9e4d0f11aa332d50a58bb5c2787ed57e1ffaf1011cbe", size = 106147976, upload-time = "2026-01-08T13:48:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/ad/d1/6b31d12e726732dced50806b1cb0b5fb55c478ee4ac23d68f50db888cf2c/rerun_sdk-0.29.2-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:ead2b4bb93cac553c9b524442e49ba5f34c30ab9db2225e1ed2ce2ee235ea46b", size = 112371441, upload-time = "2026-02-12T19:31:07.296Z" }, + { url = "https://files.pythonhosted.org/packages/dc/81/9b3619b37c8a7492ccbe9ea172dedc5ffb66b83ded82b8f443c1958fe1c0/rerun_sdk-0.29.2-cp310-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a97f5601cb50c14ec665525c0cf65056167de1306958a0526ff1e8d384320076", size = 120304992, upload-time = "2026-02-12T19:31:12.499Z" }, + { url = "https://files.pythonhosted.org/packages/63/43/2590293ce7985cbb88f9fdd67b90c36b954116f6c75639b378f098b3ff61/rerun_sdk-0.29.2-cp310-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:392a7f2c3db660716b660f4b164f9b73a076100378781a3a2551edf290d00c23", size = 125305451, upload-time = "2026-02-12T19:31:17.319Z" }, + { url = "https://files.pythonhosted.org/packages/bc/06/b73e04344f2220d48c0583270a54873bca3b93ab476cf09629941afac8e5/rerun_sdk-0.29.2-cp310-abi3-win_amd64.whl", hash = "sha256:a3ccfbac8df89519a075f9dc3499a9e715c653a19a17de00d39fd218a589e009", size = 108289765, upload-time = "2026-02-12T19:31:22.616Z" }, ] [[package]] @@ -7526,15 +8545,42 @@ wheels = [ [[package]] name = "rich" -version = "14.2.0" +version = "14.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = 
"sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +sdist = { url = "https://files.pythonhosted.org/packages/74/99/a4cab2acbb884f80e558b0771e97e21e939c5dfb460f488d19df485e8298/rich-14.3.2.tar.gz", hash = "sha256:e712f11c1a562a11843306f5ed999475f09ac31ffb64281f73ab29ffdda8b3b8", size = 230143, upload-time = "2026-02-01T16:20:47.908Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl", hash = "sha256:08e67c3e90884651da3239ea668222d19bea7b589149d8014a21c633420dbb69", size = 309963, upload-time = "2026-02-01T16:20:46.078Z" }, +] + +[[package]] +name = "rich-click" +version = "1.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "rich" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/27/091e140ea834272188e63f8dd6faac1f5c687582b687197b3e0ec3c78ebf/rich_click-1.9.7.tar.gz", hash = "sha256:022997c1e30731995bdbc8ec2f82819340d42543237f033a003c7b1f843fc5dc", size = 74838, upload-time = "2026-01-31T04:29:27.707Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/e5/d708d262b600a352abe01c2ae360d8ff75b0af819b78e9af293191d928e6/rich_click-1.9.7-py3-none-any.whl", hash = "sha256:2f99120fca78f536e07b114d3b60333bc4bb2a0969053b1250869bcdc1b5351b", size = 71491, upload-time = "2026-01-31T04:29:26.777Z" }, +] + +[[package]] +name = "rope" +version = "1.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytoolconfig", extra = ["global"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/3a/85e60d154f26ecdc1d47a63ac58bd9f32a5a9f3f771f6672197f02a00ade/rope-1.14.0.tar.gz", hash = 
"sha256:8803e3b667315044f6270b0c69a10c0679f9f322ed8efe6245a93ceb7658da69", size = 296801, upload-time = "2025-07-12T17:46:07.786Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, + { url = "https://files.pythonhosted.org/packages/75/35/130469d1901da2b3a5a377539b4ffcd8a5c983f1c9e3ba5ffdd8d71ae314/rope-1.14.0-py3-none-any.whl", hash = "sha256:00a7ea8c0c376fc0b053b2f2f8ef3bfb8b50fecf1ebf3eb80e4f8bd7f1941918", size = 207143, upload-time = "2025-07-12T17:46:05.928Z" }, ] [[package]] @@ -7659,34 +8705,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/b7/b95708304cd49b7b6f82fdd039f1748b66ec2b21d6a45180910802f1abf1/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e", size = 562191, upload-time = "2025-11-30T20:24:36.853Z" }, ] -[[package]] -name = "rsa" -version = "4.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyasn1" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, -] - -[[package]] -name = "rtree" -version = "1.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/95/09/7302695875a019514de9a5dd17b8320e7a19d6e7bc8f85dcfb79a4ce2da3/rtree-1.4.1.tar.gz", hash = "sha256:c6b1b3550881e57ebe530cc6cffefc87cd9bf49c30b37b894065a9f810875e46", size = 52425, upload-time = "2025-08-13T19:32:01.413Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/d9/108cd989a4c0954e60b3cdc86fd2826407702b5375f6dfdab2802e5fed98/rtree-1.4.1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:d672184298527522d4914d8ae53bf76982b86ca420b0acde9298a7a87d81d4a4", size = 468484, upload-time = "2025-08-13T19:31:50.593Z" }, - { url = "https://files.pythonhosted.org/packages/f3/cf/2710b6fd6b07ea0aef317b29f335790ba6adf06a28ac236078ed9bd8a91d/rtree-1.4.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a7e48d805e12011c2cf739a29d6a60ae852fb1de9fc84220bbcef67e6e595d7d", size = 436325, upload-time = "2025-08-13T19:31:52.367Z" }, - { url = "https://files.pythonhosted.org/packages/55/e1/4d075268a46e68db3cac51846eb6a3ab96ed481c585c5a1ad411b3c23aad/rtree-1.4.1-py3-none-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:efa8c4496e31e9ad58ff6c7df89abceac7022d906cb64a3e18e4fceae6b77f65", size = 459789, upload-time = "2025-08-13T19:31:53.926Z" }, - { url = "https://files.pythonhosted.org/packages/d1/75/e5d44be90525cd28503e7f836d077ae6663ec0687a13ba7810b4114b3668/rtree-1.4.1-py3-none-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12de4578f1b3381a93a655846900be4e3d5f4cd5e306b8b00aa77c1121dc7e8c", size = 507644, upload-time = "2025-08-13T19:31:55.164Z" }, - { url = "https://files.pythonhosted.org/packages/fd/85/b8684f769a142163b52859a38a486493b05bafb4f2fb71d4f945de28ebf9/rtree-1.4.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b558edda52eca3e6d1ee629042192c65e6b7f2c150d6d6cd207ce82f85be3967", size = 1454478, upload-time = "2025-08-13T19:31:56.808Z" }, - { url = 
"https://files.pythonhosted.org/packages/e9/a4/c2292b95246b9165cc43a0c3757e80995d58bc9b43da5cb47ad6e3535213/rtree-1.4.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f155bc8d6bac9dcd383481dee8c130947a4866db1d16cb6dff442329a038a0dc", size = 1555140, upload-time = "2025-08-13T19:31:58.031Z" }, - { url = "https://files.pythonhosted.org/packages/74/25/5282c8270bfcd620d3e73beb35b40ac4ab00f0a898d98ebeb41ef0989ec8/rtree-1.4.1-py3-none-win_amd64.whl", hash = "sha256:efe125f416fd27150197ab8521158662943a40f87acab8028a1aac4ad667a489", size = 389358, upload-time = "2025-08-13T19:31:59.247Z" }, - { url = "https://files.pythonhosted.org/packages/3f/50/0a9e7e7afe7339bd5e36911f0ceb15fed51945836ed803ae5afd661057fd/rtree-1.4.1-py3-none-win_arm64.whl", hash = "sha256:3d46f55729b28138e897ffef32f7ce93ac335cb67f9120125ad3742a220800f0", size = 355253, upload-time = "2025-08-13T19:32:00.296Z" }, -] - [[package]] name = "ruff" version = "0.14.3" @@ -7794,12 +8812,16 @@ name = "scikit-learn" version = "1.8.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 
'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -7810,7 +8832,7 @@ resolution-markers = [ dependencies = [ { name = "joblib", marker = "python_full_version >= '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "threadpoolctl", marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0e/d4/40988bf3b8e34feec1d0e6a051446b1f66225f8529b9309becaeef62b6c4/scikit_learn-1.8.0.tar.gz", hash = "sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd", size = 7335585, upload-time = "2025-12-10T07:08:53.618Z" } @@ -7917,15 +8939,19 @@ wheels = [ [[package]] name = "scipy" -version = "1.16.3" +version = "1.17.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - 
"python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -7936,97 +8962,99 @@ resolution-markers = [ dependencies = [ { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, 
marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883, upload-time = "2025-10-28T17:38:54.068Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/5f/6f37d7439de1455ce9c5a556b8d1db0979f03a796c030bafdf08d35b7bf9/scipy-1.16.3-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:40be6cf99e68b6c4321e9f8782e7d5ff8265af28ef2cd56e9c9b2638fa08ad97", size = 36630881, upload-time = "2025-10-28T17:31:47.104Z" }, - { url = "https://files.pythonhosted.org/packages/7c/89/d70e9f628749b7e4db2aa4cd89735502ff3f08f7b9b27d2e799485987cd9/scipy-1.16.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:8be1ca9170fcb6223cc7c27f4305d680ded114a1567c0bd2bfcbf947d1b17511", size = 28941012, upload-time = "2025-10-28T17:31:53.411Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a8/0e7a9a6872a923505dbdf6bb93451edcac120363131c19013044a1e7cb0c/scipy-1.16.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bea0a62734d20d67608660f69dcda23e7f90fb4ca20974ab80b6ed40df87a005", size = 20931935, upload-time = "2025-10-28T17:31:57.361Z" }, - { url = "https://files.pythonhosted.org/packages/bd/c7/020fb72bd79ad798e4dbe53938543ecb96b3a9ac3fe274b7189e23e27353/scipy-1.16.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:2a207a6ce9c24f1951241f4693ede2d393f59c07abc159b2cb2be980820e01fb", size = 23534466, upload-time = "2025-10-28T17:32:01.875Z" }, - { url = "https://files.pythonhosted.org/packages/be/a0/668c4609ce6dbf2f948e167836ccaf897f95fb63fa231c87da7558a374cd/scipy-1.16.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:532fb5ad6a87e9e9cd9c959b106b73145a03f04c7d57ea3e6f6bb60b86ab0876", size = 33593618, upload-time = "2025-10-28T17:32:06.902Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/6e/8942461cf2636cdae083e3eb72622a7fbbfa5cf559c7d13ab250a5dbdc01/scipy-1.16.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0151a0749efeaaab78711c78422d413c583b8cdd2011a3c1d6c794938ee9fdb2", size = 35899798, upload-time = "2025-10-28T17:32:12.665Z" }, - { url = "https://files.pythonhosted.org/packages/79/e8/d0f33590364cdbd67f28ce79368b373889faa4ee959588beddf6daef9abe/scipy-1.16.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7180967113560cca57418a7bc719e30366b47959dd845a93206fbed693c867e", size = 36226154, upload-time = "2025-10-28T17:32:17.961Z" }, - { url = "https://files.pythonhosted.org/packages/39/c1/1903de608c0c924a1749c590064e65810f8046e437aba6be365abc4f7557/scipy-1.16.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:deb3841c925eeddb6afc1e4e4a45e418d19ec7b87c5df177695224078e8ec733", size = 38878540, upload-time = "2025-10-28T17:32:23.907Z" }, - { url = "https://files.pythonhosted.org/packages/f1/d0/22ec7036ba0b0a35bccb7f25ab407382ed34af0b111475eb301c16f8a2e5/scipy-1.16.3-cp311-cp311-win_amd64.whl", hash = "sha256:53c3844d527213631e886621df5695d35e4f6a75f620dca412bcd292f6b87d78", size = 38722107, upload-time = "2025-10-28T17:32:29.921Z" }, - { url = "https://files.pythonhosted.org/packages/7b/60/8a00e5a524bb3bf8898db1650d350f50e6cffb9d7a491c561dc9826c7515/scipy-1.16.3-cp311-cp311-win_arm64.whl", hash = "sha256:9452781bd879b14b6f055b26643703551320aa8d79ae064a71df55c00286a184", size = 25506272, upload-time = "2025-10-28T17:32:34.577Z" }, - { url = "https://files.pythonhosted.org/packages/40/41/5bf55c3f386b1643812f3a5674edf74b26184378ef0f3e7c7a09a7e2ca7f/scipy-1.16.3-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81fc5827606858cf71446a5e98715ba0e11f0dbc83d71c7409d05486592a45d6", size = 36659043, upload-time = "2025-10-28T17:32:40.285Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/0f/65582071948cfc45d43e9870bf7ca5f0e0684e165d7c9ef4e50d783073eb/scipy-1.16.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c97176013d404c7346bf57874eaac5187d969293bf40497140b0a2b2b7482e07", size = 28898986, upload-time = "2025-10-28T17:32:45.325Z" }, - { url = "https://files.pythonhosted.org/packages/96/5e/36bf3f0ac298187d1ceadde9051177d6a4fe4d507e8f59067dc9dd39e650/scipy-1.16.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2b71d93c8a9936046866acebc915e2af2e292b883ed6e2cbe5c34beb094b82d9", size = 20889814, upload-time = "2025-10-28T17:32:49.277Z" }, - { url = "https://files.pythonhosted.org/packages/80/35/178d9d0c35394d5d5211bbff7ac4f2986c5488b59506fef9e1de13ea28d3/scipy-1.16.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3d4a07a8e785d80289dfe66b7c27d8634a773020742ec7187b85ccc4b0e7b686", size = 23565795, upload-time = "2025-10-28T17:32:53.337Z" }, - { url = "https://files.pythonhosted.org/packages/fa/46/d1146ff536d034d02f83c8afc3c4bab2eddb634624d6529a8512f3afc9da/scipy-1.16.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0553371015692a898e1aa858fed67a3576c34edefa6b7ebdb4e9dde49ce5c203", size = 33349476, upload-time = "2025-10-28T17:32:58.353Z" }, - { url = "https://files.pythonhosted.org/packages/79/2e/415119c9ab3e62249e18c2b082c07aff907a273741b3f8160414b0e9193c/scipy-1.16.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:72d1717fd3b5e6ec747327ce9bda32d5463f472c9dce9f54499e81fbd50245a1", size = 35676692, upload-time = "2025-10-28T17:33:03.88Z" }, - { url = "https://files.pythonhosted.org/packages/27/82/df26e44da78bf8d2aeaf7566082260cfa15955a5a6e96e6a29935b64132f/scipy-1.16.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fb2472e72e24d1530debe6ae078db70fb1605350c88a3d14bc401d6306dbffe", size = 36019345, upload-time = "2025-10-28T17:33:09.773Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/31/006cbb4b648ba379a95c87262c2855cd0d09453e500937f78b30f02fa1cd/scipy-1.16.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c5192722cffe15f9329a3948c4b1db789fbb1f05c97899187dcf009b283aea70", size = 38678975, upload-time = "2025-10-28T17:33:15.809Z" }, - { url = "https://files.pythonhosted.org/packages/c2/7f/acbd28c97e990b421af7d6d6cd416358c9c293fc958b8529e0bd5d2a2a19/scipy-1.16.3-cp312-cp312-win_amd64.whl", hash = "sha256:56edc65510d1331dae01ef9b658d428e33ed48b4f77b1d51caf479a0253f96dc", size = 38555926, upload-time = "2025-10-28T17:33:21.388Z" }, - { url = "https://files.pythonhosted.org/packages/ce/69/c5c7807fd007dad4f48e0a5f2153038dc96e8725d3345b9ee31b2b7bed46/scipy-1.16.3-cp312-cp312-win_arm64.whl", hash = "sha256:a8a26c78ef223d3e30920ef759e25625a0ecdd0d60e5a8818b7513c3e5384cf2", size = 25463014, upload-time = "2025-10-28T17:33:25.975Z" }, - { url = "https://files.pythonhosted.org/packages/72/f1/57e8327ab1508272029e27eeef34f2302ffc156b69e7e233e906c2a5c379/scipy-1.16.3-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:d2ec56337675e61b312179a1ad124f5f570c00f920cc75e1000025451b88241c", size = 36617856, upload-time = "2025-10-28T17:33:31.375Z" }, - { url = "https://files.pythonhosted.org/packages/44/13/7e63cfba8a7452eb756306aa2fd9b37a29a323b672b964b4fdeded9a3f21/scipy-1.16.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:16b8bc35a4cc24db80a0ec836a9286d0e31b2503cb2fd7ff7fb0e0374a97081d", size = 28874306, upload-time = "2025-10-28T17:33:36.516Z" }, - { url = "https://files.pythonhosted.org/packages/15/65/3a9400efd0228a176e6ec3454b1fa998fbbb5a8defa1672c3f65706987db/scipy-1.16.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:5803c5fadd29de0cf27fa08ccbfe7a9e5d741bf63e4ab1085437266f12460ff9", size = 20865371, upload-time = "2025-10-28T17:33:42.094Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/d7/eda09adf009a9fb81827194d4dd02d2e4bc752cef16737cc4ef065234031/scipy-1.16.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:b81c27fc41954319a943d43b20e07c40bdcd3ff7cf013f4fb86286faefe546c4", size = 23524877, upload-time = "2025-10-28T17:33:48.483Z" }, - { url = "https://files.pythonhosted.org/packages/7d/6b/3f911e1ebc364cb81320223a3422aab7d26c9c7973109a9cd0f27c64c6c0/scipy-1.16.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0c3b4dd3d9b08dbce0f3440032c52e9e2ab9f96ade2d3943313dfe51a7056959", size = 33342103, upload-time = "2025-10-28T17:33:56.495Z" }, - { url = "https://files.pythonhosted.org/packages/21/f6/4bfb5695d8941e5c570a04d9fcd0d36bce7511b7d78e6e75c8f9791f82d0/scipy-1.16.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7dc1360c06535ea6116a2220f760ae572db9f661aba2d88074fe30ec2aa1ff88", size = 35697297, upload-time = "2025-10-28T17:34:04.722Z" }, - { url = "https://files.pythonhosted.org/packages/04/e1/6496dadbc80d8d896ff72511ecfe2316b50313bfc3ebf07a3f580f08bd8c/scipy-1.16.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:663b8d66a8748051c3ee9c96465fb417509315b99c71550fda2591d7dd634234", size = 36021756, upload-time = "2025-10-28T17:34:13.482Z" }, - { url = "https://files.pythonhosted.org/packages/fe/bd/a8c7799e0136b987bda3e1b23d155bcb31aec68a4a472554df5f0937eef7/scipy-1.16.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eab43fae33a0c39006a88096cd7b4f4ef545ea0447d250d5ac18202d40b6611d", size = 38696566, upload-time = "2025-10-28T17:34:22.384Z" }, - { url = "https://files.pythonhosted.org/packages/cd/01/1204382461fcbfeb05b6161b594f4007e78b6eba9b375382f79153172b4d/scipy-1.16.3-cp313-cp313-win_amd64.whl", hash = "sha256:062246acacbe9f8210de8e751b16fc37458213f124bef161a5a02c7a39284304", size = 38529877, upload-time = "2025-10-28T17:35:51.076Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/14/9d9fbcaa1260a94f4bb5b64ba9213ceb5d03cd88841fe9fd1ffd47a45b73/scipy-1.16.3-cp313-cp313-win_arm64.whl", hash = "sha256:50a3dbf286dbc7d84f176f9a1574c705f277cb6565069f88f60db9eafdbe3ee2", size = 25455366, upload-time = "2025-10-28T17:35:59.014Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a3/9ec205bd49f42d45d77f1730dbad9ccf146244c1647605cf834b3a8c4f36/scipy-1.16.3-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:fb4b29f4cf8cc5a8d628bc8d8e26d12d7278cd1f219f22698a378c3d67db5e4b", size = 37027931, upload-time = "2025-10-28T17:34:31.451Z" }, - { url = "https://files.pythonhosted.org/packages/25/06/ca9fd1f3a4589cbd825b1447e5db3a8ebb969c1eaf22c8579bd286f51b6d/scipy-1.16.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:8d09d72dc92742988b0e7750bddb8060b0c7079606c0d24a8cc8e9c9c11f9079", size = 29400081, upload-time = "2025-10-28T17:34:39.087Z" }, - { url = "https://files.pythonhosted.org/packages/6a/56/933e68210d92657d93fb0e381683bc0e53a965048d7358ff5fbf9e6a1b17/scipy-1.16.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:03192a35e661470197556de24e7cb1330d84b35b94ead65c46ad6f16f6b28f2a", size = 21391244, upload-time = "2025-10-28T17:34:45.234Z" }, - { url = "https://files.pythonhosted.org/packages/a8/7e/779845db03dc1418e215726329674b40576879b91814568757ff0014ad65/scipy-1.16.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:57d01cb6f85e34f0946b33caa66e892aae072b64b034183f3d87c4025802a119", size = 23929753, upload-time = "2025-10-28T17:34:51.793Z" }, - { url = "https://files.pythonhosted.org/packages/4c/4b/f756cf8161d5365dcdef9e5f460ab226c068211030a175d2fc7f3f41ca64/scipy-1.16.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:96491a6a54e995f00a28a3c3badfff58fd093bf26cd5fb34a2188c8c756a3a2c", size = 33496912, upload-time = "2025-10-28T17:34:59.8Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/b5/222b1e49a58668f23839ca1542a6322bb095ab8d6590d4f71723869a6c2c/scipy-1.16.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cd13e354df9938598af2be05822c323e97132d5e6306b83a3b4ee6724c6e522e", size = 35802371, upload-time = "2025-10-28T17:35:08.173Z" }, - { url = "https://files.pythonhosted.org/packages/c1/8d/5964ef68bb31829bde27611f8c9deeac13764589fe74a75390242b64ca44/scipy-1.16.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63d3cdacb8a824a295191a723ee5e4ea7768ca5ca5f2838532d9f2e2b3ce2135", size = 36190477, upload-time = "2025-10-28T17:35:16.7Z" }, - { url = "https://files.pythonhosted.org/packages/ab/f2/b31d75cb9b5fa4dd39a0a931ee9b33e7f6f36f23be5ef560bf72e0f92f32/scipy-1.16.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e7efa2681ea410b10dde31a52b18b0154d66f2485328830e45fdf183af5aefc6", size = 38796678, upload-time = "2025-10-28T17:35:26.354Z" }, - { url = "https://files.pythonhosted.org/packages/b4/1e/b3723d8ff64ab548c38d87055483714fefe6ee20e0189b62352b5e015bb1/scipy-1.16.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2d1ae2cf0c350e7705168ff2429962a89ad90c2d49d1dd300686d8b2a5af22fc", size = 38640178, upload-time = "2025-10-28T17:35:35.304Z" }, - { url = "https://files.pythonhosted.org/packages/8e/f3/d854ff38789aca9b0cc23008d607ced9de4f7ab14fa1ca4329f86b3758ca/scipy-1.16.3-cp313-cp313t-win_arm64.whl", hash = "sha256:0c623a54f7b79dd88ef56da19bc2873afec9673a48f3b85b18e4d402bdd29a5a", size = 25803246, upload-time = "2025-10-28T17:35:42.155Z" }, - { url = "https://files.pythonhosted.org/packages/99/f6/99b10fd70f2d864c1e29a28bbcaa0c6340f9d8518396542d9ea3b4aaae15/scipy-1.16.3-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:875555ce62743e1d54f06cdf22c1e0bc47b91130ac40fe5d783b6dfa114beeb6", size = 36606469, upload-time = "2025-10-28T17:36:08.741Z" }, - { url = 
"https://files.pythonhosted.org/packages/4d/74/043b54f2319f48ea940dd025779fa28ee360e6b95acb7cd188fad4391c6b/scipy-1.16.3-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:bb61878c18a470021fb515a843dc7a76961a8daceaaaa8bad1332f1bf4b54657", size = 28872043, upload-time = "2025-10-28T17:36:16.599Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e1/24b7e50cc1c4ee6ffbcb1f27fe9f4c8b40e7911675f6d2d20955f41c6348/scipy-1.16.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f2622206f5559784fa5c4b53a950c3c7c1cf3e84ca1b9c4b6c03f062f289ca26", size = 20862952, upload-time = "2025-10-28T17:36:22.966Z" }, - { url = "https://files.pythonhosted.org/packages/dd/3a/3e8c01a4d742b730df368e063787c6808597ccb38636ed821d10b39ca51b/scipy-1.16.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7f68154688c515cdb541a31ef8eb66d8cd1050605be9dcd74199cbd22ac739bc", size = 23508512, upload-time = "2025-10-28T17:36:29.731Z" }, - { url = "https://files.pythonhosted.org/packages/1f/60/c45a12b98ad591536bfe5330cb3cfe1850d7570259303563b1721564d458/scipy-1.16.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3c820ddb80029fe9f43d61b81d8b488d3ef8ca010d15122b152db77dc94c22", size = 33413639, upload-time = "2025-10-28T17:36:37.982Z" }, - { url = "https://files.pythonhosted.org/packages/71/bc/35957d88645476307e4839712642896689df442f3e53b0fa016ecf8a3357/scipy-1.16.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d3837938ae715fc0fe3c39c0202de3a8853aff22ca66781ddc2ade7554b7e2cc", size = 35704729, upload-time = "2025-10-28T17:36:46.547Z" }, - { url = "https://files.pythonhosted.org/packages/3b/15/89105e659041b1ca11c386e9995aefacd513a78493656e57789f9d9eab61/scipy-1.16.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:aadd23f98f9cb069b3bd64ddc900c4d277778242e961751f77a8cb5c4b946fb0", size = 36086251, upload-time = "2025-10-28T17:36:55.161Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/87/c0ea673ac9c6cc50b3da2196d860273bc7389aa69b64efa8493bdd25b093/scipy-1.16.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b7c5f1bda1354d6a19bc6af73a649f8285ca63ac6b52e64e658a5a11d4d69800", size = 38716681, upload-time = "2025-10-28T17:37:04.1Z" }, - { url = "https://files.pythonhosted.org/packages/91/06/837893227b043fb9b0d13e4bd7586982d8136cb249ffb3492930dab905b8/scipy-1.16.3-cp314-cp314-win_amd64.whl", hash = "sha256:e5d42a9472e7579e473879a1990327830493a7047506d58d73fc429b84c1d49d", size = 39358423, upload-time = "2025-10-28T17:38:20.005Z" }, - { url = "https://files.pythonhosted.org/packages/95/03/28bce0355e4d34a7c034727505a02d19548549e190bedd13a721e35380b7/scipy-1.16.3-cp314-cp314-win_arm64.whl", hash = "sha256:6020470b9d00245926f2d5bb93b119ca0340f0d564eb6fbaad843eaebf9d690f", size = 26135027, upload-time = "2025-10-28T17:38:24.966Z" }, - { url = "https://files.pythonhosted.org/packages/b2/6f/69f1e2b682efe9de8fe9f91040f0cd32f13cfccba690512ba4c582b0bc29/scipy-1.16.3-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:e1d27cbcb4602680a49d787d90664fa4974063ac9d4134813332a8c53dbe667c", size = 37028379, upload-time = "2025-10-28T17:37:14.061Z" }, - { url = "https://files.pythonhosted.org/packages/7c/2d/e826f31624a5ebbab1cd93d30fd74349914753076ed0593e1d56a98c4fb4/scipy-1.16.3-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:9b9c9c07b6d56a35777a1b4cc8966118fb16cfd8daf6743867d17d36cfad2d40", size = 29400052, upload-time = "2025-10-28T17:37:21.709Z" }, - { url = "https://files.pythonhosted.org/packages/69/27/d24feb80155f41fd1f156bf144e7e049b4e2b9dd06261a242905e3bc7a03/scipy-1.16.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:3a4c460301fb2cffb7f88528f30b3127742cff583603aa7dc964a52c463b385d", size = 21391183, upload-time = "2025-10-28T17:37:29.559Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/d3/1b229e433074c5738a24277eca520a2319aac7465eea7310ea6ae0e98ae2/scipy-1.16.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:f667a4542cc8917af1db06366d3f78a5c8e83badd56409f94d1eac8d8d9133fa", size = 23930174, upload-time = "2025-10-28T17:37:36.306Z" }, - { url = "https://files.pythonhosted.org/packages/16/9d/d9e148b0ec680c0f042581a2be79a28a7ab66c0c4946697f9e7553ead337/scipy-1.16.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f379b54b77a597aa7ee5e697df0d66903e41b9c85a6dd7946159e356319158e8", size = 33497852, upload-time = "2025-10-28T17:37:42.228Z" }, - { url = "https://files.pythonhosted.org/packages/2f/22/4e5f7561e4f98b7bea63cf3fd7934bff1e3182e9f1626b089a679914d5c8/scipy-1.16.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4aff59800a3b7f786b70bfd6ab551001cb553244988d7d6b8299cb1ea653b353", size = 35798595, upload-time = "2025-10-28T17:37:48.102Z" }, - { url = "https://files.pythonhosted.org/packages/83/42/6644d714c179429fc7196857866f219fef25238319b650bb32dde7bf7a48/scipy-1.16.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:da7763f55885045036fabcebd80144b757d3db06ab0861415d1c3b7c69042146", size = 36186269, upload-time = "2025-10-28T17:37:53.72Z" }, - { url = "https://files.pythonhosted.org/packages/ac/70/64b4d7ca92f9cf2e6fc6aaa2eecf80bb9b6b985043a9583f32f8177ea122/scipy-1.16.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ffa6eea95283b2b8079b821dc11f50a17d0571c92b43e2b5b12764dc5f9b285d", size = 38802779, upload-time = "2025-10-28T17:37:59.393Z" }, - { url = "https://files.pythonhosted.org/packages/61/82/8d0e39f62764cce5ffd5284131e109f07cf8955aef9ab8ed4e3aa5e30539/scipy-1.16.3-cp314-cp314t-win_amd64.whl", hash = "sha256:d9f48cafc7ce94cf9b15c6bffdc443a81a27bf7075cf2dcd5c8b40f85d10c4e7", size = 39471128, upload-time = "2025-10-28T17:38:05.259Z" }, - { url = 
"https://files.pythonhosted.org/packages/64/47/a494741db7280eae6dc033510c319e34d42dd41b7ac0c7ead39354d1a2b5/scipy-1.16.3-cp314-cp314t-win_arm64.whl", hash = "sha256:21d9d6b197227a12dcbf9633320a4e34c6b0e51c57268df255a0942983bac562", size = 26464127, upload-time = "2025-10-28T17:38:11.34Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/56/3e/9cca699f3486ce6bc12ff46dc2031f1ec8eb9ccc9a320fdaf925f1417426/scipy-1.17.0.tar.gz", hash = "sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e", size = 30396830, upload-time = "2026-01-10T21:34:23.009Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/4b/c89c131aa87cad2b77a54eb0fb94d633a842420fa7e919dc2f922037c3d8/scipy-1.17.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:2abd71643797bd8a106dff97894ff7869eeeb0af0f7a5ce02e4227c6a2e9d6fd", size = 31381316, upload-time = "2026-01-10T21:24:33.42Z" }, + { url = "https://files.pythonhosted.org/packages/5e/5f/a6b38f79a07d74989224d5f11b55267714707582908a5f1ae854cf9a9b84/scipy-1.17.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:ef28d815f4d2686503e5f4f00edc387ae58dfd7a2f42e348bb53359538f01558", size = 27966760, upload-time = "2026-01-10T21:24:38.911Z" }, + { url = "https://files.pythonhosted.org/packages/c1/20/095ad24e031ee8ed3c5975954d816b8e7e2abd731e04f8be573de8740885/scipy-1.17.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:272a9f16d6bb4667e8b50d25d71eddcc2158a214df1b566319298de0939d2ab7", size = 20138701, upload-time = "2026-01-10T21:24:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/89/11/4aad2b3858d0337756f3323f8960755704e530b27eb2a94386c970c32cbe/scipy-1.17.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:7204fddcbec2fe6598f1c5fdf027e9f259106d05202a959a9f1aecf036adc9f6", size = 22480574, upload-time = "2026-01-10T21:24:47.266Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/bd/f5af70c28c6da2227e510875cadf64879855193a687fb19951f0f44cfd6b/scipy-1.17.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc02c37a5639ee67d8fb646ffded6d793c06c5622d36b35cfa8fe5ececb8f042", size = 32862414, upload-time = "2026-01-10T21:24:52.566Z" }, + { url = "https://files.pythonhosted.org/packages/ef/df/df1457c4df3826e908879fe3d76bc5b6e60aae45f4ee42539512438cfd5d/scipy-1.17.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dac97a27520d66c12a34fd90a4fe65f43766c18c0d6e1c0a80f114d2260080e4", size = 35112380, upload-time = "2026-01-10T21:24:58.433Z" }, + { url = "https://files.pythonhosted.org/packages/5f/bb/88e2c16bd1dd4de19d80d7c5e238387182993c2fb13b4b8111e3927ad422/scipy-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb7446a39b3ae0fe8f416a9a3fdc6fba3f11c634f680f16a239c5187bc487c0", size = 34922676, upload-time = "2026-01-10T21:25:04.287Z" }, + { url = "https://files.pythonhosted.org/packages/02/ba/5120242cc735f71fc002cff0303d536af4405eb265f7c60742851e7ccfe9/scipy-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:474da16199f6af66601a01546144922ce402cb17362e07d82f5a6cf8f963e449", size = 37507599, upload-time = "2026-01-10T21:25:09.851Z" }, + { url = "https://files.pythonhosted.org/packages/52/c8/08629657ac6c0da198487ce8cd3de78e02cfde42b7f34117d56a3fe249dc/scipy-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:255c0da161bd7b32a6c898e7891509e8a9289f0b1c6c7d96142ee0d2b114c2ea", size = 36380284, upload-time = "2026-01-10T21:25:15.632Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4a/465f96d42c6f33ad324a40049dfd63269891db9324aa66c4a1c108c6f994/scipy-1.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:85b0ac3ad17fa3be50abd7e69d583d98792d7edc08367e01445a1e2076005379", size = 24370427, upload-time = "2026-01-10T21:25:20.514Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/11/7241a63e73ba5a516f1930ac8d5b44cbbfabd35ac73a2d08ca206df007c4/scipy-1.17.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:0d5018a57c24cb1dd828bcf51d7b10e65986d549f52ef5adb6b4d1ded3e32a57", size = 31364580, upload-time = "2026-01-10T21:25:25.717Z" }, + { url = "https://files.pythonhosted.org/packages/ed/1d/5057f812d4f6adc91a20a2d6f2ebcdb517fdbc87ae3acc5633c9b97c8ba5/scipy-1.17.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:88c22af9e5d5a4f9e027e26772cc7b5922fab8bcc839edb3ae33de404feebd9e", size = 27969012, upload-time = "2026-01-10T21:25:30.921Z" }, + { url = "https://files.pythonhosted.org/packages/e3/21/f6ec556c1e3b6ec4e088da667d9987bb77cc3ab3026511f427dc8451187d/scipy-1.17.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f3cd947f20fe17013d401b64e857c6b2da83cae567adbb75b9dcba865abc66d8", size = 20140691, upload-time = "2026-01-10T21:25:34.802Z" }, + { url = "https://files.pythonhosted.org/packages/7a/fe/5e5ad04784964ba964a96f16c8d4676aa1b51357199014dce58ab7ec5670/scipy-1.17.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e8c0b331c2c1f531eb51f1b4fc9ba709521a712cce58f1aa627bc007421a5306", size = 22463015, upload-time = "2026-01-10T21:25:39.277Z" }, + { url = "https://files.pythonhosted.org/packages/4a/69/7c347e857224fcaf32a34a05183b9d8a7aca25f8f2d10b8a698b8388561a/scipy-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742", size = 32724197, upload-time = "2026-01-10T21:25:44.084Z" }, + { url = "https://files.pythonhosted.org/packages/d1/fe/66d73b76d378ba8cc2fe605920c0c75092e3a65ae746e1e767d9d020a75a/scipy-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b", size = 35009148, upload-time = "2026-01-10T21:25:50.591Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/07/07dec27d9dc41c18d8c43c69e9e413431d20c53a0339c388bcf72f353c4b/scipy-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d", size = 34798766, upload-time = "2026-01-10T21:25:59.41Z" }, + { url = "https://files.pythonhosted.org/packages/81/61/0470810c8a093cdacd4ba7504b8a218fd49ca070d79eca23a615f5d9a0b0/scipy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0cf46c8013fec9d3694dc572f0b54100c28405d55d3e2cb15e2895b25057996e", size = 37405953, upload-time = "2026-01-10T21:26:07.75Z" }, + { url = "https://files.pythonhosted.org/packages/92/ce/672ed546f96d5d41ae78c4b9b02006cedd0b3d6f2bf5bb76ea455c320c28/scipy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:0937a0b0d8d593a198cededd4c439a0ea216a3f36653901ea1f3e4be949056f8", size = 36328121, upload-time = "2026-01-10T21:26:16.509Z" }, + { url = "https://files.pythonhosted.org/packages/9d/21/38165845392cae67b61843a52c6455d47d0cc2a40dd495c89f4362944654/scipy-1.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:f603d8a5518c7426414d1d8f82e253e454471de682ce5e39c29adb0df1efb86b", size = 24314368, upload-time = "2026-01-10T21:26:23.087Z" }, + { url = "https://files.pythonhosted.org/packages/0c/51/3468fdfd49387ddefee1636f5cf6d03ce603b75205bf439bbf0e62069bfd/scipy-1.17.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:65ec32f3d32dfc48c72df4291345dae4f048749bc8d5203ee0a3f347f96c5ce6", size = 31344101, upload-time = "2026-01-10T21:26:30.25Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9a/9406aec58268d437636069419e6977af953d1e246df941d42d3720b7277b/scipy-1.17.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:1f9586a58039d7229ce77b52f8472c972448cded5736eaf102d5658bbac4c269", size = 27950385, upload-time = "2026-01-10T21:26:36.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/98/e7342709e17afdfd1b26b56ae499ef4939b45a23a00e471dfb5375eea205/scipy-1.17.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9fad7d3578c877d606b1150135c2639e9de9cecd3705caa37b66862977cc3e72", size = 20122115, upload-time = "2026-01-10T21:26:42.107Z" }, + { url = "https://files.pythonhosted.org/packages/fd/0e/9eeeb5357a64fd157cbe0302c213517c541cc16b8486d82de251f3c68ede/scipy-1.17.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:423ca1f6584fc03936972b5f7c06961670dbba9f234e71676a7c7ccf938a0d61", size = 22442402, upload-time = "2026-01-10T21:26:48.029Z" }, + { url = "https://files.pythonhosted.org/packages/c9/10/be13397a0e434f98e0c79552b2b584ae5bb1c8b2be95db421533bbca5369/scipy-1.17.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe508b5690e9eaaa9467fc047f833af58f1152ae51a0d0aed67aa5801f4dd7d6", size = 32696338, upload-time = "2026-01-10T21:26:55.521Z" }, + { url = "https://files.pythonhosted.org/packages/63/1e/12fbf2a3bb240161651c94bb5cdd0eae5d4e8cc6eaeceb74ab07b12a753d/scipy-1.17.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6680f2dfd4f6182e7d6db161344537da644d1cf85cf293f015c60a17ecf08752", size = 34977201, upload-time = "2026-01-10T21:27:03.501Z" }, + { url = "https://files.pythonhosted.org/packages/19/5b/1a63923e23ccd20bd32156d7dd708af5bbde410daa993aa2500c847ab2d2/scipy-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eec3842ec9ac9de5917899b277428886042a93db0b227ebbe3a333b64ec7643d", size = 34777384, upload-time = "2026-01-10T21:27:11.423Z" }, + { url = "https://files.pythonhosted.org/packages/39/22/b5da95d74edcf81e540e467202a988c50fef41bd2011f46e05f72ba07df6/scipy-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d7425fcafbc09a03731e1bc05581f5fad988e48c6a861f441b7ab729a49a55ea", size = 37379586, upload-time = "2026-01-10T21:27:20.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/b6/8ac583d6da79e7b9e520579f03007cb006f063642afd6b2eeb16b890bf93/scipy-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:87b411e42b425b84777718cc41516b8a7e0795abfa8e8e1d573bf0ef014f0812", size = 36287211, upload-time = "2026-01-10T21:28:43.122Z" }, + { url = "https://files.pythonhosted.org/packages/55/fb/7db19e0b3e52f882b420417644ec81dd57eeef1bd1705b6f689d8ff93541/scipy-1.17.0-cp313-cp313-win_arm64.whl", hash = "sha256:357ca001c6e37601066092e7c89cca2f1ce74e2a520ca78d063a6d2201101df2", size = 24312646, upload-time = "2026-01-10T21:28:49.893Z" }, + { url = "https://files.pythonhosted.org/packages/20/b6/7feaa252c21cc7aff335c6c55e1b90ab3e3306da3f048109b8b639b94648/scipy-1.17.0-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:ec0827aa4d36cb79ff1b81de898e948a51ac0b9b1c43e4a372c0508c38c0f9a3", size = 31693194, upload-time = "2026-01-10T21:27:27.454Z" }, + { url = "https://files.pythonhosted.org/packages/76/bb/bbb392005abce039fb7e672cb78ac7d158700e826b0515cab6b5b60c26fb/scipy-1.17.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:819fc26862b4b3c73a60d486dbb919202f3d6d98c87cf20c223511429f2d1a97", size = 28365415, upload-time = "2026-01-10T21:27:34.26Z" }, + { url = "https://files.pythonhosted.org/packages/37/da/9d33196ecc99fba16a409c691ed464a3a283ac454a34a13a3a57c0d66f3a/scipy-1.17.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:363ad4ae2853d88ebcde3ae6ec46ccca903ea9835ee8ba543f12f575e7b07e4e", size = 20537232, upload-time = "2026-01-10T21:27:40.306Z" }, + { url = "https://files.pythonhosted.org/packages/56/9d/f4b184f6ddb28e9a5caea36a6f98e8ecd2a524f9127354087ce780885d83/scipy-1.17.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:979c3a0ff8e5ba254d45d59ebd38cde48fce4f10b5125c680c7a4bfe177aab07", size = 22791051, upload-time = "2026-01-10T21:27:46.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/9d/025cccdd738a72140efc582b1641d0dd4caf2e86c3fb127568dc80444e6e/scipy-1.17.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:130d12926ae34399d157de777472bf82e9061c60cc081372b3118edacafe1d00", size = 32815098, upload-time = "2026-01-10T21:27:54.389Z" }, + { url = "https://files.pythonhosted.org/packages/48/5f/09b879619f8bca15ce392bfc1894bd9c54377e01d1b3f2f3b595a1b4d945/scipy-1.17.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e886000eb4919eae3a44f035e63f0fd8b651234117e8f6f29bad1cd26e7bc45", size = 35031342, upload-time = "2026-01-10T21:28:03.012Z" }, + { url = "https://files.pythonhosted.org/packages/f2/9a/f0f0a9f0aa079d2f106555b984ff0fbb11a837df280f04f71f056ea9c6e4/scipy-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:13c4096ac6bc31d706018f06a49abe0485f96499deb82066b94d19b02f664209", size = 34893199, upload-time = "2026-01-10T21:28:10.832Z" }, + { url = "https://files.pythonhosted.org/packages/90/b8/4f0f5cf0c5ea4d7548424e6533e6b17d164f34a6e2fb2e43ffebb6697b06/scipy-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cacbaddd91fcffde703934897c5cd2c7cb0371fac195d383f4e1f1c5d3f3bd04", size = 37438061, upload-time = "2026-01-10T21:28:19.684Z" }, + { url = "https://files.pythonhosted.org/packages/f9/cc/2bd59140ed3b2fa2882fb15da0a9cb1b5a6443d67cfd0d98d4cec83a57ec/scipy-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:edce1a1cf66298cccdc48a1bdf8fb10a3bf58e8b58d6c3883dd1530e103f87c0", size = 36328593, upload-time = "2026-01-10T21:28:28.007Z" }, + { url = "https://files.pythonhosted.org/packages/13/1b/c87cc44a0d2c7aaf0f003aef2904c3d097b422a96c7e7c07f5efd9073c1b/scipy-1.17.0-cp313-cp313t-win_arm64.whl", hash = "sha256:30509da9dbec1c2ed8f168b8d8aa853bc6723fede1dbc23c7d43a56f5ab72a67", size = 24625083, upload-time = "2026-01-10T21:28:35.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/2d/51006cd369b8e7879e1c630999a19d1fbf6f8b5ed3e33374f29dc87e53b3/scipy-1.17.0-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:c17514d11b78be8f7e6331b983a65a7f5ca1fd037b95e27b280921fe5606286a", size = 31346803, upload-time = "2026-01-10T21:28:57.24Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2e/2349458c3ce445f53a6c93d4386b1c4c5c0c540917304c01222ff95ff317/scipy-1.17.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:4e00562e519c09da34c31685f6acc3aa384d4d50604db0f245c14e1b4488bfa2", size = 27967182, upload-time = "2026-01-10T21:29:04.107Z" }, + { url = "https://files.pythonhosted.org/packages/5e/7c/df525fbfa77b878d1cfe625249529514dc02f4fd5f45f0f6295676a76528/scipy-1.17.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f7df7941d71314e60a481e02d5ebcb3f0185b8d799c70d03d8258f6c80f3d467", size = 20139125, upload-time = "2026-01-10T21:29:10.179Z" }, + { url = "https://files.pythonhosted.org/packages/33/11/fcf9d43a7ed1234d31765ec643b0515a85a30b58eddccc5d5a4d12b5f194/scipy-1.17.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:aabf057c632798832f071a8dde013c2e26284043934f53b00489f1773b33527e", size = 22443554, upload-time = "2026-01-10T21:29:15.888Z" }, + { url = "https://files.pythonhosted.org/packages/80/5c/ea5d239cda2dd3d31399424967a24d556cf409fbea7b5b21412b0fd0a44f/scipy-1.17.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a38c3337e00be6fd8a95b4ed66b5d988bac4ec888fd922c2ea9fe5fb1603dd67", size = 32757834, upload-time = "2026-01-10T21:29:23.406Z" }, + { url = "https://files.pythonhosted.org/packages/b8/7e/8c917cc573310e5dc91cbeead76f1b600d3fb17cf0969db02c9cf92e3cfa/scipy-1.17.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00fb5f8ec8398ad90215008d8b6009c9db9fa924fd4c7d6be307c6f945f9cd73", size = 34995775, upload-time = "2026-01-10T21:29:31.915Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/43/176c0c3c07b3f7df324e7cdd933d3e2c4898ca202b090bd5ba122f9fe270/scipy-1.17.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f2a4942b0f5f7c23c7cd641a0ca1955e2ae83dedcff537e3a0259096635e186b", size = 34841240, upload-time = "2026-01-10T21:29:39.995Z" }, + { url = "https://files.pythonhosted.org/packages/44/8c/d1f5f4b491160592e7f084d997de53a8e896a3ac01cd07e59f43ca222744/scipy-1.17.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:dbf133ced83889583156566d2bdf7a07ff89228fe0c0cb727f777de92092ec6b", size = 37394463, upload-time = "2026-01-10T21:29:48.723Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ec/42a6657f8d2d087e750e9a5dde0b481fd135657f09eaf1cf5688bb23c338/scipy-1.17.0-cp314-cp314-win_amd64.whl", hash = "sha256:3625c631a7acd7cfd929e4e31d2582cf00f42fcf06011f59281271746d77e061", size = 37053015, upload-time = "2026-01-10T21:30:51.418Z" }, + { url = "https://files.pythonhosted.org/packages/27/58/6b89a6afd132787d89a362d443a7bddd511b8f41336a1ae47f9e4f000dc4/scipy-1.17.0-cp314-cp314-win_arm64.whl", hash = "sha256:9244608d27eafe02b20558523ba57f15c689357c85bdcfe920b1828750aa26eb", size = 24951312, upload-time = "2026-01-10T21:30:56.771Z" }, + { url = "https://files.pythonhosted.org/packages/e9/01/f58916b9d9ae0112b86d7c3b10b9e685625ce6e8248df139d0fcb17f7397/scipy-1.17.0-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:2b531f57e09c946f56ad0b4a3b2abee778789097871fc541e267d2eca081cff1", size = 31706502, upload-time = "2026-01-10T21:29:56.326Z" }, + { url = "https://files.pythonhosted.org/packages/59/8e/2912a87f94a7d1f8b38aabc0faf74b82d3b6c9e22be991c49979f0eceed8/scipy-1.17.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:13e861634a2c480bd237deb69333ac79ea1941b94568d4b0efa5db5e263d4fd1", size = 28380854, upload-time = "2026-01-10T21:30:01.554Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/1c/874137a52dddab7d5d595c1887089a2125d27d0601fce8c0026a24a92a0b/scipy-1.17.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:eb2651271135154aa24f6481cbae5cc8af1f0dd46e6533fb7b56aa9727b6a232", size = 20552752, upload-time = "2026-01-10T21:30:05.93Z" }, + { url = "https://files.pythonhosted.org/packages/3f/f0/7518d171cb735f6400f4576cf70f756d5b419a07fe1867da34e2c2c9c11b/scipy-1.17.0-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:c5e8647f60679790c2f5c76be17e2e9247dc6b98ad0d3b065861e082c56e078d", size = 22803972, upload-time = "2026-01-10T21:30:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/7c/74/3498563a2c619e8a3ebb4d75457486c249b19b5b04a30600dfd9af06bea5/scipy-1.17.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5fb10d17e649e1446410895639f3385fd2bf4c3c7dfc9bea937bddcbc3d7b9ba", size = 32829770, upload-time = "2026-01-10T21:30:16.359Z" }, + { url = "https://files.pythonhosted.org/packages/48/d1/7b50cedd8c6c9d6f706b4b36fa8544d829c712a75e370f763b318e9638c1/scipy-1.17.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8547e7c57f932e7354a2319fab613981cde910631979f74c9b542bb167a8b9db", size = 35051093, upload-time = "2026-01-10T21:30:22.987Z" }, + { url = "https://files.pythonhosted.org/packages/e2/82/a2d684dfddb87ba1b3ea325df7c3293496ee9accb3a19abe9429bce94755/scipy-1.17.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33af70d040e8af9d5e7a38b5ed3b772adddd281e3062ff23fec49e49681c38cf", size = 34909905, upload-time = "2026-01-10T21:30:28.704Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5e/e565bd73991d42023eb82bb99e51c5b3d9e2c588ca9d4b3e2cc1d3ca62a6/scipy-1.17.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb55bb97d00f8b7ab95cb64f873eb0bf54d9446264d9f3609130381233483f", size = 37457743, upload-time = "2026-01-10T21:30:34.819Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/a8/a66a75c3d8f1fb2b83f66007d6455a06a6f6cf5618c3dc35bc9b69dd096e/scipy-1.17.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1ff269abf702f6c7e67a4b7aad981d42871a11b9dd83c58d2d2ea624efbd1088", size = 37098574, upload-time = "2026-01-10T21:30:40.782Z" }, + { url = "https://files.pythonhosted.org/packages/56/a5/df8f46ef7da168f1bc52cd86e09a9de5c6f19cc1da04454d51b7d4f43408/scipy-1.17.0-cp314-cp314t-win_arm64.whl", hash = "sha256:031121914e295d9791319a1875444d55079885bbae5bdc9c5e0f2ee5f09d34ff", size = 25246266, upload-time = "2026-01-10T21:30:45.923Z" }, ] [[package]] name = "sentence-transformers" -version = "5.2.0" +version = "5.2.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "scikit-learn", version = "1.7.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "scikit-learn", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "torch" }, { name = "tqdm" }, { name = "transformers" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a2/a1/64e7b111e753307ffb7c5b6d039c52d4a91a47fa32a7f5bc377a49b22402/sentence_transformers-5.2.0.tar.gz", hash = 
"sha256:acaeb38717de689f3dab45d5e5a02ebe2f75960a4764ea35fea65f58a4d3019f", size = 381004, upload-time = "2025-12-11T14:12:31.038Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/bc/0bc9c0ec1cf83ab2ec6e6f38667d167349b950fff6dd2086b79bd360eeca/sentence_transformers-5.2.2.tar.gz", hash = "sha256:7033ee0a24bc04c664fd490abf2ef194d387b3a58a97adcc528783ff505159fa", size = 381607, upload-time = "2026-01-27T11:11:02.658Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/d0/3b2897ef6a0c0c801e9fecca26bcc77081648e38e8c772885ebdd8d7d252/sentence_transformers-5.2.0-py3-none-any.whl", hash = "sha256:aa57180f053687d29b08206766ae7db549be5074f61849def7b17bf0b8025ca2", size = 493748, upload-time = "2025-12-11T14:12:29.516Z" }, + { url = "https://files.pythonhosted.org/packages/cc/21/7e925890636791386e81b52878134f114d63072e79fffe14cdcc5e7a5e6a/sentence_transformers-5.2.2-py3-none-any.whl", hash = "sha256:280ac54bffb84c110726b4d8848ba7b7c60813b9034547f8aea6e9a345cd1c23", size = 494106, upload-time = "2026-01-27T11:11:00.983Z" }, ] [[package]] name = "setuptools" -version = "80.9.0" +version = "81.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/1c/73e719955c59b8e424d015ab450f51c0af856ae46ea2da83eba51cc88de1/setuptools-81.0.0.tar.gz", hash = "sha256:487b53915f52501f0a79ccfd0c02c165ffe06631443a886740b91af4b7a5845a", size = 1198299, upload-time = "2026-02-06T21:10:39.601Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = 
"sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e3/c164c88b2e5ce7b24d667b9bd83589cf4f3520d97cad01534cd3c4f55fdb/setuptools-81.0.0-py3-none-any.whl", hash = "sha256:fdd925d5c5d9f62e4b74b30d6dd7828ce236fd6ed998a08d81de62ce5a6310d6", size = 1062021, upload-time = "2026-02-06T21:10:37.175Z" }, ] [[package]] @@ -8129,6 +9157,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + [[package]] name = "sortedcontainers" version = "2.4.0" @@ -8140,17 +9177,18 @@ wheels = [ [[package]] name = "sounddevice" -version = "0.5.3" +version = "0.5.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4e/4f/28e734898b870db15b6474453f19813d3c81b91c806d9e6f867bd6e4dd03/sounddevice-0.5.3.tar.gz", hash = "sha256:cbac2b60198fbab84533697e7c4904cc895ec69d5fb3973556c9eb74a4629b2c", size = 53465, 
upload-time = "2025-10-19T13:23:57.922Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/f9/2592608737553638fca98e21e54bfec40bf577bb98a61b2770c912aab25e/sounddevice-0.5.5.tar.gz", hash = "sha256:22487b65198cb5bf2208755105b524f78ad173e5ab6b445bdab1c989f6698df3", size = 143191, upload-time = "2026-01-23T18:36:43.529Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/e7/9020e9f0f3df00432728f4c4044387468a743e3d9a4f91123d77be10010e/sounddevice-0.5.3-py3-none-any.whl", hash = "sha256:ea7738baa0a9f9fef7390f649e41c9f2c8ada776180e56c2ffd217133c92a806", size = 32670, upload-time = "2025-10-19T13:23:51.779Z" }, - { url = "https://files.pythonhosted.org/packages/2f/39/714118f8413e0e353436914f2b976665161f1be2b6483ac15a8f61484c14/sounddevice-0.5.3-py3-none-macosx_10_6_x86_64.macosx_10_6_universal2.whl", hash = "sha256:278dc4451fff70934a176df048b77d80d7ce1623a6ec9db8b34b806f3112f9c2", size = 108306, upload-time = "2025-10-19T13:23:53.277Z" }, - { url = "https://files.pythonhosted.org/packages/f5/74/52186e3e5c833d00273f7949a9383adff93692c6e02406bf359cb4d3e921/sounddevice-0.5.3-py3-none-win32.whl", hash = "sha256:845d6927bcf14e84be5292a61ab3359cf8e6b9145819ec6f3ac2619ff089a69c", size = 312882, upload-time = "2025-10-19T13:23:54.829Z" }, - { url = "https://files.pythonhosted.org/packages/66/c7/16123d054aef6d445176c9122bfbe73c11087589b2413cab22aff5a7839a/sounddevice-0.5.3-py3-none-win_amd64.whl", hash = "sha256:f55ad20082efc2bdec06928e974fbcae07bc6c405409ae1334cefe7d377eb687", size = 364025, upload-time = "2025-10-19T13:23:56.362Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0a/478e441fd049002cf308520c0d62dd8333e7c6cc8d997f0dda07b9fbcc46/sounddevice-0.5.5-py3-none-any.whl", hash = "sha256:30ff99f6c107f49d25ad16a45cacd8d91c25a1bcdd3e81a206b921a3a6405b1f", size = 32807, upload-time = "2026-01-23T18:36:35.649Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/f9/c037c35f6d0b6bc3bc7bfb314f1d6f1f9a341328ef47cd63fc4f850a7b27/sounddevice-0.5.5-py3-none-macosx_10_6_x86_64.macosx_10_6_universal2.whl", hash = "sha256:05eb9fd6c54c38d67741441c19164c0dae8ce80453af2d8c4ad2e7823d15b722", size = 108557, upload-time = "2026-01-23T18:36:37.41Z" }, + { url = "https://files.pythonhosted.org/packages/88/a1/d19dd9889cd4bce2e233c4fac007cd8daaf5b9fe6e6a5d432cf17be0b807/sounddevice-0.5.5-py3-none-win32.whl", hash = "sha256:1234cc9b4c9df97b6cbe748146ae0ec64dd7d6e44739e8e42eaa5b595313a103", size = 317765, upload-time = "2026-01-23T18:36:39.047Z" }, + { url = "https://files.pythonhosted.org/packages/c3/0e/002ed7c4c1c2ab69031f78989d3b789fee3a7fba9e586eb2b81688bf4961/sounddevice-0.5.5-py3-none-win_amd64.whl", hash = "sha256:cfc6b2c49fb7f555591c78cb8ecf48d6a637fd5b6e1db5fec6ed9365d64b3519", size = 365324, upload-time = "2026-01-23T18:36:40.496Z" }, + { url = "https://files.pythonhosted.org/packages/4e/39/a61d4b83a7746b70d23d9173be688c0c6bfc7173772344b7442c2c155497/sounddevice-0.5.5-py3-none-win_arm64.whl", hash = "sha256:3861901ddd8230d2e0e8ae62ac320cdd4c688d81df89da036dcb812f757bb3e6", size = 317115, upload-time = "2026-01-23T18:36:42.235Z" }, ] [[package]] @@ -8175,24 +9213,24 @@ wheels = [ [[package]] name = "soupsieve" -version = "2.8.1" +version = "2.8.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/23/adf3796d740536d63a6fbda113d07e60c734b6ed5d3058d1e47fc0495e47/soupsieve-2.8.1.tar.gz", hash = "sha256:4cf733bc50fa805f5df4b8ef4740fc0e0fa6218cf3006269afd3f9d6d80fd350", size = 117856, upload-time = "2025-12-18T13:50:34.655Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/48/f3/b67d6ea49ca9154453b6d70b34ea22f3996b9fa55da105a79d8732227adc/soupsieve-2.8.1-py3-none-any.whl", hash = "sha256:a11fe2a6f3d76ab3cf2de04eb339c1be5b506a8a47f2ceb6d139803177f85434", size = 36710, upload-time = "2025-12-18T13:50:33.267Z" }, + { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" }, ] [[package]] name = "sse-starlette" -version = "3.1.2" +version = "3.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/34/f5df66cb383efdbf4f2db23cabb27f51b1dcb737efaf8a558f6f1d195134/sse_starlette-3.1.2.tar.gz", hash = "sha256:55eff034207a83a0eb86de9a68099bd0157838f0b8b999a1b742005c71e33618", size = 26303, upload-time = "2025-12-31T08:02:20.023Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/95/8c4b76eec9ae574474e5d2997557cebf764bcd3586458956c30631ae08f4/sse_starlette-3.1.2-py3-none-any.whl", hash = "sha256:cd800dd349f4521b317b9391d3796fa97b71748a4da9b9e00aafab32dda375c8", size = 12484, upload-time = "2025-12-31T08:02:18.894Z" }, + { url = "https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763, upload-time = "2026-01-17T13:11:03.775Z" }, ] [[package]] @@ -8211,15 +9249,15 @@ wheels 
= [ [[package]] name = "starlette" -version = "0.50.0" +version = "0.52.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, + { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, ] [[package]] @@ -8257,11 +9295,11 @@ wheels = [ [[package]] name = "tenacity" -version = "9.1.2" +version = "9.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/47/c6/ee486fd809e357697ee8a44d3d69222b344920433d3b6666ccd9b374630c/tenacity-9.1.4.tar.gz", hash = "sha256:adb31d4c263f2bd041081ab33b498309a57c77f9acf2db65aadf0898179cf93a", size = 49413, upload-time = "2026-02-07T10:45:33.841Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, + { url = "https://files.pythonhosted.org/packages/d7/c1/eb8f9debc45d3b7918a32ab756658a0904732f75e555402972246b0b8e71/tenacity-9.1.4-py3-none-any.whl", hash = "sha256:6095a360c919085f28c6527de529e76a06ad89b23659fa881ae0649b867a9d55", size = 28926, upload-time = "2026-02-07T10:45:32.24Z" }, ] [[package]] @@ -8350,15 +9388,19 @@ wheels = [ [[package]] name = "tensorstore" -version = "0.1.80" +version = "0.1.81" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", "python_full_version == '3.12.*' and sys_platform == 'darwin'", - "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", - "python_full_version >= '3.13' and sys_platform == 'win32'", - "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + 
"python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.12.*' and sys_platform == 'win32'", "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')", "python_full_version == '3.11.*' and sys_platform == 'darwin'", @@ -8370,32 +9412,32 @@ dependencies = [ { name = "ml-dtypes", marker = "python_full_version >= '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/88/18/7b91daa9cf29dbb6bfdd603154f355c9069a9cd8c757038fe52b0f613611/tensorstore-0.1.80.tar.gz", hash = "sha256:4158fe76b96f62d12a37d7868150d836e089b5280b2bdd363c43c5d651f10e26", size = 7090032, upload-time = "2025-12-10T21:35:10.941Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/96/1f/902d822626a6c2774229236440c85c17e384f53afb4d2c6fa4118a30c53a/tensorstore-0.1.80-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:246641a8780ee5e04e88bc95c8e31faac6471bab1180d1f5cdc9804b29a77c04", size = 16519587, upload-time = "2025-12-10T21:34:05.758Z" }, - { url = "https://files.pythonhosted.org/packages/21/c9/2ed6ed809946d7b0de08645800584937912c404b85900eea66361d5e2541/tensorstore-0.1.80-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:7451b30f99d9f31a2b9d70e6ef61815713dc782c58c6d817f91781341e4dac05", size = 14550336, upload-time = "2025-12-10T21:34:08.394Z" }, - { url = "https://files.pythonhosted.org/packages/d6/50/d97acbc5a4d632590dd9053697181fa41cbcb09389e88acfa6958ab8ead5/tensorstore-0.1.80-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1113a6982fc0fa8dda8fcc0495715e647ac3360909a86ff13f2e04564f82d54a", size = 19004795, upload-time = "2025-12-10T21:34:11.14Z" }, - { url = "https://files.pythonhosted.org/packages/a9/2d/fdbbf3cd6f08d41d3c1d8a2f6a67a4a2a07ac238fb6eeea852c2669184a3/tensorstore-0.1.80-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b193a7a1c4f455a61e60ed2dd67271a3daab0910ddb4bd9db51390d1b36d9996", size = 20996847, upload-time = "2025-12-10T21:34:14.031Z" }, - { url = "https://files.pythonhosted.org/packages/b6/37/4570fe93f0c5c339843042556a841cfe0073d3e7fa4dae7ba31417eb4fd3/tensorstore-0.1.80-cp311-cp311-win_amd64.whl", hash = "sha256:9c088e8c9f67c266ef4dae3703bd617f7c0cb0fd98e99c4500692e38a4328140", size = 13258296, upload-time = "2025-12-10T21:34:16.764Z" }, - { url = "https://files.pythonhosted.org/packages/c3/47/8733a99926caca2db6e8dbe22491c0623da2298a23bc649bfe6e6f645fa7/tensorstore-0.1.80-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:f65dfaf9e737a41389e29a5a2ea52ca5d14c8d6f48b402c723d800cd16d322b0", size = 16537887, upload-time = "2025-12-10T21:34:19.799Z" }, - { url = "https://files.pythonhosted.org/packages/50/54/59a34fee963e46f9f401c54131bdc6a17d6cfb10e5a094d586d33ae273df/tensorstore-0.1.80-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f8b51d7e685bbb63f6becd7d2ac8634d5ab67ec7e53038e597182e2db2c7aa90", size = 14551674, upload-time = "2025-12-10T21:34:22.171Z" }, - { url = "https://files.pythonhosted.org/packages/87/15/0734521f8b648e2c43a00f1bc99a7195646c9e4e31f64ab22a15ac84e75c/tensorstore-0.1.80-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:acb8d52fadcefafef4ef8ecca3fc99b1d0e3c5c5a888766484c3e39f050be7f5", size = 19013402, upload-time = "2025-12-10T21:34:24.961Z" }, - { url = "https://files.pythonhosted.org/packages/48/85/55addd16896343ea2731388028945576060139dda3c68a15d6b00158ef6f/tensorstore-0.1.80-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc28a58c580253a526a4b6d239d18181ef96f1e285a502dbb03ff15eeec07a5b", size = 21007488, upload-time = "2025-12-10T21:34:28.093Z" }, - { url = "https://files.pythonhosted.org/packages/c3/d2/5075cfea2ffd13c5bd2e91d76cdf87a355f617e40fa0b8fbfbbdc5e7bd23/tensorstore-0.1.80-cp312-cp312-win_amd64.whl", hash = "sha256:1b2b2ed0051dfab7e25295b14e6620520729e6e2ddf505f98c8d3917569614bf", size = 13263376, upload-time = "2025-12-10T21:34:30.797Z" }, - { url = "https://files.pythonhosted.org/packages/79/3d/34e64ef1e4573419671b9aa72b69e927702d84e1d95bcef3cc98a8d63ad5/tensorstore-0.1.80-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:46136fe42ee6dd835d957db37073058aea0b78fdfbe2975941640131b7740824", size = 16537403, upload-time = "2025-12-10T21:34:33.404Z" }, - { url = "https://files.pythonhosted.org/packages/94/03/19f45f6134bbb98d13f8de3160271aa4f49466e1a91000c6ab2eec7d9264/tensorstore-0.1.80-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a92505189731fcb03f1c69a84ea4460abb24204bfac1f339448a0621e7def77c", size = 14551401, upload-time = "2025-12-10T21:34:36.041Z" }, - { url = "https://files.pythonhosted.org/packages/f7/fa/d5de3f1b711773e33a329b5fe11de1265b77a13f2a2447fe685ee5d0c1bc/tensorstore-0.1.80-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de63843706fdfe9565a45567238c5b1e55a0b28bbde6524200b31d29043a9a16", size = 19013246, upload-time = "2025-12-10T21:34:38.507Z" }, - { url = "https://files.pythonhosted.org/packages/87/ee/e874b5a495a7aa14817772a91095971f3a965a4cef5b52ad06a8e15c924f/tensorstore-0.1.80-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:6c8dbbdd31cbb28eccfb23dbbd4218fe67bfc32e9cb452875a485b81031c949d", size = 21008391, upload-time = "2025-12-10T21:34:41.332Z" }, - { url = "https://files.pythonhosted.org/packages/2f/99/03bcc5da6a735ffa290f888af1f2c990edc9a375b373d04152d8b6fce3e8/tensorstore-0.1.80-cp313-cp313-win_amd64.whl", hash = "sha256:c0529afab3800749dd245843d3bf0d061a109a8edb77fb345f476e8bccda51b8", size = 13262770, upload-time = "2025-12-10T21:34:43.673Z" }, - { url = "https://files.pythonhosted.org/packages/ef/57/75f65d8ba5829768e67aa978d4c0856956b9bacb279c96f0ee28564b6c41/tensorstore-0.1.80-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:04c29d979eb8b8ee48f873dc13d2701bfd49425500ffc5b848e4ec55b2548281", size = 16543698, upload-time = "2025-12-10T21:34:46.095Z" }, - { url = "https://files.pythonhosted.org/packages/9c/92/17a18eac2cfdb019c36b4362d1a5c614d769a78d10cad0aae3d368fefa0e/tensorstore-0.1.80-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:189d924eaec394c9331e284a9c513ed583e336472a925823b5151cb26f41d091", size = 14552217, upload-time = "2025-12-10T21:34:48.539Z" }, - { url = "https://files.pythonhosted.org/packages/b6/df/71f317633a0cd5270b85d185ac5ce91a749930fc076205d3fae4f1f043ed/tensorstore-0.1.80-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:07e4a84bacf70b78305831897068a9b5ad30326e63bbeb92c4bf7e565fcf5e9e", size = 19020675, upload-time = "2025-12-10T21:34:51.168Z" }, - { url = "https://files.pythonhosted.org/packages/2b/35/f03cdb5edf8e009ff73e48c0c3d0f692a70a7ffc5e393f2ea1761eff89b5/tensorstore-0.1.80-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d2b353b0bd53fedd77fc5a12a1c1a91cacc3cf59e3dd785529c5a54b31d1c7b1", size = 21009171, upload-time = "2025-12-10T21:34:53.979Z" }, - { url = "https://files.pythonhosted.org/packages/51/a9/6cf5675a7d4214ae7fd114c5c7bcf09aa71a57fce6648e187576e60c0c08/tensorstore-0.1.80-cp314-cp314-win_amd64.whl", hash = 
"sha256:53fd121ccd332bc4cc397f7af45889360c668b43dc3ff6bc3264df0f9886c11a", size = 13653134, upload-time = "2025-12-10T21:34:56.818Z" }, - { url = "https://files.pythonhosted.org/packages/1d/d0/8cd2725c6691387438491d0c1fbbe07235439084722f968c20f07de4119d/tensorstore-0.1.80-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:4baee67fce95f29f593fbab4866119347115eaace887732aa92cfcbb9e6b0748", size = 16620211, upload-time = "2025-12-10T21:34:59.106Z" }, - { url = "https://files.pythonhosted.org/packages/f7/c0/289b8979a08b477ce0622a6c13a59dbe8cda407e4c82c8b2ab0b4f8d1989/tensorstore-0.1.80-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8cd11027b5a8b66db8d344085a31a1666c78621dac27039c4d571bc4974804a1", size = 14638072, upload-time = "2025-12-10T21:35:01.598Z" }, - { url = "https://files.pythonhosted.org/packages/42/47/5c63024ced48e3f440c131babedef2f5398f48ab81c1aeee6c6193491d1c/tensorstore-0.1.80-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b7c5dd434bba4ee08fe46bbbdb25c60dd3d47ccb4b8561a9751cf1526da52b8", size = 19024739, upload-time = "2025-12-10T21:35:04.324Z" }, - { url = "https://files.pythonhosted.org/packages/6e/16/d08ade819949e0622f27e949c15b09f7b86ac18f8ac7c4d8bdfb4a711076/tensorstore-0.1.80-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e93df6d34ff5f0f6be245f4d29b99a7c1eef8ad91b50686adf57a5eeea99cb74", size = 21024449, upload-time = "2025-12-10T21:35:08.149Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/43/f6/e2403fc05b97ba74ad408a98a42c288e6e1b8eacc23780c153b0e5166179/tensorstore-0.1.81.tar.gz", hash = "sha256:687546192ea6f6c8ae28d18f13103336f68017d928b9f5a00325e9b0548d9c25", size = 7120819, upload-time = "2026-02-06T18:56:12.535Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/df/f472bd0dee801d7e33c53335ad0fcde9c71e5f9324241faa0a6b4be4270a/tensorstore-0.1.81-cp311-cp311-macosx_10_14_x86_64.whl", hash = 
"sha256:f64fb510f293079f9e5c63cb227e8a76904655a32912fc107c1e63bd8dc3e187", size = 16501390, upload-time = "2026-02-06T18:55:13.678Z" }, + { url = "https://files.pythonhosted.org/packages/5a/93/5f40c51d7b15d3574b1788a251dd4e3abd0415dab71811e126d2da5e826b/tensorstore-0.1.81-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4282587598885ff447f08369ac9bb681a65e224888cfa8ef8f3dd63544759e6c", size = 14535592, upload-time = "2026-02-06T18:55:16.44Z" }, + { url = "https://files.pythonhosted.org/packages/76/48/b7adcc8eca502ce8050c18cea066ca0c0122df7a686e10da6470e55456b4/tensorstore-0.1.81-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b4ea06038f6912bb6ed8a89db0c31e4e3d1b2404f3365dc756e4bc42bd6a89c", size = 19038732, upload-time = "2026-02-06T18:55:18.924Z" }, + { url = "https://files.pythonhosted.org/packages/40/b0/99294895b030bd7d9ebc06e7ed523d0c09ab65667e031f8a67923f398f86/tensorstore-0.1.81-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51d59f7db9cdae02fce9d347300c0ccfb8265052945757e95592a265eb620b15", size = 21038447, upload-time = "2026-02-06T18:55:21.085Z" }, + { url = "https://files.pythonhosted.org/packages/32/e6/1ce977baf09aa3889f10f04460b588a6c8876ea441e51090c671f0400a6f/tensorstore-0.1.81-cp311-cp311-win_amd64.whl", hash = "sha256:fdb9579a729cccc02127cab5abf26f57a0e27968ba65c9c548ad058f5a45417f", size = 13221673, upload-time = "2026-02-06T18:55:23.195Z" }, + { url = "https://files.pythonhosted.org/packages/85/82/00037db699f74d792efe2696305ddd6932e04306899e3701824a7f7de961/tensorstore-0.1.81-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:7aefa1e3eadca804bce05215184c9cde29205ac2f3b443ca15a4e1846d31af4e", size = 16521245, upload-time = "2026-02-06T18:55:25.559Z" }, + { url = "https://files.pythonhosted.org/packages/86/2e/1deca1b955cb959eec13fd342ffaa2fd84e4770b4e2bcb95a2f541875a52/tensorstore-0.1.81-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:7e001d3edc6758eb5dc80556da9e945c1381f0529102fcc0301358ba6b9b70ed", size = 14543561, upload-time = "2026-02-06T18:55:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/6c/e4/b4343eae773f72a8777f82c5328191a06d8a5195e62105c14b7dcc49823f/tensorstore-0.1.81-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c27e07f4e91e6dc6a0878e13e2c5931d1716196b67b0df927f2f571de2576e9", size = 19043982, upload-time = "2026-02-06T18:55:30.076Z" }, + { url = "https://files.pythonhosted.org/packages/31/6c/d8c8508a9f4a83dc910d2365c484ba0debf5e531782065e3657fc8fc9b54/tensorstore-0.1.81-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcb4786c4955e2d88d518b5b5a367427e3ad21d059cba366ad7aebf5fcc2302e", size = 21049171, upload-time = "2026-02-06T18:55:34.383Z" }, + { url = "https://files.pythonhosted.org/packages/44/a9/c1a751e35a0fcff7f795398c4f98b6c8ea0f00fe7d7704f66a1e08d4352f/tensorstore-0.1.81-cp312-cp312-win_amd64.whl", hash = "sha256:b96cbf1ee74d9038762b2d81305ee1589ec89913a440df6cbd514bc5879655d2", size = 13226573, upload-time = "2026-02-06T18:55:36.463Z" }, + { url = "https://files.pythonhosted.org/packages/06/c0/32f7d52bfcf1728f557cccb17ac85f57bcc3fa92f4034368d6e7d7d06406/tensorstore-0.1.81-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:7bb563ad4d4d6c4748d9fe4f01f639ddf4ffef83ac180fc3b6d73f46ad854e62", size = 16521316, upload-time = "2026-02-06T18:55:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/38/b9/06ffc44e38ca18aeb3973f6b709d4d2102e17a8d700c7c3e2af3f2830722/tensorstore-0.1.81-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2ff7e6c457596cf21f31c690e451fe634ac804fc98ff8131188e99d5ef7d29bc", size = 14543212, upload-time = "2026-02-06T18:55:42.246Z" }, + { url = "https://files.pythonhosted.org/packages/00/01/3c27962f7258ad0bb552c3cd324fa2e01f746c8b6e81bd25d468f72204e8/tensorstore-0.1.81-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b218a6fe09c72c002f2c6480fc58b78cdbba8bb9c6f3a0d7dd1f70625cb37995", size = 19044489, upload-time = "2026-02-06T18:55:44.957Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/fe0f14a1da96d6e0aa6c24d6c31f3ce4b203f8e8a1a2e359489e52b33400/tensorstore-0.1.81-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f33e7c11035c14dad01aeba012051643110cbb95c239e512106fe1be692c98b6", size = 21052658, upload-time = "2026-02-06T18:55:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e2/cc189d799982f02c200b22405c4d3f28845df6321de2ac3a35ae087758ed/tensorstore-0.1.81-cp313-cp313-win_amd64.whl", hash = "sha256:b55126bcf084cc5fe0151bf465f3a5dedb5b5da0133d01227f75d0e71f9cfae5", size = 13226848, upload-time = "2026-02-06T18:55:49.631Z" }, + { url = "https://files.pythonhosted.org/packages/89/b0/0ca436391f832fad365977623f3c08c4fbbf553fd9a112604aa106646654/tensorstore-0.1.81-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a48c23e4df50681d8f4f365b08a0beb114ab210accbde9f34d37fd7b45c31005", size = 16525537, upload-time = "2026-02-06T18:55:51.708Z" }, + { url = "https://files.pythonhosted.org/packages/8a/02/c10052b86cf8d47b4cf41e5f139b4003c69bb69e506759b0eb87b873d213/tensorstore-0.1.81-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0be0ce646263820f3d4c9ba738d8e9be7da241cbe093ca2fd02e25023344347c", size = 14547490, upload-time = "2026-02-06T18:55:53.899Z" }, + { url = "https://files.pythonhosted.org/packages/01/d1/bd86c46367624522967e896ca45d77ba9085de3f15081fdad6576ba70aa9/tensorstore-0.1.81-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93996e756dce82589f5a19e27b4e7c0b5b40221a7e41ddce46dc13d378dbd157", size = 19050938, upload-time = "2026-02-06T18:55:56.123Z" }, + { url = "https://files.pythonhosted.org/packages/11/a2/59a8e9a33cd9e17461f918bda4a20712ed3c51c52e0e42b2f673441bc90d/tensorstore-0.1.81-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:444c088919a739c20ca1f87935d72de4fd87605eb2c0f093b8d49251b7884aef", size = 21055275, upload-time = "2026-02-06T18:55:58.259Z" }, + { url = "https://files.pythonhosted.org/packages/c5/ec/2988f210729b523975b1bee030cabd64b256943c08463331598f1e03bd4f/tensorstore-0.1.81-cp314-cp314-win_amd64.whl", hash = "sha256:f7aa0a3a470c4d832faff7d77dd688b1d352b718d110c95ceba54ec637ca3ffa", size = 13614713, upload-time = "2026-02-06T18:56:00.291Z" }, + { url = "https://files.pythonhosted.org/packages/ae/5d/60e990df3f1dc57c33644375a0eccb906a79fd8a5e2d81238f856c65ad7f/tensorstore-0.1.81-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:6c36d8a827120aa15e50ec5c36dd7e73978d86ba4f46d073fb648d8dda3948e9", size = 16605091, upload-time = "2026-02-06T18:56:02.807Z" }, + { url = "https://files.pythonhosted.org/packages/85/22/f599576815227735d3e34f86f05a8b39d8b15fd979d0029383ebae23978d/tensorstore-0.1.81-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3c31d831707c4ff3c6ecdcba129f7c39e982572837b2f93e02ccb83fc8581bca", size = 14631573, upload-time = "2026-02-06T18:56:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/cb/76/b5d0b424b7af057a3d4de3f312eba9ddf8a3c750a766b42e0b7f6c2ebef0/tensorstore-0.1.81-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9fba383f108d7450bf9a03487ac7fa3bb2c3080c91cee9d2da3bb217b560846b", size = 19065251, upload-time = "2026-02-06T18:56:06.972Z" }, + { url = "https://files.pythonhosted.org/packages/54/6c/0f113eae73b1e8eb2f712cf5f1efd269452f0f0045158fae43ce7b4701b4/tensorstore-0.1.81-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f88c52f592e2982682045199cabf360462146749d48b7be2969cd640e877c6c3", size = 21066488, upload-time = "2026-02-06T18:56:10.236Z" }, ] [[package]] @@ -8418,15 +9460,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f4/fd/88f4368b71ae8c4bd1e3ed99c1660467760ca6cfbd31d9167f3a010f9d02/tensorzero-2025.7.5-cp39-abi3-win_amd64.whl", hash = 
"sha256:a80d9739c61c8d839f8d4f9f61d6333ca13b2bd7ea1bb021ea989dd15a8eb39e", size = 17174978, upload-time = "2025-07-30T16:24:08.122Z" }, ] -[[package]] -name = "termcolor" -version = "3.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/46/79/cf31d7a93a8fdc6aa0fbb665be84426a8c5a557d9240b6239e9e11e35fc5/termcolor-3.3.0.tar.gz", hash = "sha256:348871ca648ec6a9a983a13ab626c0acce02f515b9e1983332b17af7979521c5", size = 14434, upload-time = "2025-12-29T12:55:21.882Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/d1/8bb87d21e9aeb323cc03034f5eaf2c8f69841e40e4853c2627edf8111ed3/termcolor-3.3.0-py3-none-any.whl", hash = "sha256:cf642efadaf0a8ebbbf4bc7a31cec2f9b5f21a9f726f4ccbb08192c9c26f43a5", size = 7734, upload-time = "2025-12-29T12:55:20.718Z" }, -] - [[package]] name = "terminaltexteffects" version = "0.12.2" @@ -8564,51 +9597,65 @@ wheels = [ [[package]] name = "tomli" -version = "2.3.0" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, + { url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, + { url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, + { url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, + { url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, +] + +[[package]] +name = "tomlkit" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, - { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, - { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, - { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, - { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, - { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, - { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, - { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, - { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, - { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, - { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, - { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, - { url = 
"https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, - { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, - { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, - { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, - { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, - { url = 
"https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, - { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, - { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, - { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, - { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, - { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, - { url = 
"https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, - { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, - { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, - { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, - { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, - { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, - { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, - { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, - { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, - { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, - { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, - { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/c3/af/14b24e41977adb296d6bd1fb59402cf7d60ce364f90c890bd2ec65c43b5a/tomlkit-0.14.0.tar.gz", hash = "sha256:cf00efca415dbd57575befb1f6634c4f42d2d87dbba376128adb42c121b87064", size = 187167, upload-time = "2026-01-13T01:14:53.304Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/11/87d6d29fb5d237229d67973a6c9e06e048f01cf4994dee194ab0ea841814/tomlkit-0.14.0-py3-none-any.whl", hash = "sha256:592064ed85b40fa213469f81ac584f67a4f2992509a7c3ea2d632208623a3680", size = 39310, upload-time = "2026-01-13T01:14:51.965Z" }, ] [[package]] @@ -8622,9 +9669,10 @@ wheels = [ [[package]] name = "torch" -version = "2.9.1" +version = "2.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "cuda-bindings", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "filelock" }, { name = "fsspec" }, { name = "jinja2" }, @@ -8651,34 +9699,38 @@ dependencies = [ { name = "typing-extensions" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/56/9577683b23072075ed2e40d725c52c2019d71a972fab8e083763da8e707e/torch-2.9.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:1cc208435f6c379f9b8fdfd5ceb5be1e3b72a6bdf1cb46c0d2812aa73472db9e", size = 104207681, upload-time = "2025-11-12T15:19:56.48Z" }, - { url = 
"https://files.pythonhosted.org/packages/38/45/be5a74f221df8f4b609b78ff79dc789b0cc9017624544ac4dd1c03973150/torch-2.9.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:9fd35c68b3679378c11f5eb73220fdcb4e6f4592295277fbb657d31fd053237c", size = 899794036, upload-time = "2025-11-12T15:21:01.886Z" }, - { url = "https://files.pythonhosted.org/packages/67/95/a581e8a382596b69385a44bab2733f1273d45c842f5d4a504c0edc3133b6/torch-2.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:2af70e3be4a13becba4655d6cc07dcfec7ae844db6ac38d6c1dafeb245d17d65", size = 110969861, upload-time = "2025-11-12T15:21:30.145Z" }, - { url = "https://files.pythonhosted.org/packages/ad/51/1756dc128d2bf6ea4e0a915cb89ea5e730315ff33d60c1ff56fd626ba3eb/torch-2.9.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:a83b0e84cc375e3318a808d032510dde99d696a85fe9473fc8575612b63ae951", size = 74452222, upload-time = "2025-11-12T15:20:46.223Z" }, - { url = "https://files.pythonhosted.org/packages/15/db/c064112ac0089af3d2f7a2b5bfbabf4aa407a78b74f87889e524b91c5402/torch-2.9.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:62b3fd888277946918cba4478cf849303da5359f0fb4e3bfb86b0533ba2eaf8d", size = 104220430, upload-time = "2025-11-12T15:20:31.705Z" }, - { url = "https://files.pythonhosted.org/packages/56/be/76eaa36c9cd032d3b01b001e2c5a05943df75f26211f68fae79e62f87734/torch-2.9.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d033ff0ac3f5400df862a51bdde9bad83561f3739ea0046e68f5401ebfa67c1b", size = 899821446, upload-time = "2025-11-12T15:20:15.544Z" }, - { url = "https://files.pythonhosted.org/packages/47/cc/7a2949e38dfe3244c4df21f0e1c27bce8aedd6c604a587dd44fc21017cb4/torch-2.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:0d06b30a9207b7c3516a9e0102114024755a07045f0c1d2f2a56b1819ac06bcb", size = 110973074, upload-time = "2025-11-12T15:21:39.958Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/ce/7d251155a783fb2c1bb6837b2b7023c622a2070a0a72726ca1df47e7ea34/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:52347912d868653e1528b47cafaf79b285b98be3f4f35d5955389b1b95224475", size = 74463887, upload-time = "2025-11-12T15:20:36.611Z" }, - { url = "https://files.pythonhosted.org/packages/0f/27/07c645c7673e73e53ded71705045d6cb5bae94c4b021b03aa8d03eee90ab/torch-2.9.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:da5f6f4d7f4940a173e5572791af238cb0b9e21b1aab592bd8b26da4c99f1cd6", size = 104126592, upload-time = "2025-11-12T15:20:41.62Z" }, - { url = "https://files.pythonhosted.org/packages/19/17/e377a460603132b00760511299fceba4102bd95db1a0ee788da21298ccff/torch-2.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:27331cd902fb4322252657f3902adf1c4f6acad9dcad81d8df3ae14c7c4f07c4", size = 899742281, upload-time = "2025-11-12T15:22:17.602Z" }, - { url = "https://files.pythonhosted.org/packages/b1/1a/64f5769025db846a82567fa5b7d21dba4558a7234ee631712ee4771c436c/torch-2.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:81a285002d7b8cfd3fdf1b98aa8df138d41f1a8334fd9ea37511517cedf43083", size = 110940568, upload-time = "2025-11-12T15:21:18.689Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ab/07739fd776618e5882661d04c43f5b5586323e2f6a2d7d84aac20d8f20bd/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:c0d25d1d8e531b8343bea0ed811d5d528958f1dcbd37e7245bc686273177ad7e", size = 74479191, upload-time = "2025-11-12T15:21:25.816Z" }, - { url = "https://files.pythonhosted.org/packages/20/60/8fc5e828d050bddfab469b3fe78e5ab9a7e53dda9c3bdc6a43d17ce99e63/torch-2.9.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c29455d2b910b98738131990394da3e50eea8291dfeb4b12de71ecf1fdeb21cb", size = 104135743, upload-time = "2025-11-12T15:21:34.936Z" }, - { url = 
"https://files.pythonhosted.org/packages/f2/b7/6d3f80e6918213babddb2a37b46dbb14c15b14c5f473e347869a51f40e1f/torch-2.9.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:524de44cd13931208ba2c4bde9ec7741fd4ae6bfd06409a604fc32f6520c2bc9", size = 899749493, upload-time = "2025-11-12T15:24:36.356Z" }, - { url = "https://files.pythonhosted.org/packages/a6/47/c7843d69d6de8938c1cbb1eba426b1d48ddf375f101473d3e31a5fc52b74/torch-2.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:545844cc16b3f91e08ce3b40e9c2d77012dd33a48d505aed34b7740ed627a1b2", size = 110944162, upload-time = "2025-11-12T15:21:53.151Z" }, - { url = "https://files.pythonhosted.org/packages/28/0e/2a37247957e72c12151b33a01e4df651d9d155dd74d8cfcbfad15a79b44a/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5be4bf7496f1e3ffb1dd44b672adb1ac3f081f204c5ca81eba6442f5f634df8e", size = 74830751, upload-time = "2025-11-12T15:21:43.792Z" }, - { url = "https://files.pythonhosted.org/packages/4b/f7/7a18745edcd7b9ca2381aa03353647bca8aace91683c4975f19ac233809d/torch-2.9.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:30a3e170a84894f3652434b56d59a64a2c11366b0ed5776fab33c2439396bf9a", size = 104142929, upload-time = "2025-11-12T15:21:48.319Z" }, - { url = "https://files.pythonhosted.org/packages/f4/dd/f1c0d879f2863ef209e18823a988dc7a1bf40470750e3ebe927efdb9407f/torch-2.9.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:8301a7b431e51764629208d0edaa4f9e4c33e6df0f2f90b90e261d623df6a4e2", size = 899748978, upload-time = "2025-11-12T15:23:04.568Z" }, - { url = "https://files.pythonhosted.org/packages/1f/9f/6986b83a53b4d043e36f3f898b798ab51f7f20fdf1a9b01a2720f445043d/torch-2.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2e1c42c0ae92bf803a4b2409fdfed85e30f9027a66887f5e7dcdbc014c7531db", size = 111176995, upload-time = "2025-11-12T15:22:01.618Z" }, - { url = 
"https://files.pythonhosted.org/packages/40/60/71c698b466dd01e65d0e9514b5405faae200c52a76901baf6906856f17e4/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:2c14b3da5df416cf9cb5efab83aa3056f5b8cd8620b8fde81b4987ecab730587", size = 74480347, upload-time = "2025-11-12T15:21:57.648Z" }, - { url = "https://files.pythonhosted.org/packages/48/50/c4b5112546d0d13cc9eaa1c732b823d676a9f49ae8b6f97772f795874a03/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1edee27a7c9897f4e0b7c14cfc2f3008c571921134522d5b9b5ec4ebbc69041a", size = 74433245, upload-time = "2025-11-12T15:22:39.027Z" }, - { url = "https://files.pythonhosted.org/packages/81/c9/2628f408f0518b3bae49c95f5af3728b6ab498c8624ab1e03a43dd53d650/torch-2.9.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:19d144d6b3e29921f1fc70503e9f2fc572cde6a5115c0c0de2f7ca8b1483e8b6", size = 104134804, upload-time = "2025-11-12T15:22:35.222Z" }, - { url = "https://files.pythonhosted.org/packages/28/fc/5bc91d6d831ae41bf6e9e6da6468f25330522e92347c9156eb3f1cb95956/torch-2.9.1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:c432d04376f6d9767a9852ea0def7b47a7bbc8e7af3b16ac9cf9ce02b12851c9", size = 899747132, upload-time = "2025-11-12T15:23:36.068Z" }, - { url = "https://files.pythonhosted.org/packages/63/5d/e8d4e009e52b6b2cf1684bde2a6be157b96fb873732542fb2a9a99e85a83/torch-2.9.1-cp314-cp314-win_amd64.whl", hash = "sha256:d187566a2cdc726fc80138c3cdb260970fab1c27e99f85452721f7759bbd554d", size = 110934845, upload-time = "2025-11-12T15:22:48.367Z" }, - { url = "https://files.pythonhosted.org/packages/bd/b2/2d15a52516b2ea3f414643b8de68fa4cb220d3877ac8b1028c83dc8ca1c4/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cb10896a1f7fedaddbccc2017ce6ca9ecaaf990f0973bdfcf405439750118d2c", size = 74823558, upload-time = "2025-11-12T15:22:43.392Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/5c/5b2e5d84f5b9850cd1e71af07524d8cbb74cba19379800f1f9f7c997fc70/torch-2.9.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:0a2bd769944991c74acf0c4ef23603b9c777fdf7637f115605a4b2d8023110c7", size = 104145788, upload-time = "2025-11-12T15:23:52.109Z" }, - { url = "https://files.pythonhosted.org/packages/a9/8c/3da60787bcf70add986c4ad485993026ac0ca74f2fc21410bc4eb1bb7695/torch-2.9.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:07c8a9660bc9414c39cac530ac83b1fb1b679d7155824144a40a54f4a47bfa73", size = 899735500, upload-time = "2025-11-12T15:24:08.788Z" }, - { url = "https://files.pythonhosted.org/packages/db/2b/f7818f6ec88758dfd21da46b6cd46af9d1b3433e53ddbb19ad1e0da17f9b/torch-2.9.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c88d3299ddeb2b35dcc31753305612db485ab6f1823e37fb29451c8b2732b87e", size = 111163659, upload-time = "2025-11-12T15:23:20.009Z" }, + { url = "https://files.pythonhosted.org/packages/5b/30/bfebdd8ec77db9a79775121789992d6b3b75ee5494971294d7b4b7c999bc/torch-2.10.0-2-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:2b980edd8d7c0a68c4e951ee1856334a43193f98730d97408fbd148c1a933313", size = 79411457, upload-time = "2026-02-10T21:44:59.189Z" }, + { url = "https://files.pythonhosted.org/packages/0f/8b/4b61d6e13f7108f36910df9ab4b58fd389cc2520d54d81b88660804aad99/torch-2.10.0-2-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:418997cb02d0a0f1497cf6a09f63166f9f5df9f3e16c8a716ab76a72127c714f", size = 79423467, upload-time = "2026-02-10T21:44:48.711Z" }, + { url = "https://files.pythonhosted.org/packages/d3/54/a2ba279afcca44bbd320d4e73675b282fcee3d81400ea1b53934efca6462/torch-2.10.0-2-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:13ec4add8c3faaed8d13e0574f5cd4a323c11655546f91fbe6afa77b57423574", size = 79498202, upload-time = "2026-02-10T21:44:52.603Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/23/2c9fe0c9c27f7f6cb865abcea8a4568f29f00acaeadfc6a37f6801f84cb4/torch-2.10.0-2-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:e521c9f030a3774ed770a9c011751fb47c4d12029a3d6522116e48431f2ff89e", size = 79498254, upload-time = "2026-02-10T21:44:44.095Z" }, + { url = "https://files.pythonhosted.org/packages/0c/1a/c61f36cfd446170ec27b3a4984f072fd06dab6b5d7ce27e11adb35d6c838/torch-2.10.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5276fa790a666ee8becaffff8acb711922252521b28fbce5db7db5cf9cb2026d", size = 145992962, upload-time = "2026-01-21T16:24:14.04Z" }, + { url = "https://files.pythonhosted.org/packages/b5/60/6662535354191e2d1555296045b63e4279e5a9dbad49acf55a5d38655a39/torch-2.10.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:aaf663927bcd490ae971469a624c322202a2a1e68936eb952535ca4cd3b90444", size = 915599237, upload-time = "2026-01-21T16:23:25.497Z" }, + { url = "https://files.pythonhosted.org/packages/40/b8/66bbe96f0d79be2b5c697b2e0b187ed792a15c6c4b8904613454651db848/torch-2.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:a4be6a2a190b32ff5c8002a0977a25ea60e64f7ba46b1be37093c141d9c49aeb", size = 113720931, upload-time = "2026-01-21T16:24:23.743Z" }, + { url = "https://files.pythonhosted.org/packages/76/bb/d820f90e69cda6c8169b32a0c6a3ab7b17bf7990b8f2c680077c24a3c14c/torch-2.10.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:35e407430795c8d3edb07a1d711c41cc1f9eaddc8b2f1cc0a165a6767a8fb73d", size = 79411450, upload-time = "2026-01-21T16:25:30.692Z" }, + { url = "https://files.pythonhosted.org/packages/78/89/f5554b13ebd71e05c0b002f95148033e730d3f7067f67423026cc9c69410/torch-2.10.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3282d9febd1e4e476630a099692b44fdc214ee9bf8ee5377732d9d9dfe5712e4", size = 145992610, upload-time = "2026-01-21T16:25:26.327Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/30/a3a2120621bf9c17779b169fc17e3dc29b230c29d0f8222f499f5e159aa8/torch-2.10.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a2f9edd8dbc99f62bc4dfb78af7bf89499bca3d753423ac1b4e06592e467b763", size = 915607863, upload-time = "2026-01-21T16:25:06.696Z" }, + { url = "https://files.pythonhosted.org/packages/6f/3d/c87b33c5f260a2a8ad68da7147e105f05868c281c63d65ed85aa4da98c66/torch-2.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:29b7009dba4b7a1c960260fc8ac85022c784250af43af9fb0ebafc9883782ebd", size = 113723116, upload-time = "2026-01-21T16:25:21.916Z" }, + { url = "https://files.pythonhosted.org/packages/61/d8/15b9d9d3a6b0c01b883787bd056acbe5cc321090d4b216d3ea89a8fcfdf3/torch-2.10.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:b7bd80f3477b830dd166c707c5b0b82a898e7b16f59a7d9d42778dd058272e8b", size = 79423461, upload-time = "2026-01-21T16:24:50.266Z" }, + { url = "https://files.pythonhosted.org/packages/cc/af/758e242e9102e9988969b5e621d41f36b8f258bb4a099109b7a4b4b50ea4/torch-2.10.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:5fd4117d89ffd47e3dcc71e71a22efac24828ad781c7e46aaaf56bf7f2796acf", size = 145996088, upload-time = "2026-01-21T16:24:44.171Z" }, + { url = "https://files.pythonhosted.org/packages/23/8e/3c74db5e53bff7ed9e34c8123e6a8bfef718b2450c35eefab85bb4a7e270/torch-2.10.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:787124e7db3b379d4f1ed54dd12ae7c741c16a4d29b49c0226a89bea50923ffb", size = 915711952, upload-time = "2026-01-21T16:23:53.503Z" }, + { url = "https://files.pythonhosted.org/packages/6e/01/624c4324ca01f66ae4c7cd1b74eb16fb52596dce66dbe51eff95ef9e7a4c/torch-2.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:2c66c61f44c5f903046cc696d088e21062644cbe541c7f1c4eaae88b2ad23547", size = 113757972, upload-time = "2026-01-21T16:24:39.516Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/5c/dee910b87c4d5c0fcb41b50839ae04df87c1cfc663cf1b5fca7ea565eeaa/torch-2.10.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:6d3707a61863d1c4d6ebba7be4ca320f42b869ee657e9b2c21c736bf17000294", size = 79498198, upload-time = "2026-01-21T16:24:34.704Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6f/f2e91e34e3fcba2e3fc8d8f74e7d6c22e74e480bbd1db7bc8900fdf3e95c/torch-2.10.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:5c4d217b14741e40776dd7074d9006fd28b8a97ef5654db959d8635b2fe5f29b", size = 146004247, upload-time = "2026-01-21T16:24:29.335Z" }, + { url = "https://files.pythonhosted.org/packages/98/fb/5160261aeb5e1ee12ee95fe599d0541f7c976c3701d607d8fc29e623229f/torch-2.10.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6b71486353fce0f9714ca0c9ef1c850a2ae766b409808acd58e9678a3edb7738", size = 915716445, upload-time = "2026-01-21T16:22:45.353Z" }, + { url = "https://files.pythonhosted.org/packages/6a/16/502fb1b41e6d868e8deb5b0e3ae926bbb36dab8ceb0d1b769b266ad7b0c3/torch-2.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:c2ee399c644dc92ef7bc0d4f7e74b5360c37cdbe7c5ba11318dda49ffac2bc57", size = 113757050, upload-time = "2026-01-21T16:24:19.204Z" }, + { url = "https://files.pythonhosted.org/packages/1a/0b/39929b148f4824bc3ad6f9f72a29d4ad865bcf7ebfc2fa67584773e083d2/torch-2.10.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:3202429f58309b9fa96a614885eace4b7995729f44beb54d3e4a47773649d382", size = 79851305, upload-time = "2026-01-21T16:24:09.209Z" }, + { url = "https://files.pythonhosted.org/packages/d8/14/21fbce63bc452381ba5f74a2c0a959fdf5ad5803ccc0c654e752e0dbe91a/torch-2.10.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:aae1b29cd68e50a9397f5ee897b9c24742e9e306f88a807a27d617f07adb3bd8", size = 146005472, upload-time = "2026-01-21T16:22:29.022Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/fd/b207d1c525cb570ef47f3e9f836b154685011fce11a2f444ba8a4084d042/torch-2.10.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6021db85958db2f07ec94e1bc77212721ba4920c12a18dc552d2ae36a3eb163f", size = 915612644, upload-time = "2026-01-21T16:21:47.019Z" }, + { url = "https://files.pythonhosted.org/packages/36/53/0197f868c75f1050b199fe58f9bf3bf3aecac9b4e85cc9c964383d745403/torch-2.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff43db38af76fda183156153983c9a096fc4c78d0cd1e07b14a2314c7f01c2c8", size = 113997015, upload-time = "2026-01-21T16:23:00.767Z" }, + { url = "https://files.pythonhosted.org/packages/0e/13/e76b4d9c160e89fff48bf16b449ea324bda84745d2ab30294c37c2434c0d/torch-2.10.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:cdf2a523d699b70d613243211ecaac14fe9c5df8a0b0a9c02add60fb2a413e0f", size = 79498248, upload-time = "2026-01-21T16:23:09.315Z" }, + { url = "https://files.pythonhosted.org/packages/4f/93/716b5ac0155f1be70ed81bacc21269c3ece8dba0c249b9994094110bfc51/torch-2.10.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:bf0d9ff448b0218e0433aeb198805192346c4fd659c852370d5cc245f602a06a", size = 79464992, upload-time = "2026-01-21T16:23:05.162Z" }, + { url = "https://files.pythonhosted.org/packages/69/2b/51e663ff190c9d16d4a8271203b71bc73a16aa7619b9f271a69b9d4a936b/torch-2.10.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:233aed0659a2503b831d8a67e9da66a62c996204c0bba4f4c442ccc0c68a3f60", size = 146018567, upload-time = "2026-01-21T16:22:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/5e/cd/4b95ef7f293b927c283db0b136c42be91c8ec6845c44de0238c8c23bdc80/torch-2.10.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:682497e16bdfa6efeec8cde66531bc8d1fbbbb4d8788ec6173c089ed3cc2bfe5", size = 915721646, upload-time = "2026-01-21T16:21:16.983Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/97/078a007208f8056d88ae43198833469e61a0a355abc0b070edd2c085eb9a/torch-2.10.0-cp314-cp314-win_amd64.whl", hash = "sha256:6528f13d2a8593a1a412ea07a99812495bec07e9224c28b2a25c0a30c7da025c", size = 113752373, upload-time = "2026-01-21T16:22:13.471Z" }, + { url = "https://files.pythonhosted.org/packages/d8/94/71994e7d0d5238393df9732fdab607e37e2b56d26a746cb59fdb415f8966/torch-2.10.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:f5ab4ba32383061be0fb74bda772d470140a12c1c3b58a0cfbf3dae94d164c28", size = 79850324, upload-time = "2026-01-21T16:22:09.494Z" }, + { url = "https://files.pythonhosted.org/packages/e2/65/1a05346b418ea8ccd10360eef4b3e0ce688fba544e76edec26913a8d0ee0/torch-2.10.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:716b01a176c2a5659c98f6b01bf868244abdd896526f1c692712ab36dbaf9b63", size = 146006482, upload-time = "2026-01-21T16:22:18.42Z" }, + { url = "https://files.pythonhosted.org/packages/1d/b9/5f6f9d9e859fc3235f60578fa64f52c9c6e9b4327f0fe0defb6de5c0de31/torch-2.10.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:d8f5912ba938233f86361e891789595ff35ca4b4e2ac8fe3670895e5976731d6", size = 915613050, upload-time = "2026-01-21T16:20:49.035Z" }, + { url = "https://files.pythonhosted.org/packages/66/4d/35352043ee0eaffdeff154fad67cd4a31dbed7ff8e3be1cc4549717d6d51/torch-2.10.0-cp314-cp314t-win_amd64.whl", hash = "sha256:71283a373f0ee2c89e0f0d5f446039bdabe8dbc3c9ccf35f0f784908b0acd185", size = 113995816, upload-time = "2026-01-21T16:22:05.312Z" }, ] [[package]] @@ -8689,7 +9741,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/62/9a/d3d8da1d1a8a189b2 [[package]] name = "torchvision" -version = "0.24.1" +version = "0.25.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, @@ -8698,34 +9750,34 @@ dependencies = [ { name = "torch" }, ] wheels = 
[ - { url = "https://files.pythonhosted.org/packages/f7/09/d51aadf8591138e08b74c64a6eb783630c7a31ca2634416277115a9c3a2b/torchvision-0.24.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ded5e625788572e4e1c4d155d1bbc48805c113794100d70e19c76e39e4d53465", size = 1891441, upload-time = "2025-11-12T15:25:01.687Z" }, - { url = "https://files.pythonhosted.org/packages/6b/49/a35df863e7c153aad82af7505abd8264a5b510306689712ef86bea862822/torchvision-0.24.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:54ed17c3d30e718e08d8da3fd5b30ea44b0311317e55647cb97077a29ecbc25b", size = 2386226, upload-time = "2025-11-12T15:25:05.449Z" }, - { url = "https://files.pythonhosted.org/packages/49/20/f2d7cd1eea052887c1083afff0b8df5228ec93b53e03759f20b1a3c6d22a/torchvision-0.24.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f476da4e085b7307aaab6f540219617d46d5926aeda24be33e1359771c83778f", size = 8046093, upload-time = "2025-11-12T15:25:09.425Z" }, - { url = "https://files.pythonhosted.org/packages/d8/cf/0ff4007c09903199307da5f53a192ff5d62b45447069e9ef3a19bdc5ff12/torchvision-0.24.1-cp310-cp310-win_amd64.whl", hash = "sha256:fbdbdae5e540b868a681240b7dbd6473986c862445ee8a138680a6a97d6c34ff", size = 3696202, upload-time = "2025-11-12T15:25:10.657Z" }, - { url = "https://files.pythonhosted.org/packages/e7/69/30f5f03752aa1a7c23931d2519b31e557f3f10af5089d787cddf3b903ecf/torchvision-0.24.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:056c525dc875f18fe8e9c27079ada166a7b2755cea5a2199b0bc7f1f8364e600", size = 1891436, upload-time = "2025-11-12T15:25:04.3Z" }, - { url = "https://files.pythonhosted.org/packages/0c/69/49aae86edb75fe16460b59a191fcc0f568c2378f780bb063850db0fe007a/torchvision-0.24.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:1e39619de698e2821d71976c92c8a9e50cdfd1e993507dfb340f2688bfdd8283", size = 2387757, upload-time = "2025-11-12T15:25:06.795Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/c9/1dfc3db98797b326f1d0c3f3bb61c83b167a813fc7eab6fcd2edb8c7eb9d/torchvision-0.24.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a0f106663e60332aa4fcb1ca2159ef8c3f2ed266b0e6df88de261048a840e0df", size = 8047682, upload-time = "2025-11-12T15:25:21.125Z" }, - { url = "https://files.pythonhosted.org/packages/fa/bb/cfc6a6f6ccc84a534ed1fdf029ae5716dd6ff04e57ed9dc2dab38bf652d5/torchvision-0.24.1-cp311-cp311-win_amd64.whl", hash = "sha256:a9308cdd37d8a42e14a3e7fd9d271830c7fecb150dd929b642f3c1460514599a", size = 4037588, upload-time = "2025-11-12T15:25:14.402Z" }, - { url = "https://files.pythonhosted.org/packages/f0/af/18e2c6b9538a045f60718a0c5a058908ccb24f88fde8e6f0fc12d5ff7bd3/torchvision-0.24.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e48bf6a8ec95872eb45763f06499f87bd2fb246b9b96cb00aae260fda2f96193", size = 1891433, upload-time = "2025-11-12T15:25:03.232Z" }, - { url = "https://files.pythonhosted.org/packages/9d/43/600e5cfb0643d10d633124f5982d7abc2170dfd7ce985584ff16edab3e76/torchvision-0.24.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:7fb7590c737ebe3e1c077ad60c0e5e2e56bb26e7bccc3b9d04dbfc34fd09f050", size = 2386737, upload-time = "2025-11-12T15:25:08.288Z" }, - { url = "https://files.pythonhosted.org/packages/93/b1/db2941526ecddd84884132e2742a55c9311296a6a38627f9e2627f5ac889/torchvision-0.24.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:66a98471fc18cad9064123106d810a75f57f0838eee20edc56233fd8484b0cc7", size = 8049868, upload-time = "2025-11-12T15:25:13.058Z" }, - { url = "https://files.pythonhosted.org/packages/69/98/16e583f59f86cd59949f59d52bfa8fc286f86341a229a9d15cbe7a694f0c/torchvision-0.24.1-cp312-cp312-win_amd64.whl", hash = "sha256:4aa6cb806eb8541e92c9b313e96192c6b826e9eb0042720e2fa250d021079952", size = 4302006, upload-time = "2025-11-12T15:25:16.184Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/97/ab40550f482577f2788304c27220e8ba02c63313bd74cf2f8920526aac20/torchvision-0.24.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:8a6696db7fb71eadb2c6a48602106e136c785642e598eb1533e0b27744f2cce6", size = 1891435, upload-time = "2025-11-12T15:25:28.642Z" }, - { url = "https://files.pythonhosted.org/packages/30/65/ac0a3f9be6abdbe4e1d82c915d7e20de97e7fd0e9a277970508b015309f3/torchvision-0.24.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:db2125c46f9cb25dc740be831ce3ce99303cfe60439249a41b04fd9f373be671", size = 2338718, upload-time = "2025-11-12T15:25:26.19Z" }, - { url = "https://files.pythonhosted.org/packages/10/b5/5bba24ff9d325181508501ed7f0c3de8ed3dd2edca0784d48b144b6c5252/torchvision-0.24.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:f035f0cacd1f44a8ff6cb7ca3627d84c54d685055961d73a1a9fb9827a5414c8", size = 8049661, upload-time = "2025-11-12T15:25:22.558Z" }, - { url = "https://files.pythonhosted.org/packages/5c/ec/54a96ae9ab6a0dd66d4bba27771f892e36478a9c3489fa56e51c70abcc4d/torchvision-0.24.1-cp313-cp313-win_amd64.whl", hash = "sha256:16274823b93048e0a29d83415166a2e9e0bf4e1b432668357b657612a4802864", size = 4319808, upload-time = "2025-11-12T15:25:17.318Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f3/a90a389a7e547f3eb8821b13f96ea7c0563cdefbbbb60a10e08dda9720ff/torchvision-0.24.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e3f96208b4bef54cd60e415545f5200346a65024e04f29a26cd0006dbf9e8e66", size = 2005342, upload-time = "2025-11-12T15:25:11.871Z" }, - { url = "https://files.pythonhosted.org/packages/a9/fe/ff27d2ed1b524078164bea1062f23d2618a5fc3208e247d6153c18c91a76/torchvision-0.24.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:f231f6a4f2aa6522713326d0d2563538fa72d613741ae364f9913027fa52ea35", size = 2341708, upload-time = "2025-11-12T15:25:25.08Z" }, - { url = 
"https://files.pythonhosted.org/packages/b1/b9/d6c903495cbdfd2533b3ef6f7b5643ff589ea062f8feb5c206ee79b9d9e5/torchvision-0.24.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:1540a9e7f8cf55fe17554482f5a125a7e426347b71de07327d5de6bfd8d17caa", size = 8177239, upload-time = "2025-11-12T15:25:18.554Z" }, - { url = "https://files.pythonhosted.org/packages/4f/2b/ba02e4261369c3798310483028495cf507e6cb3f394f42e4796981ecf3a7/torchvision-0.24.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d83e16d70ea85d2f196d678bfb702c36be7a655b003abed84e465988b6128938", size = 4251604, upload-time = "2025-11-12T15:25:34.069Z" }, - { url = "https://files.pythonhosted.org/packages/42/84/577b2cef8f32094add5f52887867da4c2a3e6b4261538447e9b48eb25812/torchvision-0.24.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cccf4b4fec7fdfcd3431b9ea75d1588c0a8596d0333245dafebee0462abe3388", size = 2005319, upload-time = "2025-11-12T15:25:23.827Z" }, - { url = "https://files.pythonhosted.org/packages/5f/34/ecb786bffe0159a3b49941a61caaae089853132f3cd1e8f555e3621f7e6f/torchvision-0.24.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:1b495edd3a8f9911292424117544f0b4ab780452e998649425d1f4b2bed6695f", size = 2338844, upload-time = "2025-11-12T15:25:32.625Z" }, - { url = "https://files.pythonhosted.org/packages/51/99/a84623786a6969504c87f2dc3892200f586ee13503f519d282faab0bb4f0/torchvision-0.24.1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:ab211e1807dc3e53acf8f6638df9a7444c80c0ad050466e8d652b3e83776987b", size = 8175144, upload-time = "2025-11-12T15:25:31.355Z" }, - { url = "https://files.pythonhosted.org/packages/6d/ba/8fae3525b233e109317ce6a9c1de922ab2881737b029a7e88021f81e068f/torchvision-0.24.1-cp314-cp314-win_amd64.whl", hash = "sha256:18f9cb60e64b37b551cd605a3d62c15730c086362b40682d23e24b616a697d41", size = 4234459, upload-time = "2025-11-12T15:25:19.859Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/33/481602c1c72d0485d4b3a6b48c9534b71c2957c9d83bf860eb837bf5a620/torchvision-0.24.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ec9d7379c519428395e4ffda4dbb99ec56be64b0a75b95989e00f9ec7ae0b2d7", size = 2005336, upload-time = "2025-11-12T15:25:27.225Z" }, - { url = "https://files.pythonhosted.org/packages/d0/7f/372de60bf3dd8f5593bd0d03f4aecf0d1fd58f5bc6943618d9d913f5e6d5/torchvision-0.24.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:af9201184c2712d808bd4eb656899011afdfce1e83721c7cb08000034df353fe", size = 2341704, upload-time = "2025-11-12T15:25:29.857Z" }, - { url = "https://files.pythonhosted.org/packages/36/9b/0f3b9ff3d0225ee2324ec663de0e7fb3eb855615ca958ac1875f22f1f8e5/torchvision-0.24.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:9ef95d819fd6df81bc7cc97b8f21a15d2c0d3ac5dbfaab5cbc2d2ce57114b19e", size = 8177422, upload-time = "2025-11-12T15:25:37.357Z" }, - { url = "https://files.pythonhosted.org/packages/d6/ab/e2bcc7c2f13d882a58f8b30ff86f794210b075736587ea50f8c545834f8a/torchvision-0.24.1-cp314-cp314t-win_amd64.whl", hash = "sha256:480b271d6edff83ac2e8d69bbb4cf2073f93366516a50d48f140ccfceedb002e", size = 4335190, upload-time = "2025-11-12T15:25:35.745Z" }, + { url = "https://files.pythonhosted.org/packages/50/ae/cbf727421eb73f1cf907fbe5788326a08f111b3f6b6ddca15426b53fec9a/torchvision-0.25.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a95c47abb817d4e90ea1a8e57bd0d728e3e6b533b3495ae77d84d883c4d11f56", size = 1874919, upload-time = "2026-01-21T16:27:47.617Z" }, + { url = "https://files.pythonhosted.org/packages/64/68/dc7a224f606d53ea09f9a85196a3921ec3a801b0b1d17e84c73392f0c029/torchvision-0.25.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:acc339aba4a858192998c2b91f635827e40d9c469d9cf1455bafdda6e4c28ea4", size = 2343220, upload-time = "2026-01-21T16:27:44.26Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/fa/8cce5ca7ffd4da95193232493703d20aa06303f37b119fd23a65df4f239a/torchvision-0.25.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0d9a3f925a081dd2ebb0b791249b687c2ef2c2717d027946654607494b9b64b6", size = 8068106, upload-time = "2026-01-21T16:27:37.805Z" }, + { url = "https://files.pythonhosted.org/packages/8b/b9/a53bcf8f78f2cd89215e9ded70041765d50ef13bf301f9884ec6041a9421/torchvision-0.25.0-cp310-cp310-win_amd64.whl", hash = "sha256:b57430fbe9e9b697418a395041bb615124d9c007710a2712fda6e35fb310f264", size = 3697295, upload-time = "2026-01-21T16:27:36.574Z" }, + { url = "https://files.pythonhosted.org/packages/3e/be/c704bceaf11c4f6b19d64337a34a877fcdfe3bd68160a8c9ae9bea4a35a3/torchvision-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:db74a551946b75d19f9996c419a799ffdf6a223ecf17c656f90da011f1d75b20", size = 1874923, upload-time = "2026-01-21T16:27:46.574Z" }, + { url = "https://files.pythonhosted.org/packages/ae/e9/f143cd71232430de1f547ceab840f68c55e127d72558b1061a71d0b193cd/torchvision-0.25.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f49964f96644dbac2506dffe1a0a7ec0f2bf8cf7a588c3319fed26e6329ffdf3", size = 2344808, upload-time = "2026-01-21T16:27:43.191Z" }, + { url = "https://files.pythonhosted.org/packages/43/ae/ad5d6165797de234c9658752acb4fce65b78a6a18d82efdf8367c940d8da/torchvision-0.25.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:153c0d2cbc34b7cf2da19d73450f24ba36d2b75ec9211b9962b5022fb9e4ecee", size = 8070752, upload-time = "2026-01-21T16:27:33.748Z" }, + { url = "https://files.pythonhosted.org/packages/23/19/55b28aecdc7f38df57b8eb55eb0b14a62b470ed8efeb22cdc74224df1d6a/torchvision-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:ea580ffd6094cc01914ad32f8c8118174f18974629af905cea08cb6d5d48c7b7", size = 4038722, upload-time = "2026-01-21T16:27:41.355Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/3a/6ea0d73f49a9bef38a1b3a92e8dd455cea58470985d25635beab93841748/torchvision-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2abe430c90b1d5e552680037d68da4eb80a5852ebb1c811b2b89d299b10573b", size = 1874920, upload-time = "2026-01-21T16:27:45.348Z" }, + { url = "https://files.pythonhosted.org/packages/51/f8/c0e1ef27c66e15406fece94930e7d6feee4cb6374bbc02d945a630d6426e/torchvision-0.25.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:b75deafa2dfea3e2c2a525559b04783515e3463f6e830cb71de0fb7ea36fe233", size = 2344556, upload-time = "2026-01-21T16:27:40.125Z" }, + { url = "https://files.pythonhosted.org/packages/68/2f/f24b039169db474e8688f649377de082a965fbf85daf4e46c44412f1d15a/torchvision-0.25.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f25aa9e380865b11ea6e9d99d84df86b9cc959f1a007cd966fc6f1ab2ed0e248", size = 8072351, upload-time = "2026-01-21T16:27:21.074Z" }, + { url = "https://files.pythonhosted.org/packages/ad/16/8f650c2e288977cf0f8f85184b90ee56ed170a4919347fc74ee99286ed6f/torchvision-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:f9c55ae8d673ab493325d1267cbd285bb94d56f99626c00ac4644de32a59ede3", size = 4303059, upload-time = "2026-01-21T16:27:11.08Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5b/1562a04a6a5a4cf8cf40016a0cdeda91ede75d6962cff7f809a85ae966a5/torchvision-0.25.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:24e11199e4d84ba9c5ee7825ebdf1cd37ce8deec225117f10243cae984ced3ec", size = 1874918, upload-time = "2026-01-21T16:27:39.02Z" }, + { url = "https://files.pythonhosted.org/packages/36/b1/3d6c42f62c272ce34fcce609bb8939bdf873dab5f1b798fd4e880255f129/torchvision-0.25.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:5f271136d2d2c0b7a24c5671795c6e4fd8da4e0ea98aeb1041f62bc04c4370ef", size = 2309106, upload-time = "2026-01-21T16:27:30.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/60/59bb9c8b67cce356daeed4cb96a717caa4f69c9822f72e223a0eae7a9bd9/torchvision-0.25.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:855c0dc6d37f462482da7531c6788518baedca1e0847f3df42a911713acdfe52", size = 8071522, upload-time = "2026-01-21T16:27:29.392Z" }, + { url = "https://files.pythonhosted.org/packages/32/a5/9a9b1de0720f884ea50dbf9acb22cbe5312e51d7b8c4ac6ba9b51efd9bba/torchvision-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:cef0196be31be421f6f462d1e9da1101be7332d91984caa6f8022e6c78a5877f", size = 4321911, upload-time = "2026-01-21T16:27:35.195Z" }, + { url = "https://files.pythonhosted.org/packages/52/99/dca81ed21ebaeff2b67cc9f815a20fdaa418b69f5f9ea4c6ed71721470db/torchvision-0.25.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a8f8061284395ce31bcd460f2169013382ccf411148ceb2ee38e718e9860f5a7", size = 1896209, upload-time = "2026-01-21T16:27:32.159Z" }, + { url = "https://files.pythonhosted.org/packages/28/cc/2103149761fdb4eaed58a53e8437b2d716d48f05174fab1d9fcf1e2a2244/torchvision-0.25.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:146d02c9876858420adf41f3189fe90e3d6a409cbfa65454c09f25fb33bf7266", size = 2310735, upload-time = "2026-01-21T16:27:22.327Z" }, + { url = "https://files.pythonhosted.org/packages/76/ad/f4c985ad52ddd3b22711c588501be1b330adaeaf6850317f66751711b78c/torchvision-0.25.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c4d395cb2c4a2712f6eb93a34476cdf7aae74bb6ea2ea1917f858e96344b00aa", size = 8089557, upload-time = "2026-01-21T16:27:27.666Z" }, + { url = "https://files.pythonhosted.org/packages/63/cc/0ea68b5802e5e3c31f44b307e74947bad5a38cc655231d845534ed50ddb8/torchvision-0.25.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5e6b449e9fa7d642142c0e27c41e5a43b508d57ed8e79b7c0a0c28652da8678c", size = 4344260, upload-time = "2026-01-21T16:27:17.018Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/1f/fa839532660e2602b7e704d65010787c5bb296258b44fa8b9c1cd6175e7d/torchvision-0.25.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:620a236288d594dcec7634c754484542dc0a5c1b0e0b83a34bda5e91e9b7c3a1", size = 1896193, upload-time = "2026-01-21T16:27:24.785Z" }, + { url = "https://files.pythonhosted.org/packages/80/ed/d51889da7ceaf5ff7a0574fb28f9b6b223df19667265395891f81b364ab3/torchvision-0.25.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:0b5e7f50002a8145a98c5694a018e738c50e2972608310c7e88e1bd4c058f6ce", size = 2309331, upload-time = "2026-01-21T16:27:19.97Z" }, + { url = "https://files.pythonhosted.org/packages/90/a5/f93fcffaddd8f12f9e812256830ec9c9ca65abbf1bc369379f9c364d1ff4/torchvision-0.25.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:632db02300e83793812eee4f61ae6a2686dab10b4cfd628b620dc47747aa9d03", size = 8088713, upload-time = "2026-01-21T16:27:15.281Z" }, + { url = "https://files.pythonhosted.org/packages/1f/eb/d0096eed5690d962853213f2ee00d91478dfcb586b62dbbb449fb8abc3a6/torchvision-0.25.0-cp314-cp314-win_amd64.whl", hash = "sha256:d1abd5ed030c708f5dbf4812ad5f6fbe9384b63c40d6bd79f8df41a4a759a917", size = 4325058, upload-time = "2026-01-21T16:27:26.165Z" }, + { url = "https://files.pythonhosted.org/packages/97/36/96374a4c7ab50dea9787ce987815614ccfe988a42e10ac1a2e3e5b60319a/torchvision-0.25.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad9a8a5877782944d99186e4502a614770fe906626d76e9cd32446a0ac3075f2", size = 1896207, upload-time = "2026-01-21T16:27:23.383Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e2/7abb10a867db79b226b41da419b63b69c0bd5b82438c4a4ed50e084c552f/torchvision-0.25.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:40a122c3cf4d14b651f095e0f672b688dde78632783fc5cd3d4d5e4f6a828563", size = 2310741, upload-time = "2026-01-21T16:27:18.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/e6/0927784e6ffc340b6676befde1c60260bd51641c9c574b9298d791a9cda4/torchvision-0.25.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:846890161b825b38aa85fc37fb3ba5eea74e7091ff28bab378287111483b6443", size = 8089772, upload-time = "2026-01-21T16:27:14.048Z" }, + { url = "https://files.pythonhosted.org/packages/b6/37/e7ca4ec820d434c0f23f824eb29f0676a0c3e7a118f1514f5b949c3356da/torchvision-0.25.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f07f01d27375ad89d72aa2b3f2180f07da95dd9d2e4c758e015c0acb2da72977", size = 4425879, upload-time = "2026-01-21T16:27:12.579Z" }, ] [[package]] @@ -8749,14 +9801,14 @@ wheels = [ [[package]] name = "tqdm" -version = "4.67.1" +version = "4.67.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, + { url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = 
"2026-02-03T17:35:50.982Z" }, ] [[package]] @@ -8811,29 +9863,29 @@ wheels = [ [[package]] name = "trimesh" -version = "4.11.0" +version = "4.11.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/47/f618103c076b06ac79f1ba71e26ca7df0292c3e2b83535d4873e7f127f95/trimesh-4.11.0.tar.gz", hash = "sha256:0b4acdcf28f21013385ccf81619a9dce703af348f69b198180a2212b1bc67821", size = 834847, upload-time = "2026-01-07T19:51:53.043Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/41/de14e2fa9b2d99214c60402fc57d2efb201f2925b16d6bee289565901d83/trimesh-4.11.2.tar.gz", hash = "sha256:30fbde5b8dd7c157e7ff4d54286cb35291844fd3f4d0364e8b2727f1b308fb06", size = 835044, upload-time = "2026-02-10T16:00:27.58Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/23/cc/5056718bf473be51712ecf74800093a13d69c576f1ca0e0f3cf4684567cf/trimesh-4.11.0-py3-none-any.whl", hash = "sha256:6237019fed3bdc8d68acc45a47a4ea62db1483c3fab5c87d587a48a0e69bbff3", size = 740342, upload-time = "2026-01-07T19:51:50.993Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b9/da09903ea53b677a58ba770112de6fe8b2acb8b4cd9bffae4ff6cfe7c072/trimesh-4.11.2-py3-none-any.whl", hash = "sha256:25e3ab2620f9eca5c9376168c67aabdd32205dad1c4eea09cd45cd4a3edf775a", size = 740328, upload-time = "2026-02-10T16:00:25.246Z" }, ] [[package]] name = "triton" -version = "3.5.1" +version = "3.6.0" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/6e/676ab5019b4dde8b9b7bab71245102fc02778ef3df48218b298686b9ffd6/triton-3.5.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:5fc53d849f879911ea13f4a877243afc513187bc7ee92d1f2c0f1ba3169e3c94", size = 170320692, upload-time = "2025-11-11T17:40:46.074Z" }, - { url = "https://files.pythonhosted.org/packages/b0/72/ec90c3519eaf168f22cb1757ad412f3a2add4782ad3a92861c9ad135d886/triton-3.5.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61413522a48add32302353fdbaaf92daaaab06f6b5e3229940d21b5207f47579", size = 170425802, upload-time = "2025-11-11T17:40:53.209Z" }, - { url = "https://files.pythonhosted.org/packages/f2/50/9a8358d3ef58162c0a415d173cfb45b67de60176e1024f71fbc4d24c0b6d/triton-3.5.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d2c6b915a03888ab931a9fd3e55ba36785e1fe70cbea0b40c6ef93b20fc85232", size = 170470207, upload-time = "2025-11-11T17:41:00.253Z" }, - { url = "https://files.pythonhosted.org/packages/27/46/8c3bbb5b0a19313f50edcaa363b599e5a1a5ac9683ead82b9b80fe497c8d/triton-3.5.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3f4346b6ebbd4fad18773f5ba839114f4826037c9f2f34e0148894cd5dd3dba", size = 170470410, upload-time = "2025-11-11T17:41:06.319Z" }, - { url = "https://files.pythonhosted.org/packages/37/92/e97fcc6b2c27cdb87ce5ee063d77f8f26f19f06916aa680464c8104ef0f6/triton-3.5.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0b4d2c70127fca6a23e247f9348b8adde979d2e7a20391bfbabaac6aebc7e6a8", size = 170579924, upload-time = "2025-11-11T17:41:12.455Z" }, - { url = "https://files.pythonhosted.org/packages/a4/e6/c595c35e5c50c4bc56a7bac96493dad321e9e29b953b526bbbe20f9911d0/triton-3.5.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0637b1efb1db599a8e9dc960d53ab6e4637db7d4ab6630a0974705d77b14b60", size = 170480488, upload-time = "2025-11-11T17:41:18.222Z" }, - { url = 
"https://files.pythonhosted.org/packages/16/b5/b0d3d8b901b6a04ca38df5e24c27e53afb15b93624d7fd7d658c7cd9352a/triton-3.5.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bac7f7d959ad0f48c0e97d6643a1cc0fd5786fe61cb1f83b537c6b2d54776478", size = 170582192, upload-time = "2025-11-11T17:41:23.963Z" }, + { url = "https://files.pythonhosted.org/packages/8c/f7/f1c9d3424ab199ac53c2da567b859bcddbb9c9e7154805119f8bd95ec36f/triton-3.6.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6550fae429e0667e397e5de64b332d1e5695b73650ee75a6146e2e902770bea", size = 188105201, upload-time = "2026-01-20T16:00:29.272Z" }, + { url = "https://files.pythonhosted.org/packages/e0/12/b05ba554d2c623bffa59922b94b0775673de251f468a9609bc9e45de95e9/triton-3.6.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8e323d608e3a9bfcc2d9efcc90ceefb764a82b99dea12a86d643c72539ad5d3", size = 188214640, upload-time = "2026-01-20T16:00:35.869Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a8/cdf8b3e4c98132f965f88c2313a4b493266832ad47fb52f23d14d4f86bb5/triton-3.6.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74caf5e34b66d9f3a429af689c1c7128daba1d8208df60e81106b115c00d6fca", size = 188266850, upload-time = "2026-01-20T16:00:43.041Z" }, + { url = "https://files.pythonhosted.org/packages/f9/0b/37d991d8c130ce81a8728ae3c25b6e60935838e9be1b58791f5997b24a54/triton-3.6.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10c7f76c6e72d2ef08df639e3d0d30729112f47a56b0c81672edc05ee5116ac9", size = 188289450, upload-time = "2026-01-20T16:00:49.136Z" }, + { url = "https://files.pythonhosted.org/packages/35/f8/9c66bfc55361ec6d0e4040a0337fb5924ceb23de4648b8a81ae9d33b2b38/triton-3.6.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d002e07d7180fd65e622134fbd980c9a3d4211fb85224b56a0a0efbd422ab72f", size = 188400296, upload-time = 
"2026-01-20T16:00:56.042Z" }, + { url = "https://files.pythonhosted.org/packages/df/3d/9e7eee57b37c80cec63322c0231bb6da3cfe535a91d7a4d64896fcb89357/triton-3.6.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a17a5d5985f0ac494ed8a8e54568f092f7057ef60e1b0fa09d3fd1512064e803", size = 188273063, upload-time = "2026-01-20T16:01:07.278Z" }, + { url = "https://files.pythonhosted.org/packages/f6/56/6113c23ff46c00aae423333eb58b3e60bdfe9179d542781955a5e1514cb3/triton-3.6.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:46bd1c1af4b6704e554cad2eeb3b0a6513a980d470ccfa63189737340c7746a7", size = 188397994, upload-time = "2026-01-20T16:01:14.236Z" }, ] [[package]] @@ -8850,17 +9902,17 @@ wheels = [ [[package]] name = "typer" -version = "0.21.1" +version = "0.23.1" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "annotated-doc" }, { name = "click" }, { name = "rich" }, { name = "shellingham" }, - { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/36/bf/8825b5929afd84d0dabd606c67cd57b8388cb3ec385f7ef19c5cc2202069/typer-0.21.1.tar.gz", hash = "sha256:ea835607cd752343b6b2b7ce676893e5a0324082268b48f27aa058bdb7d2145d", size = 110371, upload-time = "2026-01-06T11:21:10.989Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/07/b822e1b307d40e263e8253d2384cf98c51aa2368cc7ba9a07e523a1d964b/typer-0.23.1.tar.gz", hash = "sha256:2070374e4d31c83e7b61362fd859aa683576432fd5b026b060ad6b4cd3b86134", size = 120047, upload-time = "2026-02-13T10:04:30.984Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/1d/d9257dd49ff2ca23ea5f132edf1281a0c4f9de8a762b9ae399b670a59235/typer-0.21.1-py3-none-any.whl", hash = "sha256:7985e89081c636b88d172c2ee0cfe33c253160994d47bdfdc302defd7d1f1d01", size = 47381, upload-time = "2026-01-06T11:21:09.824Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/91/9b286ab899c008c2cb05e8be99814807e7fbbd33f0c0c960470826e5ac82/typer-0.23.1-py3-none-any.whl", hash = "sha256:3291ad0d3c701cbf522012faccfbb29352ff16ad262db2139e6b01f15781f14e", size = 56813, upload-time = "2026-02-13T10:04:32.008Z" }, ] [[package]] @@ -8905,36 +9957,36 @@ wheels = [ [[package]] name = "types-jmespath" -version = "1.0.2.20250809" +version = "1.1.0.20260124" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d5/ff/6848b1603ca47fff317b44dfff78cc1fb0828262f840b3ab951b619d5a22/types_jmespath-1.0.2.20250809.tar.gz", hash = "sha256:e194efec21c0aeae789f701ae25f17c57c25908e789b1123a5c6f8d915b4adff", size = 10248, upload-time = "2025-08-09T03:14:57.996Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2b/ca/c8d7fc6e450c2f8fc6f510cb194754c43b17f933f2dcabcfc6985cbb97a8/types_jmespath-1.1.0.20260124.tar.gz", hash = "sha256:29d86868e72c0820914577077b27d167dcab08b1fc92157a29d537ff7153fdfe", size = 10709, upload-time = "2026-01-24T03:18:46.557Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/6a/65c8be6b6555beaf1a654ae1c2308c2e19a610c0b318a9730e691b79ac79/types_jmespath-1.0.2.20250809-py3-none-any.whl", hash = "sha256:4147d17cc33454f0dac7e78b4e18e532a1330c518d85f7f6d19e5818ab83da21", size = 11494, upload-time = "2025-08-09T03:14:57.292Z" }, + { url = "https://files.pythonhosted.org/packages/61/91/915c4a6e6e9bd2bca3ec0c21c1771b175c59e204b85e57f3f572370fe753/types_jmespath-1.1.0.20260124-py3-none-any.whl", hash = "sha256:ec387666d446b15624215aa9cbd2867ffd885b6c74246d357c65e830c7a138b3", size = 11509, upload-time = "2026-01-24T03:18:45.536Z" }, ] [[package]] name = "types-jsonschema" -version = "4.25.1.20251009" +version = "4.26.0.20260202" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "referencing" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/ef/da/5b901088da5f710690b422137e8ae74197fb1ca471e4aa84dd3ef0d6e295/types_jsonschema-4.25.1.20251009.tar.gz", hash = "sha256:75d0f5c5dd18dc23b664437a0c1a625743e8d2e665ceaf3aecb29841f3a5f97f", size = 15661, upload-time = "2025-10-09T02:54:36.963Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/07/68f63e715eb327ed2f5292e29e8be99785db0f72c7664d2c63bd4dbdc29d/types_jsonschema-4.26.0.20260202.tar.gz", hash = "sha256:29831baa4308865a9aec547a61797a06fc152b0dac8dddd531e002f32265cb07", size = 16168, upload-time = "2026-02-02T04:11:22.585Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/6a/e5146754c0dfc272f176db9c245bc43cc19030262d891a5a85d472797e60/types_jsonschema-4.25.1.20251009-py3-none-any.whl", hash = "sha256:f30b329037b78e7a60146b1146feb0b6fb0b71628637584409bada83968dad3e", size = 15925, upload-time = "2025-10-09T02:54:35.847Z" }, + { url = "https://files.pythonhosted.org/packages/c1/06/962d4f364f779d7389cd31a1bb581907b057f52f0ace2c119a8dd8409db6/types_jsonschema-4.26.0.20260202-py3-none-any.whl", hash = "sha256:41c95343abc4de9264e333a55e95dfb4d401e463856d0164eec9cb182e8746da", size = 15914, upload-time = "2026-02-02T04:11:21.61Z" }, ] [[package]] name = "types-networkx" -version = "3.6.1.20251220" +version = "3.6.1.20260210" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/07/e3/dcc20d645dc0631b0df263959b8dde49dc47ad3c0537d8958bfefe692380/types_networkx-3.6.1.20251220.tar.gz", hash = "sha256:caf95e0d7777b969e50ceeb2c430d9d4dfe6b7bdee43c42dc9879a2d4408a790", size = 73500, upload-time = "2025-12-20T03:07:47.933Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/4f/d9/7ddf6afb27246998ae41f7ad19da410d83e24623b4db065b5a46888d327e/types_networkx-3.6.1.20260210.tar.gz", hash = "sha256:9864affb01ed53d6bf41c1042fbced155ac409ae02ca505e0a3fffe48901b6e1", size = 73702, upload-time = "2026-02-10T04:22:17.641Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/65/e7/fe40cfe7ba384d1f46fee835eb7727a4ee2fd80021a69add9553197b69a1/types_networkx-3.6.1.20251220-py3-none-any.whl", hash = "sha256:417ccbe7841f335a4c2b8e7515c3bc97a00fb5f686f399a763ef64392b209eac", size = 162715, upload-time = "2025-12-20T03:07:46.882Z" }, + { url = "https://files.pythonhosted.org/packages/55/b0/1c45681a8b8d3ccf25cebaa296b06d5240518bd7a7d861cf14a15bf9dd20/types_networkx-3.6.1.20260210-py3-none-any.whl", hash = "sha256:075ccb9f2e2b370c3a9eae9636f2f38890e7c494e6323cb72a0207f104f8225e", size = 162684, upload-time = "2026-02-10T04:22:16.055Z" }, ] [[package]] @@ -8948,11 +10000,20 @@ wheels = [ [[package]] name = "types-psutil" -version = "7.2.1.20251231" +version = "7.2.2.20260130" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/09/e0/f4881668da3fcc9473b3fb4b3dc028840cf57374d72b798c0912a183163a/types_psutil-7.2.1.20251231.tar.gz", hash = "sha256:dbf9df530b1130e131e4211ed8cea62c08007bfa69faf2883d296bd241d30e4a", size = 25620, upload-time = "2025-12-31T03:18:29.302Z" } +sdist = { url = "https://files.pythonhosted.org/packages/69/14/fc5fb0a6ddfadf68c27e254a02ececd4d5c7fdb0efcb7e7e917a183497fb/types_psutil-7.2.2.20260130.tar.gz", hash = "sha256:15b0ab69c52841cf9ce3c383e8480c620a4d13d6a8e22b16978ebddac5590950", size = 26535, upload-time = "2026-01-30T03:58:14.116Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/61/81f180ffbcd0b3516fa3e0e95588dcd48200b6a08e3df53c6c0941a688fe/types_psutil-7.2.1.20251231-py3-none-any.whl", hash = "sha256:40735ca2fc818aed9dcbff7acb3317a774896615e3f4a7bd356afa224b9178e3", size = 32426, upload-time = 
"2025-12-31T03:18:28.14Z" }, + { url = "https://files.pythonhosted.org/packages/17/d7/60974b7e31545d3768d1770c5fe6e093182c3bfd819429b33133ba6b3e89/types_psutil-7.2.2.20260130-py3-none-any.whl", hash = "sha256:15523a3caa7b3ff03ac7f9b78a6470a59f88f48df1d74a39e70e06d2a99107da", size = 32876, upload-time = "2026-01-30T03:58:13.172Z" }, +] + +[[package]] +name = "types-psycopg2" +version = "2.9.21.20251012" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9b/b3/2d09eaf35a084cffd329c584970a3fa07101ca465c13cad1576d7c392587/types_psycopg2-2.9.21.20251012.tar.gz", hash = "sha256:4cdafd38927da0cfde49804f39ab85afd9c6e9c492800e42f1f0c1a1b0312935", size = 26710, upload-time = "2025-10-12T02:55:39.5Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/0c/05feaf8cb51159f2c0af04b871dab7e98a2f83a3622f5f216331d2dd924c/types_psycopg2-2.9.21.20251012-py3-none-any.whl", hash = "sha256:712bad5c423fe979e357edbf40a07ca40ef775d74043de72bd4544ca328cc57e", size = 24883, upload-time = "2025-10-12T02:55:38.439Z" }, ] [[package]] @@ -9014,7 +10075,7 @@ wheels = [ [[package]] name = "types-tensorflow" -version = "2.18.0.20251008" +version = "2.18.0.20260121" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, @@ -9022,21 +10083,21 @@ dependencies = [ { name = "types-protobuf" }, { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0d/0a/13bde03fb5a23faaadcca2d6914f865e444334133902310ea05e6ade780c/types_tensorflow-2.18.0.20251008.tar.gz", hash = "sha256:8db03d4dd391a362e2ea796ffdbccb03c082127606d4d852edb7ed9504745933", size = 257550, upload-time = "2025-10-08T02:51:51.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/81/43d17caea48c3454bf64c23cba5f7876fc0cd0f0434f350f61782cc95587/types_tensorflow-2.18.0.20260121.tar.gz", 
hash = "sha256:7fe9f75fd00be0f53ca97ba3d3b4cf8ab45447f6d3a959ad164cf9ac421a5f89", size = 258281, upload-time = "2026-01-21T03:24:22.488Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/cc/e50e49db621b0cf03c1f3d10be47389de41a02dc9924c3a83a9c1a55bf28/types_tensorflow-2.18.0.20251008-py3-none-any.whl", hash = "sha256:d6b0dd4d81ac6d9c5af803ebcc8ce0f65c5850c063e8b9789dc828898944b5f4", size = 329023, upload-time = "2025-10-08T02:51:50.024Z" }, + { url = "https://files.pythonhosted.org/packages/87/84/6510e7c7b29c6005d93fd6762f7d7d4a413ffd8ec8e04ebc53ac2d8c5372/types_tensorflow-2.18.0.20260121-py3-none-any.whl", hash = "sha256:80d9a9528fa52dc215a914d6ba47f5500f54b421efd2923adf98cff1760b2cce", size = 329562, upload-time = "2026-01-21T03:24:21.147Z" }, ] [[package]] name = "types-tqdm" -version = "4.67.0.20250809" +version = "4.67.3.20260205" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/d0/cf498fc630d9fdaf2428b93e60b0e67b08008fec22b78716b8323cf644dc/types_tqdm-4.67.0.20250809.tar.gz", hash = "sha256:02bf7ab91256080b9c4c63f9f11b519c27baaf52718e5fdab9e9606da168d500", size = 17200, upload-time = "2025-08-09T03:17:43.489Z" } +sdist = { url = "https://files.pythonhosted.org/packages/53/46/790b9872523a48163bdda87d47849b4466017640e5259d06eed539340afd/types_tqdm-4.67.3.20260205.tar.gz", hash = "sha256:f3023682d4aa3bbbf908c8c6bb35f35692d319460d9bbd3e646e8852f3dd9f85", size = 17597, upload-time = "2026-02-05T04:03:19.721Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/13/3ff0781445d7c12730befce0fddbbc7a76e56eb0e7029446f2853238360a/types_tqdm-4.67.0.20250809-py3-none-any.whl", hash = "sha256:1a73053b31fcabf3c1f3e2a9d5ecdba0f301bde47a418cd0e0bdf774827c5c57", size = 24020, upload-time = "2025-08-09T03:17:42.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/da/7f761868dbaa328392356fab30c18ab90d14cce86b269e7e63328f29d4a3/types_tqdm-4.67.3.20260205-py3-none-any.whl", hash = "sha256:85c31731e81dc3c5cecc34c6c8b2e5166fafa722468f58840c2b5ac6a8c5c173", size = 23894, upload-time = "2026-02-05T04:03:18.48Z" }, ] [[package]] @@ -9048,6 +10109,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] +[[package]] +name = "typing-inspect" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions", marker = "python_full_version >= '3.11'" }, + { name = "typing-extensions", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825, upload-time = "2023-05-24T20:25:47.612Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" }, +] + [[package]] name = "typing-inspection" version = "0.4.2" @@ -9078,9 +10152,89 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/37/87/1f677586e8ac487e29672e4b17455758fce261de06a0d086167bb760361a/uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5", size = 6229, upload-time = "2024-02-09T16:52:00.371Z" }, ] +[[package]] +name = "ujson" +version = "5.11.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/d9/3f17e3c5773fb4941c68d9a37a47b1a79c9649d6c56aefbed87cc409d18a/ujson-5.11.0.tar.gz", hash = "sha256:e204ae6f909f099ba6b6b942131cee359ddda2b6e4ea39c12eb8b991fe2010e0", size = 7156583, upload-time = "2025-08-20T11:57:02.452Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/0c/8bf7a4fabfd01c7eed92d9b290930ce6d14910dec708e73538baa38885d1/ujson-5.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:446e8c11c06048611c9d29ef1237065de0af07cabdd97e6b5b527b957692ec25", size = 55248, upload-time = "2025-08-20T11:55:02.368Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2e/eeab0b8b641817031ede4f790db4c4942df44a12f44d72b3954f39c6a115/ujson-5.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16ccb973b7ada0455201808ff11d48fe9c3f034a6ab5bd93b944443c88299f89", size = 53157, upload-time = "2025-08-20T11:55:04.012Z" }, + { url = "https://files.pythonhosted.org/packages/21/1b/a4e7a41870797633423ea79618526747353fd7be9191f3acfbdee0bf264b/ujson-5.11.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3134b783ab314d2298d58cda7e47e7a0f7f71fc6ade6ac86d5dbeaf4b9770fa6", size = 57657, upload-time = "2025-08-20T11:55:05.169Z" }, + { url = "https://files.pythonhosted.org/packages/94/ae/4e0d91b8f6db7c9b76423b3649612189506d5a06ddd3b6334b6d37f77a01/ujson-5.11.0-cp310-cp310-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:185f93ebccffebc8baf8302c869fac70dd5dd78694f3b875d03a31b03b062cdb", size = 59780, upload-time = "2025-08-20T11:55:06.325Z" }, + { url = "https://files.pythonhosted.org/packages/b3/cc/46b124c2697ca2da7c65c4931ed3cb670646978157aa57a7a60f741c530f/ujson-5.11.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d06e87eded62ff0e5f5178c916337d2262fdbc03b31688142a3433eabb6511db", size = 57307, upload-time = "2025-08-20T11:55:07.493Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/eb/20dd1282bc85dede2f1c62c45b4040bc4c389c80a05983515ab99771bca7/ujson-5.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:181fb5b15703a8b9370b25345d2a1fd1359f0f18776b3643d24e13ed9c036d4c", size = 1036369, upload-time = "2025-08-20T11:55:09.192Z" }, + { url = "https://files.pythonhosted.org/packages/64/a2/80072439065d493e3a4b1fbeec991724419a1b4c232e2d1147d257cac193/ujson-5.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4df61a6df0a4a8eb5b9b1ffd673429811f50b235539dac586bb7e9e91994138", size = 1195738, upload-time = "2025-08-20T11:55:11.402Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7e/d77f9e9c039d58299c350c978e086a804d1fceae4fd4a1cc6e8d0133f838/ujson-5.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6eff24e1abd79e0ec6d7eae651dd675ddbc41f9e43e29ef81e16b421da896915", size = 1088718, upload-time = "2025-08-20T11:55:13.297Z" }, + { url = "https://files.pythonhosted.org/packages/ab/f1/697559d45acc849cada6b3571d53522951b1a64027400507aabc6a710178/ujson-5.11.0-cp310-cp310-win32.whl", hash = "sha256:30f607c70091483550fbd669a0b37471e5165b317d6c16e75dba2aa967608723", size = 39653, upload-time = "2025-08-20T11:55:14.869Z" }, + { url = "https://files.pythonhosted.org/packages/86/a2/70b73a0f55abe0e6b8046d365d74230c20c5691373e6902a599b2dc79ba1/ujson-5.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:3d2720e9785f84312b8e2cb0c2b87f1a0b1c53aaab3b2af3ab817d54409012e0", size = 43720, upload-time = "2025-08-20T11:55:15.897Z" }, + { url = "https://files.pythonhosted.org/packages/1c/5f/b19104afa455630b43efcad3a24495b9c635d92aa8f2da4f30e375deb1a2/ujson-5.11.0-cp310-cp310-win_arm64.whl", hash = "sha256:85e6796631165f719084a9af00c79195d3ebf108151452fefdcb1c8bb50f0105", size = 38410, upload-time = "2025-08-20T11:55:17.556Z" }, + { url = "https://files.pythonhosted.org/packages/da/ea/80346b826349d60ca4d612a47cdf3533694e49b45e9d1c07071bb867a184/ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:d7c46cb0fe5e7056b9acb748a4c35aa1b428025853032540bb7e41f46767321f", size = 55248, upload-time = "2025-08-20T11:55:19.033Z" }, + { url = "https://files.pythonhosted.org/packages/57/df/b53e747562c89515e18156513cc7c8ced2e5e3fd6c654acaa8752ffd7cd9/ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8951bb7a505ab2a700e26f691bdfacf395bc7e3111e3416d325b513eea03a58", size = 53156, upload-time = "2025-08-20T11:55:20.174Z" }, + { url = "https://files.pythonhosted.org/packages/41/b8/ab67ec8c01b8a3721fd13e5cb9d85ab2a6066a3a5e9148d661a6870d6293/ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952c0be400229940248c0f5356514123d428cba1946af6fa2bbd7503395fef26", size = 57657, upload-time = "2025-08-20T11:55:21.296Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c7/fb84f27cd80a2c7e2d3c6012367aecade0da936790429801803fa8d4bffc/ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:94fcae844f1e302f6f8095c5d1c45a2f0bfb928cccf9f1b99e3ace634b980a2a", size = 59779, upload-time = "2025-08-20T11:55:22.772Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/48706f7c1e917ecb97ddcfb7b1d756040b86ed38290e28579d63bd3fcc48/ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e0ec1646db172beb8d3df4c32a9d78015e671d2000af548252769e33079d9a6", size = 57284, upload-time = "2025-08-20T11:55:24.01Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ce/48877c6eb4afddfd6bd1db6be34456538c07ca2d6ed233d3f6c6efc2efe8/ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:da473b23e3a54448b008d33f742bcd6d5fb2a897e42d1fc6e7bf306ea5d18b1b", size = 1036395, upload-time = "2025-08-20T11:55:25.725Z" }, + { url = "https://files.pythonhosted.org/packages/8b/7a/2c20dc97ad70cd7c31ad0596ba8e2cf8794d77191ba4d1e0bded69865477/ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:aa6b3d4f1c0d3f82930f4cbd7fe46d905a4a9205a7c13279789c1263faf06dba", 
size = 1195731, upload-time = "2025-08-20T11:55:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/15/f5/ca454f2f6a2c840394b6f162fff2801450803f4ff56c7af8ce37640b8a2a/ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4843f3ab4fe1cc596bb7e02228ef4c25d35b4bb0809d6a260852a4bfcab37ba3", size = 1088710, upload-time = "2025-08-20T11:55:29.426Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d3/9ba310e07969bc9906eb7548731e33a0f448b122ad9705fed699c9b29345/ujson-5.11.0-cp311-cp311-win32.whl", hash = "sha256:e979fbc469a7f77f04ec2f4e853ba00c441bf2b06720aa259f0f720561335e34", size = 39648, upload-time = "2025-08-20T11:55:31.194Z" }, + { url = "https://files.pythonhosted.org/packages/57/f7/da05b4a8819f1360be9e71fb20182f0bb3ec611a36c3f213f4d20709e099/ujson-5.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:683f57f0dd3acdd7d9aff1de0528d603aafcb0e6d126e3dc7ce8b020a28f5d01", size = 43717, upload-time = "2025-08-20T11:55:32.241Z" }, + { url = "https://files.pythonhosted.org/packages/9a/cc/f3f9ac0f24f00a623a48d97dc3814df5c2dc368cfb00031aa4141527a24b/ujson-5.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:7855ccea3f8dad5e66d8445d754fc1cf80265a4272b5f8059ebc7ec29b8d0835", size = 38402, upload-time = "2025-08-20T11:55:33.641Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ef/a9cb1fce38f699123ff012161599fb9f2ff3f8d482b4b18c43a2dc35073f/ujson-5.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7895f0d2d53bd6aea11743bd56e3cb82d729980636cd0ed9b89418bf66591702", size = 55434, upload-time = "2025-08-20T11:55:34.987Z" }, + { url = "https://files.pythonhosted.org/packages/b1/05/dba51a00eb30bd947791b173766cbed3492269c150a7771d2750000c965f/ujson-5.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12b5e7e22a1fe01058000d1b317d3b65cc3daf61bd2ea7a2b76721fe160fa74d", size = 53190, upload-time = "2025-08-20T11:55:36.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/3c/fd11a224f73fbffa299fb9644e425f38b38b30231f7923a088dd513aabb4/ujson-5.11.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0180a480a7d099082501cad1fe85252e4d4bf926b40960fb3d9e87a3a6fbbc80", size = 57600, upload-time = "2025-08-20T11:55:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/405103cae24899df688a3431c776e00528bd4799e7d68820e7ebcf824f92/ujson-5.11.0-cp312-cp312-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:fa79fdb47701942c2132a9dd2297a1a85941d966d8c87bfd9e29b0cf423f26cc", size = 59791, upload-time = "2025-08-20T11:55:38.877Z" }, + { url = "https://files.pythonhosted.org/packages/17/7b/2dcbc2bbfdbf68f2368fb21ab0f6735e872290bb604c75f6e06b81edcb3f/ujson-5.11.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8254e858437c00f17cb72e7a644fc42dad0ebb21ea981b71df6e84b1072aaa7c", size = 57356, upload-time = "2025-08-20T11:55:40.036Z" }, + { url = "https://files.pythonhosted.org/packages/d1/71/fea2ca18986a366c750767b694430d5ded6b20b6985fddca72f74af38a4c/ujson-5.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1aa8a2ab482f09f6c10fba37112af5f957689a79ea598399c85009f2f29898b5", size = 1036313, upload-time = "2025-08-20T11:55:41.408Z" }, + { url = "https://files.pythonhosted.org/packages/a3/bb/d4220bd7532eac6288d8115db51710fa2d7d271250797b0bfba9f1e755af/ujson-5.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a638425d3c6eed0318df663df44480f4a40dc87cc7c6da44d221418312f6413b", size = 1195782, upload-time = "2025-08-20T11:55:43.357Z" }, + { url = "https://files.pythonhosted.org/packages/80/47/226e540aa38878ce1194454385701d82df538ccb5ff8db2cf1641dde849a/ujson-5.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7e3cff632c1d78023b15f7e3a81c3745cd3f94c044d1e8fa8efbd6b161997bbc", size = 1088817, upload-time = "2025-08-20T11:55:45.262Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/81/546042f0b23c9040d61d46ea5ca76f0cc5e0d399180ddfb2ae976ebff5b5/ujson-5.11.0-cp312-cp312-win32.whl", hash = "sha256:be6b0eaf92cae8cdee4d4c9e074bde43ef1c590ed5ba037ea26c9632fb479c88", size = 39757, upload-time = "2025-08-20T11:55:46.522Z" }, + { url = "https://files.pythonhosted.org/packages/44/1b/27c05dc8c9728f44875d74b5bfa948ce91f6c33349232619279f35c6e817/ujson-5.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:b7b136cc6abc7619124fd897ef75f8e63105298b5ca9bdf43ebd0e1fa0ee105f", size = 43859, upload-time = "2025-08-20T11:55:47.987Z" }, + { url = "https://files.pythonhosted.org/packages/22/2d/37b6557c97c3409c202c838aa9c960ca3896843b4295c4b7bb2bbd260664/ujson-5.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:6cd2df62f24c506a0ba322d5e4fe4466d47a9467b57e881ee15a31f7ecf68ff6", size = 38361, upload-time = "2025-08-20T11:55:49.122Z" }, + { url = "https://files.pythonhosted.org/packages/1c/ec/2de9dd371d52c377abc05d2b725645326c4562fc87296a8907c7bcdf2db7/ujson-5.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:109f59885041b14ee9569bf0bb3f98579c3fa0652317b355669939e5fc5ede53", size = 55435, upload-time = "2025-08-20T11:55:50.243Z" }, + { url = "https://files.pythonhosted.org/packages/5b/a4/f611f816eac3a581d8a4372f6967c3ed41eddbae4008d1d77f223f1a4e0a/ujson-5.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a31c6b8004438e8c20fc55ac1c0e07dad42941db24176fe9acf2815971f8e752", size = 53193, upload-time = "2025-08-20T11:55:51.373Z" }, + { url = "https://files.pythonhosted.org/packages/e9/c5/c161940967184de96f5cbbbcce45b562a4bf851d60f4c677704b1770136d/ujson-5.11.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78c684fb21255b9b90320ba7e199780f653e03f6c2528663768965f4126a5b50", size = 57603, upload-time = "2025-08-20T11:55:52.583Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/d6/c7b2444238f5b2e2d0e3dab300b9ddc3606e4b1f0e4bed5a48157cebc792/ujson-5.11.0-cp313-cp313-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:4c9f5d6a27d035dd90a146f7761c2272cf7103de5127c9ab9c4cd39ea61e878a", size = 59794, upload-time = "2025-08-20T11:55:53.69Z" }, + { url = "https://files.pythonhosted.org/packages/fe/a3/292551f936d3d02d9af148f53e1bc04306b00a7cf1fcbb86fa0d1c887242/ujson-5.11.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:837da4d27fed5fdc1b630bd18f519744b23a0b5ada1bbde1a36ba463f2900c03", size = 57363, upload-time = "2025-08-20T11:55:54.843Z" }, + { url = "https://files.pythonhosted.org/packages/90/a6/82cfa70448831b1a9e73f882225980b5c689bf539ec6400b31656a60ea46/ujson-5.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:787aff4a84da301b7f3bac09bc696e2e5670df829c6f8ecf39916b4e7e24e701", size = 1036311, upload-time = "2025-08-20T11:55:56.197Z" }, + { url = "https://files.pythonhosted.org/packages/84/5c/96e2266be50f21e9b27acaee8ca8f23ea0b85cb998c33d4f53147687839b/ujson-5.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6dd703c3e86dc6f7044c5ac0b3ae079ed96bf297974598116aa5fb7f655c3a60", size = 1195783, upload-time = "2025-08-20T11:55:58.081Z" }, + { url = "https://files.pythonhosted.org/packages/8d/20/78abe3d808cf3bb3e76f71fca46cd208317bf461c905d79f0d26b9df20f1/ujson-5.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3772e4fe6b0c1e025ba3c50841a0ca4786825a4894c8411bf8d3afe3a8061328", size = 1088822, upload-time = "2025-08-20T11:55:59.469Z" }, + { url = "https://files.pythonhosted.org/packages/d8/50/8856e24bec5e2fc7f775d867aeb7a3f137359356200ac44658f1f2c834b2/ujson-5.11.0-cp313-cp313-win32.whl", hash = "sha256:8fa2af7c1459204b7a42e98263b069bd535ea0cd978b4d6982f35af5a04a4241", size = 39753, upload-time = "2025-08-20T11:56:01.345Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/d8/1baee0f4179a4d0f5ce086832147b6cc9b7731c24ca08e14a3fdb8d39c32/ujson-5.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:34032aeca4510a7c7102bd5933f59a37f63891f30a0706fb46487ab6f0edf8f0", size = 43866, upload-time = "2025-08-20T11:56:02.552Z" }, + { url = "https://files.pythonhosted.org/packages/a9/8c/6d85ef5be82c6d66adced3ec5ef23353ed710a11f70b0b6a836878396334/ujson-5.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:ce076f2df2e1aa62b685086fbad67f2b1d3048369664b4cdccc50707325401f9", size = 38363, upload-time = "2025-08-20T11:56:03.688Z" }, + { url = "https://files.pythonhosted.org/packages/28/08/4518146f4984d112764b1dfa6fb7bad691c44a401adadaa5e23ccd930053/ujson-5.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:65724738c73645db88f70ba1f2e6fb678f913281804d5da2fd02c8c5839af302", size = 55462, upload-time = "2025-08-20T11:56:04.873Z" }, + { url = "https://files.pythonhosted.org/packages/29/37/2107b9a62168867a692654d8766b81bd2fd1e1ba13e2ec90555861e02b0c/ujson-5.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29113c003ca33ab71b1b480bde952fbab2a0b6b03a4ee4c3d71687cdcbd1a29d", size = 53246, upload-time = "2025-08-20T11:56:06.054Z" }, + { url = "https://files.pythonhosted.org/packages/9b/f8/25583c70f83788edbe3ca62ce6c1b79eff465d78dec5eb2b2b56b3e98b33/ujson-5.11.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c44c703842024d796b4c78542a6fcd5c3cb948b9fc2a73ee65b9c86a22ee3638", size = 57631, upload-time = "2025-08-20T11:56:07.374Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ca/19b3a632933a09d696f10dc1b0dfa1d692e65ad507d12340116ce4f67967/ujson-5.11.0-cp314-cp314-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:e750c436fb90edf85585f5c62a35b35082502383840962c6983403d1bd96a02c", size = 59877, upload-time = "2025-08-20T11:56:08.534Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/7a/4572af5324ad4b2bfdd2321e898a527050290147b4ea337a79a0e4e87ec7/ujson-5.11.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f278b31a7c52eb0947b2db55a5133fbc46b6f0ef49972cd1a80843b72e135aba", size = 57363, upload-time = "2025-08-20T11:56:09.758Z" }, + { url = "https://files.pythonhosted.org/packages/7b/71/a2b8c19cf4e1efe53cf439cdf7198ac60ae15471d2f1040b490c1f0f831f/ujson-5.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ab2cb8351d976e788669c8281465d44d4e94413718af497b4e7342d7b2f78018", size = 1036394, upload-time = "2025-08-20T11:56:11.168Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3e/7b98668cba3bb3735929c31b999b374ebc02c19dfa98dfebaeeb5c8597ca/ujson-5.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:090b4d11b380ae25453100b722d0609d5051ffe98f80ec52853ccf8249dfd840", size = 1195837, upload-time = "2025-08-20T11:56:12.6Z" }, + { url = "https://files.pythonhosted.org/packages/a1/ea/8870f208c20b43571a5c409ebb2fe9b9dba5f494e9e60f9314ac01ea8f78/ujson-5.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:80017e870d882d5517d28995b62e4e518a894f932f1e242cbc802a2fd64d365c", size = 1088837, upload-time = "2025-08-20T11:56:14.15Z" }, + { url = "https://files.pythonhosted.org/packages/63/b6/c0e6607e37fa47929920a685a968c6b990a802dec65e9c5181e97845985d/ujson-5.11.0-cp314-cp314-win32.whl", hash = "sha256:1d663b96eb34c93392e9caae19c099ec4133ba21654b081956613327f0e973ac", size = 41022, upload-time = "2025-08-20T11:56:15.509Z" }, + { url = "https://files.pythonhosted.org/packages/4e/56/f4fe86b4c9000affd63e9219e59b222dc48b01c534533093e798bf617a7e/ujson-5.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:849e65b696f0d242833f1df4182096cedc50d414215d1371fca85c541fbff629", size = 45111, upload-time = "2025-08-20T11:56:16.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/f3/669437f0280308db4783b12a6d88c00730b394327d8334cc7a32ef218e64/ujson-5.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:e73df8648c9470af2b6a6bf5250d4744ad2cf3d774dcf8c6e31f018bdd04d764", size = 39682, upload-time = "2025-08-20T11:56:17.763Z" }, + { url = "https://files.pythonhosted.org/packages/6e/cd/e9809b064a89fe5c4184649adeb13c1b98652db3f8518980b04227358574/ujson-5.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:de6e88f62796372fba1de973c11138f197d3e0e1d80bcb2b8aae1e826096d433", size = 55759, upload-time = "2025-08-20T11:56:18.882Z" }, + { url = "https://files.pythonhosted.org/packages/1b/be/ae26a6321179ebbb3a2e2685b9007c71bcda41ad7a77bbbe164005e956fc/ujson-5.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:49e56ef8066f11b80d620985ae36869a3ff7e4b74c3b6129182ec5d1df0255f3", size = 53634, upload-time = "2025-08-20T11:56:20.012Z" }, + { url = "https://files.pythonhosted.org/packages/ae/e9/fb4a220ee6939db099f4cfeeae796ecb91e7584ad4d445d4ca7f994a9135/ujson-5.11.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1a325fd2c3a056cf6c8e023f74a0c478dd282a93141356ae7f16d5309f5ff823", size = 58547, upload-time = "2025-08-20T11:56:21.175Z" }, + { url = "https://files.pythonhosted.org/packages/bd/f8/fc4b952b8f5fea09ea3397a0bd0ad019e474b204cabcb947cead5d4d1ffc/ujson-5.11.0-cp314-cp314t-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:a0af6574fc1d9d53f4ff371f58c96673e6d988ed2b5bf666a6143c782fa007e9", size = 60489, upload-time = "2025-08-20T11:56:22.342Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e5/af5491dfda4f8b77e24cf3da68ee0d1552f99a13e5c622f4cef1380925c3/ujson-5.11.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10f29e71ecf4ecd93a6610bd8efa8e7b6467454a363c3d6416db65de883eb076", size = 58035, upload-time = "2025-08-20T11:56:23.92Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/09/0945349dd41f25cc8c38d78ace49f14c5052c5bbb7257d2f466fa7bdb533/ujson-5.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1a0a9b76a89827a592656fe12e000cf4f12da9692f51a841a4a07aa4c7ecc41c", size = 1037212, upload-time = "2025-08-20T11:56:25.274Z" }, + { url = "https://files.pythonhosted.org/packages/49/44/8e04496acb3d5a1cbee3a54828d9652f67a37523efa3d3b18a347339680a/ujson-5.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b16930f6a0753cdc7d637b33b4e8f10d5e351e1fb83872ba6375f1e87be39746", size = 1196500, upload-time = "2025-08-20T11:56:27.517Z" }, + { url = "https://files.pythonhosted.org/packages/64/ae/4bc825860d679a0f208a19af2f39206dfd804ace2403330fdc3170334a2f/ujson-5.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:04c41afc195fd477a59db3a84d5b83a871bd648ef371cf8c6f43072d89144eef", size = 1089487, upload-time = "2025-08-20T11:56:29.07Z" }, + { url = "https://files.pythonhosted.org/packages/30/ed/5a057199fb0a5deabe0957073a1c1c1c02a3e99476cd03daee98ea21fa57/ujson-5.11.0-cp314-cp314t-win32.whl", hash = "sha256:aa6d7a5e09217ff93234e050e3e380da62b084e26b9f2e277d2606406a2fc2e5", size = 41859, upload-time = "2025-08-20T11:56:30.495Z" }, + { url = "https://files.pythonhosted.org/packages/aa/03/b19c6176bdf1dc13ed84b886e99677a52764861b6cc023d5e7b6ebda249d/ujson-5.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:48055e1061c1bb1f79e75b4ac39e821f3f35a9b82de17fce92c3140149009bec", size = 46183, upload-time = "2025-08-20T11:56:31.574Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ca/a0413a3874b2dc1708b8796ca895bf363292f9c70b2e8ca482b7dbc0259d/ujson-5.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1194b943e951092db611011cb8dbdb6cf94a3b816ed07906e14d3bc6ce0e90ab", size = 40264, upload-time = "2025-08-20T11:56:32.773Z" }, + { url = "https://files.pythonhosted.org/packages/50/17/30275aa2933430d8c0c4ead951cc4fdb922f575a349aa0b48a6f35449e97/ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", 
hash = "sha256:abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362", size = 51206, upload-time = "2025-08-20T11:56:48.797Z" }, + { url = "https://files.pythonhosted.org/packages/c3/15/42b3924258eac2551f8f33fa4e35da20a06a53857ccf3d4deb5e5d7c0b6c/ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39", size = 48907, upload-time = "2025-08-20T11:56:50.136Z" }, + { url = "https://files.pythonhosted.org/packages/94/7e/0519ff7955aba581d1fe1fb1ca0e452471250455d182f686db5ac9e46119/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc", size = 50319, upload-time = "2025-08-20T11:56:51.63Z" }, + { url = "https://files.pythonhosted.org/packages/74/cf/209d90506b7d6c5873f82c5a226d7aad1a1da153364e9ebf61eff0740c33/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:86baf341d90b566d61a394869ce77188cc8668f76d7bb2c311d77a00f4bdf844", size = 56584, upload-time = "2025-08-20T11:56:52.89Z" }, + { url = "https://files.pythonhosted.org/packages/e9/97/bd939bb76943cb0e1d2b692d7e68629f51c711ef60425fa5bb6968037ecd/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4598bf3965fc1a936bd84034312bcbe00ba87880ef1ee33e33c1e88f2c398b49", size = 51588, upload-time = "2025-08-20T11:56:54.054Z" }, + { url = "https://files.pythonhosted.org/packages/52/5b/8c5e33228f7f83f05719964db59f3f9f276d272dc43752fa3bbf0df53e7b/ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:416389ec19ef5f2013592f791486bef712ebce0cd59299bf9df1ba40bb2f6e04", size = 43835, upload-time = "2025-08-20T11:56:55.237Z" }, +] + [[package]] name = "ultralytics" -version = "8.3.250" +version = "8.4.14" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "matplotlib" }, @@ -9093,14 +10247,14 @@ dependencies = [ { name = 
"pyyaml" }, { name = "requests" }, { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "torch" }, { name = "torchvision" }, { name = "ultralytics-thop" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d7/80/b59ee8aac9b46f0cd933a571d10925a7ce9def0d41dc9ddba335143bf520/ultralytics-8.3.250.tar.gz", hash = "sha256:af1a99afbfc23d8c888811bebe7930b0bdc2ae7a2f71ee9f412b77568bfc0297", size = 992533, upload-time = "2026-01-08T18:48:38.45Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/dc/7947df41679c009bc33b61e10d6274a8ec885206b726ebb6027d5f204b35/ultralytics-8.4.14.tar.gz", hash = "sha256:360dff28ecb6cc7bf561aadf5bfe208c3900380bf1d4b2b190cb8db60e7b7626", size = 1014432, upload-time = "2026-02-10T11:31:51.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/7e/17af4263d4e139a0153bdf60b590ecad079df992545af35ea356a602548a/ultralytics-8.3.250-py3-none-any.whl", hash = "sha256:3578a10095b54b8707c622ad15588140eb5bcf4ff99629cff544faa04b39b7b3", size = 1156491, upload-time = "2026-01-08T18:48:35.335Z" }, + { url = "https://files.pythonhosted.org/packages/89/39/3b19ee32a174c285c6b2bdf5cec222155938e5f0cf3fef997df131f98189/ultralytics-8.4.14-py3-none-any.whl", hash = "sha256:0ce8f4081c1e7dd96a7a3ac82a820681443042609c4b48adca85a2289cdaef17", size = 1188742, upload-time = "2026-02-10T11:31:47.44Z" }, ] [[package]] @@ -9152,31 +10306,31 @@ wheels = [ [[package]] name = "uuid-utils" -version = "0.13.0" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/fe/8a/17b11768dcb473d3a255c02ffdd94fbd1b345c906efea0a39124dcbaed52/uuid_utils-0.13.0.tar.gz", hash = "sha256:4c17df6427a9e23a4cd7fb9ee1efb53b8abb078660b9bdb2524ca8595022dfe1", size = 21921, upload-time = "2026-01-08T15:48:10.841Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/b8/d40848ca22781f206c60a1885fc737d2640392bd6b5792d455525accd89c/uuid_utils-0.13.0-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:83628283e977fb212e756bc055df8fdd2f9f589a2e539ba1abe755b8ce8df7a4", size = 602130, upload-time = "2026-01-08T15:47:34.877Z" }, - { url = "https://files.pythonhosted.org/packages/40/b9/00a944b8096632ea12638181f8e294abcde3e3b8b5e29b777f809896f6ae/uuid_utils-0.13.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c47638ed6334ab19d80f73664f153b04bbb04ab8ce4298d10da6a292d4d21c47", size = 304213, upload-time = "2026-01-08T15:47:36.807Z" }, - { url = "https://files.pythonhosted.org/packages/da/d7/07b36c33aef683b81c9afff3ec178d5eb39d325447a68c3c68a62e4abb32/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:b276b538c57733ed406948584912da422a604313c71479654848b84b9e19c9b0", size = 340624, upload-time = "2026-01-08T15:47:38.821Z" }, - { url = "https://files.pythonhosted.org/packages/7d/55/fcff2fff02a27866cb1a6614c9df2b3ace721f0a0aab2b7b8f5a7d4e4221/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_armv7l.whl", hash = "sha256:bdaf2b77e34b199cf04cde28399495fd1ed951de214a4ece1f3919b2f945bb06", size = 346705, upload-time = "2026-01-08T15:47:40.397Z" }, - { url = "https://files.pythonhosted.org/packages/41/48/67438506c2bb8bee1b4b00d7c0b3ff866401b4790849bf591d654d4ea0bc/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_i686.whl", hash = "sha256:eb2f0baf81e82f9769a7684022dca8f3bf801ca1574a3e94df1876e9d6f9271e", size = 366023, upload-time = "2026-01-08T15:47:42.662Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/d7/2d91ce17f62fd764d593430de296b70843cc25229c772453f7261de9e6a8/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_ppc64le.whl", hash = "sha256:6be6c4d11275f5cc402a4fdba6c2b1ce45fd3d99bb78716cd1cc2cbf6802b2ce", size = 471149, upload-time = "2026-01-08T15:47:44.963Z" }, - { url = "https://files.pythonhosted.org/packages/2e/9a/aa0756186073ba84daf5704c150d41ede10eb3185d510e02532e2071550e/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:77621cf6ceca7f42173a642a01c01c216f9eaec3b7b65d093d2d6a433ca0a83d", size = 342130, upload-time = "2026-01-08T15:47:46.331Z" }, - { url = "https://files.pythonhosted.org/packages/74/b4/3191789f4dc3bed59d79cec90559821756297a25d7dc34d1bf7781577a75/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a5a9eb06c2bb86dd876cd7b2fe927fc8543d14c90d971581db6ffda4a02526f", size = 524128, upload-time = "2026-01-08T15:47:47.628Z" }, - { url = "https://files.pythonhosted.org/packages/b2/30/29839210a8fff9fc219bfa7c8d8cd115324e92618cba0cda090d54d3d321/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:775347c6110fb71360df17aac74132d8d47c1dbe71233ac98197fc872a791fd2", size = 615872, upload-time = "2026-01-08T15:47:50.61Z" }, - { url = "https://files.pythonhosted.org/packages/99/ed/15000c96a8bd8f5fd8efd622109bf52549ea0b366f8ce71c45580fa55878/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf95f6370ad1a0910ee7b5ad5228fd19c4ae32fe3627389006adaf519408c41e", size = 581023, upload-time = "2026-01-08T15:47:52.776Z" }, - { url = "https://files.pythonhosted.org/packages/67/c8/3f809fa2dc2ca4bd331c792a3c7d3e45ae2b709d85847a12b8b27d1d5f19/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5a88e23e0b2f4203fefe2ccbca5736ee06fcad10e61b5e7e39c8d7904bc13300", size = 546715, upload-time = "2026-01-08T15:47:54.415Z" }, - { url = 
"https://files.pythonhosted.org/packages/f5/80/4f7c7efd734d1494397c781bd3d421688e9c187ae836e3174625b1ddf8b0/uuid_utils-0.13.0-cp39-abi3-win32.whl", hash = "sha256:3e4f2cc54e6a99c0551158100ead528479ad2596847478cbad624977064ffce3", size = 177650, upload-time = "2026-01-08T15:47:55.679Z" }, - { url = "https://files.pythonhosted.org/packages/6c/94/d05ab68622e66ad787a241dfe5ccc649b3af09f30eae977b9ee8f7046aaa/uuid_utils-0.13.0-cp39-abi3-win_amd64.whl", hash = "sha256:046cb2756e1597b3de22d24851b769913e192135830486a0a70bf41327f0360c", size = 183211, upload-time = "2026-01-08T15:47:57.604Z" }, - { url = "https://files.pythonhosted.org/packages/69/37/674b3ce25cd715b831ea8ebbd828b74c40159f04c95d1bb963b2c876fe79/uuid_utils-0.13.0-cp39-abi3-win_arm64.whl", hash = "sha256:5447a680df6ef8a5a353976aaf4c97cc3a3a22b1ee13671c44227b921e3ae2a9", size = 183518, upload-time = "2026-01-08T15:47:59.148Z" }, - { url = "https://files.pythonhosted.org/packages/99/fa/1d92de9538463859228e68db679b766fd300770c9a2db849dcba0c0c5a57/uuid_utils-0.13.0-pp311-pypy311_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e5182e2d95f38e65f2e5bce90648ef56987443da13e145afcd747e584f9bc69c", size = 587641, upload-time = "2026-01-08T15:48:00.433Z" }, - { url = "https://files.pythonhosted.org/packages/ca/07/6bd9e6f5367e38c2ee7178ad882d2bd1b0d17c5393974b09ab027a215eba/uuid_utils-0.13.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e3909a8a1fbd79d7c8bdc874eeb83e23ccb7a7cb0aa821a49596cc96c0cce84b", size = 298273, upload-time = "2026-01-08T15:48:02.063Z" }, - { url = "https://files.pythonhosted.org/packages/dc/14/7061b868a8a6799c8df83768a23f313d4e22075069f01ee3c28fa82aa2c6/uuid_utils-0.13.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.whl", hash = "sha256:5dc4c9f749bd2511b8dcbf0891e658d7d86880022963db050722ad7b502b5e22", size = 333618, upload-time = "2026-01-08T15:48:03.503Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/f1/f48c3c9c343c9071ade5f355403e344d817412d9cf379a2d04b181282e74/uuid_utils-0.13.0-pp311-pypy311_pp73-manylinux_2_24_armv7l.whl", hash = "sha256:516adf07f5b2cdb88d50f489c702b5f1a75ae8b2639bfd254f4192d5f7ee261f", size = 339104, upload-time = "2026-01-08T15:48:05.02Z" }, - { url = "https://files.pythonhosted.org/packages/47/22/8e3142b4baffee77ce533fe956446d3699ec42f1d5252911208cbef4501e/uuid_utils-0.13.0-pp311-pypy311_pp73-manylinux_2_24_i686.whl", hash = "sha256:aeee3bd89e8de6184a3ab778ce19f5ce9ad32849d1be549516e0ddb257562d8d", size = 359503, upload-time = "2026-01-08T15:48:06.347Z" }, - { url = "https://files.pythonhosted.org/packages/bd/1a/756f1f9e31b15019c87cd2becb1c596351c50967cd143443da38df8818d1/uuid_utils-0.13.0-pp311-pypy311_pp73-manylinux_2_24_ppc64le.whl", hash = "sha256:97985256c2e59b7caa51f5c8515f64d777328562a9c900ec65e9d627baf72737", size = 467480, upload-time = "2026-01-08T15:48:07.681Z" }, - { url = "https://files.pythonhosted.org/packages/0a/20/a6929e98d9a461ca49e96194a82a1cc3fd5420f3a2f53cbb34fca438549e/uuid_utils-0.13.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:b7ccaa20e24c5f60f41a69ef571ed820737f9b0ade4cbeef56aaa8f80f5aa475", size = 333610, upload-time = "2026-01-08T15:48:09.375Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/57/7c/3a926e847516e67bc6838634f2e54e24381105b4e80f9338dc35cca0086b/uuid_utils-0.14.0.tar.gz", hash = "sha256:fc5bac21e9933ea6c590433c11aa54aaca599f690c08069e364eb13a12f670b4", size = 22072, upload-time = "2026-01-20T20:37:15.729Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/42/42d003f4a99ddc901eef2fd41acb3694163835e037fb6dde79ad68a72342/uuid_utils-0.14.0-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:f6695c0bed8b18a904321e115afe73b34444bc8451d0ce3244a1ec3b84deb0e5", size = 601786, upload-time = "2026-01-20T20:37:09.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/e6/775dfb91f74b18f7207e3201eb31ee666d286579990dc69dd50db2d92813/uuid_utils-0.14.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:4f0a730bbf2d8bb2c11b93e1005e91769f2f533fa1125ed1f00fd15b6fcc732b", size = 303943, upload-time = "2026-01-20T20:37:18.767Z" }, + { url = "https://files.pythonhosted.org/packages/17/82/ea5f5e85560b08a1f30cdc65f75e76494dc7aba9773f679e7eaa27370229/uuid_utils-0.14.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40ce3fd1a4fdedae618fc3edc8faf91897012469169d600133470f49fd699ed3", size = 340467, upload-time = "2026-01-20T20:37:11.794Z" }, + { url = "https://files.pythonhosted.org/packages/ca/33/54b06415767f4569882e99b6470c6c8eeb97422686a6d432464f9967fd91/uuid_utils-0.14.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ae4a98416a440e78f7d9543d11b11cae4bab538b7ed94ec5da5221481748f2", size = 346333, upload-time = "2026-01-20T20:37:12.818Z" }, + { url = "https://files.pythonhosted.org/packages/cb/10/a6bce636b8f95e65dc84bf4a58ce8205b8e0a2a300a38cdbc83a3f763d27/uuid_utils-0.14.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:971e8c26b90d8ae727e7f2ac3ee23e265971d448b3672882f2eb44828b2b8c3e", size = 470859, upload-time = "2026-01-20T20:37:01.512Z" }, + { url = "https://files.pythonhosted.org/packages/8a/27/84121c51ea72f013f0e03d0886bcdfa96b31c9b83c98300a7bd5cc4fa191/uuid_utils-0.14.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5cde1fa82804a8f9d2907b7aec2009d440062c63f04abbdb825fce717a5e860", size = 341988, upload-time = "2026-01-20T20:37:22.881Z" }, + { url = "https://files.pythonhosted.org/packages/90/a4/01c1c7af5e6a44f20b40183e8dac37d6ed83e7dc9e8df85370a15959b804/uuid_utils-0.14.0-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c7343862a2359e0bd48a7f3dfb5105877a1728677818bb694d9f40703264a2db", size = 365784, upload-time = "2026-01-20T20:37:10.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/f0/65ee43ec617b8b6b1bf2a5aecd56a069a08cca3d9340c1de86024331bde3/uuid_utils-0.14.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c51e4818fdb08ccec12dc7083a01f49507b4608770a0ab22368001685d59381b", size = 523750, upload-time = "2026-01-20T20:37:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/95/d3/6bf503e3f135a5dfe705a65e6f89f19bccd55ac3fb16cb5d3ec5ba5388b8/uuid_utils-0.14.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:181bbcccb6f93d80a8504b5bd47b311a1c31395139596edbc47b154b0685b533", size = 615818, upload-time = "2026-01-20T20:37:21.816Z" }, + { url = "https://files.pythonhosted.org/packages/df/6c/99937dd78d07f73bba831c8dc9469dfe4696539eba2fc269ae1b92752f9e/uuid_utils-0.14.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:5c8ae96101c3524ba8dbf762b6f05e9e9d896544786c503a727c5bf5cb9af1a7", size = 580831, upload-time = "2026-01-20T20:37:19.691Z" }, + { url = "https://files.pythonhosted.org/packages/44/fa/bbc9e2c25abd09a293b9b097a0d8fc16acd6a92854f0ec080f1ea7ad8bb3/uuid_utils-0.14.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00ac3c6edfdaff7e1eed041f4800ae09a3361287be780d7610a90fdcde9befdc", size = 546333, upload-time = "2026-01-20T20:37:03.117Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9b/e5e99b324b1b5f0c62882230455786df0bc66f67eff3b452447e703f45d2/uuid_utils-0.14.0-cp39-abi3-win32.whl", hash = "sha256:ec2fd80adf8e0e6589d40699e6f6df94c93edcc16dd999be0438dd007c77b151", size = 177319, upload-time = "2026-01-20T20:37:04.208Z" }, + { url = "https://files.pythonhosted.org/packages/d3/28/2c7d417ea483b6ff7820c948678fdf2ac98899dc7e43bb15852faa95acaf/uuid_utils-0.14.0-cp39-abi3-win_amd64.whl", hash = "sha256:efe881eb43a5504fad922644cb93d725fd8a6a6d949bd5a4b4b7d1a1587c7fd1", size = 182566, upload-time = "2026-01-20T20:37:16.868Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/86/49e4bdda28e962fbd7266684171ee29b3d92019116971d58783e51770745/uuid_utils-0.14.0-cp39-abi3-win_arm64.whl", hash = "sha256:32b372b8fd4ebd44d3a219e093fe981af4afdeda2994ee7db208ab065cfcd080", size = 182809, upload-time = "2026-01-20T20:37:05.139Z" }, + { url = "https://files.pythonhosted.org/packages/f1/03/1f1146e32e94d1f260dfabc81e1649102083303fb4ad549775c943425d9a/uuid_utils-0.14.0-pp311-pypy311_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:762e8d67992ac4d2454e24a141a1c82142b5bde10409818c62adbe9924ebc86d", size = 587430, upload-time = "2026-01-20T20:37:24.998Z" }, + { url = "https://files.pythonhosted.org/packages/87/ba/d5a7469362594d885fd9219fe9e851efbe65101d3ef1ef25ea321d7ce841/uuid_utils-0.14.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:40be5bf0b13aa849d9062abc86c198be6a25ff35316ce0b89fc25f3bac6d525e", size = 298106, upload-time = "2026-01-20T20:37:23.896Z" }, + { url = "https://files.pythonhosted.org/packages/8a/11/3dafb2a5502586f59fd49e93f5802cd5face82921b3a0f3abb5f357cb879/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:191a90a6f3940d1b7322b6e6cceff4dd533c943659e0a15f788674407856a515", size = 333423, upload-time = "2026-01-20T20:37:17.828Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f2/c8987663f0cdcf4d717a36d85b5db2a5589df0a4e129aa10f16f4380ef48/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4aa4525f4ad82f9d9c842f9a3703f1539c1808affbaec07bb1b842f6b8b96aa5", size = 338659, upload-time = "2026-01-20T20:37:14.286Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c8/929d81665d83f0b2ffaecb8e66c3091a50f62c7cb5b65e678bd75a96684e/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdbd82ff20147461caefc375551595ecf77ebb384e46267f128aca45a0f2cdfc", size = 467029, upload-time = 
"2026-01-20T20:37:08.277Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a0/27d7daa1bfed7163f4ccaf52d7d2f4ad7bb1002a85b45077938b91ee584f/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff57e8a5d540006ce73cf0841a643d445afe78ba12e75ac53a95ca2924a56be", size = 333298, upload-time = "2026-01-20T20:37:07.271Z" }, + { url = "https://files.pythonhosted.org/packages/63/d4/acad86ce012b42ce18a12f31ee2aa3cbeeb98664f865f05f68c882945913/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fd9112ca96978361201e669729784f26c71fecc9c13a7f8a07162c31bd4d1e2", size = 359217, upload-time = "2026-01-20T20:36:59.687Z" }, ] [[package]] @@ -9250,7 +10404,7 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.36.0" +version = "20.36.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, @@ -9258,43 +10412,52 @@ dependencies = [ { name = "platformdirs" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/49/87e23d8f742f10f965bce5d6b285fc88a4f436b11daf6b6225d4d66f8492/virtualenv-20.36.0.tar.gz", hash = "sha256:a3601f540b515a7983508113f14e78993841adc3d83710fa70f0ac50f43b23ed", size = 6032237, upload-time = "2026-01-07T17:20:04.975Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, +] + +[[package]] +name = 
"wadler-lindig" +version = "0.1.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/67/cbae4bf7683a64755c2c1778c418fea96d00e34395bb91743f08bd951571/wadler_lindig-0.1.7.tar.gz", hash = "sha256:81d14d3fe77d441acf3ebd7f4aefac20c74128bf460e84b512806dccf7b2cd55", size = 15842, upload-time = "2025-06-18T07:00:42.843Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/6a/0af36875e0023a1f2d0b66b4051721fc26740e947696922df1665b75e5d3/virtualenv-20.36.0-py3-none-any.whl", hash = "sha256:e7ded577f3af534fd0886d4ca03277f5542053bedb98a70a989d3c22cfa5c9ac", size = 6008261, upload-time = "2026-01-07T17:20:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/04e7b441807b26b794da5b11e59ed7f83b2cf8af202bd7eba8ad2fa6046e/wadler_lindig-0.1.7-py3-none-any.whl", hash = "sha256:e3ec83835570fd0a9509f969162aeb9c65618f998b1f42918cfc8d45122fe953", size = 20516, upload-time = "2025-06-18T07:00:41.684Z" }, ] [[package]] name = "warp-lang" -version = "1.11.0" +version = "1.11.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/0e/aa6c2a57d987d18d6463a30a358f5454599ae5d03bbfe889a6330e0b9931/warp_lang-1.11.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3a4f1c9a6e721d7de7d6dad6b242c54afaf20c6e14a767c0da03e5e963fcc13c", size = 24025392, upload-time = "2026-01-02T13:18:13.517Z" }, - { url = "https://files.pythonhosted.org/packages/0b/56/935b926526075149fdf8851e7593ea63880458c5303798852d9c99c30382/warp_lang-1.11.0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:524dce20de6162ba25333552168ebf430973050e00d9f8116b8df41a60d25d6e", size = 134812535, upload-time = 
"2026-01-02T13:13:23.908Z" }, - { url = "https://files.pythonhosted.org/packages/65/b7/8915e42cb10bbe7d2ff1b32554b52d4c90234119aab4de48476ebf700147/warp_lang-1.11.0-py3-none-manylinux_2_34_aarch64.whl", hash = "sha256:1ae6cfc226107f96e4d495b41a3dab32488e8ee8f074b0e1bcaf22e7fb8c904d", size = 136105356, upload-time = "2026-01-02T13:15:37.795Z" }, - { url = "https://files.pythonhosted.org/packages/e4/cc/6261315e43fa879d30b1bd79904ce8075a8a7d42fc49dd235de337f2ac02/warp_lang-1.11.0-py3-none-win_amd64.whl", hash = "sha256:80d8493cbe243a3510134f3af289646d7bd7484217a30ecf565d676466ef8a5e", size = 118919958, upload-time = "2026-01-02T13:17:20.067Z" }, + { url = "https://files.pythonhosted.org/packages/d5/86/507cb6e0534422ff8437f71d676f6366ec907031db54751ad371f07c0b7f/warp_lang-1.11.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1ad11f1fa775269e991a3d55039152c8a504baf86701c849b485cb8e66c49d15", size = 24056749, upload-time = "2026-02-03T21:18:51.64Z" }, + { url = "https://files.pythonhosted.org/packages/c2/bb/21e9396a963d50171f539f4a4c9411435e7bb9c5131f4480f882d5e51dc6/warp_lang-1.11.1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:8b098f41e71d421d80ee7562e38aa8380ff6b0d3b4c6ee866cfbdef733ac5bdc", size = 134843847, upload-time = "2026-02-03T21:19:14.318Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ff/9ced2d69dc9db6cb6b1d3b80a3d2a81590e11ae368a7864aa5d6089fd820/warp_lang-1.11.1-py3-none-manylinux_2_34_aarch64.whl", hash = "sha256:5d0904b0eefcc81f39ba65375427a3de99006088aa43e24a9011263f07d0cd07", size = 136139429, upload-time = "2026-02-03T21:18:45.854Z" }, + { url = "https://files.pythonhosted.org/packages/25/2f/2713f29bba5800b59835d97e136fa75d65a58b89734ae01de5a5f8f26482/warp_lang-1.11.1-py3-none-win_amd64.whl", hash = "sha256:15dc10aa51fb0fdbe1ca16d52e5fadca35a47ffd9d0c636826506f96bb2e7c41", size = 118951410, upload-time = "2026-02-03T21:19:02.038Z" }, ] [[package]] name = "wasmtime" -version = "40.0.0" +version = "41.0.0" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ab/96/3e7e9b4c5b9d3071b469502d0c4418d1492e5ce52bbf5b985703b08c6892/wasmtime-40.0.0.tar.gz", hash = "sha256:48417c59f13be145184cff61fef61bb52556ea0e7417c25bec09af2d859745ab", size = 117370, upload-time = "2025-12-22T16:30:39.179Z" } +sdist = { url = "https://files.pythonhosted.org/packages/be/68/6dc0e7156f883afe0129dd89e4031c8d1163131794ba6ce9e454a09168ad/wasmtime-41.0.0.tar.gz", hash = "sha256:fc2aaacf3ba794eac8baeb739939b2f7903e12d6b78edddc0b7f3ac3a9af6dfc", size = 117354, upload-time = "2026-01-20T18:18:00.565Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/19/da6935d495d5bf5a1defa261c183af3e624b6684bfe8d54a0aa4caf238b6/wasmtime-40.0.0-py3-none-android_26_arm64_v8a.whl", hash = "sha256:f81dcd8850c66bbe8da53774515bd255a18fce595899e9d851f9969d48d7f592", size = 6894176, upload-time = "2025-12-22T16:30:23.962Z" }, - { url = "https://files.pythonhosted.org/packages/bf/20/2d6afa0e102e85745a3f637e399151f725e836e91c1cd8304bf8cda6eb8f/wasmtime-40.0.0-py3-none-android_26_x86_64.whl", hash = "sha256:b462e868f9af4bc69ee353e2cebb3ea5c14984f07b703e3dfc208697ac798fc9", size = 7735017, upload-time = "2025-12-22T16:30:25.709Z" }, - { url = "https://files.pythonhosted.org/packages/ac/fa/4d061d3b54d8b550c1a043d197380dd54fb1954c58363b914c061fa7a86e/wasmtime-40.0.0-py3-none-any.whl", hash = "sha256:b7532706094f768fcab15fa0cf8c57278f7bc2770a32a74b98e3be7db0984e56", size = 6297908, upload-time = "2025-12-22T16:30:27.164Z" }, - { url = "https://files.pythonhosted.org/packages/64/33/10a68779d53557a7d441b40106a7ea0085e9b0af9d82466082cafa890258/wasmtime-40.0.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:e2f374948982a749e5c64de04d1a322ecc79ffd633e0f269c47567c3834c4836", size = 7507846, upload-time = "2025-12-22T16:30:28.312Z" }, - { url = 
"https://files.pythonhosted.org/packages/2c/5f/ef035900032a5012aad368017abf2a7b626aed38b31e8f35c3266a3a3676/wasmtime-40.0.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fd2d37071c493377b7c4b27e5d1fe2154f4434fbb6af70f1dce9969f287dac62", size = 6533509, upload-time = "2025-12-22T16:30:29.99Z" }, - { url = "https://files.pythonhosted.org/packages/01/bb/8f6dd6a213706a101c7c598609015648fbd82bd34455cabdec300c304d8c/wasmtime-40.0.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:d1ad1be984bea3f2325e67258bc9d6d2d4520cfdbcc3b0ae752c8b4817d0212c", size = 7798564, upload-time = "2025-12-22T16:30:31.649Z" }, - { url = "https://files.pythonhosted.org/packages/a8/d2/d6f1b1f22da240c14bc60459677fbc13cd630260a2c5eac9737dbde63bb5/wasmtime-40.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:3e711b1049ac95f5f34c945b827311c5d382236af5e535a880a26b8861e85aae", size = 6815182, upload-time = "2025-12-22T16:30:33.154Z" }, - { url = "https://files.pythonhosted.org/packages/be/9f/401934f38c6a6559d2be12180793f18c7c726938a1d207fcfc20a8d4091b/wasmtime-40.0.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:504a903099b0518d589db5c17ee6c95b207392c58a272081dc59c33d7000d11f", size = 6893582, upload-time = "2025-12-22T16:30:34.344Z" }, - { url = "https://files.pythonhosted.org/packages/9f/60/c9300d1146f577847aab879ec90d9da3bc7e20f62150386f08adc4aacf41/wasmtime-40.0.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:fee1be5dea191e8350db2e00ff0e57205b9c2d552c6537d736c5c9c75b1470da", size = 7831639, upload-time = "2025-12-22T16:30:35.6Z" }, - { url = "https://files.pythonhosted.org/packages/85/be/2f81a31430f02f57602ae1b4ff0e369b3cbd07c2fcdd0b696b75e9bfc30a/wasmtime-40.0.0-py3-none-win_amd64.whl", hash = "sha256:ebce72e82d1d18726ce3e769094fd8b1d9fc9a1d310cd87c6a85d3ce48fa6567", size = 6297915, upload-time = "2025-12-22T16:30:36.878Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/87/35cbfdf9619c958a8b48f2ad083b88abc1521d771bfab668002e4405a1da/wasmtime-40.0.0-py3-none-win_arm64.whl", hash = "sha256:7667966236bba5e80a1c454553e566a1fa700328bc3e65b5ca970bee7e177e57", size = 5398931, upload-time = "2025-12-22T16:30:38.047Z" }, + { url = "https://files.pythonhosted.org/packages/31/f9/f6aef5de536d12652d97cf162f124cbdd642150c7da61ffa7863272cdab7/wasmtime-41.0.0-py3-none-android_26_arm64_v8a.whl", hash = "sha256:f5a6e237b5b94188ef9867926b447f779f540c729c92e4d91cc946f2bee7c282", size = 6837018, upload-time = "2026-01-20T18:17:41.489Z" }, + { url = "https://files.pythonhosted.org/packages/04/b9/42ec977972b2dcc8c61e3a40644d24d229b41fba151410644e44e35e6eb1/wasmtime-41.0.0-py3-none-android_26_x86_64.whl", hash = "sha256:4a3e33d0d3cf49062eaa231f748f54af991e89e9a795c5ab9d4f0eee85736e4c", size = 7654957, upload-time = "2026-01-20T18:17:43.285Z" }, + { url = "https://files.pythonhosted.org/packages/18/ca/6cce49b03c35c7fecb4437fd98990c64694a5e0024f9279bef0ddef000f7/wasmtime-41.0.0-py3-none-any.whl", hash = "sha256:5f6721406a6cd186d11f34e6d4991c4d536387b0c577d09a56bd93b8a3cf10c2", size = 6325757, upload-time = "2026-01-20T18:17:44.789Z" }, + { url = "https://files.pythonhosted.org/packages/a0/16/d91cb80322cc7ae10bfa5db8cea4e0b9bb112f0c100b4486783ab16c1c22/wasmtime-41.0.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:2107360212fce33ed2adcfc33b7e75ed7136380a17d3ed598a5bab376dcf9e1b", size = 7471888, upload-time = "2026-01-20T18:17:46.185Z" }, + { url = "https://files.pythonhosted.org/packages/bc/f0/dcc80973d2ec58a1978b838887ccbd84d56900cf66dec5fb730bec3bd081/wasmtime-41.0.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f475df32ce9bfec4f6d0e124a49ca4a89e2ee71ccca460677f5237b1c8ee92ae", size = 6507285, upload-time = "2026-01-20T18:17:48.138Z" }, + { url = "https://files.pythonhosted.org/packages/bd/df/0867edd9ec26eb2e5eee7674a55f82c23ec27dd1d38d2d401f0e308eb920/wasmtime-41.0.0-py3-none-manylinux1_x86_64.whl", hash 
= "sha256:ad7e866430313eb2ee07c85811e524344884489d00896f3b2246b65553fe322c", size = 7732024, upload-time = "2026-01-20T18:17:50.207Z" }, + { url = "https://files.pythonhosted.org/packages/bb/48/b748a2e70478feabc5c876d90e90a39f4aba35378f5ee822f607e8f29c69/wasmtime-41.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:e0ea44584f60dcfa620af82d4fc2589248bcf64a93905b54ac3144242113b48a", size = 6800017, upload-time = "2026-01-20T18:17:51.7Z" }, + { url = "https://files.pythonhosted.org/packages/14/29/43656c3a464d437d62421de16f2de2db645647bab0a0153deea30bfdade4/wasmtime-41.0.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dabb20a2751f01b835095013426a76091bd0bdb36ca9bcfc49c910b78347438", size = 6840763, upload-time = "2026-01-20T18:17:53.125Z" }, + { url = "https://files.pythonhosted.org/packages/9f/09/4608b65fa35ce5fc1479e138293a1166b4ea817cfa9a79f019ab6d7013d8/wasmtime-41.0.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d9627dfc5625b4947ea35c819561da358838fe76f65bda8ffe01ce34df8b32b1", size = 7754016, upload-time = "2026-01-20T18:17:55.346Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9d/236bb367270579e4f628fb7b04fe93541151df7953006f3766607fc667c9/wasmtime-41.0.0-py3-none-win_amd64.whl", hash = "sha256:4f29171d73b71f232b6fe86cba77526fee84139f1590071af5facba401b0c9eb", size = 6325764, upload-time = "2026-01-20T18:17:57.034Z" }, + { url = "https://files.pythonhosted.org/packages/4a/4a/bba9c0368c377250ab24fd005a7a1e9076121778c1e83b1bcc092ab84f86/wasmtime-41.0.0-py3-none-win_arm64.whl", hash = "sha256:0c4bcaba055e78fc161f497b85f39f1d35d475f0341b1e0259fa0a4b49e223e8", size = 5392238, upload-time = "2026-01-20T18:17:59.052Z" }, ] [[package]] @@ -9434,11 +10597,11 @@ wheels = [ [[package]] name = "wcwidth" -version = "0.2.14" +version = "0.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = 
"sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } +sdist = { url = "https://files.pythonhosted.org/packages/35/a2/8e3becb46433538a38726c948d3399905a4c7cabd0df578ede5dc51f0ec2/wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159", size = 159684, upload-time = "2026-02-06T19:19:40.919Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, + { url = "https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = "2026-02-06T19:19:39.646Z" }, ] [[package]] @@ -9452,61 +10615,70 @@ wheels = [ [[package]] name = "websockets" -version = "15.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/da/6462a9f510c0c49837bbc9345aca92d767a56c1fb2939e1579df1e1cdcf7/websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b", size = 175423, upload-time = "2025-03-05T20:01:35.363Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9f/9d11c1a4eb046a9e106483b9ff69bce7ac880443f00e5ce64261b47b07e7/websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205", size = 173080, upload-time = "2025-03-05T20:01:37.304Z" }, - { url = "https://files.pythonhosted.org/packages/d5/4f/b462242432d93ea45f297b6179c7333dd0402b855a912a04e7fc61c0d71f/websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a", size = 173329, upload-time = "2025-03-05T20:01:39.668Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0c/6afa1f4644d7ed50284ac59cc70ef8abd44ccf7d45850d989ea7310538d0/websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e", size = 182312, upload-time = "2025-03-05T20:01:41.815Z" }, - { url = "https://files.pythonhosted.org/packages/dd/d4/ffc8bd1350b229ca7a4db2a3e1c482cf87cea1baccd0ef3e72bc720caeec/websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf", size = 181319, upload-time = "2025-03-05T20:01:43.967Z" }, - { url = "https://files.pythonhosted.org/packages/97/3a/5323a6bb94917af13bbb34009fac01e55c51dfde354f63692bf2533ffbc2/websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb", size = 181631, upload-time = "2025-03-05T20:01:46.104Z" }, - { url = "https://files.pythonhosted.org/packages/a6/cc/1aeb0f7cee59ef065724041bb7ed667b6ab1eeffe5141696cccec2687b66/websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d", size = 182016, upload-time = "2025-03-05T20:01:47.603Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/f9/c86f8f7af208e4161a7f7e02774e9d0a81c632ae76db2ff22549e1718a51/websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9", size = 181426, upload-time = "2025-03-05T20:01:48.949Z" }, - { url = "https://files.pythonhosted.org/packages/c7/b9/828b0bc6753db905b91df6ae477c0b14a141090df64fb17f8a9d7e3516cf/websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c", size = 181360, upload-time = "2025-03-05T20:01:50.938Z" }, - { url = "https://files.pythonhosted.org/packages/89/fb/250f5533ec468ba6327055b7d98b9df056fb1ce623b8b6aaafb30b55d02e/websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256", size = 176388, upload-time = "2025-03-05T20:01:52.213Z" }, - { url = "https://files.pythonhosted.org/packages/1c/46/aca7082012768bb98e5608f01658ff3ac8437e563eca41cf068bd5849a5e/websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41", size = 176830, upload-time = "2025-03-05T20:01:53.922Z" }, - { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, - { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" }, - { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" }, - { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" }, - { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" }, - { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = 
"2025-03-05T20:02:09.842Z" }, - { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" }, - { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" }, - { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" }, - { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, - { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, - { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, - { url = 
"https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, - { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, - { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, - { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, - { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = 
"2025-03-05T20:02:31.634Z" }, - { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, - { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, - { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, - { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, - { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, - { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, - { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, - { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, - { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, - { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, - { url = 
"https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, - { url = "https://files.pythonhosted.org/packages/02/9e/d40f779fa16f74d3468357197af8d6ad07e7c5a27ea1ca74ceb38986f77a/websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3", size = 173109, upload-time = "2025-03-05T20:03:17.769Z" }, - { url = "https://files.pythonhosted.org/packages/bc/cd/5b887b8585a593073fd92f7c23ecd3985cd2c3175025a91b0d69b0551372/websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1", size = 173343, upload-time = "2025-03-05T20:03:19.094Z" }, - { url = "https://files.pythonhosted.org/packages/fe/ae/d34f7556890341e900a95acf4886833646306269f899d58ad62f588bf410/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475", size = 174599, upload-time = "2025-03-05T20:03:21.1Z" }, - { url = "https://files.pythonhosted.org/packages/71/e6/5fd43993a87db364ec60fc1d608273a1a465c0caba69176dd160e197ce42/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9", size = 174207, upload-time = "2025-03-05T20:03:23.221Z" }, - { url = "https://files.pythonhosted.org/packages/2b/fb/c492d6daa5ec067c2988ac80c61359ace5c4c674c532985ac5a123436cec/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04", size = 174155, 
upload-time = "2025-03-05T20:03:25.321Z" }, - { url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122", size = 176884, upload-time = "2025-03-05T20:03:27.934Z" }, - { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/74/221f58decd852f4b59cc3354cccaf87e8ef695fede361d03dc9a7396573b/websockets-16.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04cdd5d2d1dacbad0a7bf36ccbcd3ccd5a30ee188f2560b7a62a30d14107b31a", size = 177343, upload-time = "2026-01-10T09:22:21.28Z" }, + { url = "https://files.pythonhosted.org/packages/19/0f/22ef6107ee52ab7f0b710d55d36f5a5d3ef19e8a205541a6d7ffa7994e5a/websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8ff32bb86522a9e5e31439a58addbb0166f0204d64066fb955265c4e214160f0", size = 175021, upload-time = "2026-01-10T09:22:22.696Z" }, + { url = "https://files.pythonhosted.org/packages/10/40/904a4cb30d9b61c0e278899bf36342e9b0208eb3c470324a9ecbaac2a30f/websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:583b7c42688636f930688d712885cf1531326ee05effd982028212ccc13e5957", size = 175320, upload-time = "2026-01-10T09:22:23.94Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/2f/4b3ca7e106bc608744b1cdae041e005e446124bebb037b18799c2d356864/websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7d837379b647c0c4c2355c2499723f82f1635fd2c26510e1f587d89bc2199e72", size = 183815, upload-time = "2026-01-10T09:22:25.469Z" }, + { url = "https://files.pythonhosted.org/packages/86/26/d40eaa2a46d4302becec8d15b0fc5e45bdde05191e7628405a19cf491ccd/websockets-16.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df57afc692e517a85e65b72e165356ed1df12386ecb879ad5693be08fac65dde", size = 185054, upload-time = "2026-01-10T09:22:27.101Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ba/6500a0efc94f7373ee8fefa8c271acdfd4dca8bd49a90d4be7ccabfc397e/websockets-16.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2b9f1e0d69bc60a4a87349d50c09a037a2607918746f07de04df9e43252c77a3", size = 184565, upload-time = "2026-01-10T09:22:28.293Z" }, + { url = "https://files.pythonhosted.org/packages/04/b4/96bf2cee7c8d8102389374a2616200574f5f01128d1082f44102140344cc/websockets-16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:335c23addf3d5e6a8633f9f8eda77efad001671e80b95c491dd0924587ece0b3", size = 183848, upload-time = "2026-01-10T09:22:30.394Z" }, + { url = "https://files.pythonhosted.org/packages/02/8e/81f40fb00fd125357814e8c3025738fc4ffc3da4b6b4a4472a82ba304b41/websockets-16.0-cp310-cp310-win32.whl", hash = "sha256:37b31c1623c6605e4c00d466c9d633f9b812ea430c11c8a278774a1fde1acfa9", size = 178249, upload-time = "2026-01-10T09:22:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/b4/5f/7e40efe8df57db9b91c88a43690ac66f7b7aa73a11aa6a66b927e44f26fa/websockets-16.0-cp310-cp310-win_amd64.whl", hash = "sha256:8e1dab317b6e77424356e11e99a432b7cb2f3ec8c5ab4dabbcee6add48f72b35", size = 178685, upload-time = "2026-01-10T09:22:33.345Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8", size = 177340, upload-time = "2026-01-10T09:22:34.539Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad", size = 175022, upload-time = "2026-01-10T09:22:36.332Z" }, + { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d", size = 175319, upload-time = "2026-01-10T09:22:37.602Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe", size = 184631, upload-time = "2026-01-10T09:22:38.789Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b", size = 185870, upload-time = "2026-01-10T09:22:39.893Z" }, + { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5", size = 185361, upload-time = "2026-01-10T09:22:41.016Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64", size = 184615, upload-time = "2026-01-10T09:22:42.442Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", hash = "sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6", size = 178246, upload-time = "2026-01-10T09:22:43.654Z" }, + { url = "https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac", size = 178684, upload-time = "2026-01-10T09:22:44.941Z" }, + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = "https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d", size = 174947, upload-time = "2026-01-10T09:23:36.166Z" }, + { url = "https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03", size = 175260, upload-time = "2026-01-10T09:23:37.409Z" }, + { url = "https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da", size = 176071, upload-time = "2026-01-10T09:23:39.158Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c", size = 176968, upload-time = "2026-01-10T09:23:41.031Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767", size = 178735, upload-time = "2026-01-10T09:23:42.259Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, ] [[package]] @@ -9521,6 +10693,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ad/e4/8d97cca767bcc1be76d16fb76951608305561c6e056811587f36cb1316a8/werkzeug-3.1.5-py3-none-any.whl", hash = "sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc", size = 225025, upload-time = "2026-01-08T17:49:21.859Z" }, ] +[[package]] +name = "whatthepatch" +version = "1.0.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/28/55bc3e107a56fdcf7d5022cb32b8c21d98a9cc2df5cd9f3b93e10419099e/whatthepatch-1.0.7.tar.gz", hash = "sha256:9eefb4ebea5200408e02d413d2b4bc28daea6b78bb4b4d53431af7245f7d7edf", size = 34612, upload-time = "2024-11-16T17:21:22.153Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/93/af1d6ccb69ab6b5a00e03fa0cefa563f9862412667776ea15dd4eece3a90/whatthepatch-1.0.7-py3-none-any.whl", hash = "sha256:1b6f655fd31091c001c209529dfaabbabdbad438f5de14e3951266ea0fc6e7ed", size = 11964, upload-time = 
"2024-11-16T17:21:20.761Z" }, +] + [[package]] name = "wrapt" version = "1.17.3" @@ -9602,6 +10783,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/f5/10b68b7b1544245097b2a1b8238f66f2fc6dcaeb24ba5d917f52bd2eed4f/wsproto-1.3.2-py3-none-any.whl", hash = "sha256:61eea322cdf56e8cc904bd3ad7573359a242ba65688716b0710a5eb12beab584", size = 24405, upload-time = "2025-11-20T18:18:00.454Z" }, ] +[[package]] +name = "xacro" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/b2/12fc318d3563481fe01482dd8e925d38e83b62d64291ed16ab0b6d836a91/xacro-2.1.1.tar.gz", hash = "sha256:3e7adf33cdd90d9fbe8ca0d07d9118acf770a2ad8bf575977019a5c8a60d4d1b", size = 104752, upload-time = "2025-08-28T18:21:20.397Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/6b/3fcfd8589d0e319f5fb56f105acbe791198fd36bb54469a91fbe49d828c4/xacro-2.1.1-py3-none-any.whl", hash = "sha256:c3b330ebd984a3bce6d6482e0047eae5c5333fedd49b30b9b6df863a086b35f7", size = 27087, upload-time = "2025-08-28T18:21:19.165Z" }, +] + [[package]] name = "xarm-python-sdk" version = "1.17.3" @@ -9613,17 +10806,17 @@ wheels = [ [[package]] name = "xformers" -version = "0.0.33.post2" +version = "0.0.34" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "torch" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'aarch64') or (python_full_version < '3.11' and sys_platform != 'linux')" }, + { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = 
"(python_full_version >= '3.11' and platform_machine != 'aarch64') or (python_full_version >= '3.11' and sys_platform != 'linux')" }, + { name = "torch", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/69/403e963d35f1b0c52a1b3127e0bc4e94e7e50ecee8c6684a8abe40e6638e/xformers-0.0.33.post2.tar.gz", hash = "sha256:647ddf26578d2b8643230467ef1f0fbfef0bbe556a546bd27a70d4855d3433e1", size = 14783914, upload-time = "2025-12-04T18:52:42.572Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/2b/365151a1e2e6aa70c1bd66e0532e3d71915a28a34ebde3d9b068e8849f66/xformers-0.0.34.tar.gz", hash = "sha256:716bd9ffe61f46c2cc0536abf8b8c43ec594bea47a49394ea5cfa417e9de6a6f", size = 14303297, upload-time = "2026-01-23T18:14:31.457Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/c8/2957d8a8bf089a4e57f046867d4c9b31fc2e1d16013bc57cd7ae651a65b5/xformers-0.0.33.post2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9ea6032defa60395559b6a446c2ae945236707e98daabd88fea57cd08671c174", size = 122883631, upload-time = "2025-12-04T18:52:35.318Z" }, - { url = "https://files.pythonhosted.org/packages/b3/72/057e48a3c2187f74202b3cca97e9f8a844342122909c93314fd641daa5d0/xformers-0.0.33.post2-cp39-abi3-win_amd64.whl", hash = "sha256:4a0a59a0c698a483f13ecad967dbbe71386827985e80cc373bec4cdf9aed59cd", size = 105088221, upload-time = "2025-12-04T18:52:39.699Z" }, + { url = "https://files.pythonhosted.org/packages/44/33/3f4316a70ebbc2cccd3219d85bec9f4c134e5c135afbf8cba2b2be26cb40/xformers-0.0.34-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:381cc47f43e95893e21b7f04f1aa31dc10a81fc95ba92482e4465a5064c77743", size = 110763890, upload-time = "2026-01-23T18:14:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/15/03/5e3cfc5b45d008667e3cb87f1e75144a6fcd87eafa1fabb923f10c4cd9f5/xformers-0.0.34-cp39-abi3-win_amd64.whl", hash = 
"sha256:941979e890dd18e26f9860daa83acb706e658345d18511a962f909067331cc19", size = 103155172, upload-time = "2026-01-23T18:14:27.798Z" }, ] [[package]]