diff --git a/.env.development b/.env.development new file mode 100644 index 0000000..aebe440 --- /dev/null +++ b/.env.development @@ -0,0 +1,6 @@ +SDL_FRONTEND_HOST=localhost +SDL_FRONTEND_PORT=1420 +SDL_SIDECAR_HOST=127.0.0.1 +SDL_SIDECAR_PORT=5321 +SDL_CONFIG_DIR=.local/sidecar-dev +VITE_API_BASE_URL=/api diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..b642623 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,112 @@ +name: CI + +on: + push: + branches: + - '**' + pull_request: + +permissions: + contents: read + +concurrency: + group: ci-${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + validate-and-test: + name: Validate and Test + runs-on: ubuntu-22.04 + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + cache: npm + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Install Linux system dependencies + run: | + sudo apt-get update + sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf + + - name: Install frontend dependencies + run: npm ci + + - name: Verify Cargo version is readable + shell: bash + run: | + VERSION=$(sed -nE 's/^version = "([^"]+)"$/\1/p' Cargo.toml | head -n 1) + if [ -z "$VERSION" ]; then + echo "Could not read [package].version from Cargo.toml." 
+ exit 1 + fi + echo "Cargo version: $VERSION" + + - name: Run frontend unit tests + run: npm test + + - name: Run Rust unit tests + run: cargo test + + build-desktop: + name: Build Desktop (${{ matrix.label }}) + needs: validate-and-test + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - label: macOS universal DMG + os: macos-15 + bundle_args: --target universal-apple-darwin --bundles dmg + rust_targets: x86_64-apple-darwin + - label: Windows NSIS + os: windows-2022 + bundle_args: --bundles nsis + rust_targets: '' + - label: Linux AppImage + os: ubuntu-22.04 + bundle_args: --bundles appimage + rust_targets: '' + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + cache: npm + + - name: Install Rust toolchain + if: matrix.rust_targets == '' + uses: dtolnay/rust-toolchain@stable + + - name: Install Rust toolchain and targets + if: matrix.rust_targets != '' + uses: dtolnay/rust-toolchain@stable + with: + targets: ${{ matrix.rust_targets }} + + - name: Install Linux system dependencies + if: matrix.os == 'ubuntu-22.04' + run: | + sudo apt-get update + sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf + + - name: Install frontend dependencies + run: npm ci + + - name: Build desktop bundle + uses: tauri-apps/tauri-action@v0.6.2 + with: + projectPath: . 
+ args: ${{ matrix.bundle_args }} diff --git a/.github/workflows/distribute.yaml b/.github/workflows/distribute.yaml deleted file mode 100644 index 91c538a..0000000 --- a/.github/workflows/distribute.yaml +++ /dev/null @@ -1,217 +0,0 @@ -name: Streaming Data Loader Build and AWS Distribution -run-name: "${{ github.actor }} is building Streaming Data Loader on ${{ github.ref }}" - -on: - release: - types: [created] - - -jobs: - build-ubuntu: - runs-on: ubuntu-latest - - steps: - - name: Checkout Repo - uses: actions/checkout@v3 - - - uses: actions/setup-python@v4 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r requirements.txt - pip install pyinstaller==5.13.0 - - - name: Build App - env: - APP_VERSION: ${{ github.ref_name }} - run: | - echo $APP_VERSION > version.txt - pyinstaller \ - --name 'Streaming Data Loader' \ - --icon 'src/assets/app_icon.png' \ - --windowed \ - --onedir \ - --add-data 'version.txt:.' \ - --add-data 'src/assets/setup_icon.png:.' \ - --add-data 'src/assets/app_icon.png:.' \ - --add-data 'src/assets/connected.png:.' \ - --add-data 'src/assets/database.png:.' \ - --add-data 'src/assets/description.png:.' \ - --add-data 'src/assets/disconnected.png:.' \ - --add-data 'src/assets/exit.png:.' \ - --add-data 'src/assets/pause.png:.' \ - --add-data 'src/assets/resume.png:.' 
\ - --hidden-import 'demjson3' \ - --additional-hooks-dir 'pyi-hooks' \ - src/app.py - - - name: Zip App - run: zip -r "Streaming Data Loader.zip" "dist/Streaming Data Loader" - - - name: Upload Build - uses: actions/upload-artifact@v4 - with: - name: Streaming Data Loader Ubuntu - path: "Streaming Data Loader.zip" - - build-macos: - runs-on: macos-latest - - steps: - - name: Checkout Repo - uses: actions/checkout@v3 - - - uses: actions/setup-python@v4 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r requirements.txt - pip install pyinstaller==5.13.0 - pip install dmgbuild==1.6.1 - - - name: Build App - env: - APP_VERSION: ${{ github.ref_name }} - run: | - echo $APP_VERSION > version.txt - pyinstaller \ - --name 'Streaming Data Loader' \ - --icon 'src/assets/app_icon.png' \ - --windowed \ - --onedir \ - --add-data 'version.txt:.' \ - --add-data 'src/assets/setup_icon.png:.' \ - --add-data 'src/assets/app_icon.png:.' \ - --add-data 'src/assets/connected.png:.' \ - --add-data 'src/assets/database.png:.' \ - --add-data 'src/assets/description.png:.' \ - --add-data 'src/assets/disconnected.png:.' \ - --add-data 'src/assets/exit.png:.' \ - --add-data 'src/assets/pause.png:.' \ - --add-data 'src/assets/resume.png:.' 
\ - --hidden-import 'demjson3' \ - --additional-hooks-dir 'pyi-hooks' \ - src/app.py - - - name: Package App DMG - run: | - dmgbuild \ - -s src/package/macos/settings.py \ - "Install Streaming Data Loader" \ - "Streaming Data Loader.dmg" - - - name: Upload build artifact - uses: actions/upload-artifact@v4 - with: - name: Streaming Data Loader MacOS - path: "Streaming Data Loader.dmg" - - build-windows: - runs-on: windows-latest - - steps: - - name: Checkout Repo - uses: actions/checkout@v3 - - - uses: actions/setup-python@v4 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r requirements.txt - pip install pyinstaller==5.13.0 - - - name: Build App - env: - APP_VERSION: ${{ github.ref_name }} - shell: pwsh - run: | - echo $env:APP_VERSION > version.txt - pyinstaller ` - --name "Streaming Data Loader" ` - --icon src\assets\app_icon.png ` - --windowed ` - --onedir ` - --add-data "version.txt;." ` - --add-data "src\assets\setup_icon.png;." ` - --add-data "src\assets\app_icon.png;." ` - --add-data "src\assets\connected.png;." ` - --add-data "src\assets\database.png;." ` - --add-data "src\assets\description.png;." ` - --add-data "src\assets\disconnected.png;." ` - --add-data "src\assets\exit.png;." ` - --add-data "src\assets\pause.png;." ` - --add-data "src\assets\resume.png;." 
` - --hidden-import "demjson3" ` - --additional-hooks-dir "pyi-hooks" ` - src\app.py - - - name: Zip App - run: tar.exe -a -c -f "Streaming Data Loader.zip" "dist/Streaming Data Loader" - - - name: Upload build artifact - uses: actions/upload-artifact@v4 - with: - name: Streaming Data Loader Windows - path: "Streaming Data Loader.zip" - - upload-release-assets: - needs: [build-ubuntu, build-macos, build-windows] - runs-on: ubuntu-latest - - steps: - - name: Checkout Repo - uses: actions/checkout@v3 - - - name: Download build artifact (Ubuntu) - uses: actions/download-artifact@v4 - with: - name: Streaming Data Loader Ubuntu - path: release/ubuntu - - - name: Download build artifact (MacOS) - uses: actions/download-artifact@v4 - with: - name: Streaming Data Loader MacOS - path: release/macos - - - name: Download build artifact (Windows) - uses: actions/download-artifact@v4 - with: - name: Streaming Data Loader Windows - path: release/windows - - - name: Upload Release Asset (Ubuntu) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ github.event.release.upload_url }} - asset_path: release/ubuntu/Streaming Data Loader.zip - asset_name: Streaming Data Loader Ubuntu.zip - asset_content_type: application/zip - - - name: Upload Release Asset (MacOS) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ github.event.release.upload_url }} - asset_path: release/macos/Streaming Data Loader.dmg - asset_name: Streaming Data Loader MacOS.dmg - asset_content_type: application/x-apple-diskimage - - - name: Upload Release Asset (Windows) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ github.event.release.upload_url }} - asset_path: release/windows/Streaming Data Loader.zip - asset_name: Streaming Data Loader Windows.zip - asset_content_type: application/zip diff --git 
a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..3d5628c --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,132 @@ +name: Release Bundles + +on: + release: + types: + - published + +permissions: + contents: write + +concurrency: + group: release-${{ github.event.release.id }} + cancel-in-progress: false + +jobs: + verify-release: + name: Verify Release + runs-on: ubuntu-22.04 + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + cache: npm + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Install Linux system dependencies + run: | + sudo apt-get update + sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf + + - name: Install frontend dependencies + run: npm ci + + - name: Verify release tag matches app version + shell: bash + run: | + VERSION=$(sed -nE 's/^version = "([^"]+)"$/\1/p' Cargo.toml | head -n 1) + if [ -z "$VERSION" ]; then + echo "Could not read [package].version from Cargo.toml." + exit 1 + fi + + TAG="${{ github.event.release.tag_name }}" + NORMALIZED_TAG="${TAG#refs/tags/}" + NORMALIZED_TAG="${NORMALIZED_TAG#v}" + + if [ "$NORMALIZED_TAG" != "$VERSION" ]; then + echo "Release tag does not match Cargo.toml version." 
+ echo "release tag: $TAG" + echo "normalized tag: $NORMALIZED_TAG" + echo "Cargo.toml version: $VERSION" + exit 1 + fi + + echo "Release tag matches Cargo.toml version: $VERSION" + + - name: Run frontend unit tests + run: npm test + + - name: Run Rust unit tests + run: cargo test + + publish-bundles: + name: Publish ${{ matrix.label }} + needs: verify-release + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - label: macOS universal DMG + os: macos-15 + bundle_args: --target universal-apple-darwin --bundles dmg + rust_targets: x86_64-apple-darwin + release_asset_name_pattern: 'Streaming.Data.Loader.MacOS[ext]' + - label: Windows NSIS + os: windows-2022 + bundle_args: --bundles nsis + rust_targets: '' + release_asset_name_pattern: 'Streaming.Data.Loader.Windows[ext]' + - label: Linux AppImage + os: ubuntu-22.04 + bundle_args: --bundles appimage + rust_targets: '' + release_asset_name_pattern: 'Streaming.Data.Loader.Linux[ext]' + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + cache: npm + + - name: Install Rust toolchain + if: matrix.rust_targets == '' + uses: dtolnay/rust-toolchain@stable + + - name: Install Rust toolchain and targets + if: matrix.rust_targets != '' + uses: dtolnay/rust-toolchain@stable + with: + targets: ${{ matrix.rust_targets }} + + - name: Install Linux system dependencies + if: matrix.os == 'ubuntu-22.04' + run: | + sudo apt-get update + sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf + + - name: Install frontend dependencies + run: npm ci + + - name: Build and upload bundle + uses: tauri-apps/tauri-action@v0.6.2 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + releaseId: ${{ github.event.release.id }} + tagName: ${{ github.event.release.tag_name }} + assetNamePattern: ${{ matrix.release_asset_name_pattern }} + projectPath: . 
+ args: ${{ matrix.bundle_args }} diff --git a/.gitignore b/.gitignore index dda6ac6..9538ad2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,23 @@ -build -dist -.DS_Store -.idea -*.dmg +# Generated by Cargo +# will have compiled files and executables +/target/ + +# Generated by Tauri +# will have schema files for capabilities auto-completion +/gen/schemas + +# Root frontend artifacts +/node_modules/ +/dist/ +.sdl-dev-data/ +*.local +/.cache/ +/.venv/ +/frontend/generated.css *.pyc -*.spec -__pycache__/ \ No newline at end of file +__pycache__/ + +# Legacy reference build artifacts +/legacy-reference/node_modules/ +/legacy-reference/dist/ +/legacy-reference/build/ diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 0000000..fa51da2 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,6 @@ +{ + "trailingComma": "es5", + "tabWidth": 2, + "semi": false, + "singleQuote": true +} diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..8a1257e --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,9 @@ +{ + "recommendations": [ + "ms-python.python", + "ms-python.debugpy", + "bradlc.vscode-tailwindcss", + "tauri-apps.tauri-vscode", + "rust-lang.rust-analyzer" + ] +} diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..e913111 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,4 @@ +{ + "version": "0.2.0", + "configurations": [] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..1daadb5 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,39 @@ +{ + "files.exclude": { + "**/.cache": true, + "**/.venv": true, + "**/__pycache__": true, + "dist": true, + "node_modules": true + }, + "files.watcherExclude": { + "**/.cache/**": true, + "**/.venv/**": true, + "**/dist/**": true, + "**/node_modules/**": true + }, + "search.exclude": { + "**/.cache/**": true, + "**/.venv/**": true, + "**/dist/**": true, + "**/node_modules/**": true + }, + 
"editor.formatOnSave": true, + "prettier.requireConfig": true, + "[javascript]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[typescript]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[vue]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[json]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "python.analysis.exclude": [ + ".venv/**" + ], + "python.terminal.activateEnvironment": true +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..4bd43c4 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,100 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "SDL: Bootstrap Frontend", + "type": "shell", + "command": "node", + "args": [ + "./scripts/bootstrap-frontend.mjs" + ], + "options": { + "cwd": "${workspaceFolder}" + }, + "presentation": { + "reveal": "always", + "panel": "dedicated", + "group": "sdl-bootstrap" + }, + "problemMatcher": [] + }, + { + "label": "SDL: Tailwind", + "type": "shell", + "command": "npm", + "args": [ + "run", + "tailwind:watch" + ], + "options": { + "cwd": "${workspaceFolder}" + }, + "isBackground": true, + "presentation": { + "reveal": "always", + "panel": "dedicated", + "group": "sdl-dev" + }, + "problemMatcher": { + "owner": "tailwind", + "pattern": { + "regexp": ".+" + }, + "background": { + "activeOnStart": true, + "beginsPattern": ".", + "endsPattern": "Done in .*" + } + } + }, + { + "label": "SDL: Frontend", + "type": "shell", + "command": "npm", + "args": [ + "run", + "dev" + ], + "options": { + "cwd": "${workspaceFolder}" + }, + "isBackground": true, + "presentation": { + "reveal": "always", + "panel": "dedicated", + "group": "sdl-dev" + }, + "problemMatcher": { + "owner": "vite", + "pattern": { + "regexp": ".+" + }, + "background": { + "activeOnStart": true, + "beginsPattern": ".", + "endsPattern": "Local:.*" + } + } + }, + { + "label": "SDL: Dev Services", + "dependsOrder": "sequence", + "dependsOn": [ + "SDL: Bootstrap 
Frontend", + "SDL: Tailwind", + "SDL: Frontend" + ], + "problemMatcher": [] + }, + { + "label": "SDL: Dev", + "dependsOrder": "sequence", + "dependsOn": [ + "SDL: Bootstrap Frontend", + "SDL: Tailwind", + "SDL: Frontend" + ], + "problemMatcher": [] + } + ] +} diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..05dc42b --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,6323 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "async-broadcast" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"435a87a52755b8f27fcf321ac4f04b2802e337c8c4872923137471ec39c37532" +dependencies = [ + "event-listener", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-channel" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-executor" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c96bf972d85afc50bf5ab8fe2d54d1586b4e0b46c97c50a0c9e71e2f7bcd812a" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "pin-project-lite", + "slab", +] + +[[package]] +name = "async-io" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" +dependencies = [ + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-lock" +version = "3.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" +dependencies = [ + "event-listener", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-process" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75" +dependencies = [ + "async-channel", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener", + "futures-lite", + "rustix", +] + +[[package]] +name = "async-recursion" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "async-signal" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "atk" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241b621213072e993be4f6f3a9e4b45f65b7e6faad43001be957184b7bb1824b" +dependencies = [ + "atk-sys", + "glib", + "libc", +] + +[[package]] +name = "atk-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5e48b684b0ca77d2bbadeef17424c2ea3c897d44d566a1617e7e8f30614d086" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "axum" +version = "0.7.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +dependencies = [ + "async-trait", + "axum-core", + "axum-macros", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-macros" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57d123550fa8d071b7255cb0cc04dc302baa6c8c4a79f55701552684d8399bce" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.8.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" +dependencies = [ + "serde_core", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" +dependencies = [ + "objc2", +] + +[[package]] +name = "blocking" +version = "1.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" +dependencies = [ + "async-channel", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + +[[package]] +name = "brotli" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bumpalo" +version = "3.20.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" + +[[package]] +name = "bytemuck" +version = "1.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" +dependencies = [ + "serde", +] + +[[package]] +name = "cairo-rs" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ca26ef0159422fb77631dc9d17b102f253b876fe1586b03b803e63a309b4ee2" +dependencies = [ + "bitflags 2.11.0", + "cairo-sys-rs", + "glib", + "libc", + "once_cell", + "thiserror 1.0.69", +] + +[[package]] +name = "cairo-sys-rs" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "685c9fa8e590b8b3d678873528d83411db17242a73fccaed827770ea0fedda51" +dependencies = [ + "glib-sys", + "libc", + "system-deps", +] + +[[package]] +name = "camino" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48" +dependencies = [ + "serde_core", +] + +[[package]] +name = "cargo-platform" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.19.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" +dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", + "thiserror 2.0.18", +] + +[[package]] +name = "cargo_toml" +version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "374b7c592d9c00c1f4972ea58390ac6b18cbb6ab79011f3bdc90a0b82ca06b77" +dependencies = [ + "serde", + "toml 0.9.12+spec-1.1.0", +] + +[[package]] +name = "cc" +version = "1.2.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" +dependencies = [ + "find-msvc-tools", + "shlex", +] + +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + +[[package]] +name = "cfb" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d38f2da7a0a2c4ccf0065be06397cc26a81f4e528be095826eee9d4adbb8c60f" +dependencies = [ + "byteorder", + "fnv", + "uuid", +] + +[[package]] +name = "cfg-expr" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d067ad48b8650848b989a59a86c6c36a995d02d2bf778d45c3c5d57bc2718f02" +dependencies = [ + "smallvec", + "target-lexicon", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" +dependencies = [ + 
"iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link 0.2.1", +] + +[[package]] +name = "chrono-tz" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" +dependencies = [ + "chrono", + "phf 0.12.1", +] + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "cookie" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" +dependencies = [ + "time", + "version_check", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "core-graphics" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "064badf302c3194842cf2c5d61f56cc88e54a759313879cdf03abdd27d0c3b97" +dependencies = [ + "bitflags 
2.11.0", + "core-foundation", + "core-graphics-types", + "foreign-types", + "libc", +] + +[[package]] +name = "core-graphics-types" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d44a101f213f6c4cdc1853d4b78aef6db6bdfa3468798cc1d9912f4735013eb" +dependencies = [ + "bitflags 2.11.0", + "core-foundation", + "libc", +] + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "cssparser" +version = "0.29.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93d03419cb5950ccfd3daf3ff1c7a36ace64609a1a8746d493df1ca0afde0fa" +dependencies = [ + "cssparser-macros", + "dtoa-short", + "itoa", + "matches", + "phf 0.10.1", + "proc-macro2", + "quote", + "smallvec", + "syn 1.0.109", +] + +[[package]] +name = "cssparser" +version = "0.36.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dae61cf9c0abb83bd659dab65b7e4e38d8236824c85f0f804f173567bda257d2" +dependencies = [ + "cssparser-macros", + "dtoa-short", + "itoa", + "phf 0.13.1", + "smallvec", +] + +[[package]] +name = "cssparser-macros" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" +dependencies = [ + "quote", + "syn 2.0.117", +] + +[[package]] +name = "csv" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52cd9d68cf7efc6ddfaaee42e7288d3a99d613d4b50f76ce9827ae0c6e14f938" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde_core", +] + +[[package]] +name = "csv-core" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "704a3c26996a80471189265814dbc2c257598b96b8a7feae2d31ace646bb9782" +dependencies = [ + "memchr", +] + +[[package]] +name = "ctor" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" +dependencies = [ + "quote", + "syn 2.0.117", +] + +[[package]] +name = "darling" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.117", +] + +[[package]] +name = "darling_macro" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" 
+dependencies = [ + "darling_core", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "deranged" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd812cc2bc1d69d4764bd80df88b4317eaef9e773c75226407d9bc0876b211c" +dependencies = [ + "powerfmt", + "serde_core", +] + +[[package]] +name = "derive_more" +version = "0.99.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 2.0.117", +] + +[[package]] +name = "derive_more" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" +dependencies = [ + "proc-macro2", + "quote", + "rustc_version", + "syn 2.0.117", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "dirs" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" +dependencies = [ + "libc", + "option-ext", + "redox_users", + "windows-sys 0.61.2", +] + +[[package]] +name = "dispatch2" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0e367e4e7da84520dedcac1901e4da967309406d1e51017ae1abfb97adbd38" +dependencies = [ + "bitflags 2.11.0", + "block2", + "libc", + "objc2", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "dlopen2" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e2c5bd4158e66d1e215c49b837e11d62f3267b30c92f1d171c4d3105e3dc4d4" +dependencies = [ + "dlopen2_derive", + "libc", + "once_cell", + "winapi", +] + +[[package]] +name = "dlopen2_derive" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fbbb781877580993a8707ec48672673ec7b81eeba04cfd2310bd28c08e47c8f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "dom_query" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d9c2e7f1d22d0f2ce07626d259b8a55f4a47cb0938d4006dd8ae037f17d585e" +dependencies = [ + "bit-set", + "cssparser 0.36.0", + "foldhash 0.2.0", + "html5ever 0.36.1", + "precomputed-hash", + "selectors 0.35.0", + "tendril", +] + +[[package]] +name = "dpi" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8b14ccef22fc6f5a8f4d7d768562a182c04ce9a3b3157b91390b52ddfdf1a76" +dependencies = [ + "serde", +] + +[[package]] +name = "dtoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c3cf4824e2d5f025c7b531afcb2325364084a16806f6d47fbc1f5fbd9960590" + +[[package]] +name = "dtoa-short" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd1511a7b6a56299bd043a9c167a6d2bfb37bf84a6dfceaba651168adfb43c87" 
+dependencies = [ + "dtoa", +] + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "dyn-clone" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" + +[[package]] +name = "embed-resource" +version = "3.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55a075fc573c64510038d7ee9abc7990635863992f83ebc52c8b433b8411a02e" +dependencies = [ + "cc", + "memchr", + "rustc_version", + "toml 0.9.12+spec-1.1.0", + "vswhom", + "winreg", +] + +[[package]] +name = "embed_plist" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ef6b89e5b37196644d8796de5268852ff179b44e96276cf4290264843743bb7" + +[[package]] +name = "endi" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66b7e2430c6dff6a955451e2cfc438f09cea1965a9d6f87f7e3b90decc014099" + +[[package]] +name = "enumflags2" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1027f7680c853e056ebcec683615fb6fbbc07dbaa13b4d5d9442b146ded4ecef" +dependencies = [ + "enumflags2_derive", + "serde", +] + +[[package]] +name = "enumflags2_derive" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "erased-serde" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "d2add8a07dd6a8d93ff627029c51de145e12686fbc36ecb298ac22e74cf02dec" +dependencies = [ + "serde", + "serde_core", + "typeid", +] + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener", + "pin-project-lite", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "fdeflate" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "field-offset" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38e2275cc4e4fc009b0669731a1e5ab7ebf11f469eaede2bab9309a5b4d6057f" +dependencies = [ + "memoffset", + "rustc_version", +] + +[[package]] +name = "filetime" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db" +dependencies = [ + "cfg-if", + "libc", + "libredox", +] + +[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + +[[package]] +name = "foreign-types" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d737d9aa519fb7b749cbc3b962edcf310a8dd1f4b67c91c4f83975dbdd17d965" +dependencies = [ + "foreign-types-macros", + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-macros" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "foreign-types-shared" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] 
+name = "fs2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + +[[package]] +name = "futf" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" +dependencies = [ + "mac", + "new_debug_unreachable", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-lite" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + +[[package]] +name = "futures-macro" +version = "0.3.32" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "gdk" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9f245958c627ac99d8e529166f9823fb3b838d1d41fd2b297af3075093c2691" +dependencies = [ + "cairo-rs", + "gdk-pixbuf", + "gdk-sys", + "gio", + "glib", + "libc", + "pango", +] + +[[package]] +name = "gdk-pixbuf" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50e1f5f1b0bfb830d6ccc8066d18db35c487b1b2b1e8589b5dfe9f07e8defaec" +dependencies = [ + "gdk-pixbuf-sys", + "gio", + "glib", + "libc", + "once_cell", +] + 
+[[package]] +name = "gdk-pixbuf-sys" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9839ea644ed9c97a34d129ad56d38a25e6756f99f3a88e15cd39c20629caf7" +dependencies = [ + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "gdk-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c2d13f38594ac1e66619e188c6d5a1adb98d11b2fcf7894fc416ad76aa2f3f7" +dependencies = [ + "cairo-sys-rs", + "gdk-pixbuf-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "pango-sys", + "pkg-config", + "system-deps", +] + +[[package]] +name = "gdkwayland-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "140071d506d223f7572b9f09b5e155afbd77428cd5cc7af8f2694c41d98dfe69" +dependencies = [ + "gdk-sys", + "glib-sys", + "gobject-sys", + "libc", + "pkg-config", + "system-deps", +] + +[[package]] +name = "gdkx11" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3caa00e14351bebbc8183b3c36690327eb77c49abc2268dd4bd36b856db3fbfe" +dependencies = [ + "gdk", + "gdkx11-sys", + "gio", + "glib", + "libc", + "x11", +] + +[[package]] +name = "gdkx11-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e2e7445fe01ac26f11601db260dd8608fe172514eb63b3b5e261ea6b0f4428d" +dependencies = [ + "gdk-sys", + "glib-sys", + "libc", + "system-deps", + "x11", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" 
+dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi 5.3.0", + "wasip2", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "libc", + "r-efi 6.0.0", + "wasip2", + "wasip3", +] + +[[package]] +name = "gio" +version = "0.18.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4fc8f532f87b79cbc51a79748f16a6828fb784be93145a322fa14d06d354c73" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "gio-sys", + "glib", + "libc", + "once_cell", + "pin-project-lite", + "smallvec", + "thiserror 1.0.69", +] + +[[package]] +name = "gio-sys" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37566df850baf5e4cb0dfb78af2e4b9898d817ed9263d1090a2df958c64737d2" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", + "winapi", +] + +[[package]] +name = "glib" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233daaf6e83ae6a12a52055f568f9d7cf4671dabb78ff9560ab6da230ce00ee5" +dependencies = [ + "bitflags 2.11.0", + "futures-channel", + "futures-core", + "futures-executor", + "futures-task", + "futures-util", + 
"gio-sys", + "glib-macros", + "glib-sys", + "gobject-sys", + "libc", + "memchr", + "once_cell", + "smallvec", + "thiserror 1.0.69", +] + +[[package]] +name = "glib-macros" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bb0228f477c0900c880fd78c8759b95c7636dbd7842707f49e132378aa2acdc" +dependencies = [ + "heck 0.4.1", + "proc-macro-crate 2.0.2", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "glib-sys" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063ce2eb6a8d0ea93d2bf8ba1957e78dbab6be1c2220dd3daca57d5a9d869898" +dependencies = [ + "libc", + "system-deps", +] + +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + +[[package]] +name = "gobject-sys" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0850127b514d1c4a4654ead6dedadb18198999985908e6ffe4436f53c785ce44" +dependencies = [ + "glib-sys", + "libc", + "system-deps", +] + +[[package]] +name = "governor" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0746aa765db78b521451ef74221663b57ba595bf83f75d0ce23cc09447c8139f" +dependencies = [ + "cfg-if", + "futures-sink", + "futures-timer", + "futures-util", + "no-std-compat", + "nonzero_ext", + "parking_lot", + "portable-atomic", + "smallvec", + "spinning_top", +] + +[[package]] +name = "gtk" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd56fb197bfc42bd5d2751f4f017d44ff59fbb58140c6b49f9b3b2bdab08506a" +dependencies = [ + "atk", + "cairo-rs", + "field-offset", + "futures-channel", + "gdk", + "gdk-pixbuf", + "gio", + "glib", + "gtk-sys", + "gtk3-macros", + "libc", + "pango", + "pkg-config", +] + +[[package]] +name = "gtk-sys" +version = 
"0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f29a1c21c59553eb7dd40e918be54dccd60c52b049b75119d5d96ce6b624414" +dependencies = [ + "atk-sys", + "cairo-sys-rs", + "gdk-pixbuf-sys", + "gdk-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "pango-sys", + "system-deps", +] + +[[package]] +name = "gtk3-macros" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52ff3c5b21f14f0736fed6dcfc0bfb4225ebf5725f3c0209edeec181e4d73e9d" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash 0.1.5", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "html5ever" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b7410cae13cbc75623c98ac4cbfd1f0bedddf3227afc24f370cf0f50a44a11c" +dependencies = [ + "log", + "mac", + "markup5ever 0.14.1", + "match_token", +] + +[[package]] +name = "html5ever" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6452c4751a24e1b99c3260d505eaeee76a050573e61f30ac2c924ddc7236f01e" +dependencies = [ + "log", + "markup5ever 0.36.1", +] + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + 
"atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2b52f86d1d4bc0d6b4e6826d960b1b333217e07d36b882dca570a5e1c48895b" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "tokio", + "tokio-rustls", + "tower-service", + "webpki-roots", +] + +[[package]] +name = "hyper-util" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core 0.62.2", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ico" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e795dff5605e0f04bff85ca41b51a96b83e80b281e96231bcaaf1ac35103371" +dependencies = [ + "byteorder", + "png", +] + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "infer" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a588916bfdfd92e71cacef98a63d9b1f0d74d6599980d11894290e7ddefffcf7" +dependencies = [ + "cfb", +] + +[[package]] +name = "inotify" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff" +dependencies = [ + "bitflags 1.3.2", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + +[[package]] +name = "ipnet" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" + +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is-docker" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "928bae27f42bc99b60d9ac7334e3a21d10ad8f1835a4e12ec3ec0464765ed1b3" +dependencies = [ + "once_cell", +] + +[[package]] +name = "is-wsl" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "173609498df190136aa7dea1a91db051746d339e18476eed5ca40521f02d7aa5" +dependencies = [ + "is-docker", + "once_cell", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "javascriptcore-rs" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca5671e9ffce8ffba57afc24070e906da7fc4b1ba66f2cabebf61bf2ea257fcc" +dependencies = [ + "bitflags 1.3.2", + "glib", + "javascriptcore-rs-sys", +] + +[[package]] +name = "javascriptcore-rs-sys" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af1be78d14ffa4b75b66df31840478fef72b51f8c2465d4ca7c194da9f7a5124" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies 
= [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "js-sys" +version = "0.3.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "json-patch" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "863726d7afb6bc2590eeff7135d923545e5e964f004c2ccf8716c25e70a86f08" +dependencies = [ + "jsonptr", + "serde", + "serde_json", + "thiserror 1.0.69", +] + +[[package]] +name = "jsonptr" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dea2b27dd239b2556ed7a25ba842fe47fd602e7fc7433c2a8d6106d4d9edd70" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "keyboard-types" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b750dcadc39a09dbadd74e118f6dd6598df77fa01df0cfcdc52c28dece74528a" +dependencies = [ + "bitflags 2.11.0", + "serde", + "unicode-segmentation", +] + +[[package]] +name = "kqueue" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + +[[package]] +name = "kuchikiki" +version = "0.8.8-speedreader" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "02cb977175687f33fa4afa0c95c112b987ea1443e5a51c8f8ff27dc618270cc2" +dependencies = [ + "cssparser 0.29.6", + "html5ever 0.29.1", + "indexmap 2.13.0", + "selectors 0.24.0", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "libappindicator" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03589b9607c868cc7ae54c0b2a22c8dc03dd41692d48f2d7df73615c6a95dc0a" +dependencies = [ + "glib", + "gtk", + "gtk-sys", + "libappindicator-sys", + "log", +] + +[[package]] +name = "libappindicator-sys" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e9ec52138abedcc58dc17a7c6c0c00a2bdb4f3427c7f63fa97fd0d859155caf" +dependencies = [ + "gtk-sys", + "libloading", + "once_cell", +] + +[[package]] +name = "libc" +version = "0.2.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if", + "winapi", +] + +[[package]] +name = "libredox" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1744e39d1d6a9948f4f388969627434e31128196de472883b39f148769bfe30a" +dependencies = [ + "bitflags 2.11.0", + "libc", + "plain", + "redox_syscall 0.7.4", +] + +[[package]] +name = "linux-raw-sys" +version = "0.12.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "mac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" + +[[package]] +name = "markup5ever" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7a7213d12e1864c0f002f52c2923d4556935a43dec5e71355c2760e0f6e7a18" +dependencies = [ + "log", + "phf 0.11.3", + "phf_codegen 0.11.3", + "string_cache 0.8.9", + "string_cache_codegen 0.5.4", + "tendril", +] + +[[package]] +name = "markup5ever" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c3294c4d74d0742910f8c7b466f44dda9eb2d5742c1e430138df290a1e8451c" +dependencies = [ + "log", + "tendril", + "web_atoms", +] + +[[package]] +name = "match_token" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b" +dependencies = [ + "proc-macro2", 
+ "quote", + "syn 2.0.117", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "log", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.48.0", +] + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.61.2", +] + +[[package]] +name = "muda" +version = "0.17.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "01c1738382f66ed56b3b9c8119e794a2e23148ac8ea214eda86622d4cb9d415a" +dependencies = [ + "crossbeam-channel", + "dpi", + "gtk", + "keyboard-types", + "objc2", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation", + "once_cell", + "png", + "serde", + "thiserror 2.0.18", + "windows-sys 0.60.2", +] + +[[package]] +name = "ndk" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4" +dependencies = [ + "bitflags 2.11.0", + "jni-sys", + "log", + "ndk-sys", + "num_enum", + "raw-window-handle", + "thiserror 1.0.69", +] + +[[package]] +name = "ndk-context" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" + +[[package]] +name = "ndk-sys" +version = "0.6.0+11769913" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee6cda3051665f1fb8d9e08fc35c96d5a244fb1be711a03b71118828afc9a873" +dependencies = [ + "jni-sys", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "no-std-compat" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c" + +[[package]] +name = "nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "nonzero_ext" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" + +[[package]] +name = "notify" +version 
= "6.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" +dependencies = [ + "bitflags 2.11.0", + "crossbeam-channel", + "filetime", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "log", + "mio 0.8.11", + "walkdir", + "windows-sys 0.48.0", +] + +[[package]] +name = "notify-debouncer-mini" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d40b221972a1fc5ef4d858a2f671fb34c75983eb385463dff3780eeff6a9d43" +dependencies = [ + "crossbeam-channel", + "log", + "notify", +] + +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "num-conv" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_enum" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" +dependencies = [ + "num_enum_derive", + "rustversion", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" +dependencies = [ + "proc-macro-crate 3.5.0", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "objc2" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3a12a8ed07aefc768292f076dc3ac8c48f3781c8f2d5851dd3d98950e8c5a89f" +dependencies = [ + "objc2-encode", + "objc2-exception-helper", +] + +[[package]] +name = "objc2-app-kit" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d49e936b501e5c5bf01fda3a9452ff86dc3ea98ad5f283e1455153142d97518c" +dependencies = [ + "bitflags 2.11.0", + "block2", + "objc2", + "objc2-core-foundation", + "objc2-foundation", +] + +[[package]] +name = "objc2-core-foundation" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" +dependencies = [ + "bitflags 2.11.0", + "dispatch2", + "objc2", +] + +[[package]] +name = "objc2-core-graphics" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e022c9d066895efa1345f8e33e584b9f958da2fd4cd116792e15e07e4720a807" +dependencies = [ + "bitflags 2.11.0", + "dispatch2", + "objc2", + "objc2-core-foundation", + "objc2-io-surface", +] + +[[package]] +name = "objc2-encode" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" + +[[package]] +name = "objc2-exception-helper" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7a1c5fbb72d7735b076bb47b578523aedc40f3c439bea6dfd595c089d79d98a" +dependencies = [ + "cc", +] + +[[package]] +name = "objc2-foundation" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272" +dependencies = [ + "bitflags 2.11.0", + "block2", + "objc2", + "objc2-core-foundation", +] + +[[package]] +name = "objc2-io-surface" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"180788110936d59bab6bd83b6060ffdfffb3b922ba1396b312ae795e1de9d81d" +dependencies = [ + "bitflags 2.11.0", + "objc2", + "objc2-core-foundation", +] + +[[package]] +name = "objc2-quartz-core" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c1358452b371bf9f104e21ec536d37a650eb10f7ee379fff67d2e08d537f1f" +dependencies = [ + "bitflags 2.11.0", + "objc2", + "objc2-core-foundation", + "objc2-foundation", +] + +[[package]] +name = "objc2-ui-kit" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d87d638e33c06f577498cbcc50491496a3ed4246998a7fbba7ccb98b1e7eab22" +dependencies = [ + "bitflags 2.11.0", + "objc2", + "objc2-core-foundation", + "objc2-foundation", +] + +[[package]] +name = "objc2-web-kit" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2e5aaab980c433cf470df9d7af96a7b46a9d892d521a2cbbb2f8a4c16751e7f" +dependencies = [ + "bitflags 2.11.0", + "block2", + "objc2", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "open" +version = "5.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43bb73a7fa3799b198970490a51174027ba0d4ec504b03cd08caf513d40024bc" +dependencies = [ + "dunce", + "is-wsl", + "libc", + "pathdiff", +] + +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] +name = "ordered-stream" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aa2b01e1d916879f73a53d01d1d6cee68adbb31d6d9177a8cfce093cced1d50" +dependencies = [ + "futures-core", + 
"pin-project-lite", +] + +[[package]] +name = "pango" +version = "0.18.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ca27ec1eb0457ab26f3036ea52229edbdb74dee1edd29063f5b9b010e7ebee4" +dependencies = [ + "gio", + "glib", + "libc", + "once_cell", + "pango-sys", +] + +[[package]] +name = "pango-sys" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436737e391a843e5933d6d9aa102cb126d501e815b83601365a948a518555dc5" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.18", + "smallvec", + "windows-link 0.2.1", +] + +[[package]] +name = "pathdiff" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "phf" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" +dependencies = [ + "phf_shared 0.8.0", +] + 
+[[package]] +name = "phf" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" +dependencies = [ + "phf_macros 0.10.0", + "phf_shared 0.10.0", + "proc-macro-hack", +] + +[[package]] +name = "phf" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" +dependencies = [ + "phf_macros 0.11.3", + "phf_shared 0.11.3", +] + +[[package]] +name = "phf" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" +dependencies = [ + "phf_shared 0.12.1", +] + +[[package]] +name = "phf" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" +dependencies = [ + "phf_macros 0.13.1", + "phf_shared 0.13.1", + "serde", +] + +[[package]] +name = "phf_codegen" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815" +dependencies = [ + "phf_generator 0.8.0", + "phf_shared 0.8.0", +] + +[[package]] +name = "phf_codegen" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" +dependencies = [ + "phf_generator 0.11.3", + "phf_shared 0.11.3", +] + +[[package]] +name = "phf_codegen" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49aa7f9d80421bca176ca8dbfebe668cc7a2684708594ec9f3c0db0805d5d6e1" +dependencies = [ + "phf_generator 0.13.1", + "phf_shared 0.13.1", +] + +[[package]] +name = "phf_generator" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" +dependencies = [ + "phf_shared 0.8.0", + "rand 0.7.3", +] + +[[package]] +name = "phf_generator" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +dependencies = [ + "phf_shared 0.10.0", + "rand 0.8.5", +] + +[[package]] +name = "phf_generator" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" +dependencies = [ + "phf_shared 0.11.3", + "rand 0.8.5", +] + +[[package]] +name = "phf_generator" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737" +dependencies = [ + "fastrand", + "phf_shared 0.13.1", +] + +[[package]] +name = "phf_macros" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58fdf3184dd560f160dd73922bea2d5cd6e8f064bf4b13110abd81b03697b4e0" +dependencies = [ + "phf_generator 0.10.0", + "phf_shared 0.10.0", + "proc-macro-hack", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "phf_macros" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" +dependencies = [ + "phf_generator 0.11.3", + "phf_shared 0.11.3", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "phf_macros" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef" +dependencies = [ + "phf_generator 0.13.1", + "phf_shared 0.13.1", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "phf_shared" +version = "0.8.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" +dependencies = [ + "siphasher 0.3.11", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher 0.3.11", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher 1.0.2", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher 1.0.2", +] + +[[package]] +name = "phf_shared" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" +dependencies = [ + "siphasher 1.0.2", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "piper" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c835479a4443ded371d6c535cbfd8d31ad92c5d23ae9770a61bc155e4992a3c1" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "plain" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" + +[[package]] +name = "plist" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "740ebea15c5d1428f910cd1a5f52cebf8d25006245ed8ade92702f4943d91e07" +dependencies = [ + "base64 0.22.1", + "indexmap 2.13.0", + "quick-xml", + "serde", + "time", +] + +[[package]] +name = "png" +version = "0.17.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" +dependencies = [ + "bitflags 1.3.2", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide", +] + +[[package]] +name = "polling" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi", + "pin-project-lite", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "portable-atomic" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn 2.0.117", +] + +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit 0.19.15", +] + +[[package]] +name = "proc-macro-crate" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b00f26d3400549137f92511a46ac1cd8ce37cb5598a96d382381458b992a5d24" +dependencies = [ + "toml_datetime 0.6.3", + "toml_edit 0.20.2", +] + +[[package]] +name = "proc-macro-crate" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e67ba7e9b2b56446f1d419b1d807906278ffa1a658a8a5d8a39dcb1f5a78614f" +dependencies = [ + "toml_edit 0.25.4+spec-1.1.0", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = 
"proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quick-xml" +version = "0.38.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66c2058c55a409d601666cffe35f04333cf1013010882cec174a7467cd4e21c" +dependencies = [ + "memchr", +] + +[[package]] +name = "quinn" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2", + "thiserror 2.0.18", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "434b42fec591c96ef50e21e886936e66d3cc3f737104fdb9b737c40ffb94c098" +dependencies = [ + "bytes", + "getrandom 0.3.4", + "lru-slab", + "rand 0.9.4", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.18", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.60.2", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" 
+dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", + "rand_pcg", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44c5af06bb1b7d3216d91932aed5265164bf384dc89cd6ba05cf59a35f5f76ea" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" 
+dependencies = [ + "ppv-lite86", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.17", +] + +[[package]] +name = "rand_core" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" +dependencies = [ + "getrandom 0.3.4", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_pcg" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "raw-window-handle" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539" + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags 2.11.0", +] + +[[package]] +name = "redox_syscall" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f450ad9c3b1da563fb6948a8e0fb0fb9269711c9c73d9ea1de5058c79c8d643a" +dependencies = [ + "bitflags 2.11.0", +] + +[[package]] +name = "redox_users" 
+version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" +dependencies = [ + "getrandom 0.2.17", + "libredox", + "thiserror 2.0.18", +] + +[[package]] +name = "ref-cast" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "regex" +version = "1.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" + +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-core", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-pki-types", + "serde", + 
"serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", +] + +[[package]] +name = "reqwest" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "serde", + "serde_json", + "sync_wrapper", + "tokio", + "tokio-util", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", +] + +[[package]] +name = "rfd" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a15ad77d9e70a92437d8f74c35d99b4e4691128df018833e99f90bcd36152672" +dependencies = [ + "block2", + "dispatch2", + "glib-sys", + "gobject-sys", + "gtk-sys", + "js-sys", + "log", + "objc2", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation", + "raw-window-handle", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "windows-sys 0.60.2", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.17", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" +dependencies = [ + "bitflags 2.11.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls" +version = "0.23.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f9466fb2c14ea04357e91413efb882e2a6d4a406e625449bc0a5d360d53a21" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +dependencies = [ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20a6af516fea4b20eccceaf166e8aa666ac996208e8a644ce3ef5aa783bc7cd4" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schemars" +version = "0.8.22" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" +dependencies = [ + "dyn-clone", + "indexmap 1.9.3", + "schemars_derive", + "serde", + "serde_json", + "url", + "uuid", +] + +[[package]] +name = "schemars" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "0.8.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.117", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "selectors" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c37578180969d00692904465fb7f6b3d50b9a2b952b87c23d0e2e5cb5013416" +dependencies = [ + "bitflags 1.3.2", + "cssparser 0.29.6", + "derive_more 0.99.20", + "fxhash", + "log", + "phf 0.8.0", + "phf_codegen 0.8.0", + "precomputed-hash", + "servo_arc 0.2.0", + "smallvec", +] + +[[package]] +name = "selectors" +version = "0.35.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fdfed56cd634f04fe8b9ddf947ae3dc493483e819593d2ba17df9ad05db8b2" +dependencies = [ + "bitflags 2.11.0", + "cssparser 0.36.0", + "derive_more 2.1.1", + "log", + "new_debug_unreachable", + "phf 0.13.1", + 
"phf_codegen 0.13.1", + "precomputed-hash", + "rustc-hash", + "servo_arc 0.4.3", + "smallvec", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde-untagged" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9faf48a4a2d2693be24c6289dbe26552776eb7737074e6722891fadbe6c5058" +dependencies = [ + "erased-serde", + "serde", + "serde_core", + "typeid", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_path_to_error" 
+version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", +] + +[[package]] +name = "serde_repr" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_spanned" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" +dependencies = [ + "serde_core", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "381b283ce7bc6b476d903296fb59d0d36633652b633b27f64db4fb46dcbfc3b9" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.13.0", + "schemars 0.9.0", + "schemars 1.2.1", + "serde_core", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6d4e30573c8cb306ed6ab1dca8423eec9a463ea0e155f45399455e0368b27e0" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "serialize-to-javascript" +version = 
"0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04f3666a07a197cdb77cdf306c32be9b7f598d7060d50cfd4d5aa04bfd92f6c5" +dependencies = [ + "serde", + "serde_json", + "serialize-to-javascript-impl", +] + +[[package]] +name = "serialize-to-javascript-impl" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "772ee033c0916d670af7860b6e1ef7d658a4629a6d0b4c8c3e67f09b3765b75d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "servo_arc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52aa42f8fdf0fed91e5ce7f23d8138441002fa31dca008acf47e6fd4721f741" +dependencies = [ + "nodrop", + "stable_deref_trait", +] + +[[package]] +name = "servo_arc" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "170fb83ab34de17dc69aa7c67482b22218ddb85da56546f9bd6b929e32a05930" +dependencies = [ + "stable_deref_trait", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = 
"simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "siphasher" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "softbuffer" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aac18da81ebbf05109ab275b157c22a653bb3c12cf884450179942f81bcbf6c3" +dependencies = [ + "bytemuck", + "js-sys", + "ndk", + "objc2", + "objc2-core-foundation", + "objc2-core-graphics", + "objc2-foundation", + "objc2-quartz-core", + "raw-window-handle", + "redox_syscall 0.5.18", + "tracing", + "wasm-bindgen", + "web-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "soup3" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "471f924a40f31251afc77450e781cb26d55c0b650842efafc9c6cbd2f7cc4f9f" +dependencies = [ + "futures-channel", + "gio", + "glib", + "libc", + "soup3-sys", +] + 
+[[package]] +name = "soup3-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ebe8950a680a12f24f15ebe1bf70db7af98ad242d9db43596ad3108aab86c27" +dependencies = [ + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "spinning_top" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d96d2d1d716fb500937168cc09353ffdc7a012be8475ac7308e1bdf0e3923300" +dependencies = [ + "lock_api", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "streaming-data-loader" +version = "2.0.0-beta.5" +dependencies = [ + "axum", + "chrono", + "chrono-tz", + "csv", + "fs2", + "governor", + "notify", + "notify-debouncer-mini", + "rand 0.8.5", + "reqwest 0.12.28", + "serde", + "serde_json", + "tauri", + "tauri-build", + "tauri-plugin-dialog", + "tauri-plugin-opener", + "tokio", + "tokio-stream", + "tower-http", + "tracing", + "tracing-appender", + "tracing-subscriber", + "windows", + "windows-service", +] + +[[package]] +name = "string_cache" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" +dependencies = [ + "new_debug_unreachable", + "parking_lot", + "phf_shared 0.11.3", + "precomputed-hash", + "serde", +] + +[[package]] +name = "string_cache" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a18596f8c785a729f2819c0f6a7eae6ebeebdfffbfe4214ae6b087f690e31901" +dependencies = [ + "new_debug_unreachable", + "parking_lot", + "phf_shared 0.13.1", + "precomputed-hash", +] + +[[package]] +name = "string_cache_codegen" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0" +dependencies = [ + "phf_generator 0.11.3", + "phf_shared 0.11.3", + "proc-macro2", + "quote", +] + +[[package]] +name = "string_cache_codegen" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "585635e46db231059f76c5849798146164652513eb9e8ab2685939dd90f29b69" +dependencies = [ + "phf_generator 0.13.1", + "phf_shared 0.13.1", + "proc-macro2", + "quote", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "swift-rs" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4057c98e2e852d51fdcfca832aac7b571f6b351ad159f9eda5db1655f8d0c4d7" +dependencies = [ + "base64 0.21.7", + "serde", + "serde_json", +] + +[[package]] +name = "symlink" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7973cce6668464ea31f176d85b13c7ab3bba2cb3b77a2ed26abd7801688010a" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "system-deps" +version = "6.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e535eb8dded36d55ec13eddacd30dec501792ff23a0b1682c38601b8cf2349" +dependencies = [ + "cfg-expr", + "heck 0.5.0", + "pkg-config", + "toml 0.8.2", + "version-compare", +] + +[[package]] +name = "tao" +version = "0.34.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d52c379e63da659a483a958110bbde891695a0ecb53e48cc7786d5eda7bb" +dependencies = [ + "bitflags 2.11.0", + "block2", + "core-foundation", + "core-graphics", + "crossbeam-channel", + "dispatch2", + "dlopen2", + "dpi", + "gdkwayland-sys", + "gdkx11-sys", + "gtk", + "jni", + "libc", + "log", + "ndk", + "ndk-context", + "ndk-sys", + "objc2", + "objc2-app-kit", + "objc2-foundation", + "once_cell", + "parking_lot", + "raw-window-handle", + "tao-macros", + "unicode-segmentation", + "url", + "windows", + "windows-core 0.61.2", + "windows-version", + "x11-dl", +] + +[[package]] +name = "tao-macros" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4e16beb8b2ac17db28eab8bca40e62dbfbb34c0fcdc6d9826b11b7b5d047dfd" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "target-lexicon" +version = "0.12.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" + +[[package]] +name = "tauri" +version = "2.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"da77cc00fb9028caf5b5d4650f75e31f1ef3693459dfca7f7e506d1ecef0ba2d" +dependencies = [ + "anyhow", + "bytes", + "cookie", + "dirs", + "dunce", + "embed_plist", + "getrandom 0.3.4", + "glob", + "gtk", + "heck 0.5.0", + "http", + "jni", + "libc", + "log", + "mime", + "muda", + "objc2", + "objc2-app-kit", + "objc2-foundation", + "objc2-ui-kit", + "objc2-web-kit", + "percent-encoding", + "plist", + "raw-window-handle", + "reqwest 0.13.2", + "serde", + "serde_json", + "serde_repr", + "serialize-to-javascript", + "swift-rs", + "tauri-build", + "tauri-macros", + "tauri-runtime", + "tauri-runtime-wry", + "tauri-utils", + "thiserror 2.0.18", + "tokio", + "tray-icon", + "url", + "webkit2gtk", + "webview2-com", + "window-vibrancy", + "windows", +] + +[[package]] +name = "tauri-build" +version = "2.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bbc990d1dbf57a8e1c7fa2327f2a614d8b757805603c1b9ba5c81bade09fd4d" +dependencies = [ + "anyhow", + "cargo_toml", + "dirs", + "glob", + "heck 0.5.0", + "json-patch", + "schemars 0.8.22", + "semver", + "serde", + "serde_json", + "tauri-utils", + "tauri-winres", + "toml 0.9.12+spec-1.1.0", + "walkdir", +] + +[[package]] +name = "tauri-codegen" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4a24476afd977c5d5d169f72425868613d82747916dd29e0a357c84c4bd6d29" +dependencies = [ + "base64 0.22.1", + "brotli", + "ico", + "json-patch", + "plist", + "png", + "proc-macro2", + "quote", + "semver", + "serde", + "serde_json", + "sha2", + "syn 2.0.117", + "tauri-utils", + "thiserror 2.0.18", + "time", + "url", + "uuid", + "walkdir", +] + +[[package]] +name = "tauri-macros" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d39b349a98dadaffebb73f0a40dcd1f23c999211e5a2e744403db384d0c33de7" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.117", + "tauri-codegen", + "tauri-utils", +] + +[[package]] +name 
= "tauri-plugin" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddde7d51c907b940fb573006cdda9a642d6a7c8153657e88f8a5c3c9290cd4aa" +dependencies = [ + "anyhow", + "glob", + "plist", + "schemars 0.8.22", + "serde", + "serde_json", + "tauri-utils", + "toml 0.9.12+spec-1.1.0", + "walkdir", +] + +[[package]] +name = "tauri-plugin-dialog" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9204b425d9be8d12aa60c2a83a289cf7d1caae40f57f336ed1155b3a5c0e359b" +dependencies = [ + "log", + "raw-window-handle", + "rfd", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "tauri-plugin-fs", + "thiserror 2.0.18", + "url", +] + +[[package]] +name = "tauri-plugin-fs" +version = "2.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed390cc669f937afeb8b28032ce837bac8ea023d975a2e207375ec05afaf1804" +dependencies = [ + "anyhow", + "dunce", + "glob", + "percent-encoding", + "schemars 0.8.22", + "serde", + "serde_json", + "serde_repr", + "tauri", + "tauri-plugin", + "tauri-utils", + "thiserror 2.0.18", + "toml 0.9.12+spec-1.1.0", + "url", +] + +[[package]] +name = "tauri-plugin-opener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc624469b06f59f5a29f874bbc61a2ed737c0f9c23ef09855a292c389c42e83f" +dependencies = [ + "dunce", + "glob", + "objc2-app-kit", + "objc2-foundation", + "open", + "schemars 0.8.22", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "thiserror 2.0.18", + "url", + "windows", + "zbus", +] + +[[package]] +name = "tauri-runtime" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2826d79a3297ed08cd6ea7f412644ef58e32969504bc4fbd8d7dbeabc4445ea2" +dependencies = [ + "cookie", + "dpi", + "gtk", + "http", + "jni", + "objc2", + "objc2-ui-kit", + "objc2-web-kit", + "raw-window-handle", + "serde", + "serde_json", + "tauri-utils", + 
"thiserror 2.0.18", + "url", + "webkit2gtk", + "webview2-com", + "windows", +] + +[[package]] +name = "tauri-runtime-wry" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e11ea2e6f801d275fdd890d6c9603736012742a1c33b96d0db788c9cdebf7f9e" +dependencies = [ + "gtk", + "http", + "jni", + "log", + "objc2", + "objc2-app-kit", + "once_cell", + "percent-encoding", + "raw-window-handle", + "softbuffer", + "tao", + "tauri-runtime", + "tauri-utils", + "url", + "webkit2gtk", + "webview2-com", + "windows", + "wry", +] + +[[package]] +name = "tauri-utils" +version = "2.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "219a1f983a2af3653f75b5747f76733b0da7ff03069c7a41901a5eb3ace4557d" +dependencies = [ + "anyhow", + "brotli", + "cargo_metadata", + "ctor", + "dunce", + "glob", + "html5ever 0.29.1", + "http", + "infer", + "json-patch", + "kuchikiki", + "log", + "memchr", + "phf 0.11.3", + "proc-macro2", + "quote", + "regex", + "schemars 0.8.22", + "semver", + "serde", + "serde-untagged", + "serde_json", + "serde_with", + "swift-rs", + "thiserror 2.0.18", + "toml 0.9.12+spec-1.1.0", + "url", + "urlpattern", + "uuid", + "walkdir", +] + +[[package]] +name = "tauri-winres" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1087b111fe2b005e42dbdc1990fc18593234238d47453b0c99b7de1c9ab2c1e0" +dependencies = [ + "dunce", + "embed-resource", + "toml 0.9.12+spec-1.1.0", +] + +[[package]] +name = "tempfile" +version = "3.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd" +dependencies = [ + "fastrand", + "getrandom 0.4.2", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "tendril" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" +dependencies = [ + "futf", + "mac", + "utf-8", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" +dependencies = [ + "thiserror-impl 2.0.18", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "time" +version = "0.3.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde_core", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" + +[[package]] +name = "time-macros" +version = "0.2.27" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e61e67053d25a4e82c844e8424039d9745781b3fc4f32b8d55ed50f5f667ef3" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" +dependencies = [ + "bytes", + "libc", + "mio 1.1.1", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" +dependencies = [ + "futures-core", + 
"pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d" +dependencies = [ + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.3", + "toml_edit 0.20.2", +] + +[[package]] +name = "toml" +version = "0.9.12+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf92845e79fc2e2def6a5d828f0801e29a2f8acc037becc5ab08595c7d5e9863" +dependencies = [ + "indexmap 2.13.0", + "serde_core", + "serde_spanned 1.0.4", + "toml_datetime 0.7.5+spec-1.1.0", + "toml_parser", + "toml_writer", + "winnow 0.7.15", +] + +[[package]] +name = "toml_datetime" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.7.5+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_datetime" +version = "1.0.0+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32c2555c699578a4f59f0cc68e5116c8d7cabbd45e1409b989d4be085b53f13e" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_edit" +version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap 
2.13.0", + "toml_datetime 0.6.3", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338" +dependencies = [ + "indexmap 2.13.0", + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.3", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.25.4+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7193cbd0ce53dc966037f54351dbbcf0d5a642c7f0038c382ef9e677ce8c13f2" +dependencies = [ + "indexmap 2.13.0", + "toml_datetime 1.0.0+spec-1.1.0", + "toml_parser", + "winnow 0.7.15", +] + +[[package]] +name = "toml_parser" +version = "1.0.9+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" +dependencies = [ + "winnow 0.7.15", +] + +[[package]] +name = "toml_writer" +version = "1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" + +[[package]] +name = "tower" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags 2.11.0", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-appender" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "050686193eb999b4bb3bc2acfa891a13da00f79734704c4b8b4ef1a10b368a3c" +dependencies = [ + "crossbeam-channel", + "symlink", + "thiserror 2.0.18", + "time", + "tracing-subscriber", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319" +dependencies = [ + "nu-ansi-term", + 
"sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "tray-icon" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e85aa143ceb072062fc4d6356c1b520a51d636e7bc8e77ec94be3608e5e80c" +dependencies = [ + "crossbeam-channel", + "dirs", + "libappindicator", + "muda", + "objc2", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-core-graphics", + "objc2-foundation", + "once_cell", + "png", + "serde", + "thiserror 2.0.18", + "windows-sys 0.60.2", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typeid" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "uds_windows" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51b70b87d15e91f553711b40df3048faf27a7a04e01e0ddc0cf9309f0af7c2ca" +dependencies = [ + "memoffset", + "tempfile", + "windows-sys 0.61.2", +] + +[[package]] +name = "unic-char-property" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8c57a407d9b6fa02b4795eb81c5b6652060a15a7903ea981f3d723e6c0be221" +dependencies = [ + "unic-char-range", +] + +[[package]] +name = "unic-char-range" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0398022d5f700414f6b899e10b8348231abf9173fa93144cbc1a43b9793c1fbc" + +[[package]] +name = "unic-common" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc" + +[[package]] +name = "unic-ucd-ident" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e230a37c0381caa9219d67cf063aa3a375ffed5bf541a452db16e744bdab6987" +dependencies = [ + "unic-char-property", + "unic-char-range", + "unic-ucd-version", +] + +[[package]] +name = "unic-ucd-version" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96bd2f2237fe450fcd0a1d2f5f4e91711124f7857ba2e964247776ebeeb7b0c4" +dependencies = [ + "unic-common", +] + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", + "serde_derive", +] + +[[package]] +name = "urlpattern" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70acd30e3aa1450bc2eece896ce2ad0d178e9c079493819301573dae3c37ba6d" +dependencies = [ + "regex", + "serde", + "unic-ucd-ident", + "url", +] + 
+[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" +dependencies = [ + "getrandom 0.4.2", + "js-sys", + "serde_core", + "wasm-bindgen", +] + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "version-compare" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c2856837ef78f57382f06b2b8563a2f512f7185d732608fd9176cb3b8edf0e" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "vswhom" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be979b7f07507105799e854203b470ff7c78a1639e330a58f183b5fea574608b" +dependencies = [ + "libc", + "vswhom-sys", +] + +[[package]] +name = "vswhom-sys" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb067e4cbd1ff067d1df46c9194b5de0e98efd2810bbc95c5d5e5f25a3231150" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + 
"winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9c5522b3a28661442748e09d40924dfb9ca614b21c00d3fd135720e48b67db8" +dependencies = [ + "cfg-if", + "futures-util", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn 2.0.117", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap 2.13.0", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasm-streams" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1ec4f6517c9e11ae630e200b2b65d193279042e28edd4a2cda233e46670bbb" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.11.0", + "hashbrown 0.15.5", + "indexmap 2.13.0", + "semver", +] + +[[package]] +name = "web-sys" +version = "0.3.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"854ba17bb104abfb26ba36da9729addc7ce7f06f5c0f90f3c391f8461cca21f9" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web_atoms" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a9779e9f04d2ac1ce317aee707aa2f6b773afba7b931222bff6983843b1576" +dependencies = [ + "phf 0.13.1", + "phf_codegen 0.13.1", + "string_cache 0.9.0", + "string_cache_codegen 0.6.1", +] + +[[package]] +name = "webkit2gtk" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1027150013530fb2eaf806408df88461ae4815a45c541c8975e61d6f2fc4793" +dependencies = [ + "bitflags 1.3.2", + "cairo-rs", + "gdk", + "gdk-sys", + "gio", + "gio-sys", + "glib", + "glib-sys", + "gobject-sys", + "gtk", + "gtk-sys", + "javascriptcore-rs", + "libc", + "once_cell", + "soup3", + "webkit2gtk-sys", +] + +[[package]] +name = "webkit2gtk-sys" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "916a5f65c2ef0dfe12fff695960a2ec3d4565359fdbb2e9943c974e06c734ea5" +dependencies = [ + "bitflags 1.3.2", + "cairo-sys-rs", + "gdk-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "gtk-sys", + "javascriptcore-rs-sys", + "libc", + "pkg-config", + "soup3-sys", + "system-deps", +] + +[[package]] +name = "webpki-roots" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "webview2-com" +version = "0.38.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7130243a7a5b33c54a444e54842e6a9e133de08b5ad7b5861cd8ed9a6a5bc96a" 
+dependencies = [ + "webview2-com-macros", + "webview2-com-sys", + "windows", + "windows-core 0.61.2", + "windows-implement", + "windows-interface", +] + +[[package]] +name = "webview2-com-macros" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a921c1b6914c367b2b823cd4cde6f96beec77d30a939c8199bb377cf9b9b54" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "webview2-com-sys" +version = "0.38.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "381336cfffd772377d291702245447a5251a2ffa5bad679c99e61bc48bacbf9c" +dependencies = [ + "thiserror 2.0.18", + "windows", + "windows-core 0.61.2", +] + +[[package]] +name = "widestring" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "window-vibrancy" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9bec5a31f3f9362f2258fd0e9c9dd61a9ca432e7306cc78c444258f0dce9a9c" +dependencies = [ + "objc2", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation", + "raw-window-handle", + "windows-sys 0.59.0", + "windows-version", +] + +[[package]] +name = "windows" +version = "0.61.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" +dependencies = [ + "windows-collections", + "windows-core 0.61.2", + "windows-future", + "windows-link 0.1.3", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" +dependencies = [ + "windows-core 0.61.2", +] + +[[package]] +name = "windows-core" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link 0.1.3", + "windows-result 0.3.4", + "windows-strings 0.4.2", +] + +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link 0.2.1", + "windows-result 0.4.1", + "windows-strings 0.5.1", +] + +[[package]] +name = "windows-future" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" +dependencies = [ + "windows-core 0.61.2", + "windows-link 0.1.3", + "windows-threading", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-numerics" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" +dependencies = [ + "windows-core 0.61.2", + "windows-link 0.1.3", +] + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link 0.1.3", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link 0.2.1", +] + +[[package]] +name = "windows-service" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "193cae8e647981c35bc947fdd57ba7928b1fa0d4a79305f6dd2dc55221ac35ac" +dependencies = [ + "bitflags 2.11.0", + "widestring", + "windows-sys 0.59.0", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link 0.1.3", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link 0.2.1", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link 0.2.1", +] + +[[package]] +name = "windows-targets" +version = 
"0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link 0.2.1", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows-threading" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" +dependencies = [ + "windows-link 0.1.3", +] + +[[package]] +name = "windows-version" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4060a1da109b9d0326b7262c8e12c84df67cc0dbc9e33cf49e01ccc2eb63631" +dependencies = [ + "windows-link 0.2.1", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = 
"0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "winnow" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df79d97927682d2fd8adb29682d1140b343be4ac0f08fd68b7765d9c059d3945" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.55.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb5a765337c50e9ec252c2069be9bf91c7df47afb103b642ba3a53bf8101be97" +dependencies = [ + "cfg-if", + "windows-sys 0.59.0", +] + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck 0.5.0", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck 0.5.0", + "indexmap 2.13.0", + "prettyplease", + "syn 2.0.117", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn 2.0.117", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.11.0", + "indexmap 2.13.0", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap 2.13.0", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "wry" +version = "0.54.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a24eda84b5d488f99344e54b807138896cee8df0b2d16c793f1f6b80e6d8df1f" +dependencies = [ + "base64 0.22.1", + "block2", + "cookie", + "crossbeam-channel", + "dirs", + "dom_query", + "dpi", + "dunce", + "gdkx11", + "gtk", + "http", + 
"javascriptcore-rs", + "jni", + "libc", + "ndk", + "objc2", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation", + "objc2-ui-kit", + "objc2-web-kit", + "once_cell", + "percent-encoding", + "raw-window-handle", + "sha2", + "soup3", + "tao-macros", + "thiserror 2.0.18", + "url", + "webkit2gtk", + "webkit2gtk-sys", + "webview2-com", + "windows", + "windows-core 0.61.2", + "windows-version", + "x11-dl", +] + +[[package]] +name = "x11" +version = "2.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "502da5464ccd04011667b11c435cb992822c2c0dbde1770c988480d312a0db2e" +dependencies = [ + "libc", + "pkg-config", +] + +[[package]] +name = "x11-dl" +version = "2.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38735924fedd5314a6e548792904ed8c6de6636285cb9fec04d5b1db85c1516f" +dependencies = [ + "libc", + "once_cell", + "pkg-config", +] + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", + "synstructure", +] + +[[package]] +name = "zbus" +version = "5.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca82f95dbd3943a40a53cfded6c2d0a2ca26192011846a1810c4256ef92c60bc" +dependencies = [ + "async-broadcast", + "async-executor", + "async-io", + "async-lock", + "async-process", + "async-recursion", + "async-task", + "async-trait", + "blocking", + "enumflags2", + "event-listener", + "futures-core", + "futures-lite", + "hex", + "libc", + "ordered-stream", + "rustix", + "serde", + "serde_repr", + 
"tracing", + "uds_windows", + "uuid", + "windows-sys 0.61.2", + "winnow 0.7.15", + "zbus_macros", + "zbus_names", + "zvariant", +] + +[[package]] +name = "zbus_macros" +version = "5.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897e79616e84aac4b2c46e9132a4f63b93105d54fe8c0e8f6bffc21fa8d49222" +dependencies = [ + "proc-macro-crate 3.5.0", + "proc-macro2", + "quote", + "syn 2.0.117", + "zbus_names", + "zvariant", + "zvariant_utils", +] + +[[package]] +name = "zbus_names" +version = "4.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffd8af6d5b78619bab301ff3c560a5bd22426150253db278f164d6cf3b72c50f" +dependencies = [ + "serde", + "winnow 0.7.15", + "zvariant", +] + +[[package]] +name = "zerocopy" +version = "0.8.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2578b716f8a7a858b7f02d5bd870c14bf4ddbbcf3a4c05414ba6503640505e3" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e6cc098ea4d3bd6246687de65af3f920c430e236bee1e3bf2e441463f08a02f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] 
+name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" + +[[package]] +name = "zvariant" +version = "5.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5708299b21903bbe348e94729f22c49c55d04720a004aa350f1f9c122fd2540b" +dependencies = [ + "endi", + "enumflags2", + "serde", + "winnow 0.7.15", + "zvariant_derive", + "zvariant_utils", +] + +[[package]] +name = "zvariant_derive" +version = "5.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b59b012ebe9c46656f9cc08d8da8b4c726510aef12559da3e5f1bf72780752c" +dependencies = [ + "proc-macro-crate 3.5.0", + "proc-macro2", + "quote", + "syn 2.0.117", + "zvariant_utils", +] + +[[package]] +name = "zvariant_utils" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f75c23a64ef8f40f13a6989991e643554d9bef1d682a281160cf0c1bc389c5e9" +dependencies = [ + "proc-macro2", + "quote", + "serde", + "syn 2.0.117", + "winnow 0.7.15", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..4ff78f9 --- 
/dev/null +++ b/Cargo.toml @@ -0,0 +1,50 @@ +[package] +name = "streaming-data-loader" +version = "2.0.0-beta.5" +description = "Desktop app for loading streaming CSV observations into HydroServer" +authors = ["Daniel Slaugh", "Ken Lippold"] +edition = "2021" +default-run = "streaming-data-loader" +readme = "README.md" +repository = "https://github.com/hydroserver2/streaming-data-loader" +homepage = "https://github.com/hydroserver2/streaming-data-loader" +publish = false + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[lib] +# The `_lib` suffix may seem redundant but it is necessary +# to make the lib name unique and wouldn't conflict with the bin name. +# This seems to be only an issue on Windows, see https://github.com/rust-lang/cargo/issues/8519 +name = "streaming_data_loader_lib" +crate-type = ["staticlib", "cdylib", "rlib"] + +[build-dependencies] +tauri-build = { version = "2", features = [] } + +[dependencies] +tauri = { version = "2", features = [] } +tauri-plugin-dialog = "2" +tauri-plugin-opener = "2" +axum = { version = "0.7", features = ["macros"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] } +chrono = { version = "0.4", features = ["serde"] } +chrono-tz = "0.10" +csv = "1" +tokio = { version = "1", features = ["macros", "rt-multi-thread", "sync", "time", "signal"] } +tokio-stream = { version = "0.1", features = ["sync"] } +notify = "6" +notify-debouncer-mini = "0.4.1" +governor = { version = "0.7", default-features = false, features = ["std"] } +fs2 = "0.4" +tracing = "0.1" +tracing-appender = "0.2" +tracing-subscriber = { version = "0.3", features = ["fmt"] } +tower-http = { version = "0.6", features = ["cors"] } +rand = "0.8" + +[target.'cfg(windows)'.dependencies] +windows-service = "0.8" +windows = { version = "0.61", features = ["Win32_Graphics_Dwm", "Win32_Foundation"] } diff 
--git a/Dockerfile b/Dockerfile deleted file mode 100644 index 768672e..0000000 --- a/Dockerfile +++ /dev/null @@ -1,26 +0,0 @@ -FROM python:3.9-slim - -# Set the working directory in the container -WORKDIR /app - -# Copy only requirements first to leverage Docker cache -COPY requirements.txt ./ -RUN pip install --no-cache-dir -r requirements.txt - -# Copy the rest of the application code -COPY . ./ - -# Set environment variables for defaults (optional ones) -ENV HYDRO_SERVICE_URL=https://playground.hydroserver.org -ENV LOG_FILE=/app/logs/hydroloader.log - -# Expose the logs directory (useful for debugging in container setups) -VOLUME ["/app/logs"] - -# Ensure the logs directory exists -RUN mkdir -p /app/logs - -# Define the default command to run the application -CMD [ \ - "python", "main.py" \ -] diff --git a/LICENSE b/LICENSE deleted file mode 100644 index e854b79..0000000 --- a/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2023, Utah State University - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/README.md b/README.md index 2a46973..8fbc1bd 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,91 @@ -# HydroServer Streaming Data Loader -A desktop application for loading streaming data to a HydroServer instance +# Streaming Data Loader -## Funding and Acknowledgements +The Streaming Data Loader (SDL) is a desktop app for getting time-series data +from CSV files into HydroServer. -Funding for this project was provided by the National Oceanic & Atmospheric Administration (NOAA), awarded to the Cooperative Institute for Research to Operations in Hydrology (CIROH) through the NOAA Cooperative Agreement with The University of Alabama (NA22NWS4320003). Utah State University is a founding member of CIROH and receives funding under subaward from the University of Alabama. Additional funding and support have been provided by the State of Utah Division of Water Rights, the World Meorological Organization, and the Utah Water Research laboratory at Utah State University. +It is built for workflows where a logger, instrument, or upstream process keeps +writing rows to a local CSV file. SDL lets a user connect +to a HydroServer workspace, inspect the CSV structure, configure how timestamps +should be interpreted, and map value columns to HydroServer datastreams. Once a +data source is enabled, SDL watches the file for changes and pushes new +observations into the selected datastreams. + +The desktop UI and the watcher/uploader runtime are now split. 
The UI talks to +the headless daemon over localhost HTTP, and the daemon is the only process +that reads and writes persisted config/workspace/log state. + +## What SDL does + +- Connects to a HydroServer instance with either an API key or username and + password. +- Loads datastreams from the selected workspace so CSV columns can be mapped to + the right targets. +- Previews CSV files before setup so the user can confirm headers, delimiter, + and where data begins. +- Parses timestamps from ISO 8601 or custom formats, with timezone + handling when needed. +- Tracks upload progress so only new rows are sent after the initial load. +- Batches uploads, retries transient failures, and records recent job logs and + status. +- Supports filesystem-triggered updates with a lightweight, always-on operating system task so you can set up the job orchestration once and not worry about it again. + +## Daemon layout + +- `streaming-data-loader` is the Tauri desktop UI. +- `streaming-data-loader-daemon` is the headless Rust service. +- On macOS, both default to `/Users/Shared/Streaming Data Loader` for shared + config, workspace, and log files unless `SDL_CONFIG_DIR` is set. +- The daemon publishes its active localhost API endpoint and bearer token to + `/daemon.endpoint.json`. + +## Daemon API + +The runtime boundary is intentionally small: + +- Commands go in with `POST /api/commands/...`. +- Status comes out through `GET /api/status` as Server-Sent Events. +- The Tauri app only uses direct commands for OS-specific concerns like service + install/restart/uninstall and local file-manager actions. + +Key command routes: + +- `POST /api/commands/bootstrap` +- `POST /api/commands/update-server-config` +- `POST /api/commands/create-job` +- `POST /api/commands/update-job` +- `POST /api/commands/delete-job` +- `POST /api/commands/run-job-now` + +The status stream sends full app snapshots containing health, config, and job +runtime summaries. 
The frontend treats the daemon as its backend and no longer +mutates shared state files directly. + +## macOS launchd + +The repository includes a launchd template at +`deploy/macos/com.hydroserver.sdl.plist`. + +Example install flow: + +```sh +sudo cp deploy/macos/com.hydroserver.sdl.plist /Library/LaunchDaemons/com.hydroserver.sdl.plist +sudo launchctl bootstrap system /Library/LaunchDaemons/com.hydroserver.sdl.plist +``` + +## Typical workflow + +1. Connect SDL to a HydroServer workspace. +2. Choose a CSV file that is being updated over time. +3. Review the preview and configure file parsing and timestamp rules. +4. Map CSV value columns to HydroServer datastreams. +5. Enable the data source and let SDL keep the datastreams current as new rows appear. + +## Local development + +- `npm run dev` runs the frontend with Vite. +- `npm run tauri dev` runs the desktop app with the frontend dev server. +- `npm run tauri dev` also sets `SDL_CONFIG_DIR=.sdl-dev-data` so the desktop + app and daemon use a repo-local config directory during development instead of + the shared system directory. +- `cargo run --bin streaming-data-loader-daemon` runs the headless daemon. +- `npm test` runs the frontend test suite. 
diff --git a/build.rs b/build.rs new file mode 100644 index 0000000..82285bd --- /dev/null +++ b/build.rs @@ -0,0 +1,14 @@ +fn main() { + println!("cargo:rerun-if-changed=tauri.conf.json"); + println!("cargo:rerun-if-changed=icons"); + println!("cargo:rerun-if-changed=icons/icon-color.svg"); + println!("cargo:rerun-if-changed=icons/icon-color-thick.svg"); + println!("cargo:rerun-if-changed=icons/icon.icns"); + println!("cargo:rerun-if-changed=icons/icon.ico"); + println!("cargo:rerun-if-changed=icons/icon.png"); + println!("cargo:rerun-if-changed=icons/32x32.png"); + println!("cargo:rerun-if-changed=icons/128x128.png"); + println!("cargo:rerun-if-changed=icons/128x128@2x.png"); + + tauri_build::build() +} diff --git a/capabilities/default.json b/capabilities/default.json new file mode 100644 index 0000000..36716dc --- /dev/null +++ b/capabilities/default.json @@ -0,0 +1,11 @@ +{ + "$schema": "../gen/schemas/desktop-schema.json", + "identifier": "default", + "description": "Default capabilities", + "windows": ["main"], + "permissions": [ + "core:default", + "dialog:default", + "opener:default" + ] +} diff --git a/deploy/macos/com.hydroserver.sdl.plist b/deploy/macos/com.hydroserver.sdl.plist new file mode 100644 index 0000000..b717a0f --- /dev/null +++ b/deploy/macos/com.hydroserver.sdl.plist @@ -0,0 +1,29 @@ + + + + + Label + com.hydroserver.sdl + + ProgramArguments + + /Applications/Streaming Data Loader.app/Contents/MacOS/streaming-data-loader + --service + + + RunAtLoad + + + KeepAlive + + + WorkingDirectory + /Users/Shared/Streaming Data Loader + + StandardOutPath + /Users/Shared/Streaming Data Loader/logs/daemon.stdout.log + + StandardErrorPath + /Users/Shared/Streaming Data Loader/logs/daemon.stderr.log + + diff --git a/frontend/App.vue b/frontend/App.vue new file mode 100644 index 0000000..6033a98 --- /dev/null +++ b/frontend/App.vue @@ -0,0 +1,34 @@ + + + diff --git a/frontend/api/http.ts b/frontend/api/http.ts new file mode 100644 index 0000000..e559b35 
--- /dev/null +++ b/frontend/api/http.ts @@ -0,0 +1,34 @@ +import { apiBaseUrl } from "../config" +import { formatErrorDetail } from "./runtime" + +function buildApiUrl(path: string): string { + return `${apiBaseUrl.replace(/\/$/, "")}${path}` +} + +export async function requestJson(path: string, init?: RequestInit): Promise { + const response = await fetch(buildApiUrl(path), { + headers: { + "Content-Type": "application/json", + ...(init?.headers ?? {}), + }, + ...init, + }) + + if (!response.ok) { + let detail = `Request failed with status ${response.status}` + + try { + const payload = (await response.json()) as { detail?: unknown } + const formattedDetail = formatErrorDetail(payload.detail) + if (formattedDetail) { + detail = formattedDetail + } + } catch { + // Ignore JSON parsing errors for non-JSON error responses. + } + + throw new Error(detail) + } + + return (await response.json()) as T +} diff --git a/frontend/api/hydroserver/client.ts b/frontend/api/hydroserver/client.ts new file mode 100644 index 0000000..2dede04 --- /dev/null +++ b/frontend/api/hydroserver/client.ts @@ -0,0 +1,231 @@ +import { requestJson } from '../http' +import { invokeCommand, isTauriRuntime } from '../runtime' +import { daemonCommand, subscribeToDaemonStatus } from './daemonTransport' +import type { + ActionResponse, + AppBootstrapResponse, + AppConfig, + ConnectionTestResponse, + CsvPreviewResponse, + DaemonStatusSnapshot, + DatastreamDetail, + DatastreamSummary, + HealthResponse, + JobDetail, + JobLogsResponse, + JobStatusSummary, + JobUpsertRequest, + ServerConfig, + ServerUrlValidationResponse, +} from './types' + +export { subscribeToDaemonStatus } +export type { DaemonStatusSnapshot } + +export function getBootstrap(): Promise { + if (isTauriRuntime()) { + return daemonCommand('bootstrap') + } + + return Promise.all([getHealth(), getConfig(), getJobs()]).then( + ([health, config, jobs]) => ({ + health, + config, + jobs, + }) + ) +} + +export function getHealth(): Promise { + 
if (isTauriRuntime()) { + return daemonCommand('get-health') + } + return requestJson('/health') +} + +export function getConfig(): Promise { + if (isTauriRuntime()) { + return daemonCommand('get-config') + } + return requestJson('/config') +} + +export function updateServerConfig(server: ServerConfig): Promise { + if (isTauriRuntime()) { + return daemonCommand('update-server-config', { server }) + } + return requestJson('/config/server', { + method: 'PUT', + body: JSON.stringify(server), + }) +} + +export function clearServerConfig(): Promise { + if (isTauriRuntime()) { + return daemonCommand('clear-server-config') + } + return requestJson('/config/server', { + method: 'DELETE', + }) +} + +export function testConnection( + server: ServerConfig +): Promise { + if (isTauriRuntime()) { + return daemonCommand('test-connection', { server }) + } + return requestJson('/connection/test', { + method: 'POST', + body: JSON.stringify(server), + }) +} + +export function validateServerUrl( + url: string +): Promise { + if (isTauriRuntime()) { + return daemonCommand('validate-server-url', { + url, + }) + } + const params = new URLSearchParams({ url }) + return requestJson( + `/connection/validate-url?${params.toString()}` + ) +} + +export function getCsvPreview( + path: string, + rows = 100 +): Promise { + if (isTauriRuntime()) { + return daemonCommand('get-csv-preview', { path, rows }) + } + const params = new URLSearchParams({ + path, + rows: String(rows), + }) + return requestJson(`/csv/preview?${params.toString()}`) +} + +export function revealFileInFolder(path: string): Promise { + if (isTauriRuntime()) { + return invokeCommand('reveal_file_in_folder', { path }) + } + + return Promise.reject( + new Error( + 'Opening the local file system is only available in the desktop app.' 
+ ) + ) +} + +export function getDatastreams(): Promise { + if (isTauriRuntime()) { + return daemonCommand('get-datastreams') + } + return requestJson('/datastreams') +} + +export function getDatastreamDetail( + datastreamId: string +): Promise { + if (isTauriRuntime()) { + return daemonCommand('get-datastream-detail', { + datastream_id: datastreamId, + }) + } + + return Promise.reject( + new Error( + 'Expanded datastream metadata is only available in the desktop app.' + ) + ) +} + +export function createJob(payload: JobUpsertRequest): Promise { + if (isTauriRuntime()) { + return daemonCommand('create-job', { payload }) + } + return requestJson('/jobs', { + method: 'POST', + body: JSON.stringify(payload), + }) +} + +export function updateJob( + jobId: string, + payload: JobUpsertRequest +): Promise { + if (isTauriRuntime()) { + return daemonCommand('update-job', { job_id: jobId, payload }) + } + return requestJson(`/jobs/${encodeURIComponent(jobId)}`, { + method: 'PUT', + body: JSON.stringify(payload), + }) +} + +export function getJob(jobId: string): Promise { + if (isTauriRuntime()) { + return daemonCommand('get-job', { job_id: jobId }) + } + return requestJson(`/jobs/${encodeURIComponent(jobId)}`) +} + +export function getJobLogs(jobId: string): Promise { + if (isTauriRuntime()) { + return daemonCommand('get-job-logs', { job_id: jobId }) + } + return requestJson(`/jobs/${encodeURIComponent(jobId)}/logs`) +} + +export function getJobs(): Promise { + if (isTauriRuntime()) { + return daemonCommand('get-jobs') + } + return requestJson('/jobs') +} + +export function deleteJob(jobId: string): Promise { + if (isTauriRuntime()) { + return daemonCommand('delete-job', { job_id: jobId }) + } + return requestJson(`/jobs/${encodeURIComponent(jobId)}`, { + method: 'DELETE', + }) +} + +export function runJobNow(jobId: string): Promise { + if (isTauriRuntime()) { + return daemonCommand('run-job-now', { job_id: jobId }) + } + return 
requestJson(`/jobs/${encodeURIComponent(jobId)}/run`, { + method: 'POST', + }) +} + +export function enableJob(jobId: string): Promise { + if (isTauriRuntime()) { + return daemonCommand('enable-job', { job_id: jobId }) + } + return requestJson( + `/jobs/${encodeURIComponent(jobId)}/enable`, + { + method: 'POST', + } + ) +} + +export function disableJob(jobId: string): Promise { + if (isTauriRuntime()) { + return daemonCommand('disable-job', { job_id: jobId }) + } + return requestJson( + `/jobs/${encodeURIComponent(jobId)}/disable`, + { + method: 'POST', + } + ) +} diff --git a/frontend/api/hydroserver/daemonTransport.ts b/frontend/api/hydroserver/daemonTransport.ts new file mode 100644 index 0000000..79c4a22 --- /dev/null +++ b/frontend/api/hydroserver/daemonTransport.ts @@ -0,0 +1,143 @@ +import type { DaemonConnectionInfo, DaemonStatusSnapshot } from './types' +import { + formatErrorDetail, + invokeCommand, + isTauriRuntime, + normalizeError, +} from '../runtime' + +let daemonConnectionPromise: Promise | null = null + +function resetDaemonConnection(): void { + daemonConnectionPromise = null +} + +export function disconnectDaemonConnection(): void { + resetDaemonConnection() +} + +export async function getDaemonConnection(): Promise { + if (!isTauriRuntime()) { + throw new Error( + 'The daemon connection is only available in the desktop app.' 
+ ) + } + + if (!daemonConnectionPromise) { + daemonConnectionPromise = invokeCommand( + 'get_daemon_connection' + ).catch((error) => { + resetDaemonConnection() + throw error + }) + } + + return daemonConnectionPromise +} + +export async function daemonCommand( + command: string, + payload?: Record +): Promise { + const connection = await getDaemonConnection() + const baseUrl = connection.base_url.replace(/\/$/, '') + + try { + const response = await fetch(`${baseUrl}/api/commands/${command}`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${connection.token}`, + }, + body: JSON.stringify(payload ?? {}), + }) + + if (!response.ok) { + let detail = `Request failed with status ${response.status}` + + try { + const body = (await response.json()) as { detail?: unknown } + const formattedDetail = formatErrorDetail(body.detail) + if (formattedDetail) { + detail = formattedDetail + } + } catch { + // Ignore JSON parsing errors for non-JSON error responses. 
+ } + + throw new Error(detail) + } + + return (await response.json()) as T + } catch (error) { + resetDaemonConnection() + throw normalizeError(error) + } +} + +export function subscribeToDaemonStatus(handlers: { + onStatus: (snapshot: DaemonStatusSnapshot) => void + onError?: (error: Error) => void +}): () => void { + if (!isTauriRuntime()) { + return () => undefined + } + + let closed = false + let reconnectTimer: number | null = null + let eventSource: EventSource | null = null + + const connect = async () => { + try { + const connection = await getDaemonConnection() + if (closed) return + + const url = new URL( + `${connection.base_url.replace(/\/$/, '')}/api/status` + ) + url.searchParams.set('access_token', connection.token) + + eventSource = new EventSource(url.toString()) + eventSource.addEventListener('status', (event) => { + if (!(event instanceof MessageEvent)) return + try { + handlers.onStatus(JSON.parse(event.data) as DaemonStatusSnapshot) + } catch (error) { + handlers.onError?.(normalizeError(error)) + } + }) + eventSource.onerror = () => { + eventSource?.close() + eventSource = null + resetDaemonConnection() + handlers.onError?.( + new Error('Lost connection to the local daemon. 
import type { Timestamp } from "../../models/timestamp"

// ---- Connection & configuration -------------------------------------------

// Lifecycle of the configured HydroServer connection.
export type ConnectionState = "not_configured" | "configured" | "connected" | "error"
// Supported authentication mechanisms.
export type AuthType = "apikey" | "userpass"

// Target instance plus credentials, persisted in the daemon configuration.
export interface ServerConfig {
  auth_type: AuthType
  url: string
  api_key: string
  username: string
  password: string
  workspace_id: string
  workspace_name: string
}

// Complete persisted configuration (version field presumably guards migrations — confirm).
export interface AppConfig {
  version: number
  server: ServerConfig
  jobs: JobConfig[]
}

export interface ConnectionStatus {
  state: ConnectionState
  message: string
}

// Daemon health payload: version, config location, and connection summary.
export interface HealthResponse {
  status: "ok"
  version: string
  config_dir: string
  server_configured: boolean
  connection: ConnectionStatus
}

// Initial payload loaded at app start.
export interface AppBootstrapResponse {
  health: HealthResponse
  config: AppConfig
  jobs: JobStatusSummary[]
}

// How the desktop shell reaches the local daemon.
export interface DaemonConnectionInfo {
  base_url: string
  token: string
}

// One frame of the daemon status stream (same shape as the bootstrap payload).
export interface DaemonStatusSnapshot {
  health: HealthResponse
  config: AppConfig
  jobs: JobStatusSummary[]
}

// Result of probing a HydroServer connection with a candidate configuration.
export interface ConnectionTestResponse {
  ok: boolean
  state: ConnectionState
  message: string
  // Name of the specific credential field that failed, when known.
  invalid_field: string | null
  instance_name: string | null
  workspace_id: string | null
  workspace_name: string | null
  workspace_count: number
  datastream_count: number
  permissions_ok: boolean
}

export interface ServerUrlValidationResponse {
  ok: boolean
  message: string
  instance_name: string | null
}

// ---- CSV preview & transformer settings ------------------------------------

// Preview of a CSV source file, with detection hints for header/data rows.
export interface CsvPreviewResponse {
  raw_lines: string[]
  parsed_rows: string[][]
  detected_header_row: number | null
  detected_data_start_row: number | null
  detected_delimiter: string
  total_lines: number
  encoding: string
}

// Columns are addressed either by header name or by position.
export type CsvTransformerIdentifierType = "name" | "index"

export interface CsvTransformerTimestampSettings extends Timestamp {
  // Column identifier (name or index, per identifierType) holding the timestamp.
  key: string
}

// How a CSV file is parsed into observations.
export interface CsvTransformerSettings {
  headerRow: number | null
  dataStartRow: number
  delimiter: string
  identifierType: CsvTransformerIdentifierType
  timestamp: CsvTransformerTimestampSettings
}

// ---- Datastream metadata ----------------------------------------------------

// Compact datastream listing used when building column mappings.
export interface DatastreamSummary {
  id: string
  name: string
  thing_id: string
  thing_name: string
  observed_property_name: string
  processing_level_definition: string
  unit_name: string
  unit_symbol: string
  sampled_medium: string
  sensor_name: string
  result_type: string
}

export interface DatastreamThingLocationDetail {
  latitude: string
  longitude: string
  elevation_m: string
  elevation_datum: string
  admin_area_1: string
  admin_area_2: string
  country: string
}

export interface DatastreamThingDetail {
  id: string
  name: string
  description: string
  sampling_feature_code: string
  site_type: string
  sampling_feature_type: string
  is_private: boolean
  location: DatastreamThingLocationDetail
}

export interface DatastreamObservedPropertyDetail {
  id: string
  name: string
  definition: string
  description: string
  property_type: string
  code: string
}

export interface DatastreamUnitDetail {
  id: string
  name: string
  symbol: string
  definition: string
  unit_type: string
}

export interface DatastreamSensorDetail {
  id: string
  name: string
  description: string
  manufacturer: string
  model: string
  method_type: string
  method_code: string
  method_link: string
  encoding_type: string
  model_link: string
}

export interface DatastreamProcessingLevelDetail {
  id: string
  code: string
  definition: string
  explanation: string
}

// Full datastream record with nested thing/property/unit/sensor/level details.
export interface DatastreamDetail {
  id: string
  name: string
  description: string
  sampled_medium: string
  result_type: string
  observation_type: string
  no_data_value: string
  aggregation_statistic: string
  intended_time_spacing: string
  intended_time_spacing_unit: string
  time_aggregation_interval: string
  time_aggregation_interval_unit: string
  phenomenon_begin_time: string
  phenomenon_end_time: string
  value_count: string
  is_private: boolean
  is_visible: boolean
  thing: DatastreamThingDetail
  observed_property: DatastreamObservedPropertyDetail
  unit: DatastreamUnitDetail
  sensor: DatastreamSensorDetail
  processing_level: DatastreamProcessingLevelDetail
}

// ---- Jobs -------------------------------------------------------------------

// One CSV column mapped onto one datastream.
export interface ColumnMapping {
  csv_column: string
  datastream_id: string
  datastream_name: string
}

// Persisted definition of a scheduled loading job.
export interface JobConfig {
  id: string
  name: string
  enabled: boolean
  file_path: string
  schedule_minutes: number
  file_config: CsvTransformerSettings
  column_mappings: ColumnMapping[]
}

export interface JobLogEntry {
  timestamp: string
  level: "info" | "warning" | "error"
  message: string
}

export interface JobLogsResponse {
  entries: JobLogEntry[]
  log_file_path: string | null
}

export type JobStatus =
  | "healthy"
  | "warning"
  | "error"
  | "disabled"
  | "pending"
  | "running"

// Job definition plus its latest runtime status, as listed on the dashboard.
export interface JobStatusSummary {
  id: string
  name: string
  enabled: boolean
  file_path: string
  schedule_minutes: number
  file_config: CsvTransformerSettings
  column_mappings: ColumnMapping[]
  status: JobStatus
  status_message: string
  last_pushed_timestamp: string | null
  last_run_at: string | null
  last_error: string | null
}

// Create/update payload; enabled and schedule_minutes are optional on create.
export interface JobUpsertRequest {
  name: string
  enabled?: boolean
  file_path: string
  schedule_minutes?: number
  file_config: CsvTransformerSettings
  column_mappings: ColumnMapping[]
}

// Full job record: upsert fields (now required) plus runtime status and logs.
export interface JobDetail extends JobUpsertRequest {
  id: string
  enabled: boolean
  schedule_minutes: number
  recent_logs: JobLogEntry[]
  status: JobStatus
  status_message: string
  last_pushed_timestamp: string | null
  last_run_at: string | null
  last_error: string | null
}

// Generic success/failure envelope for mutating actions.
export interface ActionResponse {
  ok: boolean
  message: string
}
+ return invokeCommand("uninstall_os_service") + } + + return Promise.reject( + new Error("Background service management is only available in the desktop app.") + ) +} diff --git a/frontend/api/os-service/index.ts b/frontend/api/os-service/index.ts new file mode 100644 index 0000000..2f07b58 --- /dev/null +++ b/frontend/api/os-service/index.ts @@ -0,0 +1,2 @@ +export * from "./types" +export * from "./client" diff --git a/frontend/api/os-service/types.ts b/frontend/api/os-service/types.ts new file mode 100644 index 0000000..668f5b8 --- /dev/null +++ b/frontend/api/os-service/types.ts @@ -0,0 +1,9 @@ +export interface ServiceStatusResponse { + supported: boolean + installed: boolean + running: boolean + label: string + plist_path: string + executable_path: string + status_message: string +} diff --git a/frontend/api/runtime.ts b/frontend/api/runtime.ts new file mode 100644 index 0000000..c73ba6b --- /dev/null +++ b/frontend/api/runtime.ts @@ -0,0 +1,79 @@ +export function isTauriRuntime(): boolean { + return ( + typeof window !== 'undefined' && + '__TAURI_INTERNALS__' in (window as Window & typeof globalThis) + ) +} + +export function isWindowsPlatform(): boolean { + if (typeof navigator === 'undefined') { + return false + } + + const navigatorWithUserAgentData = navigator as Navigator & { + userAgentData?: { platform?: string } + } + const platform = + navigatorWithUserAgentData.userAgentData?.platform ?? + navigator.platform ?? + navigator.userAgent + + return /win/i.test(platform) +} + +export function formatErrorDetail(detail: unknown): string | null { + if (typeof detail === 'string' && detail.trim()) { + return detail + } + + if (Array.isArray(detail)) { + const firstMessage = detail + .map((item) => { + if (typeof item === 'string') return item + if ( + item && + typeof item === 'object' && + 'msg' in item && + typeof item.msg === 'string' + ) { + return item.msg + } + return null + }) + .find(Boolean) + + return firstMessage ?? 
null + } + + if (detail && typeof detail === 'object') { + if ('msg' in detail && typeof detail.msg === 'string') { + return detail.msg + } + + try { + return JSON.stringify(detail) + } catch { + return null + } + } + + return null +} + +export function normalizeError(error: unknown): Error { + if (error instanceof Error) return error + if (typeof error === 'string' && error.trim()) return new Error(error) + return new Error('Request failed.') +} + +export async function invokeCommand( + command: string, + payload?: Record +): Promise { + try { + const { invoke } = await import('@tauri-apps/api/core') + return await invoke(command, payload) + } catch (error) { + throw normalizeError(error) + } +} diff --git a/frontend/auth-submit.ts b/frontend/auth-submit.ts new file mode 100644 index 0000000..05b5fdb --- /dev/null +++ b/frontend/auth-submit.ts @@ -0,0 +1,182 @@ +import type { + AuthType, + ConnectionTestResponse, + ServerConfig, +} from "./api/hydroserver"; + +export type AuthFieldName = + | "url" + | "api_key" + | "username" + | "password" + | "workspace_name"; + +export type FieldValidationState = { + state: "idle" | "checking" | "valid" | "invalid"; + message: string | null; +}; + +export type AuthFieldStates = Record; + +export function emptyFieldValidationState(): FieldValidationState { + return { state: "idle", message: null }; +} + +export function createAuthFieldStates(): AuthFieldStates { + return { + url: emptyFieldValidationState(), + api_key: emptyFieldValidationState(), + username: emptyFieldValidationState(), + password: emptyFieldValidationState(), + workspace_name: emptyFieldValidationState(), + }; +} + +export function resetAuthFieldStates( + authFieldStates: AuthFieldStates, + authType: AuthType +): void { + authFieldStates.url = emptyFieldValidationState(); + authFieldStates.api_key = emptyFieldValidationState(); + authFieldStates.username = emptyFieldValidationState(); + authFieldStates.password = emptyFieldValidationState(); + 
authFieldStates.workspace_name = emptyFieldValidationState(); + + if (authType === "apikey") { + authFieldStates.username = emptyFieldValidationState(); + authFieldStates.password = emptyFieldValidationState(); + authFieldStates.workspace_name = emptyFieldValidationState(); + } else { + authFieldStates.api_key = emptyFieldValidationState(); + } +} + +export function credentialFields(authType: AuthType): AuthFieldName[] { + return authType === "userpass" + ? ["username", "password", "workspace_name"] + : ["api_key"]; +} + +export function isValidHttpUrl(value: string): boolean { + try { + const parsed = new URL(value); + return parsed.protocol === "http:" || parsed.protocol === "https:"; + } catch { + return false; + } +} + +export function validateAuthFieldsForSubmit( + server: ServerConfig, + markField: ( + field: AuthFieldName, + nextState: FieldValidationState["state"], + message?: string | null + ) => void +): boolean { + let valid = true; + + if (!server.url) { + markField("url", "invalid", "Please enter your HydroServer URL."); + valid = false; + } else if (!isValidHttpUrl(server.url)) { + markField("url", "invalid", "Please enter a full http:// or https:// URL."); + valid = false; + } else { + markField("url", "valid"); + } + + if (server.auth_type === "apikey") { + if (!server.api_key) { + markField("api_key", "invalid", "Please enter your API key."); + valid = false; + } else { + markField("api_key", "valid"); + } + } else { + if (!server.username) { + markField("username", "invalid", "Please enter your username."); + valid = false; + } else { + markField("username", "valid"); + } + + if (!server.password) { + markField("password", "invalid", "Please enter your password."); + valid = false; + } else { + markField("password", "valid"); + } + + if (!server.workspace_name.trim()) { + markField("workspace_name", "invalid", "Please enter a workspace name."); + valid = false; + } else { + markField("workspace_name", "valid"); + } + } + + return valid; +} + 
+export function applyConnectionValidationResult( + server: ServerConfig, + result: ConnectionTestResponse, + markField: ( + field: AuthFieldName, + nextState: FieldValidationState["state"], + message?: string | null + ) => void +): void { + markField("url", "valid"); + + if (result.ok) { + for (const field of credentialFields(server.auth_type)) { + markField(field, "valid"); + } + return; + } + + const message = result.message; + const isUrlError = + result.message.includes("Couldn't reach HydroServer") || + result.message.includes("HydroServer returned an error"); + + if (isUrlError) { + markField("url", "invalid", message); + for (const field of credentialFields(server.auth_type)) { + markField(field, "idle"); + } + return; + } + + if (result.invalid_field === "workspace_name") { + markField("workspace_name", "invalid", message); + if (server.auth_type === "userpass") { + markField("username", "valid"); + markField("password", "valid"); + } + return; + } + + for (const field of credentialFields(server.auth_type)) { + markField(field, "invalid", message); + } +} + +export async function runAuthSubmission(params: { + render: () => void; + setSubmitting: (value: boolean) => void; + action: () => Promise; +}): Promise { + const { render, setSubmitting, action } = params; + setSubmitting(true); + render(); + + try { + return await action(); + } finally { + setSubmitting(false); + render(); + } +} diff --git a/frontend/components/AccountMenuButton.vue b/frontend/components/AccountMenuButton.vue new file mode 100644 index 0000000..6b98af4 --- /dev/null +++ b/frontend/components/AccountMenuButton.vue @@ -0,0 +1,292 @@ + + + diff --git a/frontend/components/AnimatedLoadingIcon.vue b/frontend/components/AnimatedLoadingIcon.vue new file mode 100644 index 0000000..c92980f --- /dev/null +++ b/frontend/components/AnimatedLoadingIcon.vue @@ -0,0 +1,204 @@ + + + diff --git a/frontend/components/AuthForm.vue b/frontend/components/AuthForm.vue new file mode 100644 index 
0000000..0b3ecb7 --- /dev/null +++ b/frontend/components/AuthForm.vue @@ -0,0 +1,152 @@ + + + diff --git a/frontend/components/CsvPreview.vue b/frontend/components/CsvPreview.vue new file mode 100644 index 0000000..3cdff16 --- /dev/null +++ b/frontend/components/CsvPreview.vue @@ -0,0 +1,448 @@ + + + diff --git a/frontend/components/CsvTransformerSettings.vue b/frontend/components/CsvTransformerSettings.vue new file mode 100644 index 0000000..6152f36 --- /dev/null +++ b/frontend/components/CsvTransformerSettings.vue @@ -0,0 +1,309 @@ + + + diff --git a/frontend/components/HeaderControls.vue b/frontend/components/HeaderControls.vue new file mode 100644 index 0000000..cb16b39 --- /dev/null +++ b/frontend/components/HeaderControls.vue @@ -0,0 +1,11 @@ + + + diff --git a/frontend/components/SettingsMenuButton.vue b/frontend/components/SettingsMenuButton.vue new file mode 100644 index 0000000..5bd7491 --- /dev/null +++ b/frontend/components/SettingsMenuButton.vue @@ -0,0 +1,135 @@ + + + diff --git a/frontend/composables/state.ts b/frontend/composables/state.ts new file mode 100644 index 0000000..930f6f5 --- /dev/null +++ b/frontend/composables/state.ts @@ -0,0 +1,165 @@ +import { reactive } from "vue"; + +import { + createAuthFieldStates, + type AuthFieldName, + type FieldValidationState, +} from "../auth-submit"; +import { + createPipelineFieldStates, + type PipelineFieldStates, +} from "../pipeline-submit"; +import { getRouteFromHash, type AppRoute } from "../router"; +import type { + AppConfig, + ColumnMapping, + ConnectionState, + ConnectionTestResponse, + CsvPreviewResponse, + DatastreamSummary, + CsvTransformerSettings, + CsvTransformerTimestampSettings, + HealthResponse, + JobStatusSummary, + ServerConfig, +} from "../api/hydroserver"; +import type { ServiceStatusResponse } from "../api/os-service"; + +export type PipelineIdentifierType = "name" | "index"; +export type PipelineEditorStep = 1 | 2; + +export type PipelineFormState = { + filePath: string; + 
hasHeaderRow: boolean; + headerRow: number; + dataStartRow: number; + delimiter: string; + identifierType: PipelineIdentifierType; + timestamp: CsvTransformerTimestampSettings; +}; + +export type PipelineMappingDraft = { + csvColumn: string; + thingId: string; + datastreamId: string; +}; + +export type PipelineEditTarget = { + jobId: string; + name: string; + enabled: boolean; + scheduleMinutes: number; +}; + +export type PreviewSelectionTarget = + | "header-row" + | "data-start-row" + | "timestamp-column" + | null; + +export type PreviewRowSelectionTarget = Exclude< + PreviewSelectionTarget, + "timestamp-column" | null +>; + +type UiState = { + route: AppRoute; + health: HealthResponse | null; + config: AppConfig | null; + connectionSummary: ConnectionTestResponse | null; + serviceStatus: ServiceStatusResponse | null; + jobStatuses: JobStatusSummary[]; + serviceStatusLoading: boolean; + serviceActionSubmitting: boolean; + serviceActionError: string | null; + loading: boolean; + lastConnectionState: ConnectionState | null; + pipelineForm: PipelineFormState; + pipelinePreview: CsvPreviewResponse | null; + authDraft: ServerConfig; + authDraftDirty: boolean; + authFieldStates: Record; + pipelineFieldStates: PipelineFieldStates; + authSubmitting: boolean; + postAuthRedirectPending: boolean; + pipelineSelectionTarget: PreviewSelectionTarget; + pipelineEditorStartStep: PipelineEditorStep | null; + pipelinePreviewRowsRequested: number; + pipelineValidationAttempted: boolean; + pipelineReadyForMapping: boolean; + validatedPipelineSettings: CsvTransformerSettings | null; + pipelineDatastreams: DatastreamSummary[]; + pipelineDatastreamsLoading: boolean; + pipelineMappingDrafts: PipelineMappingDraft[]; + validatedColumnMappings: ColumnMapping[]; + pipelineEditTarget: PipelineEditTarget | null; + pipelineCreateSubmitting: boolean; +}; + +export const PREVIEW_PAGE_SIZE = 100; +export const PREVIEW_PAGE_INCREMENT = PREVIEW_PAGE_SIZE; +export const APP_NAME = "HydroServer 
Streaming Data Loader"; +export const API_KEY_DOCS_URL = + "https://hydroserver2.github.io/hydroserver/tutorials/creating-your-first-orchestration-system#create-an-api-key"; + +export function emptyServerConfig(): ServerConfig { + return { + auth_type: "apikey", + url: "", + api_key: "", + username: "", + password: "", + workspace_id: "", + workspace_name: "", + }; +} + +export function createEmptyPipelineForm(): PipelineFormState { + return { + filePath: "", + hasHeaderRow: true, + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "timestamp", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }; +} + +export const state = reactive({ + route: getRouteFromHash(), + health: null, + config: null, + connectionSummary: null, + serviceStatus: null, + jobStatuses: [], + serviceStatusLoading: false, + serviceActionSubmitting: false, + serviceActionError: null, + loading: true, + lastConnectionState: null, + pipelineForm: createEmptyPipelineForm(), + pipelinePreview: null, + authDraft: emptyServerConfig(), + authDraftDirty: false, + authFieldStates: createAuthFieldStates(), + pipelineFieldStates: createPipelineFieldStates(), + authSubmitting: false, + postAuthRedirectPending: false, + pipelineSelectionTarget: null, + pipelineEditorStartStep: null, + pipelinePreviewRowsRequested: PREVIEW_PAGE_SIZE, + pipelineValidationAttempted: false, + pipelineReadyForMapping: false, + validatedPipelineSettings: null, + pipelineDatastreams: [], + pipelineDatastreamsLoading: false, + pipelineMappingDrafts: [], + validatedColumnMappings: [], + pipelineEditTarget: null, + pipelineCreateSubmitting: false, +}); diff --git a/frontend/composables/useAppModel.ts b/frontend/composables/useAppModel.ts new file mode 100644 index 0000000..ca34d84 --- /dev/null +++ b/frontend/composables/useAppModel.ts @@ -0,0 +1,541 @@ +import { computed, watch } from "vue" + +import { + getBootstrap, + subscribeToDaemonStatus, + type DaemonStatusSnapshot, 
+} from "../api/hydroserver" +import { disconnectDaemonConnection } from "../api/hydroserver/daemonTransport" +import { getServiceStatus, type ServiceStatusResponse } from "../api/os-service" +import { getRouteFromHash, navigate } from "../router" +import { + state, + emptyServerConfig, + APP_NAME, + API_KEY_DOCS_URL, + PREVIEW_PAGE_INCREMENT, + PREVIEW_PAGE_SIZE, +} from "./state" +import { syncAuthenticationStatus, serverConfigured } from "./useAuth" +import { + installBackgroundService, + isServiceReady, + refreshServiceStatus, + restartBackgroundService, + uninstallBackgroundService, +} from "./useService" +import { isTauriRuntime, normalizeError } from "../api/runtime" +import type { AppRoute } from "../router" + +export { APP_NAME, API_KEY_DOCS_URL, PREVIEW_PAGE_INCREMENT, PREVIEW_PAGE_SIZE } +export type { PreviewSelectionTarget, PreviewRowSelectionTarget } from "./state" + +export * from "./useAuth" +export * from "./usePipeline" +export * from "./useService" + +const isConnected = computed( + () => state.connectionSummary?.ok === true && state.lastConnectionState === "connected" +) + +const hasSavedDatasources = computed( + () => (state.config?.jobs?.length ?? 0) > 0 +) + +export function resolveAuthenticatedRoute(params: { + route: AppRoute + hasSavedDatasources: boolean + pipelineReadyForMapping: boolean + serviceReady: boolean +}): AppRoute { + const { route, hasSavedDatasources, pipelineReadyForMapping, serviceReady } = params + const fallbackRoute: AppRoute = hasSavedDatasources ? 
"dashboard" : "jobs-new" + + if (!serviceReady) { + return "service" + } + + if (route === "jobs-new-mapping" && !pipelineReadyForMapping) { + return fallbackRoute + } + + if (route === "welcome" || route === "service") { + return fallbackRoute + } + + if (route === "dashboard" && !hasSavedDatasources) { + return "jobs-new" + } + + if ( + route !== "dashboard" && + route !== "jobs-new" && + route !== "jobs-new-mapping" + ) { + return fallbackRoute + } + + return route +} + +export function requiresDesktopServiceSetup(params: { + tauriRuntime: boolean + serviceReady: boolean + daemonReady: boolean +}): boolean { + const { tauriRuntime, serviceReady, daemonReady } = params + return tauriRuntime && (!serviceReady || !daemonReady) +} + +export function shouldBootstrapDesktopDaemon(params: { + tauriRuntime: boolean + serviceStatus: ServiceStatusResponse | null +}): boolean { + const { tauriRuntime, serviceStatus } = params + if (!tauriRuntime) { + return true + } + + return isServiceReady(serviceStatus) +} + +export function shouldRefreshServiceStatusOnFocus(params: { + loading: boolean + connected: boolean + serviceActionSubmitting: boolean +}): boolean { + const { loading, connected, serviceActionSubmitting } = params + return !loading && connected && !serviceActionSubmitting +} + +export function shouldHydrateAuthDraftFromDaemon(params: { + authSubmitting: boolean + authDraftDirty: boolean +}): boolean { + const { authSubmitting, authDraftDirty } = params + return !authSubmitting && !authDraftDirty +} + +export function shouldApplyDaemonConnectionState(params: { + authSubmitting: boolean + snapshotConnectionState: DaemonStatusSnapshot["health"]["connection"]["state"] +}): boolean { + const { authSubmitting, snapshotConnectionState } = params + + if (authSubmitting) { + return snapshotConnectionState === "connected" + } + + return true +} + +export function resolvePostAuthRoute(params: { + hasSavedDatasources: boolean + serviceReady: boolean +}): AppRoute { + const { 
hasSavedDatasources, serviceReady } = params + if (!serviceReady) { + return "service" + } + return hasSavedDatasources ? "dashboard" : "jobs-new" +} + +function bootstrapServiceErrorMessage(params: { + error: unknown + serviceStatusInstalled: boolean + serviceStatusRunning: boolean + serviceSupported: boolean +}): string { + const { error, serviceStatusInstalled, serviceStatusRunning, serviceSupported } = params + + if (!serviceSupported) { + return normalizeError(error).message + } + + if (!serviceStatusInstalled) { + return "Install the background service to continue." + } + + if (!serviceStatusRunning) { + return "Restart the background service to continue." + } + + return "Couldn't connect to the local background service. Restart it to continue." +} + +function syncRouteState(): void { + let route = getRouteFromHash() + + if (!state.loading) { + if ( + requiresDesktopServiceSetup({ + tauriRuntime: isTauriRuntime(), + serviceReady: isServiceReady(state.serviceStatus), + daemonReady: state.health !== null && state.config !== null, + }) + ) { + if (route !== "service") { + navigate("service") + route = "service" + } + } else if (!isConnected.value) { + if (route !== "welcome") { + navigate("welcome") + route = "welcome" + } + } else { + const nextRoute = state.postAuthRedirectPending + ? 
resolvePostAuthRoute({ + hasSavedDatasources: hasSavedDatasources.value, + serviceReady: isServiceReady(state.serviceStatus), + }) + : resolveAuthenticatedRoute({ + route, + hasSavedDatasources: hasSavedDatasources.value, + pipelineReadyForMapping: state.pipelineReadyForMapping, + serviceReady: isServiceReady(state.serviceStatus), + }) + if (nextRoute !== route) { + navigate(nextRoute) + route = nextRoute + } + state.postAuthRedirectPending = false + } + } + + state.route = route +} + +watch( + [ + isConnected, + hasSavedDatasources, + () => state.loading, + () => state.pipelineReadyForMapping, + () => state.serviceStatus?.installed, + () => state.serviceStatus?.running, + ], + syncRouteState +) + +export const useWelcomeSurface = computed( + () => + Boolean( + state.loading || + state.route === "welcome" || + state.route === "service" || + state.route === "dashboard" || + state.route === "jobs-new" || + state.route === "jobs-new-mapping" + ) +) + +let stopStatusSubscription: (() => void) | null = null + +const STARTUP_RETRY_ATTEMPTS = 12 +const STARTUP_RETRY_DELAY_MS = 350 + +function sleep(ms: number): Promise { + return new Promise((resolve) => window.setTimeout(resolve, ms)) +} + +function isTransientError(error: unknown): boolean { + if (!(error instanceof Error)) return false + const msg = error.message.toLowerCase() + return ( + msg.includes("failed to fetch") || + msg.includes("networkerror") || + msg.includes("status 500") || + msg.includes("status 502") || + msg.includes("status 503") || + msg.includes("status 504") + ) +} + +async function loadDesktopInitialState(): Promise<{ + serviceStatus: ServiceStatusResponse + bootstrapResponse: Awaited> | null +}> { + const serviceStatus = await getServiceStatus() + if ( + !shouldBootstrapDesktopDaemon({ + tauriRuntime: true, + serviceStatus, + }) + ) { + return { serviceStatus, bootstrapResponse: null } + } + + let lastError: unknown = null + for (let attempt = 1; attempt <= STARTUP_RETRY_ATTEMPTS; attempt++) { + 
try { + return { + serviceStatus, + bootstrapResponse: await getBootstrap(), + } + } catch (error) { + lastError = error + if (attempt === STARTUP_RETRY_ATTEMPTS || !isTransientError(error)) throw error + await sleep(STARTUP_RETRY_DELAY_MS) + } + } + + throw lastError instanceof Error ? lastError : new Error(`Failed to load ${APP_NAME}.`) +} + +async function loadInitialState() { + if (isTauriRuntime()) { + return loadDesktopInitialState() + } + + let lastError: unknown = null + for (let attempt = 1; attempt <= STARTUP_RETRY_ATTEMPTS; attempt++) { + try { + return { + serviceStatus: null, + bootstrapResponse: await getBootstrap(), + } + } catch (error) { + lastError = error + if (attempt === STARTUP_RETRY_ATTEMPTS || !isTransientError(error)) throw error + await sleep(STARTUP_RETRY_DELAY_MS) + } + } + throw lastError instanceof Error ? lastError : new Error(`Failed to load ${APP_NAME}.`) +} + +function applyDaemonStatusSnapshot(snapshot: DaemonStatusSnapshot): void { + state.health = snapshot.health + state.config = snapshot.config + state.jobStatuses = snapshot.jobs + + if ( + shouldHydrateAuthDraftFromDaemon({ + authSubmitting: state.authSubmitting, + authDraftDirty: state.authDraftDirty, + }) + ) { + state.authDraft = { ...emptyServerConfig(), ...snapshot.config.server } + state.authDraftDirty = false + } + + if ( + shouldApplyDaemonConnectionState({ + authSubmitting: state.authSubmitting, + snapshotConnectionState: snapshot.health.connection.state, + }) + ) { + if (snapshot.health.connection.state === "not_configured") { + state.connectionSummary = null + state.lastConnectionState = "not_configured" + } else if (!state.lastConnectionState || state.lastConnectionState === "not_configured") { + state.lastConnectionState = snapshot.health.connection.state + } + } +} + +function ensureStatusSubscription(): void { + stopStatusSubscription?.() + stopStatusSubscription = subscribeToDaemonStatus({ + onStatus(snapshot) { + applyDaemonStatusSnapshot(snapshot) + }, + 
onError(error) { + console.error("The daemon status stream disconnected.", error) + }, + }) +} + +function clearDaemonStatusSnapshot(): void { + state.health = null + state.config = null + state.jobStatuses = [] +} + +function disconnectDaemonSession(): void { + stopStatusSubscription?.() + stopStatusSubscription = null + disconnectDaemonConnection() +} + +export async function bootstrap(): Promise { + state.loading = true + syncRouteState() + + try { + disconnectDaemonSession() + + const { serviceStatus, bootstrapResponse } = await loadInitialState() + state.serviceStatus = serviceStatus + state.serviceActionError = null + + if (!bootstrapResponse) { + clearDaemonStatusSnapshot() + state.lastConnectionState = null + return + } + + applyDaemonStatusSnapshot(bootstrapResponse) + ensureStatusSubscription() + state.lastConnectionState = bootstrapResponse.health.connection.state + + if (serverConfigured(bootstrapResponse.config.server)) { + await syncAuthenticationStatus(bootstrapResponse.config.server) + } + } catch (error) { + if (isTauriRuntime()) { + const serviceStatus = await refreshServiceStatus() + clearDaemonStatusSnapshot() + state.serviceActionError = bootstrapServiceErrorMessage({ + error, + serviceStatusInstalled: Boolean(serviceStatus?.installed), + serviceStatusRunning: Boolean(serviceStatus?.running), + serviceSupported: serviceStatus?.supported !== false, + }) + } + } finally { + state.loading = false + syncRouteState() + } +} + +async function runServiceActionWithDaemonDisconnect( + action: () => Promise +): Promise { + disconnectDaemonSession() + await action() +} + +async function installBackgroundServiceFromModel(): Promise { + await runServiceActionWithDaemonDisconnect(() => installBackgroundService()) +} + +async function restartBackgroundServiceFromModel(): Promise { + await runServiceActionWithDaemonDisconnect(() => restartBackgroundService()) +} + +async function uninstallBackgroundServiceFromModel(): Promise { + await 
runServiceActionWithDaemonDisconnect(() => uninstallBackgroundService()) +} + +export function init(): void { + window.addEventListener("hashchange", () => { + syncRouteState() + }) + window.addEventListener("focus", () => { + if ( + shouldRefreshServiceStatusOnFocus({ + loading: state.loading, + connected: isConnected.value, + serviceActionSubmitting: state.serviceActionSubmitting, + }) + ) { + void refreshServiceStatus() + } + }) + + syncRouteState() + void bootstrap() +} + +import { + updateAuthDraftField, + toggleAuthMode, + submitAuthConfig, + disconnectHydroServer, +} from "./useAuth" + +import { + abandonPipelineCreation, + buildPipelineTransformerSettings, + createPipelineDatasource, + editPipelineCsvSetup, + editPipelineMappings, + editPipelineSourceFile, + submitPipelineConfig, + parsedPreviewRows, + previewHeaders, + selectedPreviewTimestampColumn, + canShowMorePreviewLines, + updatePipelineField, + setPipelineHasHeaderRow, + setPipelineIdentifierType, + applyPreviewLineSelection, + applyPreviewColumnSelection, + updateHeaderRowFromPreview, + updateDataStartRowFromPreview, + loadPipelinePreview, + showMorePreviewLines, + browseForCsvPath, +} from "./usePipeline" + +import { + buildPipelineColumnMappings, + pipelineDatastreamBrowserEntries, + clearPipelineMapping, + datastreamOptionsForThing, + loadPipelineDatastreams, + pipelineMappingRows, + pipelineMappingSourceColumns, + pipelineThingOptions, + syncPipelineMappingDrafts, + updatePipelineMappingDatastream, + updatePipelineMappingThing, +} from "./useMapping" + +const model = { + state, + APP_NAME, + API_KEY_DOCS_URL, + PREVIEW_PAGE_INCREMENT, + PREVIEW_PAGE_SIZE, + isConnected, + hasSavedDatasources, + useWelcomeSurface, + parsedPreviewRows, + previewHeaders, + selectedPreviewTimestampColumn, + abandonPipelineCreation, + buildPipelineTransformerSettings, + buildPipelineColumnMappings, + submitPipelineConfig, + createPipelineDatasource, + editPipelineCsvSetup, + editPipelineMappings, + 
editPipelineSourceFile, + canShowMorePreviewLines, + pipelineDatastreamBrowserEntries, + pipelineMappingRows, + pipelineMappingSourceColumns, + pipelineThingOptions, + init, + bootstrap, + updateAuthDraftField, + toggleAuthMode, + submitAuthConfig, + disconnectHydroServer, + updatePipelineField, + setPipelineHasHeaderRow, + setPipelineIdentifierType, + applyPreviewLineSelection, + applyPreviewColumnSelection, + updateHeaderRowFromPreview, + updateDataStartRowFromPreview, + loadPipelinePreview, + loadPipelineDatastreams, + syncPipelineMappingDrafts, + datastreamOptionsForThing, + updatePipelineMappingThing, + updatePipelineMappingDatastream, + clearPipelineMapping, + showMorePreviewLines, + browseForCsvPath, + refreshServiceStatus, + installBackgroundService: installBackgroundServiceFromModel, + restartBackgroundService: restartBackgroundServiceFromModel, + uninstallBackgroundService: uninstallBackgroundServiceFromModel, +} as const + +export function useAppModel() { + return model +} diff --git a/frontend/composables/useAuth.ts b/frontend/composables/useAuth.ts new file mode 100644 index 0000000..3cc189a --- /dev/null +++ b/frontend/composables/useAuth.ts @@ -0,0 +1,180 @@ +import { + applyConnectionValidationResult, + resetAuthFieldStates, + runAuthSubmission, + validateAuthFieldsForSubmit, + type AuthFieldName, + type FieldValidationState, +} from "../auth-submit" +import { + clearServerConfig, + testConnection, + updateServerConfig, + validateServerUrl, + type AuthType, + type ConnectionTestResponse, + type ServerConfig, +} from "../api/hydroserver" +import { + createEmptyPipelineForm, + emptyServerConfig, + PREVIEW_PAGE_SIZE, + state, +} from "./state" +import { refreshServiceStatus } from "./useService" + +export function serverConfigured(server: ServerConfig | null | undefined): boolean { + if (!server?.url.trim()) return false + if (server.auth_type === "userpass") { + return Boolean(server.username.trim() && server.password.trim() && 
(server.workspace_id.trim() || server.workspace_name.trim())) + } + return Boolean(server.api_key.trim()) +} + +export function markField( + field: AuthFieldName, + nextState: FieldValidationState["state"], + message: string | null = null +): void { + state.authFieldStates[field] = { state: nextState, message } +} + +function resetFieldStates(authType: AuthType): void { + resetAuthFieldStates(state.authFieldStates, authType) +} + +function normalizeServerDraft(): ServerConfig { + const server = state.authDraft + return { + auth_type: server.auth_type, + url: server.url.trim(), + api_key: server.auth_type === "apikey" ? server.api_key.trim() : server.api_key, + username: server.auth_type === "userpass" ? server.username.trim() : server.username, + password: server.auth_type === "userpass" ? server.password.trim() : server.password, + workspace_id: server.auth_type === "userpass" ? server.workspace_id.trim() : "", + workspace_name: + server.auth_type === "userpass" ? server.workspace_name.trim() : server.workspace_name, + } +} + +export function updateAuthDraftField( + _formId: "welcome-form" | "settings-form", + field: AuthFieldName, + value: string +): void { + state.authDraftDirty = true + state.authDraft[field] = value + if ( + field === "url" || + field === "username" || + field === "password" || + field === "workspace_name" + ) { + state.authDraft.workspace_id = "" + } + markField(field, "idle") +} + +export function toggleAuthMode(_formId: "welcome-form" | "settings-form"): void { + const nextType: AuthType = + state.authDraft.auth_type === "apikey" ? 
"userpass" : "apikey" + state.authDraftDirty = true + state.authDraft = { ...state.authDraft, auth_type: nextType } + resetFieldStates(nextType) +} + +export async function syncAuthenticationStatus( + server: ServerConfig +): Promise { + const result = await testConnection(server) + state.connectionSummary = result + state.lastConnectionState = result.state + + if (result.ok && result.workspace_id) { + if (state.config) { + state.config.server.workspace_id = result.workspace_id + state.config.server.workspace_name = result.workspace_name ?? state.config.server.workspace_name + } + state.authDraft.workspace_id = result.workspace_id + state.authDraft.workspace_name = result.workspace_name ?? state.authDraft.workspace_name + } + + return result +} + +export async function submitAuthConfig( + _formId: "welcome-form" | "settings-form" +): Promise { + if (state.authSubmitting) return + + const payload = normalizeServerDraft() + state.authDraft = { ...payload } + + resetFieldStates(payload.auth_type) + + if (!validateAuthFieldsForSubmit(payload, markField)) return + + try { + await runAuthSubmission({ + render: () => undefined, + setSubmitting: (value) => { + state.authSubmitting = value + }, + action: async () => { + const urlValidation = await validateServerUrl(payload.url) + if (!urlValidation.ok) { + markField("url", "invalid", urlValidation.message) + return + } + + markField("url", "valid") + + const result = await syncAuthenticationStatus(payload) + applyConnectionValidationResult(payload, result, markField) + if (!result.ok) { + return + } + + state.config = await updateServerConfig(payload) + state.authDraft = { ...emptyServerConfig(), ...state.config.server } + state.authDraftDirty = false + await syncAuthenticationStatus(state.config.server) + await refreshServiceStatus() + state.postAuthRedirectPending = _formId === "welcome-form" + }, + }) + } catch (error) { + console.error("Couldn't verify the HydroServer connection.", error) + state.lastConnectionState = 
"error" + } +} + +export async function disconnectHydroServer(): Promise { + try { + state.config = await clearServerConfig() + state.authDraft = emptyServerConfig() + state.authDraftDirty = false + state.postAuthRedirectPending = false + state.connectionSummary = null + state.jobStatuses = [] + state.serviceActionError = null + state.lastConnectionState = "not_configured" + state.pipelineForm = createEmptyPipelineForm() + state.pipelinePreview = null + state.pipelineSelectionTarget = null + state.pipelineEditorStartStep = null + state.pipelinePreviewRowsRequested = PREVIEW_PAGE_SIZE + state.pipelineValidationAttempted = false + state.pipelineReadyForMapping = false + state.validatedPipelineSettings = null + state.pipelineDatastreams = [] + state.pipelineDatastreamsLoading = false + state.pipelineMappingDrafts = [] + state.validatedColumnMappings = [] + state.pipelineEditTarget = null + state.pipelineCreateSubmitting = false + resetFieldStates("apikey") + } catch (error) { + console.error("Couldn't disconnect from HydroServer right now.", error) + } +} diff --git a/frontend/composables/useMapping.ts b/frontend/composables/useMapping.ts new file mode 100644 index 0000000..a8e58cc --- /dev/null +++ b/frontend/composables/useMapping.ts @@ -0,0 +1,342 @@ +import { computed } from "vue" + +import { + getDatastreams, + type ColumnMapping, + type DatastreamSummary, +} from "../api/hydroserver" +import { state, type PipelineMappingDraft } from "./state" +import { previewHeaders } from "./usePipeline" + +export type MappingSourceColumn = { + csvColumn: string + label: string +} + +export type MappingThingOption = { + id: string + name: string +} + +export type PipelineMappingRow = MappingSourceColumn & { + thingId: string + datastreamId: string + selectedDatastream: DatastreamSummary | null +} + +export type MappingDatastreamBrowserEntry = + | { + kind: "thing" + key: string + thingId: string + thingName: string + } + | { + kind: "datastream" + key: string + datastream: 
DatastreamSummary + mappedCsvColumn: string | null + mappedColumnLabel: string | null + } + +export function buildMappingSourceColumns( + headers: string[], + identifierType: "name" | "index", + timestampKey: string +): MappingSourceColumn[] { + return headers + .map((header, index) => { + const csvColumn = + identifierType === "index" ? String(index + 1) : header + + if (csvColumn === timestampKey) { + return null + } + + return { + csvColumn, + label: + identifierType === "index" ? `${index + 1} · ${header}` : header, + } + }) + .filter((column): column is MappingSourceColumn => column !== null) +} + +export const pipelineMappingSourceColumns = computed(() => { + const settings = state.validatedPipelineSettings + if (!settings) return [] + + return buildMappingSourceColumns( + previewHeaders.value, + settings.identifierType, + settings.timestamp.key + ) +}) + +export const pipelineThingOptions = computed(() => { + const things = new Map() + + for (const datastream of state.pipelineDatastreams) { + if (!datastream.thing_id || !datastream.thing_name) continue + things.set(datastream.thing_id, datastream.thing_name) + } + + return Array.from(things.entries()) + .map(([id, name]) => ({ id, name })) + .sort((a, b) => a.name.localeCompare(b.name)) +}) + +export const pipelineMappingRows = computed(() => + pipelineMappingSourceColumns.value.map((source) => { + const draft = mappingDraftByColumn(source.csvColumn) + const selectedDatastream = draft?.datastreamId + ? datastreamById(draft.datastreamId) + : null + + return { + ...source, + thingId: selectedDatastream?.thing_id ?? draft?.thingId ?? "", + datastreamId: selectedDatastream?.id ?? draft?.datastreamId ?? 
"", + selectedDatastream, + } + }) +) + +export const pipelineDatastreamBrowserEntries = + computed(() => + buildDatastreamBrowserEntries( + state.pipelineDatastreams, + state.pipelineMappingDrafts, + pipelineMappingSourceColumns.value + ) + ) + +export async function loadPipelineDatastreams(force = false): Promise { + if (state.pipelineDatastreamsLoading) return + + if (state.pipelineDatastreams.length > 0 && !force) { + syncPipelineMappingDrafts() + return + } + + state.pipelineDatastreamsLoading = true + + try { + state.pipelineDatastreams = sortDatastreams(await getDatastreams()) + syncPipelineMappingDrafts() + } catch { + state.pipelineDatastreams = [] + } finally { + state.pipelineDatastreamsLoading = false + } +} + +export function syncPipelineMappingDrafts(): void { + const nextDrafts: PipelineMappingDraft[] = pipelineMappingSourceColumns.value.map( + (source) => { + const existing = mappingDraftByColumn(source.csvColumn) + const selectedDatastream = existing?.datastreamId + ? datastreamById(existing.datastreamId) + : null + + return { + csvColumn: source.csvColumn, + thingId: selectedDatastream?.thing_id ?? existing?.thingId ?? "", + datastreamId: selectedDatastream?.id ?? "", + } + } + ) + + state.pipelineMappingDrafts = nextDrafts + syncValidatedColumnMappings() +} + +export function datastreamOptionsForThing( + thingId: string, + csvColumn: string +): DatastreamSummary[] { + const currentDatastreamId = mappingDraftByColumn(csvColumn)?.datastreamId ?? 
"" + + return sortDatastreams( + state.pipelineDatastreams.filter( + (datastream) => + datastream.thing_id === thingId && + (!isDatastreamMappedElsewhere(datastream.id, csvColumn) || + datastream.id === currentDatastreamId) + ) + ) +} + +export function updatePipelineMappingThing( + csvColumn: string, + thingId: string +): void { + syncPipelineMappingDrafts() + + const draft = mappingDraftByColumn(csvColumn) + if (!draft) return + + draft.thingId = thingId + + const selectedDatastream = datastreamById(draft.datastreamId) + if (!selectedDatastream || selectedDatastream.thing_id !== thingId) { + draft.datastreamId = "" + } + + syncValidatedColumnMappings() +} + +export function updatePipelineMappingDatastream( + csvColumn: string, + datastreamId: string +): void { + syncPipelineMappingDrafts() + + const draft = mappingDraftByColumn(csvColumn) + if (!draft) return + + if (!datastreamId) { + draft.datastreamId = "" + syncValidatedColumnMappings() + return + } + + const datastream = datastreamById(datastreamId) + if (!datastream) return + + const owner = state.pipelineMappingDrafts.find( + (candidate) => + candidate.csvColumn !== csvColumn && candidate.datastreamId === datastream.id + ) + + if (draft.datastreamId === datastream.id) { + draft.thingId = "" + draft.datastreamId = "" + syncValidatedColumnMappings() + return + } + + if (owner) { + return + } + + draft.thingId = datastream.thing_id + draft.datastreamId = datastream.id + + syncValidatedColumnMappings() +} + +export function clearPipelineMapping(csvColumn: string): void { + syncPipelineMappingDrafts() + + const draft = mappingDraftByColumn(csvColumn) + if (!draft) return + + draft.thingId = "" + draft.datastreamId = "" + + syncValidatedColumnMappings() +} + +export function buildPipelineColumnMappings(): ColumnMapping[] { + return state.pipelineMappingDrafts.flatMap((draft) => { + const datastream = datastreamById(draft.datastreamId) + if (!datastream) return [] + + return [ + { + csv_column: draft.csvColumn, + 
datastream_id: datastream.id, + datastream_name: datastream.name, + }, + ] + }) +} + +function syncValidatedColumnMappings(): void { + state.validatedColumnMappings = buildPipelineColumnMappings() +} + +function datastreamById(datastreamId: string): DatastreamSummary | null { + return ( + state.pipelineDatastreams.find( + (datastream) => datastream.id === datastreamId + ) ?? null + ) +} + +function mappingDraftByColumn(csvColumn: string): PipelineMappingDraft | null { + return ( + state.pipelineMappingDrafts.find((draft) => draft.csvColumn === csvColumn) ?? + null + ) +} + +function isDatastreamMappedElsewhere( + datastreamId: string, + csvColumn: string +): boolean { + return state.pipelineMappingDrafts.some( + (draft) => + draft.csvColumn !== csvColumn && draft.datastreamId === datastreamId + ) +} + +function sortDatastreams( + datastreams: DatastreamSummary[] +): DatastreamSummary[] { + return [...datastreams].sort((a, b) => { + const thingCompare = a.thing_name.localeCompare(b.thing_name) + if (thingCompare !== 0) return thingCompare + + const observedPropertyCompare = a.observed_property_name.localeCompare( + b.observed_property_name + ) + if (observedPropertyCompare !== 0) return observedPropertyCompare + + return a.name.localeCompare(b.name) + }) +} + +export function buildDatastreamBrowserEntries( + datastreams: DatastreamSummary[], + drafts: PipelineMappingDraft[], + sourceColumns: MappingSourceColumn[] +): MappingDatastreamBrowserEntry[] { + const sourceLabelByColumn = new Map( + sourceColumns.map((source) => [source.csvColumn, source.label]) + ) + const mappedColumnByDatastream = new Map( + drafts + .filter((draft) => draft.datastreamId) + .map((draft) => [draft.datastreamId, draft.csvColumn]) + ) + const entries: MappingDatastreamBrowserEntry[] = [] + let currentThingId = "" + + for (const datastream of sortDatastreams(datastreams)) { + if (datastream.thing_id !== currentThingId) { + currentThingId = datastream.thing_id + entries.push({ + kind: "thing", 
+ key: `thing-${datastream.thing_id}`, + thingId: datastream.thing_id, + thingName: datastream.thing_name, + }) + } + + const mappedCsvColumn = mappedColumnByDatastream.get(datastream.id) ?? null + + entries.push({ + kind: "datastream", + key: `datastream-${datastream.id}`, + datastream, + mappedCsvColumn, + mappedColumnLabel: mappedCsvColumn + ? sourceLabelByColumn.get(mappedCsvColumn) ?? mappedCsvColumn + : null, + }) + } + + return entries +} diff --git a/frontend/composables/usePipeline.ts b/frontend/composables/usePipeline.ts new file mode 100644 index 0000000..9cb9c2f --- /dev/null +++ b/frontend/composables/usePipeline.ts @@ -0,0 +1,972 @@ +import { computed } from "vue" + +import type { + JobConfig, + JobUpsertRequest, + CsvPreviewResponse, + CsvTransformerSettings, + CsvTransformerTimestampSettings, +} from "../api/hydroserver" +import { createJob, getConfig, getCsvPreview, updateJob } from "../api/hydroserver" +import { + createPipelineFieldStates, + type PipelineFieldName, + resetPipelineFieldStates, + validatePipelineFieldsForSubmit, +} from "../pipeline-submit" +import { loadPipelineDatastreams } from "./useMapping" +import { navigate } from "../router" +import { + createEmptyPipelineForm, + PREVIEW_PAGE_INCREMENT, + PREVIEW_PAGE_SIZE, + state, + type PipelineIdentifierType, +} from "./state" +import type { TimezoneMode, TimestampFormat } from "../models/timestamp" + +type PipelineEditSection = "file" | "setup" | "mappings" + +type DetectedTimestampPattern = Pick< + CsvTransformerTimestampSettings, + "format" | "customFormat" | "timezoneMode" +> + +type TimestampPatternDefinition = { + format: TimestampFormat + timezoneMode: TimezoneMode + customFormat?: string + pattern: RegExp +} + +const TIMESTAMP_PATTERNS: TimestampPatternDefinition[] = [ + { + format: "ISO8601", + timezoneMode: "embeddedOffset", + pattern: + /^\d{4}-\d{2}-\d{2}[ T]\d{2}:\d{2}(?::\d{2}(?:\.\d{1,6})?)?(?:Z|[+-]\d{2}:\d{2}|[+-]\d{4})$/i, + }, + { + format: "naive", + timezoneMode: 
"utc", + pattern: /^\d{4}-\d{2}-\d{2}(?:[ T]\d{2}:\d{2}(?::\d{2}(?:\.\d{1,6})?)?)?$/, + }, + { + format: "custom", + customFormat: "%m/%d/%Y %H:%M:%S", + timezoneMode: "utc", + pattern: /^\d{1,2}\/\d{1,2}\/\d{4}\s+\d{1,2}:\d{2}:\d{2}$/, + }, + { + format: "custom", + customFormat: "%m/%d/%Y %H:%M", + timezoneMode: "utc", + pattern: /^\d{1,2}\/\d{1,2}\/\d{4}\s+\d{1,2}:\d{2}$/, + }, + { + format: "custom", + customFormat: "%Y/%m/%d %H:%M:%S", + timezoneMode: "utc", + pattern: /^\d{4}\/\d{1,2}\/\d{1,2}\s+\d{1,2}:\d{2}:\d{2}$/, + }, + { + format: "custom", + customFormat: "%Y/%m/%d %H:%M", + timezoneMode: "utc", + pattern: /^\d{4}\/\d{1,2}\/\d{1,2}\s+\d{1,2}:\d{2}$/, + }, + { + format: "custom", + customFormat: "%m/%d/%Y", + timezoneMode: "utc", + pattern: /^\d{1,2}\/\d{1,2}\/\d{4}$/, + }, + { + format: "custom", + customFormat: "%Y/%m/%d", + timezoneMode: "utc", + pattern: /^\d{4}\/\d{1,2}\/\d{1,2}$/, + }, +] + +function parseDelimitedLine(line: string, delimiter: string): string[] { + if (!delimiter) return [line] + + const cells: string[] = [] + let current = "" + let inQuotes = false + + for (let i = 0; i < line.length; i++) { + const char = line[i] + if (char === '"') { + if (inQuotes && line[i + 1] === '"') { + current += '"' + i++ + } else { + inQuotes = !inQuotes + } + continue + } + + if (!inQuotes && line.startsWith(delimiter, i)) { + cells.push(current) + current = "" + i += delimiter.length - 1 + continue + } + + current += char + } + + cells.push(current) + return cells +} + +function normalizeHeaderName(value: string, index: number): string { + return value.trim() || `Column ${index + 1}` +} + +function preferredTimestampColumnIndex(headers: string[]): number { + const preferredIndex = headers.findIndex((header) => + /(time|date|stamp)/i.test(header) + ) + return preferredIndex >= 0 ? 
preferredIndex : 0 +} + +function previewHeadersForDetection( + preview: CsvPreviewResponse, + hasHeaderRow: boolean +): string[] { + const rows = preview.parsed_rows + const columnCount = rows.reduce((max, row) => Math.max(max, row.length), 0) + + if (!hasHeaderRow) { + return Array.from({ length: columnCount }, (_, index) => `Column ${index + 1}`) + } + + const headerIndex = Math.max((preview.detected_header_row ?? 1) - 1, 0) + const headerRow = rows[headerIndex] ?? [] + + return Array.from({ length: columnCount }, (_, index) => + normalizeHeaderName(headerRow[index] ?? "", index) + ) +} + +function detectTimestampPattern(value: string): DetectedTimestampPattern | null { + const trimmed = value.trim() + if (!trimmed) return null + + const matched = TIMESTAMP_PATTERNS.find(({ pattern }) => pattern.test(trimmed)) + if (!matched) return null + + return { + format: matched.format, + customFormat: matched.customFormat, + timezoneMode: matched.timezoneMode, + } +} + +function detectTimestampColumn( + preview: CsvPreviewResponse, + hasHeaderRow: boolean +): { columnIndex: number; pattern: DetectedTimestampPattern | null } { + const headers = previewHeadersForDetection(preview, hasHeaderRow) + const dataStartRow = Math.max( + (preview.detected_data_start_row ?? (hasHeaderRow ? 2 : 1)) - 1, + 0 + ) + const sampleRows = preview.parsed_rows + .slice(dataStartRow) + .filter((row) => row.some((value) => value.trim() !== "")) + .slice(0, 12) + + if (headers.length === 0 || sampleRows.length === 0) { + return { columnIndex: 0, pattern: null } + } + + let bestMatch: { + columnIndex: number + score: number + ratio: number + pattern: DetectedTimestampPattern | null + } | null = null + + for (let columnIndex = 0; columnIndex < headers.length; columnIndex++) { + const values = sampleRows + .map((row) => row[columnIndex]?.trim() ?? 
"") + .filter(Boolean) + + if (values.length === 0) continue + + const detectedPatterns = values + .map((value) => detectTimestampPattern(value)) + .filter((pattern): pattern is DetectedTimestampPattern => pattern !== null) + + const ratio = detectedPatterns.length / values.length + const headerBonus = /(time|date|stamp)/i.test(headers[columnIndex] ?? "") ? 0.15 : 0 + + const patternCounts = new Map() + for (const pattern of detectedPatterns) { + const key = `${pattern.format}|${pattern.customFormat ?? ""}|${pattern.timezoneMode}` + const current = patternCounts.get(key) + patternCounts.set(key, { + count: (current?.count ?? 0) + 1, + pattern, + }) + } + + const dominantPattern = Array.from(patternCounts.values()).sort( + (left, right) => right.count - left.count + )[0]?.pattern ?? null + + const score = ratio + headerBonus + if ( + bestMatch === null || + score > bestMatch.score || + (score === bestMatch.score && columnIndex < bestMatch.columnIndex) + ) { + bestMatch = { columnIndex, score, ratio, pattern: dominantPattern } + } + } + + if (!bestMatch || bestMatch.ratio < 0.6) { + return { columnIndex: 0, pattern: null } + } + + return { columnIndex: bestMatch.columnIndex, pattern: bestMatch.pattern } +} + +function resolveTimestampColumnName( + headers: string[], + identifierType: PipelineIdentifierType, + timestampKey: string +): string { + if (headers.length === 0) return "" + + if (identifierType === "index") { + const columnIndex = Number(timestampKey) + if ( + Number.isInteger(columnIndex) && + columnIndex >= 1 && + columnIndex <= headers.length + ) { + return headers[columnIndex - 1] + } + return "" + } + + return headers.includes(timestampKey) ? 
timestampKey : "" +} + +export const parsedPreviewRows = computed(() => { + if (!state.pipelinePreview) return [] + return state.pipelinePreview.raw_lines.map((line) => + parseDelimitedLine(line, state.pipelineForm.delimiter) + ) +}) + +export const previewHeaders = computed(() => { + const rows = parsedPreviewRows.value + const columnCount = rows.reduce((max, row) => Math.max(max, row.length), 0) + + if (!state.pipelineForm.hasHeaderRow) { + const dataRows = rows.slice(Math.max(state.pipelineForm.dataStartRow - 1, 0)) + const count = (dataRows.length > 0 ? dataRows : rows).reduce( + (max, row) => Math.max(max, row.length), + 0 + ) + return Array.from({ length: count }, (_, index) => `Column ${index + 1}`) + } + + const headerRow = rows[state.pipelineForm.headerRow - 1] ?? [] + return Array.from({ length: columnCount }, (_, index) => + normalizeHeaderName(headerRow[index] ?? "", index) + ) +}) + +export const selectedPreviewTimestampColumn = computed(() => + resolveTimestampColumnName( + previewHeaders.value, + state.pipelineForm.identifierType, + state.pipelineForm.timestamp.key + ) +) + +function markPipelineField( + field: PipelineFieldName, + nextState: "idle" | "checking" | "valid" | "invalid", + message?: string | null +): void { + state.pipelineFieldStates[field] = { + state: nextState, + message: message ?? 
null, + } +} + +function invalidateValidatedPipeline(): void { + state.pipelineReadyForMapping = false + state.validatedPipelineSettings = null + state.validatedColumnMappings = [] +} + +function validatePipelineForm(): boolean { + resetPipelineFieldStates(state.pipelineFieldStates) + + return validatePipelineFieldsForSubmit({ + form: state.pipelineForm, + hasPreview: state.pipelinePreview !== null, + previewHeaders: previewHeaders.value, + markField: markPipelineField, + }) +} + +function refreshPipelineValidation(): void { + if (!state.pipelineValidationAttempted) return + + validatePipelineForm() +} + +function syncSelectionsWithPreview(): void { + const headers = previewHeaders.value + if (headers.length === 0) return + + const preferredIndex = preferredTimestampColumnIndex(headers) + const preferredHeader = headers[preferredIndex] + + if (!state.pipelineForm.hasHeaderRow) { + state.pipelineForm.identifierType = "index" + } + + if (state.pipelineForm.identifierType === "index") { + const currentIndex = Number(state.pipelineForm.timestamp.key) + if ( + !Number.isInteger(currentIndex) || + currentIndex < 1 || + currentIndex > headers.length + ) { + state.pipelineForm.timestamp.key = String(preferredIndex + 1) + } + return + } + + if (!headers.includes(state.pipelineForm.timestamp.key)) { + state.pipelineForm.timestamp.key = preferredHeader + } +} + +export function canShowMorePreviewLines(): boolean { + if (!state.pipelinePreview) return false + return state.pipelinePreview.raw_lines.length < state.pipelinePreview.total_lines +} + +export function updateHeaderRowFromPreview(lineNumber: number): void { + invalidateValidatedPipeline() + state.pipelineForm.hasHeaderRow = true + state.pipelineForm.headerRow = lineNumber + if (state.pipelineForm.dataStartRow <= lineNumber) { + state.pipelineForm.dataStartRow = lineNumber + 1 + } + syncSelectionsWithPreview() + refreshPipelineValidation() +} + +export function updateDataStartRowFromPreview(lineNumber: number): void { + 
invalidateValidatedPipeline() + state.pipelineForm.dataStartRow = Math.max( + state.pipelineForm.hasHeaderRow ? 2 : 1, + lineNumber + ) + if ( + state.pipelineForm.hasHeaderRow && + state.pipelineForm.headerRow >= state.pipelineForm.dataStartRow + ) { + state.pipelineForm.headerRow = state.pipelineForm.dataStartRow - 1 + } + syncSelectionsWithPreview() + refreshPipelineValidation() +} + +export function applyPreviewLineSelection(lineNumber: number): void { + if (state.pipelineSelectionTarget === "header-row") { + updateHeaderRowFromPreview(lineNumber) + state.pipelineSelectionTarget = null + } else if (state.pipelineSelectionTarget === "data-start-row") { + updateDataStartRowFromPreview(lineNumber) + state.pipelineSelectionTarget = null + } +} + +export function applyPreviewColumnSelection(columnName: string): void { + if ( + state.pipelineSelectionTarget && + state.pipelineSelectionTarget !== "timestamp-column" + ) { + return + } + + invalidateValidatedPipeline() + state.pipelineForm.timestamp.key = + state.pipelineForm.identifierType === "index" + ? 
String(previewHeaders.value.indexOf(columnName) + 1) + : columnName + state.pipelineSelectionTarget = null + refreshPipelineValidation() +} + +export function setPipelineHasHeaderRow(enabled: boolean): void { + invalidateValidatedPipeline() + const headersBeforeToggle = previewHeaders.value + const currentVisibleTimestampColumn = resolveTimestampColumnName( + headersBeforeToggle, + state.pipelineForm.identifierType, + state.pipelineForm.timestamp.key + ) + + state.pipelineForm.hasHeaderRow = enabled + if (!enabled && state.pipelineSelectionTarget === "header-row") { + state.pipelineSelectionTarget = null + } + + if (!enabled) { + state.pipelineForm.identifierType = "index" + if (currentVisibleTimestampColumn) { + const currentIndex = headersBeforeToggle.indexOf(currentVisibleTimestampColumn) + if (currentIndex >= 0) { + state.pipelineForm.timestamp.key = String(currentIndex + 1) + } + } + state.pipelineForm.dataStartRow = Math.max(1, state.pipelineForm.dataStartRow) + } else { + state.pipelineForm.dataStartRow = Math.max(2, state.pipelineForm.dataStartRow) + if (state.pipelineForm.headerRow >= state.pipelineForm.dataStartRow) { + state.pipelineForm.headerRow = Math.max( + 1, + state.pipelineForm.dataStartRow - 1 + ) + } + } + + syncSelectionsWithPreview() + refreshPipelineValidation() +} + +export function setPipelineIdentifierType(identifierType: PipelineIdentifierType): void { + invalidateValidatedPipeline() + + if (!state.pipelineForm.hasHeaderRow && identifierType === "name") { + return + } + + const headers = previewHeaders.value + const currentVisibleTimestampColumn = resolveTimestampColumnName( + headers, + state.pipelineForm.identifierType, + state.pipelineForm.timestamp.key + ) + + state.pipelineForm.identifierType = identifierType + + if (headers.length === 0) { + state.pipelineForm.timestamp.key = + identifierType === "index" ? 
"1" : "timestamp" + refreshPipelineValidation() + return + } + + if (identifierType === "index") { + const currentIndex = headers.indexOf(currentVisibleTimestampColumn) + state.pipelineForm.timestamp.key = + currentIndex >= 0 + ? String(currentIndex + 1) + : String(preferredTimestampColumnIndex(headers) + 1) + } else { + state.pipelineForm.timestamp.key = + currentVisibleTimestampColumn || headers[preferredTimestampColumnIndex(headers)] + } + + refreshPipelineValidation() +} + +function syncTimestampFormat(format: TimestampFormat): void { + const timestamp = state.pipelineForm.timestamp + timestamp.format = format + + if (format === "custom") { + timestamp.customFormat = timestamp.customFormat || "%Y-%m-%d %H:%M:%S" + } else { + timestamp.customFormat = undefined + } + + if (format === "ISO8601") { + syncTimestampTimezone("embeddedOffset") + } else { + syncTimestampTimezone("utc") + } +} + +function syncTimestampTimezone(mode: TimezoneMode): void { + const timestamp = state.pipelineForm.timestamp + timestamp.timezoneMode = mode + + if (mode === "utc" || mode === "embeddedOffset") { + timestamp.timezone = undefined + } else if (mode === "fixedOffset") { + timestamp.timezone = "-0700" + } else if (mode === "daylightSavings") { + timestamp.timezone = "America/Denver" + } +} + +function applyDetectedTimestampPattern(pattern: DetectedTimestampPattern | null): void { + if (!pattern) return + + syncTimestampFormat(pattern.format) + if (pattern.format === "custom" && pattern.customFormat) { + state.pipelineForm.timestamp.customFormat = pattern.customFormat + } +} + +export function updatePipelineField(name: string, value: string): void { + invalidateValidatedPipeline() + + switch (name) { + case "file_path": + state.pipelineForm.filePath = value + state.pipelinePreview = null + state.pipelineSelectionTarget = null + state.pipelinePreviewRowsRequested = PREVIEW_PAGE_SIZE + break + case "header_row": + state.pipelineForm.headerRow = Math.max(1, Number(value) || 1) + if ( + 
state.pipelineForm.hasHeaderRow && + state.pipelineForm.dataStartRow <= state.pipelineForm.headerRow + ) { + state.pipelineForm.dataStartRow = state.pipelineForm.headerRow + 1 + } + syncSelectionsWithPreview() + break + case "data_start_row": + state.pipelineForm.dataStartRow = Math.max( + state.pipelineForm.hasHeaderRow ? 2 : 1, + Number(value) || 1 + ) + if ( + state.pipelineForm.hasHeaderRow && + state.pipelineForm.headerRow >= state.pipelineForm.dataStartRow + ) { + state.pipelineForm.headerRow = state.pipelineForm.dataStartRow - 1 + } + syncSelectionsWithPreview() + break + case "delimiter": + state.pipelineForm.delimiter = value || "," + syncSelectionsWithPreview() + break + case "timestamp_key": + state.pipelineForm.timestamp.key = value + syncSelectionsWithPreview() + break + case "timestamp_format": + if (value === "ISO8601" || value === "naive" || value === "custom") { + syncTimestampFormat(value) + } + break + case "custom_timestamp_format": + state.pipelineForm.timestamp.customFormat = value + break + case "timezone_mode": + if ( + value === "embeddedOffset" || + value === "utc" || + value === "fixedOffset" || + value === "daylightSavings" + ) { + syncTimestampTimezone(value) + } + break + case "timezone": + state.pipelineForm.timestamp.timezone = value + break + } + + refreshPipelineValidation() +} + +export async function loadPipelinePreview( + path: string, + rows = PREVIEW_PAGE_SIZE +): Promise { + invalidateValidatedPipeline() + + if (!path.trim()) { + return + } + + try { + const preview = await getCsvPreview(path.trim(), rows) + const shouldApplyDetectedDefaults = + !state.pipelinePreview || state.pipelineForm.filePath !== path.trim() + + state.pipelinePreview = preview + state.pipelineForm.filePath = path.trim() + + if (shouldApplyDetectedDefaults) { + state.pipelineForm.hasHeaderRow = preview.detected_header_row !== null + state.pipelineForm.headerRow = + preview.detected_header_row ?? 
state.pipelineForm.headerRow + state.pipelineForm.dataStartRow = + preview.detected_data_start_row ?? state.pipelineForm.dataStartRow + state.pipelineForm.delimiter = + preview.detected_delimiter || state.pipelineForm.delimiter + state.pipelineForm.identifierType = state.pipelineForm.hasHeaderRow + ? "name" + : "index" + + const detectedTimestamp = detectTimestampColumn( + preview, + state.pipelineForm.hasHeaderRow + ) + state.pipelineForm.timestamp.key = + state.pipelineForm.identifierType === "index" + ? String(detectedTimestamp.columnIndex + 1) + : previewHeadersForDetection( + preview, + state.pipelineForm.hasHeaderRow + )[detectedTimestamp.columnIndex] ?? "Column 1" + applyDetectedTimestampPattern(detectedTimestamp.pattern) + } + + state.pipelineSelectionTarget = null + syncSelectionsWithPreview() + state.pipelinePreviewRowsRequested = rows + refreshPipelineValidation() + } catch { + state.pipelinePreview = null + state.pipelineSelectionTarget = null + state.pipelinePreviewRowsRequested = PREVIEW_PAGE_SIZE + refreshPipelineValidation() + } +} + +export async function showMorePreviewLines(): Promise { + if (!state.pipelinePreview) return + + const nextRows = Math.min( + state.pipelinePreviewRowsRequested + PREVIEW_PAGE_INCREMENT, + state.pipelinePreview.total_lines + ) + await loadPipelinePreview(state.pipelineForm.filePath, nextRows) +} + +export async function browseForCsvPath(): Promise { + try { + const dialog = await import("@tauri-apps/plugin-dialog") + const selection = await dialog.open({ + directory: false, + multiple: false, + filters: [{ name: "CSV files", extensions: ["csv", "txt"] }], + }) + + if (typeof selection !== "string" || !selection) return + + updatePipelineField("file_path", selection) + await loadPipelinePreview(selection) + } catch { + return + } +} + +export function submitPipelineConfig(): void { + state.pipelineValidationAttempted = true + + const valid = validatePipelineForm() + if (!valid) { + invalidateValidatedPipeline() + return 
+ } + + state.validatedPipelineSettings = buildPipelineTransformerSettings() + state.pipelineReadyForMapping = true + navigate("jobs-new-mapping") +} + +export function buildPipelineTransformerSettings() { + const settings: CsvTransformerSettings = { + headerRow: + state.pipelineForm.hasHeaderRow && state.pipelineForm.identifierType === "name" + ? state.pipelineForm.headerRow + : null, + dataStartRow: state.pipelineForm.dataStartRow, + delimiter: state.pipelineForm.delimiter, + identifierType: state.pipelineForm.identifierType, + timestamp: { + ...state.pipelineForm.timestamp, + }, + } + + if (settings.timestamp.format !== "custom") { + delete settings.timestamp.customFormat + } + + if ( + settings.timestamp.timezoneMode !== "fixedOffset" && + settings.timestamp.timezoneMode !== "daylightSavings" + ) { + delete settings.timestamp.timezone + } + + return settings +} + +function basenameWithoutExtension(path: string): string { + const basename = path.split(/[\\/]/).filter(Boolean).at(-1)?.trim() ?? "" + if (!basename) return "" + + const extensionIndex = basename.lastIndexOf(".") + if (extensionIndex <= 0) { + return basename + } + + return basename.slice(0, extensionIndex) +} + +function pipelineFormFromJob(job: JobConfig) { + return { + filePath: job.file_path, + hasHeaderRow: job.file_config.headerRow !== null, + headerRow: job.file_config.headerRow ?? 1, + dataStartRow: job.file_config.dataStartRow, + delimiter: job.file_config.delimiter, + identifierType: job.file_config.identifierType, + timestamp: { + ...job.file_config.timestamp, + }, + } +} + +function pipelineMappingDraftsFromJob(job: JobConfig) { + return job.column_mappings.map((mapping) => ({ + csvColumn: mapping.csv_column, + thingId: + state.pipelineDatastreams.find( + (datastream) => datastream.id === mapping.datastream_id + )?.thing_id ?? 
"", + datastreamId: mapping.datastream_id, + })) +} + +function currentPipelineJob(): JobConfig | null { + const jobId = state.pipelineEditTarget?.jobId + if (!jobId) return null + + return state.config?.jobs.find((job) => job.id === jobId) ?? null +} + +async function preparePipelineEditFlow( + job: JobConfig, + section: PipelineEditSection +): Promise { + resetPipelineCreationFlow() + state.pipelineEditTarget = { + jobId: job.id, + name: job.name, + enabled: job.enabled, + scheduleMinutes: job.schedule_minutes, + } + state.pipelineEditorStartStep = section === "file" ? 1 : 2 + state.pipelineForm = pipelineFormFromJob(job) + + await loadPipelinePreview(job.file_path) + state.pipelineForm = pipelineFormFromJob(job) + state.pipelineFieldStates = createPipelineFieldStates() + state.pipelineValidationAttempted = false + state.validatedPipelineSettings = { + ...job.file_config, + timestamp: { + ...job.file_config.timestamp, + }, + } + state.pipelineMappingDrafts = job.column_mappings.map((mapping) => ({ + csvColumn: mapping.csv_column, + thingId: "", + datastreamId: mapping.datastream_id, + })) + state.validatedColumnMappings = [...job.column_mappings] + state.pipelineReadyForMapping = section === "mappings" + + if (section === "mappings") { + await loadPipelineDatastreams() + state.pipelineMappingDrafts = pipelineMappingDraftsFromJob(job) + navigate("jobs-new-mapping") + return + } + + navigate("jobs-new") +} + +function canCreatePipelineDatasource(): { ok: true } | { ok: false; message: string } { + if (!(state.connectionSummary?.ok && state.lastConnectionState === "connected")) { + return { + ok: false, + message: "Connect to HydroServer before creating a data source.", + } + } + + if (!(state.config?.server.workspace_id || state.authDraft.workspace_id)) { + return { + ok: false, + message: "Connect to a workspace before creating a data source.", + } + } + + if (!state.validatedPipelineSettings) { + return { + ok: false, + message: "Validate the CSV configuration 
before creating a data source.", + } + } + + if (state.validatedColumnMappings.length === 0) { + return { + ok: false, + message: "Map at least one CSV column to a datastream before creating a data source.", + } + } + + if (!state.pipelineForm.filePath.trim()) { + return { + ok: false, + message: "Choose a CSV file before creating a data source.", + } + } + + if (!basenameWithoutExtension(state.pipelineForm.filePath)) { + return { + ok: false, + message: "The selected CSV file must have a valid filename.", + } + } + + return { ok: true } +} + +export function resetPipelineCreationFlow(): void { + state.pipelineForm = createEmptyPipelineForm() + state.pipelinePreview = null + state.pipelineSelectionTarget = null + state.pipelineEditorStartStep = null + state.pipelinePreviewRowsRequested = PREVIEW_PAGE_SIZE + state.pipelineFieldStates = createPipelineFieldStates() + state.pipelineValidationAttempted = false + state.pipelineReadyForMapping = false + state.validatedPipelineSettings = null + state.pipelineMappingDrafts = [] + state.validatedColumnMappings = [] + state.pipelineEditTarget = null + state.pipelineCreateSubmitting = false +} + +export function abandonPipelineCreation(): void { + resetPipelineCreationFlow() + navigate("dashboard") +} + +export async function editPipelineSourceFile(jobId: string): Promise { + const job = state.config?.jobs.find((item) => item.id === jobId) + if (!job) { + return + } + + try { + await preparePipelineEditFlow(job, "file") + } catch { + state.pipelineEditTarget = { + jobId: job.id, + name: job.name, + enabled: job.enabled, + scheduleMinutes: job.schedule_minutes, + } + state.pipelineEditorStartStep = 1 + state.pipelineForm = pipelineFormFromJob(job) + state.pipelineFieldStates = createPipelineFieldStates() + navigate("jobs-new") + } +} + +export async function editPipelineCsvSetup(jobId: string): Promise { + await editPipelineSection(jobId, "setup") +} + +export async function editPipelineMappings(jobId: string): Promise { + 
await editPipelineSection(jobId, "mappings") +} + +async function editPipelineSection( + jobId: string, + section: PipelineEditSection +): Promise { + const job = state.config?.jobs.find((item) => item.id === jobId) + if (!job) { + return + } + + try { + await preparePipelineEditFlow(job, section) + } catch { + state.pipelineEditTarget = { + jobId: job.id, + name: job.name, + enabled: job.enabled, + scheduleMinutes: job.schedule_minutes, + } + state.pipelineEditorStartStep = 1 + state.pipelineForm = pipelineFormFromJob(job) + state.pipelineFieldStates = createPipelineFieldStates() + navigate("jobs-new") + } +} + +export async function createPipelineDatasource(): Promise { + if (state.pipelineCreateSubmitting) return + + const readiness = canCreatePipelineDatasource() + if (!readiness.ok) { + return + } + + const editingTarget = state.pipelineEditTarget + const existingJob = currentPipelineJob() + const name = editingTarget?.name ?? basenameWithoutExtension(state.pipelineForm.filePath) + const payload: JobUpsertRequest = { + name, + enabled: editingTarget?.enabled ?? existingJob?.enabled ?? true, + file_path: state.pipelineForm.filePath.trim(), + file_config: state.validatedPipelineSettings!, + column_mappings: state.validatedColumnMappings, + schedule_minutes: + editingTarget?.scheduleMinutes ?? existingJob?.schedule_minutes, + } + + state.pipelineCreateSubmitting = true + + try { + if (editingTarget) { + await updateJob(editingTarget.jobId, payload) + } else { + await createJob(payload) + } + state.config = await getConfig() + resetPipelineCreationFlow() + navigate("dashboard") + } catch (error) { + console.error( + editingTarget + ? "Couldn't update the data source right now." 
+ : "Couldn't create the data source right now.", + error + ) + } finally { + state.pipelineCreateSubmitting = false + } +} diff --git a/frontend/composables/useService.ts b/frontend/composables/useService.ts new file mode 100644 index 0000000..0866502 --- /dev/null +++ b/frontend/composables/useService.ts @@ -0,0 +1,75 @@ +import { + getServiceStatus, + installOsService, + restartOsService, + uninstallOsService, + type ServiceStatusResponse, +} from "../api/os-service" +import { navigate } from "../router" +import { state } from "./state" + +export function isServiceReady(status: ServiceStatusResponse | null | undefined): boolean { + if (!status) return false + if (!status.supported) return true + return status.installed && status.running +} + +export async function refreshServiceStatus(): Promise { + if (state.serviceActionSubmitting) { + return state.serviceStatus + } + + state.serviceStatusLoading = true + + try { + const status = await getServiceStatus() + state.serviceStatus = status + return status + } catch (error) { + state.serviceActionError = + error instanceof Error + ? error.message + : "Couldn't determine the background service status." + return null + } finally { + state.serviceStatusLoading = false + } +} + +async function runServiceAction( + action: () => Promise +): Promise { + if (state.serviceActionSubmitting) return + + state.serviceActionSubmitting = true + state.serviceActionError = null + + try { + const status = await action() + state.serviceStatus = status + + if (isServiceReady(status)) { + const { bootstrap } = await import("./useAppModel") + await bootstrap() + } else { + navigate("service") + } + } catch (error) { + state.serviceActionError = + error instanceof Error ? error.message : "The background service action failed." 
+ } finally { + state.serviceActionSubmitting = false + } +} + +export async function installBackgroundService(): Promise { + await runServiceAction(() => installOsService()) +} + +export async function restartBackgroundService(): Promise { + await runServiceAction(() => restartOsService()) +} + +export async function uninstallBackgroundService(): Promise { + await runServiceAction(() => uninstallOsService()) +} diff --git a/frontend/config.ts b/frontend/config.ts new file mode 100644 index 0000000..2014250 --- /dev/null +++ b/frontend/config.ts @@ -0,0 +1,2 @@ +export const apiBaseUrl = + (import.meta.env?.VITE_API_BASE_URL as string | undefined)?.trim() || "/api" diff --git a/frontend/main.ts b/frontend/main.ts new file mode 100644 index 0000000..1c0e1a9 --- /dev/null +++ b/frontend/main.ts @@ -0,0 +1,12 @@ +import "./generated.css" + +import { createApp } from "vue" + +import App from "./App.vue" +import { isWindowsPlatform } from "./api/runtime" + +if (typeof document !== "undefined" && isWindowsPlatform()) { + document.documentElement.classList.add("platform-windows") +} + +createApp(App).mount("#app") diff --git a/frontend/models/timestamp.ts b/frontend/models/timestamp.ts new file mode 100644 index 0000000..d7f6cfb --- /dev/null +++ b/frontend/models/timestamp.ts @@ -0,0 +1,114 @@ +export const FIXED_OFFSET_TIMEZONES = [ + { title: "UTC-12:00 (International Date Line West)", value: "-1200" }, + { title: "UTC-11:00 (Samoa Standard Time)", value: "-1100" }, + { title: "UTC-10:00 (Hawaii-Aleutian Standard Time)", value: "-1000" }, + { title: "UTC-09:00 (Alaska Standard Time)", value: "-0900" }, + { title: "UTC-08:00 (Pacific Standard Time)", value: "-0800" }, + { title: "UTC-07:00 (Mountain Standard Time)", value: "-0700" }, + { title: "UTC-06:00 (Central Standard Time)", value: "-0600" }, + { title: "UTC-05:00 (Eastern Standard Time)", value: "-0500" }, + { title: "UTC-04:30 (Venezuelan Standard Time)", value: "-0430" }, + { title: "UTC-04:00 (Atlantic 
Standard Time)", value: "-0400" }, + { title: "UTC-03:30 (Newfoundland Standard Time)", value: "-0330" }, + { title: "UTC-03:00 (Argentina Standard Time)", value: "-0300" }, + { title: "UTC-02:00 (Brazil Time)", value: "-0200" }, + { title: "UTC-01:00 (Azores Standard Time)", value: "-0100" }, + { title: "UTC+00:00 (Greenwich Mean Time)", value: "+0000" }, + { title: "UTC+01:00 (Central European Time)", value: "+0100" }, + { title: "UTC+02:00 (Eastern European Time)", value: "+0200" }, + { title: "UTC+03:00 (Moscow Standard Time)", value: "+0300" }, + { title: "UTC+03:30 (Iran Standard Time)", value: "+0330" }, + { title: "UTC+04:00 (Azerbaijan Standard Time)", value: "+0400" }, + { title: "UTC+04:30 (Afghanistan Time)", value: "+0430" }, + { title: "UTC+05:00 (Pakistan Standard Time)", value: "+0500" }, + { title: "UTC+05:30 (Indian Standard Time)", value: "+0530" }, + { title: "UTC+05:45 (Nepal Time)", value: "+0545" }, + { title: "UTC+06:00 (Bangladesh Standard Time)", value: "+0600" }, + { title: "UTC+06:30 (Cocos Islands Time)", value: "+0630" }, + { title: "UTC+07:00 (Indochina Time)", value: "+0700" }, + { title: "UTC+08:00 (China Standard Time)", value: "+0800" }, + { + title: "UTC+08:45 (Australia Central Western Standard Time)", + value: "+0845", + }, + { title: "UTC+09:00 (Japan Standard Time)", value: "+0900" }, + { title: "UTC+09:30 (Australian Central Standard Time)", value: "+0930" }, + { title: "UTC+10:00 (Australian Eastern Standard Time)", value: "+1000" }, + { title: "UTC+10:30 (Lord Howe Standard Time)", value: "+1030" }, + { title: "UTC+11:00 (Solomon Islands Time)", value: "+1100" }, + { title: "UTC+11:30 (Norfolk Island Time)", value: "+1130" }, + { title: "UTC+12:00 (Fiji Time)", value: "+1200" }, + { title: "UTC+12:45 (Chatham Islands Time)", value: "+1245" }, + { title: "UTC+13:00 (Tonga Time)", value: "+1300" }, + { title: "UTC+14:00 (Line Islands Time)", value: "+1400" }, +] as const + +export type FixedOffsetTimezone = + (typeof 
FIXED_OFFSET_TIMEZONES)[number]["value"] + +const WINTER = new Date("2025-01-01T00:00:00Z") +const SUMMER = new Date("2025-07-01T00:00:00Z") +const intlWithSupportedValues = Intl as typeof Intl & { + supportedValuesOf?: (key: string) => string[] +} + +function getLocaleOffset(date: Date, timeZone: string) { + const parts = new Intl.DateTimeFormat("en-US", { + timeZone, + timeZoneName: "shortOffset", + }).formatToParts(date) + + const offset = + parts.find((part) => part.type === "timeZoneName")?.value.replace("GMT", "") || + "" + if (offset === "+0" || offset === "-0" || offset === "") { + return "0" + } + return offset +} + +const supportedTimezones = + typeof intlWithSupportedValues.supportedValuesOf === "function" + ? intlWithSupportedValues.supportedValuesOf("timeZone") + : [Intl.DateTimeFormat().resolvedOptions().timeZone || "UTC"] + +export const DST_AWARE_TIMEZONES = supportedTimezones.map((timezone: string) => { + const winterOffset = getLocaleOffset(WINTER, timezone) + const summerOffset = getLocaleOffset(SUMMER, timezone) + return { + title: `${timezone} (Winter ${winterOffset} / Summer ${summerOffset})`, + value: timezone, + } +}) + +Object.freeze(DST_AWARE_TIMEZONES) + +export type DstAwareTimezone = (typeof DST_AWARE_TIMEZONES)[number]["value"] + +export const TIMESTAMP_FORMATS = [ + { + text: "Full ISO 8601 (YYYY-MM-DD hh:mm:ss.ssss+hh:mm)", + value: "ISO8601", + }, + { + text: "Timezone naive (YYYY-MM-DD hh:mm:ss)", + value: "naive", + }, + { text: "Custom Format", value: "custom" }, +] as const + +export type TimestampFormat = (typeof TIMESTAMP_FORMATS)[number]["value"] + +export type TimezoneMode = + | "utc" + | "daylightSavings" + | "fixedOffset" + | "embeddedOffset" + +export interface Timestamp { + key?: string + format: TimestampFormat + customFormat?: string + timezoneMode: TimezoneMode + timezone?: FixedOffsetTimezone | DstAwareTimezone +} diff --git a/frontend/pipeline-submit.ts b/frontend/pipeline-submit.ts new file mode 100644 index 
0000000..bac4d27 --- /dev/null +++ b/frontend/pipeline-submit.ts @@ -0,0 +1,178 @@ +import type { FieldValidationState } from "./auth-submit" +import { + DST_AWARE_TIMEZONES, + FIXED_OFFSET_TIMEZONES, +} from "./models/timestamp" +import type { PipelineFormState } from "./composables/state" + +export type PipelineFieldName = + | "file_path" + | "header_row" + | "data_start_row" + | "timestamp_key" + | "custom_timestamp_format" + | "timezone" + +export type PipelineFieldStates = Record + +export function createPipelineFieldStates(): PipelineFieldStates { + return { + file_path: emptyFieldValidationState(), + header_row: emptyFieldValidationState(), + data_start_row: emptyFieldValidationState(), + timestamp_key: emptyFieldValidationState(), + custom_timestamp_format: emptyFieldValidationState(), + timezone: emptyFieldValidationState(), + } +} + +export function resetPipelineFieldStates( + fieldStates: PipelineFieldStates +): void { + for (const field of Object.keys(fieldStates) as PipelineFieldName[]) { + fieldStates[field] = emptyFieldValidationState() + } +} + +export function validatePipelineFieldsForSubmit(params: { + form: PipelineFormState + hasPreview: boolean + previewHeaders: string[] + markField: ( + field: PipelineFieldName, + nextState: FieldValidationState["state"], + message?: string | null + ) => void +}): boolean { + const { form, hasPreview, previewHeaders, markField } = params + let valid = true + + if (!form.filePath.trim()) { + markField("file_path", "invalid", "Choose a CSV file path.") + valid = false + } else if (!hasPreview) { + markField( + "file_path", + "invalid", + "Load a CSV preview before continuing to mapping." 
+ ) + valid = false + } else { + markField("file_path", "valid") + } + + if (form.identifierType === "name") { + if (!Number.isInteger(form.headerRow) || form.headerRow <= 0) { + markField("header_row", "invalid", "Enter a header row number above 0.") + valid = false + } else if (form.headerRow >= form.dataStartRow) { + markField( + "header_row", + "invalid", + "Header row must be less than the data start row." + ) + valid = false + } else { + markField("header_row", "valid") + } + } else { + markField("header_row", "valid") + } + + if (!Number.isInteger(form.dataStartRow) || form.dataStartRow <= 0) { + markField("data_start_row", "invalid", "Enter a data start row above 0.") + valid = false + } else if ( + form.identifierType === "name" && + form.dataStartRow <= form.headerRow + ) { + markField( + "data_start_row", + "invalid", + "Data start row must be greater than the header row." + ) + valid = false + } else { + markField("data_start_row", "valid") + } + + if (form.identifierType === "index") { + const timestampIndex = Number(form.timestamp.key) + if (!Number.isInteger(timestampIndex) || timestampIndex <= 0) { + markField( + "timestamp_key", + "invalid", + "Enter a positive timestamp column number." + ) + valid = false + } else if (previewHeaders.length > 0 && timestampIndex > previewHeaders.length) { + markField( + "timestamp_key", + "invalid", + "Choose a timestamp column that exists in the preview." + ) + valid = false + } else { + markField("timestamp_key", "valid") + } + } else if (!form.timestamp.key.trim()) { + markField("timestamp_key", "invalid", "Choose a timestamp column.") + valid = false + } else if ( + previewHeaders.length > 0 && + !previewHeaders.includes(form.timestamp.key) + ) { + markField( + "timestamp_key", + "invalid", + "Choose a timestamp column that exists in the preview." + ) + valid = false + } else { + markField("timestamp_key", "valid") + } + + if (form.timestamp.format === "custom") { + if (!(form.timestamp.customFormat ?? 
"").trim()) { + markField( + "custom_timestamp_format", + "invalid", + "Enter the custom timestamp format." + ) + valid = false + } else { + markField("custom_timestamp_format", "valid") + } + } else { + markField("custom_timestamp_format", "valid") + } + + if ( + form.timestamp.timezoneMode === "fixedOffset" || + form.timestamp.timezoneMode === "daylightSavings" + ) { + const allowedTimezones = + form.timestamp.timezoneMode === "fixedOffset" + ? FIXED_OFFSET_TIMEZONES + : DST_AWARE_TIMEZONES + const timezone = form.timestamp.timezone ?? "" + + if (!timezone) { + markField("timezone", "invalid", "Choose a timezone value.") + valid = false + } else if (!allowedTimezones.some((option) => option.value === timezone)) { + markField("timezone", "invalid", "Choose a timezone from the list.") + valid = false + } else { + markField("timezone", "valid") + } + } else { + markField("timezone", "valid") + } + + return valid +} + +function emptyFieldValidationState(): FieldValidationState { + return { state: "idle", message: null } +} diff --git a/frontend/router.ts b/frontend/router.ts new file mode 100644 index 0000000..80f70e2 --- /dev/null +++ b/frontend/router.ts @@ -0,0 +1,53 @@ +export type AppRoute = "welcome" | "service" | "dashboard" | "jobs-new" | "jobs-new-mapping" + +const DEFAULT_ROUTE: AppRoute = "welcome" + +function currentHash(): string { + if (typeof window === "undefined") return "" + return window.location.hash +} + +export function getRouteFromHash(hash = currentHash()): AppRoute { + const normalized = hash.replace(/^#/, "").trim() + + switch (normalized) { + case "service": + return "service" + case "dashboard": + return "dashboard" + case "jobs/new/mapping": + return "jobs-new-mapping" + case "jobs/new": + return "jobs-new" + case "welcome": + case "": + return "welcome" + default: + return DEFAULT_ROUTE + } +} + +export function routeHref(route: AppRoute): string { + switch (route) { + case "service": + return "#service" + case "dashboard": + return 
"#dashboard" + case "jobs-new-mapping": + return "#jobs/new/mapping" + case "jobs-new": + return "#jobs/new" + case "welcome": + default: + return "#welcome" + } +} + +export function navigate(route: AppRoute): void { + if (typeof window === "undefined") return + + const nextHref = routeHref(route) + if (window.location.hash !== nextHref) { + window.location.hash = nextHref + } +} diff --git a/frontend/styles.css b/frontend/styles.css new file mode 100644 index 0000000..6ebe455 --- /dev/null +++ b/frontend/styles.css @@ -0,0 +1,2379 @@ +@import url("https://fonts.googleapis.com/css2?family=DM+Mono:wght@400;500&family=DM+Sans:wght@400;500;600;700&display=swap"); +@import "tailwindcss"; + +@theme { + --color-brand-50: #f0f9ff; + --color-brand-100: #e0f2fe; + --color-brand-400: #38bdf8; + --color-brand-500: #0ea5e9; + --color-brand-600: #0284c7; + --color-brand-700: #0369a1; + --color-success: #10b981; + --color-warning: #f59e0b; + --color-danger: #ef4444; + --color-surface: #ffffff; + --color-surface-secondary: #f8fafc; + --color-surface-border: #e2e8f0; + --font-sans: "DM Sans", system-ui, sans-serif; + --font-mono: "DM Mono", ui-monospace, monospace; + --shadow-card: 0 1px 3px 0 rgb(0 0 0 / 0.06), 0 1px 2px -1px rgb(0 0 0 / 0.06); + --shadow-card-hover: 0 10px 30px -14px rgb(14 116 144 / 0.16); + --animate-fade-in: fadeIn 180ms ease-out; +} + +@keyframes fadeIn { + 0% { + opacity: 0; + transform: translateY(6px); + } + + 100% { + opacity: 1; + transform: translateY(0); + } +} + +@keyframes loaderSpin { + 0% { + transform: rotate(0deg); + } + + 100% { + transform: rotate(360deg); + } +} + +@layer base { + html { + color-scheme: dark; + -webkit-font-smoothing: antialiased; + scrollbar-color: #5b6674 #2f3133; + scrollbar-gutter: stable; + } + + html.platform-windows { + scrollbar-gutter: auto; + } + + body { + min-height: 100vh; + margin: 0; + background: #edf1f5; + color: #1e293b; + font-family: var(--font-sans); + transition: background-color 140ms ease-out; + } + + 
button, + input, + select, + textarea { + font: inherit; + } + + a[href], + button:not(:disabled), + summary, + [role="button"] { + cursor: pointer; + } + + code { + font-family: var(--font-mono); + } + + #main-content { + min-height: 100vh; + background: #edf1f5; + } + + body.app-surface-welcome, + #main-content.main-content-welcome { + background: #2f3133; + } + + html:has(.pipeline-editor-shell-fullscreen), + body:has(.pipeline-editor-shell-fullscreen), + #main-content:has(.pipeline-editor-shell-fullscreen) { + overflow: hidden; + scrollbar-gutter: auto; + } + + * { + scrollbar-color: #5b6674 #2f3133; + } + + ::-webkit-scrollbar { + width: 12px; + height: 12px; + } + + ::-webkit-scrollbar-track { + background: #2f3133; + } + + ::-webkit-scrollbar-thumb { + background: #5b6674; + border: 3px solid #2f3133; + border-radius: 999px; + } + + ::-webkit-scrollbar-thumb:hover { + background: #708092; + } + + ::-webkit-scrollbar-corner { + background: #2f3133; + } +} + +@layer components { + .nav-item { + @apply flex h-10 w-10 items-center justify-center rounded-xl text-slate-400 transition-colors hover:bg-slate-100 hover:text-slate-600; + } + + .nav-item-active { + @apply bg-brand-50 text-brand-600; + } + + .btn-primary { + @apply inline-flex items-center justify-center gap-2 rounded-lg bg-brand-600 px-4 py-2 text-sm font-medium text-white transition-colors hover:bg-brand-700 disabled:cursor-not-allowed disabled:opacity-60 disabled:hover:bg-brand-600; + } + + .btn-ghost { + @apply inline-flex items-center justify-center gap-2 rounded-lg px-4 py-2 text-sm font-medium text-slate-600 transition-colors hover:bg-slate-100; + } + + .btn-danger { + @apply inline-flex items-center justify-center gap-2 rounded-lg px-4 py-2 text-sm font-medium text-red-600 transition-colors hover:bg-red-50; + } + + .account-menu { + @apply relative; + } + + .account-menu-button { + @apply inline-flex h-10 w-10 items-center justify-center rounded-full border border-slate-200 bg-white 
text-slate-600 transition-colors hover:border-slate-300 hover:bg-slate-50 hover:text-slate-900; + } + + .account-menu-button[aria-expanded="true"] { + @apply border-slate-300 bg-slate-100 text-slate-900; + } + + .account-menu-icon { + @apply h-5 w-5; + } + + .account-menu-panel { + @apply absolute right-0 top-[calc(100%+0.75rem)] z-50 w-[20rem] rounded-2xl border border-slate-200 bg-white p-4 shadow-2xl; + } + + .account-menu-copy { + @apply min-w-0; + } + + .account-menu-eyebrow { + @apply text-[0.6875rem] font-semibold uppercase tracking-[0.16em] text-slate-500; + } + + .account-menu-title { + @apply mt-2 text-base font-semibold leading-tight text-slate-900; + } + + .account-menu-meta { + @apply mt-1 truncate text-sm text-slate-500; + } + + .account-menu-details { + @apply mt-4 grid gap-2; + } + + .account-menu-detail { + @apply grid gap-0.5 rounded-xl bg-slate-50 px-3 py-2; + } + + .account-menu-detail dt { + @apply text-[0.6875rem] font-medium uppercase tracking-[0.12em] text-slate-500; + } + + .account-menu-detail dd { + @apply text-sm font-medium text-slate-800; + } + + .account-menu-form { + @apply mt-4 flex flex-col gap-3; + } + + .account-menu-field { + @apply gap-1.5; + } + + .account-menu-inline-action, + .account-menu-cancel { + @apply w-full; + } + + .account-menu-input { + @apply h-10; + } + + .account-menu-actions { + @apply mt-1 flex flex-col gap-2; + } + + .account-menu-save, + .account-menu-disconnect { + @apply w-full; + } + + .status-dot { + @apply inline-block h-2.5 w-2.5 rounded-full; + } + + .page-shell { + @apply mx-auto flex min-h-screen w-full max-w-6xl flex-col gap-8 px-8 py-10; + } + + .page-header { + @apply flex flex-col gap-5 md:flex-row md:items-end md:justify-between; + } + + .wizard-header { + @apply fixed inset-x-0 top-0 z-40 mx-auto w-full md:items-start; + } + + .wizard-header-bar { + @apply mx-auto flex w-full max-w-6xl flex-col gap-4 px-8 py-4 md:flex-row md:items-start md:justify-between; + } + + .wizard-title-block { + 
@apply min-w-0 flex-1 pr-4; + } + + .wizard-actions { + @apply pt-0 md:shrink-0 md:justify-end; + } + + .header-utility-buttons { + @apply flex items-center gap-3; + } + + .wizard-nav-button { + @apply whitespace-nowrap; + } + + .wizard-nav-glyph { + @apply text-[0.75rem] font-semibold tracking-[-0.02em]; + } + + .service-layout { + @apply flex max-w-[44rem] flex-col gap-10 pt-6; + } + + .service-card { + @apply border-0 bg-transparent p-0 shadow-none; + } + + .service-card-muted { + @apply bg-transparent; + } + + .service-title { + @apply mt-3 max-w-3xl; + } + + .service-subtitle { + @apply mt-3 text-xl font-semibold text-slate-100; + } + + .service-copy { + @apply mt-4 max-w-3xl text-sm leading-7 text-slate-300; + } + + .service-actions { + @apply mt-6 flex flex-wrap items-center justify-end gap-3; + } + + .service-primary-button { + @apply min-w-[16rem]; + } + + .service-button-spinner { + @apply inline-block h-4 w-4 rounded-full border-2 border-white/30 border-t-white; + animation: loaderSpin 800ms linear infinite; + } + + .service-shell.page-shell { + @apply max-w-6xl px-8 pt-28 md:pt-24; + } + + .service-shell .notice-info { + @apply bg-sky-950/40 text-sky-200; + } + + .service-shell .notice-success { + @apply bg-emerald-950/40 text-emerald-200; + } + + .service-shell .notice-error { + @apply bg-red-950/40 text-red-200; + } + + .wizard-step-label { + @apply text-[0.6875rem] font-medium uppercase tracking-[0.12em] text-slate-500; + } + + .wizard-page-title { + @apply mt-1 text-[1.25rem] font-semibold leading-tight tracking-[-0.01em] text-slate-900; + } + + .page-title { + @apply text-3xl font-semibold tracking-tight text-slate-900; + } + + .page-copy { + @apply mt-2 max-w-2xl text-sm leading-6 text-slate-600; + } + + .eyebrow { + @apply text-xs font-semibold uppercase tracking-[0.24em] text-brand-600; + } + + .settings-card, + .welcome-card, + .auth-card, + .summary-card, + .pipeline-form, + .preview-card, + .pipeline-subcard, + .job-card, + .empty-panel { + 
border-color: var(--color-surface-border); + box-shadow: var(--shadow-card); + @apply rounded-2xl border bg-white; + } + + .settings-card, + .auth-card { + @apply w-full max-w-3xl overflow-hidden; + } + + .auth-header { + @apply flex flex-col items-center; + } + + .auth-app-icon { + @apply mb-5 h-11 w-11 shrink-0; + filter: drop-shadow(0 8px 18px rgb(14 116 144 / 0.14)); + } + + .auth-header .page-title { + @apply text-[1.5rem] font-semibold leading-tight; + } + + .auth-intro { + @apply grid gap-4 rounded-xl border border-slate-200 bg-slate-50/80 p-4 md:grid-cols-2; + } + + .auth-intro-block { + @apply flex flex-col gap-2; + } + + .auth-intro-label { + @apply text-xs font-semibold uppercase tracking-[0.18em] text-slate-500; + } + + .summary-card { + @apply flex flex-col gap-5 p-6 md:flex-row md:items-center md:justify-between; + } + + .summary-card-copy { + @apply flex flex-col gap-2; + } + + .summary-inline { + @apply mt-1 flex flex-wrap items-center gap-3; + } + + .summary-meta { + @apply text-sm text-slate-500; + } + + .card-section { + @apply flex flex-col gap-4 border-b border-slate-200 px-6 py-6 last:border-b-0; + } + + .muted-section { + @apply bg-slate-50/60; + } + + .section-title { + @apply text-sm font-semibold text-slate-800; + } + + .section-copy { + @apply text-sm leading-6 text-slate-600; + } + + .field { + @apply flex flex-col gap-1.5; + } + + .field-label-row { + @apply flex items-center justify-between gap-4; + } + + .field-hint { + @apply text-xs leading-5 text-slate-500; + } + + .label { + @apply text-xs font-medium uppercase tracking-wide text-slate-500; + } + + .input { + @apply w-full rounded-lg border border-slate-200 bg-white px-3 py-2 text-sm text-slate-800 placeholder:text-slate-400 focus:border-brand-500 focus:outline-none focus:ring-2 focus:ring-sky-100; + } + + .field-error { + @apply text-sm text-red-600; + } + + .button-row { + @apply flex flex-wrap items-center gap-3 pt-2; + } + + .button-row-between { + @apply justify-between; + } 
+ + .button-row-tight { + @apply gap-2 pt-0; + } + + .button-row-end { + @apply justify-end; + } + + .btn-link { + @apply inline-flex items-center gap-2 text-sm font-medium text-brand-700 hover:text-brand-600; + } + + .text-link { + @apply inline-flex text-sm font-medium text-brand-700 underline underline-offset-3 hover:text-brand-600; + } + + .label-link { + @apply text-sm text-slate-500 hover:text-slate-700; + } + + .field-picker { + @apply inline-flex items-center justify-center rounded-full border border-slate-300 px-3 py-1 text-[11px] font-semibold uppercase tracking-[0.16em] text-slate-500 transition-colors hover:border-slate-400 hover:text-slate-700; + } + + .field-picker-active { + @apply text-white; + } + + .field-picker-header.field-picker-active { + @apply border-sky-400 bg-sky-500/90; + } + + .field-picker-data.field-picker-active { + @apply border-emerald-400 bg-emerald-500/90; + } + + .field-picker-timestamp.field-picker-active { + @apply border-amber-400 bg-amber-500/90 text-slate-950; + } + + .auth-toggle-group { + @apply flex flex-col items-center gap-1 py-1; + } + + .auth-divider-label { + @apply text-xs text-slate-400; + } + + .auth-toggle { + @apply mx-auto w-fit cursor-pointer appearance-none border-0 bg-transparent p-0 text-sm text-brand-600 underline underline-offset-2 hover:text-brand-700 hover:decoration-brand-600; + } + + .onboarding-list { + @apply m-0 flex list-decimal flex-col gap-2 pl-5 text-sm leading-6 text-slate-600; + } + + .onboarding-list-compact { + @apply gap-1.5; + } + + .notice-success, + .notice-error, + .notice-info { + @apply rounded-lg px-4 py-3 text-sm; + } + + .notice-success { + @apply bg-emerald-50 text-emerald-700; + } + + .notice-error { + @apply bg-red-50 text-red-700; + } + + .notice-info { + @apply bg-brand-50 text-brand-700; + } + + .welcome-shell { + @apply flex min-h-screen w-full items-stretch bg-[#2f3133]; + } + + .loading-shell { + @apply flex min-h-screen w-full items-center justify-center bg-[#2f3133]; + 
} + + .loading-spinner { + @apply h-12 w-12 rounded-full border-4 border-slate-700 border-t-brand-400; + animation: loaderSpin 800ms linear infinite; + } + + .welcome-card { + @apply w-full max-w-2xl p-8 md:p-10; + } + + .welcome-shell .auth-card { + @apply flex w-full max-w-none flex-col justify-center rounded-none border-0 bg-transparent shadow-none; + } + + .welcome-shell .card-section { + @apply mx-auto w-full max-w-[42rem] gap-5 border-0 px-10 py-0; + } + + .welcome-shell .card-section + .card-section { + @apply mt-8; + } + + .welcome-shell .auth-app-icon { + @apply h-32 w-32; + filter: drop-shadow(0 10px 24px rgb(0 0 0 / 0.24)); + } + + .welcome-shell .page-title { + @apply text-slate-50; + } + + .welcome-shell .label { + @apply text-slate-200; + } + + .welcome-shell .label-link { + @apply text-slate-400 hover:text-slate-200; + } + + .welcome-shell .auth-divider-label { + @apply text-slate-500; + } + + .welcome-shell .auth-toggle { + @apply text-brand-400 hover:text-brand-500 hover:decoration-brand-400; + } + + .welcome-shell .input { + @apply border-slate-700 bg-[#1f2022] text-slate-100 placeholder:text-slate-500 focus:border-brand-500 focus:ring-sky-950; + } + + .welcome-shell .field-error { + @apply text-red-400; + } + + .welcome-shell .notice-error { + @apply bg-red-950/40 text-red-200; + } + + .welcome-shell .notice-info { + @apply bg-sky-950/40 text-sky-200; + } + + .welcome-shell .notice-success { + @apply bg-emerald-950/40 text-emerald-200; + } + + .welcome-shell .btn-ghost { + @apply text-slate-300 hover:bg-white/6 hover:text-slate-100; + } + + .onboarding-shell .page-title { + @apply text-slate-50; + } + + .onboarding-shell .wizard-step-label { + color: #9ea3aa; + } + + .onboarding-shell .wizard-page-title { + color: #f3f4f6; + } + + .onboarding-shell .account-menu-button { + border-color: rgb(255 255 255 / 0.1); + background: rgb(255 255 255 / 0.05); + color: #e5e7eb; + } + + .onboarding-shell .account-menu-button:hover, + .onboarding-shell 
.account-menu-button[aria-expanded="true"] { + border-color: rgb(255 255 255 / 0.16); + background: rgb(255 255 255 / 0.1); + color: #ffffff; + } + + .onboarding-shell .account-menu-panel { + border-color: rgb(255 255 255 / 0.08); + background: #242729; + box-shadow: 0 24px 60px rgb(0 0 0 / 0.34); + } + + .onboarding-shell .account-menu-eyebrow, + .onboarding-shell .account-menu-detail dt { + color: #9ea3aa; + } + + .onboarding-shell .account-menu-title, + .onboarding-shell .account-menu-detail dd { + color: #f3f4f6; + } + + .onboarding-shell .account-menu-meta { + color: #b0b7c0; + } + + .onboarding-shell .account-menu-detail { + background: rgb(255 255 255 / 0.045); + } + + .onboarding-shell .account-menu-panel .label { + @apply text-slate-300; + } + + .onboarding-shell .account-menu-input { + @apply border-slate-700 bg-[#1f2022] text-slate-100 placeholder:text-slate-500 focus:border-brand-500 focus:ring-sky-950; + } + + .onboarding-shell .account-menu-panel .notice-error { + @apply bg-red-950/40 text-red-200; + } + + .onboarding-shell .account-menu-panel .notice-success { + @apply bg-emerald-950/40 text-emerald-200; + } + + .onboarding-shell .wizard-header { + background: rgb(47 49 51 / 0.98); + border-bottom: 1px solid rgb(255 255 255 / 0.06); + backdrop-filter: blur(14px); + } + + .onboarding-shell.page-shell { + @apply pt-36 md:pt-32; + } + + .pipeline-editor-shell.page-shell { + @apply max-w-7xl px-8 pt-28 md:px-8 md:pt-24; + } + + .pipeline-editor-shell .wizard-header-bar { + @apply max-w-7xl px-8 py-3; + } + + .pipeline-editor-shell-fullscreen.page-shell { + @apply max-w-none gap-0 overflow-hidden px-0 pb-0; + height: 100vh; + padding-top: calc(4.75rem - 2px); + } + + .pipeline-editor-shell-fullscreen .wizard-header-bar { + @apply max-w-none px-8; + } + + .onboarding-shell .page-copy { + @apply text-slate-300; + } + + .onboarding-shell .eyebrow { + @apply text-brand-400; + } + + .onboarding-shell .summary-card, + .onboarding-shell .pipeline-form, + 
.onboarding-shell .preview-card, + .onboarding-shell .pipeline-subcard, + .onboarding-shell .empty-panel { + @apply border-0 bg-[#181a1d] text-slate-100 shadow-none; + } + + .onboarding-shell .preview-card { + @apply rounded-none border-0 bg-transparent p-0; + } + + .onboarding-shell .pipeline-subcard { + @apply bg-[#202427]; + } + + .onboarding-shell .mapping-subcard { + @apply bg-transparent p-0; + } + + .onboarding-shell .mapping-pane-label { + @apply text-slate-500; + } + + .onboarding-shell .mapping-pane-copy { + @apply text-slate-400; + } + + .onboarding-shell .mapping-pane-chip { + @apply bg-sky-950/50 text-sky-200; + } + + .onboarding-shell .mapping-column-button { + @apply border-slate-800 bg-[#141719] hover:border-slate-700 hover:bg-[#181c1f]; + } + + .onboarding-shell .mapping-column-button-active { + @apply border-emerald-500 bg-emerald-950/30; + } + + .onboarding-shell .mapping-column-button-mapped { + @apply border-sky-700 bg-sky-950/25; + } + + .onboarding-shell .mapping-column-dot { + @apply bg-slate-600; + } + + .onboarding-shell .mapping-column-name, + .onboarding-shell .mapping-thing-header, + .onboarding-shell .mapping-datastream-title { + @apply text-slate-100; + } + + .onboarding-shell .mapping-datastream-card { + @apply border-slate-800 bg-[#141719] hover:border-slate-700 hover:bg-[#181c1f]; + } + + .onboarding-shell .mapping-datastream-card-active { + @apply border-emerald-500 bg-emerald-950/30; + } + + .onboarding-shell .mapping-datastream-card-occupied { + @apply border-sky-700 bg-sky-950/25; + } + + .onboarding-shell .mapping-datastream-meta { + @apply text-slate-400; + } + + .onboarding-shell .mapping-linked-badge { + @apply bg-emerald-900/50 text-emerald-200; + } + + .onboarding-shell .mapping-occupied-label { + @apply bg-sky-900/50 text-sky-200; + } + + .onboarding-shell .mapping-connector-shell { + border-color: rgb(255 255 255 / 0.12); + background: #303133; + } + + .onboarding-shell .mapping-connector-panel + 
.mapping-connector-panel, + .onboarding-shell .mapping-connector-header, + .onboarding-shell .mapping-connector-footer { + border-color: rgb(255 255 255 / 0.1); + } + + .onboarding-shell .mapping-connector-title, + .onboarding-shell .mapping-connector-header-meta, + .onboarding-shell .mapping-connector-footer, + .onboarding-shell .mapping-connector-section, + .onboarding-shell .mapping-datastream-item-detail { + color: #aba69e; + } + + .onboarding-shell .mapping-datastream-sticky { + background: #303133; + box-shadow: 0 1px 0 #303133; + } + + .onboarding-shell .mapping-connector-header-count { + color: #e8e3db; + } + + .onboarding-shell .mapping-connector-footer b { + color: #d5d1cb; + } + + .onboarding-shell .mapping-column-item { + border-color: rgb(255 255 255 / 0.08); + background: rgb(255 255 255 / 0.045); + box-shadow: inset 0 1px 0 rgb(255 255 255 / 0.02); + } + + .onboarding-shell .mapping-datastream-item { + border-color: rgb(255 255 255 / 0.05); + background: rgb(255 255 255 / 0.025); + } + + .onboarding-shell .mapping-column-item:hover, + .onboarding-shell .mapping-datastream-item:hover { + border-color: rgb(255 255 255 / 0.12); + background: #34363a; + } + + .onboarding-shell .mapping-column-item-selected { + border-color: rgb(255 255 255 / 0.12); + background: #35373a; + } + + .onboarding-shell .mapping-column-item-selected .mapping-item-dot { + background: #9fb6d8; + } + + .onboarding-shell .mapping-item-dot { + background: rgb(255 255 255 / 0.14); + } + + .onboarding-shell .mapping-item-badge-filled { + color: #ffffff; + } + + .onboarding-shell .mapping-column-item-name, + .onboarding-shell .mapping-datastream-item-name { + @apply text-slate-100; + } + + .onboarding-shell .mapping-column-item-target { + color: #918b83; + } + + .onboarding-shell .mapping-connector-item-mapped { + background-color: var(--mapping-surface); + border-color: var(--mapping-border); + color: var(--mapping-text); + box-shadow: none; + } + + .onboarding-shell 
.mapping-connector-item-mapped:hover { + background-color: var(--mapping-surface); + border-color: var(--mapping-border); + } + + .onboarding-shell .mapping-connector-item-mapped .mapping-column-item-name, + .onboarding-shell + .mapping-connector-item-mapped + .mapping-datastream-item-name, + .onboarding-shell + .mapping-connector-item-mapped + .mapping-datastream-item-detail { + color: var(--mapping-text); + } + + .onboarding-shell .mapping-datastream-meta-button { + border-color: rgb(255 255 255 / 0.12); + background: rgb(255 255 255 / 0.04); + color: #d5d1cb; + } + + .onboarding-shell .mapping-datastream-meta-button:hover { + border-color: rgb(255 255 255 / 0.18); + background: rgb(255 255 255 / 0.08); + } + + .onboarding-shell .mapping-datastream-modal { + border-color: rgb(255 255 255 / 0.12); + background: #232628; + color: #e8e3db; + } + + .onboarding-shell .mapping-datastream-modal-header, + .onboarding-shell .mapping-datastream-modal-footer { + border-color: rgb(255 255 255 / 0.1); + } + + .onboarding-shell .mapping-datastream-modal-kicker, + .onboarding-shell .mapping-datastream-metadata-item dt { + color: #aba69e; + } + + .onboarding-shell .mapping-datastream-metadata-section-title, + .onboarding-shell .mapping-datastream-metadata-item dd { + color: #f3efe9; + } + + .onboarding-shell .mapping-datastream-modal-close { + border-color: rgb(255 255 255 / 0.12); + background: rgb(255 255 255 / 0.04); + color: #d5d1cb; + } + + .onboarding-shell .mapping-datastream-modal-close:hover { + border-color: rgb(255 255 255 / 0.18); + background: rgb(255 255 255 / 0.08); + color: #f3efe9; + } + + .onboarding-shell .mapping-datastream-metadata-section { + border-color: rgb(255 255 255 / 0.08); + background: rgb(255 255 255 / 0.04); + } + + .onboarding-shell .mapping-datastream-metadata-section-title { + border-color: rgb(255 255 255 / 0.08); + } + + .onboarding-shell .mapping-datastream-metadata-list { + background: rgb(255 255 255 / 0.08); + } + + .onboarding-shell 
.mapping-datastream-metadata-item { + background: #232628; + } + + .onboarding-shell .mapping-datastream-modal-state { + color: #aba69e; + } + + .onboarding-shell .mapping-datastream-modal-state-error { + color: #f0a2a2; + } + + .onboarding-shell .mapping-connector-item-mapped .mapping-column-item-target { + color: var(--mapping-badge); + } + + .onboarding-shell + .mapping-datastream-item-disabled:not(.mapping-connector-item-mapped) { + opacity: 0.52; + } + + .onboarding-shell .mapping-connector-divider { + background: rgb(255 255 255 / 0.08); + } + + .onboarding-shell .mapping-thing-group { + color: #85817a; + } + + .onboarding-shell .mapping-row { + @apply border-0 bg-[#16191b]; + } + + .onboarding-shell .mapping-summary-card { + @apply border-0 bg-[#111315]; + } + + .onboarding-shell .mapping-summary-card-empty { + @apply bg-transparent; + } + + .onboarding-shell .section-title, + .onboarding-shell .mapping-source, + .onboarding-shell .mapping-summary-title { + @apply text-slate-100; + } + + .onboarding-shell .section-copy, + .onboarding-shell .summary-meta, + .onboarding-shell .job-meta, + .onboarding-shell .mapping-help, + .onboarding-shell .field-hint, + .onboarding-shell .label { + @apply text-slate-400; + } + + .onboarding-shell .input { + @apply border-transparent bg-[#111315] text-slate-100 placeholder:text-slate-500 focus:border-brand-500 focus:ring-sky-950; + } + + .onboarding-shell .btn-ghost { + @apply text-slate-300 hover:bg-white/6 hover:text-slate-100; + } + + .onboarding-shell .btn-danger { + @apply text-red-300 hover:bg-red-950/40; + } + + .onboarding-shell .pill-success { + @apply bg-emerald-950/50 text-emerald-200; + } + + .onboarding-shell .pill-warning { + @apply bg-amber-950/50 text-amber-200; + } + + .onboarding-shell .pill-danger { + @apply bg-red-950/50 text-red-200; + } + + .onboarding-shell .pill-info { + @apply bg-sky-950/50 text-sky-200; + } + + .onboarding-shell .pill-muted { + @apply bg-slate-800 text-slate-300; + } + + 
.onboarding-shell .notice-error { + @apply bg-red-950/40 text-red-200; + } + + .onboarding-shell .notice-info { + @apply bg-sky-950/40 text-sky-200; + } + + .onboarding-shell .notice-success { + @apply bg-emerald-950/40 text-emerald-200; + } + + .onboarding-shell .validation-panel { + @apply border-red-900/60 bg-red-950/35; + } + + .onboarding-shell .validation-list { + @apply text-red-200; + } + + .onboarding-shell .field-picker { + @apply border-slate-700 text-slate-300 hover:border-slate-500 hover:text-slate-100; + } + + .onboarding-shell .pipeline-source-card, + .onboarding-shell .preview-shell, + .onboarding-shell .transformer-section, + .onboarding-shell .preview-table-shell { + border-color: rgb(255 255 255 / 0.1); + } + + .onboarding-shell .pipeline-source-card, + .onboarding-shell .preview-shell { + background: #303133; + } + + .onboarding-shell .transformer-section-header, + .onboarding-shell .preview-panel-header, + .onboarding-shell .preview-panel-footer, + .onboarding-shell .preview-panel + .preview-panel { + border-color: rgb(255 255 255 / 0.1); + } + + .onboarding-shell .preview-panel-label, + .onboarding-shell .transformer-section-kicker, + .onboarding-shell .preview-panel-meta { + color: #aba69e; + } + + .onboarding-shell .preview-panel-title, + .onboarding-shell .transformer-section .section-title { + color: #f3f4f6; + } + + .onboarding-shell .preview-guidance, + .onboarding-shell .transformer-section .field-hint, + .onboarding-shell .transformer-section .preview-toggle, + .onboarding-shell .preview-panel-footer { + color: #9ea3aa; + } + + .onboarding-shell .pipeline-source-field .label, + .onboarding-shell .transformer-section .label { + color: #aba69e; + font-size: 0.6875rem; + font-weight: 500; + letter-spacing: 0.02em; + text-transform: none; + } + + .onboarding-shell .transformer-section { + background: rgb(255 255 255 / 0.035); + } + + .onboarding-shell .transformer-section .input { + min-height: 2.25rem; + border-color: rgb(255 255 255 / 
0.08); + background: #202225; + } + + .onboarding-shell .transformer-section .input:disabled { + opacity: 0.55; + } + + .onboarding-file-form { + @apply flex w-full max-w-3xl flex-col gap-0; + } + + .onboarding-file-form .button-row { + @apply pt-1; + } + + .pipeline-editor-workspace { + @apply flex flex-1 flex-col gap-4; + } + + .pipeline-editor-shell-fullscreen .pipeline-editor-workspace { + @apply min-h-0 gap-0 overflow-hidden; + margin-top: -3px; + } + + .pipeline-mapping-workspace { + @apply flex flex-1 min-h-0 flex-col; + } + + .pipeline-editor-shell-fullscreen .pipeline-mapping-workspace { + @apply overflow-hidden p-0; + margin-top: 0; + } + + .pipeline-editor-shell-fullscreen .pipeline-mapping-workspace > .empty-panel { + @apply min-h-0 flex-1; + } + + .pipeline-editor-workspace-empty { + @apply items-center justify-center; + } + + .pipeline-source-card { + @apply w-full; + } + + .onboarding-shell .pipeline-source-card { + @apply border; + } + + .pipeline-source-field { + @apply gap-2; + } + + .pipeline-source-control { + @apply flex flex-col gap-3 sm:flex-row sm:items-center; + } + + .pipeline-source-input { + @apply h-10 min-w-0 flex-1; + } + + .pipeline-source-browse { + @apply w-full shrink-0 sm:w-auto; + } + + .pipeline-layout { + @apply grid gap-6 xl:grid-cols-[minmax(0,28rem)_minmax(0,1fr)]; + } + + .pipeline-form { + @apply flex flex-col gap-4 p-5; + } + + .pipeline-subcard { + @apply rounded-xl border border-slate-200 bg-slate-50/40 p-4; + } + + .transformer-settings { + @apply grid gap-3; + } + + .transformer-section { + @apply flex h-full flex-col overflow-hidden rounded-lg border; + } + + .transformer-section-header { + @apply flex flex-col gap-1 border-b px-4 py-3; + } + + .transformer-section-kicker { + @apply text-[0.6875rem] font-medium uppercase tracking-[0.08em]; + } + + .transformer-section-body { + @apply flex flex-col gap-3 px-4 py-4; + } + + .transformer-toggle { + @apply mt-0; + } + + .split-fields { + @apply grid gap-3; + } + + 
.transformer-field-stack { + @apply flex flex-col gap-3; + } + + .mapping-grid { + @apply mt-4 flex flex-col gap-3; + } + + .mapping-row { + @apply grid items-center gap-3 rounded-xl border border-slate-200 bg-white p-3 md:grid-cols-[minmax(0,12rem)_minmax(0,1fr)]; + } + + .mapping-row-rich { + @apply md:grid-cols-[minmax(0,12rem)_minmax(0,22rem)_minmax(0,1fr)]; + } + + .mapping-source { + @apply text-sm font-medium text-slate-800; + } + + .mapping-source-stack { + @apply flex flex-col gap-1; + } + + .mapping-controls { + @apply grid gap-3; + } + + .mapping-summary-card { + @apply rounded-xl border border-slate-200 bg-slate-50/70 p-3; + } + + .mapping-summary-card-empty { + @apply flex h-full items-center bg-transparent; + } + + .mapping-summary-title { + @apply text-sm font-semibold text-slate-800; + } + + .mapping-meta-row { + @apply my-2 flex flex-wrap gap-2; + } + + .mapping-help { + @apply mt-1 text-xs text-slate-500; + } + + .mapping-board { + @apply grid gap-8 xl:grid-cols-[minmax(0,22rem)_minmax(0,1fr)]; + } + + .mapping-column-pane, + .mapping-datastream-pane { + @apply flex min-h-0 flex-col gap-4; + } + + .mapping-pane-header { + @apply flex items-start justify-between gap-4; + } + + .mapping-pane-label { + @apply text-xs font-semibold uppercase tracking-[0.18em] text-slate-500; + } + + .mapping-pane-copy { + @apply mt-1 text-sm text-slate-500; + } + + .mapping-pane-action { + @apply shrink-0 px-3 py-1.5; + } + + .mapping-pane-chip { + @apply inline-flex items-center rounded-full bg-brand-50 px-3 py-1 text-xs font-medium text-brand-700; + } + + .mapping-column-list, + .mapping-datastream-viewport { + height: calc(100vh - 17rem); + min-height: 28rem; + } + + .mapping-column-list { + @apply flex flex-col gap-3 overflow-auto pr-2; + } + + .mapping-column-button { + @apply flex items-center gap-3 rounded-xl border border-slate-200 bg-white px-4 py-3 text-left transition-colors hover:border-slate-300 hover:bg-slate-50; + } + + .mapping-column-button-active { + 
@apply border-emerald-400 bg-emerald-50/80; + } + + .mapping-column-button-mapped { + @apply border-sky-300 bg-sky-50/70; + } + + .mapping-column-dot { + @apply h-3 w-3 rounded-full bg-slate-400; + } + + .mapping-column-button-active .mapping-column-dot { + @apply bg-emerald-500; + } + + .mapping-column-button-mapped .mapping-column-dot { + @apply bg-sky-500; + } + + .mapping-column-name { + @apply text-base font-medium text-slate-900; + } + + .mapping-datastream-viewport { + @apply overflow-auto pr-2; + } + + .mapping-virtual-stage { + @apply relative; + } + + .mapping-virtual-item { + @apply absolute inset-x-0 top-0; + } + + .mapping-thing-header { + @apply px-1 pt-2 text-sm font-medium uppercase tracking-[0.14em] text-slate-500; + } + + .mapping-datastream-card { + @apply mt-2 flex w-full items-center justify-between gap-3 rounded-xl border border-slate-200 bg-white px-4 py-3 text-left transition-colors hover:border-slate-300 hover:bg-slate-50; + } + + .mapping-datastream-card-active { + @apply border-emerald-400 bg-emerald-50/80; + } + + .mapping-datastream-card-occupied { + @apply border-sky-300 bg-sky-50/70; + } + + .mapping-datastream-copy { + @apply min-w-0 flex-1; + } + + .mapping-datastream-title { + @apply truncate text-base font-medium text-slate-900; + } + + .mapping-datastream-meta { + @apply mt-0.5 truncate text-xs text-slate-500; + } + + .mapping-datastream-status { + @apply shrink-0; + } + + .mapping-linked-badge { + @apply inline-flex items-center rounded-full bg-emerald-100 px-2.5 py-0.5 text-xs font-medium text-emerald-700; + } + + .mapping-occupied-label { + @apply inline-flex max-w-40 truncate rounded-full bg-sky-100 px-2.5 py-0.5 text-xs font-medium text-sky-700; + } + + .mapping-connector-shell { + @apply grid overflow-hidden rounded-xl border border-slate-200 bg-transparent lg:grid-cols-2; + height: calc(100vh - 13.5rem); + min-height: 34rem; + } + + .pipeline-editor-shell-fullscreen .mapping-connector-shell { + @apply 
lg:grid-cols-[23rem_minmax(0,1fr)]; + height: 100%; + min-height: 0; + border: 0; + border-radius: 0; + } + + .mapping-connector-panel { + @apply grid min-h-0 grid-rows-[auto_minmax(0,1fr)] bg-transparent; + } + + .mapping-connector-panel + .mapping-connector-panel { + @apply border-t border-slate-200 lg:border-t-0 lg:border-l; + } + + .mapping-connector-header { + @apply border-b border-slate-200 px-3.5 py-2.5; + } + + .mapping-connector-header-row { + @apply flex items-center justify-between gap-4; + } + + .mapping-filter-grid { + @apply mt-2 grid gap-2 sm:grid-cols-2; + } + + .mapping-filter-grid-single { + @apply sm:grid-cols-1; + } + + .mapping-filter-field { + @apply flex min-w-0 flex-col gap-1; + } + + .mapping-filter-label { + @apply text-[0.6875rem] font-normal text-slate-500; + } + + .mapping-filter-input { + @apply h-8 rounded-md px-2.5 py-1.5 text-[0.75rem]; + } + + .mapping-connector-title { + @apply text-[0.6875rem] font-medium uppercase tracking-[0.06em] text-slate-500; + } + + .mapping-connector-header-meta { + @apply flex items-center gap-1 text-[0.6875rem] font-medium text-slate-500; + } + + .mapping-connector-header-count { + @apply font-semibold text-slate-800; + } + + .mapping-connector-body { + @apply min-h-0 overflow-hidden px-2 py-2; + } + + .mapping-column-scroll { + @apply flex h-full flex-col gap-1.5 overflow-auto pr-1; + } + + .mapping-datastream-scroll { + @apply overflow-auto; + } + + .mapping-datastream-sticky { + @apply sticky z-10 -mx-2 -mt-2 px-2 pt-2 pb-2; + top: -0.5rem; + } + + .mapping-filter-empty { + @apply flex min-h-full items-center justify-center px-3 py-6 text-[0.75rem] text-slate-500; + } + + .mapping-connector-footer { + @apply flex min-h-[38px] items-center border-t border-slate-200 px-3.5 py-2.5 text-[0.75rem] text-slate-500; + } + + .mapping-connector-footer b { + @apply font-medium text-slate-700; + } + + .mapping-column-item, + .mapping-datastream-item { + @apply flex w-full items-center gap-2.5 rounded-lg border 
px-2.5 py-2 text-left transition-colors; + } + + .mapping-column-item:hover, + .mapping-datastream-item:hover { + @apply border-slate-200 bg-slate-50; + } + + .mapping-column-item-selected { + @apply border-slate-300 bg-slate-100/70; + } + + .mapping-column-item-selected .mapping-item-dot { + background: rgb(99 102 241 / 0.72); + } + + .mapping-column-item-copy { + @apply flex min-w-0 flex-1 items-center justify-between gap-3; + } + + .mapping-column-item-name, + .mapping-datastream-item-name { + @apply truncate text-[0.875rem] font-medium leading-tight text-slate-900; + } + + .mapping-column-item-target { + @apply inline-flex shrink-0 items-center gap-1 text-[0.75rem] font-normal text-slate-500; + } + + .mapping-item-dot { + @apply h-2 w-2 shrink-0 rounded-full; + background: rgb(15 23 42 / 0.18); + } + + .mapping-item-badge { + @apply inline-flex h-5.5 w-5.5 shrink-0 items-center justify-center rounded-full border bg-transparent text-[0.6875rem] font-extrabold; + font-variant-numeric: tabular-nums; + } + + .mapping-item-badge-filled { + background-color: var(--mapping-badge); + border-color: var(--mapping-badge); + color: #ffffff; + font-weight: 800; + text-shadow: 0 1px 0 rgb(0 0 0 / 0.12); + } + + .mapping-connector-item-mapped { + background-color: var(--mapping-surface); + border-color: var(--mapping-border); + color: var(--mapping-text); + } + + .mapping-connector-item-mapped:hover { + background-color: var(--mapping-surface); + border-color: var(--mapping-border); + } + + .mapping-connector-item-mapped .mapping-column-item-name, + .mapping-connector-item-mapped .mapping-column-item-target, + .mapping-connector-item-mapped .mapping-datastream-item-name, + .mapping-connector-item-mapped .mapping-datastream-item-detail { + color: var(--mapping-text); + } + + .mapping-datastream-item-shell { + @apply flex items-stretch gap-2; + } + + .mapping-datastream-item-disabled { + @apply cursor-not-allowed opacity-45; + } + + .mapping-datastream-item-current { + @apply 
shadow-sm; + } + + .mapping-datastream-item-copy { + @apply flex min-w-0 flex-1 flex-col gap-0.5; + } + + .mapping-datastream-item-detail { + @apply truncate text-[0.75rem] font-normal text-slate-500; + } + + .mapping-datastream-meta-button { + @apply inline-flex shrink-0 items-center justify-center whitespace-nowrap rounded-md border border-slate-200 bg-white px-2.5 text-[0.625rem] font-semibold uppercase tracking-[0.08em] text-slate-500 transition-colors hover:border-slate-300 hover:bg-slate-50 hover:text-slate-700; + } + + .mapping-datastream-modal-backdrop { + @apply fixed inset-0 z-50 flex items-center justify-center bg-slate-950/45 p-4; + backdrop-filter: blur(2px); + } + + .mapping-datastream-modal { + @apply flex w-full max-w-3xl flex-col overflow-hidden rounded-2xl border border-slate-200 bg-white shadow-2xl; + max-height: min(42rem, calc(100vh - 2rem)); + } + + .mapping-datastream-modal-header { + @apply flex items-start justify-between gap-4 border-b border-slate-200 px-5 py-4; + } + + .mapping-datastream-modal-copy { + @apply min-w-0; + } + + .mapping-datastream-modal-kicker { + @apply text-[0.6875rem] font-medium uppercase tracking-[0.08em] text-slate-500; + } + + .mapping-datastream-modal-close { + @apply inline-flex h-9 w-9 shrink-0 items-center justify-center rounded-full border border-slate-200 bg-white text-xl leading-none text-slate-500 transition-colors hover:border-slate-300 hover:bg-slate-50 hover:text-slate-700; + } + + .mapping-datastream-modal-body { + @apply min-h-0 overflow-auto px-5 py-4; + } + + .mapping-datastream-modal-state { + @apply flex min-h-[14rem] flex-col items-center justify-center gap-3 text-center text-sm text-slate-500; + } + + .mapping-datastream-modal-state-error { + @apply text-rose-600; + } + + .mapping-datastream-metadata-sections { + @apply grid gap-3; + } + + .mapping-datastream-metadata-section { + @apply rounded-xl border border-slate-200 bg-slate-50/70; + } + + .mapping-datastream-metadata-section-title { + 
@apply border-b border-slate-200 px-4 py-3 text-sm font-semibold text-slate-900; + } + + .mapping-datastream-metadata-list { + @apply grid gap-px bg-slate-200; + } + + .mapping-datastream-metadata-item { + @apply grid gap-1 bg-white px-4 py-3 md:grid-cols-[12rem_minmax(0,1fr)] md:items-start md:gap-4; + } + + .mapping-datastream-metadata-item dt { + @apply text-[0.6875rem] font-medium uppercase tracking-[0.08em] text-slate-500; + } + + .mapping-datastream-metadata-item dd { + @apply text-sm font-medium break-words text-slate-800; + } + + .mapping-datastream-modal-footer { + @apply flex justify-end border-t border-slate-200 px-5 py-4; + } + + .mapping-connector-section { + @apply px-2.5 py-1 text-[0.6875rem] font-medium text-slate-500; + } + + .mapping-connector-divider { + @apply mx-2.5 mt-1.5 h-px bg-slate-200; + } + + .mapping-thing-group { + @apply px-2.5 pt-4 pb-1 text-[0.75rem] font-normal text-slate-500; + } + + .preview-card { + @apply w-full; + } + + .preview-workbench { + @apply flex min-h-0 flex-col; + } + + .pipeline-editor-shell-fullscreen .preview-workbench { + @apply h-full overflow-hidden; + } + + .preview-shell { + @apply grid overflow-hidden rounded-xl border lg:grid-cols-[minmax(0,28rem)_minmax(0,1fr)]; + height: calc(100vh - 16rem); + } + + .pipeline-editor-shell-fullscreen .preview-shell { + @apply rounded-none border-x-0 border-t-0 border-b-0; + height: calc(100vh - (4.75rem - 2px)); + } + + .preview-panel { + @apply grid min-h-0 bg-transparent; + } + + .preview-panel-settings { + @apply grid-rows-[minmax(0,1fr)]; + } + + .preview-panel-settings .preview-panel-body { + @apply px-5 py-5; + } + + .pipeline-editor-shell-fullscreen .preview-panel-settings .preview-panel-body { + @apply overflow-hidden; + } + + .preview-panel-data { + @apply grid-rows-[minmax(0,1fr)_auto]; + } + + .preview-panel + .preview-panel { + @apply border-t lg:border-t-0 lg:border-l; + } + + .preview-panel-header { + @apply border-b px-4 py-3; + } + + 
.preview-panel-header-row { + @apply flex items-start justify-between gap-4; + } + + .preview-panel-label { + @apply text-[0.6875rem] font-medium uppercase tracking-[0.08em]; + } + + .preview-panel-title { + @apply mt-1 text-[0.9375rem] font-semibold leading-tight; + } + + .preview-panel-meta { + @apply text-[0.75rem] font-medium; + } + + .preview-panel-body { + @apply min-h-0 overflow-auto px-4 py-4; + } + + .preview-panel-body-table { + @apply min-h-0 overflow-hidden p-0; + } + + .onboarding-shell .preview-panel-settings .pipeline-subcard, + .onboarding-shell .preview-panel-settings .transformer-section { + @apply border-0 bg-transparent p-0 shadow-none; + } + + .preview-panel-footer { + @apply flex items-center justify-between gap-3 border-t px-4 py-3 text-[0.75rem]; + } + + .preview-header { + @apply flex flex-col gap-4 border-b border-slate-100 pb-4; + } + + .preview-guidance { + @apply mt-2 text-[0.75rem] leading-5 text-slate-500; + } + + .preview-summary { + @apply flex flex-wrap gap-2; + } + + .preview-toggle { + @apply inline-flex items-center gap-3 text-[0.8125rem] text-slate-600; + } + + .preview-toggle-input { + @apply h-4 w-4 accent-sky-600; + } + + .preview-toggle-label { + @apply font-medium; + } + + .preview-column-header { + @apply flex min-w-[9rem] flex-col items-start gap-2; + } + + .preview-tag { + @apply relative inline-flex items-center rounded-md border px-2.5 py-1 text-[10px] font-semibold uppercase tracking-[0.16em] transition-colors; + background-color: var(--preview-tag-bg); + border-color: var(--preview-tag-border); + color: var(--preview-tag-text); + } + + .preview-tag-row { + @apply mr-3 cursor-grab rounded-r-none border-r-0; + transform: translateY(var(--preview-handle-offset, 0px)); + will-change: transform; + } + + .preview-tag-row::after, + .preview-tag-row::before { + content: ""; + @apply absolute top-1/2 -translate-y-1/2; + clip-path: polygon(0 0, 100% 50%, 0 100%); + pointer-events: none; + } + + .preview-tag-row::before { + 
right: -0.75rem; + height: calc(100% + 2px); + width: 0.75rem; + background-color: var(--preview-tag-border); + } + + .preview-tag-row::after { + right: -0.6875rem; + height: calc(100% - 2px); + width: 0.6875rem; + background-color: var(--preview-tag-bg); + pointer-events: none; + } + + .preview-tag-column { + @apply cursor-grab self-start; + transform: translateX(var(--preview-column-handle-offset, 0px)); + will-change: transform; + } + + .preview-tag-column::after { + content: ""; + @apply absolute -bottom-1.5 left-1/2 h-2.5 w-2.5 -translate-x-1/2 rotate-45; + background-color: var(--preview-tag-bg); + border-right: 1px solid var(--preview-tag-border); + border-bottom: 1px solid var(--preview-tag-border); + } + + .preview-tag-timestamp { + --preview-tag-bg: #fef3c7; + --preview-tag-border: #fcd34d; + --preview-tag-text: #b45309; + } + + .preview-tag-header { + --preview-tag-bg: #e0f2fe; + --preview-tag-border: #7dd3fc; + --preview-tag-text: #0369a1; + } + + .preview-tag-data { + --preview-tag-bg: #d1fae5; + --preview-tag-border: #6ee7b7; + --preview-tag-text: #047857; + } + + .preview-tag:active { + @apply cursor-grabbing; + } + + .preview-column-handle-dragging { + @apply transition-none; + } + + .preview-raw { + @apply mt-4 rounded-xl border border-slate-200 bg-slate-950 p-3 pl-22 text-xs text-slate-200; + } + + .preview-raw-line-shell { + @apply relative; + } + + .preview-raw-line { + @apply grid w-full cursor-pointer appearance-none grid-cols-[4.75rem_minmax(0,1fr)] gap-3 rounded-md border-0 border-b border-slate-800/80 bg-transparent px-1 py-1.5 text-left transition-colors last:border-b-0; + } + + .preview-raw-line:hover { + @apply bg-slate-800/55; + } + + .preview-raw-line-header { + @apply bg-sky-950/60; + } + + .preview-raw-line-data { + @apply bg-emerald-950/40; + } + + .preview-line-number-shell { + @apply flex flex-col items-start gap-1; + } + + .preview-line-number { + @apply font-mono text-slate-400; + } + + .preview-row-handle-dragging { + @apply 
transition-none; + } + + .preview-header-button { + @apply block w-full appearance-none rounded-md border-0 bg-transparent px-1.5 py-1.5 text-left font-mono transition-colors hover:bg-slate-200/70; + } + + .preview-cell-line-number { + @apply w-44 whitespace-nowrap text-slate-400; + } + + .preview-line-cell { + @apply align-middle; + } + + .preview-line-controls { + @apply flex min-w-[10rem] items-center gap-2; + } + + .preview-table-row-header td { + @apply bg-sky-50/90; + } + + .preview-table-row-data td { + @apply bg-emerald-50/80; + } + + .preview-table-shell { + @apply h-full overflow-auto rounded-none border-0; + } + + .preview-table { + @apply min-w-full border-collapse text-left text-xs; + } + + .preview-table thead { + @apply bg-slate-100; + } + + .preview-table tbody tr:nth-child(odd) { + @apply bg-white; + } + + .preview-table tbody tr:nth-child(even) { + @apply bg-slate-50/70; + } + + .preview-cell { + @apply border-b border-slate-200 px-3 py-2.5 font-mono text-slate-700; + } + + .preview-col-timestamp { + @apply bg-brand-50 text-brand-700; + } + + .preview-col-mapped { + @apply bg-emerald-50 text-emerald-700; + } + + .preview-footer { + @apply mt-4 flex flex-wrap items-center justify-between gap-3 text-xs text-slate-500; + } + + .preview-more-button { + @apply rounded-lg border border-slate-300 bg-slate-50 px-3.5 py-2 text-xs font-medium text-slate-700 shadow-sm hover:border-slate-400 hover:bg-slate-100; + } + + .preview-placeholder { + @apply flex h-full min-h-[20rem] flex-col items-center justify-center gap-4 text-center; + } + + .validation-panel { + @apply rounded-xl border border-red-200 bg-red-50 p-4; + } + + .validation-list { + @apply mt-3 list-disc pl-5 text-sm leading-6 text-red-700; + } + + .empty-panel { + @apply flex min-h-[18rem] flex-col items-center justify-center gap-4 p-8 text-center; + } + + .empty-icon { + @apply flex h-14 w-14 items-center justify-center rounded-2xl bg-brand-50 font-mono text-lg font-semibold text-brand-700; + } + 
+ .card-stack { + @apply flex flex-col gap-4; + } + + .job-card { + @apply p-5 transition-shadow hover:shadow-[var(--shadow-card-hover)]; + } + + .job-card-top { + @apply flex flex-col gap-4 md:flex-row md:items-start md:justify-between; + } + + .job-card-title-row { + @apply flex items-center gap-3; + } + + .job-meta { + @apply mt-2 text-sm text-slate-500; + } + + .job-card-actions { + @apply mt-5 flex flex-wrap gap-2 border-t border-slate-100 pt-4; + } + + .onboarding-shell .preview-guidance, + .onboarding-shell .preview-footer, + .onboarding-shell .preview-cell-line-number, + .onboarding-shell .preview-toggle { + @apply text-slate-400; + } + + .onboarding-shell .preview-header { + @apply border-0; + } + + .onboarding-shell .preview-raw { + @apply border-0; + } + + .onboarding-shell .preview-raw-line:hover { + @apply bg-white/8; + } + + .onboarding-shell .preview-tag-header { + --preview-tag-bg: #082f49; + --preview-tag-border: rgb(14 165 233 / 0.5); + --preview-tag-text: #bae6fd; + } + + .onboarding-shell .preview-tag-data { + --preview-tag-bg: #022c22; + --preview-tag-border: rgb(16 185 129 / 0.5); + --preview-tag-text: #a7f3d0; + } + + .onboarding-shell .preview-tag-timestamp { + --preview-tag-bg: #451a03; + --preview-tag-border: rgb(245 158 11 / 0.5); + --preview-tag-text: #fde68a; + } + + .onboarding-shell .preview-table thead { + background: #26282b; + } + + .onboarding-shell .preview-table tbody tr:nth-child(odd) { + background: #2d3033; + } + + .onboarding-shell .preview-table tbody tr:nth-child(even) { + background: #2a2c30; + } + + .onboarding-shell .preview-cell { + border-color: rgb(255 255 255 / 0.06); + @apply text-slate-200; + } + + .onboarding-shell .preview-header-button { + @apply text-slate-100 hover:bg-white/8; + } + + .onboarding-shell .preview-table-row-header td { + @apply bg-sky-950/15; + } + + .onboarding-shell .preview-table-row-data td { + @apply bg-emerald-950/15; + } + + .onboarding-shell .preview-col-timestamp { + @apply 
bg-amber-950/35 text-amber-200; + } + + .onboarding-shell .preview-col-mapped { + @apply bg-emerald-950/35 text-emerald-200; + } + + .onboarding-shell .preview-more-button { + @apply border-slate-700 bg-[#16191b] text-slate-100 hover:border-slate-600 hover:bg-[#1d2124]; + } + + .pill-success, + .pill-warning, + .pill-danger, + .pill-muted, + .pill-info { + @apply inline-flex items-center rounded-full px-3 py-1 text-xs font-medium; + } + + .pill-success { + @apply bg-emerald-50 text-emerald-700; + } + + .pill-warning { + @apply bg-amber-50 text-amber-700; + } + + .pill-danger { + @apply bg-red-50 text-red-700; + } + + .pill-muted { + @apply bg-slate-100 text-slate-500; + } + + .pill-info { + @apply bg-brand-50 text-brand-700; + } + + .dashboard-shell.page-shell { + @apply max-w-none gap-0 overflow-hidden px-0 pb-0; + width: 100%; + height: 100vh; + height: 100dvh; + padding-top: calc(4.75rem - 2px); + } + + .dashboard-toolbar { + @apply mx-auto flex w-full max-w-7xl flex-wrap items-center gap-3 px-8 py-4; + border-bottom: 1px solid rgb(255 255 255 / 0.06); + } + + .dashboard-filter-field { + @apply flex min-w-0 flex-1 basis-80 items-center; + } + + .dashboard-filter-input.input { + @apply h-9 rounded-lg text-sm; + } + + .dashboard-status-filters { + @apply flex flex-wrap items-center gap-2; + } + + .dashboard-status-filter { + @apply inline-flex cursor-pointer items-center gap-2 rounded-full border px-3 py-1 text-xs font-medium transition-colors; + border-color: rgb(255 255 255 / 0.08); + background: rgb(255 255 255 / 0.035); + color: rgb(203 213 225 / 0.88); + } + + .dashboard-status-filter:hover { + border-color: rgb(255 255 255 / 0.18); + background: rgb(255 255 255 / 0.06); + color: #f1f5f9; + } + + .dashboard-status-filter-active { + border-color: rgb(56 189 248 / 0.45); + background: rgb(8 47 73 / 0.42); + color: #e0f2fe; + } + + .dashboard-status-filter-count { + @apply inline-flex min-w-5 items-center justify-center rounded-full px-1.5 text-[0.6875rem] 
font-semibold; + background: rgb(255 255 255 / 0.08); + color: inherit; + } + + .dashboard-status-filter-active .dashboard-status-filter-count { + background: rgb(56 189 248 / 0.26); + } + + .dashboard-body { + @apply flex min-h-0 flex-col overflow-y-auto; + } + + .dashboard-list { + @apply mx-auto flex w-full max-w-7xl flex-col; + } + + .data-source-row { + @apply flex flex-col gap-3 px-8 py-5 transition-colors; + border-top: 1px solid rgb(255 255 255 / 0.06); + } + + .data-source-row:hover { + background: rgb(255 255 255 / 0.02); + } + + .data-source-row-head { + @apply flex flex-wrap items-start justify-between gap-3; + } + + .data-source-row-head-actions { + @apply flex shrink-0 items-center gap-2; + } + + .data-source-row-title-block { + @apply min-w-0 flex-1; + } + + .data-source-row-title-line { + @apply flex items-center gap-2; + } + + .data-source-row-title { + @apply min-w-0 truncate text-[0.9375rem] font-semibold leading-tight text-slate-100; + letter-spacing: -0.005em; + } + + .data-source-row-title-edit { + @apply flex flex-wrap items-center gap-2; + } + + .data-source-row-title-edit-actions { + @apply flex flex-wrap items-center gap-1.5; + } + + .data-source-name-input { + @apply min-w-[14rem] max-w-[28rem] flex-1; + } + + .data-source-name-edit-trigger { + @apply inline-flex h-7 w-7 shrink-0 cursor-pointer items-center justify-center rounded-md border transition-colors; + border-color: rgb(255 255 255 / 0.1); + background: rgb(255 255 255 / 0.03); + color: rgb(203 213 225 / 0.88); + } + + .data-source-name-edit-trigger:hover { + border-color: rgb(255 255 255 / 0.2); + background: rgb(255 255 255 / 0.08); + color: rgb(248 250 252 / 0.98); + } + + .data-source-name-edit-trigger svg { + width: 0.875rem; + height: 0.875rem; + } + + .data-source-row-file { + @apply mt-1.5 truncate text-[0.8125rem] text-slate-50; + font-family: var(--font-mono); + } + + .data-source-row-file-link { + border: 0; + background: transparent; + padding: 0; + cursor: pointer; + 
color: #f8fafc; + font: inherit; + text-align: left; + text-decoration: underline; + text-decoration-color: rgb(148 163 184 / 0.4); + text-underline-offset: 0.2em; + } + + .data-source-row-file-link:hover { + color: #ffffff; + text-decoration-color: rgb(186 230 253 / 0.85); + } + + .data-source-row-meta { + @apply flex flex-wrap items-center justify-between gap-3; + } + + .data-source-row-actions { + @apply flex flex-wrap items-center gap-1.5; + } + + .data-source-row-mapping-count { + @apply whitespace-nowrap text-right text-[0.75rem] text-slate-400; + } + + .data-source-action { + @apply inline-flex cursor-pointer items-center gap-1.5 rounded-md border px-2.5 py-1 text-[0.75rem] font-medium transition-colors; + border-color: rgb(255 255 255 / 0.1); + background: rgb(255 255 255 / 0.03); + color: rgb(226 232 240 / 0.96); + } + + .data-source-action:hover:not(:disabled) { + border-color: rgb(255 255 255 / 0.22); + background: rgb(255 255 255 / 0.07); + } + + .data-source-action:disabled { + @apply cursor-not-allowed opacity-55; + } + + .data-source-action-logs { + color: #bae6fd; + border-color: rgb(56 189 248 / 0.28); + background: rgb(8 47 73 / 0.32); + } + + .data-source-action-logs:hover:not(:disabled) { + border-color: rgb(56 189 248 / 0.5); + background: rgb(12 74 110 / 0.42); + } + + .data-source-action-run { + color: #bbf7d0; + border-color: rgb(34 197 94 / 0.28); + background: rgb(20 83 45 / 0.28); + } + + .data-source-action-run:hover:not(:disabled) { + border-color: rgb(34 197 94 / 0.45); + background: rgb(21 128 61 / 0.34); + } + + .data-source-action-run-active, + .data-source-action-run-active:hover:not(:disabled) { + border-color: rgb(148 163 184 / 0.24); + background: rgb(51 65 85 / 0.28); + color: #cbd5e1; + } + + .data-source-action-run-active:disabled { + opacity: 0.9; + } + + .data-source-action-save { + color: #bbf7d0; + border-color: rgb(34 197 94 / 0.28); + background: rgb(20 83 45 / 0.28); + } + + 
.data-source-action-save:hover:not(:disabled) { + border-color: rgb(34 197 94 / 0.45); + background: rgb(21 128 61 / 0.34); + } + + .data-source-action-danger { + color: #fca5a5; + border-color: rgb(248 113 113 / 0.28); + background: rgb(127 29 29 / 0.16); + } + + .data-source-action-danger:hover:not(:disabled) { + border-color: rgb(248 113 113 / 0.45); + background: rgb(127 29 29 / 0.28); + } + + .data-source-status { + @apply inline-flex shrink-0 items-center gap-1.5 rounded-full border px-2.5 py-0.5 text-[0.6875rem] font-semibold uppercase tracking-[0.08em]; + } + + .data-source-status::before { + content: ""; + width: 0.375rem; + height: 0.375rem; + border-radius: 9999px; + background: currentColor; + display: inline-block; + opacity: 0.85; + } + + .data-source-status-healthy { + color: #86efac; + border-color: rgb(34 197 94 / 0.32); + background: rgb(6 78 59 / 0.35); + } + + .data-source-status-warning { + color: #fcd34d; + border-color: rgb(245 158 11 / 0.32); + background: rgb(120 53 15 / 0.32); + } + + .data-source-status-danger { + color: #fca5a5; + border-color: rgb(248 113 113 / 0.32); + background: rgb(127 29 29 / 0.32); + } + + .data-source-status-info { + color: #bae6fd; + border-color: rgb(56 189 248 / 0.32); + background: rgb(8 47 73 / 0.32); + } + + .data-source-logs-panel { + @apply mt-1 flex flex-col gap-4 rounded-xl px-4 py-4; + background: #181a1d; + } + + .data-source-logs-list { + @apply max-h-72 overflow-auto; + } + + .data-source-log-entry { + @apply grid gap-1 py-1.5 text-[0.75rem] text-slate-300 first:pt-0 last:pb-0 md:grid-cols-[12rem_4.5rem_minmax(0,1fr)] md:gap-3; + font-family: var(--font-mono); + } + + .data-source-log-timestamp { + white-space: nowrap; + color: #94a3b8; + } + + .data-source-log-level { + @apply uppercase tracking-[0.04em]; + } + + .data-source-log-level-info { + color: #7dd3fc; + } + .data-source-log-level-warning { + color: #fcd34d; + } + .data-source-log-level-error { + color: #fca5a5; + } + + .data-source-empty { 
+ @apply mx-auto w-full max-w-7xl px-8 py-10 text-center text-sm text-slate-400; + } +} diff --git a/frontend/tests/auth-submit.test.ts b/frontend/tests/auth-submit.test.ts new file mode 100644 index 0000000..eb8082d --- /dev/null +++ b/frontend/tests/auth-submit.test.ts @@ -0,0 +1,257 @@ +import test from "node:test"; +import assert from "node:assert/strict"; + +import { + applyConnectionValidationResult, + createAuthFieldStates, + credentialFields, + resetAuthFieldStates, + runAuthSubmission, + validateAuthFieldsForSubmit, + type AuthFieldName, + type FieldValidationState, +} from "../auth-submit"; +import type { ConnectionTestResponse, ServerConfig } from "../api/hydroserver"; + +function createServerConfig( + overrides: Partial<ServerConfig> = {} +): ServerConfig { + return { + auth_type: "apikey", + url: "https://example.com", + api_key: "valid-key", + username: "", + password: "", + workspace_id: "", + workspace_name: "", + ...overrides, + }; +} + +function createMarker() { + const fieldStates = createAuthFieldStates(); + + const markField = ( + field: AuthFieldName, + nextState: FieldValidationState["state"], + message: string | null = null + ) => { + fieldStates[field] = { state: nextState, message }; + }; + + return { fieldStates, markField }; +} + +function createResult( + overrides: Partial<ConnectionTestResponse> = {} +): ConnectionTestResponse { + return { + ok: false, + state: "error", + message: "Generic auth error", + invalid_field: null, + instance_name: "example.com", + workspace_id: null, + workspace_name: null, + workspace_count: 0, + datastream_count: 0, + permissions_ok: false, + ...overrides, + }; +} + +test("validateAuthFieldsForSubmit rejects malformed URLs", () => { + const { fieldStates, markField } = createMarker(); + + const valid = validateAuthFieldsForSubmit( + createServerConfig({ url: "not-a-url" }), + markField + ); + + assert.equal(valid, false); + assert.deepEqual(fieldStates.url, { + state: "invalid", + message: "Please enter a full http:// or https:// URL.", + }); 
+}); + +test("validateAuthFieldsForSubmit requires an API key for API key auth", () => { + const { fieldStates, markField } = createMarker(); + + const valid = validateAuthFieldsForSubmit( + createServerConfig({ api_key: "" }), + markField + ); + + assert.equal(valid, false); + assert.deepEqual(fieldStates.api_key, { + state: "invalid", + message: "Please enter your API key.", + }); +}); + +test("validateAuthFieldsForSubmit requires username, password, and workspace name for userpass auth", () => { + const { fieldStates, markField } = createMarker(); + + const valid = validateAuthFieldsForSubmit( + createServerConfig({ + auth_type: "userpass", + api_key: "", + username: "", + password: "", + }), + markField + ); + + assert.equal(valid, false); + assert.deepEqual(fieldStates.username, { + state: "invalid", + message: "Please enter your username.", + }); + assert.deepEqual(fieldStates.password, { + state: "invalid", + message: "Please enter your password.", + }); + assert.deepEqual(fieldStates.workspace_name, { + state: "invalid", + message: "Please enter a workspace name.", + }); +}); + +test("applyConnectionValidationResult marks only the URL when HydroServer is unreachable", () => { + const { fieldStates, markField } = createMarker(); + const server = createServerConfig(); + + applyConnectionValidationResult( + server, + createResult({ message: "Couldn't reach HydroServer. Check the server URL and try again." }), + markField + ); + + assert.equal(fieldStates.url.state, "invalid"); + assert.equal(fieldStates.api_key.state, "idle"); +}); + +test("applyConnectionValidationResult marks the credential field when auth fails", () => { + const { fieldStates, markField } = createMarker(); + const server = createServerConfig(); + + applyConnectionValidationResult( + server, + createResult({ message: "That API key is invalid. Check the API key and try again." 
}), + markField + ); + + assert.equal(fieldStates.url.state, "valid"); + assert.equal(fieldStates.api_key.state, "invalid"); + assert.equal( + fieldStates.api_key.message, + "That API key is invalid. Check the API key and try again." + ); +}); + +test("applyConnectionValidationResult marks only workspace name when the workspace is invalid", () => { + const { fieldStates, markField } = createMarker(); + const server = createServerConfig({ + auth_type: "userpass", + api_key: "", + username: "user@example.com", + password: "hunter2", + workspace_name: "Missing Workspace", + }); + + applyConnectionValidationResult( + server, + createResult({ + invalid_field: "workspace_name", + message: + "No related workspace named \"Missing Workspace\" was found for this account. Check the workspace name and try again.", + }), + markField + ); + + assert.equal(fieldStates.url.state, "valid"); + assert.equal(fieldStates.username.state, "valid"); + assert.equal(fieldStates.password.state, "valid"); + assert.equal(fieldStates.workspace_name.state, "invalid"); + assert.equal( + fieldStates.workspace_name.message, + "No related workspace named \"Missing Workspace\" was found for this account. Check the workspace name and try again." 
+ ); +}); + +test("resetAuthFieldStates clears state for the current auth mode", () => { + const fieldStates = createAuthFieldStates(); + fieldStates.url = { state: "invalid", message: "Bad URL" }; + fieldStates.api_key = { state: "invalid", message: "Bad key" }; + fieldStates.username = { state: "invalid", message: "Bad user" }; + fieldStates.password = { state: "invalid", message: "Bad password" }; + fieldStates.workspace_name = { state: "invalid", message: "Bad workspace" }; + + resetAuthFieldStates(fieldStates, "apikey"); + + for (const field of ["url", ...credentialFields("apikey")] as AuthFieldName[]) { + assert.deepEqual(fieldStates[field], { state: "idle", message: null }); + } +}); + +test("runAuthSubmission always clears submitting after success", async () => { + const transitions: boolean[] = []; + let renderCount = 0; + + const result = await runAuthSubmission({ + setSubmitting: (value) => { + transitions.push(value); + }, + render: () => { + renderCount += 1; + }, + action: async () => "done", + }); + + assert.equal(result, "done"); + assert.deepEqual(transitions, [true, false]); + assert.equal(renderCount, 2); +}); + +test("runAuthSubmission always clears submitting after an early return path", async () => { + const transitions: boolean[] = []; + let renderCount = 0; + + await runAuthSubmission({ + setSubmitting: (value) => { + transitions.push(value); + }, + render: () => { + renderCount += 1; + }, + action: async () => { + return; + }, + }); + + assert.deepEqual(transitions, [true, false]); + assert.equal(renderCount, 2); +}); + +test("runAuthSubmission always clears submitting after an exception", async () => { + const transitions: boolean[] = []; + let renderCount = 0; + + await assert.rejects(() => + runAuthSubmission({ + setSubmitting: (value) => { + transitions.push(value); + }, + render: () => { + renderCount += 1; + }, + action: async () => { + throw new Error("boom"); + }, + }) + ); + + assert.deepEqual(transitions, [true, false]); + 
assert.equal(renderCount, 2); +}); diff --git a/frontend/tests/pipeline-submit.test.ts b/frontend/tests/pipeline-submit.test.ts new file mode 100644 index 0000000..7298d2f --- /dev/null +++ b/frontend/tests/pipeline-submit.test.ts @@ -0,0 +1,93 @@ +import test from "node:test" +import assert from "node:assert/strict" + +import { + createPipelineFieldStates, + validatePipelineFieldsForSubmit, + type PipelineFieldName, +} from "../pipeline-submit" +import { createEmptyPipelineForm } from "../composables/state" + +function validateForm(params?: { + hasPreview?: boolean + previewHeaders?: string[] + mutate?: ReturnType<typeof createEmptyPipelineForm> +}) { + const form = params?.mutate ?? createEmptyPipelineForm() + const fieldStates = createPipelineFieldStates() + + const valid = validatePipelineFieldsForSubmit({ + form, + hasPreview: params?.hasPreview ?? true, + previewHeaders: params?.previewHeaders ?? ["timestamp", "value"], + markField: (field, nextState, message) => { + fieldStates[field] = { + state: nextState, + message: message ?? 
null, + } + }, + }) + + return { valid, fieldStates } +} + +test("validatePipelineFieldsForSubmit requires a preview before continuing", () => { + const form = createEmptyPipelineForm() + form.filePath = "/tmp/preview.csv" + form.timestamp.key = "timestamp" + + const { valid, fieldStates } = validateForm({ + hasPreview: false, + mutate: form, + }) + + assert.equal(valid, false) + assert.equal(fieldStates.file_path.state, "invalid") +}) + +test("validatePipelineFieldsForSubmit rejects timestamp keys outside the preview", () => { + const form = createEmptyPipelineForm() + form.filePath = "/tmp/preview.csv" + form.timestamp.key = "missing_column" + + const { valid, fieldStates } = validateForm({ mutate: form }) + + assert.equal(valid, false) + assert.equal(fieldStates.timestamp_key.state, "invalid") +}) + +test("validatePipelineFieldsForSubmit enforces controlled timezone vocabularies", () => { + const form = createEmptyPipelineForm() + form.filePath = "/tmp/preview.csv" + form.timestamp.key = "timestamp" + form.timestamp.format = "naive" + form.timestamp.timezoneMode = "daylightSavings" + form.timestamp.timezone = "Mars/Olympus_Mons" + + const { valid, fieldStates } = validateForm({ mutate: form }) + + assert.equal(valid, false) + assert.equal(fieldStates.timezone.state, "invalid") +}) + +test("validatePipelineFieldsForSubmit accepts a valid index-based configuration", () => { + const form = createEmptyPipelineForm() + form.filePath = "/tmp/preview.csv" + form.hasHeaderRow = false + form.identifierType = "index" + form.dataStartRow = 1 + form.timestamp.key = "1" + form.timestamp.format = "naive" + form.timestamp.timezoneMode = "fixedOffset" + form.timestamp.timezone = "-0700" + + const { valid, fieldStates } = validateForm({ + mutate: form, + previewHeaders: ["Column 1", "Column 2"], + }) + + assert.equal(valid, true) + for (const field of Object.keys(fieldStates) as PipelineFieldName[]) { + assert.notEqual(fieldStates[field].state, "invalid") + } +}) diff --git 
a/frontend/tests/useAppModel.test.ts b/frontend/tests/useAppModel.test.ts new file mode 100644 index 0000000..f561295 --- /dev/null +++ b/frontend/tests/useAppModel.test.ts @@ -0,0 +1,223 @@ +import test from "node:test" +import assert from "node:assert/strict" + +import { + requiresDesktopServiceSetup, + resolvePostAuthRoute, + resolveAuthenticatedRoute, + shouldBootstrapDesktopDaemon, + shouldApplyDaemonConnectionState, + shouldHydrateAuthDraftFromDaemon, + shouldRefreshServiceStatusOnFocus, +} from "../composables/useAppModel" + +test("connected users with saved datasources default to the dashboard", () => { + assert.equal( + resolveAuthenticatedRoute({ + route: "welcome", + hasSavedDatasources: true, + pipelineReadyForMapping: false, + serviceReady: true, + }), + "dashboard" + ) +}) + +test("connected users without saved datasources default to onboarding", () => { + assert.equal( + resolveAuthenticatedRoute({ + route: "welcome", + hasSavedDatasources: false, + pipelineReadyForMapping: false, + serviceReady: true, + }), + "jobs-new" + ) +}) + +test("dashboard route redirects to onboarding when no datasources exist", () => { + assert.equal( + resolveAuthenticatedRoute({ + route: "dashboard", + hasSavedDatasources: false, + pipelineReadyForMapping: false, + serviceReady: true, + }), + "jobs-new" + ) +}) + +test("users can still stay in onboarding even when datasources already exist", () => { + assert.equal( + resolveAuthenticatedRoute({ + route: "jobs-new", + hasSavedDatasources: true, + pipelineReadyForMapping: false, + serviceReady: true, + }), + "jobs-new" + ) +}) + +test("connected users are sent to service setup when the background service is unavailable", () => { + assert.equal( + resolveAuthenticatedRoute({ + route: "dashboard", + hasSavedDatasources: true, + pipelineReadyForMapping: false, + serviceReady: false, + }), + "service" + ) +}) + +test("post-auth redirect sends users with saved datasources to the dashboard", () => { + assert.equal( + 
resolvePostAuthRoute({ + hasSavedDatasources: true, + serviceReady: true, + }), + "dashboard" + ) +}) + +test("post-auth redirect sends users without saved datasources to onboarding", () => { + assert.equal( + resolvePostAuthRoute({ + hasSavedDatasources: false, + serviceReady: true, + }), + "jobs-new" + ) +}) + +test("desktop runtime blocks on service setup before authentication when service is unavailable", () => { + assert.equal( + requiresDesktopServiceSetup({ + tauriRuntime: true, + serviceReady: false, + daemonReady: false, + }), + true + ) +}) + +test("desktop runtime blocks on service setup when daemon bootstrap has not completed", () => { + assert.equal( + requiresDesktopServiceSetup({ + tauriRuntime: true, + serviceReady: true, + daemonReady: false, + }), + true + ) +}) + +test("browser runtime does not require OS service setup", () => { + assert.equal( + requiresDesktopServiceSetup({ + tauriRuntime: false, + serviceReady: false, + daemonReady: false, + }), + false + ) +}) + +test("desktop runtime skips daemon bootstrap until the service is ready", () => { + assert.equal( + shouldBootstrapDesktopDaemon({ + tauriRuntime: true, + serviceStatus: { + supported: true, + installed: false, + running: false, + label: "", + plist_path: "", + executable_path: "", + status_message: "", + }, + }), + false + ) +}) + +test("desktop runtime bootstraps the daemon once the service is running", () => { + assert.equal( + shouldBootstrapDesktopDaemon({ + tauriRuntime: true, + serviceStatus: { + supported: true, + installed: true, + running: true, + label: "", + plist_path: "", + executable_path: "", + status_message: "", + }, + }), + true + ) +}) + +test("daemon snapshots do not overwrite an auth draft with local edits", () => { + assert.equal( + shouldHydrateAuthDraftFromDaemon({ + authSubmitting: false, + authDraftDirty: true, + }), + false + ) +}) + +test("daemon snapshots still hydrate a clean auth draft", () => { + assert.equal( + shouldHydrateAuthDraftFromDaemon({ + 
authSubmitting: false, + authDraftDirty: false, + }), + true + ) +}) + +test("daemon snapshots do not downgrade connection state during auth submission", () => { + assert.equal( + shouldApplyDaemonConnectionState({ + authSubmitting: true, + snapshotConnectionState: "not_configured", + }), + false + ) +}) + +test("connected daemon snapshots can still apply during auth submission", () => { + assert.equal( + shouldApplyDaemonConnectionState({ + authSubmitting: true, + snapshotConnectionState: "connected", + }), + true + ) +}) + +test("focus refresh is skipped while a service action is in progress", () => { + assert.equal( + shouldRefreshServiceStatusOnFocus({ + loading: false, + connected: true, + serviceActionSubmitting: true, + }), + false + ) +}) + +test("focus refresh still runs when the app is connected and idle", () => { + assert.equal( + shouldRefreshServiceStatusOnFocus({ + loading: false, + connected: true, + serviceActionSubmitting: false, + }), + true + ) +}) diff --git a/frontend/tests/useAuth.test.ts b/frontend/tests/useAuth.test.ts new file mode 100644 index 0000000..20e9744 --- /dev/null +++ b/frontend/tests/useAuth.test.ts @@ -0,0 +1,184 @@ +import test from "node:test" +import assert from "node:assert/strict" + +import { emptyServerConfig, state } from "../composables/state" +import { + disconnectHydroServer, + submitAuthConfig, + updateAuthDraftField, +} from "../composables/useAuth" +import { createAuthFieldStates } from "../auth-submit" + +const originalFetch = globalThis.fetch +const originalWindow = globalThis.window + +function jsonResponse(body: unknown): Response { + return new Response(JSON.stringify(body), { + status: 200, + headers: { "Content-Type": "application/json" }, + }) +} + +function resetAuthState(): void { + state.authDraft = emptyServerConfig() + state.authDraftDirty = false + state.authFieldStates = createAuthFieldStates() + state.authSubmitting = false + state.postAuthRedirectPending = false + state.connectionSummary = null + 
state.lastConnectionState = null + state.config = null + state.serviceStatus = null + state.serviceActionError = null + state.jobStatuses = [] +} + +test.beforeEach(() => { + resetAuthState() + globalThis.fetch = originalFetch + Object.defineProperty(globalThis, "window", { + value: { location: { hash: "#welcome" } }, + configurable: true, + writable: true, + }) +}) + +test.after(() => { + globalThis.fetch = originalFetch + if (originalWindow === undefined) { + Reflect.deleteProperty(globalThis, "window") + } else { + Object.defineProperty(globalThis, "window", { + value: originalWindow, + configurable: true, + writable: true, + }) + } +}) + +test("submitAuthConfig saves a valid API key login state", async () => { + const requests: string[] = [] + + state.authDraft = { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret-key", + username: "", + password: "", + workspace_id: "", + workspace_name: "", + } + + globalThis.fetch = async (input, init) => { + const url = String(input) + requests.push(`${init?.method ?? 
"GET"} ${url}`) + + if (url.includes("/connection/validate-url")) { + return jsonResponse({ + ok: true, + message: "Looks good.", + instance_name: "Example", + }) + } + + if (url.endsWith("/connection/test")) { + return jsonResponse({ + ok: true, + state: "connected", + message: "Connected.", + invalid_field: null, + instance_name: "Example", + workspace_id: "workspace-1", + workspace_name: "Primary Workspace", + workspace_count: 1, + datastream_count: 12, + permissions_ok: true, + }) + } + + if (url.endsWith("/config/server")) { + return jsonResponse({ + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret-key", + username: "", + password: "", + workspace_id: "workspace-1", + workspace_name: "Primary Workspace", + }, + jobs: [], + }) + } + + throw new Error(`Unexpected request: ${url}`) + } + + await submitAuthConfig("welcome-form") + + assert.deepEqual(requests, [ + "GET /api/connection/validate-url?url=https%3A%2F%2Fexample.com", + "POST /api/connection/test", + "PUT /api/config/server", + "POST /api/connection/test", + ]) + assert.equal(state.authSubmitting, false) + assert.equal(state.lastConnectionState, "connected") + assert.equal(state.config?.server.workspace_id, "workspace-1") + assert.equal(state.connectionSummary?.workspace_name, "Primary Workspace") + assert.equal(state.authDraftDirty, false) + assert.equal(state.postAuthRedirectPending, true) +}) + +test("updating the host URL marks the auth draft dirty", () => { + updateAuthDraftField("welcome-form", "url", "https://example.com") + + assert.equal(state.authDraft.url, "https://example.com") + assert.equal(state.authDraftDirty, true) +}) + +test("disconnect keeps the live service status instead of clearing it", async () => { + state.config = { + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret-key", + username: "", + password: "", + workspace_id: "workspace-1", + workspace_name: "Primary Workspace", + }, + jobs: 
[], + } + state.serviceStatus = { + supported: true, + installed: true, + running: true, + label: "Streaming Data Loader", + plist_path: "", + executable_path: "", + status_message: "Running", + } + + globalThis.fetch = async (input, init) => { + const url = String(input) + + if (url.endsWith("/config/server") && init?.method === "DELETE") { + return jsonResponse({ + version: 1, + server: emptyServerConfig(), + jobs: [], + }) + } + + throw new Error(`Unexpected request: ${url}`) + } + + await disconnectHydroServer() + + assert.equal(state.lastConnectionState, "not_configured") + assert.equal(state.serviceStatus?.running, true) + assert.equal(state.serviceStatus?.installed, true) +}) diff --git a/frontend/tests/useMapping.test.ts b/frontend/tests/useMapping.test.ts new file mode 100644 index 0000000..073b381 --- /dev/null +++ b/frontend/tests/useMapping.test.ts @@ -0,0 +1,282 @@ +import test from "node:test" +import assert from "node:assert/strict" + +import type { CsvPreviewResponse, DatastreamSummary } from "../api/hydroserver" +import { createPipelineFieldStates } from "../pipeline-submit" +import { + buildPipelineColumnMappings, + buildDatastreamBrowserEntries, + buildMappingSourceColumns, + loadPipelineDatastreams, + pipelineThingOptions, + syncPipelineMappingDrafts, + updatePipelineMappingDatastream, + updatePipelineMappingThing, +} from "../composables/useMapping" +import { createEmptyPipelineForm, PREVIEW_PAGE_SIZE, state } from "../composables/state" + +const originalFetch = globalThis.fetch + +function createPreview(): CsvPreviewResponse { + return { + raw_lines: [ + "recorded_at,stage_cfs,temp_c", + "2024-01-01T00:00:00Z,1.2,7.4", + ], + parsed_rows: [ + ["recorded_at", "stage_cfs", "temp_c"], + ["2024-01-01T00:00:00Z", "1.2", "7.4"], + ], + detected_header_row: 1, + detected_data_start_row: 2, + detected_delimiter: ",", + total_lines: 2, + encoding: "utf-8", + } +} + +function datastream( + overrides: Partial<DatastreamSummary> +): DatastreamSummary { + return { + id: 
"stream-1", + name: "Stage Datastream", + thing_id: "thing-1", + thing_name: "Alpha Site", + observed_property_name: "Stage", + processing_level_definition: "Raw", + unit_name: "cubic feet per second", + unit_symbol: "cfs", + sampled_medium: "Surface water", + sensor_name: "Pressure transducer", + result_type: "Measure", + ...overrides, + } +} + +function jsonResponse(body: unknown): Response { + return new Response(JSON.stringify(body), { + status: 200, + headers: { "Content-Type": "application/json" }, + }) +} + +function resetMappingState(): void { + state.pipelineForm = createEmptyPipelineForm() + state.pipelinePreview = createPreview() + state.pipelineSelectionTarget = null + state.pipelinePreviewRowsRequested = PREVIEW_PAGE_SIZE + state.pipelineFieldStates = createPipelineFieldStates() + state.pipelineValidationAttempted = false + state.pipelineReadyForMapping = true + state.validatedPipelineSettings = { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + } + state.pipelineDatastreams = [] + state.pipelineDatastreamsLoading = false + state.pipelineMappingDrafts = [] + state.validatedColumnMappings = [] +} + +test.beforeEach(() => { + resetMappingState() + globalThis.fetch = originalFetch +}) + +test.after(() => { + globalThis.fetch = originalFetch +}) + +test("buildMappingSourceColumns excludes the timestamp column and uses 1-based keys in index mode", () => { + assert.deepEqual( + buildMappingSourceColumns(["Timestamp", "Stage", "Temperature"], "index", "1"), + [ + { csvColumn: "2", label: "2 · Stage" }, + { csvColumn: "3", label: "3 · Temperature" }, + ] + ) +}) + +test("changing the selected thing clears a datastream from a different thing", () => { + state.pipelineDatastreams = [ + datastream({}), + datastream({ + id: "stream-2", + name: "Temperature Datastream", + thing_id: "thing-2", + thing_name: "Beta Site", + 
observed_property_name: "Temperature", + unit_name: "degree Celsius", + unit_symbol: "degC", + }), + ] + + syncPipelineMappingDrafts() + updatePipelineMappingDatastream("stage_cfs", "stream-1") + updatePipelineMappingThing("stage_cfs", "thing-2") + + assert.equal( + state.pipelineMappingDrafts.find((draft) => draft.csvColumn === "stage_cfs") + ?.datastreamId, + "" + ) +}) + +test("selecting a datastream that is already mapped leaves the existing mapping in place", () => { + state.pipelineDatastreams = [ + datastream({}), + datastream({ + id: "stream-2", + name: "Temperature Datastream", + observed_property_name: "Temperature", + unit_name: "degree Celsius", + unit_symbol: "degC", + }), + ] + + syncPipelineMappingDrafts() + updatePipelineMappingDatastream("stage_cfs", "stream-1") + updatePipelineMappingDatastream("temp_c", "stream-1") + + assert.equal( + state.pipelineMappingDrafts.find((draft) => draft.csvColumn === "stage_cfs") + ?.datastreamId, + "stream-1" + ) + assert.equal( + state.pipelineMappingDrafts.find((draft) => draft.csvColumn === "temp_c") + ?.datastreamId, + "" + ) +}) + +test("buildPipelineColumnMappings uses the selected datastream names", () => { + state.pipelineDatastreams = [ + datastream({}), + datastream({ + id: "stream-2", + name: "Temperature Datastream", + thing_id: "thing-2", + thing_name: "Beta Site", + observed_property_name: "Temperature", + unit_name: "degree Celsius", + unit_symbol: "degC", + }), + ] + + syncPipelineMappingDrafts() + updatePipelineMappingDatastream("stage_cfs", "stream-1") + updatePipelineMappingDatastream("temp_c", "stream-2") + + assert.deepEqual(buildPipelineColumnMappings(), [ + { + csv_column: "stage_cfs", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + { + csv_column: "temp_c", + datastream_id: "stream-2", + datastream_name: "Temperature Datastream", + }, + ]) + assert.equal(state.validatedColumnMappings.length, 2) +}) + +test("loadPipelineDatastreams sorts thing options by thing 
name", async () => { + globalThis.fetch = async () => + jsonResponse([ + datastream({ + id: "stream-2", + thing_id: "thing-2", + thing_name: "Zulu Site", + }), + datastream({ + id: "stream-1", + thing_id: "thing-1", + thing_name: "Alpha Site", + }), + datastream({ + id: "stream-3", + thing_id: "thing-1", + thing_name: "Alpha Site", + observed_property_name: "Temperature", + }), + ]) + + await loadPipelineDatastreams(true) + + assert.deepEqual(pipelineThingOptions.value, [ + { id: "thing-1", name: "Alpha Site" }, + { id: "thing-2", name: "Zulu Site" }, + ]) +}) + +test("buildDatastreamBrowserEntries groups datastreams by thing name and includes mapped labels", () => { + const entries = buildDatastreamBrowserEntries( + [ + datastream({ + id: "stream-2", + thing_id: "thing-2", + thing_name: "Zulu Site", + observed_property_name: "Temperature", + }), + datastream({ + id: "stream-1", + thing_id: "thing-1", + thing_name: "Alpha Site", + }), + ], + [{ csvColumn: "stage_cfs", thingId: "thing-1", datastreamId: "stream-1" }], + [ + { csvColumn: "stage_cfs", label: "stage_cfs" }, + { csvColumn: "temp_c", label: "temp_c" }, + ] + ) + + assert.deepEqual(entries, [ + { + kind: "thing", + key: "thing-thing-1", + thingId: "thing-1", + thingName: "Alpha Site", + }, + { + kind: "datastream", + key: "datastream-stream-1", + datastream: datastream({ + id: "stream-1", + thing_id: "thing-1", + thing_name: "Alpha Site", + }), + mappedCsvColumn: "stage_cfs", + mappedColumnLabel: "stage_cfs", + }, + { + kind: "thing", + key: "thing-thing-2", + thingId: "thing-2", + thingName: "Zulu Site", + }, + { + kind: "datastream", + key: "datastream-stream-2", + datastream: datastream({ + id: "stream-2", + thing_id: "thing-2", + thing_name: "Zulu Site", + observed_property_name: "Temperature", + }), + mappedCsvColumn: null, + mappedColumnLabel: null, + }, + ]) +}) diff --git a/frontend/tests/usePipeline.test.ts b/frontend/tests/usePipeline.test.ts new file mode 100644 index 0000000..e5891c7 --- 
/dev/null +++ b/frontend/tests/usePipeline.test.ts @@ -0,0 +1,1163 @@ +import test from "node:test" +import assert from "node:assert/strict" + +import type { CsvPreviewResponse } from "../api/hydroserver" +import { + editPipelineCsvSetup, + editPipelineMappings, + editPipelineSourceFile, + abandonPipelineCreation, + applyPreviewColumnSelection, + buildPipelineTransformerSettings, + createPipelineDatasource, + loadPipelinePreview, + selectedPreviewTimestampColumn, + setPipelineHasHeaderRow, + showMorePreviewLines, + submitPipelineConfig, + updatePipelineField, +} from "../composables/usePipeline" +import { + createEmptyPipelineForm, + PREVIEW_PAGE_INCREMENT, + PREVIEW_PAGE_SIZE, + state, +} from "../composables/state" +import { createPipelineFieldStates } from "../pipeline-submit" + +const originalFetch = globalThis.fetch +const originalWindow = globalThis.window + +function createPreview( + overrides: Partial<CsvPreviewResponse> = {} +): CsvPreviewResponse { + return { + raw_lines: [ + "recorded_at,value", + "2024-01-01T00:00:00Z,1.2", + ], + parsed_rows: [ + ["recorded_at", "value"], + ["2024-01-01T00:00:00Z", "1.2"], + ], + detected_header_row: 1, + detected_data_start_row: 2, + detected_delimiter: ",", + total_lines: 2, + encoding: "utf-8", + ...overrides, + } +} + +function jsonResponse(body: unknown): Response { + return new Response(JSON.stringify(body), { + status: 200, + headers: { "Content-Type": "application/json" }, + }) +} + +function resetPipelineState(): void { + state.pipelineForm = createEmptyPipelineForm() + state.pipelinePreview = null + state.pipelineSelectionTarget = null + state.pipelineEditorStartStep = null + state.pipelinePreviewRowsRequested = PREVIEW_PAGE_SIZE + state.pipelineFieldStates = createPipelineFieldStates() + state.pipelineValidationAttempted = false + state.pipelineReadyForMapping = false + state.validatedPipelineSettings = null + state.pipelineDatastreams = [] + state.pipelineDatastreamsLoading = false + state.pipelineMappingDrafts = [] + 
state.validatedColumnMappings = [] + state.pipelineEditTarget = null + state.connectionSummary = null + state.lastConnectionState = null + state.config = null + state.pipelineCreateSubmitting = false +} + +test.beforeEach(() => { + resetPipelineState() + globalThis.fetch = originalFetch +}) + +test.after(() => { + globalThis.fetch = originalFetch + if (originalWindow === undefined) { + Reflect.deleteProperty(globalThis, "window") + } else { + Object.defineProperty(globalThis, "window", { + value: originalWindow, + configurable: true, + writable: true, + }) + } +}) + +test("disabling the header row forces index mode and preserves the timestamp selection", () => { + state.pipelinePreview = createPreview() + state.pipelineForm.hasHeaderRow = true + state.pipelineForm.headerRow = 1 + state.pipelineForm.dataStartRow = 2 + state.pipelineForm.identifierType = "name" + state.pipelineForm.timestamp.key = "recorded_at" + + setPipelineHasHeaderRow(false) + + assert.equal(state.pipelineForm.hasHeaderRow, false) + assert.equal(state.pipelineForm.identifierType, "index") + assert.equal(state.pipelineForm.timestamp.key, "1") +}) + +test("preview column selection stores a 1-based index in index mode", () => { + state.pipelinePreview = createPreview() + state.pipelineForm.hasHeaderRow = true + state.pipelineForm.headerRow = 1 + state.pipelineForm.dataStartRow = 2 + state.pipelineForm.identifierType = "index" + state.pipelineForm.timestamp.key = "1" + + applyPreviewColumnSelection("value") + + assert.equal(state.pipelineForm.timestamp.key, "2") + assert.equal(selectedPreviewTimestampColumn.value, "value") +}) + +test("loading more preview rows does not overwrite manual transformer fixes", async () => { + const responses = [ + createPreview({ + raw_lines: Array.from( + { length: PREVIEW_PAGE_SIZE }, + (_, index) => + index === 0 + ? 
"recorded_at,value" + : `2024-01-${String(index).padStart(2, "0")}T00:00:00Z,${index}` + ), + parsed_rows: Array.from( + { length: PREVIEW_PAGE_SIZE }, + (_, index) => + index === 0 + ? ["recorded_at", "value"] + : [`2024-01-${String(index).padStart(2, "0")}T00:00:00Z`, String(index)] + ), + total_lines: PREVIEW_PAGE_SIZE + PREVIEW_PAGE_INCREMENT, + }), + createPreview({ + raw_lines: Array.from( + { length: PREVIEW_PAGE_SIZE + PREVIEW_PAGE_INCREMENT }, + (_, index) => + index === 0 + ? "recorded_at,value" + : `2024-01-${String(index).padStart(2, "0")}T00:00:00Z,${index}` + ), + parsed_rows: Array.from( + { length: PREVIEW_PAGE_SIZE + PREVIEW_PAGE_INCREMENT }, + (_, index) => + index === 0 + ? ["recorded_at", "value"] + : [`2024-01-${String(index).padStart(2, "0")}T00:00:00Z`, String(index)] + ), + total_lines: PREVIEW_PAGE_SIZE + PREVIEW_PAGE_INCREMENT, + }), + ] + + let callCount = 0 + globalThis.fetch = async () => jsonResponse(responses[callCount++]) + + await loadPipelinePreview("/tmp/preview.csv") + updatePipelineField("delimiter", ";") + await showMorePreviewLines() + + assert.equal(callCount, 2) + assert.equal(state.pipelineForm.delimiter, ";") + assert.equal( + state.pipelinePreviewRowsRequested, + PREVIEW_PAGE_SIZE + PREVIEW_PAGE_INCREMENT + ) + assert.equal( + state.pipelinePreview?.raw_lines.length, + PREVIEW_PAGE_SIZE + PREVIEW_PAGE_INCREMENT + ) +}) + +test("loading a preview auto-detects an ISO timestamp column and timestamp settings", async () => { + globalThis.fetch = async () => + jsonResponse( + createPreview({ + raw_lines: [ + "value,recorded_at,status", + "1.2,2024-01-01T00:00:00Z,ok", + "1.4,2024-01-02T00:00:00Z,ok", + ], + parsed_rows: [ + ["value", "recorded_at", "status"], + ["1.2", "2024-01-01T00:00:00Z", "ok"], + ["1.4", "2024-01-02T00:00:00Z", "ok"], + ], + total_lines: 3, + }) + ) + + await loadPipelinePreview("/tmp/iso-preview.csv") + + assert.equal(state.pipelineForm.identifierType, "name") + 
assert.equal(state.pipelineForm.timestamp.key, "recorded_at") + assert.equal(state.pipelineForm.timestamp.format, "ISO8601") + assert.equal(state.pipelineForm.timestamp.timezoneMode, "embeddedOffset") +}) + +test("loading a preview auto-detects a custom timestamp format", async () => { + globalThis.fetch = async () => + jsonResponse( + createPreview({ + raw_lines: [ + "timestamp,value", + "04/07/2026 13:45:00,1.2", + "04/07/2026 13:50:00,1.3", + ], + parsed_rows: [ + ["timestamp", "value"], + ["04/07/2026 13:45:00", "1.2"], + ["04/07/2026 13:50:00", "1.3"], + ], + total_lines: 3, + }) + ) + + await loadPipelinePreview("/tmp/custom-preview.csv") + + assert.equal(state.pipelineForm.timestamp.key, "timestamp") + assert.equal(state.pipelineForm.timestamp.format, "custom") + assert.equal(state.pipelineForm.timestamp.customFormat, "%m/%d/%Y %H:%M:%S") + assert.equal(state.pipelineForm.timestamp.timezoneMode, "utc") +}) + +test("loading a preview falls back to the first column when no timestamp column is detectable", async () => { + globalThis.fetch = async () => + jsonResponse( + createPreview({ + raw_lines: [ + "sensor,value,status", + "alpha,1.2,ok", + "beta,1.4,ok", + ], + parsed_rows: [ + ["sensor", "value", "status"], + ["alpha", "1.2", "ok"], + ["beta", "1.4", "ok"], + ], + total_lines: 3, + }) + ) + + await loadPipelinePreview("/tmp/fallback-preview.csv") + + assert.equal(state.pipelineForm.timestamp.key, "sensor") + assert.equal(state.pipelineForm.timestamp.format, "ISO8601") + assert.equal(state.pipelineForm.timestamp.timezoneMode, "embeddedOffset") +}) + +test("custom timestamp formats default to UTC timezone handling", () => { + assert.equal(state.pipelineForm.timestamp.format, "ISO8601") + assert.equal(state.pipelineForm.timestamp.timezoneMode, "embeddedOffset") + + updatePipelineField("timestamp_format", "custom") + + assert.equal(state.pipelineForm.timestamp.format, "custom") + assert.equal(state.pipelineForm.timestamp.timezoneMode, "utc") + assert.equal( + 
state.pipelineForm.timestamp.customFormat, + "%Y-%m-%d %H:%M:%S" + ) +}) + +test("selecting daylight-savings mode defaults to an IANA timezone", () => { + updatePipelineField("timestamp_format", "naive") + updatePipelineField("timezone_mode", "daylightSavings") + + assert.equal(state.pipelineForm.timestamp.format, "naive") + assert.equal(state.pipelineForm.timestamp.timezoneMode, "daylightSavings") + assert.equal(state.pipelineForm.timestamp.timezone, "America/Denver") +}) + +test("switching timezone modes resets the controlled vocabulary selection", () => { + updatePipelineField("timestamp_format", "naive") + updatePipelineField("timezone_mode", "fixedOffset") + updatePipelineField("timezone", "-0600") + updatePipelineField("timezone_mode", "daylightSavings") + + assert.equal(state.pipelineForm.timestamp.timezoneMode, "daylightSavings") + assert.equal(state.pipelineForm.timestamp.timezone, "America/Denver") +}) + +test("serializing the pipeline form matches hydroserver csv transformer settings", () => { + state.pipelineForm.hasHeaderRow = true + state.pipelineForm.headerRow = 1 + state.pipelineForm.dataStartRow = 2 + state.pipelineForm.delimiter = "|" + state.pipelineForm.identifierType = "name" + state.pipelineForm.timestamp = { + key: "recorded_at", + format: "custom", + customFormat: "%m/%d/%Y %H:%M:%S", + timezoneMode: "daylightSavings", + timezone: "America/Denver", + } + + assert.deepEqual(buildPipelineTransformerSettings(), { + headerRow: 1, + dataStartRow: 2, + delimiter: "|", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "custom", + customFormat: "%m/%d/%Y %H:%M:%S", + timezoneMode: "daylightSavings", + timezone: "America/Denver", + }, + }) +}) + +test("serializing index mode clears headerRow so hydroserverpy skips file headers", () => { + state.pipelineForm.hasHeaderRow = true + state.pipelineForm.headerRow = 1 + state.pipelineForm.dataStartRow = 2 + state.pipelineForm.identifierType = "index" + state.pipelineForm.timestamp.key 
= "1" + + assert.deepEqual(buildPipelineTransformerSettings(), { + headerRow: null, + dataStartRow: 2, + delimiter: ",", + identifierType: "index", + timestamp: { + key: "1", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }) +}) + +test("submitPipelineConfig marks the transformer as ready for mapping when validation passes", () => { + state.pipelinePreview = createPreview() + state.pipelineForm.filePath = "/tmp/preview.csv" + state.pipelineForm.hasHeaderRow = true + state.pipelineForm.headerRow = 1 + state.pipelineForm.dataStartRow = 2 + state.pipelineForm.identifierType = "name" + state.pipelineForm.timestamp.key = "recorded_at" + + submitPipelineConfig() + + assert.equal(state.pipelineReadyForMapping, true) + assert.deepEqual(state.validatedPipelineSettings, { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }) +}) + +test("changing the form after a submit attempt revalidates and clears mapping readiness", () => { + state.pipelinePreview = createPreview() + state.pipelineForm.filePath = "/tmp/preview.csv" + state.pipelineForm.timestamp.key = "recorded_at" + + submitPipelineConfig() + updatePipelineField("timestamp_format", "custom") + updatePipelineField("custom_timestamp_format", "") + + assert.equal(state.pipelineReadyForMapping, false) + assert.equal(state.validatedPipelineSettings, null) + assert.equal( + state.pipelineFieldStates.custom_timestamp_format.state, + "invalid" + ) +}) + +test("createPipelineDatasource sends the expected payload and resets the wizard after success", async () => { + let requestBody: Record<string, unknown> | null = null + + Object.defineProperty(globalThis, "window", { + value: { location: { hash: "#jobs/new/mapping" } }, + configurable: true, + writable: true, + }) + + globalThis.fetch = async (_input, init) => { + const url = String(_input) + if (url.endsWith("/jobs")) { + requestBody = 
JSON.parse(String(init?.body ?? "{}")) as Record<string, unknown> + return jsonResponse({ + id: "job-1", + name: "river.stage", + enabled: true, + file_path: "/tmp/data/river.stage.csv", + schedule_minutes: 15, + file_config: { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }, + column_mappings: [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ], + recent_logs: [], + status: "pending", + status_message: "Ready for the first run", + last_pushed_timestamp: null, + last_run_at: null, + last_error: null, + }) + } + + if (url.endsWith("/config")) { + return jsonResponse({ + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret", + username: "", + password: "", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + }, + jobs: [ + { + id: "job-1", + name: "river.stage", + enabled: true, + file_path: "/tmp/data/river.stage.csv", + schedule_minutes: 15, + file_config: { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }, + column_mappings: [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ], + }, + ], + }) + } + + throw new Error(`Unexpected request: ${url}`) + } + + state.connectionSummary = { + ok: true, + state: "connected", + message: "Connected", + invalid_field: null, + instance_name: "HydroServer", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + workspace_count: 1, + datastream_count: 2, + permissions_ok: true, + } + state.lastConnectionState = "connected" + state.config = { + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret", + username: "", + password: "", + workspace_id: "workspace-123", 
+ workspace_name: "Primary Workspace", + }, + jobs: [], + } + state.pipelinePreview = createPreview() + state.pipelineForm.filePath = "/tmp/data/river.stage.csv" + state.pipelineReadyForMapping = true + state.validatedPipelineSettings = { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + } + state.validatedColumnMappings = [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ] + + await createPipelineDatasource() + + assert.deepEqual(requestBody, { + name: "river.stage", + enabled: true, + file_path: "/tmp/data/river.stage.csv", + file_config: { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }, + column_mappings: [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ], + }) + assert.equal(state.pipelineForm.filePath, "") + assert.equal(state.pipelinePreview, null) + assert.equal(state.pipelineReadyForMapping, false) + assert.deepEqual(state.validatedColumnMappings, []) + assert.deepEqual(state.pipelineMappingDrafts, []) + assert.equal(state.pipelineCreateSubmitting, false) + assert.equal(globalThis.window.location.hash, "#dashboard") + assert.equal(state.config?.jobs.length, 1) +}) + +test("editPipelineSourceFile preloads an existing datasource on step 1", async () => { + Object.defineProperty(globalThis, "window", { + value: { location: { hash: "#dashboard" } }, + configurable: true, + writable: true, + }) + + globalThis.fetch = async (_input) => { + const url = String(_input) + if (url.includes("/csv/preview")) { + return jsonResponse(createPreview()) + } + + throw new Error(`Unexpected request: ${url}`) + } + + state.config = { + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + 
api_key: "secret", + username: "", + password: "", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + }, + jobs: [ + { + id: "job-1", + name: "existing-source", + enabled: true, + file_path: "/tmp/data/existing.csv", + schedule_minutes: 15, + file_config: { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }, + column_mappings: [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ], + }, + ], + } + + await editPipelineSourceFile("job-1") + + assert.equal(state.pipelineEditTarget?.jobId, "job-1") + assert.equal(state.pipelineEditorStartStep, 1) + assert.equal(state.pipelineForm.filePath, "/tmp/data/existing.csv") + assert.notEqual(state.pipelinePreview, null) + assert.deepEqual(state.validatedPipelineSettings, { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }) + assert.equal(globalThis.window.location.hash, "#jobs/new") +}) + +test("editPipelineCsvSetup opens the existing datasource on step 2", async () => { + Object.defineProperty(globalThis, "window", { + value: { location: { hash: "#dashboard" } }, + configurable: true, + writable: true, + }) + + globalThis.fetch = async (_input) => { + const url = String(_input) + if (url.includes("/csv/preview")) { + return jsonResponse(createPreview()) + } + + throw new Error(`Unexpected request: ${url}`) + } + + state.config = { + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret", + username: "", + password: "", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + }, + jobs: [ + { + id: "job-1", + name: "existing-source", + enabled: true, + file_path: "/tmp/data/existing.csv", + schedule_minutes: 15, + file_config: { + headerRow: 
1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }, + column_mappings: [], + }, + ], + } + + await editPipelineCsvSetup("job-1") + + assert.equal(state.pipelineEditTarget?.jobId, "job-1") + assert.equal(state.pipelineEditorStartStep, 2) + assert.notEqual(state.pipelinePreview, null) + assert.equal(globalThis.window.location.hash, "#jobs/new") +}) + +test("editPipelineMappings preloads mappings and routes to step 3", async () => { + Object.defineProperty(globalThis, "window", { + value: { location: { hash: "#dashboard" } }, + configurable: true, + writable: true, + }) + + globalThis.fetch = async (_input) => { + const url = String(_input) + if (url.includes("/csv/preview")) { + return jsonResponse(createPreview()) + } + + if (url.endsWith("/datastreams")) { + return jsonResponse([ + { + id: "stream-1", + name: "Stage Datastream", + thing_id: "thing-1", + thing_name: "Station 1", + observed_property_name: "Stage", + processing_level_definition: "Raw", + unit_name: "meter", + unit_symbol: "m", + sampled_medium: "Water", + sensor_name: "Sensor 1", + result_type: "number", + }, + ]) + } + + throw new Error(`Unexpected request: ${url}`) + } + + state.config = { + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret", + username: "", + password: "", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + }, + jobs: [ + { + id: "job-1", + name: "existing-source", + enabled: true, + file_path: "/tmp/data/existing.csv", + schedule_minutes: 15, + file_config: { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }, + column_mappings: [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ], + }, + ], + } + + await 
editPipelineMappings("job-1") + + assert.equal(state.pipelineEditTarget?.jobId, "job-1") + assert.equal(state.pipelineReadyForMapping, true) + assert.deepEqual(state.pipelineMappingDrafts, [ + { + csvColumn: "value", + thingId: "thing-1", + datastreamId: "stream-1", + }, + ]) + assert.deepEqual(state.validatedColumnMappings, [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ]) + assert.equal(globalThis.window.location.hash, "#jobs/new/mapping") +}) + +test("createPipelineDatasource updates an existing datasource when editing", async () => { + let requestUrl = "" + let requestMethod = "" + let requestBody: Record | null = null + + Object.defineProperty(globalThis, "window", { + value: { location: { hash: "#jobs/new/mapping" } }, + configurable: true, + writable: true, + }) + + globalThis.fetch = async (_input, init) => { + const url = String(_input) + if (url.endsWith("/jobs/job-1")) { + requestUrl = url + requestMethod = String(init?.method ?? "") + requestBody = JSON.parse(String(init?.body ?? 
"{}")) as Record + return jsonResponse({ + id: "job-1", + name: "existing-source", + enabled: false, + file_path: "/tmp/data/updated.csv", + schedule_minutes: 30, + file_config: { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }, + column_mappings: [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ], + recent_logs: [], + status: "pending", + status_message: "Ready for the next run", + last_pushed_timestamp: null, + last_run_at: null, + last_error: null, + }) + } + + if (url.endsWith("/config")) { + return jsonResponse({ + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret", + username: "", + password: "", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + }, + jobs: [ + { + id: "job-1", + name: "existing-source", + enabled: false, + file_path: "/tmp/data/updated.csv", + schedule_minutes: 30, + file_config: { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }, + column_mappings: [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ], + }, + ], + }) + } + + throw new Error(`Unexpected request: ${url}`) + } + + state.connectionSummary = { + ok: true, + state: "connected", + message: "Connected", + invalid_field: null, + instance_name: "HydroServer", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + workspace_count: 1, + datastream_count: 2, + permissions_ok: true, + } + state.lastConnectionState = "connected" + state.config = { + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret", + username: "", + password: "", + workspace_id: "workspace-123", + workspace_name: "Primary 
Workspace", + }, + jobs: [ + { + id: "job-1", + name: "existing-source", + enabled: false, + file_path: "/tmp/data/existing.csv", + schedule_minutes: 30, + file_config: { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }, + column_mappings: [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ], + }, + ], + } + state.pipelineEditTarget = { + jobId: "job-1", + name: "existing-source", + enabled: false, + scheduleMinutes: 30, + } + state.pipelinePreview = createPreview() + state.pipelineForm.filePath = "/tmp/data/updated.csv" + state.pipelineReadyForMapping = true + state.validatedPipelineSettings = { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + } + state.validatedColumnMappings = [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ] + + await createPipelineDatasource() + + assert.equal(requestMethod, "PUT") + assert.match(requestUrl, /\/jobs\/job-1$/) + assert.deepEqual(requestBody, { + name: "existing-source", + enabled: false, + file_path: "/tmp/data/updated.csv", + schedule_minutes: 30, + file_config: { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + }, + column_mappings: [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ], + }) + assert.equal(state.pipelineEditTarget, null) + assert.equal(globalThis.window.location.hash, "#dashboard") + assert.equal(state.config?.jobs[0]?.file_path, "/tmp/data/updated.csv") +}) + +test("createPipelineDatasource blocks submission when no columns are mapped", async () => { + let fetchCalled 
= false + + globalThis.fetch = async () => { + fetchCalled = true + return jsonResponse({}) + } + + state.connectionSummary = { + ok: true, + state: "connected", + message: "Connected", + invalid_field: null, + instance_name: "HydroServer", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + workspace_count: 1, + datastream_count: 2, + permissions_ok: true, + } + state.lastConnectionState = "connected" + state.config = { + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret", + username: "", + password: "", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + }, + jobs: [], + } + state.pipelinePreview = createPreview() + state.pipelineForm.filePath = "/tmp/data/river.stage.csv" + state.validatedPipelineSettings = { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + } + + await createPipelineDatasource() + + assert.equal(fetchCalled, false) + assert.equal(state.pipelineCreateSubmitting, false) +}) + +test("createPipelineDatasource clears submitting state and keeps step-3 state on failure", async () => { + globalThis.fetch = async () => + new Response(JSON.stringify({ detail: "Create failed" }), { + status: 400, + headers: { "Content-Type": "application/json" }, + }) + + state.connectionSummary = { + ok: true, + state: "connected", + message: "Connected", + invalid_field: null, + instance_name: "HydroServer", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + workspace_count: 1, + datastream_count: 2, + permissions_ok: true, + } + state.lastConnectionState = "connected" + state.config = { + version: 1, + server: { + auth_type: "apikey", + url: "https://example.com", + api_key: "secret", + username: "", + password: "", + workspace_id: "workspace-123", + workspace_name: "Primary Workspace", + }, + jobs: [], + } + state.pipelinePreview = 
createPreview() + state.pipelineForm.filePath = "/tmp/data/river.stage.csv" + state.pipelineReadyForMapping = true + state.validatedPipelineSettings = { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + } + state.validatedColumnMappings = [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ] + + await createPipelineDatasource() + + assert.equal(state.pipelineForm.filePath, "/tmp/data/river.stage.csv") + assert.notEqual(state.pipelinePreview, null) + assert.equal(state.pipelineCreateSubmitting, false) +}) + +test("abandonPipelineCreation resets the wizard and returns to the dashboard", () => { + Object.defineProperty(globalThis, "window", { + value: { location: { hash: "#jobs/new/mapping" } }, + configurable: true, + writable: true, + }) + + state.pipelinePreview = createPreview() + state.pipelineForm.filePath = "/tmp/data/river.stage.csv" + state.pipelineSelectionTarget = "header-row" + state.pipelineValidationAttempted = true + state.pipelineReadyForMapping = true + state.validatedPipelineSettings = { + headerRow: 1, + dataStartRow: 2, + delimiter: ",", + identifierType: "name", + timestamp: { + key: "recorded_at", + format: "ISO8601", + timezoneMode: "embeddedOffset", + }, + } + state.pipelineMappingDrafts = [ + { + csvColumn: "value", + thingId: "thing-1", + datastreamId: "stream-1", + }, + ] + state.validatedColumnMappings = [ + { + csv_column: "value", + datastream_id: "stream-1", + datastream_name: "Stage Datastream", + }, + ] + state.pipelineCreateSubmitting = true + + abandonPipelineCreation() + + assert.equal(state.pipelineForm.filePath, "") + assert.equal(state.pipelinePreview, null) + assert.equal(state.pipelineSelectionTarget, null) + assert.equal(state.pipelineValidationAttempted, false) + assert.equal(state.pipelineReadyForMapping, false) + 
assert.equal(state.validatedPipelineSettings, null) + assert.deepEqual(state.pipelineMappingDrafts, []) + assert.deepEqual(state.validatedColumnMappings, []) + assert.equal(state.pipelineCreateSubmitting, false) + assert.equal(globalThis.window.location.hash, "#dashboard") +}) diff --git a/frontend/views/DashboardView.vue b/frontend/views/DashboardView.vue new file mode 100644 index 0000000..3d1729f --- /dev/null +++ b/frontend/views/DashboardView.vue @@ -0,0 +1,819 @@ + + + diff --git a/frontend/views/PipelineEditorView.vue b/frontend/views/PipelineEditorView.vue new file mode 100644 index 0000000..cda34a0 --- /dev/null +++ b/frontend/views/PipelineEditorView.vue @@ -0,0 +1,186 @@ + + + diff --git a/frontend/views/PipelineMappingView.vue b/frontend/views/PipelineMappingView.vue new file mode 100644 index 0000000..af72875 --- /dev/null +++ b/frontend/views/PipelineMappingView.vue @@ -0,0 +1,1101 @@ + + + diff --git a/frontend/views/ServiceSetupView.vue b/frontend/views/ServiceSetupView.vue new file mode 100644 index 0000000..b373dde --- /dev/null +++ b/frontend/views/ServiceSetupView.vue @@ -0,0 +1,131 @@ + + + diff --git a/frontend/views/WelcomeView.vue b/frontend/views/WelcomeView.vue new file mode 100644 index 0000000..c3c18d4 --- /dev/null +++ b/frontend/views/WelcomeView.vue @@ -0,0 +1,19 @@ + + + diff --git a/frontend/vite-env.d.ts b/frontend/vite-env.d.ts new file mode 100644 index 0000000..fcd5093 --- /dev/null +++ b/frontend/vite-env.d.ts @@ -0,0 +1,16 @@ +/// + +interface ImportMetaEnv { + readonly VITE_API_BASE_URL?: string +} + +interface ImportMeta { + readonly env: ImportMetaEnv +} + +declare module "*.vue" { + import type { DefineComponent } from "vue" + + const component: DefineComponent, Record, unknown> + export default component +} diff --git a/icons/128x128.png b/icons/128x128.png new file mode 100644 index 0000000..808fc64 Binary files /dev/null and b/icons/128x128.png differ diff --git a/icons/128x128@2x.png b/icons/128x128@2x.png new file 
mode 100644 index 0000000..eb41745 Binary files /dev/null and b/icons/128x128@2x.png differ diff --git a/icons/32x32.png b/icons/32x32.png new file mode 100644 index 0000000..de79ca0 Binary files /dev/null and b/icons/32x32.png differ diff --git a/icons/Square107x107Logo.png b/icons/Square107x107Logo.png new file mode 100644 index 0000000..3f389ac Binary files /dev/null and b/icons/Square107x107Logo.png differ diff --git a/icons/Square142x142Logo.png b/icons/Square142x142Logo.png new file mode 100644 index 0000000..e3a692f Binary files /dev/null and b/icons/Square142x142Logo.png differ diff --git a/icons/Square150x150Logo.png b/icons/Square150x150Logo.png new file mode 100644 index 0000000..a04b350 Binary files /dev/null and b/icons/Square150x150Logo.png differ diff --git a/icons/Square284x284Logo.png b/icons/Square284x284Logo.png new file mode 100644 index 0000000..d816ff3 Binary files /dev/null and b/icons/Square284x284Logo.png differ diff --git a/icons/Square30x30Logo.png b/icons/Square30x30Logo.png new file mode 100644 index 0000000..b4b6b4f Binary files /dev/null and b/icons/Square30x30Logo.png differ diff --git a/icons/Square310x310Logo.png b/icons/Square310x310Logo.png new file mode 100644 index 0000000..3dcba48 Binary files /dev/null and b/icons/Square310x310Logo.png differ diff --git a/icons/Square44x44Logo.png b/icons/Square44x44Logo.png new file mode 100644 index 0000000..5c1105c Binary files /dev/null and b/icons/Square44x44Logo.png differ diff --git a/icons/Square71x71Logo.png b/icons/Square71x71Logo.png new file mode 100644 index 0000000..57eeae4 Binary files /dev/null and b/icons/Square71x71Logo.png differ diff --git a/icons/Square89x89Logo.png b/icons/Square89x89Logo.png new file mode 100644 index 0000000..78a3b52 Binary files /dev/null and b/icons/Square89x89Logo.png differ diff --git a/icons/StoreLogo.png b/icons/StoreLogo.png new file mode 100644 index 0000000..7f5f1ed Binary files /dev/null and b/icons/StoreLogo.png differ diff --git 
a/icons/icon-color-thick.svg b/icons/icon-color-thick.svg new file mode 100644 index 0000000..24a5ec1 --- /dev/null +++ b/icons/icon-color-thick.svg @@ -0,0 +1,23 @@ + + + + + + + + + diff --git a/icons/icon-color.svg b/icons/icon-color.svg new file mode 100644 index 0000000..50d0dc1 --- /dev/null +++ b/icons/icon-color.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/icons/icon-desktop-thick.svg b/icons/icon-desktop-thick.svg new file mode 100644 index 0000000..c08cea1 --- /dev/null +++ b/icons/icon-desktop-thick.svg @@ -0,0 +1,26 @@ + + + + + + + + + + + + diff --git a/icons/icon-desktop.svg b/icons/icon-desktop.svg new file mode 100644 index 0000000..f12424a --- /dev/null +++ b/icons/icon-desktop.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/icons/icon-tray-white.svg b/icons/icon-tray-white.svg new file mode 100644 index 0000000..71ec93c --- /dev/null +++ b/icons/icon-tray-white.svg @@ -0,0 +1,25 @@ + + + + + + + + + + + diff --git a/icons/icon.icns b/icons/icon.icns new file mode 100644 index 0000000..71abb38 Binary files /dev/null and b/icons/icon.icns differ diff --git a/icons/icon.ico b/icons/icon.ico new file mode 100644 index 0000000..ed5efe3 Binary files /dev/null and b/icons/icon.ico differ diff --git a/icons/icon.png b/icons/icon.png new file mode 100644 index 0000000..e2ffdf3 Binary files /dev/null and b/icons/icon.png differ diff --git a/icons/tray-icon.png b/icons/tray-icon.png new file mode 100644 index 0000000..cb11110 Binary files /dev/null and b/icons/tray-icon.png differ diff --git a/index.html b/index.html new file mode 100644 index 0000000..7bb52c2 --- /dev/null +++ b/index.html @@ -0,0 +1,12 @@ + + + + + + HydroServer Streaming Data Loader + + +
+ + + diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..00b572b --- /dev/null +++ b/package-lock.json @@ -0,0 +1,3144 @@ +{ + "name": "streaming-data-loader", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "streaming-data-loader", + "version": "0.1.0", + "dependencies": { + "@tauri-apps/api": "^2.8.0", + "@tauri-apps/plugin-dialog": "^2.0.0", + "vue": "^3.5.32" + }, + "devDependencies": { + "@tailwindcss/cli": "^4.1.4", + "@tauri-apps/cli": "^2.10.1", + "@types/node": "^22.13.10", + "@vitejs/plugin-vue": "^6.0.1", + "prettier": "^2.8.8", + "tailwindcss": "^4.1.4", + "tsx": "^4.21.0", + "typescript": "^5.6.3", + "vite": "^6.0.3" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.2.tgz", + "integrity": "sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": 
"https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz", + "integrity": "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.12.tgz", + "integrity": "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.12.tgz", + "integrity": "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.12.tgz", + "integrity": "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": 
{ + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz", + "integrity": "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.12.tgz", + "integrity": "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.12.tgz", + "integrity": "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.12.tgz", + "integrity": "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.12.tgz", + "integrity": "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==", + "cpu": [ + "arm" + ], + 
"dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.12.tgz", + "integrity": "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.12.tgz", + "integrity": "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.12.tgz", + "integrity": "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.12.tgz", + "integrity": "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.12.tgz", + "integrity": 
"sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.12.tgz", + "integrity": "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.12.tgz", + "integrity": "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.12.tgz", + "integrity": "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.12.tgz", + "integrity": "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.12", + "resolved": 
"https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.12.tgz", + "integrity": "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.12.tgz", + "integrity": "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.12.tgz", + "integrity": "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.12.tgz", + "integrity": "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.12.tgz", + "integrity": "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": 
{ + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.12.tgz", + "integrity": "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.12.tgz", + "integrity": "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.12.tgz", + "integrity": "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@parcel/watcher": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.6.tgz", + "integrity": "sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "detect-libc": "^2.0.3", + "is-glob": "^4.0.3", + "node-addon-api": "^7.0.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "@parcel/watcher-android-arm64": "2.5.6", + "@parcel/watcher-darwin-arm64": "2.5.6", + "@parcel/watcher-darwin-x64": "2.5.6", + "@parcel/watcher-freebsd-x64": "2.5.6", + 
"@parcel/watcher-linux-arm-glibc": "2.5.6", + "@parcel/watcher-linux-arm-musl": "2.5.6", + "@parcel/watcher-linux-arm64-glibc": "2.5.6", + "@parcel/watcher-linux-arm64-musl": "2.5.6", + "@parcel/watcher-linux-x64-glibc": "2.5.6", + "@parcel/watcher-linux-x64-musl": "2.5.6", + "@parcel/watcher-win32-arm64": "2.5.6", + "@parcel/watcher-win32-ia32": "2.5.6", + "@parcel/watcher-win32-x64": "2.5.6" + } + }, + "node_modules/@parcel/watcher-android-arm64": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.6.tgz", + "integrity": "sha512-YQxSS34tPF/6ZG7r/Ih9xy+kP/WwediEUsqmtf0cuCV5TPPKw/PQHRhueUo6JdeFJaqV3pyjm0GdYjZotbRt/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-darwin-arm64": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.6.tgz", + "integrity": "sha512-Z2ZdrnwyXvvvdtRHLmM4knydIdU9adO3D4n/0cVipF3rRiwP+3/sfzpAwA/qKFL6i1ModaabkU7IbpeMBgiVEA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-darwin-x64": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.6.tgz", + "integrity": "sha512-HgvOf3W9dhithcwOWX9uDZyn1lW9R+7tPZ4sug+NGrGIo4Rk1hAXLEbcH1TQSqxts0NYXXlOWqVpvS1SFS4fRg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-freebsd-x64": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.6.tgz", + "integrity": "sha512-vJVi8yd/qzJxEKHkeemh7w3YAn6RJCtYlE4HPMoVnCpIXEzSrxErBW5SJBgKLbXU3WdIpkjBTeUNtyBVn8TRng==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm-glibc": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.6.tgz", + "integrity": "sha512-9JiYfB6h6BgV50CCfasfLf/uvOcJskMSwcdH1PHH9rvS1IrNy8zad6IUVPVUfmXr+u+Km9IxcfMLzgdOudz9EQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm-musl": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.6.tgz", + "integrity": "sha512-Ve3gUCG57nuUUSyjBq/MAM0CzArtuIOxsBdQ+ftz6ho8n7s1i9E1Nmk/xmP323r2YL0SONs1EuwqBp2u1k5fxg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm64-glibc": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.6.tgz", + "integrity": "sha512-f2g/DT3NhGPdBmMWYoxixqYr3v/UXcmLOYy16Bx0TM20Tchduwr4EaCbmxh1321TABqPGDpS8D/ggOTaljijOA==", + "cpu": [ + "arm64" + ], + 
"dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-arm64-musl": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.6.tgz", + "integrity": "sha512-qb6naMDGlbCwdhLj6hgoVKJl2odL34z2sqkC7Z6kzir8b5W65WYDpLB6R06KabvZdgoHI/zxke4b3zR0wAbDTA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-x64-glibc": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.6.tgz", + "integrity": "sha512-kbT5wvNQlx7NaGjzPFu8nVIW1rWqV780O7ZtkjuWaPUgpv2NMFpjYERVi0UYj1msZNyCzGlaCWEtzc+exjMGbQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-linux-x64-musl": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.6.tgz", + "integrity": "sha512-1JRFeC+h7RdXwldHzTsmdtYR/Ku8SylLgTU/reMuqdVD7CtLwf0VR1FqeprZ0eHQkO0vqsbvFLXUmYm/uNKJBg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-win32-arm64": { + "version": "2.5.6", + "resolved": 
"https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.6.tgz", + "integrity": "sha512-3ukyebjc6eGlw9yRt678DxVF7rjXatWiHvTXqphZLvo7aC5NdEgFufVwjFfY51ijYEWpXbqF5jtrK275z52D4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-win32-ia32": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.6.tgz", + "integrity": "sha512-k35yLp1ZMwwee3Ez/pxBi5cf4AoBKYXj00CZ80jUz5h8prpiaQsiRPKQMxoLstNuqe2vR4RNPEAEcjEFzhEz/g==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher-win32-x64": { + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.6.tgz", + "integrity": "sha512-hbQlYcCq5dlAX9Qx+kFb0FHue6vbjlf0FrNzSKdYK2APUf7tGfGxQCk2ihEREmbR6ZMc0MVAD5RIX/41gpUzTw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.2", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.2.tgz", + "integrity": "sha512-izyXV/v+cHiRfozX62W9htOAvwMo4/bXKDrQ+vom1L1qRuexPock/7VZDAhnpHCLNejd3NJ6hiab+tO0D44Rgw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.60.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.1.tgz", + "integrity": "sha512-d6FinEBLdIiK+1uACUttJKfgZREXrF0Qc2SmLII7W2AD8FfiZ9Wjd+rD/iRuf5s5dWrr1GgwXCvPqOuDquOowA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.1.tgz", + "integrity": "sha512-YjG/EwIDvvYI1YvYbHvDz/BYHtkY4ygUIXHnTdLhG+hKIQFBiosfWiACWortsKPKU/+dUwQQCKQM3qrDe8c9BA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.1.tgz", + "integrity": "sha512-mjCpF7GmkRtSJwon+Rq1N8+pI+8l7w5g9Z3vWj4T7abguC4Czwi3Yu/pFaLvA3TTeMVjnu3ctigusqWUfjZzvw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.1.tgz", + "integrity": "sha512-haZ7hJ1JT4e9hqkoT9R/19XW2QKqjfJVv+i5AGg57S+nLk9lQnJ1F/eZloRO3o9Scy9CM3wQ9l+dkXtcBgN5Ew==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.1.tgz", + "integrity": "sha512-czw90wpQq3ZsAVBlinZjAYTKduOjTywlG7fEeWKUA7oCmpA8xdTkxZZlwNJKWqILlq0wehoZcJYfBvOyhPTQ6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.60.1", + 
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.1.tgz", + "integrity": "sha512-KVB2rqsxTHuBtfOeySEyzEOB7ltlB/ux38iu2rBQzkjbwRVlkhAGIEDiiYnO2kFOkJp+Z7pUXKyrRRFuFUKt+g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.1.tgz", + "integrity": "sha512-L+34Qqil+v5uC0zEubW7uByo78WOCIrBvci69E7sFASRl0X7b/MB6Cqd1lky/CtcSVTydWa2WZwFuWexjS5o6g==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.1.tgz", + "integrity": "sha512-n83O8rt4v34hgFzlkb1ycniJh7IR5RCIqt6mz1VRJD6pmhRi0CXdmfnLu9dIUS6buzh60IvACM842Ffb3xd6Gg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.1.tgz", + "integrity": "sha512-Nql7sTeAzhTAja3QXeAI48+/+GjBJ+QmAH13snn0AJSNL50JsDqotyudHyMbO2RbJkskbMbFJfIJKWA6R1LCJQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.1.tgz", + "integrity": "sha512-+pUymDhd0ys9GcKZPPWlFiZ67sTWV5UU6zOJat02M1+PiuSGDziyRuI/pPue3hoUwm2uGfxdL+trT6Z9rxnlMA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.1.tgz", + "integrity": "sha512-VSvgvQeIcsEvY4bKDHEDWcpW4Yw7BtlKG1GUT4FzBUlEKQK0rWHYBqQt6Fm2taXS+1bXvJT6kICu5ZwqKCnvlQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.1.tgz", + "integrity": "sha512-4LqhUomJqwe641gsPp6xLfhqWMbQV04KtPp7/dIp0nzPxAkNY1AbwL5W0MQpcalLYk07vaW9Kp1PBhdpZYYcEw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.1.tgz", + "integrity": "sha512-tLQQ9aPvkBxOc/EUT6j3pyeMD6Hb8QF2BTBnCQWP/uu1lhc9AIrIjKnLYMEroIz/JvtGYgI9dF3AxHZNaEH0rw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.1.tgz", + "integrity": "sha512-RMxFhJwc9fSXP6PqmAz4cbv3kAyvD1etJFjTx4ONqFP9DkTkXsAMU4v3Vyc5BgzC+anz7nS/9tp4obsKfqkDHg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.1.tgz", + "integrity": "sha512-QKgFl+Yc1eEk6MmOBfRHYF6lTxiiiV3/z/BRrbSiW2I7AFTXoBFvdMEyglohPj//2mZS4hDOqeB0H1ACh3sBbg==", + "cpu": [ + "riscv64" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.1.tgz", + "integrity": "sha512-RAjXjP/8c6ZtzatZcA1RaQr6O1TRhzC+adn8YZDnChliZHviqIjmvFwHcxi4JKPSDAt6Uhf/7vqcBzQJy0PDJg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.1.tgz", + "integrity": "sha512-wcuocpaOlaL1COBYiA89O6yfjlp3RwKDeTIA0hM7OpmhR1Bjo9j31G1uQVpDlTvwxGn2nQs65fBFL5UFd76FcQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.1.tgz", + "integrity": "sha512-77PpsFQUCOiZR9+LQEFg9GClyfkNXj1MP6wRnzYs0EeWbPcHs02AXu4xuUbM1zhwn3wqaizle3AEYg5aeoohhg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.1.tgz", + "integrity": "sha512-5cIATbk5vynAjqqmyBjlciMJl1+R/CwX9oLk/EyiFXDWd95KpHdrOJT//rnUl4cUcskrd0jCCw3wpZnhIHdD9w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.1.tgz", + "integrity": "sha512-cl0w09WsCi17mcmWqqglez9Gk8isgeWvoUZ3WiJFYSR3zjBQc2J5/ihSjpl+VLjPqjQ/1hJRcqBfLjssREQILw==", + 
"cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.1.tgz", + "integrity": "sha512-4Cv23ZrONRbNtbZa37mLSueXUCtN7MXccChtKpUnQNgF010rjrjfHx3QxkS2PI7LqGT5xXyYs1a7LbzAwT0iCA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.1.tgz", + "integrity": "sha512-i1okWYkA4FJICtr7KpYzFpRTHgy5jdDbZiWfvny21iIKky5YExiDXP+zbXzm3dUcFpkEeYNHgQ5fuG236JPq0g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.1.tgz", + "integrity": "sha512-u09m3CuwLzShA0EYKMNiFgcjjzwqtUMLmuCJLeZWjjOYA3IT2Di09KaxGBTP9xVztWyIWjVdsB2E9goMjZvTQg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.1.tgz", + "integrity": "sha512-k+600V9Zl1CM7eZxJgMyTUzmrmhB/0XZnF4pRypKAlAgxmedUA+1v9R+XOFv56W4SlHEzfeMtzujLJD22Uz5zg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.1.tgz", + "integrity": 
"sha512-lWMnixq/QzxyhTV6NjQJ4SFo1J6PvOX8vUx5Wb4bBPsEb+8xZ89Bz6kOXpfXj9ak9AHTQVQzlgzBEc1SyM27xQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@tailwindcss/cli": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/cli/-/cli-4.2.2.tgz", + "integrity": "sha512-iJS+8kAFZ8HPqnh0O5DHCLjo4L6dD97DBQEkrhfSO4V96xeefUus2jqsBs1dUMt3OU9Ks4qIkiY0mpL5UW+4LQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@parcel/watcher": "^2.5.1", + "@tailwindcss/node": "4.2.2", + "@tailwindcss/oxide": "4.2.2", + "enhanced-resolve": "^5.19.0", + "mri": "^1.2.0", + "picocolors": "^1.1.1", + "tailwindcss": "4.2.2" + }, + "bin": { + "tailwindcss": "dist/index.mjs" + } + }, + "node_modules/@tailwindcss/node": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.2.2.tgz", + "integrity": "sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.5", + "enhanced-resolve": "^5.19.0", + "jiti": "^2.6.1", + "lightningcss": "1.32.0", + "magic-string": "^0.30.21", + "source-map-js": "^1.2.1", + "tailwindcss": "4.2.2" + } + }, + "node_modules/@tailwindcss/oxide": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.2.2.tgz", + "integrity": "sha512-qEUA07+E5kehxYp9BVMpq9E8vnJuBHfJEC0vPC5e7iL/hw7HR61aDKoVoKzrG+QKp56vhNZe4qwkRmMC0zDLvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 20" + }, + "optionalDependencies": { + "@tailwindcss/oxide-android-arm64": "4.2.2", + "@tailwindcss/oxide-darwin-arm64": "4.2.2", + "@tailwindcss/oxide-darwin-x64": "4.2.2", + "@tailwindcss/oxide-freebsd-x64": "4.2.2", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.2", + "@tailwindcss/oxide-linux-arm64-gnu": "4.2.2", + "@tailwindcss/oxide-linux-arm64-musl": "4.2.2", + 
"@tailwindcss/oxide-linux-x64-gnu": "4.2.2", + "@tailwindcss/oxide-linux-x64-musl": "4.2.2", + "@tailwindcss/oxide-wasm32-wasi": "4.2.2", + "@tailwindcss/oxide-win32-arm64-msvc": "4.2.2", + "@tailwindcss/oxide-win32-x64-msvc": "4.2.2" + } + }, + "node_modules/@tailwindcss/oxide-android-arm64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.2.2.tgz", + "integrity": "sha512-dXGR1n+P3B6748jZO/SvHZq7qBOqqzQ+yFrXpoOWWALWndF9MoSKAT3Q0fYgAzYzGhxNYOoysRvYlpixRBBoDg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-darwin-arm64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.2.2.tgz", + "integrity": "sha512-iq9Qjr6knfMpZHj55/37ouZeykwbDqF21gPFtfnhCCKGDcPI/21FKC9XdMO/XyBM7qKORx6UIhGgg6jLl7BZlg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-darwin-x64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.2.2.tgz", + "integrity": "sha512-BlR+2c3nzc8f2G639LpL89YY4bdcIdUmiOOkv2GQv4/4M0vJlpXEa0JXNHhCHU7VWOKWT/CjqHdTP8aUuDJkuw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-freebsd-x64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.2.2.tgz", + "integrity": "sha512-YUqUgrGMSu2CDO82hzlQ5qSb5xmx3RUrke/QgnoEx7KvmRJHQuZHZmZTLSuuHwFf0DJPybFMXMYf+WJdxHy/nQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 
20" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.2.2.tgz", + "integrity": "sha512-FPdhvsW6g06T9BWT0qTwiVZYE2WIFo2dY5aCSpjG/S/u1tby+wXoslXS0kl3/KXnULlLr1E3NPRRw0g7t2kgaQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.2.2.tgz", + "integrity": "sha512-4og1V+ftEPXGttOO7eCmW7VICmzzJWgMx+QXAJRAhjrSjumCwWqMfkDrNu1LXEQzNAwz28NCUpucgQPrR4S2yw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-musl": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.2.2.tgz", + "integrity": "sha512-oCfG/mS+/+XRlwNjnsNLVwnMWYH7tn/kYPsNPh+JSOMlnt93mYNCKHYzylRhI51X+TbR+ufNhhKKzm6QkqX8ag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-gnu": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.2.2.tgz", + "integrity": "sha512-rTAGAkDgqbXHNp/xW0iugLVmX62wOp2PoE39BTCGKjv3Iocf6AFbRP/wZT/kuCxC9QBh9Pu8XPkv/zCZB2mcMg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-musl": { + "version": "4.2.2", + "resolved": 
"https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.2.2.tgz", + "integrity": "sha512-XW3t3qwbIwiSyRCggeO2zxe3KWaEbM0/kW9e8+0XpBgyKU4ATYzcVSMKteZJ1iukJ3HgHBjbg9P5YPRCVUxlnQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.2.2.tgz", + "integrity": "sha512-eKSztKsmEsn1O5lJ4ZAfyn41NfG7vzCg496YiGtMDV86jz1q/irhms5O0VrY6ZwTUkFy/EKG3RfWgxSI3VbZ8Q==", + "bundleDependencies": [ + "@napi-rs/wasm-runtime", + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util", + "@emnapi/wasi-threads", + "tslib" + ], + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.8.1", + "@emnapi/runtime": "^1.8.1", + "@emnapi/wasi-threads": "^1.1.0", + "@napi-rs/wasm-runtime": "^1.1.1", + "@tybys/wasm-util": "^0.10.1", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.2.2.tgz", + "integrity": "sha512-qPmaQM4iKu5mxpsrWZMOZRgZv1tOZpUm+zdhhQP0VhJfyGGO3aUKdbh3gDZc/dPLQwW4eSqWGrrcWNBZWUWaXQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-win32-x64-msvc": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.2.2.tgz", + "integrity": "sha512-1T/37VvI7WyH66b+vqHj/cLwnCxt7Qt3WFu5Q8hk65aOvlwAhs7rAp1VkulBJw/N4tMirXjVnylTR72uI0HGcA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + 
], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tauri-apps/api": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/api/-/api-2.10.1.tgz", + "integrity": "sha512-hKL/jWf293UDSUN09rR69hrToyIXBb8CjGaWC7gfinvnQrBVvnLr08FeFi38gxtugAVyVcTa5/FD/Xnkb1siBw==", + "license": "Apache-2.0 OR MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/tauri" + } + }, + "node_modules/@tauri-apps/cli": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli/-/cli-2.10.1.tgz", + "integrity": "sha512-jQNGF/5quwORdZSSLtTluyKQ+o6SMa/AUICfhf4egCGFdMHqWssApVgYSbg+jmrZoc8e1DscNvjTnXtlHLS11g==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "bin": { + "tauri": "tauri.js" + }, + "engines": { + "node": ">= 10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/tauri" + }, + "optionalDependencies": { + "@tauri-apps/cli-darwin-arm64": "2.10.1", + "@tauri-apps/cli-darwin-x64": "2.10.1", + "@tauri-apps/cli-linux-arm-gnueabihf": "2.10.1", + "@tauri-apps/cli-linux-arm64-gnu": "2.10.1", + "@tauri-apps/cli-linux-arm64-musl": "2.10.1", + "@tauri-apps/cli-linux-riscv64-gnu": "2.10.1", + "@tauri-apps/cli-linux-x64-gnu": "2.10.1", + "@tauri-apps/cli-linux-x64-musl": "2.10.1", + "@tauri-apps/cli-win32-arm64-msvc": "2.10.1", + "@tauri-apps/cli-win32-ia32-msvc": "2.10.1", + "@tauri-apps/cli-win32-x64-msvc": "2.10.1" + } + }, + "node_modules/@tauri-apps/cli-darwin-arm64": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.10.1.tgz", + "integrity": "sha512-Z2OjCXiZ+fbYZy7PmP3WRnOpM9+Fy+oonKDEmUE6MwN4IGaYqgceTjwHucc/kEEYZos5GICve35f7ZiizgqEnQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 OR MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/cli-darwin-x64": { + "version": "2.10.1", + "resolved": 
"https://registry.npmjs.org/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-2.10.1.tgz", + "integrity": "sha512-V/irQVvjPMGOTQqNj55PnQPVuH4VJP8vZCN7ajnj+ZS8Kom1tEM2hR3qbbIRoS3dBKs5mbG8yg1WC+97dq17Pw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 OR MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/cli-linux-arm-gnueabihf": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-2.10.1.tgz", + "integrity": "sha512-Hyzwsb4VnCWKGfTw+wSt15Z2pLw2f0JdFBfq2vHBOBhvg7oi6uhKiF87hmbXOBXUZaGkyRDkCHsdzJcIfoJC2w==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "Apache-2.0 OR MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/cli-linux-arm64-gnu": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-2.10.1.tgz", + "integrity": "sha512-OyOYs2t5GkBIvyWjA1+h4CZxTcdz1OZPCWAPz5DYEfB0cnWHERTnQ/SLayQzncrT0kwRoSfSz9KxenkyJoTelA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 OR MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/cli-linux-arm64-musl": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.10.1.tgz", + "integrity": "sha512-MIj78PDDGjkg3NqGptDOGgfXks7SYJwhiMh8SBoZS+vfdz7yP5jN18bNaLnDhsVIPARcAhE1TlsZe/8Yxo2zqg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 OR MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/cli-linux-riscv64-gnu": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-riscv64-gnu/-/cli-linux-riscv64-gnu-2.10.1.tgz", + "integrity": 
"sha512-X0lvOVUg8PCVaoEtEAnpxmnkwlE1gcMDTqfhbefICKDnOTJ5Est3qL0SrWxizDackIOKBcvtpejrSiVpuJI1kw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "Apache-2.0 OR MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/cli-linux-x64-gnu": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-2.10.1.tgz", + "integrity": "sha512-2/12bEzsJS9fAKybxgicCDFxYD1WEI9kO+tlDwX5znWG2GwMBaiWcmhGlZ8fi+DMe9CXlcVarMTYc0L3REIRxw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 OR MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/cli-linux-x64-musl": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-2.10.1.tgz", + "integrity": "sha512-Y8J0ZzswPz50UcGOFuXGEMrxbjwKSPgXftx5qnkuMs2rmwQB5ssvLb6tn54wDSYxe7S6vlLob9vt0VKuNOaCIQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 OR MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/cli-win32-arm64-msvc": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-2.10.1.tgz", + "integrity": "sha512-iSt5B86jHYAPJa/IlYw++SXtFPGnWtFJriHn7X0NFBVunF6zu9+/zOn8OgqIWSl8RgzhLGXQEEtGBdR4wzpVgg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 OR MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/cli-win32-ia32-msvc": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-2.10.1.tgz", + "integrity": "sha512-gXyxgEzsFegmnWywYU5pEBURkcFN/Oo45EAwvZrHMh+zUSEAvO5E8TXsgPADYm31d1u7OQU3O3HsYfVBf2moHw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "Apache-2.0 OR 
MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/cli-win32-x64-msvc": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-2.10.1.tgz", + "integrity": "sha512-6Cn7YpPFwzChy0ERz6djKEmUehWrYlM+xTaNzGPgZocw3BD7OfwfWHKVWxXzdjEW2KfKkHddfdxK1XXTYqBRLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 OR MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tauri-apps/plugin-dialog": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/@tauri-apps/plugin-dialog/-/plugin-dialog-2.6.0.tgz", + "integrity": "sha512-q4Uq3eY87TdcYzXACiYSPhmpBA76shgmQswGkSVio4C82Sz2W4iehe9TnKYwbq7weHiL88Yw19XZm7v28+Micg==", + "license": "MIT OR Apache-2.0", + "dependencies": { + "@tauri-apps/api": "^2.8.0" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.19.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.15.tgz", + "integrity": "sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@vitejs/plugin-vue": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-6.0.5.tgz", + "integrity": "sha512-bL3AxKuQySfk1iGcBsQnoRVexTPJq0Z/ixFVM8OhVJAP6ZXXXLtM7NFKWhLl30Kg7uTBqIaPXbh+nuQCuBDedg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rolldown/pluginutils": "1.0.0-rc.2" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "peerDependencies": { + "vite": 
"^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0", + "vue": "^3.2.25" + } + }, + "node_modules/@vue/compiler-core": { + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.32.tgz", + "integrity": "sha512-4x74Tbtqnda8s/NSD6e1Dr5p1c8HdMU5RWSjMSUzb8RTcUQqevDCxVAitcLBKT+ie3o0Dl9crc/S/opJM7qBGQ==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.2", + "@vue/shared": "3.5.32", + "entities": "^7.0.1", + "estree-walker": "^2.0.2", + "source-map-js": "^1.2.1" + } + }, + "node_modules/@vue/compiler-dom": { + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.32.tgz", + "integrity": "sha512-ybHAu70NtiEI1fvAUz3oXZqkUYEe5J98GjMDpTGl5iHb0T15wQYLR4wE3h9xfuTNA+Cm2f4czfe8B4s+CCH57Q==", + "license": "MIT", + "dependencies": { + "@vue/compiler-core": "3.5.32", + "@vue/shared": "3.5.32" + } + }, + "node_modules/@vue/compiler-sfc": { + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.32.tgz", + "integrity": "sha512-8UYUYo71cP/0YHMO814TRZlPuUUw3oifHuMR7Wp9SNoRSrxRQnhMLNlCeaODNn6kNTJsjFoQ/kqIj4qGvya4Xg==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.2", + "@vue/compiler-core": "3.5.32", + "@vue/compiler-dom": "3.5.32", + "@vue/compiler-ssr": "3.5.32", + "@vue/shared": "3.5.32", + "estree-walker": "^2.0.2", + "magic-string": "^0.30.21", + "postcss": "^8.5.8", + "source-map-js": "^1.2.1" + } + }, + "node_modules/@vue/compiler-ssr": { + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.32.tgz", + "integrity": "sha512-Gp4gTs22T3DgRotZ8aA/6m2jMR+GMztvBXUBEUOYOcST+giyGWJ4WvFd7QLHBkzTxkfOt8IELKNdpzITLbA2rw==", + "license": "MIT", + "dependencies": { + "@vue/compiler-dom": "3.5.32", + "@vue/shared": "3.5.32" + } + }, + "node_modules/@vue/reactivity": { + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.32.tgz", + 
"integrity": "sha512-/ORasxSGvZ6MN5gc+uE364SxFdJ0+WqVG0CENXaGW58TOCdrAW76WWaplDtECeS1qphvtBZtR+3/o1g1zL4xPQ==", + "license": "MIT", + "dependencies": { + "@vue/shared": "3.5.32" + } + }, + "node_modules/@vue/runtime-core": { + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.32.tgz", + "integrity": "sha512-pDrXCejn4UpFDFmMd27AcJEbHaLemaE5o4pbb7sLk79SRIhc6/t34BQA7SGNgYtbMnvbF/HHOftYBgFJtUoJUQ==", + "license": "MIT", + "dependencies": { + "@vue/reactivity": "3.5.32", + "@vue/shared": "3.5.32" + } + }, + "node_modules/@vue/runtime-dom": { + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.32.tgz", + "integrity": "sha512-1CDVv7tv/IV13V8Nip1k/aaObVbWqRlVCVezTwx3K07p7Vxossp5JU1dcPNhJk3w347gonIUT9jQOGutyJrSVQ==", + "license": "MIT", + "dependencies": { + "@vue/reactivity": "3.5.32", + "@vue/runtime-core": "3.5.32", + "@vue/shared": "3.5.32", + "csstype": "^3.2.3" + } + }, + "node_modules/@vue/server-renderer": { + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.32.tgz", + "integrity": "sha512-IOjm2+JQwRFS7W28HNuJeXQle9KdZbODFY7hFGVtnnghF51ta20EWAZJHX+zLGtsHhaU6uC9BGPV52KVpYryMQ==", + "license": "MIT", + "dependencies": { + "@vue/compiler-ssr": "3.5.32", + "@vue/shared": "3.5.32" + }, + "peerDependencies": { + "vue": "3.5.32" + } + }, + "node_modules/@vue/shared": { + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.32.tgz", + "integrity": "sha512-ksNyrmRQzWJJ8n3cRDuSF7zNNontuJg1YHnmWRJd2AMu8Ij2bqwiiri2lH5rHtYPZjj4STkNcgcmiQqlOjiYGg==", + "license": "MIT" + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "license": "MIT" + }, + "node_modules/detect-libc": { + "version": "2.1.2", + 
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.20.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.20.1.tgz", + "integrity": "sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.3.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/entities": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-7.0.1.tgz", + "integrity": "sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/esbuild": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.12.tgz", + "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.12", + "@esbuild/android-arm": "0.25.12", + "@esbuild/android-arm64": "0.25.12", + "@esbuild/android-x64": "0.25.12", + "@esbuild/darwin-arm64": "0.25.12", + "@esbuild/darwin-x64": "0.25.12", + "@esbuild/freebsd-arm64": "0.25.12", + "@esbuild/freebsd-x64": "0.25.12", + "@esbuild/linux-arm": "0.25.12", + "@esbuild/linux-arm64": "0.25.12", + "@esbuild/linux-ia32": "0.25.12", + "@esbuild/linux-loong64": "0.25.12", + "@esbuild/linux-mips64el": "0.25.12", + 
"@esbuild/linux-ppc64": "0.25.12", + "@esbuild/linux-riscv64": "0.25.12", + "@esbuild/linux-s390x": "0.25.12", + "@esbuild/linux-x64": "0.25.12", + "@esbuild/netbsd-arm64": "0.25.12", + "@esbuild/netbsd-x64": "0.25.12", + "@esbuild/openbsd-arm64": "0.25.12", + "@esbuild/openbsd-x64": "0.25.12", + "@esbuild/openharmony-arm64": "0.25.12", + "@esbuild/sunos-x64": "0.25.12", + "@esbuild/win32-arm64": "0.25.12", + "@esbuild/win32-ia32": "0.25.12", + "@esbuild/win32-x64": "0.25.12" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "license": "MIT" + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-tsconfig": { + "version": "4.13.7", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.7.tgz", + "integrity": "sha512-7tN6rFgBlMgpBML5j8typ92BKFi2sFQvIdpAqLA2beia5avZDrMs0FLZiM5etShWq5irVyGcGMEA1jcDaK7A/Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": 
"https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/lightningcss": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz", + "integrity": "sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-android-arm64": "1.32.0", + "lightningcss-darwin-arm64": "1.32.0", + "lightningcss-darwin-x64": "1.32.0", + 
"lightningcss-freebsd-x64": "1.32.0", + "lightningcss-linux-arm-gnueabihf": "1.32.0", + "lightningcss-linux-arm64-gnu": "1.32.0", + "lightningcss-linux-arm64-musl": "1.32.0", + "lightningcss-linux-x64-gnu": "1.32.0", + "lightningcss-linux-x64-musl": "1.32.0", + "lightningcss-win32-arm64-msvc": "1.32.0", + "lightningcss-win32-x64-msvc": "1.32.0" + } + }, + "node_modules/lightningcss-android-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.32.0.tgz", + "integrity": "sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.32.0.tgz", + "integrity": "sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.32.0.tgz", + "integrity": "sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + 
"node_modules/lightningcss-freebsd-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.32.0.tgz", + "integrity": "sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.32.0.tgz", + "integrity": "sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.32.0.tgz", + "integrity": "sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.32.0.tgz", + "integrity": "sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==", + "cpu": [ + "arm64" + ], + "dev": true, + 
"license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.32.0.tgz", + "integrity": "sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.32.0.tgz", + "integrity": "sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.32.0.tgz", + "integrity": "sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.32.0", + "resolved": 
"https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.32.0.tgz", + "integrity": "sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-addon-api": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", + "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + 
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", + "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prettier": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": 
"https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/rollup": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.1.tgz", + "integrity": "sha512-VmtB2rFU/GroZ4oL8+ZqXgSA38O6GR8KSIvWmEFv63pQ0G6KaBH9s07PO8XTXP4vI+3UJUEypOfjkGfmSBBR0w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.60.1", + "@rollup/rollup-android-arm64": "4.60.1", + "@rollup/rollup-darwin-arm64": "4.60.1", + "@rollup/rollup-darwin-x64": "4.60.1", + "@rollup/rollup-freebsd-arm64": "4.60.1", + "@rollup/rollup-freebsd-x64": "4.60.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.60.1", + "@rollup/rollup-linux-arm-musleabihf": "4.60.1", + "@rollup/rollup-linux-arm64-gnu": "4.60.1", + "@rollup/rollup-linux-arm64-musl": "4.60.1", + "@rollup/rollup-linux-loong64-gnu": "4.60.1", + "@rollup/rollup-linux-loong64-musl": "4.60.1", + "@rollup/rollup-linux-ppc64-gnu": "4.60.1", + "@rollup/rollup-linux-ppc64-musl": "4.60.1", + "@rollup/rollup-linux-riscv64-gnu": "4.60.1", + "@rollup/rollup-linux-riscv64-musl": "4.60.1", + "@rollup/rollup-linux-s390x-gnu": "4.60.1", + "@rollup/rollup-linux-x64-gnu": "4.60.1", + "@rollup/rollup-linux-x64-musl": "4.60.1", + "@rollup/rollup-openbsd-x64": "4.60.1", + "@rollup/rollup-openharmony-arm64": "4.60.1", + "@rollup/rollup-win32-arm64-msvc": "4.60.1", + "@rollup/rollup-win32-ia32-msvc": "4.60.1", + "@rollup/rollup-win32-x64-gnu": "4.60.1", + "@rollup/rollup-win32-x64-msvc": "4.60.1", + "fsevents": "~2.3.2" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "license": "BSD-3-Clause", + 
"engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tailwindcss": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.2.2.tgz", + "integrity": "sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/tapable": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.2.tgz", + "integrity": "sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tsx": { + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz", + "integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "~0.27.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/tsx/node_modules/@esbuild/aix-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", + "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "cpu": [ + "ppc64" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", + "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", + "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", + "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", + "integrity": "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-x64": { + "version": "0.27.7", + "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", + "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", + "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", + "integrity": "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", + "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", + "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + 
"os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", + "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-loong64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", + "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-mips64el": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", + "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", + "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-riscv64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", + "integrity": 
"sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-s390x": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", + "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", + "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", + "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", + "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/tsx/node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", + "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", + "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", + "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/sunos-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", + "integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", + "integrity": 
"sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", + "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", + "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/esbuild": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.7.tgz", + "integrity": "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.7", + "@esbuild/android-arm": "0.27.7", + "@esbuild/android-arm64": "0.27.7", + "@esbuild/android-x64": "0.27.7", + "@esbuild/darwin-arm64": "0.27.7", + "@esbuild/darwin-x64": "0.27.7", + "@esbuild/freebsd-arm64": "0.27.7", + "@esbuild/freebsd-x64": "0.27.7", + "@esbuild/linux-arm": "0.27.7", + "@esbuild/linux-arm64": "0.27.7", + "@esbuild/linux-ia32": "0.27.7", + "@esbuild/linux-loong64": "0.27.7", + "@esbuild/linux-mips64el": "0.27.7", + 
"@esbuild/linux-ppc64": "0.27.7", + "@esbuild/linux-riscv64": "0.27.7", + "@esbuild/linux-s390x": "0.27.7", + "@esbuild/linux-x64": "0.27.7", + "@esbuild/netbsd-arm64": "0.27.7", + "@esbuild/netbsd-x64": "0.27.7", + "@esbuild/openbsd-arm64": "0.27.7", + "@esbuild/openbsd-x64": "0.27.7", + "@esbuild/openharmony-arm64": "0.27.7", + "@esbuild/sunos-x64": "0.27.7", + "@esbuild/win32-arm64": "0.27.7", + "@esbuild/win32-ia32": "0.27.7", + "@esbuild/win32-x64": "0.27.7" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "devOptional": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/vite": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz", + "integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", + "postcss": "^8.5.3", + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "jiti": ">=1.21.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + 
"sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vue": { + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.32.tgz", + "integrity": "sha512-vM4z4Q9tTafVfMAK7IVzmxg34rSzTFMyIe0UUEijUCkn9+23lj0WRfA83dg7eQZIUlgOSGrkViIaCfqSAUXsMw==", + "license": "MIT", + "dependencies": { + "@vue/compiler-dom": "3.5.32", + "@vue/compiler-sfc": "3.5.32", + "@vue/runtime-dom": "3.5.32", + "@vue/server-renderer": "3.5.32", + "@vue/shared": "3.5.32" + }, + "peerDependencies": { + "typescript": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..55cbc81 --- /dev/null +++ b/package.json @@ -0,0 +1,33 @@ +{ + "name": "streaming-data-loader", + "private": true, + "type": "module", + "dependencies": { + "@tauri-apps/api": "^2.8.0", + "@tauri-apps/plugin-dialog": "^2.0.0", + "vue": "^3.5.32" + }, + "scripts": { + "bootstrap:frontend": "node ./scripts/bootstrap-frontend.mjs", + "dev": "vite", + "dev:tauri": "node ./scripts/tauri-dev.mjs", + "build": "npm run tailwind:build && tsc --noEmit -p tsconfig.json && tsc --noEmit -p tsconfig.node.json && vite build", + "preview": "vite preview", + "test": "npm run test:frontend", + "test:frontend": "tsx --test frontend/tests/**/*.test.ts", + "tailwind:watch": "tailwindcss -i ./frontend/styles.css -o ./frontend/generated.css --watch", + "tailwind:build": "tailwindcss -i 
./frontend/styles.css -o ./frontend/generated.css --minify", + "tauri": "node ./scripts/run-tauri.mjs" + }, + "devDependencies": { + "@tailwindcss/cli": "^4.1.4", + "@tauri-apps/cli": "^2.10.1", + "@types/node": "^22.13.10", + "@vitejs/plugin-vue": "^6.0.1", + "prettier": "^2.8.8", + "tailwindcss": "^4.1.4", + "tsx": "^4.21.0", + "typescript": "^5.6.3", + "vite": "^6.0.3" + } +} diff --git a/pyi-hooks/hook-orderedmultidict.py b/pyi-hooks/hook-orderedmultidict.py deleted file mode 100644 index 1023a8d..0000000 --- a/pyi-hooks/hook-orderedmultidict.py +++ /dev/null @@ -1,5 +0,0 @@ -from PyInstaller.utils.hooks import collect_data_files - -module_collection_mode = "py+pyz" -hiddenimports = ["orderedmultidict.__version__", "demjson3"] -datas = collect_data_files('orderedmultidict') diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index df62288..0000000 --- a/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -PySide6==6.6.1 -appdirs==1.4.4 -hydroserverpy==1.8.0 -pandas==2.2.3 -numpy==2.2.4 -APScheduler==3.10.1 -pytz>=2023.3 -requests >= 2.27.1 -Pillow==9.5.0 diff --git a/scripts/bootstrap-frontend.mjs b/scripts/bootstrap-frontend.mjs new file mode 100644 index 0000000..f9ccdfc --- /dev/null +++ b/scripts/bootstrap-frontend.mjs @@ -0,0 +1,40 @@ +import { createHash } from "node:crypto" +import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs" +import { dirname, resolve } from "node:path" +import { fileURLToPath } from "node:url" +import { spawnSync } from "node:child_process" + +const rootDir = resolve(dirname(fileURLToPath(import.meta.url)), "..") +const cacheDir = resolve(rootDir, ".cache") +const stampPath = resolve(cacheDir, "frontend-package-lock.sha256") +const lockfilePath = resolve(rootDir, "package-lock.json") +const nodeModulesPath = resolve(rootDir, "node_modules") + +function run(command, args) { + const result = spawnSync(command, args, { + cwd: rootDir, + stdio: "inherit", + shell: process.platform === "win32", + }) 
+ + if (result.status !== 0) { + process.exit(result.status ?? 1) + } +} + +function sha256(path) { + return createHash("sha256").update(readFileSync(path)).digest("hex") +} + +mkdirSync(cacheDir, { recursive: true }) + +const currentHash = sha256(lockfilePath) +const storedHash = existsSync(stampPath) ? readFileSync(stampPath, "utf8") : "" +const needsInstall = !existsSync(nodeModulesPath) || storedHash !== currentHash + +if (needsInstall) { + run("npm", ["install"]) + writeFileSync(stampPath, currentHash) +} + +run("npm", ["run", "tailwind:build"]) diff --git a/scripts/run-tauri.mjs b/scripts/run-tauri.mjs new file mode 100644 index 0000000..31a4fdd --- /dev/null +++ b/scripts/run-tauri.mjs @@ -0,0 +1,62 @@ +import { spawnSync } from "node:child_process" +import { dirname, resolve } from "node:path" +import { fileURLToPath } from "node:url" + +const rootDir = resolve(dirname(fileURLToPath(import.meta.url)), "..") +const tauriArgs = process.argv.slice(2) +const isDevCommand = tauriArgs.includes("dev") +const devConfigDir = resolve(rootDir, ".sdl-dev-data") + +function run(command, args, options = {}) { + return spawnSync(command, args, { + cwd: rootDir, + stdio: "inherit", + shell: process.platform === "win32", + ...options, + }) +} + +function commandExists(command, args = ["--version"]) { + const result = spawnSync(command, args, { + cwd: rootDir, + stdio: "ignore", + shell: process.platform === "win32", + }) + + return result.status === 0 +} + +if (!commandExists("cargo")) { + console.error("") + console.error("Tauri desktop preview requires the Rust toolchain, but `cargo` is not installed or not on PATH.") + console.error("") + console.error("Install it with one of these:") + console.error(" 1. curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh") + console.error(" 2. 
brew install rustup-init && rustup-init") + console.error("") + console.error("Then restart your terminal and verify:") + console.error(" cargo --version") + console.error("") + console.error("After that, run:") + console.error(" npm run tauri dev") + console.error("") + process.exit(1) +} + +if (!commandExists("rustc")) { + console.error("") + console.error("Rust appears to be partially installed: `cargo` exists but `rustc` does not.") + console.error("Run `rustup default stable` and try again.") + console.error("") + process.exit(1) +} + +const result = run("npx", ["--no-install", "tauri", ...tauriArgs], { + env: { + ...process.env, + ...(isDevCommand && !process.env.SDL_CONFIG_DIR + ? { SDL_CONFIG_DIR: devConfigDir } + : {}), + }, +}) +process.exit(result.status ?? 1) diff --git a/scripts/tauri-dev.mjs b/scripts/tauri-dev.mjs new file mode 100644 index 0000000..392663e --- /dev/null +++ b/scripts/tauri-dev.mjs @@ -0,0 +1,107 @@ +import { dirname, resolve } from "node:path" +import { fileURLToPath } from "node:url" +import { spawn } from "node:child_process" +import { existsSync, readFileSync } from "node:fs" +import net from "node:net" + +const rootDir = resolve(dirname(fileURLToPath(import.meta.url)), "..") +const children = [] + +function spawnTask(command, args, options = {}) { + const child = spawn(command, args, { + cwd: rootDir, + env: { + ...process.env, + ...options.env, + }, + stdio: "inherit", + shell: process.platform === "win32", + }) + + children.push(child) + + child.on("exit", code => { + if (shuttingDown) { + return + } + + shuttingDown = true + shutdown(code ?? 
0) + }) + + return child +} + +let shuttingDown = false + +function shutdown(exitCode) { + for (const child of children) { + if (!child.killed) { + child.kill("SIGTERM") + } + } + + setTimeout(() => process.exit(exitCode), 150) +} + +process.on("SIGINT", () => shutdown(0)) +process.on("SIGTERM", () => shutdown(0)) + +function loadEnvFile() { + const envPath = resolve(rootDir, ".env.development") + + if (!existsSync(envPath)) { + return {} + } + + const env = {} + + for (const line of readFileSync(envPath, "utf8").split(/\r?\n/)) { + const trimmed = line.trim() + if (!trimmed || trimmed.startsWith("#")) { + continue + } + + const separatorIndex = trimmed.indexOf("=") + if (separatorIndex === -1) { + continue + } + + const key = trimmed.slice(0, separatorIndex).trim() + const value = trimmed.slice(separatorIndex + 1).trim() + env[key] = value + } + + return env +} + +function isPortOpen(host, port) { + return new Promise(resolvePromise => { + const socket = net.connect({ host, port: Number(port) }) + + socket.once("connect", () => { + socket.end() + resolvePromise(true) + }) + + socket.once("error", () => { + resolvePromise(false) + }) + }) +} + +const devEnv = loadEnvFile() +const frontendHost = devEnv.SDL_FRONTEND_HOST || "localhost" +const frontendPort = devEnv.SDL_FRONTEND_PORT || "1420" + +spawnTask("npm", ["run", "tailwind:watch"]) + +if (await isPortOpen(frontendHost, frontendPort)) { + console.log(`Vite dev server already detected on ${frontendHost}:${frontendPort}; reusing it for Tauri.`) +} else { + spawnTask("npm", ["run", "dev"], { + env: { + SDL_OPEN_BROWSER: "false", + }, + }) +} diff --git a/src/app.py b/src/app.py deleted file mode 100644 index 85f33cb..0000000 --- a/src/app.py +++ /dev/null @@ -1,570 +0,0 @@ -import os -import json -import sys -import logging -import webbrowser -import subprocess -import hydroserverpy -from scheduler import DataLoaderScheduler -from logging.handlers import RotatingFileHandler -from appdirs import user_data_dir -from 
PySide6.QtCore import Qt -from PySide6.QtGui import QAction, QIcon, QPixmap -from PySide6.QtWidgets import QApplication, QMainWindow, QSystemTrayIcon, QMenu, QWidget, QVBoxLayout, QLabel, \ - QLineEdit, QHBoxLayout, QPushButton, QMessageBox, QCheckBox - - -class StreamingDataLoader(QMainWindow): - - def __init__(self): - super(StreamingDataLoader, self).__init__() - - self.service = None - self.scheduler = None - - self.instance_name = None - self.workspace_name = None - self.hydroserver_url = None - self.hydroserver_api_key = None - self.hydroserver_username = None - self.hydroserver_password = None - self.connected = False - self.paused = False - - self.status_action = None - self.connection_action = None - self.dashboard_action = None - self.logging_action = None - self.pause_action = None - self.quit_action = None - - self.url_input = None - self.workspace_input = None - self.instance_input = None - self.api_key_input = None - self.email_input = None - self.password_input = None - self.auth_toggle_checkbox = None - - self.api_key_input_widget = None - self.basic_auth_input_widget = None - - self.assets_path = getattr(sys, '_MEIPASS', 'assets') - self.app_dir = user_data_dir('Streaming Data Loader', 'CIROH') - self.app_version = 'dev' - - if not os.path.exists(self.app_dir): - os.makedirs(self.app_dir) - - try: - with open(os.path.join(self.assets_path, 'version.txt')) as f: - self.app_version = f.read().strip() - except FileNotFoundError: - pass - - self.init_ui() - self.get_settings() - - data_loader = self.connect_to_hydroserver() - - self.update_gui() - - if self.connected: - self.scheduler = DataLoaderScheduler( - hs_api=self.service, - data_loader=data_loader - ) - - if not self.connected: - self.show() - - def init_ui(self): - """Builds the app UI including system tray menu and connection window""" - - # System Tray Icon - tray_icon = QSystemTrayIcon(self) - tray_icon_image = QIcon(os.path.join(self.assets_path, "app_icon.png")) - 
tray_icon_image.setIsMask(True) - tray_icon.setIcon(tray_icon_image) - - # System Tray Menu - tray_menu = QMenu(self) - self.setup_tray_menu_status(tray_menu) - tray_menu.addSeparator() - self.setup_tray_menu_actions(tray_menu) - tray_menu.addSeparator() - self.setup_tray_menu_controls(tray_menu) - tray_icon.setContextMenu(tray_menu) - tray_icon.show() - - # HydroServer Connection Window - self.setWindowTitle(f'Streaming Data Loader ({self.app_version})') - self.setGeometry(300, 300, 550, 550) - self.setFixedSize(550, 550) - central_widget = QWidget(self) - self.setCentralWidget(central_widget) - layout = QVBoxLayout(central_widget) - self.setup_connection_dialog(layout) - - def setup_tray_menu_status(self, tray_menu): - """Components to build menu status""" - - # System Tray Menu Status - self.status_action = QAction(self) - self.status_action.setEnabled(False) - tray_menu.addAction(self.status_action) - - def setup_tray_menu_actions(self, tray_menu): - """Components to build menu actions""" - - # System Tray Menu Open Connection Window - self.connection_action = QAction('HydroServer Connection', self) - self.connection_action.triggered.connect(lambda: self.show()) - tray_menu.addAction(self.connection_action) - - # System Tray Menu View Tasks - self.dashboard_action = QAction('View Tasks', self) - dashboard_icon = QIcon(os.path.join(self.assets_path, 'database.png')) - dashboard_icon.setIsMask(True) - self.dashboard_action.setIcon(dashboard_icon) - self.dashboard_action.triggered.connect(self.open_orchestration_dashboard) - tray_menu.addAction(self.dashboard_action) - - # System Tray Menu View Logs - self.logging_action = QAction('View Log Output', self) - logging_icon = QIcon(os.path.join(self.assets_path, 'description.png')) - logging_icon.setIsMask(True) - self.logging_action.setIcon(logging_icon) - self.logging_action.triggered.connect(self.open_logs) - tray_menu.addAction(self.logging_action) - - def setup_tray_menu_controls(self, tray_menu): - """Components 
to build menu controls""" - - # System Tray Menu Pause/Resume App - self.pause_action = QAction('Pause', self) - self.pause_action.triggered.connect(self.toggle_paused) - tray_menu.addAction(self.pause_action) - - # System Tray Menu Shut Down App - self.quit_action = QAction('Shut Down', self) - quit_icon = QIcon(os.path.join(self.assets_path, 'exit.png')) - quit_icon.setIsMask(True) - self.quit_action.setIcon(quit_icon) - self.quit_action.triggered.connect(app.quit) - tray_menu.addAction(self.quit_action) - - def setup_connection_dialog(self, layout): - """Components to build connection window""" - - # HydroServer Logo - logo_label = QLabel(self) - logo_label.setPixmap( - QPixmap(os.path.join(self.assets_path, 'setup_icon.png')).scaledToWidth(500, Qt.SmoothTransformation) - ) - logo_layout = QVBoxLayout() - logo_layout.addWidget(logo_label, alignment=Qt.AlignCenter) - logo_layout.setContentsMargins(10, 10, 10, 10) - layout.addLayout(logo_layout) - - # Window Settings - label_width = 150 - input_layout = QVBoxLayout() - input_layout.setContentsMargins(20, 20, 20, 20) - - # HydroServer URL Input - url_box_layout = QHBoxLayout() - url_label = QLabel(f'HydroServer URL:', self) - url_label.setFixedWidth(label_width) - url_box_layout.addWidget(url_label, alignment=Qt.AlignRight) - self.url_input = QLineEdit(self) - self.url_input.setStyleSheet('padding: 5px;') - self.url_input.setPlaceholderText('Enter the HydroServer URL to connect to.') - url_box_layout.addWidget(self.url_input) - layout.addLayout(url_box_layout) - - # Workspace Name Input - workspace_box_layout = QHBoxLayout() - workspace_label = QLabel(f'Workspace Name:', self) - workspace_label.setFixedWidth(label_width) - workspace_box_layout.addWidget(workspace_label, alignment=Qt.AlignRight) - self.workspace_input = QLineEdit(self) - self.workspace_input.setStyleSheet('padding: 5px;') - self.workspace_input.setPlaceholderText('Enter the name of the workspace to use.') - 
workspace_box_layout.addWidget(self.workspace_input) - layout.addLayout(workspace_box_layout) - - # Instance Name Input - instance_box_layout = QHBoxLayout() - instance_label = QLabel(f'Instance Name:', self) - instance_label.setFixedWidth(label_width) - instance_box_layout.addWidget(instance_label, alignment=Qt.AlignRight) - self.instance_input = QLineEdit(self) - self.instance_input.setStyleSheet('padding: 5px;') - self.instance_input.setPlaceholderText('Enter a name for this streaming data loader.') - instance_box_layout.addWidget(self.instance_input) - layout.addLayout(instance_box_layout) - - # API Key Authentication Input - self.api_key_input_widget = QWidget(self) - api_key_input_layout = QVBoxLayout() - self.api_key_input_widget.setLayout(api_key_input_layout) - api_key_input_layout.setContentsMargins(0, 0, 0, 0) - - api_key_box_layout = QHBoxLayout() - api_key_label = QLabel('HydroServer API Key:', self) - api_key_label.setFixedWidth(label_width) - api_key_box_layout.addWidget(api_key_label, alignment=Qt.AlignRight) - self.api_key_input = QLineEdit(self) - self.api_key_input.setStyleSheet('padding: 5px;') - self.api_key_input.setEchoMode(getattr(QLineEdit, 'Password')) - self.api_key_input.setPlaceholderText('Enter your HydroServer API key.') - api_key_box_layout.addWidget(self.api_key_input) - api_key_input_layout.addLayout(api_key_box_layout) - - layout.addWidget(self.api_key_input_widget, alignment=Qt.AlignTop) - - # Basic Authentication Input - self.basic_auth_input_widget = QWidget(self) - basic_auth_input_layout = QVBoxLayout() - self.basic_auth_input_widget.setLayout(basic_auth_input_layout) - basic_auth_input_layout.setContentsMargins(0, 0, 0, 0) - - email_box_layout = QHBoxLayout() - email_label = QLabel('HydroServer Email:', self) - email_label.setFixedWidth(label_width) - email_box_layout.addWidget(email_label, alignment=Qt.AlignRight) - self.email_input = QLineEdit(self) - self.email_input.setStyleSheet('padding: 5px;') - 
self.email_input.setPlaceholderText('Enter your HydroServer Email.') - email_box_layout.addWidget(self.email_input) - basic_auth_input_layout.addLayout(email_box_layout) - - password_box_layout = QHBoxLayout() - password_label = QLabel('HydroServer Password:', self) - password_label.setFixedWidth(label_width) - password_box_layout.addWidget(password_label, alignment=Qt.AlignRight) - self.password_input = QLineEdit(self) - self.password_input.setStyleSheet('padding: 5px;') - self.password_input.setEchoMode(getattr(QLineEdit, 'Password')) - self.password_input.setPlaceholderText('Enter your HydroServer Password.') - password_box_layout.addWidget(self.password_input) - basic_auth_input_layout.addLayout(password_box_layout) - - layout.addWidget(self.basic_auth_input_widget) - - # Authentication Mode Toggle - self.auth_toggle_checkbox = QCheckBox("Authenticate with username and password", self) - self.auth_toggle_checkbox.setStyleSheet('padding: 5px;') - self.auth_toggle_checkbox.stateChanged.connect(lambda: self.toggle_auth_input()) - layout.addWidget(self.auth_toggle_checkbox) - - # Window Actions Settings - actions_layout = QHBoxLayout() - actions_layout.setContentsMargins(0, 0, 20, 20) - actions_layout.addStretch(1) - - # Confirm Button - confirm_button = QPushButton('Confirm', self) - confirm_button.clicked.connect(lambda: self.confirm_settings()) - confirm_button.setStyleSheet( - 'background-color: #007BFF; color: white; border: 1px solid #007BFF; border-radius: 8px; padding: 8px;' - 'hover { background-color: #0056b3; }' - ) - confirm_button.setCursor(Qt.PointingHandCursor) - confirm_button.setFixedSize(80, 30) - actions_layout.addWidget(confirm_button) - - # Cancel Button - cancel_button = QPushButton('Cancel', self) - cancel_button.clicked.connect(lambda: self.hide()) - cancel_button.setStyleSheet( - 'border: 1px solid #707070; border-radius: 8px; padding: 8px;' - 'hover { background-color: #e0e0e0; }' - ) - cancel_button.setCursor(Qt.PointingHandCursor) - 
cancel_button.setFixedSize(80, 30) - actions_layout.addWidget(cancel_button) - - layout.addLayout(actions_layout) - - def toggle_auth_input(self): - """Switches between API key and email/password authentication inputs.""" - - if self.auth_toggle_checkbox.isChecked(): - self.api_key_input_widget.setVisible(False) - self.basic_auth_input_widget.setVisible(True) - else: - self.basic_auth_input_widget.setVisible(False) - self.api_key_input_widget.setVisible(True) - - def open_orchestration_dashboard(self): - """Opens user's Orchestration Dashboard in a browser window""" - - webbrowser.open(f'{self.hydroserver_url}/orchestration') - - def open_logs(self): - """Opens app log file in a text viewer""" - - subprocess.call(['open', os.path.join(self.app_dir, 'streaming_data_loader.log')]) - - def toggle_paused(self): - """Toggles whether the app is paused or not""" - - self.paused = not self.paused - if self.connected and self.paused is True: - self.scheduler.pause() - elif self.connected and self.paused is False: - self.scheduler.resume() - self.update_gui() - - def connect_to_hydroserver(self): - """Uses connection settings to register app on HydroServer""" - - if not all([ - self.hydroserver_url, self.workspace_name, self.instance_name - ]) or ( - not (self.hydroserver_username and self.hydroserver_password) and not self.hydroserver_api_key - ): - self.connected = False - return 'Missing required connection parameters.' - - try: - if self.hydroserver_api_key: - self.service = hydroserverpy.HydroServer( - host=self.hydroserver_url, - apikey=self.hydroserver_api_key - ) - else: - self.service = hydroserverpy.HydroServer( - host=self.hydroserver_url, - email=self.hydroserver_username, - password=self.hydroserver_password - ) - except: - self.connected = False - return 'Failed to connect to HydroServer.' 
- - workspaces = self.service.workspaces.list(is_associated=True, fetch_all=True) - workspace = next((workspace for workspace in workspaces.items if workspace.name == self.workspace_name), None) - - orchestration_systems = self.service.orchestrationsystems.list(workspace=workspace, fetch_all=True) - orchestration_system = next(( - orchestration_system for orchestration_system in orchestration_systems.items - if orchestration_system.name == self.instance_name - ), None) - - if not workspace: - self.connected = False - return 'The provided workspace was not found.' - - if not orchestration_system: - try: - orchestration_system = self.service.orchestrationsystems.create( - name=self.instance_name, - workspace=workspace, - orchestration_system_type="SDL" - ) - except (Exception,) as e: - print(e) - return 'Failed to register Streaming Data Loader instance.' - - self.connected = True - - return orchestration_system - - def get_settings(self): - """Get settings from settings file""" - - settings_path = os.path.join(self.app_dir, 'settings.json') - if os.path.exists(settings_path): - with open(settings_path, 'r') as settings_file: - settings = json.loads(settings_file.read() or 'null') or {} - self.hydroserver_url = settings.get('url') - self.hydroserver_api_key = settings.get('apikey') - self.hydroserver_username = settings.get('username') - self.hydroserver_password = settings.get('password') - self.workspace_name = settings.get('workspace') - self.instance_name = settings.get('name') - self.paused = settings.get('paused') - - def update_settings( - self, - hydroserver_url=None, - instance_name=None, - workspace_name=None, - hydroserver_api_key=None, - hydroserver_username=None, - hydroserver_password=None, - use_api_key=True, - paused=None - ): - """Update settings file with new settings""" - - if use_api_key is True: - api_key = hydroserver_api_key if hydroserver_api_key is not None else self.hydroserver_api_key - username = None - password = None - else: - api_key = 
None - username = hydroserver_username if hydroserver_username is not None else self.hydroserver_username - password = hydroserver_password if hydroserver_password is not None else self.hydroserver_password - - settings_path = os.path.join(self.app_dir, 'settings.json') - with open(settings_path, 'w') as settings_file: - settings_file.write(json.dumps({ - 'url': hydroserver_url if hydroserver_url is not None else self.hydroserver_url, - 'name': instance_name if instance_name is not None else self.instance_name, - 'workspace': workspace_name if workspace_name is not None else self.workspace_name, - 'apikey': api_key, - 'username': username, - 'password': password, - 'paused': paused if paused is not None else self.paused - })) - self.get_settings() - - def confirm_settings(self): - """Handle the user updating connection settings""" - - if not all([ - self.url_input.text(), self.workspace_input.text(), self.instance_input.text() - ]) or ( - not (self.email_input.text() and self.password_input.text()) and not self.api_key_input.text() - ): - return self.show_message( - title='Missing Required Fields', - message='All fields are required to register the Streaming Data Loader app on HydroServer.' 
- ) - - self.update_settings( - hydroserver_url=self.url_input.text(), - instance_name=self.instance_input.text(), - workspace_name=self.workspace_input.text(), - hydroserver_api_key=self.api_key_input.text(), - hydroserver_username=self.email_input.text(), - hydroserver_password=self.password_input.text(), - use_api_key=not self.auth_toggle_checkbox.isChecked(), - ) - - connection_message = self.connect_to_hydroserver() - self.update_gui() - - if self.connected is False: - return self.show_message( - title='Connection Failed', - message=connection_message - ) - - if self.scheduler: - self.scheduler.terminate() - - self.scheduler = DataLoaderScheduler( - hs_api=self.service, - data_loader=connection_message - ) - - if self.paused is True: - self.scheduler.pause() - - self.show_message( - title='Streaming Data Loader Setup Complete', - message='The Streaming Data Loader has been successfully registered and is now running.' - ) - - self.hide() - - @staticmethod - def show_message(title, message): - """Show a message window to the user""" - - message_box = QMessageBox() - message_box.setWindowTitle(title) - message_box.setText(message) - message_box.exec_() - - def update_gui(self): - """Update UI elements when settings/state changes""" - - if self.paused: - pause_action_text = 'Resume' - pause_action_icon = 'resume.png' - else: - pause_action_text = 'Pause' - pause_action_icon = 'pause.png' - - if self.connected and not self.paused: - status = 'Running' - connection_icon = 'connected.png' - data_sources_enabled = True - elif self.connected and self.paused: - status = 'Paused' - connection_icon = 'connected.png' - data_sources_enabled = True - else: - status = 'Not Connected' - connection_icon = 'disconnected.png' - data_sources_enabled = False - - self.status_action.setText(f'Status: {status}') - - connected_icon = QIcon(os.path.join(self.assets_path, connection_icon)) - connected_icon.setIsMask(True) - self.connection_action.setIcon(connected_icon) - 
self.dashboard_action.setEnabled(data_sources_enabled) - - self.pause_action.setText(pause_action_text) - pause_icon = QIcon(os.path.join(self.assets_path, pause_action_icon)) - pause_icon.setIsMask(True) - self.pause_action.setIcon(pause_icon) - - if self.isHidden(): - self.url_input.setText(self.hydroserver_url if self.hydroserver_url else 'https://www.hydroserver.org') - self.instance_input.setText(self.instance_name if self.instance_name else '') - self.workspace_input.setText(self.workspace_name if self.workspace_name else '') - self.api_key_input.setText(self.hydroserver_api_key if self.hydroserver_api_key else '') - self.email_input.setText(self.hydroserver_username if self.hydroserver_username else '') - self.password_input.setText(self.hydroserver_password if self.hydroserver_password else '') - self.auth_toggle_checkbox.setChecked(bool(self.email_input.text())) - self.api_key_input_widget.setVisible(not bool(self.email_input.text())) - self.basic_auth_input_widget.setVisible(bool(self.email_input.text())) - - -if __name__ == '__main__': - - hydroloader_logger = logging.getLogger('hydroloader') - scheduler_logger = logging.getLogger('scheduler') - - stream_handler = logging.StreamHandler() - hydroloader_logger.addHandler(stream_handler) - scheduler_logger.addHandler(stream_handler) - - user_dir = user_data_dir('Streaming Data Loader', 'CIROH') - - if not os.path.exists(user_dir): - os.makedirs(user_dir) - - log_path = os.path.join(user_dir, 'streaming_data_loader.log') - - log_handler = RotatingFileHandler( - filename=log_path, - mode='a', - maxBytes=20 * 1024 * 1024, - backupCount=3 - ) - hydroloader_logger.addHandler(log_handler) - scheduler_logger.addHandler(log_handler) - - logging.basicConfig( - format='%(asctime)s %(levelname)-8s %(message)s', - level=logging.INFO, - datefmt='%Y-%m-%d %H:%M:%S', - force=True, - handlers=[ - log_handler, stream_handler - ] - ) - - app = QApplication(sys.argv) - app.setQuitOnLastWindowClosed(False) - window = 
StreamingDataLoader() - sys.exit(app.exec_()) diff --git a/src/assets/app_icon.png b/src/assets/app_icon.png deleted file mode 100644 index 5737995..0000000 Binary files a/src/assets/app_icon.png and /dev/null differ diff --git a/src/assets/connected.png b/src/assets/connected.png deleted file mode 100644 index 1a9d700..0000000 Binary files a/src/assets/connected.png and /dev/null differ diff --git a/src/assets/database.png b/src/assets/database.png deleted file mode 100644 index 528df37..0000000 Binary files a/src/assets/database.png and /dev/null differ diff --git a/src/assets/description.png b/src/assets/description.png deleted file mode 100644 index 5332820..0000000 Binary files a/src/assets/description.png and /dev/null differ diff --git a/src/assets/disconnected.png b/src/assets/disconnected.png deleted file mode 100644 index eb93682..0000000 Binary files a/src/assets/disconnected.png and /dev/null differ diff --git a/src/assets/exit.png b/src/assets/exit.png deleted file mode 100644 index 8d36c64..0000000 Binary files a/src/assets/exit.png and /dev/null differ diff --git a/src/assets/pause.png b/src/assets/pause.png deleted file mode 100644 index 4e745f1..0000000 Binary files a/src/assets/pause.png and /dev/null differ diff --git a/src/assets/resume.png b/src/assets/resume.png deleted file mode 100644 index cefc7e9..0000000 Binary files a/src/assets/resume.png and /dev/null differ diff --git a/src/assets/setup_icon.png b/src/assets/setup_icon.png deleted file mode 100644 index e77fd33..0000000 Binary files a/src/assets/setup_icon.png and /dev/null differ diff --git a/src/commands.rs b/src/commands.rs new file mode 100644 index 0000000..dec459e --- /dev/null +++ b/src/commands.rs @@ -0,0 +1,89 @@ +use std::{path::Path, process::Command}; + +use tauri::AppHandle; + +use crate::{ + daemon_launcher, + models::{ActionResponse, DaemonConnectionInfo, ServiceStatusResponse}, + service_manager, +}; + +#[tauri::command] +pub async fn get_daemon_connection(app: 
AppHandle) -> Result { + daemon_launcher::ensure_daemon_connection(&app).await +} + +#[tauri::command] +pub fn get_service_status(app: AppHandle) -> Result { + service_manager::get_service_status(&app) +} + +#[tauri::command] +pub fn install_os_service(app: AppHandle) -> Result { + service_manager::install_service(&app) +} + +#[tauri::command] +pub fn restart_os_service(app: AppHandle) -> Result { + service_manager::restart_service(&app) +} + +#[tauri::command] +pub fn uninstall_os_service(app: AppHandle) -> Result { + service_manager::uninstall_service(&app) +} + +#[tauri::command] +pub fn reveal_file_in_folder(path: String) -> Result { + let target = Path::new(&path); + if !target.exists() { + return Err("That file no longer exists.".to_string()); + } + + reveal_path_with_platform_file_manager(target)?; + + Ok(ActionResponse { + ok: true, + message: "Opened the file location.".to_string(), + }) +} + +fn reveal_path_with_platform_file_manager(path: &Path) -> Result<(), String> { + #[cfg(target_os = "macos")] + { + return run_command(Command::new("open").arg("-R").arg(path)); + } + + #[cfg(target_os = "windows")] + { + let select_arg = format!("/select,{}", path.display()); + return run_command(Command::new("explorer").arg(select_arg)); + } + + #[cfg(target_os = "linux")] + { + let directory = if path.is_dir() { + path.to_path_buf() + } else { + path.parent() + .map(Path::to_path_buf) + .unwrap_or_else(|| path.to_path_buf()) + }; + return run_command(Command::new("xdg-open").arg(directory)); + } + + #[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))] + { + let _ = path; + Err("Opening file locations is not supported on this operating system.".to_string()) + } +} + +fn run_command(command: &mut Command) -> Result<(), String> { + let status = command.status().map_err(|err| err.to_string())?; + if status.success() { + Ok(()) + } else { + Err("The operating system could not open that file location.".to_string()) + } +} diff --git 
a/src/config_store.rs b/src/config_store.rs new file mode 100644 index 0000000..d3cdc67 --- /dev/null +++ b/src/config_store.rs @@ -0,0 +1,1189 @@ +use std::{ + fs, + io::{BufRead, BufReader, Write}, + path::{Path, PathBuf}, + sync::{ + atomic::{AtomicU64, Ordering}, + Mutex, + }, + time::{SystemTime, UNIX_EPOCH}, +}; + +use chrono::{DateTime, Utc}; +use serde_json::{json, Value}; + +use crate::models::{ + AppConfig, AppStateFile, ColumnMapping, FileConfig, JobConfig, JobCursor, JobLogEntry, + JobUpsertRequest, PersistedDatasource, ServerConfig, WorkspaceStateFile, +}; + +static JOB_COUNTER: AtomicU64 = AtomicU64::new(1); +const JOB_LOG_ROTATE_BYTES: u64 = 5 * 1024 * 1024; +const JOB_LOG_ROTATE_FILES: usize = 7; + +pub struct ConfigStore { + config_dir: PathBuf, + config_path: PathBuf, + legacy_state_path: PathBuf, + workspace_dir: PathBuf, + logs_dir: PathBuf, + job_logs_dir: PathBuf, + lock: Mutex<()>, +} + +impl ConfigStore { + pub fn new(config_dir: PathBuf) -> Self { + let logs_dir = config_dir.join("logs"); + let job_logs_dir = logs_dir.join("jobs"); + Self { + config_path: config_dir.join("config.json"), + legacy_state_path: config_dir.join("state.json"), + workspace_dir: config_dir.join("workspaces"), + logs_dir, + job_logs_dir, + config_dir, + lock: Mutex::new(()), + } + } + + pub fn ensure(&self) -> Result<(), String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked() + } + + pub fn load(&self) -> Result { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let mut config = self.read_config_locked()?; + config.jobs = self.active_jobs_locked(&config.server)?; + Ok(config) + } + + pub fn set_server( + &self, + server: ServerConfig, + workspace_name: &str, + ) -> Result { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let mut config = 
self.read_config_locked()?; + config.server = server.normalized(); + self.write_config_locked(&config)?; + self.ensure_workspace_file_locked( + &config.server.workspace_id, + workspace_name, + &config.server.url, + )?; + config.jobs = self.active_jobs_locked(&config.server)?; + Ok(config) + } + + pub fn clear_server(&self) -> Result { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let mut config = self.read_config_locked()?; + config.server = ServerConfig::default(); + self.write_config_locked(&config)?; + config.jobs.clear(); + Ok(config) + } + + pub fn list_jobs(&self) -> Result, String> { + Ok(self.load()?.jobs) + } + + pub fn get_job(&self, job_id: &str) -> Result, String> { + Ok(self + .get_persisted_datasource(job_id)? + .map(|datasource| datasource.to_job_config())) + } + + pub fn get_persisted_datasource( + &self, + job_id: &str, + ) -> Result, String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let Some(workspace) = self.load_active_workspace_locked()? 
else { + return Ok(None); + }; + + Ok(workspace + .datasources + .into_iter() + .find(|item| item.id == job_id)) + } + + pub fn create_job(&self, request: JobUpsertRequest) -> Result { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let mut workspace = self.require_active_workspace_locked()?; + let job = JobConfig::from_request(generate_job_id(), request)?; + workspace + .datasources + .push(PersistedDatasource::from_job(job.clone(), None, None)); + self.write_workspace_locked(&workspace)?; + Ok(job) + } + + pub fn update_job( + &self, + job_id: &str, + request: JobUpsertRequest, + ) -> Result, String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let mut workspace = self.require_active_workspace_locked()?; + + for datasource in &mut workspace.datasources { + if datasource.id != job_id { + continue; + } + + let updated_job = JobConfig::from_request(job_id.to_string(), request)?; + *datasource = PersistedDatasource::from_job( + updated_job.clone(), + Some(datasource.to_cursor()), + None, + ); + self.write_workspace_locked(&workspace)?; + return Ok(Some(updated_job)); + } + + Ok(None) + } + + pub fn delete_job(&self, job_id: &str) -> Result { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let Some(mut workspace) = self.load_active_workspace_locked()? 
else { + return Ok(false); + }; + + let original_len = workspace.datasources.len(); + workspace + .datasources + .retain(|datasource| datasource.id != job_id); + if workspace.datasources.len() == original_len { + return Ok(false); + } + + self.write_workspace_locked(&workspace)?; + Ok(true) + } + + pub fn set_job_enabled( + &self, + job_id: &str, + enabled: bool, + ) -> Result, String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let mut workspace = self.require_active_workspace_locked()?; + + for datasource in &mut workspace.datasources { + if datasource.id != job_id { + continue; + } + datasource.enabled = enabled; + let job = datasource.to_job_config(); + self.write_workspace_locked(&workspace)?; + return Ok(Some(job)); + } + + Ok(None) + } + + pub fn cursor_for(&self, job_id: &str) -> Result { + Ok(self + .get_persisted_datasource(job_id)? + .map(|datasource| datasource.to_cursor()) + .unwrap_or_default()) + } + + pub fn logs_for(&self, job_id: &str, limit: usize) -> Result, String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + + let entries = self.read_job_logs_locked(job_id, limit)?; + if !entries.is_empty() { + return Ok(entries); + } + + let workspace = self.load_active_workspace_locked()?; + let migrated_entries = self.read_job_logs_locked(job_id, limit)?; + if !migrated_entries.is_empty() { + return Ok(migrated_entries); + } + + Ok(workspace + .and_then(|workspace| { + workspace + .datasources + .into_iter() + .find(|item| item.id == job_id) + }) + .map(|datasource| { + let count = datasource.recent_logs.len(); + datasource + .recent_logs + .into_iter() + .skip(count.saturating_sub(limit)) + .collect() + }) + .unwrap_or_default()) + } + + /// Atomically record a successful batch upload for a specific datastream. 
+ /// Advances the datastream's cursor, clears its error, and recomputes the + /// job-level aggregates from the surviving datastreams. + pub fn record_datastream_success( + &self, + job_id: &str, + datastream_id: &str, + max_row_index: u64, + max_timestamp: DateTime, + last_run_at: DateTime, + ) -> Result<(), String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let Some(mut workspace) = self.load_active_workspace_locked()? else { + return Ok(()); + }; + + for datasource in &mut workspace.datasources { + if datasource.id != job_id { + continue; + } + + let entry = datasource + .datastream_cursors + .entry(datastream_id.to_string()) + .or_default(); + entry.last_pushed_row_index = Some( + entry + .last_pushed_row_index + .map(|current| current.max(max_row_index)) + .unwrap_or(max_row_index), + ); + entry.last_pushed_timestamp = Some( + entry + .last_pushed_timestamp + .map(|current| current.max(max_timestamp)) + .unwrap_or(max_timestamp), + ); + entry.last_error = None; + + datasource.last_run_at = Some(last_run_at); + recompute_job_aggregates(datasource); + self.write_workspace_locked(&workspace)?; + return Ok(()); + } + + Ok(()) + } + + /// Clear all per-datastream cursors for a job after the watched CSV was + /// rotated or truncated. Without this, `record_datastream_success` keeps + /// `.max()`ing against the pre-rotation high-water mark and the scanner + /// re-queues the same rows on every tick (bug_001). + pub fn reset_job_datastream_cursors(&self, job_id: &str) -> Result<(), String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let Some(mut workspace) = self.load_active_workspace_locked()? 
else { + return Ok(()); + }; + + for datasource in &mut workspace.datasources { + if datasource.id != job_id { + continue; + } + datasource.datastream_cursors.clear(); + recompute_job_aggregates(datasource); + self.write_workspace_locked(&workspace)?; + return Ok(()); + } + + Ok(()) + } + + /// Atomically clear the job-level `last_error` and update `last_run_at`. + /// Used by the scanner after a successful scan iteration. Taking the + /// config lock for the entire read-modify-write means a concurrent + /// `set_job_running` can't be clobbered between a separate read and write + /// (bug_004). + pub fn clear_last_error(&self, job_id: &str, last_run_at: DateTime) -> Result<(), String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let Some(mut workspace) = self.load_active_workspace_locked()? else { + return Ok(()); + }; + + for datasource in &mut workspace.datasources { + if datasource.id != job_id { + continue; + } + + datasource.last_error = None; + datasource.last_run_at = Some(last_run_at); + self.write_workspace_locked(&workspace)?; + return Ok(()); + } + + Ok(()) + } + + /// Atomically record a failed batch upload for a specific datastream. + /// Sets the datastream's error without advancing its cursor and + /// recomputes the job-level aggregates. + pub fn record_datastream_failure( + &self, + job_id: &str, + datastream_id: &str, + error_message: &str, + last_run_at: DateTime, + ) -> Result<(), String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let Some(mut workspace) = self.load_active_workspace_locked()? 
else { + return Ok(()); + }; + + for datasource in &mut workspace.datasources { + if datasource.id != job_id { + continue; + } + + let entry = datasource + .datastream_cursors + .entry(datastream_id.to_string()) + .or_default(); + entry.last_error = Some(error_message.to_string()); + + datasource.last_run_at = Some(last_run_at); + recompute_job_aggregates(datasource); + self.write_workspace_locked(&workspace)?; + return Ok(()); + } + + Ok(()) + } + + pub fn append_log(&self, job_id: &str, entry: JobLogEntry) -> Result { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let has_job = self + .load_active_workspace_locked()? + .map(|workspace| { + workspace + .datasources + .into_iter() + .any(|item| item.id == job_id) + }) + .unwrap_or(false); + if has_job { + self.append_job_log_locked(job_id, &entry)?; + } + + Ok(entry) + } + + pub fn set_job_running(&self, job_id: &str, is_running: bool) -> Result { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let Some(mut workspace) = self.load_active_workspace_locked()? else { + return Ok(false); + }; + + for datasource in &mut workspace.datasources { + if datasource.id != job_id { + continue; + } + + datasource.is_running = is_running; + self.write_workspace_locked(&workspace)?; + return Ok(true); + } + + Ok(false) + } + + pub fn clear_all_running_jobs(&self) -> Result<(), String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + + let config = self.read_config_locked()?; + let Some(mut workspace) = self.load_workspace_locked(&config.server.workspace_id)? 
else { + return Ok(()); + }; + + let mut changed = false; + for datasource in &mut workspace.datasources { + if datasource.is_running { + datasource.is_running = false; + changed = true; + } + } + + if changed { + self.write_workspace_locked(&workspace)?; + } + + Ok(()) + } + + pub fn delete_job_runtime(&self, job_id: &str) -> Result<(), String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + let Some(mut workspace) = self.load_active_workspace_locked()? else { + self.delete_job_logs_locked(job_id)?; + return Ok(()); + }; + + for datasource in &mut workspace.datasources { + if datasource.id != job_id { + continue; + } + datasource.last_pushed_timestamp = None; + datasource.last_pushed_row_index = None; + datasource.last_run_at = None; + datasource.last_error = None; + datasource.is_running = false; + datasource.datastream_cursors.clear(); + datasource.recent_logs.clear(); + self.write_workspace_locked(&workspace)?; + break; + } + self.delete_job_logs_locked(job_id)?; + + Ok(()) + } + + pub fn job_log_file_path(&self, job_id: &str) -> Result, String> { + let _guard = self + .lock + .lock() + .map_err(|_| "Config lock poisoned.".to_string())?; + self.ensure_locked()?; + Ok(self.job_log_paths_oldest_to_newest(job_id).pop()) + } + + fn ensure_locked(&self) -> Result<(), String> { + fs::create_dir_all(&self.config_dir).map_err(|err| err.to_string())?; + fs::create_dir_all(&self.workspace_dir).map_err(|err| err.to_string())?; + fs::create_dir_all(&self.logs_dir).map_err(|err| err.to_string())?; + fs::create_dir_all(&self.job_logs_dir).map_err(|err| err.to_string())?; + + if !self.config_path.exists() { + self.write_config_locked(&AppConfig::default())?; + } + + self.migrate_legacy_workspace_data_locked() + } + + fn read_config_locked(&self) -> Result { + if !self.config_path.exists() { + return Ok(AppConfig::default()); + } + + let contents = fs::read_to_string(&self.config_path).map_err(|err| 
err.to_string())?; + let value: Value = serde_json::from_str(&contents).map_err(|err| err.to_string())?; + + let version = value.get("version").and_then(Value::as_u64).unwrap_or(1) as u32; + let server = value + .get("server") + .cloned() + .map(parse_server_config) + .transpose()? + .unwrap_or_default(); + let jobs = value + .get("jobs") + .and_then(Value::as_array) + .map(|items| { + items + .iter() + .cloned() + .map(parse_job_config) + .collect::, _>>() + }) + .transpose()? + .unwrap_or_default(); + + Ok(AppConfig { + version, + server, + launch_at_login_initialized: value + .get("launch_at_login_initialized") + .and_then(Value::as_bool) + .unwrap_or(false), + jobs, + }) + } + + fn write_config_locked(&self, config: &AppConfig) -> Result<(), String> { + let payload = json!({ + "version": config.version, + "server": config.server.clone().normalized(), + "launch_at_login_initialized": config.launch_at_login_initialized, + }); + write_json_file(&self.config_path, &payload) + } + + fn workspace_path(&self, workspace_id: &str) -> PathBuf { + self.workspace_dir.join(format!("{workspace_id}.json")) + } + + fn ensure_workspace_file_locked( + &self, + workspace_id: &str, + workspace_name: &str, + hydroserver_url: &str, + ) -> Result, String> { + let workspace_id = workspace_id.trim(); + if workspace_id.is_empty() { + return Ok(None); + } + + let path = self.workspace_path(workspace_id); + if path.exists() { + let mut workspace = self + .load_workspace_locked(workspace_id)? 
+ .unwrap_or_default(); + let mut changed = false; + + if !workspace_name.trim().is_empty() + && workspace.workspace_name != workspace_name.trim() + { + workspace.workspace_name = workspace_name.trim().to_string(); + changed = true; + } + if !hydroserver_url.trim().is_empty() + && workspace.hydroserver_url != hydroserver_url.trim() + { + workspace.hydroserver_url = hydroserver_url.trim().to_string(); + changed = true; + } + + if changed { + self.write_workspace_locked(&workspace)?; + } + + return Ok(Some(workspace)); + } + + let workspace = WorkspaceStateFile { + version: 1, + workspace_id: workspace_id.to_string(), + workspace_name: workspace_name.trim().to_string(), + hydroserver_url: hydroserver_url.trim().to_string(), + datasources: Vec::new(), + }; + self.write_workspace_locked(&workspace)?; + Ok(Some(workspace)) + } + + fn load_workspace_locked( + &self, + workspace_id: &str, + ) -> Result, String> { + let workspace_id = workspace_id.trim(); + if workspace_id.is_empty() { + return Ok(None); + } + + let path = self.workspace_path(workspace_id); + if !path.exists() { + return Ok(None); + } + + let contents = fs::read_to_string(path).map_err(|err| err.to_string())?; + let value: Value = serde_json::from_str(&contents).map_err(|err| err.to_string())?; + let mut workspace = parse_workspace_state(value)?; + let mut changed = false; + for datasource in &mut workspace.datasources { + if let Some(migrated_file_path) = + self.migrate_generated_test_csv_path(&datasource.file_path) + { + datasource.file_path = migrated_file_path; + changed = true; + } + + if datasource.recent_logs.is_empty() { + continue; + } + + if !self.job_logs_exist_locked(&datasource.id) { + self.append_job_logs_locked(&datasource.id, &datasource.recent_logs)?; + } + datasource.recent_logs.clear(); + changed = true; + } + + if changed { + self.write_workspace_locked(&workspace)?; + } + + Ok(Some(workspace)) + } + + fn migrate_generated_test_csv_path(&self, file_path: &str) -> Option { + let original 
= Path::new(file_path); + if original.exists() { + return None; + } + + let mut relative = PathBuf::new(); + let mut found_generated_test_dir = false; + for component in original.components() { + if found_generated_test_dir { + relative.push(component.as_os_str()); + } else if component.as_os_str() == "generated-test-csv" { + found_generated_test_dir = true; + } + } + + if !found_generated_test_dir || relative.as_os_str().is_empty() { + return None; + } + + let candidate = self.config_dir.join("generated-test-csv").join(relative); + candidate + .exists() + .then(|| candidate.to_string_lossy().into_owned()) + } + + fn load_active_workspace_locked(&self) -> Result, String> { + let config = self.read_config_locked()?; + self.load_workspace_locked(&config.server.workspace_id) + } + + fn require_active_workspace_locked(&self) -> Result { + let config = self.read_config_locked()?; + self.ensure_workspace_file_locked(&config.server.workspace_id, "", &config.server.url)? + .ok_or_else(|| "No active workspace is configured.".to_string()) + } + + fn write_workspace_locked(&self, workspace: &WorkspaceStateFile) -> Result<(), String> { + let path = self.workspace_path(&workspace.workspace_id); + let payload = serde_json::to_value(workspace).map_err(|err| err.to_string())?; + write_json_file(&path, &payload) + } + + fn job_log_path(&self, job_id: &str) -> PathBuf { + self.job_logs_dir.join(format!("{job_id}.log")) + } + + fn rotated_job_log_path(&self, job_id: &str, index: usize) -> PathBuf { + self.job_logs_dir.join(format!("{job_id}.{index}.log")) + } + + fn job_log_paths_oldest_to_newest(&self, job_id: &str) -> Vec { + let mut paths = Vec::new(); + for index in (1..=JOB_LOG_ROTATE_FILES).rev() { + let rotated = self.rotated_job_log_path(job_id, index); + if rotated.exists() { + paths.push(rotated); + } + } + + let current = self.job_log_path(job_id); + if current.exists() { + paths.push(current); + } + + paths + } + + fn job_logs_exist_locked(&self, job_id: &str) -> bool { + 
self.job_log_path(job_id).exists() + || (1..=JOB_LOG_ROTATE_FILES) + .any(|index| self.rotated_job_log_path(job_id, index).exists()) + } + + fn append_job_logs_locked(&self, job_id: &str, entries: &[JobLogEntry]) -> Result<(), String> { + for entry in entries { + self.append_job_log_locked(job_id, entry)?; + } + Ok(()) + } + + fn append_job_log_locked(&self, job_id: &str, entry: &JobLogEntry) -> Result<(), String> { + let payload = serde_json::to_string(entry).map_err(|err| err.to_string())?; + let line = format!("{payload}\n"); + self.rotate_job_logs_locked(job_id, line.len() as u64)?; + + let mut file = fs::OpenOptions::new() + .create(true) + .append(true) + .open(self.job_log_path(job_id)) + .map_err(|err| err.to_string())?; + file.write_all(line.as_bytes()) + .map_err(|err| err.to_string()) + } + + fn rotate_job_logs_locked(&self, job_id: &str, incoming_bytes: u64) -> Result<(), String> { + let current = self.job_log_path(job_id); + let current_len = current + .metadata() + .map(|metadata| metadata.len()) + .unwrap_or_default(); + if current_len + incoming_bytes <= JOB_LOG_ROTATE_BYTES { + return Ok(()); + } + + let oldest = self.rotated_job_log_path(job_id, JOB_LOG_ROTATE_FILES); + if oldest.exists() { + fs::remove_file(&oldest).map_err(|err| err.to_string())?; + } + + for index in (1..JOB_LOG_ROTATE_FILES).rev() { + let source = self.rotated_job_log_path(job_id, index); + if source.exists() { + fs::rename(&source, self.rotated_job_log_path(job_id, index + 1)) + .map_err(|err| err.to_string())?; + } + } + + if current.exists() { + fs::rename(¤t, self.rotated_job_log_path(job_id, 1)) + .map_err(|err| err.to_string())?; + } + + Ok(()) + } + + fn read_job_logs_locked(&self, job_id: &str, limit: usize) -> Result, String> { + let mut entries = Vec::new(); + for path in self.job_log_paths_oldest_to_newest(job_id) { + let file = fs::File::open(path).map_err(|err| err.to_string())?; + for line in BufReader::new(file).lines() { + let line = line.map_err(|err| 
err.to_string())?; + let trimmed = line.trim(); + if trimmed.is_empty() { + continue; + } + if let Ok(entry) = serde_json::from_str::(trimmed) { + entries.push(entry); + } + } + } + + if entries.len() > limit { + let keep_from = entries.len() - limit; + entries = entries.split_off(keep_from); + } + + Ok(entries) + } + + fn delete_job_logs_locked(&self, job_id: &str) -> Result<(), String> { + for path in self.job_log_paths_oldest_to_newest(job_id) { + if path.exists() { + fs::remove_file(path).map_err(|err| err.to_string())?; + } + } + + Ok(()) + } + + fn active_jobs_locked(&self, server: &ServerConfig) -> Result, String> { + let Some(workspace) = self.load_workspace_locked(&server.workspace_id)? else { + return Ok(Vec::new()); + }; + + Ok(workspace + .datasources + .into_iter() + .map(|datasource| datasource.to_job_config()) + .collect()) + } + + fn migrate_legacy_workspace_data_locked(&self) -> Result<(), String> { + let config = self.read_config_locked()?; + let workspace_id = config.server.workspace_id.trim().to_string(); + if workspace_id.is_empty() { + return Ok(()); + } + + let legacy_jobs = config.jobs.clone(); + let legacy_state = self.read_legacy_state_locked()?; + if legacy_jobs.is_empty() && legacy_state.is_none() { + return Ok(()); + } + + let path = self.workspace_path(&workspace_id); + if path.exists() { + if !legacy_jobs.is_empty() { + let stripped_config = AppConfig { + version: config.version, + server: config.server, + launch_at_login_initialized: config.launch_at_login_initialized, + jobs: Vec::new(), + }; + self.write_config_locked(&stripped_config)?; + } + return Ok(()); + } + + let workspace = WorkspaceStateFile { + version: 1, + workspace_id: workspace_id.clone(), + workspace_name: String::new(), + hydroserver_url: config.server.url.clone(), + datasources: legacy_jobs + .into_iter() + .map(|job| { + let cursor = legacy_state + .as_ref() + .and_then(|state| state.cursors.get(&job.id).cloned()); + let recent_logs = legacy_state + .as_ref() + 
.and_then(|state| state.logs.get(&job.id).cloned()); + PersistedDatasource::from_job(job, cursor, recent_logs) + }) + .collect(), + }; + + self.write_workspace_locked(&workspace)?; + + let stripped_config = AppConfig { + version: config.version, + server: config.server, + launch_at_login_initialized: config.launch_at_login_initialized, + jobs: Vec::new(), + }; + self.write_config_locked(&stripped_config) + } + + fn read_legacy_state_locked(&self) -> Result, String> { + if !self.legacy_state_path.exists() { + return Ok(None); + } + + let contents = + fs::read_to_string(&self.legacy_state_path).map_err(|err| err.to_string())?; + let state: AppStateFile = serde_json::from_str(&contents).map_err(|err| err.to_string())?; + if state.cursors.is_empty() && state.logs.is_empty() { + return Ok(None); + } + + Ok(Some(state)) + } +} + +fn parse_server_config(value: Value) -> Result { + let server: ServerConfig = serde_json::from_value(value).map_err(|err| err.to_string())?; + Ok(server.normalized()) +} + +fn parse_job_config(value: Value) -> Result { + let mut job: JobConfig = + serde_json::from_value(normalize_job_value(value)).map_err(|err| err.to_string())?; + job = job.normalized()?; + Ok(job) +} + +fn parse_workspace_state(value: Value) -> Result { + let version = value.get("version").and_then(Value::as_u64).unwrap_or(1) as u32; + let workspace_id = value + .get("workspace_id") + .and_then(Value::as_str) + .unwrap_or_default() + .trim() + .to_string(); + let workspace_name = value + .get("workspace_name") + .and_then(Value::as_str) + .unwrap_or_default() + .trim() + .to_string(); + let hydroserver_url = value + .get("hydroserver_url") + .and_then(Value::as_str) + .unwrap_or_default() + .trim() + .to_string(); + let datasources = value + .get("datasources") + .and_then(Value::as_array) + .map(|items| { + items + .iter() + .cloned() + .map(parse_persisted_datasource) + .collect::, _>>() + }) + .transpose()? 
+ .unwrap_or_default(); + + Ok(WorkspaceStateFile { + version, + workspace_id, + workspace_name, + hydroserver_url, + datasources, + }) +} + +fn parse_persisted_datasource(value: Value) -> Result { + let mut datasource: PersistedDatasource = + serde_json::from_value(normalize_job_value(value)).map_err(|err| err.to_string())?; + + let normalized_job = datasource.to_job_config().normalized()?; + datasource.id = normalized_job.id; + datasource.name = normalized_job.name; + datasource.enabled = normalized_job.enabled; + datasource.file_path = normalized_job.file_path; + datasource.schedule_minutes = normalized_job.schedule_minutes; + datasource.file_config = normalized_job.file_config; + datasource.column_mappings = normalized_job.column_mappings; + Ok(datasource) +} + +fn normalize_job_value(value: Value) -> Value { + let mut value = value; + if let Some(object) = value.as_object_mut() { + if let Some(file_config) = object.get("file_config").cloned() { + object.insert( + "file_config".to_string(), + migrate_file_config_value(file_config), + ); + } + if let Some(column_mappings) = object.get("column_mappings").cloned() { + object.insert( + "column_mappings".to_string(), + normalize_column_mappings_value(column_mappings), + ); + } + } + value +} + +fn normalize_column_mappings_value(value: Value) -> Value { + let Value::Array(items) = value else { + return Value::Array(Vec::new()); + }; + + Value::Array( + items + .into_iter() + .filter_map(|item| match serde_json::from_value::(item) { + Ok(mapping) => Some(serde_json::to_value(mapping.normalized().ok()?).ok()?), + Err(_) => None, + }) + .collect(), + ) +} + +fn migrate_file_config_value(value: Value) -> Value { + let Some(object) = value.as_object() else { + return serde_json::to_value(FileConfig::default()).unwrap_or(Value::Null); + }; + + if object.contains_key("timestamp") + || object.contains_key("identifierType") + || object.contains_key("identifier_type") + { + return value; + } + + let legacy_key = 
string_field(object, &["timestamp_column", "timestampColumn"]) + .unwrap_or_else(|| "timestamp".to_string()); + let legacy_format = string_field(object, &["timestamp_format", "timestampFormat"]); + let legacy_timezone = string_field(object, &["timezone"]); + + let mut timestamp = json!({ + "key": legacy_key, + }); + + if let Some(format) = legacy_format { + timestamp["format"] = Value::String("custom".to_string()); + timestamp["customFormat"] = Value::String(format); + } else { + timestamp["format"] = Value::String("ISO8601".to_string()); + } + + match legacy_timezone { + Some(timezone) if timezone.contains('/') => { + timestamp["timezoneMode"] = Value::String("daylightSavings".to_string()); + timestamp["timezone"] = Value::String(timezone); + if timestamp["format"] == Value::String("ISO8601".to_string()) { + timestamp["format"] = Value::String("naive".to_string()); + } + } + Some(timezone) if timezone.eq_ignore_ascii_case("UTC") => { + timestamp["timezoneMode"] = Value::String("utc".to_string()); + if timestamp["format"] == Value::String("ISO8601".to_string()) { + timestamp["format"] = Value::String("naive".to_string()); + } + } + Some(timezone) => { + timestamp["timezoneMode"] = Value::String("fixedOffset".to_string()); + timestamp["timezone"] = Value::String(timezone); + if timestamp["format"] == Value::String("ISO8601".to_string()) { + timestamp["format"] = Value::String("naive".to_string()); + } + } + None => { + timestamp["timezoneMode"] = Value::String("embeddedOffset".to_string()); + } + } + + json!({ + "headerRow": object + .get("headerRow") + .cloned() + .or_else(|| object.get("header_row").cloned()) + .unwrap_or(Value::from(1)), + "dataStartRow": object + .get("dataStartRow") + .cloned() + .or_else(|| object.get("data_start_row").cloned()) + .unwrap_or(Value::from(2)), + "delimiter": object + .get("delimiter") + .cloned() + .unwrap_or_else(|| Value::String(",".to_string())), + "identifierType": object + .get("identifierType") + .cloned() + .or_else(|| 
object.get("identifier_type").cloned()) + .unwrap_or_else(|| Value::String("name".to_string())), + "timestamp": timestamp, + }) +} + +fn string_field(object: &serde_json::Map, keys: &[&str]) -> Option { + keys.iter() + .find_map(|key| object.get(*key)) + .and_then(Value::as_str) + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) +} + +/// Recomputes the job-level `last_pushed_row_index`, `last_pushed_timestamp`, +/// and `last_error` from the per-datastream cursors of the currently-configured +/// column mappings. The job-level fields are derived aggregates used for the UI +/// status display; the per-datastream cursors are authoritative for resumption. +fn recompute_job_aggregates(datasource: &mut PersistedDatasource) { + let active_ids: Vec<&str> = datasource + .column_mappings + .iter() + .map(|mapping| mapping.datastream_id.as_str()) + .collect(); + + if active_ids.is_empty() { + datasource.last_pushed_row_index = None; + datasource.last_pushed_timestamp = None; + datasource.last_error = None; + return; + } + + let mut min_row: Option = None; + let mut min_ts: Option> = None; + let mut any_missing_row = false; + let mut any_missing_ts = false; + let mut aggregate_error: Option = None; + + for id in &active_ids { + let cursor = datasource.datastream_cursors.get(*id); + match cursor.and_then(|c| c.last_pushed_row_index) { + Some(idx) => min_row = Some(min_row.map_or(idx, |current| current.min(idx))), + None => any_missing_row = true, + } + match cursor.and_then(|c| c.last_pushed_timestamp) { + Some(ts) => min_ts = Some(min_ts.map_or(ts, |current| current.min(ts))), + None => any_missing_ts = true, + } + if aggregate_error.is_none() { + if let Some(error) = cursor.and_then(|c| c.last_error.clone()) { + aggregate_error = Some(error); + } + } + } + + datasource.last_pushed_row_index = if any_missing_row { None } else { min_row }; + datasource.last_pushed_timestamp = if any_missing_ts { None } else { min_ts }; + datasource.last_error = 
aggregate_error; +} + +fn write_json_file(path: &Path, value: &Value) -> Result<(), String> { + let payload = serde_json::to_string_pretty(value).map_err(|err| err.to_string())?; + fs::write(path, format!("{payload}\n")).map_err(|err| err.to_string()) +} + +fn generate_job_id() -> String { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|duration| duration.as_nanos()) + .unwrap_or_default(); + let counter = JOB_COUNTER.fetch_add(1, Ordering::Relaxed) as u128; + let mixed = nanos ^ (counter << 32) ^ ((std::process::id() as u128) << 64); + let hex = format!("{mixed:032x}"); + format!( + "{}-{}-{}-{}-{}", + &hex[0..8], + &hex[8..12], + &hex[12..16], + &hex[16..20], + &hex[20..32] + ) +} + +#[cfg(test)] +#[path = "tests/config_store.rs"] +mod tests; diff --git a/src/csv_preview.rs b/src/csv_preview.rs new file mode 100644 index 0000000..feb9286 --- /dev/null +++ b/src/csv_preview.rs @@ -0,0 +1,209 @@ +use std::{fs, path::PathBuf}; + +use chrono::{NaiveDate, NaiveDateTime}; +use csv::ReaderBuilder; + +use crate::models::CsvPreviewResponse; + +const DELIMITER_CANDIDATES: [char; 5] = [',', '\t', ';', '|', ' ']; + +pub fn preview_csv(path: &str, rows: usize) -> Result { + let file_path = expand_path(path)?; + if !file_path.exists() { + return Err("Can't find the data file. 
It may have been moved or renamed.".to_string()); + } + + let bytes = fs::read(&file_path).map_err(|err| err.to_string())?; + let (raw_text, encoding) = decode_text(&bytes)?; + let raw_lines: Vec = raw_text.lines().map(|line| line.to_string()).collect(); + let delimiter = detect_delimiter(raw_lines.iter().take(rows).map(String::as_str)); + let parsed_rows: Vec> = raw_lines + .iter() + .take(rows) + .filter(|line| !line.trim().is_empty()) + .map(|line| parse_line(line, delimiter)) + .collect(); + + let header_index = detect_header_row(&parsed_rows); + let data_start_index = detect_data_start_row(&parsed_rows, header_index); + + Ok(CsvPreviewResponse { + raw_lines: raw_lines.into_iter().take(rows).collect(), + parsed_rows: match header_index { + Some(index) => parsed_rows.into_iter().skip(index).collect(), + None => parsed_rows, + }, + detected_header_row: header_index.map(|index| index as u32 + 1), + detected_data_start_row: data_start_index.map(|index| index as u32 + 1), + detected_delimiter: delimiter.to_string(), + total_lines: raw_text.lines().count(), + encoding, + }) +} + +pub fn detect_delimiter<'a>(lines: impl Iterator) -> char { + let sampled_lines: Vec<&str> = lines.filter(|line| !line.trim().is_empty()).collect(); + let mut best_delimiter = ','; + let mut best_score = -1_i64; + + for delimiter in DELIMITER_CANDIDATES { + let counts: Vec = sampled_lines + .iter() + .map(|line| parse_line(line, delimiter).len()) + .collect(); + + if counts.is_empty() { + continue; + } + + let mut frequency = std::collections::HashMap::::new(); + for count in counts { + *frequency.entry(count).or_insert(0) += 1; + } + + if let Some((mode_count, occurrences)) = frequency + .into_iter() + .max_by_key(|(count, occurrences)| (*occurrences, *count)) + { + let score = (occurrences * mode_count) as i64; + if score > best_score { + best_score = score; + best_delimiter = delimiter; + } + } + } + + best_delimiter +} + +pub fn parse_line(line: &str, delimiter: char) -> Vec { + let mut 
reader = ReaderBuilder::new()
        .has_headers(false)
        .delimiter(delimiter as u8)
        .from_reader(line.as_bytes());

    // A single CSV record is expected; fall back to the raw line verbatim
    // when the parser yields nothing (e.g. on a malformed record).
    reader
        .records()
        .next()
        .transpose()
        .ok()
        .flatten()
        .map(|record| record.iter().map(|value| value.to_string()).collect())
        .unwrap_or_else(|| vec![line.to_string()])
}

/// Expands a leading `~/` to the user's home directory and rejects empty
/// paths; any other path is returned untouched.
fn expand_path(path: &str) -> Result<PathBuf, String> {
    let trimmed = path.trim();
    if trimmed.is_empty() {
        return Err("Choose a CSV file path.".to_string());
    }

    if let Some(stripped) = trimmed.strip_prefix("~/") {
        // HOME covers Unix; USERPROFILE covers Windows.
        let home = std::env::var("HOME")
            .or_else(|_| std::env::var("USERPROFILE"))
            .map_err(|_| "Couldn't resolve the home directory.".to_string())?;
        return Ok(PathBuf::from(home).join(stripped));
    }

    Ok(PathBuf::from(trimmed))
}

/// Decodes raw file bytes into text, returning the text together with the
/// encoding label that was detected: "utf-8-sig", "utf-8", or "latin-1".
pub(crate) fn decode_text(bytes: &[u8]) -> Result<(String, String), String> {
    // UTF-8 BOM present: strip it and report "utf-8-sig".
    if bytes.starts_with(&[0xEF, 0xBB, 0xBF]) {
        let text = String::from_utf8(bytes[3..].to_vec())
            .map_err(|_| "Couldn't read the file encoding. Try exporting as UTF-8.".to_string())?;
        return Ok((text, "utf-8-sig".to_string()));
    }

    if let Ok(text) = String::from_utf8(bytes.to_vec()) {
        return Ok((text, "utf-8".to_string()));
    }

    // Latin-1 fallback: every byte maps 1:1 onto U+0000..U+00FF, so this
    // conversion cannot fail.
    let latin1 = bytes.iter().map(|byte| *byte as char).collect::<String>();
    Ok((latin1, "latin-1".to_string()))
}

/// Finds the first plausible header row: at least three non-empty cells,
/// none of which looks like a number or timestamp. Falls back to row 0 when
/// no such row exists but the file has rows at all.
fn detect_header_row(rows: &[Vec<String>]) -> Option<usize> {
    for (index, row) in rows.iter().enumerate() {
        let cleaned: Vec<&str> = row
            .iter()
            .map(|cell| cell.trim())
            .filter(|cell| !cell.is_empty())
            .collect();
        if cleaned.len() < 3 {
            continue;
        }

        if cleaned.iter().all(|cell| !looks_numeric_or_timestamp(cell)) {
            return Some(index);
        }
    }

    if rows.is_empty() {
        None
    } else {
        Some(0)
    }
}

/// Finds the first data row after the header: same column count as the
/// header, at least two non-empty cells, and at least half of them (min 2)
/// numeric or timestamp-like.
fn detect_data_start_row(rows: &[Vec<String>], header_index: Option<usize>) -> Option<usize> {
    let header_index = header_index?;
    let expected_columns = rows.get(header_index).map(Vec::len).unwrap_or_default();

    for index in (header_index + 1)..rows.len() {
        let row: Vec<String> = rows[index]
            .iter()
            .map(|cell| cell.trim().to_string())
            .collect();
        if row.len() != expected_columns {
            continue;
        }

        let meaningful: Vec<&str> = row
            .iter()
            .map(String::as_str)
            .filter(|cell| !cell.is_empty())
            .collect();
        if meaningful.len() < 2 {
            continue;
        }

        let numeric_or_timestamp_count = meaningful
            .iter()
            .filter(|cell| looks_numeric_or_timestamp(cell))
            .count();

        if numeric_or_timestamp_count >= usize::max(2, meaningful.len() / 2) {
            return Some(index);
        }
    }

    None
}

/// True when the cell parses as a float or matches one of the preview
/// timestamp formats.
fn looks_numeric_or_timestamp(value: &str) -> bool {
    if value.parse::<f64>().is_ok() {
        return true;
    }

    parse_preview_timestamp(value).is_some()
}

/// Cheap timestamp sniff used only for header/data-row detection; `Some(())`
/// when the value matches any of a few common datetime or date formats.
fn parse_preview_timestamp(value: &str) -> Option<()> {
    let trimmed = value.trim();
    if trimmed.is_empty() {
        return None;
    }

    for format in ["%Y-%m-%d %H:%M:%S", "%Y-%m-%dT%H:%M:%S", "%m/%d/%Y %H:%M"] {
        if NaiveDateTime::parse_from_str(trimmed, format).is_ok() {
            return Some(());
        }
    }

    if NaiveDate::parse_from_str(trimmed, "%Y-%m-%d").is_ok() {
        return Some(());
    }

    None
}
diff --git a/src/daemon_api.rs b/src/daemon_api.rs
new file mode 100644
index 0000000..4fff609
--- /dev/null
+++ b/src/daemon_api.rs
@@ -0,0 +1,643 @@
use std::{convert::Infallible, fs, io, net::SocketAddr, path::PathBuf};

use axum::{
    extract::{Query, State},
    http::{header, HeaderMap, Method, StatusCode},
    response::{
        sse::{Event, KeepAlive, Sse},
        IntoResponse, Response,
    },
    routing::{get, post},
    Json, Router,
};
use rand::RngCore;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use tokio::{net::TcpListener, sync::oneshot, task::JoinHandle};
use tokio_stream::{wrappers::WatchStream, StreamExt};
use tower_http::cors::{Any, CorsLayer};

use crate::{
    daemon_state::DaemonState,
    models::{ActionResponse, DaemonConnectionInfo, JobUpsertRequest, ServerConfig},
    service_paths::daemon_endpoint_path,
};

// Shared state handed to every handler: the daemon facade plus the bearer
// token that every request must present.
#[derive(Clone)]
struct ApiState {
    daemon: DaemonState,
    token: String,
}

#[derive(Debug,
Serialize, Deserialize)] +struct PersistedDaemonEndpoint { + base_url: String, + token: String, + pid: u32, +} + +#[derive(Debug, Deserialize)] +struct AccessTokenQuery { + access_token: String, +} + +#[derive(Debug, Deserialize)] +struct UrlPayload { + url: String, +} + +#[derive(Debug, Deserialize)] +struct ServerPayload { + server: ServerConfig, +} + +#[derive(Debug, Deserialize)] +struct JobIdPayload { + job_id: String, +} + +#[derive(Debug, Deserialize)] +struct JobPayload { + payload: JobUpsertRequest, +} + +#[derive(Debug, Deserialize)] +struct UpdateJobPayload { + job_id: String, + payload: JobUpsertRequest, +} + +#[derive(Debug, Deserialize)] +struct DatastreamPayload { + datastream_id: String, +} + +#[derive(Debug, Deserialize)] +struct CsvPreviewPayload { + path: String, + rows: Option, +} + +pub struct DaemonApiServer { + endpoint_path: PathBuf, + join_handle: JoinHandle<()>, + shutdown_tx: Option>, + token: String, +} + +impl DaemonApiServer { + pub async fn start(daemon: DaemonState, config_dir: PathBuf) -> Result { + let token = generate_token(); + let listener = TcpListener::bind("127.0.0.1:0") + .await + .map_err(|err| err.to_string())?; + let address = listener.local_addr().map_err(|err| err.to_string())?; + let base_url = format!("http://{}", format_socket_addr(address)); + let endpoint_path = daemon_endpoint_path(&config_dir); + + persist_endpoint( + &endpoint_path, + &PersistedDaemonEndpoint { + base_url: base_url.clone(), + token: token.clone(), + pid: std::process::id(), + }, + )?; + + let app_state = ApiState { + daemon, + token: token.clone(), + }; + let cors = CorsLayer::new() + .allow_origin(Any) + .allow_methods([Method::GET, Method::POST, Method::OPTIONS]) + .allow_headers(Any); + let router = Router::new() + .route("/api/commands/ping", post(ping)) + .route("/api/commands/bootstrap", post(bootstrap)) + .route("/api/commands/get-health", post(get_health)) + .route("/api/commands/get-config", post(get_config)) + 
.route("/api/commands/get-jobs", post(get_jobs)) + .route("/api/commands/get-job", post(get_job)) + .route("/api/commands/get-job-logs", post(get_job_logs)) + .route( + "/api/commands/update-server-config", + post(update_server_config), + ) + .route( + "/api/commands/clear-server-config", + post(clear_server_config), + ) + .route("/api/commands/test-connection", post(test_connection)) + .route( + "/api/commands/validate-server-url", + post(validate_server_url), + ) + .route("/api/commands/create-job", post(create_job)) + .route("/api/commands/update-job", post(update_job)) + .route("/api/commands/delete-job", post(delete_job)) + .route("/api/commands/run-job-now", post(run_job_now)) + .route("/api/commands/enable-job", post(enable_job)) + .route("/api/commands/disable-job", post(disable_job)) + .route("/api/commands/get-datastreams", post(get_datastreams)) + .route( + "/api/commands/get-datastream-detail", + post(get_datastream_detail), + ) + .route("/api/commands/get-csv-preview", post(get_csv_preview)) + .route("/api/status", get(status_stream)) + .layer(cors) + .with_state(app_state); + + let (shutdown_tx, shutdown_rx) = oneshot::channel::<()>(); + let join_handle = tokio::spawn(async move { + let server = axum::serve(listener, router).with_graceful_shutdown(async move { + let _ = shutdown_rx.await; + }); + + if let Err(error) = server.await { + tracing::error!(error = %error, "daemon API server stopped unexpectedly"); + } + }); + + Ok(Self { + endpoint_path, + join_handle, + shutdown_tx: Some(shutdown_tx), + token, + }) + } + + pub async fn shutdown(mut self) { + if let Some(tx) = self.shutdown_tx.take() { + let _ = tx.send(()); + } + let _ = self.join_handle.await; + remove_endpoint_if_current(&self.endpoint_path, &self.token); + } +} + +#[derive(Debug)] +pub enum ConnectionReadError { + MissingEndpoint, + Incomplete, + Fatal(String), +} + +pub fn read_connection_info( + config_dir: PathBuf, +) -> Result { + let endpoint_path = 
daemon_endpoint_path(&config_dir); + let payload = match fs::read_to_string(&endpoint_path) { + Ok(payload) => payload, + Err(err) if err.kind() == io::ErrorKind::NotFound => { + return Err(ConnectionReadError::MissingEndpoint); + } + Err(err) => { + return Err(ConnectionReadError::Fatal(format!( + "Couldn't read the daemon endpoint file at {}: {err}", + endpoint_path.display() + ))); + } + }; + + let endpoint: PersistedDaemonEndpoint = match serde_json::from_str(&payload) { + Ok(endpoint) => endpoint, + Err(_) => return Err(ConnectionReadError::Incomplete), + }; + + Ok(DaemonConnectionInfo { + base_url: endpoint.base_url, + token: endpoint.token, + }) +} + +fn persist_endpoint(path: &PathBuf, endpoint: &PersistedDaemonEndpoint) -> Result<(), String> { + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).map_err(|err| err.to_string())?; + } + let payload = serde_json::to_vec_pretty(endpoint).map_err(|err| err.to_string())?; + + let tmp_path = path.with_extension(format!("json.tmp.{}", std::process::id())); + fs::write(&tmp_path, &payload).map_err(|err| { + format!( + "Couldn't stage daemon endpoint file at {}: {err}", + tmp_path.display() + ) + })?; + + if let Err(err) = fs::rename(&tmp_path, path) { + let _ = fs::remove_file(&tmp_path); + return Err(format!( + "Couldn't publish daemon endpoint file at {}: {err}", + path.display() + )); + } + + Ok(()) +} + +fn remove_endpoint_if_current(path: &PathBuf, token: &str) { + let Ok(payload) = fs::read_to_string(path) else { + return; + }; + let Ok(endpoint) = serde_json::from_str::(&payload) else { + return; + }; + if endpoint.token == token { + let _ = fs::remove_file(path); + } +} + +fn format_socket_addr(address: SocketAddr) -> String { + match address { + SocketAddr::V4(v4) => v4.to_string(), + SocketAddr::V6(v6) => format!("[{}]:{}", v6.ip(), v6.port()), + } +} + +fn generate_token() -> String { + let mut bytes = [0_u8; 24]; + rand::thread_rng().fill_bytes(&mut bytes); + bytes.iter().map(|byte| 
format!("{byte:02x}")).collect()
}

/// Checks the `Authorization: Bearer <token>` header against the server's
/// token, returning the ready-made 401 response on any mismatch.
fn authorize(headers: &HeaderMap, token: &str) -> Result<(), Response> {
    let Some(value) = headers.get(header::AUTHORIZATION) else {
        return Err((StatusCode::UNAUTHORIZED, "Missing bearer token.").into_response());
    };
    let Ok(value) = value.to_str() else {
        return Err((StatusCode::UNAUTHORIZED, "Invalid bearer token.").into_response());
    };
    let expected = format!("Bearer {token}");
    if value != expected {
        return Err((StatusCode::UNAUTHORIZED, "Invalid bearer token.").into_response());
    }
    Ok(())
}

/// Wraps a domain error string in the JSON error envelope shared by all
/// command endpoints: `400 {"detail": "..."}`.
fn command_error(error: String) -> Response {
    (
        StatusCode::BAD_REQUEST,
        Json(serde_json::json!({ "detail": error })),
    )
        .into_response()
}

/// Unwraps an axum JSON extractor result, converting a body/deserialization
/// rejection into the standard command error response.
fn parse_json_payload<T: DeserializeOwned>(
    payload: Result<Json<T>, axum::extract::rejection::JsonRejection>,
) -> Result<T, Response> {
    payload
        .map(|Json(value)| value)
        .map_err(|rejection| command_error(rejection.to_string()))
}

/// Liveness probe; authenticated so it also validates the token.
async fn ping(State(state): State<ApiState>, headers: HeaderMap) -> Response {
    if let Err(response) = authorize(&headers, &state.token) {
        return response;
    }

    Json(ActionResponse {
        ok: true,
        message: "pong".to_string(),
    })
    .into_response()
}

async fn bootstrap(State(state): State<ApiState>, headers: HeaderMap) -> Response {
    if let Err(response) = authorize(&headers, &state.token) {
        return response;
    }

    match state.daemon.bootstrap() {
        Ok(response) => Json(response).into_response(),
        Err(error) => command_error(error),
    }
}

async fn get_health(State(state): State<ApiState>, headers: HeaderMap) -> Response {
    if let Err(response) = authorize(&headers, &state.token) {
        return response;
    }

    match state.daemon.health() {
        Ok(response) => Json(response).into_response(),
        Err(error) => command_error(error),
    }
}

async fn get_config(State(state): State<ApiState>, headers: HeaderMap) -> Response {
    if let Err(response) = authorize(&headers, &state.token) {
        return response;
    }

    match state.daemon.config() {
        Ok(response) => Json(response).into_response(),
        Err(error) => command_error(error),
    }
}

async fn get_jobs(State(state): State<ApiState>, headers: HeaderMap) -> Response {
    if let Err(response) = authorize(&headers, &state.token) {
        return response;
    }

    match state.daemon.jobs() {
        Ok(response) => Json(response).into_response(),
        Err(error) => command_error(error),
    }
}

async fn get_job(
    State(state): State<ApiState>,
    headers: HeaderMap,
    payload: Result<Json<JobIdPayload>, axum::extract::rejection::JsonRejection>,
) -> Response {
    if let Err(response) = authorize(&headers, &state.token) {
        return response;
    }
    let payload = match parse_json_payload(payload) {
        Ok(payload) => payload,
        Err(response) => return response,
    };

    match state.daemon.get_job(&payload.job_id) {
        Ok(response) => Json(response).into_response(),
        Err(error) => command_error(error),
    }
}

async fn get_job_logs(
    State(state): State<ApiState>,
    headers: HeaderMap,
    payload: Result<Json<JobIdPayload>, axum::extract::rejection::JsonRejection>,
) -> Response {
    if let Err(response) = authorize(&headers, &state.token) {
        return response;
    }
    let payload = match parse_json_payload(payload) {
        Ok(payload) => payload,
        Err(response) => return response,
    };

    match state.daemon.get_job_logs(&payload.job_id) {
        Ok(response) => Json(response).into_response(),
        Err(error) => command_error(error),
    }
}

async fn update_server_config(
    State(state): State<ApiState>,
    headers: HeaderMap,
    payload: Result<Json<ServerPayload>, axum::extract::rejection::JsonRejection>,
) -> Response {
    if let Err(response) = authorize(&headers, &state.token) {
        return response;
    }
    let payload = match parse_json_payload(payload) {
        Ok(payload) => payload,
        Err(response) => return response,
    };

    match state.daemon.update_server_config(payload.server).await {
        Ok(response) => Json(response).into_response(),
        Err(error) => command_error(error),
    }
}

async fn clear_server_config(State(state): State<ApiState>, headers: HeaderMap) -> Response {
    if let Err(response) = authorize(&headers, &state.token) {
return response; + } + + match state.daemon.clear_server_config().await { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn test_connection( + State(state): State, + headers: HeaderMap, + payload: Result, axum::extract::rejection::JsonRejection>, +) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + let payload = match parse_json_payload(payload) { + Ok(payload) => payload, + Err(response) => return response, + }; + + match state.daemon.test_connection(payload.server).await { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn validate_server_url( + State(state): State, + headers: HeaderMap, + payload: Result, axum::extract::rejection::JsonRejection>, +) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + let payload = match parse_json_payload(payload) { + Ok(payload) => payload, + Err(response) => return response, + }; + + match state.daemon.validate_server_url(payload.url).await { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn create_job( + State(state): State, + headers: HeaderMap, + payload: Result, axum::extract::rejection::JsonRejection>, +) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + let payload = match parse_json_payload(payload) { + Ok(payload) => payload, + Err(response) => return response, + }; + + match state.daemon.create_job(payload.payload).await { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn update_job( + State(state): State, + headers: HeaderMap, + payload: Result, axum::extract::rejection::JsonRejection>, +) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + let payload = match parse_json_payload(payload) { + 
Ok(payload) => payload, + Err(response) => return response, + }; + + match state + .daemon + .update_job(&payload.job_id, payload.payload) + .await + { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn delete_job( + State(state): State, + headers: HeaderMap, + payload: Result, axum::extract::rejection::JsonRejection>, +) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + let payload = match parse_json_payload(payload) { + Ok(payload) => payload, + Err(response) => return response, + }; + + match state.daemon.delete_job(&payload.job_id).await { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn run_job_now( + State(state): State, + headers: HeaderMap, + payload: Result, axum::extract::rejection::JsonRejection>, +) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + let payload = match parse_json_payload(payload) { + Ok(payload) => payload, + Err(response) => return response, + }; + + match state.daemon.run_job_now(&payload.job_id).await { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn enable_job( + State(state): State, + headers: HeaderMap, + payload: Result, axum::extract::rejection::JsonRejection>, +) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + let payload = match parse_json_payload(payload) { + Ok(payload) => payload, + Err(response) => return response, + }; + + match state.daemon.enable_job(&payload.job_id).await { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn disable_job( + State(state): State, + headers: HeaderMap, + payload: Result, axum::extract::rejection::JsonRejection>, +) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + let 
payload = match parse_json_payload(payload) { + Ok(payload) => payload, + Err(response) => return response, + }; + + match state.daemon.disable_job(&payload.job_id).await { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn get_datastreams(State(state): State, headers: HeaderMap) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + + match state.daemon.get_datastreams().await { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn get_datastream_detail( + State(state): State, + headers: HeaderMap, + payload: Result, axum::extract::rejection::JsonRejection>, +) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + let payload = match parse_json_payload(payload) { + Ok(payload) => payload, + Err(response) => return response, + }; + + match state + .daemon + .get_datastream_detail(&payload.datastream_id) + .await + { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn get_csv_preview( + State(state): State, + headers: HeaderMap, + payload: Result, axum::extract::rejection::JsonRejection>, +) -> Response { + if let Err(response) = authorize(&headers, &state.token) { + return response; + } + let payload = match parse_json_payload(payload) { + Ok(payload) => payload, + Err(response) => return response, + }; + + match state.daemon.get_csv_preview(payload.path, payload.rows) { + Ok(response) => Json(response).into_response(), + Err(error) => command_error(error), + } +} + +async fn status_stream( + State(state): State, + Query(query): Query, +) -> Response { + if query.access_token != state.token { + return (StatusCode::UNAUTHORIZED, "Invalid access token.").into_response(); + } + + let stream = WatchStream::new(state.daemon.subscribe_status()).map(|snapshot| { + let payload = 
serde_json::to_string(&snapshot).unwrap_or_else(|_| "{}".to_string());
        Ok::<Event, Infallible>(Event::default().event("status").data(payload))
    });

    Sse::new(stream)
        .keep_alive(KeepAlive::new().interval(std::time::Duration::from_secs(15)))
        .into_response()
}
diff --git a/src/daemon_launcher.rs b/src/daemon_launcher.rs
new file mode 100644
index 0000000..601cd3d
--- /dev/null
+++ b/src/daemon_launcher.rs
@@ -0,0 +1,116 @@
use std::{
    path::PathBuf,
    process::{Command, Stdio},
    time::{Duration, Instant},
};

use tauri::AppHandle;

use crate::{
    daemon_api::{read_connection_info, ConnectionReadError},
    models::DaemonConnectionInfo,
    runtime,
    service_paths::resolve_shared_service_config_dir,
};

const DAEMON_STARTUP_TIMEOUT: Duration = Duration::from_secs(20);
const DAEMON_POLL_INTERVAL: Duration = Duration::from_millis(150);

/// Ensures a reachable daemon, returning its connection info.
///
/// Order of attempts: an already-live published endpoint; on Windows, the
/// managed background service (which must be installed and running); and
/// finally spawning the daemon as a detached child process.
pub async fn ensure_daemon_connection(
    app_handle: &AppHandle,
) -> Result<DaemonConnectionInfo, String> {
    let _ = runtime::resolve_config_dir(app_handle)?;
    let config_dir = resolve_shared_service_config_dir()?;

    if let Some(connection) = read_live_connection(config_dir.clone()).await? {
        return Ok(connection);
    }

    #[cfg(windows)]
    {
        let service_status = crate::service_manager::get_service_status(app_handle)?;
        if service_status.supported {
            if !service_status.installed {
                return Err("Install the background service to continue.".to_string());
            }
            if !service_status.running {
                return Err("Restart the background service to continue.".to_string());
            }

            return wait_for_live_connection(config_dir).await;
        }
    }

    spawn_daemon_process(resolve_service_executable_path()?)?;
    wait_for_live_connection(config_dir).await
}

/// Polls for a live endpoint until `DAEMON_STARTUP_TIMEOUT` elapses.
async fn wait_for_live_connection(config_dir: PathBuf) -> Result<DaemonConnectionInfo, String> {
    let started_at = Instant::now();
    loop {
        if let Some(connection) = read_live_connection(config_dir.clone()).await? {
            return Ok(connection);
        }

        if started_at.elapsed() >= DAEMON_STARTUP_TIMEOUT {
            return Err("The daemon did not become ready in time.".to_string());
        }

        tokio::time::sleep(DAEMON_POLL_INTERVAL).await;
    }
}

/// Reads the published endpoint file and pings it. `Ok(None)` when the file
/// is missing/incomplete or the daemon does not answer; `Err` only on a
/// fatal read error.
async fn read_live_connection(
    config_dir: PathBuf,
) -> Result<Option<DaemonConnectionInfo>, String> {
    let connection = match read_connection_info(config_dir) {
        Ok(connection) => connection,
        Err(ConnectionReadError::MissingEndpoint) | Err(ConnectionReadError::Incomplete) => {
            return Ok(None);
        }
        Err(ConnectionReadError::Fatal(error)) => return Err(error),
    };

    if ping(&connection).await {
        return Ok(Some(connection));
    }

    Ok(None)
}

/// Best-effort authenticated POST to the daemon's ping command; any error
/// (network or non-2xx status) counts as "not live".
async fn ping(connection: &DaemonConnectionInfo) -> bool {
    let client = reqwest::Client::new();
    let Ok(response) = client
        .post(format!(
            "{}/api/commands/ping",
            connection.base_url.trim_end_matches('/')
        ))
        .bearer_auth(&connection.token)
        .header(reqwest::header::CONTENT_TYPE, "application/json")
        .body("{}")
        .send()
        .await
    else {
        return false;
    };

    response.status().is_success()
}

/// Spawns the daemon as a detached child with all stdio closed so it
/// outlives the UI process cleanly.
fn spawn_daemon_process(executable_path: PathBuf) -> Result<(), String> {
    Command::new(executable_path)
        .arg("--service")
        .stdin(Stdio::null())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .spawn()
        .map(|_| ())
        .map_err(|err| err.to_string())
}

/// Resolves the executable to relaunch as the daemon; on Linux the AppImage
/// wrapper is preferred when the APPIMAGE env var is set.
fn resolve_service_executable_path() -> Result<PathBuf, String> {
    #[cfg(target_os = "linux")]
    if let Some(appimage_path) = std::env::var_os("APPIMAGE") {
        return Ok(PathBuf::from(appimage_path));
    }

    std::env::current_exe().map_err(|err| err.to_string())
}
diff --git a/src/daemon_state.rs b/src/daemon_state.rs
new file mode 100644
index 0000000..e3e881b
--- /dev/null
+++ b/src/daemon_state.rs
@@ -0,0 +1,329 @@
use std::{path::PathBuf, sync::Arc};

use tokio::{
    sync::watch,
    task::JoinHandle,
    time::{interval, Duration, MissedTickBehavior},
};

use crate::{
    csv_preview::preview_csv,
    models::{
        ActionResponse, AppBootstrapResponse, AppConfig,
ConnectionTestResponse, + CsvPreviewResponse, DaemonStatusSnapshot, DatastreamDetail, DatastreamSummary, JobDetail, + JobLogsResponse, JobStatusSummary, JobUpsertRequest, LogLevel, ServerConfig, + ServerUrlValidationResponse, + }, + pipeline::PipelineService, + runtime::AppState, +}; + +const STATUS_POLL_INTERVAL: Duration = Duration::from_millis(750); + +#[derive(Clone)] +pub struct DaemonState { + inner: Arc, +} + +struct DaemonStateInner { + app: AppState, + pipeline: PipelineService, + status_tx: watch::Sender, +} + +impl DaemonState { + pub async fn new(config_dir: PathBuf) -> Result { + let app = AppState::new(config_dir)?; + app.initialize()?; + app.config_store().clear_all_running_jobs()?; + + let pipeline = PipelineService::new(app.config_store_handle(), app.hydroserver_handle()); + pipeline.initialize().await?; + + let snapshot = app.status_snapshot()?; + let (status_tx, _) = watch::channel(snapshot); + + Ok(Self { + inner: Arc::new(DaemonStateInner { + app, + pipeline, + status_tx, + }), + }) + } + + pub fn subscribe_status(&self) -> watch::Receiver { + self.inner.status_tx.subscribe() + } + + pub fn clear_all_running_jobs(&self) -> Result<(), String> { + self.inner.app.config_store().clear_all_running_jobs() + } + + pub fn start_status_monitor(&self) -> JoinHandle<()> { + let state = self.clone(); + tokio::spawn(async move { + let mut ticker = interval(STATUS_POLL_INTERVAL); + ticker.set_missed_tick_behavior(MissedTickBehavior::Delay); + + loop { + ticker.tick().await; + let _ = state.publish_status(); + } + }) + } + + pub async fn shutdown(&self) { + self.inner.pipeline.shutdown().await; + } + + pub fn bootstrap(&self) -> Result { + self.inner.app.bootstrap() + } + + pub fn health(&self) -> Result { + self.inner.app.health() + } + + pub fn config(&self) -> Result { + self.inner.app.config() + } + + pub fn jobs(&self) -> Result, String> { + self.inner + .app + .config_store() + .list_jobs()? 
+ .iter() + .map(|job| self.inner.app.build_job_summary(job)) + .collect() + } + + pub fn get_job(&self, job_id: &str) -> Result { + let Some(job) = self.inner.app.config_store().get_job(job_id)? else { + return Err("That job could not be found.".to_string()); + }; + self.inner.app.build_job_detail(&job) + } + + pub fn get_job_logs(&self, job_id: &str) -> Result { + if self.inner.app.config_store().get_job(job_id)?.is_none() { + return Err("That job could not be found.".to_string()); + } + + Ok(JobLogsResponse { + entries: self.inner.app.config_store().logs_for(job_id, 200)?, + log_file_path: self + .inner + .app + .config_store() + .job_log_file_path(job_id)? + .map(|path| path.to_string_lossy().into_owned()), + }) + } + + pub async fn update_server_config(&self, server: ServerConfig) -> Result { + let normalized = server.validated_for_connection()?; + let connection = self + .inner + .app + .hydroserver() + .test_connection(&normalized) + .await; + if !connection.ok { + return Err(connection.message); + } + + let workspace_id = connection.workspace_id.unwrap_or_default(); + let workspace_name = connection + .workspace_name + .clone() + .unwrap_or_else(|| normalized.workspace_name.clone()); + + let config = self.inner.app.config_store().set_server( + ServerConfig { + workspace_id, + workspace_name, + ..normalized + }, + connection.workspace_name.as_deref().unwrap_or_default(), + )?; + + self.inner.pipeline.reload().await?; + self.publish_status()?; + Ok(config) + } + + pub async fn clear_server_config(&self) -> Result { + let config = self.inner.app.config_store().clear_server()?; + self.inner.pipeline.reload().await?; + self.publish_status()?; + Ok(config) + } + + pub async fn test_connection( + &self, + server: ServerConfig, + ) -> Result { + Ok(self + .inner + .app + .hydroserver() + .test_connection(&server.normalized()) + .await) + } + + pub async fn validate_server_url( + &self, + url: String, + ) -> Result { + 
Ok(self.inner.app.hydroserver().validate_url(&url).await) + } + + pub async fn create_job(&self, payload: JobUpsertRequest) -> Result { + let job = self.inner.app.config_store().create_job(payload)?; + let _ = self + .inner + .app + .append_log(&job.id, "Job created", LogLevel::Info); + self.inner.pipeline.reload().await?; + self.publish_status()?; + self.inner.app.build_job_detail(&job) + } + + pub async fn update_job( + &self, + job_id: &str, + payload: JobUpsertRequest, + ) -> Result { + let Some(job) = self.inner.app.config_store().update_job(job_id, payload)? else { + return Err("That job could not be found.".to_string()); + }; + let _ = self + .inner + .app + .append_log(&job.id, "Job updated", LogLevel::Info); + self.inner.pipeline.reload().await?; + self.publish_status()?; + self.inner.app.build_job_detail(&job) + } + + pub async fn delete_job(&self, job_id: &str) -> Result { + if !self.inner.app.config_store().delete_job(job_id)? { + return Err("That job could not be found.".to_string()); + } + self.inner.app.config_store().delete_job_runtime(job_id)?; + self.inner.pipeline.reload().await?; + self.publish_status()?; + Ok(ActionResponse { + ok: true, + message: "Job deleted.".to_string(), + }) + } + + pub async fn run_job_now(&self, job_id: &str) -> Result { + let job = self + .inner + .app + .config_store() + .get_job(job_id)? + .ok_or_else(|| "That job could not be found.".to_string())?; + + if !job.enabled { + return Err("Enable this data source before requesting a manual run.".to_string()); + } + + self.inner.pipeline.run_job_now(job_id).await?; + self.inner + .app + .append_log(job_id, "Manual run requested", LogLevel::Info)?; + self.publish_status()?; + + Ok(ActionResponse { + ok: true, + message: "Run requested.".to_string(), + }) + } + + pub async fn enable_job(&self, job_id: &str) -> Result { + let Some(job) = self + .inner + .app + .config_store() + .set_job_enabled(job_id, true)? 
+ else { + return Err("That job could not be found.".to_string()); + }; + let _ = self + .inner + .app + .append_log(&job.id, "Job enabled", LogLevel::Info); + self.inner.pipeline.reload().await?; + self.publish_status()?; + Ok(ActionResponse { + ok: true, + message: "Job enabled.".to_string(), + }) + } + + pub async fn disable_job(&self, job_id: &str) -> Result { + let Some(job) = self + .inner + .app + .config_store() + .set_job_enabled(job_id, false)? + else { + return Err("That job could not be found.".to_string()); + }; + let _ = self + .inner + .app + .append_log(&job.id, "Job disabled", LogLevel::Warning); + self.inner.pipeline.reload().await?; + self.publish_status()?; + Ok(ActionResponse { + ok: true, + message: "Job disabled.".to_string(), + }) + } + + pub async fn get_datastreams(&self) -> Result, String> { + let config = self.inner.app.config()?; + self.inner + .app + .hydroserver() + .list_datastreams(&config.server) + .await + .map_err(|_| "Couldn't load datastreams from HydroServer right now.".to_string()) + } + + pub async fn get_datastream_detail( + &self, + datastream_id: &str, + ) -> Result { + let config = self.inner.app.config()?; + self.inner + .app + .hydroserver() + .get_datastream_detail(&config.server, datastream_id) + .await + .map_err(|_| { + "Couldn't load datastream metadata from HydroServer right now.".to_string() + }) + } + + pub fn get_csv_preview( + &self, + path: String, + rows: Option, + ) -> Result { + let rows = rows.unwrap_or(100).clamp(1, 500); + preview_csv(&path, rows) + } + + pub fn publish_status(&self) -> Result<(), String> { + let snapshot = self.inner.app.status_snapshot()?; + let _ = self.inner.status_tx.send(snapshot); + Ok(()) + } +} diff --git a/src/file_watcher.rs b/src/file_watcher.rs new file mode 100644 index 0000000..fca6904 --- /dev/null +++ b/src/file_watcher.rs @@ -0,0 +1,95 @@ +use std::{ + collections::HashSet, + path::{Path, PathBuf}, + time::Duration, +}; + +use 
notify_debouncer_mini::notify::{RecommendedWatcher, RecursiveMode}; +use notify_debouncer_mini::{new_debouncer, DebounceEventResult, DebouncedEventKind, Debouncer}; +use tokio::sync::mpsc; +use tracing::{error, warn}; + +const WATCH_DEBOUNCE_WINDOW: Duration = Duration::from_millis(500); + +pub struct FilesystemWatcher { + _debouncer: Debouncer, +} + +impl FilesystemWatcher { + pub fn start( + watched_files: impl IntoIterator, + event_tx: mpsc::UnboundedSender, + ) -> Result, String> { + let watched_files: HashSet = watched_files.into_iter().collect(); + if watched_files.is_empty() { + return Ok(None); + } + + let watched_dirs = watched_files + .iter() + .filter_map(|path| path.parent().map(Path::to_path_buf)) + .collect::>(); + let watched_files_for_handler = watched_files.clone(); + + let mut debouncer = new_debouncer(WATCH_DEBOUNCE_WINDOW, move |result| { + handle_debounced_events(result, &watched_files_for_handler, &event_tx); + }) + .map_err(|err| err.to_string())?; + + for dir in watched_dirs { + if let Err(err) = debouncer.watcher().watch(&dir, RecursiveMode::NonRecursive) { + warn!( + dir = %dir.display(), + error = %err, + "couldn't watch directory; files in this path won't trigger until it becomes available" + ); + } + } + + Ok(Some(Self { + _debouncer: debouncer, + })) + } +} + +fn handle_debounced_events( + result: DebounceEventResult, + watched_files: &HashSet, + event_tx: &mpsc::UnboundedSender, +) { + match result { + Ok(events) => { + let mut changed = HashSet::new(); + for event in events { + if matches!( + event.kind, + DebouncedEventKind::Any | DebouncedEventKind::AnyContinuous + ) { + if let Some(path) = canonicalize_if_possible(&event.path) { + if watched_files.contains(&path) { + changed.insert(path); + } + } + } + } + + for path in changed { + if event_tx.send(path).is_err() { + warn!("filesystem watcher event dropped because the pipeline is shutting down"); + break; + } + } + } + Err(error) => error!(?error, "filesystem watcher reported an 
error"), + } +} + +fn canonicalize_if_possible(path: &Path) -> Option { + path.canonicalize() + .ok() + .or_else(|| Some(path.to_path_buf())) +} + +#[cfg(test)] +#[path = "tests/file_watcher.rs"] +mod tests; diff --git a/src/hydroserver.rs b/src/hydroserver.rs new file mode 100644 index 0000000..887c6a3 --- /dev/null +++ b/src/hydroserver.rs @@ -0,0 +1,1453 @@ +use std::{collections::HashMap, sync::Mutex, time::Duration}; + +use chrono::{DateTime, Utc}; +use reqwest::{ + header::{HeaderMap, HeaderValue, ACCEPT, AUTHORIZATION, CONTENT_TYPE}, + Client, Method, Response, StatusCode, +}; +use serde_json::{json, Value}; + +use crate::models::{ + normalize_url, AuthType, ConnectionState, ConnectionTestResponse, DatastreamDetail, + DatastreamObservedPropertyDetail, DatastreamProcessingLevelDetail, DatastreamSensorDetail, + DatastreamSummary, DatastreamThingDetail, DatastreamThingLocationDetail, DatastreamUnitDetail, + ServerConfig, ServerUrlValidationResponse, +}; + +const AUTH_ROUTE: &str = "/api/auth"; +const BASE_ROUTE: &str = "/api/data"; +const DATASTREAM_PAGE_SIZE: usize = 1000; +const DATASTREAM_CACHE_TTL_SECONDS: i64 = 300; + +#[derive(Debug, Clone)] +pub struct ObservationPayloadRow { + pub phenomenon_time: DateTime, + pub result: Value, +} + +pub struct HydroServerService { + http: Client, + datastream_cache: Mutex, Vec)>>, +} + +impl HydroServerService { + pub fn new() -> Result { + let http = Client::builder() + .timeout(Duration::from_secs(60)) + .build() + .map_err(|err| err.to_string())?; + + Ok(Self { + http, + datastream_cache: Mutex::new(HashMap::new()), + }) + } + + pub async fn validate_url(&self, url: &str) -> ServerUrlValidationResponse { + let normalized_url = normalize_url(url); + if normalized_url.is_empty() { + return ServerUrlValidationResponse { + ok: false, + message: "Enter the HydroServer URL.".to_string(), + instance_name: None, + }; + } + + let auth_probe_url = format!("{normalized_url}{AUTH_ROUTE}/app/session"); + let data_probe_url = 
format!("{normalized_url}{BASE_ROUTE}/workspaces");

        let auth_response = self
            .http
            .get(&auth_probe_url)
            .header(ACCEPT, "application/json")
            .send()
            .await;

        // BUG FIX: the original used
        //   `if let Ok(response) = auth_response { … } else if let Err(err) = auth_response { … }`
        // which moves `auth_response` into the first by-value pattern and then
        // reads it again in the `else if let` — a use-after-move (E0382) that
        // the borrow checker rejects. A single `match` inspects the result once.
        match auth_response {
            Ok(response) => {
                if looks_like_hydroserver_auth_response(response).await {
                    let instance_name = instance_name(&normalized_url);
                    return ServerUrlValidationResponse {
                        ok: true,
                        message: format!("HydroServer API detected at {instance_name}."),
                        instance_name: Some(instance_name),
                    };
                }
                // Auth probe answered but didn't look like HydroServer;
                // fall through to the data-route probe below.
            }
            Err(err) => {
                if err.is_connect() || err.is_timeout() {
                    // Unreachable host: no point probing the data route too.
                    return ServerUrlValidationResponse {
                        ok: false,
                        message: "Couldn't reach that URL. Check the server URL and try again."
                            .to_string(),
                        instance_name: None,
                    };
                }
                // Other transport errors: still try the data route.
            }
        }

        // Second probe: the data API workspace listing.
        match self
            .http
            .get(&data_probe_url)
            .header(ACCEPT, "application/json")
            .send()
            .await
        {
            Ok(response) => {
                if looks_like_hydroserver_data_response(response).await {
                    let instance_name = instance_name(&normalized_url);
                    ServerUrlValidationResponse {
                        ok: true,
                        message: format!("HydroServer API detected at {instance_name}."),
                        instance_name: Some(instance_name),
                    }
                } else {
                    ServerUrlValidationResponse {
                        ok: false,
                        message: "That URL responded, but it doesn't look like a HydroServer instance exposing the expected API.".to_string(),
                        instance_name: None,
                    }
                }
            }
            Err(err) if err.is_connect() || err.is_timeout() => ServerUrlValidationResponse {
                ok: false,
                message: "Couldn't reach that URL. Check the server URL and try again.".to_string(),
                instance_name: None,
            },
            Err(_) => ServerUrlValidationResponse {
                ok: false,
                message: "Couldn't validate that HydroServer URL right now.".to_string(),
                instance_name: None,
            },
        }
    }

    /// Tests the saved server configuration end-to-end: reaches the server,
    /// authenticates, lists associated workspaces, and resolves the workspace
    /// the loader should target. Never returns `Err`; all failure modes are
    /// encoded in the `ConnectionTestResponse` so the UI can show field-level
    /// validation (`invalid_field`).
    pub async fn test_connection(&self, server: &ServerConfig) -> ConnectionTestResponse {
        if !server.is_configured() {
            return ConnectionTestResponse {
                ok: false,
                state: ConnectionState::NotConfigured,
                message: "Enter the HydroServer URL and a valid set of credentials.".to_string(),
                invalid_field: None,
                instance_name: None,
                workspace_id: None,
                workspace_name: None,
                workspace_count: 0,
                datastream_count: 0,
                permissions_ok: false,
            };
        }

        let mut session = HydroServerSession::new(self.http.clone(), server.clone().normalized());
        match session.associated_workspaces().await {
            Ok(workspaces) => {
                let workspace_count = workspaces.len() as u32;
                // Workspace resolution differs by auth scheme: API keys are
                // bound to workspaces, user/pass accounts pick by name.
                let selected_workspace = match server.auth_type {
                    AuthType::Apikey => resolve_api_key_workspace(server, &workspaces),
                    AuthType::Userpass => resolve_userpass_workspace(server, &workspaces),
                };

                match selected_workspace {
                    Ok(Some((workspace_id, workspace_name))) => {
                        let instance_name = instance_name(&server.url);
                        ConnectionTestResponse {
                            ok: true,
                            state: ConnectionState::Connected,
                            message: format!("Connected to {instance_name}."),
                            invalid_field: None,
                            instance_name: Some(instance_name),
                            workspace_id: Some(workspace_id),
                            workspace_name: Some(workspace_name),
                            workspace_count,
                            datastream_count: 0,
                            permissions_ok: true,
                        }
                    }
                    // `Ok(None)` only arises on the API-key path (no
                    // accessible workspace attached to the key).
                    Ok(None) => ConnectionTestResponse {
                        ok: false,
                        state: ConnectionState::Error,
                        message: "That API key is invalid or is not attached to any accessible workspace. Check the API key permissions and try again.".to_string(),
                        invalid_field: Some("api_key".to_string()),
                        instance_name: Some(instance_name(&server.url)),
                        workspace_id: None,
                        workspace_name: None,
                        workspace_count,
                        datastream_count: 0,
                        permissions_ok: false,
                    },
                    Err((invalid_field, message)) => ConnectionTestResponse {
                        ok: false,
                        state: ConnectionState::Error,
                        message,
                        invalid_field: Some(invalid_field.to_string()),
                        instance_name: Some(instance_name(&server.url)),
                        workspace_id: None,
                        workspace_name: None,
                        workspace_count,
                        datastream_count: 0,
                        permissions_ok: false,
                    },
                }
            }
            Err(RequestError::Connection) | Err(RequestError::Timeout) => ConnectionTestResponse {
                ok: false,
                state: ConnectionState::Error,
                message: "Couldn't reach HydroServer. Check the server URL and try again.".to_string(),
                invalid_field: Some("url".to_string()),
                instance_name: None,
                workspace_id: None,
                workspace_name: None,
                workspace_count: 0,
                datastream_count: 0,
                permissions_ok: false,
            },
            Err(RequestError::Http {
                status: Some(StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN),
                ..
            }) => ConnectionTestResponse {
                ok: false,
                state: ConnectionState::Error,
                message: "These credentials are invalid or do not have the permissions the loader needs. Make sure they can access workspaces, datastreams, and orchestration systems.".to_string(),
                invalid_field: Some(match server.auth_type {
                    AuthType::Apikey => "api_key".to_string(),
                    AuthType::Userpass => "username".to_string(),
                }),
                instance_name: None,
                workspace_id: None,
                workspace_name: None,
                workspace_count: 0,
                datastream_count: 0,
                permissions_ok: false,
            },
            Err(RequestError::Http { .. }) => ConnectionTestResponse {
                ok: false,
                state: ConnectionState::Error,
                message: "HydroServer returned an error while testing the connection. Try again in a moment.".to_string(),
                invalid_field: None,
                instance_name: None,
                workspace_id: None,
                workspace_name: None,
                workspace_count: 0,
                datastream_count: 0,
                permissions_ok: false,
            },
            Err(RequestError::Other(_)) => ConnectionTestResponse {
                ok: false,
                state: ConnectionState::Error,
                message: "Couldn't complete the HydroServer connection test.".to_string(),
                invalid_field: None,
                instance_name: None,
                workspace_id: None,
                workspace_name: None,
                workspace_count: 0,
                datastream_count: 0,
                permissions_ok: false,
            },
        }
    }

    /// Lists the datastreams of the configured workspace, consulting an
    /// in-process TTL cache first and trying progressively cheaper server
    /// endpoints: visualization bootstrap, then expanded listing, then the
    /// plain collection plus per-entity lookups.
    pub async fn list_datastreams(
        &self,
        server: &ServerConfig,
    ) -> Result<Vec<DatastreamSummary>, String> {
        if !server.is_configured() {
            return Ok(Vec::new());
        }

        let normalized = server.clone().normalized();
        let mut session = HydroServerSession::new(self.http.clone(), normalized.clone());
        // Fall back to the first associated workspace when none was saved.
        let workspace_id = if normalized.workspace_id.is_empty() {
            session
                .associated_workspace()
                .await
                .map_err(|err| err.to_string())?
                .0
                .unwrap_or_default()
        } else {
            normalized.workspace_id.clone()
        };

        if workspace_id.is_empty() {
            return Ok(Vec::new());
        }

        let cache_key = datastream_cache_key(&normalized, &workspace_id);
        if let Some(cached) = self.cached_datastreams(&cache_key) {
            return Ok(cached);
        }

        if let Some(datastreams) = self
            .list_datastreams_from_bootstrap(&mut session, &workspace_id)
            .await?
        {
            self.set_cached_datastreams(&cache_key, datastreams.clone());
            return Ok(datastreams);
        }

        if let Some(datastreams) = self
            .list_datastreams_expanded(&mut session, &workspace_id)
            .await?
+ { + self.set_cached_datastreams(&cache_key, datastreams.clone()); + return Ok(datastreams); + } + + let datastreams = session + .fetch_all_collection( + &format!("{BASE_ROUTE}/datastreams"), + &[("workspace_id", workspace_id.clone())], + ) + .await + .map_err(|err| err.to_string())?; + + if datastreams.is_empty() { + return Ok(Vec::new()); + } + + let things_by_id = session + .fetch_collection_lookup(&format!("{BASE_ROUTE}/things"), &workspace_id) + .await + .unwrap_or_default(); + let observed_properties_by_id = session + .fetch_collection_lookup(&format!("{BASE_ROUTE}/observed-properties"), &workspace_id) + .await + .unwrap_or_default(); + let processing_levels_by_id = session + .fetch_collection_lookup(&format!("{BASE_ROUTE}/processing-levels"), &workspace_id) + .await + .unwrap_or_default(); + let units_by_id = session + .fetch_collection_lookup(&format!("{BASE_ROUTE}/units"), &workspace_id) + .await + .unwrap_or_default(); + let sensors_by_id = session + .fetch_collection_lookup(&format!("{BASE_ROUTE}/sensors"), &workspace_id) + .await + .unwrap_or_default(); + + let summaries = datastreams + .iter() + .map(|item| { + datastream_to_summary( + item, + &things_by_id, + &observed_properties_by_id, + &processing_levels_by_id, + &units_by_id, + &sensors_by_id, + ) + }) + .collect::>(); + + self.set_cached_datastreams(&cache_key, summaries.clone()); + Ok(summaries) + } + + pub async fn get_datastream_detail( + &self, + server: &ServerConfig, + datastream_id: &str, + ) -> Result { + if !server.is_configured() { + return Err("Connect to HydroServer before loading datastream metadata.".to_string()); + } + + let normalized = server.clone().normalized(); + let mut session = HydroServerSession::new(self.http.clone(), normalized.clone()); + let workspace_id = if normalized.workspace_id.is_empty() { + session + .associated_workspace() + .await + .map_err(|err| err.to_string())? 
+ .0 + .unwrap_or_default() + } else { + normalized.workspace_id.clone() + }; + + let payload = session + .request_json( + Method::GET, + &format!("{BASE_ROUTE}/datastreams/{datastream_id}"), + &[ + ("workspace_id", workspace_id), + ("expand_related", "true".to_string()), + ], + None, + ) + .await + .map_err(|err| err.to_string())?; + + Ok(expanded_datastream_to_detail(&payload)) + } + + pub(crate) async fn post_observations_batch( + &self, + server: &ServerConfig, + datastream_id: &str, + observations: &[ObservationPayloadRow], + ) -> Result<(), RequestError> { + if observations.is_empty() { + return Ok(()); + } + + let mut session = HydroServerSession::new(self.http.clone(), server.clone().normalized()); + // The earlier Rust port posted ["timestamp", "value"], which does not match the + // HydroServer bulk observation schema. The API expects SensorThings field names. + let body = json!({ + "fields": ["phenomenonTime", "result"], + "data": observations + .iter() + .map(|row| json!([row.phenomenon_time.to_rfc3339(), row.result])) + .collect::>(), + }); + + session + .request_void( + Method::POST, + &format!("{BASE_ROUTE}/datastreams/{datastream_id}/observations/bulk-create"), + &[("mode", "insert".to_string())], + Some(body), + ) + .await + } + + async fn list_datastreams_from_bootstrap( + &self, + session: &mut HydroServerSession, + workspace_id: &str, + ) -> Result>, String> { + let payload = match session + .request_json( + Method::GET, + &format!("{BASE_ROUTE}/datastreams/visualization-bootstrap"), + &[("workspace_id", workspace_id.to_string())], + None, + ) + .await + { + Ok(payload) => payload, + Err(_) => return Ok(None), + }; + + let Some(datastreams) = payload.get("datastreams").and_then(Value::as_array) else { + return Ok(None); + }; + let Some(things) = payload.get("things").and_then(Value::as_array) else { + return Ok(None); + }; + let Some(observed_properties) = payload + .get("observed_properties") + .or_else(|| payload.get("observedProperties")) + 
.and_then(Value::as_array) + else { + return Ok(None); + }; + let Some(processing_levels) = payload + .get("processing_levels") + .or_else(|| payload.get("processingLevels")) + .and_then(Value::as_array) + else { + return Ok(None); + }; + + let units_by_id = match session + .fetch_collection_lookup(&format!("{BASE_ROUTE}/units"), workspace_id) + .await + { + Ok(units) => units, + Err(_) => return Ok(None), + }; + + let things_by_id = map_items_by_id(things); + let observed_properties_by_id = map_items_by_id(observed_properties); + let processing_levels_by_id = map_items_by_id(processing_levels); + + Ok(Some( + datastreams + .iter() + .map(|datastream| { + let thing_id = string_value(datastream, &["thing_id", "thingId"]); + let observed_property_id = + string_value(datastream, &["observed_property_id", "observedPropertyId"]); + let processing_level_id = + string_value(datastream, &["processing_level_id", "processingLevelId"]); + let unit_id = string_value(datastream, &["unit_id", "unitId"]); + + DatastreamSummary { + id: string_value(datastream, &["id", "uid"]).unwrap_or_default(), + name: string_value(datastream, &["name"]) + .unwrap_or_else(|| "Unnamed datastream".to_string()), + thing_id: thing_id.clone().unwrap_or_default(), + thing_name: string_value_from_map(&things_by_id, &thing_id, &["name"]), + observed_property_name: string_value_from_map( + &observed_properties_by_id, + &observed_property_id, + &["name"], + ), + processing_level_definition: string_value_from_map( + &processing_levels_by_id, + &processing_level_id, + &["definition"], + ), + unit_name: string_value_from_map(&units_by_id, &unit_id, &["name"]), + unit_symbol: string_value_from_map(&units_by_id, &unit_id, &["symbol"]), + sampled_medium: String::new(), + sensor_name: String::new(), + result_type: String::new(), + } + }) + .collect(), + )) + } + + async fn list_datastreams_expanded( + &self, + session: &mut HydroServerSession, + workspace_id: &str, + ) -> Result>, String> { + let mut page = 
1_u32; + let mut datastreams = Vec::new(); + + loop { + let response = match session + .request_response( + Method::GET, + &format!("{BASE_ROUTE}/datastreams"), + &[ + ("workspace_id", workspace_id.to_string()), + ("expand_related", "true".to_string()), + ("page", page.to_string()), + ("page_size", DATASTREAM_PAGE_SIZE.to_string()), + ], + None, + ) + .await + { + Ok(response) => response, + Err(_) => return Ok(None), + }; + + let headers = response.headers().clone(); + let payload = response + .json::() + .await + .map_err(|err| err.to_string())?; + let Some(items) = payload.as_array() else { + return Ok(None); + }; + + if items.is_empty() { + break; + } + + datastreams.extend(items.iter().map(expanded_datastream_to_summary)); + + if let Some(total_pages) = header_int(&headers, "X-Total-Pages") { + if page >= total_pages { + break; + } + } else if items.len() < DATASTREAM_PAGE_SIZE { + break; + } + + page += 1; + } + + Ok(Some(datastreams)) + } + + fn cached_datastreams(&self, cache_key: &str) -> Option> { + let mut cache = self.datastream_cache.lock().ok()?; + let (cached_at, datastreams) = cache.get(cache_key)?.clone(); + if Utc::now().signed_duration_since(cached_at).num_seconds() > DATASTREAM_CACHE_TTL_SECONDS + { + cache.remove(cache_key); + return None; + } + Some(datastreams) + } + + fn set_cached_datastreams(&self, cache_key: &str, datastreams: Vec) { + if let Ok(mut cache) = self.datastream_cache.lock() { + cache.insert(cache_key.to_string(), (Utc::now(), datastreams)); + } + } +} + +struct HydroServerSession { + http: Client, + server: ServerConfig, + bearer_token: Option, +} + +impl HydroServerSession { + fn new(http: Client, server: ServerConfig) -> Self { + Self { + http, + server, + bearer_token: None, + } + } + + async fn associated_workspace( + &mut self, + ) -> Result<(Option, Option, u32), RequestError> { + let workspaces = self.associated_workspaces().await?; + let workspace_count = workspaces.len() as u32; + let first_workspace = 
workspaces.first(); + Ok(( + first_workspace.map(|(id, _)| id.clone()), + first_workspace.map(|(_, name)| name.clone()), + workspace_count, + )) + } + + async fn associated_workspaces(&mut self) -> Result, RequestError> { + let items = self + .fetch_all_collection( + &format!("{BASE_ROUTE}/workspaces"), + &[("is_associated", "true".to_string())], + ) + .await?; + + Ok(items + .into_iter() + .filter_map(|item| { + let id = string_value(&item, &["id", "uid"])?; + let name = string_value(&item, &["name"])?; + Some((id, name)) + }) + .collect()) + } + + async fn fetch_collection_lookup( + &mut self, + path: &str, + workspace_id: &str, + ) -> Result, RequestError> { + let items = self + .fetch_all_collection(path, &[("workspace_id", workspace_id.to_string())]) + .await?; + Ok(items + .into_iter() + .filter_map(|item| { + let id = string_value(&item, &["id", "uid"])?; + Some((id, item)) + }) + .collect()) + } + + async fn fetch_all_collection( + &mut self, + path: &str, + params: &[(&str, String)], + ) -> Result, RequestError> { + let mut page = 1_u32; + let mut items = Vec::new(); + + loop { + let mut page_params = params.to_vec(); + page_params.push(("page", page.to_string())); + page_params.push(("page_size", DATASTREAM_PAGE_SIZE.to_string())); + let response = self + .request_response(Method::GET, path, &page_params, None) + .await?; + let headers = response.headers().clone(); + let payload = response + .json::() + .await + .map_err(|err| RequestError::Other(err.to_string()))?; + let Some(page_items) = payload.as_array() else { + return Ok(Vec::new()); + }; + + if page_items.is_empty() { + break; + } + items.extend(page_items.iter().cloned()); + + if let Some(total_pages) = header_int(&headers, "X-Total-Pages") { + if page >= total_pages { + break; + } + } else if page_items.len() < DATASTREAM_PAGE_SIZE { + break; + } + + page += 1; + } + + Ok(items) + } + + async fn request_json( + &mut self, + method: Method, + path: &str, + params: &[(&str, String)], + body: 
Option, + ) -> Result { + let response = self.request_response(method, path, params, body).await?; + if response.status() == StatusCode::NO_CONTENT { + return Ok(Value::Null); + } + response + .json::() + .await + .map_err(|err| RequestError::Other(err.to_string())) + } + + async fn request_void( + &mut self, + method: Method, + path: &str, + params: &[(&str, String)], + body: Option, + ) -> Result<(), RequestError> { + self.request_response(method, path, params, body) + .await + .map(|_| ()) + } + + async fn request_response( + &mut self, + method: Method, + path: &str, + params: &[(&str, String)], + body: Option, + ) -> Result { + let url = build_url(&self.server.url, path); + + for attempt in 0..2 { + let mut request = self + .http + .request(method.clone(), &url) + .header(ACCEPT, "application/json"); + + if !params.is_empty() { + request = request.query(params); + } + + if let Some(payload) = body.clone() { + request = request + .header(CONTENT_TYPE, "application/json") + .json(&payload); + } + + request = self.apply_auth(request).await?; + + let response = match request.send().await { + Ok(response) => response, + Err(err) if err.is_connect() => return Err(RequestError::Connection), + Err(err) if err.is_timeout() => return Err(RequestError::Timeout), + Err(err) => return Err(RequestError::Other(err.to_string())), + }; + + if response.status().is_success() { + return Ok(response); + } + + if attempt == 0 + && self.server.auth_type == AuthType::Userpass + && matches!( + response.status(), + StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN + ) + { + self.bearer_token = None; + continue; + } + + let status = response.status(); + let message = response_error_message(response).await; + return Err(RequestError::Http { + status: Some(status), + message, + }); + } + + Err(RequestError::Other( + "HydroServer request failed after retry.".to_string(), + )) + } + + async fn apply_auth( + &mut self, + request: reqwest::RequestBuilder, + ) -> Result { + match 
self.server.auth_type { + AuthType::Apikey => Ok(request.header("X-API-Key", self.server.api_key.clone())), + AuthType::Userpass => { + let token = self.session_token().await?; + Ok(request.header( + AUTHORIZATION, + HeaderValue::from_str(&format!("Bearer {token}")) + .map_err(|err| RequestError::Other(err.to_string()))?, + )) + } + } + } + + async fn session_token(&mut self) -> Result { + if let Some(token) = &self.bearer_token { + return Ok(token.clone()); + } + + let payload = json!({ + "email": self.server.username, + "password": self.server.password, + }); + + let response = self + .http + .post(build_url( + &self.server.url, + &format!("{AUTH_ROUTE}/app/session"), + )) + .header(CONTENT_TYPE, "application/json") + .header(ACCEPT, "application/json") + .json(&payload) + .send() + .await + .map_err(|err| { + if err.is_connect() { + RequestError::Connection + } else if err.is_timeout() { + RequestError::Timeout + } else { + RequestError::Other(err.to_string()) + } + })?; + + if !response.status().is_success() { + let status = response.status(); + let message = response_error_message(response).await; + return Err(RequestError::Http { + status: Some(status), + message, + }); + } + + let payload = response + .json::() + .await + .map_err(|err| RequestError::Other(err.to_string()))?; + let token = payload + .get("meta") + .and_then(|meta| meta.get("session_token")) + .and_then(Value::as_str) + .ok_or_else(|| { + RequestError::Other("Authentication failed: No access token returned.".to_string()) + })? 
+ .to_string(); + + self.bearer_token = Some(token.clone()); + Ok(token) + } +} + +#[derive(Debug)] +pub(crate) enum RequestError { + Connection, + Timeout, + Http { + status: Option, + message: String, + }, + Other(String), +} + +impl std::fmt::Display for RequestError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + RequestError::Connection => write!(f, "Couldn't reach HydroServer."), + RequestError::Timeout => write!(f, "HydroServer request timed out."), + RequestError::Http { message, .. } | RequestError::Other(message) => { + write!(f, "{message}") + } + } + } +} + +impl RequestError { + pub(crate) fn is_retryable(&self) -> bool { + match self { + Self::Connection | Self::Timeout => true, + Self::Http { + status: Some(status), + .. + } => status.is_server_error(), + Self::Http { .. } | Self::Other(_) => false, + } + } + + pub(crate) fn is_conflict(&self) -> bool { + matches!( + self, + Self::Http { + status: Some(status), + .. + } if *status == StatusCode::CONFLICT + ) + } +} + +fn resolve_api_key_workspace( + server: &ServerConfig, + workspaces: &[(String, String)], +) -> Result, (&'static str, String)> { + if workspaces.is_empty() { + return Ok(None); + } + + // If the user previously saved a workspace, require that the current API + // key still has access to it. Without this check, rotating the key to one + // attached to a different workspace would silently redirect uploads to + // that other workspace (bug_008). + let requested_workspace_id = server.workspace_id.trim(); + if !requested_workspace_id.is_empty() { + return workspaces + .iter() + .find(|(id, _)| id == requested_workspace_id) + .cloned() + .map(Some) + .ok_or_else(|| { + ( + "api_key", + "This API key does not have access to the saved workspace. The key may have been rotated or its permissions changed. Re-select a workspace this key can access.".to_string(), + ) + }); + } + + // No saved workspace — this is the initial connection. 
Pick the first + // accessible one so the UI can offer it as the default. + Ok(workspaces.first().cloned()) +} + +fn resolve_userpass_workspace( + server: &ServerConfig, + workspaces: &[(String, String)], +) -> Result, (&'static str, String)> { + if workspaces.is_empty() { + return Err(( + "workspace_name", + "These credentials are valid, but this account does not have edit access to any related workspaces.".to_string(), + )); + } + + let requested_workspace_id = server.workspace_id.trim(); + if !requested_workspace_id.is_empty() { + return workspaces + .iter() + .find(|(id, _)| id == requested_workspace_id) + .cloned() + .map(Some) + .ok_or_else(|| { + ( + "workspace_name", + "The saved workspace is no longer available to this account. Enter one of your related workspace names and try again.".to_string(), + ) + }); + } + + let requested_workspace_name = server.workspace_name.trim(); + if requested_workspace_name.is_empty() { + return Err(( + "workspace_name", + "Please enter a workspace name.".to_string(), + )); + } + + if let Some(workspace) = workspaces + .iter() + .find(|(_, name)| name.trim() == requested_workspace_name) + { + return Ok(Some(workspace.clone())); + } + + if let Some(workspace) = workspaces + .iter() + .find(|(_, name)| name.trim().eq_ignore_ascii_case(requested_workspace_name)) + { + return Ok(Some(workspace.clone())); + } + + Err(( + "workspace_name", + format!( + "No related workspace named \"{requested_workspace_name}\" was found for this account. Check the workspace name and try again." 
+ ), + )) +} + +fn build_url(base_url: &str, path: &str) -> String { + format!( + "{}/{}", + normalize_url(base_url), + path.trim_start_matches('/') + ) +} + +fn instance_name(url: &str) -> String { + reqwest::Url::parse(url) + .ok() + .and_then(|parsed| parsed.host_str().map(str::to_string)) + .filter(|host| !host.is_empty()) + .unwrap_or_else(|| url.to_string()) +} + +async fn looks_like_hydroserver_auth_response(response: Response) -> bool { + if !matches!( + response.status(), + StatusCode::OK + | StatusCode::UNAUTHORIZED + | StatusCode::FORBIDDEN + | StatusCode::METHOD_NOT_ALLOWED + | StatusCode::UNPROCESSABLE_ENTITY + ) { + return false; + } + + let Some(payload) = response_json(response).await else { + return false; + }; + + let Some(payload) = payload.as_object() else { + return false; + }; + + payload + .get("meta") + .and_then(Value::as_object) + .map(|meta| meta.contains_key("is_authenticated")) + .unwrap_or(false) + || payload + .get("data") + .and_then(Value::as_object) + .map(|data| data.contains_key("flows")) + .unwrap_or(false) + || payload.get("detail").and_then(Value::as_array).is_some() +} + +async fn looks_like_hydroserver_data_response(response: Response) -> bool { + if !matches!( + response.status(), + StatusCode::OK | StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN + ) { + return false; + } + + let Some(payload) = response_json(response).await else { + return false; + }; + + payload.is_array() + || payload + .as_object() + .map(|object| object.contains_key("detail") || object.contains_key("status")) + .unwrap_or(false) +} + +async fn response_json(response: Response) -> Option { + let content_type = response + .headers() + .get(CONTENT_TYPE) + .and_then(|value| value.to_str().ok()) + .unwrap_or_default() + .to_ascii_lowercase(); + if !content_type.contains("json") { + return None; + } + + response.json::().await.ok() +} + +async fn response_error_message(response: Response) -> String { + let status = response.status(); + let text = 
response.text().await.unwrap_or_default();
    // HydroServer errors usually carry a JSON body with a "detail" field;
    // fall back to the bare status code when the body isn't parseable JSON.
    let detail = serde_json::from_str::<Value>(&text)
        .ok()
        .and_then(|payload| payload.get("detail").cloned())
        .map(format_error_detail);

    detail.unwrap_or_else(|| format!("HydroServer returned status {}.", status.as_u16()))
}

/// Flattens a HydroServer "detail" error payload into a single message:
/// a non-empty string is used as-is, an array takes the first item's `msg`
/// (or the whole array serialized), anything else is serialized to JSON.
fn format_error_detail(detail: Value) -> String {
    match detail {
        Value::String(message) if !message.trim().is_empty() => message,
        Value::Array(items) => items
            .iter()
            .find_map(|item| item.get("msg").and_then(Value::as_str))
            .map(str::to_string)
            .unwrap_or_else(|| Value::Array(items).to_string()),
        value => value.to_string(),
    }
}

/// Builds the datastream-cache key from everything that scopes a listing:
/// auth scheme, normalized URL, workspace, and the credential identity, so
/// switching any of them never serves another identity's cached results.
fn datastream_cache_key(server: &ServerConfig, workspace_id: &str) -> String {
    let credential = match server.auth_type {
        AuthType::Apikey => server.api_key.trim(),
        AuthType::Userpass => server.username.trim(),
    };

    format!(
        "{:?}|{}|{}|{}",
        server.auth_type,
        normalize_url(&server.url),
        workspace_id.trim(),
        credential
    )
}

/// Maps one item of an `expand_related=true` datastream listing to a
/// `DatastreamSummary`, reading related-entity fields from the embedded
/// objects and tolerating both snake_case and camelCase keys.
///
/// Cleanup: the original also computed `observed_property_id`,
/// `processing_level_id`, and `unit_id` and immediately discarded them via
/// `let _ = (…)`; those lookups are pure `Value` reads with no side effects,
/// so the dead computations are removed.
fn expanded_datastream_to_summary(item: &Value) -> DatastreamSummary {
    let thing = item.get("thing");
    let observed_property = item
        .get("observed_property")
        .or_else(|| item.get("observedProperty"));
    let processing_level = item
        .get("processing_level")
        .or_else(|| item.get("processingLevel"));
    let unit = item.get("unit");
    let sensor = item.get("sensor");

    // Prefer the flat foreign-key field; fall back to the embedded object's id.
    let thing_id = string_value(item, &["thing_id", "thingId"])
        .or_else(|| thing.and_then(|thing| string_value(thing, &["id", "uid"])))
        .unwrap_or_default();

    DatastreamSummary {
        id: string_value(item, &["id", "uid"]).unwrap_or_default(),
        name: string_value(item, &["name"]).unwrap_or_else(|| "Unnamed datastream".to_string()),
        thing_id,
        thing_name: thing
            .and_then(|value| string_value(value, &["name"]))
            .unwrap_or_default(),
        observed_property_name: observed_property
            .and_then(|value| string_value(value, &["name"]))
            .unwrap_or_default(),
        processing_level_definition: processing_level
            .and_then(|value| string_value(value, &["definition"]))
            .unwrap_or_default(),
        unit_name: unit
            .and_then(|value| string_value(value, &["name"]))
            .unwrap_or_default(),
        unit_symbol: unit
            .and_then(|value| string_value(value, &["symbol"]))
            .unwrap_or_default(),
        sampled_medium: string_value(item, &["sampled_medium", "sampledMedium"])
            .unwrap_or_default(),
        sensor_name: sensor
            .and_then(|value| string_value(value, &["name"]))
            .unwrap_or_default(),
        result_type: string_value(item, &["result_type", "resultType"]).unwrap_or_default(),
    }
}

/// Maps a fully-expanded datastream payload to the detail model, again
/// accepting both snake_case and camelCase key variants.
fn expanded_datastream_to_detail(item: &Value) -> DatastreamDetail {
    let thing = item.get("thing");
    let location = thing
        .and_then(|value| value.get("location"))
        .or_else(|| thing.and_then(|value| value.get("Location")));
    let observed_property = item
        .get("observed_property")
        .or_else(|| item.get("observedProperty"));
    let processing_level = item
        .get("processing_level")
        .or_else(|| item.get("processingLevel"));
    let unit = item.get("unit");
    let sensor = item.get("sensor");

    DatastreamDetail {
        id: string_value(item, &["id", "uid"]).unwrap_or_default(),
        name: string_value(item, &["name"]).unwrap_or_else(|| "Unnamed datastream".to_string()),
        description: string_value(item, &["description"]).unwrap_or_default(),
        sampled_medium: string_value(item, &["sampled_medium", "sampledMedium"])
.unwrap_or_default(), + result_type: string_value(item, &["result_type", "resultType"]).unwrap_or_default(), + observation_type: string_value(item, &["observation_type", "observationType"]) + .unwrap_or_default(), + no_data_value: scalar_string_value(item, &["no_data_value", "noDataValue"]), + aggregation_statistic: string_value( + item, + &["aggregation_statistic", "aggregationStatistic"], + ) + .unwrap_or_default(), + intended_time_spacing: scalar_string_value( + item, + &["intended_time_spacing", "intendedTimeSpacing"], + ), + intended_time_spacing_unit: string_value( + item, + &["intended_time_spacing_unit", "intendedTimeSpacingUnit"], + ) + .unwrap_or_default(), + time_aggregation_interval: scalar_string_value( + item, + &["time_aggregation_interval", "timeAggregationInterval"], + ), + time_aggregation_interval_unit: string_value( + item, + &[ + "time_aggregation_interval_unit", + "timeAggregationIntervalUnit", + ], + ) + .unwrap_or_default(), + phenomenon_begin_time: scalar_string_value( + item, + &["phenomenon_begin_time", "phenomenonBeginTime"], + ), + phenomenon_end_time: scalar_string_value( + item, + &["phenomenon_end_time", "phenomenonEndTime"], + ), + value_count: scalar_string_value(item, &["value_count", "valueCount"]), + is_private: bool_value(item, &["is_private", "isPrivate"]), + is_visible: bool_value(item, &["is_visible", "isVisible"]), + thing: DatastreamThingDetail { + id: string_value(item, &["thing_id", "thingId"]) + .or_else(|| thing.and_then(|value| string_value(value, &["id", "uid"]))) + .unwrap_or_default(), + name: thing + .and_then(|value| string_value(value, &["name"])) + .unwrap_or_default(), + description: thing + .and_then(|value| string_value(value, &["description"])) + .unwrap_or_default(), + sampling_feature_code: thing + .and_then(|value| { + string_value(value, &["sampling_feature_code", "samplingFeatureCode"]) + }) + .unwrap_or_default(), + site_type: thing + .and_then(|value| string_value(value, &["site_type", "siteType"])) 
+ .unwrap_or_default(), + sampling_feature_type: thing + .and_then(|value| { + string_value(value, &["sampling_feature_type", "samplingFeatureType"]) + }) + .unwrap_or_default(), + is_private: thing + .map(|value| bool_value(value, &["is_private", "isPrivate"])) + .unwrap_or(false), + location: DatastreamThingLocationDetail { + latitude: scalar_string_value(location.unwrap_or(item), &["latitude"]), + longitude: scalar_string_value(location.unwrap_or(item), &["longitude"]), + elevation_m: scalar_string_value( + location.unwrap_or(item), + &["elevation_m", "elevationM"], + ), + elevation_datum: string_value( + location.unwrap_or(item), + &["elevation_datum", "elevationDatum"], + ) + .unwrap_or_default(), + admin_area_1: string_value( + location.unwrap_or(item), + &["admin_area_1", "adminArea1"], + ) + .unwrap_or_default(), + admin_area_2: string_value( + location.unwrap_or(item), + &["admin_area_2", "adminArea2"], + ) + .unwrap_or_default(), + country: string_value(location.unwrap_or(item), &["country"]).unwrap_or_default(), + }, + }, + observed_property: DatastreamObservedPropertyDetail { + id: string_value(item, &["observed_property_id", "observedPropertyId"]) + .or_else(|| observed_property.and_then(|value| string_value(value, &["id", "uid"]))) + .unwrap_or_default(), + name: observed_property + .and_then(|value| string_value(value, &["name"])) + .unwrap_or_default(), + definition: observed_property + .and_then(|value| string_value(value, &["definition"])) + .unwrap_or_default(), + description: observed_property + .and_then(|value| string_value(value, &["description"])) + .unwrap_or_default(), + property_type: observed_property + .and_then(|value| string_value(value, &["type"])) + .unwrap_or_default(), + code: observed_property + .and_then(|value| string_value(value, &["code"])) + .unwrap_or_default(), + }, + unit: DatastreamUnitDetail { + id: string_value(item, &["unit_id", "unitId"]) + .or_else(|| unit.and_then(|value| string_value(value, &["id", "uid"]))) + 
.unwrap_or_default(), + name: unit + .and_then(|value| string_value(value, &["name"])) + .unwrap_or_default(), + symbol: unit + .and_then(|value| string_value(value, &["symbol"])) + .unwrap_or_default(), + definition: unit + .and_then(|value| string_value(value, &["definition"])) + .unwrap_or_default(), + unit_type: unit + .and_then(|value| string_value(value, &["type"])) + .unwrap_or_default(), + }, + sensor: DatastreamSensorDetail { + id: string_value(item, &["sensor_id", "sensorId"]) + .or_else(|| sensor.and_then(|value| string_value(value, &["id", "uid"]))) + .unwrap_or_default(), + name: sensor + .and_then(|value| string_value(value, &["name"])) + .unwrap_or_default(), + description: sensor + .and_then(|value| string_value(value, &["description"])) + .unwrap_or_default(), + manufacturer: sensor + .and_then(|value| string_value(value, &["manufacturer"])) + .unwrap_or_default(), + model: sensor + .and_then(|value| string_value(value, &["model"])) + .unwrap_or_default(), + method_type: sensor + .and_then(|value| string_value(value, &["method_type", "methodType"])) + .unwrap_or_default(), + method_code: sensor + .and_then(|value| string_value(value, &["method_code", "methodCode"])) + .unwrap_or_default(), + method_link: sensor + .and_then(|value| string_value(value, &["method_link", "methodLink"])) + .unwrap_or_default(), + encoding_type: sensor + .and_then(|value| string_value(value, &["encoding_type", "encodingType"])) + .unwrap_or_default(), + model_link: sensor + .and_then(|value| string_value(value, &["model_link", "modelLink"])) + .unwrap_or_default(), + }, + processing_level: DatastreamProcessingLevelDetail { + id: string_value(item, &["processing_level_id", "processingLevelId"]) + .or_else(|| processing_level.and_then(|value| string_value(value, &["id", "uid"]))) + .unwrap_or_default(), + code: processing_level + .and_then(|value| string_value(value, &["code"])) + .unwrap_or_default(), + definition: processing_level + .and_then(|value| string_value(value, 
&["definition"])) + .unwrap_or_default(), + explanation: processing_level + .and_then(|value| string_value(value, &["explanation"])) + .unwrap_or_default(), + }, + } +} + +fn datastream_to_summary( + item: &Value, + things_by_id: &HashMap<String, Value>, + observed_properties_by_id: &HashMap<String, Value>, + processing_levels_by_id: &HashMap<String, Value>, + units_by_id: &HashMap<String, Value>, + sensors_by_id: &HashMap<String, Value>, +) -> DatastreamSummary { + let thing_id = string_value(item, &["thing_id", "thingId"]).unwrap_or_default(); + let observed_property_id = + string_value(item, &["observed_property_id", "observedPropertyId"]).unwrap_or_default(); + let processing_level_id = + string_value(item, &["processing_level_id", "processingLevelId"]).unwrap_or_default(); + let unit_id = string_value(item, &["unit_id", "unitId"]).unwrap_or_default(); + let sensor_id = string_value(item, &["sensor_id", "sensorId"]).unwrap_or_default(); + + DatastreamSummary { + id: string_value(item, &["id", "uid"]).unwrap_or_default(), + name: string_value(item, &["name"]).unwrap_or_else(|| "Unnamed datastream".to_string()), + thing_id: thing_id.clone(), + thing_name: string_value_from_map(things_by_id, &Some(thing_id), &["name"]), + observed_property_name: string_value_from_map( + observed_properties_by_id, + &Some(observed_property_id), + &["name"], + ), + processing_level_definition: string_value_from_map( + processing_levels_by_id, + &Some(processing_level_id), + &["definition"], + ), + unit_name: string_value_from_map(units_by_id, &Some(unit_id.clone()), &["name"]), + unit_symbol: string_value_from_map(units_by_id, &Some(unit_id), &["symbol"]), + sampled_medium: string_value(item, &["sampled_medium", "sampledMedium"]) + .unwrap_or_default(), + sensor_name: string_value_from_map(sensors_by_id, &Some(sensor_id), &["name"]), + result_type: string_value(item, &["result_type", "resultType"]).unwrap_or_default(), + } +} + +fn map_items_by_id(items: &[Value]) -> HashMap<String, Value> { + items + .iter() + .filter_map(|item| string_value(item, &["id", 
"uid"]).map(|id| (id, item.clone()))) + .collect() +} + +fn string_value(item: &Value, keys: &[&str]) -> Option<String> { + keys.iter() + .find_map(|key| item.get(*key)) + .and_then(Value::as_str) + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) +} + +fn scalar_string_value(item: &Value, keys: &[&str]) -> String { + keys.iter() + .find_map(|key| item.get(*key)) + .and_then(value_to_string) + .unwrap_or_default() +} + +fn value_to_string(value: &Value) -> Option<String> { + match value { + Value::String(text) => { + let trimmed = text.trim(); + (!trimmed.is_empty()).then(|| trimmed.to_string()) + } + Value::Number(number) => Some(number.to_string()), + Value::Bool(value) => Some(value.to_string()), + _ => None, + } +} + +fn bool_value(item: &Value, keys: &[&str]) -> bool { + keys.iter() + .find_map(|key| item.get(*key)) + .and_then(Value::as_bool) + .unwrap_or(false) +} + +fn string_value_from_map( + items: &HashMap<String, Value>, + id: &Option<String>, + keys: &[&str], +) -> String { + id.as_ref() + .and_then(|key| items.get(key)) + .and_then(|item| string_value(item, keys)) + .unwrap_or_default() +} + +fn header_int(headers: &HeaderMap, header: &str) -> Option<u64> { + headers + .get(header) + .and_then(|value| value.to_str().ok()) + .and_then(|value| value.parse::<u64>().ok()) +} + +#[cfg(test)] +#[path = "tests/hydroserver.rs"] +mod tests; diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..66d879c --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,89 @@ +mod commands; +mod config_store; +mod csv_preview; +mod daemon_api; +mod daemon_launcher; +mod daemon_state; +mod file_watcher; +mod hydroserver; +mod logging; +mod models; +mod observation_queue; +mod pipeline; +mod runtime; +mod service_manager; +mod service_paths; +mod service_runtime; +mod timestamp; +mod uploader; +pub use service_manager::maybe_handle_service_management_cli; +pub use service_runtime::run_daemon; +pub use logging::init_process_logging_from_args; + +#[cfg(windows)] +use tauri::Manager; + 
+#[cfg(windows)] +use windows::Win32::Graphics::Dwm::{ + DWMWA_BORDER_COLOR, DWMWA_CAPTION_COLOR, DwmSetWindowAttribute, +}; + +#[cfg(windows)] +const WINDOW_CHROME_DARK_BACKGROUND_RGB: u32 = 0x33312f; + +#[cfg_attr(mobile, tauri::mobile_entry_point)] +pub fn run() { + logging::init_desktop_logging(); + + tauri::Builder::default() + .setup(|app| { + #[cfg(windows)] + if let Some(window) = app.get_webview_window("main") { + if let Err(error) = apply_windows_chrome_color(&window) { + tracing::warn!(error = %error, "Couldn't apply the Windows chrome color override"); + } + } + + Ok(()) + }) + .plugin(tauri_plugin_dialog::init()) + .plugin(tauri_plugin_opener::init()) + .invoke_handler(tauri::generate_handler![ + commands::get_daemon_connection, + commands::get_service_status, + commands::install_os_service, + commands::restart_os_service, + commands::uninstall_os_service, + commands::reveal_file_in_folder, + ]) + .build(tauri::generate_context!()) + .expect("error while building tauri application") + .run(|_, _| {}); +} + +#[cfg(windows)] +fn apply_windows_chrome_color(window: &tauri::WebviewWindow) -> Result<(), String> { + let hwnd = window.hwnd().map_err(|error| error.to_string())?; + let chrome_color = WINDOW_CHROME_DARK_BACKGROUND_RGB; + + // DWM expects COLORREF in 0x00bbggrr order, so this constant matches #2f3133. 
+ unsafe { + DwmSetWindowAttribute( + hwnd, + DWMWA_BORDER_COLOR, + &chrome_color as *const _ as _, + std::mem::size_of_val(&chrome_color) as u32, + ) + } + .map_err(|error| error.to_string())?; + + unsafe { + DwmSetWindowAttribute( + hwnd, + DWMWA_CAPTION_COLOR, + &chrome_color as *const _ as _, + std::mem::size_of_val(&chrome_color) as u32, + ) + } + .map_err(|error| error.to_string()) +} diff --git a/src/logging.rs b/src/logging.rs new file mode 100644 index 0000000..c8141c6 --- /dev/null +++ b/src/logging.rs @@ -0,0 +1,148 @@ +use std::{ + backtrace::Backtrace, + panic, + sync::{Once, OnceLock}, +}; + +use tracing_appender::non_blocking::WorkerGuard; + +use crate::service_paths::resolve_shared_logs_dir; + +static LOGGING_INIT: Once = Once::new(); +static PANIC_HOOK_INIT: Once = Once::new(); +static LOG_GUARD: OnceLock = OnceLock::new(); + +#[derive(Clone, Copy)] +enum LogContext { + Desktop, + Daemon, + ServiceManager, +} + +impl LogContext { + fn as_str(self) -> &'static str { + match self { + Self::Desktop => "desktop", + Self::Daemon => "daemon", + Self::ServiceManager => "service-manager", + } + } + + fn file_name(self) -> &'static str { + match self { + Self::Desktop => "desktop.log", + Self::Daemon => "daemon.log", + Self::ServiceManager => "service-manager.log", + } + } +} + +pub fn init_desktop_logging() { + init_logging(LogContext::Desktop); +} + +pub fn init_daemon_logging() { + init_logging(LogContext::Daemon); +} + +pub fn init_process_logging_from_args() { + if std::env::args_os().any(|arg| arg == "--service") { + init_logging(LogContext::Daemon); + return; + } + + let has_service_management_args = std::env::args_os().any(|arg| { + matches!( + arg.to_string_lossy().as_ref(), + "--windows-service-action" + | "--linux-service-action" + | "--windows-service-result-file" + | "--linux-service-result-file" + ) + }); + + if has_service_management_args { + init_logging(LogContext::ServiceManager); + return; + } + + init_logging(LogContext::Desktop); +} + 
+fn init_logging(context: LogContext) { + LOGGING_INIT.call_once(|| { + if let Err(error) = init_file_logging(context) { + let _ = tracing_subscriber::fmt() + .with_target(true) + .with_file(true) + .with_line_number(true) + .with_thread_ids(true) + .with_max_level(tracing::Level::INFO) + .try_init(); + eprintln!( + "Couldn't initialize file logging for {}: {}", + context.as_str(), + error + ); + } + }); + + install_panic_hook(); +} + +fn init_file_logging(context: LogContext) -> Result<(), String> { + let logs_dir = resolve_shared_logs_dir()?; + let log_path = logs_dir.join(context.file_name()); + let file_appender = tracing_appender::rolling::never(&logs_dir, context.file_name()); + let (non_blocking, guard) = tracing_appender::non_blocking(file_appender); + let _ = LOG_GUARD.set(guard); + + tracing_subscriber::fmt() + .with_ansi(false) + .with_target(true) + .with_file(true) + .with_line_number(true) + .with_thread_ids(true) + .with_max_level(tracing::Level::INFO) + .with_writer(non_blocking) + .try_init() + .map_err(|err| err.to_string())?; + + tracing::info!( + process = context.as_str(), + log_file = %log_path.display(), + "persistent file logging initialized" + ); + + Ok(()) +} + +fn install_panic_hook() { + PANIC_HOOK_INIT.call_once(|| { + let default_hook = panic::take_hook(); + panic::set_hook(Box::new(move |panic_info| { + let location = panic_info + .location() + .map(|location| format!("{}:{}", location.file(), location.line())) + .unwrap_or_else(|| "unknown".to_string()); + + let message = if let Some(message) = panic_info.payload().downcast_ref::<&str>() { + (*message).to_string() + } else if let Some(message) = panic_info.payload().downcast_ref::() { + message.clone() + } else { + "panic payload is not a string".to_string() + }; + + let backtrace = Backtrace::force_capture(); + tracing::error!( + panic.message = %message, + panic.location = %location, + panic.backtrace = %backtrace, + "application panicked" + ); + + default_hook(panic_info); + })); 
+ }); +} diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..33b8d54 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,21 @@ +// Prevents additional console window on Windows in release, DO NOT REMOVE!! +#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] + +fn main() { + streaming_data_loader_lib::init_process_logging_from_args(); + + if let Some(exit_code) = streaming_data_loader_lib::maybe_handle_service_management_cli() { + std::process::exit(exit_code); + } + + if std::env::args().any(|arg| arg == "--service") { + if let Err(error) = streaming_data_loader_lib::run_daemon() { + tracing::error!(error = %error, "daemon exited with an error"); + eprintln!("{error}"); + std::process::exit(1); + } + return; + } + + streaming_data_loader_lib::run() +} diff --git a/src/models.rs b/src/models.rs new file mode 100644 index 0000000..5622827 --- /dev/null +++ b/src/models.rs @@ -0,0 +1,1003 @@ +use std::collections::HashMap; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Default)] +#[serde(rename_all = "snake_case")] +pub enum AuthType { + #[default] + Apikey, + Userpass, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "snake_case")] +pub enum ConnectionState { + #[default] + NotConfigured, + Configured, + Connected, + Error, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "snake_case")] +pub enum LogLevel { + #[default] + Info, + Warning, + Error, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "snake_case")] +pub enum JobStatus { + Healthy, + Warning, + Error, + Disabled, + #[default] + Pending, + Running, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +pub enum TimestampFormatType { + #[default] + #[serde(rename = "ISO8601")] + 
Iso8601, + #[serde(rename = "naive")] + Naive, + #[serde(rename = "custom")] + Custom, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "lowercase")] +pub enum IdentifierType { + #[default] + Name, + Index, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +pub enum TimezoneModeType { + #[default] + #[serde(rename = "embeddedOffset")] + EmbeddedOffset, + #[serde(rename = "utc")] + Utc, + #[serde(rename = "fixedOffset")] + FixedOffset, + #[serde(rename = "daylightSavings")] + DaylightSavings, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ServerConfig { + #[serde(default)] + pub auth_type: AuthType, + #[serde(default)] + pub url: String, + #[serde(default)] + pub api_key: String, + #[serde(default)] + pub username: String, + #[serde(default)] + pub password: String, + #[serde(default)] + pub workspace_id: String, + #[serde(default)] + pub workspace_name: String, +} + +impl Default for ServerConfig { + fn default() -> Self { + Self { + auth_type: AuthType::Apikey, + url: String::new(), + api_key: String::new(), + username: String::new(), + password: String::new(), + workspace_id: String::new(), + workspace_name: String::new(), + } + } +} + +impl ServerConfig { + pub fn normalized(mut self) -> Self { + self.url = normalize_url(&self.url); + self.api_key = self.api_key.trim().to_string(); + self.username = self.username.trim().to_string(); + self.password = self.password.trim().to_string(); + self.workspace_id = self.workspace_id.trim().to_string(); + self.workspace_name = self.workspace_name.trim().to_string(); + + match self.auth_type { + AuthType::Apikey => { + self.username.clear(); + self.password.clear(); + } + AuthType::Userpass => { + self.api_key.clear(); + } + } + + self + } + + pub fn validated_for_connection(self) -> Result { + let server = self.normalized(); + + if server.url.is_empty() { + return Err("Host URL is 
required.".to_string()); + } + + match server.auth_type { + AuthType::Apikey if server.api_key.is_empty() => { + Err("API key is required.".to_string()) + } + AuthType::Userpass + if server.username.is_empty() + || server.password.is_empty() + || (server.workspace_id.is_empty() && server.workspace_name.is_empty()) => + { + Err("Username, password, and workspace name are required.".to_string()) + } + _ => Ok(server), + } + } + + pub fn is_configured(&self) -> bool { + if self.url.trim().is_empty() { + return false; + } + + match self.auth_type { + AuthType::Apikey => !self.api_key.trim().is_empty(), + AuthType::Userpass => { + !self.username.trim().is_empty() + && !self.password.trim().is_empty() + && (!self.workspace_id.trim().is_empty() + || !self.workspace_name.trim().is_empty()) + } + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct TimestampConfig { + #[serde(default = "default_timestamp_key")] + pub key: String, + #[serde(default)] + pub format: TimestampFormatType, + #[serde(default, rename = "customFormat", alias = "custom_format")] + pub custom_format: Option, + #[serde(default, rename = "timezoneMode", alias = "timezone_mode")] + pub timezone_mode: TimezoneModeType, + #[serde(default)] + pub timezone: Option, +} + +impl Default for TimestampConfig { + fn default() -> Self { + Self { + key: default_timestamp_key(), + format: TimestampFormatType::Iso8601, + custom_format: None, + timezone_mode: TimezoneModeType::EmbeddedOffset, + timezone: None, + } + } +} + +impl TimestampConfig { + pub fn normalized(mut self) -> Result { + self.key = self.key.trim().to_string(); + self.custom_format = self + .custom_format + .take() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()); + self.timezone = self + .timezone + .take() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()); + + if self.key.is_empty() { + self.key = default_timestamp_key(); + } + + match self.format { + 
TimestampFormatType::Custom => { + if self.custom_format.is_none() { + return Err( + "Custom timestamp formats require a customFormat value.".to_string() + ); + } + } + TimestampFormatType::Iso8601 | TimestampFormatType::Naive => { + self.custom_format = None; + } + } + + if self.format == TimestampFormatType::Iso8601 { + self.timezone_mode = TimezoneModeType::EmbeddedOffset; + self.timezone = None; + return Ok(self); + } + + if self.timezone_mode == TimezoneModeType::EmbeddedOffset { + self.timezone_mode = TimezoneModeType::Utc; + } + + match self.timezone_mode { + TimezoneModeType::Utc => { + self.timezone = None; + } + TimezoneModeType::FixedOffset | TimezoneModeType::DaylightSavings => { + if self.timezone.is_none() { + return Err( + "Timezone is required when using fixedOffset or daylightSavings timestamp modes." + .to_string(), + ); + } + } + TimezoneModeType::EmbeddedOffset => {} + } + + Ok(self) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct FileConfig { + #[serde( + default = "default_optional_header_row", + rename = "headerRow", + alias = "header_row" + )] + pub header_row: Option, + #[serde( + default = "default_data_start_row", + rename = "dataStartRow", + alias = "data_start_row" + )] + pub data_start_row: u32, + #[serde(default = "default_delimiter")] + pub delimiter: String, + #[serde(default, rename = "identifierType", alias = "identifier_type")] + pub identifier_type: IdentifierType, + #[serde(default)] + pub timestamp: TimestampConfig, +} + +impl Default for FileConfig { + fn default() -> Self { + Self { + header_row: Some(default_header_row()), + data_start_row: default_data_start_row(), + delimiter: default_delimiter(), + identifier_type: IdentifierType::default(), + timestamp: TimestampConfig::default(), + } + } +} + +impl FileConfig { + pub fn normalized(mut self) -> Result { + self.delimiter = if self.delimiter.is_empty() { + default_delimiter() + } else { + self.delimiter + }; + self.timestamp = 
self.timestamp.normalized()?; + + if !matches!(self.delimiter.as_str(), "," | ";" | "\t" | "|" | " ") { + return Err("Delimiter must be one of ',', ';', tab, '|', or space.".to_string()); + } + + match self.identifier_type { + IdentifierType::Index => { + self.header_row = None; + let timestamp_index = self.timestamp.key.parse::().map_err(|_| { + "timestamp.key must be a positive integer when using index-based CSV identifiers." + .to_string() + })?; + + if timestamp_index == 0 { + return Err( + "timestamp.key must be a positive integer when using index-based CSV identifiers." + .to_string(), + ); + } + } + IdentifierType::Name => { + if self.header_row.is_none() { + return Err( + "headerRow is required when using name-based column identifiers." + .to_string(), + ); + } + } + } + + if let Some(header_row) = self.header_row { + if self.data_start_row <= header_row { + return Err( + "dataStartRow must be greater than headerRow when a header row is configured." + .to_string(), + ); + } + } + + Ok(self) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ColumnMapping { + pub csv_column: String, + pub datastream_id: String, + pub datastream_name: String, +} + +impl ColumnMapping { + pub fn normalized(mut self) -> Result { + self.csv_column = self.csv_column.trim().to_string(); + self.datastream_id = self.datastream_id.trim().to_string(); + self.datastream_name = self.datastream_name.trim().to_string(); + + if self.csv_column.is_empty() + || self.datastream_id.is_empty() + || self.datastream_name.is_empty() + { + return Err( + "Column mappings require csv_column, datastream_id, and datastream_name." 
+ .to_string(), + ); + } + + Ok(self) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct JobUpsertRequest { + pub name: String, + #[serde(default = "default_enabled")] + pub enabled: bool, + pub file_path: String, + #[serde(default = "default_schedule_minutes")] + pub schedule_minutes: u32, + pub file_config: FileConfig, + #[serde(default)] + pub column_mappings: Vec<ColumnMapping>, +} + +impl JobUpsertRequest { + pub fn normalized(mut self) -> Result<Self, String> { + self.name = self.name.trim().to_string(); + self.file_path = self.file_path.trim().to_string(); + self.file_config = self.file_config.normalized()?; + self.column_mappings = self + .column_mappings + .into_iter() + .map(ColumnMapping::normalized) + .collect::<Result<Vec<_>, _>>()?; + + if self.name.is_empty() { + return Err("Job name is required.".to_string()); + } + if self.file_path.is_empty() { + return Err("File path is required.".to_string()); + } + if self.schedule_minutes == 0 { + return Err("schedule_minutes must be greater than 0.".to_string()); + } + + Ok(self) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct JobConfig { + pub id: String, + pub name: String, + #[serde(default = "default_enabled")] + pub enabled: bool, + pub file_path: String, + #[serde(default = "default_schedule_minutes")] + pub schedule_minutes: u32, + pub file_config: FileConfig, + #[serde(default)] + pub column_mappings: Vec<ColumnMapping>, +} + +impl JobConfig { + pub fn from_request(id: String, request: JobUpsertRequest) -> Result<Self, String> { + let request = request.normalized()?; + Ok(Self { + id, + name: request.name, + enabled: request.enabled, + file_path: request.file_path, + schedule_minutes: request.schedule_minutes, + file_config: request.file_config, + column_mappings: request.column_mappings, + }) + } + + pub fn normalized(mut self) -> Result<Self, String> { + self.id = self.id.trim().to_string(); + if self.id.is_empty() { + return Err("Job id is required.".to_string()); + } + + let request = JobUpsertRequest { + name: 
self.name, + enabled: self.enabled, + file_path: self.file_path, + schedule_minutes: self.schedule_minutes, + file_config: self.file_config, + column_mappings: self.column_mappings, + } + .normalized()?; + + self.name = request.name; + self.enabled = request.enabled; + self.file_path = request.file_path; + self.schedule_minutes = request.schedule_minutes; + self.file_config = request.file_config; + self.column_mappings = request.column_mappings; + Ok(self) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct JobCursor { + #[serde(default)] + pub last_pushed_timestamp: Option>, + #[serde(default)] + pub last_pushed_row_index: Option, + #[serde(default)] + pub last_run_at: Option>, + #[serde(default)] + pub last_error: Option, + #[serde(default)] + pub is_running: bool, + #[serde(default)] + pub datastream_cursors: HashMap, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct DatastreamCursor { + #[serde(default)] + pub last_pushed_timestamp: Option>, + #[serde(default)] + pub last_pushed_row_index: Option, + #[serde(default)] + pub last_error: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct JobLogEntry { + pub timestamp: DateTime, + #[serde(default)] + pub level: LogLevel, + pub message: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct PersistedDatasource { + pub id: String, + pub name: String, + #[serde(default = "default_enabled")] + pub enabled: bool, + pub file_path: String, + #[serde(default = "default_schedule_minutes")] + pub schedule_minutes: u32, + pub file_config: FileConfig, + #[serde(default)] + pub column_mappings: Vec, + #[serde(default)] + pub last_pushed_timestamp: Option>, + #[serde(default)] + pub last_pushed_row_index: Option, + #[serde(default)] + pub last_run_at: Option>, + #[serde(default)] + pub last_error: Option, + #[serde(default)] + pub is_running: bool, + #[serde(default)] 
+ pub datastream_cursors: HashMap, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub recent_logs: Vec, +} + +impl PersistedDatasource { + pub fn to_job_config(&self) -> JobConfig { + JobConfig { + id: self.id.clone(), + name: self.name.clone(), + enabled: self.enabled, + file_path: self.file_path.clone(), + schedule_minutes: self.schedule_minutes, + file_config: self.file_config.clone(), + column_mappings: self.column_mappings.clone(), + } + } + + pub fn to_cursor(&self) -> JobCursor { + let mut datastream_cursors = self.datastream_cursors.clone(); + // Backfill per-datastream entries for currently-configured mappings from + // the job-level aggregate so state persisted before per-datastream + // tracking continues to resume correctly. + for mapping in &self.column_mappings { + datastream_cursors + .entry(mapping.datastream_id.clone()) + .or_insert_with(|| DatastreamCursor { + last_pushed_timestamp: self.last_pushed_timestamp, + last_pushed_row_index: self.last_pushed_row_index, + last_error: self.last_error.clone(), + }); + } + JobCursor { + last_pushed_timestamp: self.last_pushed_timestamp, + last_pushed_row_index: self.last_pushed_row_index, + last_run_at: self.last_run_at, + last_error: self.last_error.clone(), + is_running: self.is_running, + datastream_cursors, + } + } + + pub fn from_job( + job: JobConfig, + cursor: Option, + recent_logs: Option>, + ) -> Self { + let cursor = cursor.unwrap_or_default(); + Self { + id: job.id, + name: job.name, + enabled: job.enabled, + file_path: job.file_path, + schedule_minutes: job.schedule_minutes, + file_config: job.file_config, + column_mappings: job.column_mappings, + last_pushed_timestamp: cursor.last_pushed_timestamp, + last_pushed_row_index: cursor.last_pushed_row_index, + last_run_at: cursor.last_run_at, + last_error: cursor.last_error, + is_running: cursor.is_running, + datastream_cursors: cursor.datastream_cursors, + recent_logs: recent_logs.unwrap_or_default(), + } + } +} + +#[derive(Debug, 
Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct AppConfig { + #[serde(default = "default_version")] + pub version: u32, + #[serde(default)] + pub server: ServerConfig, + #[serde(default)] + pub launch_at_login_initialized: bool, + #[serde(default)] + pub jobs: Vec, +} + +impl Default for AppConfig { + fn default() -> Self { + Self { + version: default_version(), + server: ServerConfig::default(), + launch_at_login_initialized: false, + jobs: Vec::new(), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct AppStateFile { + #[serde(default)] + pub cursors: HashMap, + #[serde(default)] + pub logs: HashMap>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct WorkspaceStateFile { + #[serde(default = "default_version")] + pub version: u32, + pub workspace_id: String, + #[serde(default)] + pub workspace_name: String, + #[serde(default)] + pub hydroserver_url: String, + #[serde(default)] + pub datasources: Vec, +} + +impl Default for WorkspaceStateFile { + fn default() -> Self { + Self { + version: default_version(), + workspace_id: String::new(), + workspace_name: String::new(), + hydroserver_url: String::new(), + datasources: Vec::new(), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ConnectionStatus { + pub state: ConnectionState, + pub message: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct HealthResponse { + pub status: String, + pub version: String, + pub config_dir: String, + pub server_configured: bool, + pub connection: ConnectionStatus, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct AppBootstrapResponse { + pub health: HealthResponse, + pub config: AppConfig, + #[serde(default)] + pub jobs: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct DaemonConnectionInfo { + pub base_url: String, + pub token: String, +} + +#[derive(Debug, 
Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct DaemonStatusSnapshot { + pub health: HealthResponse, + pub config: AppConfig, + #[serde(default)] + pub jobs: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ConnectionTestResponse { + pub ok: bool, + pub state: ConnectionState, + pub message: String, + #[serde(default)] + pub invalid_field: Option, + #[serde(default)] + pub instance_name: Option, + #[serde(default)] + pub workspace_id: Option, + #[serde(default)] + pub workspace_name: Option, + #[serde(default)] + pub workspace_count: u32, + #[serde(default)] + pub datastream_count: u32, + #[serde(default)] + pub permissions_ok: bool, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ServerUrlValidationResponse { + pub ok: bool, + pub message: String, + #[serde(default)] + pub instance_name: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ActionResponse { + #[serde(default = "default_true")] + pub ok: bool, + pub message: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ServiceStatusResponse { + #[serde(default = "default_true")] + pub supported: bool, + #[serde(default)] + pub installed: bool, + #[serde(default)] + pub running: bool, + #[serde(default)] + pub label: String, + #[serde(default)] + pub plist_path: String, + #[serde(default)] + pub executable_path: String, + #[serde(default)] + pub status_message: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct DatastreamSummary { + pub id: String, + pub name: String, + #[serde(default)] + pub thing_id: String, + #[serde(default)] + pub thing_name: String, + #[serde(default)] + pub observed_property_name: String, + #[serde(default)] + pub processing_level_definition: String, + #[serde(default)] + pub unit_name: String, + #[serde(default)] + pub unit_symbol: String, + #[serde(default)] + pub sampled_medium: 
String, + #[serde(default)] + pub sensor_name: String, + #[serde(default)] + pub result_type: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct DatastreamThingLocationDetail { + #[serde(default)] + pub latitude: String, + #[serde(default)] + pub longitude: String, + #[serde(default)] + pub elevation_m: String, + #[serde(default)] + pub elevation_datum: String, + #[serde(default)] + pub admin_area_1: String, + #[serde(default)] + pub admin_area_2: String, + #[serde(default)] + pub country: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct DatastreamThingDetail { + #[serde(default)] + pub id: String, + #[serde(default)] + pub name: String, + #[serde(default)] + pub description: String, + #[serde(default)] + pub sampling_feature_code: String, + #[serde(default)] + pub site_type: String, + #[serde(default)] + pub sampling_feature_type: String, + #[serde(default)] + pub is_private: bool, + #[serde(default)] + pub location: DatastreamThingLocationDetail, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct DatastreamObservedPropertyDetail { + #[serde(default)] + pub id: String, + #[serde(default)] + pub name: String, + #[serde(default)] + pub definition: String, + #[serde(default)] + pub description: String, + #[serde(default)] + pub property_type: String, + #[serde(default)] + pub code: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct DatastreamUnitDetail { + #[serde(default)] + pub id: String, + #[serde(default)] + pub name: String, + #[serde(default)] + pub symbol: String, + #[serde(default)] + pub definition: String, + #[serde(default)] + pub unit_type: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct DatastreamSensorDetail { + #[serde(default)] + pub id: String, + #[serde(default)] + pub name: String, + #[serde(default)] + pub description: 
String, + #[serde(default)] + pub manufacturer: String, + #[serde(default)] + pub model: String, + #[serde(default)] + pub method_type: String, + #[serde(default)] + pub method_code: String, + #[serde(default)] + pub method_link: String, + #[serde(default)] + pub encoding_type: String, + #[serde(default)] + pub model_link: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct DatastreamProcessingLevelDetail { + #[serde(default)] + pub id: String, + #[serde(default)] + pub code: String, + #[serde(default)] + pub definition: String, + #[serde(default)] + pub explanation: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct DatastreamDetail { + pub id: String, + #[serde(default)] + pub name: String, + #[serde(default)] + pub description: String, + #[serde(default)] + pub sampled_medium: String, + #[serde(default)] + pub result_type: String, + #[serde(default)] + pub observation_type: String, + #[serde(default)] + pub no_data_value: String, + #[serde(default)] + pub aggregation_statistic: String, + #[serde(default)] + pub intended_time_spacing: String, + #[serde(default)] + pub intended_time_spacing_unit: String, + #[serde(default)] + pub time_aggregation_interval: String, + #[serde(default)] + pub time_aggregation_interval_unit: String, + #[serde(default)] + pub phenomenon_begin_time: String, + #[serde(default)] + pub phenomenon_end_time: String, + #[serde(default)] + pub value_count: String, + #[serde(default)] + pub is_private: bool, + #[serde(default)] + pub is_visible: bool, + #[serde(default)] + pub thing: DatastreamThingDetail, + #[serde(default)] + pub observed_property: DatastreamObservedPropertyDetail, + #[serde(default)] + pub unit: DatastreamUnitDetail, + #[serde(default)] + pub sensor: DatastreamSensorDetail, + #[serde(default)] + pub processing_level: DatastreamProcessingLevelDetail, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct 
JobStatusSummary { + pub id: String, + pub name: String, + pub enabled: bool, + pub file_path: String, + pub schedule_minutes: u32, + pub file_config: FileConfig, + pub column_mappings: Vec, + pub status: JobStatus, + pub status_message: String, + #[serde(default)] + pub last_pushed_timestamp: Option>, + #[serde(default)] + pub last_run_at: Option>, + #[serde(default)] + pub last_error: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct JobDetail { + pub id: String, + pub name: String, + pub enabled: bool, + pub file_path: String, + pub schedule_minutes: u32, + pub file_config: FileConfig, + pub column_mappings: Vec, + pub status: JobStatus, + pub status_message: String, + #[serde(default)] + pub last_pushed_timestamp: Option>, + #[serde(default)] + pub last_run_at: Option>, + #[serde(default)] + pub last_error: Option, + #[serde(default)] + pub recent_logs: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct JobLogsResponse { + #[serde(default)] + pub entries: Vec, + #[serde(default)] + pub log_file_path: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct CsvPreviewResponse { + pub raw_lines: Vec, + pub parsed_rows: Vec>, + pub detected_header_row: Option, + pub detected_data_start_row: Option, + pub detected_delimiter: String, + pub total_lines: usize, + pub encoding: String, +} + +pub fn normalize_url(url: &str) -> String { + url.trim().trim_end_matches('/').to_string() +} + +fn default_version() -> u32 { + 1 +} + +fn default_true() -> bool { + true +} + +fn default_enabled() -> bool { + true +} + +fn default_schedule_minutes() -> u32 { + 15 +} + +fn default_timestamp_key() -> String { + "timestamp".to_string() +} + +fn default_header_row() -> u32 { + 1 +} + +fn default_optional_header_row() -> Option { + Some(default_header_row()) +} + +fn default_data_start_row() -> u32 { + 2 +} + +fn default_delimiter() -> String { + ",".to_string() +} diff --git 
a/src/observation_queue.rs b/src/observation_queue.rs new file mode 100644 index 0000000..c8f1311 --- /dev/null +++ b/src/observation_queue.rs @@ -0,0 +1,58 @@ +use chrono::{DateTime, Utc}; +use serde_json::Value; +use std::sync::Arc; +use tokio::sync::mpsc; + +use crate::models::ServerConfig; + +#[derive(Debug, Clone)] +pub struct ObservationContext { + pub server: Arc, + pub job_id: String, + pub datastream_id: String, + pub datastream_name: String, +} + +#[derive(Debug, Clone)] +pub struct QueuedObservation { + pub context: Arc, + pub timestamp: DateTime, + pub row_index: u64, + pub value: Value, +} + +#[derive(Clone)] +pub struct ObservationSender { + inner: mpsc::Sender, +} + +pub struct ObservationReceiver { + inner: mpsc::Receiver, +} + +pub fn bounded(capacity: usize) -> (ObservationSender, ObservationReceiver) { + let (tx, rx) = mpsc::channel(capacity); + ( + ObservationSender { inner: tx }, + ObservationReceiver { inner: rx }, + ) +} + +impl ObservationSender { + pub async fn send(&self, observation: QueuedObservation) -> Result<(), String> { + self.inner + .send(observation) + .await + .map_err(|_| "Observation queue is shutting down.".to_string()) + } +} + +impl ObservationReceiver { + pub async fn recv(&mut self) -> Option { + self.inner.recv().await + } +} + +#[cfg(test)] +#[path = "tests/observation_queue.rs"] +mod tests; diff --git a/src/package/macos/settings.py b/src/package/macos/settings.py deleted file mode 100644 index d5da335..0000000 --- a/src/package/macos/settings.py +++ /dev/null @@ -1,9 +0,0 @@ -files = ['dist/Streaming Data Loader.app'] -symlinks = {'Applications': '/Applications'} - -background = 'builtin-arrow' -hide = ['.background.tiff'] -icon_locations = { - 'Streaming Data Loader.app': (0, 110), - 'Applications': (420, 110) -} diff --git a/src/pipeline.rs b/src/pipeline.rs new file mode 100644 index 0000000..0ce85c2 --- /dev/null +++ b/src/pipeline.rs @@ -0,0 +1,828 @@ +use std::{ + collections::{HashMap, HashSet}, + fs, + 
path::{Path, PathBuf}, + sync::{Arc, Mutex as StdMutex}, + time::Instant, +}; + +use chrono::{DateTime, Utc}; +use csv::ReaderBuilder; +use serde_json::Value; +use tokio::{ + sync::{mpsc, Mutex, RwLock}, + task::JoinHandle, + time::{interval, MissedTickBehavior}, +}; +use tracing::{debug, error, info}; + +use crate::{ + config_store::ConfigStore, + file_watcher::FilesystemWatcher, + hydroserver::HydroServerService, + models::{JobConfig, JobCursor, JobLogEntry, LogLevel, ServerConfig}, + observation_queue::{ + bounded, ObservationContext, ObservationReceiver, ObservationSender, QueuedObservation, + }, + timestamp::parse_timestamp_to_utc, + uploader::spawn_upload_worker, +}; + +const DEFAULT_QUEUE_CAPACITY: usize = 10_000; + +#[derive(Clone)] +pub struct PipelineService { + inner: Arc, +} + +struct PipelineInner { + config_store: Arc, + observation_tx: Mutex>, + hydroserver: Arc, + event_tx: mpsc::UnboundedSender, + watch_plan: RwLock, + watcher: Mutex>, + row_counts: Mutex>, + in_flight_paths: Mutex>, + last_scan_times: Mutex>, + event_task: StdMutex>>, + uploader_task: StdMutex>>, + schedule_task: StdMutex>>, + // Held until the first initialize() call, then consumed by start_background_tasks. 
+ pending_event_rx: StdMutex>>, + pending_observation_rx: StdMutex>, +} + +#[derive(Clone, Default)] +struct WatchPlan { + jobs_by_path: HashMap>, + server: Option>, +} + +#[derive(Debug)] +struct ParsedObservation { + datastream_id: String, + datastream_name: String, + timestamp: DateTime, + row_index: u64, + value: Value, +} + +#[derive(Debug)] +struct JobScanResult { + file_row_count: usize, + observations: Vec, + reset_detected: bool, +} + +#[cfg_attr(not(test), allow(dead_code))] +#[derive(Clone, Copy)] +enum ScanMode { + Incremental, + FullResync, +} + +impl PipelineService { + pub fn new(config_store: Arc, hydroserver: Arc) -> Self { + let queue_capacity = std::env::var("SDL_QUEUE_CAPACITY") + .ok() + .and_then(|value| value.trim().parse::().ok()) + .filter(|value| *value > 0) + .unwrap_or(DEFAULT_QUEUE_CAPACITY); + let (observation_tx, observation_rx) = bounded(queue_capacity); + let (event_tx, event_rx) = mpsc::unbounded_channel(); + + Self { + inner: Arc::new(PipelineInner { + config_store: config_store.clone(), + observation_tx: Mutex::new(Some(observation_tx)), + hydroserver: hydroserver.clone(), + event_tx, + watch_plan: RwLock::new(WatchPlan::default()), + watcher: Mutex::new(None), + row_counts: Mutex::new(HashMap::new()), + in_flight_paths: Mutex::new(HashSet::new()), + last_scan_times: Mutex::new(HashMap::new()), + event_task: StdMutex::new(None), + uploader_task: StdMutex::new(None), + schedule_task: StdMutex::new(None), + pending_event_rx: StdMutex::new(Some(event_rx)), + pending_observation_rx: StdMutex::new(Some(observation_rx)), + }), + } + } + + pub async fn initialize(&self) -> Result<(), String> { + self.start_background_tasks(); + self.reload().await + } + + pub async fn reload(&self) -> Result<(), String> { + let snapshot = self.load_watch_plan().await?; + + { + let mut watch_plan = self.inner.watch_plan.write().await; + *watch_plan = snapshot.clone(); + } + + let watched_paths = snapshot.jobs_by_path.keys().cloned().collect::>(); + 
info!( + watched_file_count = watched_paths.len(), + "reloading pipeline watcher" + ); + let watcher = FilesystemWatcher::start(watched_paths.clone(), self.inner.event_tx.clone())?; + *self.inner.watcher.lock().await = watcher; + + // Seed row_counts from persisted cursors for paths not yet tracked in memory. + // This prevents re-uploading the entire file history after a process restart. + let seeds = self.load_cursor_row_seeds(&snapshot).await; + { + let mut row_counts = self.inner.row_counts.lock().await; + row_counts.retain(|path, _| snapshot.jobs_by_path.contains_key(path)); + for (path, seed) in seeds { + row_counts.entry(path).or_insert(seed); + } + } + + for path in &watched_paths { + debug!(file = %path.display(), "queuing initial scan"); + } + for path in watched_paths { + let _ = self.inner.event_tx.send(path); + } + + Ok(()) + } + + pub async fn run_job_now(&self, job_id: &str) -> Result<(), String> { + let watch_plan = self.inner.watch_plan.read().await; + let Some((path, job, server)) = watch_plan.jobs_by_path.iter().find_map(|(path, jobs)| { + jobs.iter() + .find(|job| job.id == job_id) + .map(|job| (path.clone(), job.clone(), watch_plan.server.clone())) + }) else { + return Err("That job could not be found.".to_string()); + }; + let Some(server) = server else { + return Err("Configure HydroServer before requesting a manual run.".to_string()); + }; + drop(watch_plan); + + let path = normalize_watched_path(path); + if !self.begin_path_scan(&path).await { + return Err("That data source is already running.".to_string()); + } + + let previous_row_count = { + let row_counts = self.inner.row_counts.lock().await; + row_counts.get(&path).copied().unwrap_or_default() + }; + + let outcome = self + .scan_job( + path.clone(), + server, + job, + previous_row_count, + ScanMode::FullResync, + ) + .await; + + if let Ok((row_count, _reset_detected)) = outcome.as_ref() { + self.inner + .row_counts + .lock() + .await + .insert(path.clone(), *row_count); + } + + 
self.end_path_scan(&path).await; + outcome.map(|_| ()) + } + + pub async fn shutdown(&self) { + info!("pipeline shutting down; draining pending uploads"); + *self.inner.watcher.lock().await = None; + + for slot in [&self.inner.event_task, &self.inner.schedule_task] { + if let Ok(mut guard) = slot.lock() { + if let Some(task) = guard.take() { + task.abort(); + } + } + } + + // Drop the observation sender so the uploader sees channel-closed and + // drains its remaining batches instead of waiting forever. + self.inner.observation_tx.lock().await.take(); + + let uploader_task = self + .inner + .uploader_task + .lock() + .ok() + .and_then(|mut slot| slot.take()); + + if let Some(task) = uploader_task { + let _ = tokio::time::timeout(std::time::Duration::from_secs(30), task).await; + } + } + + fn start_background_tasks(&self) { + let (mut event_rx, observation_rx) = match ( + self.inner + .pending_event_rx + .lock() + .ok() + .and_then(|mut g| g.take()), + self.inner + .pending_observation_rx + .lock() + .ok() + .and_then(|mut g| g.take()), + ) { + (Some(e), Some(o)) => (e, o), + _ => return, // already started + }; + + let service = self.clone(); + let event_task = tokio::spawn(async move { + while let Some(path) = event_rx.recv().await { + if let Err(error) = service.scan_path(path).await { + error!(error = %error, "filesystem-triggered scan failed"); + } + } + }); + + let uploader_task = spawn_upload_worker( + observation_rx, + self.inner.hydroserver.clone(), + self.inner.config_store.clone(), + ); + + // Scheduler: every 60 s, re-queue any path whose jobs are overdue per + // their schedule_minutes setting. This catches files on network drives + // or other filesystems that don't reliably fire OS change events. 
+ let service = self.clone(); + let schedule_task = tokio::spawn(async move { + const POLL_INTERVAL: std::time::Duration = std::time::Duration::from_secs(60); + let mut ticker = interval(POLL_INTERVAL); + ticker.set_missed_tick_behavior(MissedTickBehavior::Delay); + loop { + ticker.tick().await; + + let overdue_paths = { + let watch_plan = service.inner.watch_plan.read().await; + let last_scan_times = service.inner.last_scan_times.lock().await; + overdue_paths(Instant::now(), &watch_plan, &last_scan_times) + }; + + for path in overdue_paths { + debug!(file = %path.display(), "scheduled poll triggered scan"); + let _ = service.inner.event_tx.send(path); + } + } + }); + + if let Ok(mut slot) = self.inner.event_task.lock() { + *slot = Some(event_task); + } + if let Ok(mut slot) = self.inner.uploader_task.lock() { + *slot = Some(uploader_task); + } + if let Ok(mut slot) = self.inner.schedule_task.lock() { + *slot = Some(schedule_task); + } + } + + async fn load_watch_plan(&self) -> Result { + let config_store = self.inner.config_store.clone(); + tokio::task::spawn_blocking(move || { + let config = config_store.load()?; + if !config.server.is_configured() { + return Ok(WatchPlan::default()); + } + + let server = Arc::new(config.server.clone().normalized()); + let jobs_by_path = config.jobs.into_iter().filter(|job| job.enabled).fold( + HashMap::new(), + |mut acc, job| { + acc.entry(normalize_watched_path(&job.file_path)) + .or_insert_with(Vec::new) + .push(job); + acc + }, + ); + + Ok(WatchPlan { + jobs_by_path, + server: Some(server), + }) + }) + .await + .map_err(|err| err.to_string())? + } + + /// Returns the minimum confirmed `last_pushed_row_index` across all jobs for + /// each watched path. Used to seed `row_counts` on startup so incremental + /// scans resume from where they left off instead of re-uploading everything. 
+ async fn load_cursor_row_seeds(&self, snapshot: &WatchPlan) -> HashMap { + let config_store = self.inner.config_store.clone(); + let pairs: Vec<(PathBuf, Vec)> = snapshot + .jobs_by_path + .iter() + .map(|(path, jobs)| (path.clone(), jobs.iter().map(|j| j.id.clone()).collect())) + .collect(); + + tokio::task::spawn_blocking(move || { + let mut seeds: HashMap = HashMap::new(); + for (path, job_ids) in pairs { + for job_id in &job_ids { + if let Ok(cursor) = config_store.cursor_for(job_id) { + if let Some(row_index) = cursor.last_pushed_row_index { + let entry = seeds.entry(path.clone()).or_insert(row_index as usize); + // Use the minimum across all jobs sharing this path so the + // slowest job drives where the scan resumes. + *entry = (*entry).min(row_index as usize); + } + } + } + } + seeds + }) + .await + .unwrap_or_default() + } + + async fn scan_path(&self, path: PathBuf) -> Result<(), String> { + let path = normalize_watched_path(path); + if !self.begin_path_scan(&path).await { + debug!(file = %path.display(), "skipping scan; already in flight"); + return Ok(()); + } + // Record scan start time so the scheduler can determine when to retry. 
+ self.inner + .last_scan_times + .lock() + .await + .insert(path.clone(), Instant::now()); + debug!(file = %path.display(), "scanning watched file for new rows"); + + let outcome = async { + let snapshot = { + let watch_plan = self.inner.watch_plan.read().await; + let jobs = watch_plan + .jobs_by_path + .get(&path) + .cloned() + .unwrap_or_default(); + let server = watch_plan.server.clone(); + (server, jobs) + }; + + let (Some(server), jobs) = snapshot else { + return Ok(()); + }; + + if jobs.is_empty() { + return Ok(()); + } + + let previous_row_count = { + let row_counts = self.inner.row_counts.lock().await; + row_counts.get(&path).copied().unwrap_or_default() + }; + + let mut latest_row_count = previous_row_count; + let mut reset_any = false; + for job in jobs { + match self + .scan_job( + path.clone(), + server.clone(), + job, + previous_row_count, + ScanMode::Incremental, + ) + .await + { + Ok((row_count, reset_detected)) => { + if reset_detected { + reset_any = true; + latest_row_count = row_count; + } else if !reset_any { + latest_row_count = latest_row_count.max(row_count); + } + } + Err(error) => { + if !reset_any { + latest_row_count = latest_row_count.max(previous_row_count); + } + error!(file = %path.display(), error = %error, "job scan failed"); + } + } + } + + self.inner + .row_counts + .lock() + .await + .insert(path.clone(), latest_row_count); + + Ok(()) + } + .await; + + self.end_path_scan(&path).await; + outcome + } + + async fn scan_job( + &self, + _path: PathBuf, + server: Arc, + job: JobConfig, + previous_row_count: usize, + mode: ScanMode, + ) -> Result<(usize, bool), String> { + self.set_job_running(&job.id, true).await?; + + let outcome = async { + let cursor = self.load_cursor(&job.id).await?; + let job_for_scan = job.clone(); + + let result = tokio::task::spawn_blocking(move || { + scan_job_file(job_for_scan, previous_row_count, cursor, mode) + }) + .await + .map_err(|err| err.to_string())??; + + if result.reset_detected { + 
self.reset_job_datastream_cursors(&job.id).await?; + self.append_log( + &job.id, + "Detected that the watched CSV file was replaced or truncated; rescanning from the configured data start row.", + LogLevel::Warning, + ) + .await?; + } + + if result.observations.is_empty() { + self.clear_last_error(&job.id).await?; + if matches!(mode, ScanMode::FullResync) { + self.append_log( + &job.id, + "No new observations were available to queue.", + LogLevel::Info, + ) + .await?; + } + return Ok((result.file_row_count, result.reset_detected)); + } + + let mut queued = 0usize; + for observation in result.observations { + let context = Arc::new(ObservationContext { + server: server.clone(), + job_id: job.id.clone(), + datastream_id: observation.datastream_id, + datastream_name: observation.datastream_name, + }); + let tx = self.inner.observation_tx.lock().await; + let Some(tx) = tx.as_ref() else { + return Err("Pipeline is shutting down.".to_string()); + }; + tx.send(QueuedObservation { + context, + timestamp: observation.timestamp, + row_index: observation.row_index, + value: observation.value, + }) + .await?; + queued += 1; + } + + self.clear_last_error(&job.id).await?; + + info!( + job_id = %job.id, + queued_count = queued, + file = %job.file_path, + "queued observations from watched CSV file" + ); + + Ok((result.file_row_count, result.reset_detected)) + } + .await; + + let running_clear = self.set_job_running(&job.id, false).await; + if let Err(error) = running_clear { + return Err(error); + } + + outcome + } + + async fn begin_path_scan(&self, path: &Path) -> bool { + let mut in_flight = self.inner.in_flight_paths.lock().await; + in_flight.insert(path.to_path_buf()) + } + + async fn end_path_scan(&self, path: &Path) { + self.inner.in_flight_paths.lock().await.remove(path); + } + + async fn load_cursor(&self, job_id: &str) -> Result { + let config_store = self.inner.config_store.clone(); + let job_id = job_id.to_string(); + tokio::task::spawn_blocking(move || 
config_store.cursor_for(&job_id)) + .await + .map_err(|err| err.to_string())? + } + + async fn clear_last_error(&self, job_id: &str) -> Result<(), String> { + let config_store = self.inner.config_store.clone(); + let job_id = job_id.to_string(); + tokio::task::spawn_blocking(move || config_store.clear_last_error(&job_id, Utc::now())) + .await + .map_err(|err| err.to_string())? + } + + async fn reset_job_datastream_cursors(&self, job_id: &str) -> Result<(), String> { + let config_store = self.inner.config_store.clone(); + let job_id = job_id.to_string(); + tokio::task::spawn_blocking(move || config_store.reset_job_datastream_cursors(&job_id)) + .await + .map_err(|err| err.to_string())? + } + + async fn set_job_running(&self, job_id: &str, is_running: bool) -> Result<(), String> { + let config_store = self.inner.config_store.clone(); + let job_id = job_id.to_string(); + tokio::task::spawn_blocking(move || config_store.set_job_running(&job_id, is_running)) + .await + .map_err(|err| err.to_string())? + .map(|_| ()) + } + + async fn append_log(&self, job_id: &str, message: &str, level: LogLevel) -> Result<(), String> { + let config_store = self.inner.config_store.clone(); + let job_id = job_id.to_string(); + let message = message.to_string(); + tokio::task::spawn_blocking(move || { + config_store.append_log( + &job_id, + JobLogEntry { + timestamp: Utc::now(), + level, + message, + }, + )?; + Ok::<(), String>(()) + }) + .await + .map_err(|err| err.to_string())? + } +} + +fn overdue_paths( + now: Instant, + watch_plan: &WatchPlan, + last_scan_times: &HashMap, +) -> Vec { + watch_plan + .jobs_by_path + .iter() + .filter_map(|(path, jobs)| { + let min_interval_secs = jobs + .iter() + .map(|job| job.schedule_minutes as u64 * 60) + .min() + .unwrap_or(u64::MAX); + + let Some(last_scan) = last_scan_times.get(path) else { + // Initial scan (queued by reload) handles the first run. 
+ return None; + }; + + (now.duration_since(*last_scan).as_secs() >= min_interval_secs).then(|| path.clone()) + }) + .collect() +} + +fn scan_job_file( + job: JobConfig, + previous_row_count: usize, + cursor: JobCursor, + mode: ScanMode, +) -> Result { + let bytes = fs::read(&job.file_path).map_err(|err| err.to_string())?; + let (csv_text, _encoding) = crate::csv_preview::decode_text(&bytes)?; + let delimiter = job + .file_config + .delimiter + .chars() + .next() + .ok_or_else(|| "Delimiter is required.".to_string())?; + let rows = read_csv_rows(&csv_text, delimiter)?; + let file_row_count = rows.len(); + + if rows.is_empty() { + return Ok(JobScanResult { + file_row_count, + observations: Vec::new(), + reset_detected: false, + }); + } + + let data_start_index = job.file_config.data_start_row.saturating_sub(1) as usize; + let timestamp_index = resolve_column_index(&rows, &job, &job.file_config.timestamp.key)?; + let mapping_indexes = job + .column_mappings + .iter() + .map(|mapping| { + resolve_column_index(&rows, &job, &mapping.csv_column).map(|index| { + ( + mapping.datastream_id.clone(), + mapping.datastream_name.clone(), + index, + ) + }) + }) + .collect::, _>>()?; + + let reset_detected = + matches!(mode, ScanMode::Incremental) && file_row_count < previous_row_count; + // Compute the earliest row any active datastream still needs. A datastream + // with no cursor entry falls back to the in-memory previous_row_count + // (nothing to backtrack to); a cursor with a failure still has its prior + // last_pushed_row_index, so rows past it get re-emitted by the per-row + // filter below. 
+ let min_needed_start = mapping_indexes + .iter() + .map(|(datastream_id, _, _)| { + cursor + .datastream_cursors + .get(datastream_id) + .and_then(|c| c.last_pushed_row_index) + .map(|idx| idx as usize) + .unwrap_or(previous_row_count) + }) + .min() + .unwrap_or(previous_row_count); + let start_index = match mode { + // Fast-skip to whichever is earlier: the in-memory row count (all + // datastreams caught up this session) or the minimum row any datastream + // still needs (so retries aren't skipped). + ScanMode::Incremental if !reset_detected => { + data_start_index.max(previous_row_count.min(min_needed_start)) + } + ScanMode::Incremental | ScanMode::FullResync => data_start_index, + }; + + let mut observations = Vec::new(); + for (row_number, row) in rows.iter().enumerate().skip(start_index) { + let csv_row_number = row_number as u64 + 1; + let timestamp_value = row + .get(timestamp_index) + .map(String::as_str) + .unwrap_or_default() + .trim(); + if timestamp_value.is_empty() { + continue; + } + + let timestamp = parse_timestamp_to_utc(timestamp_value, &job.file_config.timestamp) + .map_err(|error| format!("Row {csv_row_number}: {error}"))?; + + for (datastream_id, datastream_name, column_index) in &mapping_indexes { + // Skip rows this datastream has already confirmed pushed. In + // Incremental mode, compare row_index; in FullResync (triggered by + // a manual "Run Now"), compare against the cursor's timestamp so + // previously-loaded history isn't re-uploaded. 
+ let datastream_cursor = cursor.datastream_cursors.get(datastream_id); + let already_pushed = if reset_detected { + false + } else { + match mode { + ScanMode::Incremental => datastream_cursor + .and_then(|c| c.last_pushed_row_index) + .map(|last| csv_row_number <= last) + .unwrap_or(false), + ScanMode::FullResync => datastream_cursor + .map(|c| !is_newer_than_datastream_cursor(timestamp, csv_row_number, c)) + .unwrap_or(false), + } + }; + if already_pushed { + continue; + } + + let value = row + .get(*column_index) + .map(String::as_str) + .unwrap_or_default() + .trim(); + if value.is_empty() { + continue; + } + + observations.push(ParsedObservation { + datastream_id: datastream_id.clone(), + datastream_name: datastream_name.clone(), + timestamp, + row_index: csv_row_number, + value: parse_observation_value(value), + }); + } + } + + Ok(JobScanResult { + file_row_count, + observations, + reset_detected, + }) +} + +fn resolve_column_index(rows: &[Vec], job: &JobConfig, key: &str) -> Result { + if job.file_config.identifier_type == crate::models::IdentifierType::Index { + let index = key + .trim() + .parse::() + .map_err(|_| format!("Column index '{key}' is invalid."))?; + if index == 0 { + return Err(format!("Column index '{key}' is invalid.")); + } + return Ok(index - 1); + } + + let header_row = job.file_config.header_row.ok_or_else(|| { + "headerRow is required when using name-based column identifiers.".to_string() + })?; + let header_index = header_row.saturating_sub(1) as usize; + let header = rows + .get(header_index) + .ok_or_else(|| "The configured header row does not exist in the file.".to_string())?; + + let target = key.trim(); + header + .iter() + .position(|value| value.trim() == target) + .or_else(|| { + header + .iter() + .position(|value| value.trim().eq_ignore_ascii_case(target)) + }) + .ok_or_else(|| { + format!( + "Column '{key}' was not found in the configured header row. Confirm the delimiter and headerRow settings match the source file." 
+ ) + }) +} + +fn read_csv_rows(csv_text: &str, delimiter: char) -> Result>, String> { + let delimiter = delimiter as u8; + ReaderBuilder::new() + .has_headers(false) + .delimiter(delimiter) + .flexible(true) + .from_reader(csv_text.as_bytes()) + .records() + .map(|record| { + record + .map(|record| record.iter().map(|value| value.to_string()).collect()) + .map_err(|err| err.to_string()) + }) + .collect() +} + +fn parse_observation_value(value: &str) -> Value { + value + .parse::() + .map(Value::from) + .unwrap_or_else(|_| Value::String(value.to_string())) +} + +fn normalize_watched_path(path: impl AsRef) -> PathBuf { + let path = PathBuf::from(path.as_ref()); + path.canonicalize().unwrap_or(path) +} + +fn is_newer_than_datastream_cursor( + timestamp: DateTime, + row_index: u64, + cursor: &crate::models::DatastreamCursor, +) -> bool { + match cursor.last_pushed_timestamp { + Some(last_timestamp) if timestamp < last_timestamp => false, + Some(last_timestamp) if timestamp == last_timestamp => cursor + .last_pushed_row_index + .map(|last_row_index| row_index > last_row_index) + .unwrap_or(false), + _ => true, + } +} + +#[cfg(test)] +#[path = "tests/pipeline.rs"] +mod tests; diff --git a/src/runtime.rs b/src/runtime.rs new file mode 100644 index 0000000..2fcc5ea --- /dev/null +++ b/src/runtime.rs @@ -0,0 +1,352 @@ +use std::{ + fs, + path::{Path, PathBuf}, + sync::Arc, +}; + +use chrono::Utc; +use tauri::{AppHandle, Manager}; + +use crate::{ + config_store::ConfigStore, + hydroserver::HydroServerService, + models::{ + AppBootstrapResponse, AppConfig, ConnectionState, ConnectionStatus, DaemonStatusSnapshot, + HealthResponse, JobConfig, JobCursor, JobDetail, JobLogEntry, JobStatus, JobStatusSummary, + LogLevel, ServerConfig, + }, + service_paths::{ + active_app_directory_name, resolve_shared_service_config_dir, APP_DIRECTORY_NAME, + DEV_APP_DIRECTORY_NAME, + }, +}; + +const APP_VERSION: &str = env!("CARGO_PKG_VERSION"); +#[cfg_attr(not(test), allow(dead_code))] +const 
BUNDLE_IDENTIFIER: &str = "com.streaming-data-loader"; +#[cfg_attr(not(test), allow(dead_code))] +const LEGACY_BUNDLE_IDENTIFIER: &str = "com.streaming-data-loader.app"; + +#[derive(Clone)] +pub struct AppState { + inner: Arc, +} + +struct AppStateInner { + settings: AppSettings, + config_store: Arc, + hydroserver: Arc, +} + +#[derive(Debug, Clone)] +pub struct AppSettings { + pub version: String, + pub config_dir: PathBuf, +} + +impl AppState { + pub fn new(config_dir: PathBuf) -> Result { + let config_store = Arc::new(ConfigStore::new(config_dir.clone())); + let hydroserver = Arc::new(HydroServerService::new()?); + + Ok(Self { + inner: Arc::new(AppStateInner { + settings: AppSettings { + version: APP_VERSION.to_string(), + config_dir, + }, + config_store, + hydroserver, + }), + }) + } + + pub fn initialize(&self) -> Result<(), String> { + self.inner.config_store.ensure() + } + + pub fn health(&self) -> Result { + let config = self.inner.config_store.load()?; + Ok(HealthResponse { + status: "ok".to_string(), + version: self.inner.settings.version.clone(), + config_dir: self.inner.settings.config_dir.to_string_lossy().to_string(), + server_configured: config.server.is_configured(), + connection: connection_status(&config.server), + }) + } + + pub fn config(&self) -> Result { + self.inner.config_store.load() + } + + pub fn config_store(&self) -> &ConfigStore { + self.inner.config_store.as_ref() + } + + pub fn config_store_handle(&self) -> Arc { + self.inner.config_store.clone() + } + + pub fn hydroserver(&self) -> &HydroServerService { + self.inner.hydroserver.as_ref() + } + + pub fn hydroserver_handle(&self) -> Arc { + self.inner.hydroserver.clone() + } + + pub fn status_snapshot(&self) -> Result { + let health = self.health()?; + let config = self.config()?; + let jobs = config + .jobs + .iter() + .map(|job| self.build_job_summary(job)) + .collect::, _>>()?; + + Ok(DaemonStatusSnapshot { + health, + config, + jobs, + }) + } + + pub fn bootstrap(&self) -> Result { 
+ let snapshot = self.status_snapshot()?; + Ok(AppBootstrapResponse { + health: snapshot.health, + config: snapshot.config, + jobs: snapshot.jobs, + }) + } + + pub fn build_job_summary(&self, job: &JobConfig) -> Result { + let cursor = self.inner.config_store.cursor_for(&job.id)?; + let (status, status_message) = derive_job_status(job, &cursor); + Ok(JobStatusSummary { + id: job.id.clone(), + name: job.name.clone(), + enabled: job.enabled, + file_path: job.file_path.clone(), + schedule_minutes: job.schedule_minutes, + file_config: job.file_config.clone(), + column_mappings: job.column_mappings.clone(), + status, + status_message, + last_pushed_timestamp: cursor.last_pushed_timestamp, + last_run_at: cursor.last_run_at, + last_error: cursor.last_error, + }) + } + + pub fn build_job_detail(&self, job: &JobConfig) -> Result { + let summary = self.build_job_summary(job)?; + Ok(JobDetail { + id: summary.id, + name: summary.name, + enabled: summary.enabled, + file_path: summary.file_path, + schedule_minutes: summary.schedule_minutes, + file_config: summary.file_config, + column_mappings: summary.column_mappings, + status: summary.status, + status_message: summary.status_message, + last_pushed_timestamp: summary.last_pushed_timestamp, + last_run_at: summary.last_run_at, + last_error: summary.last_error, + recent_logs: self.inner.config_store.logs_for(&job.id, 200)?, + }) + } + + pub fn append_log( + &self, + job_id: &str, + message: &str, + level: LogLevel, + ) -> Result { + let entry = JobLogEntry { + timestamp: Utc::now(), + level, + message: message.to_string(), + }; + self.inner.config_store.append_log(job_id, entry) + } +} + +#[cfg_attr(not(test), allow(dead_code))] +pub fn resolve_config_dir(app_handle: &AppHandle) -> Result { + if cfg!(target_os = "macos") || cfg!(target_os = "windows") { + let preferred_dir = resolve_shared_service_config_dir()?; + migrate_legacy_config_dir(app_handle, &preferred_dir)?; + return Ok(preferred_dir); + } + + let preferred_dir = 
preferred_user_data_dir( + app_handle.path().app_data_dir().ok(), + app_handle.path().home_dir().ok(), + )?; + + migrate_legacy_config_dir(app_handle, &preferred_dir)?; + + if try_create_dir(&preferred_dir) { + return Ok(preferred_dir); + } + + if let Ok(home_dir) = app_handle.path().home_dir() { + let fallback_dir = home_dir.join(active_app_directory_name()); + migrate_legacy_config_dir(app_handle, &fallback_dir)?; + fs::create_dir_all(&fallback_dir).map_err(|err| err.to_string())?; + return Ok(fallback_dir); + } + + Err("Couldn't resolve an application data directory.".to_string()) +} + +#[cfg_attr(not(test), allow(dead_code))] +fn preferred_user_data_dir( + app_data_dir: Option, + home_dir: Option, +) -> Result { + if let Some(app_data_dir) = app_data_dir { + return Ok(if cfg!(debug_assertions) { + app_data_dir.join("dev") + } else { + app_data_dir + }); + } + + if let Some(home_dir) = home_dir { + return Ok(home_dir.join(active_app_directory_name())); + } + + Err("Couldn't resolve an application data directory.".to_string()) +} + +#[cfg_attr(not(test), allow(dead_code))] +fn try_create_dir(path: &Path) -> bool { + fs::create_dir_all(path).is_ok() +} + +#[cfg_attr(not(test), allow(dead_code))] +fn migrate_legacy_config_dir(app_handle: &AppHandle, target_dir: &Path) -> Result<(), String> { + if has_runtime_state(target_dir) { + return Ok(()); + } + + let Some(source_dir) = legacy_config_candidates(app_handle) + .into_iter() + .find(|candidate| candidate != target_dir && has_runtime_state(candidate)) + else { + return Ok(()); + }; + + move_or_copy_dir_contents(&source_dir, target_dir) +} + +#[cfg_attr(not(test), allow(dead_code))] +fn legacy_config_candidates(app_handle: &AppHandle) -> Vec { + let mut candidates = Vec::new(); + + #[cfg(windows)] + { + candidates.extend(crate::service_paths::legacy_shared_service_config_dirs()); + } + + if let Ok(data_dir) = app_handle.path().data_dir() { + candidates.push(data_dir.join(LEGACY_BUNDLE_IDENTIFIER)); + 
candidates.push(data_dir.join(BUNDLE_IDENTIFIER)); + } + + if let Ok(document_dir) = app_handle.path().document_dir() { + candidates.push(document_dir.join(APP_DIRECTORY_NAME)); + if cfg!(debug_assertions) { + candidates.push(document_dir.join(DEV_APP_DIRECTORY_NAME)); + } + } + + if let Ok(current_dir) = std::env::current_dir() { + candidates.push(current_dir.join("Streaming Data Loader Data")); + } + + if let Ok(home_dir) = std::env::var("HOME").or_else(|_| std::env::var("USERPROFILE")) { + let home_dir = PathBuf::from(home_dir); + candidates.push(home_dir.join(APP_DIRECTORY_NAME)); + if cfg!(debug_assertions) { + candidates.push(home_dir.join(DEV_APP_DIRECTORY_NAME)); + } + } + + candidates +} + +#[cfg_attr(not(test), allow(dead_code))] +fn has_runtime_state(path: &Path) -> bool { + path.join("config.json").exists() || path.join("workspaces").is_dir() +} + +#[cfg_attr(not(test), allow(dead_code))] +fn copy_dir_contents(source_dir: &Path, target_dir: &Path) -> Result<(), String> { + fs::create_dir_all(target_dir).map_err(|err| err.to_string())?; + + for entry in fs::read_dir(source_dir).map_err(|err| err.to_string())? 
{ + let entry = entry.map_err(|err| err.to_string())?; + let source_path = entry.path(); + let target_path = target_dir.join(entry.file_name()); + + if source_path.is_dir() { + copy_dir_contents(&source_path, &target_path)?; + } else if source_path.is_file() && !target_path.exists() { + fs::copy(&source_path, &target_path).map_err(|err| err.to_string())?; + } + } + + Ok(()) +} + +#[cfg_attr(not(test), allow(dead_code))] +fn move_or_copy_dir_contents(source_dir: &Path, target_dir: &Path) -> Result<(), String> { + if let Some(parent) = target_dir.parent() { + fs::create_dir_all(parent).map_err(|err| err.to_string())?; + } + + if !target_dir.exists() && fs::rename(source_dir, target_dir).is_ok() { + return Ok(()); + } + + copy_dir_contents(source_dir, target_dir) +} + +fn connection_status(server: &ServerConfig) -> ConnectionStatus { + if !server.is_configured() { + return ConnectionStatus { + state: ConnectionState::NotConfigured, + message: "HydroServer not configured".to_string(), + }; + } + + ConnectionStatus { + state: ConnectionState::Configured, + message: "HydroServer configured".to_string(), + } +} + +fn derive_job_status(job: &JobConfig, cursor: &JobCursor) -> (JobStatus, String) { + if cursor.is_running { + return (JobStatus::Running, "Running now".to_string()); + } + if !job.enabled { + return (JobStatus::Disabled, "Paused".to_string()); + } + if let Some(last_error) = &cursor.last_error { + return (JobStatus::Error, last_error.clone()); + } + if cursor.last_pushed_timestamp.is_none() { + return (JobStatus::Pending, "Watching for new rows".to_string()); + } + (JobStatus::Healthy, "Watching for new rows".to_string()) +} + +#[cfg(test)] +#[path = "tests/runtime.rs"] +mod tests; diff --git a/src/scheduler.py b/src/scheduler.py deleted file mode 100644 index fb049a6..0000000 --- a/src/scheduler.py +++ /dev/null @@ -1,216 +0,0 @@ -import traceback -import logging -from apscheduler.schedulers.background import BackgroundScheduler -from apscheduler.triggers.cron 
import CronTrigger -from apscheduler.triggers.interval import IntervalTrigger -from pytz import utc -from datetime import datetime -from PySide6.QtCore import QObject - - -logger = logging.getLogger("scheduler") - - -class DataLoaderScheduler(QObject): - - def __init__(self, hs_api, data_loader=None): - super().__init__() - - self.data_loader = data_loader - - self.scheduler = BackgroundScheduler(timezone=utc) - self.scheduler.add_job( - lambda: self.check_tasks(), - id="sdl-scheduler", - trigger="interval", - seconds=60, - next_run_time=datetime.utcnow() - ) - - logging.getLogger("apscheduler.executors.default").setLevel(logging.WARNING) - - self.hs_api = hs_api - self.timeout = 60 - - self.scheduler.start() - self.job = None - - def terminate(self): - self.scheduler.shutdown(wait=True) - - def pause(self): - if self.scheduler.running: - self.scheduler.pause() - - def resume(self): - self.scheduler.resume() - - def check_tasks(self): - """ - The check_tasks function is used to check the status of all tasks associated with a given SDL - instance. It will iterate through each task and call the update_task function for each one. - - :param self - :return: The tasks - """ - - try: - success, message = self.check_data_loader() - if success is False: - logging.error(message) - except Exception as e: - logging.error(traceback.format_exc()) - logging.error(e) - - try: - tasks = self.hs_api.tasks.list(orchestration_system=self.data_loader, fetch_all=True) - for task in tasks.items: - self.update_task(task) - except Exception as e: - logging.error(traceback.format_exc()) - logging.error(e) - - def check_data_loader(self): - """ - The check_data_loader function checks to see if the data loader name provided by the user exists. If it does - not, it creates a new data loader with that name. If it does exist, then it sets self.data_loader to that - existing data loader. 
- - :param self - :return: A tuple containing a boolean and a string - """ - - try: - data_loader = self.hs_api.orchestrationsystems.get(uid=self.data_loader.uid) - except (Exception,) as e: - return False, str(e) - - self.data_loader = data_loader - - return True, '' - - def update_task(self, task): - """ - The update_task function is called when a user updates the schedule of an existing task. It checks to see - if the task has a scheduled job, and if it does not, it adds one. If there is already - a scheduled job for that task, then update_task calls update_schedule to change the schedule. - - :param self - :param task: Identify the task that is being updated - :return: bool - """ - - scheduled_jobs = { - scheduled_job.id: scheduled_job - for scheduled_job in self.scheduler.get_jobs() - if scheduled_job.id != 'hydroloader-scheduler' - } - - if str(task.uid) not in scheduled_jobs.keys(): - self.add_schedule(task) - else: - self.update_schedule(task, scheduled_jobs[str(task.uid)]) - - return True - - def add_schedule(self, task): - """ - The add_schedule function is used to add a schedule for the task. The function takes in a - TaskGetResponse object as an argument, which contains all the information needed to create and run - scheduled data loading tasks. 
- - :param self - :param task: TaskGetResponse: Pass the task object to the function - :return: None - """ - - schedule_range = {} - if task.start_time: - schedule_range['start_time'] = task.start_time - - if task.interval and task.interval_period: - self.scheduler.add_job( - lambda: self.load_data(task=task), - IntervalTrigger( - start_date=task.start_time, - **{task.interval_period: task.interval} - ), - id=str(task.uid), - **schedule_range - ) - elif task.crontab: - self.scheduler.add_job( - lambda: self.load_data(task=task), - CronTrigger.from_crontab(task.crontab, timezone='UTC'), - id=str(task.uid), - **schedule_range - ) - - def update_schedule(self, task, scheduled_job): - """ - The update_schedule function is called when a task is updated. - It checks if the crontab or interval has changed, and if so, removes the old job from the scheduler and adds a - new one. If neither have changed, it does nothing. - - :param self - :param task: TaskGetResponse: Get the task information - :param scheduled_job: Get the job id and trigger - :return: None - """ - - if ( - (isinstance(scheduled_job.trigger, CronTrigger) and not task.crontab) or - (isinstance(scheduled_job.trigger, IntervalTrigger) and not task.interval) - ): - self.scheduler.remove_job(scheduled_job.id) - - if isinstance(scheduled_job.trigger, CronTrigger): - task_trigger = CronTrigger.from_crontab(task.crontab, timezone='UTC') - task_trigger_value = str(task_trigger) - scheduled_job_trigger_value = str(scheduled_job.trigger) - elif isinstance(scheduled_job.trigger, IntervalTrigger): - task_trigger = IntervalTrigger( - start_date=task.start_time, - **{task.interval_period: task.interval} - ) - task_trigger_value = task_trigger.interval_length - scheduled_job_trigger_value = scheduled_job.trigger.interval_length - else: - task_trigger_value = None - scheduled_job_trigger_value = None - - if task_trigger_value != scheduled_job_trigger_value: - self.scheduler.remove_job(scheduled_job.id) - - if not 
self.scheduler.get_job(scheduled_job.id) and \ - (task.crontab or task.interval): - self.add_schedule(task) - - @staticmethod - def load_data(task): - """ - The load_data function is used to load data as defined in a task into - HydroServer. The function takes in a single argument, which is an object - representing the task that you want to load. This function will then - call on the service's 'load_data' method, passing in the ID of the task as - an argument. - - :param task: Identify the task that you want to load - :return: None - """ - - task.refresh() - - if task.paused is True: - logging.info(f'Task {task.name} is paused: Skipping') - return - - logging.info(f'Loading data for task {task.name}') - - try: - task.run_local() - logging.info(f'Finished loading data for task {task.name}') - - except Exception as e: - logging.error(traceback.format_exc()) - logging.error(e) diff --git a/src/service_manager.rs b/src/service_manager.rs new file mode 100644 index 0000000..648adf7 --- /dev/null +++ b/src/service_manager.rs @@ -0,0 +1,1304 @@ +use std::{ + fs, + path::PathBuf, + process::Command, + time::{SystemTime, UNIX_EPOCH}, +}; + +use tauri::AppHandle; + +use crate::models::ServiceStatusResponse; +#[cfg(windows)] +use crate::service_paths::SERVICE_CONFIG_DIR_FLAG; + +#[cfg(target_os = "macos")] +use crate::service_paths::active_app_directory_name; + +#[cfg(any(target_os = "macos", target_os = "linux"))] +use std::path::Path; + +#[cfg(target_os = "macos")] +use crate::service_paths::default_shared_service_config_dir; + +#[cfg(windows)] +use std::{ + ffi::{OsStr, OsString}, + path::Path, + time::{Duration, Instant}, +}; + +#[cfg(windows)] +use windows_service::{ + service::{ + ServiceAccess, ServiceErrorControl, ServiceInfo, ServiceStartType, ServiceState, + ServiceType, + }, + service_manager::{ServiceManager, ServiceManagerAccess}, + Error as WindowsServiceError, +}; + +#[cfg(target_os = "linux")] +use std::ffi::{OsStr, OsString}; + +#[cfg(target_os = "macos")] 
+const SERVICE_LABEL: &str = "com.hydroserver.sdl"; +#[cfg(target_os = "macos")] +const SERVICE_PLIST_PATH: &str = "/Library/LaunchDaemons/com.hydroserver.sdl.plist"; + +#[cfg(target_os = "linux")] +const LINUX_SERVICE_NAME: &str = "streaming-data-loader.service"; +#[cfg(target_os = "linux")] +const LINUX_SERVICE_DISPLAY_NAME: &str = "Streaming Data Loader"; +#[cfg(target_os = "linux")] +const LINUX_SERVICE_PATH: &str = "/etc/systemd/system/streaming-data-loader.service"; +#[cfg(target_os = "linux")] +const LINUX_SERVICE_ACTION_FLAG: &str = "--linux-service-action"; +#[cfg(target_os = "linux")] +const LINUX_SERVICE_RESULT_FLAG: &str = "--linux-service-result-file"; +#[cfg(target_os = "linux")] +const LINUX_SERVICE_USER_FLAG: &str = "--linux-service-user"; +#[cfg(target_os = "linux")] +const LINUX_SERVICE_HOME_FLAG: &str = "--linux-service-home"; +#[cfg(target_os = "linux")] +const LINUX_SERVICE_CONFIG_FLAG: &str = "--linux-service-config-dir"; +#[cfg(target_os = "linux")] +const LINUX_SERVICE_EXEC_FLAG: &str = "--linux-service-exec-path"; + +#[cfg(windows)] +pub(crate) const WINDOWS_SERVICE_NAME: &str = "StreamingDataLoader"; +#[cfg(windows)] +const WINDOWS_SERVICE_DISPLAY_NAME: &str = "Streaming Data Loader"; +#[cfg(windows)] +const WINDOWS_SERVICE_DESCRIPTION: &str = + "Background CSV watcher and uploader for Streaming Data Loader."; +#[cfg(windows)] +const WINDOWS_SERVICE_ACTION_FLAG: &str = "--windows-service-action"; +#[cfg(windows)] +const WINDOWS_SERVICE_RESULT_FLAG: &str = "--windows-service-result-file"; +#[cfg(windows)] +const WINDOWS_SERVICE_WAIT_TIMEOUT: Duration = Duration::from_secs(30); +#[cfg(windows)] +const WINDOWS_STATUS_POLL_INTERVAL: Duration = Duration::from_millis(500); +#[cfg(windows)] +const WINDOWS_DAEMON_PID_FILENAME: &str = "daemon.pid"; +#[cfg(windows)] +const ERROR_SERVICE_DOES_NOT_EXIST: i32 = 1060; +#[cfg(windows)] +const ERROR_SERVICE_ALREADY_RUNNING: i32 = 1056; +#[cfg(windows)] +const ERROR_SERVICE_NOT_ACTIVE: i32 = 1062; 
+#[cfg(windows)] +const ERROR_SERVICE_EXISTS: i32 = 1073; +#[cfg(windows)] +const ERROR_SERVICE_MARKED_FOR_DELETE: i32 = 1072; + +pub fn get_service_status(app_handle: &AppHandle) -> Result { + #[cfg(target_os = "macos")] + { + return get_macos_service_status(app_handle); + } + + #[cfg(target_os = "linux")] + { + let _ = app_handle; + return get_linux_service_status(); + } + + #[cfg(windows)] + { + let _ = app_handle; + return get_windows_service_status(); + } + + #[cfg(not(any(target_os = "macos", target_os = "linux", windows)))] + { + let _ = app_handle; + Ok(unsupported_service_status( + "Background service management is only available on macOS, Windows, and Linux systemd hosts.", + )) + } +} + +pub fn install_service(app_handle: &AppHandle) -> Result { + #[cfg(target_os = "macos")] + { + return install_macos_service(app_handle); + } + + #[cfg(target_os = "linux")] + { + run_linux_elevated_action(app_handle, "install")?; + return get_linux_service_status(); + } + + #[cfg(windows)] + { + run_windows_elevated_action(app_handle, "install")?; + return get_windows_service_status(); + } + + #[cfg(not(any(target_os = "macos", target_os = "linux", windows)))] + { + let _ = app_handle; + Err("Background service management isn't supported on this OS.".to_string()) + } +} + +pub fn restart_service(app_handle: &AppHandle) -> Result { + #[cfg(target_os = "macos")] + { + return restart_macos_service(app_handle); + } + + #[cfg(target_os = "linux")] + { + run_linux_elevated_action(app_handle, "restart")?; + return get_linux_service_status(); + } + + #[cfg(windows)] + { + run_windows_elevated_action(app_handle, "restart")?; + return get_windows_service_status(); + } + + #[cfg(not(any(target_os = "macos", target_os = "linux", windows)))] + { + let _ = app_handle; + Err("Background service management isn't supported on this OS.".to_string()) + } +} + +pub fn uninstall_service(app_handle: &AppHandle) -> Result { + #[cfg(target_os = "macos")] + { + return 
uninstall_macos_service(app_handle); + } + + #[cfg(target_os = "linux")] + { + run_linux_elevated_action(app_handle, "uninstall")?; + return get_linux_service_status(); + } + + #[cfg(windows)] + { + run_windows_elevated_action(app_handle, "uninstall")?; + return get_windows_service_status(); + } + + #[cfg(not(any(target_os = "macos", target_os = "linux", windows)))] + { + let _ = app_handle; + Err("Background service management isn't supported on this OS.".to_string()) + } +} + +pub fn maybe_handle_service_management_cli() -> Option { + #[cfg(target_os = "linux")] + { + return maybe_handle_linux_management_cli(); + } + + #[cfg(windows)] + { + return maybe_handle_windows_management_cli(); + } + + #[cfg(not(any(target_os = "linux", windows)))] + { + None + } +} + +#[allow(dead_code)] +fn unsupported_service_status(message: &str) -> ServiceStatusResponse { + ServiceStatusResponse { + supported: false, + installed: false, + running: false, + label: String::new(), + plist_path: String::new(), + executable_path: String::new(), + status_message: message.to_string(), + } +} + +#[cfg(target_os = "macos")] +fn get_macos_service_status(app_handle: &AppHandle) -> Result { + let executable_path = service_executable_path(app_handle)?; + let plist_path = PathBuf::from(SERVICE_PLIST_PATH); + let installed = plist_path.exists(); + let launchctl_output = launchctl_print_output(); + let launchctl_running = launchctl_output + .as_deref() + .map(|output| output.contains("state = running") || output.contains("pid =")) + .unwrap_or(false); + let process_running = daemon_process_running(); + let running = installed && (launchctl_running || process_running); + + let status_message = match (installed, running) { + (false, _) => String::new(), + (true, true) => { + "The background service is installed and running. It will persist app closure and user logout." + .to_string() + } + (true, false) => { + "The background service is installed but not currently running. 
Restart it to resume background loading." + .to_string() + } + }; + + Ok(ServiceStatusResponse { + supported: true, + installed, + running, + label: SERVICE_LABEL.to_string(), + plist_path: plist_path.to_string_lossy().into_owned(), + executable_path: executable_path.to_string_lossy().into_owned(), + status_message, + }) +} + +#[cfg(target_os = "macos")] +fn install_macos_service(app_handle: &AppHandle) -> Result { + let plist_contents = render_macos_plist(app_handle)?; + let temp_path = write_temp_script( + "install", + &format!( + "set -e\nmkdir -p {shared_dir} {logs_dir}\ncat > {temp_plist} <<'PLIST'\n{plist}\nPLIST\ncp {temp_plist} {system_plist}\nchmod 644 {system_plist}\nchown root:wheel {system_plist}\n/bin/launchctl bootout system {system_plist} >/dev/null 2>&1 || true\n/bin/launchctl bootstrap system {system_plist}\n/bin/launchctl kickstart -k system/{label} >/dev/null 2>&1 || true\nrm -f {temp_plist}\n", + shared_dir = shell_quote(default_shared_service_config_dir()?.to_string_lossy().as_ref()), + logs_dir = shell_quote( + default_shared_service_config_dir()? 
+ .join("logs") + .to_string_lossy() + .as_ref() + ), + temp_plist = shell_quote(temp_plist_path().to_string_lossy().as_ref()), + plist = plist_contents, + system_plist = shell_quote(SERVICE_PLIST_PATH), + label = SERVICE_LABEL, + ), + )?; + + let result = run_macos_elevated_script(&temp_path); + let _ = fs::remove_file(&temp_path); + result?; + get_macos_service_status(app_handle) +} + +#[cfg(target_os = "macos")] +fn restart_macos_service(app_handle: &AppHandle) -> Result { + if !Path::new(SERVICE_PLIST_PATH).exists() { + return Err("The background service is not installed.".to_string()); + } + + let temp_path = write_temp_script( + "restart", + &format!( + "set -e\n/bin/launchctl bootout system {system_plist} >/dev/null 2>&1 || true\n/bin/launchctl bootstrap system {system_plist}\n/bin/launchctl kickstart -k system/{label}\n", + system_plist = shell_quote(SERVICE_PLIST_PATH), + label = SERVICE_LABEL, + ), + )?; + + let result = run_macos_elevated_script(&temp_path); + let _ = fs::remove_file(&temp_path); + result?; + get_macos_service_status(app_handle) +} + +#[cfg(target_os = "macos")] +fn uninstall_macos_service(app_handle: &AppHandle) -> Result { + let temp_path = write_temp_script( + "uninstall", + &format!( + "set -e\n/bin/launchctl bootout system {system_plist} >/dev/null 2>&1 || true\nrm -f {system_plist}\n", + system_plist = shell_quote(SERVICE_PLIST_PATH), + ), + )?; + + let result = run_macos_elevated_script(&temp_path); + let _ = fs::remove_file(&temp_path); + result?; + get_macos_service_status(app_handle) +} + +#[cfg(target_os = "macos")] +fn render_macos_plist(app_handle: &AppHandle) -> Result { + let executable_path = service_executable_path(app_handle)?; + let shared_dir = default_shared_service_config_dir()?; + let logs_dir = shared_dir.join("logs"); + + Ok(format!( + r#" + + + + Label + {label} + + ProgramArguments + + {program} + --service + + + RunAtLoad + + + KeepAlive + + + WorkingDirectory + {working_dir} + + StandardOutPath + 
{stdout_path} + + StandardErrorPath + {stderr_path} + + +"#, + label = SERVICE_LABEL, + program = xml_escape(&executable_path.to_string_lossy()), + working_dir = xml_escape(&shared_dir.to_string_lossy()), + stdout_path = xml_escape(&logs_dir.join("daemon.stdout.log").to_string_lossy()), + stderr_path = xml_escape(&logs_dir.join("daemon.stderr.log").to_string_lossy()), + )) +} + +#[cfg(target_os = "macos")] +fn launchctl_print_output() -> Option { + let output = Command::new("/bin/launchctl") + .arg("print") + .arg(format!("system/{SERVICE_LABEL}")) + .output() + .ok()?; + + if !output.status.success() { + return None; + } + + Some(String::from_utf8_lossy(&output.stdout).into_owned()) +} + +#[cfg(target_os = "macos")] +fn daemon_process_running() -> bool { + let Ok(output) = Command::new("/usr/bin/pgrep") + .arg("-af") + .arg("streaming-data-loader --service") + .output() + else { + return false; + }; + + output.status.success() && !String::from_utf8_lossy(&output.stdout).trim().is_empty() +} + +#[cfg(target_os = "macos")] +fn run_macos_elevated_script(script_path: &Path) -> Result<(), String> { + let command = format!("/bin/sh {}", script_path.display()); + let output = Command::new("/usr/bin/osascript") + .arg("-e") + .arg(format!( + r#"do shell script "{}" with administrator privileges"#, + applescript_escape(&command) + )) + .output() + .map_err(|err| err.to_string())?; + + if output.status.success() { + return Ok(()); + } + + let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); + let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string(); + let message = stderr + .strip_prefix("execution error: ") + .unwrap_or(&stderr) + .trim() + .to_string(); + + if !message.is_empty() { + return Err(message); + } + if !stdout.is_empty() { + return Err(stdout); + } + + Err("The background service command did not complete.".to_string()) +} + +#[cfg(target_os = "linux")] +fn get_linux_service_status() -> Result { + if !linux_systemd_supported()? 
{ + return Ok(unsupported_service_status( + "Background service management requires a Linux systemd host with systemctl available.", + )); + } + + let properties = linux_query_service_properties()?; + let load_state = properties + .get("LoadState") + .map(String::as_str) + .unwrap_or("not-found"); + let active_state = properties + .get("ActiveState") + .map(String::as_str) + .unwrap_or("inactive"); + let unit_file_state = properties + .get("UnitFileState") + .map(String::as_str) + .unwrap_or("bad"); + let fragment_path = properties + .get("FragmentPath") + .cloned() + .unwrap_or_else(|| LINUX_SERVICE_PATH.to_string()); + + let installed = Path::new(LINUX_SERVICE_PATH).exists() + || load_state != "not-found" + || matches!( + unit_file_state, + "enabled" + | "enabled-runtime" + | "disabled" + | "static" + | "indirect" + | "linked" + | "linked-runtime" + | "alias" + | "masked" + ); + let running = matches!(active_state, "active" | "activating" | "reloading"); + + let status_message = match (installed, running) { + (false, _) => String::new(), + (true, true) => { + "The background service is installed and running. It will persist app closure and user logout." + .to_string() + } + (true, false) => { + "The background service is installed but not currently running. Restart it to resume background loading." 
+ .to_string() + } + }; + + Ok(ServiceStatusResponse { + supported: true, + installed, + running, + label: LINUX_SERVICE_DISPLAY_NAME.to_string(), + plist_path: fragment_path, + executable_path: String::new(), + status_message, + }) +} + +#[cfg(target_os = "linux")] +fn linux_systemd_supported() -> Result { + let output = match Command::new("systemctl") + .args(["show", "--property=Version", "--value"]) + .output() + { + Ok(output) => output, + Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(false), + Err(err) => return Err(err.to_string()), + }; + + if output.status.success() { + return Ok(true); + } + + let stderr = String::from_utf8_lossy(&output.stderr); + if stderr.contains("System has not been booted with systemd") { + return Ok(false); + } + + Ok(false) +} + +#[cfg(target_os = "linux")] +fn linux_query_service_properties() -> Result, String> { + let output = Command::new("systemctl") + .args([ + "show", + LINUX_SERVICE_NAME, + "--property=LoadState,ActiveState,UnitFileState,FragmentPath", + "--no-page", + ]) + .output() + .map_err(|err| err.to_string())?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); + if stderr.contains("System has not been booted with systemd") { + return Err( + "Background service management requires systemd and is unavailable on this Linux host." 
+ .to_string(), + ); + } + if !stderr.is_empty() { + return Err(stderr); + } + } + + Ok(parse_systemctl_properties(&String::from_utf8_lossy( + &output.stdout, + ))) +} + +#[cfg(target_os = "linux")] +fn maybe_handle_linux_management_cli() -> Option { + let mut args = std::env::args_os().skip(1); + let mut action: Option = None; + let mut result_file: Option = None; + let mut user: Option = None; + let mut home: Option = None; + let mut config_dir: Option = None; + let mut exec_path: Option = None; + + while let Some(arg) = args.next() { + if arg == OsStr::new(LINUX_SERVICE_ACTION_FLAG) { + action = args.next(); + } else if arg == OsStr::new(LINUX_SERVICE_RESULT_FLAG) { + result_file = args.next().map(PathBuf::from); + } else if arg == OsStr::new(LINUX_SERVICE_USER_FLAG) { + user = args.next(); + } else if arg == OsStr::new(LINUX_SERVICE_HOME_FLAG) { + home = args.next(); + } else if arg == OsStr::new(LINUX_SERVICE_CONFIG_FLAG) { + config_dir = args.next().map(PathBuf::from); + } else if arg == OsStr::new(LINUX_SERVICE_EXEC_FLAG) { + exec_path = args.next().map(PathBuf::from); + } + } + + let action = action?; + let result = run_linux_management_action( + action.as_os_str(), + LinuxServiceContext { + user, + home, + config_dir, + exec_path, + }, + ); + + if let Some(path) = result_file { + match &result { + Ok(()) => { + let _ = fs::write(&path, ""); + } + Err(message) => { + let _ = fs::write(&path, message); + } + } + } + + Some(if result.is_ok() { 0 } else { 1 }) +} + +#[cfg(target_os = "linux")] +struct LinuxServiceContext { + user: Option, + home: Option, + config_dir: Option, + exec_path: Option, +} + +#[cfg(target_os = "linux")] +fn run_linux_elevated_action(app_handle: &AppHandle, action: &str) -> Result<(), String> { + let executable_path = service_executable_path(app_handle)?; + let result_path = temp_result_path("linux-service"); + let config_dir = crate::runtime::resolve_config_dir(app_handle)?; + let user = std::env::var_os("USER") + .or_else(|| 
std::env::var_os("LOGNAME")) + .ok_or_else(|| { + "Couldn't determine the Linux account for the background service.".to_string() + })?; + let home = std::env::var_os("HOME") + .ok_or_else(|| "Couldn't determine the current user's home directory.".to_string())?; + + let status = Command::new("pkexec") + .arg(&executable_path) + .arg(LINUX_SERVICE_ACTION_FLAG) + .arg(action) + .arg(LINUX_SERVICE_RESULT_FLAG) + .arg(&result_path) + .arg(LINUX_SERVICE_USER_FLAG) + .arg(&user) + .arg(LINUX_SERVICE_HOME_FLAG) + .arg(&home) + .arg(LINUX_SERVICE_CONFIG_FLAG) + .arg(&config_dir) + .arg(LINUX_SERVICE_EXEC_FLAG) + .arg(&executable_path) + .status() + .map_err(|err| { + if err.kind() == std::io::ErrorKind::NotFound { + "Couldn't find `pkexec`. Install polkit support to manage the Linux background service." + .to_string() + } else { + format!("Couldn't launch the Linux elevation prompt: {err}") + } + })?; + + let message = fs::read_to_string(&result_path) + .ok() + .map(|contents| contents.trim().to_string()) + .filter(|contents| !contents.is_empty()); + let _ = fs::remove_file(&result_path); + + if status.success() { + return Ok(()); + } + + Err(message.unwrap_or_else(|| { + "The Linux background service action failed or was canceled.".to_string() + })) +} + +#[cfg(target_os = "linux")] +fn run_linux_management_action(action: &OsStr, context: LinuxServiceContext) -> Result<(), String> { + match action.to_string_lossy().as_ref() { + "install" => install_linux_service(context), + "restart" => restart_linux_service(), + "uninstall" => uninstall_linux_service(), + _ => Err("Unknown Linux service action.".to_string()), + } +} + +#[cfg(target_os = "linux")] +fn install_linux_service(context: LinuxServiceContext) -> Result<(), String> { + let user = context + .user + .ok_or_else(|| "Missing Linux service user.".to_string())?; + let home = context + .home + .ok_or_else(|| "Missing Linux service home directory.".to_string())?; + let config_dir = context + .config_dir + .ok_or_else(|| 
"Missing Linux service config directory.".to_string())?; + let exec_path = context + .exec_path + .ok_or_else(|| "Missing Linux service executable path.".to_string())?; + + fs::create_dir_all(&config_dir).map_err(|err| err.to_string())?; + let unit_contents = render_linux_unit( + user.to_string_lossy().as_ref(), + home.to_string_lossy().as_ref(), + &config_dir, + &exec_path, + ); + fs::write(LINUX_SERVICE_PATH, unit_contents).map_err(|err| err.to_string())?; + + run_systemctl(&["daemon-reload"])?; + run_systemctl(&["enable", "--now", LINUX_SERVICE_NAME])?; + Ok(()) +} + +#[cfg(target_os = "linux")] +fn restart_linux_service() -> Result<(), String> { + if !Path::new(LINUX_SERVICE_PATH).exists() { + return Err("The background service is not installed.".to_string()); + } + + run_systemctl(&["restart", LINUX_SERVICE_NAME]) +} + +#[cfg(target_os = "linux")] +fn uninstall_linux_service() -> Result<(), String> { + if Path::new(LINUX_SERVICE_PATH).exists() { + let _ = run_systemctl(&["disable", "--now", LINUX_SERVICE_NAME]); + let _ = run_systemctl(&["reset-failed", LINUX_SERVICE_NAME]); + fs::remove_file(LINUX_SERVICE_PATH).map_err(|err| err.to_string())?; + run_systemctl(&["daemon-reload"])?; + } + + Ok(()) +} + +#[cfg(target_os = "linux")] +fn render_linux_unit(user: &str, home: &str, config_dir: &Path, exec_path: &Path) -> String { + format!( + "[Unit]\nDescription={display_name}\nAfter=network-online.target\nWants=network-online.target\n\n[Service]\nType=simple\nUser={user}\nWorkingDirectory={working_dir}\nEnvironment=\"HOME={home}\"\nEnvironment=\"SDL_CONFIG_DIR={config_dir}\"\nExecStart=\"{exec_path}\" --service\nRestart=always\nRestartSec=2\n\n[Install]\nWantedBy=multi-user.target\n", + display_name = LINUX_SERVICE_DISPLAY_NAME, + user = systemd_escape(user), + working_dir = systemd_escape(&config_dir.to_string_lossy()), + home = systemd_escape(home), + config_dir = systemd_escape(&config_dir.to_string_lossy()), + exec_path = 
systemd_escape(&exec_path.to_string_lossy()), + ) +} + +#[cfg(target_os = "linux")] +fn run_systemctl(args: &[&str]) -> Result<(), String> { + let output = Command::new("systemctl") + .args(args) + .output() + .map_err(|err| err.to_string())?; + + if output.status.success() { + return Ok(()); + } + + let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); + let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string(); + if !stderr.is_empty() { + return Err(stderr); + } + if !stdout.is_empty() { + return Err(stdout); + } + Err("The Linux system service command did not complete.".to_string()) +} + +#[cfg(target_os = "linux")] +fn parse_systemctl_properties(output: &str) -> std::collections::HashMap { + output + .lines() + .filter_map(|line| { + let (key, value) = line.split_once('=')?; + Some((key.to_string(), value.to_string())) + }) + .collect() +} + +#[cfg(windows)] +fn get_windows_service_status() -> Result { + let manager = ServiceManager::local_computer( + None::<&str>, + ServiceManagerAccess::CONNECT | ServiceManagerAccess::ENUMERATE_SERVICE, + ) + .map_err(format_windows_service_error)?; + + let service = match manager.open_service( + WINDOWS_SERVICE_NAME, + ServiceAccess::QUERY_STATUS | ServiceAccess::QUERY_CONFIG, + ) { + Ok(service) => service, + Err(error) if windows_service_unavailable(&error) => { + return Ok(ServiceStatusResponse { + supported: true, + installed: false, + running: false, + label: WINDOWS_SERVICE_DISPLAY_NAME.to_string(), + plist_path: String::new(), + executable_path: String::new(), + status_message: String::new(), + }); + } + Err(error) => return Err(format_windows_service_error(error)), + }; + + let status = service + .query_status() + .map_err(format_windows_service_error)?; + let config = service + .query_config() + .map_err(format_windows_service_error)?; + let running = windows_service_is_active(status.current_state); + + let status_message = match running { + true => { + "The background service is 
installed and running. It will persist app closure and user logout." + .to_string() + } + false => { + "The background service is installed but not currently running. Restart it to resume background loading." + .to_string() + } + }; + + Ok(ServiceStatusResponse { + supported: true, + installed: true, + running, + label: WINDOWS_SERVICE_DISPLAY_NAME.to_string(), + plist_path: String::new(), + executable_path: config.executable_path.to_string_lossy().into_owned(), + status_message, + }) +} + +#[cfg(windows)] +fn maybe_handle_windows_management_cli() -> Option { + let mut args = std::env::args_os().skip(1); + let mut action: Option = None; + let mut result_file: Option = None; + let mut config_dir: Option = None; + + while let Some(arg) = args.next() { + if arg == OsStr::new(WINDOWS_SERVICE_ACTION_FLAG) { + action = args.next(); + } else if let Some(value) = + inline_windows_path_flag_value(&arg, WINDOWS_SERVICE_RESULT_FLAG) + { + result_file = Some(value); + } else if arg == OsStr::new(WINDOWS_SERVICE_RESULT_FLAG) { + result_file = args.next().map(PathBuf::from); + } else if let Some(value) = inline_windows_path_flag_value(&arg, SERVICE_CONFIG_DIR_FLAG) { + config_dir = Some(value); + } else if arg == OsStr::new(SERVICE_CONFIG_DIR_FLAG) { + config_dir = args.next().map(PathBuf::from); + } + } + + let action = action?; + let result = run_windows_management_action(action.as_os_str(), config_dir); + + if let Some(path) = result_file { + match &result { + Ok(()) => { + let _ = fs::write(&path, ""); + } + Err(message) => { + let _ = fs::write(&path, message); + } + } + } + + Some(if result.is_ok() { 0 } else { 1 }) +} + +#[cfg(windows)] +fn run_windows_elevated_action(app_handle: &AppHandle, action: &str) -> Result<(), String> { + let executable_path = service_executable_path(app_handle)?; + let result_path = temp_result_path("windows-service"); + let config_dir = crate::runtime::resolve_config_dir(app_handle)?; + tracing::info!( + action, + executable_path = 
%executable_path.display(), + config_dir = %config_dir.display(), + "requesting elevated Windows background service action" + ); + let result_file_arg = windows_inline_path_flag_argument(WINDOWS_SERVICE_RESULT_FLAG, &result_path); + let config_dir_arg = windows_inline_path_flag_argument(SERVICE_CONFIG_DIR_FLAG, &config_dir); + let script = format!( + "$proc = Start-Process -FilePath '{}' -Verb RunAs -WindowStyle Hidden -Wait -PassThru -ArgumentList @('{}', '{}', '{}', '{}'); exit $proc.ExitCode", + powershell_quote(&executable_path.to_string_lossy()), + WINDOWS_SERVICE_ACTION_FLAG, + action, + powershell_quote(&result_file_arg), + powershell_quote(&config_dir_arg) + ); + + let status = Command::new("powershell") + .args([ + "-NoProfile", + "-ExecutionPolicy", + "Bypass", + "-Command", + &script, + ]) + .status() + .map_err(|err| format!("Couldn't launch the Windows elevation prompt: {err}"))?; + + let message = fs::read_to_string(&result_path) + .ok() + .map(|contents| contents.trim().to_string()) + .filter(|contents| !contents.is_empty()); + let _ = fs::remove_file(&result_path); + + if status.success() { + tracing::info!(action, "Windows background service action completed"); + return Ok(()); + } + + let message = message.unwrap_or_else(|| { + "The Windows background service action failed or was canceled.".to_string() + }); + tracing::error!(action, error = %message, "Windows background service action failed"); + Err(message) +} + +#[cfg(windows)] +fn run_windows_management_action( + action: &OsStr, + config_dir: Option, +) -> Result<(), String> { + tracing::info!( + action = %action.to_string_lossy(), + config_dir = %config_dir + .as_ref() + .map(|path| path.display().to_string()) + .unwrap_or_else(|| "".to_string()), + "handling Windows background service management action" + ); + match action.to_string_lossy().as_ref() { + "install" => install_windows_service(config_dir), + "restart" => restart_windows_service(config_dir), + "uninstall" => 
uninstall_windows_service(config_dir), + _ => Err("Unknown Windows service action.".to_string()), + } +} + +#[cfg(windows)] +fn install_windows_service(config_dir: Option) -> Result<(), String> { + let manager = ServiceManager::local_computer( + None::<&str>, + ServiceManagerAccess::CONNECT | ServiceManagerAccess::CREATE_SERVICE, + ) + .map_err(format_windows_service_error)?; + + if let Ok(_existing) = manager.open_service(WINDOWS_SERVICE_NAME, ServiceAccess::QUERY_STATUS) { + return Err("The background service is already installed.".to_string()); + } + + let config_dir = + config_dir.unwrap_or(crate::service_paths::resolve_shared_service_config_dir()?); + + stop_existing_windows_daemon(&config_dir)?; + + let executable_path = std::env::current_exe().map_err(|err| err.to_string())?; + tracing::info!( + executable_path = %executable_path.display(), + config_dir = %config_dir.display(), + "installing Windows background service" + ); + let service_info = ServiceInfo { + name: OsString::from(WINDOWS_SERVICE_NAME), + display_name: OsString::from(WINDOWS_SERVICE_DISPLAY_NAME), + service_type: ServiceType::OWN_PROCESS, + start_type: ServiceStartType::AutoStart, + error_control: ServiceErrorControl::Normal, + executable_path, + launch_arguments: vec![ + OsString::from("--service"), + windows_service_config_dir_launch_argument(&config_dir), + ], + dependencies: vec![], + account_name: None, + account_password: None, + }; + + let service = manager + .create_service( + &service_info, + ServiceAccess::QUERY_STATUS + | ServiceAccess::START + | ServiceAccess::STOP + | ServiceAccess::DELETE + | ServiceAccess::CHANGE_CONFIG, + ) + .map_err(format_windows_service_error)?; + + let _ = service.set_description(WINDOWS_SERVICE_DESCRIPTION); + let empty_args: [&OsStr; 0] = []; + service + .start(&empty_args) + .map_err(format_windows_service_error)?; + wait_for_windows_service_state(&service, ServiceState::Running) +} + +#[cfg(windows)] +fn stop_existing_windows_daemon(config_dir: 
&Path) -> Result<(), String> { + let pid_path = config_dir.join(WINDOWS_DAEMON_PID_FILENAME); + let endpoint_path = crate::service_paths::daemon_endpoint_path(config_dir); + + let Some(pid) = fs::read_to_string(&pid_path) + .ok() + .and_then(|contents| contents.trim().parse::().ok()) + else { + return Ok(()); + }; + + let stop_script = format!( + "$p = Get-Process -Id {pid} -ErrorAction SilentlyContinue; if ($p) {{ Stop-Process -Id {pid} -Force -ErrorAction SilentlyContinue }}" + ); + let _ = Command::new("powershell") + .args([ + "-NoProfile", + "-ExecutionPolicy", + "Bypass", + "-Command", + &stop_script, + ]) + .status(); + + std::thread::sleep(Duration::from_secs(1)); + + let _ = fs::remove_file(&endpoint_path); + let _ = fs::remove_file(&pid_path); + Ok(()) +} + +#[cfg(windows)] +fn restart_windows_service(config_dir: Option) -> Result<(), String> { + let manager = ServiceManager::local_computer(None::<&str>, ServiceManagerAccess::CONNECT) + .map_err(format_windows_service_error)?; + let service_access = ServiceAccess::QUERY_STATUS + | ServiceAccess::QUERY_CONFIG + | ServiceAccess::CHANGE_CONFIG + | ServiceAccess::START + | ServiceAccess::STOP; + let service = manager + .open_service(WINDOWS_SERVICE_NAME, service_access) + .map_err(format_windows_service_error)?; + + if let Some(config_dir) = config_dir { + tracing::info!(config_dir = %config_dir.display(), "syncing Windows service launch config before restart"); + sync_windows_service_launch_config(&service, &config_dir)?; + } + + tracing::info!("restarting Windows background service"); + stop_windows_service_if_needed(&service)?; + let empty_args: [&OsStr; 0] = []; + service + .start(&empty_args) + .map_err(format_windows_service_error)?; + wait_for_windows_service_state(&service, ServiceState::Running) +} + +#[cfg(windows)] +fn uninstall_windows_service(_config_dir: Option) -> Result<(), String> { + let manager = ServiceManager::local_computer(None::<&str>, ServiceManagerAccess::CONNECT) + 
.map_err(format_windows_service_error)?; + let service = manager + .open_service( + WINDOWS_SERVICE_NAME, + ServiceAccess::QUERY_STATUS | ServiceAccess::STOP | ServiceAccess::DELETE, + ) + .map_err(format_windows_service_error)?; + + tracing::info!("uninstalling Windows background service"); + stop_windows_service_if_needed(&service)?; + service.delete().map_err(format_windows_service_error) +} + +#[cfg(windows)] +fn sync_windows_service_launch_config( + service: &windows_service::service::Service, + config_dir: &Path, +) -> Result<(), String> { + let existing_config = service + .query_config() + .map_err(format_windows_service_error)?; + let desired_launch_arguments = vec![ + OsString::from("--service"), + windows_service_config_dir_launch_argument(config_dir), + ]; + + let desired_config = ServiceInfo { + name: OsString::from(WINDOWS_SERVICE_NAME), + display_name: existing_config.display_name, + service_type: existing_config.service_type, + start_type: existing_config.start_type, + error_control: existing_config.error_control, + executable_path: existing_config.executable_path, + launch_arguments: desired_launch_arguments, + dependencies: existing_config.dependencies, + account_name: existing_config.account_name, + account_password: None, + }; + + service + .change_config(&desired_config) + .map_err(format_windows_service_error) +} + +#[cfg(windows)] +fn stop_windows_service_if_needed( + service: &windows_service::service::Service, +) -> Result<(), String> { + let status = service + .query_status() + .map_err(format_windows_service_error)?; + match status.current_state { + ServiceState::Stopped => Ok(()), + ServiceState::StopPending => wait_for_windows_service_state(service, ServiceState::Stopped), + _ => { + service.stop().map_err(format_windows_service_error)?; + wait_for_windows_service_state(service, ServiceState::Stopped) + } + } +} + +#[cfg(windows)] +fn wait_for_windows_service_state( + service: &windows_service::service::Service, + desired_state: 
ServiceState, +) -> Result<(), String> { + let deadline = Instant::now() + WINDOWS_SERVICE_WAIT_TIMEOUT; + + loop { + let status = service + .query_status() + .map_err(format_windows_service_error)?; + if status.current_state == desired_state { + return Ok(()); + } + + if Instant::now() >= deadline { + return Err(match desired_state { + ServiceState::Running => { + "Timed out waiting for the Windows background service to start.".to_string() + } + ServiceState::Stopped => { + "Timed out waiting for the Windows background service to stop.".to_string() + } + _ => "Timed out waiting for the Windows background service.".to_string(), + }); + } + + std::thread::sleep(WINDOWS_STATUS_POLL_INTERVAL); + } +} + +#[cfg(windows)] +fn windows_service_is_active(state: ServiceState) -> bool { + matches!( + state, + ServiceState::Running | ServiceState::StartPending | ServiceState::ContinuePending + ) +} + +#[cfg(windows)] +fn windows_service_not_found(error: &WindowsServiceError) -> bool { + match error { + WindowsServiceError::Winapi(io_error) => { + io_error.raw_os_error() == Some(ERROR_SERVICE_DOES_NOT_EXIST) + } + _ => false, + } +} + +#[cfg(windows)] +fn windows_service_unavailable(error: &WindowsServiceError) -> bool { + windows_service_not_found(error) || windows_service_marked_for_delete(error) +} + +#[cfg(windows)] +fn windows_service_marked_for_delete(error: &WindowsServiceError) -> bool { + match error { + WindowsServiceError::Winapi(io_error) => { + io_error.raw_os_error() == Some(ERROR_SERVICE_MARKED_FOR_DELETE) + } + _ => false, + } +} + +#[cfg(windows)] +fn format_windows_service_error(error: WindowsServiceError) -> String { + match error { + WindowsServiceError::Winapi(io_error) => match io_error.raw_os_error() { + Some(ERROR_SERVICE_DOES_NOT_EXIST) => { + "The background service is not installed.".to_string() + } + Some(ERROR_SERVICE_MARKED_FOR_DELETE) => { + "The background service is being removed.".to_string() + } + Some(ERROR_SERVICE_ALREADY_RUNNING) => { + 
"The background service is already running.".to_string() + } + Some(ERROR_SERVICE_NOT_ACTIVE) => "The background service is not running.".to_string(), + Some(ERROR_SERVICE_EXISTS) => { + "The background service is already installed.".to_string() + } + Some(5) => "Administrator privileges are required to manage the background service." + .to_string(), + _ => io_error.to_string(), + }, + _ => error.to_string(), + } +} + +fn service_executable_path(_app_handle: &AppHandle) -> Result { + #[cfg(target_os = "linux")] + if let Some(appimage_path) = std::env::var_os("APPIMAGE") { + return Ok(PathBuf::from(appimage_path)); + } + + std::env::current_exe().map_err(|err| err.to_string()) +} + +#[cfg(target_os = "macos")] +fn write_temp_script(kind: &str, contents: &str) -> Result { + let path = std::env::temp_dir().join(format!( + "sdl-service-{kind}-{}.sh", + SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|duration| duration.as_nanos()) + .unwrap_or_default() + )); + fs::write(&path, contents).map_err(|err| err.to_string())?; + Ok(path) +} + +#[cfg(target_os = "macos")] +fn temp_plist_path() -> PathBuf { + std::env::temp_dir().join(format!( + "{}.plist", + active_app_directory_name().replace(' ', "-") + )) +} + +#[cfg(any(target_os = "linux", windows))] +fn temp_result_path(kind: &str) -> PathBuf { + std::env::temp_dir().join(format!( + "sdl-{kind}-{}.txt", + SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|duration| duration.as_nanos()) + .unwrap_or_default() + )) +} + +#[cfg(target_os = "macos")] +fn shell_quote(value: &str) -> String { + format!("'{}'", value.replace('\'', "'\"'\"'")) +} + +#[cfg(target_os = "linux")] +fn systemd_escape(value: &str) -> String { + value + .replace('\\', "\\\\") + .replace('"', "\\\"") + .replace(' ', "\\x20") +} + +#[cfg(target_os = "macos")] +fn applescript_escape(value: &str) -> String { + value.replace('\\', "\\\\").replace('"', "\\\"") +} + +#[cfg(windows)] +fn powershell_quote(value: &str) -> String { + 
value.replace('\'', "''") +} + +#[cfg(windows)] +fn windows_service_config_dir_launch_argument(config_dir: &Path) -> OsString { + OsString::from(format!( + r#"{SERVICE_CONFIG_DIR_FLAG}="{}""#, + config_dir.to_string_lossy() + )) +} + +#[cfg(windows)] +fn windows_inline_path_flag_argument(flag: &str, path: &Path) -> String { + format!(r#"{flag}="{}""#, path.to_string_lossy()) +} + +#[cfg(windows)] +fn inline_windows_path_flag_value(arg: &OsString, flag: &str) -> Option { + let prefix = format!("{flag}="); + let value = arg.to_string_lossy(); + let raw_path = value.strip_prefix(&prefix)?; + let path = raw_path.trim_matches('"'); + if path.is_empty() { + return None; + } + + Some(PathBuf::from(path)) +} + +#[cfg(target_os = "macos")] +fn xml_escape(value: &str) -> String { + value + .replace('&', "&") + .replace('<', "<") + .replace('>', ">") +} diff --git a/src/service_paths.rs b/src/service_paths.rs new file mode 100644 index 0000000..c78604a --- /dev/null +++ b/src/service_paths.rs @@ -0,0 +1,143 @@ +use std::{ + ffi::OsString, + fs, + path::{Path, PathBuf}, +}; + +pub const APP_DIRECTORY_NAME: &str = "Streaming Data Loader"; +pub const DEV_APP_DIRECTORY_NAME: &str = "Streaming Data Loader Dev"; +#[cfg(windows)] +pub const WINDOWS_SHARED_APP_DIRECTORY_NAME: &str = "StreamingDataLoader"; +#[cfg(windows)] +pub const WINDOWS_SHARED_DEV_APP_DIRECTORY_NAME: &str = "StreamingDataLoaderDev"; +pub const SERVICE_CONFIG_DIR_FLAG: &str = "--service-config-dir"; + +const DAEMON_ENDPOINT_FILENAME: &str = "daemon.endpoint.json"; +const LOGS_DIRECTORY_NAME: &str = "logs"; + +pub fn active_app_directory_name() -> &'static str { + if cfg!(debug_assertions) { + DEV_APP_DIRECTORY_NAME + } else { + APP_DIRECTORY_NAME + } +} + +pub fn active_shared_service_directory_name() -> &'static str { + #[cfg(windows)] + { + return if cfg!(debug_assertions) { + WINDOWS_SHARED_DEV_APP_DIRECTORY_NAME + } else { + WINDOWS_SHARED_APP_DIRECTORY_NAME + }; + } + + #[cfg(not(windows))] + { + 
active_app_directory_name() + } +} + +pub fn resolve_shared_service_config_dir() -> Result { + if let Some(config_dir) = service_config_dir_override_from_args(std::env::args_os()) { + fs::create_dir_all(&config_dir).map_err(|err| err.to_string())?; + return Ok(config_dir); + } + + if let Ok(config_dir) = std::env::var("SDL_CONFIG_DIR") { + let candidate = PathBuf::from(config_dir); + fs::create_dir_all(&candidate).map_err(|err| err.to_string())?; + return Ok(candidate); + } + + default_shared_service_config_dir() +} + +pub fn default_shared_service_config_dir() -> Result { + #[cfg(target_os = "macos")] + { + let candidate = PathBuf::from("/Users/Shared").join(active_shared_service_directory_name()); + fs::create_dir_all(&candidate).map_err(|err| err.to_string())?; + return Ok(candidate); + } + + #[cfg(target_os = "windows")] + { + let candidate = windows_program_data_dir().join(active_shared_service_directory_name()); + fs::create_dir_all(&candidate).map_err(|err| err.to_string())?; + return Ok(candidate); + } + + #[cfg(not(any(target_os = "macos", target_os = "windows")))] + { + let home_dir = std::env::var("HOME") + .or_else(|_| std::env::var("USERPROFILE")) + .map(PathBuf::from) + .map_err(|_| "Couldn't resolve an application data directory.".to_string())?; + let candidate = home_dir.join(active_shared_service_directory_name()); + fs::create_dir_all(&candidate).map_err(|err| err.to_string())?; + Ok(candidate) + } +} + +pub fn resolve_shared_logs_dir() -> Result { + let config_dir = resolve_shared_service_config_dir()?; + let logs_dir = config_dir.join(LOGS_DIRECTORY_NAME); + fs::create_dir_all(&logs_dir).map_err(|err| err.to_string())?; + Ok(logs_dir) +} + +pub fn daemon_endpoint_path(config_dir: &Path) -> PathBuf { + config_dir.join(DAEMON_ENDPOINT_FILENAME) +} + +#[cfg(windows)] +pub fn legacy_shared_service_config_dirs() -> Vec { + let program_data = windows_program_data_dir(); + vec![ + program_data.join(APP_DIRECTORY_NAME), + 
program_data.join(DEV_APP_DIRECTORY_NAME), + ] +} + +pub(crate) fn service_config_dir_override_from_args(args: I) -> Option +where + I: IntoIterator, + T: Into, +{ + let mut args = args.into_iter().map(Into::into); + while let Some(arg) = args.next() { + if arg == OsString::from(SERVICE_CONFIG_DIR_FLAG) { + return args.next().map(PathBuf::from); + } + if let Some(inline_path) = inline_service_config_dir_override(&arg) { + return Some(inline_path); + } + } + + None +} + +fn inline_service_config_dir_override(arg: &OsString) -> Option { + let prefix = format!("{SERVICE_CONFIG_DIR_FLAG}="); + let value = arg.to_string_lossy(); + let raw_path = value.strip_prefix(&prefix)?; + let path = raw_path.trim_matches('"'); + if path.is_empty() { + return None; + } + + Some(PathBuf::from(path)) +} + +#[cfg(windows)] +fn windows_program_data_dir() -> PathBuf { + std::env::var("PROGRAMDATA") + .map(PathBuf::from) + .unwrap_or_else(|_| PathBuf::from(r"C:\ProgramData")) +} + +#[cfg(test)] +#[path = "tests/service_paths.rs"] +mod tests; diff --git a/src/service_runtime.rs b/src/service_runtime.rs new file mode 100644 index 0000000..e35928a --- /dev/null +++ b/src/service_runtime.rs @@ -0,0 +1,275 @@ +use std::{ + fs::{File, OpenOptions}, + future::Future, + io::{Seek, SeekFrom, Write}, + path::Path, +}; + +use crate::{ + daemon_api::DaemonApiServer, daemon_state::DaemonState, + service_paths::resolve_shared_service_config_dir, +}; +use fs2::FileExt; + +#[cfg(windows)] +use std::{ffi::OsString, sync::mpsc, time::Duration}; + +#[cfg(windows)] +use windows_service::{ + define_windows_service, + service::{ + ServiceControl, ServiceControlAccept, ServiceExitCode, ServiceState, ServiceStatus, + ServiceType, + }, + service_control_handler::{self, ServiceControlHandlerResult}, + service_dispatcher, Error as WindowsServiceError, +}; + +const PID_LOCK_FILENAME: &str = "daemon.pid"; + +#[cfg(windows)] +const WINDOWS_SERVICE_DISPATCHER_CONNECT_ERROR: i32 = 1063; +#[cfg(windows)] +const 
WINDOWS_SERVICE_TYPE: ServiceType = ServiceType::OWN_PROCESS; + +#[cfg(windows)] +define_windows_service!(ffi_windows_service_main, windows_service_main); + +pub fn run_daemon() -> Result<(), String> { + crate::logging::init_daemon_logging(); + + #[cfg(windows)] + { + if let Some(result) = try_run_under_windows_service_manager() { + return result; + } + } + + run_console_daemon() +} + +type ReadyCallback = Box; + +fn run_console_daemon() -> Result<(), String> { + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + .map_err(|err| err.to_string())?; + + runtime.block_on(run_daemon_until(wait_for_shutdown_signal(), None)) +} + +async fn run_daemon_until(shutdown: F, on_ready: Option) -> Result<(), String> +where + F: Future>, +{ + let config_dir = resolve_shared_service_config_dir()?; + + let _pid_lock = acquire_daemon_pid_lock(&config_dir)?; + + tracing::info!(config_dir = %config_dir.display(), "starting SDL daemon"); + + let daemon = DaemonState::new(config_dir.clone()).await?; + daemon.clear_all_running_jobs()?; + daemon.publish_status()?; + let status_task = daemon.start_status_monitor(); + let api_server = DaemonApiServer::start(daemon.clone(), config_dir).await?; + + if let Some(on_ready) = on_ready { + on_ready(); + } + + let shutdown_result = shutdown.await; + + status_task.abort(); + let _ = status_task.await; + api_server.shutdown().await; + daemon.shutdown().await; + daemon.clear_all_running_jobs()?; + + tracing::info!("SDL daemon stopped"); + shutdown_result +} + +async fn wait_for_shutdown_signal() -> Result<(), String> { + #[cfg(unix)] + { + use tokio::signal::unix::{signal, SignalKind}; + + let (mut sigterm, mut sigint) = match ( + signal(SignalKind::terminate()), + signal(SignalKind::interrupt()), + ) { + (Ok(term), Ok(int)) => (term, int), + (Err(error), _) | (_, Err(error)) => { + return Err(format!("failed to install OS signal handlers: {error}")); + } + }; + + tokio::select! 
{ + _ = sigterm.recv() => {} + _ = sigint.recv() => {} + } + + Ok(()) + } + + #[cfg(windows)] + { + tokio::signal::ctrl_c() + .await + .map_err(|error| format!("failed to install Ctrl-C handler: {error}")) + } + + #[cfg(not(any(unix, windows)))] + { + tokio::signal::ctrl_c() + .await + .map_err(|error| format!("failed to install Ctrl-C handler: {error}")) + } +} + +fn acquire_daemon_pid_lock(config_dir: &Path) -> Result { + let pid_path = config_dir.join(PID_LOCK_FILENAME); + let file = OpenOptions::new() + .create(true) + .read(true) + .write(true) + .truncate(false) + .open(&pid_path) + .map_err(|err| { + format!( + "Couldn't open daemon pid file at {}: {err}", + pid_path.display() + ) + })?; + + FileExt::try_lock_exclusive(&file).map_err(|_| { + format!( + "Another streaming-data-loader daemon is already running (lock held at {}). \ + If this is stale, stop the service and delete the file before restarting.", + pid_path.display() + ) + })?; + + // Overwrite the file contents with the current PID. Best-effort — the lock + // itself is what enforces single-instance; the PID is informational. 
+ let _ = file.set_len(0); + let _ = (&file).seek(SeekFrom::Start(0)); + let _ = writeln!(&file, "{}", std::process::id()); + + Ok(file) +} + +#[cfg(windows)] +fn try_run_under_windows_service_manager() -> Option> { + match service_dispatcher::start( + crate::service_manager::WINDOWS_SERVICE_NAME, + ffi_windows_service_main, + ) { + Ok(()) => Some(Ok(())), + Err(WindowsServiceError::Winapi(error)) + if error.raw_os_error() == Some(WINDOWS_SERVICE_DISPATCHER_CONNECT_ERROR) => + { + None + } + Err(error) => Some(Err(format!( + "Couldn't attach to the Windows Service Control Manager: {error}" + ))), + } +} + +#[cfg(windows)] +fn windows_service_main(_arguments: Vec) { + if let Err(error) = run_windows_service() { + tracing::error!(error = %error, "Windows service stopped with an error"); + } +} + +#[cfg(windows)] +fn run_windows_service() -> Result<(), String> { + let (shutdown_tx, shutdown_rx) = mpsc::channel::<()>(); + let event_handler = move |control_event| -> ServiceControlHandlerResult { + match control_event { + ServiceControl::Interrogate => ServiceControlHandlerResult::NoError, + ServiceControl::Stop | ServiceControl::Shutdown => { + let _ = shutdown_tx.send(()); + ServiceControlHandlerResult::NoError + } + _ => ServiceControlHandlerResult::NotImplemented, + } + }; + + let status_handle = service_control_handler::register( + crate::service_manager::WINDOWS_SERVICE_NAME, + event_handler, + ) + .map_err(|error| error.to_string())?; + + status_handle + .set_service_status(ServiceStatus { + service_type: WINDOWS_SERVICE_TYPE, + current_state: ServiceState::StartPending, + controls_accepted: ServiceControlAccept::empty(), + exit_code: ServiceExitCode::NO_ERROR, + checkpoint: 1, + wait_hint: Duration::from_secs(30), + process_id: None, + }) + .map_err(|error| error.to_string())?; + + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + .map_err(|err| err.to_string())?; + + let ready_status_handle = status_handle; + let on_ready: 
ReadyCallback = Box::new(move || { + if let Err(error) = ready_status_handle.set_service_status(ServiceStatus { + service_type: WINDOWS_SERVICE_TYPE, + current_state: ServiceState::Running, + controls_accepted: ServiceControlAccept::STOP | ServiceControlAccept::SHUTDOWN, + exit_code: ServiceExitCode::NO_ERROR, + checkpoint: 0, + wait_hint: Duration::default(), + process_id: None, + }) { + tracing::error!(error = %error, "Couldn't transition Windows service to Running"); + } + }); + + let result = runtime.block_on(run_daemon_until( + async move { + tokio::task::spawn_blocking(move || shutdown_rx.recv()) + .await + .map_err(|err| err.to_string())? + .map_err(|_| "The Windows service control channel disconnected.".to_string())?; + Ok(()) + }, + Some(on_ready), + )); + + let exit_code = if result.is_ok() { + ServiceExitCode::NO_ERROR + } else { + ServiceExitCode::ServiceSpecific(1) + }; + + status_handle + .set_service_status(ServiceStatus { + service_type: WINDOWS_SERVICE_TYPE, + current_state: ServiceState::Stopped, + controls_accepted: ServiceControlAccept::empty(), + exit_code, + checkpoint: 0, + wait_hint: Duration::default(), + process_id: None, + }) + .map_err(|error| error.to_string())?; + + result +} + +#[cfg(test)] +#[path = "tests/service_runtime.rs"] +mod tests; diff --git a/src/tests/config_store.rs b/src/tests/config_store.rs new file mode 100644 index 0000000..64bfdec --- /dev/null +++ b/src/tests/config_store.rs @@ -0,0 +1,571 @@ +use super::ConfigStore; +use crate::models::{ + ColumnMapping, FileConfig, JobLogEntry, JobUpsertRequest, LogLevel, ServerConfig, +}; +use chrono::Utc; +use std::{ + fs, + path::{Path, PathBuf}, + time::{SystemTime, UNIX_EPOCH}, +}; + +#[test] +fn append_log_persists_to_job_log_file_without_growing_workspace_json() { + let temp_dir = unique_temp_dir("config-store-logs"); + let store = ConfigStore::new(temp_dir.clone()); + store.ensure().expect("ensure store"); + store + .set_server( + ServerConfig { + url: 
"https://example.com".to_string(), + workspace_id: "workspace-1".to_string(), + workspace_name: "Workspace 1".to_string(), + ..ServerConfig::default() + }, + "Workspace 1", + ) + .expect("set server"); + + let job = store + .create_job(JobUpsertRequest { + name: "Test source".to_string(), + enabled: true, + file_path: "/tmp/source.csv".to_string(), + schedule_minutes: 15, + file_config: FileConfig::default(), + column_mappings: Vec::new(), + }) + .expect("create job"); + + let first = JobLogEntry { + timestamp: Utc::now(), + level: LogLevel::Info, + message: "first message".to_string(), + }; + let second = JobLogEntry { + timestamp: Utc::now(), + level: LogLevel::Warning, + message: "second message".to_string(), + }; + + store + .append_log(&job.id, first.clone()) + .expect("append first log"); + store + .append_log(&job.id, second.clone()) + .expect("append second log"); + + let log_path = store + .job_log_file_path(&job.id) + .expect("get log file path") + .expect("job log file should exist"); + let logs = store.logs_for(&job.id, 200).expect("load logs"); + + assert_eq!(logs, vec![first, second]); + assert!(log_path.exists()); + + let workspace_contents = + fs::read_to_string(temp_dir.join("workspaces").join("workspace-1.json")) + .expect("read workspace file"); + assert!( + !workspace_contents.contains("recent_logs"), + "workspace JSON should not embed recent_logs once file-backed logging is enabled" + ); + + remove_temp_dir(&temp_dir); +} + +#[test] +fn loading_workspace_migrates_embedded_recent_logs_to_file_backed_logs() { + let temp_dir = unique_temp_dir("config-store-migration"); + let store = ConfigStore::new(temp_dir.clone()); + store.ensure().expect("ensure store"); + + fs::write( + temp_dir.join("config.json"), + r#"{ + "version": 1, + "server": { + "auth_type": "apikey", + "url": "https://example.com", + "api_key": "", + "username": "", + "password": "", + "workspace_id": "workspace-2", + "workspace_name": "Workspace 2" + } +} +"#, + ) + .expect("write 
config"); + + let migrated_entry = JobLogEntry { + timestamp: Utc::now(), + level: LogLevel::Error, + message: "migrated message".to_string(), + }; + + fs::create_dir_all(temp_dir.join("workspaces")).expect("create workspaces dir"); + let legacy_workspace = serde_json::json!({ + "version": 1, + "workspace_id": "workspace-2", + "workspace_name": "Workspace 2", + "hydroserver_url": "https://example.com", + "datasources": [ + { + "id": "job-1", + "name": "Migrated source", + "enabled": true, + "file_path": "/tmp/migrated.csv", + "schedule_minutes": 15, + "file_config": { + "headerRow": 1, + "dataStartRow": 2, + "delimiter": ",", + "identifierType": "name", + "timestamp": { + "key": "timestamp", + "format": "ISO8601", + "timezoneMode": "embeddedOffset" + } + }, + "column_mappings": [], + "recent_logs": [ + { + "timestamp": migrated_entry.timestamp, + "level": "error", + "message": "migrated message" + } + ] + } + ] + }); + fs::write( + temp_dir.join("workspaces").join("workspace-2.json"), + format!( + "{}\n", + serde_json::to_string_pretty(&legacy_workspace).expect("serialize workspace") + ), + ) + .expect("write workspace"); + + let logs = store.logs_for("job-1", 200).expect("load migrated logs"); + assert_eq!(logs, vec![migrated_entry]); + + let workspace_contents = + fs::read_to_string(temp_dir.join("workspaces").join("workspace-2.json")) + .expect("read migrated workspace"); + assert!( + !workspace_contents.contains("recent_logs"), + "workspace JSON should be rewritten without embedded recent logs after migration" + ); + assert!( + store + .job_log_file_path("job-1") + .expect("get migrated log path") + .is_some(), + "migration should create a durable job log file" + ); + + remove_temp_dir(&temp_dir); +} + +#[test] +fn loading_workspace_rewrites_generated_test_csv_paths_into_current_config_dir() { + let temp_dir = unique_temp_dir("config-store-generated-paths"); + let store = ConfigStore::new(temp_dir.clone()); + store.ensure().expect("ensure store"); + + 
fs::write( + temp_dir.join("config.json"), + r#"{ + "version": 1, + "server": { + "auth_type": "apikey", + "url": "https://example.com", + "api_key": "", + "username": "", + "password": "", + "workspace_id": "workspace-3", + "workspace_name": "Workspace 3" + } +} +"#, + ) + .expect("write config"); + + let generated_dir = temp_dir.join("generated-test-csv").join("sample-batch"); + fs::create_dir_all(&generated_dir).expect("create generated test dir"); + let csv_path = generated_dir.join("sample.csv"); + fs::write(&csv_path, "timestamp,value\n2026-04-15T00:00:00Z,1\n").expect("write csv"); + + fs::create_dir_all(temp_dir.join("workspaces")).expect("create workspaces dir"); + let legacy_workspace = serde_json::json!({ + "version": 1, + "workspace_id": "workspace-3", + "workspace_name": "Workspace 3", + "hydroserver_url": "https://example.com", + "datasources": [ + { + "id": "job-1", + "name": "Generated source", + "enabled": true, + "file_path": "/Users/example/Library/Application Support/com.streaming-data-loader.app/generated-test-csv/sample-batch/sample.csv", + "schedule_minutes": 15, + "file_config": { + "headerRow": 1, + "dataStartRow": 2, + "delimiter": ",", + "identifierType": "name", + "timestamp": { + "key": "timestamp", + "format": "ISO8601", + "timezoneMode": "embeddedOffset" + } + }, + "column_mappings": [] + } + ] + }); + fs::write( + temp_dir.join("workspaces").join("workspace-3.json"), + format!( + "{}\n", + serde_json::to_string_pretty(&legacy_workspace).expect("serialize workspace") + ), + ) + .expect("write workspace"); + + let job = store + .get_job("job-1") + .expect("load job") + .expect("job should exist"); + assert_eq!(job.file_path, csv_path.to_string_lossy()); + + let workspace_contents = + fs::read_to_string(temp_dir.join("workspaces").join("workspace-3.json")) + .expect("read workspace"); + assert!( + workspace_contents.contains(csv_path.to_string_lossy().as_ref()), + "workspace JSON should be rewritten to the current generated-test-csv 
location" + ); + + remove_temp_dir(&temp_dir); +} + +#[test] +fn running_state_is_persisted_and_can_be_cleared_globally() { + let temp_dir = unique_temp_dir("config-store-running"); + let store = ConfigStore::new(temp_dir.clone()); + store.ensure().expect("ensure store"); + store + .set_server( + ServerConfig { + url: "https://example.com".to_string(), + workspace_id: "workspace-running".to_string(), + workspace_name: "Workspace Running".to_string(), + ..ServerConfig::default() + }, + "Workspace Running", + ) + .expect("set server"); + + let job = store + .create_job(JobUpsertRequest { + name: "Running source".to_string(), + enabled: true, + file_path: "/tmp/running.csv".to_string(), + schedule_minutes: 15, + file_config: FileConfig::default(), + column_mappings: Vec::new(), + }) + .expect("create job"); + + store + .set_job_running(&job.id, true) + .expect("set job running"); + assert!( + store.cursor_for(&job.id).expect("load cursor").is_running, + "cursor should reflect persisted running state" + ); + + store + .clear_all_running_jobs() + .expect("clear all running jobs"); + assert!( + !store.cursor_for(&job.id).expect("load cursor").is_running, + "global running-state reset should clear persisted flags" + ); + + remove_temp_dir(&temp_dir); +} + +/// A successful upload for one datastream should advance only that +/// datastream's cursor, leaving a behind sibling's cursor and error intact. +/// The job-level aggregate must then reflect the MIN of the two. 
+#[test] +fn record_datastream_success_isolates_per_datastream_state() { + let temp_dir = unique_temp_dir("config-store-per-ds-success"); + let store = ConfigStore::new(temp_dir.clone()); + store.ensure().expect("ensure store"); + store + .set_server( + ServerConfig { + url: "https://example.com".to_string(), + workspace_id: "workspace-partial".to_string(), + workspace_name: "Partial Failure".to_string(), + ..ServerConfig::default() + }, + "Partial Failure", + ) + .expect("set server"); + + let job = store + .create_job(JobUpsertRequest { + name: "Multi-datastream job".to_string(), + enabled: true, + file_path: "/tmp/multi.csv".to_string(), + schedule_minutes: 15, + file_config: FileConfig::default(), + column_mappings: vec![ + ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-stage".to_string(), + datastream_name: "Stage".to_string(), + }, + ColumnMapping { + csv_column: "WaterTemp_C".to_string(), + datastream_id: "ds-temp".to_string(), + datastream_name: "Water Temp".to_string(), + }, + ], + }) + .expect("create job"); + + // Simulate a prior failure on ds-temp at row 5. + let failed_at = Utc::now(); + store + .record_datastream_failure(&job.id, "ds-temp", "network error", failed_at) + .expect("record temp failure"); + + // Now record a success on ds-stage all the way through row 8. 
+ let stage_ts = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 20, 0) + .unwrap() + .and_utc(); + store + .record_datastream_success(&job.id, "ds-stage", 8, stage_ts, Utc::now()) + .expect("record stage success"); + + let cursor = store.cursor_for(&job.id).expect("load cursor"); + + let stage = cursor + .datastream_cursors + .get("ds-stage") + .expect("stage cursor present"); + assert_eq!(stage.last_pushed_row_index, Some(8)); + assert_eq!(stage.last_error, None); + + let temp = cursor + .datastream_cursors + .get("ds-temp") + .expect("temp cursor present"); + assert_eq!( + temp.last_pushed_row_index, None, + "failed sibling's cursor must not advance" + ); + assert_eq!( + temp.last_error.as_deref(), + Some("network error"), + "failed sibling's error must survive the other datastream's success" + ); + + // Job-level aggregate is MIN across mappings: ds-temp has no row yet, so + // the aggregate should be None (can't skip rows any datastream still + // needs). + assert_eq!( + cursor.last_pushed_row_index, None, + "aggregate row must be None while any mapping has no confirmed cursor" + ); + assert_eq!( + cursor.last_error.as_deref(), + Some("network error"), + "aggregate error should surface the still-failing datastream" + ); + + remove_temp_dir(&temp_dir); +} + +/// Fix #2: record_datastream_failure should only touch the specified +/// datastream's cursor, never the sibling's confirmed state. 
+#[test] +fn record_datastream_failure_preserves_sibling_progress() { + let temp_dir = unique_temp_dir("config-store-per-ds-failure"); + let store = ConfigStore::new(temp_dir.clone()); + store.ensure().expect("ensure store"); + store + .set_server( + ServerConfig { + url: "https://example.com".to_string(), + workspace_id: "workspace-fail".to_string(), + workspace_name: "Fail Isolation".to_string(), + ..ServerConfig::default() + }, + "Fail Isolation", + ) + .expect("set server"); + + let job = store + .create_job(JobUpsertRequest { + name: "Multi-datastream job".to_string(), + enabled: true, + file_path: "/tmp/multi.csv".to_string(), + schedule_minutes: 15, + file_config: FileConfig::default(), + column_mappings: vec![ + ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-stage".to_string(), + datastream_name: "Stage".to_string(), + }, + ColumnMapping { + csv_column: "WaterTemp_C".to_string(), + datastream_id: "ds-temp".to_string(), + datastream_name: "Water Temp".to_string(), + }, + ], + }) + .expect("create job"); + + let stage_ts = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 20, 0) + .unwrap() + .and_utc(); + let temp_ts = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 05, 0) + .unwrap() + .and_utc(); + store + .record_datastream_success(&job.id, "ds-stage", 8, stage_ts, Utc::now()) + .expect("record stage success"); + store + .record_datastream_success(&job.id, "ds-temp", 5, temp_ts, Utc::now()) + .expect("record temp success"); + + // Now record a failure on ds-temp — stage's confirmed cursor at row 8 + // must not regress. 
+ store + .record_datastream_failure(&job.id, "ds-temp", "timeout", Utc::now()) + .expect("record temp failure"); + + let cursor = store.cursor_for(&job.id).expect("load cursor"); + + let stage = cursor + .datastream_cursors + .get("ds-stage") + .expect("stage cursor"); + assert_eq!(stage.last_pushed_row_index, Some(8)); + assert_eq!(stage.last_error, None); + + let temp = cursor + .datastream_cursors + .get("ds-temp") + .expect("temp cursor"); + assert_eq!( + temp.last_pushed_row_index, + Some(5), + "temp's prior successful cursor must persist through a later failure" + ); + assert_eq!(temp.last_error.as_deref(), Some("timeout")); + + // Job-level aggregate row is MIN(5, 8) = 5 — matches what the scan needs + // in order to backtrack for the still-failing datastream. + assert_eq!(cursor.last_pushed_row_index, Some(5)); + + remove_temp_dir(&temp_dir); +} + +/// bug_004: clear_last_error must not clobber is_running. Previously the +/// pipeline did a separate cursor_for + update_cursor, and a concurrent +/// set_job_running(true) landing between the two would be overwritten by the +/// subsequent write of the stale is_running=false value. +#[test] +fn clear_last_error_does_not_clobber_concurrently_set_is_running() { + let temp_dir = unique_temp_dir("config-store-clear-error-race"); + let store = ConfigStore::new(temp_dir.clone()); + store.ensure().expect("ensure store"); + store + .set_server( + ServerConfig { + url: "https://example.com".to_string(), + workspace_id: "workspace-race".to_string(), + workspace_name: "Race".to_string(), + ..ServerConfig::default() + }, + "Race", + ) + .expect("set server"); + + let job = store + .create_job(JobUpsertRequest { + name: "Race job".to_string(), + enabled: true, + file_path: "/tmp/race.csv".to_string(), + schedule_minutes: 15, + file_config: FileConfig::default(), + column_mappings: Vec::new(), + }) + .expect("create job"); + + // Seed a job-level error and mark the job as running. 
+ store + .record_datastream_failure(&job.id, "ds-x", "transient error", Utc::now()) + .expect("record failure"); + store + .set_job_running(&job.id, true) + .expect("set job running"); + assert!( + store.cursor_for(&job.id).expect("cursor").is_running, + "precondition: job should be running" + ); + + // Clear the last error. Must leave is_running untouched. + store + .clear_last_error(&job.id, Utc::now()) + .expect("clear error"); + + let cursor = store.cursor_for(&job.id).expect("cursor after clear"); + assert!( + cursor.is_running, + "is_running must survive clear_last_error (bug_004)" + ); + assert!( + cursor.last_error.is_none(), + "job-level last_error should be cleared" + ); + assert!( + cursor.last_run_at.is_some(), + "last_run_at should be updated to mark the retry attempt" + ); + + remove_temp_dir(&temp_dir); +} + +fn unique_temp_dir(label: &str) -> PathBuf { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("system time should be after epoch") + .as_nanos(); + let root = std::env::temp_dir().join(format!("sdl-{label}-{nanos}")); + fs::create_dir_all(&root).expect("create temp root"); + root +} + +fn remove_temp_dir(path: &Path) { + if path.exists() { + fs::remove_dir_all(path).expect("remove temp dir"); + } +} diff --git a/src/tests/file_watcher.rs b/src/tests/file_watcher.rs new file mode 100644 index 0000000..6e5cfcd --- /dev/null +++ b/src/tests/file_watcher.rs @@ -0,0 +1,61 @@ +use super::*; +use notify_debouncer_mini::DebouncedEvent; + +fn make_event(path: PathBuf, kind: DebouncedEventKind) -> DebouncedEvent { + DebouncedEvent { path, kind } +} + +#[test] +fn burst_writes_collapse_into_single_event() { + let dir = std::env::temp_dir(); + let file_path = dir.join(format!("sdl-debounce-test-{}.csv", std::process::id())); + std::fs::write(&file_path, "test").expect("create temp file"); + let canonical = file_path + .canonicalize() + .unwrap_or_else(|_| file_path.clone()); + + let watched: HashSet = 
[canonical.clone()].into_iter().collect(); + let (tx, mut rx) = mpsc::unbounded_channel(); + + // Simulate 10 rapid events for the same file — the kind a burst write produces + let events: Vec = (0..10) + .map(|_| make_event(file_path.clone(), DebouncedEventKind::Any)) + .collect(); + + handle_debounced_events(Ok(events), &watched, &tx); + + // Only one message should be sent despite 10 events + let mut count = 0; + while rx.try_recv().is_ok() { + count += 1; + } + assert_eq!(count, 1, "burst of 10 events should collapse to 1 send"); + + let _ = std::fs::remove_file(&file_path); +} + +#[test] +fn unwatched_files_are_ignored() { + let dir = std::env::temp_dir(); + let watched_file = dir.join("sdl-watched.csv"); + let unwatched_file = dir.join("sdl-unwatched.csv"); + std::fs::write(&watched_file, "test").expect("create temp file"); + std::fs::write(&unwatched_file, "test").expect("create temp file"); + let canonical = watched_file + .canonicalize() + .unwrap_or_else(|_| watched_file.clone()); + + let watched: HashSet = [canonical].into_iter().collect(); + let (tx, mut rx) = mpsc::unbounded_channel(); + + let events = vec![make_event(unwatched_file.clone(), DebouncedEventKind::Any)]; + handle_debounced_events(Ok(events), &watched, &tx); + + assert!( + rx.try_recv().is_err(), + "events for unwatched files should be ignored" + ); + + let _ = std::fs::remove_file(&watched_file); + let _ = std::fs::remove_file(&unwatched_file); +} diff --git a/src/tests/hydroserver.rs b/src/tests/hydroserver.rs new file mode 100644 index 0000000..090fe90 --- /dev/null +++ b/src/tests/hydroserver.rs @@ -0,0 +1,151 @@ +use super::*; +use chrono::TimeZone; + +#[test] +fn observation_batch_payload_matches_sensorthings_schema() { + let observations = vec![ + ObservationPayloadRow { + phenomenon_time: Utc.with_ymd_and_hms(2026, 4, 3, 8, 0, 0).unwrap(), + result: json!(2.41), + }, + ObservationPayloadRow { + phenomenon_time: Utc.with_ymd_and_hms(2026, 4, 3, 8, 5, 0).unwrap(), + result: 
json!("qualitative"), + }, + ]; + + let body = json!({ + "fields": ["phenomenonTime", "result"], + "data": observations + .iter() + .map(|row| json!([row.phenomenon_time.to_rfc3339(), row.result])) + .collect::>(), + }); + + assert_eq!( + body["fields"], + json!(["phenomenonTime", "result"]), + "fields must use SensorThings naming" + ); + + let data = body["data"].as_array().expect("data should be array"); + assert_eq!(data.len(), 2); + assert_eq!(data[0][0], "2026-04-03T08:00:00+00:00"); + assert_eq!(data[0][1], 2.41); + assert_eq!(data[1][1], "qualitative"); +} + +#[test] +fn build_url_normalizes_correctly() { + assert_eq!( + build_url("https://example.com/", "/api/data/test"), + "https://example.com/api/data/test" + ); + assert_eq!( + build_url("https://example.com", "api/data/test"), + "https://example.com/api/data/test" + ); +} + +#[test] +fn request_error_retryable_classification() { + assert!(RequestError::Connection.is_retryable()); + assert!(RequestError::Timeout.is_retryable()); + assert!(RequestError::Http { + status: Some(StatusCode::INTERNAL_SERVER_ERROR), + message: "error".to_string() + } + .is_retryable()); + assert!(RequestError::Http { + status: Some(StatusCode::BAD_GATEWAY), + message: "error".to_string() + } + .is_retryable()); + + assert!(!RequestError::Http { + status: Some(StatusCode::BAD_REQUEST), + message: "error".to_string() + } + .is_retryable()); + assert!(!RequestError::Http { + status: Some(StatusCode::UNPROCESSABLE_ENTITY), + message: "error".to_string() + } + .is_retryable()); + assert!(!RequestError::Other("misc".to_string()).is_retryable()); +} + +fn server_with_workspace(workspace_id: &str) -> ServerConfig { + ServerConfig { + auth_type: AuthType::Apikey, + url: "https://example.com".to_string(), + api_key: "test-key".to_string(), + workspace_id: workspace_id.to_string(), + workspace_name: String::new(), + ..ServerConfig::default() + } +} + +/// bug_008: When a persisted API key can no longer access the saved +/// workspace 
(e.g., key rotated to a different workspace), we must surface an +/// error instead of silently picking a different workspace from the list. +#[test] +fn resolve_api_key_workspace_errors_when_saved_workspace_is_unreachable() { + let server = server_with_workspace("workspace-saved"); + let accessible = vec![ + ("workspace-a".to_string(), "Workspace A".to_string()), + ("workspace-b".to_string(), "Workspace B".to_string()), + ]; + + let result = resolve_api_key_workspace(&server, &accessible); + let (field, message) = result.expect_err("should error when saved workspace is missing"); + assert_eq!(field, "api_key"); + assert!( + message.contains("does not have access"), + "message should explain the key lost access, got: {message}" + ); +} + +#[test] +fn resolve_api_key_workspace_returns_saved_workspace_when_accessible() { + let server = server_with_workspace("workspace-saved"); + let accessible = vec![ + ("workspace-a".to_string(), "Workspace A".to_string()), + ("workspace-saved".to_string(), "Saved Workspace".to_string()), + ]; + + let (id, name) = resolve_api_key_workspace(&server, &accessible) + .expect("should succeed") + .expect("should return saved workspace"); + assert_eq!(id, "workspace-saved"); + assert_eq!(name, "Saved Workspace"); +} + +#[test] +fn resolve_api_key_workspace_picks_first_when_none_saved() { + let server = server_with_workspace(""); + let accessible = vec![ + ("workspace-a".to_string(), "Workspace A".to_string()), + ("workspace-b".to_string(), "Workspace B".to_string()), + ]; + + let (id, _) = resolve_api_key_workspace(&server, &accessible) + .expect("should succeed") + .expect("should return first workspace"); + assert_eq!( + id, "workspace-a", + "initial connection defaults to the first accessible workspace" + ); +} + +#[test] +fn resolve_api_key_workspace_returns_none_when_no_workspaces_accessible() { + let server = server_with_workspace("workspace-saved"); + let empty: Vec<(String, String)> = Vec::new(); + + let result = 
resolve_api_key_workspace(&server, &empty).expect("empty list is not an error"); + assert!( + result.is_none(), + "no accessible workspaces should produce None (test_connection surfaces the key-level error)" + ); +} diff --git a/src/tests/observation_queue.rs b/src/tests/observation_queue.rs new file mode 100644 index 0000000..379f279 --- /dev/null +++ b/src/tests/observation_queue.rs @@ -0,0 +1,43 @@ +use super::*; +use serde_json::json; +use std::sync::Arc; + +fn test_observation() -> QueuedObservation { + QueuedObservation { + context: Arc::new(ObservationContext { + server: Arc::new(crate::models::ServerConfig::default()), + job_id: "test".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Test".to_string(), + }), + timestamp: chrono::Utc::now(), + row_index: 1, + value: json!(1.0), + } +} + +#[tokio::test] +async fn bounded_queue_respects_capacity() { + let (tx, mut rx) = bounded(3); + + // Fill the queue to capacity + tx.send(test_observation()).await.expect("send 1"); + tx.send(test_observation()).await.expect("send 2"); + tx.send(test_observation()).await.expect("send 3"); + + // The 4th send should not complete immediately (channel full) + let tx_clone = tx.clone(); + let handle = tokio::spawn(async move { tx_clone.send(test_observation()).await }); + + // Give the spawn a moment to attempt the send + tokio::time::sleep(std::time::Duration::from_millis(50)).await; + assert!( + !handle.is_finished(), + "send should block when queue is full" + ); + + // Drain one item to unblock + rx.recv().await.expect("recv"); + let result = handle.await.expect("join"); + assert!(result.is_ok(), "send should succeed after space freed"); +} diff --git a/src/tests/pipeline.rs b/src/tests/pipeline.rs new file mode 100644 index 0000000..73ae18f --- /dev/null +++ b/src/tests/pipeline.rs @@ -0,0 +1,1648 @@ +use super::{ + normalize_watched_path, overdue_paths, read_csv_rows, scan_job_file, PipelineService, ScanMode, + WatchPlan, +}; +use crate::{ + 
config_store::ConfigStore, + hydroserver::HydroServerService, + models::{ + AuthType, ColumnMapping, FileConfig, IdentifierType, JobConfig, JobCursor, + JobUpsertRequest, ServerConfig, TimestampConfig, + }, +}; +use chrono::Utc; +use std::{ + collections::{HashMap, HashSet}, + path::PathBuf, + sync::Arc, + time::{Duration, Instant}, +}; + +fn sample_job(path: &str) -> JobConfig { + JobConfig { + id: "job-1".to_string(), + name: "Example".to_string(), + enabled: true, + file_path: path.to_string(), + schedule_minutes: 15, + file_config: FileConfig { + header_row: Some(3), + data_start_row: 4, + delimiter: ",".to_string(), + identifier_type: IdentifierType::Name, + timestamp: TimestampConfig::default(), + }, + column_mappings: vec![ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Stage".to_string(), + }], + } +} + +fn sample_job_request(path: &str) -> JobUpsertRequest { + let sample = sample_job(path); + JobUpsertRequest { + name: sample.name, + enabled: sample.enabled, + file_path: sample.file_path, + schedule_minutes: sample.schedule_minutes, + file_config: sample.file_config, + column_mappings: sample.column_mappings, + } +} + +fn sample_server(url: String) -> ServerConfig { + ServerConfig { + auth_type: AuthType::Apikey, + url, + api_key: "test-api-key".to_string(), + workspace_id: "workspace-1".to_string(), + workspace_name: "Test Workspace".to_string(), + ..ServerConfig::default() + } +} + +fn temp_test_dir(label: &str) -> PathBuf { + let path = std::env::temp_dir().join(format!( + "sdl-{label}-{}-{}", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + std::fs::create_dir_all(&path).expect("create temp dir"); + path +} + +#[test] +fn read_csv_rows_allows_variable_width_preamble_rows() { + let csv_text = "\ +Station,Example Creek at Demo Site +Generated At,2026-04-03 09:00:00 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +"; + + let rows = 
read_csv_rows(csv_text, ',').expect("csv should parse"); + + assert_eq!(rows.len(), 4); + assert_eq!(rows[0], vec!["Station", "Example Creek at Demo Site"]); + assert_eq!(rows[2].len(), 3); +} + +#[test] +fn scan_job_file_only_returns_appended_rows() { + let path = std::env::temp_dir().join(format!( + "sdl-pipeline-test-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + std::fs::write( + &path, + "\ +Station,Example Creek at Demo Site +Generated At,2026-04-03 09:00:00 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +", + ) + .expect("write csv"); + + let result = scan_job_file( + sample_job(path.to_str().expect("utf-8 path")), + 4, + JobCursor::default(), + ScanMode::Incremental, + ) + .expect("scan should succeed"); + + assert_eq!(result.file_row_count, 5); + assert_eq!(result.observations.len(), 1); + assert_eq!(result.observations[0].row_index, 5); + + let _ = std::fs::remove_file(path); +} + +#[test] +fn scan_persists_row_count_across_successive_events() { + let path = std::env::temp_dir().join(format!( + "sdl-pipeline-persist-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + // Initial write: 3 header/preamble rows + 2 data rows = 5 total + std::fs::write( + &path, + "\ +Station,Example Creek at Demo Site +Generated At,2026-04-03 09:00:00 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +", + ) + .expect("write csv"); + + let job = sample_job(path.to_str().expect("utf-8 path")); + + // First scan with previous_row_count=0 sees both data rows + let result1 = + scan_job_file(job.clone(), 0, JobCursor::default(), ScanMode::Incremental).expect("scan 1"); + assert_eq!(result1.file_row_count, 5); + assert_eq!(result1.observations.len(), 2); + + // Second scan with previous_row_count=5 sees nothing new + let result2 = scan_job_file( + job.clone(), + result1.file_row_count, + 
JobCursor::default(), + ScanMode::Incremental, + ) + .expect("scan 2"); + assert_eq!(result2.observations.len(), 0); + + // Append one row + std::fs::write( + &path, + "\ +Station,Example Creek at Demo Site +Generated At,2026-04-03 09:00:00 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +2026-04-03 08:10:00,2.50,8.0 +", + ) + .expect("append csv"); + + // Third scan with previous_row_count=5 sees only the new row + let result3 = scan_job_file( + job.clone(), + result2.file_row_count, + JobCursor::default(), + ScanMode::Incremental, + ) + .expect("scan 3"); + assert_eq!(result3.file_row_count, 6); + assert_eq!(result3.observations.len(), 1); + assert_eq!(result3.observations[0].row_index, 6); + + let _ = std::fs::remove_file(path); +} + +#[test] +fn scan_detects_file_truncation_and_rescans() { + let path = std::env::temp_dir().join(format!( + "sdl-pipeline-truncate-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + std::fs::write( + &path, + "\ +Station,Example Creek at Demo Site +Generated At,2026-04-03 09:00:00 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +2026-04-03 08:10:00,2.50,8.0 +", + ) + .expect("write csv"); + + let job = sample_job(path.to_str().expect("utf-8 path")); + + // First scan sees 3 data rows + let result1 = + scan_job_file(job.clone(), 0, JobCursor::default(), ScanMode::Incremental).expect("scan 1"); + assert_eq!(result1.file_row_count, 6); + assert_eq!(result1.observations.len(), 3); + + // Truncate and rewrite with fewer rows + std::fs::write( + &path, + "\ +Station,Example Creek at Demo Site +Generated At,2026-04-03 10:00:00 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 09:00:00,2.60,8.1 +", + ) + .expect("rewrite csv"); + + // Scan detects reset (4 < 6) and rescans from data_start_row + let result2 = scan_job_file( + job.clone(), + result1.file_row_count, + JobCursor::default(), + ScanMode::Incremental, + 
) + .expect("scan 2"); + assert!(result2.reset_detected); + assert_eq!(result2.file_row_count, 4); + assert_eq!(result2.observations.len(), 1); + + let _ = std::fs::remove_file(path); +} + +// --------------------------------------------------------------- +// Edge-case tests for real-world CSV files +// --------------------------------------------------------------- + +/// Many environmental-monitoring loggers emit 50–200 lines of metadata +/// (station name, serial number, units row, etc.) before the actual +/// header + data begin. The user sets `header_row` and `data_start_row` +/// to skip past all of that. +#[test] +fn large_comment_preamble_is_skipped_correctly() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-preamble-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let mut csv = String::new(); + // 100 lines of free-form metadata + for i in 1..=100 { + csv.push_str(&format!("Comment line {i}: some logger metadata\n")); + } + // header on row 101, data starts at row 102 + csv.push_str("Timestamp,Stage_ft,WaterTemp_C\n"); + csv.push_str("2026-04-03 08:00:00,2.41,7.8\n"); + csv.push_str("2026-04-03 08:05:00,2.45,7.9\n"); + csv.push_str("2026-04-03 08:10:00,2.50,8.0\n"); + + std::fs::write(&path, &csv).expect("write csv"); + + let job = JobConfig { + id: "job-preamble".to_string(), + name: "Preamble Test".to_string(), + enabled: true, + file_path: path.to_str().unwrap().to_string(), + schedule_minutes: 15, + file_config: FileConfig { + header_row: Some(101), + data_start_row: 102, + delimiter: ",".to_string(), + identifier_type: IdentifierType::Name, + timestamp: TimestampConfig::default(), + }, + column_mappings: vec![ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Stage".to_string(), + }], + }; + + let result = scan_job_file(job, 0, JobCursor::default(), ScanMode::Incremental) + .expect("scan with large preamble"); + 
assert_eq!(result.file_row_count, 104); + assert_eq!(result.observations.len(), 3); + assert_eq!(result.observations[0].row_index, 102); + assert_eq!(result.observations[2].row_index, 104); + + let _ = std::fs::remove_file(path); +} + +/// Campbell Scientific CR1000-style files have a 4-line header: station +/// info, column names, units row, and processing description — only the +/// second line is the "real" header. +#[test] +fn campbell_scientific_style_four_line_header() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-campbell-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +\"TOA5\",\"CR1000\",\"CPU:TestSite.CR1X\",\"12345\",\"CR1000.Std.32.06\",\"30490\",\"MyTable\" +\"TIMESTAMP\",\"RECORD\",\"Stage_ft\",\"WaterTemp_C\" +\"TS\",\"RN\",\"ft\",\"Deg C\" +\"\",\"\",\"Avg\",\"Avg\" +\"2026-04-03 08:00:00\",1,2.41,7.8 +\"2026-04-03 08:05:00\",2,2.45,7.9 +\"2026-04-03 08:10:00\",3,2.50,8.0 +"; + + std::fs::write(&path, csv).expect("write csv"); + + let job = JobConfig { + id: "job-campbell".to_string(), + name: "Campbell".to_string(), + enabled: true, + file_path: path.to_str().unwrap().to_string(), + schedule_minutes: 15, + file_config: FileConfig { + header_row: Some(2), + data_start_row: 5, + delimiter: ",".to_string(), + identifier_type: IdentifierType::Name, + timestamp: TimestampConfig { + key: "TIMESTAMP".to_string(), + ..TimestampConfig::default() + }, + }, + column_mappings: vec![ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Stage".to_string(), + }], + }; + + let result = scan_job_file(job, 0, JobCursor::default(), ScanMode::Incremental) + .expect("scan campbell file"); + assert_eq!(result.observations.len(), 3); + assert_eq!(result.observations[0].row_index, 5); + + let _ = std::fs::remove_file(path); +} + +#[test] +fn empty_csv_file_returns_zero_observations() { + let path = std::env::temp_dir().join(format!( + 
"sdl-edge-empty-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + std::fs::write(&path, "").expect("write csv"); + + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 0, + JobCursor::default(), + ScanMode::Incremental, + ) + .expect("empty file should not error"); + assert_eq!(result.file_row_count, 0); + assert_eq!(result.observations.len(), 0); + assert!(!result.reset_detected); + + let _ = std::fs::remove_file(path); +} + +#[test] +fn header_only_file_returns_zero_observations() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-headeronly-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + std::fs::write( + &path, + "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +", + ) + .expect("write csv"); + + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 0, + JobCursor::default(), + ScanMode::Incremental, + ) + .expect("header-only should succeed"); + assert_eq!(result.file_row_count, 3); + assert_eq!(result.observations.len(), 0); + + let _ = std::fs::remove_file(path); +} + +/// Real sensor data often has gaps — e.g. a data column is blank when +/// the sensor was offline. Blank observation values should be skipped +/// without breaking other columns or rows. 
+#[test] +fn sparse_rows_with_missing_values_are_handled() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-sparse-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,,7.9 +2026-04-03 08:10:00,2.50, +2026-04-03 08:15:00,, +2026-04-03 08:20:00,2.55,8.1 +"; + + std::fs::write(&path, csv).expect("write csv"); + + let job = JobConfig { + column_mappings: vec![ + ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-stage".to_string(), + datastream_name: "Stage".to_string(), + }, + ColumnMapping { + csv_column: "WaterTemp_C".to_string(), + datastream_id: "ds-temp".to_string(), + datastream_name: "Temp".to_string(), + }, + ], + ..sample_job(path.to_str().unwrap()) + }; + + let result = + scan_job_file(job, 0, JobCursor::default(), ScanMode::Incremental).expect("sparse scan"); + + // Row 4: both present (2 obs), row 5: temp only (1), row 6: stage only (1), + // row 7: neither (0), row 8: both (2) => 6 total + assert_eq!(result.observations.len(), 6); + + // Verify the observations are from the right datastreams + let stage_obs: Vec<_> = result + .observations + .iter() + .filter(|o| o.datastream_id == "ds-stage") + .collect(); + let temp_obs: Vec<_> = result + .observations + .iter() + .filter(|o| o.datastream_id == "ds-temp") + .collect(); + assert_eq!(stage_obs.len(), 3); // rows 4, 6, 8 + assert_eq!(temp_obs.len(), 3); // rows 4, 5, 8 + + let _ = std::fs::remove_file(path); +} + +/// Empty timestamp rows should be silently skipped. 
+#[test] +fn rows_with_empty_timestamps_are_skipped() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-emptyts-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +,2.45,7.9 +2026-04-03 08:10:00,2.50,8.0 + ,2.55,8.1 +2026-04-03 08:20:00,2.60,8.2 +"; + + std::fs::write(&path, csv).expect("write csv"); + + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 0, + JobCursor::default(), + ScanMode::Incremental, + ) + .expect("empty-ts scan"); + + // Only rows with valid timestamps: 4, 6, 8 + assert_eq!(result.observations.len(), 3); + assert_eq!(result.observations[0].row_index, 4); + assert_eq!(result.observations[1].row_index, 6); + assert_eq!(result.observations[2].row_index, 8); + + let _ = std::fs::remove_file(path); +} + +/// Quoted CSV fields containing the delimiter character itself. +#[test] +fn quoted_fields_with_embedded_commas() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-quoted-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + // The preamble has commas inside quotes; data values should still parse. + let csv = "\ +\"Station Name\",\"Example Creek, East Fork\" +\"Generated At\",\"April 3, 2026\" +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +"; + + std::fs::write(&path, csv).expect("write csv"); + + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 0, + JobCursor::default(), + ScanMode::Incremental, + ) + .expect("quoted-fields scan"); + assert_eq!(result.observations.len(), 2); + + let _ = std::fs::remove_file(path); +} + +/// Tab-delimited files are common from certain loggers and spreadsheet +/// exports. 
+#[test] +fn tab_delimited_file() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-tab-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "Station\tExample Creek\n\ + Generated At\t2026-04-03\n\ + Timestamp\tStage_ft\tWaterTemp_C\n\ + 2026-04-03 08:00:00\t2.41\t7.8\n\ + 2026-04-03 08:05:00\t2.45\t7.9\n"; + + std::fs::write(&path, csv).expect("write csv"); + + let job = JobConfig { + file_config: FileConfig { + delimiter: "\t".to_string(), + ..sample_job("").file_config.clone() + }, + ..sample_job(path.to_str().unwrap()) + }; + + let result = scan_job_file(job, 0, JobCursor::default(), ScanMode::Incremental) + .expect("tab-delimited scan"); + assert_eq!(result.observations.len(), 2); + + let _ = std::fs::remove_file(path); +} + +/// Windows tools write \r\n line endings. The csv crate strips them, +/// but we should verify the pipeline handles this end-to-end. +#[test] +fn windows_crlf_line_endings() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-crlf-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "Station,Example Creek\r\n\ + Generated At,2026-04-03\r\n\ + Timestamp,Stage_ft,WaterTemp_C\r\n\ + 2026-04-03 08:00:00,2.41,7.8\r\n\ + 2026-04-03 08:05:00,2.45,7.9\r\n"; + + std::fs::write(&path, csv).expect("write csv"); + + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 0, + JobCursor::default(), + ScanMode::Incremental, + ) + .expect("crlf scan"); + assert_eq!(result.observations.len(), 2); + + let _ = std::fs::remove_file(path); +} + +/// Some users configure jobs by column index rather than name +/// (e.g. when there is no header row, or it's unreliable). 
+#[test] +fn index_based_column_identification() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-index-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + // No meaningful header — data starts immediately at row 1 + let csv = "\ +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +2026-04-03 08:10:00,2.50,8.0 +"; + std::fs::write(&path, csv).expect("write csv"); + + let job = JobConfig { + id: "job-index".to_string(), + name: "Index Job".to_string(), + enabled: true, + file_path: path.to_str().unwrap().to_string(), + schedule_minutes: 15, + file_config: FileConfig { + header_row: None, + data_start_row: 1, + delimiter: ",".to_string(), + identifier_type: IdentifierType::Index, + timestamp: TimestampConfig { + key: "1".to_string(), // column 1 = timestamp + ..TimestampConfig::default() + }, + }, + column_mappings: vec![ColumnMapping { + csv_column: "2".to_string(), // column 2 = Stage + datastream_id: "ds-1".to_string(), + datastream_name: "Stage".to_string(), + }], + }; + + let result = scan_job_file(job, 0, JobCursor::default(), ScanMode::Incremental) + .expect("index-based scan"); + assert_eq!(result.observations.len(), 3); + + let _ = std::fs::remove_file(path); +} + +/// Column name lookup should be case-insensitive ("timestamp" matches +/// "TIMESTAMP" or "Timestamp"). 
+#[test] +fn case_insensitive_header_matching() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-case-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +TIMESTAMP,STAGE_FT,WATERTEMP_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +"; + std::fs::write(&path, csv).expect("write csv"); + + // Job config uses lowercase column names vs uppercase in file + let job = JobConfig { + file_config: FileConfig { + timestamp: TimestampConfig { + key: "timestamp".to_string(), + ..TimestampConfig::default() + }, + ..sample_job("").file_config.clone() + }, + column_mappings: vec![ColumnMapping { + csv_column: "stage_ft".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Stage".to_string(), + }], + ..sample_job(path.to_str().unwrap()) + }; + + let result = scan_job_file(job, 0, JobCursor::default(), ScanMode::Incremental) + .expect("case-insensitive scan"); + assert_eq!(result.observations.len(), 2); + + let _ = std::fs::remove_file(path); +} + +/// FullResync with a cursor should skip rows that were already pushed, +/// so a "Run Now" doesn't re-upload the full history. 
+#[test] +fn full_resync_respects_cursor() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-resync-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +2026-04-03 08:10:00,2.50,8.0 +2026-04-03 08:15:00,2.55,8.1 +"; + std::fs::write(&path, csv).expect("write csv"); + + // Cursor says we already pushed through 08:05 (row 5) + let last_ts = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 5, 0) + .unwrap() + .and_utc(); + let mut datastream_cursors = std::collections::HashMap::new(); + datastream_cursors.insert( + "ds-1".to_string(), + crate::models::DatastreamCursor { + last_pushed_timestamp: Some(last_ts), + last_pushed_row_index: Some(5), + last_error: None, + }, + ); + let cursor = JobCursor { + last_pushed_timestamp: Some(last_ts), + last_pushed_row_index: Some(5), + last_run_at: None, + last_error: None, + is_running: false, + datastream_cursors, + }; + + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 0, + cursor, + ScanMode::FullResync, + ) + .expect("full resync scan"); + + // Should only return rows after the cursor: 08:10 and 08:15 + assert_eq!(result.observations.len(), 2); + assert_eq!(result.observations[0].row_index, 6); + assert_eq!(result.observations[1].row_index, 7); + + let _ = std::fs::remove_file(path); +} + +/// Multiple column mappings from the same file — each data row should +/// produce one observation per mapping (when the value is non-empty). 
+#[test] +fn multiple_column_mappings_produce_correct_observations() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-multi-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C,Conductivity +2026-04-03 08:00:00,2.41,7.8,145.2 +2026-04-03 08:05:00,2.45,7.9,146.0 +"; + std::fs::write(&path, csv).expect("write csv"); + + let job = JobConfig { + column_mappings: vec![ + ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-stage".to_string(), + datastream_name: "Stage".to_string(), + }, + ColumnMapping { + csv_column: "WaterTemp_C".to_string(), + datastream_id: "ds-temp".to_string(), + datastream_name: "Temp".to_string(), + }, + ColumnMapping { + csv_column: "Conductivity".to_string(), + datastream_id: "ds-cond".to_string(), + datastream_name: "Cond".to_string(), + }, + ], + ..sample_job(path.to_str().unwrap()) + }; + + let result = scan_job_file(job, 0, JobCursor::default(), ScanMode::Incremental) + .expect("multi-mapping scan"); + + // 2 data rows * 3 mappings = 6 observations + assert_eq!(result.observations.len(), 6); + + let stage: Vec<_> = result + .observations + .iter() + .filter(|o| o.datastream_id == "ds-stage") + .collect(); + let temp: Vec<_> = result + .observations + .iter() + .filter(|o| o.datastream_id == "ds-temp") + .collect(); + let cond: Vec<_> = result + .observations + .iter() + .filter(|o| o.datastream_id == "ds-cond") + .collect(); + assert_eq!(stage.len(), 2); + assert_eq!(temp.len(), 2); + assert_eq!(cond.len(), 2); + + let _ = std::fs::remove_file(path); +} + +/// Values with leading/trailing whitespace should be trimmed and still +/// parse correctly as numbers. 
+#[test] +fn whitespace_padded_values_are_trimmed() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-ws-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C + 2026-04-03 08:00:00 , 2.41 , 7.8 + 2026-04-03 08:05:00 , 2.45 , 7.9 +"; + std::fs::write(&path, csv).expect("write csv"); + + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 0, + JobCursor::default(), + ScanMode::Incremental, + ) + .expect("whitespace scan"); + assert_eq!(result.observations.len(), 2); + + // Values should parse as floats, not strings + assert_eq!(result.observations[0].value, serde_json::json!(2.41)); + assert_eq!(result.observations[1].value, serde_json::json!(2.45)); + + let _ = std::fs::remove_file(path); +} + +/// File that doesn't exist should return a clear error, not a panic. +#[test] +fn missing_file_produces_clear_error() { + let path = "/tmp/sdl-nonexistent-file-that-does-not-exist-99999.csv"; + let result = scan_job_file( + sample_job(path), + 0, + JobCursor::default(), + ScanMode::Incremental, + ); + assert!(result.is_err()); + let msg = result.unwrap_err(); + assert!( + msg.contains("No such file") || msg.contains("not found") || msg.contains("cannot find"), + "error should mention the missing file: {msg}" + ); +} + +/// A column referenced in the mapping that doesn't exist in the header +/// should produce a clear error pointing at the column name. 
+#[test] +fn missing_column_produces_clear_error() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-missingcol-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +"; + std::fs::write(&path, csv).expect("write csv"); + + let job = JobConfig { + column_mappings: vec![ColumnMapping { + csv_column: "Discharge_cfs".to_string(), // does not exist + datastream_id: "ds-1".to_string(), + datastream_name: "Discharge".to_string(), + }], + ..sample_job(path.to_str().unwrap()) + }; + + let result = scan_job_file(job, 0, JobCursor::default(), ScanMode::Incremental); + assert!(result.is_err()); + let msg = result.unwrap_err(); + assert!( + msg.contains("Discharge_cfs"), + "error should name the missing column: {msg}" + ); + + let _ = std::fs::remove_file(path); +} + +/// Values that look like strings (e.g. "good", "suspect") should be +/// preserved as JSON strings, while numbers become JSON numbers. 
+#[test] +fn mixed_numeric_and_string_observation_values() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-mixed-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,low,sensor_error +2026-04-03 08:10:00,-0.5,0 +"; + std::fs::write(&path, csv).expect("write csv"); + + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 0, + JobCursor::default(), + ScanMode::Incremental, + ) + .expect("mixed values scan"); + assert_eq!(result.observations.len(), 3); + + assert_eq!(result.observations[0].value, serde_json::json!(2.41)); + assert_eq!(result.observations[1].value, serde_json::json!("low")); + assert_eq!(result.observations[2].value, serde_json::json!(-0.5)); + + let _ = std::fs::remove_file(path); +} + +/// Semicolon-delimited files (common in European locales). +#[test] +fn semicolon_delimited_file() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-semi-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +Station;Example Creek\n\ +Generated At;2026-04-03\n\ +Timestamp;Stage_ft;WaterTemp_C\n\ +2026-04-03 08:00:00;2.41;7.8\n\ +2026-04-03 08:05:00;2.45;7.9\n"; + + std::fs::write(&path, csv).expect("write csv"); + + let job = JobConfig { + file_config: FileConfig { + delimiter: ";".to_string(), + ..sample_job("").file_config.clone() + }, + ..sample_job(path.to_str().unwrap()) + }; + + let result = + scan_job_file(job, 0, JobCursor::default(), ScanMode::Incremental).expect("semicolon scan"); + assert_eq!(result.observations.len(), 2); + + let _ = std::fs::remove_file(path); +} + +/// Incremental scan where previous_row_count is beyond the +/// data_start_row but the file hasn't grown — zero observations. 
+#[test] +fn incremental_no_change_returns_zero() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-nochange-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +"; + std::fs::write(&path, csv).expect("write csv"); + + // Simulate having already seen all 5 rows + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 5, + JobCursor::default(), + ScanMode::Incremental, + ) + .expect("no-change scan"); + assert_eq!(result.observations.len(), 0); + assert!(!result.reset_detected); + + let _ = std::fs::remove_file(path); +} + +/// BOM-prefixed UTF-8 files (common when CSV is saved from Excel). +/// The BOM bytes (\xEF\xBB\xBF) must not corrupt the first field. +#[test] +fn utf8_bom_does_not_corrupt_first_column() { + let path = std::env::temp_dir().join(format!( + "sdl-edge-bom-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + // Write BOM + CSV content + let mut bytes = vec![0xEF, 0xBB, 0xBF]; + bytes.extend_from_slice(b"Timestamp,Stage_ft,WaterTemp_C\n"); + bytes.extend_from_slice(b"2026-04-03 08:00:00,2.41,7.8\n"); + bytes.extend_from_slice(b"2026-04-03 08:05:00,2.45,7.9\n"); + std::fs::write(&path, &bytes).expect("write bom csv"); + + let job = JobConfig { + id: "job-bom".to_string(), + name: "BOM Test".to_string(), + enabled: true, + file_path: path.to_str().unwrap().to_string(), + schedule_minutes: 15, + file_config: FileConfig { + header_row: Some(1), + data_start_row: 2, + delimiter: ",".to_string(), + identifier_type: IdentifierType::Name, + timestamp: TimestampConfig::default(), + }, + column_mappings: vec![ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Stage".to_string(), + }], + }; + + let result = scan_job_file(job, 0, 
JobCursor::default(), ScanMode::Incremental); + + // The BOM bytes become part of the first cell when read as UTF-8. + // This is a known limitation — verify the test captures current behavior. + // If this fails with a column-not-found error, we need a BOM-stripping fix. + match &result { + Ok(r) => { + // If it works, great — both observations should be present + assert_eq!(r.observations.len(), 2); + } + Err(msg) => { + // If it fails because the BOM corrupted "Timestamp", that's a + // real bug we need to fix. + assert!( + msg.contains("Timestamp") || msg.contains("not found"), + "unexpected error: {msg}" + ); + // Flag this as a known issue rather than letting it silently pass + panic!("BOM corrupts the first header cell — needs a fix in scan_job_file: {msg}"); + } + } + + let _ = std::fs::remove_file(path); +} + +#[test] +fn scheduler_overdue_path_selection_uses_shortest_job_interval_and_skips_unscanned_paths() { + let now = Instant::now(); + let shared_path = PathBuf::from("/tmp/sdl-scheduler-shared.csv"); + let fifteen_minute_path = PathBuf::from("/tmp/sdl-scheduler-fifteen.csv"); + let never_scanned_path = PathBuf::from("/tmp/sdl-scheduler-never.csv"); + + let watch_plan = WatchPlan { + jobs_by_path: HashMap::from([ + ( + shared_path.clone(), + vec![ + JobConfig { + id: "job-fast".to_string(), + schedule_minutes: 5, + ..sample_job(shared_path.to_str().expect("utf-8 path")) + }, + JobConfig { + id: "job-slow".to_string(), + schedule_minutes: 30, + ..sample_job(shared_path.to_str().expect("utf-8 path")) + }, + ], + ), + ( + fifteen_minute_path.clone(), + vec![sample_job( + fifteen_minute_path.to_str().expect("utf-8 path"), + )], + ), + ( + never_scanned_path.clone(), + vec![JobConfig { + id: "job-never".to_string(), + schedule_minutes: 1, + ..sample_job(never_scanned_path.to_str().expect("utf-8 path")) + }], + ), + ]), + server: None, + }; + + let last_scan_times = HashMap::from([ + (shared_path.clone(), now - Duration::from_secs(6 * 60)), + ( + 
fifteen_minute_path.clone(), + now - Duration::from_secs(14 * 60), + ), + ]); + + let overdue: HashSet<_> = overdue_paths(now, &watch_plan, &last_scan_times) + .into_iter() + .collect(); + + assert!( + overdue.contains(&shared_path), + "a shared path should use the shortest schedule interval across its jobs" + ); + assert!( + !overdue.contains(&fifteen_minute_path), + "a 15-minute job scanned 14 minutes ago should not be overdue yet" + ); + assert!( + !overdue.contains(&never_scanned_path), + "paths with no recorded scan time should wait for the initial scan queued by reload" + ); +} + +#[test] +fn large_csv_scan_produces_bounded_observations() { + let path = std::env::temp_dir().join(format!( + "sdl-pipeline-large-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + // Generate a 10,000-row CSV using epoch seconds to avoid invalid dates + let mut csv = String::from( + "Station,Example Creek\nGenerated At,2026-04-03\nTimestamp,Stage_ft,WaterTemp_C\n", + ); + let base = chrono::NaiveDate::from_ymd_opt(2026, 1, 1) + .unwrap() + .and_hms_opt(0, 0, 0) + .unwrap(); + for i in 0..10_000u64 { + let ts = base + chrono::Duration::minutes(i as i64 * 5); + csv.push_str(&format!( + "{},{:.2},{:.1}\n", + ts.format("%Y-%m-%dT%H:%M:%S"), + 2.0 + (i as f64) * 0.01, + 7.0 + (i as f64) * 0.001, + )); + } + std::fs::write(&path, &csv).expect("write large csv"); + + let job = sample_job(path.to_str().expect("utf-8 path")); + + // Full scan from row 0 — should produce exactly 10,000 observations + let result = scan_job_file(job.clone(), 0, JobCursor::default(), ScanMode::Incremental) + .expect("scan large file"); + assert_eq!(result.file_row_count, 10_003); // 3 header + 10,000 data + assert_eq!(result.observations.len(), 10_000); + + // Incremental scan with previous_row_count = full file — should produce 0 + let result2 = scan_job_file( + job.clone(), + result.file_row_count, + JobCursor::default(), + ScanMode::Incremental, + ) + 
.expect("incremental scan of unchanged file"); + assert_eq!(result2.observations.len(), 0); + + let _ = std::fs::remove_file(path); +} + +/// Fix #1: On restart row_counts is empty (0). load_cursor_row_seeds seeds it +/// from the persisted cursor so we don't re-scan already-uploaded rows. +/// This test verifies that scanning with previous_row_count seeded from the +/// cursor's last_pushed_row_index correctly skips already-pushed rows. +#[test] +fn scan_seeded_from_cursor_skips_already_pushed_rows() { + let path = std::env::temp_dir().join(format!( + "sdl-seed-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + // 3 header rows + 4 data rows (rows 4-7 in 1-indexed terms) + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +2026-04-03 08:10:00,2.50,8.0 +2026-04-03 08:15:00,2.55,8.1 +"; + std::fs::write(&path, csv).expect("write csv"); + + // Simulate: cursor says rows 4-6 were already pushed (max_row_index = 6). + // Seed previous_row_count = 6 (as load_cursor_row_seeds would produce). + // The scan should only return row 7. 
+ let last_ts = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 10, 0) + .unwrap() + .and_utc(); + let mut datastream_cursors = std::collections::HashMap::new(); + datastream_cursors.insert( + "ds-1".to_string(), + crate::models::DatastreamCursor { + last_pushed_timestamp: Some(last_ts), + last_pushed_row_index: Some(6), + last_error: None, + }, + ); + let cursor = JobCursor { + last_pushed_timestamp: Some(last_ts), + last_pushed_row_index: Some(6), + last_run_at: None, + last_error: None, + is_running: false, + datastream_cursors, + }; + + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 6, // seeded from cursor.last_pushed_row_index + cursor, + ScanMode::Incremental, + ) + .expect("seeded scan"); + + assert_eq!( + result.observations.len(), + 1, + "only the new row should be returned" + ); + assert_eq!(result.observations[0].row_index, 7); + + let _ = std::fs::remove_file(path); +} + +#[tokio::test] +async fn restart_load_cursor_row_seeds_prevents_duplicate_scans() { + let temp_dir = temp_test_dir("restart-seeds"); + let config_dir = temp_dir.join("config"); + let csv_path = temp_dir.join("source.csv"); + + std::fs::write( + &csv_path, + "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +", + ) + .expect("write csv"); + + let config_store = Arc::new(ConfigStore::new(config_dir)); + config_store.ensure().expect("ensure config store"); + config_store + .set_server( + sample_server("http://127.0.0.1:9".to_string()), + "Test Workspace", + ) + .expect("set server"); + let job = config_store + .create_job(sample_job_request(csv_path.to_str().expect("utf-8 path"))) + .expect("create job"); + + let first_runtime = PipelineService::new( + config_store.clone(), + Arc::new(HydroServerService::new().expect("hydroserver service")), + ); + let first_snapshot = first_runtime + .load_watch_plan() + .await + .expect("load watch plan"); + let 
normalized_path = normalize_watched_path(&csv_path); + assert!( + first_runtime + .load_cursor_row_seeds(&first_snapshot) + .await + .get(&normalized_path) + .is_none(), + "new jobs should not have a row-count seed before any uploads succeed" + ); + + let persisted_ts = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 5, 0) + .unwrap() + .and_utc(); + config_store + .record_datastream_success(&job.id, "ds-1", 5, persisted_ts, Utc::now()) + .expect("persist cursor"); + + std::fs::write( + &csv_path, + "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +2026-04-03 08:10:00,2.50,8.0 +", + ) + .expect("append csv row"); + + let restarted_runtime = PipelineService::new( + config_store.clone(), + Arc::new(HydroServerService::new().expect("hydroserver service")), + ); + let restarted_snapshot = restarted_runtime + .load_watch_plan() + .await + .expect("load watch plan after restart"); + let seeds = restarted_runtime + .load_cursor_row_seeds(&restarted_snapshot) + .await; + assert_eq!( + seeds.get(&normalized_path), + Some(&5usize), + "restart should seed row counts from the persisted cursor" + ); + + let cursor = config_store.cursor_for(&job.id).expect("load cursor"); + let result = scan_job_file(job, 5, cursor, ScanMode::Incremental).expect("scan after restart"); + assert_eq!(result.observations.len(), 1); + assert_eq!(result.observations[0].row_index, 6); + + let _ = std::fs::remove_dir_all(temp_dir); +} + +#[tokio::test] +async fn shared_file_scans_use_one_baseline_for_all_jobs() { + let temp_dir = temp_test_dir("shared-file-jobs"); + let config_dir = temp_dir.join("config"); + let csv_path = temp_dir.join("source.csv"); + + std::fs::write( + &csv_path, + "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +", + ) + .expect("write csv"); + + let config_store = 
Arc::new(ConfigStore::new(config_dir)); + config_store.ensure().expect("ensure config store"); + let server = sample_server("http://127.0.0.1:9".to_string()); + config_store + .set_server(server.clone(), "Test Workspace") + .expect("set server"); + + let first_job = config_store + .create_job(sample_job_request(csv_path.to_str().expect("utf-8 path"))) + .expect("create first job"); + + let mut second_request = sample_job_request(csv_path.to_str().expect("utf-8 path")); + second_request.name = "Second job".to_string(); + second_request.column_mappings = vec![ColumnMapping { + csv_column: "WaterTemp_C".to_string(), + datastream_id: "ds-2".to_string(), + datastream_name: "WaterTemp".to_string(), + }]; + let second_job = config_store + .create_job(second_request) + .expect("create second job"); + + let runtime = PipelineService::new( + config_store, + Arc::new(HydroServerService::new().expect("hydroserver service")), + ); + let normalized_path = normalize_watched_path(&csv_path); + + runtime + .inner + .row_counts + .lock() + .await + .insert(normalized_path.clone(), 4); + + let first_result = runtime + .scan_job( + normalized_path.clone(), + Arc::new(server.clone()), + first_job, + 4, + ScanMode::Incremental, + ) + .await + .expect("scan first job"); + assert_eq!(first_result, (5, false)); + + let second_result = runtime + .scan_job( + normalized_path.clone(), + Arc::new(server), + second_job, + 4, + ScanMode::Incremental, + ) + .await + .expect("scan second job"); + assert_eq!(second_result, (5, false)); + + let shared_row_count = runtime + .inner + .row_counts + .lock() + .await + .get(&normalized_path) + .copied(); + assert_eq!( + shared_row_count, + Some(4), + "per-job scans should not advance the shared file baseline until the whole path scan completes" + ); + + let _ = std::fs::remove_dir_all(temp_dir); +} + +/// Fix #2: When the previous upload failed (cursor.last_error is set), the +/// scan must backtrack to cursor.last_pushed_row_index and retry the failed 
+/// rows, even if previous_row_count (in-memory) is already past them. +#[test] +fn scan_retries_failed_rows_when_cursor_has_error() { + let path = std::env::temp_dir().join(format!( + "sdl-retry-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + // 3 header rows + 5 data rows (rows 4-8 in 1-indexed terms) + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +2026-04-03 08:10:00,2.50,8.0 +2026-04-03 08:15:00,2.55,8.1 +2026-04-03 08:20:00,2.60,8.2 +"; + std::fs::write(&path, csv).expect("write csv"); + + // Scenario: rows 4-5 were pushed successfully (last_pushed_row_index=5). + // Rows 6-8 were scanned and queued but the upload failed (last_error set). + // In-memory previous_row_count = 8 (scan advanced past the failed rows). + // Expected: incremental scan should backtrack to row 5 and re-queue rows 6-8. + let last_ts = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 5, 0) + .unwrap() + .and_utc(); + let mut datastream_cursors = std::collections::HashMap::new(); + datastream_cursors.insert( + "ds-1".to_string(), + crate::models::DatastreamCursor { + last_pushed_timestamp: Some(last_ts), + last_pushed_row_index: Some(5), + last_error: Some("network error".to_string()), + }, + ); + let cursor = JobCursor { + last_pushed_timestamp: Some(last_ts), + last_pushed_row_index: Some(5), + last_run_at: None, + last_error: Some("network error".to_string()), + is_running: false, + datastream_cursors, + }; + + let result = scan_job_file( + sample_job(path.to_str().unwrap()), + 8, // in-memory row count is already at 8 + cursor, + ScanMode::Incremental, + ) + .expect("retry scan"); + + // With Fix #2: should re-scan rows 6, 7, 8 (backtracked to last_pushed_row_index=5) + assert_eq!( + result.observations.len(), + 3, + "failed rows should be retried" + ); + assert_eq!(result.observations[0].row_index, 6); 
+ assert_eq!(result.observations[1].row_index, 7); + assert_eq!(result.observations[2].row_index, 8); + + let _ = std::fs::remove_file(path); +} + +/// Fix #2 (multi-datastream partial failure): When a job maps the same CSV file +/// to multiple datastreams, an upload failure on one must not advance the other +/// datastream's cursor past unuploaded rows. The scan must read per-datastream +/// cursors and only re-emit rows a given datastream hasn't yet confirmed. +#[test] +fn scan_respects_per_datastream_cursors_after_partial_failure() { + let path = std::env::temp_dir().join(format!( + "sdl-partial-failure-{}-{}.csv", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + + // 3 header rows + 5 data rows (rows 4-8). Two measurement columns. + let csv = "\ +Station,Example Creek +Generated At,2026-04-03 +Timestamp,Stage_ft,WaterTemp_C +2026-04-03 08:00:00,2.41,7.8 +2026-04-03 08:05:00,2.45,7.9 +2026-04-03 08:10:00,2.50,8.0 +2026-04-03 08:15:00,2.55,8.1 +2026-04-03 08:20:00,2.60,8.2 +"; + std::fs::write(&path, csv).expect("write csv"); + + // Job mapping the CSV to two datastreams. + let mut job = sample_job(path.to_str().expect("utf-8 path")); + job.column_mappings = vec![ + ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-stage".to_string(), + datastream_name: "Stage".to_string(), + }, + ColumnMapping { + csv_column: "WaterTemp_C".to_string(), + datastream_id: "ds-temp".to_string(), + datastream_name: "Water Temp".to_string(), + }, + ]; + + // Scenario: Stage uploaded all 5 rows successfully (row 8), but Temp + // failed after only 2 rows (last_pushed=5 + error). Pre-fix, the job-level + // cursor would have advanced to 8, causing rows 6-8 for Temp to be + // silently dropped. With per-datastream cursors, the scan backtracks for + // Temp only. 
+ let stage_ts = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 20, 0) + .unwrap() + .and_utc(); + let temp_ts = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 5, 0) + .unwrap() + .and_utc(); + let mut datastream_cursors = HashMap::new(); + datastream_cursors.insert( + "ds-stage".to_string(), + crate::models::DatastreamCursor { + last_pushed_timestamp: Some(stage_ts), + last_pushed_row_index: Some(8), + last_error: None, + }, + ); + datastream_cursors.insert( + "ds-temp".to_string(), + crate::models::DatastreamCursor { + last_pushed_timestamp: Some(temp_ts), + last_pushed_row_index: Some(5), + last_error: Some("network error".to_string()), + }, + ); + let cursor = JobCursor { + last_pushed_timestamp: Some(temp_ts), + last_pushed_row_index: Some(5), // MIN aggregate of per-datastream cursors + last_run_at: None, + last_error: Some("network error".to_string()), + is_running: false, + datastream_cursors, + }; + + // In-memory row count from the most recent scan is 8 (the file hasn't + // changed since then). + let result = scan_job_file(job, 8, cursor, ScanMode::Incremental).expect("partial retry scan"); + + // Only Temp (ds-temp) should have observations for rows 6, 7, 8. Stage is + // caught up at row 8 and should emit nothing. 
+    let stage_obs: Vec<_> = result
+        .observations
+        .iter()
+        .filter(|o| o.datastream_id == "ds-stage")
+        .collect();
+    let temp_obs: Vec<_> = result
+        .observations
+        .iter()
+        .filter(|o| o.datastream_id == "ds-temp")
+        .collect();
+
+    assert!(
+        stage_obs.is_empty(),
+        "caught-up datastream should not re-emit rows, got {:?}",
+        stage_obs.iter().map(|o| o.row_index).collect::<Vec<_>>()
+    );
+    assert_eq!(
+        temp_obs.len(),
+        3,
+        "behind datastream should re-emit rows past its last_pushed_row_index"
+    );
+    assert_eq!(temp_obs[0].row_index, 6);
+    assert_eq!(temp_obs[1].row_index, 7);
+    assert_eq!(temp_obs[2].row_index, 8);
+
+    let _ = std::fs::remove_file(path);
+}
diff --git a/src/tests/runtime.rs b/src/tests/runtime.rs
new file mode 100644
index 0000000..edc6d64
--- /dev/null
+++ b/src/tests/runtime.rs
@@ -0,0 +1,220 @@
+use super::{
+    active_app_directory_name, copy_dir_contents, has_runtime_state, move_or_copy_dir_contents,
+    preferred_user_data_dir, AppState, APP_DIRECTORY_NAME, DEV_APP_DIRECTORY_NAME,
+};
+use crate::models::{
+    AuthType, ColumnMapping, FileConfig, IdentifierType, JobUpsertRequest, ServerConfig,
+    TimestampConfig,
+};
+use std::{
+    fs,
+    path::{Path, PathBuf},
+    time::{SystemTime, UNIX_EPOCH},
+};
+
+#[test]
+fn copy_dir_contents_copies_nested_runtime_state() {
+    let temp_root = unique_temp_dir("runtime-copy");
+    let source = temp_root.join("source");
+    let target = temp_root.join("target");
+
+    fs::create_dir_all(source.join("workspaces")).expect("create source workspaces");
+    fs::write(source.join("config.json"), "{}").expect("write config");
+    fs::write(
+        source.join("workspaces").join("workspace.json"),
+        "{\"datasources\":[]}",
+    )
+    .expect("write workspace");
+
+    copy_dir_contents(&source, &target).expect("copy runtime state");
+
+    assert!(target.join("config.json").exists());
+    assert!(target.join("workspaces").join("workspace.json").exists());
+
+    remove_temp_dir(&temp_root);
+}
+
+#[test]
+fn
copy_dir_contents_does_not_overwrite_existing_files() { + let temp_root = unique_temp_dir("runtime-preserve"); + let source = temp_root.join("source"); + let target = temp_root.join(APP_DIRECTORY_NAME); + + fs::create_dir_all(&source).expect("create source"); + fs::create_dir_all(&target).expect("create target"); + fs::write(source.join("config.json"), "{\"url\":\"new\"}").expect("write source"); + fs::write(target.join("config.json"), "{\"url\":\"existing\"}").expect("write target"); + + copy_dir_contents(&source, &target).expect("copy runtime state"); + + let persisted = fs::read_to_string(target.join("config.json")).expect("read target"); + assert_eq!(persisted, "{\"url\":\"existing\"}"); + + remove_temp_dir(&temp_root); +} + +#[test] +fn move_or_copy_dir_contents_moves_source_when_target_is_missing() { + let temp_root = unique_temp_dir("runtime-move"); + let source = temp_root.join("source"); + let target = temp_root.join("target"); + + fs::create_dir_all(&source).expect("create source"); + fs::write(source.join("config.json"), "{}").expect("write config"); + + move_or_copy_dir_contents(&source, &target).expect("move runtime state"); + + assert!(!source.exists()); + assert!(target.join("config.json").exists()); + + remove_temp_dir(&temp_root); +} + +#[test] +fn has_runtime_state_detects_config_or_workspace_dir() { + let temp_root = unique_temp_dir("runtime-state"); + let config_only = temp_root.join("config-only"); + let workspace_only = temp_root.join("workspace-only"); + + fs::create_dir_all(&config_only).expect("create config dir"); + fs::create_dir_all(workspace_only.join("workspaces")).expect("create workspace dir"); + fs::write(config_only.join("config.json"), "{}").expect("write config"); + + assert!(has_runtime_state(&config_only)); + assert!(has_runtime_state(&workspace_only)); + assert!(!has_runtime_state(&temp_root.join("empty"))); + + remove_temp_dir(&temp_root); +} + +#[test] +fn active_app_directory_name_matches_build_mode() { + let expected = if 
cfg!(debug_assertions) { + DEV_APP_DIRECTORY_NAME + } else { + APP_DIRECTORY_NAME + }; + + assert_eq!(active_app_directory_name(), expected); +} + +#[test] +fn preferred_user_data_dir_uses_app_data_dir_before_home_dir() { + let temp_root = unique_temp_dir("runtime-app-data"); + let app_data_dir = temp_root.join("app-data").join("com.streaming-data-loader"); + let home_dir = temp_root.join("home"); + + let resolved = + preferred_user_data_dir(Some(app_data_dir.clone()), Some(home_dir)).expect("resolve dir"); + + let expected = if cfg!(debug_assertions) { + app_data_dir.join("dev") + } else { + app_data_dir + }; + + assert_eq!(resolved, expected); + + remove_temp_dir(&temp_root); +} + +#[test] +fn preferred_user_data_dir_falls_back_to_home_dir_without_documents() { + let temp_root = unique_temp_dir("runtime-home-fallback"); + let home_dir = temp_root.join("home"); + + let resolved = preferred_user_data_dir(None, Some(home_dir.clone())).expect("resolve dir"); + + assert_eq!(resolved, home_dir.join(active_app_directory_name())); + + remove_temp_dir(&temp_root); +} + +#[test] +fn status_snapshot_includes_job_runtime_details() { + let temp_root = unique_temp_dir("runtime-start-job"); + let csv_path = temp_root.join("example.csv"); + fs::write( + &csv_path, + "\ +Timestamp,Stage_ft +", + ) + .expect("write csv"); + + let state = AppState::new(temp_root.clone()).expect("create app state"); + state.config_store().ensure().expect("ensure config store"); + state + .config_store() + .set_server( + ServerConfig { + auth_type: AuthType::Apikey, + url: "https://example.com".to_string(), + api_key: "test-api-key".to_string(), + workspace_id: "workspace-1".to_string(), + workspace_name: "Test Workspace".to_string(), + ..ServerConfig::default() + }, + "Test Workspace", + ) + .expect("set server"); + + let job = state + .config_store() + .create_job(JobUpsertRequest { + name: "Example".to_string(), + enabled: true, + file_path: csv_path.to_string_lossy().into_owned(), + 
schedule_minutes: 15, + file_config: FileConfig { + header_row: Some(1), + data_start_row: 2, + delimiter: ",".to_string(), + identifier_type: IdentifierType::Name, + timestamp: TimestampConfig::default(), + }, + column_mappings: vec![ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Stage".to_string(), + }], + }) + .expect("create job"); + + state + .append_log( + &job.id, + "Manual run requested", + crate::models::LogLevel::Info, + ) + .expect("append log"); + + let snapshot = state.status_snapshot().expect("build status snapshot"); + let summary = snapshot + .jobs + .iter() + .find(|entry| entry.id == job.id) + .expect("job summary"); + + assert_eq!(summary.name, "Example"); + assert_eq!(summary.status, crate::models::JobStatus::Pending); + assert_eq!(snapshot.config.jobs.len(), 1); + + remove_temp_dir(&temp_root); +} + +fn unique_temp_dir(label: &str) -> PathBuf { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("system time should be after epoch") + .as_nanos(); + let root = std::env::temp_dir().join(format!("sdl-{label}-{nanos}")); + fs::create_dir_all(&root).expect("create temp root"); + root +} + +fn remove_temp_dir(path: &Path) { + if path.exists() { + fs::remove_dir_all(path).expect("remove temp dir"); + } +} diff --git a/src/tests/service_paths.rs b/src/tests/service_paths.rs new file mode 100644 index 0000000..51e6617 --- /dev/null +++ b/src/tests/service_paths.rs @@ -0,0 +1,69 @@ +use super::{ + active_shared_service_directory_name, service_config_dir_override_from_args, + SERVICE_CONFIG_DIR_FLAG, +}; +use std::{ffi::OsString, path::PathBuf}; + +#[test] +fn service_config_dir_override_reads_following_argument() { + let override_path = service_config_dir_override_from_args([ + OsString::from("streaming-data-loader.exe"), + OsString::from("--service"), + OsString::from(SERVICE_CONFIG_DIR_FLAG), + OsString::from(r"C:\Projects\streaming-data-loader\.sdl-dev-data"), + ]); + 
+ assert_eq!( + override_path, + Some(PathBuf::from( + r"C:\Projects\streaming-data-loader\.sdl-dev-data" + )) + ); +} + +#[test] +fn service_config_dir_override_ignores_missing_value() { + let override_path = service_config_dir_override_from_args([ + OsString::from("streaming-data-loader.exe"), + OsString::from(SERVICE_CONFIG_DIR_FLAG), + ]); + + assert_eq!(override_path, None); +} + +#[test] +fn service_config_dir_override_reads_inline_argument_with_spaces() { + let override_path = service_config_dir_override_from_args([ + OsString::from("streaming-data-loader.exe"), + OsString::from("--service"), + OsString::from(r#"--service-config-dir="C:\ProgramData\Streaming Data Loader""#), + ]); + + assert_eq!( + override_path, + Some(PathBuf::from(r"C:\ProgramData\Streaming Data Loader")) + ); +} + +#[test] +fn service_config_dir_override_reads_inline_argument_without_quotes() { + let override_path = service_config_dir_override_from_args([ + OsString::from("streaming-data-loader.exe"), + OsString::from("--service"), + OsString::from(r"--service-config-dir=C:\ProgramData\Streaming"), + ]); + + assert_eq!(override_path, Some(PathBuf::from(r"C:\ProgramData\Streaming"))); +} + +#[cfg(windows)] +#[test] +fn active_shared_service_directory_name_uses_windows_safe_folder_name() { + let expected = if cfg!(debug_assertions) { + "StreamingDataLoaderDev" + } else { + "StreamingDataLoader" + }; + + assert_eq!(active_shared_service_directory_name(), expected); +} diff --git a/src/tests/service_runtime.rs b/src/tests/service_runtime.rs new file mode 100644 index 0000000..f148a01 --- /dev/null +++ b/src/tests/service_runtime.rs @@ -0,0 +1,59 @@ +use super::{acquire_daemon_pid_lock, PID_LOCK_FILENAME}; +use std::{fs, path::PathBuf, time::UNIX_EPOCH}; + +fn unique_temp_dir(label: &str) -> PathBuf { + let nanos = std::time::SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("system time") + .as_nanos(); + let root = std::env::temp_dir().join(format!("sdl-{label}-{nanos}")); + 
fs::create_dir_all(&root).expect("create temp root"); + root +} + +/// bug_007: a second daemon pointed at the same shared config dir must fail +/// to acquire the pid lock while the first one is running. +#[test] +fn second_daemon_cannot_acquire_pid_lock_while_first_is_held() { + let dir = unique_temp_dir("pid-lock"); + let first = acquire_daemon_pid_lock(&dir).expect("first lock should succeed"); + + let second = acquire_daemon_pid_lock(&dir); + assert!( + second.is_err(), + "second lock acquisition should fail while first is held" + ); + let message = second.unwrap_err(); + assert!( + message.contains("already running"), + "error should explain that another daemon is running, got: {message}" + ); + + drop(first); + + // Once the first lock is released the second daemon can start. + let after = acquire_daemon_pid_lock(&dir); + assert!( + after.is_ok(), + "dropping the first lock should free the pid file, got: {after:?}" + ); + + let _ = fs::remove_dir_all(&dir); +} + +/// Lock acquisition writes the current PID to the pid file so operators can +/// see which daemon holds the lock. 
+#[test]
+fn pid_lock_records_current_process_id() {
+    let dir = unique_temp_dir("pid-lock-content");
+    let _guard = acquire_daemon_pid_lock(&dir).expect("acquire lock");
+
+    let contents = fs::read_to_string(dir.join(PID_LOCK_FILENAME)).expect("read pid file");
+    let parsed: u32 = contents
+        .trim()
+        .parse()
+        .unwrap_or_else(|_| panic!("pid file should contain a u32, got: {contents:?}"));
+    assert_eq!(parsed, std::process::id());
+
+    let _ = fs::remove_dir_all(&dir);
+}
diff --git a/src/tests/uploader.rs b/src/tests/uploader.rs
new file mode 100644
index 0000000..cb58374
--- /dev/null
+++ b/src/tests/uploader.rs
@@ -0,0 +1,386 @@
+use super::*;
+use crate::{
+    config_store::ConfigStore,
+    models::{
+        AuthType, ColumnMapping, FileConfig, IdentifierType, JobUpsertRequest, LogLevel,
+        ServerConfig, TimestampConfig,
+    },
+    observation_queue::bounded,
+};
+use serde_json::json;
+use std::{
+    collections::VecDeque,
+    path::PathBuf,
+    sync::{
+        atomic::{AtomicUsize, Ordering},
+        Arc,
+    },
+};
+use tokio::{
+    io::{AsyncReadExt, AsyncWriteExt},
+    net::TcpListener,
+    sync::{oneshot, Mutex},
+    task::JoinHandle,
+    time::timeout,
+};
+
+struct TestObservationServer {
+    base_url: String,
+    request_count: Arc<AtomicUsize>,
+    bodies: Arc<Mutex<Vec<String>>>,
+    shutdown: Option<oneshot::Sender<()>>,
+    task: JoinHandle<()>,
+}
+
+impl TestObservationServer {
+    async fn spawn(statuses: Vec<u16>) -> Self {
+        let listener = TcpListener::bind("127.0.0.1:0")
+            .await
+            .expect("bind test server");
+        let addr = listener.local_addr().expect("listener addr");
+        let request_count = Arc::new(AtomicUsize::new(0));
+        let bodies = Arc::new(Mutex::new(Vec::new()));
+        let statuses = Arc::new(Mutex::new(VecDeque::from(statuses)));
+        let (shutdown_tx, mut shutdown_rx) = oneshot::channel();
+
+        let task = tokio::spawn({
+            let request_count = request_count.clone();
+            let bodies = bodies.clone();
+            let statuses = statuses.clone();
+
+            async move {
+                loop {
+                    tokio::select!
{
+                        _ = &mut shutdown_rx => break,
+                        accept_result = listener.accept() => {
+                            let Ok((mut socket, _)) = accept_result else {
+                                break;
+                            };
+                            let Some(body) = read_request_body(&mut socket).await else {
+                                continue;
+                            };
+
+                            request_count.fetch_add(1, Ordering::SeqCst);
+                            bodies.lock().await.push(body);
+
+                            let status = statuses.lock().await.pop_front().unwrap_or(200);
+                            let payload = if status >= 400 {
+                                json!({ "detail": "temporary outage" }).to_string()
+                            } else {
+                                "{}".to_string()
+                            };
+                            let response = format!(
+                                "HTTP/1.1 {status} {}\r\nContent-Type: application/json\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}",
+                                reason_phrase(status),
+                                payload.len(),
+                                payload
+                            );
+                            let _ = socket.write_all(response.as_bytes()).await;
+                        }
+                    }
+                }
+            }
+        });
+
+        Self {
+            base_url: format!("http://{addr}"),
+            request_count,
+            bodies,
+            shutdown: Some(shutdown_tx),
+            task,
+        }
+    }
+
+    fn request_count(&self) -> usize {
+        self.request_count.load(Ordering::SeqCst)
+    }
+
+    async fn bodies(&self) -> Vec<String> {
+        self.bodies.lock().await.clone()
+    }
+
+    async fn shutdown(mut self) {
+        if let Some(shutdown) = self.shutdown.take() {
+            let _ = shutdown.send(());
+        }
+        let _ = self.task.await;
+    }
+}
+
+async fn read_request_body(socket: &mut tokio::net::TcpStream) -> Option<String> {
+    let mut buffer = Vec::new();
+    let mut header_end = None;
+    let mut content_length = 0usize;
+
+    loop {
+        let mut chunk = [0u8; 2048];
+        let bytes_read = socket.read(&mut chunk).await.ok()?;
+        if bytes_read == 0 {
+            break;
+        }
+        buffer.extend_from_slice(&chunk[..bytes_read]);
+
+        if header_end.is_none() {
+            if let Some(index) = buffer.windows(4).position(|window| window == b"\r\n\r\n") {
+                header_end = Some(index + 4);
+                let headers = String::from_utf8_lossy(&buffer[..index + 4]);
+                content_length = headers
+                    .lines()
+                    .find_map(|line| {
+                        let (name, value) = line.split_once(':')?;
+                        name.eq_ignore_ascii_case("content-length")
+                            .then(|| value.trim().parse::<usize>().ok())
+                            .flatten()
+                    })
+                    .unwrap_or(0);
+            }
+
} + + if let Some(end) = header_end { + if buffer.len() >= end + content_length { + return Some( + String::from_utf8_lossy(&buffer[end..end + content_length]).into_owned(), + ); + } + } + } + + header_end.map(|end| String::from_utf8_lossy(&buffer[end..]).into_owned()) +} + +fn reason_phrase(status: u16) -> &'static str { + match status { + 200 => "OK", + 201 => "Created", + 409 => "Conflict", + 500 => "Internal Server Error", + 502 => "Bad Gateway", + _ => "OK", + } +} + +fn temp_test_dir(label: &str) -> PathBuf { + let path = std::env::temp_dir().join(format!( + "sdl-uploader-{label}-{}-{}", + std::process::id(), + Utc::now().timestamp_nanos_opt().unwrap_or_default() + )); + std::fs::create_dir_all(&path).expect("create temp dir"); + path +} + +fn sample_server(url: String) -> ServerConfig { + ServerConfig { + auth_type: AuthType::Apikey, + url, + api_key: "test-api-key".to_string(), + workspace_id: "workspace-1".to_string(), + workspace_name: "Test Workspace".to_string(), + ..ServerConfig::default() + } +} + +fn sample_job_request(file_path: &str) -> JobUpsertRequest { + JobUpsertRequest { + name: "Uploader Test".to_string(), + enabled: true, + file_path: file_path.to_string(), + schedule_minutes: 15, + file_config: FileConfig { + header_row: Some(3), + data_start_row: 4, + delimiter: ",".to_string(), + identifier_type: IdentifierType::Name, + timestamp: TimestampConfig::default(), + }, + column_mappings: vec![ColumnMapping { + csv_column: "Stage_ft".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Stage".to_string(), + }], + } +} + +fn test_observation(job_id: &str, datastream_id: &str, row_index: u64) -> QueuedObservation { + QueuedObservation { + context: Arc::new(ObservationContext { + server: Arc::new(crate::models::ServerConfig::default()), + job_id: job_id.to_string(), + datastream_id: datastream_id.to_string(), + datastream_name: "Test".to_string(), + }), + timestamp: Utc::now(), + row_index, + value: json!(1.0), + } +} + +#[test] +fn 
batch_key_groups_by_server_and_datastream() { + let obs_a1 = test_observation("job-1", "ds-a", 1); + let obs_a2 = test_observation("job-1", "ds-a", 2); + let obs_b1 = test_observation("job-1", "ds-b", 1); + + let key_a1 = BatchKey::from(&obs_a1); + let key_a2 = BatchKey::from(&obs_a2); + let key_b1 = BatchKey::from(&obs_b1); + + assert_eq!(key_a1, key_a2, "same datastream should produce same key"); + assert_ne!(key_a1, key_b1, "different datastreams should differ"); +} + +#[test] +fn summarize_batch_tracks_max_timestamp_and_row_index() { + let t1 = Utc::now() - chrono::Duration::minutes(10); + let t2 = Utc::now(); + + let batch = PendingBatch { + context: Arc::new(ObservationContext { + server: Arc::new(crate::models::ServerConfig::default()), + job_id: "job-1".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Test".to_string(), + }), + rows: vec![ + QueuedObservation { + context: Arc::new(ObservationContext { + server: Arc::new(crate::models::ServerConfig::default()), + job_id: "job-1".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Test".to_string(), + }), + timestamp: t1, + row_index: 5, + value: json!(1.0), + }, + QueuedObservation { + context: Arc::new(ObservationContext { + server: Arc::new(crate::models::ServerConfig::default()), + job_id: "job-1".to_string(), + datastream_id: "ds-1".to_string(), + datastream_name: "Test".to_string(), + }), + timestamp: t2, + row_index: 10, + value: json!(2.0), + }, + ], + }; + + let summaries = summarize_batch(&batch); + let summary = summaries.get("job-1").expect("should have job-1"); + assert_eq!(summary.observation_count, 2); + assert_eq!(summary.max_timestamp, t2); + assert_eq!(summary.max_row_index, 10); +} + +#[test] +fn batch_size_constant_is_reasonable() { + assert_eq!(BATCH_SIZE, 500); + assert_eq!(FLUSH_INTERVAL, Duration::from_secs(1)); +} + +#[tokio::test] +async fn upload_worker_retries_transient_server_errors_and_persists_success() { + let server = 
TestObservationServer::spawn(vec![500, 200]).await; + let temp_dir = temp_test_dir("retry"); + let config_dir = temp_dir.join("config"); + let source_path = temp_dir.join("source.csv"); + std::fs::write(&source_path, "placeholder").expect("write source placeholder"); + + let config_store = Arc::new(ConfigStore::new(config_dir)); + config_store.ensure().expect("ensure config store"); + let server_config = sample_server(server.base_url.clone()); + config_store + .set_server(server_config.clone(), "Test Workspace") + .expect("set server"); + let job = config_store + .create_job(sample_job_request( + source_path.to_str().expect("utf-8 path"), + )) + .expect("create job"); + + let (tx, rx) = bounded(8); + let worker = spawn_upload_worker( + rx, + Arc::new(HydroServerService::new().expect("hydroserver service")), + config_store.clone(), + ); + + let first_timestamp = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 0, 0) + .unwrap() + .and_utc(); + let second_timestamp = chrono::NaiveDate::from_ymd_opt(2026, 4, 3) + .unwrap() + .and_hms_opt(8, 5, 0) + .unwrap() + .and_utc(); + + let context = Arc::new(ObservationContext { + server: Arc::new(server_config), + job_id: job.id.clone(), + datastream_id: "ds-1".to_string(), + datastream_name: "Stage".to_string(), + }); + + tx.send(QueuedObservation { + context: context.clone(), + timestamp: first_timestamp, + row_index: 4, + value: json!(2.41), + }) + .await + .expect("send first observation"); + tx.send(QueuedObservation { + context, + timestamp: second_timestamp, + row_index: 5, + value: json!(2.45), + }) + .await + .expect("send second observation"); + drop(tx); + + timeout(Duration::from_secs(10), worker) + .await + .expect("worker timeout") + .expect("join worker"); + + assert_eq!( + server.request_count(), + 2, + "the uploader should retry once after the transient 500 response" + ); + + let bodies = server.bodies().await; + assert_eq!(bodies.len(), 2); + assert_eq!( + bodies[0], bodies[1], + 
"retries should resend the same payload"
+    );
+    assert!(
+        bodies[0].contains("\"phenomenonTime\"") && bodies[0].contains("\"result\""),
+        "the request body should use the HydroServer bulk observation schema"
+    );
+
+    let cursor = config_store.cursor_for(&job.id).expect("load cursor");
+    assert_eq!(cursor.last_error, None);
+    assert_eq!(cursor.last_pushed_row_index, Some(5));
+    assert_eq!(cursor.last_pushed_timestamp, Some(second_timestamp));
+
+    let logs = config_store.logs_for(&job.id, 50).expect("load logs");
+    assert!(
+        logs.iter().any(|entry| {
+            entry.level == LogLevel::Info
+                && entry
+                    .message
+                    .contains("Loaded 2 observation(s) to datastream Stage.")
+        }),
+        "successful uploads should be recorded in the job log"
+    );
+
+    server.shutdown().await;
+    let _ = std::fs::remove_dir_all(temp_dir);
+}
diff --git a/src/timestamp.rs b/src/timestamp.rs
new file mode 100644
index 0000000..a22a776
--- /dev/null
+++ b/src/timestamp.rs
@@ -0,0 +1,201 @@
+use chrono::{
+    DateTime, Duration, FixedOffset, LocalResult, NaiveDate, NaiveDateTime, TimeZone, Utc,
+};
+use chrono_tz::Tz;
+
+use crate::models::{TimestampConfig, TimestampFormatType, TimezoneModeType};
+
+pub fn parse_timestamp_to_utc(
+    raw_value: &str,
+    config: &TimestampConfig,
+) -> Result<DateTime<Utc>, String> {
+    let value = raw_value.trim();
+    if value.is_empty() {
+        return Err("Timestamp value is empty.".to_string());
+    }
+
+    match config.format {
+        TimestampFormatType::Iso8601 => parse_iso8601_to_utc(value),
+        TimestampFormatType::Naive | TimestampFormatType::Custom => {
+            let naive = parse_naive_timestamp(value, config)?;
+            localize_naive_to_utc(naive, config)
+        }
+    }
+}
+
+pub fn validate_fixed_offset(value: &str) -> Result<FixedOffset, String> {
+    let clean = value.trim();
+    let bytes = clean.as_bytes();
+
+    let (hours_str, minutes_str) = match bytes.len() {
+        5 if bytes[3] != b':' => (&clean[1..3], &clean[3..5]),
+        6 if bytes[3] == b':' => (&clean[1..3], &clean[4..6]),
+        _ => return Err(invalid_offset_message(clean)),
+    };
+
+    let
sign = match bytes[0] {
+        b'+' => 1,
+        b'-' => -1,
+        _ => return Err(invalid_offset_message(clean)),
+    };
+
+    let hours = hours_str
+        .parse::<i32>()
+        .map_err(|_| invalid_offset_message(clean))?;
+    let minutes = minutes_str
+        .parse::<i32>()
+        .map_err(|_| invalid_offset_message(clean))?;
+
+    if hours > 14 || minutes >= 60 || (hours == 14 && minutes != 0) {
+        return Err(invalid_offset_message(clean));
+    }
+
+    FixedOffset::east_opt(sign * (hours * 3600 + minutes * 60))
+        .ok_or_else(|| invalid_offset_message(clean))
+}
+
+fn parse_iso8601_to_utc(value: &str) -> Result<DateTime<Utc>, String> {
+    let normalized = normalize_iso8601(value);
+    if let Ok(parsed) = DateTime::parse_from_rfc3339(&normalized) {
+        return Ok(parsed.with_timezone(&Utc));
+    }
+
+    let naive = parse_flexible_naive(value)
+        .ok_or_else(|| format!("Couldn't parse timestamp '{value}' as ISO8601."))?;
+    Ok(DateTime::<Utc>::from_naive_utc_and_offset(naive, Utc))
+}
+
+fn parse_naive_timestamp(value: &str, config: &TimestampConfig) -> Result<NaiveDateTime, String> {
+    match config.format {
+        TimestampFormatType::Custom => {
+            let format = config.custom_format.as_deref().ok_or_else(|| {
+                "Custom timestamp formats require a customFormat value.".to_string()
+            })?;
+            parse_naive_with_format(value, format).ok_or_else(|| {
+                format!("Couldn't parse timestamp '{value}' with format '{format}'.")
+            })
+        }
+        TimestampFormatType::Naive => parse_flexible_naive(value)
+            .ok_or_else(|| format!("Couldn't parse timestamp '{value}' as a naive timestamp.")),
+        TimestampFormatType::Iso8601 => unreachable!(),
+    }
+}
+
+fn parse_naive_with_format(value: &str, format: &str) -> Option<NaiveDateTime> {
+    NaiveDateTime::parse_from_str(value, format)
+        .ok()
+        .or_else(|| {
+            NaiveDate::parse_from_str(value, format)
+                .ok()
+                .and_then(|date| date.and_hms_opt(0, 0, 0))
+        })
+}
+
+fn parse_flexible_naive(value: &str) -> Option<NaiveDateTime> {
+    for format in [
+        "%Y-%m-%d %H:%M:%S%.f",
+        "%Y-%m-%dT%H:%M:%S%.f",
+        "%Y-%m-%d %H:%M",
+        "%Y-%m-%dT%H:%M",
+        "%m/%d/%Y %H:%M:%S",
+        "%m/%d/%Y %H:%M",
+        "%Y/%m/%d 
%H:%M:%S", + "%Y/%m/%d %H:%M", + "%m/%d/%Y", + "%Y/%m/%d", + "%Y-%m-%d", + ] { + if let Some(parsed) = parse_naive_with_format(value, format) { + return Some(parsed); + } + } + + None +} + +fn localize_naive_to_utc( + naive: NaiveDateTime, + config: &TimestampConfig, +) -> Result, String> { + match config.timezone_mode { + TimezoneModeType::Utc | TimezoneModeType::EmbeddedOffset => { + Ok(DateTime::::from_naive_utc_and_offset(naive, Utc)) + } + TimezoneModeType::FixedOffset => { + let offset = validate_fixed_offset(config.timezone.as_deref().ok_or_else(|| { + "Timezone is required when using fixedOffset or daylightSavings timestamp modes." + .to_string() + })?)?; + let localized = offset + .from_local_datetime(&naive) + .single() + .ok_or_else(|| format!("Couldn't localize timestamp '{naive}' with offset."))?; + Ok(localized.with_timezone(&Utc)) + } + TimezoneModeType::DaylightSavings => { + let timezone_name = config.timezone.as_deref().ok_or_else(|| { + "Timezone is required when using fixedOffset or daylightSavings timestamp modes." 
+                    .to_string()
+            })?;
+            let timezone = timezone_name
+                .parse::<Tz>()
+                .map_err(|_| format!("Invalid IANA timezone '{timezone_name}'."))?;
+            localize_with_timezone_shift_forward(timezone, naive)
+                .map(|value| value.with_timezone(&Utc))
+        }
+    }
+}
+
+fn localize_with_timezone_shift_forward(
+    timezone: Tz,
+    naive: NaiveDateTime,
+) -> Result<DateTime<Tz>, String> {
+    let mut candidate = naive;
+    for _ in 0..180 {
+        match timezone.from_local_datetime(&candidate) {
+            LocalResult::Single(value) => return Ok(value),
+            LocalResult::Ambiguous(_, latest) => return Ok(latest),
+            LocalResult::None => {
+                candidate += Duration::minutes(1);
+            }
+        }
+    }
+
+    Err(format!(
+        "Couldn't localize timestamp '{naive}' in timezone '{}'.",
+        timezone
+    ))
+}
+
+fn normalize_iso8601(value: &str) -> String {
+    let mut normalized = value.trim().to_string();
+    if normalized.contains(' ') && !normalized.contains('T') {
+        normalized = normalized.replacen(' ', "T", 1);
+    }
+
+    if let Some(stripped) = normalized.strip_suffix('Z') {
+        return format!("{stripped}Z");
+    }
+
+    if normalized.len() >= 5 {
+        let suffix = &normalized[normalized.len() - 5..];
+        if (suffix.starts_with('+') || suffix.starts_with('-'))
+            && suffix[1..].chars().all(|char| char.is_ascii_digit())
+        {
+            return format!(
+                "{}{}:{}",
+                &normalized[..normalized.len() - 5],
+                &suffix[..3],
+                &suffix[3..]
+            );
+        }
+    }
+
+    normalized
+}
+
+fn invalid_offset_message(value: &str) -> String {
+    format!(
+        "Invalid timestamp UTC offset '{value}'. UTC offsets must be specified in ±HHMM or ±HH:MM format (e.g: '-0700' or '-07:00') with hours between 00 and 14 and minutes between 00 and 59."
+    )
+}
diff --git a/src/uploader.rs b/src/uploader.rs
new file mode 100644
index 0000000..d362965
--- /dev/null
+++ b/src/uploader.rs
@@ -0,0 +1,312 @@
+use std::{collections::HashMap, num::NonZeroU32, sync::Arc, time::Duration};
+
+use chrono::{DateTime, Utc};
+use governor::{clock::DefaultClock, state::InMemoryState, Quota, RateLimiter};
+use tokio::{
+    task::JoinHandle,
+    time::{interval, sleep, MissedTickBehavior},
+};
+use tracing::{error, info, warn};
+
+use crate::{
+    config_store::ConfigStore,
+    hydroserver::{HydroServerService, ObservationPayloadRow},
+    models::LogLevel,
+    observation_queue::{ObservationContext, ObservationReceiver, QueuedObservation},
+};
+
+const BATCH_SIZE: usize = 500;
+const FLUSH_INTERVAL: Duration = Duration::from_secs(1);
+const DEFAULT_REQUESTS_PER_SECOND: u32 = 10;
+const MAX_RETRIES: usize = 3;
+
+type DirectRateLimiter =
+    RateLimiter<governor::state::NotKeyed, InMemoryState, DefaultClock>;
+
+pub fn spawn_upload_worker(
+    mut receiver: ObservationReceiver,
+    hydroserver: Arc<HydroServerService>,
+    config_store: Arc<ConfigStore>,
+) -> JoinHandle<()> {
+    tokio::spawn(async move {
+        let rps = std::env::var("SDL_REQUESTS_PER_SECOND")
+            .ok()
+            .and_then(|value| value.trim().parse::<u32>().ok())
+            .filter(|value| *value > 0)
+            .unwrap_or(DEFAULT_REQUESTS_PER_SECOND);
+        let rate_limiter = RateLimiter::direct(Quota::per_second(
+            NonZeroU32::new(rps).expect("non-zero rate"),
+        ));
+        let mut batches: HashMap<BatchKey, PendingBatch> = HashMap::new();
+        let mut flush_timer = interval(FLUSH_INTERVAL);
+        flush_timer.set_missed_tick_behavior(MissedTickBehavior::Delay);
+
+        loop {
+            tokio::select!
{ + maybe_item = receiver.recv() => { + match maybe_item { + Some(item) => { + let key = BatchKey::from(&item); + let batch = batches.entry(key.clone()).or_insert_with(|| PendingBatch { + context: item.context.clone(), + rows: Vec::new(), + }); + batch.rows.push(item); + + if batch.rows.len() >= BATCH_SIZE { + if let Some(batch) = batches.remove(&key) { + flush_batch(batch, &hydroserver, &config_store, &rate_limiter).await; + } + } + } + None => break, + } + } + _ = flush_timer.tick() => { + if batches.is_empty() { + continue; + } + + let pending = batches.drain().map(|(_, batch)| batch).collect::<Vec<_>>(); + for batch in pending { + flush_batch(batch, &hydroserver, &config_store, &rate_limiter).await; + } + } + } + } + + let pending = batches.drain().map(|(_, batch)| batch).collect::<Vec<_>>(); + for batch in pending { + flush_batch(batch, &hydroserver, &config_store, &rate_limiter).await; + } + }) +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct BatchKey { + server_signature: String, + datastream_id: String, +} + +impl BatchKey { + fn from(item: &QueuedObservation) -> Self { + let server = item.context.server.as_ref(); + let credential = match server.auth_type { + crate::models::AuthType::Apikey => server.api_key.trim(), + crate::models::AuthType::Userpass => server.username.trim(), + }; + + Self { + server_signature: format!( + "{:?}|{}|{}|{}", + server.auth_type, server.url, server.workspace_id, credential + ), + datastream_id: item.context.datastream_id.clone(), + } + } +} + +struct PendingBatch { + context: Arc<ObservationContext>, + rows: Vec<QueuedObservation>, +} + +async fn flush_batch( + batch: PendingBatch, + hydroserver: &Arc<HydroServerService>, + config_store: &Arc<ConfigStore>, + rate_limiter: &DirectRateLimiter, +) { + if batch.rows.is_empty() { + return; + } + + rate_limiter.until_ready().await; + + let payload = batch + .rows + .iter() + .map(|row| ObservationPayloadRow { + phenomenon_time: row.timestamp, + result: row.value.clone(), + }) + .collect::<Vec<_>>(); + + let result = upload_with_retry(hydroserver, &batch.context,
&payload).await; + match result { + Ok(()) => { + info!( + job_id = %batch.context.job_id, + datastream_id = %batch.context.datastream_id, + observation_count = batch.rows.len(), + "uploaded observation batch" + ); + persist_success(config_store.clone(), &batch).await; + } + Err(message) => { + error!( + job_id = %batch.context.job_id, + datastream_id = %batch.context.datastream_id, + observation_count = batch.rows.len(), + error = %message, + "failed to upload observation batch" + ); + persist_failure(config_store.clone(), &batch, &message).await; + } + } +} + +async fn upload_with_retry( + hydroserver: &Arc<HydroServerService>, + context: &ObservationContext, + payload: &[ObservationPayloadRow], +) -> Result<(), String> { + let mut backoff = Duration::from_millis(500); + + for attempt in 0..=MAX_RETRIES { + match hydroserver + .post_observations_batch(context.server.as_ref(), &context.datastream_id, payload) + .await + { + Ok(()) => { + if attempt > 0 { + info!( + datastream_id = %context.datastream_id, + attempt = attempt + 1, + "upload succeeded after retry" + ); + } + return Ok(()); + } + Err(error) if error.is_conflict() => { + // Observations already exist on the server — treat as success + // so the cursor advances and we don't re-attempt indefinitely. + return Ok(()); + } + Err(error) if error.is_retryable() && attempt < MAX_RETRIES => { + let jitter = jitter_duration(backoff); + let delay = backoff + jitter; + warn!( + datastream_id = %context.datastream_id, + attempt = attempt + 1, + delay_ms = delay.as_millis(), + error = %error, + "upload attempt failed with a retryable error" + ); + sleep(delay).await; + backoff *= 2; + } + Err(error) => return Err(error.to_string()), + } + } + + Err("Observation upload failed after retries.".to_string()) +} + +/// Returns a jitter of 0..25% of the base duration, derived from system time nanos.
+fn jitter_duration(base: Duration) -> Duration { + let nanos = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .map(|d| d.subsec_nanos()) + .unwrap_or(0); + let jitter_fraction = (nanos % 250) as f64 / 1000.0; // 0.0 to 0.249 + Duration::from_secs_f64(base.as_secs_f64() * jitter_fraction) +} + +async fn persist_success(config_store: Arc<ConfigStore>, batch: &PendingBatch) { + let datastream_id = batch.context.datastream_id.clone(); + let datastream_name = batch.context.datastream_name.clone(); + let updates = summarize_batch(batch); + + for update in updates.into_values() { + let config_store = config_store.clone(); + let datastream_id = datastream_id.clone(); + let datastream_name = datastream_name.clone(); + let observation_count = update.observation_count; + let _ = tokio::task::spawn_blocking(move || { + config_store.record_datastream_success( + &update.job_id, + &datastream_id, + update.max_row_index, + update.max_timestamp, + Utc::now(), + )?; + config_store.append_log( + &update.job_id, + crate::models::JobLogEntry { + timestamp: Utc::now(), + level: LogLevel::Info, + message: format!( + "Loaded {observation_count} observation(s) to datastream {datastream_name}."
+ ), + }, + )?; + Ok::<(), String>(()) + }) + .await; + } +} + +async fn persist_failure(config_store: Arc<ConfigStore>, batch: &PendingBatch, message: &str) { + let datastream_id = batch.context.datastream_id.clone(); + let message = message.to_string(); + let updates = summarize_batch(batch); + + for update in updates.into_values() { + let config_store = config_store.clone(); + let datastream_id = datastream_id.clone(); + let message = message.clone(); + let _ = tokio::task::spawn_blocking(move || { + config_store.record_datastream_failure( + &update.job_id, + &datastream_id, + &message, + Utc::now(), + )?; + config_store.append_log( + &update.job_id, + crate::models::JobLogEntry { + timestamp: Utc::now(), + level: LogLevel::Error, + message: message.clone(), + }, + )?; + Ok::<(), String>(()) + }) + .await; + } +} + +fn summarize_batch(batch: &PendingBatch) -> HashMap<String, JobUploadSummary> { + let mut updates = HashMap::new(); + for row in &batch.rows { + let entry = updates + .entry(row.context.job_id.clone()) + .or_insert_with(|| JobUploadSummary { + job_id: row.context.job_id.clone(), + max_timestamp: row.timestamp, + max_row_index: row.row_index, + observation_count: 0, + }); + + if row.timestamp > entry.max_timestamp { + entry.max_timestamp = row.timestamp; + } + if row.row_index > entry.max_row_index { + entry.max_row_index = row.row_index; + } + entry.observation_count += 1; + } + updates +} + +struct JobUploadSummary { + job_id: String, + max_timestamp: DateTime<Utc>, + max_row_index: u64, + observation_count: usize, +} + +#[cfg(test)] +#[path = "tests/uploader.rs"] +mod tests; diff --git a/tauri.conf.json b/tauri.conf.json new file mode 100644 index 0000000..b0ec960 --- /dev/null +++ b/tauri.conf.json @@ -0,0 +1,42 @@ +{ + "$schema": "https://schema.tauri.app/config/2", + "productName": "Streaming Data Loader", + "identifier": "com.streaming-data-loader", + "build": { + "beforeDevCommand": "npm run dev:tauri", + "devUrl": "http://localhost:1420", + "beforeBuildCommand": "npm run build", +
"frontendDist": "dist" + }, + "app": { + "windows": [ + { + "title": "Streaming Data Loader", + "width": 1280, + "height": 840, + "minWidth": 1024, + "minHeight": 720, + "center": true, + "resizable": true, + "theme": "Dark", + "titleBarStyle": "Transparent", + "backgroundColor": "#2f3133", + "visible": true + } + ], + "security": { + "csp": null + } + }, + "bundle": { + "active": true, + "targets": "all", + "icon": [ + "icons/32x32.png", + "icons/128x128.png", + "icons/128x128@2x.png", + "icons/icon.icns", + "icons/icon.ico" + ] + } +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..7ebc9e6 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2020", + "useDefineForClassFields": true, + "module": "ESNext", + "lib": ["ES2020", "DOM", "DOM.Iterable"], + "skipLibCheck": true, + "moduleResolution": "Bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true + }, + "include": ["frontend/**/*.ts", "frontend/**/*.d.ts", "frontend/tests/**/*.ts"] +} diff --git a/tsconfig.node.json b/tsconfig.node.json new file mode 100644 index 0000000..3efc26c --- /dev/null +++ b/tsconfig.node.json @@ -0,0 +1,12 @@ +{ + "compilerOptions": { + "module": "ESNext", + "moduleResolution": "Bundler", + "resolveJsonModule": true, + "allowSyntheticDefaultImports": true, + "types": ["node"], + "noEmit": true, + "strict": true + }, + "include": ["vite.config.ts"] +} diff --git a/vite.config.ts b/vite.config.ts new file mode 100644 index 0000000..9fef9a6 --- /dev/null +++ b/vite.config.ts @@ -0,0 +1,23 @@ +import vue from "@vitejs/plugin-vue" +import { defineConfig, loadEnv } from "vite" + +export default defineConfig(({ mode }) => { + const env = loadEnv(mode, process.cwd(), "") + const frontendHost = env.SDL_FRONTEND_HOST || "localhost" + const frontendPort = Number(env.SDL_FRONTEND_PORT || "1420") + const shouldOpenBrowser = 
env.SDL_OPEN_BROWSER !== "false" + + return { + clearScreen: false, + plugins: [vue()], + server: { + host: frontendHost, + open: shouldOpenBrowser, + port: frontendPort, + strictPort: true, + watch: { + ignored: ["**/target/**"], + }, + }, + } +})