diff --git a/.changeset/bright-cache-dance.md b/.changeset/bright-cache-dance.md new file mode 100644 index 000000000..6de15ee0c --- /dev/null +++ b/.changeset/bright-cache-dance.md @@ -0,0 +1,5 @@ +--- +"varlock": minor +--- + +add caching system with cache() resolver, random value generators, and plugin cache API diff --git a/.changeset/red-wasps-kick.md b/.changeset/red-wasps-kick.md new file mode 100644 index 000000000..897c3607a --- /dev/null +++ b/.changeset/red-wasps-kick.md @@ -0,0 +1,5 @@ +--- +"varlock": minor +--- + +add new varlock() function for built-in encryption diff --git a/.github/workflows/binary-release.yaml b/.github/workflows/binary-release.yaml index 29bcaefdd..9e7e70f10 100644 --- a/.github/workflows/binary-release.yaml +++ b/.github/workflows/binary-release.yaml @@ -12,6 +12,9 @@ on: release: types: [published] +permissions: + contents: read + concurrency: ${{ github.workflow }}-${{ github.ref }} jobs: @@ -24,10 +27,42 @@ jobs: run: | echo "$GITHUB_CONTEXT" + # Build and sign the macOS native binary (cache hit if already built in CI) + build-native-macos: + if: github.event_name == 'workflow_dispatch' || startsWith(github.ref_name, 'varlock@') + uses: ./.github/workflows/build-native-macos.yaml + with: + mode: release + version: ${{ github.event_name == 'workflow_dispatch' && inputs.version || github.ref_name }} + artifact-name: native-bin-macos-signed + secrets: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Notarize the signed binary for production distribution + notarize-native-macos: + needs: build-native-macos + if: github.event_name == 'workflow_dispatch' || startsWith(github.ref_name, 'varlock@') + uses: ./.github/workflows/notarize-native-macos.yaml + with: + source-artifact-name: native-bin-macos-signed + artifact-name: native-bin-macos-release + secrets: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Build Rust native binaries for Linux and Windows + build-native-rust: + if: github.event_name == 'workflow_dispatch' || 
startsWith(github.ref_name, 'varlock@') + uses: ./.github/workflows/build-native-rust.yaml + with: + artifact-name: native-bin-rust + release-binaries: + needs: [notarize-native-macos, build-native-rust] # was using github.ref.tag_name, but it seems that when publishing multiple tags at once, it was behaving weirdly if: github.event_name == 'workflow_dispatch' || startsWith(github.ref_name, 'varlock@') runs-on: ubuntu-latest + permissions: + contents: write steps: - uses: actions/checkout@v6 - name: Setup Bun @@ -63,6 +98,36 @@ jobs: echo "RELEASE_TAG=varlock@${{ inputs.version }}" >> $GITHUB_ENV echo "RELEASE_VERSION=${{ inputs.version }}" >> $GITHUB_ENV + # Download the signed macOS native binary + - name: Download macOS native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-macos-release + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + - name: Restore native binary execute permission + run: chmod +x packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt + + # Download Rust native binaries for Linux and Windows + - name: Download Linux x64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-linux-x64 + path: packages/varlock/native-bins/linux-x64 + - name: Download Linux arm64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-linux-arm64 + path: packages/varlock/native-bins/linux-arm64 + - name: Download Windows x64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-win32-x64 + path: packages/varlock/native-bins/win32-x64 + - name: Restore Rust binary execute permissions + run: | + chmod +x packages/varlock/native-bins/linux-x64/varlock-local-encrypt + chmod +x packages/varlock/native-bins/linux-arm64/varlock-local-encrypt + - name: build libs run: bun run build:libs env: diff --git a/.github/workflows/build-native-macos.yaml b/.github/workflows/build-native-macos.yaml new file mode 100644 
index 000000000..b9e236a32 --- /dev/null +++ b/.github/workflows/build-native-macos.yaml @@ -0,0 +1,219 @@ +name: Build macOS native binary + +# Reusable workflow that compiles, bundles, and Developer ID signs the +# VarlockEnclave Swift binary on a macOS runner. +# +# The Swift .build directory is cached by source hash, so the compile +# step (~minutes) is near-instant on cache hit. The .app bundle wrapping +# (plist, icon, signing) always runs since it varies by mode/version. +# +# Notarization is intentionally NOT included here — it's a separate +# workflow for production releases. + +permissions: + contents: read + +on: + workflow_call: + inputs: + mode: + description: 'Build mode: dev, preview, or release (affects bundle metadata)' + type: string + default: 'preview' + version: + description: 'Bundle version string (e.g. 1.2.3)' + type: string + default: '0.0.0-preview' + artifact-name: + description: 'Name for the uploaded artifact' + type: string + default: 'native-bin-macos' + secrets: + OP_CI_TOKEN: + required: true + +jobs: + build-swift-binary: + runs-on: macos-latest + steps: + - uses: actions/checkout@v6 + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + + # skip bun dep caching since less likely to hit + + - name: Install node deps + run: bun install + + - name: Enable turborepo build cache + uses: rharkor/caching-for-turbo@v2.3.11 + + # Cache the Swift .build directory so compilation is fast on unchanged source + - name: Compute Swift source hash + id: swift-hash + run: | + HASH=$(find packages/encryption-binary-swift/swift -type f | sort | xargs shasum -a 256 | shasum -a 256 | cut -d' ' -f1) + echo "hash=$HASH" >> $GITHUB_OUTPUT + echo "Swift source hash: $HASH" + + - name: Cache Swift build artifacts + uses: actions/cache@v5 + with: + path: packages/encryption-binary-swift/swift/.build + key: varlock-swift-build-${{ steps.swift-hash.outputs.hash }} + + # Build varlock JS so we can use it to resolve secrets from 1Password + - name: Build varlock 
libs + run: bun run build:libs + + # Load secrets from 1Password via varlock (scoped to the Swift package) + - name: Load signing secrets + uses: dmno-dev/varlock-action@v1.0.1 + with: + working-directory: packages/encryption-binary-swift + env: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Import signing certificate into a temporary keychain + - name: Import signing certificate + run: | + KEYCHAIN_PATH=$RUNNER_TEMP/signing.keychain-db + KEYCHAIN_PASSWORD=$(openssl rand -base64 24) + + echo "$APPLE_CERTIFICATE_BASE64" | base64 --decode > $RUNNER_TEMP/certificate.p12 + + security create-keychain -p "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" + security set-keychain-settings -lut 21600 "$KEYCHAIN_PATH" + security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" + + security import $RUNNER_TEMP/certificate.p12 \ + -P "$APPLE_CERTIFICATE_PASSWORD" \ + -A -t cert -f pkcs12 \ + -k "$KEYCHAIN_PATH" + + security set-key-partition-list -S apple-tool:,apple:,codesign: \ + -s -k "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" + + security list-keychains -d user -s "$KEYCHAIN_PATH" login.keychain-db + + echo "APPLE_SIGNING_IDENTITY=$APPLE_SIGNING_IDENTITY" >> $GITHUB_ENV + + # Compile (cached), bundle with mode-specific metadata, and sign + - name: Build, bundle, and sign + run: | + bun run --filter @varlock/encryption-binary-swift build:swift \ + -- --mode ${{ inputs.mode }} --version ${{ inputs.version }} --sign "$APPLE_SIGNING_IDENTITY" + + - name: Verify binary + run: | + APP_PATH="packages/varlock/native-bins/darwin/VarlockEnclave.app" + echo "=== App bundle contents ===" + ls -la "$APP_PATH/Contents/MacOS/" + echo "=== Binary architectures ===" + lipo -info "$APP_PATH/Contents/MacOS/varlock-local-encrypt" + echo "=== Code signature ===" + codesign -dvv "$APP_PATH" 2>&1 || true + echo "=== Info.plist ===" + cat "$APP_PATH/Contents/Info.plist" + + # Test the binary (using --no-auth since CI has no biometric) + # Keys are still Secure Enclave-backed, just without user presence 
requirement + - name: Test binary - status + run: | + BIN="packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt" + echo "=== status ===" + $BIN status + $BIN status | python3 -c "import sys,json; d=json.load(sys.stdin); assert d['ok'], 'status not ok'" + + - name: Test binary - SE key lifecycle + encrypt/decrypt roundtrip + run: | + BIN="packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt" + + echo "=== generate-key (--no-auth for CI) ===" + $BIN generate-key --key-id ci-test --no-auth + + echo "=== key-exists ===" + $BIN key-exists --key-id ci-test | python3 -c "import sys,json; d=json.load(sys.stdin); assert d['exists']" + + echo "=== encrypt ===" + PLAINTEXT=$(printf 'hello from macOS CI' | base64) + CIPHERTEXT=$($BIN encrypt --key-id ci-test --data "$PLAINTEXT" | python3 -c "import sys,json; print(json.load(sys.stdin)['ciphertext'])") + echo "Ciphertext: ${CIPHERTEXT:0:40}..." + + echo "=== decrypt (one-shot, no auth needed) ===" + DECRYPTED=$($BIN decrypt --key-id ci-test --data "$CIPHERTEXT" | python3 -c "import sys,json; print(json.load(sys.stdin)['plaintext'])") + echo "Decrypted: $DECRYPTED" + + if [ "$DECRYPTED" != "hello from macOS CI" ]; then + echo "::error::Roundtrip failed! 
Expected 'hello from macOS CI', got '$DECRYPTED'" + exit 1 + fi + + echo "=== delete-key ===" + $BIN delete-key --key-id ci-test + + echo "All macOS binary tests passed" + + - name: Test JS→Swift interop + run: | + BIN="packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt" + + # Generate SE key (no auth for CI) + $BIN generate-key --key-id interop-test --no-auth + + # Get the public key from the SE binary + PUBLIC_KEY=$($BIN generate-key --key-id interop-tmp --no-auth > /dev/null 2>&1; echo "skip") + # Actually, get public key by generating and reading the output + GEN_OUTPUT=$($BIN key-exists --key-id interop-test) + + # Use the SE binary's encrypt to get the public key indirectly: + # generate-key already printed it — let's re-generate to capture it + $BIN delete-key --key-id interop-test > /dev/null + PUBLIC_KEY=$($BIN generate-key --key-id interop-test --no-auth | python3 -c "import sys,json; print(json.load(sys.stdin)['publicKey'])") + echo "SE Public Key: ${PUBLIC_KEY:0:20}..." + + # Encrypt with JS using the SE public key + CIPHERTEXT=$(bun -e " + const { encrypt } = await import('./packages/varlock/src/lib/local-encrypt/crypto.ts'); + const result = await encrypt('$PUBLIC_KEY', 'javascript to secure enclave'); + process.stdout.write(result); + ") + echo "JS Ciphertext: ${CIPHERTEXT:0:40}..." + + # Decrypt with Swift SE binary (proves JS wire format is SE-compatible) + DECRYPTED=$($BIN decrypt --key-id interop-test --data "$CIPHERTEXT" | python3 -c "import sys,json; print(json.load(sys.stdin)['plaintext'])") + + if [ "$DECRYPTED" != "javascript to secure enclave" ]; then + echo "::error::JS→Swift interop failed! 
Got: $DECRYPTED" + exit 1 + fi + echo "✓ JS→Swift SE: '$DECRYPTED'" + + # Cleanup + $BIN delete-key --key-id interop-test + echo "All macOS interop tests passed" + + - name: Upload native binary artifact + uses: actions/upload-artifact@v7 + with: + name: ${{ inputs.artifact-name }} + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + retention-days: 7 + + # Cache the signed .app so other jobs (e.g. release-preview) can restore + # it on a Linux runner without needing a macOS build + - name: Cache signed .app bundle + uses: actions/cache/save@v5 + with: + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + key: native-bin-macos-signed-${{ hashFiles('packages/encryption-binary-swift/swift/**') }} + + - name: Cleanup signing keychain + if: always() + run: | + KEYCHAIN_PATH=$RUNNER_TEMP/signing.keychain-db + if [ -f "$KEYCHAIN_PATH" ]; then + security delete-keychain "$KEYCHAIN_PATH" || true + fi + rm -f $RUNNER_TEMP/certificate.p12 diff --git a/.github/workflows/build-native-rust.yaml b/.github/workflows/build-native-rust.yaml new file mode 100644 index 000000000..7a37fd382 --- /dev/null +++ b/.github/workflows/build-native-rust.yaml @@ -0,0 +1,207 @@ +name: Build Rust native binaries + +# Reusable workflow that compiles the varlock-local-encrypt Rust binary +# for Linux and Windows targets. +# +# Builds are cached by Cargo.lock + source hash. Each platform builds +# natively on its own runner for maximum compatibility. +# +# Output: native binaries uploaded as artifacts, ready to be bundled +# into the varlock npm package and CLI release archives. 
+ +permissions: + contents: read + +on: + workflow_call: + inputs: + artifact-name: + description: 'Base name for uploaded artifacts (suffixed with platform)' + type: string + default: 'native-bin-rust' + +jobs: + build: + strategy: + matrix: + include: + - os: ubuntu-latest + target: x86_64-unknown-linux-gnu + native-bin-subdir: linux-x64 + binary-name: varlock-local-encrypt + test-interop: true # only need interop on one platform — crypto is identical + - os: ubuntu-24.04-arm + target: aarch64-unknown-linux-gnu + native-bin-subdir: linux-arm64 + binary-name: varlock-local-encrypt + test-interop: false + - os: windows-latest + target: x86_64-pc-windows-msvc + native-bin-subdir: win32-x64 + binary-name: varlock-local-encrypt.exe + test-interop: false + + runs-on: ${{ matrix.os }} + name: Build ${{ matrix.native-bin-subdir }} + + steps: + - uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + with: + targets: ${{ matrix.target }} + + # Cache Cargo registry + build artifacts by lockfile hash + - name: Cache Cargo + uses: actions/cache@v5 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + packages/encryption-binary-rust/target + key: rust-${{ matrix.target }}-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock') }}-${{ hashFiles('packages/encryption-binary-rust/src/**') }} + restore-keys: | + rust-${{ matrix.target }}-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock') }}- + rust-${{ matrix.target }}- + + - name: Install UPX + shell: bash + run: | + if [[ "${{ matrix.os }}" == *"ubuntu"* ]]; then + sudo apt-get update && sudo apt-get install -y upx-ucl + elif [[ "${{ matrix.os }}" == *"windows"* ]]; then + choco install upx --yes + fi + + - name: Build release binary + working-directory: packages/encryption-binary-rust + run: cargo build --release --target ${{ matrix.target }} + + - name: Prepare artifact + shell: bash + run: | + ARTIFACT_DIR="native-bins/${{ matrix.native-bin-subdir }}" + mkdir -p 
"$ARTIFACT_DIR" + cp "packages/encryption-binary-rust/target/${{ matrix.target }}/release/${{ matrix.binary-name }}" "$ARTIFACT_DIR/" + echo "=== Binary info (before UPX) ===" + ls -la "$ARTIFACT_DIR/${{ matrix.binary-name }}" + file "$ARTIFACT_DIR/${{ matrix.binary-name }}" || true + echo "=== UPX compress ===" + upx --best "$ARTIFACT_DIR/${{ matrix.binary-name }}" + echo "=== Binary info (after UPX) ===" + ls -la "$ARTIFACT_DIR/${{ matrix.binary-name }}" + + # Run Rust unit tests + - name: Run unit tests + working-directory: packages/encryption-binary-rust + run: cargo test --release --target ${{ matrix.target }} + + # Test the built binary end-to-end (one-shot commands, no biometric) + # Uses python (not python3) for Windows compat; printf for reliable base64 input + - name: Test binary - status + shell: bash + run: | + BIN="native-bins/${{ matrix.native-bin-subdir }}/${{ matrix.binary-name }}" + PY=$(command -v python3 || command -v python) + echo "=== status ===" + $BIN status + $BIN status | $PY -c "import sys,json; d=json.load(sys.stdin); assert d['ok'], 'status not ok'" + + - name: Test binary - key lifecycle + encrypt/decrypt roundtrip + shell: bash + run: | + BIN="native-bins/${{ matrix.native-bin-subdir }}/${{ matrix.binary-name }}" + PY=$(command -v python3 || command -v python) + + echo "=== generate-key ===" + $BIN generate-key --key-id ci-test + $BIN key-exists --key-id ci-test | $PY -c "import sys,json; d=json.load(sys.stdin); assert d['exists']" + + echo "=== encrypt ===" + PLAINTEXT=$($PY -c "import base64; print(base64.b64encode(b'hello from CI').decode())") + CIPHERTEXT=$($BIN encrypt --key-id ci-test --data "$PLAINTEXT" | $PY -c "import sys,json; print(json.load(sys.stdin)['ciphertext'])") + echo "Ciphertext: ${CIPHERTEXT:0:40}..." 
+ + echo "=== decrypt ===" + DECRYPTED=$($BIN decrypt --key-id ci-test --data "$CIPHERTEXT" | $PY -c "import sys,json; print(json.load(sys.stdin)['plaintext'])") + echo "Decrypted: $DECRYPTED" + + if [ "$DECRYPTED" != "hello from CI" ]; then + echo "::error::Roundtrip failed! Expected 'hello from CI', got '$DECRYPTED'" + exit 1 + fi + + echo "=== delete-key ===" + $BIN delete-key --key-id ci-test + + echo "All binary tests passed" + + # Cross-platform interop: encrypt with Rust, decrypt with JS, and vice versa + # Only on platforms where Bun is available and keys aren't DPAPI-protected + - name: Setup Bun (for interop test) + if: matrix.test-interop + uses: oven-sh/setup-bun@v2 + + - name: Install JS deps (for interop test) + if: matrix.test-interop + run: bun install + + # Bidirectional Rust↔JS interop test (Linux x64 only — crypto is platform-independent) + - name: Setup Bun (for interop test) + if: matrix.test-interop + uses: oven-sh/setup-bun@v2 + + - name: Install JS deps (for interop test) + if: matrix.test-interop + run: bun install + + - name: Test Rust↔JS interop + if: matrix.test-interop + shell: bash + run: | + BIN="native-bins/${{ matrix.native-bin-subdir }}/${{ matrix.binary-name }}" + + # Generate key with Rust (no DPAPI on Linux = plaintext PKCS8) + $BIN generate-key --key-id interop-test + KEY_FILE="$HOME/.config/varlock/local-encrypt/keys/interop-test.json" + PUBLIC_KEY=$(python3 -c "import json; print(json.load(open('$KEY_FILE'))['publicKey'])") + PRIVATE_KEY=$(python3 -c "import json; print(json.load(open('$KEY_FILE'))['protectedPrivateKey'])") + + # Rust→JS: encrypt with Rust, decrypt with JS + PLAINTEXT_B64=$(python3 -c "import base64; print(base64.b64encode(b'rust to javascript').decode())") + CIPHERTEXT=$($BIN encrypt --key-id interop-test --data "$PLAINTEXT_B64" | python3 -c "import sys,json; print(json.load(sys.stdin)['ciphertext'])") + RESULT=$(bun -e " + const { decrypt } = await 
import('./packages/varlock/src/lib/local-encrypt/crypto.ts'); + const result = await decrypt('$PRIVATE_KEY', '$PUBLIC_KEY', '$CIPHERTEXT'); + process.stdout.write(result); + ") + [ "$RESULT" = "rust to javascript" ] || { echo "::error::Rust→JS failed! Got: $RESULT"; exit 1; } + echo "Rust→JS: '$RESULT'" + + # JS→Rust: encrypt with JS, decrypt with Rust + CIPHERTEXT=$(bun -e " + const { encrypt } = await import('./packages/varlock/src/lib/local-encrypt/crypto.ts'); + const result = await encrypt('$PUBLIC_KEY', 'javascript to rust'); + process.stdout.write(result); + ") + RESULT=$($BIN decrypt --key-id interop-test --data "$CIPHERTEXT" | python3 -c "import sys,json; print(json.load(sys.stdin)['plaintext'])") + [ "$RESULT" = "javascript to rust" ] || { echo "::error::JS→Rust failed! Got: $RESULT"; exit 1; } + echo "JS→Rust: '$RESULT'" + + $BIN delete-key --key-id interop-test + echo "All interop tests passed" + + - name: Upload artifact + uses: actions/upload-artifact@v7 + with: + name: ${{ inputs.artifact-name }}-${{ matrix.native-bin-subdir }} + path: native-bins/${{ matrix.native-bin-subdir }}/ + retention-days: 7 + + # Cache the built binary so preview releases on other runners can restore it + - name: Cache built binary + uses: actions/cache/save@v5 + with: + path: native-bins/${{ matrix.native-bin-subdir }}/ + key: native-bin-rust-${{ matrix.native-bin-subdir }}-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock', 'packages/encryption-binary-rust/src/**') }} diff --git a/.github/workflows/notarize-native-macos.yaml b/.github/workflows/notarize-native-macos.yaml new file mode 100644 index 000000000..8169a7c2e --- /dev/null +++ b/.github/workflows/notarize-native-macos.yaml @@ -0,0 +1,91 @@ +name: Notarize macOS native binary + +# Reusable workflow that takes an already-signed .app bundle artifact, +# submits it to Apple for notarization, and staples the ticket. +# Requires a macOS runner for xcrun. 
+ +permissions: + contents: read + +on: + workflow_call: + inputs: + source-artifact-name: + description: 'Name of the signed .app artifact to notarize' + type: string + required: true + artifact-name: + description: 'Name for the notarized artifact' + type: string + default: 'native-bin-macos-notarized' + secrets: + OP_CI_TOKEN: + required: true + +jobs: + notarize: + runs-on: macos-latest + steps: + - uses: actions/checkout@v6 + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + + # skip bun dep caching since less likely to hit + + - name: Install node deps + run: bun install + + - name: Enable turborepo build cache + uses: rharkor/caching-for-turbo@v2.3.11 + + - name: Build varlock libs + run: bun run build:libs + + - name: Download signed .app bundle + uses: actions/download-artifact@v8 + with: + name: ${{ inputs.source-artifact-name }} + path: VarlockEnclave.app + + # Load secrets from 1Password via varlock (scoped to the Swift package) + - name: Load signing secrets + uses: dmno-dev/varlock-action@v1.0.1 + with: + working-directory: packages/encryption-binary-swift + env: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + - name: Notarize and staple + working-directory: packages/encryption-binary-swift + run: | + APP_PATH="$GITHUB_WORKSPACE/VarlockEnclave.app" + + # Create a zip for notarization submission + ditto -c -k --keepParent "$APP_PATH" $RUNNER_TEMP/VarlockEnclave.zip + + # Submit for notarization and wait + xcrun notarytool submit $RUNNER_TEMP/VarlockEnclave.zip \ + --apple-id "$APPLE_ID" \ + --password "$APPLE_APP_PASSWORD" \ + --team-id "$APPLE_TEAM_ID" \ + --wait + + # Staple the notarization ticket to the app bundle + xcrun stapler staple "$APP_PATH" + env: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + - name: Verify notarization + run: | + echo "=== Code signature ===" + codesign -dvv VarlockEnclave.app 2>&1 || true + echo "=== Notarization staple ===" + xcrun stapler validate VarlockEnclave.app + + - name: Upload notarized artifact + uses: 
actions/upload-artifact@v7 + with: + name: ${{ inputs.artifact-name }} + path: VarlockEnclave.app + retention-days: 7 diff --git a/.github/workflows/release-preview.yaml b/.github/workflows/release-preview.yaml deleted file mode 100644 index 5859e5ca2..000000000 --- a/.github/workflows/release-preview.yaml +++ /dev/null @@ -1,46 +0,0 @@ -name: Release _preview_ packages -on: - pull_request: - push: - branches-ignore: - - main - - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v6 - with: - # by default only the current commit is fetched - # but we need more history to be able to compare to main - # TODO: ideally we would just fetch the history between origin/main and the current commit - fetch-depth: 0 - - name: Setup Bun - uses: oven-sh/setup-bun@v2 - - name: Cache bun dependencies - uses: actions/cache@v5 - with: - path: ~/.bun/install/cache - key: bun-${{ runner.os }}-${{ hashFiles('bun.lock') }} - restore-keys: | - bun-${{ runner.os }}- - - name: Use Node.js 24.x - uses: actions/setup-node@v6 - with: - node-version: "24.x" - - name: Install node deps - run: bun install - - name: Enable turborepo build cache - uses: rharkor/caching-for-turbo@v2.3.11 - - # ------------------------------------------------------------ - - name: Build publishable npm packages - run: bun run build:libs - env: - BUILD_TYPE: preview - # we use a custom script to run `npx pkg-pr-new publish` - # so that we can determine which packages to release - - name: Release preview packages - run: bun run scripts/release-preview.ts diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 66143316e..69c4f9dc6 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -5,11 +5,40 @@ on: branches: - main +permissions: + contents: read + concurrency: ${{ github.workflow }}-${{ github.ref }} jobs: + # Build and sign the macOS native binary (cache hit if already built in CI) + build-native-macos: + uses: 
./.github/workflows/build-native-macos.yaml + with: + mode: release + artifact-name: native-bin-macos-signed + secrets: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Notarize for production npm distribution + notarize-native-macos: + needs: build-native-macos + uses: ./.github/workflows/notarize-native-macos.yaml + with: + source-artifact-name: native-bin-macos-signed + artifact-name: native-bin-macos-npm + secrets: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Build Rust native binaries for Linux and Windows + build-native-rust: + uses: ./.github/workflows/build-native-rust.yaml + with: + artifact-name: native-bin-rust + release: name: Release + needs: [notarize-native-macos, build-native-rust] runs-on: ubuntu-latest permissions: id-token: write # Required for OIDC @@ -45,6 +74,36 @@ jobs: - name: Update npm run: npm install -g npm@latest + # Download signed macOS native binary so it's included in the npm package + - name: Download macOS native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-macos-npm + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + - name: Restore native binary execute permission + run: chmod +x packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt + + # Download Rust native binaries for Linux and Windows + - name: Download Linux x64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-linux-x64 + path: packages/varlock/native-bins/linux-x64 + - name: Download Linux arm64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-linux-arm64 + path: packages/varlock/native-bins/linux-arm64 + - name: Download Windows x64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-win32-x64 + path: packages/varlock/native-bins/win32-x64 + - name: Restore Rust binary execute permissions + run: | + chmod +x packages/varlock/native-bins/linux-x64/varlock-local-encrypt + chmod +x 
packages/varlock/native-bins/linux-arm64/varlock-local-encrypt + # ------------------------------------------------------------ - name: Create Release Pull Request or Publish to npm id: changesets diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index dddb41bab..89c6be41c 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -2,19 +2,26 @@ name: CI test suite on: pull_request: push: + branches: [main] +permissions: + contents: read jobs: - build: + build-and-test: runs-on: ubuntu-latest - + outputs: + swift-changed: ${{ steps.check-swift.outputs.changed }} + rust-changed: ${{ steps.check-rust.outputs.changed }} steps: - uses: actions/checkout@v6 + with: + fetch-depth: 0 - name: Setup Bun uses: oven-sh/setup-bun@v2 + # this caching step is kind of a wash - # downloading the cache adds a few seconds - # and then installing is a bit faster + # downloading the cache adds a few seconds and then installing is a bit faster - name: Cache bun dependencies uses: actions/cache@v5 with: @@ -28,22 +35,10 @@ jobs: node-version: "24.x" - name: Install js deps (w/ bun) run: bun install - - name: Dogfood varlock-action - id: varlock - uses: dmno-dev/varlock-action@v1.0.1 - with: - working-directory: smoke-tests/smoke-test-basic - show-summary: 'false' - fail-on-error: 'true' - output-format: 'json' - - name: Verify varlock-action output - run: | - test -n "${{ steps.varlock.outputs.json-env }}" - echo "Varlock action output is present" - name: Enable turborepo build cache uses: rharkor/caching-for-turbo@v2.3.11 - # ------------------------------------------------------------ + # lint, build, tests --------------------------------- - name: ESLint run: bun run lint - name: TypeScript type check @@ -52,3 +47,156 @@ jobs: run: bun run build:libs - name: Run tests run: bun run test:ci + + # Check if native binary source changed (used to gate native builds) + - name: Check for Swift source changes + id: check-swift + run: | + if git diff 
--name-only origin/main...HEAD | grep -q '^packages/encryption-binary-swift/'; then + echo "changed=true" >> $GITHUB_OUTPUT + else + echo "changed=false" >> $GITHUB_OUTPUT + fi + - name: Check for Rust source changes + id: check-rust + run: | + if git diff --name-only origin/main...HEAD | grep -q '^packages/encryption-binary-rust/'; then + echo "changed=true" >> $GITHUB_OUTPUT + else + echo "changed=false" >> $GITHUB_OUTPUT + fi + + # Build + sign the macOS native binary if Swift source changed (warms the cache) + # this must be done on a mac-os runner + build-native-macos: + needs: build-and-test + if: needs.build-and-test.outputs.swift-changed == 'true' + uses: ./.github/workflows/build-native-macos.yaml + with: + artifact-name: native-bin-macos-ci + secrets: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Build Rust native binaries if source changed (warms the cache) + build-native-rust: + needs: build-and-test + if: needs.build-and-test.outputs.rust-changed == 'true' + uses: ./.github/workflows/build-native-rust.yaml + with: + artifact-name: native-bin-rust-ci + + # Publish preview packages via pkg-pr-new + release-preview-packages: + needs: [build-and-test, build-native-macos, build-native-rust] + # Run even if native builds were skipped (source unchanged), but not if anything failed. Skip on main. 
+ if: always() && !failure() && !cancelled() && github.ref_name != 'main' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + - name: Cache bun dependencies + uses: actions/cache@v5 + with: + path: ~/.bun/install/cache + key: bun-${{ runner.os }}-${{ hashFiles('bun.lock') }} + restore-keys: | + bun-${{ runner.os }}- + - name: Use Node.js 24.x + uses: actions/setup-node@v6 + with: + node-version: "24.x" + - name: Install node deps + run: bun install + - name: Enable turborepo build cache + uses: rharkor/caching-for-turbo@v2.3.11 + + # Determine which packages will be preview-released + - name: Check release packages + id: check-release + run: bun run scripts/check-release-packages.ts + + # Get signed macOS .app if varlock is being released + # If the macOS build ran this run (swift changed), download the artifact directly + # Otherwise, restore from cross-run cache + - name: Download macOS native binary (from this run) + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.swift-changed == 'true' + uses: actions/download-artifact@v8 + with: + name: native-bin-macos-ci + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + - name: Restore cached macOS native binary (from prior run) + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.swift-changed != 'true' + uses: actions/cache/restore@v5 + with: + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + key: native-bin-macos-signed-${{ hashFiles('packages/encryption-binary-swift/swift/**') }} + - name: Verify and fix native binary permissions + if: steps.check-release.outputs.includes-varlock == 'true' + run: | + BINARY=packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt + if [ ! 
-f "$BINARY" ]; then + echo "::error::macOS native binary not found — cannot publish varlock preview without it" + exit 1 + fi + chmod +x "$BINARY" + + # Get Rust native binaries if varlock is being released + # If the Rust build ran this run, download the artifacts; otherwise restore from cache + - name: Download Rust binaries (from this run) + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed == 'true' + uses: actions/download-artifact@v8 + with: + pattern: native-bin-rust-ci-* + path: packages/varlock/native-bins/ + merge-multiple: false + # Flatten: download-artifact creates subdirs per artifact name, but we need linux-x64/ etc. + - name: Flatten Rust artifact directories (from this run) + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed == 'true' + run: | + cd packages/varlock/native-bins + for dir in native-bin-rust-ci-*/; do + subdir=$(echo "$dir" | sed 's/native-bin-rust-ci-//' | sed 's/\///') + mv "$dir" "$subdir" 2>/dev/null || true + done + + - name: Restore cached Rust binaries (from prior run) + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed != 'true' + run: | + CACHE_KEY_SUFFIX="${{ hashFiles('packages/encryption-binary-rust/Cargo.lock', 'packages/encryption-binary-rust/src/**') }}" + for SUBDIR in linux-x64 linux-arm64 win32-x64; do + echo "Restoring native-bin-rust-$SUBDIR..." 
+ done + - uses: actions/cache/restore@v5 + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed != 'true' + with: + path: packages/varlock/native-bins/linux-x64/ + key: native-bin-rust-linux-x64-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock', 'packages/encryption-binary-rust/src/**') }} + - uses: actions/cache/restore@v5 + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed != 'true' + with: + path: packages/varlock/native-bins/linux-arm64/ + key: native-bin-rust-linux-arm64-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock', 'packages/encryption-binary-rust/src/**') }} + - uses: actions/cache/restore@v5 + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed != 'true' + with: + path: packages/varlock/native-bins/win32-x64/ + key: native-bin-rust-win32-x64-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock', 'packages/encryption-binary-rust/src/**') }} + + - name: Fix Rust binary permissions + if: steps.check-release.outputs.includes-varlock == 'true' + run: | + for BIN in packages/varlock/native-bins/linux-*/varlock-local-encrypt; do + [ -f "$BIN" ] && chmod +x "$BIN" && echo "Fixed: $BIN" + done + + - name: Build publishable npm packages + run: bun run build:libs + env: + BUILD_TYPE: preview + - name: Release preview packages + run: bun run scripts/release-preview.ts + env: + RELEASE_PACKAGES: ${{ steps.check-release.outputs.packages }} diff --git a/.gitignore b/.gitignore index b523d3843..4f30189e6 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,5 @@ smoke-tests/pnpm-lock.yaml framework-tests/.packed framework-tests/.test-projects .magent +.claude/worktrees/ eslint-output.txt diff --git a/bun.lock b/bun.lock index 6246abf5b..7b0959f5a 100644 --- a/bun.lock +++ b/bun.lock @@ -9,11 +9,10 @@ "@cloudflare/vite-plugin": "^1.30.1", "@eslint/js": "^10.0.1", "@stylistic/eslint-plugin": 
"^5.9.0", + "@types/node": "catalog:", "@typescript-eslint/eslint-plugin": "^8.56.1", "@typescript-eslint/parser": "^8.56.1", "@varlock/changeset-changelog": "workspace:*", - "@varlock/cloudflare-integration": "workspace:*", - "@varlock/keepass-plugin": "workspace:*", "@varlock/tsconfig": "workspace:*", "eslint": "^10.0.2", "eslint-plugin-es-x": "^9.5.0", @@ -25,6 +24,7 @@ "turbo": "^2.8.12", "typescript": "catalog:", "typescript-eslint": "^8.56.1", + "varlock": "workspace:*", }, }, "packages/changeset-changelog": { @@ -44,6 +44,18 @@ "vitest": "catalog:", }, }, + "packages/encryption-binary-rust": { + "name": "@varlock/encryption-binary-rust", + "version": "0.0.0", + }, + "packages/encryption-binary-swift": { + "name": "@varlock/encryption-binary-swift", + "version": "0.0.1", + "devDependencies": { + "@varlock/1password-plugin": "workspace:*", + "varlock": "workspace:*", + }, + }, "packages/env-spec-parser": { "name": "@env-spec/parser", "version": "0.2.0", @@ -116,7 +128,7 @@ }, "packages/integrations/nextjs": { "name": "@varlock/nextjs-integration", - "version": "0.3.2", + "version": "0.3.3", "devDependencies": { "@types/node": "catalog:", "tsup": "catalog:", @@ -130,7 +142,7 @@ }, "packages/integrations/vite": { "name": "@varlock/vite-integration", - "version": "0.2.9", + "version": "0.2.10", "devDependencies": { "@types/node": "catalog:", "ast-matcher": "^1.2.0", @@ -148,7 +160,7 @@ }, "packages/plugins/1password": { "name": "@varlock/1password-plugin", - "version": "0.3.2", + "version": "0.3.3", "devDependencies": { "@1password/sdk": "0.4.1-beta.1", "@1password/sdk-core": "0.4.1-beta.1", @@ -355,7 +367,7 @@ }, "packages/varlock": { "name": "varlock", - "version": "0.7.1", + "version": "0.7.2", "bin": { "varlock": "./bin/cli.js", }, @@ -1306,6 +1318,10 @@ "@varlock/dashlane-plugin": ["@varlock/dashlane-plugin@workspace:packages/plugins/dashlane"], + "@varlock/encryption-binary-rust": 
["@varlock/encryption-binary-rust@workspace:packages/encryption-binary-rust"], + + "@varlock/encryption-binary-swift": ["@varlock/encryption-binary-swift@workspace:packages/encryption-binary-swift"], + "@varlock/expo-integration": ["@varlock/expo-integration@workspace:packages/integrations/expo"], "@varlock/google-secret-manager-plugin": ["@varlock/google-secret-manager-plugin@workspace:packages/plugins/google-secret-manager"], diff --git a/eslint.config.mjs b/eslint.config.mjs index 324e9337c..c2e14f552 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -54,6 +54,7 @@ export default tseslint.config( '**/out', '**/next-env.d.ts', '.magent', + '.claude', 'framework-tests/.test-projects', 'framework-tests/.packed', ], @@ -159,6 +160,7 @@ export default tseslint.config( }, }, { + // allow console.log in some scripts/tests/etc files: [ 'scripts/**', 'ignore/**', @@ -168,6 +170,8 @@ export default tseslint.config( 'packages/varlock/scripts/**', 'smoke-tests/**', 'framework-tests/**', + 'packages/encryption-binary-swift/scripts/**', + 'packages/encryption-binary-rust/scripts/**', ], rules: { 'no-console': 0, @@ -179,22 +183,6 @@ export default tseslint.config( '@typescript-eslint/no-require-imports': 0, }, }, - { - // plugin files use triple-slash directives for the `plugin` global type - // which is injected at runtime by varlock via globalThis - files: [ - 'smoke-tests/**/plugins/**', - 'packages/varlock/src/env-graph/test/plugins/**', - ], - languageOptions: { - globals: { - plugin: 'readonly', - }, - }, - rules: { - '@typescript-eslint/triple-slash-reference': 0, - }, - }, { // these files use build-time globals declared in globals.d.ts files: [ diff --git a/package.json b/package.json index 0cefe3340..6e284b2c9 100644 --- a/package.json +++ b/package.json @@ -18,8 +18,9 @@ "test:ci": "turbo test:ci --filter=\"!smoke-test-*\"", "smoke-test": "cd smoke-tests && bun run test", "test:frameworks": "cd framework-tests && bun run test", - "typecheck": "turbo 
typecheck --filter=\"!@varlock/website\" --filter=\"!smoke-test-*\" --filter=\"!varlock-docs-mcp\"", - "check": "bun run lint && bun run typecheck && bun run build:libs && bun run test:ci", + "typecheck": "tsc --noEmit", + "typecheck:all": "turbo typecheck --filter=\"!@varlock/website\" --filter=\"!smoke-test-*\" --filter=\"!varlock-docs-mcp\"", + "check": "bun run lint && bun run typecheck:all && bun run build:libs && bun run test:ci", "dev": "turbo run dev --concurrency=40 --parallel --filter=\"!smoke-test-*\"", "lint": "eslint .", "lint:fix": "eslint . --fix", @@ -34,12 +35,11 @@ "@cloudflare/vite-plugin": "^1.30.1", "@eslint/js": "^10.0.1", "@stylistic/eslint-plugin": "^5.9.0", + "@types/node": "catalog:", "@typescript-eslint/eslint-plugin": "^8.56.1", "@typescript-eslint/parser": "^8.56.1", "@varlock/changeset-changelog": "workspace:*", "@varlock/tsconfig": "workspace:*", - "@varlock/cloudflare-integration": "workspace:*", - "@varlock/keepass-plugin": "workspace:*", "eslint": "^10.0.2", "eslint-plugin-es-x": "^9.5.0", "eslint-plugin-fix-disabled-rules": "^0.0.2", @@ -49,7 +49,8 @@ "globals": "^17.3.0", "turbo": "^2.8.12", "typescript": "catalog:", - "typescript-eslint": "^8.56.1" + "typescript-eslint": "^8.56.1", + "varlock": "workspace:*" }, "packageManager": "bun@1.3.11", "engines": { diff --git a/packages/encryption-binary-rust/.gitignore b/packages/encryption-binary-rust/.gitignore new file mode 100644 index 000000000..2f7896d1d --- /dev/null +++ b/packages/encryption-binary-rust/.gitignore @@ -0,0 +1 @@ +target/ diff --git a/packages/encryption-binary-rust/Cargo.lock b/packages/encryption-binary-rust/Cargo.lock new file mode 100644 index 000000000..0dc359fd3 --- /dev/null +++ b/packages/encryption-binary-rust/Cargo.lock @@ -0,0 +1,744 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common", + "generic-array", +] + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "aes-gcm" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "ghash", + "subtle", +] + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" 
+dependencies = [ + "generic-array", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "rand_core", + "typenum", +] + +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + +[[package]] +name = "der" +version = "0.7.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der", + "digest", + "elliptic-curve", + "signature", + "spki", +] + +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct", + "crypto-bigint", + "digest", + "ff", + "generic-array", + "group", + "hkdf", + "pkcs8", + "rand_core", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "ff" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "generic-array" +version = "0.14.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2" +dependencies = [ + "typenum", + "version_check", + "zeroize", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "ghash" +version = "0.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval", +] + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core", + "subtle", +] + +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "generic-array", +] + +[[package]] +name = "itoa" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" + +[[package]] +name = "libc" +version = "0.2.184" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "nix" +version = "0.29.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" +dependencies = [ + "bitflags", + "cfg-if", + "cfg_aliases", + "libc", + "memoffset", +] + +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "polyval" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.45" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = 
"serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common", + "subtle", +] + +[[package]] +name = "varlock-local-encrypt" +version = "0.1.0" +dependencies = [ + "aes-gcm", + "base64", + "elliptic-curve", + "hkdf", + "hmac", + "libc", + "nix", + "p256", + "rand", + "serde", + "serde_json", + "sha2", + "windows", +] + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "windows" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" +dependencies = [ + "windows-core", + "windows-targets", +] + +[[package]] +name = "windows-core" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-result", + "windows-strings", + "windows-targets", +] + +[[package]] +name = "windows-implement" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "zerocopy" +version = "0.8.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eed437bf9d6692032087e337407a86f04cd8d6a16a37199ed57949d415bd68e9" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70e3cd084b1788766f53af483dd21f93881ff30d7320490ec3ef7526d203bad4" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/packages/encryption-binary-rust/Cargo.toml b/packages/encryption-binary-rust/Cargo.toml new file mode 100644 index 000000000..0b47dcf2c --- /dev/null +++ b/packages/encryption-binary-rust/Cargo.toml @@ -0,0 +1,55 @@ +[package] +name = "varlock-local-encrypt" +version = "0.1.0" +edition = "2021" +description = "Cross-platform local encryption binary for Varlock (Windows/Linux)" + +[[bin]] +name = "varlock-local-encrypt" +path = "src/main.rs" + +[dependencies] +# ECIES crypto (pure Rust, no OpenSSL) +p256 = { version = "0.13", 
default-features = false, features = ["ecdh", "pkcs8", "std"] } +elliptic-curve = { version = "0.13", features = ["sec1", "pkcs8"] } +hkdf = "0.12" +hmac = "0.12" +sha2 = "0.10" +aes-gcm = "0.10" +rand = "0.8" + +# Serialization +serde = { version = "1", features = ["derive"] } +serde_json = "1" +base64 = "0.22" + +# Signal handling (Unix) +[target.'cfg(unix)'.dependencies] +libc = "0.2" + +# Platform — Linux (nix for peer credentials in IPC) +[target.'cfg(target_os = "linux")'.dependencies] +nix = { version = "0.29", features = ["process", "socket", "user", "fs"] } + +# Platform — Windows +[target.'cfg(target_os = "windows")'.dependencies] +windows = { version = "0.58", features = [ + # DPAPI + "Win32_Security_Cryptography", + "Win32_Security_Credentials", + # Named pipes + "Win32_System_Pipes", + "Win32_Storage_FileSystem", + "Win32_Foundation", + "Win32_System_IO", + # Windows Hello (UserConsentVerifier) + "Security_Credentials_UI", + "Foundation", +] } + +[profile.release] +strip = true +lto = true +opt-level = "z" # optimize for size +codegen-units = 1 +panic = "abort" diff --git a/packages/encryption-binary-rust/package.json b/packages/encryption-binary-rust/package.json new file mode 100644 index 000000000..2732a443e --- /dev/null +++ b/packages/encryption-binary-rust/package.json @@ -0,0 +1,12 @@ +{ + "name": "@varlock/encryption-binary-rust", + "version": "0.0.0", + "private": true, + "scripts": { + "build:current": "bun run scripts/build-rust.ts", + "build:linux-x64": "bun run scripts/build-rust.ts --target x86_64-unknown-linux-gnu", + "build:linux-arm64": "bun run scripts/build-rust.ts --target aarch64-unknown-linux-gnu", + "build:windows-x64": "bun run scripts/build-rust.ts --target x86_64-pc-windows-msvc", + "build:windows-arm64": "bun run scripts/build-rust.ts --target aarch64-pc-windows-msvc" + } +} diff --git a/packages/encryption-binary-rust/scripts/build-rust.ts b/packages/encryption-binary-rust/scripts/build-rust.ts new file mode 100644 index 
000000000..56d344010 --- /dev/null +++ b/packages/encryption-binary-rust/scripts/build-rust.ts @@ -0,0 +1,129 @@ +#!/usr/bin/env bun + +/** + * Build script for the varlock-local-encrypt Rust binary. + * + * Usage: + * bun run scripts/build-rust.ts # build for current platform + * bun run scripts/build-rust.ts --target x86_64-unknown-linux-gnu + * bun run scripts/build-rust.ts --target x86_64-pc-windows-msvc + * + * The binary is placed in packages/varlock/native-bins/[-]/ + */ + +import { execSync } from 'node:child_process'; +import path from 'node:path'; +import fs from 'node:fs'; + +// ── CLI args ──────────────────────────────────────────────────── + +const args = process.argv.slice(2); + +function getArg(flag: string): string | undefined { + const idx = args.indexOf(flag); + return idx >= 0 ? args[idx + 1] : undefined; +} + +const target = getArg('--target'); + +// ── Paths ─────────────────────────────────────────────────────── + +const rustDir = path.resolve(import.meta.dir, '..'); +const varlockPkgDir = path.resolve(import.meta.dir, '..', '..', 'varlock'); +const binaryName = process.platform === 'win32' && !target?.includes('linux') + ? 'varlock-local-encrypt.exe' + : 'varlock-local-encrypt'; + +/** + * Map a Rust target triple to the native-bins subdirectory name. 
+ */ +function getOutputSubdir(rustTarget?: string): string { + if (!rustTarget) { + // Current platform + if (process.platform === 'darwin') return 'darwin'; + if (process.platform === 'win32') return `win32-${process.arch}`; + return `${process.platform}-${process.arch}`; + } + + // Parse Rust target triple: --[-] + const parts = rustTarget.split('-'); + const arch = parts[0]; + const os = parts[2]; + + let nodeArch = arch; + if (arch === 'x86_64') nodeArch = 'x64'; + else if (arch === 'aarch64') nodeArch = 'arm64'; + + if (os === 'linux') return `linux-${nodeArch}`; + if (os === 'windows') return `win32-${nodeArch}`; + if (os === 'darwin' || os === 'apple') return 'darwin'; + return `${os}-${nodeArch}`; +} + +function run(cmd: string, opts?: { cwd?: string }) { + console.log(`> ${cmd}`); + execSync(cmd, { stdio: 'inherit', cwd: opts?.cwd ?? rustDir }); +} + +// ── Build ─────────────────────────────────────────────────────── + +const buildArgs = ['cargo', 'build', '--release']; +if (target) { + buildArgs.push('--target', target); +} + +run(buildArgs.join(' ')); + +// ── Copy to native-bins ───────────────────────────────────────── + +const subdir = getOutputSubdir(target); +const outputDir = path.join(varlockPkgDir, 'native-bins', subdir); +fs.mkdirSync(outputDir, { recursive: true }); + +// Find the built binary +let sourceBinary: string; +if (target) { + const targetBinaryName = target.includes('windows') + ? 
'varlock-local-encrypt.exe' + : 'varlock-local-encrypt'; + sourceBinary = path.join(rustDir, 'target', target, 'release', targetBinaryName); +} else { + sourceBinary = path.join(rustDir, 'target', 'release', binaryName); +} + +if (!fs.existsSync(sourceBinary)) { + console.error(`Build succeeded but binary not found at: ${sourceBinary}`); + process.exit(1); +} + +const destBinary = path.join(outputDir, binaryName); +fs.copyFileSync(sourceBinary, destBinary); + +// Ensure executable +if (process.platform !== 'win32') { + fs.chmodSync(destBinary, 0o755); +} + +const rawStats = fs.statSync(destBinary); +const rawSizeKB = Math.round(rawStats.size / 1024); + +// UPX compress on Linux/Windows (macOS is not reliably supported) +const skipUpx = args.includes('--no-upx'); +const isMacOS = !target ? process.platform === 'darwin' : (target.includes('darwin') || target.includes('apple')); +if (!skipUpx && !isMacOS) { + try { + console.log('\nCompressing with UPX...'); + execSync(`upx --best "${destBinary}"`, { stdio: 'inherit' }); + } catch { + console.warn('UPX compression failed (is upx installed?), continuing with uncompressed binary'); + } +} + +const stats = fs.statSync(destBinary); +const sizeKB = Math.round(stats.size / 1024); + +console.log(`\nBuilt: ${destBinary}`); +const sizeStr = rawSizeKB !== sizeKB ? `${sizeKB} KB (${rawSizeKB} KB before UPX)` : `${sizeKB} KB`; +console.log(`Size: ${sizeStr}`); +console.log(`Platform: ${subdir}`); +console.log('Done!'); diff --git a/packages/encryption-binary-rust/src/crypto.rs b/packages/encryption-binary-rust/src/crypto.rs new file mode 100644 index 000000000..a62874571 --- /dev/null +++ b/packages/encryption-binary-rust/src/crypto.rs @@ -0,0 +1,252 @@ +//! ECIES implementation matching the JS (crypto.ts) and Swift (SecureEnclaveManager.swift) schemes. +//! +//! Wire-compatible payload format: +//! version(1) | ephemeralPubKey(65) | nonce(12) | ciphertext(N) | tag(16) +//! +//! Crypto: +//! - P-256 ECDH key agreement +//! 
- HKDF-SHA256 (salt: "varlock-ecies-v1", info: ephemeralPub || recipientPub) +//! - AES-256-GCM with random 12-byte nonce + +use aes_gcm::{ + aead::{Aead, KeyInit}, + Aes256Gcm, Nonce, +}; +use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; +use elliptic_curve::sec1::{FromEncodedPoint, ToEncodedPoint}; +use hkdf::Hkdf; +use elliptic_curve::pkcs8::{DecodePrivateKey, EncodePrivateKey}; +use p256::{ + ecdh::EphemeralSecret, + elliptic_curve::rand_core::OsRng, + PublicKey, SecretKey, +}; +use sha2::Sha256; + +const PAYLOAD_VERSION: u8 = 0x01; +const HKDF_SALT: &[u8] = b"varlock-ecies-v1"; +const PUBLIC_KEY_LENGTH: usize = 65; // uncompressed P-256: 0x04 || x(32) || y(32) +const NONCE_LENGTH: usize = 12; +const TAG_LENGTH: usize = 16; +const HEADER_LENGTH: usize = 1 + PUBLIC_KEY_LENGTH + NONCE_LENGTH; + +/// A P-256 key pair with base64-encoded components. +pub struct KeyPair { + /// Base64-encoded uncompressed P-256 public key (65 bytes raw) + pub public_key: String, + /// Base64-encoded PKCS8 DER private key + pub private_key: String, +} + +/// Generate a new P-256 key pair. +/// +/// Returns the public key as uncompressed SEC1 (65 bytes, base64) and +/// the private key as PKCS8 DER (base64), matching the JS/Swift format. +pub fn generate_key_pair() -> Result { + let secret_key = SecretKey::random(&mut OsRng); + + // Public key: uncompressed SEC1 encoding (65 bytes) + let public_key_point = secret_key.public_key().to_encoded_point(false); + let public_key_bytes = public_key_point.as_bytes(); + + // Private key: PKCS8 DER encoding + let private_key_pkcs8 = secret_key + .to_pkcs8_der() + .map_err(|e| format!("Failed to encode private key as PKCS8: {e}"))?; + + Ok(KeyPair { + public_key: BASE64.encode(public_key_bytes), + private_key: BASE64.encode(private_key_pkcs8.as_bytes()), + }) +} + +/// Encrypt plaintext using ECIES with the recipient's public key. +/// +/// Only needs the public key — no private key or biometric auth required. 
+/// Returns base64-encoded ciphertext payload. +pub fn encrypt(public_key_base64: &str, plaintext: &[u8]) -> Result { + let recipient_pub_bytes = BASE64 + .decode(public_key_base64) + .map_err(|e| format!("Invalid public key base64: {e}"))?; + + if recipient_pub_bytes.len() != PUBLIC_KEY_LENGTH { + return Err(format!( + "Invalid public key length: {} (expected {})", + recipient_pub_bytes.len(), + PUBLIC_KEY_LENGTH + )); + } + + // Import recipient public key + let recipient_point = p256::EncodedPoint::from_bytes(&recipient_pub_bytes) + .map_err(|e| format!("Invalid public key encoding: {e}"))?; + let recipient_pub = PublicKey::from_encoded_point(&recipient_point) + .into_option() + .ok_or("Invalid P-256 public key point")?; + + // Generate ephemeral key pair + let ephemeral_secret = EphemeralSecret::random(&mut OsRng); + let ephemeral_pub = ephemeral_secret.public_key(); + let ephemeral_pub_bytes = ephemeral_pub.to_encoded_point(false); + let ephemeral_pub_raw = ephemeral_pub_bytes.as_bytes(); // 65 bytes + + // ECDH: ephemeral private × recipient public → shared secret + let shared_secret = ephemeral_secret.diffie_hellman(&recipient_pub); + let shared_secret_bytes = shared_secret.raw_secret_bytes(); + + // HKDF-SHA256 → AES-256 key + // info = ephemeralPubKey || recipientPubKey + let mut info = Vec::with_capacity(PUBLIC_KEY_LENGTH * 2); + info.extend_from_slice(ephemeral_pub_raw); + info.extend_from_slice(&recipient_pub_bytes); + + let hk = Hkdf::::new(Some(HKDF_SALT), shared_secret_bytes); + let mut aes_key = [0u8; 32]; + hk.expand(&info, &mut aes_key) + .map_err(|e| format!("HKDF expand failed: {e}"))?; + + // AES-256-GCM encrypt + let cipher = Aes256Gcm::new_from_slice(&aes_key) + .map_err(|e| format!("AES key init failed: {e}"))?; + + let mut nonce_bytes = [0u8; NONCE_LENGTH]; + rand::RngCore::fill_bytes(&mut OsRng, &mut nonce_bytes); + let nonce = Nonce::from_slice(&nonce_bytes); + + let ciphertext_with_tag = cipher + .encrypt(nonce, plaintext) + 
.map_err(|e| format!("AES-GCM encryption failed: {e}"))?; + + // AES-GCM appends tag to ciphertext — split for wire format + let ct_len = ciphertext_with_tag.len() - TAG_LENGTH; + let ciphertext = &ciphertext_with_tag[..ct_len]; + let tag = &ciphertext_with_tag[ct_len..]; + + // Assemble payload: version(1) | ephemeralPub(65) | nonce(12) | ciphertext(N) | tag(16) + let mut payload = Vec::with_capacity(HEADER_LENGTH + ciphertext.len() + TAG_LENGTH); + payload.push(PAYLOAD_VERSION); + payload.extend_from_slice(ephemeral_pub_raw); + payload.extend_from_slice(&nonce_bytes); + payload.extend_from_slice(ciphertext); + payload.extend_from_slice(tag); + + Ok(BASE64.encode(&payload)) +} + +/// Decrypt ciphertext using ECIES with the recipient's private key. +/// +/// `private_key_base64` is PKCS8 DER, `public_key_base64` is uncompressed SEC1. +/// `ciphertext_base64` is the base64-encoded wire-format payload. +/// Returns decrypted plaintext bytes. +pub fn decrypt( + private_key_base64: &str, + public_key_base64: &str, + ciphertext_base64: &str, +) -> Result, String> { + let payload = BASE64 + .decode(ciphertext_base64) + .map_err(|e| format!("Invalid ciphertext base64: {e}"))?; + + if payload.len() < HEADER_LENGTH + TAG_LENGTH { + return Err("Payload too short".into()); + } + + // Parse payload + let version = payload[0]; + if version != PAYLOAD_VERSION { + return Err(format!("Unsupported payload version: {version}")); + } + + let ephemeral_pub_raw = &payload[1..1 + PUBLIC_KEY_LENGTH]; + let nonce_bytes = &payload[1 + PUBLIC_KEY_LENGTH..HEADER_LENGTH]; + let ciphertext_and_tag = &payload[HEADER_LENGTH..]; + + if ciphertext_and_tag.len() < TAG_LENGTH { + return Err("Payload too short for tag".into()); + } + + // Import private key from PKCS8 DER + let private_key_der = BASE64 + .decode(private_key_base64) + .map_err(|e| format!("Invalid private key base64: {e}"))?; + let secret_key = SecretKey::from_pkcs8_der(&private_key_der) + .map_err(|e| format!("Invalid PKCS8 private 
key: {e}"))?; + + // Import ephemeral public key + let ephemeral_point = p256::EncodedPoint::from_bytes(ephemeral_pub_raw) + .map_err(|e| format!("Invalid ephemeral public key: {e}"))?; + let ephemeral_pub = PublicKey::from_encoded_point(&ephemeral_point) + .into_option() + .ok_or("Invalid ephemeral P-256 point")?; + + // Recipient public key bytes for HKDF info + let recipient_pub_bytes = BASE64 + .decode(public_key_base64) + .map_err(|e| format!("Invalid public key base64: {e}"))?; + + // ECDH: recipient private × ephemeral public → shared secret + let shared_secret = p256::ecdh::diffie_hellman( + secret_key.to_nonzero_scalar(), + ephemeral_pub.as_affine(), + ); + let shared_secret_bytes = shared_secret.raw_secret_bytes(); + + // HKDF-SHA256 → AES-256 key (must match encrypt side) + let mut info = Vec::with_capacity(PUBLIC_KEY_LENGTH * 2); + info.extend_from_slice(ephemeral_pub_raw); + info.extend_from_slice(&recipient_pub_bytes); + + let hk = Hkdf::::new(Some(HKDF_SALT), shared_secret_bytes); + let mut aes_key = [0u8; 32]; + hk.expand(&info, &mut aes_key) + .map_err(|e| format!("HKDF expand failed: {e}"))?; + + // AES-256-GCM decrypt + // aes-gcm expects ciphertext || tag concatenated (same as wire format after header) + let cipher = Aes256Gcm::new_from_slice(&aes_key) + .map_err(|e| format!("AES key init failed: {e}"))?; + let nonce = Nonce::from_slice(nonce_bytes); + + let plaintext = cipher + .decrypt(nonce, ciphertext_and_tag) + .map_err(|_| "Decryption failed: invalid ciphertext or key".to_string())?; + + Ok(plaintext) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_roundtrip() { + let kp = generate_key_pair().unwrap(); + let plaintext = b"hello world"; + let encrypted = encrypt(&kp.public_key, plaintext).unwrap(); + let decrypted = decrypt(&kp.private_key, &kp.public_key, &encrypted).unwrap(); + assert_eq!(decrypted, plaintext); + } + + #[test] + fn test_payload_format() { + let kp = generate_key_pair().unwrap(); + let encrypted = 
encrypt(&kp.public_key, b"test").unwrap(); + let payload = BASE64.decode(&encrypted).unwrap(); + + // Check version byte + assert_eq!(payload[0], PAYLOAD_VERSION); + // Check total minimum length: 1 + 65 + 12 + 0 + 16 = 94 + assert!(payload.len() >= HEADER_LENGTH + TAG_LENGTH); + // Check ephemeral public key starts with 0x04 (uncompressed) + assert_eq!(payload[1], 0x04); + } + + #[test] + fn test_different_keys_cannot_decrypt() { + let kp1 = generate_key_pair().unwrap(); + let kp2 = generate_key_pair().unwrap(); + let encrypted = encrypt(&kp1.public_key, b"secret").unwrap(); + let result = decrypt(&kp2.private_key, &kp2.public_key, &encrypted); + assert!(result.is_err()); + } +} diff --git a/packages/encryption-binary-rust/src/daemon.rs b/packages/encryption-binary-rust/src/daemon.rs new file mode 100644 index 000000000..d01c9b036 --- /dev/null +++ b/packages/encryption-binary-rust/src/daemon.rs @@ -0,0 +1,326 @@ +//! Daemon mode — long-lived process with IPC server, session management, and auto-shutdown. +//! +//! Matches the Swift daemon's behavior: +//! - Accepts connections over Unix socket (Linux) or named pipe (Windows) +//! - Handles: decrypt, encrypt, ping, invalidate-session +//! - On Windows with Hello: requires biometric before first decrypt per session +//! - No prompt-secret (no GUI on Linux — handled by terminal prompt in TS) +//! - Auto-shutdown after 30 minutes of inactivity +//! - Session invalidation on SIGTERM/SIGINT + +use crate::crypto; +use crate::ipc::{IpcServer, MessageHandler}; +use crate::key_store; +use serde_json::{json, Value}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::{Arc, Mutex}; +use std::time::{Duration, Instant}; + +const DEFAULT_KEY_ID: &str = "varlock-default"; +const DAEMON_INACTIVITY_TIMEOUT: Duration = Duration::from_secs(30 * 60); // 30 minutes +const SESSION_TIMEOUT: Duration = Duration::from_secs(5 * 60); // 5 minutes per session + +/// Per-TTY session state. 
+struct SessionManager { + /// Map of TTY IDs to their session creation time. + /// Sessions expire after SESSION_TIMEOUT. + active_sessions: std::collections::HashMap, + /// Last IPC activity timestamp for daemon timeout. + last_activity: Instant, + /// Whether biometric verification is available on this platform. + biometric_available: bool, +} + +impl SessionManager { + fn new() -> Self { + let info = key_store::get_platform_info(); + Self { + active_sessions: std::collections::HashMap::new(), + last_activity: Instant::now(), + biometric_available: info.biometric_available, + } + } + + fn note_activity(&mut self) { + self.last_activity = Instant::now(); + } + + fn is_session_warm(&self, tty_id: &Option) -> bool { + let key = tty_id.as_deref().unwrap_or("__no_tty__"); + match self.active_sessions.get(key) { + Some(created_at) => created_at.elapsed() < SESSION_TIMEOUT, + None => false, + } + } + + fn mark_session_warm(&mut self, tty_id: &Option) { + let key = tty_id.as_deref().unwrap_or("__no_tty__").to_string(); + self.active_sessions.insert(key, Instant::now()); + } + + fn invalidate_all(&mut self) { + self.active_sessions.clear(); + } + + #[allow(dead_code)] + fn has_any_sessions(&self) -> bool { + self.active_sessions.values().any(|t| t.elapsed() < SESSION_TIMEOUT) + } + + fn is_timed_out(&self) -> bool { + self.last_activity.elapsed() > DAEMON_INACTIVITY_TIMEOUT + } + + /// Whether the next decrypt should require biometric verification. + fn needs_biometric(&self, tty_id: &Option) -> bool { + self.biometric_available && !self.is_session_warm(tty_id) + } +} + +/// Run the daemon. 
+pub fn run_daemon(socket_path: &str, pid_path: Option<&str>) -> Result<(), String> { + // Write PID file + if let Some(pid_path) = pid_path { + if let Some(parent) = std::path::Path::new(pid_path).parent() { + let _ = std::fs::create_dir_all(parent); + } + std::fs::write(pid_path, std::process::id().to_string()) + .map_err(|e| format!("Failed to write PID file: {e}"))?; + } + + let session_manager = Arc::new(Mutex::new(SessionManager::new())); + let mut server = IpcServer::new(socket_path); + + // Activity callback + let sm_activity = session_manager.clone(); + server.set_activity_callback(move || { + if let Ok(mut sm) = sm_activity.lock() { + sm.note_activity(); + } + }); + + // Message handler + let sm_handler = session_manager.clone(); + let handler: MessageHandler = Box::new(move |message: Value, tty_id: Option| { + let action = message + .get("action") + .and_then(|v| v.as_str()) + .unwrap_or(""); + + match action { + "decrypt" => handle_decrypt(&message, &tty_id, &sm_handler), + "encrypt" => handle_encrypt(&message), + "ping" => handle_ping(&tty_id, &sm_handler), + "invalidate-session" => handle_invalidate(&sm_handler), + _ => json!({"error": format!("Unknown action: {action}")}), + } + }); + server.set_message_handler(handler); + + let running = server.running_flag(); + + // Signal handling + let pid_path_owned = pid_path.map(|s| s.to_string()); + + #[cfg(unix)] + { + let _ = ctrlc_handler(running.clone()); + } + + // Inactivity timeout checker + session expiry cleanup + let sm_timeout = session_manager.clone(); + let running_timeout = running.clone(); + std::thread::spawn(move || { + loop { + std::thread::sleep(Duration::from_secs(60)); + if !running_timeout.load(Ordering::SeqCst) { + break; + } + if let Ok(mut sm) = sm_timeout.lock() { + // Clean up expired sessions + sm.active_sessions.retain(|_, created_at| { + created_at.elapsed() < SESSION_TIMEOUT + }); + + if sm.is_timed_out() { + running_timeout.store(false, Ordering::SeqCst); + break; + } + } + } + 
}); + + // Print ready message (matches Swift daemon format) + let ready = json!({ + "ready": true, + "pid": std::process::id(), + "socketPath": socket_path, + }); + println!("{}", ready); + use std::io::Write; + let _ = std::io::stdout().flush(); + + // Start server (blocks) + let result = server.start(); + + // Cleanup + if let Some(pp) = &pid_path_owned { + let _ = std::fs::remove_file(pp); + } + + result +} + +// ── Message handlers ───────────────────────────────────────────── + +fn handle_decrypt( + message: &Value, + tty_id: &Option, + sm: &Arc>, +) -> Value { + let payload = match message.get("payload") { + Some(p) => p, + None => return json!({"error": "Missing payload"}), + }; + + let ciphertext_b64 = match payload.get("ciphertext").and_then(|v| v.as_str()) { + Some(ct) => ct, + None => return json!({"error": "Missing or invalid ciphertext in payload"}), + }; + + let key_id = payload + .get("keyId") + .and_then(|v| v.as_str()) + .unwrap_or(DEFAULT_KEY_ID); + + // Check if biometric verification is needed + let needs_bio = sm.lock().map(|s| s.needs_biometric(tty_id)).unwrap_or(false); + + if needs_bio { + match verify_user_presence() { + Ok(true) => {} // Verified — proceed + Ok(false) => return json!({"error": "User verification cancelled"}), + Err(e) => return json!({"error": format!("Biometric verification failed: {e}")}), + } + } + + // Load key and decrypt + match key_store::load_key(key_id) { + Ok((private_key_der, public_key_b64)) => { + let private_key_b64 = base64::Engine::encode( + &base64::engine::general_purpose::STANDARD, + &private_key_der, + ); + + match crypto::decrypt(&private_key_b64, &public_key_b64, ciphertext_b64) { + Ok(plaintext_bytes) => { + match String::from_utf8(plaintext_bytes) { + Ok(plaintext) => { + // Mark session as warm + if let Ok(mut session) = sm.lock() { + session.mark_session_warm(tty_id); + } + json!({"result": plaintext}) + } + Err(_) => json!({"error": "Decrypted data is not valid UTF-8"}), + } + } + Err(e) => 
json!({"error": e}), + } + } + Err(e) => json!({"error": e}), + } +} + +fn handle_encrypt(message: &Value) -> Value { + let payload = match message.get("payload") { + Some(p) => p, + None => return json!({"error": "Missing payload"}), + }; + + let plaintext = match payload.get("plaintext").and_then(|v| v.as_str()) { + Some(pt) => pt, + None => return json!({"error": "Missing plaintext in payload"}), + }; + + let key_id = payload + .get("keyId") + .and_then(|v| v.as_str()) + .unwrap_or(DEFAULT_KEY_ID); + + match key_store::load_public_key(key_id) { + Ok(public_key_b64) => match crypto::encrypt(&public_key_b64, plaintext.as_bytes()) { + Ok(ciphertext) => json!({"result": ciphertext}), + Err(e) => json!({"error": e}), + }, + Err(e) => json!({"error": e}), + } +} + +fn handle_ping(tty_id: &Option, sm: &Arc>) -> Value { + let session_warm = sm + .lock() + .map(|s| s.is_session_warm(tty_id)) + .unwrap_or(false); + + json!({ + "result": { + "pong": true, + "sessionWarm": session_warm, + "ttyId": tty_id.as_deref().unwrap_or(""), + } + }) +} + +fn handle_invalidate(sm: &Arc>) -> Value { + if let Ok(mut session) = sm.lock() { + session.invalidate_all(); + } + json!({"result": "all sessions invalidated"}) +} + +// ── Biometric verification ─────────────────────────────────────── + +/// Verify user presence using platform-specific biometric. +/// Returns Ok(true) if verified, Ok(false) if cancelled. 
+fn verify_user_presence() -> Result { + #[cfg(target_os = "windows")] + { + crate::key_store::windows_hello::verify_user("Varlock needs to decrypt your secrets") + } + + #[cfg(not(target_os = "windows"))] + { + // No biometric on Linux — sessions are always warm + Ok(true) + } +} + +// ── Signal handling ────────────────────────────────────────────── + +#[cfg(unix)] +fn ctrlc_handler(running: Arc) -> Result<(), String> { + unsafe { + libc::signal(libc::SIGTERM, signal_handler as *const () as libc::sighandler_t); + libc::signal(libc::SIGINT, signal_handler as *const () as libc::sighandler_t); + } + + RUNNING_FLAG + .lock() + .map_err(|e| format!("Failed to set signal handler: {e}"))? + .replace(running); + + Ok(()) +} + +#[cfg(unix)] +static RUNNING_FLAG: std::sync::Mutex>> = std::sync::Mutex::new(None); + +#[cfg(unix)] +extern "C" fn signal_handler(_sig: libc::c_int) { + if let Ok(guard) = RUNNING_FLAG.lock() { + if let Some(ref running) = *guard { + running.store(false, Ordering::SeqCst); + } + } +} diff --git a/packages/encryption-binary-rust/src/ipc.rs b/packages/encryption-binary-rust/src/ipc.rs new file mode 100644 index 000000000..cf7881330 --- /dev/null +++ b/packages/encryption-binary-rust/src/ipc.rs @@ -0,0 +1,460 @@ +//! IPC server for the daemon mode. +//! +//! Protocol: Length-prefixed JSON over Unix domain socket (Linux) or named pipe (Windows). +//! +//! [4 bytes: UInt32 LE message length] +//! [N bytes: UTF-8 JSON] +//! +//! Request: { "id": "...", "action": "...", "payload": { ... } } +//! Response: { "id": "...", "result": ... } or { "id": "...", "error": "..." } + +use serde_json::Value; +use std::io::{Read, Write}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::Arc; + +#[cfg(unix)] +use std::os::unix::net::{UnixListener, UnixStream}; + +const MAX_MESSAGE_SIZE: u32 = 10_000_000; // 10MB safety limit + + + +/// Message handler callback type. 
+pub type MessageHandler = Box) -> Value + Send + Sync>; + +/// IPC server that listens for length-prefixed JSON messages. +pub struct IpcServer { + socket_path: String, + running: Arc, + message_handler: Option>, + on_activity: Option>, +} + +impl IpcServer { + pub fn new(socket_path: &str) -> Self { + Self { + socket_path: socket_path.to_string(), + running: Arc::new(AtomicBool::new(false)), + message_handler: None, + on_activity: None, + } + } + + pub fn set_message_handler(&mut self, handler: MessageHandler) { + self.message_handler = Some(Arc::new(handler)); + } + + pub fn set_activity_callback(&mut self, callback: impl Fn() + Send + Sync + 'static) { + self.on_activity = Some(Arc::new(callback)); + } + + pub fn running_flag(&self) -> Arc { + self.running.clone() + } + + /// Start the IPC server. This blocks the calling thread. + #[cfg(unix)] + pub fn start(&self) -> Result<(), String> { + // Clean up stale socket + let _ = std::fs::remove_file(&self.socket_path); + + // Ensure parent directory exists + if let Some(parent) = std::path::Path::new(&self.socket_path).parent() { + std::fs::create_dir_all(parent) + .map_err(|e| format!("Failed to create socket directory: {e}"))?; + } + + let listener = UnixListener::bind(&self.socket_path) + .map_err(|e| format!("Socket bind failed: {e}"))?; + + // Set socket permissions (owner only) + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let _ = std::fs::set_permissions( + &self.socket_path, + std::fs::Permissions::from_mode(0o600), + ); + } + + // Set non-blocking so we can check the running flag + listener + .set_nonblocking(true) + .map_err(|e| format!("Failed to set non-blocking: {e}"))?; + + self.running.store(true, Ordering::SeqCst); + + while self.running.load(Ordering::SeqCst) { + match listener.accept() { + Ok((stream, _)) => { + if let Some(cb) = &self.on_activity { + cb(); + } + + let handler = self.message_handler.clone(); + let on_activity = self.on_activity.clone(); + let running = 
self.running.clone(); + + // Get peer TTY identity + let tty_id = get_peer_tty_id(&stream); + + std::thread::spawn(move || { + handle_client(stream, handler, on_activity, running, tty_id); + }); + } + Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => { + // No pending connection — sleep briefly and retry + std::thread::sleep(std::time::Duration::from_millis(50)); + } + Err(e) => { + if self.running.load(Ordering::SeqCst) { + eprintln!("Accept error: {e}"); + } + break; + } + } + } + + // Cleanup + let _ = std::fs::remove_file(&self.socket_path); + Ok(()) + } + + /// Start the IPC server on Windows using named pipes. + /// + /// Named pipes work with Node.js `net.connect()` out of the box — + /// the TS daemon client's `socket.connect(pipePath)` just works. + #[cfg(windows)] + pub fn start(&self) -> Result<(), String> { + use windows::Win32::Foundation::{CloseHandle, INVALID_HANDLE_VALUE}; + use windows::Win32::System::Pipes::{ + ConnectNamedPipe, CreateNamedPipeW, DisconnectNamedPipe, + PIPE_TYPE_BYTE, PIPE_READMODE_BYTE, PIPE_WAIT, + }; + use windows::Win32::Storage::FileSystem::PIPE_ACCESS_DUPLEX; + use windows::core::HSTRING; + + self.running.store(true, Ordering::SeqCst); + + let pipe_name = HSTRING::from(&self.socket_path); + + while self.running.load(Ordering::SeqCst) { + // Create a new named pipe instance for each client + let pipe_handle = unsafe { + CreateNamedPipeW( + &pipe_name, + PIPE_ACCESS_DUPLEX, + PIPE_TYPE_BYTE | PIPE_READMODE_BYTE | PIPE_WAIT, + 10, // max instances + 65536, // out buffer + 65536, // in buffer + 0, // default timeout + None, // default security + ) + }; + + if pipe_handle == INVALID_HANDLE_VALUE { + if !self.running.load(Ordering::SeqCst) { + break; + } + return Err("CreateNamedPipe failed".into()); + } + + // Wait for a client to connect (blocking) + let connected = unsafe { ConnectNamedPipe(pipe_handle, None) }; + if connected.is_err() { + // ERROR_PIPE_CONNECTED means client connected between Create and Connect — OK 
+ // Any other error: close and retry + let last_err = unsafe { windows::Win32::Foundation::GetLastError() }; + if last_err != windows::Win32::Foundation::ERROR_PIPE_CONNECTED { + unsafe { let _ = CloseHandle(pipe_handle); } + if !self.running.load(Ordering::SeqCst) { + break; + } + continue; + } + } + + if let Some(cb) = &self.on_activity { + cb(); + } + + let handler = self.message_handler.clone(); + let on_activity = self.on_activity.clone(); + let running = self.running.clone(); + let tty_id: Option = None; + + // HANDLE is !Send, but it's safe to use from another thread + // since we transfer exclusive ownership. Pass as raw pointer. + let raw_handle = pipe_handle.0 as usize; // usize is Send + std::thread::spawn(move || { + use windows::Win32::Foundation::HANDLE; + let pipe = HANDLE(raw_handle as *mut _); + handle_windows_client(pipe, handler, on_activity, running, tty_id); + unsafe { + let _ = DisconnectNamedPipe(pipe); + let _ = CloseHandle(pipe); + } + }); + } + + Ok(()) + } + + pub fn stop(&self) { + self.running.store(false, Ordering::SeqCst); + } +} + +impl Drop for IpcServer { + fn drop(&mut self) { + self.stop(); + let _ = std::fs::remove_file(&self.socket_path); + } +} + +// ── Client handling ────────────────────────────────────────────── + +#[cfg(unix)] +fn handle_client( + mut stream: UnixStream, + handler: Option>, + on_activity: Option>, + running: Arc, + tty_id: Option, +) { + // Set blocking for reads + let _ = stream.set_nonblocking(false); + let _ = stream.set_read_timeout(Some(std::time::Duration::from_secs(300))); + + while running.load(Ordering::SeqCst) { + // Read 4-byte length prefix (little-endian) + let mut len_buf = [0u8; 4]; + match stream.read_exact(&mut len_buf) { + Ok(()) => {} + Err(_) => break, // Connection closed or error + } + + let msg_len = u32::from_le_bytes(len_buf); + if msg_len == 0 || msg_len > MAX_MESSAGE_SIZE { + break; + } + + // Read message body + let mut msg_buf = vec![0u8; msg_len as usize]; + match 
stream.read_exact(&mut msg_buf) { + Ok(()) => {} + Err(_) => break, + } + + // Parse JSON + let message: Value = match serde_json::from_slice(&msg_buf) { + Ok(v) => v, + Err(_) => { + let _ = send_response(&mut stream, None, &serde_json::json!({"error": "Invalid JSON"})); + continue; + } + }; + + if let Some(cb) = &on_activity { + cb(); + } + + // Handle message + let id = message.get("id").and_then(|v| v.as_str()).map(|s| s.to_string()); + + let response = if let Some(ref handler) = handler { + handler(message, tty_id.clone()) + } else { + serde_json::json!({"error": "No handler"}) + }; + + if send_response(&mut stream, id.as_deref(), &response).is_err() { + break; + } + } +} + +fn send_response(stream: &mut impl Write, id: Option<&str>, response: &Value) -> Result<(), String> { + let mut full_response = response.clone(); + if let (Some(id), Some(obj)) = (id, full_response.as_object_mut()) { + obj.insert("id".to_string(), Value::String(id.to_string())); + } + + let json_bytes = serde_json::to_vec(&full_response) + .map_err(|e| format!("Serialization failed: {e}"))?; + + let len = (json_bytes.len() as u32).to_le_bytes(); + stream.write_all(&len).map_err(|e| format!("Write failed: {e}"))?; + stream.write_all(&json_bytes).map_err(|e| format!("Write failed: {e}"))?; + stream.flush().map_err(|e| format!("Flush failed: {e}"))?; + + Ok(()) +} + +// ── Peer TTY identity (Linux) ──────────────────────────────────── + +#[cfg(target_os = "linux")] +fn get_peer_tty_id(stream: &UnixStream) -> Option { + use nix::sys::socket::{getsockopt, sockopt::PeerCredentials}; + use std::os::fd::AsFd; + + let creds = getsockopt(&stream.as_fd(), PeerCredentials).ok()?; + let pid = creds.pid(); + + if pid <= 0 { + return None; + } + + // Read the process's controlling terminal from /proc + get_tty_for_pid(pid as u32) +} + +#[cfg(target_os = "linux")] +fn get_tty_for_pid(pid: u32) -> Option { + // Read /proc//stat to get the tty_nr field (field 7, 0-indexed 6) + let stat = 
std::fs::read_to_string(format!("/proc/{pid}/stat")).ok()?; + + // The stat line format is: pid (comm) state ppid pgrp session tty_nr ... + // comm can contain spaces and parens, so find the last ')' first + let after_comm = stat.rfind(')')? + 2; + let fields: Vec<&str> = stat[after_comm..].split_whitespace().collect(); + + // After the closing paren: state(0) ppid(1) pgrp(2) session(3) tty_nr(4) + let tty_nr: u32 = fields.get(4)?.parse().ok()?; + if tty_nr == 0 { + return None; // No controlling tty + } + + // Get the session leader PID (field 3 after comm) + let session_pid: u32 = fields.get(3)?.parse().ok()?; + + // Get session leader start time for uniqueness + let start_time = get_process_start_time(session_pid).unwrap_or(0); + + // Convert tty_nr to a name (major:minor) + let major = (tty_nr >> 8) & 0xff; + let minor = (tty_nr & 0xff) | ((tty_nr >> 12) & 0xfff00); + let tty_name = format!("tty{major}:{minor}"); + + Some(format!("{tty_name}:{start_time}")) +} + +#[cfg(target_os = "linux")] +fn get_process_start_time(pid: u32) -> Option { + let stat = std::fs::read_to_string(format!("/proc/{pid}/stat")).ok()?; + let after_comm = stat.rfind(')')? 
+ 2; + let fields: Vec<&str> = stat[after_comm..].split_whitespace().collect(); + // Field 19 after comm is starttime (in clock ticks since boot) + fields.get(19)?.parse().ok() +} + +#[cfg(not(any(target_os = "linux", target_os = "windows")))] +fn get_peer_tty_id(_stream: &UnixStream) -> Option { + None +} + +// ── Windows named pipe client handling ─────────────────────────── + +#[cfg(windows)] +fn handle_windows_client( + pipe: windows::Win32::Foundation::HANDLE, + handler: Option>, + on_activity: Option>, + running: Arc, + tty_id: Option, +) { + use windows::Win32::Storage::FileSystem::{ReadFile, WriteFile, FlushFileBuffers}; + + while running.load(Ordering::SeqCst) { + // Read 4-byte length prefix + let mut len_buf = [0u8; 4]; + let mut bytes_read = 0u32; + let ok = unsafe { + ReadFile(pipe, Some(&mut len_buf), Some(&mut bytes_read), None) + }; + if ok.is_err() || bytes_read != 4 { + break; + } + + let msg_len = u32::from_le_bytes(len_buf); + if msg_len == 0 || msg_len > MAX_MESSAGE_SIZE { + break; + } + + // Read message body + let mut msg_buf = vec![0u8; msg_len as usize]; + let mut total_read = 0u32; + while (total_read as usize) < msg_buf.len() { + let mut chunk_read = 0u32; + let ok = unsafe { + ReadFile( + pipe, + Some(&mut msg_buf[total_read as usize..]), + Some(&mut chunk_read), + None, + ) + }; + if ok.is_err() || chunk_read == 0 { + return; + } + total_read += chunk_read; + } + + // Parse JSON + let message: Value = match serde_json::from_slice(&msg_buf) { + Ok(v) => v, + Err(_) => { + let _ = send_windows_response(pipe, None, &serde_json::json!({"error": "Invalid JSON"})); + continue; + } + }; + + if let Some(cb) = &on_activity { + cb(); + } + + let id = message.get("id").and_then(|v| v.as_str()).map(|s| s.to_string()); + + let response = if let Some(ref handler) = handler { + handler(message, tty_id.clone()) + } else { + serde_json::json!({"error": "No handler"}) + }; + + if send_windows_response(pipe, id.as_deref(), &response).is_err() { + break; + 
} + } +} + +#[cfg(windows)] +fn send_windows_response( + pipe: windows::Win32::Foundation::HANDLE, + id: Option<&str>, + response: &Value, +) -> Result<(), String> { + use windows::Win32::Storage::FileSystem::{WriteFile, FlushFileBuffers}; + + let mut full_response = response.clone(); + if let (Some(id), Some(obj)) = (id, full_response.as_object_mut()) { + obj.insert("id".to_string(), Value::String(id.to_string())); + } + + let json_bytes = serde_json::to_vec(&full_response) + .map_err(|e| format!("Serialization failed: {e}"))?; + + let len = (json_bytes.len() as u32).to_le_bytes(); + + let mut written = 0u32; + unsafe { + WriteFile(pipe, Some(&len), Some(&mut written), None) + .map_err(|e| format!("Write failed: {e}"))?; + WriteFile(pipe, Some(&json_bytes), Some(&mut written), None) + .map_err(|e| format!("Write failed: {e}"))?; + let _ = FlushFileBuffers(pipe); + } + + Ok(()) +} diff --git a/packages/encryption-binary-rust/src/key_store/linux.rs b/packages/encryption-binary-rust/src/key_store/linux.rs new file mode 100644 index 000000000..3593485e8 --- /dev/null +++ b/packages/encryption-binary-rust/src/key_store/linux.rs @@ -0,0 +1,286 @@ +//! Linux key protection using TPM2 seal/unseal via tpm2-tools. +//! +//! Strategy: +//! 1. Create a TPM2 Storage Root Key (SRK) under the owner hierarchy +//! (deterministic — same template always produces the same key) +//! 2. Seal the PKCS8 private key under the SRK using tpm2_create +//! 3. Store the sealed public/private blobs on disk +//! 4. To decrypt: recreate SRK, load sealed object, unseal +//! +//! The sealed blob can ONLY be unsealed by the same machine's TPM chip. +//! Survives reboots, survives logout — the TPM hardware is the anchor. +//! +//! Requirements: +//! - TPM 2.0 hardware (present on most machines since ~2018) +//! - tpm2-tools installed (tpm2_createprimary, tpm2_create, tpm2_load, tpm2_unseal) +//! - Access to /dev/tpmrm0 (user must be in 'tss' group or have udev rule) +//! +//! 
Fallback: If TPM2 is not available, falls back to file-based (plaintext) storage. + +use std::io::Write; +use std::process::Command; + +/// Detailed result of TPM2 availability check. +pub enum Tpm2Status { + /// TPM2 is available and ready to use + Available, + /// tpm2-tools not installed + ToolsNotInstalled, + /// /dev/tpmrm0 doesn't exist (no TPM hardware or not enabled in BIOS) + NoDevice, + /// /dev/tpmrm0 exists but not accessible (permission issue) + PermissionDenied, + /// TPM device exists but SRK creation failed (TPM in bad state?) + SrkFailed(String), +} + +/// Check if TPM2 is available and usable. +pub fn check_tpm2_status() -> Tpm2Status { + // Check if tpm2_createprimary is in PATH + if Command::new("which") + .arg("tpm2_createprimary") + .output() + .map(|o| !o.status.success()) + .unwrap_or(true) + { + return Tpm2Status::ToolsNotInstalled; + } + + // Check if TPM device exists + let tpmrm = std::path::Path::new("/dev/tpmrm0"); + if !tpmrm.exists() { + return Tpm2Status::NoDevice; + } + + // Check if we can access it + match std::fs::metadata(tpmrm) { + Ok(_meta) => { + // Try to actually use it with a quick SRK creation + let tmp = std::env::temp_dir().join(format!("varlock-tpm-check-{}", std::process::id())); + let result = Command::new("tpm2_createprimary") + .args(["-C", "o", "-g", "sha256", "-G", "ecc256", "-c"]) + .arg(&tmp) + .output(); + + let _ = std::fs::remove_file(&tmp); + + match result { + Ok(output) if output.status.success() => Tpm2Status::Available, + Ok(output) => { + let stderr = String::from_utf8_lossy(&output.stderr); + if stderr.contains("Permission denied") || stderr.contains("TCTI") { + Tpm2Status::PermissionDenied + } else { + Tpm2Status::SrkFailed(stderr.to_string()) + } + } + Err(_) => Tpm2Status::PermissionDenied, + } + } + Err(_) => Tpm2Status::PermissionDenied, + } +} + +/// Simple check: is TPM2 available? 
+pub fn is_tpm2_available() -> bool { + matches!(check_tpm2_status(), Tpm2Status::Available) +} + +/// Protect a private key by sealing it with the TPM. +/// +/// Returns a blob containing the sealed public + private portions, +/// which can only be unsealed by this machine's TPM. +/// +/// Format: pub_len(4 LE) || pub_data || priv_data +pub fn tpm2_protect(private_key_der: &[u8]) -> Result, String> { + let tmp_dir = std::env::temp_dir().join(format!("varlock-tpm-{}", std::process::id())); + std::fs::create_dir_all(&tmp_dir) + .map_err(|e| format!("Failed to create temp dir: {e}"))?; + + let srk_ctx = tmp_dir.join("srk.ctx"); + let sealed_pub = tmp_dir.join("sealed.pub"); + let sealed_priv = tmp_dir.join("sealed.priv"); + let input_file = tmp_dir.join("input.dat"); + + // Clean up on exit + let _cleanup = CleanupDir(tmp_dir.clone()); + + // Write private key to temp file (restricted permissions) + { + use std::os::unix::fs::OpenOptionsExt; + let mut f = std::fs::OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .mode(0o600) + .open(&input_file) + .map_err(|e| format!("Failed to write temp file: {e}"))?; + f.write_all(private_key_der) + .map_err(|e| format!("Failed to write temp file: {e}"))?; + } + + // Step 1: Create SRK (Storage Root Key) — deterministic + run_tpm2_command( + "tpm2_createprimary", + &["-C", "o", "-g", "sha256", "-G", "ecc256", "-c"], + Some(&srk_ctx), + )?; + + // Step 2: Seal the private key under the SRK + run_tpm2_command_with_args( + "tpm2_create", + &[ + "-C", srk_ctx.to_str().unwrap(), + "-i", input_file.to_str().unwrap(), + "-u", sealed_pub.to_str().unwrap(), + "-r", sealed_priv.to_str().unwrap(), + ], + )?; + + // Step 3: Read the sealed blobs + let pub_data = std::fs::read(&sealed_pub) + .map_err(|e| format!("Failed to read sealed public blob: {e}"))?; + let priv_data = std::fs::read(&sealed_priv) + .map_err(|e| format!("Failed to read sealed private blob: {e}"))?; + + // Pack into a single blob: pub_len(4 LE) || 
pub_data || priv_data + let mut output = Vec::with_capacity(4 + pub_data.len() + priv_data.len()); + output.extend_from_slice(&(pub_data.len() as u32).to_le_bytes()); + output.extend_from_slice(&pub_data); + output.extend_from_slice(&priv_data); + + Ok(output) +} + +/// Unprotect a private key by unsealing it with the TPM. +pub fn tpm2_unprotect(sealed_blob: &[u8]) -> Result, String> { + if sealed_blob.len() < 4 { + return Err("Sealed blob too short".into()); + } + + // Parse: pub_len(4 LE) || pub_data || priv_data + let pub_len = u32::from_le_bytes(sealed_blob[..4].try_into().unwrap()) as usize; + if sealed_blob.len() < 4 + pub_len { + return Err("Sealed blob truncated".into()); + } + let pub_data = &sealed_blob[4..4 + pub_len]; + let priv_data = &sealed_blob[4 + pub_len..]; + + let tmp_dir = std::env::temp_dir().join(format!("varlock-tpm-{}", std::process::id())); + std::fs::create_dir_all(&tmp_dir) + .map_err(|e| format!("Failed to create temp dir: {e}"))?; + + let srk_ctx = tmp_dir.join("srk.ctx"); + let sealed_pub = tmp_dir.join("sealed.pub"); + let sealed_priv = tmp_dir.join("sealed.priv"); + let sealed_ctx = tmp_dir.join("sealed.ctx"); + + let _cleanup = CleanupDir(tmp_dir.clone()); + + // Write sealed blobs to temp files + std::fs::write(&sealed_pub, pub_data) + .map_err(|e| format!("Failed to write sealed pub: {e}"))?; + std::fs::write(&sealed_priv, priv_data) + .map_err(|e| format!("Failed to write sealed priv: {e}"))?; + + // Step 1: Recreate SRK (deterministic — same params = same key) + run_tpm2_command( + "tpm2_createprimary", + &["-C", "o", "-g", "sha256", "-G", "ecc256", "-c"], + Some(&srk_ctx), + )?; + + // Step 2: Load the sealed object + run_tpm2_command_with_args( + "tpm2_load", + &[ + "-C", srk_ctx.to_str().unwrap(), + "-u", sealed_pub.to_str().unwrap(), + "-r", sealed_priv.to_str().unwrap(), + "-c", sealed_ctx.to_str().unwrap(), + ], + )?; + + // Step 3: Unseal + let output = Command::new("tpm2_unseal") + .args(["-c", 
sealed_ctx.to_str().unwrap()]) + .output() + .map_err(|e| format!("Failed to run tpm2_unseal: {e}"))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(format!("TPM2 unseal failed: {stderr}")); + } + + Ok(output.stdout) +} + +// ── Helpers ────────────────────────────────────────────────────── + +fn run_tpm2_command(cmd: &str, args: &[&str], ctx_path: Option<&std::path::Path>) -> Result<(), String> { + let mut command = Command::new(cmd); + command.args(args); + if let Some(ctx) = ctx_path { + command.arg(ctx); + } + + let output = command + .output() + .map_err(|e| format!("Failed to run {cmd}: {e}"))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(format!("{cmd} failed: {stderr}")); + } + + Ok(()) +} + +fn run_tpm2_command_with_args(cmd: &str, args: &[&str]) -> Result<(), String> { + let output = Command::new(cmd) + .args(args) + .output() + .map_err(|e| format!("Failed to run {cmd}: {e}"))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(format!("{cmd} failed: {stderr}")); + } + + Ok(()) +} + +/// RAII cleanup for temp directories. +struct CleanupDir(std::path::PathBuf); + +impl Drop for CleanupDir { + fn drop(&mut self) { + let _ = std::fs::remove_dir_all(&self.0); + } +} + +/// Get a user-friendly hint about why TPM2 isn't available. +pub fn get_tpm2_setup_hint() -> Option { + match check_tpm2_status() { + Tpm2Status::Available => None, + Tpm2Status::ToolsNotInstalled => Some( + "TPM2 hardware may be available but tpm2-tools is not installed.\n\ + Install with: sudo apt install tpm2-tools (Debian/Ubuntu)\n\ + or: sudo dnf install tpm2-tools (Fedora)\n\ + or: sudo pacman -S tpm2-tools (Arch)" + .into(), + ), + Tpm2Status::NoDevice => Some( + "No TPM2 device found (/dev/tpmrm0). 
TPM may need to be enabled in BIOS.".into(), + ), + Tpm2Status::PermissionDenied => Some( + "TPM2 device exists but access denied.\n\ + Add your user to the tss group: sudo usermod -aG tss $USER\n\ + Then log out and back in." + .into(), + ), + Tpm2Status::SrkFailed(e) => Some(format!( + "TPM2 device accessible but key creation failed: {e}" + )), + } +} diff --git a/packages/encryption-binary-rust/src/key_store/mod.rs b/packages/encryption-binary-rust/src/key_store/mod.rs new file mode 100644 index 000000000..f1f0eabc9 --- /dev/null +++ b/packages/encryption-binary-rust/src/key_store/mod.rs @@ -0,0 +1,368 @@ +//! Key storage abstraction. +//! +//! Each platform backend stores the P-256 private key in a protected manner: +//! - Windows: DPAPI (CryptProtectData) — encrypted to the current user session +//! - Linux: TPM2 seal/unseal via tpm2-tools — key sealed to hardware TPM chip +//! +//! All backends store the public key as plaintext (it's not secret) and the +//! private key in a platform-specific protected format. The key file format is: +//! +//! ~/.config/varlock/local-encrypt/keys/{keyId}.json +//! { +//! "keyId": "varlock-default", +//! "publicKey": "", +//! "protectedPrivateKey": "", +//! "protection": "dpapi" | "tpm2" | "none", +//! "createdAt": "2024-01-01T00:00:00Z" +//! } +//! +//! The "none" protection level stores the private key as plaintext base64 — +//! equivalent to the JS file-based backend. Used as an absolute fallback. + +use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; +use serde::{Deserialize, Serialize}; +use std::fs; +use std::path::PathBuf; + +#[cfg(target_os = "linux")] +pub(crate) mod linux; +#[cfg(target_os = "windows")] +mod windows; +#[cfg(target_os = "windows")] +pub(crate) mod windows_hello; + +/// Which protection mechanism is used for the private key. 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum Protection { + /// Windows DPAPI — encrypted to current user session + Dpapi, + /// Linux TPM2 — sealed to hardware TPM chip + Tpm2, + /// No protection — plaintext on disk (fallback) + None, +} + +impl std::fmt::Display for Protection { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Protection::Dpapi => write!(f, "dpapi"), + Protection::Tpm2 => write!(f, "tpm2"), + Protection::None => write!(f, "none"), + } + } +} + +/// Stored key file format (JSON). +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct StoredKey { + pub key_id: String, + /// Base64 uncompressed P-256 public key (65 bytes raw) + pub public_key: String, + /// Base64 protected private key (protection-dependent format) + pub protected_private_key: String, + /// How the private key is protected + pub protection: Protection, + pub created_at: String, +} + +/// Information about what key protection is available on this platform. 
+pub struct PlatformInfo { + /// Backend name for status output + pub backend: String, + /// Whether keys are hardware-backed (TPM) + pub hardware_backed: bool, + /// Whether biometric unlock is available + pub biometric_available: bool, + /// What protection will be used for new keys + pub protection: Protection, +} + +// ── Path helpers ────────────────────────────────────────────────── + +fn get_config_dir() -> PathBuf { + if let Ok(xdg) = std::env::var("XDG_CONFIG_HOME") { + return PathBuf::from(xdg).join("varlock"); + } + + let home = dirs_home(); + + // Backwards compat: if ~/.varlock exists, use it + let legacy = home.join(".varlock"); + if legacy.exists() { + return legacy; + } + + // Default: ~/.config/varlock (XDG standard) + home.join(".config").join("varlock") +} + +fn dirs_home() -> PathBuf { + #[cfg(target_os = "windows")] + { + std::env::var("USERPROFILE") + .map(PathBuf::from) + .unwrap_or_else(|_| PathBuf::from("C:\\Users\\Default")) + } + #[cfg(not(target_os = "windows"))] + { + std::env::var("HOME") + .map(PathBuf::from) + .unwrap_or_else(|_| PathBuf::from("/tmp")) + } +} + +fn get_key_store_dir() -> PathBuf { + get_config_dir().join("local-encrypt").join("keys") +} + +fn get_key_file_path(key_id: &str) -> PathBuf { + get_key_store_dir().join(format!("{key_id}.json")) +} + +// ── Platform-specific key protection ───────────────────────────── + +/// Protect a private key using the best available platform mechanism. +/// Returns (protected_bytes_base64, protection_type). 
+fn protect_private_key(private_key_der: &[u8]) -> (String, Protection) { + #[cfg(target_os = "windows")] + { + match windows::dpapi_protect(private_key_der) { + Ok(protected) => (BASE64.encode(&protected), Protection::Dpapi), + Err(e) => { + eprintln!("Warning: DPAPI protection failed ({e}), falling back to plaintext"); + (BASE64.encode(private_key_der), Protection::None) + } + } + } + + #[cfg(target_os = "linux")] + { + if linux::is_tpm2_available() { + match linux::tpm2_protect(private_key_der) { + Ok(protected) => (BASE64.encode(&protected), Protection::Tpm2), + Err(e) => { + eprintln!("Warning: TPM2 protection failed ({e}), falling back to plaintext"); + (BASE64.encode(private_key_der), Protection::None) + } + } + } else { + // TPM2 not available — plaintext fallback + if let Some(hint) = linux::get_tpm2_setup_hint() { + eprintln!("Note: {hint}"); + } + (BASE64.encode(private_key_der), Protection::None) + } + } + + #[cfg(not(any(target_os = "windows", target_os = "linux")))] + { + // Unsupported platform — plaintext fallback + (BASE64.encode(private_key_der), Protection::None) + } +} + +/// Unprotect a private key, returning the raw PKCS8 DER bytes. +fn unprotect_private_key(protected_base64: &str, protection: &Protection) -> Result, String> { + let protected_bytes = BASE64 + .decode(protected_base64) + .map_err(|e| format!("Invalid base64: {e}"))?; + + match protection { + Protection::None => Ok(protected_bytes), + + #[cfg(target_os = "windows")] + Protection::Dpapi => windows::dpapi_unprotect(&protected_bytes), + + #[cfg(target_os = "linux")] + Protection::Tpm2 => linux::tpm2_unprotect(&protected_bytes), + + #[allow(unreachable_patterns)] + _ => Err(format!("Protection type '{protection}' not supported on this platform")), + } +} + +// ── Public API ─────────────────────────────────────────────────── + +/// Detect the platform capabilities and return status info. 
+pub fn get_platform_info() -> PlatformInfo { + #[cfg(target_os = "windows")] + { + let hello_available = windows_hello::is_hello_available(); + PlatformInfo { + backend: if hello_available { "windows-hello" } else { "windows-dpapi" }.into(), + hardware_backed: false, // DPAPI is software-based; TPM NCrypt is TODO + biometric_available: hello_available, + protection: Protection::Dpapi, + } + } + + #[cfg(target_os = "linux")] + { + let tpm2_available = linux::is_tpm2_available(); + PlatformInfo { + backend: if tpm2_available { "linux-tpm2" } else { "linux-file" }.into(), + hardware_backed: tpm2_available, + biometric_available: false, // fprintd integration is TODO + protection: if tpm2_available { Protection::Tpm2 } else { Protection::None }, + } + } + + #[cfg(not(any(target_os = "windows", target_os = "linux")))] + { + PlatformInfo { + backend: "file".into(), + hardware_backed: false, + biometric_available: false, + protection: Protection::None, + } + } +} + +/// Get a setup hint for TPM2 if it could be available but isn't configured. +#[cfg(target_os = "linux")] +pub fn get_tpm2_setup_hint() -> Option { + linux::get_tpm2_setup_hint() +} + +/// Check if a key exists. +pub fn key_exists(key_id: &str) -> bool { + get_key_file_path(key_id).exists() +} + +/// List all key IDs. +pub fn list_keys() -> Vec { + let dir = get_key_store_dir(); + let entries = match fs::read_dir(&dir) { + Ok(entries) => entries, + Err(_) => return vec![], + }; + + entries + .filter_map(|e| e.ok()) + .filter_map(|e| { + let name = e.file_name().to_string_lossy().to_string(); + name.strip_suffix(".json").map(|s| s.to_string()) + }) + .collect() +} + +/// Generate a new key pair and store it with platform-specific protection. +/// Returns the base64 public key. 
+pub fn generate_key(key_id: &str) -> Result { + let key_pair = crate::crypto::generate_key_pair()?; + + // Decode the private key to protect it + let private_key_der = BASE64 + .decode(&key_pair.private_key) + .map_err(|e| format!("Failed to decode private key: {e}"))?; + + let (protected, protection) = protect_private_key(&private_key_der); + + let stored = StoredKey { + key_id: key_id.to_string(), + public_key: key_pair.public_key.clone(), + protected_private_key: protected, + protection, + created_at: now_iso8601(), + }; + + // Write to disk + let dir = get_key_store_dir(); + fs::create_dir_all(&dir).map_err(|e| format!("Failed to create key store: {e}"))?; + + let path = get_key_file_path(key_id); + let json = serde_json::to_string_pretty(&stored) + .map_err(|e| format!("Failed to serialize key: {e}"))?; + + // Write with restricted permissions + #[cfg(unix)] + { + use std::os::unix::fs::OpenOptionsExt; + let mut opts = fs::OpenOptions::new(); + opts.write(true).create(true).truncate(true).mode(0o600); + use std::io::Write; + let mut file = opts.open(&path).map_err(|e| format!("Failed to write key file: {e}"))?; + file.write_all(json.as_bytes()) + .map_err(|e| format!("Failed to write key file: {e}"))?; + } + #[cfg(not(unix))] + { + fs::write(&path, &json).map_err(|e| format!("Failed to write key file: {e}"))?; + } + + Ok(key_pair.public_key) +} + +/// Delete a key. +pub fn delete_key(key_id: &str) -> bool { + let path = get_key_file_path(key_id); + fs::remove_file(path).is_ok() +} + +/// Load a stored key and return (private_key_der, public_key_base64). 
+pub fn load_key(key_id: &str) -> Result<(Vec, String), String> { + let path = get_key_file_path(key_id); + let data = fs::read_to_string(&path).map_err(|_| format!("Key not found: {key_id}"))?; + let stored: StoredKey = + serde_json::from_str(&data).map_err(|e| format!("Corrupted key file: {e}"))?; + + let private_key_der = unprotect_private_key(&stored.protected_private_key, &stored.protection)?; + Ok((private_key_der, stored.public_key)) +} + +/// Load just the public key (no protection needed). +pub fn load_public_key(key_id: &str) -> Result { + let path = get_key_file_path(key_id); + let data = fs::read_to_string(&path).map_err(|_| format!("Key not found: {key_id}"))?; + let stored: StoredKey = + serde_json::from_str(&data).map_err(|e| format!("Corrupted key file: {e}"))?; + Ok(stored.public_key) +} + +fn now_iso8601() -> String { + // Simple ISO 8601 without external crate + let duration = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default(); + let secs = duration.as_secs(); + // Approximate UTC — good enough for metadata + let days = secs / 86400; + let time_of_day = secs % 86400; + let hours = time_of_day / 3600; + let minutes = (time_of_day % 3600) / 60; + let seconds = time_of_day % 60; + + // Calculate year/month/day from days since epoch (simplified) + let mut y = 1970i64; + let mut remaining_days = days as i64; + loop { + let days_in_year = if is_leap_year(y) { 366 } else { 365 }; + if remaining_days < days_in_year { + break; + } + remaining_days -= days_in_year; + y += 1; + } + let mut m = 1u32; + let days_in_months = if is_leap_year(y) { + [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] + } else { + [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] + }; + for dim in days_in_months { + if remaining_days < dim { + break; + } + remaining_days -= dim; + m += 1; + } + let d = remaining_days + 1; + + format!("{y:04}-{m:02}-{d:02}T{hours:02}:{minutes:02}:{seconds:02}Z") +} + +fn is_leap_year(y: i64) -> bool { + (y % 4 
== 0 && y % 100 != 0) || y % 400 == 0 +} diff --git a/packages/encryption-binary-rust/src/key_store/windows.rs b/packages/encryption-binary-rust/src/key_store/windows.rs new file mode 100644 index 000000000..8b5226aa3 --- /dev/null +++ b/packages/encryption-binary-rust/src/key_store/windows.rs @@ -0,0 +1,108 @@ +//! Windows key protection using DPAPI (CryptProtectData / CryptUnprotectData). +//! +//! DPAPI encrypts data to the current Windows user account. The encrypted blob +//! can only be decrypted by the same user on the same machine. No additional +//! credentials are needed at decrypt time — the user's login session provides +//! the decryption key. +//! +//! Security properties: +//! - Key is never stored as plaintext on disk +//! - Encrypted to the current user's master key (derived from password) +//! - Cannot be decrypted by other users or on other machines +//! - Survives reboots (unlike Linux keyring) +//! - Does NOT require TPM (software-only, but user-scoped) + +use windows::Win32::Security::Cryptography::{ + CryptProtectData, CryptUnprotectData, CRYPT_INTEGER_BLOB, + CRYPTPROTECT_UI_FORBIDDEN, +}; +use windows::Win32::Foundation::LocalFree; +use std::ptr; + +const DPAPI_DESCRIPTION: &str = "Varlock Local Encryption Key"; + +/// Free a DPAPI-allocated buffer and copy it into a Vec. +unsafe fn copy_and_free_blob(blob: &CRYPT_INTEGER_BLOB) -> Vec { + let slice = std::slice::from_raw_parts(blob.pbData, blob.cbData as usize); + let vec = slice.to_vec(); + let hlocal = windows::Win32::Foundation::HLOCAL(blob.pbData as *mut _); + let _ = LocalFree(hlocal); + vec +} + +/// Encrypt data using DPAPI (CryptProtectData). +/// Returns the encrypted blob. 
+pub fn dpapi_protect(plaintext: &[u8]) -> Result, String> { + let data_in = CRYPT_INTEGER_BLOB { + cbData: plaintext.len() as u32, + pbData: plaintext.as_ptr() as *mut u8, + }; + + // Optional entropy — we use the description as additional context + let entropy_bytes: Vec = DPAPI_DESCRIPTION.encode_utf16().chain(std::iter::once(0)).collect(); + let entropy_u8: Vec = entropy_bytes.iter().flat_map(|w| w.to_le_bytes()).collect(); + let entropy = CRYPT_INTEGER_BLOB { + cbData: entropy_u8.len() as u32, + pbData: entropy_u8.as_ptr() as *mut u8, + }; + + let mut data_out = CRYPT_INTEGER_BLOB { + cbData: 0, + pbData: ptr::null_mut(), + }; + + let description: Vec = DPAPI_DESCRIPTION.encode_utf16().chain(std::iter::once(0)).collect(); + + unsafe { + CryptProtectData( + &data_in, + windows::core::PCWSTR(description.as_ptr()), + Some(&entropy), + None, + None, + CRYPTPROTECT_UI_FORBIDDEN, + &mut data_out, + ) + .map_err(|e| format!("CryptProtectData failed: {e}"))?; + } + + let encrypted = unsafe { copy_and_free_blob(&data_out) }; + Ok(encrypted) +} + +/// Decrypt data using DPAPI (CryptUnprotectData). +/// Returns the decrypted plaintext bytes. 
+pub fn dpapi_unprotect(encrypted: &[u8]) -> Result, String> { + let data_in = CRYPT_INTEGER_BLOB { + cbData: encrypted.len() as u32, + pbData: encrypted.as_ptr() as *mut u8, + }; + + let entropy_bytes: Vec = DPAPI_DESCRIPTION.encode_utf16().chain(std::iter::once(0)).collect(); + let entropy_u8: Vec = entropy_bytes.iter().flat_map(|w| w.to_le_bytes()).collect(); + let entropy = CRYPT_INTEGER_BLOB { + cbData: entropy_u8.len() as u32, + pbData: entropy_u8.as_ptr() as *mut u8, + }; + + let mut data_out = CRYPT_INTEGER_BLOB { + cbData: 0, + pbData: ptr::null_mut(), + }; + + unsafe { + CryptUnprotectData( + &data_in, + None, + Some(&entropy), + None, + None, + CRYPTPROTECT_UI_FORBIDDEN, + &mut data_out, + ) + .map_err(|e| format!("CryptUnprotectData failed — key may have been encrypted by a different user: {e}"))?; + } + + let decrypted = unsafe { copy_and_free_blob(&data_out) }; + Ok(decrypted) +} diff --git a/packages/encryption-binary-rust/src/key_store/windows_hello.rs b/packages/encryption-binary-rust/src/key_store/windows_hello.rs new file mode 100644 index 000000000..2711df6a8 --- /dev/null +++ b/packages/encryption-binary-rust/src/key_store/windows_hello.rs @@ -0,0 +1,102 @@ +//! Windows Hello biometric verification. +//! +//! Uses the WinRT `UserConsentVerifier` API to show the Windows Hello dialog +//! (face recognition, fingerprint, or PIN). This is the same dialog that +//! Windows uses for login and app authentication. +//! +//! The verification is decoupled from key storage (DPAPI handles that). +//! This module purely handles user presence verification. + +use windows::Security::Credentials::UI::{ + UserConsentVerificationResult, UserConsentVerifier, UserConsentVerifierAvailability, +}; + +/// Check if Windows Hello is available and configured. 
+pub fn is_hello_available() -> bool { + match UserConsentVerifier::CheckAvailabilityAsync() { + Ok(op) => match op.get() { + Ok(availability) => availability == UserConsentVerifierAvailability::Available, + Err(_) => false, + }, + Err(_) => false, + } +} + +/// Detailed availability check for status reporting. +pub fn get_hello_status() -> HelloStatus { + match UserConsentVerifier::CheckAvailabilityAsync() { + Ok(op) => match op.get() { + Ok(availability) => match availability { + UserConsentVerifierAvailability::Available => HelloStatus::Available, + UserConsentVerifierAvailability::DeviceNotPresent => HelloStatus::NoDevice, + UserConsentVerifierAvailability::NotConfiguredForUser => HelloStatus::NotConfigured, + UserConsentVerifierAvailability::DisabledByPolicy => HelloStatus::DisabledByPolicy, + _ => HelloStatus::Unknown, + }, + Err(e) => HelloStatus::Error(format!("{e}")), + }, + Err(e) => HelloStatus::Error(format!("{e}")), + } +} + +pub enum HelloStatus { + Available, + NoDevice, + NotConfigured, + DisabledByPolicy, + Unknown, + Error(String), +} + +/// Request user verification via Windows Hello. +/// +/// Shows the Windows Hello dialog with the given message. +/// Returns Ok(true) if verified, Ok(false) if cancelled, Err on failure. 
+pub fn verify_user(message: &str) -> Result { + let message = windows::core::HSTRING::from(message); + + let op = UserConsentVerifier::RequestVerificationAsync(&message) + .map_err(|e| format!("Failed to request verification: {e}"))?; + + let result = op.get().map_err(|e| format!("Verification failed: {e}"))?; + + match result { + UserConsentVerificationResult::Verified => Ok(true), + UserConsentVerificationResult::Canceled => Ok(false), + UserConsentVerificationResult::DeviceNotPresent => { + Err("Windows Hello device not present".into()) + } + UserConsentVerificationResult::NotConfiguredForUser => { + Err("Windows Hello not configured".into()) + } + UserConsentVerificationResult::DisabledByPolicy => { + Err("Windows Hello disabled by policy".into()) + } + UserConsentVerificationResult::DeviceBusy => Err("Windows Hello device busy".into()), + UserConsentVerificationResult::RetriesExhausted => { + Err("Windows Hello retries exhausted".into()) + } + _ => Err("Unknown verification result".into()), + } +} + +/// Get a setup hint if Windows Hello could be available. +pub fn get_setup_hint() -> Option { + match get_hello_status() { + HelloStatus::Available => None, + HelloStatus::NoDevice => Some( + "No Windows Hello compatible device found.\n\ + Windows Hello requires a fingerprint reader, IR camera, or compatible security key." + .into(), + ), + HelloStatus::NotConfigured => Some( + "Windows Hello is available but not set up.\n\ + Configure it in Settings > Accounts > Sign-in options." + .into(), + ), + HelloStatus::DisabledByPolicy => Some( + "Windows Hello is disabled by group policy.".into(), + ), + _ => None, + } +} diff --git a/packages/encryption-binary-rust/src/main.rs b/packages/encryption-binary-rust/src/main.rs new file mode 100644 index 000000000..3fe7b8856 --- /dev/null +++ b/packages/encryption-binary-rust/src/main.rs @@ -0,0 +1,246 @@ +//! varlock-local-encrypt — Cross-platform local encryption binary for Varlock. +//! +//! 
Provides the same CLI interface as the Swift macOS binary: +//! generate-key, delete-key, list-keys, key-exists, encrypt, decrypt, status, daemon +//! +//! All output is JSON. Errors return {"error": "message"}. + +mod crypto; +mod daemon; +mod ipc; +mod key_store; + +use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; +use serde_json::json; + +const DEFAULT_KEY_ID: &str = "varlock-default"; + +fn main() { + let args: Vec = std::env::args().collect(); + let command = args.get(1).map(|s| s.as_str()).unwrap_or("help"); + + match command { + "generate-key" => cmd_generate_key(&args), + "delete-key" => cmd_delete_key(&args), + "list-keys" => cmd_list_keys(), + "key-exists" => cmd_key_exists(&args), + "encrypt" => cmd_encrypt(&args), + "decrypt" => cmd_decrypt(&args), + "status" => cmd_status(), + "daemon" => cmd_daemon(&args), + "help" | "--help" | "-h" => cmd_help(), + _ => json_error(&format!("Unknown command: {command}. Run with --help for usage.")), + } +} + +// ── CLI arg helpers ────────────────────────────────────────────── + +fn get_arg(args: &[String], flag: &str) -> Option { + args.iter() + .position(|a| a == flag) + .and_then(|i| args.get(i + 1).cloned()) +} + +fn get_key_id(args: &[String]) -> String { + get_arg(args, "--key-id").unwrap_or_else(|| DEFAULT_KEY_ID.to_string()) +} + +// ── JSON output helpers ───────────────────────────────────────── + +fn json_output(value: &serde_json::Value) { + println!("{}", serde_json::to_string(value).unwrap_or_default()); +} + +fn json_error(message: &str) -> ! { + json_output(&json!({"error": message})); + std::process::exit(1); +} + +fn json_success(result: serde_json::Value) -> ! 
{ + let mut obj = json!({"ok": true}); + if let (Some(base), Some(extra)) = (obj.as_object_mut(), result.as_object()) { + for (k, v) in extra { + base.insert(k.clone(), v.clone()); + } + } + json_output(&obj); + std::process::exit(0); +} + +// ── Commands ──────────────────────────────────────────────────── + +fn cmd_generate_key(args: &[String]) { + let key_id = get_key_id(args); + + match key_store::generate_key(&key_id) { + Ok(public_key) => { + let pub_bytes = BASE64.decode(&public_key).unwrap_or_default(); + json_success(json!({ + "keyId": key_id, + "publicKey": public_key, + "publicKeyBytes": pub_bytes.len(), + })); + } + Err(e) => json_error(&e), + } +} + +fn cmd_delete_key(args: &[String]) { + let key_id = get_key_id(args); + let deleted = key_store::delete_key(&key_id); + json_success(json!({ + "keyId": key_id, + "deleted": deleted, + })); +} + +fn cmd_list_keys() { + let keys = key_store::list_keys(); + json_success(json!({"keys": keys})); +} + +fn cmd_key_exists(args: &[String]) { + let key_id = get_key_id(args); + let exists = key_store::key_exists(&key_id); + json_success(json!({ + "keyId": key_id, + "exists": exists, + })); +} + +fn cmd_encrypt(args: &[String]) { + let key_id = get_key_id(args); + + let data_b64 = match get_arg(args, "--data") { + Some(d) => d, + None => json_error("Missing --data argument (base64-encoded plaintext)"), + }; + + let plaintext = match BASE64.decode(&data_b64) { + Ok(d) => d, + Err(_) => json_error("Invalid base64 data"), + }; + + // Load just the public key (no private key access needed) + let public_key = match key_store::load_public_key(&key_id) { + Ok(pk) => pk, + Err(e) => json_error(&e), + }; + + match crypto::encrypt(&public_key, &plaintext) { + Ok(ciphertext) => json_success(json!({"ciphertext": ciphertext})), + Err(e) => json_error(&e), + } +} + +fn cmd_decrypt(args: &[String]) { + let key_id = get_key_id(args); + + let data_b64 = match get_arg(args, "--data") { + Some(d) => d, + None => json_error("Missing 
--data argument (base64-encoded ciphertext)"), + }; + + // Load the full key pair (private key needed) + let (private_key_der, public_key_b64) = match key_store::load_key(&key_id) { + Ok(k) => k, + Err(e) => json_error(&e), + }; + + let private_key_b64 = BASE64.encode(&private_key_der); + + match crypto::decrypt(&private_key_b64, &public_key_b64, &data_b64) { + Ok(plaintext_bytes) => { + let plaintext = match String::from_utf8(plaintext_bytes) { + Ok(s) => s, + Err(_) => json_error("Decrypted data is not valid UTF-8"), + }; + json_success(json!({"plaintext": plaintext})); + } + Err(e) => json_error(&e), + } +} + +fn cmd_status() { + let info = key_store::get_platform_info(); + let keys = key_store::list_keys(); + + #[allow(unused_mut)] + let mut result = json!({ + "backend": info.backend, + "hardwareBacked": info.hardware_backed, + "biometricAvailable": info.biometric_available, + "protection": info.protection.to_string(), + "platform": std::env::consts::OS, + "arch": std::env::consts::ARCH, + "keys": keys, + }); + + // Include setup hints for optional features + #[cfg(target_os = "linux")] + { + if !info.hardware_backed { + if let Some(hint) = key_store::get_tpm2_setup_hint() { + result.as_object_mut().unwrap().insert( + "setupHint".to_string(), + serde_json::Value::String(hint), + ); + } + } + } + #[cfg(target_os = "windows")] + { + if !info.biometric_available { + if let Some(hint) = key_store::windows_hello::get_setup_hint() { + result.as_object_mut().unwrap().insert( + "setupHint".to_string(), + serde_json::Value::String(hint), + ); + } + } + } + + json_success(result); +} + +fn cmd_daemon(args: &[String]) { + let socket_path = match get_arg(args, "--socket-path") { + Some(sp) => sp, + None => json_error("Missing --socket-path argument"), + }; + + let pid_path = get_arg(args, "--pid-path"); + + if let Err(e) = daemon::run_daemon(&socket_path, pid_path.as_deref()) { + json_error(&format!("Failed to start daemon: {e}")); + } +} + +fn cmd_help() { + let help = 
r#"varlock-local-encrypt - Cross-platform local encryption for Varlock + +COMMANDS: + generate-key [--key-id <id>] Create a new encryption key + delete-key [--key-id <id>] Delete an encryption key + list-keys List all Varlock encryption keys + key-exists [--key-id <id>] Check if a key exists + encrypt --data <base64> [--key-id <id>] Encrypt data (one-shot) + decrypt --data <base64> [--key-id <id>] Decrypt data (one-shot) + status Check platform capabilities + daemon --socket-path <path> [--pid-path <path>] Start IPC daemon + +OPTIONS: + --key-id <id> Key identifier (default: varlock-default) + --data <base64> Base64-encoded data + --socket-path <path> Unix socket path for daemon mode + --pid-path <path> PID file path for daemon mode + +PLATFORM PROTECTION: + Windows: DPAPI (user-session-scoped encryption) + Linux: Kernel keyring (key held in kernel memory) + +All output is JSON. Errors return {"error": "message"}. +"#; + print!("{help}"); + std::process::exit(0); +} diff --git a/packages/encryption-binary-swift/.env.schema b/packages/encryption-binary-swift/.env.schema new file mode 100644 index 000000000..ab7c49333 --- /dev/null +++ b/packages/encryption-binary-swift/.env.schema @@ -0,0 +1,22 @@ +# @defaultSensitive=false @defaultRequired=infer +# @plugin(@varlock/1password-plugin) +# @initOp(allowAppAuth=true, token=$OP_CI_TOKEN) +# --- + +# this must be set in github actions secrets +# @type=opServiceAccountToken @sensitive +OP_CI_TOKEN= + +# Apple code signing - used in CI to sign the macOS native binary +# @sensitive +APPLE_CERTIFICATE_BASE64=op("op://VarlockCI/apple developer/APPLE_CERTIFICATE_BASE64") +# @sensitive +APPLE_CERTIFICATE_PASSWORD=op("op://VarlockCI/apple developer/APPLE_CERTIFICATE_PASSWORD") +APPLE_SIGNING_IDENTITY=op("op://VarlockCI/apple developer/APPLE_SIGNING_IDENTITY") +APPLE_TEAM_ID=op("op://VarlockCI/apple developer/APPLE_TEAM_ID") + +# Apple notarization +# @sensitive +APPLE_ID=op("op://VarlockCI/apple developer/APPLE_NOTARIZATION_APPLE_ID") +# @sensitive +APPLE_APP_PASSWORD=op("op://VarlockCI/apple
developer/APPLE_NOTARIZATION_APP_PASSWORD") diff --git a/packages/encryption-binary-swift/.gitignore b/packages/encryption-binary-swift/.gitignore new file mode 100644 index 000000000..744870715 --- /dev/null +++ b/packages/encryption-binary-swift/.gitignore @@ -0,0 +1 @@ +swift/.build diff --git a/packages/encryption-binary-swift/README.md b/packages/encryption-binary-swift/README.md new file mode 100644 index 000000000..b2a2163b6 --- /dev/null +++ b/packages/encryption-binary-swift/README.md @@ -0,0 +1,32 @@ +# @varlock/encryption-binary-swift + +macOS native binary for varlock's local encryption, built in Swift. + +## Why Swift? + +Varlock uses the **Secure Enclave** for hardware-backed key storage on macOS. The Secure Enclave, Touch ID biometric prompts, and native UI (status bar menu, secure input dialogs) are only accessible through Apple's `Security`, `LocalAuthentication`, and `AppKit` frameworks — which are designed for Swift/Objective-C. Rust or other languages would require fragile FFI bindings with no stable C ABI to target. + +The `.app` bundle format is also required for custom Touch ID icons, `LSUIElement` (menu-bar-only) behavior, and proper code signing + notarization. + +Rust is used for Windows (TPM / Windows Hello) and Linux (TPM2), where the platform APIs have C-friendly interfaces. The IPC protocol (length-prefixed JSON over a Unix socket) is the same across all platforms. + +## Structure + +- `swift/` — Swift Package Manager project (`VarlockEnclave` executable) +- `scripts/build-swift.ts` — Two-phase build: compile (cacheable) + bundle (mode-specific `.app` wrapping + codesign) +- `resources/` — App icon and other bundle resources + +## Building + +```bash +# Local dev (current arch, dev mode) +bun run build:swift:dev + +# Universal binary (arm64 + x86_64, for CI) +bun run build:swift + +# With signing and release metadata +bun run build:swift -- --mode release --version 1.2.3 --sign "Developer ID Application: ..."
+``` + +Output: `packages/varlock/native-bins/darwin/VarlockEnclave.app` diff --git a/packages/encryption-binary-swift/package.json b/packages/encryption-binary-swift/package.json new file mode 100644 index 000000000..f6bd7f1fa --- /dev/null +++ b/packages/encryption-binary-swift/package.json @@ -0,0 +1,18 @@ +{ + "name": "@varlock/encryption-binary-swift", + "description": "macOS Secure Enclave encryption binary for varlock (Swift)", + "version": "0.0.1", + "private": true, + "scripts": { + "kill-daemon": "bun run scripts/kill-daemon.ts", + "build:swift": "bun run kill-daemon && bun run scripts/build-swift.ts --universal", + "build:swift:dev": "bun run kill-daemon && bun run scripts/build-swift.ts", + "clean": "rm -rf swift/.build" + }, + "devDependencies": { + "@varlock/1password-plugin": "workspace:*", + "varlock": "workspace:*" + }, + "author": "dmno-dev", + "license": "MIT" +} diff --git a/packages/encryption-binary-swift/resources/AppIcon.icns b/packages/encryption-binary-swift/resources/AppIcon.icns new file mode 100644 index 000000000..c4144cc20 Binary files /dev/null and b/packages/encryption-binary-swift/resources/AppIcon.icns differ diff --git a/packages/encryption-binary-swift/scripts/build-swift.ts b/packages/encryption-binary-swift/scripts/build-swift.ts new file mode 100644 index 000000000..965681981 --- /dev/null +++ b/packages/encryption-binary-swift/scripts/build-swift.ts @@ -0,0 +1,179 @@ +#!/usr/bin/env bun + +/** + * Build script for the VarlockEnclave Swift binary. + * + * Two-phase build: + * 1. Compile — produces a universal (or single-arch) binary. This is the slow + * step and is cached in CI by source hash. + * 2. Bundle — wraps the binary in a .app bundle with environment-specific + * metadata (name, version, bundle ID) and codesigns it. This is fast and + * can vary per build mode without recompiling. 
+ * + * Usage: + * bun run scripts/build-swift.ts # dev build (current arch) + * bun run scripts/build-swift.ts --universal # universal binary (CI) + * bun run scripts/build-swift.ts --mode release # production bundle metadata + * bun run scripts/build-swift.ts --sign "Developer ID Application: ..." + * bun run scripts/build-swift.ts --version 1.2.3 # set bundle version + */ + +import { execSync } from 'node:child_process'; +import path from 'node:path'; +import fs from 'node:fs'; + +// ── CLI args ──────────────────────────────────────────────────── + +const args = process.argv.slice(2); + +function getArg(flag: string): string | undefined { + const idx = args.indexOf(flag); + return idx >= 0 ? args[idx + 1] : undefined; +} + +const universal = args.includes('--universal'); +const signingIdentity = getArg('--sign'); +const mode = (getArg('--mode') ?? 'dev') as 'dev' | 'preview' | 'release'; +const version = getArg('--version') ?? '0.0.0-dev'; + +// ── Paths ─────────────────────────────────────────────────────── + +const swiftDir = path.resolve(import.meta.dir, '..', 'swift'); +const binDir = path.resolve(import.meta.dir, '..', '..', 'varlock', 'native-bins', 'darwin'); +const binaryName = 'varlock-local-encrypt'; +const appBundleName = 'VarlockEnclave.app'; + +// ── Build mode config ─────────────────────────────────────────── + +interface BundleConfig { + bundleId: string; + displayName: string; + menuTitle: string; +} + +const BUNDLE_CONFIGS: Record<'dev' | 'preview' | 'release', BundleConfig> = { + dev: { + bundleId: 'dev.varlock.enclave.dev', + displayName: 'Varlock (Dev)', + menuTitle: 'Varlock Enclave (Dev)', + }, + preview: { + bundleId: 'dev.varlock.enclave.preview', + displayName: 'Varlock (Preview)', + menuTitle: 'Varlock Enclave (Preview)', + }, + release: { + bundleId: 'dev.varlock.enclave', + displayName: 'Varlock', + menuTitle: 'Varlock Secure Enclave', + }, +}; + +const bundleConfig = BUNDLE_CONFIGS[mode]; +console.log(`Build mode: ${mode}`); +console.log(`Bundle ID:
${bundleConfig.bundleId}`); +console.log(`Display name: ${bundleConfig.displayName}`); + +function run(cmd: string, opts?: { cwd?: string }) { + console.log(`> ${cmd}`); + execSync(cmd, { stdio: 'inherit', cwd: opts?.cwd ?? swiftDir }); +} + +// ── Phase 1: Compile ──────────────────────────────────────────── + +fs.mkdirSync(binDir, { recursive: true }); + +let builtBinaryPath: string; + +if (universal) { + // Build universal binary (arm64 + x86_64) — used in CI + run('swift build -c release --arch arm64'); + run('swift build -c release --arch x86_64'); + + const arm64Binary = path.join(swiftDir, '.build', 'arm64-apple-macosx', 'release', 'VarlockEnclave'); + const x86Binary = path.join(swiftDir, '.build', 'x86_64-apple-macosx', 'release', 'VarlockEnclave'); + + builtBinaryPath = path.join(binDir, `${binaryName}-universal`); + run(`lipo -create "${arm64Binary}" "${x86Binary}" -output "${builtBinaryPath}"`); + run(`lipo -info "${builtBinaryPath}"`); +} else { + // Current platform only — fast for local dev + run('swift build -c release'); + builtBinaryPath = path.join(swiftDir, '.build', 'release', 'VarlockEnclave'); +} + +// ── Phase 2: Bundle ───────────────────────────────────────────── + +const appDir = path.join(binDir, appBundleName); +const contentsDir = path.join(appDir, 'Contents'); +const macosDir = path.join(contentsDir, 'MacOS'); +const resourcesDir = path.join(contentsDir, 'Resources'); + +// Clean previous bundle +fs.rmSync(appDir, { recursive: true, force: true }); +fs.mkdirSync(macosDir, { recursive: true }); +fs.mkdirSync(resourcesDir, { recursive: true }); + +// Copy binary into bundle +const bundleBinaryPath = path.join(macosDir, binaryName); +fs.copyFileSync(builtBinaryPath, bundleBinaryPath); +fs.chmodSync(bundleBinaryPath, 0o755); + +// Clean up temp universal binary if we created one +if (universal) { + fs.unlinkSync(builtBinaryPath); +} + +// Copy icon if it exists +const iconSrc = path.join(import.meta.dir, '..', 'resources', 
'AppIcon.icns'); +const hasIcon = fs.existsSync(iconSrc); +if (hasIcon) { + fs.copyFileSync(iconSrc, path.join(resourcesDir, 'AppIcon.icns')); +} + +// Write Info.plist with environment-specific metadata +const infoPlist = `<?xml version="1.0" encoding="UTF-8"?> +<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> +<plist version="1.0"> +<dict> + <key>CFBundleIdentifier</key> + <string>${bundleConfig.bundleId}</string> + <key>CFBundleName</key> + <string>${bundleConfig.displayName}</string> + <key>CFBundleDisplayName</key> + <string>${bundleConfig.displayName}</string> + <key>CFBundleExecutable</key> + <string>${binaryName}</string> + <key>CFBundlePackageType</key> + <string>APPL</string> + <key>CFBundleVersion</key> + <string>${version}</string> + <key>CFBundleShortVersionString</key> + <string>${version}</string> + <key>LSUIElement</key> + <true/>${hasIcon ? ` + <key>CFBundleIconFile</key> + <string>AppIcon</string>` : ''} + <key>VarlockBuildMode</key> + <string>${mode}</string> + <key>VarlockMenuTitle</key> + <string>${bundleConfig.menuTitle}</string> +</dict> +</plist> +`; + +fs.writeFileSync(path.join(contentsDir, 'Info.plist'), infoPlist); + +console.log(`Built app bundle: ${appDir}`); + +// ── Codesign ──────────────────────────────────────────────────── + +if (signingIdentity) { + run(`codesign --force --deep --options runtime --sign "${signingIdentity}" "${appDir}"`); + run(`codesign --verify --verbose "${appDir}"`); + console.log('App bundle signed successfully'); +} else { + run(`codesign --force --deep --sign - "${appDir}"`); + console.log('App bundle ad-hoc signed (use --sign for proper signing)'); +} + +console.log('Done!'); diff --git a/packages/encryption-binary-swift/scripts/kill-daemon.ts b/packages/encryption-binary-swift/scripts/kill-daemon.ts new file mode 100644 index 000000000..919da7cf6 --- /dev/null +++ b/packages/encryption-binary-swift/scripts/kill-daemon.ts @@ -0,0 +1,47 @@ +#!/usr/bin/env bun + +/** + * Kill the running VarlockEnclave daemon (if any). + * + * Reads the PID from the local-encrypt daemon.pid, + * sends SIGTERM, and cleans up PID and socket files.
+ */ + +import path from 'node:path'; +import fs from 'node:fs'; +import { getUserVarlockDir } from '../../../packages/varlock/src/lib/user-config-dir'; + +const socketDir = path.join(getUserVarlockDir(), 'local-encrypt'); +const pidPath = path.join(socketDir, 'daemon.pid'); +const socketPath = path.join(socketDir, 'daemon.sock'); + +if (!fs.existsSync(pidPath)) { + console.log('No daemon PID file found, nothing to kill'); + process.exit(0); +} + +const pid = parseInt(fs.readFileSync(pidPath, 'utf-8').trim(), 10); +if (Number.isNaN(pid)) { + console.log('Invalid PID file, cleaning up'); + fs.unlinkSync(pidPath); + process.exit(0); +} + +try { + process.kill(pid, 'SIGTERM'); + console.log(`Killed daemon (PID ${pid})`); +} catch (err: any) { + if (err.code === 'ESRCH') { + console.log(`Daemon (PID ${pid}) was not running, cleaning up stale PID file`); + } else { + throw err; + } +} + +// Clean up PID and socket files +try { + fs.unlinkSync(pidPath); +} catch { /* ignore */ } +try { + fs.unlinkSync(socketPath); +} catch { /* ignore */ } diff --git a/packages/encryption-binary-swift/swift/Package.swift b/packages/encryption-binary-swift/swift/Package.swift new file mode 100644 index 000000000..0ada0708e --- /dev/null +++ b/packages/encryption-binary-swift/swift/Package.swift @@ -0,0 +1,20 @@ +// swift-tools-version: 5.9 +import PackageDescription + +let package = Package( + name: "VarlockEnclave", + platforms: [ + .macOS(.v13), + ], + targets: [ + .executableTarget( + name: "VarlockEnclave", + path: "Sources/VarlockEnclave", + linkerSettings: [ + .linkedFramework("Security"), + .linkedFramework("LocalAuthentication"), + .linkedFramework("AppKit"), + ] + ), + ] +) diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/IPCServer.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/IPCServer.swift new file mode 100644 index 000000000..2d7546759 --- /dev/null +++ 
b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/IPCServer.swift @@ -0,0 +1,217 @@ +import Foundation + +/// Unix domain socket IPC server using length-prefixed JSON protocol. +/// +/// Protocol: +/// - 4-byte little-endian message length +/// - JSON payload: { "id": "...", "action": "...", "payload": { ... } } +/// - Response: { "id": "...", "result": "..." } or { "id": "...", "error": "..." } +final class IPCServer { + private let socketPath: String + private var socketFD: Int32 = -1 + private var clientHandlers: [Int32: DispatchWorkItem] = [:] + private let queue = DispatchQueue(label: "dev.varlock.ipc", attributes: .concurrent) + private let handlersQueue = DispatchQueue(label: "dev.varlock.ipc.handlers") + private var isRunning = false + + /// Handler for incoming messages. Second parameter is the peer's TTY identity (nil if unknown). + var messageHandler: ((_ message: [String: Any], _ ttyId: String?) -> [String: Any])? + + /// Called after accept (new client) and after each successfully parsed JSON message. + var onConnectionActivity: (() -> Void)? 
+ + init(socketPath: String) { + self.socketPath = socketPath + } + + // MARK: - Server Lifecycle + + func start() throws { + // Clean up any stale socket file + unlink(socketPath) + + // Ensure parent directory exists + let dir = (socketPath as NSString).deletingLastPathComponent + try FileManager.default.createDirectory(atPath: dir, withIntermediateDirectories: true) + + // Create socket + socketFD = socket(AF_UNIX, SOCK_STREAM, 0) + guard socketFD >= 0 else { + throw IPCError.socketCreationFailed(String(cString: strerror(errno))) + } + + // Bind + var addr = sockaddr_un() + addr.sun_family = sa_family_t(AF_UNIX) + withUnsafeMutablePointer(to: &addr.sun_path) { ptr in + socketPath.withCString { cstr in + _ = strcpy(UnsafeMutableRawPointer(ptr).assumingMemoryBound(to: CChar.self), cstr) + } + } + + let bindResult = withUnsafePointer(to: &addr) { ptr in + ptr.withMemoryRebound(to: sockaddr.self, capacity: 1) { sockaddrPtr in + bind(socketFD, sockaddrPtr, socklen_t(MemoryLayout<sockaddr_un>.size)) + } + } + guard bindResult == 0 else { + close(socketFD) + throw IPCError.bindFailed(String(cString: strerror(errno))) + } + + // Set socket permissions (owner only) + chmod(socketPath, 0o600) + + // Listen + guard listen(socketFD, 5) == 0 else { + close(socketFD) + unlink(socketPath) + throw IPCError.listenFailed(String(cString: strerror(errno))) + } + + isRunning = true + + // Accept loop on background queue + queue.async { [weak self] in + self?.acceptLoop() + } + } + + func stop() { + isRunning = false + if socketFD >= 0 { + close(socketFD) + socketFD = -1 + } + unlink(socketPath) + + // Cancel all client handlers + handlersQueue.sync { + for (fd, work) in clientHandlers { + work.cancel() + close(fd) + } + clientHandlers.removeAll() + } + } + + // MARK: - Accept Loop + + private func acceptLoop() { + while isRunning { + var clientAddr = sockaddr_un() + var clientAddrLen = socklen_t(MemoryLayout<sockaddr_un>.size) + + let clientFD = withUnsafeMutablePointer(to: &clientAddr) { ptr in
ptr.withMemoryRebound(to: sockaddr.self, capacity: 1) { sockaddrPtr in + accept(socketFD, sockaddrPtr, &clientAddrLen) + } + } + + guard clientFD >= 0 else { + if !isRunning { break } + continue + } + + onConnectionActivity?() + + let workItem = DispatchWorkItem { [weak self] in + self?.handleClient(fd: clientFD) + } + handlersQueue.sync { + clientHandlers[clientFD] = workItem + } + queue.async(execute: workItem) + } + } + + // MARK: - Client Handling + + private func handleClient(fd: Int32) { + defer { + close(fd) + handlersQueue.sync { + _ = clientHandlers.removeValue(forKey: fd) + } + } + + // Resolve the peer's TTY identity once per connection + let ttyId: String? + if let peerPid = getPeerPid(fd: fd) { + ttyId = getTtyIdentifier(forPid: peerPid) + } else { + ttyId = nil + } + + while isRunning { + // Read 4-byte length prefix (little-endian) + var lengthBytes = [UInt8](repeating: 0, count: 4) + let bytesRead = recv(fd, &lengthBytes, 4, MSG_WAITALL) + guard bytesRead == 4 else { break } + + let messageLength = Int(UInt32(lengthBytes[0]) + | (UInt32(lengthBytes[1]) << 8) + | (UInt32(lengthBytes[2]) << 16) + | (UInt32(lengthBytes[3]) << 24)) + + guard messageLength > 0, messageLength < 10_000_000 else { break } // 10MB safety limit + + // Read message body + var messageData = Data(count: messageLength) + let bodyRead = messageData.withUnsafeMutableBytes { ptr in + recv(fd, ptr.baseAddress!, messageLength, MSG_WAITALL) + } + guard bodyRead == messageLength else { break } + + // Parse JSON + guard let json = try? JSONSerialization.jsonObject(with: messageData) as? [String: Any] else { + sendResponse(fd: fd, response: ["error": "Invalid JSON"]) + continue + } + + onConnectionActivity?() + + // Handle message with the peer's TTY identity + let response = messageHandler?(json, ttyId) ?? ["error": "No handler"] + sendResponse(fd: fd, id: json["id"] as? String, response: response) + } + } + + private func sendResponse(fd: Int32, id: String? 
= nil, response: [String: Any]) { + var fullResponse = response + if let id = id { + fullResponse["id"] = id + } + + guard let jsonData = try? JSONSerialization.data(withJSONObject: fullResponse) else { + return + } + + // Write length prefix (4 bytes, little-endian) + var length = UInt32(jsonData.count).littleEndian + _ = withUnsafeBytes(of: &length) { ptr in + send(fd, ptr.baseAddress!, 4, 0) + } + + // Write message body + jsonData.withUnsafeBytes { ptr in + _ = send(fd, ptr.baseAddress!, jsonData.count, 0) + } + } +} + +// MARK: - Errors + +enum IPCError: LocalizedError { + case socketCreationFailed(String) + case bindFailed(String) + case listenFailed(String) + + var errorDescription: String? { + switch self { + case .socketCreationFailed(let msg): return "Socket creation failed: \(msg)" + case .bindFailed(let msg): return "Socket bind failed: \(msg)" + case .listenFailed(let msg): return "Socket listen failed: \(msg)" + } + } +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/PeerIdentity.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/PeerIdentity.swift new file mode 100644 index 000000000..259a1e06b --- /dev/null +++ b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/PeerIdentity.swift @@ -0,0 +1,64 @@ +import Foundation +import Darwin + +// LOCAL_PEERPID may not be exported by Swift's Darwin module +private let LOCAL_PEERPID: Int32 = 0x002 + +/// Get the PID of the peer connected to a Unix domain socket. +func getPeerPid(fd: Int32) -> pid_t? { + var pid: pid_t = 0 + var pidSize = socklen_t(MemoryLayout<pid_t>.size) + let result = getsockopt(fd, SOL_LOCAL, LOCAL_PEERPID, &pid, &pidSize) + guard result == 0, pid > 0 else { return nil } + return pid +} + +/// Get process info via sysctl KERN_PROC. +private func getProcessInfo(pid: pid_t) -> kinfo_proc?
{ + var mib: [Int32] = [CTL_KERN, KERN_PROC, KERN_PROC_PID, pid] + var info = kinfo_proc() + var size = MemoryLayout<kinfo_proc>.size + + let result = sysctl(&mib, UInt32(mib.count), &info, &size, nil, 0) + guard result == 0 else { return nil } + return info +} + +/// Get a stable TTY identifier for a process. +/// +/// Combines the TTY device name with the session leader's start time. +/// The session leader is the shell process that owns the TTY (its PID equals +/// the session ID). Using its start time prevents TTY device reuse attacks +/// (where a new terminal is allocated the same /dev/ttysNNN after the old one closed). +/// +/// Returns nil if the process has no controlling TTY (detached, CI, etc). +func getTtyIdentifier(forPid pid: pid_t) -> String? { + guard let info = getProcessInfo(pid: pid) else { return nil } + + let ttyDev = info.kp_eproc.e_tdev + // NODEV (0xFFFFFFFF) or 0 means no controlling tty + guard ttyDev != UInt32.max, ttyDev != 0 else { return nil } + + // Convert device number to name (e.g., "ttys003") + guard let namePtr = devname(dev_t(ttyDev), S_IFCHR) else { return nil } + let ttyName = String(cString: namePtr) + + // Get the session leader's start time for uniqueness. + // getsid() returns the session leader PID (the shell that owns the TTY), + // which is stable across all processes launched from the same terminal. + // (e_tpgid is the *foreground process group*, which changes on every command.)
+ let sessionLeaderPid = getsid(pid) + var startTimestamp: Int = 0 + + if sessionLeaderPid > 0, let leaderInfo = getProcessInfo(pid: sessionLeaderPid) { + startTimestamp = Int(leaderInfo.kp_proc.p_starttime.tv_sec) + } + + // If we couldn't get the session leader start time, fall back to the + // connecting process's own start time (less ideal but still unique per session) + if startTimestamp == 0 { + startTimestamp = Int(info.kp_proc.p_starttime.tv_sec) + } + + return "\(ttyName):\(startTimestamp)" +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureEnclaveManager.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureEnclaveManager.swift new file mode 100644 index 000000000..f86d82034 --- /dev/null +++ b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureEnclaveManager.swift @@ -0,0 +1,304 @@ +import Foundation +import Security +import LocalAuthentication +import CryptoKit + +/// Manages Secure Enclave key operations and ECIES encrypt/decrypt. +/// +/// Uses CryptoKit's SecureEnclave.P256 API. Key "data representations" (opaque handles +/// to the SE key, NOT the private key itself) are stored as files on disk. +/// This avoids Keychain entitlement requirements that plague CLI tools. +/// +/// Crypto scheme: +/// - P-256 key stored in Secure Enclave with biometric access control +/// - ECIES: ephemeral P-256 key pair → ECDH → HKDF-SHA256 → AES-256-GCM +/// - Payload: version(1) | ephemeralPubKey(65) | nonce(12) | ciphertext(N) | tag(16) +final class SecureEnclaveManager { + static let payloadVersion: UInt8 = 0x01 + + /// Directory where key data representations are stored + static var keyStorePath: String { + let xdg = ProcessInfo.processInfo.environment["XDG_CONFIG_HOME"] + let base = xdg ?? 
(NSHomeDirectory() + "/.config") + return base + "/varlock/secure-enclave/keys" + } + + private static func keyFilePath(for keyId: String) -> String { + return keyStorePath + "/\(keyId).keydata" + } + + // MARK: - Key Management + + /// Create a new Secure Enclave P-256 key. + /// + /// By default, requires user presence (Touch ID, Apple Watch, or device password). + /// Pass `requireAuth: false` for CI/testing — key is still SE-backed but no biometric. + /// Saves the key data representation to disk and returns the public key. + static func generateKey(keyId: String, context: LAContext? = nil, requireAuth: Bool = true) throws -> Data { + // Create access control — with or without user presence requirement + var accessError: Unmanaged<CFError>? + let flags: SecAccessControlCreateFlags = requireAuth + ? [.privateKeyUsage, .userPresence] + : [.privateKeyUsage] + guard let accessControl = SecAccessControlCreateWithFlags( + kCFAllocatorDefault, + kSecAttrAccessibleWhenUnlockedThisDeviceOnly, + flags, + &accessError + ) else { + let err = accessError?.takeRetainedValue() + throw EnclaveError.keyGenerationFailed(err?.localizedDescription ??
"Failed to create access control") + } + + // Generate the SE key via CryptoKit + let privateKey: SecureEnclave.P256.KeyAgreement.PrivateKey + do { + if let context = context { + privateKey = try SecureEnclave.P256.KeyAgreement.PrivateKey( + accessControl: accessControl, + authenticationContext: context + ) + } else { + privateKey = try SecureEnclave.P256.KeyAgreement.PrivateKey( + accessControl: accessControl + ) + } + } catch { + throw EnclaveError.keyGenerationFailed(error.localizedDescription) + } + + // Save the data representation (an opaque handle, NOT the private key) + let dataRepresentation = privateKey.dataRepresentation + let filePath = keyFilePath(for: keyId) + let dir = (filePath as NSString).deletingLastPathComponent + try FileManager.default.createDirectory(atPath: dir, withIntermediateDirectories: true) + try dataRepresentation.write(to: URL(fileURLWithPath: filePath)) + + // Set file permissions to owner-only + try FileManager.default.setAttributes( + [.posixPermissions: 0o600], + ofItemAtPath: filePath + ) + + return Data(privateKey.publicKey.x963Representation) + } + + /// Delete a key by removing its data representation file. + static func deleteKey(keyId: String) -> Bool { + let filePath = keyFilePath(for: keyId) + do { + try FileManager.default.removeItem(atPath: filePath) + return true + } catch { + return false + } + } + + /// List key IDs by scanning the key store directory. + static func listKeys() -> [String] { + let dir = keyStorePath + guard let files = try? FileManager.default.contentsOfDirectory(atPath: dir) else { + return [] + } + return files + .filter { $0.hasSuffix(".keydata") } + .map { String($0.dropLast(".keydata".count)) } + } + + /// Check if a key exists. + static func keyExists(keyId: String) -> Bool { + return FileManager.default.fileExists(atPath: keyFilePath(for: keyId)) + } + + // MARK: - Key Loading + + /// Load a Secure Enclave private key from its stored data representation. 
+ private static func loadPrivateKey(keyId: String, context: LAContext?) throws -> SecureEnclave.P256.KeyAgreement.PrivateKey { + let filePath = keyFilePath(for: keyId) + guard let data = FileManager.default.contents(atPath: filePath) else { + throw EnclaveError.keyNotFound(keyId) + } + + do { + if let context = context { + return try SecureEnclave.P256.KeyAgreement.PrivateKey( + dataRepresentation: data, + authenticationContext: context + ) + } else { + return try SecureEnclave.P256.KeyAgreement.PrivateKey( + dataRepresentation: data + ) + } + } catch { + throw EnclaveError.keyNotFound("\(keyId) - \(error.localizedDescription)") + } + } + + // MARK: - ECIES Encrypt + + /// Encrypt plaintext using ECIES with the Secure Enclave key. + /// + /// Only needs the public key, so no biometric auth required for encryption. + /// Steps: + /// 1. Load SE key to get public key + /// 2. Generate ephemeral P-256 key pair + /// 3. ECDH: ephemeral private × SE public → shared secret + /// 4. HKDF-SHA256 derive AES-256-GCM key + /// 5. AES-256-GCM encrypt + /// 6. 
Return: version | ephemeralPub | nonce | ciphertext | tag + static func encrypt(plaintext: Data, keyId: String) throws -> Data { + let seKey = try loadPrivateKey(keyId: keyId, context: nil) + let sePublicKey = seKey.publicKey + let pubKeyData = Data(sePublicKey.x963Representation) + + // Generate ephemeral key pair (in software, not SE) + let ephemeralPrivateKey = P256.KeyAgreement.PrivateKey() + let ephemeralPublicKeyData = Data(ephemeralPrivateKey.publicKey.x963Representation) // 65 bytes + + // ECDH: ephemeral private × SE public + let sharedSecret = try ephemeralPrivateKey.sharedSecretFromKeyAgreement(with: sePublicKey) + + // Extract raw shared secret bytes for HKDF + let sharedSecretData = sharedSecret.withUnsafeBytes { Data($0) } + + // HKDF derive AES-256 key (using manual HKDF to match decrypt path) + let symmetricKey = SecureEnclaveManager.deriveKey( + sharedSecret: sharedSecretData, + salt: Data("varlock-ecies-v1".utf8), + info: ephemeralPublicKeyData + pubKeyData, + outputByteCount: 32 + ) + + // AES-256-GCM encrypt + let sealedBox = try AES.GCM.seal(plaintext, using: symmetricKey) + + // Assemble payload: version(1) | ephemeralPub(65) | nonce(12) | ciphertext(N) | tag(16) + var payload = Data() + payload.append(SecureEnclaveManager.payloadVersion) + payload.append(ephemeralPublicKeyData) // 65 bytes + payload.append(contentsOf: sealedBox.nonce) // 12 bytes + payload.append(sealedBox.ciphertext) // N bytes + payload.append(sealedBox.tag) // 16 bytes + + return payload + } + + // MARK: - ECIES Decrypt + + /// Decrypt ciphertext using ECIES with the Secure Enclave key. + /// Uses the provided LAContext for biometric session caching. + /// + /// Steps: + /// 1. Parse payload components + /// 2. Load SE private key with LAContext (uses cached biometric) + /// 3. ECDH: SE private × ephemeral public → shared secret + /// 4. HKDF-SHA256 derive AES-256-GCM key + /// 5. AES-256-GCM decrypt + static func decrypt(payload: Data, keyId: String, context: LAContext?) 
throws -> Data { + // Parse payload + guard payload.count > 1 + 65 + 12 + 16 else { + throw EnclaveError.decryptionFailed("Payload too short") + } + + let version = payload[0] + guard version == SecureEnclaveManager.payloadVersion else { + throw EnclaveError.decryptionFailed("Unsupported payload version: \(version)") + } + + let ephemeralPubKeyData = payload[1..<66] // 65 bytes + let nonce = payload[66..<78] // 12 bytes + let ciphertextAndTag = payload[78...] + guard ciphertextAndTag.count >= 16 else { + throw EnclaveError.decryptionFailed("Payload too short for tag") + } + let ciphertext = ciphertextAndTag.dropLast(16) + let tag = ciphertextAndTag.suffix(16) + + // Load SE private key with LAContext for cached biometric session + let seKey = try loadPrivateKey(keyId: keyId, context: context) + let pubKeyData = Data(seKey.publicKey.x963Representation) + + // Reconstruct ephemeral public key + let ephemeralPublicKey = try P256.KeyAgreement.PublicKey(x963Representation: ephemeralPubKeyData) + + // ECDH: SE private × ephemeral public + // CryptoKit's SecureEnclave key performs the ECDH inside the SE + let sharedSecret = try seKey.sharedSecretFromKeyAgreement(with: ephemeralPublicKey) + let sharedSecretData = sharedSecret.withUnsafeBytes { Data($0) } + + // Derive symmetric key using HKDF (must match encrypt side) + let symmetricKey = SecureEnclaveManager.deriveKey( + sharedSecret: sharedSecretData, + salt: Data("varlock-ecies-v1".utf8), + info: Data(ephemeralPubKeyData) + pubKeyData, + outputByteCount: 32 + ) + + // AES-256-GCM decrypt + let gcmNonce = try AES.GCM.Nonce(data: nonce) + let sealedBox = try AES.GCM.SealedBox(nonce: gcmNonce, ciphertext: ciphertext, tag: tag) + let decrypted = try AES.GCM.open(sealedBox, using: symmetricKey) + + return decrypted + } +} + +// MARK: - HKDF + +// We implement HKDF manually so both encrypt and decrypt paths are consistent. 
+// On the encrypt side we could use CryptoKit's built-in HKDF via SharedSecret, +// but on the decrypt side the SE key's sharedSecretFromKeyAgreement also returns +// a SharedSecret, so actually both paths are consistent now. +// Keeping manual HKDF for explicitness and in case we ever need raw SecKey ECDH. +extension SecureEnclaveManager { + /// HKDF-SHA256 key derivation from raw shared secret bytes. + static func deriveKey( + sharedSecret: Data, + salt: Data, + info: Data, + outputByteCount: Int + ) -> SymmetricKey { + // HKDF-Extract + let prk = HMAC<SHA256>.authenticationCode(for: sharedSecret, using: SymmetricKey(data: salt)) + let prkData = Data(prk) + + // HKDF-Expand + var okm = Data() + var t = Data() + var counter: UInt8 = 1 + + while okm.count < outputByteCount { + var input = t + input.append(info) + input.append(counter) + t = Data(HMAC<SHA256>.authenticationCode(for: input, using: SymmetricKey(data: prkData))) + okm.append(t) + counter += 1 + } + + return SymmetricKey(data: okm.prefix(outputByteCount)) + } +} + +// MARK: - Error Types + +enum EnclaveError: LocalizedError { + case keyGenerationFailed(String) + case keyNotFound(String) + case encryptionFailed(String) + case decryptionFailed(String) + case biometricFailed(String) + case notSupported(String) + + var errorDescription: String?
{ + switch self { + case .keyGenerationFailed(let msg): return "Key generation failed: \(msg)" + case .keyNotFound(let keyId): return "Key not found: \(keyId)" + case .encryptionFailed(let msg): return "Encryption failed: \(msg)" + case .decryptionFailed(let msg): return "Decryption failed: \(msg)" + case .biometricFailed(let msg): return "Biometric authentication failed: \(msg)" + case .notSupported(let msg): return "Not supported: \(msg)" + } + } +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureInputDialog.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureInputDialog.swift new file mode 100644 index 000000000..c3ba4387e --- /dev/null +++ b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureInputDialog.swift @@ -0,0 +1,74 @@ +import AppKit + +/// Shows a native macOS dialog with a secure text field for entering secrets. +/// Runs on the main thread and blocks until the user submits or cancels. +final class SecureInputDialog { + /// Show a secure input dialog and return the entered text, or nil if cancelled. + static func prompt(title: String, message: String, itemKey: String?) -> String? { + var result: String? + let work = { + // Ensure the app has an Edit menu so Cmd+V/C/X/A work in text fields. + // NSAlert doesn't create one, so keyboard shortcuts are dead without this. + ensureEditMenu() + + let alert = NSAlert() + alert.messageText = title + alert.informativeText = message + alert.alertStyle = .informational + alert.addButton(withTitle: "Encrypt") + alert.addButton(withTitle: "Cancel") + + let inputField = NSSecureTextField(frame: NSRect(x: 0, y: 0, width: 300, height: 24)) + inputField.placeholderString = "Enter or paste secret value..." + alert.accessoryView = inputField + + // Set the window title to include the item key for context + let appName = Bundle.main.object(forInfoDictionaryKey: "CFBundleDisplayName") as? String ?? 
"Varlock" + alert.window.title = itemKey.map { "\(appName) — \($0)" } ?? appName + + // Bring the app to front so the dialog is visible + NSApp.activate(ignoringOtherApps: true) + + // Make the input field the first responder after the alert is shown + alert.window.initialFirstResponder = inputField + + let response = alert.runModal() + if response == .alertFirstButtonReturn { + let value = inputField.stringValue + if !value.isEmpty { + result = value + } + } + } + + if Thread.isMainThread { + work() + } else { + DispatchQueue.main.sync { work() } + } + + return result + } + + /// Create a minimal Edit menu so standard keyboard shortcuts work. + /// Safe to call multiple times — only creates the menu once. + private static var editMenuInstalled = false + private static func ensureEditMenu() { + guard !editMenuInstalled else { return } + editMenuInstalled = true + + let mainMenu = NSApp.mainMenu ?? NSMenu() + + let editMenuItem = NSMenuItem(title: "Edit", action: nil, keyEquivalent: "") + let editMenu = NSMenu(title: "Edit") + + editMenu.addItem(withTitle: "Cut", action: #selector(NSText.cut(_:)), keyEquivalent: "x") + editMenu.addItem(withTitle: "Copy", action: #selector(NSText.copy(_:)), keyEquivalent: "c") + editMenu.addItem(withTitle: "Paste", action: #selector(NSText.paste(_:)), keyEquivalent: "v") + editMenu.addItem(withTitle: "Select All", action: #selector(NSText.selectAll(_:)), keyEquivalent: "a") + + editMenuItem.submenu = editMenu + mainMenu.addItem(editMenuItem) + NSApp.mainMenu = mainMenu + } +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SessionManager.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SessionManager.swift new file mode 100644 index 000000000..9c2d7e390 --- /dev/null +++ b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SessionManager.swift @@ -0,0 +1,186 @@ +import Foundation +import LocalAuthentication +import AppKit + +/// Manages biometric authentication sessions for 
the daemon, scoped per-TTY. +/// +/// Each terminal must independently authenticate via Touch ID. This prevents +/// rogue processes in other terminals from piggybacking on an existing session. +/// +/// Biometric reuse timeout is handled by macOS via `touchIDAuthenticationAllowableReuseDuration`. +/// This manager handles per-TTY scoping, explicit invalidation (lock command), +/// and system events (sleep, screen lock). +final class SessionManager { + /// How long Touch ID stays unlocked per terminal before re-prompting (seconds). + /// Passed to macOS via `touchIDAuthenticationAllowableReuseDuration`. + static let sessionTimeout: TimeInterval = 300 // 5 minutes + + /// How long the daemon stays alive with no connections at all + static let daemonInactivityTimeout: TimeInterval = 1800 // 30 minutes + + /// Fallback key for processes without a controlling terminal + static let noTtyFallback = "__no_tty__" + + /// Per-TTY cached LAContext (macOS owns the timeout via reuse duration) + private var contexts: [String: LAContext] = [:] + private let queue = DispatchQueue(label: "dev.varlock.session") + + /// Called when the daemon should shut down due to inactivity + var onDaemonTimeout: (() -> Void)? + + private var daemonTimer: DispatchSourceTimer? + + init() { + setupSystemNotifications() + resetDaemonTimer() + } + + deinit { + daemonTimer?.cancel() + } + + // MARK: - Public API + + /// Get or create an authenticated LAContext for the given TTY. + /// On first call per TTY, triggers Touch ID. Subsequent calls within the + /// reuse duration return the cached context without re-prompting. + func getAuthenticatedContext(ttyId: String?) throws -> LAContext { + let key = ttyId ?? 
SessionManager.noTtyFallback + + return try queue.sync { + // Return cached context if available — macOS handles expiry + // via touchIDAuthenticationAllowableReuseDuration + if let context = contexts[key] { + resetDaemonTimer() + return context + } + + // Need fresh auth for this TTY + let context = LAContext() + context.touchIDAuthenticationAllowableReuseDuration = SessionManager.sessionTimeout + + // Use deviceOwnerAuthentication which accepts Touch ID, Apple Watch, + // or device password — works on machines without biometrics and + // supports the "Use Password" fallback in the Touch ID dialog. + var authError: NSError? + guard context.canEvaluatePolicy(.deviceOwnerAuthentication, error: &authError) else { + let msg = authError?.localizedDescription ?? "Authentication not available" + throw EnclaveError.biometricFailed(msg) + } + + // Synchronous authentication evaluation + let semaphore = DispatchSemaphore(value: 0) + var evalError: Error? + + context.evaluatePolicy( + .deviceOwnerAuthentication, + localizedReason: "decrypt your secrets" + ) { success, error in + if !success { + evalError = error + } + semaphore.signal() + } + + semaphore.wait() + + if let error = evalError { + throw EnclaveError.biometricFailed(error.localizedDescription) + } + + // Cache the authenticated context for this TTY + contexts[key] = context + resetDaemonTimer() + + return context + } + } + + /// Invalidate all TTY sessions (used by lock command, sleep/lock events). + func invalidateAllSessions() { + queue.sync { + for (_, context) in contexts { + context.invalidate() + } + contexts.removeAll() + } + } + + /// Resets the daemon shutdown timer (no Touch ID). Call for any IPC so the + /// process stays up while clients use ping, encrypt, etc., not only decrypt. + func noteIpcActivity() { + queue.async { [weak self] in + self?.resetDaemonTimer() + } + } + + /// Whether the given TTY has a cached session. 
+ /// Note: the session may still re-prompt if macOS's reuse duration has expired. + func isSessionWarm(ttyId: String?) -> Bool { + let key = ttyId ?? SessionManager.noTtyFallback + return queue.sync { + return contexts[key] != nil + } + } + + /// Whether any TTY has a cached session. + func hasAnySessions() -> Bool { + return queue.sync { + return !contexts.isEmpty + } + } + + // MARK: - Private + + private func resetDaemonTimer() { + daemonTimer?.cancel() + let timer = DispatchSource.makeTimerSource(queue: queue) + timer.schedule(deadline: .now() + SessionManager.daemonInactivityTimeout) + timer.setEventHandler { [weak self] in + self?.onDaemonTimeout?() + } + timer.resume() + daemonTimer = timer + } + + // MARK: - System Notifications + + private func setupSystemNotifications() { + let workspace = NSWorkspace.shared + let notificationCenter = workspace.notificationCenter + + // Screen lock / sleep → invalidate ALL sessions + notificationCenter.addObserver( + forName: NSWorkspace.willSleepNotification, + object: nil, + queue: .main + ) { [weak self] _ in + self?.invalidateAllSessions() + } + + notificationCenter.addObserver( + forName: NSWorkspace.sessionDidResignActiveNotification, + object: nil, + queue: .main + ) { [weak self] _ in + self?.invalidateAllSessions() + } + + notificationCenter.addObserver( + forName: NSWorkspace.screensDidSleepNotification, + object: nil, + queue: .main + ) { [weak self] _ in + self?.invalidateAllSessions() + } + + // Also invalidate when screens lock (available on macOS 13+) + DistributedNotificationCenter.default().addObserver( + forName: NSNotification.Name("com.apple.screenIsLocked"), + object: nil, + queue: .main + ) { [weak self] _ in + self?.invalidateAllSessions() + } + } +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/StatusBarMenu.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/StatusBarMenu.swift new file mode 100644 index 000000000..0078ab94e --- /dev/null +++ 
b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/StatusBarMenu.swift @@ -0,0 +1,106 @@ +import AppKit + +/// Manages the macOS menu bar status item for the Varlock Enclave daemon. +final class StatusBarMenu: NSObject, NSMenuDelegate { + private var statusItem: NSStatusItem? + private let menu = NSMenu() + private let sessionManager: SessionManager + private let onLock: () -> Void + private let onQuit: () -> Void + + init( + sessionManager: SessionManager, + onLock: @escaping () -> Void, + onQuit: @escaping () -> Void + ) { + self.sessionManager = sessionManager + self.onLock = onLock + self.onQuit = onQuit + super.init() + setupStatusItem() + } + + private func setupStatusItem() { + statusItem = NSStatusBar.system.statusItem(withLength: NSStatusItem.squareLength) + + guard let button = statusItem?.button else { return } + updateIcon() + let menuTitle = Bundle.main.object(forInfoDictionaryKey: "VarlockMenuTitle") as? String ?? "Varlock Secure Enclave" + button.toolTip = menuTitle + + menu.delegate = self + statusItem?.menu = menu + rebuildMenuItems() + } + + private func updateIcon() { + guard let button = statusItem?.button else { return } + let hasActiveSessions = sessionManager.hasAnySessions() + button.image = nil + button.title = hasActiveSessions ? "🔓" : "🔒" + } + + // NSMenuDelegate — update items and icon each time the menu opens + func menuWillOpen(_ menu: NSMenu) { + updateIcon() + rebuildMenuItems() + } + + private func rebuildMenuItems() { + menu.removeAllItems() + + // Header + let menuTitle = Bundle.main.object(forInfoDictionaryKey: "VarlockMenuTitle") as? String ?? 
"Varlock Secure Enclave" + let headerItem = NSMenuItem(title: menuTitle, action: nil, keyEquivalent: "") + headerItem.isEnabled = false + menu.addItem(headerItem) + + menu.addItem(NSMenuItem.separator()) + + // Lock action — disabled with status text when already locked + let hasActiveSessions = sessionManager.hasAnySessions() + if hasActiveSessions { + let lockItem = NSMenuItem(title: "Lock", action: #selector(lockClicked), keyEquivalent: "l") + lockItem.target = self + menu.addItem(lockItem) + } else { + let lockedItem = NSMenuItem(title: "Locked", action: nil, keyEquivalent: "") + lockedItem.isEnabled = false + menu.addItem(lockedItem) + } + + menu.addItem(NSMenuItem.separator()) + + // Quit + let quitItem = NSMenuItem(title: "Quit Daemon", action: #selector(quitClicked), keyEquivalent: "q") + quitItem.target = self + menu.addItem(quitItem) + } + + @objc private func lockClicked() { + onLock() + updateIcon() + } + + @objc private func quitClicked() { + onQuit() + } + + /// Call from any thread after a session state change to update the icon + func refresh() { + // Use performSelector to ensure the update runs in the next run loop iteration + // on the main thread — more reliable than DispatchQueue.main.async with NSApplication + performSelector(onMainThread: #selector(doRefresh), with: nil, waitUntilDone: false) + } + + @objc private func doRefresh() { + updateIcon() + } + + func remove() { + if let item = statusItem { + NSStatusBar.system.removeStatusItem(item) + statusItem = nil + } + } +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/main.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/main.swift new file mode 100644 index 000000000..fb6add671 --- /dev/null +++ b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/main.swift @@ -0,0 +1,353 @@ +import Foundation +import AppKit + +// MARK: - JSON Output Helpers + +func jsonOutput(_ dict: [String: Any]) { + guard let data = try? 
JSONSerialization.data(withJSONObject: dict), + let str = String(data: data, encoding: .utf8) else { + fputs("{\"error\":\"Failed to serialize output\"}\n", stderr) + exit(1) + } + print(str) +} + +func jsonError(_ message: String) -> Never { + jsonOutput(["error": message]) + exit(1) +} + +func jsonSuccess(_ result: [String: Any]) -> Never { + jsonOutput(["ok": true].merging(result) { _, new in new }) + exit(0) +} + +// MARK: - CLI Parsing + +let args = CommandLine.arguments +let command = args.count > 1 ? args[1] : "help" + +func getArg(_ flag: String) -> String? { + guard let idx = args.firstIndex(of: flag), idx + 1 < args.count else { return nil } + return args[idx + 1] +} + +let defaultKeyId = "varlock-default" +let noAuth = args.contains("--no-auth") // CI mode: skip biometric requirement + +switch command { + +// MARK: - generate-key + +case "generate-key": + let keyId = getArg("--key-id") ?? defaultKeyId + + do { + let pubKeyData = try SecureEnclaveManager.generateKey(keyId: keyId, requireAuth: !noAuth) + jsonSuccess([ + "keyId": keyId, + "publicKey": pubKeyData.base64EncodedString(), + "publicKeyBytes": pubKeyData.count, + ]) + } catch { + jsonError(error.localizedDescription) + } + +// MARK: - delete-key + +case "delete-key": + let keyId = getArg("--key-id") ?? defaultKeyId + let deleted = SecureEnclaveManager.deleteKey(keyId: keyId) + jsonSuccess(["keyId": keyId, "deleted": deleted]) + +// MARK: - list-keys + +case "list-keys": + let keys = SecureEnclaveManager.listKeys() + jsonSuccess(["keys": keys]) + +// MARK: - key-exists + +case "key-exists": + let keyId = getArg("--key-id") ?? defaultKeyId + let exists = SecureEnclaveManager.keyExists(keyId: keyId) + jsonSuccess(["keyId": keyId, "exists": exists]) + +// MARK: - encrypt + +case "encrypt": + let keyId = getArg("--key-id") ?? 
defaultKeyId + + guard let dataB64 = getArg("--data") else { + jsonError("Missing --data argument (base64-encoded plaintext)") + } + guard let plaintext = Data(base64Encoded: dataB64) else { + jsonError("Invalid base64 data") + } + + do { + let encrypted = try SecureEnclaveManager.encrypt(plaintext: plaintext, keyId: keyId) + jsonSuccess(["ciphertext": encrypted.base64EncodedString()]) + } catch { + jsonError(error.localizedDescription) + } + +// MARK: - decrypt (one-shot, for testing) + +case "decrypt": + let keyId = getArg("--key-id") ?? defaultKeyId + + guard let dataB64 = getArg("--data") else { + jsonError("Missing --data argument (base64-encoded ciphertext)") + } + guard let ciphertext = Data(base64Encoded: dataB64) else { + jsonError("Invalid base64 data") + } + + do { + let decrypted = try SecureEnclaveManager.decrypt(payload: ciphertext, keyId: keyId, context: nil) + guard let plaintext = String(data: decrypted, encoding: .utf8) else { + jsonError("Decrypted data is not valid UTF-8") + } + jsonSuccess(["plaintext": plaintext]) + } catch { + jsonError(error.localizedDescription) + } + +// MARK: - status + +case "status": + let seAvailable: Bool + #if targetEnvironment(simulator) + seAvailable = false + #else + seAvailable = true // If this binary runs on real hardware, SE is available + #endif + + jsonSuccess([ + "secureEnclaveAvailable": seAvailable, + "backend": "secure-enclave", + "hardwareBacked": seAvailable, + "biometricAvailable": seAvailable, + "platform": "darwin", + "arch": { + #if arch(arm64) + return "arm64" + #elseif arch(x86_64) + return "x86_64" + #else + return "unknown" + #endif + }(), + "keys": SecureEnclaveManager.listKeys(), + ]) + +// MARK: - daemon + +case "daemon": + guard let socketPath = getArg("--socket-path") else { + jsonError("Missing --socket-path argument") + } + + let sessionManager = SessionManager() + let server = IPCServer(socketPath: socketPath) + + // Write PID file + let pidPath = getArg("--pid-path") + if let pidPath = 
pidPath { + let pidDir = (pidPath as NSString).deletingLastPathComponent + try? FileManager.default.createDirectory(atPath: pidDir, withIntermediateDirectories: true) + try? "\(ProcessInfo.processInfo.processIdentifier)".write(toFile: pidPath, atomically: true, encoding: .utf8) + } + + // Status bar menu (must be created before run loop starts) + // NSApplication is needed for status bar items to work + let app = NSApplication.shared + app.setActivationPolicy(.accessory) // no Dock icon + + var statusBarMenu: StatusBarMenu? + + // Handle daemon shutdown + func shutdownDaemon() { + statusBarMenu?.remove() + server.stop() + if let pidPath = pidPath { + try? FileManager.default.removeItem(atPath: pidPath) + } + exit(0) + } + + sessionManager.onDaemonTimeout = { + shutdownDaemon() + } + + server.onConnectionActivity = { + sessionManager.noteIpcActivity() + } + + // Handle IPC messages (ttyId is resolved from the peer's controlling terminal) + server.messageHandler = { message, ttyId in + guard let action = message["action"] as? String else { + return ["error": "Missing action"] + } + + switch action { + case "decrypt": + guard let payload = message["payload"] as? [String: Any], + let ciphertextB64 = payload["ciphertext"] as? String, + let ciphertext = Data(base64Encoded: ciphertextB64) else { + return ["error": "Missing or invalid ciphertext in payload"] + } + + let keyId = (payload["keyId"] as? String) ?? 
defaultKeyId + + do { + let context = try sessionManager.getAuthenticatedContext(ttyId: ttyId) + let decrypted = try SecureEnclaveManager.decrypt( + payload: ciphertext, + keyId: keyId, + context: context + ) + guard let plaintext = String(data: decrypted, encoding: .utf8) else { + return ["error": "Decrypted data is not valid UTF-8"] + } + statusBarMenu?.refresh() + return ["result": plaintext] + } catch { + return ["error": error.localizedDescription] + } + + case "ping": + return [ + "result": [ + "pong": true, + "sessionWarm": sessionManager.isSessionWarm(ttyId: ttyId), + "ttyId": ttyId as Any, + ], + ] + + case "encrypt": + guard let payload = message["payload"] as? [String: Any], + let plaintextStr = payload["plaintext"] as? String else { + return ["error": "Missing plaintext in payload"] + } + + let encKeyId = (payload["keyId"] as? String) ?? defaultKeyId + guard let plaintextData = plaintextStr.data(using: .utf8) else { + return ["error": "Plaintext is not valid UTF-8"] + } + + do { + let encrypted = try SecureEnclaveManager.encrypt(plaintext: plaintextData, keyId: encKeyId) + return ["result": encrypted.base64EncodedString()] + } catch { + return ["error": error.localizedDescription] + } + + case "prompt-secret": + let promptPayload = message["payload"] as? [String: Any] + let itemKey = promptPayload?["itemKey"] as? String + let promptMessage = promptPayload?["message"] as? String + ?? "Enter the secret value to encrypt:" + + guard let value = SecureInputDialog.prompt( + title: "Varlock — Enter Secret", + message: promptMessage, + itemKey: itemKey + ) else { + return ["error": "cancelled"] + } + + // Encrypt the entered value immediately + let promptKeyId = (promptPayload?["keyId"] as? String) ?? 
defaultKeyId + guard let valueData = value.data(using: .utf8) else { + return ["error": "Value is not valid UTF-8"] + } + + do { + let encrypted = try SecureEnclaveManager.encrypt(plaintext: valueData, keyId: promptKeyId) + return ["result": [ + "ciphertext": encrypted.base64EncodedString(), + ]] + } catch { + return ["error": error.localizedDescription] + } + + case "invalidate-session": + sessionManager.invalidateAllSessions() + statusBarMenu?.refresh() + return ["result": "all sessions invalidated"] + + default: + return ["error": "Unknown action: \(action)"] + } + } + + // Start server + do { + try server.start() + + // Print ready message to stdout so the JS launcher knows we're ready + jsonOutput(["ready": true, "pid": ProcessInfo.processInfo.processIdentifier, "socketPath": socketPath]) + fflush(stdout) + + // Set up status bar menu + statusBarMenu = StatusBarMenu( + sessionManager: sessionManager, + onLock: { + sessionManager.invalidateAllSessions() + statusBarMenu?.refresh() + }, + onQuit: { + shutdownDaemon() + } + ) + + // We need a run loop for NSWorkspace notifications (sleep/lock detection) + // and for the status bar menu to work + signal(SIGTERM, SIG_IGN) + signal(SIGINT, SIG_IGN) + + let sigTermSource = DispatchSource.makeSignalSource(signal: SIGTERM, queue: .main) + sigTermSource.setEventHandler { shutdownDaemon() } + sigTermSource.resume() + + let sigIntSource = DispatchSource.makeSignalSource(signal: SIGINT, queue: .main) + sigIntSource.setEventHandler { shutdownDaemon() } + sigIntSource.resume() + + app.run() + } catch { + jsonError("Failed to start daemon: \(error.localizedDescription)") + } + +// MARK: - help + +case "help", "--help", "-h": + let help = """ + varlock-enclave - Secure Enclave encryption daemon for Varlock + + COMMANDS: + generate-key [--key-id ] Create a new Secure Enclave key + delete-key [--key-id ] Delete a Secure Enclave key + list-keys List all Varlock Secure Enclave keys + key-exists [--key-id ] Check if a key exists + 
encrypt --data [--key-id ] Encrypt data (one-shot) + decrypt --data [--key-id ] Decrypt data (one-shot, testing) + status Check Secure Enclave availability + daemon --socket-path [--pid-path ] Start IPC daemon + + OPTIONS: + --key-id Key identifier (default: varlock-default) + --data Base64-encoded data + --socket-path Unix socket path for daemon mode + --pid-path PID file path for daemon mode + + All output is JSON. Errors return {"error": "message"}. + """ + print(help) + exit(0) + +default: + jsonError("Unknown command: \(command). Run with --help for usage.") +} diff --git a/packages/plugins/1password/src/plugin.ts b/packages/plugins/1password/src/plugin.ts index 7e57edbe2..edd9a04f0 100644 --- a/packages/plugins/1password/src/plugin.ts +++ b/packages/plugins/1password/src/plugin.ts @@ -1,4 +1,6 @@ -import { type Resolver, plugin } from 'varlock/plugin-lib'; +import { + type Resolver, type PluginCacheAccessor, plugin, resolveCacheTtl, +} from 'varlock/plugin-lib'; import { createDeferredPromise, type DeferredPromise } from '@env-spec/utils/defer'; import { Client, createClient } from '@1password/sdk'; @@ -12,6 +14,15 @@ const OP_ICON = 'simple-icons:1password'; plugin.name = '1pass'; const { debug } = plugin; debug('init - version =', plugin.version); + +// capture cache accessor while the plugin proxy context is active +// (the `plugin` proxy is only valid during module initialization, not during resolve()) +let pluginCache: PluginCacheAccessor | undefined; +try { + pluginCache = plugin.cache; +} catch { + // cache not available (e.g., no encryption key) +} plugin.icon = OP_ICON; plugin.standardVars = { initDecorator: '@initOp', @@ -95,6 +106,8 @@ class OpPluginInstance { private connectHost?: string; /** API token for authenticating with the Connect server */ private connectToken?: string; + /** optional cache TTL - when set, resolved values are cached */ + cacheTtl?: string | number; constructor( readonly id: string, @@ -429,13 +442,14 @@ 
plugin.registerRootDecorator({ id, account, connectHost, + cacheTtlResolver: objArgs.cacheTtl, tokenResolver: objArgs.token, allowAppAuthResolver: objArgs.allowAppAuth, connectTokenResolver: objArgs.connectToken, }; }, async execute({ - id, account, connectHost, tokenResolver, allowAppAuthResolver, connectTokenResolver, + id, account, connectHost, cacheTtlResolver, tokenResolver, allowAppAuthResolver, connectTokenResolver, }) { // even if these are empty, we can't throw errors yet // in case the instance is never actually used @@ -449,6 +463,10 @@ plugin.registerRootDecorator({ connectHost, connectToken as string | undefined, ); + const cacheTtl = await resolveCacheTtl(cacheTtlResolver); + if (cacheTtl !== undefined) { + pluginInstances[id].cacheTtl = cacheTtl; + } }, }); @@ -463,7 +481,6 @@ plugin.registerDataType({ description: '1Password service accounts', url: 'https://developer.1password.com/docs/service-accounts/', }, - 'https://example.com', ], async validate(val) { if (!val.startsWith('ops_')) { @@ -541,8 +558,21 @@ plugin.registerResolverFunction({ if (typeof opReference !== 'string') { throw new SchemaError('expected op item location to resolve to a string'); } - const opValue = await selectedInstance.readItem(opReference); - return opValue; + + // check cache if cacheTtl is configured and cache is available + if (selectedInstance.cacheTtl !== undefined && pluginCache) { + const cacheKey = `op:${instanceId}:${opReference}`; + const cached = await pluginCache.get(cacheKey); + if (cached !== undefined) { + debug('cache hit for %s', cacheKey); + return cached; + } + const opValue = await selectedInstance.readItem(opReference); + await pluginCache.set(cacheKey, opValue, selectedInstance.cacheTtl); + return opValue; + } + + return await selectedInstance.readItem(opReference); }, }); @@ -602,6 +632,20 @@ plugin.registerResolverFunction({ if (typeof environmentId !== 'string') { throw new SchemaError('expected environment ID to resolve to a string'); } + + // 
check cache if cacheTtl is configured and cache is available + if (selectedInstance.cacheTtl !== undefined && pluginCache) { + const cacheKey = `opEnv:${instanceId}:${environmentId}`; + const cached = await pluginCache.get(cacheKey); + if (cached !== undefined) { + debug('cache hit for %s', cacheKey); + return cached; + } + const result = await selectedInstance.readEnvironment(environmentId); + await pluginCache.set(cacheKey, result, selectedInstance.cacheTtl); + return result; + } + return await selectedInstance.readEnvironment(environmentId); }, }); diff --git a/packages/plugins/aws-secrets/src/plugin.ts b/packages/plugins/aws-secrets/src/plugin.ts index 92ed43e0f..cd0eb3e46 100644 --- a/packages/plugins/aws-secrets/src/plugin.ts +++ b/packages/plugins/aws-secrets/src/plugin.ts @@ -1,4 +1,6 @@ -import { type Resolver, plugin } from 'varlock/plugin-lib'; +import { + type Resolver, type PluginCacheAccessor, plugin, resolveCacheTtl, +} from 'varlock/plugin-lib'; import { SecretsManagerClient, @@ -21,6 +23,15 @@ const { debug } = plugin; debug('init - version =', plugin.version); plugin.icon = AWS_ICON; +// capture cache accessor while the plugin proxy context is active +// (the `plugin` proxy is only valid during module initialization, not during resolve()) +let pluginCache: PluginCacheAccessor | undefined; +try { + pluginCache = plugin.cache; +} catch { + // cache not available (e.g., no encryption key) +} + plugin.standardVars = { initDecorator: '@initAws', params: { @@ -45,6 +56,8 @@ class AwsPluginInstance { private sessionToken?: string; private profile?: string; private namePrefix?: string; + /** optional cache TTL - when set, resolved values are cached */ + cacheTtl?: string | number; constructor( readonly id: string, @@ -423,6 +436,7 @@ plugin.registerRootDecorator({ secretAccessKeyResolver: objArgs.secretAccessKey, sessionTokenResolver: objArgs.sessionToken, namePrefixResolver: objArgs.namePrefix, + cacheTtlResolver: objArgs.cacheTtl, }; }, async execute({ 
@@ -433,6 +447,7 @@ plugin.registerRootDecorator({ secretAccessKeyResolver, sessionTokenResolver, namePrefixResolver, + cacheTtlResolver, }) { const region = await regionResolver.resolve(); const accessKeyId = await accessKeyIdResolver?.resolve(); @@ -441,6 +456,10 @@ plugin.registerRootDecorator({ const profile = await profileResolver?.resolve(); const namePrefix = await namePrefixResolver?.resolve(); pluginInstances[id].setAuth(region, accessKeyId, secretAccessKey, sessionToken, profile, namePrefix); + const cacheTtl = await resolveCacheTtl(cacheTtlResolver); + if (cacheTtl !== undefined) { + pluginInstances[id].cacheTtl = cacheTtl; + } }, }); @@ -598,6 +617,19 @@ plugin.registerResolverFunction({ // Apply namePrefix const finalSecretId = selectedInstance.applyNamePrefix(secretId); + // check cache if cacheTtl is configured and cache is available + if (selectedInstance.cacheTtl !== undefined && pluginCache) { + const cacheKey = `awsSecret:${instanceId}:${finalSecretId}`; + const cached = await pluginCache.get(cacheKey); + if (cached !== undefined) { + debug('cache hit for %s', cacheKey); + return cached; + } + const secretValue = await selectedInstance.getSecret(finalSecretId, jsonKey); + await pluginCache.set(cacheKey, secretValue, selectedInstance.cacheTtl); + return secretValue; + } + const secretValue = await selectedInstance.getSecret(finalSecretId, jsonKey); return secretValue; }, @@ -717,6 +749,19 @@ plugin.registerResolverFunction({ // Apply namePrefix const finalParameterName = selectedInstance.applyNamePrefix(parameterName); + // check cache if cacheTtl is configured and cache is available + if (selectedInstance.cacheTtl !== undefined && pluginCache) { + const cacheKey = `awsParam:${instanceId}:${finalParameterName}`; + const cached = await pluginCache.get(cacheKey); + if (cached !== undefined) { + debug('cache hit for %s', cacheKey); + return cached; + } + const parameterValue = await selectedInstance.getParameter(finalParameterName, jsonKey); + await 
pluginCache.set(cacheKey, parameterValue, selectedInstance.cacheTtl); + return parameterValue; + } + const parameterValue = await selectedInstance.getParameter(finalParameterName, jsonKey); return parameterValue; }, diff --git a/packages/plugins/bitwarden/src/plugin.ts b/packages/plugins/bitwarden/src/plugin.ts index 31a6a61bb..2a4978e6e 100644 --- a/packages/plugins/bitwarden/src/plugin.ts +++ b/packages/plugins/bitwarden/src/plugin.ts @@ -1,4 +1,6 @@ -import { type Resolver, plugin } from 'varlock/plugin-lib'; +import { + type Resolver, type PluginCacheAccessor, plugin, resolveCacheTtl, +} from 'varlock/plugin-lib'; import ky from 'ky'; import { Buffer } from 'node:buffer'; import { webcrypto } from 'node:crypto'; @@ -13,6 +15,15 @@ const BITWARDEN_ICON = 'simple-icons:bitwarden'; plugin.name = 'bitwarden'; const { debug } = plugin; debug('init - version =', plugin.version); + +// capture cache accessor while the plugin proxy context is active +// (the `plugin` proxy is only valid during module initialization, not during resolve()) +let pluginCache: PluginCacheAccessor | undefined; +try { + pluginCache = plugin.cache; +} catch { + // cache not available (e.g., no encryption key) +} plugin.icon = BITWARDEN_ICON; plugin.standardVars = { initDecorator: '@initBitwarden', @@ -57,6 +68,9 @@ class BitwardenPluginInstance { /** In-flight auth promise - prevents parallel resolution from triggering multiple auth requests (rate limit fix) */ private authInFlight?: Promise; + /** optional cache TTL - when set, resolved values are cached */ + cacheTtl?: string | number; + constructor( readonly id: string, ) {} @@ -336,6 +350,7 @@ plugin.registerRootDecorator({ apiUrl, identityUrl, accessTokenResolver: objArgs.accessToken, + cacheTtlResolver: objArgs.cacheTtl, }; }, async execute({ @@ -343,6 +358,7 @@ plugin.registerRootDecorator({ apiUrl, identityUrl, accessTokenResolver, + cacheTtlResolver, }) { // even if the token is empty, we can't throw errors yet // in case the instance 
is never actually used @@ -353,6 +369,11 @@ plugin.registerRootDecorator({ apiUrl, identityUrl, ); + + const cacheTtl = await resolveCacheTtl(cacheTtlResolver); + if (cacheTtl !== undefined) { + pluginInstances[id].cacheTtl = cacheTtl; + } }, }); @@ -485,7 +506,19 @@ plugin.registerResolverFunction({ }); } - const secretValue = await selectedInstance.getSecret(secretId); - return secretValue; + // check cache if cacheTtl is configured and cache is available + if (selectedInstance.cacheTtl !== undefined && pluginCache) { + const cacheKey = `bw:${instanceId}:${secretId}`; + const cached = await pluginCache.get(cacheKey); + if (cached !== undefined) { + debug('cache hit for %s', cacheKey); + return cached; + } + const secretValue = await selectedInstance.getSecret(secretId); + await pluginCache.set(cacheKey, secretValue, selectedInstance.cacheTtl); + return secretValue; + } + + return await selectedInstance.getSecret(secretId); }, }); diff --git a/packages/plugins/google-secret-manager/src/plugin.ts b/packages/plugins/google-secret-manager/src/plugin.ts index c5764fc09..409f50bcb 100644 --- a/packages/plugins/google-secret-manager/src/plugin.ts +++ b/packages/plugins/google-secret-manager/src/plugin.ts @@ -1,4 +1,6 @@ -import { type Resolver, plugin } from 'varlock/plugin-lib'; +import { + type Resolver, type PluginCacheAccessor, plugin, resolveCacheTtl, +} from 'varlock/plugin-lib'; import { GoogleAuth } from 'google-auth-library'; @@ -9,6 +11,14 @@ const GSM_ICON = 'devicon:googlecloud'; plugin.name = 'gsm'; const { debug } = plugin; debug('init - version =', plugin.version); +// capture cache accessor while the plugin proxy context is active +// (the `plugin` proxy is only valid during module initialization, not during resolve()) +let pluginCache: PluginCacheAccessor | undefined; +try { + pluginCache = plugin.cache; +} catch { + // cache not available (e.g., no encryption key) +} plugin.icon = GSM_ICON; plugin.standardVars = { initDecorator: '@initGsm', @@ -21,6 
+31,8 @@ plugin.standardVars = { class GsmPluginInstance { private projectId?: string; private credentials?: any; + /** optional cache TTL - when set, resolved values are cached */ + cacheTtl?: string | number; constructor( readonly id: string, @@ -187,16 +199,21 @@ plugin.registerRootDecorator({ return { id, + cacheTtlResolver: objArgs.cacheTtl, projectIdResolver: objArgs.projectId, credentialsResolver: objArgs.credentials, }; }, async execute({ - id, projectIdResolver, credentialsResolver, + id, cacheTtlResolver, projectIdResolver, credentialsResolver, }) { const projectId = await projectIdResolver?.resolve(); const credentials = await credentialsResolver?.resolve(); pluginInstances[id].setAuth(projectId, credentials); + const cacheTtl = await resolveCacheTtl(cacheTtlResolver); + if (cacheTtl !== undefined) { + pluginInstances[id].cacheTtl = cacheTtl; + } }, }); @@ -333,7 +350,19 @@ plugin.registerResolverFunction({ throw new SchemaError('No secret reference provided'); } - const secretValue = await selectedInstance.readSecret(secretRef); - return secretValue; + // check cache if cacheTtl is configured and cache is available + if (selectedInstance.cacheTtl !== undefined && pluginCache) { + const cacheKey = `gsm:${instanceId}:${secretRef}`; + const cached = await pluginCache.get(cacheKey); + if (cached !== undefined) { + debug('cache hit for %s', cacheKey); + return cached; + } + const secretValue = await selectedInstance.readSecret(secretRef); + await pluginCache.set(cacheKey, secretValue, selectedInstance.cacheTtl); + return secretValue; + } + + return await selectedInstance.readSecret(secretRef); }, }); diff --git a/packages/varlock-website/src/content/docs/getting-started/introduction.mdx b/packages/varlock-website/src/content/docs/getting-started/introduction.mdx index 02dbc0b9b..819d1c082 100644 --- a/packages/varlock-website/src/content/docs/getting-started/introduction.mdx +++ b/packages/varlock-website/src/content/docs/getting-started/introduction.mdx @@ 
-12,7 +12,7 @@ Varlock aims to be the most comprehensive environment variable management tool. - **[AI-Safe Config](/guides/ai-tools/)** - Your `.env.schema` gives AI agents full context on your config without ever exposing secret values. Prevent leaks to AI servers by design, and scan for leaked secrets with `varlock scan` - **[Security](/guides/secrets/)** - Automatic log redaction for sensitive values, leak detection in bundled code and server responses, and proactive scanning via `varlock scan` - **[Validation & Type Safety](/reference/data-types/)** - Powerful validation capabilities with clear error messages, plus automatic type generation for IntelliSense support -- **[Secure Secrets](/guides/secrets/)** - Load secrets from provider [plugins](/plugins/overview/) (e.g., [1Password](/plugins/1password/), [AWS](/plugins/aws-secrets/), [HashiCorp Vault](/plugins/hashicorp-vault/)) or any CLI tool using [exec()](/reference/functions/#exec) +- **[Secure Secrets](/guides/secrets/)** - Built-in [device-local encryption](/guides/secrets/#local-encryption) with hardware-backed security (Secure Enclave, TPM), plus provider [plugins](/plugins/overview/) (e.g., [1Password](/plugins/1password/), [AWS](/plugins/aws-secrets/), [HashiCorp Vault](/plugins/hashicorp-vault/)) or any CLI tool using [exec()](/reference/functions/#exec) - **[Multi-Environment Management](/guides/environments/)** - Flexible environment handling with support for environment-specific files, local overrides, and value composition - **[Value Composition](/reference/functions/)** - Compose values together using functions, references, and external data sources - **[Framework Integrations](/integrations/overview/)** - Official integrations for Next.js, Vite, Astro, and more, plus support for any language via `varlock run` diff --git a/packages/varlock-website/src/content/docs/getting-started/usage.mdx b/packages/varlock-website/src/content/docs/getting-started/usage.mdx index ca3859290..fabc34254 100644 
--- a/packages/varlock-website/src/content/docs/getting-started/usage.mdx
+++ b/packages/varlock-website/src/content/docs/getting-started/usage.mdx
@@ -41,4 +41,22 @@ See the [`varlock load` CLI Reference](/reference/cli-commands/#load) for more i
 Executes a command in a child process, injecting your resolved and validated environment variables. This is useful when a code-level integration is not possible. For example, if you're using a database migration tool, you can use `varlock run` to run the migration tool with the correct environment variables. Or if you're using a non-js/ts language, you can use `varlock run` to run a command and inject validated environment variables.
 
-See the [`varlock run` CLI Reference](/reference/cli-commands/#run) for more information.
\ No newline at end of file
+See the [`varlock run` CLI Reference](/reference/cli-commands/#run) for more information.
+
+### `varlock encrypt`
+
+
+
+Encrypts sensitive values using device-local encryption. Use `--file` to encrypt all `@sensitive` plaintext values in a `.env` file in-place, or run without arguments for interactive single-value encryption.
+
+Encrypted values are stored as `varlock("local:<encrypted-payload>")` and are automatically decrypted during `varlock load` or `varlock run`.
+
+See the [`varlock encrypt` CLI Reference](/reference/cli-commands/#encrypt) and the [secrets guide](/guides/secrets/#local-encryption) for more information.
+
+### `varlock reveal`
+
+
+
+Securely view or copy decrypted values of `@sensitive` environment variables. Values are shown in an alternate screen buffer to prevent scrollback capture.
+
+See the [`varlock reveal` CLI Reference](/reference/cli-commands/#reveal) for more information.
\ No newline at end of file diff --git a/packages/varlock-website/src/content/docs/guides/secrets.mdx b/packages/varlock-website/src/content/docs/guides/secrets.mdx index cf526aa71..2e5f4d41b 100644 --- a/packages/varlock-website/src/content/docs/guides/secrets.mdx +++ b/packages/varlock-website/src/content/docs/guides/secrets.mdx @@ -5,21 +5,6 @@ description: Best practices for managing secrets and sensitive environment varia `varlock` uses the term _sensitive_ to describe any value that should not be exposed to the outside world. This includes secret api keys, passwords, and other generally sensitive information. Instead of relying on prefixes (e.g., `NEXT_PUBLIC_`) to know which items may be "public", varlock relies on `@decorators` to mark sensitive items explicitly. -{/* For local development, `varlock` allows you to encrypt sensitive values in your `.env.*` files using `varlock encrypt` and then decrypt them using `varlock load` or `varlock run`. - -This (currently) works exclusively for local development since it relies on encryption keys stored on your system. */} - -:::tip[Coming soon] -We'll be adding support for our own trustless, cloud-based secret storage in the very near future. -::: - -{/* ## Encryption via `varlock` - -1. [Install](/getting-started/installation) `varlock` including the desktop app -2. Add sensitive values to your `.env.*` file(s) -3. Encrypt them using `varlock encrypt` -4. Decrypt them using `varlock load` or `varlock run` */} - ## Marking `@sensitive` items Whether each item is sensitive or not is controlled by the [`@defaultSensitive`](/reference/root-decorators/#defaultsensitive) root decorator and the [`@sensitive`](/reference/item-decorators/#sensitive) item decorator. Whether you want to default to sensitive or not, or infer based on key names is up to you. 
For example: @@ -32,6 +17,98 @@ NON_SECRET_FOO= SECRET_FOO= ``` +## Local encryption via `varlock()` ||local-encryption|| + +Varlock includes built-in device-local encryption that lets you store encrypted secrets directly in your `.env` files. Encrypted values are safe to commit to version control — they can only be decrypted on machines that have the corresponding encryption key. + +This is ideal for: +- **Solo developers** who want encrypted secrets in their repo without relying on an external service +- **Small teams** where each developer encrypts their own local secrets +- **Avoiding plaintext** `.env.local` files on disk + +### How it works + +Secrets are encrypted using [ECIES](https://en.wikipedia.org/wiki/Integrated_Encryption_Scheme) (Elliptic Curve Integrated Encryption Scheme) with a device-local key. The best available encryption backend is selected automatically: + +| Platform | Backend | Key Storage | Biometric | +|----------|---------|-------------|-----------| +| macOS | Secure Enclave | Hardware Secure Enclave | Touch ID / Face ID | +| Windows | DPAPI + Windows Hello | Windows credential store | Windows Hello (face/fingerprint/PIN) | +| Linux | TPM2 | TPM2 hardware (sealed keys) | No | +| All platforms | File-based fallback | `~/.varlock/` directory | No | + +On macOS, the Secure Enclave provides hardware-backed encryption — keys cannot be extracted from the device, and decryption can require biometric authentication via Touch ID. On Windows, keys are protected via DPAPI (scoped to the current user) with optional Windows Hello biometric verification. On Linux, keys are sealed to the machine's TPM2 chip when available, falling back to file-based storage otherwise. + +### Quick start + +1. Add your secrets as plaintext in `.env.local` (which should be gitignored) +2. Run `varlock encrypt --file .env.local` to encrypt them in-place +3. The values are replaced with `varlock("local:")` calls +4. 
These encrypted values are automatically decrypted when you run `varlock load` or `varlock run` + +```env-spec title=".env.local" +# Before encryption +# @sensitive +API_KEY=sk-secret-key-12345 + +# After running `varlock encrypt --file .env.local` +# @sensitive +API_KEY=varlock("local:BGJ2a3...") +``` + +### Using prompt mode + +Instead of encrypting existing values, you can use `varlock(prompt)` as a placeholder that will prompt you to enter a secret on first load: + +```env-spec title=".env.local" +# @sensitive +API_KEY=varlock(prompt) +``` + +When varlock encounters this during `varlock load` or `varlock run`, it will: +1. Prompt you to enter the secret value (via a native dialog on macOS, or a terminal prompt otherwise) +2. Encrypt the value +3. Automatically replace `varlock(prompt)` with `varlock("local:")` in the file + +### Encrypting values + +Use [`varlock encrypt`](/reference/cli-commands/#encrypt) to encrypt values: + +```bash +# Interactive: encrypt a single value +varlock encrypt + +# Batch: encrypt all sensitive plaintext values in a file +varlock encrypt --file .env.local +``` + +In batch mode, only items marked as `@sensitive` in your schema are considered for encryption. + +### Revealing encrypted values + +Use [`varlock reveal`](/reference/cli-commands/#reveal) to securely view decrypted values: + +```bash +# Interactive picker +varlock reveal + +# Reveal a specific variable +varlock reveal API_KEY + +# Copy to clipboard (auto-clears after 10s) +varlock reveal API_KEY --copy +``` + +Values are shown in an alternate terminal screen buffer so they don't appear in your scrollback history. + +### Locking the session + +On platforms with biometric authentication (macOS Secure Enclave), decryption sessions are cached to avoid repeated Touch ID prompts. 
Use [`varlock lock`](/reference/cli-commands/#lock) to invalidate the session when stepping away: + +```bash +varlock lock +``` + ## Loading secrets from external sources ### Using plugins (recommended) diff --git a/packages/varlock-website/src/content/docs/plugins/1password.mdx b/packages/varlock-website/src/content/docs/plugins/1password.mdx index 59dfaf34f..8de2bbdd8 100644 --- a/packages/varlock-website/src/content/docs/plugins/1password.mdx +++ b/packages/varlock-website/src/content/docs/plugins/1password.mdx @@ -187,6 +187,7 @@ Initializes an instance of the 1Password plugin - setting up options and authent - `token` (optional): service account token. Should be a reference to a config item of type `opServiceAccountToken`. - `allowAppAuth` (optional): boolean flag to enable authenticating using the local desktop app - `account` (optional): limits the `op` cli to connect to specific 1Password account (shorthand, sign-in address, account ID, or user ID) +- `cacheTtl` (optional): when set, resolved values from `op()` and `opLoadEnvironment()` are cached locally for the specified duration. Accepts the same format as the [`cache()` function](/reference/functions/#cache) — e.g., `"5m"`, `"1h"`, `"1d"`, or `0` for forever. 
```env-spec "@initOp" # @initOp(id=notProd, token=$OP_TOKEN, allowAppAuth=forEnv(dev), account=acmeco) @@ -194,6 +195,17 @@ Initializes an instance of the 1Password plugin - setting up options and authent # @type=opServiceAccountToken OP_TOKEN= ``` + +```env-spec "@initOp" title="With caching enabled" +# Cache all 1Password lookups for 1 hour +# @initOp(token=$OP_TOKEN, allowAppAuth=true, cacheTtl="1h") +``` + +Since `cacheTtl` is resolved at runtime, you can use dynamic values to conditionally enable caching: + +```env-spec "@initOp" title="Cache only in development" +# @initOp(token=$OP_TOKEN, allowAppAuth=true, cacheTtl=if(forEnv(dev), "1h")) +``` diff --git a/packages/varlock-website/src/content/docs/reference/cli-commands.mdx b/packages/varlock-website/src/content/docs/reference/cli-commands.mdx index e904ebc64..a16d63729 100644 --- a/packages/varlock-website/src/content/docs/reference/cli-commands.mdx +++ b/packages/varlock-website/src/content/docs/reference/cli-commands.mdx @@ -42,6 +42,10 @@ You can configure varlock's default behavior by adding a `varlock` key to your ` | `loadPath` | Path to a directory or specific `.env` file to use as the default entry point. Defaults to the current working directory if not set. Use a **directory path** (with trailing `/`) to automatically load all relevant files (`.env.schema`, `.env`, `.env.local`, etc.); a file path only loads that file and its explicit imports. Can be overridden by the `--path` CLI flag. Varlock looks for this config in the `package.json` in the current working directory only. | +:::note[Plugins] +While plugins cannot add additional CLI commands, they can extend varlock with additional [resolver functions](/reference/functions/), [data types](/reference/data-types/), and [decorators](/reference/item-decorators/). See the [Plugins guide](/guides/plugins/) for more information. +::: + ## Commands reference
@@ -73,6 +77,8 @@ varlock load [options] - `--show-all`: Shows all items, not just failing ones, when validation is failing - `--env`: Set the default environment flag (e.g., `--env production`), only useful if not using `@currentEnv` in `.env.schema` - `--path` / `-p`: Path to a specific `.env` file or directory to use as the entry point (overrides `varlock.loadPath` in `package.json`) +- `--clear-cache`: Clear the cache and re-resolve all values (writes new values back to cache) +- `--skip-cache`: Skip cache entirely for this invocation (no reads or writes) **Examples:** ```bash @@ -116,6 +122,8 @@ varlock run -- **Options:** - `--no-redact-stdout`: Disable stdout/stderr redaction to preserve TTY detection for interactive tools - `--path` / `-p`: Path to a specific `.env` file or directory to use as the entry point +- `--clear-cache`: Clear the cache and re-resolve all values +- `--skip-cache`: Skip cache entirely for this invocation **Examples:** ```bash @@ -238,6 +246,121 @@ You can also set it up manually -- see the [Secrets guide](/guides/secrets/#scan
+
+### `varlock encrypt` ||encrypt||
+
+Encrypts sensitive values using device-local encryption. Encrypted values are stored in `.env` files using the `varlock()` resolver function and are automatically decrypted at load time.
+
+On macOS, encryption is hardware-backed via the Secure Enclave (with Touch ID / biometric authentication). On Windows and Linux, platform-specific secure storage is used. A pure-JavaScript file-based fallback is available on all platforms.
+
+```bash
+varlock encrypt [options]
+```
+
+**Options:**
+- `--file`: Path to a `.env` file — encrypts all sensitive plaintext values in-place
+
+**Examples:**
+```bash
+# Interactive mode: encrypt a single value
+varlock encrypt
+
+# Encrypt all sensitive plaintext values in a .env file
+varlock encrypt --file .env.local
+```
+
+In interactive mode, you'll be prompted to enter a value, and the encrypted output will be printed for you to copy into your `.env.local` file:
+```
+SOME_SENSITIVE_KEY=varlock("local:<encrypted-payload>")
+```
+
+In file mode, varlock loads the env graph, identifies `@sensitive` items with plaintext values, and lets you select which to encrypt in-place.
+
+:::tip
+Use `varlock encrypt --file .env.local` after adding new secrets to quickly encrypt them all at once.
+:::
+
+:::note[Alternative: prompt mode]
+Instead of encrypting values ahead of time, you can use `varlock(prompt)` as a placeholder in your `.env` files. On first load, varlock will prompt you to enter the secret and automatically replace the placeholder with the encrypted value. See the [`varlock()` function reference](/reference/functions/#varlock) for details.
+:::
+
+ +
+### `varlock reveal` ||reveal|| + +Securely view or copy the decrypted values of `@sensitive` environment variables. Values are displayed in an alternate terminal screen buffer so they don't persist in your scrollback history. + +```bash +varlock reveal [VAR_NAME] [options] +``` + +**Options:** +- `--copy`: Copy the value to clipboard instead of displaying (auto-clears after 10s) +- `--path` / `-p`: Path to a specific `.env` file or directory to use as the entry point +- `--env`: Set the environment (e.g., production, development, etc) + +**Examples:** +```bash +# Interactive picker to browse and reveal sensitive values +varlock reveal + +# Reveal a specific variable +varlock reveal MY_SECRET + +# Copy a value to clipboard (auto-clears after 10s) +varlock reveal MY_SECRET --copy +``` + +:::note +Non-sensitive values are not shown by `varlock reveal`. Use [`varlock printenv`](#printenv) for non-sensitive values, or to inject a sensitive value into a command. +::: + +
+ +
+### `varlock lock` ||lock|| + +Locks the encryption daemon, requiring biometric authentication (e.g., Touch ID) for the next decrypt operation. This invalidates the current biometric session cache. + +```bash +varlock lock +``` + +This command only has an effect when using a biometric-enabled encryption backend (macOS Secure Enclave or Windows Hello). On other backends, it will display a message and exit. + +:::tip +Use `varlock lock` when stepping away from your machine to ensure the next person to decrypt a secret must authenticate biometrically. +::: + +
+ +
+### `varlock cache` ||cache||
+
+Manage the encrypted value cache used by [`cache()`](/reference/functions/#cache) and plugin authors. Shows cache status by default, or clears cached entries.
+
+```bash
+varlock cache [clear] [options]
+```
+
+**Options:**
+- `--plugin <plugin-name>`: When clearing, only remove entries for a specific plugin
+
+**Examples:**
+```bash
+# Show cache status (entry counts, file size, location)
+varlock cache
+
+# Clear all cache entries
+varlock cache clear
+
+# Clear cache for a specific plugin only
+varlock cache clear --plugin 1password
+```
+
+
+
### `varlock typegen` ||typegen|| diff --git a/packages/varlock-website/src/content/docs/reference/functions.mdx b/packages/varlock-website/src/content/docs/reference/functions.mdx index 0b4ad8e86..86dc290e6 100644 --- a/packages/varlock-website/src/content/docs/reference/functions.mdx +++ b/packages/varlock-website/src/content/docs/reference/functions.mdx @@ -22,12 +22,10 @@ CONFIG=exec(`aws ssm get-parameter --name "/config/${APP_ENV}" --with-decryption ``` -Currently, there are built-in utility functions, and soon there will be functions to handle values encrypted using varlock provided tools. - -Plugins may also register additional resolvers - which can be used to generate and transform values, or fetch data from external providers. - +There are built-in utility functions, [random value generators](#random-value-generators), a [`cache()`](#cache) function for persisting values across runs, a built-in `varlock()` function for device-local encryption, and plugin-provided resolver functions that can fetch data from external providers. See the [Plugins guide](/guides/plugins/) for more information on plugin-provided functions.
+
### `ref()` @@ -221,4 +219,157 @@ API_URL=if(isEmpty($CUSTOM_API_URL), "https://api.default.com", $CUSTOM_API_URL) ```
+## Random value generators + +These functions generate random values using cryptographically secure randomness (`node:crypto`). They are typically used with [`cache()`](#cache) to generate a value once and persist it across runs. + +
+### `randomInt()` + +Generates a random integer. By default generates between `0` and `2,147,483,647` (int32 max). + +- With 1 arg: generates between `0` and `max` (inclusive) +- With 2 args: generates between `min` and `max` (inclusive) + +```env-spec "randomInt" +# Random port between 3000 and 4000 +DEV_PORT=cache(randomInt(3000, 4000)) + +# Random integer up to 1000 +SEED=cache(randomInt(1000)) + +# Random integer with default range +BIG_NUMBER=cache(randomInt()) +``` +
+ +
+### `randomFloat()` + +Generates a random floating-point number. By default generates between `0` and `1` with 2 decimal places. + +- With 1 arg: generates between `0` and `max` +- With 2 args: generates between `min` and `max` +- `precision=N` option controls decimal places (default: 2) + +```env-spec "randomFloat" +# Random float between 0 and 1 (default) +RATE=cache(randomFloat()) + +# Random float between 10 and 20 with 4 decimal places +THRESHOLD=cache(randomFloat(10, 20, precision=4)) +``` +
+ +
+### `randomUuid()` + +Generates a random UUID v4. + +```env-spec "randomUuid" +# Unique identifier for this environment +INSTANCE_ID=cache(randomUuid()) +``` +
+ +
+### `randomHex()` + +Generates a random hexadecimal string. Argument is the byte length (each byte = 2 hex characters). Default is `16` bytes (32 hex chars). + +```env-spec "randomHex" +# 64-character hex string (32 bytes) +ENCRYPTION_KEY=cache(randomHex(32)) + @sensitive + +# 32-character hex string (default 16 bytes) +SESSION_SECRET=cache(randomHex()) + @sensitive +``` +
+ +
+### `randomString()` + +Generates a random alphanumeric string. Default length is `16` characters using `A-Za-z0-9`. + +- First arg: character length (default: 16) +- `charset=S` option: custom character set to draw from + +```env-spec "randomString" +# 32-character alphanumeric string +API_SECRET=cache(randomString(32)) + @sensitive + +# 8-character string from custom charset +PIN_CODE=cache(randomString(8, charset="0123456789")) +``` +
+ +## Caching + +
+### `cache()` + +Wraps any resolver to cache its result across invocations. Cached values are encrypted at rest using varlock's [device-local encryption](/guides/secrets/#local-encryption). + +- First arg: the resolver to cache +- `ttl=D` option: how long to cache (default: forever). Supports `s`, `m`, `h`, `d`, `w` suffixes, or `0` for forever. +- `key=S` option: use an explicit cache key instead of the auto-generated one. Useful when the same cached value should be shared across files or when you want a stable key that doesn't change with resolver edits. + +The cache automatically invalidates when you change the wrapped resolver expression (unless using a custom `key`). + +```env-spec "cache" +# Cache a random UUID forever (until manually cleared) +INSTANCE_ID=cache(randomUuid()) + +# Cache an API token for 1 hour +AUTH_TOKEN=cache(exec(`get-token.sh`), ttl="1h") + +# Cache for 30 minutes +TEMP_KEY=cache(randomHex(32), ttl="30m") + +# Use an explicit cache key (shared across files/projects) +SHARED_TOKEN=cache(exec(`fetch-org-token.sh`), ttl="1d", key="org-auth-token") +``` + +Use the [`varlock cache`](/reference/cli-commands/#cache) CLI command to view cache status or clear cached values. + +Use `--clear-cache` or `--skip-cache` flags on `varlock load` / `varlock run` to control caching behavior for a single invocation. + +:::tip +Plugin authors can also use the cache API via `plugin.cache.get()` / `plugin.cache.set()` to cache expensive API calls. See the [Plugins guide](/guides/plugins/) for more information. +::: +
+ + +
+### `varlock()`
+
+Decrypts a locally encrypted value, or prompts for a new secret to encrypt. This is the built-in resolver for varlock's [device-local encryption](/guides/secrets/#local-encryption) feature.
+
+**Decrypt mode** — pass an encrypted payload to decrypt at load time:
+```env-spec "varlock"
+# @sensitive
+API_KEY=varlock("local:<encrypted-payload>")
+```
+
+**Prompt mode** — prompts the user to enter a secret, encrypts it, and writes the encrypted value back to the source file:
+```env-spec "varlock"
+# @sensitive
+API_KEY=varlock(prompt)
+# also valid as a key=value param:
+API_KEY=varlock(prompt=1)
+```
+
+On first run with `prompt` mode, you'll be asked to enter the secret value. Once entered, the file is automatically updated with the encrypted payload. On macOS with Secure Enclave, a native dialog with biometric authentication is used.
+
+Values are encrypted using the best available backend on your platform — see the [secrets guide](/guides/secrets/#local-encryption) for details.
+
+:::tip
+You don't need to write `varlock()` calls by hand. Use [`varlock encrypt`](/reference/cli-commands/#encrypt) to encrypt values interactively or in bulk.
+:::
+
+ +
diff --git a/packages/varlock/.gitignore b/packages/varlock/.gitignore index 5b1113d9c..e6f1c68f1 100644 --- a/packages/varlock/.gitignore +++ b/packages/varlock/.gitignore @@ -1,2 +1,3 @@ dist -dist-sea \ No newline at end of file +dist-sea +native-bins diff --git a/packages/varlock/package.json b/packages/varlock/package.json index cf0df48ab..5fad62d93 100644 --- a/packages/varlock/package.json +++ b/packages/varlock/package.json @@ -44,7 +44,8 @@ ], "files": [ "/bin", - "/dist" + "/dist", + "/native-bins" ], "bin": { "varlock": "./bin/cli.js" diff --git a/packages/varlock/scripts/build-binaries.ts b/packages/varlock/scripts/build-binaries.ts index 669c63e4f..9e18248a9 100644 --- a/packages/varlock/scripts/build-binaries.ts +++ b/packages/varlock/scripts/build-binaries.ts @@ -1,10 +1,12 @@ import { execSync } from 'node:child_process'; import path from 'node:path'; +import fs from 'node:fs'; import { fileURLToPath } from 'node:url'; const __dirname = path.dirname(fileURLToPath(import.meta.url)); const PKG_DIR = path.resolve(__dirname, '..'); const DIST_DIR = 'dist-sea'; +const NATIVE_BINS_DIR = path.join(PKG_DIR, 'native-bins'); const ENTRY = 'src/cli/cli-executable.ts'; const ALL_TARGETS = [ @@ -67,6 +69,39 @@ if (devMode) { ENTRY, ].join(' ')); + // Bundle platform-specific native binaries alongside the CLI binary + const isMac = archiveName.startsWith('macos-'); + if (isMac) { + const appBundleSrc = path.join(NATIVE_BINS_DIR, 'darwin', 'VarlockEnclave.app'); + if (fs.existsSync(appBundleSrc)) { + console.log(' Bundling macOS native binary (VarlockEnclave.app)'); + exec(`cp -R "${appBundleSrc}" "${targetDir}/VarlockEnclave.app"`); + } else { + console.log(` Warning: macOS native binary not found at ${appBundleSrc}, skipping`); + } + } + + // Bundle Rust native binary for Linux/Windows + let nativeBinSubdir: string | null = null; + if (isWin) { + nativeBinSubdir = 'win32-x64'; + } else if (archiveName.startsWith('linux-musl-')) { + nativeBinSubdir = 
`linux-${archiveName.replace('linux-musl-', '')}`; + } else if (archiveName.startsWith('linux-')) { + nativeBinSubdir = `linux-${archiveName.replace('linux-', '')}`; + } + + if (nativeBinSubdir && !isMac) { + const rustBinaryName = isWin ? 'varlock-local-encrypt.exe' : 'varlock-local-encrypt'; + const rustBinarySrc = path.join(NATIVE_BINS_DIR, nativeBinSubdir, rustBinaryName); + if (fs.existsSync(rustBinarySrc)) { + console.log(` Bundling Rust native binary (${nativeBinSubdir}/${rustBinaryName})`); + exec(`cp "${rustBinarySrc}" "${targetDir}/${rustBinaryName}"`); + } else { + console.log(` Warning: Rust native binary not found at ${rustBinarySrc}, skipping`); + } + } + // Archive let archive: string; let archiveCmd: string; diff --git a/packages/varlock/src/cli/cli-executable.ts b/packages/varlock/src/cli/cli-executable.ts index 4584a4aad..99ee2ed57 100644 --- a/packages/varlock/src/cli/cli-executable.ts +++ b/packages/varlock/src/cli/cli-executable.ts @@ -15,7 +15,9 @@ import { commandSpec as initCommandSpec } from './commands/init.command'; import { commandSpec as loadCommandSpec } from './commands/load.command'; import { commandSpec as runCommandSpec } from './commands/run.command'; import { commandSpec as printenvCommandSpec } from './commands/printenv.command'; -// import { commandSpec as encryptCommandSpec } from './commands/encrypt.command'; +import { commandSpec as encryptCommandSpec } from './commands/encrypt.command'; +import { commandSpec as lockCommandSpec } from './commands/lock.command'; +import { commandSpec as revealCommandSpec } from './commands/reveal.command'; // import { commandSpec as doctorCommandSpec } from './commands/doctor.command'; import { commandSpec as helpCommandSpec } from './commands/help.command'; import { commandSpec as telemetryCommandSpec } from './commands/telemetry.command'; @@ -23,6 +25,7 @@ import { commandSpec as explainCommandSpec } from './commands/explain.command'; import { commandSpec as scanCommandSpec } from 
'./commands/scan.command'; import { commandSpec as typegenCommandSpec } from './commands/typegen.command'; import { commandSpec as installPluginCommandSpec } from './commands/install-plugin.command'; +import { commandSpec as cacheCommandSpec } from './commands/cache.command'; // import { commandSpec as loginCommandSpec } from './commands/login.command'; // import { commandSpec as pluginCommandSpec } from './commands/plugin.command'; @@ -52,7 +55,9 @@ subCommands.set('init', buildLazyCommand(initCommandSpec, async () => await impo subCommands.set('load', buildLazyCommand(loadCommandSpec, async () => await import('./commands/load.command'))); subCommands.set('run', buildLazyCommand(runCommandSpec, async () => await import('./commands/run.command'))); subCommands.set('printenv', buildLazyCommand(printenvCommandSpec, async () => await import('./commands/printenv.command'))); -// subCommands.set('encrypt', buildLazyCommand(encryptCommandSpec, async () => await import('./commands/encrypt.command'))); +subCommands.set('encrypt', buildLazyCommand(encryptCommandSpec, async () => await import('./commands/encrypt.command'))); +subCommands.set('lock', buildLazyCommand(lockCommandSpec, async () => await import('./commands/lock.command'))); +subCommands.set('reveal', buildLazyCommand(revealCommandSpec, async () => await import('./commands/reveal.command'))); // subCommands.set('doctor', buildLazyCommand(doctorCommandSpec, async () => await import('./commands/doctor.command'))); subCommands.set('explain', buildLazyCommand(explainCommandSpec, async () => await import('./commands/explain.command'))); subCommands.set('help', buildLazyCommand(helpCommandSpec, async () => await import('./commands/help.command'))); @@ -60,6 +65,7 @@ subCommands.set('telemetry', buildLazyCommand(telemetryCommandSpec, async () => subCommands.set('scan', buildLazyCommand(scanCommandSpec, async () => await import('./commands/scan.command'))); subCommands.set('typegen', buildLazyCommand(typegenCommandSpec, 
async () => await import('./commands/typegen.command'))); subCommands.set('install-plugin', buildLazyCommand(installPluginCommandSpec, async () => await import('./commands/install-plugin.command'))); +subCommands.set('cache', buildLazyCommand(cacheCommandSpec, async () => await import('./commands/cache.command'))); // subCommands.set('login', buildLazyCommand(loginCommandSpec, async () => await import('./commands/login.command'))); // subCommands.set('plugin', buildLazyCommand(pluginCommandSpec, async () => await import('./commands/plugin.command'))); diff --git a/packages/varlock/src/cli/commands/cache.command.ts b/packages/varlock/src/cli/commands/cache.command.ts new file mode 100644 index 000000000..27c70c776 --- /dev/null +++ b/packages/varlock/src/cli/commands/cache.command.ts @@ -0,0 +1,206 @@ +import fs from 'node:fs'; +import ansis from 'ansis'; +import { define } from 'gunshi'; +import { isCancel } from '@clack/prompts'; + +import { CacheStore } from '../../lib/cache'; +import { formatTimeAgo, formatDuration } from '../../lib/formatting'; +import * as localEncrypt from '../../lib/local-encrypt'; +import { select, confirm } from '../helpers/prompts'; +import { type TypedGunshiCommandFn } from '../helpers/gunshi-type-utils'; + +export const commandSpec = define({ + name: 'cache', + description: 'Manage the varlock cache', + args: { + plugin: { + type: 'string', + description: 'Clear cache for a specific plugin only', + }, + }, + examples: ` +Manage the encrypted value cache used by cache() and plugin authors. 
+ +Examples: + varlock cache # Interactive cache browser + varlock cache clear # Clear all cache entries + varlock cache clear --plugin 1password # Clear cache for specific plugin +`.trim(), +}); + +type CacheEntry = { key: string; cachedAt: number; expiresAt: number }; + +function formatEntryLabel(entry: CacheEntry): string { + const ttlMs = entry.expiresAt - entry.cachedAt; + const isForever = ttlMs > 50 * 365.25 * 86_400_000; + const ttlStr = isForever ? 'forever' : formatDuration(ttlMs); + const agoStr = formatTimeAgo(entry.cachedAt); + + const parts = entry.key.split(':'); + let line1: string; + const line2 = ansis.gray(` ttl: ${ttlStr} · cached ${agoStr}`); + + if (parts[0] === 'plugin') { + const pluginName = parts[1]; + const rest = parts.slice(2).join(':'); + line1 = `${ansis.magenta(`[${pluginName}]`)} ${rest}`; + } else if (parts[0] === 'resolver' && parts[1] === 'custom') { + line1 = `${ansis.cyan('[custom]')} ${parts.slice(2).join(':')}`; + } else if (parts[0] === 'resolver') { + const itemKey = parts[2]; + const resolverText = parts.slice(3).join(':'); + line1 = `${ansis.cyan(itemKey)} ${ansis.gray('=')} ${resolverText}`; + } else { + line1 = entry.key; + } + + return `${line1}\n ${line2}`; +} + +/** Group entries by their prefix (e.g., "plugin:1pass", "resolver") */ +function groupEntries(entries: Array): Record> { + const groups: Record> = {}; + for (const entry of entries) { + const firstColon = entry.key.indexOf(':'); + const secondColon = firstColon >= 0 ? entry.key.indexOf(':', firstColon + 1) : -1; + const prefix = secondColon >= 0 ? entry.key.slice(0, secondColon) : entry.key.slice(0, firstColon); + groups[prefix] ??= []; + groups[prefix].push(entry); + } + return groups; +} + +export const commandFn: TypedGunshiCommandFn = async (ctx) => { + const positionals = (ctx.positionals ?? []).slice(ctx.commandPath?.length ?? 
0); + const action = positionals[0]; + + if (!localEncrypt.keyExists()) { + console.log(ansis.gray(' No encryption key found — cache is not active.')); + return; + } + + const store = new CacheStore(); + + // non-interactive clear + if (action === 'clear') { + const pluginName = ctx.values.plugin; + let count: number; + + if (pluginName) { + count = store.clearByPrefix(`plugin:${pluginName}:`); + console.log(` Cleared ${count} cache entries for plugin "${pluginName}"`); + } else { + count = store.clearAll(); + console.log(` Cleared ${count} cache entries`); + } + return; + } + + // interactive mode (default) + while (true) { + const entries = store.listEntries(); + + if (entries.length === 0) { + console.log(ansis.gray('\n Cache is empty.\n')); + return; + } + + const groups = groupEntries(entries); + const filePath = store.getFilePath(); + const fileSize = fs.existsSync(filePath) ? fs.statSync(filePath).size : 0; + const sizeStr = fileSize < 1024 ? `${fileSize}B` : `${(fileSize / 1024).toFixed(1)}KB`; + console.log(`\n ${ansis.bold(`${entries.length} cached entries`)} ${ansis.gray(`(${sizeStr})`)}`); + + // build top-level menu: one option per group + global actions + const options: Array<{ value: string; label: string }> = []; + + for (const [prefix, items] of Object.entries(groups)) { + const label = prefix.startsWith('plugin:') + ? 
`${ansis.magenta(`[${prefix.replace('plugin:', '')}]`)} plugin cache` + : `${ansis.cyan('[resolver]')} cached values`; + options.push({ + value: `group:${prefix}`, + label: `${label} ${ansis.gray(`(${items.length} entries)`)}`, + }); + } + + options.push({ value: '__clear_all__', label: ansis.red(`Clear all ${entries.length} entries`) }); + options.push({ value: '__exit__', label: ansis.gray('Exit') }); + + const selected = await select({ + message: 'Select a group to browse or an action:', + options, + }); + + if (isCancel(selected) || selected === '__exit__') return; + + if (selected === '__clear_all__') { + const confirmed = await confirm({ + message: `Clear all ${entries.length} cache entries?`, + initialValue: false, + }); + if (isCancel(confirmed) || !confirmed) continue; + const count = store.clearAll(); + console.log(` Cleared ${count} entries`); + return; + } + + if (typeof selected === 'string' && selected.startsWith('group:')) { + const prefix = selected.replace('group:', ''); + const groupLabel = prefix.startsWith('plugin:') + ? `${prefix.replace('plugin:', '')} plugin` + : 'resolver cache'; + + // show all entries in the group with clear-all and delete options + while (true) { + const current = store.listEntries().filter((e) => { + const k = e.key; + const fc = k.indexOf(':'); + const sc = fc >= 0 ? k.indexOf(':', fc + 1) : -1; + const p = sc >= 0 ? 
k.slice(0, sc) : k.slice(0, fc); + return p === prefix; + }); + if (current.length === 0) { + console.log(ansis.gray(' No entries remaining in this group.')); + break; + } + + const entryOptions = [ + ...current.map((entry) => ({ + value: entry.key, + label: formatEntryLabel(entry), + })), + { value: '__clear_group__', label: ansis.red(`Clear all ${current.length} entries`) }, + { value: '__back__', label: ansis.gray('← Back') }, + ]; + + const entrySelected = await select({ + message: `${groupLabel} — ${current.length} entries:`, + options: entryOptions, + }); + + if (isCancel(entrySelected) || entrySelected === '__back__') break; + + if (entrySelected === '__clear_group__') { + const confirmed = await confirm({ + message: `Clear all ${current.length} entries in "${prefix}"?`, + initialValue: false, + }); + if (isCancel(confirmed) || !confirmed) continue; + store.clearByPrefix(`${prefix}:`); + console.log(ansis.gray(` Cleared ${current.length} entries`)); + break; + } + + // delete individual entry + const confirmed = await confirm({ + message: `Delete "${entrySelected}"?`, + initialValue: true, + }); + if (isCancel(confirmed) || !confirmed) continue; + store.delete(entrySelected); + console.log(ansis.gray(' Deleted')); + } + } + } +}; diff --git a/packages/varlock/src/cli/commands/encrypt.command.ts b/packages/varlock/src/cli/commands/encrypt.command.ts index ee9d926ea..eb724da1d 100644 --- a/packages/varlock/src/cli/commands/encrypt.command.ts +++ b/packages/varlock/src/cli/commands/encrypt.command.ts @@ -1,122 +1,167 @@ - import { define } from 'gunshi'; import { isCancel, password } from '@clack/prompts'; - -import { VarlockNativeAppClient } from '../../lib/native-app-client'; +import ansis from 'ansis'; +import path from 'node:path'; +import fs from 'node:fs'; + +import { + ParsedEnvSpecStaticValue, + ParsedEnvSpecFunctionCall, +} from '@env-spec/parser'; +import { FileBasedDataSource } from '../../env-graph'; +import { loadVarlockEnvGraph } from 
'../../lib/load-graph'; import { type TypedGunshiCommandFn } from '../helpers/gunshi-type-utils'; +import { CliExitError } from '../helpers/exit-error'; +import { multiselect } from '../helpers/prompts'; import { gracefulExit } from 'exit-hook'; +import * as localEncrypt from '../../lib/local-encrypt'; export const commandSpec = define({ name: 'encrypt', - description: 'Encrypt environment variables in your .env file', - args: {}, + description: 'Encrypt a value using device-local encryption', + args: { + 'key-id': { + type: 'string', + description: 'Encryption key ID (default: varlock-default)', + default: 'varlock-default', + }, + file: { + type: 'string', + description: 'Path to a .env file — encrypts all sensitive plaintext values in-place', + }, + }, }); +async function encryptFile(keyId: string, filePath: string) { + const resolvedPath = path.resolve(filePath); + if (!fs.existsSync(resolvedPath)) { + throw new CliExitError(`File not found: ${resolvedPath}`); + } + + // Load the full env graph and resolve to get sensitivity info from the schema + const envGraph = await loadVarlockEnvGraph(); + await envGraph.resolveEnvValues(); + + // Find the data source matching the target file + const targetSource = envGraph.sortedDataSources.find( + (s) => s instanceof FileBasedDataSource && s.fullPath === resolvedPath, + ) as FileBasedDataSource | undefined; + + if (!targetSource) { + throw new CliExitError( + `File "${filePath}" is not part of the loaded env graph`, + { suggestion: 'Make sure the file is in the project directory or imported by your schema.' 
}, + ); + } + + // Find sensitive items that have plaintext static values in this file + const itemsToEncrypt: Array<{ key: string; value: string }> = []; + + for (const [key, itemDef] of Object.entries(targetSource.configItemDefs)) { + const graphItem = envGraph.configSchema[key]; + if (!graphItem?.isSensitive) continue; + + // Skip items already using varlock() or another function call + if (itemDef.parsedValue instanceof ParsedEnvSpecFunctionCall) continue; + + // Only encrypt items with actual static string values + if (!(itemDef.parsedValue instanceof ParsedEnvSpecStaticValue)) continue; + const val = itemDef.parsedValue.unescapedValue; + if (val === undefined || val === '' || typeof val !== 'string') continue; + + itemsToEncrypt.push({ key, value: val }); + } + + if (itemsToEncrypt.length === 0) { + console.log('No sensitive plaintext values found to encrypt.'); + return; + } + + console.log('Only items marked as @sensitive in the schema are shown.'); + console.log('If a key is missing, add @sensitive to it in your schema file.\n'); + + const selected = await multiselect({ + message: `Confirm values to encrypt in ${filePath} ${ansis.gray('(use arrows, space to toggle, enter to confirm)')}`, + options: itemsToEncrypt.map((item) => ({ + value: item.key, + label: item.key, + })), + initialValues: itemsToEncrypt.map((item) => item.key), + }); + + if (isCancel(selected)) return gracefulExit(); + + const selectedKeys = new Set(selected as Array); + const filteredItems = itemsToEncrypt.filter((item) => selectedKeys.has(item.key)); + + if (filteredItems.length === 0) { + console.log('No items selected.'); + return; + } + + console.log(''); + + // Encrypt each value and write back using string replacement on the raw file. + // We re-read each time since prior replacements modify the file. 
+ let encryptedCount = 0; + for (const item of filteredItems) { + const ciphertext = await localEncrypt.encryptValue(item.value, keyId); + const prefixed = `local:${ciphertext}`; + + const currentContents = fs.readFileSync(resolvedPath, 'utf-8'); + // Match the line for this key and replace the static value with varlock("local:...") + const escaped = item.key.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); + const pattern = new RegExp(`^(${escaped}\\s*=\\s*).*$`, 'm'); + const updatedContents = currentContents.replace(pattern, `$1varlock("${prefixed}")`); + + if (updatedContents !== currentContents) { + fs.writeFileSync(resolvedPath, updatedContents); + encryptedCount++; + console.log(` Encrypted: ${item.key}`); + } + } + + console.log(`\nEncrypted ${encryptedCount} value${encryptedCount !== 1 ? 's' : ''} in ${filePath}`); +} + export const commandFn: TypedGunshiCommandFn = async (ctx) => { + const keyId = String(ctx.values['key-id'] || 'varlock-default'); + const backend = localEncrypt.getBackendInfo(); + + try { + await localEncrypt.ensureKey(keyId); + } catch (err) { + if (err instanceof CliExitError) throw err; + throw new CliExitError( + `Failed to check/create encryption key: ${err instanceof Error ? err.message : err}`, + ); + } + + console.log(`Using ${backend.type} backend (${backend.hardwareBacked ? 'hardware-backed' : 'file-based'})`); + + const filePath = ctx.values.file; + + // --file mode: encrypt all sensitive plaintext values in a .env file + if (filePath) { + await encryptFile(keyId, filePath); + return; + } + + // Interactive single-value mode console.log(''); - console.log('🧙 Encrypting environment variables... ✨'); - // intro('🧙 Encrypting environment variables... 
✨'); const rawValue = await password({ message: 'Enter the value you want to encrypt' }); if (isCancel(rawValue)) return gracefulExit(); - const client = new VarlockNativeAppClient(); - await client.initializeSocket(); - const encryptedValue = await client.encrypt(rawValue); - - console.log('Copy this into your .env.local file and rename the key appropriately:\n'); - console.log(`SOME_SENSITIVE_KEY=varlock("${encryptedValue}")`); - - // const envGraph = await loadEnvGraph(); - // await envGraph.resolveEnvValues(); - // const resolvedEnv = envGraph.getResolvedEnvObject(); - - // TODO: need to reimplement using the new parser - - // const client = new VarlockNativeAppClient(); - // await client.initializeSocket(); - - // for (const envFile of loadedEnv.files) { - // let changeCount = 0; - // for (const itemKey in envFile.items) { - // const item = envFile.items[itemKey]; - // if (item.decorators?.sensitive) { - // if ('value' in item && item.value) { - // console.log('Encrypting', itemKey, envFile.path); - // const encryptedValue = await client.encrypt(item.value); - // delete item.value; - // (item as any).resolverName = 'varlock'; - // (item as any).resolverArgs = [encryptedValue]; - // changeCount++; - // } - // } else { - // if ('resolverName' in item && item.resolverName === 'varlock') { - // console.log('Decrypting', itemKey, envFile.path); - // const encryptedValue = item.resolverArgs[0]; - // if (typeof encryptedValue !== 'string') { - // throw new Error('Expected encrypted value to be a string'); - // } - // const decryptedValue = await client.decrypt(encryptedValue); - // (item as any).value = decryptedValue; - // delete (item as any).resolverName; - // delete (item as any).resolverArgs; - // changeCount++; - // } - // } - // } - - // const updatedEnvFileStr = dumpDotEnvContents(envFile.parsedContents); - // await fs.writeFile(envFile.path, updatedEnvFileStr); - - // log.success(`Updated ${changeCount} items in ${envFile.path}`); - // } - - // 
console.log(loadedEnv); - - // const unencryptedKeys: Array = []; - // parsedEnv.forEach((item) => { - // if (item.type !== 'item') return; - // if (item.key.startsWith('_VARLOCK_')) return; - // if (!('value' in item) || !item.value) return; - - // unencryptedKeys.push(item.key); - // }); - - // if (unencryptedKeys.length === 0) { - // console.log('No items to encrypt. Exiting...'); - // return; - // } - - // const selectedKeys = await multiselect({ - // message: 'Select env item(s) to encrypt 🔏', - // options: unencryptedKeys.map((key) => ({ - // value: key, - // label: key, - // })), - // initialValues: unencryptedKeys, - // required: false, - // }); - - // if (isCancel(selectedKeys) || !selectedKeys.length) { - // console.log('No items selected. Exiting...'); - // return; - // } - - // for (const item of parsedEnv) { - // if (item.type === 'item' && selectedKeys.includes(item.key)) { - // if (!('value' in item) || !item.value) throw new Error(`Item ${item.key} has no value`); - // const encryptedValue = await client.encrypt(item.value); - // delete item.value; - // (item as any).resolverName = 'varlock'; - // (item as any).resolverArgs = [encryptedValue]; - // } - // } - - // // write the updated env file - - // const updatedEnvFileStr = dumpDotEnvContents(parsedEnv); - // await fs.writeFile(envFilePath, updatedEnvFileStr); - - // outro(`Encrypted ${selectedKeys.length} items!`); + try { + const ciphertext = await localEncrypt.encryptValue(rawValue, keyId); + + console.log('\nCopy this into your .env.local file and rename the key appropriately:\n'); + console.log(`SOME_SENSITIVE_KEY=varlock("local:${ciphertext}")`); + } catch (err) { + if (err instanceof CliExitError) throw err; + throw new CliExitError( + `Encryption failed: ${err instanceof Error ? 
err.message : err}`, + ); + } }; - diff --git a/packages/varlock/src/cli/commands/explain.command.ts b/packages/varlock/src/cli/commands/explain.command.ts index 9126b3bfd..f375d67af 100644 --- a/packages/varlock/src/cli/commands/explain.command.ts +++ b/packages/varlock/src/cli/commands/explain.command.ts @@ -3,7 +3,7 @@ import { define } from 'gunshi'; import { gracefulExit } from 'exit-hook'; import { loadVarlockEnvGraph } from '../../lib/load-graph'; -import { formattedValue } from '../../lib/formatting'; +import { formattedValue, formatTimeAgo, formatDuration } from '../../lib/formatting'; import { redactString } from '../../runtime/lib/redaction'; import { checkForSchemaErrors, checkForNoEnvFiles, @@ -148,6 +148,27 @@ export const commandFn: TypedGunshiCommandFn = async (ctx) = } } + // Cache info + if (item.isCached || item.isCacheHit) { + console.log(''); + console.log(ansis.bold(' Cache')); + + if (item.isCacheHit) { + const hit = item._cacheHits[0]; + const ttlMs = hit.expiresAt - hit.cachedAt; + // ~100 years is our sentinel for "forever" + const ttlDisplay = ttlMs > 50 * 365.25 * 86_400_000 ? 'forever' : formatDuration(ttlMs); + console.log(` ${ansis.gray('TTL:')} ${ttlDisplay}`); + console.log(` ${ansis.blue('Status:')} hit (cached ${formatTimeAgo(hit.cachedAt)})`); + } else { + // cache miss — show TTL from the cache() resolver if available + const cacheTtl = item.cacheTtl; + const ttlDisplay = cacheTtl !== undefined ? 
String(cacheTtl) : 'forever'; + console.log(` ${ansis.gray('TTL:')} ${ttlDisplay}`); + console.log(` ${ansis.gray('Status:')} miss (freshly resolved)`); + } + } + // All definitions const defs = item.defs; if (defs.length) { diff --git a/packages/varlock/src/cli/commands/load.command.ts b/packages/varlock/src/cli/commands/load.command.ts index 2c64d50f3..1e60354b3 100644 --- a/packages/varlock/src/cli/commands/load.command.ts +++ b/packages/varlock/src/cli/commands/load.command.ts @@ -36,6 +36,14 @@ export const commandSpec = define({ short: 'p', description: 'Path to a specific .env file or directory to use as the entry point', }, + 'clear-cache': { + type: 'boolean', + description: 'Clear cache and re-resolve all values', + }, + 'skip-cache': { + type: 'boolean', + description: 'Skip cache entirely for this invocation', + }, }, examples: ` Loads and validates environment variables according to your .env files, and prints the results. @@ -68,6 +76,8 @@ export const commandFn: TypedGunshiCommandFn = async (ctx) = const envGraph = await loadVarlockEnvGraph({ currentEnvFallback: ctx.values.env, entryFilePath: ctx.values.path, + clearCache: ctx.values['clear-cache'], + skipCache: ctx.values['skip-cache'], }); // For json-full, always output the serialized graph — it includes `errors` and diff --git a/packages/varlock/src/cli/commands/lock.command.ts b/packages/varlock/src/cli/commands/lock.command.ts new file mode 100644 index 000000000..5128c7ac1 --- /dev/null +++ b/packages/varlock/src/cli/commands/lock.command.ts @@ -0,0 +1,26 @@ + +import { define } from 'gunshi'; + +import { type TypedGunshiCommandFn } from '../helpers/gunshi-type-utils'; +import * as localEncrypt from '../../lib/local-encrypt'; + +export const commandSpec = define({ + name: 'lock', + description: 'Lock the encryption daemon, requiring biometric for next decrypt', +}); + +export const commandFn: TypedGunshiCommandFn = async () => { + const backend = localEncrypt.getBackendInfo(); + + if 
(!backend.biometricAvailable) { + console.log(`The ${backend.type} backend does not support biometric lock.`); + return; + } + + try { + await localEncrypt.lockSession(); + console.log('Encryption session locked. Biometric authentication will be required for next decrypt.'); + } catch { + console.log('No encryption daemon is running — nothing to lock.'); + } +}; diff --git a/packages/varlock/src/cli/commands/printenv.command.ts b/packages/varlock/src/cli/commands/printenv.command.ts index 72f4bb3af..129278bd4 100644 --- a/packages/varlock/src/cli/commands/printenv.command.ts +++ b/packages/varlock/src/cli/commands/printenv.command.ts @@ -15,6 +15,14 @@ export const commandSpec = define({ short: 'p', description: 'Path to a specific .env file or directory (with trailing slash) to use as the entry point', }, + 'clear-cache': { + type: 'boolean', + description: 'Clear cache and re-resolve all values', + }, + 'skip-cache': { + type: 'boolean', + description: 'Skip cache entirely for this invocation', + }, }, examples: ` Prints the resolved value of a single environment variable. 
@@ -46,6 +54,8 @@ export const commandFn: TypedGunshiCommandFn = async (ctx) = const envGraph = await loadVarlockEnvGraph({ entryFilePath: ctx.values.path, + clearCache: ctx.values['clear-cache'], + skipCache: ctx.values['skip-cache'], }); checkForSchemaErrors(envGraph); diff --git a/packages/varlock/src/cli/commands/reveal.command.ts b/packages/varlock/src/cli/commands/reveal.command.ts new file mode 100644 index 000000000..9972cd91b --- /dev/null +++ b/packages/varlock/src/cli/commands/reveal.command.ts @@ -0,0 +1,222 @@ +import ansis from 'ansis'; +import { define } from 'gunshi'; +import { isCancel } from '@clack/prompts'; +import { gracefulExit } from 'exit-hook'; + +import { loadVarlockEnvGraph } from '../../lib/load-graph'; +import { checkForSchemaErrors, checkForNoEnvFiles } from '../helpers/error-checks'; +import { type TypedGunshiCommandFn } from '../helpers/gunshi-type-utils'; +import { CliExitError } from '../helpers/exit-error'; +import { select } from '../helpers/prompts'; +import { ConfigItem } from '../../env-graph'; +import { redactString } from '../../runtime/lib/redaction'; + +export const commandSpec = define({ + name: 'reveal', + description: 'Securely view decrypted values of sensitive environment variables', + args: { + copy: { + type: 'boolean', + description: 'Copy the value to clipboard instead of displaying (auto-clears after 10s)', + }, + path: { + type: 'string', + short: 'p', + description: 'Path to a specific .env file or directory to use as the entry point', + }, + env: { + type: 'string', + description: 'Set the environment (e.g., production, development, etc)', + }, + }, + examples: ` +Securely view the plaintext value of sensitive environment variables. +Values are shown in an alternate screen buffer so they don't persist in +terminal scrollback history. 
+ +Examples: + varlock reveal # Interactive picker to select and reveal values + varlock reveal MY_SECRET # Reveal a specific variable + varlock reveal MY_SECRET --copy # Copy value to clipboard (auto-clears after 10s) +`.trim(), +}); + +const CLIPBOARD_CLEAR_DELAY_MS = 10_000; + +async function copyToClipboard(text: string): Promise { + const { execSync } = await import('node:child_process'); + const platform = process.platform; + + if (platform === 'darwin') { + execSync('pbcopy', { input: text }); + } else if (platform === 'linux') { + // try xclip first, then xsel + try { + execSync('xclip -selection clipboard', { input: text }); + } catch { + execSync('xsel --clipboard --input', { input: text }); + } + } else if (platform === 'win32') { + execSync('clip', { input: text }); + } else { + throw new CliExitError('Clipboard not supported on this platform'); + } +} + +async function clearClipboard(): Promise { + const { execSync } = await import('node:child_process'); + const platform = process.platform; + + try { + if (platform === 'darwin') { + execSync('pbcopy', { input: '' }); + } else if (platform === 'linux') { + try { + execSync('xclip -selection clipboard', { input: '' }); + } catch { + execSync('xsel --clipboard --input', { input: '' }); + } + } else if (platform === 'win32') { + execSync('echo. 
| clip', { shell: 'cmd.exe' }); + } + } catch { + // best effort + } +} + +function enterAltScreen() { + process.stdout.write('\x1b[?1049h'); // switch to alternate screen buffer + process.stdout.write('\x1b[H'); // move cursor to top-left +} + +function exitAltScreen() { + process.stdout.write('\x1b[?1049l'); // switch back to main screen buffer +} + +/** Wait for a single keypress, returns the key */ +async function waitForKeypress(): Promise { + return new Promise((resolve) => { + const wasRaw = process.stdin.isRaw; + if (process.stdin.isTTY) process.stdin.setRawMode(true); + process.stdin.resume(); + process.stdin.once('data', (data) => { + if (process.stdin.isTTY) process.stdin.setRawMode(wasRaw); + process.stdin.pause(); + resolve(data.toString()); + }); + }); +} + +function displayRevealedValue(item: ConfigItem) { + enterAltScreen(); + + const value = item.resolvedValue; + const valStr = value === undefined || value === null ? ansis.gray('(empty)') : String(value); + + console.log(''); + console.log(ansis.bold.cyan(` ${item.key}`)); + if (item.description) { + console.log(ansis.gray(` ${item.description}`)); + } + console.log(''); + console.log(` ${valStr}`); + console.log(''); + console.log(ansis.gray(' Press any key to hide...')); +} + +export const commandFn: TypedGunshiCommandFn = async (ctx) => { + const { copy: copyMode } = ctx.values; + + const envGraph = await loadVarlockEnvGraph({ + currentEnvFallback: ctx.values.env, + entryFilePath: ctx.values.path, + }); + + checkForSchemaErrors(envGraph); + checkForNoEnvFiles(envGraph); + + await envGraph.resolveEnvValues(); + + // Collect sensitive items + const sensitiveItems: Array = []; + for (const itemKey of envGraph.sortedConfigKeys) { + const item = envGraph.configSchema[itemKey]; + if (item.isSensitive && item.resolvedValue !== undefined) { + sensitiveItems.push(item); + } + } + + if (sensitiveItems.length === 0) { + console.log('No sensitive values found to reveal.'); + return; + } + + // Check if a 
specific variable was requested via positional arg + const positionals = (ctx.positionals ?? []).slice(ctx.commandPath?.length ?? 0); + const requestedVar = positionals[0]; + + if (requestedVar) { + // Direct reveal of a specific variable + const item = sensitiveItems.find((i) => i.key === requestedVar); + if (!item) { + // Check if it exists but isn't sensitive + if (requestedVar in envGraph.configSchema) { + throw new CliExitError(`"${requestedVar}" is not marked as sensitive`, { + suggestion: 'Use `varlock printenv` for non-sensitive values.', + }); + } + throw new CliExitError(`Variable "${requestedVar}" not found in schema`); + } + + if (copyMode) { + await copyToClipboard(String(item.resolvedValue ?? '')); + console.log(`\n Copied ${ansis.cyan(item.key)} to clipboard.`); + console.log(ansis.gray(` Clipboard will be cleared in ${CLIPBOARD_CLEAR_DELAY_MS / 1000}s.\n`)); + setTimeout(async () => { + await clearClipboard(); + console.log(ansis.gray(' Clipboard cleared.')); + gracefulExit(); + }, CLIPBOARD_CLEAR_DELAY_MS); + return; + } + + displayRevealedValue(item); + await waitForKeypress(); + exitAltScreen(); + return; + } + + // Interactive picker loop + while (true) { + const selected = await select({ + message: `Select a variable to reveal ${ansis.gray('(use arrows, enter to select)')}`, + options: sensitiveItems.map((item) => ({ + value: item.key, + label: item.key, + hint: redactString(String(item.resolvedValue ?? '')) ?? undefined, + })), + }); + + if (isCancel(selected)) return gracefulExit(); + + const item = sensitiveItems.find((i) => i.key === selected)!; + + if (copyMode) { + await copyToClipboard(String(item.resolvedValue ?? 
'')); + console.log(`\n Copied ${ansis.cyan(item.key)} to clipboard.`); + console.log(ansis.gray(` Clipboard will be cleared in ${CLIPBOARD_CLEAR_DELAY_MS / 1000}s.\n`)); + setTimeout(async () => { + await clearClipboard(); + console.log(ansis.gray(' Clipboard cleared.')); + gracefulExit(); + }, CLIPBOARD_CLEAR_DELAY_MS); + return; + } + + displayRevealedValue(item); + await waitForKeypress(); + exitAltScreen(); + + // Loop back to the picker to reveal another value + } +}; diff --git a/packages/varlock/src/cli/commands/run.command.ts b/packages/varlock/src/cli/commands/run.command.ts index 590815149..83ac28037 100644 --- a/packages/varlock/src/cli/commands/run.command.ts +++ b/packages/varlock/src/cli/commands/run.command.ts @@ -25,6 +25,14 @@ export const commandSpec = define({ short: 'p', description: 'Path to a specific .env file or directory to use as the entry point', }, + 'clear-cache': { + type: 'boolean', + description: 'Clear cache and re-resolve all values', + }, + 'skip-cache': { + type: 'boolean', + description: 'Skip cache entirely for this invocation', + }, }, examples: ` Executes a command in a child process, injecting your resolved and validated environment @@ -73,6 +81,8 @@ export const commandFn: TypedGunshiCommandFn = async (ctx) = const envGraph = await loadVarlockEnvGraph({ entryFilePath: ctx.values.path, + clearCache: ctx.values['clear-cache'], + skipCache: ctx.values['skip-cache'], }); checkForSchemaErrors(envGraph); checkForNoEnvFiles(envGraph); diff --git a/packages/varlock/src/cli/helpers/error-checks.ts b/packages/varlock/src/cli/helpers/error-checks.ts index 6600207d6..c2c3d6b27 100644 --- a/packages/varlock/src/cli/helpers/error-checks.ts +++ b/packages/varlock/src/cli/helpers/error-checks.ts @@ -65,6 +65,17 @@ export function checkForSchemaErrors(envGraph: EnvGraph) { } return gracefulExit(1); } + + // check for errors from decorator execute() (e.g., invalid plugin options like cacheTtl) + if (source.resolutionErrors.length) { + 
console.error(`🚨 Error(s) during initialization of ${source.label}`); + + for (const resErr of source.resolutionErrors) { + console.error(`- ${resErr.message}`); + showErrorLocationDetails(resErr); + } + return gracefulExit(1); + } } // now we check for any schema errors - where something about how things are wired up is invalid diff --git a/packages/varlock/src/env-graph/lib/config-item.ts b/packages/varlock/src/env-graph/lib/config-item.ts index 9f381e43f..b4975a5d0 100644 --- a/packages/varlock/src/env-graph/lib/config-item.ts +++ b/packages/varlock/src/env-graph/lib/config-item.ts @@ -9,10 +9,11 @@ import { CoercionError, EmptyRequiredValueError, ResolutionError, SchemaError, ValidationError, } from './errors'; +import type { CacheHitInfo } from './resolution-context'; import { EnvGraphDataSource } from './data-source'; import { - convertParsedValueToResolvers, type ResolvedValue, type Resolver, StaticValueResolver, + convertParsedValueToResolvers, type ResolvedValue, Resolver, StaticValueResolver, } from './resolver'; import { ItemDecoratorInstance } from './decorators'; @@ -35,6 +36,35 @@ export class ConfigItem { /** Whether this is a builtin VARLOCK_* variable */ isBuiltin?: boolean; + /** Cache hits recorded during resolution (rolled up from potentially multiple cache() resolvers) */ + _cacheHits: Array = []; + + /** Whether any value was served from cache */ + get isCacheHit() { return this._cacheHits.length > 0; } + + /** Whether this item uses cache(). */ + get isCached(): boolean { + return this._findCacheResolver(this.valueResolver) !== undefined; + } + + /** TTL string from the cache() resolver (for display in explain command). undefined = forever. 
*/ + get cacheTtl(): string | number | undefined { + const cacheResolver = this._findCacheResolver(this.valueResolver); + if (!cacheResolver) return undefined; + const ttlResolver = cacheResolver.objArgs?.ttl; + return ttlResolver?.staticValue as string | number | undefined; + } + + private _findCacheResolver(resolver?: Resolver): Resolver | undefined { + if (!resolver) return undefined; + if (resolver.fnName === 'cache') return resolver; + for (const child of resolver.arrArgs ?? []) { + const found = this._findCacheResolver(child); + if (found) return found; + } + return undefined; + } + /** Programmatic definitions not tied to a data source (e.g. builtin vars) */ _internalDefs: Array = []; diff --git a/packages/varlock/src/env-graph/lib/env-graph.ts b/packages/varlock/src/env-graph/lib/env-graph.ts index 9c498fdcf..b679a1853 100644 --- a/packages/varlock/src/env-graph/lib/env-graph.ts +++ b/packages/varlock/src/env-graph/lib/env-graph.ts @@ -4,6 +4,7 @@ import { ConfigItem } from './config-item'; import { EnvGraphDataSource, FileBasedDataSource, ImportAliasSource } from './data-source'; import { BaseResolvers, createResolver, type ResolverChildClass } from './resolver'; +import { VarlockResolver } from '../../lib/local-encrypt/builtin-resolver'; import { BaseDataTypes, type EnvGraphDataTypeFactory } from './data-types'; import { findGraphCycles, getTransitiveDeps, type GraphAdjacencyList } from './graph-utils'; import { ResolutionError, SchemaError } from './errors'; @@ -14,6 +15,7 @@ import { } from './decorators'; import { getErrorLocation } from './error-location'; import type { VarlockPlugin } from './plugins'; +import { runWithResolutionContext, getResolutionContext } from './resolution-context'; import { getCiEnv, type CiEnvInfo } from '@varlock/ci-env-info'; import { BUILTIN_VARS, isBuiltinVar } from './builtin-vars'; @@ -63,6 +65,14 @@ export class EnvGraph { basePath?: string; + // -- Cache -- + /** @internal cache store instance, initialized during 
loading */ + _cacheStore?: import('../../lib/cache/cache-store').CacheStore; + /** @internal --clear-cache flag: clear cache then resolve + rewrite */ + _clearCacheMode = false; + /** @internal --skip-cache flag: skip cache entirely */ + _skipCacheMode = false; + /** root data source (.env.schema) */ rootDataSource?: EnvGraphDataSource; @@ -227,6 +237,8 @@ export class EnvGraph { for (const resolverClass of BaseResolvers) { this.registerResolver(resolverClass); } + // register built-in varlock() resolver for local encryption + this.registerResolver(VarlockResolver); // base root decorators (envFlag, generateTypes, import, etc) for (const rootDec of builtInRootDecorators) { this.registerRootDecorator(rootDec); @@ -482,7 +494,19 @@ export class EnvGraph { // mark item as beginning to actually resolve itemsToResolveStatus[itemKey] = true; // true means in progress - await item.resolve(); + await runWithResolutionContext({ + cacheStore: this._cacheStore, + skipCache: this._skipCacheMode, + clearCache: this._clearCacheMode, + cacheHits: [], + currentItem: item, + }, async () => { + await item.resolve(); + const ctx = getResolutionContext(); + if (ctx?.cacheHits.length) { + item._cacheHits = ctx.cacheHits; + } + }); markItemCompleted(itemKey); }; diff --git a/packages/varlock/src/env-graph/lib/loader.ts b/packages/varlock/src/env-graph/lib/loader.ts index d847c9c58..1b9e211dd 100644 --- a/packages/varlock/src/env-graph/lib/loader.ts +++ b/packages/varlock/src/env-graph/lib/loader.ts @@ -3,6 +3,8 @@ import path from 'node:path'; import _ from '@env-spec/utils/my-dash'; import { EnvGraph } from './env-graph'; import { DirectoryDataSource, DotEnvFileDataSource } from './data-source'; +import { CacheStore } from '../../lib/cache'; +import * as localEncrypt from '../../lib/local-encrypt'; export async function loadEnvGraph(opts?: { basePath?: string, @@ -11,10 +13,29 @@ export async function loadEnvGraph(opts?: { checkGitIgnored?: boolean, excludeDirs?: Array, 
currentEnvFallback?: string, + clearCache?: boolean, + skipCache?: boolean, afterInit?: (graph: EnvGraph) => Promise, }) { const graph = new EnvGraph(); + // set cache mode flags + if (opts?.clearCache) graph._clearCacheMode = true; + if (opts?.skipCache) graph._skipCacheMode = true; + + // initialize cache store (graceful — if encryption key doesn't exist, skip caching) + if (!opts?.skipCache) { + try { + await localEncrypt.ensureKey(); + graph._cacheStore = new CacheStore(); + if (graph._clearCacheMode) { + graph._cacheStore.clearAll(); + } + } catch { + // cache unavailable — proceed without caching + } + } + if (opts?.entryFilePath) { const resolvedPath = path.resolve(opts.entryFilePath); const isDirectory = opts.entryFilePath.endsWith('/') || opts.entryFilePath.endsWith(path.sep) diff --git a/packages/varlock/src/env-graph/lib/plugins.ts b/packages/varlock/src/env-graph/lib/plugins.ts index 6c79ffb7f..6e486fca8 100644 --- a/packages/varlock/src/env-graph/lib/plugins.ts +++ b/packages/varlock/src/env-graph/lib/plugins.ts @@ -14,6 +14,10 @@ import { isCancel } from '@clack/prompts'; import _ from '@env-spec/utils/my-dash'; import { pathExists } from '@env-spec/utils/fs-utils'; import { getUserVarlockDir } from '../../lib/user-config-dir'; +import { PluginCacheAccessor } from '../../lib/cache/plugin-cache-accessor'; +import type { CacheStore } from '../../lib/cache/cache-store'; +import { parseTtl } from '../../lib/cache/ttl-parser'; +import { resolveCacheTtl } from '../../lib/cache/resolve-cache-ttl'; import { confirm } from '../../cli/helpers/prompts'; @@ -79,6 +83,8 @@ const varlockPluginLibExports = { SchemaError, ResolutionError, createDebug, + parseTtl, + resolveCacheTtl, }; @@ -209,6 +215,23 @@ export class VarlockPlugin { } + // -- Cache API for plugin authors -- + private _cacheAccessor?: PluginCacheAccessor; + /** @internal set by EnvGraph when plugins are loaded */ + _cacheStore?: CacheStore; + + /** + * Scoped cache accessor for this plugin. 
+ * Keys are automatically namespaced to prevent collisions between plugins. + */ + get cache(): PluginCacheAccessor { + if (!this._cacheAccessor) { + if (!this._cacheStore) throw new Error('Cache not available — plugin accessed cache too early'); + this._cacheAccessor = new PluginCacheAccessor(this.name, this._cacheStore); + } + return this._cacheAccessor; + } + readonly dataTypes?: Array[0]> = []; registerDataType(dataTypeDef: Parameters[0]) { this.debug('registerDataType', dataTypeDef.name); @@ -427,6 +450,11 @@ async function registerPluginInGraph(graph: EnvGraph, plugin: VarlockPlugin, plu plugin.installDecoratorInstances.push(pluginDecorator); graph.plugins.push(plugin); + // propagate cache store so plugin.cache is available during module execution + if (graph._cacheStore) { + plugin._cacheStore = graph._cacheStore; + } + // this finally executes the plugin code await plugin.executePluginModule(); diff --git a/packages/varlock/src/env-graph/lib/resolution-context.ts b/packages/varlock/src/env-graph/lib/resolution-context.ts new file mode 100644 index 000000000..8747e5bc3 --- /dev/null +++ b/packages/varlock/src/env-graph/lib/resolution-context.ts @@ -0,0 +1,37 @@ +import { AsyncLocalStorage } from 'node:async_hooks'; +import type { CacheStore } from '../../lib/cache/cache-store'; +import type { ConfigItem } from './config-item'; + +export type CacheHitInfo = { + cacheKey: string; + cachedAt: number; + expiresAt: number; +}; + +export type ResolutionContextData = { + cacheStore?: CacheStore; + skipCache: boolean; + clearCache: boolean; + /** Cache hits recorded during resolution of the current item */ + cacheHits: Array; + /** The ConfigItem currently being resolved */ + currentItem: ConfigItem; +}; + +const resolutionContextStorage = new AsyncLocalStorage(); + +/** + * Run a function within a resolution context. + * Used in resolveEnvValues() to provide per-item context to resolvers via ALS. 
+ */ +export function runWithResolutionContext(ctx: ResolutionContextData, fn: () => T): T { + return resolutionContextStorage.run(ctx, fn); +} + +/** + * Get the current resolution context, if any. + * Called by resolvers (e.g., cache()) to access the cache store and current item. + */ +export function getResolutionContext(): ResolutionContextData | undefined { + return resolutionContextStorage.getStore(); +} diff --git a/packages/varlock/src/env-graph/lib/resolver.ts b/packages/varlock/src/env-graph/lib/resolver.ts index ebac163d3..9f197a18c 100644 --- a/packages/varlock/src/env-graph/lib/resolver.ts +++ b/packages/varlock/src/env-graph/lib/resolver.ts @@ -1,5 +1,6 @@ import { exec } from 'node:child_process'; import { promisify } from 'node:util'; +import { randomBytes, randomUUID, randomInt as cryptoRandomInt } from 'node:crypto'; import _ from '@env-spec/utils/my-dash'; import { @@ -9,6 +10,7 @@ import { import { ConfigItem } from './config-item'; import { SimpleQueue } from './simple-queue'; import { ResolutionError, SchemaError, VarlockError } from './errors'; +import { parseTtl, TTL_FOREVER } from '../../lib/cache/ttl-parser'; import type { EnvGraphDataSource } from './data-source'; import { DecoratorInstance } from './decorators'; import { getErrorLocation } from './error-location'; @@ -626,6 +628,278 @@ export const IsEmptyResolver: typeof Resolver = createResolver({ }); +// ── Random value generators ──────────────────────────────────────────── + +export const RandomIntResolver: typeof Resolver = createResolver({ + name: 'randomInt', + description: 'Generate a random integer between min and max (inclusive)', + icon: 'mdi:dice-multiple', + inferredType: 'number', + argsSchema: { + type: 'array', + arrayMinLength: 0, + arrayMaxLength: 2, + }, + process() { + const args = this.arrArgs ?? 
[]; + let min = 0; + let max = 2_147_483_647; // int32 max + if (args.length === 1) { + if (!args[0].isStatic || typeof args[0].staticValue !== 'number') { + throw new SchemaError('randomInt() max argument must be a static number'); + } + max = args[0].staticValue as number; + } else if (args.length === 2) { + if (!args[0].isStatic || typeof args[0].staticValue !== 'number') { + throw new SchemaError('randomInt() min argument must be a static number'); + } + if (!args[1].isStatic || typeof args[1].staticValue !== 'number') { + throw new SchemaError('randomInt() max argument must be a static number'); + } + min = args[0].staticValue as number; + max = args[1].staticValue as number; + } + if (!Number.isInteger(min) || !Number.isInteger(max)) { + throw new SchemaError('randomInt() arguments must be integers'); + } + if (min > max) { + throw new SchemaError(`randomInt() min (${min}) must be <= max (${max})`); + } + return { min, max }; + }, + async resolve({ min, max }) { + // crypto.randomInt is exclusive on upper bound, so +1 for inclusive + return cryptoRandomInt(min, max + 1); + }, +}); + +export const RandomFloatResolver: typeof Resolver = createResolver({ + name: 'randomFloat', + description: 'Generate a random float between min and max', + icon: 'mdi:dice-multiple', + inferredType: 'number', + argsSchema: { + type: 'mixed', + arrayMinLength: 0, + arrayMaxLength: 2, + }, + process() { + const args = this.arrArgs ?? 
[]; + let min = 0; + let max = 1; + if (args.length === 1) { + if (!args[0].isStatic || typeof args[0].staticValue !== 'number') { + throw new SchemaError('randomFloat() max argument must be a static number'); + } + max = args[0].staticValue as number; + } else if (args.length === 2) { + if (!args[0].isStatic || typeof args[0].staticValue !== 'number') { + throw new SchemaError('randomFloat() min argument must be a static number'); + } + if (!args[1].isStatic || typeof args[1].staticValue !== 'number') { + throw new SchemaError('randomFloat() max argument must be a static number'); + } + min = args[0].staticValue as number; + max = args[1].staticValue as number; + } + if (min > max) { + throw new SchemaError(`randomFloat() min (${min}) must be <= max (${max})`); + } + const precisionResolver = this.objArgs?.precision; + let precision = 2; + if (precisionResolver) { + if (!precisionResolver.isStatic || typeof precisionResolver.staticValue !== 'number') { + throw new SchemaError('randomFloat() precision must be a static integer'); + } + precision = precisionResolver.staticValue as number; + } + return { min, max, precision }; + }, + async resolve({ min, max, precision }) { + const value = min + Math.random() * (max - min); + return Number(value.toFixed(precision)); + }, +}); + +export const RandomUuidResolver: typeof Resolver = createResolver({ + name: 'randomUuid', + description: 'Generate a random UUID v4', + icon: 'mdi:identifier', + inferredType: 'string', + async resolve() { + return randomUUID(); + }, +}); + +export const RandomHexResolver: typeof Resolver = createResolver({ + name: 'randomHex', + description: 'Generate a random hex string of the given byte length', + icon: 'mdi:dice-multiple', + inferredType: 'string', + argsSchema: { + type: 'array', + arrayMinLength: 0, + arrayMaxLength: 1, + }, + process() { + const args = this.arrArgs ?? 
[]; + let bytes = 16; // default 32 hex chars + if (args.length === 1) { + if (!args[0].isStatic || typeof args[0].staticValue !== 'number') { + throw new SchemaError('randomHex() length argument must be a static number'); + } + bytes = args[0].staticValue as number; + if (!Number.isInteger(bytes) || bytes < 1) { + throw new SchemaError('randomHex() length must be a positive integer'); + } + } + return { bytes }; + }, + async resolve({ bytes }) { + return randomBytes(bytes).toString('hex'); + }, +}); + +export const RandomStringResolver: typeof Resolver = createResolver({ + name: 'randomString', + description: 'Generate a random string of the given length', + icon: 'mdi:dice-multiple', + inferredType: 'string', + argsSchema: { + type: 'mixed', + arrayMinLength: 0, + arrayMaxLength: 1, + }, + process() { + const args = this.arrArgs ?? []; + let length = 16; + if (args.length === 1) { + if (!args[0].isStatic || typeof args[0].staticValue !== 'number') { + throw new SchemaError('randomString() length argument must be a static number'); + } + length = args[0].staticValue as number; + if (!Number.isInteger(length) || length < 1) { + throw new SchemaError('randomString() length must be a positive integer'); + } + } + const charsetResolver = this.objArgs?.charset; + let charset = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; + if (charsetResolver) { + if (!charsetResolver.isStatic || typeof charsetResolver.staticValue !== 'string') { + throw new SchemaError('randomString() charset must be a static string'); + } + charset = charsetResolver.staticValue as string; + if (charset.length === 0) { + throw new SchemaError('randomString() charset must not be empty'); + } + } + return { length, charset }; + }, + async resolve({ length, charset }) { + const bytes = randomBytes(length); + let result = ''; + for (let i = 0; i < length; i++) { + result += charset[bytes[i] % charset.length]; + } + return result; + }, +}); + +// ── Cache resolver 
───────────────────────────────────────────────────── + +export const CacheResolver: typeof Resolver = createResolver({ + name: 'cache', + description: 'Cache the result of a resolver', + icon: 'mdi:cached', + argsSchema: { + type: 'mixed', + arrayMinLength: 1, + arrayMaxLength: 1, + }, + process() { + // pass through child resolver's inferred type + const childResolver = this.arrArgs?.[0]; + if (childResolver?.inferredType) { + this.inferredType = childResolver.inferredType; + } + + // warn if the child resolver is a static value — caching a literal is pointless + if (childResolver instanceof StaticValueResolver) { + this._schemaErrors.push(new SchemaError( + 'wraps a static value which never changes — caching has no effect', + { isWarning: true }, + )); + } + + // optional explicit cache key + const keyResolver = this.objArgs?.key; + let customKey: string | undefined; + if (keyResolver) { + if (!keyResolver.isStatic || typeof keyResolver.staticValue !== 'string') { + throw new SchemaError('key must be a static string'); + } + customKey = keyResolver.staticValue as string; + } + + // optional TTL + const ttlResolver = this.objArgs?.ttl; + let ttl: string | number | undefined; + if (ttlResolver) { + if (!ttlResolver.isStatic) { + throw new SchemaError('ttl must be a static value'); + } + const ttlVal = ttlResolver.staticValue; + if (typeof ttlVal !== 'string' && typeof ttlVal !== 'number') { + throw new SchemaError('ttl must be a string like "1h" or a number (0 = forever)'); + } + parseTtl(ttlVal); + ttl = ttlVal; + } + + return { ttl, customKey }; + }, + async resolve(state) { + const { getResolutionContext } = await import('./resolution-context'); + const ctx = getResolutionContext(); + const cacheStore = ctx?.cacheStore; + const item = ctx?.currentItem; + + const childResolver = this.arrArgs![0]; + + // Use explicit key if provided, otherwise auto-generate from file/item/resolver text + let cacheKey: string; + if (state.customKey) { + cacheKey = 
`resolver:custom:${state.customKey}`; + } else { + const resolverText = this._parsedNode?.toString() ?? childResolver._parsedNode?.toString() ?? 'unknown'; + const filePath = (this.dataSource as any)?.fullPath ?? this.dataSource?.label ?? 'unknown'; + cacheKey = `resolver:${filePath}:${item?.key ?? 'unknown'}:${resolverText}`; + } + + if (cacheStore && !ctx?.skipCache) { + // try cache read (unless clear-cache mode) + if (!ctx?.clearCache) { + const cached = await cacheStore.get(cacheKey); + if (cached) { + ctx?.cacheHits.push({ cacheKey, cachedAt: cached.cachedAt, expiresAt: cached.expiresAt }); + return cached.value; + } + } + } + + // cache miss — resolve wrapped resolver + const childValue = await childResolver.resolve(); + + // write to cache (even in clear-cache mode — that's the "rewrite" part) + if (cacheStore && !ctx?.skipCache && childValue !== undefined) { + const ttlMs = state.ttl != null ? parseTtl(state.ttl) : TTL_FOREVER; + await cacheStore.set(cacheKey, childValue, ttlMs); + } + + return childValue; + }, +}); + // Special function for `@defaultSensitive=inferFromPrefix(PUBLIC_)` // we may want to formalize this pattern of a resolver function used in a root decorator // but resolved within the context of a specific item @@ -659,6 +933,12 @@ export const BaseResolvers: Array = [ FallbackResolver, RefResolver, ExecResolver, + RandomIntResolver, + RandomFloatResolver, + RandomUuidResolver, + RandomHexResolver, + RandomStringResolver, + CacheResolver, RemapResolver, IfsResolver, ForEnvResolver, diff --git a/packages/varlock/src/env-graph/test/cache-resolver.test.ts b/packages/varlock/src/env-graph/test/cache-resolver.test.ts new file mode 100644 index 000000000..5940103f7 --- /dev/null +++ b/packages/varlock/src/env-graph/test/cache-resolver.test.ts @@ -0,0 +1,357 @@ +/** + * Tests for the cache() resolver function. + * + * Tests schema validation, resolver wiring, and actual caching behavior + * using a random resolver and mock cache store. 
+ */ + +import { + describe, it, expect, vi, beforeEach, afterEach, +} from 'vitest'; +import fs from 'node:fs'; +import path from 'node:path'; +import os from 'node:os'; +import { outdent } from 'outdent'; +import { DotEnvFileDataSource, EnvGraph } from '../index'; +import { Resolver } from '../lib/resolver'; +import { CacheStore } from '../../lib/cache'; + +let tempDir: string; + +// mock localEncrypt to avoid needing real encryption keys +vi.mock('../../lib/local-encrypt', () => ({ + encryptValue: vi.fn(async (value: string) => `encrypted:${value}`), + decryptValue: vi.fn(async (value: string) => value.replace('encrypted:', '')), + // eslint-disable-next-line @typescript-eslint/no-empty-function + ensureKey: vi.fn(async () => {}), + keyExists: vi.fn(() => true), +})); + +// mock user config dir to use temp directory +vi.mock('../../lib/user-config-dir', () => ({ + getUserVarlockDir: () => tempDir, +})); + +// track call counts via mutable object (closures in static def capture the reference) +const calls = { random: 0, counter: 0 }; + +// random resolver — returns a different value each time +class RandomResolver extends Resolver { + static def = { + name: 'random', + label: 'random', + icon: '', + async resolve() { + calls.random++; + return `random-${Math.random().toString(36).slice(2)}`; + }, + }; +} + +// counter resolver — increments each call +class CounterResolver extends Resolver { + static def = { + name: 'counter', + label: 'counter', + icon: '', + async resolve() { return ++calls.counter; }, + }; +} + +async function loadAndResolve(envContent: string, opts?: { + cacheStore?: CacheStore; + clearCache?: boolean; + skipCache?: boolean; +}) { + const g = new EnvGraph(); + g.registerResolver(RandomResolver); + g.registerResolver(CounterResolver); + if (opts?.cacheStore) g._cacheStore = opts.cacheStore; + if (opts?.clearCache) g._clearCacheMode = true; + if (opts?.skipCache) g._skipCacheMode = true; + const source = new DotEnvFileDataSource('.env.schema', { 
+ overrideContents: outdent` + # @defaultRequired=false + # --- + ${envContent} + `, + }); + await g.setRootDataSource(source); + await g.finishLoad(); + await g.resolveEnvValues(); + return g; +} + +function createTestCacheStore() { + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'varlock-cache-resolver-test-')); + return new CacheStore(); +} + +beforeEach(() => { + calls.random = 0; + calls.counter = 0; +}); + +afterEach(() => { + if (tempDir && fs.existsSync(tempDir)) { + fs.rmSync(tempDir, { recursive: true, force: true }); + } + vi.restoreAllMocks(); +}); + +describe('cache() resolver', () => { + describe('schema validation', () => { + it('accepts cache() without ttl (defaults to forever)', async () => { + const g = await loadAndResolve('A=cache(random())'); + const item = g.configSchema.A; + expect(item.errors.length).toBe(0); + expect(item.resolvedValue).toBeDefined(); + }); + + it('rejects cache() with invalid ttl format', async () => { + const g = await loadAndResolve('A=cache("static", ttl="invalid")'); + const item = g.configSchema.A; + expect(item.errors.length).toBeGreaterThan(0); + }); + + it('accepts cache() with valid ttl', async () => { + const g = await loadAndResolve('A=cache(random(), ttl="1h")'); + const item = g.configSchema.A; + expect(item.resolvedValue).toBeDefined(); + expect(item.errors.length).toBe(0); + }); + + it('warns when wrapping a static value', async () => { + const g = await loadAndResolve('A=cache("static-val", ttl="1h")'); + const item = g.configSchema.A; + // should still resolve (warning, not error) + expect(item.resolvedValue).toBe('static-val'); + // the warning is on the resolver's schema errors + const resolverWarnings = item.resolverSchemaErrors.filter((e) => e.isWarning); + expect(resolverWarnings.length).toBeGreaterThan(0); + expect(resolverWarnings.some((e) => e.message.includes('static value'))).toBe(true); + }); + + it('accepts cache() with ttl=0 (forever)', async () => { + const g = await 
loadAndResolve('A=cache(random(), ttl=0)'); + const item = g.configSchema.A; + expect(item.resolvedValue).toBeDefined(); + expect(item.errors.length).toBe(0); + }); + }); + + describe('resolution without cache store', () => { + it('resolves wrapped static value', async () => { + const g = await loadAndResolve('A=cache("world", ttl="30m")'); + expect(g.configSchema.A.resolvedValue).toBe('world'); + }); + + it('resolves wrapped function', async () => { + const g = await loadAndResolve('A=cache(counter(), ttl="1h")'); + // counter returns a number but default type is string, so it gets coerced + expect(g.configSchema.A.resolvedValue).toBe('1'); + }); + + it('works with fallback wrapping cache', async () => { + const g = await loadAndResolve('A=fallback(cache("first", ttl="1h"), "second")'); + expect(g.configSchema.A.resolvedValue).toBe('first'); + }); + }); + + describe('caching behavior with cache store', () => { + it('caches a value and returns it on second resolve', async () => { + const store = createTestCacheStore(); + + // first resolve — cache miss, resolver runs + const g1 = await loadAndResolve('A=cache(random(), ttl="1h")', { cacheStore: store }); + const firstValue = g1.configSchema.A.resolvedValue; + expect(firstValue).toBeDefined(); + expect(calls.random).toBe(1); + expect(g1.configSchema.A.isCacheHit).toBe(false); + + // second resolve — cache hit, resolver should NOT run again + const g2 = await loadAndResolve('A=cache(random(), ttl="1h")', { cacheStore: store }); + expect(g2.configSchema.A.resolvedValue).toBe(firstValue); + // random resolver was only called once total (from first resolve) + expect(calls.random).toBe(1); + expect(g2.configSchema.A.isCacheHit).toBe(true); + expect(g2.configSchema.A._cacheHits.length).toBe(1); + }); + + it('cache invalidates when resolver text changes', async () => { + const store = createTestCacheStore(); + + const g1 = await loadAndResolve('A=cache("value-1", ttl="1h")', { cacheStore: store }); + 
expect(g1.configSchema.A.resolvedValue).toBe('value-1'); + + // change the wrapped resolver — should NOT get cached value + const g2 = await loadAndResolve('A=cache("value-2", ttl="1h")', { cacheStore: store }); + expect(g2.configSchema.A.resolvedValue).toBe('value-2'); + expect(g2.configSchema.A.isCacheHit).toBe(false); + }); + + it('--clear-cache skips reading but rewrites', async () => { + const store = createTestCacheStore(); + + // populate cache + const g1 = await loadAndResolve('A=cache(random(), ttl="1h")', { cacheStore: store }); + const firstValue = g1.configSchema.A.resolvedValue; + + // clear-cache: should resolve fresh (not return cached value) + const g2 = await loadAndResolve('A=cache(random(), ttl="1h")', { cacheStore: store, clearCache: true }); + expect(g2.configSchema.A.resolvedValue).not.toBe(firstValue); + expect(calls.random).toBe(2); + + // third resolve without clear: should get the new cached value + const g3 = await loadAndResolve('A=cache(random(), ttl="1h")', { cacheStore: store }); + expect(g3.configSchema.A.resolvedValue).toBe(g2.configSchema.A.resolvedValue); + expect(calls.random).toBe(2); // not called again + }); + + it('--skip-cache bypasses cache entirely', async () => { + const store = createTestCacheStore(); + + // populate cache + await loadAndResolve('A=cache(random(), ttl="1h")', { cacheStore: store }); + + // skip-cache: should resolve fresh and NOT write to cache + const g2 = await loadAndResolve('A=cache(random(), ttl="1h")', { cacheStore: store, skipCache: true }); + expect(calls.random).toBe(2); + expect(g2.configSchema.A.isCacheHit).toBe(false); + + // third resolve without skip: should still get original cached value (skip didn't overwrite) + const g3 = await loadAndResolve('A=cache(random(), ttl="1h")', { cacheStore: store }); + expect(calls.random).toBe(2); // cache hit from first resolve + expect(g3.configSchema.A.isCacheHit).toBe(true); + }); + + it('uses custom key when specified', async () => { + const store = 
createTestCacheStore(); + + // cache with custom key + const g1 = await loadAndResolve('A=cache(random(), key="my-custom-key")', { cacheStore: store }); + const firstValue = g1.configSchema.A.resolvedValue; + + // same custom key — should hit cache even though item name could differ + const g2 = await loadAndResolve('A=cache(random(), key="my-custom-key")', { cacheStore: store }); + expect(g2.configSchema.A.resolvedValue).toBe(firstValue); + expect(g2.configSchema.A.isCacheHit).toBe(true); + + // different custom key — should NOT hit cache + const g3 = await loadAndResolve('A=cache(random(), key="other-key")', { cacheStore: store }); + expect(g3.configSchema.A.resolvedValue).not.toBe(firstValue); + expect(g3.configSchema.A.isCacheHit).toBe(false); + }); + + it('caches forever when no ttl specified', async () => { + const store = createTestCacheStore(); + + const g1 = await loadAndResolve('A=cache(random())', { cacheStore: store }); + const firstValue = g1.configSchema.A.resolvedValue; + + const g2 = await loadAndResolve('A=cache(random())', { cacheStore: store }); + expect(g2.configSchema.A.resolvedValue).toBe(firstValue); + expect(g2.configSchema.A.isCacheHit).toBe(true); + }); + + it('multiple items cache independently', async () => { + const store = createTestCacheStore(); + + const g1 = await loadAndResolve(outdent` + A=cache(random(), ttl="1h") + B=cache(random(), ttl="1h") + `, { cacheStore: store }); + expect(g1.configSchema.A.resolvedValue).toBeDefined(); + expect(g1.configSchema.B.resolvedValue).toBeDefined(); + expect(g1.configSchema.A.resolvedValue).not.toBe(g1.configSchema.B.resolvedValue); + + // check cache file was written + const stats = store.getStats(); + expect(stats.total).toBe(2); + + // both should be cached on second resolve + const g2 = await loadAndResolve(outdent` + A=cache(random(), ttl="1h") + B=cache(random(), ttl="1h") + `, { cacheStore: store }); + expect(g2.configSchema.A.isCacheHit).toBe(true); + 
expect(g2.configSchema.B.isCacheHit).toBe(true); + expect(g2.configSchema.A.resolvedValue).toBe(g1.configSchema.A.resolvedValue); + expect(g2.configSchema.B.resolvedValue).toBe(g1.configSchema.B.resolvedValue); + }); + }); + + describe('cacheTtl / isCached properties', () => { + it('extracts TTL from cache() resolver', async () => { + const g = await loadAndResolve('A=cache("val", ttl="2h")'); + expect(g.configSchema.A.cacheTtl).toBe('2h'); + expect(g.configSchema.A.isCached).toBe(true); + }); + + it('returns undefined TTL when no ttl specified (forever)', async () => { + const g = await loadAndResolve('A=cache("val")'); + expect(g.configSchema.A.cacheTtl).toBeUndefined(); + expect(g.configSchema.A.isCached).toBe(true); + }); + + it('isCached is false when no cache() is used', async () => { + const g = await loadAndResolve('A="plain"'); + expect(g.configSchema.A.isCached).toBe(false); + expect(g.configSchema.A.cacheTtl).toBeUndefined(); + }); + + it('finds cache() nested inside other resolvers', async () => { + const g = await loadAndResolve('A=fallback(cache("val", ttl="5m"), "other")'); + expect(g.configSchema.A.cacheTtl).toBe('5m'); + expect(g.configSchema.A.isCached).toBe(true); + }); + }); + + describe('cache hit tracking', () => { + it('reports no cache hits when no cache store', async () => { + const g = await loadAndResolve('A=cache("val", ttl="1h")'); + expect(g.configSchema.A.isCacheHit).toBe(false); + expect(g.configSchema.A._cacheHits).toEqual([]); + }); + + it('records cache hit info with cacheKey and timestamp', async () => { + const store = createTestCacheStore(); + + await loadAndResolve('A=cache("val", ttl="1h")', { cacheStore: store }); + const before = Date.now(); + const g2 = await loadAndResolve('A=cache("val", ttl="1h")', { cacheStore: store }); + + expect(g2.configSchema.A.isCacheHit).toBe(true); + const hit = g2.configSchema.A._cacheHits[0]; + expect(hit.cacheKey).toContain('resolver:'); + expect(hit.cacheKey).toContain(':A:'); + 
expect(hit.cachedAt).toBeLessThanOrEqual(before); + }); + }); + + describe('type inference', () => { + it('infers number type from randomInt() child', async () => { + const g = await loadAndResolve('A=cache(randomInt(1, 10))'); + const item = g.configSchema.A; + // the value should be coerced as a number, not a string + expect(typeof item.resolvedValue).toBe('number'); + }); + + it('infers string type from randomUuid() child', async () => { + const g = await loadAndResolve('A=cache(randomUuid())'); + const item = g.configSchema.A; + expect(typeof item.resolvedValue).toBe('string'); + }); + }); + + describe('various TTL formats in schema', () => { + const validTtls = ['30s', '5m', '1h', '1d', '1w']; + for (const ttl of validTtls) { + it(`accepts ttl="${ttl}"`, async () => { + const g = await loadAndResolve(`A=cache(random(), ttl="${ttl}")`); + expect(g.configSchema.A.errors.length).toBe(0); + expect(g.configSchema.A.resolvedValue).toBeDefined(); + }); + } + }); +}); diff --git a/packages/varlock/src/env-graph/test/random-resolvers.test.ts b/packages/varlock/src/env-graph/test/random-resolvers.test.ts new file mode 100644 index 000000000..9717f2c50 --- /dev/null +++ b/packages/varlock/src/env-graph/test/random-resolvers.test.ts @@ -0,0 +1,156 @@ +/** + * Tests for random value generator resolver functions: + * randomInt(), randomFloat(), randomUuid(), randomHex(), randomString() + */ + +import { describe, it, expect } from 'vitest'; +import { outdent } from 'outdent'; +import { DotEnvFileDataSource, EnvGraph } from '../index'; +import { SchemaError } from '../lib/errors'; + +async function loadAndResolve(envContent: string) { + const g = new EnvGraph(); + const source = new DotEnvFileDataSource('.env.schema', { + overrideContents: outdent` + # @defaultRequired=false + # --- + ${envContent} + `, + }); + await g.setRootDataSource(source); + await g.finishLoad(); + await g.resolveEnvValues(); + return g; +} + +describe('randomInt()', () => { + it('generates an integer 
with no args (0 to int32 max)', async () => { + const g = await loadAndResolve('A=randomInt()'); + const val = g.configSchema.A.resolvedValue as number; + expect(Number.isInteger(val)).toBe(true); + expect(val).toBeGreaterThanOrEqual(0); + }); + + it('generates an integer with max only', async () => { + const g = await loadAndResolve('A=randomInt(10)'); + const val = g.configSchema.A.resolvedValue as number; + expect(Number.isInteger(val)).toBe(true); + expect(val).toBeGreaterThanOrEqual(0); + expect(val).toBeLessThanOrEqual(10); + }); + + it('generates an integer in range', async () => { + const g = await loadAndResolve('A=randomInt(5, 10)'); + const val = g.configSchema.A.resolvedValue as number; + expect(Number.isInteger(val)).toBe(true); + expect(val).toBeGreaterThanOrEqual(5); + expect(val).toBeLessThanOrEqual(10); + }); + + it('rejects min > max', async () => { + const g = await loadAndResolve('A=randomInt(10, 5)'); + expect(g.configSchema.A.errors.length).toBeGreaterThan(0); + expect(g.configSchema.A.errors[0]).toBeInstanceOf(SchemaError); + }); + + it('rejects non-integer args', async () => { + const g = await loadAndResolve('A=randomInt(1.5, 10)'); + expect(g.configSchema.A.errors.length).toBeGreaterThan(0); + }); +}); + +describe('randomFloat()', () => { + it('generates a float with no args (0 to 1)', async () => { + const g = await loadAndResolve('A=randomFloat()'); + const val = g.configSchema.A.resolvedValue as number; + expect(typeof val).toBe('number'); + expect(val).toBeGreaterThanOrEqual(0); + expect(val).toBeLessThanOrEqual(1); + }); + + it('generates a float in range', async () => { + const g = await loadAndResolve('A=randomFloat(10, 20)'); + const val = g.configSchema.A.resolvedValue as number; + expect(val).toBeGreaterThanOrEqual(10); + expect(val).toBeLessThanOrEqual(20); + }); + + it('respects precision option', async () => { + const g = await loadAndResolve('A=randomFloat(0, 1, precision=4)'); + const val = g.configSchema.A.resolvedValue as 
number; + const decimalPlaces = val.toString().split('.')[1]?.length ?? 0; + expect(decimalPlaces).toBeLessThanOrEqual(4); + }); + + it('rejects min > max', async () => { + const g = await loadAndResolve('A=randomFloat(20, 10)'); + expect(g.configSchema.A.errors.length).toBeGreaterThan(0); + }); +}); + +describe('randomUuid()', () => { + it('generates a valid UUID v4', async () => { + const g = await loadAndResolve('A=randomUuid()'); + const val = g.configSchema.A.resolvedValue as string; + expect(val).toMatch(/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i); + }); + + it('generates unique values', async () => { + const g = await loadAndResolve(outdent` + A=randomUuid() + B=randomUuid() + `); + expect(g.configSchema.A.resolvedValue).not.toBe(g.configSchema.B.resolvedValue); + }); +}); + +describe('randomHex()', () => { + it('generates a hex string with default length (32 chars = 16 bytes)', async () => { + const g = await loadAndResolve('A=randomHex()'); + const val = g.configSchema.A.resolvedValue as string; + expect(val).toMatch(/^[0-9a-f]{32}$/); + }); + + it('generates a hex string with custom byte length', async () => { + const g = await loadAndResolve('A=randomHex(8)'); + const val = g.configSchema.A.resolvedValue as string; + expect(val).toMatch(/^[0-9a-f]{16}$/); // 8 bytes = 16 hex chars + }); + + it('rejects zero length', async () => { + const g = await loadAndResolve('A=randomHex(0)'); + expect(g.configSchema.A.errors.length).toBeGreaterThan(0); + }); +}); + +describe('randomString()', () => { + it('generates a string with default length (16) and charset', async () => { + const g = await loadAndResolve('A=randomString()'); + const val = g.configSchema.A.resolvedValue as string; + expect(val.length).toBe(16); + expect(val).toMatch(/^[A-Za-z0-9]+$/); + }); + + it('generates a string with custom length', async () => { + const g = await loadAndResolve('A=randomString(32)'); + const val = g.configSchema.A.resolvedValue as string; + 
expect(val.length).toBe(32); + }); + + it('generates a string with custom charset', async () => { + const g = await loadAndResolve('A=randomString(10, charset="abc")'); + const val = g.configSchema.A.resolvedValue as string; + expect(val.length).toBe(10); + expect(val).toMatch(/^[abc]+$/); + }); + + it('rejects zero length', async () => { + const g = await loadAndResolve('A=randomString(0)'); + expect(g.configSchema.A.errors.length).toBeGreaterThan(0); + }); + + it('rejects empty charset', async () => { + const g = await loadAndResolve("A=randomString(10, charset='')"); + expect(g.configSchema.A.errors.length).toBeGreaterThan(0); + }); +}); diff --git a/packages/varlock/src/lib/cache/cache-store.test.ts b/packages/varlock/src/lib/cache/cache-store.test.ts new file mode 100644 index 000000000..96139903a --- /dev/null +++ b/packages/varlock/src/lib/cache/cache-store.test.ts @@ -0,0 +1,214 @@ +import { + describe, it, expect, vi, beforeEach, afterEach, +} from 'vitest'; +import fs from 'node:fs'; +import path from 'node:path'; +import os from 'node:os'; +import { CacheStore } from './cache-store'; + +// mock localEncrypt to avoid needing real encryption keys +vi.mock('../local-encrypt', () => ({ + encryptValue: vi.fn(async (value: string) => `encrypted:${value}`), + decryptValue: vi.fn(async (value: string) => value.replace('encrypted:', '')), +})); + +// mock getUserVarlockDir to use a temp directory +let tempDir: string; +vi.mock('../user-config-dir', () => ({ + getUserVarlockDir: () => tempDir, +})); + +beforeEach(() => { + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'varlock-cache-test-')); +}); + +afterEach(() => { + fs.rmSync(tempDir, { recursive: true, force: true }); +}); + +describe('CacheStore', () => { + describe('get/set', () => { + it('returns undefined for missing key', async () => { + const store = new CacheStore(); + const result = await store.get('missing:key'); + expect(result).toBeUndefined(); + }); + + it('stores and retrieves a value', async 
() => { + const store = new CacheStore(); + await store.set('plugin:test:mykey', 'hello', 60_000); + const result = await store.get('plugin:test:mykey'); + expect(result).toBeDefined(); + expect(result!.value).toBe('hello'); + }); + + it('returns cachedAt timestamp', async () => { + const store = new CacheStore(); + const before = Date.now(); + await store.set('plugin:test:ts', 'val', 60_000); + const after = Date.now(); + const result = await store.get('plugin:test:ts'); + expect(result!.cachedAt).toBeGreaterThanOrEqual(before); + expect(result!.cachedAt).toBeLessThanOrEqual(after); + }); + + it('overwrites existing value', async () => { + const store = new CacheStore(); + await store.set('plugin:test:k', 'v1', 60_000); + await store.set('plugin:test:k', 'v2', 60_000); + const result = await store.get('plugin:test:k'); + expect(result!.value).toBe('v2'); + }); + }); + + describe('expiry', () => { + it('returns undefined for expired entry', async () => { + const store = new CacheStore(); + await store.set('plugin:test:exp', 'val', 1); // 1ms TTL + // wait for expiry + await new Promise((r) => { + setTimeout(r, 10); + }); + const result = await store.get('plugin:test:exp'); + expect(result).toBeUndefined(); + }); + }); + + describe('delete', () => { + it('removes a specific entry', async () => { + const store = new CacheStore(); + await store.set('plugin:test:a', 'va', 60_000); + await store.set('plugin:test:b', 'vb', 60_000); + store.delete('plugin:test:a'); + expect(await store.get('plugin:test:a')).toBeUndefined(); + expect((await store.get('plugin:test:b'))!.value).toBe('vb'); + }); + }); + + describe('clearAll', () => { + it('clears all entries and returns count', async () => { + const store = new CacheStore(); + await store.set('plugin:a:1', 'v1', 60_000); + await store.set('plugin:b:2', 'v2', 60_000); + const count = store.clearAll(); + expect(count).toBe(2); + expect(await store.get('plugin:a:1')).toBeUndefined(); + expect(await 
store.get('plugin:b:2')).toBeUndefined(); + }); + + it('returns 0 when empty', () => { + const store = new CacheStore(); + expect(store.clearAll()).toBe(0); + }); + }); + + describe('clearByPrefix', () => { + it('clears only entries matching prefix', async () => { + const store = new CacheStore(); + await store.set('plugin:1password:a', 'v1', 60_000); + await store.set('plugin:1password:b', 'v2', 60_000); + await store.set('plugin:aws:c', 'v3', 60_000); + await store.set('resolver:file:item:text', 'v4', 60_000); + + const count = store.clearByPrefix('plugin:1password:'); + expect(count).toBe(2); + expect(await store.get('plugin:1password:a')).toBeUndefined(); + expect(await store.get('plugin:1password:b')).toBeUndefined(); + expect((await store.get('plugin:aws:c'))!.value).toBe('v3'); + expect((await store.get('resolver:file:item:text'))!.value).toBe('v4'); + }); + }); + + describe('getStats', () => { + it('returns correct stats', async () => { + const store = new CacheStore(); + await store.set('plugin:1password:a', 'v1', 60_000); + await store.set('plugin:1password:b', 'v2', 60_000); + await store.set('plugin:aws:c', 'v3', 60_000); + await store.set('resolver:/path:ITEM:text()', 'v4', 60_000); + + const stats = store.getStats(); + expect(stats.total).toBe(4); + expect(stats.expired).toBe(0); + expect(stats.byPrefix['plugin:1password']).toBe(2); + expect(stats.byPrefix['plugin:aws']).toBe(1); + expect(stats.byPrefix['resolver:/path']).toBe(1); + }); + }); + + describe('persistence', () => { + it('persists across new CacheStore instances', async () => { + const store1 = new CacheStore(); + await store1.set('plugin:test:persist', 'persistent-value', 60_000); + + const store2 = new CacheStore(); + const result = await store2.get('plugin:test:persist'); + expect(result!.value).toBe('persistent-value'); + }); + }); + + describe('type preservation', () => { + it('preserves number type', async () => { + const store = new CacheStore(); + await store.set('plugin:test:num', 
42, 60_000); + const result = await store.get('plugin:test:num'); + expect(result!.value).toBe(42); + expect(typeof result!.value).toBe('number'); + }); + + it('preserves boolean type', async () => { + const store = new CacheStore(); + await store.set('plugin:test:bool', true, 60_000); + const result = await store.get('plugin:test:bool'); + expect(result!.value).toBe(true); + expect(typeof result!.value).toBe('boolean'); + }); + + it('preserves object type', async () => { + const store = new CacheStore(); + await store.set('plugin:test:obj', { foo: 'bar', num: 1 }, 60_000); + const result = await store.get('plugin:test:obj'); + expect(result!.value).toEqual({ foo: 'bar', num: 1 }); + }); + + it('preserves array type', async () => { + const store = new CacheStore(); + await store.set('plugin:test:arr', [1, 'two', true], 60_000); + const result = await store.get('plugin:test:arr'); + expect(result!.value).toEqual([1, 'two', true]); + }); + }); + + describe('encryption', () => { + it('stores encrypted JSON-serialized values in the file', async () => { + const store = new CacheStore(); + await store.set('plugin:test:enc', 'secret', 60_000); + + const raw = fs.readFileSync(store.getFilePath(), 'utf-8'); + const data = JSON.parse(raw); + // value should be encrypted JSON, not plaintext + expect(data['plugin:test:enc'].v).toBe('encrypted:"secret"'); + expect(data['plugin:test:enc'].v).not.toBe('secret'); + }); + }); + + describe('graceful degradation', () => { + it('handles missing cache file gracefully', async () => { + const store = new CacheStore(); + // no file exists yet + const result = await store.get('anything'); + expect(result).toBeUndefined(); + }); + + it('handles corrupted cache file gracefully', async () => { + const store = new CacheStore(); + // write garbage to the cache file + const dir = path.dirname(store.getFilePath()); + fs.mkdirSync(dir, { recursive: true }); + fs.writeFileSync(store.getFilePath(), 'not valid json'); + + const result = await 
store.get('anything'); + expect(result).toBeUndefined(); + }); + }); +}); diff --git a/packages/varlock/src/lib/cache/cache-store.ts b/packages/varlock/src/lib/cache/cache-store.ts new file mode 100644 index 000000000..6b0289519 --- /dev/null +++ b/packages/varlock/src/lib/cache/cache-store.ts @@ -0,0 +1,234 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { getUserVarlockDir } from '../user-config-dir'; +import * as localEncrypt from '../local-encrypt'; +import { createDebug } from '../debug'; + +const debug = createDebug('varlock:cache'); + +type CacheEntry = { + /** encrypted value */ + v: string; + /** createdAt (unix ms) */ + c: number; + /** expiresAt (unix ms) */ + e: number; +}; + +type CacheData = Record; + +/** + * JSON-file-based encrypted cache store. + * + * Stores one file per encryption key at `~/.config/varlock/cache/{keyId}.json`. + * Each entry's value is individually encrypted via localEncrypt. + * Cache keys are structured strings like `plugin:name:key` or `resolver:path:item:text`. + */ +export class CacheStore { + private filePath: string; + /** In-memory cache — source of truth during a session to avoid concurrent read/write races */ + private memCache?: CacheData; + + constructor(private keyId: string = 'varlock-default') { + const cacheDir = path.join(getUserVarlockDir(), 'cache'); + this.filePath = path.join(cacheDir, `${keyId}.json`); + } + + /** + * Load and return a cached value, or undefined on miss/expired/error. + * The value is JSON-parsed after decryption to preserve its original type (number, boolean, object, etc.). 
+ */ + async get(cacheKey: string): Promise<{ value: any; cachedAt: number; expiresAt: number } | undefined> { + const data = this.loadFile(); + const entry = data[cacheKey]; + if (!entry) return undefined; + + // check expiry + if (Date.now() > entry.e) { + debug('cache expired for %s', cacheKey); + delete data[cacheKey]; + this.saveFile(data); + return undefined; + } + + try { + const plaintext = await localEncrypt.decryptValue(entry.v, this.keyId); + return { value: JSON.parse(plaintext), cachedAt: entry.c, expiresAt: entry.e }; + } catch (err) { + debug('cache decrypt failed for %s: %O', cacheKey, err); + // corrupt or key mismatch — treat as cache miss + delete data[cacheKey]; + this.saveFile(data); + return undefined; + } + } + + /** + * Encrypt and store a value with a TTL. + * The value is JSON-stringified before encryption to preserve its type on retrieval. + */ + async set(cacheKey: string, value: any, ttlMs: number): Promise { + const data = this.loadFile(); + const now = Date.now(); + + try { + const serialized = JSON.stringify(value); + const encrypted = await localEncrypt.encryptValue(serialized, this.keyId); + data[cacheKey] = { + v: encrypted, + c: now, + // Infinity TTL → use a far-future expiry (~100 years) + e: Number.isFinite(ttlMs) ? now + ttlMs : now + 100 * 365.25 * 86_400_000, + }; + this.saveFile(data); + debug('cache set %s (ttl=%dms)', cacheKey, ttlMs); + } catch (err) { + debug('cache encrypt failed for %s: %O', cacheKey, err); + // encryption failure is non-fatal — just skip caching + } + } + + /** + * Delete a specific cache entry. + */ + delete(cacheKey: string): void { + const data = this.loadFile(); + if (cacheKey in data) { + delete data[cacheKey]; + this.saveFile(data); + } + } + + /** + * Clear all cache entries. Returns the count of cleared entries. 
+ */ + clearAll(): number { + const data = this.loadFile(); + const count = Object.keys(data).length; + if (count > 0) { + this.memCache = {}; + this.saveFile(this.memCache); + } + return count; + } + + /** + * Clear entries matching a key prefix. Returns the count of cleared entries. + * Example: `clearByPrefix("plugin:1password:")` clears all 1password plugin cache. + */ + clearByPrefix(prefix: string): number { + const data = this.loadFile(); + let count = 0; + for (const key of Object.keys(data)) { + if (key.startsWith(prefix)) { + delete data[key]; + count++; + } + } + if (count > 0) { + this.saveFile(data); + } + return count; + } + + /** + * Get cache statistics. + */ + getStats(): { total: number; expired: number; byPrefix: Record } { + const data = this.loadFile(); + const now = Date.now(); + let expired = 0; + const byPrefix: Record = {}; + + for (const [key, entry] of Object.entries(data)) { + if (now > entry.e) { + expired++; + continue; + } + // group by first two segments: "plugin:name" or "resolver" + const firstColon = key.indexOf(':'); + const secondColon = firstColon >= 0 ? key.indexOf(':', firstColon + 1) : -1; + const prefix = secondColon >= 0 ? key.slice(0, secondColon) : key.slice(0, firstColon); + byPrefix[prefix] = (byPrefix[prefix] || 0) + 1; + } + + return { + total: Object.keys(data).length, + expired, + byPrefix, + }; + } + + /** + * List all non-expired entries with their metadata (for interactive browsing). + * Values are NOT decrypted — only keys and timestamps are returned. + */ + listEntries(): Array<{ key: string; cachedAt: number; expiresAt: number }> { + const data = this.loadFile(); + const now = Date.now(); + return Object.entries(data) + .filter(([, entry]) => now <= entry.e) + .map(([key, entry]) => ({ key, cachedAt: entry.c, expiresAt: entry.e })); + } + + /** + * Get the file path for this cache store (for display purposes). 
+ */ + getFilePath(): string { + return this.filePath; + } + + // -- internal -- + + private loadFile(): CacheData { + if (this.memCache) return this.memCache; + try { + if (!fs.existsSync(this.filePath)) { + this.memCache = {}; + return this.memCache; + } + const raw = fs.readFileSync(this.filePath, 'utf-8'); + const data = JSON.parse(raw) as CacheData; + + // cleanup expired entries while we're here + this.memCache = this.cleanup(data); + return this.memCache; + } catch (err) { + debug('cache file load failed: %O', err); + this.memCache = {}; + return this.memCache; + } + } + + private saveFile(data: CacheData): void { + try { + const dir = path.dirname(this.filePath); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } + + // atomic write: write to temp file then rename + const tmpPath = `${this.filePath}.tmp.${process.pid}`; + fs.writeFileSync(tmpPath, JSON.stringify(data, null, 2), 'utf-8'); + fs.renameSync(tmpPath, this.filePath); + } catch (err) { + debug('cache file save failed: %O', err); + } + } + + private cleanup(data: CacheData): CacheData { + const now = Date.now(); + let dirty = false; + for (const key of Object.keys(data)) { + if (now > data[key].e) { + delete data[key]; + dirty = true; + } + } + // write back cleaned data if anything was removed + if (dirty) { + this.saveFile(data); + } + return data; + } +} diff --git a/packages/varlock/src/lib/cache/index.ts b/packages/varlock/src/lib/cache/index.ts new file mode 100644 index 000000000..e070f9ba2 --- /dev/null +++ b/packages/varlock/src/lib/cache/index.ts @@ -0,0 +1,4 @@ +export { CacheStore } from './cache-store'; +export { parseTtl } from './ttl-parser'; +export { PluginCacheAccessor } from './plugin-cache-accessor'; +export { resolveCacheTtl } from './resolve-cache-ttl'; diff --git a/packages/varlock/src/lib/cache/plugin-cache-accessor.ts b/packages/varlock/src/lib/cache/plugin-cache-accessor.ts new file mode 100644 index 000000000..d91849d0c --- /dev/null +++ 
b/packages/varlock/src/lib/cache/plugin-cache-accessor.ts @@ -0,0 +1,56 @@ +import type { CacheStore } from './cache-store'; +import { parseTtl } from './ttl-parser'; + +/** + * Scoped cache accessor for plugin authors. + * + * All keys are automatically prefixed with `plugin:{pluginName}:` so plugins + * cannot collide with each other's cache entries. + * + * Cache hits are automatically recorded on the current resolution context + * (if any) so they show up in `varlock load` and `varlock explain` output. + * + * Usage in a plugin: + * ```ts + * const cached = await plugin.cache.get('vault/MyVault/item/DBCreds'); + * if (!cached) { + * const value = await fetchFromAPI(); + * await plugin.cache.set('vault/MyVault/item/DBCreds', value, '1h'); + * } + * ``` + */ +export class PluginCacheAccessor { + constructor( + private pluginName: string, + private cacheStore: CacheStore, + ) {} + + private buildKey(key: string): string { + return `plugin:${this.pluginName}:${key}`; + } + + async get(key: string): Promise { + const cacheKey = this.buildKey(key); + const result = await this.cacheStore.get(cacheKey); + if (result) { + // automatically record cache hit on the resolution context (if active) + try { + const { getResolutionContext } = await import('../../env-graph/lib/resolution-context'); + const ctx = getResolutionContext(); + ctx?.cacheHits.push({ cacheKey, cachedAt: result.cachedAt, expiresAt: result.expiresAt }); + } catch { + // resolution context not available — that's fine + } + } + return result?.value; + } + + async set(key: string, value: any, ttl: string | number): Promise { + const ttlMs = typeof ttl === 'string' ? 
parseTtl(ttl) : ttl; + await this.cacheStore.set(this.buildKey(key), value, ttlMs); + } + + delete(key: string): void { + this.cacheStore.delete(this.buildKey(key)); + } +} diff --git a/packages/varlock/src/lib/cache/resolve-cache-ttl.ts b/packages/varlock/src/lib/cache/resolve-cache-ttl.ts new file mode 100644 index 000000000..89dbc62d3 --- /dev/null +++ b/packages/varlock/src/lib/cache/resolve-cache-ttl.ts @@ -0,0 +1,34 @@ +import { parseTtl } from './ttl-parser'; + +/** + * Resolve and validate a cacheTtl value from a plugin's init decorator. + * + * Returns the validated TTL (string or number) if valid, or undefined if + * the resolver is not set or the resolved value is falsy (disabled). + * + * Throws on invalid TTL format — the error will be caught by the decorator + * execution handler and surfaced as a plugin-level error. + */ +export async function resolveCacheTtl( + cacheTtlResolver: { resolve(): Promise } | undefined, +): Promise { + if (!cacheTtlResolver) return undefined; + + const cacheTtl = await cacheTtlResolver.resolve(); + + // falsy values (false, undefined, '') mean caching is disabled (e.g., conditional) + if (cacheTtl === undefined || cacheTtl === false || cacheTtl === '') { + return undefined; + } + + if (typeof cacheTtl !== 'string' && typeof cacheTtl !== 'number') { + const err = new Error(`cacheTtl resolved to an invalid type (${typeof cacheTtl})`); + (err as any).tip = 'cacheTtl should resolve to a string like "1h" or a number (0 = forever)'; + throw err; + } + + // validate the format — parseTtl throws on invalid input + parseTtl(cacheTtl); + + return cacheTtl; +} diff --git a/packages/varlock/src/lib/cache/ttl-parser.test.ts b/packages/varlock/src/lib/cache/ttl-parser.test.ts new file mode 100644 index 000000000..7203c3392 --- /dev/null +++ b/packages/varlock/src/lib/cache/ttl-parser.test.ts @@ -0,0 +1,89 @@ +import { describe, it, expect } from 'vitest'; +import { parseTtl } from './ttl-parser'; + +describe('parseTtl', () => { + 
describe('string durations', () => { + it('parses seconds', () => { + expect(parseTtl('30s')).toBe(30_000); + }); + it('parses minutes', () => { + expect(parseTtl('5m')).toBe(300_000); + }); + it('parses hours', () => { + expect(parseTtl('1h')).toBe(3_600_000); + }); + it('parses days', () => { + expect(parseTtl('1d')).toBe(86_400_000); + }); + it('parses weeks', () => { + expect(parseTtl('1w')).toBe(604_800_000); + }); + it('handles uppercase units', () => { + expect(parseTtl('2H')).toBe(7_200_000); + }); + it('handles whitespace around value', () => { + expect(parseTtl(' 1h ')).toBe(3_600_000); + }); + it('handles fractional values', () => { + expect(parseTtl('1.5h')).toBe(5_400_000); + }); + it('parses "hr" shorthand', () => { + expect(parseTtl('1hr')).toBe(3_600_000); + }); + it('parses "hrs" shorthand', () => { + expect(parseTtl('2hrs')).toBe(7_200_000); + }); + it('parses "min" shorthand', () => { + expect(parseTtl('5min')).toBe(300_000); + }); + it('parses "mins" shorthand', () => { + expect(parseTtl('10mins')).toBe(600_000); + }); + it('parses full words', () => { + expect(parseTtl('1hour')).toBe(3_600_000); + expect(parseTtl('2days')).toBe(172_800_000); + expect(parseTtl('1week')).toBe(604_800_000); + expect(parseTtl('30seconds')).toBe(30_000); + expect(parseTtl('5minutes')).toBe(300_000); + }); + }); + + describe('bare numbers', () => { + it('treats bare numbers as milliseconds', () => { + expect(parseTtl('5000')).toBe(5000); + }); + it('handles numeric type', () => { + expect(parseTtl(3000)).toBe(3000); + }); + }); + + describe('forever (0)', () => { + it('treats 0 as forever', () => { + expect(parseTtl(0)).toBe(Infinity); + }); + it('treats "0" string as forever', () => { + expect(parseTtl('0')).toBe(Infinity); + }); + }); + + describe('error cases', () => { + it('rejects empty string', () => { + expect(() => parseTtl('')).toThrow(); + }); + it('rejects zero with unit suffix', () => { + expect(() => parseTtl('0s')).toThrow(); + }); + it('rejects 
negative', () => { + expect(() => parseTtl('-5m')).toThrow(); + }); + it('rejects invalid unit', () => { + expect(() => parseTtl('5x')).toThrow(); + }); + it('rejects non-numeric string', () => { + expect(() => parseTtl('abc')).toThrow(); + }); + it('rejects negative numeric', () => { + expect(() => parseTtl(-100)).toThrow(); + }); + }); +}); diff --git a/packages/varlock/src/lib/cache/ttl-parser.ts b/packages/varlock/src/lib/cache/ttl-parser.ts new file mode 100644 index 000000000..56898a1ea --- /dev/null +++ b/packages/varlock/src/lib/cache/ttl-parser.ts @@ -0,0 +1,82 @@ +const TTL_UNITS: Record = { + s: 1_000, + sec: 1_000, + secs: 1_000, + second: 1_000, + seconds: 1_000, + m: 60_000, + min: 60_000, + mins: 60_000, + minute: 60_000, + minutes: 60_000, + h: 3_600_000, + hr: 3_600_000, + hrs: 3_600_000, + hour: 3_600_000, + hours: 3_600_000, + d: 86_400_000, + day: 86_400_000, + days: 86_400_000, + w: 604_800_000, + wk: 604_800_000, + wks: 604_800_000, + week: 604_800_000, + weeks: 604_800_000, +}; + +/** Sentinel value for "cache forever" (until manually cleared) */ +export const TTL_FOREVER = Infinity; + +/** + * Parse a human-readable TTL string into milliseconds. 
+ * + * Supported formats: + * - `0` → forever (until manually cleared) + * - `"30s"` → 30,000ms + * - `"5m"` → 300,000ms + * - `"1h"` → 3,600,000ms + * - `"1d"` → 86,400,000ms + * - `"1w"` → 604,800,000ms + * - bare number → treated as milliseconds (0 = forever) + */ +export function parseTtl(ttl: string | number): number { + if (typeof ttl === 'number') { + if (ttl === 0) return TTL_FOREVER; + if (ttl < 0 || !Number.isFinite(ttl)) { + throw new Error(`Invalid TTL: ${ttl} — must be a positive number or 0 for forever`); + } + return ttl; + } + + const trimmed = ttl.trim(); + if (!trimmed) throw new Error('TTL string cannot be empty'); + + // try bare number (ms) + const asNum = Number(trimmed); + if (!Number.isNaN(asNum)) { + if (asNum === 0) return TTL_FOREVER; + if (asNum < 0) throw new Error(`Invalid TTL: "${ttl}" — must be positive or 0 for forever`); + return asNum; + } + + const match = trimmed.match(/^(\d+(?:\.\d+)?)\s*([a-z]+)$/i); + if (!match) { + throw new Error( + `Invalid TTL: "${ttl}" — expected a number with a unit suffix (e.g. 
"1h", "30m", "1hr", "2days")`, + ); + } + + const value = parseFloat(match[1]); + const unit = match[2].toLowerCase(); + const multiplier = TTL_UNITS[unit]; + + if (!multiplier) { + throw new Error( + `Invalid TTL unit: "${match[2]}" — valid units: s, sec, m, min, h, hr, d, day, w, wk (and plurals)`, + ); + } + + if (value <= 0) throw new Error(`Invalid TTL: "${ttl}" — must be positive`); + + return Math.round(value * multiplier); +} diff --git a/packages/varlock/src/lib/formatting.ts b/packages/varlock/src/lib/formatting.ts index 2f2264848..a20753003 100644 --- a/packages/varlock/src/lib/formatting.ts +++ b/packages/varlock/src/lib/formatting.ts @@ -88,6 +88,32 @@ const VALIDATION_STATE_COLORS = { valid: 'cyan', } as const; +export function formatDuration(ms: number): string { + if (ms < 1000) return `${ms}ms`; + const s = Math.floor(ms / 1000); + if (s < 60) return `${s}s`; + const m = Math.floor(s / 60); + if (m < 60) return `${m}m`; + const h = Math.floor(m / 60); + if (h < 24) return `${h}h`; + const d = Math.floor(h / 24); + if (d < 7) return `${d}d`; + const w = Math.floor(d / 7); + return `${w}w`; +} + +export function formatTimeAgo(timestamp: number): string { + const diffMs = Date.now() - timestamp; + const diffS = Math.floor(diffMs / 1000); + if (diffS < 60) return `${diffS}s ago`; + const diffM = Math.floor(diffS / 60); + if (diffM < 60) return `${diffM}m ago`; + const diffH = Math.floor(diffM / 60); + if (diffH < 24) return `${diffH}h ago`; + const diffD = Math.floor(diffH / 24); + return `${diffD}d ago`; +} + export function getItemSummary(item: ConfigItem) { const summary: Array = []; const itemErrors = item.errors; @@ -109,6 +135,16 @@ export function getItemSummary(item: ConfigItem) { valAsStr = redactString(item.resolvedValue)!; } + // build inline indicators to append after the value + const indicators: Array = []; + if (item.isCacheHit) { + const oldest = Math.min(...item._cacheHits.map((h) => h.cachedAt)); + indicators.push(ansis.gray(`📦 
${formatTimeAgo(oldest)}`)); + } + if (item.isOverridden) { + indicators.push(ansis.yellow('🟡 process.env')); + } + summary.push(joinAndCompact([ ansis.gray(' └'), valAsStr, @@ -116,12 +152,9 @@ export function getItemSummary(item: ConfigItem) { ansis.gray.italic('< coerced from ') + (isSensitive ? formattedValue(item.resolvedRawValue) : formattedValue(item.resolvedRawValue, false)) ), + indicators.length > 0 && ansis.gray(' ') + indicators.join(' '), ])); - if (item.isOverridden) { - summary.push(` 🟡 ${ansis.yellow.italic('set via process.env override')}`); - } - itemErrors?.forEach((err) => { summary.push(ansis[err.isWarning ? 'yellow' : 'red'](` - ${err.isWarning ? '[WARNING] ' : ''}${err.message}`)); diff --git a/packages/varlock/src/lib/load-graph.ts b/packages/varlock/src/lib/load-graph.ts index 183075e01..2ce80dac3 100644 --- a/packages/varlock/src/lib/load-graph.ts +++ b/packages/varlock/src/lib/load-graph.ts @@ -12,6 +12,10 @@ export function loadVarlockEnvGraph(opts?: { currentEnvFallback?: string, /** Explicit entry file path - overrides package.json config */ entryFilePath?: string, + /** Clear cache and re-resolve all values */ + clearCache?: boolean, + /** Skip cache entirely for this invocation */ + skipCache?: boolean, }) { const pkgLoadPath = readVarlockPackageJsonConfig()?.loadPath; const resolvedEntryFilePath = opts?.entryFilePath ?? 
pkgLoadPath; @@ -43,6 +47,8 @@ export function loadVarlockEnvGraph(opts?: { return runWithWorkspaceInfo(() => loadEnvGraph({ ...opts, entryFilePath: resolvedEntryFilePath, + clearCache: opts?.clearCache, + skipCache: opts?.skipCache, afterInit: async (_g) => { // TODO: register varlock resolver }, diff --git a/packages/varlock/src/lib/local-encrypt/binary-resolver.ts b/packages/varlock/src/lib/local-encrypt/binary-resolver.ts new file mode 100644 index 000000000..08fed4880 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/binary-resolver.ts @@ -0,0 +1,140 @@ +/** + * Resolves the path to the platform-specific native helper binary. + * + * Resolution order: + * 1. SEA sibling: same directory as the running varlock binary (install.sh, homebrew) + * 2. Bundled in npm package: native-bins/[-]/ within the varlock package + * 3. Dev fallback: walk up from __dirname to find build output + * + * Returns undefined if no binary is found (file-based fallback will be used instead). + */ + +import path from 'node:path'; +import fs from 'node:fs'; +import { fileURLToPath } from 'node:url'; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); + +const BINARY_NAME = 'varlock-local-encrypt'; +const MACOS_APP_BUNDLE = 'VarlockEnclave.app'; + +/** Get the binary name for the current platform */ +function getPlatformBinaryName(): string { + return process.platform === 'win32' ? `${BINARY_NAME}.exe` : BINARY_NAME; +} + +/** Get the subdirectory name within native-bins/ for the current platform */ +function getNativeBinSubdir(): string { + if (process.platform === 'darwin') return 'darwin'; + if (process.platform === 'win32') return `win32-${process.arch}`; + return `${process.platform}-${process.arch}`; +} + +/** + * Resolve the macOS .app bundle binary path, or fall back to bare binary. 
+ */ +function resolveMacOSBinary(dir: string): string | undefined { + // Try .app bundle first (needed for custom Touch ID icon) + const appBundlePath = path.join(dir, MACOS_APP_BUNDLE, 'Contents', 'MacOS', BINARY_NAME); + if (fs.existsSync(appBundlePath)) return appBundlePath; + + // Fall back to bare binary + const barePath = path.join(dir, BINARY_NAME); + if (fs.existsSync(barePath)) return barePath; + + return undefined; +} + +/** + * Resolve the binary path for Linux/Windows. + */ +function resolveStandardBinary(dir: string): string | undefined { + const binaryPath = path.join(dir, getPlatformBinaryName()); + if (fs.existsSync(binaryPath)) return binaryPath; + return undefined; +} + +/** + * Resolve binary from a directory, handling macOS .app bundle vs standard binary. + */ +function resolveBinaryFromDir(dir: string): string | undefined { + if (process.platform === 'darwin') return resolveMacOSBinary(dir); + return resolveStandardBinary(dir); +} + +/** + * Strategy 1: Look for the binary next to the running varlock binary. + * This is the primary path for binary/SEA distribution (install.sh, homebrew). + */ +function resolveSeaSibling(): string | undefined { + const execDir = path.dirname(process.execPath); + return resolveBinaryFromDir(execDir); +} + +/** + * Strategy 2: Look for the binary bundled in the varlock npm package. 
+ * native-bins// + */ +function resolveNpmBundled(): string | undefined { + // __dirname points to the compiled dist/ or src/ directory within the varlock package + // native-bins/ is a sibling to dist/ and src/ + const nativeBinsDir = path.resolve(__dirname, '..', '..', '..', 'native-bins', getNativeBinSubdir()); + if (fs.existsSync(nativeBinsDir)) return resolveBinaryFromDir(nativeBinsDir); + + // Also check one level up (when running from dist/) + const altDir = path.resolve(__dirname, '..', 'native-bins', getNativeBinSubdir()); + if (fs.existsSync(altDir)) return resolveBinaryFromDir(altDir); + + return undefined; +} + +/** + * Strategy 3: Development fallback — look for build output in the monorepo. + * Walks up from __dirname looking for native binary build output + */ +function resolveDevFallback(): string | undefined { + let dir = __dirname; + for (let i = 0; i < 10; i++) { + const parent = path.dirname(dir); + if (parent === dir) break; + dir = parent; + + // Check for Swift build output (macOS) + if (process.platform === 'darwin') { + const swiftBuild = path.join(dir, 'packages', 'encryption-binary-swift', 'swift', '.build', 'release', 'VarlockEnclave'); + if (fs.existsSync(swiftBuild)) return swiftBuild; + } + + // Check for Rust build output (Linux/Windows) + const rustBuild = path.join(dir, 'packages', 'encryption-binary-rust', 'target', 'release', getPlatformBinaryName()); + if (fs.existsSync(rustBuild)) return rustBuild; + } + + return undefined; +} + +/** + * Ensure the binary at the given path is executable. + * GitHub Actions artifact upload/download strips execute permissions, + * and some extraction tools may do the same. + */ +function ensureExecutable(binaryPath: string): string { + try { + fs.accessSync(binaryPath, fs.constants.X_OK); + } catch { + // Not executable — try to fix it + if (process.platform !== 'win32') { + fs.chmodSync(binaryPath, 0o755); + } + } + return binaryPath; +} + +/** + * Resolve the native helper binary path. 
+ * Returns undefined if no binary is found — caller should fall back to pure JS. + */ +export function resolveNativeBinary(): string | undefined { + const resolved = resolveSeaSibling() ?? resolveNpmBundled() ?? resolveDevFallback(); + return resolved ? ensureExecutable(resolved) : undefined; +} diff --git a/packages/varlock/src/lib/local-encrypt/builtin-resolver.ts b/packages/varlock/src/lib/local-encrypt/builtin-resolver.ts new file mode 100644 index 000000000..469868948 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/builtin-resolver.ts @@ -0,0 +1,147 @@ +/** + * Built-in varlock() resolver function. + * + * Replaces the plugin-based resolver from @varlock/secure-enclave-plugin. + * Works cross-platform using the local-encrypt abstraction layer. + */ + +import fs from 'node:fs'; +import { createResolver, Resolver } from '../../env-graph/lib/resolver'; +import { ResolutionError, SchemaError } from '../../env-graph/lib/errors'; +import * as localEncrypt from './index'; + +const LOCAL_PREFIX = 'local:'; +const PLUGIN_ICON = 'mdi:fingerprint'; + +function escapeRegExp(str: string) { + return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} + +type VarlockResolverState = { + mode: 'decrypt'; + payload: string; +} | { + mode: 'prompt'; + itemKey: string; + sourceFilePath: string | undefined; +}; + +function writeBackEncryptedValue(itemKey: string, ciphertext: string, sourceFilePath: string | undefined) { + if (!sourceFilePath) return; + const currentContents = fs.readFileSync(sourceFilePath, 'utf-8'); + const pattern = new RegExp(`^(${escapeRegExp(itemKey)}\\s*=\\s*)varlock\\(prompt(?:=\\S*)?\\)`, 'm'); + const prefixedCiphertext = `${LOCAL_PREFIX}${ciphertext}`; + const updatedContents = currentContents.replace(pattern, `$1varlock("${prefixedCiphertext}")`); + if (updatedContents !== currentContents) { + fs.writeFileSync(sourceFilePath, updatedContents); + } +} + +export const VarlockResolver: typeof Resolver = createResolver({ + name: 'varlock', + label: 
'Decrypt locally encrypted value', + icon: PLUGIN_ICON, + argsSchema: { + type: 'mixed', + arrayMinLength: 0, + }, + process(): VarlockResolverState { + // Check for prompt mode: varlock(prompt=1) or varlock(prompt) + const promptArg = this.objArgs?.prompt; + const isPromptPositional = this.arrArgs?.length === 1 + && this.arrArgs[0]?.isStatic + && this.arrArgs[0].staticValue === 'prompt'; + if (promptArg || isPromptPositional) { + // Resolver doesn't expose parent item in its type, but it's available at runtime + const parent = (this as any).parent; + const itemKey = parent?.key || 'unknown'; + const dataSource = this.dataSource as any; + const sourceFilePath = dataSource?.fullPath as string | undefined; + return { mode: 'prompt', itemKey, sourceFilePath }; + } + + // Normal mode: varlock("encrypted-payload") + if (!this.arrArgs || this.arrArgs.length !== 1) { + throw new SchemaError('varlock() expects a single encrypted payload string, or prompt to enter a new value'); + } + if (!this.arrArgs[0]?.isStatic) { + throw new SchemaError('varlock() expects a single static encrypted payload string'); + } + const payload = this.arrArgs[0].staticValue; + if (typeof payload !== 'string') { + throw new SchemaError('varlock() expects a string argument'); + } + return { mode: 'decrypt', payload }; + }, + async resolve(state: VarlockResolverState) { + // Ensure a key exists (first-time setup) + await localEncrypt.ensureKey(); + + if (state.mode === 'decrypt') { + let ciphertext = state.payload; + if (ciphertext.startsWith(LOCAL_PREFIX)) { + ciphertext = ciphertext.slice(LOCAL_PREFIX.length); + } + try { + return await localEncrypt.decryptValue(ciphertext); + } catch (err) { + const backend = localEncrypt.getBackendInfo(); + throw new ResolutionError( + `Decryption failed: ${err instanceof Error ? err.message : err}`, + { + tip: [ + `Backend: ${backend.type} (${backend.hardwareBacked ? 
'hardware-backed' : 'file-based'})`, + 'Make sure the encryption key has not been deleted.', + 'Run `varlock encrypt --help` for more info.', + ].join('\n'), + }, + ); + } + } + + // Prompt mode: prompt user for secret, encrypt it, write back to file + const { itemKey, sourceFilePath } = state; + const backend = localEncrypt.getBackendInfo(); + + // Use daemon's native dialog on macOS Secure Enclave + if (backend.type === 'secure-enclave' && backend.biometricAvailable) { + const { DaemonClient } = await import('./daemon-client'); + const client = new DaemonClient(); + const ciphertext = await client.promptSecret({ + itemKey, + message: `Enter the secret value for ${itemKey}:`, + }); + + if (!ciphertext) { + throw new ResolutionError('Secret input was cancelled', { + tip: 'Run varlock again and enter a value, or replace prompt=1 with an encrypted value', + }); + } + + writeBackEncryptedValue(itemKey, ciphertext, sourceFilePath); + return localEncrypt.decryptValue(ciphertext); + } + + // Terminal prompt for file-based backend + if (!process.stdout.isTTY || !process.stdin.isTTY) { + throw new ResolutionError( + `No encrypted value found for ${itemKey}`, + { + tip: `Run \`varlock encrypt --file ${sourceFilePath || ''}\` to encrypt this value interactively.`, + }, + ); + } + + const { password, isCancel } = await import('@clack/prompts'); + const rawValue = await password({ message: `Enter the secret value for ${itemKey}:` }); + if (isCancel(rawValue) || !rawValue) { + throw new ResolutionError('Secret input was cancelled', { + tip: 'Run varlock again and enter a value, or replace prompt=1 with an encrypted value', + }); + } + + const ciphertext = await localEncrypt.encryptValue(rawValue); + writeBackEncryptedValue(itemKey, ciphertext, sourceFilePath); + return rawValue; + }, +}); diff --git a/packages/varlock/src/lib/local-encrypt/crypto.test.ts b/packages/varlock/src/lib/local-encrypt/crypto.test.ts new file mode 100644 index 000000000..baeca2372 --- /dev/null +++ 
b/packages/varlock/src/lib/local-encrypt/crypto.test.ts @@ -0,0 +1,95 @@ +import { describe, it, expect } from 'vitest'; +import { createKeyPair, encrypt, decrypt } from './crypto'; + +describe('ECIES crypto', () => { + it('round-trips encrypt → decrypt', async () => { + const keyPair = await createKeyPair(); + const plaintext = 'hello world — this is a secret!'; + + const ciphertext = await encrypt(keyPair.publicKey, plaintext); + const decrypted = await decrypt(keyPair.privateKey, keyPair.publicKey, ciphertext); + + expect(decrypted).toBe(plaintext); + }); + + it('produces different ciphertext each time (random nonce)', async () => { + const keyPair = await createKeyPair(); + const plaintext = 'same input'; + + const ct1 = await encrypt(keyPair.publicKey, plaintext); + const ct2 = await encrypt(keyPair.publicKey, plaintext); + + expect(ct1).not.toBe(ct2); + + // But both decrypt to the same value + expect(await decrypt(keyPair.privateKey, keyPair.publicKey, ct1)).toBe(plaintext); + expect(await decrypt(keyPair.privateKey, keyPair.publicKey, ct2)).toBe(plaintext); + }); + + it('fails with wrong private key', async () => { + const keyPair1 = await createKeyPair(); + const keyPair2 = await createKeyPair(); + const plaintext = 'secret'; + + const ciphertext = await encrypt(keyPair1.publicKey, plaintext); + + await expect(decrypt(keyPair2.privateKey, keyPair2.publicKey, ciphertext)).rejects.toThrow(); + }); + + it('fails with truncated payload', async () => { + const keyPair = await createKeyPair(); + const ciphertext = await encrypt(keyPair.publicKey, 'test'); + + // Truncate the base64 payload + const truncated = ciphertext.slice(0, 20); + await expect(decrypt(keyPair.privateKey, keyPair.publicKey, truncated)).rejects.toThrow('Payload too short'); + }); + + it('fails with wrong version byte', async () => { + const keyPair = await createKeyPair(); + const ciphertext = await encrypt(keyPair.publicKey, 'test'); + + // Decode, change version byte, re-encode + const buf 
= Buffer.from(ciphertext, 'base64'); + buf[0] = 0xFF; + const tampered = buf.toString('base64'); + + await expect(decrypt(keyPair.privateKey, keyPair.publicKey, tampered)).rejects.toThrow( + 'Unsupported payload version', + ); + }); + + it('handles empty string', async () => { + const keyPair = await createKeyPair(); + const ciphertext = await encrypt(keyPair.publicKey, ''); + const decrypted = await decrypt(keyPair.privateKey, keyPair.publicKey, ciphertext); + expect(decrypted).toBe(''); + }); + + it('handles unicode and emoji', async () => { + const keyPair = await createKeyPair(); + const plaintext = 'こんにちは 🔐 résumé café'; + const ciphertext = await encrypt(keyPair.publicKey, plaintext); + const decrypted = await decrypt(keyPair.privateKey, keyPair.publicKey, ciphertext); + expect(decrypted).toBe(plaintext); + }); + + it('handles large payloads', async () => { + const keyPair = await createKeyPair(); + const plaintext = 'x'.repeat(100_000); + const ciphertext = await encrypt(keyPair.publicKey, plaintext); + const decrypted = await decrypt(keyPair.privateKey, keyPair.publicKey, ciphertext); + expect(decrypted).toBe(plaintext); + }); + + it('payload has correct structure', async () => { + const keyPair = await createKeyPair(); + const ciphertext = await encrypt(keyPair.publicKey, 'test'); + const payload = Buffer.from(ciphertext, 'base64'); + + // version(1) + ephemeralPubKey(65) + nonce(12) + ciphertext(4 for "test") + tag(16) = 98 + expect(payload[0]).toBe(0x01); // version + expect(payload[1]).toBe(0x04); // uncompressed point prefix + expect(payload.length).toBe(1 + 65 + 12 + 4 + 16); // 98 bytes + }); +}); diff --git a/packages/varlock/src/lib/local-encrypt/crypto.ts b/packages/varlock/src/lib/local-encrypt/crypto.ts new file mode 100644 index 000000000..ee51f8c1d --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/crypto.ts @@ -0,0 +1,247 @@ +/** + * Pure JS ECIES implementation using Node.js Web Crypto API. 
+ * + * Wire-compatible with the Swift Secure Enclave implementation: + * - P-256 ECDH key agreement + * - HKDF-SHA256 (salt: "varlock-ecies-v1", info: ephemeralPub || recipientPub) + * - AES-256-GCM with random 12-byte nonce + * - Payload: version(1) | ephemeralPubKey(65) | nonce(12) | ciphertext(N) | tag(16) + * + * Adapted from PR #19's apple-crypto.ts, modified to match the custom ECIES scheme + * used by the Swift SecureEnclaveManager rather than Apple's built-in variant. + */ + +import { webcrypto } from 'node:crypto'; + +const subtle = webcrypto.subtle; + +const PAYLOAD_VERSION = 0x01; +const HKDF_SALT = new TextEncoder().encode('varlock-ecies-v1'); +const EC_ALGORITHM = { name: 'ECDH', namedCurve: 'P-256' }; + +/** Uncompressed P-256 public key is 65 bytes (0x04 || x(32) || y(32)) */ +const PUBLIC_KEY_LENGTH = 65; +const NONCE_LENGTH = 12; +const TAG_LENGTH = 16; +const HEADER_LENGTH = 1 + PUBLIC_KEY_LENGTH + NONCE_LENGTH; // version + pubkey + nonce + +// Bun's types are stricter about BufferSource (requires ArrayBuffer, not ArrayBufferLike). +// This type assertion is safe — we always work with standard ArrayBuffers. 
+
+const bs = (data: Uint8Array | ArrayBuffer) => data as any;
+
+// ── Key types ──────────────────────────────────────────────────────────
+
+export interface EcKeyPair {
+  /** Base64-encoded uncompressed P-256 public key (65 bytes raw) */
+  publicKey: string;
+  /** Base64-encoded PKCS8 private key */
+  privateKey: string;
+}
+
+// ── Utilities ──────────────────────────────────────────────────────────
+
+function concatBuffers(...buffers: Array<Uint8Array>): Uint8Array {
+  const totalLength = buffers.reduce((sum, b) => sum + b.length, 0);
+  const result = new Uint8Array(totalLength);
+  let offset = 0;
+  for (const buf of buffers) {
+    result.set(buf, offset);
+    offset += buf.length;
+  }
+  return result;
+}
+
+function bufferToBase64(buffer: ArrayBuffer | Uint8Array): string {
+  if (buffer instanceof Uint8Array) {
+    return Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength).toString('base64');
+  }
+  return Buffer.from(buffer).toString('base64');
+}
+
+function base64ToUint8(base64: string): Uint8Array {
+  const buf = Buffer.from(base64, 'base64');
+  return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength);
+}
+
+// ── HKDF-SHA256 ────────────────────────────────────────────────────────
+
+/**
+ * HKDF-SHA256 (RFC 5869) — matches the Swift SecureEnclaveManager.deriveKey implementation.
+ *
+ * We implement this manually rather than using Web Crypto's built-in HKDF because
+ * the Web Crypto HKDF requires importing the input key material as a CryptoKey,
+ * which adds complexity. This manual implementation is a direct port of the Swift code.
+ */
+async function hkdfSha256(
+  ikm: Uint8Array,
+  salt: Uint8Array,
+  info: Uint8Array,
+  outputByteCount: number,
+): Promise<Uint8Array> {
+  // HKDF-Extract: PRK = HMAC-SHA256(salt, IKM)
+  const saltKey = await subtle.importKey('raw', bs(salt), { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']);
+  const prk = new Uint8Array(await subtle.sign('HMAC', saltKey, bs(ikm)));
+
+  // HKDF-Expand: OKM = T(1) || T(2) || ...
+  const prkKey = await subtle.importKey('raw', bs(prk), { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']);
+  const okm = new Uint8Array(outputByteCount);
+  let t = new Uint8Array(0);
+  let offset = 0;
+  let counter = 1;
+
+  while (offset < outputByteCount) {
+    const input = concatBuffers(t, info, new Uint8Array([counter]));
+    t = new Uint8Array(await subtle.sign('HMAC', prkKey, bs(input)));
+    okm.set(t.slice(0, Math.min(t.length, outputByteCount - offset)), offset);
+    offset += t.length;
+    counter++;
+  }
+
+  return okm;
+}
+
+// ── Key management ─────────────────────────────────────────────────────
+
+/** Import a public key from its base64-encoded uncompressed representation. */
+async function importPublicKey(base64: string): Promise<CryptoKey> {
+  return subtle.importKey('raw', bs(base64ToUint8(base64)), EC_ALGORITHM, true, []);
+}
+
+/** Import a private key from its base64-encoded PKCS8 representation. */
+async function importPrivateKey(base64: string): Promise<CryptoKey> {
+  return subtle.importKey('pkcs8', bs(base64ToUint8(base64)), EC_ALGORITHM, true, ['deriveBits']);
+}
+
+/** Generate a new P-256 ECDH key pair. */
+export async function createKeyPair(): Promise<EcKeyPair> {
+  const keyPair = await subtle.generateKey(EC_ALGORITHM, true, ['deriveBits']);
+
+  const publicKeyRaw = await subtle.exportKey('raw', keyPair.publicKey);
+  const privateKeyPkcs8 = await subtle.exportKey('pkcs8', keyPair.privateKey);
+
+  return {
+    publicKey: bufferToBase64(publicKeyRaw),
+    privateKey: bufferToBase64(privateKeyPkcs8),
+  };
+}
+
+// ── ECIES encrypt ──────────────────────────────────────────────────────
+
+/**
+ * Encrypt plaintext using ECIES with the recipient's public key.
+ *
+ * @param publicKeyBase64 - Base64-encoded uncompressed P-256 public key (65 bytes raw)
+ * @param plaintext - UTF-8 string to encrypt
+ * @returns Base64-encoded ciphertext payload
+ */
+export async function encrypt(publicKeyBase64: string, plaintext: string): Promise<string> {
+  const recipientPublicKey = await importPublicKey(publicKeyBase64);
+  const recipientPubKeyRaw = base64ToUint8(publicKeyBase64);
+
+  // Generate ephemeral key pair
+  const ephemeralKeyPair = await subtle.generateKey(EC_ALGORITHM, true, ['deriveBits']);
+  const ephemeralPubKeyRaw = new Uint8Array(await subtle.exportKey('raw', ephemeralKeyPair.publicKey));
+
+  // ECDH: ephemeral private × recipient public → shared secret (32 bytes for P-256)
+  const sharedSecretBits = await subtle.deriveBits(
+    { name: 'ECDH', public: recipientPublicKey },
+    ephemeralKeyPair.privateKey,
+    256,
+  );
+  const sharedSecret = new Uint8Array(sharedSecretBits);
+
+  // HKDF-SHA256 → AES-256 key
+  const info = concatBuffers(ephemeralPubKeyRaw, recipientPubKeyRaw);
+  const aesKey = await hkdfSha256(sharedSecret, HKDF_SALT, info, 32);
+
+  // AES-256-GCM encrypt
+  const nonce = webcrypto.getRandomValues(new Uint8Array(NONCE_LENGTH));
+  const plaintextBytes = new TextEncoder().encode(plaintext);
+
+  const cryptoKey = await subtle.importKey('raw', bs(aesKey), 'AES-GCM', false, ['encrypt']);
+  const encrypted = new Uint8Array(
+    await subtle.encrypt({ name: 'AES-GCM', iv: bs(nonce), tagLength: TAG_LENGTH * 8 }, cryptoKey, bs(plaintextBytes)),
+  );
+
+  // Web Crypto appends the tag to ciphertext — split them to match Swift format
+  const ciphertext = encrypted.slice(0, encrypted.length - TAG_LENGTH);
+  const tag = encrypted.slice(encrypted.length - TAG_LENGTH);
+
+  // Assemble payload: version(1) | ephemeralPub(65) | nonce(12) | ciphertext(N) | tag(16)
+  const payload = concatBuffers(
+    new Uint8Array([PAYLOAD_VERSION]),
+    ephemeralPubKeyRaw,
+    nonce,
+    ciphertext,
+    tag,
+  );
+
+  return bufferToBase64(payload);
+}
+
+// ── ECIES decrypt ──────────────────────────────────────────────────────
+
+/**
+ * Decrypt ciphertext using ECIES with the recipient's private key.
+ *
+ * @param privateKeyBase64 - Base64-encoded PKCS8 private key
+ * @param publicKeyBase64 - Base64-encoded uncompressed P-256 public key of the recipient
+ * @param ciphertextBase64 - Base64-encoded ciphertext payload
+ * @returns Decrypted UTF-8 string
+ */
+export async function decrypt(
+  privateKeyBase64: string,
+  publicKeyBase64: string,
+  ciphertextBase64: string,
+): Promise<string> {
+  const payloadBytes = base64ToUint8(ciphertextBase64);
+
+  if (payloadBytes.byteLength < HEADER_LENGTH + TAG_LENGTH) {
+    throw new Error('Payload too short');
+  }
+
+  // Parse payload
+  const version = payloadBytes[0];
+  if (version !== PAYLOAD_VERSION) {
+    throw new Error(`Unsupported payload version: ${version}`);
+  }
+
+  const ephemeralPubKeyRaw = payloadBytes.slice(1, 1 + PUBLIC_KEY_LENGTH);
+  const nonce = payloadBytes.slice(1 + PUBLIC_KEY_LENGTH, HEADER_LENGTH);
+  const ciphertextAndTag = payloadBytes.slice(HEADER_LENGTH);
+
+  if (ciphertextAndTag.length < TAG_LENGTH) {
+    throw new Error('Payload too short for tag');
+  }
+
+  // Import keys
+  const privateKey = await importPrivateKey(privateKeyBase64);
+  const ephemeralPublicKey = await subtle.importKey('raw', bs(ephemeralPubKeyRaw), EC_ALGORITHM, true, []);
+
+  // Recipient public key bytes for HKDF info
+  const recipientPubKeyRaw = base64ToUint8(publicKeyBase64);
+
+  // ECDH: recipient private × ephemeral public → shared secret
+  const sharedSecretBits = await subtle.deriveBits(
+    { name: 'ECDH', public: ephemeralPublicKey },
+    privateKey,
+    256,
+  );
+  const sharedSecret = new Uint8Array(sharedSecretBits);
+
+  // HKDF-SHA256 → AES-256 key (must match encrypt side)
+  const info = concatBuffers(ephemeralPubKeyRaw, recipientPubKeyRaw);
+  const aesKey = await hkdfSha256(sharedSecret, HKDF_SALT, info, 32);
+
+  // AES-256-GCM decrypt
+  // Web Crypto expects ciphertext + tag
concatenated + const cryptoKey = await subtle.importKey('raw', bs(aesKey), 'AES-GCM', false, ['decrypt']); + const decrypted = await subtle.decrypt( + { name: 'AES-GCM', iv: bs(nonce), tagLength: TAG_LENGTH * 8 }, + cryptoKey, + bs(ciphertextAndTag), // already ciphertext || tag + ); + + return new TextDecoder().decode(decrypted); +} diff --git a/packages/varlock/src/lib/local-encrypt/daemon-client.ts b/packages/varlock/src/lib/local-encrypt/daemon-client.ts new file mode 100644 index 000000000..78de93357 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/daemon-client.ts @@ -0,0 +1,337 @@ +/** + * Daemon client for communicating with the native encryption helper binary. + * + * Handles daemon lifecycle (spawn, connect, reconnect) and IPC messaging + * using the 4-byte LE length-prefixed JSON protocol. + * + * - macOS/Linux: Unix domain socket + * - Windows: named pipe (TODO) + * + * Generalized from the secure-enclave plugin's EnclaveDaemonClient. + */ + +import net from 'node:net'; +import path from 'node:path'; +import fs from 'node:fs'; +import crypto from 'node:crypto'; +import { spawn } from 'node:child_process'; + +import { getUserVarlockDir } from '../user-config-dir'; +import { resolveNativeBinary } from './binary-resolver'; + +function getSocketDir(): string { + return path.join(getUserVarlockDir(), 'local-encrypt'); +} + +function getSocketPath(): string { + if (process.platform === 'win32') { + // Windows named pipe — fixed name shared by all varlock processes + return '\\\\.\\pipe\\varlock-local-encrypt'; + } + return path.join(getSocketDir(), 'daemon.sock'); +} + +function getPidPath(): string { + return path.join(getSocketDir(), 'daemon.pid'); +} + +export class DaemonClient { + private socket: net.Socket | null = null; + private messageQueue = new Map void; + reject: (error: Error) => void; + }>(); + private isConnected = false; + private buffer = Buffer.alloc(0); + private connectingPromise: Promise | null = null; + + async 
ensureConnected(): Promise { + if (this.isConnected && this.socket) return; + + // Deduplicate concurrent ensureConnected calls — multiple varlock() items + // may resolve concurrently and all call decrypt → ensureConnected + if (this.connectingPromise) return this.connectingPromise; + + this.connectingPromise = this.doConnect(); + try { + await this.connectingPromise; + } finally { + this.connectingPromise = null; + } + } + + /** + * Try to connect to an existing daemon without spawning a new one. + * Returns true if connected, false if no daemon is running. + */ + async tryConnect(): Promise { + if (this.isConnected && this.socket) return true; + const socketPath = getSocketPath(); + try { + await this.connectToSocket(socketPath); + return true; + } catch { + return false; + } + } + + private async doConnect(): Promise { + const socketPath = getSocketPath(); + try { + await this.connectToSocket(socketPath); + return; + } catch { + // Daemon not running, spawn it + } + + await this.spawnDaemon(); + await this.connectToSocket(socketPath); + } + + async decrypt(ciphertext: string, keyId = 'varlock-default'): Promise { + await this.ensureConnected(); + const result = await this.sendMessage({ + action: 'decrypt', + payload: { ciphertext, keyId }, + }); + if (typeof result === 'string') return result; + if (result && typeof result === 'object' && 'error' in result) { + throw new Error(String(result.error)); + } + return String(result); + } + + async promptSecret(opts?: { + itemKey?: string; + message?: string; + keyId?: string; + }): Promise { + await this.ensureConnected(); + try { + const result = await this.sendMessage({ + action: 'prompt-secret', + payload: { + itemKey: opts?.itemKey, + message: opts?.message, + keyId: opts?.keyId, + }, + }); + if (result && typeof result === 'object' && 'ciphertext' in result) { + return result.ciphertext as string; + } + return undefined; + } catch (err) { + if (err instanceof Error && err.message === 'cancelled') return 
undefined; + throw err; + } + } + + async invalidateSession(): Promise { + await this.ensureConnected(); + await this.sendMessage({ action: 'invalidate-session' }); + } + + cleanup(): void { + for (const { reject } of this.messageQueue.values()) { + reject(new Error('Connection closed')); + } + this.messageQueue.clear(); + this.socket?.end(); + this.socket = null; + this.isConnected = false; + this.buffer = Buffer.alloc(0); + } + + // -- Private -- + + private connectToSocket(socketPath: string): Promise { + return new Promise((resolve, reject) => { + const socket = new net.Socket(); + const timeout = setTimeout(() => { + socket.destroy(); + reject(new Error('Connection timeout')); + }, 5000); + + socket.on('connect', () => { + clearTimeout(timeout); + this.socket = socket; + this.isConnected = true; + this.buffer = Buffer.alloc(0); + resolve(); + }); + + socket.on('data', (data: Buffer) => { + this.handleData(data); + }); + + socket.on('error', (err) => { + clearTimeout(timeout); + this.isConnected = false; + reject(err); + }); + + socket.on('close', () => { + this.isConnected = false; + this.socket = null; + }); + + socket.connect(socketPath); + }); + } + + private handleData(data: Buffer): void { + this.buffer = Buffer.concat([this.buffer, data]); + + while (this.buffer.length >= 4) { + const messageLength = this.buffer.readUInt32LE(0); + if (this.buffer.length < 4 + messageLength) break; + + const messageData = this.buffer.subarray(4, 4 + messageLength); + this.buffer = this.buffer.subarray(4 + messageLength); + + try { + const message = JSON.parse(messageData.toString()); + if (message.id && this.messageQueue.has(message.id)) { + const { resolve: res, reject: rej } = this.messageQueue.get(message.id)!; + this.messageQueue.delete(message.id); + if (message.error) { + rej(new Error(message.error)); + } else { + res(message.result); + } + } + } catch { + // Ignore malformed messages + } + } + } + + private sendMessage(message: Record): Promise { + return new 
Promise((resolve, reject) => { + if (!this.isConnected || !this.socket) { + reject(new Error('Not connected to daemon')); + return; + } + + const messageId = `${Date.now().toString(36)}-${crypto.randomBytes(4).toString('hex')}`; + const messageWithId = { ...message, id: messageId }; + const jsonData = JSON.stringify(messageWithId); + const messageBytes = Buffer.from(jsonData, 'utf-8'); + + const lengthBuf = Buffer.alloc(4); + lengthBuf.writeUInt32LE(messageBytes.length, 0); + + this.messageQueue.set(messageId, { resolve, reject }); + this.socket.write(Buffer.concat([lengthBuf, messageBytes])); + }); + } + + private async spawnDaemon(): Promise { + const binaryPath = resolveNativeBinary(); + if (!binaryPath) { + throw new Error('Native encryption binary not found — cannot start daemon'); + } + + const socketPath = getSocketPath(); + const pidPath = getPidPath(); + const isWindows = process.platform === 'win32'; + + // Ensure PID directory exists (don't mkdir for Windows pipe paths) + if (!isWindows) { + fs.mkdirSync(path.dirname(socketPath), { recursive: true }); + } + fs.mkdirSync(path.dirname(pidPath), { recursive: true }); + + // Check for existing daemon via PID + if (fs.existsSync(pidPath)) { + try { + const pid = parseInt(fs.readFileSync(pidPath, 'utf-8').trim(), 10); + process.kill(pid, 0); // Throws if process doesn't exist + // Process is alive — wait briefly and let ensureConnected retry + await new Promise((r) => { + setTimeout(r, 500); + }); + return; + } catch { + // Stale PID file — clean up both PID and socket + } + } + + // Clean up stale files before spawning + // On Windows, named pipes don't leave files — only clean PID and Unix sockets + if (!isWindows) { + for (const file of [socketPath, pidPath]) { + if (fs.existsSync(file)) { + fs.unlinkSync(file); + } + } + // Verify socket file is actually gone + if (fs.existsSync(socketPath)) { + throw new Error(`Failed to clean up stale socket file: ${socketPath}`); + } + } else { + // Clean PID file only 
on Windows + if (fs.existsSync(pidPath)) { + fs.unlinkSync(pidPath); + } + } + + return new Promise<void>((resolve, reject) => { + const child = spawn(binaryPath, [ + 'daemon', + '--socket-path', + socketPath, + '--pid-path', + pidPath, + ], { + detached: true, + stdio: ['ignore', 'pipe', 'pipe'], + }); + + const timeout = setTimeout(() => { + reject(new Error('Daemon failed to start within timeout')); + }, 10000); + + let stdoutData = ''; + let stderrData = ''; + + child.stdout!.on('data', (data: Buffer) => { + stdoutData += data.toString(); + try { + const parsed = JSON.parse(stdoutData); + if (parsed.ready) { + clearTimeout(timeout); + child.unref(); + child.stdout!.destroy(); + child.stderr!.destroy(); + resolve(); + } + } catch { + // Incomplete JSON, keep buffering + } + }); + + child.stderr!.on('data', (data: Buffer) => { + stderrData += data.toString(); + }); + + child.on('error', (err) => { + clearTimeout(timeout); + reject(new Error(`Failed to spawn daemon: ${err.message}`)); + }); + + child.on('exit', (code) => { + clearTimeout(timeout); + if (code !== 0) { + const details = [ + stderrData.trim() && `stderr: ${stderrData.trim()}`, + stdoutData.trim() && `stdout: ${stdoutData.trim()}`, + `binary: ${binaryPath}`, + `socket: ${socketPath}`, + ].filter(Boolean).join('\n'); + reject(new Error(`Daemon exited with code ${code}\n${details}`)); + } + }); + }); + } +} diff --git a/packages/varlock/src/lib/local-encrypt/file-backend.test.ts b/packages/varlock/src/lib/local-encrypt/file-backend.test.ts new file mode 100644 index 000000000..afdcbf06b --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/file-backend.test.ts @@ -0,0 +1,81 @@ +import { + describe, it, expect, beforeEach, afterEach, vi, +} from 'vitest'; +import fs from 'node:fs'; +import path from 'node:path'; +import os from 'node:os'; +import { + keyExists, generateKey, deleteKey, listKeys, encryptValue, decryptValue, +} from './file-backend'; + +// Use a temp directory for all key operations during
tests +const testDir = path.join(os.tmpdir(), `varlock-test-${process.pid}`); + +vi.mock('../user-config-dir', () => ({ + getUserVarlockDir: () => testDir, +})); + +beforeEach(() => { + fs.mkdirSync(testDir, { recursive: true }); +}); + +afterEach(() => { + fs.rmSync(testDir, { recursive: true, force: true }); +}); + +describe('file-backend', () => { + it('generates and checks key existence', async () => { + expect(keyExists('test-key')).toBe(false); + const result = await generateKey('test-key'); + expect(result.keyId).toBe('test-key'); + expect(result.publicKey).toBeTruthy(); + expect(keyExists('test-key')).toBe(true); + }); + + it('uses default key id', async () => { + await generateKey(); + expect(keyExists()).toBe(true); + expect(keyExists('varlock-default')).toBe(true); + }); + + it('lists keys', async () => { + expect(listKeys()).toEqual([]); + await generateKey('key-a'); + await generateKey('key-b'); + const keys = listKeys(); + expect(keys).toContain('key-a'); + expect(keys).toContain('key-b'); + expect(keys).toHaveLength(2); + }); + + it('deletes keys', async () => { + await generateKey('to-delete'); + expect(keyExists('to-delete')).toBe(true); + expect(deleteKey('to-delete')).toBe(true); + expect(keyExists('to-delete')).toBe(false); + expect(deleteKey('nonexistent')).toBe(false); + }); + + it('round-trips encrypt → decrypt', async () => { + await generateKey('round-trip'); + const plaintext = 'super secret value!'; + const ciphertext = await encryptValue(plaintext, 'round-trip'); + const decrypted = await decryptValue(ciphertext, 'round-trip'); + expect(decrypted).toBe(plaintext); + }); + + it('fails to encrypt with nonexistent key', async () => { + await expect(encryptValue('test', 'nonexistent')).rejects.toThrow('Key not found'); + }); + + it('fails to decrypt with nonexistent key', async () => { + await expect(decryptValue('dGVzdA==', 'nonexistent')).rejects.toThrow('Key not found'); + }); + + it('fails to decrypt with wrong key', async () => { + 
await generateKey('key-1'); + await generateKey('key-2'); + const ciphertext = await encryptValue('secret', 'key-1'); + await expect(decryptValue(ciphertext, 'key-2')).rejects.toThrow(); + }); +}); diff --git a/packages/varlock/src/lib/local-encrypt/file-backend.ts b/packages/varlock/src/lib/local-encrypt/file-backend.ts new file mode 100644 index 000000000..e8107c3b4 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/file-backend.ts @@ -0,0 +1,103 @@ +/** + * File-based local encryption backend. + * + * Stores P-256 ECDH key pairs as JSON files on disk with restricted permissions. + * Uses the pure JS ECIES implementation for all crypto operations. + * Works on all platforms — no native binary required. + */ + +import fs from 'node:fs'; +import path from 'node:path'; +import { getUserVarlockDir } from '../user-config-dir'; +import { createKeyPair, encrypt, decrypt } from './crypto'; + +const KEY_STORE_SUBDIR = 'local-encrypt/keys'; +const DEFAULT_KEY_ID = 'varlock-default'; + +interface StoredKeyPair { + keyId: string; + publicKey: string; + privateKey: string; + createdAt: string; +} + +function getKeyStorePath(): string { + return path.join(getUserVarlockDir(), KEY_STORE_SUBDIR); +} + +function getKeyFilePath(keyId: string): string { + return path.join(getKeyStorePath(), `${keyId}.json`); +} + +// ── Key management ───────────────────────────────────────────────────── + +export function keyExists(keyId: string = DEFAULT_KEY_ID): boolean { + return fs.existsSync(getKeyFilePath(keyId)); +} + +export async function generateKey(keyId: string = DEFAULT_KEY_ID): Promise<{ keyId: string; publicKey: string }> { + const keyPair = await createKeyPair(); + + const stored: StoredKeyPair = { + keyId, + publicKey: keyPair.publicKey, + privateKey: keyPair.privateKey, + createdAt: new Date().toISOString(), + }; + + const keyStorePath = getKeyStorePath(); + fs.mkdirSync(keyStorePath, { recursive: true }); + + const filePath = getKeyFilePath(keyId); + 
fs.writeFileSync(filePath, JSON.stringify(stored, null, 2), { mode: 0o600 }); + + return { keyId, publicKey: keyPair.publicKey }; +} + +export function deleteKey(keyId: string = DEFAULT_KEY_ID): boolean { + const filePath = getKeyFilePath(keyId); + try { + fs.unlinkSync(filePath); + return true; + } catch { + return false; + } +} + +export function listKeys(): Array<string> { + const keyStorePath = getKeyStorePath(); + try { + return fs.readdirSync(keyStorePath) + .filter((f) => f.endsWith('.json')) + .map((f) => f.slice(0, -5)); + } catch { + return []; + } +} + +// ── Internal key loading ─────────────────────────────────────────────── + +function loadKeyPair(keyId: string): StoredKeyPair { + const filePath = getKeyFilePath(keyId); + if (!fs.existsSync(filePath)) { + throw new Error(`Key not found: ${keyId}`); + } + const data = fs.readFileSync(filePath, 'utf-8'); + return JSON.parse(data) as StoredKeyPair; +} + +function getPublicKey(keyId: string): string { + return loadKeyPair(keyId).publicKey; +} + +// ── Encrypt / Decrypt ────────────────────────────────────────────────── + +export async function encryptValue(plaintext: string, keyId: string = DEFAULT_KEY_ID): Promise<string> { + const publicKey = getPublicKey(keyId); + return encrypt(publicKey, plaintext); +} + +export async function decryptValue(ciphertext: string, keyId: string = DEFAULT_KEY_ID): Promise<string> { + const stored = loadKeyPair(keyId); + return decrypt(stored.privateKey, stored.publicKey, ciphertext); +} diff --git a/packages/varlock/src/lib/local-encrypt/index.ts b/packages/varlock/src/lib/local-encrypt/index.ts new file mode 100644 index 000000000..ecdda2218 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/index.ts @@ -0,0 +1,195 @@ +/** + * Cross-platform local encryption for varlock. + * + * Provides a unified API for encrypting/decrypting secrets using the best + * available backend on the current platform: + * + * 1. macOS Secure Enclave (Swift binary) — hardware-backed, Touch ID + * 2.
Windows TPM/Hello (Rust binary) — hardware-backed, Windows Hello (TODO) + * 3. Linux TPM2 (Rust binary) — hardware-backed (TODO) + * 4. File-based (pure JS) — universal fallback, no native binary needed + */ + +import { execFileSync } from 'node:child_process'; +import { resolveNativeBinary } from './binary-resolver'; +import { DaemonClient } from './daemon-client'; +import * as fileBackend from './file-backend'; +import type { BackendInfo, BackendType, NativeStatusResult } from './types'; + +export type { BackendInfo, BackendType } from './types'; + +const DEFAULT_KEY_ID = 'varlock-default'; + +// ── Native binary one-shot commands ──────────────────────────────────── + +function runNativeBinary(args: Array<string>): string { + const binaryPath = resolveNativeBinary(); + if (!binaryPath) { + throw new Error('Native binary not found'); + } + return execFileSync(binaryPath, args, { + encoding: 'utf-8', + timeout: 30_000, + }).trim(); +} + +function runNativeBinaryJson<T extends Record<string, unknown>>(args: Array<string>): T { + const output = runNativeBinary(args); + const parsed = JSON.parse(output); + if (parsed.error) { + throw new Error(parsed.error); + } + return parsed as T; +} + +// ── Backend detection ────────────────────────────────────────────────── + +let cachedBackendInfo: BackendInfo | undefined; + +function detectBackendType(): BackendType { + const binaryPath = resolveNativeBinary(); + if (!binaryPath) return 'file'; + + switch (process.platform) { + case 'darwin': return 'secure-enclave'; + case 'win32': return 'windows-tpm'; + case 'linux': return 'linux-tpm'; + default: return 'file'; + } +} + +/** Get information about the active encryption backend. */ +export function getBackendInfo(): BackendInfo { + if (cachedBackendInfo) return cachedBackendInfo; + + const type = detectBackendType(); + const binaryPath = type !== 'file' ?
resolveNativeBinary() : undefined; + + if (type !== 'file' && binaryPath) { + // Query the native binary for its actual capabilities + try { + const status = runNativeBinaryJson<NativeStatusResult>(['status']); + cachedBackendInfo = { + type, + platform: process.platform, + hardwareBacked: status.hardwareBacked, + biometricAvailable: status.biometricAvailable, + binaryPath, + }; + } catch { + // Binary failed — fall back to reasonable defaults + cachedBackendInfo = { + type, + platform: process.platform, + hardwareBacked: type === 'secure-enclave', + biometricAvailable: type === 'secure-enclave', + binaryPath, + }; + } + } else { + cachedBackendInfo = { + type, + platform: process.platform, + hardwareBacked: false, + biometricAvailable: false, + binaryPath: undefined, + }; + } + + return cachedBackendInfo; +} + +// ── Daemon client (singleton for biometric-enabled backends) ─────────── + +let daemonClient: DaemonClient | undefined; + +function getDaemonClient(): DaemonClient { + daemonClient ||= new DaemonClient(); + return daemonClient; +} + +// ── Key management ───────────────────────────────────────────────────── + +/** Check if a key exists. */ +export function keyExists(keyId: string = DEFAULT_KEY_ID): boolean { + const backend = getBackendInfo(); + if (backend.type === 'file') { + return fileBackend.keyExists(keyId); + } + const result = runNativeBinaryJson<{ exists: boolean }>(['key-exists', '--key-id', keyId]); + return result.exists; +} + +/** Generate a new encryption key. */ +export async function generateKey(keyId: string = DEFAULT_KEY_ID): Promise<{ keyId: string; publicKey: string }> { + const backend = getBackendInfo(); + if (backend.type === 'file') { + return fileBackend.generateKey(keyId); + } + return runNativeBinaryJson<{ keyId: string; publicKey: string }>(['generate-key', '--key-id', keyId]); +} + +/** Ensure a key exists, generating one if necessary.
*/ +export async function ensureKey(keyId: string = DEFAULT_KEY_ID): Promise<void> { + if (!keyExists(keyId)) { + await generateKey(keyId); + } +} + +// ── Encrypt / Decrypt ────────────────────────────────────────────────── + +/** + * Encrypt a plaintext value. + * + * For hardware-backed backends, encryption uses the public key only (no biometric needed). + * For file-based backend, uses the pure JS ECIES implementation. + */ +export async function encryptValue(plaintext: string, keyId: string = DEFAULT_KEY_ID): Promise<string> { + const backend = getBackendInfo(); + if (backend.type === 'file') { + return fileBackend.encryptValue(plaintext, keyId); + } + // Native binary encrypt (one-shot, no biometric needed for encrypt) + const b64Input = Buffer.from(plaintext, 'utf-8').toString('base64'); + const result = runNativeBinaryJson<{ ciphertext: string }>(['encrypt', '--key-id', keyId, '--data', b64Input]); + return result.ciphertext; +} + +/** + * Decrypt a ciphertext value. + * + * For biometric-enabled backends (macOS Secure Enclave, Windows Hello), + * uses the daemon client for session caching (avoids repeated biometric prompts). + * For file-based backend, uses the pure JS ECIES implementation. + */ +export async function decryptValue(ciphertext: string, keyId: string = DEFAULT_KEY_ID): Promise<string> { + const backend = getBackendInfo(); + if (backend.type === 'file') { + return fileBackend.decryptValue(ciphertext, keyId); + } + + // Use daemon client for biometric backends (session caching) + if (backend.biometricAvailable) { + const client = getDaemonClient(); + return client.decrypt(ciphertext, keyId); + } + + // Non-biometric native backend (e.g., Linux TPM without polkit) — one-shot + const result = runNativeBinaryJson<{ plaintext: string }>(['decrypt', '--key-id', keyId, '--data', ciphertext]); + return result.plaintext; +} + +/** + * Invalidate the biometric session, requiring re-authentication for next decrypt.
+ * Connects to the running daemon without spawning one (varlock lock runs in a separate process). + */ +export async function lockSession(): Promise<void> { + const backend = getBackendInfo(); + if (!backend.biometricAvailable) return; + const client = getDaemonClient(); + const connected = await client.tryConnect(); + if (!connected) { + throw new Error('No encryption daemon is running'); + } + await client.invalidateSession(); +} diff --git a/packages/varlock/src/lib/local-encrypt/types.ts b/packages/varlock/src/lib/local-encrypt/types.ts new file mode 100644 index 000000000..44def1ff2 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/types.ts @@ -0,0 +1,42 @@ +/** + * Shared types for the local encryption system. + */ + +/** Which encryption backend is active */ +export type BackendType = ( + | 'secure-enclave' // macOS Secure Enclave (Swift binary) + | 'windows-tpm' // Windows native (Rust binary) — DPAPI now, TPM/Hello planned + | 'linux-tpm' // Linux native (Rust binary) — kernel keyring now, TPM planned + | 'file' // Pure JS file-based (universal fallback) +); + +/** Information about the active encryption backend */ +export interface BackendInfo { + type: BackendType; + platform: NodeJS.Platform; + hardwareBacked: boolean; + biometricAvailable: boolean; + binaryPath?: string; +} + +/** IPC daemon message format (length-prefixed JSON over Unix socket or named pipe) */ +export interface DaemonMessage { + id: string; + action: 'decrypt' | 'encrypt' | 'prompt-secret' | 'ping' | 'invalidate-session'; + payload?: Record<string, unknown>; +} + +/** IPC daemon response format */ +export interface DaemonResponse { + id: string; + result?: unknown; + error?: string; +} + +/** Result from the status command of a native binary */ +export interface NativeStatusResult { + backend: string; + hardwareBacked: boolean; + biometricAvailable: boolean; + keys: Array<string>; +} diff --git a/packages/varlock/src/plugin-lib.ts b/packages/varlock/src/plugin-lib.ts index 77e6e754f..f196c52d1 100644 ---
a/packages/varlock/src/plugin-lib.ts +++ b/packages/varlock/src/plugin-lib.ts @@ -2,6 +2,9 @@ import type { VarlockPlugin } from './env-graph/lib/plugins'; import { pluginProxy } from './plugin-context'; export type { Resolver } from './env-graph/lib/resolver'; +export type { PluginCacheAccessor } from './lib/cache/plugin-cache-accessor'; +export { parseTtl } from './lib/cache/ttl-parser'; +export { resolveCacheTtl } from './lib/cache/resolve-cache-ttl'; export { createDebug, type Debugger } from './lib/debug'; // Error classes exported directly so plugin authors can import them without diff --git a/scripts/check-release-packages.ts b/scripts/check-release-packages.ts new file mode 100644 index 000000000..30a4d420b --- /dev/null +++ b/scripts/check-release-packages.ts @@ -0,0 +1,65 @@ +/** + * Determines which packages would be published in a preview release. + * Outputs a JSON array of package paths and a flag for whether varlock is included. + * + * Usage: + * bun run scripts/check-release-packages.ts + * + * Outputs (via GITHUB_OUTPUT if available): + * packages=["path1","path2"] + * includes-varlock=true|false + */ +import { execSync } from 'node:child_process'; +import fs from 'node:fs'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; +import { listWorkspaces } from './list-workspaces'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); +const MONOREPO_ROOT = path.resolve(__dirname, '..'); + +const workspacePackagesInfo = await listWorkspaces(MONOREPO_ROOT); + +const currentBranch = process.env.GITHUB_HEAD_REF || execSync('git branch --show-current').toString().trim(); +let releasePackagePaths: Array<string>; + +if (currentBranch === 'changeset-release/main') { + const gitDiff = execSync('git diff origin/main --name-only').toString(); + const modifiedPackageJsons = gitDiff + .split('\n') + .filter((filePath) => filePath !== 'package.json') + .filter((filePath) =>
filePath.endsWith('package.json')); + + releasePackagePaths = modifiedPackageJsons + .map((filePath) => `${MONOREPO_ROOT}/${filePath.replace('/package.json', '')}`) + .filter((filePath) => workspacePackagesInfo.some((p) => p.path === filePath)); +} else { + execSync('bunx changeset status --output=changesets-summary.json', { cwd: MONOREPO_ROOT }); + + const changeSetsSummaryRaw = fs.readFileSync(path.join(MONOREPO_ROOT, 'changesets-summary.json'), 'utf8'); + const changeSetsSummary = JSON.parse(changeSetsSummaryRaw); + + releasePackagePaths = changeSetsSummary.releases + .filter((r: any) => r.newVersion !== r.oldVersion) + .map((r: any) => workspacePackagesInfo.find((p) => p.name === r.name)) + .filter(Boolean) + .map((p: any) => p.path); + + fs.unlinkSync(path.join(MONOREPO_ROOT, 'changesets-summary.json')); +} + +// filter out vscode extension which is not released via npm +releasePackagePaths = releasePackagePaths.filter((p: string) => !p.endsWith('packages/vscode-plugin')); + +const includesVarlock = releasePackagePaths.some((p) => p.endsWith('packages/varlock')); + +console.log('Packages to release:', releasePackagePaths); +console.log('Includes varlock:', includesVarlock); + +// Write to GITHUB_OUTPUT if running in CI +const githubOutput = process.env.GITHUB_OUTPUT; +if (githubOutput) { + fs.appendFileSync(githubOutput, `packages=${JSON.stringify(releasePackagePaths)}\n`); + fs.appendFileSync(githubOutput, `includes-varlock=${includesVarlock}\n`); +} diff --git a/scripts/release-preview.ts b/scripts/release-preview.ts index 2f6e45e6a..85893882d 100644 --- a/scripts/release-preview.ts +++ b/scripts/release-preview.ts @@ -1,63 +1,26 @@ import { execSync, execFileSync } from 'node:child_process'; -import fs from 'node:fs'; import path from 'node:path'; import { fileURLToPath } from 'node:url'; -import { listWorkspaces } from './list-workspaces'; const __filename = fileURLToPath(import.meta.url); const MONOREPO_ROOT = path.resolve(path.dirname(__filename), '..'); 
-let err: unknown; -try { - const workspacePackagesInfo = await listWorkspaces(MONOREPO_ROOT); - - // Check if we're on changeset-release/main branch - const currentBranch = process.env.GITHUB_HEAD_REF || execSync('git branch --show-current').toString().trim(); - let releasePackagePaths: Array; - - console.log('current branch = ', currentBranch); - - if (currentBranch === 'changeset-release/main') { - // On changeset-release/main branch, find modified package.json files - console.log('Running on changeset-release/main branch, finding modified package.json files...'); - const gitDiff = execSync('git diff origin/main --name-only').toString(); - const modifiedPackageJsons = gitDiff - .split('\n') - .filter((filePath) => filePath !== 'package.json') // skip root package.json - .filter((filePath) => filePath.endsWith('package.json')); - - if (!modifiedPackageJsons.length) { - console.log('No modified package.json files found!'); - process.exit(0); - } - - // Get the workspace paths for modified packages - releasePackagePaths = modifiedPackageJsons - .map((filePath) => `${MONOREPO_ROOT}/${filePath.replace('/package.json', '')}`) - .filter((filePath) => workspacePackagesInfo.some((p) => p.path === filePath)); - } else { - console.log('Running on normal PR, using changesets to determine packages to release...'); - // Regular changeset-based logic - // generate summary of changed (publishable) modules according to changesets - execSync('bunx changeset status --output=changesets-summary.json'); - - const changeSetsSummaryRaw = fs.readFileSync('./changesets-summary.json', 'utf8'); - const changeSetsSummary = JSON.parse(changeSetsSummaryRaw); - - releasePackagePaths = changeSetsSummary.releases - .filter((r: any) => r.newVersion !== r.oldVersion) - .map((r: any) => workspacePackagesInfo.find((p) => p.name === r.name)) - .map((p: any) => p.path); - } +// Accept package paths from RELEASE_PACKAGES env var (set by check-release-packages step) +const releasePackagesEnv = 
process.env.RELEASE_PACKAGES; +if (!releasePackagesEnv) { + console.error('RELEASE_PACKAGES env var not set — run check-release-packages.ts first'); + process.exit(1); +} - // filter out vscode extension which is not released via npm - releasePackagePaths = releasePackagePaths.filter((p: string) => !p.endsWith('packages/vscode-plugin')); +const releasePackagePaths: Array<string> = JSON.parse(releasePackagesEnv); - if (!releasePackagePaths.length) { - console.log('No packages to release!'); - process.exit(0); - } +if (!releasePackagePaths.length) { + console.log('No packages to release!'); + process.exit(0); +} +let err: unknown; +try { console.log('Updated packages to release:', releasePackagePaths); // Resolve workspace: and catalog: protocols in package.json files before publishing @@ -73,9 +36,4 @@ try { console.error('preview release failed'); console.error(_err); } - -// Only clean up changesets-summary.json if it exists (only created in changeset case) -if (fs.existsSync('./changesets-summary.json')) { - fs.unlinkSync('./changesets-summary.json'); -} process.exit(err ?
1 : 0); diff --git a/scripts/update-homebrew-formula.ts b/scripts/update-homebrew-formula.ts index 3a1c5f775..59d21f4fc 100644 --- a/scripts/update-homebrew-formula.ts +++ b/scripts/update-homebrew-formula.ts @@ -5,7 +5,7 @@ const VERSION = process.env.RELEASE_VERSION; // get checksums file from dist-sea since we are running this script just after building the binaries const checksumsStr = await fs.readFile(path.join(import.meta.dirname, '../packages/varlock/dist-sea/checksums.txt'), 'utf-8'); -const checksums = {}; +const checksums: Record<string, string> = {}; checksumsStr.split('\n').forEach((line) => { if (!line.trim()) return; // skip trailing blank line const [sha256, fileName] = line.split(' '); diff --git a/smoke-tests/smoke-test-plugin/plugins/legacy-global-plugin.js b/smoke-tests/smoke-test-plugin/plugins/legacy-global-plugin.js index 59a9d4554..e491efee4 100644 --- a/smoke-tests/smoke-test-plugin/plugins/legacy-global-plugin.js +++ b/smoke-tests/smoke-test-plugin/plugins/legacy-global-plugin.js @@ -1,5 +1,7 @@ // Legacy plugin that uses the old implicit `plugin` global (no require). // Used to verify the migration error message is shown.
+ +// eslint-disable-next-line no-undef plugin.registerResolverFunction({ name: 'legacyTest', argsSchema: { diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 000000000..57da96b96 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,11 @@ +{ + "$schema": "https://json.schemastore.org/tsconfig", + "extends": "@varlock/tsconfig/base.tsconfig.json", + "compilerOptions": { + "noEmit": true, + "types": ["node"], + // bun supports .ts imports natively, but tsc needs this + "allowImportingTsExtensions": true + }, + "include": ["scripts/**/*.ts"] +} diff --git a/turbo.json b/turbo.json index 93c9000ec..5c5d5bdfa 100644 --- a/turbo.json +++ b/turbo.json @@ -30,6 +30,9 @@ "dependsOn": ["^typecheck"], "inputs": ["package.json", "tsconfig.json", "tsconfig.*.json", "src/**"] }, + "//#typecheck": { + "inputs": ["tsconfig.json", "scripts/**/*.ts"] + }, "dev": { "cache": false, "persistent": true