From ed1d30be0c2c588b3f6893e8f7d800f621b53cb8 Mon Sep 17 00:00:00 2001 From: Arseny Kravchenko Date: Wed, 10 Dec 2025 14:53:11 +0100 Subject: [PATCH 1/2] claude skill --- experimental/apps-mcp/cmd/apps_mcp.go | 1 + experimental/apps-mcp/cmd/install_skill.go | 83 ++++++ .../lib/skill/databricks-apps/SKILL.md | 243 ++++++++++++++++++ .../databricks-apps/reference/app-kit-sdk.md | 84 ++++++ .../reference/authentication.md | 59 +++++ .../databricks-apps/reference/frontend.md | 108 ++++++++ .../databricks-apps/reference/sql-queries.md | 182 +++++++++++++ .../databricks-apps/reference/testing.md | 58 +++++ .../skill/databricks-apps/reference/trpc.md | 95 +++++++ .../lib/skill/databricks-apps/scripts/db | 5 + experimental/apps-mcp/lib/skill/skill.go | 6 + 11 files changed, 924 insertions(+) create mode 100644 experimental/apps-mcp/cmd/install_skill.go create mode 100644 experimental/apps-mcp/lib/skill/databricks-apps/SKILL.md create mode 100644 experimental/apps-mcp/lib/skill/databricks-apps/reference/app-kit-sdk.md create mode 100644 experimental/apps-mcp/lib/skill/databricks-apps/reference/authentication.md create mode 100644 experimental/apps-mcp/lib/skill/databricks-apps/reference/frontend.md create mode 100644 experimental/apps-mcp/lib/skill/databricks-apps/reference/sql-queries.md create mode 100644 experimental/apps-mcp/lib/skill/databricks-apps/reference/testing.md create mode 100644 experimental/apps-mcp/lib/skill/databricks-apps/reference/trpc.md create mode 100755 experimental/apps-mcp/lib/skill/databricks-apps/scripts/db create mode 100644 experimental/apps-mcp/lib/skill/skill.go diff --git a/experimental/apps-mcp/cmd/apps_mcp.go b/experimental/apps-mcp/cmd/apps_mcp.go index 83da91447c..68143cb3c8 100644 --- a/experimental/apps-mcp/cmd/apps_mcp.go +++ b/experimental/apps-mcp/cmd/apps_mcp.go @@ -50,6 +50,7 @@ The server communicates via stdio using the Model Context Protocol.`, cmd.Flags().StringVar(&warehouseID, "warehouse-id", "", "Databricks SQL Warehouse ID") cmd.AddCommand(newInstallCmd()) + cmd.AddCommand(newInstallSkillCmd()) cmd.AddCommand(newToolsCmd()) return cmd diff --git a/experimental/apps-mcp/cmd/install_skill.go b/experimental/apps-mcp/cmd/install_skill.go new file mode 100644 index 0000000000..9c6db618b8 --- /dev/null +++ b/experimental/apps-mcp/cmd/install_skill.go @@ -0,0 +1,83 @@ +package mcp + +import ( + "context" + "fmt" + "io/fs" + "os" + "path/filepath" + "strings" + + "github.com/databricks/cli/experimental/apps-mcp/lib/skill" + "github.com/databricks/cli/libs/cmdio" + "github.com/fatih/color" + "github.com/spf13/cobra" +) + +const cliPathPlaceholder = "__DATABRICKS_CLI_PATH__" + +func newInstallSkillCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "install-skill", + Short: "Install the databricks-apps skill for Claude Code", + Long: `Install the databricks-apps skill to ~/.claude/skills/ for use with Claude Code.`, + RunE: func(cmd *cobra.Command, args []string) error { + return runInstallSkill(cmd.Context()) + }, + } + return cmd +} + +func runInstallSkill(ctx context.Context) error { + cliPath, err := os.Executable() + if err != nil { + return fmt.Errorf("failed to get CLI path: %w", err) + } + + homeDir, err := os.UserHomeDir() + if err != nil { + return fmt.Errorf("failed to get home directory: %w", err) + } + + destDir := filepath.Join(homeDir, ".claude", "skills", "databricks-apps") + + if err := os.MkdirAll(destDir, 0o755); err != nil { + return fmt.Errorf("failed to create destination directory: %w", err) + } + + skillFS, err := 
fs.Sub(skill.SkillFS, "databricks-apps")
+	if err != nil {
+		return fmt.Errorf("failed to access embedded skill: %w", err)
+	}
+
+	err = fs.WalkDir(skillFS, ".", func(path string, d fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+
+		destPath := filepath.Join(destDir, path)
+
+		if d.IsDir() {
+			return os.MkdirAll(destPath, 0o755)
+		}
+
+		content, err := fs.ReadFile(skillFS, path)
+		if err != nil {
+			return fmt.Errorf("failed to read %s: %w", path, err)
+		}
+
+		perm := os.FileMode(0o644)
+		if path == "scripts/db" {
+			content = []byte(strings.ReplaceAll(string(content), cliPathPlaceholder, cliPath))
+			perm = 0o755
+		}
+
+		return os.WriteFile(destPath, content, perm)
+	})
+	if err != nil {
+		return fmt.Errorf("failed to copy skill files: %w", err)
+	}
+
+	cmdio.LogString(ctx, color.GreenString("✓ Installed databricks-apps skill to ")+destDir)
+	return nil
+}
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/SKILL.md b/experimental/apps-mcp/lib/skill/databricks-apps/SKILL.md
new file mode 100644
index 0000000000..3539557993
--- /dev/null
+++ b/experimental/apps-mcp/lib/skill/databricks-apps/SKILL.md
@@ -0,0 +1,243 @@
+---
+name: databricks-apps
+description: Guide for building and deploying Databricks Apps using AppKit. Use when creating data visualization apps, dashboards and similar data-oriented apps. Covers scaffolding, development, validation, and deployment.
+---
+
+# Databricks Apps Development Guide
+
+## Overview
+
+Build full-stack TypeScript apps that query Databricks SQL warehouses and deploy to Databricks Apps. Uses the AppKit SDK for data fetching and visualization.
+
+## CLI Wrapper
+
+All Databricks CLI commands go through the bundled wrapper script, which logs each command to stderr:
+
+```bash
+scripts/db <command>
+```
+
+---
+
+# Workflow
+
+## Phase 1: Environment Setup
+
+### Verify Authentication
+
+```bash
+scripts/db auth profiles
+```
+
+If you hit authentication issues, see the [Authentication Reference](./reference/authentication.md).
+
+### Check Available Warehouses
+
+```bash
+scripts/db sql warehouses list
+```
+
+Note a warehouse ID for the app. You can also set the `DATABRICKS_WAREHOUSE_ID` environment variable.
+
+---
+
+## Phase 2: Data Exploration
+
+Before scaffolding, explore available data to understand what to visualize.
+
+### Browse Catalogs/Schemas/Tables
+
+```bash
+scripts/db catalogs list
+scripts/db schemas list <catalog>
+scripts/db tables list <catalog> <schema>
+```
+
+### Discover Table Schema
+
+```bash
+scripts/db experimental apps-mcp tools discover-schema CATALOG.SCHEMA.TABLE
+```
+
+Returns: columns, types, sample data (5 rows), null counts.
+
+### Test Queries
+
+```bash
+scripts/db experimental apps-mcp tools query "SELECT * FROM catalog.schema.table LIMIT 10"
+```
+
+**Note:** Use separate arguments for the `catalogs`/`schemas`/`tables` commands. Dot notation only works in `discover-schema` and `query`.
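+
+While exploring, it helps to jot down the result shape you plan to visualize; it becomes the Zod schema in Phase 4. A minimal sketch, assuming a hypothetical `trip_stats` query over a table found above (the column names are illustrative, not part of the template):
+
+```typescript
+import { z } from 'zod';
+
+// Hypothetical entry for config/queries/schema.ts (see Phase 4).
+// Take the real column names and types from the discover-schema output.
+export const querySchemas = {
+  trip_stats: z.array(
+    z.object({
+      pickup_zip: z.string(),
+      trip_count: z.number(), // matches COUNT(*) AS trip_count in the SQL file
+    })
+  ),
+};
+```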
+
+---
+
+## Phase 3: Project Scaffolding
+
+### Create New App
+
+```bash
+scripts/db experimental apps-mcp tools init-template --name my-app --description "My app description"
+```
+
+**Constraints:**
+- App name must be ≤26 characters (the `dev-` prefix adds 4 characters, for a 30-character maximum)
+- Use lowercase letters, numbers, and hyphens only
+
+### Project Structure
+
+After scaffolding:
+- `server/` - Node.js backend with App Kit and tRPC
+- `client/` - React frontend with App Kit hooks
+- `config/queries/` - SQL query files for analytics
+- `shared/` - Shared TypeScript types
+- `docs/` - Detailed documentation
+
+---
+
+## Phase 4: Development
+
+### Start Dev Server
+
+```bash
+cd <app-name>
+npm install
+npm run dev
+```
+
+The dev server runs on localhost (port shown in output).
+
+### Adding Data Visualizations
+
+**Step 1: Create SQL query file**
+```sql
+-- config/queries/my_data.sql
+SELECT category, COUNT(*) as count
+FROM my_table
+GROUP BY category
+```
+
+**Step 2: Define schema in `config/queries/schema.ts`**
+```typescript
+import { z } from 'zod';
+
+export const querySchemas = {
+  my_data: z.array(
+    z.object({
+      category: z.string(),
+      count: z.number(),
+    })
+  ),
+};
+```
+
+**Step 3: Add visualization**
+```typescript
+import { BarChart } from '@databricks/app-kit-ui/react';
+
+<BarChart queryKey="my_data" />
+```
+
+### Key Documentation
+
+Load these references when implementing features:
+- [SQL Queries](./reference/sql-queries.md) - query files, schemas, type handling, parameterization
+- [App Kit SDK](./reference/app-kit-sdk.md) - imports, server setup, useAnalyticsQuery hook
+- [Frontend](./reference/frontend.md) - visualization components, styling, Radix constraints
+- [tRPC](./reference/trpc.md) - custom endpoints for mutations, Databricks APIs
+- [Testing](./reference/testing.md) - vitest unit tests, Playwright smoke tests
+- [Authentication](./reference/authentication.md) - profiles, OAuth, troubleshooting
+
+---
+
+## Phase 5: Validation
+
+**Always validate before deploying:**
+
+```bash
+scripts/db experimental apps-mcp tools validate ./
+```
+
+This catches common issues: build errors, type errors, lint issues, and test failures.
+After validation passes, double-check the screenshot and browser console logs in the app's `.smoke-test/` directory.
+
+---
+
+## Phase 6: Deployment
+
+**Requires explicit user permission.**
+
+```bash
+scripts/db experimental apps-mcp tools deploy
+```
+
+### View Deployed App
+
+```bash
+scripts/db bundle summary
+```
+
+---
+
+## CLI Reference
+
+### Exploring Data
+
+```bash
+# List catalogs
+scripts/db catalogs list
+
+# List schemas in a catalog
+scripts/db schemas list <catalog>
+
+# List tables in a schema
+scripts/db tables list <catalog> <schema>
+
+# Discover table schemas (columns, types, sample data)
+scripts/db experimental apps-mcp tools discover-schema CATALOG.SCHEMA.TABLE1 CATALOG.SCHEMA.TABLE2
+
+# Run a SQL query
+scripts/db experimental apps-mcp tools query "SELECT * FROM catalog.schema.table LIMIT 10"
+```
+
+**Note:** Use separate arguments for catalog/schema in most commands: `tables list samples tpcds_sf1` (not dot notation). Dot notation only works in `discover-schema` and `query`.
+
+### Jobs and Clusters
+
+```bash
+scripts/db jobs list
+scripts/db jobs get <job-id>
+scripts/db clusters list
+```
+
+### Bundle Management
+
+```bash
+scripts/db bundle summary
+scripts/db bundle validate
+scripts/db bundle deploy
+```
+
+---
+
+## Local vs Deployed
+
+**During development:**
+- Use `npm run dev`
+- Access via the localhost URL shown in the terminal
+
+**After deployment:**
+- Get the URL from `scripts/db bundle summary`
+
+**Decision tree:**
+- "open the app" + not deployed → localhost
+- "open the app" + deployed → ask which environment
+- "localhost"/"local" → always localhost
+
+---
+
+## Best Practices
+
+- Always validate before deploying
+- For operations affecting live environments, ask for confirmation
+- Use SQL files in `config/queries/` for data retrieval (never tRPC)
+- Run `npm run dev` to auto-regenerate TypeScript types after schema changes
+- Check the docs for numeric type handling (all SQL numbers return as strings in JSON)
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/app-kit-sdk.md b/experimental/apps-mcp/lib/skill/databricks-apps/reference/app-kit-sdk.md
new file mode 100644
index 0000000000..1555cbecfa
--- /dev/null
+++ b/experimental/apps-mcp/lib/skill/databricks-apps/reference/app-kit-sdk.md
@@ -0,0 +1,84 @@
+# Databricks App Kit SDK
+
+## TypeScript Import Rules
+
+This template uses strict TypeScript settings with `verbatimModuleSyntax: true`. **Always use `import type` for type-only imports.**
+
+The template enforces `noUnusedLocals` - remove unused imports immediately or the build fails.
+
+```typescript
+// CORRECT - use import type for types
+import type { MyInterface, MyType } from '../../shared/types';
+
+// WRONG - will fail compilation
+import { MyInterface, MyType } from '../../shared/types';
+```
+
+## Server Setup
+
+```typescript
+import { createApp, server, analytics } from '@databricks/app-kit';
+import type { Application } from 'express';
+
+const app = await createApp({
+  plugins: [
+    server({ autoStart: false }),
+    analytics(),
+  ],
+});
+
+// Extend with custom tRPC endpoints if needed
+// (appRouterMiddleware comes from your tRPC setup; see the tRPC reference)
+app.server.extend((express: Application) => {
+  express.use('/trpc', [appRouterMiddleware()]);
+});
+
+await app.server.start();
+```
+
+## useAnalyticsQuery Hook
+
+**ONLY use when displaying data in a custom way that isn't a chart or table.**
+
+Use cases:
+- Custom HTML layouts (cards, lists, grids)
+- Summary statistics and KPIs
+- Conditional rendering based on data values
+- Data that needs transformation before display
+
+```typescript
+import { useAnalyticsQuery } from '@databricks/app-kit-ui/react';
+import { Skeleton } from '@/components/ui/skeleton';
+
+interface QueryResult { column_name: string; value: number; }
+
+function CustomDisplay() {
+  const { data, loading, error } = useAnalyticsQuery<QueryResult[]>('query_name', {});
+
+  if (loading) return <Skeleton className="h-24 w-full" />;
+  if (error) return <div>Error: {error}</div>;
+
+  return (
+    <div className="grid gap-4 md:grid-cols-3">
+      {data?.map(row => (
+        <div key={row.column_name} className="rounded-lg border p-4">
+          <div className="text-sm text-muted-foreground">{row.column_name}</div>
+          <div className="text-2xl font-bold">{row.value}</div>
+        </div>
+      ))}
+    </div>
+  );
+}
+```
+
+**API:**
+
+```typescript
+const { data, loading, error } = useAnalyticsQuery<T>(
+  queryName: string,               // SQL file name without .sql extension
+  params: Record<string, unknown>  // Query parameters
+);
+// Returns: { data: T | null, loading: boolean, error: string | null }
+```
+
+**NOT supported:**
+- `enabled` - Query always executes on mount. Use conditional rendering: `{selectedId && <Component />}`
+- `refetch` - Not available. Re-mount the component to re-query.
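+
+Both gaps can be worked around from the parent component. A minimal sketch, assuming a hypothetical `order_details` query file and illustrative names (none of these ship with the template):
+
+```typescript
+import { useState } from 'react';
+import { useAnalyticsQuery } from '@databricks/app-kit-ui/react';
+import { Skeleton } from '@/components/ui/skeleton';
+
+interface OrderDetail { order_id: string; total: string; }
+
+function OrderDetails({ orderId }: { orderId: string }) {
+  // The query runs on mount; the parent decides when this component mounts
+  const { data, loading, error } = useAnalyticsQuery<OrderDetail[]>('order_details', { order_id: orderId });
+  if (loading) return <Skeleton className="h-24 w-full" />;
+  if (error) return <div>Error: {error}</div>;
+  return <pre>{JSON.stringify(data, null, 2)}</pre>;
+}
+
+function OrdersPage() {
+  const [selectedId, setSelectedId] = useState<string | null>(null);
+  const [version, setVersion] = useState(0);
+
+  return (
+    <div className="space-y-4">
+      <button onClick={() => setSelectedId('1042')}>Select order 1042</button>
+      <button onClick={() => setVersion(v => v + 1)}>Refresh</button>
+      {/* Conditional mounting stands in for `enabled`; bumping `key` re-mounts, standing in for `refetch` */}
+      {selectedId && <OrderDetails key={version} orderId={selectedId} />}
+    </div>
+  );
+}
+```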
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/authentication.md b/experimental/apps-mcp/lib/skill/databricks-apps/reference/authentication.md
new file mode 100644
index 0000000000..b35eea8cb7
--- /dev/null
+++ b/experimental/apps-mcp/lib/skill/databricks-apps/reference/authentication.md
@@ -0,0 +1,59 @@
+# Authentication
+
+## Check Current Auth
+
+```bash
+scripts/db auth profiles
+```
+
+Shows configured profiles and their status.
+
+## Configure New Profile
+
+```bash
+scripts/db configure --profile <profile-name>
+```
+
+Interactive setup for a new profile.
+
+## OAuth Login (U2M)
+
+```bash
+scripts/db auth login --profile <profile-name> --host <workspace-url>
+```
+
+Browser-based OAuth flow. Recommended for development.
+
+## Profile Switching
+
+Temporary switch for a single command:
+```bash
+DATABRICKS_CONFIG_PROFILE=<profile> scripts/db <command>
+```
+
+Or use the `--profile` flag:
+```bash
+scripts/db --profile <profile> <command>
+```
+
+## Environment Variables
+
+| Variable | Purpose |
+|----------|---------|
+| `DATABRICKS_HOST` | Workspace URL |
+| `DATABRICKS_CONFIG_PROFILE` | Profile name from ~/.databrickscfg |
+| `DATABRICKS_WAREHOUSE_ID` | Default warehouse for SQL queries |
+
+## Troubleshooting
+
+| Issue | Solution |
+|-------|----------|
+| No profiles configured | Run `scripts/db configure --profile <name>` |
+| Token expired | Run `scripts/db auth login --profile <name> --host <host>` |
+| Wrong workspace | Check `DATABRICKS_CONFIG_PROFILE` or use the `--profile` flag |
+| Auth fails silently | Run `scripts/db auth profiles` to check status |
+
+## New Account Setup
+
+Don't have a Databricks account? Set up a free account at:
+https://docs.databricks.com/getting-started/free-edition
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/frontend.md b/experimental/apps-mcp/lib/skill/databricks-apps/reference/frontend.md
new file mode 100644
index 0000000000..6371da2b7c
--- /dev/null
+++ b/experimental/apps-mcp/lib/skill/databricks-apps/reference/frontend.md
@@ -0,0 +1,108 @@
+# Frontend Guidelines
+
+## Visualization Components
+
+Components from `@databricks/app-kit-ui/react` handle data fetching, loading states, and error handling internally.
+
+Available: `AreaChart`, `BarChart`, `LineChart`, `PieChart`, `RadarChart`, `DataTable`
+
+**Basic Usage:**
+
+```typescript
+import { BarChart, LineChart, DataTable } from '@databricks/app-kit-ui/react';
+import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
+
+function MyDashboard() {
+  return (
+    <div className="grid gap-4 md:grid-cols-2">
+      <Card>
+        <CardHeader>
+          <CardTitle>Sales by Region</CardTitle>
+        </CardHeader>
+        <CardContent>
+          <BarChart queryKey="sales_by_region" />
+        </CardContent>
+      </Card>
+      <Card>
+        <CardHeader>
+          <CardTitle>Revenue Trend</CardTitle>
+        </CardHeader>
+        <CardContent>
+          <LineChart queryKey="revenue_trend" />
+        </CardContent>
+      </Card>
+    </div>
+  );
+}
+```
+
+Components automatically fetch data, show loading states, display errors, and render with sensible defaults.
+
+**Custom Visualization (Recharts):**
+
+```typescript
+import { BarChart } from '@databricks/app-kit-ui/react';
+import { Bar, XAxis, YAxis, CartesianGrid, Tooltip, Legend } from 'recharts';
+
+<BarChart queryKey="sales_by_region">
+  <CartesianGrid strokeDasharray="3 3" />
+  <XAxis dataKey="category" />
+  <YAxis />
+  <Tooltip />
+  <Legend />
+  <Bar dataKey="count" fill="#40d1f5" />
+</BarChart>
+```
+
+Databricks brand colors: `['#40d1f5', '#4462c9', '#EB1600', '#0B2026', '#4A4A4A', '#353a4a']`
+
+**Don't double-fetch:**
+
+```typescript
+// WRONG - redundant fetch
+const { data } = useAnalyticsQuery('sales_data', {});
+return <BarChart data={data} />;
+
+// CORRECT - let the component handle it
+return <BarChart queryKey="sales_data" />;
+```
+
+## Layout Structure
+
+```tsx
+<div className="container mx-auto p-6 space-y-6">
+  <h1 className="text-2xl font-bold">Page Title</h1>
+  <div className="flex gap-2">{/* form inputs */}</div>
+  <div className="space-y-2">{/* list items */}</div>
+</div>
+```
+
+## Component Organization
+
+- Shared UI components: `client/src/components/ui/`
+- Feature components: `client/src/components/FeatureName.tsx`
+- Split components when logic exceeds ~100 lines or a component is reused
+
+## Radix UI Constraints
+
+- `SelectItem` cannot have `value=""`. Use a sentinel value like `"all"` for "show all" options.
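+
+A minimal sketch of the sentinel pattern, pairing this constraint with the optional-parameter SQL pattern from the SQL Queries reference; the `orders_by_status` query and status values are illustrative:
+
+```typescript
+import { useState } from 'react';
+import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
+import { useAnalyticsQuery } from '@databricks/app-kit-ui/react';
+
+interface OrderRow { order_id: string; status: string; }
+
+function FilteredOrders() {
+  const [status, setStatus] = useState('all'); // sentinel: SelectItem value="" is not allowed
+
+  // Map the sentinel back to '' so SQL's (:status = '' OR status = :status) matches everything
+  const { data } = useAnalyticsQuery<OrderRow[]>('orders_by_status', {
+    status: status === 'all' ? '' : status,
+  });
+
+  return (
+    <div className="space-y-4">
+      <Select value={status} onValueChange={setStatus}>
+        <SelectTrigger className="w-48">
+          <SelectValue placeholder="Status" />
+        </SelectTrigger>
+        <SelectContent>
+          <SelectItem value="all">All statuses</SelectItem>
+          <SelectItem value="open">Open</SelectItem>
+          <SelectItem value="closed">Closed</SelectItem>
+        </SelectContent>
+      </Select>
+      <ul>{data?.map(row => <li key={row.order_id}>{row.order_id}: {row.status}</li>)}</ul>
+    </div>
+  );
+}
+```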
+
+## Map Libraries (react-leaflet)
+
+For maps with React 19, use react-leaflet v5:
+
+```bash
+npm install react-leaflet@^5.0.0 leaflet @types/leaflet
+```
+
+```typescript
+import 'leaflet/dist/leaflet.css';
+```
+
+## Best Practices
+
+- Use shadcn/radix components (Button, Input, Card, etc.) for consistent UI
+- **Use skeleton loaders**: Always use `<Skeleton />` components instead of plain "Loading..." text
+- Define result types in `shared/types.ts` for reuse between frontend and backend
+- Handle nullable fields: `value={field || ''}` for inputs
+- Type callbacks explicitly: `onChange={(e: React.ChangeEvent<HTMLInputElement>) => ...}`
+- Forms should have loading states: `disabled={isLoading}`
+- Show empty states with helpful text when no data exists
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/sql-queries.md b/experimental/apps-mcp/lib/skill/databricks-apps/reference/sql-queries.md
new file mode 100644
index 0000000000..28d463ac0e
--- /dev/null
+++ b/experimental/apps-mcp/lib/skill/databricks-apps/reference/sql-queries.md
@@ -0,0 +1,182 @@
+# SQL Query Files
+
+## Data Exploration
+
+Before writing queries, explore your data:
+
+```bash
+# Discover table structure (columns, types, sample data, null counts)
+scripts/db experimental apps-mcp tools discover-schema CATALOG.SCHEMA.TABLE
+
+# Test queries interactively
+scripts/db experimental apps-mcp tools query "SELECT * FROM catalog.schema.table LIMIT 10"
+```
+
+---
+
+**IMPORTANT**: ALWAYS use SQL files in `config/queries/` for data retrieval. NEVER use tRPC for SQL queries.
+
+- Store ALL SQL queries in the `config/queries/` directory
+- Name files descriptively: `trip_statistics.sql`, `user_metrics.sql`, `sales_by_region.sql`
+- Reference a query by filename (without extension) in `useAnalyticsQuery`, or pass the filename as `queryKey` directly to a visualization component
+- App Kit automatically executes queries against the configured Databricks warehouse
+- Benefits: built-in caching, proper connection pooling, better performance
+
+## Query Schemas
+
+Define the shape of QUERY RESULTS (not input parameters) in `config/queries/schema.ts` using Zod schemas.
+
+- **These schemas validate the COLUMNS RETURNED by SQL queries**
+- Input parameters are passed separately to `useAnalyticsQuery()` as the second argument
+- Schema field names must match your SQL SELECT column names/aliases
+
+Example:
+
+```typescript
+import { z } from 'zod';
+
+export const querySchemas = {
+  mocked_sales: z.array(
+    z.object({
+      max_month_num: z.number().min(1).max(12),
+    })
+  ),
+
+  hello_world: z.array(
+    z.object({
+      value: z.string(),
+    })
+  ),
+};
+```
+
+**IMPORTANT: Refreshing Type Definitions**
+
+After adding or modifying query schemas in `config/queries/schema.ts`:
+
+1. **DO NOT** manually edit `client/src/appKitTypes.d.ts` - this file is auto-generated
+2. Run `npm run dev` to automatically regenerate the TypeScript type definitions
+3. The dev server will scan your SQL files and schema definitions and update `appKitTypes.d.ts` accordingly
+
+## SQL Type Handling (Critical)
+
+**ALL numeric values from Databricks SQL are returned as STRINGS in JSON responses.** This includes results from `ROUND()`, `AVG()`, `SUM()`, `COUNT()`, etc. Always convert before using numeric methods:
+
+```typescript
+// WRONG - fails at runtime
+{row.total_amount.toFixed(2)}
+
+// CORRECT - convert to number first
+{Number(row.total_amount).toFixed(2)}
+```
+
+**Helper Functions:**
+
+Use the helpers from `shared/types.ts` for consistent formatting:
+
+```typescript
+import { toNumber, formatCurrency, formatPercent } from '../../shared/types';
+
+// Convert to number
+const amount = toNumber(row.amount); // "123.45" -> 123.45
+
+// Format as currency
+const formatted = formatCurrency(row.amount); // "123.45" -> "$123.45"
+
+// Format as percentage
+const percent = formatPercent(row.rate); // "85.5" -> "85.5%"
+```
+
+## Query Parameterization
+
+SQL queries can accept parameters to make them dynamic and reusable.
+
+**Key Points:**
+- Parameters use a colon prefix: `:parameter_name`
+- Databricks infers types from values automatically
+- For optional string parameters, use the pattern `(:param = '' OR column = :param)`
+- **For optional date parameters, use sentinel dates** (`'1900-01-01'` and `'9999-12-31'`) instead of empty strings
+
+### SQL Parameter Syntax
+
+```sql
+-- config/queries/filtered_data.sql
+SELECT *
+FROM my_table
+WHERE column_value >= :min_value
+  AND column_value <= :max_value
+  AND category = :category
+  AND (:optional_filter = '' OR status = :optional_filter)
+```
+
+### Frontend Parameter Passing
+
+```typescript
+const { data } = useAnalyticsQuery('filtered_data', {
+  min_value: minValue,
+  max_value: maxValue,
+  category: category,
+  optional_filter: optionalFilter || '', // empty string for optional params
+});
+```
+
+### Date Parameters
+
+For dates, use `YYYY-MM-DD` format in the frontend and the `CAST()` function in SQL:
+
+```typescript
+// Date helper for query params
+const daysAgo = (n: number) => new Date(Date.now() - n * 86400000).toISOString().split('T')[0];
+
+const startDate = daysAgo(7); // 7 days ago
+```
+
+```sql
+-- SQL
+WHERE timestamp_column >= CAST(:start_date AS DATE)
+```
+
+### Optional Date Parameters - Use Sentinel Dates
+
+Databricks App Kit validates parameter types before query execution. **DO NOT use empty strings (`''`) for optional date parameters** as this causes validation errors.
+ +**CORRECT - Use Sentinel Dates:** + +```typescript +// Frontend: Use sentinel dates for "no filter" instead of empty strings +const revenueParams = { + group_by: 'month', + start_date: '1900-01-01', // Sentinel: effectively no lower bound + end_date: '9999-12-31', // Sentinel: effectively no upper bound + country: country || '', + property_type: propertyType || '', +}; +``` + +```sql +-- SQL: Simple comparison since sentinel dates are always valid +WHERE b.check_in >= CAST(:start_date AS DATE) + AND b.check_in <= CAST(:end_date AS DATE) +``` + +**WRONG - Empty Strings Cause Validation Errors:** + +```typescript +// DON'T DO THIS - causes "Invalid date format" error +const params = { + start_date: '', // Empty string triggers parameter validation error + end_date: '', +}; +``` + +**Why Sentinel Dates Work:** +- `1900-01-01` is before any real data (effectively no lower bound filter) +- `9999-12-31` is after any real data (effectively no upper bound filter) +- Always valid DATE types, so no parameter validation errors +- All real dates fall within this range, so no filtering occurs + +**Parameter Types Summary:** +- **Strings/Numbers**: Use directly in SQL with `:param_name` +- **Dates**: Format as `YYYY-MM-DD`, use with `CAST(:param AS DATE)` in SQL +- **Optional Strings**: Use empty string default, check with `(:param = '' OR column = :param)` +- **Optional Dates**: Use sentinel dates (`'1900-01-01'` and `'9999-12-31'`) instead of empty strings diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/testing.md b/experimental/apps-mcp/lib/skill/databricks-apps/reference/testing.md new file mode 100644 index 0000000000..e1628d48b9 --- /dev/null +++ b/experimental/apps-mcp/lib/skill/databricks-apps/reference/testing.md @@ -0,0 +1,58 @@ +# Testing Guidelines + +## Unit Tests (Vitest) + +**CRITICAL**: Use vitest for all tests. Put tests next to the code (e.g. src/*.test.ts) + +```typescript +import { describe, it, expect } from 'vitest'; + +describe('Feature Name', () => { + it('should do something', () => { + expect(true).toBe(true); + }); + + it('should handle async operations', async () => { + const result = await someAsyncFunction(); + expect(result).toBeDefined(); + }); +}); +``` + +**Best Practices:** +- Use `describe` blocks to group related tests +- Use `it` for individual test cases +- Use `expect` for assertions +- Tests run with `npm test` (runs `vitest run`) + +**Do not write unit tests for:** +- SQL files under `config/queries/` - little value in testing static SQL +- Types associated with queries - these are just schema definitions + +## Smoke Test (Playwright) + +The template includes a smoke test at `tests/smoke.spec.ts` that verifies the app loads correctly. 
+
+**What the smoke test does:**
+- Opens the app
+- Waits for data to load (SQL query results)
+- Verifies key UI elements are visible
+- Captures screenshots and console logs to the `.smoke-test/` directory
+- Always captures artifacts, even on test failure
+
+**When customizing the app**, update `tests/smoke.spec.ts` to match your UI:
+- Change the heading selector to match your app title (replace 'Minimal Databricks App')
+- Update data assertions to match your query results (replace the 'hello world' check)
+- Keep the test simple - just verify the app loads and displays data
+- The default test expects specific template content; update these expectations after customization
+
+**Keep smoke tests simple:**
+- Only verify that the app loads and displays initial data
+- Wait for key elements to appear (page title, main content)
+- Capture artifacts for debugging
+- Run quickly (< 5 seconds)
+
+**For extended E2E tests:**
+- Create separate test files in the `tests/` directory (e.g., `tests/user-flow.spec.ts`)
+- Use `npm run test:e2e` to run all Playwright tests
+- Keep complex user flows, interactions, and edge cases out of the smoke test
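+
+A minimal smoke-test sketch along these lines; the heading and 'hello world' assertions mirror the default template expectations mentioned above, and the selectors are placeholders to adjust:
+
+```typescript
+// tests/smoke.spec.ts (sketch - adapt selectors to your app)
+import { test, expect } from '@playwright/test';
+
+test('app loads and displays data', async ({ page }) => {
+  const logs: string[] = [];
+  page.on('console', (msg) => logs.push(msg.text())); // collect console output for .smoke-test/
+
+  await page.goto('/'); // baseURL comes from playwright.config.ts
+
+  // Wait for key elements: the page title, then data rendered from the SQL query
+  await expect(page.getByRole('heading', { name: 'Minimal Databricks App' })).toBeVisible();
+  await expect(page.getByText('hello world')).toBeVisible();
+
+  // Capture artifacts for debugging (the template always captures them, even on failure)
+  await page.screenshot({ path: '.smoke-test/screenshot.png', fullPage: true });
+});
+```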
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/trpc.md b/experimental/apps-mcp/lib/skill/databricks-apps/reference/trpc.md
new file mode 100644
index 0000000000..77100976fd
--- /dev/null
+++ b/experimental/apps-mcp/lib/skill/databricks-apps/reference/trpc.md
@@ -0,0 +1,95 @@
+# tRPC for Custom Endpoints
+
+**CRITICAL**: Do NOT use tRPC for SQL queries or data retrieval. Use `config/queries/` + `useAnalyticsQuery` instead.
+
+Use tRPC ONLY for:
+
+- **Mutations**: Creating, updating, or deleting data (INSERT, UPDATE, DELETE)
+- **External APIs**: Calling Databricks APIs (serving endpoints, jobs, MLflow, etc.)
+- **Complex business logic**: Multi-step operations that cannot be expressed in SQL
+- **File operations**: File uploads, processing, transformations
+- **Custom computations**: Operations requiring TypeScript/Node.js logic
+
+## Server-side Pattern
+
+```typescript
+// server/trpc.ts
+import { initTRPC } from '@trpc/server';
+import { getRequestContext } from '@databricks/app-kit';
+import superjson from 'superjson';
+import { z } from 'zod';
+
+const t = initTRPC.create({ transformer: superjson });
+const publicProcedure = t.procedure;
+
+export const appRouter = t.router({
+  // Example: Query a serving endpoint
+  queryModel: publicProcedure.input(z.object({ prompt: z.string() })).query(async ({ input: { prompt } }) => {
+    const { serviceDatabricksClient: client } = getRequestContext();
+    const response = await client.servingEndpoints.query({
+      name: 'your-endpoint-name',
+      messages: [{ role: 'user', content: prompt }],
+    });
+    return response;
+  }),
+
+  // Example: Mutation
+  createRecord: publicProcedure.input(z.object({ name: z.string() })).mutation(async ({ input }) => {
+    // Custom logic here
+    return { success: true, id: 123 };
+  }),
+});
+```
+
+## Client-side Pattern
+
+```typescript
+// client/src/components/MyComponent.tsx
+import { trpc } from '@/lib/trpc';
+import { useState, useEffect } from 'react';
+
+function MyComponent() {
+  const [result, setResult] = useState<unknown>(null);
+
+  useEffect(() => {
+    trpc.queryModel
+      .query({ prompt: "Hello" })
+      .then(setResult)
+      .catch(console.error);
+  }, []);
+
+  const handleCreate = async () => {
+    await trpc.createRecord.mutate({ name: "test" });
+  };
+
+  return <div>{/* component JSX */}</div>
; +} +``` + +## Decision Tree for Data Operations + +1. **Need to display data from SQL?** + - **Chart or Table?** -> Use visualization components (`BarChart`, `LineChart`, `DataTable`, etc.) + - **Custom display (KPIs, cards, lists)?** -> Use `useAnalyticsQuery` hook + - **Never** use tRPC for SQL SELECT statements + +2. **Need to call a Databricks API?** -> Use tRPC + - Serving endpoints (model inference) + - MLflow operations + - Jobs API + - Workspace API + +3. **Need to modify data?** -> Use tRPC mutations + - INSERT, UPDATE, DELETE operations + - Multi-step transactions + - Business logic with side effects + +4. **Need non-SQL custom logic?** -> Use tRPC + - File processing + - External API calls + - Complex computations in TypeScript + +**Summary:** +- SQL queries -> Visualization components or `useAnalyticsQuery` +- Databricks APIs -> tRPC +- Data mutations -> tRPC +- SQL queries via tRPC -> NEVER do this diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/scripts/db b/experimental/apps-mcp/lib/skill/databricks-apps/scripts/db new file mode 100755 index 0000000000..da34b4af13 --- /dev/null +++ b/experimental/apps-mcp/lib/skill/databricks-apps/scripts/db @@ -0,0 +1,5 @@ +#!/usr/bin/env bash +CYAN='\033[0;36m' +NC='\033[0m' +echo -e "${CYAN}> databricks $*${NC}" >&2 +__DATABRICKS_CLI_PATH__ "$@" diff --git a/experimental/apps-mcp/lib/skill/skill.go b/experimental/apps-mcp/lib/skill/skill.go new file mode 100644 index 0000000000..baea5c1dfe --- /dev/null +++ b/experimental/apps-mcp/lib/skill/skill.go @@ -0,0 +1,6 @@ +package skill + +import "embed" + +//go:embed all:databricks-apps +var SkillFS embed.FS From 4a6039e759678ebbedb1767473f0e0760a8bf0fc Mon Sep 17 00:00:00 2001 From: Arseny Kravchenko Date: Fri, 12 Dec 2025 18:03:16 +0100 Subject: [PATCH 2/2] rename --- .../skill/{databricks-apps => databricks-toolset}/SKILL.md | 6 +++--- .../reference/app-kit-sdk.md | 0 .../reference/authentication.md | 0 .../reference/frontend.md | 0 .../reference/sql-queries.md | 0 .../reference/testing.md | 0 .../reference/trpc.md | 0 .../{databricks-apps => databricks-toolset}/scripts/db | 0 8 files changed, 3 insertions(+), 3 deletions(-) rename experimental/apps-mcp/lib/skill/{databricks-apps => databricks-toolset}/SKILL.md (95%) rename experimental/apps-mcp/lib/skill/{databricks-apps => databricks-toolset}/reference/app-kit-sdk.md (100%) rename experimental/apps-mcp/lib/skill/{databricks-apps => databricks-toolset}/reference/authentication.md (100%) rename experimental/apps-mcp/lib/skill/{databricks-apps => databricks-toolset}/reference/frontend.md (100%) rename experimental/apps-mcp/lib/skill/{databricks-apps => databricks-toolset}/reference/sql-queries.md (100%) rename experimental/apps-mcp/lib/skill/{databricks-apps => databricks-toolset}/reference/testing.md (100%) rename experimental/apps-mcp/lib/skill/{databricks-apps => databricks-toolset}/reference/trpc.md (100%) rename experimental/apps-mcp/lib/skill/{databricks-apps => databricks-toolset}/scripts/db (100%) diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/SKILL.md b/experimental/apps-mcp/lib/skill/databricks-toolset/SKILL.md similarity index 95% rename from experimental/apps-mcp/lib/skill/databricks-apps/SKILL.md rename to experimental/apps-mcp/lib/skill/databricks-toolset/SKILL.md index 3539557993..29063e593b 100644 --- a/experimental/apps-mcp/lib/skill/databricks-apps/SKILL.md +++ b/experimental/apps-mcp/lib/skill/databricks-toolset/SKILL.md @@ -1,7 +1,7 @@ --- -name: databricks-apps -description: Guide for building 
and deploying Databricks Apps using AppKit. Use when creating data visualization apps, dashboards and similar data-oriented apps. Covers scaffolding, development, validation, and deployment.
+name: databricks-toolset
+description: REQUIRED before building dashboards, data apps, or analytics tools. Provides project scaffolding, SQL data access, and deployment commands. Invoke this skill first - do not start implementation without it. User is a Databricks power user.
+---
 
 # Databricks Apps Development Guide
 
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/app-kit-sdk.md b/experimental/apps-mcp/lib/skill/databricks-toolset/reference/app-kit-sdk.md
similarity index 100%
rename from experimental/apps-mcp/lib/skill/databricks-apps/reference/app-kit-sdk.md
rename to experimental/apps-mcp/lib/skill/databricks-toolset/reference/app-kit-sdk.md
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/authentication.md b/experimental/apps-mcp/lib/skill/databricks-toolset/reference/authentication.md
similarity index 100%
rename from experimental/apps-mcp/lib/skill/databricks-apps/reference/authentication.md
rename to experimental/apps-mcp/lib/skill/databricks-toolset/reference/authentication.md
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/frontend.md b/experimental/apps-mcp/lib/skill/databricks-toolset/reference/frontend.md
similarity index 100%
rename from experimental/apps-mcp/lib/skill/databricks-apps/reference/frontend.md
rename to experimental/apps-mcp/lib/skill/databricks-toolset/reference/frontend.md
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/sql-queries.md b/experimental/apps-mcp/lib/skill/databricks-toolset/reference/sql-queries.md
similarity index 100%
rename from experimental/apps-mcp/lib/skill/databricks-apps/reference/sql-queries.md
rename to experimental/apps-mcp/lib/skill/databricks-toolset/reference/sql-queries.md
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/testing.md b/experimental/apps-mcp/lib/skill/databricks-toolset/reference/testing.md
similarity index 100%
rename from experimental/apps-mcp/lib/skill/databricks-apps/reference/testing.md
rename to experimental/apps-mcp/lib/skill/databricks-toolset/reference/testing.md
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/reference/trpc.md b/experimental/apps-mcp/lib/skill/databricks-toolset/reference/trpc.md
similarity index 100%
rename from experimental/apps-mcp/lib/skill/databricks-apps/reference/trpc.md
rename to experimental/apps-mcp/lib/skill/databricks-toolset/reference/trpc.md
diff --git a/experimental/apps-mcp/lib/skill/databricks-apps/scripts/db b/experimental/apps-mcp/lib/skill/databricks-toolset/scripts/db
similarity index 100%
rename from experimental/apps-mcp/lib/skill/databricks-apps/scripts/db
rename to experimental/apps-mcp/lib/skill/databricks-toolset/scripts/db