diff --git a/apps/dev-playground/server/lakebase-examples-plugin.ts b/apps/dev-playground/server/lakebase-examples-plugin.ts index 556b53c4..930653f8 100644 --- a/apps/dev-playground/server/lakebase-examples-plugin.ts +++ b/apps/dev-playground/server/lakebase-examples-plugin.ts @@ -1,4 +1,4 @@ -import { Plugin, toPlugin } from "@databricks/appkit"; +import { getUsernameWithApiLookup, Plugin, toPlugin } from "@databricks/appkit"; import type { IAppRouter } from "shared"; import * as drizzleExample from "./lakebase-examples/drizzle-example"; import * as rawExample from "./lakebase-examples/raw-driver-example"; @@ -42,12 +42,14 @@ export class LakebaseExamplesPlugin extends Plugin { } try { + const user = await getUsernameWithApiLookup(); + // Initialize all four examples in parallel await Promise.all([ - rawExample.setup(), - drizzleExample.setup(), - typeormExample.setup(), - sequelizeExample.setup(), + rawExample.setup(user), + drizzleExample.setup(user), + typeormExample.setup(user), + sequelizeExample.setup(user), ]); } catch (error) { console.error("Failed to initialize Lakebase examples:", error); diff --git a/apps/dev-playground/server/lakebase-examples/drizzle-example.ts b/apps/dev-playground/server/lakebase-examples/drizzle-example.ts index f0aa6c30..d4ede2ed 100644 --- a/apps/dev-playground/server/lakebase-examples/drizzle-example.ts +++ b/apps/dev-playground/server/lakebase-examples/drizzle-example.ts @@ -45,8 +45,8 @@ type NewActivityLog = typeof activityLogs.$inferInsert; let db: ReturnType; let pool: Pool; -export async function setup() { - pool = createLakebasePool(); +export async function setup(user?: string) { + pool = createLakebasePool({ user }); db = drizzle(pool); // For production apps, use: npx drizzle-kit push or drizzle-kit generate + migrate diff --git a/apps/dev-playground/server/lakebase-examples/raw-driver-example.ts b/apps/dev-playground/server/lakebase-examples/raw-driver-example.ts index 66d2ecb7..43b2ca3b 100644 --- 
a/apps/dev-playground/server/lakebase-examples/raw-driver-example.ts +++ b/apps/dev-playground/server/lakebase-examples/raw-driver-example.ts @@ -25,9 +25,9 @@ interface Product { created_at: Date; } -export async function setup() { +export async function setup(user?: string) { // Create pool with automatic OAuth token refresh - pool = createLakebasePool(); + pool = createLakebasePool({ user }); // Create schema and table (idempotent) await pool.query(` diff --git a/apps/dev-playground/server/lakebase-examples/sequelize-example.ts b/apps/dev-playground/server/lakebase-examples/sequelize-example.ts index f9f5b513..8792c99c 100644 --- a/apps/dev-playground/server/lakebase-examples/sequelize-example.ts +++ b/apps/dev-playground/server/lakebase-examples/sequelize-example.ts @@ -43,11 +43,11 @@ class Order let sequelize: Sequelize; -export async function setup() { +export async function setup(user?: string) { // @ts-expect-error password property supports a function for Lakehouse OAuth tokens sequelize = new Sequelize({ dialect: "postgres", - ...getLakebaseOrmConfig(), + ...getLakebaseOrmConfig({ user }), logging: false, }); diff --git a/apps/dev-playground/server/lakebase-examples/typeorm-example.ts b/apps/dev-playground/server/lakebase-examples/typeorm-example.ts index 171da14e..21b10831 100644 --- a/apps/dev-playground/server/lakebase-examples/typeorm-example.ts +++ b/apps/dev-playground/server/lakebase-examples/typeorm-example.ts @@ -43,16 +43,16 @@ class Task { let dataSource: DataSource; -export async function setup() { +export async function setup(user?: string) { // Create schema if not exists (TypeORM's synchronize doesn't create schemas) // See https://github.com/typeorm/typeorm/issues/3192 - const pool = createLakebasePool(); + const pool = createLakebasePool({ user }); await pool.query("CREATE SCHEMA IF NOT EXISTS typeorm_example"); await pool.end(); dataSource = new DataSource({ type: "postgres", - ...getLakebaseOrmConfig(), + ...getLakebaseOrmConfig({ user 
}), entities: [Task], synchronize: true, logging: false, diff --git a/docs/docs/architecture.md b/docs/docs/architecture.md index d9d26c4a..c878faec 100644 --- a/docs/docs/architecture.md +++ b/docs/docs/architecture.md @@ -47,7 +47,7 @@ The backend SDK that provides the plugin architecture and core functionality. It - Cache management and streaming capabilities - Type generation for SQL queries -See the [Plugins](./plugins.md) and [API reference](./api/appkit/) documentation for detailed information. +See the [Plugins](./plugins/index.md) and [API reference](./api/appkit/) documentation for detailed information. ### @databricks/appkit-ui @@ -87,7 +87,7 @@ Integration with Databricks services: ## See also -- [Plugins](./plugins.md): Deep dive into the plugin system +- [Plugins](./plugins/index.md): Deep dive into the plugin system - [API reference](./api/): Complete API documentation - [Development](./development/): Explore development workflows - [Core Principles](./core-principles.md): Learn about AppKit's design philosophy diff --git a/docs/docs/configuration.mdx b/docs/docs/configuration.mdx index c7ca9a32..22f0aff2 100644 --- a/docs/docs/configuration.mdx +++ b/docs/docs/configuration.mdx @@ -142,4 +142,4 @@ For advanced Databricks Apps configuration (authorization, networking, resource ## See also - [App management](./app-management.mdx) - Deploying and managing apps -- [Plugins](./plugins.md) - Plugin configuration options +- [Plugins](./plugins/index.md) - Plugin configuration options diff --git a/docs/docs/development/project-setup.md b/docs/docs/development/project-setup.md index 7f271ecb..b192abbe 100644 --- a/docs/docs/development/project-setup.md +++ b/docs/docs/development/project-setup.md @@ -232,4 +232,4 @@ Then create `config/queries/` and add your `.sql` files. 
- [Local development](./local-development.mdx) - Running the dev server - [Configuration](../configuration.mdx) - Environment variables -- [Plugins](../plugins.md) - Plugin configuration +- [Plugins](../plugins/index.md) - Plugin configuration diff --git a/docs/docs/development/type-generation.md b/docs/docs/development/type-generation.md index ef277f12..c6bc0f8d 100644 --- a/docs/docs/development/type-generation.md +++ b/docs/docs/development/type-generation.md @@ -104,5 +104,5 @@ data?.forEach(row => { ## See also -- [Plugins](../plugins.md) - Analytics plugin configuration +- [Plugins](../plugins/index.md) - Analytics plugin configuration - [API Reference](/docs/api/appkit-ui) - Complete UI components API documentation diff --git a/docs/docs/plugins.md b/docs/docs/plugins.md deleted file mode 100644 index 80ea8b0e..00000000 --- a/docs/docs/plugins.md +++ /dev/null @@ -1,482 +0,0 @@ ---- -sidebar_position: 3 ---- - -# Plugins - -Plugins are modular extensions that add capabilities to your AppKit application. They follow a defined lifecycle and have access to shared services like caching, telemetry, and streaming. - -For complete API documentation, see the [`Plugin`](api/appkit/Class.Plugin.md) class reference. - -## Built-in plugins - -### Server plugin - -Provides HTTP server capabilities with development and production modes. - -**Key features:** -- Express server for REST APIs -- Vite dev server with hot module reload -- Static file serving for production -- Remote tunneling to deployed backends - -The Server plugin uses the deferred initialization phase to access routes from other plugins. 
- -#### What it does - -- Starts an Express server (default `host=0.0.0.0`, `port=8000`) -- Mounts plugin routes under `/api//...` -- Adds `/health` endpoint (returns `{ status: "ok" }`) -- Serves frontend: - - **Development** (`NODE_ENV=development`): runs a Vite dev server in middleware mode - - **Production**: auto-detects static frontend directory (checks `dist`, `client/dist`, `build`, `public`, `out`) - -#### Minimal server example - -The smallest valid AppKit server: - -```ts -// server/index.ts -import { createApp, server } from "@databricks/appkit"; - -await createApp({ - plugins: [server()], -}); -``` - -#### Manual server start example - -When you need to extend Express with custom routes: - -```ts -import { createApp, server } from "@databricks/appkit"; - -const appkit = await createApp({ - plugins: [server({ autoStart: false })], -}); - -appkit.server.extend((app) => { - app.get("/custom", (_req, res) => res.json({ ok: true })); -}); - -await appkit.server.start(); -``` - -#### Configuration options - -```ts -import { createApp, server } from "@databricks/appkit"; - -await createApp({ - plugins: [ - server({ - port: 8000, // default: Number(process.env.DATABRICKS_APP_PORT) || 8000 - host: "0.0.0.0", // default: process.env.FLASK_RUN_HOST || "0.0.0.0" - autoStart: true, // default: true - staticPath: "dist", // optional: force a specific static directory - }), - ], -}); -``` - -### Analytics plugin - -Enables SQL query execution against Databricks SQL Warehouses. 
- -**Key features:** -- File-based SQL queries with automatic type generation -- Parameterized queries with type-safe [SQL helpers](api/appkit/Variable.sql.md) -- JSON and Arrow format support -- Built-in caching and retry logic -- Server-Sent Events (SSE) streaming - -#### Basic usage - -```ts -import { analytics, createApp, server } from "@databricks/appkit"; - -await createApp({ - plugins: [server(), analytics({})], -}); -``` - -#### Where queries live - -- Put `.sql` files in `config/queries/` -- Query key is the filename without `.sql` (e.g. `spend_summary.sql` → `"spend_summary"`) - -#### SQL parameters - -Use `:paramName` placeholders and optionally annotate parameter types using SQL comments: - -```sql --- @param startDate DATE --- @param endDate DATE --- @param limit NUMERIC -SELECT ... -WHERE usage_date BETWEEN :startDate AND :endDate -LIMIT :limit -``` - -**Supported `-- @param` types** (case-insensitive): -- `STRING`, `NUMERIC`, `BOOLEAN`, `DATE`, `TIMESTAMP`, `BINARY` - -#### Server-injected parameters - -`:workspaceId` is **injected by the server** and **must not** be annotated: - -```sql -WHERE workspace_id = :workspaceId -``` - -#### HTTP endpoints - -The analytics plugin exposes these endpoints (mounted under `/api/analytics`): - -- `POST /api/analytics/query/:query_key` -- `POST /api/analytics/users/me/query/:query_key` -- `GET /api/analytics/arrow-result/:jobId` -- `GET /api/analytics/users/me/arrow-result/:jobId` - -#### Format options - -- `format: "JSON"` (default) returns JSON rows -- `format: "ARROW"` returns an Arrow "statement_id" payload over SSE, then the client fetches binary Arrow from `/api/analytics/arrow-result/:jobId` - -### Execution context and `asUser(req)` - -AppKit manages Databricks authentication via two contexts: - -- **ServiceContext** (singleton): Initialized at app startup with service principal credentials -- **ExecutionContext**: Determined at runtime - either service principal or user context - -#### Headers for user 
context - -- `x-forwarded-user`: required in production; identifies the user -- `x-forwarded-access-token`: required for user token passthrough - -#### Using `asUser(req)` for user-scoped operations - -The `asUser(req)` pattern allows plugins to execute operations using the requesting user's credentials: - -```ts -// In a custom plugin route handler -router.post("/users/me/data", async (req, res) => { - // Execute as the user (uses their Databricks permissions) - const result = await this.asUser(req).query("SELECT ..."); - res.json(result); -}); - -// Service principal execution (default) -router.post("/system/data", async (req, res) => { - const result = await this.query("SELECT ..."); - res.json(result); -}); -``` - -#### Context helper functions - -Exported from `@databricks/appkit`: - -- `getCurrentUserId()`: Returns user ID in user context, service user ID otherwise -- `getWorkspaceClient()`: Returns the appropriate WorkspaceClient for current context -- `getWarehouseId()`: `Promise` (from `DATABRICKS_WAREHOUSE_ID` or auto-selected in dev) -- `getWorkspaceId()`: `Promise` (from `DATABRICKS_WORKSPACE_ID` or fetched) -- `isInUserContext()`: Returns `true` if currently executing in user context - -#### Development mode behavior - -In local development (`NODE_ENV=development`), if `asUser(req)` is called without a user token, it logs a warning and falls back to the service principal. - -## Using plugins - -Configure plugins when creating your AppKit instance: - -```typescript -import { createApp, server, analytics } from "@databricks/appkit"; - -const AppKit = await createApp({ - plugins: [ - server({ port: 8000 }), - analytics(), - ], -}); -``` - -For complete configuration options, see [`createApp`](api/appkit/Function.createApp.md). - -## Plugin management - -AppKit includes a CLI for managing plugins. All commands are available under `npx @databricks/appkit plugin`. 
- -### Create a plugin - -Scaffold a new plugin interactively: - -```bash -npx @databricks/appkit plugin create -``` - -The wizard walks you through: -- **Placement**: In your repository (e.g. `plugins/my-plugin`) or as a standalone package -- **Metadata**: Name, display name, description -- **Resources**: Which Databricks resources the plugin needs (SQL Warehouse, Secret, etc.) and whether each is required or optional -- **Optional fields**: Author, version, license - -The command generates a complete plugin scaffold with `manifest.json`, TypeScript class, and barrel exports — ready to register in your app. - -### Sync plugin manifests - -Scan your project for plugins and generate `appkit.plugins.json`: - -```bash -npx @databricks/appkit plugin sync --write -``` - -This discovers plugin manifests from installed packages and local imports, then writes a consolidated manifest used by deployment tooling. Plugins referenced in your `createApp({ plugins: [...] })` call are automatically marked as required. - -Use the `--silent` flag in build hooks to suppress output: - -```json -{ - "scripts": { - "sync": "appkit plugin sync --write --silent", - "predev": "npm run sync", - "prebuild": "npm run sync" - } -} -``` - -### Validate manifests - -Check plugin manifests against the JSON schema: - -```bash -# Validate manifest.json in the current directory -npx @databricks/appkit plugin validate - -# Validate specific files or directories -npx @databricks/appkit plugin validate plugins/my-plugin appkit.plugins.json -``` - -The validator auto-detects whether a file is a plugin manifest or a template manifest (from `$schema`) and reports errors with humanized paths and expected values. 
- -### List plugins - -View registered plugins from `appkit.plugins.json` or scan a directory: - -```bash -# From appkit.plugins.json (default) -npx @databricks/appkit plugin list - -# Scan a directory for plugin folders -npx @databricks/appkit plugin list --dir plugins/ - -# JSON output for scripting -npx @databricks/appkit plugin list --json -``` - -### Add a resource to a plugin - -Interactively add a new resource requirement to an existing plugin manifest: - -```bash -npx @databricks/appkit plugin add-resource - -# Or specify the plugin directory -npx @databricks/appkit plugin add-resource --path plugins/my-plugin -``` - -## Creating custom plugins - -If you need custom API routes or background logic, implement an AppKit plugin. The fastest way is to use the CLI: - -```bash -npx @databricks/appkit plugin create -``` - -For a deeper understanding of the plugin structure, read on. - -### Basic plugin example - -Extend the [`Plugin`](api/appkit/Class.Plugin.md) class and export with `toPlugin()`: - -```typescript -import { Plugin, toPlugin } from "@databricks/appkit"; -import type express from "express"; - -class MyPlugin extends Plugin { - name = "myPlugin"; - - // Define resource requirements in the static manifest - static manifest = { - name: "myPlugin", - displayName: "My Plugin", - description: "A custom plugin", - resources: { - required: [ - { - type: "secret", - alias: "apiKey", - resourceKey: "apiKey", - description: "API key for external service", - permission: "READ", - fields: { - scope: { env: "MY_SECRET_SCOPE", description: "Secret scope" }, - key: { env: "MY_API_KEY", description: "Secret key name" } - } - } - ], - optional: [] - } - }; - - async setup() { - // Initialize your plugin - } - - myCustomMethod() { - // Some implementation - } - - async shutdown() { - // Clean up resources - } - - exports() { - // an object with the methods from this plugin to expose - return { - myCustomMethod: this.myCustomMethod - } - } -} - -export const myPlugin = 
toPlugin, "myPlugin">( - MyPlugin, - "myPlugin", -); -``` - -### Config-dependent resources - -The manifest defines resources as either `required` (always needed) or `optional` (may be needed). -For resources that become required based on plugin configuration, implement a static -`getResourceRequirements(config)` method: - -```typescript -interface MyPluginConfig extends BasePluginConfig { - enableCaching?: boolean; -} - -class MyPlugin extends Plugin { - name = "myPlugin"; - - static manifest = { - name: "myPlugin", - displayName: "My Plugin", - description: "A plugin with optional caching", - resources: { - required: [ - { type: "sql_warehouse", alias: "warehouse", resourceKey: "sqlWarehouse", description: "Query execution", permission: "CAN_USE", fields: { id: { env: "DATABRICKS_WAREHOUSE_ID" } } } - ], - optional: [ - // Listed as optional in manifest for static analysis - { type: "database", alias: "cache", resourceKey: "cache", description: "Query result caching (if enabled)", permission: "CAN_CONNECT_AND_CREATE", fields: { instance_name: { env: "DATABRICKS_CACHE_INSTANCE" }, database_name: { env: "DATABRICKS_CACHE_DB" } } } - ] - } - }; - - // Runtime: Convert optional resources to required based on config - static getResourceRequirements(config: MyPluginConfig) { - const resources = []; - if (config.enableCaching) { - // When caching is enabled, Database becomes required - resources.push({ - type: "database", - alias: "cache", - resourceKey: "cache", - description: "Query result caching", - permission: "CAN_CONNECT_AND_CREATE", - fields: { - instance_name: { env: "DATABRICKS_CACHE_INSTANCE" }, - database_name: { env: "DATABRICKS_CACHE_DB" }, - }, - required: true // Mark as required at runtime - }); - } - return resources; - } -} -``` - -This pattern allows: -- **Static tools** (CLI, docs) to show all possible resources -- **Runtime validation** to enforce resources based on actual configuration - -### Key extension points - -- **Route injection**: 
Implement `injectRoutes()` to add custom endpoints using [`IAppRouter`](api/appkit/TypeAlias.IAppRouter.md) -- **Lifecycle hooks**: Override `setup()`, and `shutdown()` methods -- **Shared services**: - - **Cache management**: Access the cache service via `this.cache`. See [`CacheConfig`](api/appkit/Interface.CacheConfig.md) for configuration. - - **Telemetry**: Instrument your plugin with traces and metrics via `this.telemetry`. See [`ITelemetry`](api/appkit/Interface.ITelemetry.md). -- **Execution interceptors**: Use `execute()` and `executeStream()` with [`StreamExecutionSettings`](api/appkit/Interface.StreamExecutionSettings.md) - -**Consuming your plugin programmatically** - -Optionally, you may want to provide a way to consume your plugin programmatically using the AppKit object. -To do that, your plugin needs to implement the `exports` method, returning an object with the methods you want to expose. From the previous example, the plugin could be consumed as follows: - -```ts -const AppKit = await createApp({ - plugins: [ - server({ port: 8000 }), - analytics(), - myPlugin(), - ], -}); - -AppKit.myPlugin.myCustomMethod(); -``` - -See the [`Plugin`](api/appkit/Class.Plugin.md) API reference for complete documentation. - -## Caching - -AppKit provides both global and plugin-level caching capabilities. - -### Global cache configuration - -```ts -await createApp({ - plugins: [server(), analytics({})], - cache: { - enabled: true, - ttl: 3600, // seconds - strictPersistence: false, - }, -}); -``` - -Storage auto-selects **Lakebase V1 (Provisioned) persistent cache when healthy**, otherwise falls back to in-memory. Support for Lakebase Autoscaling coming soon. 
- -### Plugin-level caching - -Inside a Plugin subclass: - -```ts -const value = await this.cache.getOrExecute( - ["myPlugin", "data", userId], - async () => expensiveWork(), - userKey, - { ttl: 300 }, -); -``` - -## Plugin phases - -Plugins initialize in three phases: - -- **Core**: Reserved for framework-level plugins. Initializes first. -- **Normal**: Default phase for application plugins. Initializes after core. -- **Deferred**: Initializes last with access to other plugin instances via `config.plugins`. Use when your plugin depends on other plugins (e.g., Server Plugin). diff --git a/docs/docs/plugins/_category_.json b/docs/docs/plugins/_category_.json new file mode 100644 index 00000000..599a8656 --- /dev/null +++ b/docs/docs/plugins/_category_.json @@ -0,0 +1,10 @@ +{ + "label": "Plugins", + "position": 3, + "collapsible": true, + "collapsed": false, + "link": { + "type": "doc", + "id": "plugins/index" + } +} diff --git a/docs/docs/plugins/analytics.md b/docs/docs/plugins/analytics.md new file mode 100644 index 00000000..7d8fe69c --- /dev/null +++ b/docs/docs/plugins/analytics.md @@ -0,0 +1,65 @@ +--- +sidebar_position: 3 +--- + +# Analytics plugin + +Enables SQL query execution against Databricks SQL Warehouses. + +**Key features:** +- File-based SQL queries with automatic type generation +- Parameterized queries with type-safe [SQL helpers](../api/appkit/Variable.sql.md) +- JSON and Arrow format support +- Built-in caching and retry logic +- Server-Sent Events (SSE) streaming + +## Basic usage + +```ts +import { analytics, createApp, server } from "@databricks/appkit"; + +await createApp({ + plugins: [server(), analytics({})], +}); +``` + +## Where queries live + +- Put `.sql` files in `config/queries/` +- Query key is the filename without `.sql` (e.g. 
`spend_summary.sql` → `"spend_summary"`) + +## SQL parameters + +Use `:paramName` placeholders and optionally annotate parameter types using SQL comments: + +```sql +-- @param startDate DATE +-- @param endDate DATE +-- @param limit NUMERIC +SELECT ... +WHERE usage_date BETWEEN :startDate AND :endDate +LIMIT :limit +``` + +**Supported `-- @param` types** (case-insensitive): +- `STRING`, `NUMERIC`, `BOOLEAN`, `DATE`, `TIMESTAMP`, `BINARY` + +## Server-injected parameters + +`:workspaceId` is **injected by the server** and **must not** be annotated: + +```sql +WHERE workspace_id = :workspaceId +``` + +## HTTP endpoints + +The analytics plugin exposes these endpoints (mounted under `/api/analytics`): + +- `POST /api/analytics/query/:query_key` +- `GET /api/analytics/arrow-result/:jobId` + +## Format options + +- `format: "JSON"` (default) returns JSON rows +- `format: "ARROW"` returns an Arrow "statement_id" payload over SSE, then the client fetches binary Arrow from `/api/analytics/arrow-result/:jobId` diff --git a/docs/docs/plugins/assets/lakebase-setup/step-1.png b/docs/docs/plugins/assets/lakebase-setup/step-1.png new file mode 100644 index 00000000..168ab7bd Binary files /dev/null and b/docs/docs/plugins/assets/lakebase-setup/step-1.png differ diff --git a/docs/docs/plugins/assets/lakebase-setup/step-2.png b/docs/docs/plugins/assets/lakebase-setup/step-2.png new file mode 100644 index 00000000..5d99b912 Binary files /dev/null and b/docs/docs/plugins/assets/lakebase-setup/step-2.png differ diff --git a/docs/docs/plugins/assets/lakebase-setup/step-4.png b/docs/docs/plugins/assets/lakebase-setup/step-4.png new file mode 100644 index 00000000..11b853ef Binary files /dev/null and b/docs/docs/plugins/assets/lakebase-setup/step-4.png differ diff --git a/docs/docs/plugins/assets/lakebase-setup/step-5.png b/docs/docs/plugins/assets/lakebase-setup/step-5.png new file mode 100644 index 00000000..3fc65dce Binary files /dev/null and 
b/docs/docs/plugins/assets/lakebase-setup/step-5.png differ diff --git a/docs/docs/plugins/assets/lakebase-setup/step-6.png b/docs/docs/plugins/assets/lakebase-setup/step-6.png new file mode 100644 index 00000000..bb13e2bd Binary files /dev/null and b/docs/docs/plugins/assets/lakebase-setup/step-6.png differ diff --git a/docs/docs/plugins/caching.md b/docs/docs/plugins/caching.md new file mode 100644 index 00000000..d6cba4c3 --- /dev/null +++ b/docs/docs/plugins/caching.md @@ -0,0 +1,35 @@ +--- +sidebar_position: 8 +--- + +# Caching + +AppKit provides both global and plugin-level caching capabilities. + +## Global cache configuration + +```ts +await createApp({ + plugins: [server(), analytics({})], + cache: { + enabled: true, + ttl: 3600, // seconds + strictPersistence: false, + }, +}); +``` + +Storage auto-selects **Lakebase V1 (Provisioned) persistent cache when healthy**, otherwise falls back to in-memory. Support for Lakebase Autoscaling coming soon. + +## Plugin-level caching + +Inside a Plugin subclass: + +```ts +const value = await this.cache.getOrExecute( + ["myPlugin", "data", userId], + async () => expensiveWork(), + userKey, + { ttl: 300 }, +); +``` diff --git a/docs/docs/plugins/custom-plugins.md b/docs/docs/plugins/custom-plugins.md new file mode 100644 index 00000000..59a1ce51 --- /dev/null +++ b/docs/docs/plugins/custom-plugins.md @@ -0,0 +1,157 @@ +--- +sidebar_position: 7 +--- + +# Creating custom plugins + +If you need custom API routes or background logic, implement an AppKit plugin. The fastest way is to use the CLI: + +```bash +npx @databricks/appkit plugin create +``` + +For a deeper understanding of the plugin structure, read on. 
+ +## Basic plugin example + +Extend the [`Plugin`](../api/appkit/Class.Plugin.md) class and export with `toPlugin()`: + +```typescript +import { Plugin, toPlugin } from "@databricks/appkit"; +import type express from "express"; + +class MyPlugin extends Plugin { + name = "myPlugin"; + + // Define resource requirements in the static manifest + static manifest = { + name: "myPlugin", + displayName: "My Plugin", + description: "A custom plugin", + resources: { + required: [ + { + type: "secret", + alias: "apiKey", + resourceKey: "apiKey", + description: "API key for external service", + permission: "READ", + fields: { + scope: { env: "MY_SECRET_SCOPE", description: "Secret scope" }, + key: { env: "MY_API_KEY", description: "Secret key name" } + } + } + ], + optional: [] + } + }; + + async setup() { + // Initialize your plugin + } + + myCustomMethod() { + // Some implementation + } + + async shutdown() { + // Clean up resources + } + + exports() { + // an object with the methods from this plugin to expose + return { + myCustomMethod: this.myCustomMethod + } + } +} + +export const myPlugin = toPlugin, "myPlugin">( + MyPlugin, + "myPlugin", +); +``` + +## Config-dependent resources + +The manifest defines resources as either `required` (always needed) or `optional` (may be needed). 
+For resources that become required based on plugin configuration, implement a static +`getResourceRequirements(config)` method: + +```typescript +interface MyPluginConfig extends BasePluginConfig { + enableCaching?: boolean; +} + +class MyPlugin extends Plugin { + name = "myPlugin"; + + static manifest = { + name: "myPlugin", + displayName: "My Plugin", + description: "A plugin with optional caching", + resources: { + required: [ + { type: "sql_warehouse", alias: "warehouse", resourceKey: "sqlWarehouse", description: "Query execution", permission: "CAN_USE", fields: { id: { env: "DATABRICKS_WAREHOUSE_ID" } } } + ], + optional: [ + // Listed as optional in manifest for static analysis + { type: "database", alias: "cache", resourceKey: "cache", description: "Query result caching (if enabled)", permission: "CAN_CONNECT_AND_CREATE", fields: { instance_name: { env: "DATABRICKS_CACHE_INSTANCE" }, database_name: { env: "DATABRICKS_CACHE_DB" } } } + ] + } + }; + + // Runtime: Convert optional resources to required based on config + static getResourceRequirements(config: MyPluginConfig) { + const resources = []; + if (config.enableCaching) { + // When caching is enabled, Database becomes required + resources.push({ + type: "database", + alias: "cache", + resourceKey: "cache", + description: "Query result caching", + permission: "CAN_CONNECT_AND_CREATE", + fields: { + instance_name: { env: "DATABRICKS_CACHE_INSTANCE" }, + database_name: { env: "DATABRICKS_CACHE_DB" }, + }, + required: true // Mark as required at runtime + }); + } + return resources; + } +} +``` + +This pattern allows: +- **Static tools** (CLI, docs) to show all possible resources +- **Runtime validation** to enforce resources based on actual configuration + +## Key extension points + +- **Route injection**: Implement `injectRoutes()` to add custom endpoints using [`IAppRouter`](../api/appkit/TypeAlias.IAppRouter.md) +- **Lifecycle hooks**: Override `setup()`, and `shutdown()` methods +- **Shared services**: 
+ - **Cache management**: Access the cache service via `this.cache`. See [`CacheConfig`](../api/appkit/Interface.CacheConfig.md) for configuration. + - **Telemetry**: Instrument your plugin with traces and metrics via `this.telemetry`. See [`ITelemetry`](../api/appkit/Interface.ITelemetry.md). +- **Execution interceptors**: Use `execute()` and `executeStream()` with [`StreamExecutionSettings`](../api/appkit/Interface.StreamExecutionSettings.md) + +**Consuming your plugin programmatically** + +Optionally, you may want to provide a way to consume your plugin programmatically using the AppKit object. +To do that, your plugin needs to implement the `exports` method, returning an object with the methods you want to expose. From the previous example, the plugin could be consumed as follows: + +```ts +const AppKit = await createApp({ + plugins: [ + server({ port: 8000 }), + analytics(), + myPlugin(), + ], +}); + +AppKit.myPlugin.myCustomMethod(); +``` + +See the [`Plugin`](../api/appkit/Class.Plugin.md) API reference for complete documentation. 
diff --git a/docs/docs/plugins/execution-context.md b/docs/docs/plugins/execution-context.md new file mode 100644 index 00000000..10da4d50 --- /dev/null +++ b/docs/docs/plugins/execution-context.md @@ -0,0 +1,48 @@ +--- +sidebar_position: 5 +--- + +# Execution context + +AppKit manages Databricks authentication via two contexts: + +- **ServiceContext** (singleton): Initialized at app startup with service principal credentials +- **ExecutionContext**: Determined at runtime - either service principal or user context + +## Headers for user context + +- `x-forwarded-user`: required in production; identifies the user +- `x-forwarded-access-token`: required for user token passthrough + +## Using `asUser(req)` for user-scoped operations + +The `asUser(req)` pattern allows plugins to execute operations using the requesting user's credentials: + +```ts +// In a custom plugin route handler +router.post("/users/me/data", async (req, res) => { + // Execute as the user (uses their Databricks permissions) + const result = await this.asUser(req).query("SELECT ..."); + res.json(result); +}); + +// Service principal execution (default) +router.post("/system/data", async (req, res) => { + const result = await this.query("SELECT ..."); + res.json(result); +}); +``` + +## Context helper functions + +Exported from `@databricks/appkit`: + +- `getCurrentUserId()`: Returns user ID in user context, service user ID otherwise +- `getWorkspaceClient()`: Returns the appropriate WorkspaceClient for current context +- `getWarehouseId()`: `Promise` (from `DATABRICKS_WAREHOUSE_ID` or auto-selected in dev) +- `getWorkspaceId()`: `Promise` (from `DATABRICKS_WORKSPACE_ID` or fetched) +- `isInUserContext()`: Returns `true` if currently executing in user context + +## Development mode behavior + +In local development (`NODE_ENV=development`), if `asUser(req)` is called without a user token, it logs a warning and falls back to the service principal. 
diff --git a/docs/docs/plugins/index.md b/docs/docs/plugins/index.md new file mode 100644 index 00000000..a74668cb --- /dev/null +++ b/docs/docs/plugins/index.md @@ -0,0 +1,34 @@ +--- +sidebar_position: 1 +--- + +# Plugins + +Plugins are modular extensions that add capabilities to your AppKit application. They follow a defined lifecycle and have access to shared services like caching, telemetry, and streaming. + +For complete API documentation, see the [`Plugin`](../api/appkit/Class.Plugin.md) class reference. + +## Using plugins + +Configure plugins when creating your AppKit instance: + +```typescript +import { createApp, server, analytics } from "@databricks/appkit"; + +const AppKit = await createApp({ + plugins: [ + server({ port: 8000 }), + analytics(), + ], +}); +``` + +For complete configuration options, see [`createApp`](../api/appkit/Function.createApp.md). + +## Plugin phases + +Plugins initialize in three phases: + +- **Core**: Reserved for framework-level plugins. Initializes first. +- **Normal**: Default phase for application plugins. Initializes after core. +- **Deferred**: Initializes last with access to other plugin instances via `config.plugins`. Use when your plugin depends on other plugins (e.g., Server Plugin). diff --git a/docs/docs/plugins/lakebase.md b/docs/docs/plugins/lakebase.md new file mode 100644 index 00000000..e1d3ec12 --- /dev/null +++ b/docs/docs/plugins/lakebase.md @@ -0,0 +1,175 @@ +--- +sidebar_position: 4 +--- + +# Lakebase plugin + +:::info +The Lakebase plugin currently requires a one-time manual setup to connect your Databricks App with your Lakebase database. An automated setup process is planned for an upcoming release. +::: + +Provides a PostgreSQL connection pool for Databricks Lakebase Autoscaling with automatic OAuth token refresh.
+ +**Key features:** +- Standard `pg.Pool` compatible with any PostgreSQL library or ORM +- Automatic OAuth token refresh (1-hour tokens, 2-minute refresh buffer) +- Token caching to minimize API calls +- Built-in OpenTelemetry instrumentation (query duration, pool connections, token refresh) + +## Setting up Lakebase + +Before using the plugin, you need to connect your Databricks App's service principal to your Lakebase database. + +### 1. Find your app's service principal + +Create a Databricks App from the UI (`Compute > Apps > Create App > Create a custom app`). Navigate to the **Environment** tab and note the `DATABRICKS_CLIENT_ID` value — this is the service principal that will connect to your Lakebase database. + +![App environment tab](./assets/lakebase-setup/step-1.png) + +### 2. Find your Project ID and Branch ID + +Create a new Lakebase Postgres Autoscaling project. Navigate to your Lakebase project's branch details and switch to the **Compute** tab. Note the **Project ID** and **Branch ID** from the URL. + +![Branch details](./assets/lakebase-setup/step-2.png) + +### 3. Find your endpoint + +Use the Databricks CLI to list endpoints for the branch. Note the `name` field from the output — this is your `LAKEBASE_ENDPOINT` value. + +```bash +databricks postgres list-endpoints projects/{project-id}/branches/{branch-id} +``` + +Example output: + +```json +[ + { + "create_time": "2026-02-19T12:13:02Z", + "name": "projects/{project-id}/branches/{branch-id}/endpoints/primary" + } +] +``` + +### 4. Get connection parameters + +Click the **Connect** button on your Lakebase branch and copy the `PGHOST` and `PGDATABASE` values for later. + +![Connect dialog](./assets/lakebase-setup/step-4.png) + +### 5. Grant access to the service principal + +Navigate to the **SQL Editor** tab on your Lakebase branch. Run the following SQL against the `databricks_postgres` database, replacing `` with the value from step 1 everywhere it appears: + +```sql +-- 1. 
Create the extension and role +CREATE EXTENSION IF NOT EXISTS databricks_auth; +SELECT databricks_create_role('', 'SERVICE_PRINCIPAL'); + +-- 2. Basic connection & usage +GRANT CONNECT ON DATABASE "databricks_postgres" TO ""; +GRANT ALL ON SCHEMA public TO ""; + +-- 3. Grant on existing objects +GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO ""; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO ""; +GRANT ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA public TO ""; +GRANT ALL PRIVILEGES ON ALL PROCEDURES IN SCHEMA public TO ""; + +-- 4. Grant on future objects +-- NOTE: This applies to objects created by the user running this script. +ALTER DEFAULT PRIVILEGES IN SCHEMA public + GRANT ALL ON TABLES TO ""; +ALTER DEFAULT PRIVILEGES IN SCHEMA public + GRANT ALL ON SEQUENCES TO ""; +ALTER DEFAULT PRIVILEGES IN SCHEMA public + GRANT ALL ON FUNCTIONS TO ""; +ALTER DEFAULT PRIVILEGES IN SCHEMA public + GRANT ALL ON ROUTINES TO ""; +``` + +![SQL Editor](./assets/lakebase-setup/step-5.png) + +### 6. Verify the role + +Navigate to the **Roles & Databases** tab and confirm the role is visible. You may need to fully refresh the page. + +![Roles & Databases tab](./assets/lakebase-setup/step-6.png) + +## Basic usage + +```ts +import { createApp, lakebase, server } from "@databricks/appkit"; + +await createApp({ + plugins: [server(), lakebase()], +}); +``` + +## Environment variables + +The required environment variables: + +| Variable | Description | +|---|---| +| `PGHOST` | Lakebase host | +| `PGDATABASE` | Database name | +| `LAKEBASE_ENDPOINT` | Endpoint resource path (e.g. 
`projects/.../branches/.../endpoints/...`) | +| `PGSSLMODE` | TLS mode — set to `require` | + +Ensure that those environment variables are set both for local development (`.env` file) and for deployment (`app.yaml` file): + +```yaml +env: + - name: LAKEBASE_ENDPOINT + value: projects/{project-id}/branches/{branch-id}/endpoints/primary + - name: PGHOST + value: {your-lakebase-host} + - name: PGDATABASE + value: databricks_postgres + - name: PGSSLMODE + value: require +``` + +For the full configuration reference (SSL, pool size, timeouts, logging, ORM examples), see the [`@databricks/lakebase` README](https://github.com/databricks/appkit/blob/main/packages/lakebase/README.md). + +## Accessing the pool + +After initialization, access Lakebase through the `AppKit.lakebase` object: + +```ts +const AppKit = await createApp({ + plugins: [server(), lakebase()], +}); + +// Direct query (parameterized) +const result = await AppKit.lakebase.query( + "SELECT * FROM orders WHERE user_id = $1", + [userId], +); + +// Raw pg.Pool (for ORMs or advanced usage) +const pool = AppKit.lakebase.pool; + +// ORM-ready config objects +const ormConfig = AppKit.lakebase.getOrmConfig(); // { host, port, database, ... } +const pgConfig = AppKit.lakebase.getPgConfig(); // pg.PoolConfig +``` + +## Configuration options + +Pass a `pool` object to override any defaults: + +```ts +await createApp({ + plugins: [ + lakebase({ + pool: { + max: 10, // Max pool connections (default: 10) + connectionTimeoutMillis: 5000, // Connection timeout ms (default: 10000) + idleTimeoutMillis: 30000, // Idle connection timeout ms (default: 30000) + }, + }), + ], +}); +``` diff --git a/docs/docs/plugins/plugin-management.md b/docs/docs/plugins/plugin-management.md new file mode 100644 index 00000000..1fa25068 --- /dev/null +++ b/docs/docs/plugins/plugin-management.md @@ -0,0 +1,85 @@ +--- +sidebar_position: 6 +--- + +# Plugin management + +AppKit includes a CLI for managing plugins. 
All commands are available under `npx @databricks/appkit plugin`. + +## Create a plugin + +Scaffold a new plugin interactively: + +```bash +npx @databricks/appkit plugin create +``` + +The wizard walks you through: +- **Placement**: In your repository (e.g. `plugins/my-plugin`) or as a standalone package +- **Metadata**: Name, display name, description +- **Resources**: Which Databricks resources the plugin needs (SQL Warehouse, Secret, etc.) and whether each is required or optional +- **Optional fields**: Author, version, license + +The command generates a complete plugin scaffold with `manifest.json`, TypeScript class, and barrel exports — ready to register in your app. + +## Sync plugin manifests + +Scan your project for plugins and generate `appkit.plugins.json`: + +```bash +npx @databricks/appkit plugin sync --write +``` + +This discovers plugin manifests from installed packages and local imports, then writes a consolidated manifest used by deployment tooling. Plugins referenced in your `createApp({ plugins: [...] })` call are automatically marked as required. + +Use the `--silent` flag in build hooks to suppress output: + +```json +{ + "scripts": { + "sync": "appkit plugin sync --write --silent", + "predev": "npm run sync", + "prebuild": "npm run sync" + } +} +``` + +## Validate manifests + +Check plugin manifests against the JSON schema: + +```bash +# Validate manifest.json in the current directory +npx @databricks/appkit plugin validate + +# Validate specific files or directories +npx @databricks/appkit plugin validate plugins/my-plugin appkit.plugins.json +``` + +The validator auto-detects whether a file is a plugin manifest or a template manifest (from `$schema`) and reports errors with humanized paths and expected values. 
+ +## List plugins + +View registered plugins from `appkit.plugins.json` or scan a directory: + +```bash +# From appkit.plugins.json (default) +npx @databricks/appkit plugin list + +# Scan a directory for plugin folders +npx @databricks/appkit plugin list --dir plugins/ + +# JSON output for scripting +npx @databricks/appkit plugin list --json +``` + +## Add a resource to a plugin + +Interactively add a new resource requirement to an existing plugin manifest: + +```bash +npx @databricks/appkit plugin add-resource + +# Or specify the plugin directory +npx @databricks/appkit plugin add-resource --path plugins/my-plugin +``` diff --git a/docs/docs/plugins/server.md b/docs/docs/plugins/server.md new file mode 100644 index 00000000..b43c00e4 --- /dev/null +++ b/docs/docs/plugins/server.md @@ -0,0 +1,72 @@ +--- +sidebar_position: 2 +--- + +# Server plugin + +Provides HTTP server capabilities with development and production modes. + +**Key features:** +- Express server for REST APIs +- Vite dev server with hot module reload +- Static file serving for production +- Remote tunneling to deployed backends + +The Server plugin uses the deferred initialization phase to access routes from other plugins. 
+ +## What it does + +- Starts an Express server (default `host=0.0.0.0`, `port=8000`) +- Mounts plugin routes under `/api//...` +- Adds `/health` endpoint (returns `{ status: "ok" }`) +- Serves frontend: + - **Development** (`NODE_ENV=development`): runs a Vite dev server in middleware mode + - **Production**: auto-detects static frontend directory (checks `dist`, `client/dist`, `build`, `public`, `out`) + +## Minimal server example + +The smallest valid AppKit server: + +```ts +// server/index.ts +import { createApp, server } from "@databricks/appkit"; + +await createApp({ + plugins: [server()], +}); +``` + +## Manual server start example + +When you need to extend Express with custom routes: + +```ts +import { createApp, server } from "@databricks/appkit"; + +const appkit = await createApp({ + plugins: [server({ autoStart: false })], +}); + +appkit.server.extend((app) => { + app.get("/custom", (_req, res) => res.json({ ok: true })); +}); + +await appkit.server.start(); +``` + +## Configuration options + +```ts +import { createApp, server } from "@databricks/appkit"; + +await createApp({ + plugins: [ + server({ + port: 8000, // default: Number(process.env.DATABRICKS_APP_PORT) || 8000 + host: "0.0.0.0", // default: process.env.FLASK_RUN_HOST || "0.0.0.0" + autoStart: true, // default: true + staticPath: "dist", // optional: force a specific static directory + }), + ], +}); +``` diff --git a/docs/static/schemas/plugin-manifest.schema.json b/docs/static/schemas/plugin-manifest.schema.json index e4d43c8f..465498b2 100644 --- a/docs/static/schemas/plugin-manifest.schema.json +++ b/docs/static/schemas/plugin-manifest.schema.json @@ -86,6 +86,10 @@ "type": "string", "description": "SPDX license identifier", "examples": ["Apache-2.0", "MIT"] + }, + "onSetupMessage": { + "type": "string", + "description": "Message displayed to the user after project initialization. Use this to inform about manual setup steps (e.g. environment variables, resource provisioning)." 
} }, "additionalProperties": false, diff --git a/docs/static/schemas/template-plugins.schema.json b/docs/static/schemas/template-plugins.schema.json index 9713e9f6..290edd05 100644 --- a/docs/static/schemas/template-plugins.schema.json +++ b/docs/static/schemas/template-plugins.schema.json @@ -65,6 +65,10 @@ "default": false, "description": "When true, this plugin is required by the template and cannot be deselected during CLI init. The user will only be prompted to configure its resources. When absent or false, the plugin is optional and the user can choose whether to include it." }, + "onSetupMessage": { + "type": "string", + "description": "Message displayed to the user after project initialization. Use this to inform about manual setup steps (e.g. environment variables, resource provisioning)." + }, "resources": { "type": "object", "required": ["required", "optional"], diff --git a/packages/appkit/src/index.ts b/packages/appkit/src/index.ts index 45cad458..a5e6f50c 100644 --- a/packages/appkit/src/index.ts +++ b/packages/appkit/src/index.ts @@ -47,7 +47,7 @@ export { } from "./errors"; // Plugin authoring export { Plugin, type ToPlugin, toPlugin } from "./plugin"; -export { analytics, server } from "./plugins"; +export { analytics, lakebase, server } from "./plugins"; // Registry types and utilities for plugin manifests export type { ConfigSchema, diff --git a/packages/appkit/src/plugins/index.ts b/packages/appkit/src/plugins/index.ts index aba6f26b..f6a9e2c5 100644 --- a/packages/appkit/src/plugins/index.ts +++ b/packages/appkit/src/plugins/index.ts @@ -1,2 +1,3 @@ export * from "./analytics"; +export * from "./lakebase"; export * from "./server"; diff --git a/packages/appkit/src/plugins/lakebase/index.ts b/packages/appkit/src/plugins/lakebase/index.ts new file mode 100644 index 00000000..73bcff5e --- /dev/null +++ b/packages/appkit/src/plugins/lakebase/index.ts @@ -0,0 +1,3 @@ +export * from "./lakebase"; +export * from "./manifest"; +export * from "./types"; 
diff --git a/packages/appkit/src/plugins/lakebase/lakebase.ts b/packages/appkit/src/plugins/lakebase/lakebase.ts new file mode 100644 index 00000000..e8a1f18c --- /dev/null +++ b/packages/appkit/src/plugins/lakebase/lakebase.ts @@ -0,0 +1,124 @@ +import type pg from "pg"; +import { + createLakebasePool, + getLakebaseOrmConfig, + getLakebasePgConfig, + getUsernameWithApiLookup, +} from "../../connectors/lakebase"; +import { createLogger } from "../../logging/logger"; +import { Plugin, toPlugin } from "../../plugin"; +import { lakebaseManifest } from "./manifest"; +import type { ILakebaseConfig } from "./types"; + +const logger = createLogger("lakebase"); + +/** + * AppKit plugin for Databricks Lakebase Autoscaling. + * + * Wraps `@databricks/lakebase` to provide a standard `pg.Pool` with automatic + * OAuth token refresh, integrated with AppKit's logger and OpenTelemetry setup. + * + * @example + * ```ts + * import { createApp, lakebase, server } from "@databricks/appkit"; + * + * const AppKit = await createApp({ + * plugins: [server(), lakebase()], + * }); + * + * const result = await AppKit.lakebase.query("SELECT * FROM users WHERE id = $1", [userId]); + * ``` + */ +export class LakebasePlugin extends Plugin { + name = "lakebase"; + + /** Plugin manifest declaring metadata and resource requirements */ + static manifest = lakebaseManifest; + + protected declare config: ILakebaseConfig; + private pool: pg.Pool | null = null; + + constructor(config: ILakebaseConfig) { + super(config); + this.config = config; + } + + /** + * Initializes the Lakebase connection pool. + * Called automatically by AppKit during the plugin setup phase. + * + * Resolves the PostgreSQL username via {@link getUsernameWithApiLookup}, + * which tries config, env vars, and finally the Databricks workspace API. 
+ */ + async setup() { + const poolConfig = this.config.pool; + const user = await getUsernameWithApiLookup(poolConfig); + this.pool = createLakebasePool({ ...poolConfig, user }); + logger.info("Lakebase pool initialized"); + } + + /** + * Executes a parameterized SQL query against the Lakebase pool. + * + * @param text - SQL query string, using `$1`, `$2`, ... placeholders + * @param values - Parameter values corresponding to placeholders + * @returns Query result with typed rows + * + * @example + * ```ts + * const result = await AppKit.lakebase.query<{ id: number; name: string }>( + * "SELECT id, name FROM users WHERE active = $1", + * [true], + * ); + * ``` + */ + async query( + text: string, + values?: unknown[], + ): Promise> { + // biome-ignore lint/style/noNonNullAssertion: pool is guaranteed non-null after setup(), which AppKit always awaits before exposing the plugin API + return this.pool!.query(text, values); + } + + /** + * Gracefully drains and closes the connection pool. + * Called automatically by AppKit during shutdown. + */ + abortActiveOperations(): void { + super.abortActiveOperations(); + if (this.pool) { + logger.info("Closing Lakebase pool"); + this.pool.end().catch((err) => { + logger.error("Error closing Lakebase pool: %O", err); + }); + this.pool = null; + } + } + + /** + * Returns the plugin's public API, accessible via `AppKit.lakebase`. + * + * - `pool` — The raw `pg.Pool` instance, for use with ORMs or advanced scenarios + * - `query` — Convenience method for executing parameterized SQL queries + * - `getOrmConfig()` — Returns a config object compatible with Drizzle, TypeORM, Sequelize, etc. 
+ * - `getPgConfig()` — Returns a `pg.PoolConfig` object for manual pool construction + */ + exports() { + return { + // biome-ignore lint/style/noNonNullAssertion: pool is guaranteed non-null after setup(), which AppKit always awaits before exposing the plugin API + pool: this.pool!, + query: this.query.bind(this), + getOrmConfig: () => getLakebaseOrmConfig(this.config.pool), + getPgConfig: () => getLakebasePgConfig(this.config.pool), + }; + } +} + +/** + * @internal + */ +export const lakebase = toPlugin< + typeof LakebasePlugin, + ILakebaseConfig, + "lakebase" +>(LakebasePlugin, "lakebase"); diff --git a/packages/appkit/src/plugins/lakebase/manifest.json b/packages/appkit/src/plugins/lakebase/manifest.json new file mode 100644 index 00000000..889d9e13 --- /dev/null +++ b/packages/appkit/src/plugins/lakebase/manifest.json @@ -0,0 +1,11 @@ +{ + "$schema": "https://databricks.github.io/appkit/schemas/plugin-manifest.schema.json", + "name": "lakebase", + "displayName": "Lakebase", + "description": "SQL query execution against Databricks Lakebase Autoscaling", + "onSetupMessage": "Configure environment variables before running or deploying the app.\nSee: https://databricks.github.io/appkit/docs/plugins/lakebase", + "resources": { + "required": [], + "optional": [] + } +} diff --git a/packages/appkit/src/plugins/lakebase/manifest.ts b/packages/appkit/src/plugins/lakebase/manifest.ts new file mode 100644 index 00000000..7575062a --- /dev/null +++ b/packages/appkit/src/plugins/lakebase/manifest.ts @@ -0,0 +1,10 @@ +import { readFileSync } from "node:fs"; +import { dirname, join } from "node:path"; +import { fileURLToPath } from "node:url"; +import type { PluginManifest } from "../../registry"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); + +export const lakebaseManifest: PluginManifest = JSON.parse( + readFileSync(join(__dirname, "manifest.json"), "utf-8"), +) as PluginManifest; diff --git a/packages/appkit/src/plugins/lakebase/types.ts 
b/packages/appkit/src/plugins/lakebase/types.ts new file mode 100644 index 00000000..ac6997c6 --- /dev/null +++ b/packages/appkit/src/plugins/lakebase/types.ts @@ -0,0 +1,20 @@ +import type { BasePluginConfig } from "shared"; +import type { LakebasePoolConfig } from "../../connectors/lakebase"; + +/** + * Configuration for the Lakebase plugin. + * + * The minimum required setup is via environment variables — no `pool` config + * is needed if `PGHOST`, `PGDATABASE`, and `LAKEBASE_ENDPOINT` are set. + * + * @see {@link https://github.com/databricks/appkit/blob/main/packages/lakebase/README.md} for the full configuration reference. + */ +export interface ILakebaseConfig extends BasePluginConfig { + /** + * Optional overrides for the underlying `pg.Pool` configuration. + * All fields are optional and fall back to environment variables or defaults. + * + * Common overrides: `max` (pool size), `connectionTimeoutMillis`, `idleTimeoutMillis`. + */ + pool?: Partial; +} diff --git a/packages/appkit/tsdown.config.ts b/packages/appkit/tsdown.config.ts index 2472c084..32600ee7 100644 --- a/packages/appkit/tsdown.config.ts +++ b/packages/appkit/tsdown.config.ts @@ -42,6 +42,10 @@ export default defineConfig([ from: "src/plugins/analytics/manifest.json", to: "dist/plugins/analytics/manifest.json", }, + { + from: "src/plugins/lakebase/manifest.json", + to: "dist/plugins/lakebase/manifest.json", + }, { from: "src/plugins/server/manifest.json", to: "dist/plugins/server/manifest.json", diff --git a/packages/shared/src/cli/commands/plugin/manifest-types.ts b/packages/shared/src/cli/commands/plugin/manifest-types.ts index 420a09b3..9dfb041e 100644 --- a/packages/shared/src/cli/commands/plugin/manifest-types.ts +++ b/packages/shared/src/cli/commands/plugin/manifest-types.ts @@ -27,6 +27,7 @@ export interface PluginManifest { optional: ResourceRequirement[]; }; config?: { schema: unknown }; + onSetupMessage?: string; } export interface TemplatePlugin extends Omit { diff --git 
a/packages/shared/src/cli/commands/plugin/sync/sync.ts b/packages/shared/src/cli/commands/plugin/sync/sync.ts index 5d00b656..1ec5e9e3 100644 --- a/packages/shared/src/cli/commands/plugin/sync/sync.ts +++ b/packages/shared/src/cli/commands/plugin/sync/sync.ts @@ -278,6 +278,9 @@ function discoverLocalPlugins( description: manifest.description, package: `./${relativePath}`, resources: manifest.resources, + ...(manifest.onSetupMessage && { + onSetupMessage: manifest.onSetupMessage, + }), }; } catch (error) { console.warn( @@ -358,6 +361,9 @@ function scanForPlugins( description: manifest.description, package: packageName, resources: manifest.resources, + ...(manifest.onSetupMessage && { + onSetupMessage: manifest.onSetupMessage, + }), }; } } @@ -400,6 +406,9 @@ function scanPluginsDir( description: manifest.description, package: packageName, resources: manifest.resources, + ...(manifest.onSetupMessage && { + onSetupMessage: manifest.onSetupMessage, + }), }; } } catch (error) { diff --git a/packages/shared/src/plugin.ts b/packages/shared/src/plugin.ts index 54d8f583..b29933b3 100644 --- a/packages/shared/src/plugin.ts +++ b/packages/shared/src/plugin.ts @@ -84,6 +84,7 @@ export interface PluginManifest { config?: { schema: JSONSchema7; }; + onSetupMessage?: string; author?: string; version?: string; repository?: string; diff --git a/packages/shared/src/schemas/plugin-manifest.schema.json b/packages/shared/src/schemas/plugin-manifest.schema.json index e4d43c8f..465498b2 100644 --- a/packages/shared/src/schemas/plugin-manifest.schema.json +++ b/packages/shared/src/schemas/plugin-manifest.schema.json @@ -86,6 +86,10 @@ "type": "string", "description": "SPDX license identifier", "examples": ["Apache-2.0", "MIT"] + }, + "onSetupMessage": { + "type": "string", + "description": "Message displayed to the user after project initialization. Use this to inform about manual setup steps (e.g. environment variables, resource provisioning)." 
} }, "additionalProperties": false, diff --git a/packages/shared/src/schemas/template-plugins.schema.json b/packages/shared/src/schemas/template-plugins.schema.json index 9713e9f6..290edd05 100644 --- a/packages/shared/src/schemas/template-plugins.schema.json +++ b/packages/shared/src/schemas/template-plugins.schema.json @@ -65,6 +65,10 @@ "default": false, "description": "When true, this plugin is required by the template and cannot be deselected during CLI init. The user will only be prompted to configure its resources. When absent or false, the plugin is optional and the user can choose whether to include it." }, + "onSetupMessage": { + "type": "string", + "description": "Message displayed to the user after project initialization. Use this to inform about manual setup steps (e.g. environment variables, resource provisioning)." + }, "resources": { "type": "object", "required": ["required", "optional"], diff --git a/template/appkit.plugins.json b/template/appkit.plugins.json index 5d5c7a10..7fc8fb6d 100644 --- a/template/appkit.plugins.json +++ b/template/appkit.plugins.json @@ -26,6 +26,17 @@ "optional": [] } }, + "lakebase": { + "name": "lakebase", + "displayName": "Lakebase", + "description": "SQL query execution against Databricks Lakebase Autoscaling", + "package": "@databricks/appkit", + "resources": { + "required": [], + "optional": [] + }, + "onSetupMessage": "Configure environment variables before running or deploying the app.\nSee: https://databricks.github.io/appkit/docs/plugins/lakebase" + }, "server": { "name": "server", "displayName": "Server Plugin",