diff --git a/apps/docs/content/guides/auth/server-side/creating-a-client.mdx b/apps/docs/content/guides/auth/server-side/creating-a-client.mdx index b02a37ad8fd8e..3ba089193d45c 100644 --- a/apps/docs/content/guides/auth/server-side/creating-a-client.mdx +++ b/apps/docs/content/guides/auth/server-side/creating-a-client.mdx @@ -463,8 +463,8 @@ export async function loader({ request }: LoaderFunctionArgs) { return parseCookieHeader(request.headers.get('Cookie') ?? '') }, setAll(cookiesToSet) { - cookiesToSet.forEach(({ name, value }) => - headers.append('Set-Cookie', serializeCookieHeader(name, value)) + cookiesToSet.forEach(({ name, value, options }) => + headers.append('Set-Cookie', serializeCookieHeader(name, value, options)) ) }, }, @@ -497,8 +497,8 @@ export async function action({ request }: ActionFunctionArgs) { return parseCookieHeader(request.headers.get('Cookie') ?? '') }, setAll(cookiesToSet) { - cookiesToSet.forEach(({ name, value }) => - headers.append('Set-Cookie', serializeCookieHeader(name, value)) + cookiesToSet.forEach(({ name, value, options }) => + headers.append('Set-Cookie', serializeCookieHeader(name, value, options)) ) }, }, diff --git a/apps/docs/content/guides/realtime/authorization.mdx b/apps/docs/content/guides/realtime/authorization.mdx index 4bdb5850eabc2..26ad20d965dee 100644 --- a/apps/docs/content/guides/realtime/authorization.mdx +++ b/apps/docs/content/guides/realtime/authorization.mdx @@ -133,7 +133,7 @@ exists ( rooms_users where user_id = (select auth.uid()) - and topic = (select realtime.topic()) + and room_topic = (select realtime.topic()) and realtime.messages.extension in ('broadcast') ) ); @@ -266,7 +266,7 @@ with check ( rooms_users where user_id = (select auth.uid()) - and topic = (select realtime.topic()) + and room_topic = (select realtime.topic()) and realtime.messages.extension in ('broadcast') ) ); @@ -293,7 +293,7 @@ using ( rooms_users where user_id = (select auth.uid()) - and topic = (select realtime.topic()) + and 
room_topic = (select realtime.topic()) and realtime.messages.extension in ('presence') ) ); @@ -316,7 +316,7 @@ with check ( rooms_users where user_id = (select auth.uid()) - and name = (select realtime.topic()) + and room_topic = (select realtime.topic()) and realtime.messages.extension in ('presence') ) ); @@ -343,7 +343,7 @@ using ( rooms_users where user_id = (select auth.uid()) - and topic = (select realtime.topic()) + and room_topic = (select realtime.topic()) and realtime.messages.extension in ('broadcast', 'presence') ) ); @@ -366,7 +366,7 @@ with check ( rooms_users where user_id = (select auth.uid()) - and name = (select realtime.topic()) + and room_topic = (select realtime.topic()) and realtime.messages.extension in ('broadcast', 'presence') ) ); diff --git a/apps/docs/content/troubleshooting/supabase-storage-inefficient-folder-operations-and-hierarchical-rls-challenges-b05a4d.mdx b/apps/docs/content/troubleshooting/supabase-storage-inefficient-folder-operations-and-hierarchical-rls-challenges-b05a4d.mdx index c4fdf40594260..d666ff718b32e 100644 --- a/apps/docs/content/troubleshooting/supabase-storage-inefficient-folder-operations-and-hierarchical-rls-challenges-b05a4d.mdx +++ b/apps/docs/content/troubleshooting/supabase-storage-inefficient-folder-operations-and-hierarchical-rls-challenges-b05a4d.mdx @@ -20,3 +20,45 @@ To overcome these limitations and implement robust folder management with hierar - **Implement RLS policies on `storage.objects`.** These policies must `JOIN` with your custom metadata table to enforce hierarchical access permissions based on your defined folder structure. - **Handle batch folder operations via your metadata table.** For operations like moving or renaming folders, update the relevant entries in your custom metadata table. Note that actual file paths in Storage are not directly altered by these operations. 
- **Optimize RLS policies for performance.** `JOIN`s in RLS policies can lead to performance degradation, especially with large datasets. Ensure proper indexing on your custom metadata table and consider using `SECURITY DEFINER` functions to optimize policy execution. + +## Alternative approach: Using the S3 protocol for bulk operations + +Supabase Storage also supports an S3-compatible API. This allows you to use tools like the AWS CLI to perform bulk file operations such as downloading, moving, or reorganizing objects more efficiently. + +Install the AWS CLI by following the [AWS CLI installation guide](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html). + +Create S3 credentials in Supabase using the [Supabase S3 authentication guide](/docs/guides/storage/s3/authentication?queryGroups=language&language=credentials). + +Configure an AWS CLI profile using the credentials you generated in Supabase. The profile name can be anything, but it must match the value used in the following commands. + +```shell +aws configure --profile supabase-s3 +``` + +Download files from a bucket or prefix: + +```shell +aws s3 cp s3://bucket-name/folder-name ./download-target \ +--profile supabase-s3 \ +--endpoint-url https://<project-ref>.supabase.co/storage/v1/s3 \ +--recursive \ +--region <region> + +``` + +- Replace `bucket-name` with your bucket name. +- Replace `folder-name` with the prefix you want to download, or omit it to download the entire bucket. +- Replace `<project-ref>` with your Supabase project reference. +- Replace `<region>` with your project's region (for example `eu-central-1`). +- `./download-target` is the local directory where files will be saved. + +Move or rename files using the `mv` command. Because folders in Supabase Storage are implemented as prefixes, renaming a folder is effectively moving objects from one prefix to another. 
+ +```shell +aws s3 mv s3://bucket-name-one/folder-name-one s3://bucket-name-two/folder-name-two \ +--profile supabase-s3 \ +--endpoint-url https://<project-ref>.supabase.co/storage/v1/s3 \ +--recursive \ +--region <region> + +``` + +This method is useful for large-scale downloads, migrations, or reorganizing files within a bucket. diff --git a/apps/studio/components/grid/components/header/filter/FilterPopoverNew.utils.ts b/apps/studio/components/grid/components/header/filter/FilterPopoverNew.utils.ts index 60bba8b017912..5cfefa6273b02 100644 --- a/apps/studio/components/grid/components/header/filter/FilterPopoverNew.utils.ts +++ b/apps/studio/components/grid/components/header/filter/FilterPopoverNew.utils.ts @@ -50,11 +50,13 @@ const DATE_OPERATORS: OperatorDefinition[] = [ { value: '<=', label: 'Less or equal', group: 'comparison' }, { value: '=', label: 'Equals', group: 'comparison' }, { value: '<>', label: 'Not equal', group: 'comparison' }, + { value: 'is', label: 'Is', group: 'setNull' }, ] const BOOLEAN_OPERATORS: OperatorDefinition[] = [ { value: '=', label: 'Equals', group: 'comparison' }, { value: '<>', label: 'Not equal', group: 'comparison' }, + { value: 'is', label: 'Is', group: 'setNull' }, ] export function columnToFilterProperty(column: SupaColumn): FilterProperty { diff --git a/apps/studio/components/interfaces/App/CommandMenu/ContextSearchCommands.tsx b/apps/studio/components/interfaces/App/CommandMenu/ContextSearchCommands.tsx index 9cd507530deae..8e61ba4731ab7 100644 --- a/apps/studio/components/interfaces/App/CommandMenu/ContextSearchCommands.tsx +++ b/apps/studio/components/interfaces/App/CommandMenu/ContextSearchCommands.tsx @@ -1,25 +1,26 @@ 'use client' -import { useMemo } from 'react' -import { Database } from 'lucide-react' +import { IS_PLATFORM } from 'common' +import { useIsFeatureEnabled } from 'hooks/misc/useIsFeatureEnabled' +import { useSelectedProjectQuery } from 'hooks/misc/useSelectedProject' import { Auth, EdgeFunctions, Storage } from 'icons' +import { 
Database } from 'lucide-react' +import { useMemo } from 'react' import type { ICommand } from 'ui-patterns/CommandMenu' import { CommandHeader, CommandInput, CommandWrapper, PageType, + useQuery, useRegisterCommands, useRegisterPage, useSetPage, - useQuery, } from 'ui-patterns/CommandMenu' + import { COMMAND_MENU_SECTIONS } from './CommandMenu.utils' -import { useSelectedProjectQuery } from 'hooks/misc/useSelectedProject' -import { orderCommandSectionsByPriority } from './ordering' import { ContextSearchResults } from './ContextSearchResults' -import { useFlag, IS_PLATFORM } from 'common' -import { useIsFeatureEnabled } from 'hooks/misc/useIsFeatureEnabled' +import { orderCommandSectionsByPriority } from './ordering' import type { SearchContextValue } from './SearchContext.types' interface SearchContextOption { @@ -81,7 +82,6 @@ function ContextSearchPage({ } export function useContextSearchCommands() { - const enableSearchEntitiesCommandMenu = useFlag('enableSearchEntitiesCommandMenu') const { data: project } = useSelectedProjectQuery() const setPage = useSetPage() @@ -147,6 +147,6 @@ export function useContextSearchCommands() { useRegisterCommands(COMMAND_MENU_SECTIONS.QUERY, contextCommands, { orderSection: orderCommandSectionsByPriority, sectionMeta: { priority: 3 }, - enabled: !IS_PLATFORM || (enableSearchEntitiesCommandMenu && !!project), + enabled: !IS_PLATFORM || !!project, }) } diff --git a/apps/studio/components/interfaces/App/CommandMenu/CreateCommands.tsx b/apps/studio/components/interfaces/App/CommandMenu/CreateCommands.tsx index 09fafe9e864bb..4885b3274944a 100644 --- a/apps/studio/components/interfaces/App/CommandMenu/CreateCommands.tsx +++ b/apps/studio/components/interfaces/App/CommandMenu/CreateCommands.tsx @@ -1,6 +1,6 @@ 'use client' -import { IS_PLATFORM, useFlag } from 'common' +import { IS_PLATFORM } from 'common' import { SIDEBAR_KEYS } from 'components/layouts/ProjectLayout/LayoutSidebar/LayoutSidebarProvider' import { Clock5, @@ -51,7 
+51,6 @@ const Graphql = dynamic(() => import('icons').then((mod) => mod.Graphql)) const CREATE_STUDIO_ENTITY = 'Create Studio Entity' export function useCreateCommands(options?: CommandOptions) { - const enableCreateCommands = useFlag('enablecreatecommands') const setIsOpen = useSetCommandMenuOpen() const { ref, @@ -451,7 +450,7 @@ export function useCreateCommands(options?: CommandOptions) { }, { deps: [sections], - enabled: enableCreateCommands, + enabled: true, } ) @@ -469,7 +468,7 @@ export function useCreateCommands(options?: CommandOptions) { ...options, orderSection: (sections) => sections, sectionMeta: { priority: 3 }, - enabled: enableCreateCommands, + enabled: true, } ) } diff --git a/apps/studio/components/interfaces/Organization/SSO/SSOConfig.tsx b/apps/studio/components/interfaces/Organization/SSO/SSOConfig.tsx index f6a454b40ed3b..117c62339fd8b 100644 --- a/apps/studio/components/interfaces/Organization/SSO/SSOConfig.tsx +++ b/apps/studio/components/interfaces/Organization/SSO/SSOConfig.tsx @@ -168,7 +168,7 @@ export const SSOConfig = () => { plan="Team" source="organizationSso" primaryText="Organization Single Sign-on (SSO) is available from Team plan and above" - secondaryText="SSO as a login option provides additional acccount security for your team by enforcing the use of an identity provider when logging into Supabase. Upgrade to Team or above to set up SSO for your organization." + secondaryText="SSO as a login option provides additional account security for your team by enforcing the use of an identity provider when logging into Supabase. Upgrade to Team or above to set up SSO for your organization." featureProposition="enable Single Sign-on (SSO)" /> ) : isSuccess || isSSOProviderNotFound ? 
( diff --git a/apps/studio/components/interfaces/Reports/Reports.constants.ts b/apps/studio/components/interfaces/Reports/Reports.constants.ts index 7d8a729559afe..c9a98310cfe7a 100644 --- a/apps/studio/components/interfaces/Reports/Reports.constants.ts +++ b/apps/studio/components/interfaces/Reports/Reports.constants.ts @@ -1,6 +1,6 @@ +import { PlanId } from 'data/subscriptions/types' import dayjs from 'dayjs' -import { PlanId } from 'data/subscriptions/types' import type { DatetimeHelper } from '../Settings/Logs/Logs.types' import { PresetConfig, Presets, ReportFilterItem } from './Reports.types' @@ -400,11 +400,11 @@ select -- mean_time, coalesce(statements.rows::numeric / nullif(statements.calls, 0), 0) as avg_rows, statements.rows as rows_read, - case - when (statements.shared_blks_hit + statements.shared_blks_read) > 0 + case + when (statements.shared_blks_hit + statements.shared_blks_read) > 0 then round( - (statements.shared_blks_hit * 100.0) / - (statements.shared_blks_hit + statements.shared_blks_read), + (statements.shared_blks_hit * 100.0) / + (statements.shared_blks_hit + statements.shared_blks_read), 2 ) else 0 @@ -430,7 +430,8 @@ select } from pg_stat_statements as statements inner join pg_authid as auth on statements.userid = auth.oid - ${where || ''} + -- skip queries that were never actually executed + WHERE statements.calls > 0 ${where ? 
where.replace(/^WHERE/, 'AND') : ''} ${orderBy || 'order by statements.calls desc'} limit 20`, }, @@ -440,6 +441,11 @@ select -- reports-query-performance-most-time-consuming set search_path to public, extensions; +-- compute total time once up front so we don't need a window function over all rows +with grand_total as ( + select coalesce(nullif(sum(total_exec_time + total_plan_time), 0), 1) as v + from pg_stat_statements where calls > 0 +) select auth.rolname, statements.query, @@ -448,7 +454,7 @@ select statements.mean_exec_time + statements.mean_plan_time as mean_time, coalesce( ((statements.total_exec_time + statements.total_plan_time) / - nullif(sum(statements.total_exec_time + statements.total_plan_time) OVER(), 0)) * + (select v from grand_total)) * 100, 0 ) as prop_total_time${ @@ -473,7 +479,8 @@ select } from pg_stat_statements as statements inner join pg_authid as auth on statements.userid = auth.oid - ${where || ''} + -- skip queries that were never actually executed + WHERE statements.calls > 0 ${where ? where.replace(/^WHERE/, 'AND') : ''} ${orderBy || 'order by total_time desc'} limit 20`, }, @@ -519,7 +526,8 @@ select } from pg_stat_statements as statements inner join pg_authid as auth on statements.userid = auth.oid - ${where || ''} + -- skip queries that were never actually executed + WHERE statements.calls > 0 ${where ? 
where.replace(/^WHERE/, 'AND') : ''} ${orderBy || 'order by max_time desc'} limit 20`, }, @@ -543,7 +551,12 @@ select -- reports-query-performance-unified set search_path to public, extensions; - with base as ( + -- compute total time once up front so we don't need a window function over all rows + with grand_total as ( + select coalesce(nullif(sum(total_exec_time + total_plan_time), 0), 1) as v + from pg_stat_statements where calls > 0 + ), + base as ( select auth.rolname, statements.query, @@ -564,13 +577,14 @@ select end as cache_hit_rate, coalesce( ((statements.total_exec_time + statements.total_plan_time) / - nullif(sum(statements.total_exec_time + statements.total_plan_time) OVER(), 0)) * + (select v from grand_total)) * 100, 0 ) as prop_total_time from pg_stat_statements as statements inner join pg_authid as auth on statements.userid = auth.oid - ${where || ''} + -- skip queries that were never actually executed + WHERE statements.calls > 0 ${where ? where.replace(/^WHERE/, 'AND') : ''} ${orderBy || 'order by total_time desc'} limit 50 ), @@ -615,28 +629,30 @@ select -- Count of slow queries (> 1 second average) SELECT count(*) as slow_queries_count - FROM pg_stat_statements - WHERE statements.mean_exec_time > 1000;`, + -- alias needed to reference columns in WHERE + FROM pg_stat_statements as statements + -- skip never-executed queries; mean_exec_time > 1000ms = avg over 1 second + WHERE statements.calls > 0 AND statements.mean_exec_time > 1000;`, }, queryMetrics: { queryType: 'db', sql: (_params, where, orderBy, runIndexAdvisor = false, filterIndexAdvisor = false) => ` -- reports-query-performance-metrics set search_path to public, extensions; - - SELECT + + SELECT COALESCE(ROUND(AVG(statements.rows::numeric / NULLIF(statements.calls, 0)), 1), 0) as avg_rows_per_call, COUNT(*) FILTER (WHERE statements.total_exec_time + statements.total_plan_time > 1000) as slow_queries, COALESCE( ROUND( - SUM(statements.shared_blks_hit) * 100.0 / - 
NULLIF(SUM(statements.shared_blks_hit + statements.shared_blks_read), 0), + SUM(statements.shared_blks_hit) * 100.0 / + NULLIF(SUM(statements.shared_blks_hit + statements.shared_blks_read), 0), 2 ), 0 ) || '%' as cache_hit_rate FROM pg_stat_statements as statements - WHERE statements.calls > 0 - ${where || ''} + -- skip queries that were never actually executed + WHERE statements.calls > 0 ${where ? where.replace(/^WHERE/, 'AND') : ''} ${orderBy || ''}`, }, }, diff --git a/apps/studio/components/interfaces/Reports/Reports.queryPerformance.test.ts b/apps/studio/components/interfaces/Reports/Reports.queryPerformance.test.ts new file mode 100644 index 0000000000000..4e5df19a8ef11 --- /dev/null +++ b/apps/studio/components/interfaces/Reports/Reports.queryPerformance.test.ts @@ -0,0 +1,116 @@ +import { describe, expect, it } from 'vitest' + +import { PRESET_CONFIG } from './Reports.constants' +import { Presets } from './Reports.types' + +const queries = PRESET_CONFIG[Presets.QUERY_PERFORMANCE].queries as Record< + string, + { sql: (...args: any[]) => string } +> + +const queryNames = [ + 'mostFrequentlyInvoked', + 'mostTimeConsuming', + 'slowestExecutionTime', + 'unified', + 'slowQueriesCount', + 'queryMetrics', +] as const + +describe('QUERY_PERFORMANCE SQL queries', () => { + describe('calls > 0 base filter', () => { + it.each(queryNames)('%s includes calls > 0 without user filters', (name) => { + const sql = queries[name].sql([], undefined, undefined) + expect(sql).toContain('calls > 0') + }) + + it.each(queryNames)('%s still includes calls > 0 when user filters are provided', (name) => { + const sql = queries[name].sql([], "WHERE auth.rolname in ('postgres')", undefined) + expect(sql).toContain('calls > 0') + }) + }) + + describe('WHERE clause composition with user filters', () => { + const userWhere = "WHERE auth.rolname in ('postgres')" + + it.each([ + 'mostFrequentlyInvoked', + 'mostTimeConsuming', + 'slowestExecutionTime', + 'unified', + ] as const)('%s: user 
filters appended with AND (no duplicate WHERE)', (name) => { + const sql = queries[name].sql([], userWhere, undefined) + // Should not have two WHERE keywords in a row / duplicate WHERE + expect(sql).not.toMatch(/WHERE\s+.*WHERE/s) + // User filter condition should be present + expect(sql).toContain("auth.rolname in ('postgres')") + // Should use AND to join base filter and user filter + expect(sql).toMatch(/calls > 0\s+AND/) + }) + + it('queryMetrics: user filters appended with AND (no duplicate WHERE in FROM clause)', () => { + const sql = queries.queryMetrics.sql([], userWhere, undefined) + // queryMetrics uses COUNT(*) FILTER (WHERE ...) which is valid SQL and not a duplicate + // Just verify the base filter + user filter are correctly composed + expect(sql).toContain("auth.rolname in ('postgres')") + expect(sql).toMatch(/calls > 0\s+AND/) + // Should not have two WHERE keywords after the FROM keyword + expect(sql).not.toMatch(/FROM[\s\S]*WHERE[\s\S]*WHERE[\s\S]*WHERE/s) + }) + + it.each([ + 'mostFrequentlyInvoked', + 'mostTimeConsuming', + 'slowestExecutionTime', + 'unified', + 'queryMetrics', + ] as const)('%s: no trailing junk when no user filters', (name) => { + const sql = queries[name].sql([], undefined, undefined) + // Should not have a dangling undefined or 'WHERE' with nothing after the base filter + expect(sql).not.toContain('undefined') + expect(sql).not.toMatch(/calls > 0\s+AND\s+(ORDER|LIMIT|$)/im) + }) + }) + + describe('slowQueriesCount bug fix', () => { + it('uses table alias "statements"', () => { + const sql = queries.slowQueriesCount.sql() + expect(sql).toContain('pg_stat_statements as statements') + }) + + it('filters by mean_exec_time using the alias', () => { + const sql = queries.slowQueriesCount.sql() + expect(sql).toContain('statements.mean_exec_time > 1000') + }) + }) + + describe('window function elimination', () => { + it('unified uses grand_total CTE instead of OVER()', () => { + const sql = queries.unified.sql([], undefined, 
undefined) + expect(sql).toContain('grand_total') + expect(sql).not.toContain('OVER()') + }) + + it('mostTimeConsuming uses grand_total CTE instead of OVER()', () => { + const sql = queries.mostTimeConsuming.sql([], undefined, undefined) + expect(sql).toContain('grand_total') + expect(sql).not.toContain('OVER()') + }) + + it('grand_total CTE references calls > 0', () => { + const sql = queries.unified.sql([], undefined, undefined) + expect(sql).toMatch(/grand_total[\s\S]*calls > 0/) + }) + }) + + describe('multiple user filters', () => { + it('handles multiple user filter conditions', () => { + const multiWhere = "WHERE auth.rolname in ('postgres') AND statements.calls >= 10" + const sql = queries.mostFrequentlyInvoked.sql([], multiWhere, undefined) + expect(sql).toContain('calls > 0') + expect(sql).toContain("auth.rolname in ('postgres')") + expect(sql).toContain('statements.calls >= 10') + expect(sql).not.toMatch(/WHERE\s+.*WHERE/s) + }) + }) +}) diff --git a/apps/studio/components/interfaces/RoleImpersonationSelector/UserImpersonationSelector.tsx b/apps/studio/components/interfaces/RoleImpersonationSelector/UserImpersonationSelector.tsx index ec707ca748e71..a0f859553ee7d 100644 --- a/apps/studio/components/interfaces/RoleImpersonationSelector/UserImpersonationSelector.tsx +++ b/apps/studio/components/interfaces/RoleImpersonationSelector/UserImpersonationSelector.tsx @@ -96,10 +96,10 @@ const UserImpersonationSelector = () => { setPreviousSearches((prev) => { // Remove if already present const filtered = prev.filter((u) => u.id !== user.id) - // Add new user to the end - const updated = [...filtered, user] + // Add new user to the start of the list (last used first) + const updated = [user, ...filtered] // Keep only the last 6 - return updated.slice(-6) + return updated.slice(0, 6) }) if (customAccessTokenHookDetails?.type === 'https') { diff --git a/apps/studio/components/interfaces/Storage/StorageExplorer/StorageExplorer.tsx 
b/apps/studio/components/interfaces/Storage/StorageExplorer/StorageExplorer.tsx index 75976839083da..155586b9e90e5 100644 --- a/apps/studio/components/interfaces/Storage/StorageExplorer/StorageExplorer.tsx +++ b/apps/studio/components/interfaces/Storage/StorageExplorer/StorageExplorer.tsx @@ -3,7 +3,6 @@ import { useEffect, useRef, useState } from 'react' import { useStaticEffectEvent } from '@/hooks/useStaticEffectEvent' import { useDebounce } from '@uidotdev/usehooks' -import { useParams } from 'common' import { useProjectStorageConfigQuery } from 'data/config/project-storage-config-query' import type { Bucket } from 'data/storage/buckets-query' import { useLatest } from 'hooks/misc/useLatest' @@ -20,9 +19,9 @@ import { MoveItemsModal } from './MoveItemsModal' import { PreviewPane } from './PreviewPane' export const StorageExplorer = () => { - const { ref, bucketId } = useParams() const storageExplorerRef = useRef(null) const { + bucketId, projectRef, view, columns, @@ -44,7 +43,7 @@ export const StorageExplorer = () => { setSelectedItemsToMove, } = useStorageExplorerStateSnapshot() - useProjectStorageConfigQuery({ projectRef: ref }, { enabled: IS_PLATFORM }) + useProjectStorageConfigQuery({ projectRef }, { enabled: IS_PLATFORM }) const { data: bucket, isLoading: isBucketQueryLoading } = useSelectedBucket() // Detect when transitioning between buckets to avoid showing stale content from the previous bucket. 
@@ -91,14 +90,15 @@ export const StorageExplorer = () => { } } }) + useEffect(() => { if (bucket && projectRef) fetchContents(bucket) - }, [bucket, projectRef, debouncedSearchString, fetchContents]) + }, [bucketId, bucket, projectRef, debouncedSearchString, fetchContents]) const openBucketRef = useLatest(openBucket) useEffect(() => { if (bucket && !!projectRef) openBucketRef.current(bucket) - }, [bucket, projectRef, openBucketRef]) + }, [bucketId, bucket, projectRef, openBucketRef]) /** Checkbox selection methods */ /** [Joshen] We'll only support checkbox selection for files ONLY */ diff --git a/apps/studio/package.json b/apps/studio/package.json index e12597aec341a..a809e22775ad0 100644 --- a/apps/studio/package.json +++ b/apps/studio/package.json @@ -102,7 +102,7 @@ "memoize-one": "^5.0.1", "mime-db": "^1.53.0", "monaco-editor": "0.52.2", - "next": "~16.0.0", + "next": "~16.1.6", "next-themes": "^0.3.0", "nuqs": "2.7.1", "openai": "^4.75.1", diff --git a/apps/studio/pages/api/ai/sql/filter-v1.ts b/apps/studio/pages/api/ai/sql/filter-v1.ts index 9dd6cd2b2ad85..1f296714c40c1 100644 --- a/apps/studio/pages/api/ai/sql/filter-v1.ts +++ b/apps/studio/pages/api/ai/sql/filter-v1.ts @@ -78,6 +78,7 @@ export async function handlePost(req: NextApiRequest, res: NextApiResponse) { - When unsure, default to simple equality comparisons with reasonable values. - Values should respect property types: booleans must be true/false, dates should be ISO date strings (YYYY-MM-DD), and numbers must be numbers. - If options are provided for a property, choose from those values when appropriate. + - The "is" operator is used for NULL checks. Valid values are: null, not null. For boolean columns, true and false are also valid. 
User request: "${prompt}" `, diff --git a/apps/studio/state/storage-explorer.tsx b/apps/studio/state/storage-explorer.tsx index 807585de24657..1060054cff25c 100644 --- a/apps/studio/state/storage-explorer.tsx +++ b/apps/studio/state/storage-explorer.tsx @@ -1,7 +1,7 @@ import { BlobReader, BlobWriter, ZipWriter } from '@zip.js/zip.js' import { IS_PLATFORM, LOCAL_STORAGE_KEYS } from 'common' import { capitalize, chunk, compact, find, findIndex, has, isObject, uniq, uniqBy } from 'lodash' -import { createContext, PropsWithChildren, useContext, useEffect, useState } from 'react' +import { createContext, PropsWithChildren, useContext, useEffect, useRef, useState } from 'react' import { useLatest } from 'react-use' import { toast } from 'sonner' import * as tus from 'tus-js-client' @@ -50,6 +50,7 @@ import { tryParseJson } from '@/lib/helpers' import { lookupMime } from '@/lib/mime' import { createProjectSupabaseClient } from '@/lib/project-supabase-client' import { ResponseError } from '@/types' +import { useSelectedBucket } from '@/components/interfaces/Storage/FilesBuckets/useSelectedBucket' type UploadProgress = { percentage: number @@ -82,11 +83,13 @@ function createStorageExplorerState({ connectionString, resumableUploadUrl, clientEndpoint, + bucketId, }: { projectRef: string connectionString: string resumableUploadUrl: string clientEndpoint: string + bucketId?: string }) { const localStorageKey = LOCAL_STORAGE_KEYS.STORAGE_PREFERENCE(projectRef) const { view, sortBy, sortByOrder, sortBucket } = @@ -95,6 +98,7 @@ function createStorageExplorerState({ const state = proxy({ projectRef, + bucketId, connectionString, resumableUploadUrl, uploadProgresses: [] as UploadProgress[], @@ -1864,10 +1868,13 @@ const StorageExplorerStateContext = createContext( export const StorageExplorerStateContextProvider = ({ children }: PropsWithChildren) => { const { data: project } = useSelectedProjectQuery() + const { data: bucket } = useSelectedBucket() + const isPaused = 
project?.status === PROJECT_STATUS.INACTIVE const [state, setState] = useState(() => createStorageExplorerState(DEFAULT_STATE_CONFIG)) const stateRef = useLatest(state) + const bucketRef = useRef(bucket?.id) const { storageEndpoint, @@ -1883,13 +1890,20 @@ export const StorageExplorerStateContextProvider = ({ children }: PropsWithChild useEffect(() => { const hasDataReady = !!project?.ref const storeAlreadyLoaded = state.projectRef === project?.ref - - if (!isPaused && hasDataReady && !storeAlreadyLoaded && isSuccessSettings) { + const hasBucketChanged = bucket?.id !== bucketRef.current + + if ( + !isPaused && + hasDataReady && + isSuccessSettings && + (!storeAlreadyLoaded || hasBucketChanged) + ) { const clientEndpoint = storageEndpoint ?? hostEndpoint ?? '' const resumableUploadUrl = `${clientEndpoint}/storage/v1/upload/resumable` setState( createStorageExplorerState({ projectRef: project?.ref ?? '', + bucketId: bucket?.id, connectionString: project.connectionString ?? '', resumableUploadUrl, clientEndpoint, @@ -1897,6 +1911,7 @@ export const StorageExplorerStateContextProvider = ({ children }: PropsWithChild ) } }, [ + bucket?.id, state.projectRef, project?.ref, project?.connectionString, diff --git a/apps/www/_blog/2026-03-05-log-drains-now-available-on-pro.mdx b/apps/www/_blog/2026-03-05-log-drains-now-available-on-pro.mdx new file mode 100644 index 0000000000000..5b2b60bc14b62 --- /dev/null +++ b/apps/www/_blog/2026-03-05-log-drains-now-available-on-pro.mdx @@ -0,0 +1,130 @@ +--- +title: 'Log Drains: Now available on Pro' +description: 'Supabase Pro users can now send their Supabase logs to their own logging backend, enabling them to debug in the same place as the rest of their stack.' 
+author: steven_eubank +date: '2026-03-05' +categories: + - product +tags: + - log-drains + - observability + - logging +imgSocial: 'https://zhfonblqamxferhoguzj.supabase.co/functions/v1/generate-og?template=announcement&layout=horizontal&copy=Log+Drains%3A%0ANow+available+on+Pro&icon=supabase.svg' +imgThumb: 'https://zhfonblqamxferhoguzj.supabase.co/functions/v1/generate-og?template=announcement&layout=horizontal&copy=Log+Drains%3A%0ANow+available+on+Pro&icon=supabase.svg' +toc_depth: 3 +--- + +Today, we are launching Supabase Log Drains on the Supabase Pro tier. Supabase Pro users can now send their Supabase logs to their own logging backend, enabling them to debug in the same place as the rest of their stack. + +When something breaks, you go to your logs. But your application does not stop at your application code. Behind every request is a Postgres query, an auth check, a storage operation, or an Edge Function invocation. Until today, Supabase logs remained in Supabase, separate from the tools where you debug everything else. + +This separation costs you time. During an incident, you switch between your logging dashboard and the Supabase console, trying to correlate timestamps and piece together what happened. You build dashboards in Datadog or Grafana, but they only show half the picture. + +Log drains eliminate this context switching. Your Postgres query errors appear next to your application exceptions. Your auth failures show up in the same timeline as your API errors. You build one dashboard that covers your entire stack. + +## What makes Supabase log drains different + +Supabase captures logs from every layer of your infrastructure, not just your application code: + +- **Postgres.** Query execution, connection events, errors, and replication status. +- **API Gateway.** Request and response logs from PostgREST and GraphQL. +- **Auth.** Login attempts, token operations, MFA events, and session management. 
+- **Storage.** File uploads, downloads, transformations, and access patterns. +- **Edge Functions.** Function invocations, execution traces, and error details. +- **Realtime.** WebSocket connections, broadcast events, and presence updates. + +This full-stack visibility is rare. Most platforms only export application-level logs, leaving you blind to what happens in the database layer. + +Supabase also batches logs intelligently to protect your destination from being overwhelmed. We send up to 250 logs per batch or flush every second, whichever comes first. Gzip compression reduces bandwidth costs when your destination supports it. + +## Who should use log drains + +**Growing startups with production traffic.** Once you have real users, you need real observability. Log drains let you set up alerts for database errors, track auth patterns, and investigate incidents without leaving your existing tools. If you are scaling from prototype to production, this is when centralized logging becomes essential. + +**Teams already using Datadog, Grafana, or Sentry.** You have dashboards, alerts, and runbooks built around your current observability stack. Log drains bring Supabase into that workflow instead of forcing you to learn a new tool. Your on-call engineers can investigate database issues in the same interface they use for everything else. + +**Developers building AI applications.** AI workloads generate unpredictable traffic patterns and complex debugging scenarios. When an embedding query times out or a vector search returns unexpected results, you need to correlate Edge Function logs with Postgres execution plans. Log drains make this correlation possible in tools like Axiom or Datadog that handle high-volume, bursty traffic well. + +**Platform teams managing multiple projects.** If you run Supabase projects for multiple products or clients, centralized logging reduces context switching. One Grafana dashboard can show the health of all your databases. 
One set of alerts can catch problems across your entire portfolio. + +**Organizations with compliance requirements.** Some industries require long-term log retention in systems you control. Sending logs to S3 gives you a compliance-friendly archive at minimal cost. You own the data, you control the retention, and you can query it with Athena when auditors come calling. + +## Supported destinations + +Supabase sends logs in small batches over HTTP. Each destination has its own configuration, but setup takes a few minutes in the dashboard. + +### Sentry + +Send logs to Sentry. Search and filter Supabase logs next to your application errors and traces. Every log field becomes a filterable attribute with no cardinality limits. + +Sentry recently launched their Structured Logs product with trace-connected logging. When you send Supabase logs to Sentry, your database errors appear in the same trace as your frontend exceptions. You can follow a slow query from the user-facing error it caused all the way back to the Postgres execution. This is particularly valuable if you already use Sentry for error tracking and want a unified debugging experience. + +[Sentry setup guide](/docs/guides/platform/log-drains#sentry) + +### Grafana Loki + +Send logs to Grafana Loki. Query them with LogQL in your existing Grafana dashboards. Build visualizations that show Postgres query logs alongside your application metrics and infrastructure telemetry. + +Loki works well for teams running Grafana for infrastructure monitoring. You can create alerts on specific error patterns, build log-based metrics for SLOs, and correlate database events with system metrics like CPU and memory. Stream labels automatically include the log source, so filtering by Postgres, Auth, Storage, or Edge Functions requires no additional configuration. + +[Loki setup guide](/docs/guides/platform/log-drains#grafana-loki) + +### Datadog + +Send logs to Datadog. Use Log Management for search and dashboards. 
Connect logs to APM traces to see database calls in the context of distributed transactions. + +Datadog excels at anomaly detection and ML-powered alerting. You can configure monitors that trigger when Postgres error rates spike or when auth failures exceed normal patterns. The integration works especially well for teams that want to trace slow API calls from their frontend through Supabase and into the database, seeing exactly where latency accumulates. + +[Datadog setup guide](/docs/guides/platform/log-drains#datadog) + +### AWS S3 + +Send logs to S3 for low-cost archival. Query historical data with Athena when you need to investigate incidents from weeks or months ago. + +S3 is the most economical option for long-term retention. Store years of logs for pennies per gigabyte. This destination is useful for compliance requirements, post-incident analysis, or organizations that want to own their log data without paying for real-time indexing they rarely use. + +[S3 setup guide](/docs/guides/platform/log-drains#amazon-s3) + +### Axiom + +Send logs to Axiom for fast searches across high-volume data without expensive indexing costs. + +Axiom handles bursty, high-volume workloads well. If you run many Edge Functions or have database traffic that spikes unpredictably, Axiom provides real-time search without the per-GB costs adding up as quickly as traditional SIEM tools. + +[Axiom setup guide](/docs/guides/platform/log-drains#axiom) + +### Generic HTTP endpoint + +Send logs to any HTTP endpoint when you need full control or when we do not have a preset for your vendor. + +You can point logs at your own Edge Function to transform, filter, or route them. Some teams use this to enrich logs with business context before forwarding to a final destination. Others use it to split logs between multiple tools based on severity or source type. 
+
+[Generic HTTP endpoint setup guide](/docs/guides/platform/log-drains#generic-http-endpoint)
+
+## How it works
+
+You create a log drain in the Supabase Dashboard:
+
+1. Open your project.
+2. Go to **Project Settings**.
+3. Click **Log Drains**.
+4. Select a destination.
+5. Enter the configuration for that destination.
+6. Save.
+
+Supabase sends logs in small batches over HTTP. Your vendor stores and indexes them. You can create multiple drains to send logs to different destinations simultaneously.
+
+## Pricing
+
+- $60 per drain per project
+- $0.20 per million events
+- $0.09 per GB egress
+
+For full billing details, see the [usage guide](/docs/guides/platform/log-drains#pricing).
+
+## Getting started
+
+Read the log drains documentation, select a destination, and set up your first drain:
+
+- [Log drains documentation](/docs/guides/platform/log-drains)
+- [Usage and billing guide](/docs/guides/platform/log-drains#pricing)
diff --git a/apps/www/data/features.tsx b/apps/www/data/features.tsx
index 6a05b95ac5718..d18345bf78e72 100644
--- a/apps/www/data/features.tsx
+++ b/apps/www/data/features.tsx
@@ -2606,26 +2606,30 @@ This feature is particularly beneficial for developers working with complex data
   },
   {
     title: 'Log Drains',
-    subtitle: 'Export logs to external destinations for enhanced monitoring.',
+    subtitle: 'Export logs to Datadog, Grafana, Sentry, S3, and more — now available on Pro.',
     description: `
-Log Drains enable developers to export logs generated by Supabase products—such as the Database, Storage, Realtime, and Auth—to external destinations like Datadog or custom HTTP endpoints. This feature provides a unified view of logs within existing logging and monitoring systems, allowing teams to build robust alerting and observability pipelines.
+Log Drains enable developers to export logs generated by Supabase services—Postgres, Auth, Storage, Edge Functions, Realtime, and the API Gateway—directly to their existing observability tools.
Previously available on Team and Enterprise plans only, Log Drains are now available as a Pro plan add-on. ## Key benefits -1. Centralized logging: Consolidate logs from multiple Supabase services into a single location for easier management and analysis. -2. Custom alerting: Ingest logs into Security Information and Event Management (SIEM) or Intrusion Detection Systems (IDS) to create tailored alerting rules based on database events. -3. Extended retention: Supports longer log retention periods to meet compliance requirements, ensuring data availability for audits and investigations. -4. Flexible configuration: Easily set up Log Drains through the project settings, with support for popular destinations like Datadog and custom HTTP endpoints. -5. Scalable architecture: Built on Logflare's multi-node Elixir cluster, allowing for efficient and scalable log dispatching to multiple destinations. +1. Full-stack visibility: Export logs from every layer of your Supabase infrastructure—not just application code—into a single dashboard alongside your other services. +2. Centralized logging: Consolidate logs from multiple Supabase services into your existing observability stack without building custom polling infrastructure. +3. Custom alerting: Ingest logs into Datadog, Grafana, or Sentry to trigger alerts on database errors, auth failures, or traffic anomalies. +4. Extended retention: Route logs to AWS S3 for low-cost long-term archival, meeting compliance and audit requirements. +5. Near-real-time delivery: Logs are batched (up to 250 per batch or flushed every second) and compressed with gzip when the destination supports it. -This feature is particularly useful for teams seeking to enhance their observability practices while maintaining compliance and security standards across their applications. +## Supported destinations +Datadog, Grafana Loki, Sentry, AWS S3, Axiom, and a generic HTTP endpoint for custom routing. 
+ +## Pricing +$60 per drain per project, plus $0.20 per million events and $0.09 per GB egress. `, icon: Activity, products: [ADDITIONAL_PRODUCTS.STUDIO], heroImage: 'https://www.youtube-nocookie.com/embed/A4GFmvgxS-E', - docsUrl: 'https://supabase.com/blog/log-drains', + docsUrl: 'https://supabase.com/docs/guides/telemetry/log-drains', slug: 'log-drains', status: { - stage: PRODUCT_STAGES.PUBLIC_ALPHA, + stage: PRODUCT_STAGES.GA, availableOnSelfHosted: true, }, }, diff --git a/e2e/studio/features/filter-bar.spec.ts b/e2e/studio/features/filter-bar.spec.ts index 86597c9e758c3..5c186bf9f2a20 100644 --- a/e2e/studio/features/filter-bar.spec.ts +++ b/e2e/studio/features/filter-bar.spec.ts @@ -813,6 +813,85 @@ test.describe('Filter Bar', () => { }) }) + test.describe('IS NULL Filters', () => { + test('IS NULL on a text column shows only null rows', async ({ page, ref }) => { + const tableName = `${tableNamePrefix}_is_null_txt` + + await query( + `CREATE TABLE IF NOT EXISTS ${tableName} ( + id bigint generated by default as identity primary key, + name text + )` + ) + await query(`INSERT INTO ${tableName} (name) VALUES ('Alice'), (NULL), ('Charlie')`) + + try { + await setupFilterBarPage(page, ref, toUrl(`/project/${ref}/editor?schema=public`)) + await navigateToTable(page, ref, tableName) + + await addFilterWithDropdownValue(page, ref, 'name', 'is', 'null') + + const rows = page.locator('[role="row"]') + // Header row + 1 data row with NULL + await expect(rows).toHaveCount(2) + } finally { + await dropTable(tableName) + } + }) + + test('IS NOT NULL on a text column shows only non-null rows', async ({ page, ref }) => { + const tableName = `${tableNamePrefix}_is_nnull_txt` + + await query( + `CREATE TABLE IF NOT EXISTS ${tableName} ( + id bigint generated by default as identity primary key, + name text + )` + ) + await query(`INSERT INTO ${tableName} (name) VALUES ('Alice'), (NULL), ('Charlie')`) + + try { + await setupFilterBarPage(page, ref, 
toUrl(`/project/${ref}/editor?schema=public`)) + await navigateToTable(page, ref, tableName) + + await addFilterWithDropdownValue(page, ref, 'name', 'is', 'not null') + + const rows = page.locator('[role="row"]') + // Header row + 2 data rows (Alice, Charlie) + await expect(rows).toHaveCount(3) + } finally { + await dropTable(tableName) + } + }) + + test('IS NULL on a timestamptz column shows only null rows', async ({ page, ref }) => { + const tableName = `${tableNamePrefix}_is_null_ts` + + await query( + `CREATE TABLE IF NOT EXISTS ${tableName} ( + id bigint generated by default as identity primary key, + created_at timestamptz + )` + ) + await query( + `INSERT INTO ${tableName} (created_at) VALUES (NOW()), (NULL), (NOW() - INTERVAL '1 day')` + ) + + try { + await setupFilterBarPage(page, ref, toUrl(`/project/${ref}/editor?schema=public`)) + await navigateToTable(page, ref, tableName) + + await addFilterWithDropdownValue(page, ref, 'created_at', 'is', 'null') + + const rows = page.locator('[role="row"]') + // Header row + 1 data row with NULL + await expect(rows).toHaveCount(2) + } finally { + await dropTable(tableName) + } + }) + }) + test.describe('Filter Error Feedback', () => { test('invalid filter value shows friendly error and remove button clears filters', async ({ page, diff --git a/e2e/studio/features/storage.spec.ts b/e2e/studio/features/storage.spec.ts index b2d371b9de9eb..c03136bc19d83 100644 --- a/e2e/studio/features/storage.spec.ts +++ b/e2e/studio/features/storage.spec.ts @@ -1,8 +1,7 @@ -import { expect } from '@playwright/test' import path from 'path' +import { expect } from '@playwright/test' + import { env } from '../env.config.js' -import { test } from '../utils/test.js' -import { waitForApiResponse } from '../utils/wait-for-response.js' import { createBucket, createFolder, @@ -18,6 +17,8 @@ import { createBucket as createBucketViaApi, deleteBucket as deleteBucketViaApi, } from '../utils/storage/index.js' +import { test } from 
'../utils/test.js' +import { waitForApiResponse } from '../utils/wait-for-response.js' const bucketNamePrefix = 'pw_bucket' @@ -291,6 +292,34 @@ test.describe('Storage', () => { ).toBeVisible() }) + test('resets storage view when switching buckets', async ({ page, ref }) => { + const bucketName = `${bucketNamePrefix}_navigation` + const bucketName2 = `${bucketNamePrefix}2_navigation` + const folderName = 'folder_navigation' + const fileName = 'test-file.txt' + + // Create 2 bucket via API, navigate to the first + await deleteBucketViaApi(bucketName) + await deleteBucketViaApi(bucketName2) + await createBucketViaApi(bucketName, false) + await createBucketViaApi(bucketName2, false) + await navigateToStorageFiles(page, ref) + await navigateToBucket(page, ref, bucketName) + + // create a folder and add a file + await createFolder(page, folderName) + // Open the folder + await page.getByTitle(folderName).click() + const filePath = path.join(import.meta.dirname, 'files', fileName) + await uploadFile(page, filePath, fileName) + + // Navigate to bucket list + await page.getByRole('link', { name: 'Files' }).nth(1).click() + // Navigate to the 2nd bucket + await navigateToBucket(page, ref, bucketName2) + await expect(page.getByTitle(fileName)).not.toBeVisible() + }) + test('can delete a file', async ({ page, ref }) => { const bucketName = `${bucketNamePrefix}_delete_file` const fileName = 'test-file.txt' diff --git a/packages/ui-patterns/src/FilterBar/menuItems.test.ts b/packages/ui-patterns/src/FilterBar/menuItems.test.ts new file mode 100644 index 0000000000000..14dfa07478258 --- /dev/null +++ b/packages/ui-patterns/src/FilterBar/menuItems.test.ts @@ -0,0 +1,104 @@ +import { describe, expect, it } from 'vitest' + +import { buildValueItems } from './menuItems' +import { FilterGroup, FilterProperty } from './types' + +const stringProperty: FilterProperty = { + label: 'Name', + name: 'name', + type: 'string', + options: [ + { label: 'Alice', value: 'alice' }, + { label: 
'Bob', value: 'bob' }, + ], + operators: [ + { value: '=', label: 'Equals', group: 'comparison' as const }, + { value: 'is', label: 'Is', group: 'setNull' as const }, + ], +} + +const booleanProperty: FilterProperty = { + label: 'Active', + name: 'active', + type: 'boolean', + options: [ + { label: 'true', value: 'true' }, + { label: 'false', value: 'false' }, + ], + operators: [ + { value: '=', label: 'Equals', group: 'comparison' as const }, + { value: 'is', label: 'Is', group: 'setNull' as const }, + ], +} + +const filterProperties: FilterProperty[] = [stringProperty, booleanProperty] + +describe('buildValueItems', () => { + it('returns NULL and NOT NULL options when IS operator is selected', () => { + const filters: FilterGroup = { + logicalOperator: 'AND', + conditions: [{ propertyName: 'name', operator: 'is', value: '' }], + } + + const items = buildValueItems( + { type: 'value', path: [0] }, + filters, + filterProperties, + {}, + {}, + '', + false + ) + + expect(items).toEqual([ + { value: 'null', label: 'NULL' }, + { value: 'not null', label: 'NOT NULL' }, + ]) + }) + + it('includes TRUE and FALSE options for boolean properties with IS operator', () => { + const filters: FilterGroup = { + logicalOperator: 'AND', + conditions: [{ propertyName: 'active', operator: 'is', value: '' }], + } + + const items = buildValueItems( + { type: 'value', path: [0] }, + filters, + filterProperties, + {}, + {}, + '', + false + ) + + expect(items).toEqual([ + { value: 'null', label: 'NULL' }, + { value: 'not null', label: 'NOT NULL' }, + { value: 'true', label: 'TRUE' }, + { value: 'false', label: 'FALSE' }, + ]) + }) + + it('returns normal property options for non-IS operators', () => { + const filters: FilterGroup = { + logicalOperator: 'AND', + conditions: [{ propertyName: 'name', operator: '=', value: '' }], + } + + const items = buildValueItems( + { type: 'value', path: [0] }, + filters, + filterProperties, + {}, + {}, + '', + false + ) + + expect(items).toEqual([ + { 
value: 'alice', label: 'Alice' }, + { value: 'bob', label: 'Bob' }, + ]) + }) +}) diff --git a/packages/ui-patterns/src/FilterBar/menuItems.ts b/packages/ui-patterns/src/FilterBar/menuItems.ts index f46a0828087f3..a618e50ceb1f6 100644 --- a/packages/ui-patterns/src/FilterBar/menuItems.ts +++ b/packages/ui-patterns/src/FilterBar/menuItems.ts @@ -97,6 +97,10 @@ export function buildValueItems( if (!property) return items + if (activeCondition?.operator === 'is') { + return getIsOperatorValueItems(property, inputValue, hasTypedSinceFocus) + } + if (!Array.isArray(property.options) && isCustomOptionObject(property.options)) { items.push({ value: 'custom', @@ -157,3 +161,27 @@ function getCachedOptionItems(options: any[]): MenuItem[] { return { value: option.value, label: option.label } }) } + +function getIsOperatorValueItems( + property: FilterProperty, + inputValue: string, + hasTypedSinceFocus: boolean +): MenuItem[] { + const options: { value: string; label: string }[] = [ + { value: 'null', label: 'NULL' }, + { value: 'not null', label: 'NOT NULL' }, + ] + + if (property.type === 'boolean') { + options.push({ value: 'true', label: 'TRUE' }, { value: 'false', label: 'FALSE' }) + } + + const shouldFilter = hasTypedSinceFocus && inputValue.length > 0 + if (!shouldFilter) return options + + const normalizedInput = inputValue.toLowerCase() + return options.filter( + (opt) => + opt.label.toLowerCase().includes(normalizedInput) || opt.value.includes(normalizedInput) + ) +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index cb6c665e22330..a3b275f357949 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -821,7 +821,7 @@ importers: version: 1.1.3(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@sentry/nextjs': specifier: 'catalog:' - version: 
10.27.0(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(encoding@0.1.13)(next@16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1)(supports-color@8.1.1)(webpack@5.94.0) + version: 10.27.0(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(encoding@0.1.13)(next@16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1)(supports-color@8.1.1)(webpack@5.94.0) '@std/path': specifier: npm:@jsr/std__path@^1.0.8 version: '@jsr/std__path@1.0.8' @@ -970,14 +970,14 @@ importers: specifier: 0.52.2 version: 0.52.2 next: - specifier: ~16.0.0 - version: 16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) + specifier: ~16.1.6 + version: 16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) next-themes: specifier: ^0.3.0 version: 0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) nuqs: specifier: 2.7.1 - version: 2.7.1(@tanstack/react-router@1.158.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(next@16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react-router@7.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) + version: 
2.7.1(@tanstack/react-router@1.158.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(next@16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react-router@7.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) openai: specifier: ^4.75.1 version: 4.104.0(encoding@0.1.13)(ws@8.18.3)(zod@3.25.76) @@ -1242,7 +1242,7 @@ importers: version: 2.11.3(@types/node@22.13.14)(typescript@5.9.2) next-router-mock: specifier: ^0.9.13 - version: 0.9.13(next@16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1) + version: 0.9.13(next@16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1) node-mocks-http: specifier: ^1.17.2 version: 1.17.2(@types/node@22.13.14) @@ -2171,7 +2171,7 @@ importers: version: 18.3.0 next-router-mock: specifier: ^0.9.13 - version: 0.9.13(next@16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1) + version: 0.9.13(next@16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1) tsconfig: specifier: workspace:* version: link:../tsconfig @@ -4641,8 +4641,8 @@ packages: '@next/env@15.5.10': resolution: {integrity: sha512-plg+9A/KoZcTS26fe15LHg+QxReTazrIOoKKUC3Uz4leGGeNPgLHdevVraAAOX0snnUs3WkRx3eUQpj9mreG6A==} - '@next/env@16.0.11': - resolution: {integrity: sha512-hULMheQaOhFK1vAoFPigXca42LguwyLILtJKPRzpY1d+og6jk0YNAQVwLGNYYhWEMd2zj4gcIWSf1yC5PffqqA==} + '@next/env@16.1.6': + resolution: {integrity: 
sha512-N1ySLuZjnAtN3kFnwhAwPvZah8RJxKasD7x1f8shFqhncnWZn4JMfg37diLNuoHsLAlrDfM3g4mawVdtAG8XLQ==} '@next/eslint-plugin-next@15.5.4': resolution: {integrity: sha512-SR1vhXNNg16T4zffhJ4TS7Xn7eq4NfKfcOsRwea7RIAHrjRpI9ALYbamqIJqkAhowLlERffiwk0FMvTLNdnVtw==} @@ -4664,8 +4664,8 @@ packages: cpu: [arm64] os: [darwin] - '@next/swc-darwin-arm64@16.0.11': - resolution: {integrity: sha512-3G7Rx6m6tgLqkc3Ce3QY/Yrsx7nJF4ithdHfx70Jmzel8m2xpjnGRC+oB4UcCHvQwN0ZP5YsLJakwx/M0vWbSQ==} + '@next/swc-darwin-arm64@16.1.6': + resolution: {integrity: sha512-wTzYulosJr/6nFnqGW7FrG3jfUUlEf8UjGA0/pyypJl42ExdVgC6xJgcXQ+V8QFn6niSG2Pb8+MIG1mZr2vczw==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] @@ -4676,8 +4676,8 @@ packages: cpu: [x64] os: [darwin] - '@next/swc-darwin-x64@16.0.11': - resolution: {integrity: sha512-poUTsYKRwuG+eApDngouEiN6AGcAMq8TAQYP8Nou7iMS7x6+q3dFhhyhgodIzTF9acsEINl4cIzMaM9XJor8kw==} + '@next/swc-darwin-x64@16.1.6': + resolution: {integrity: sha512-BLFPYPDO+MNJsiDWbeVzqvYd4NyuRrEYVB5k2N3JfWncuHAy2IVwMAOlVQDFjj+krkWzhY2apvmekMkfQR0CUQ==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] @@ -4689,8 +4689,8 @@ packages: os: [linux] libc: [glibc] - '@next/swc-linux-arm64-gnu@16.0.11': - resolution: {integrity: sha512-Q9shvB+eLNrK/n8w+/ZTWSzbEIzJ56mP83ZVaqmHay6/Ulcn6THEId4gxfYCXmSwEG/xPAtv58FBWeZkp36XUA==} + '@next/swc-linux-arm64-gnu@16.1.6': + resolution: {integrity: sha512-OJYkCd5pj/QloBvoEcJ2XiMnlJkRv9idWA/j0ugSuA34gMT6f5b7vOiCQHVRpvStoZUknhl6/UxOXL4OwtdaBw==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] @@ -4703,8 +4703,8 @@ packages: os: [linux] libc: [musl] - '@next/swc-linux-arm64-musl@16.0.11': - resolution: {integrity: sha512-rq+d/a0FZHVPEh3zismoQgfVkSIEzlTbNhD4Z8bToLMszUlggAh1D1syhJ4MHkYzXRszhjS2emy0PYXz7Uwttw==} + '@next/swc-linux-arm64-musl@16.1.6': + resolution: {integrity: sha512-S4J2v+8tT3NIO9u2q+S0G5KdvNDjXfAv06OhfOzNDaBn5rw84DGXWndOEB7d5/x852A20sW1M56vhC/tRVbccQ==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] @@ -4717,8 +4717,8 @@ packages: os: 
[linux] libc: [glibc] - '@next/swc-linux-x64-gnu@16.0.11': - resolution: {integrity: sha512-82Wroterii1p15O+ZF/DDsHPuxKptR1JGK+obgbAk13vrc3B/fTJ2qOOmdeoMwAQ15gb/9mN4LQl9+IzFje76Q==} + '@next/swc-linux-x64-gnu@16.1.6': + resolution: {integrity: sha512-2eEBDkFlMMNQnkTyPBhQOAyn2qMxyG2eE7GPH2WIDGEpEILcBPI/jdSv4t6xupSP+ot/jkfrCShLAa7+ZUPcJQ==} engines: {node: '>= 10'} cpu: [x64] os: [linux] @@ -4731,8 +4731,8 @@ packages: os: [linux] libc: [musl] - '@next/swc-linux-x64-musl@16.0.11': - resolution: {integrity: sha512-YK9RoeZuHWBd+wHi5/7VLp6P5ZOldAjQfBjjtzcR4f14FNmwT0a3ozMMlG2txDxh53krAd5yOO601RbJxH0gCQ==} + '@next/swc-linux-x64-musl@16.1.6': + resolution: {integrity: sha512-oicJwRlyOoZXVlxmIMaTq7f8pN9QNbdes0q2FXfRsPhfCi8n8JmOZJm5oo1pwDaFbnnD421rVU409M3evFbIqg==} engines: {node: '>= 10'} cpu: [x64] os: [linux] @@ -4744,8 +4744,8 @@ packages: cpu: [arm64] os: [win32] - '@next/swc-win32-arm64-msvc@16.0.11': - resolution: {integrity: sha512-pcDMpSckekV8xj2SSKO8PaqaJhrmDx84zUNip0kOWsT/ERhhDpnWkr6KXMqRXVp2y5CW9pp4LwOFdtpt3rhRgw==} + '@next/swc-win32-arm64-msvc@16.1.6': + resolution: {integrity: sha512-gQmm8izDTPgs+DCWH22kcDmuUp7NyiJgEl18bcr8irXA5N2m2O+JQIr6f3ct42GOs9c0h8QF3L5SzIxcYAAXXw==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] @@ -4756,8 +4756,8 @@ packages: cpu: [x64] os: [win32] - '@next/swc-win32-x64-msvc@16.0.11': - resolution: {integrity: sha512-Zzo9NLLRzBSHw9zOGpER/gdc5rofZHLjR2OIUIfoBaN2Oo5zWRl43IF5rMSX2LX7MPLTx4Ww8+5lNHAhXgitnA==} + '@next/swc-win32-x64-msvc@16.1.6': + resolution: {integrity: sha512-NRfO39AIrzBnixKbjuo2YiYhB6o9d8v/ymU9m/Xk8cyVk+k7XylniXkHwjs4s70wedVffc6bQNbufk5v0xEm0A==} engines: {node: '>= 10'} cpu: [x64] os: [win32] @@ -14500,8 +14500,8 @@ packages: sass: optional: true - next@16.0.11: - resolution: {integrity: sha512-Xlo2aFWaoypPzXr4PFLSNmxrzNptlp+hgxnG9Y2THYvHrvmXIuHUyNAWO6Q+F4rm4/bmTOukprXEyF/j4qsC2A==} + next@16.1.6: + resolution: {integrity: 
sha512-hkyRkcu5x/41KoqnROkfTm2pZVbKxvbZRuNvKXLRXxs3VfyO0WhY50TQS40EuKO9SW3rBj/sF3WbVwDACeMZyw==} engines: {node: '>=20.9.0'} hasBin: true peerDependencies: @@ -21541,7 +21541,7 @@ snapshots: '@next/env@15.5.10': {} - '@next/env@16.0.11': {} + '@next/env@16.1.6': {} '@next/eslint-plugin-next@15.5.4': dependencies: @@ -21557,49 +21557,49 @@ snapshots: '@next/swc-darwin-arm64@15.5.7': optional: true - '@next/swc-darwin-arm64@16.0.11': + '@next/swc-darwin-arm64@16.1.6': optional: true '@next/swc-darwin-x64@15.5.7': optional: true - '@next/swc-darwin-x64@16.0.11': + '@next/swc-darwin-x64@16.1.6': optional: true '@next/swc-linux-arm64-gnu@15.5.7': optional: true - '@next/swc-linux-arm64-gnu@16.0.11': + '@next/swc-linux-arm64-gnu@16.1.6': optional: true '@next/swc-linux-arm64-musl@15.5.7': optional: true - '@next/swc-linux-arm64-musl@16.0.11': + '@next/swc-linux-arm64-musl@16.1.6': optional: true '@next/swc-linux-x64-gnu@15.5.7': optional: true - '@next/swc-linux-x64-gnu@16.0.11': + '@next/swc-linux-x64-gnu@16.1.6': optional: true '@next/swc-linux-x64-musl@15.5.7': optional: true - '@next/swc-linux-x64-musl@16.0.11': + '@next/swc-linux-x64-musl@16.1.6': optional: true '@next/swc-win32-arm64-msvc@15.5.7': optional: true - '@next/swc-win32-arm64-msvc@16.0.11': + '@next/swc-win32-arm64-msvc@16.1.6': optional: true '@next/swc-win32-x64-msvc@15.5.7': optional: true - '@next/swc-win32-x64-msvc@16.0.11': + '@next/swc-win32-x64-msvc@16.1.6': optional: true '@ngrok/ngrok-android-arm64@1.6.0': @@ -24983,7 +24983,7 @@ snapshots: - supports-color - webpack - 
'@sentry/nextjs@10.27.0(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(encoding@0.1.13)(next@16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1)(supports-color@8.1.1)(webpack@5.94.0)': + '@sentry/nextjs@10.27.0(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(encoding@0.1.13)(next@16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1)(supports-color@8.1.1)(webpack@5.94.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/semantic-conventions': 1.38.0 @@ -24996,7 +24996,7 @@ snapshots: '@sentry/react': 10.27.0(react@18.3.1) '@sentry/vercel-edge': 10.27.0 '@sentry/webpack-plugin': 4.6.1(encoding@0.1.13)(supports-color@8.1.1)(webpack@5.94.0) - next: 16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) + next: 16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) resolve: 1.22.8 rollup: 4.59.0 stacktrace-parser: 0.1.10 @@ -33146,9 +33146,9 @@ snapshots: next: 15.5.10(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(babel-plugin-macros@3.1.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) react: 18.3.1 - 
next-router-mock@0.9.13(next@16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1): + next-router-mock@0.9.13(next@16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1): dependencies: - next: 16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) + next: 16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) react: 18.3.1 next-seo@6.5.0(next@15.5.10(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): @@ -33190,24 +33190,25 @@ snapshots: - '@babel/core' - babel-plugin-macros - next@16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4): + next@16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4): dependencies: - '@next/env': 16.0.11 + '@next/env': 16.1.6 '@swc/helpers': 0.5.15 + baseline-browser-mapping: 2.8.5 caniuse-lite: 1.0.30001743 postcss: 8.4.31 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) styled-jsx: 5.1.6(@babel/core@7.29.0(supports-color@8.1.1))(babel-plugin-macros@3.1.0)(react@18.3.1) optionalDependencies: - '@next/swc-darwin-arm64': 16.0.11 - '@next/swc-darwin-x64': 16.0.11 - '@next/swc-linux-arm64-gnu': 16.0.11 - '@next/swc-linux-arm64-musl': 16.0.11 - '@next/swc-linux-x64-gnu': 16.0.11 - '@next/swc-linux-x64-musl': 16.0.11 - '@next/swc-win32-arm64-msvc': 
16.0.11 - '@next/swc-win32-x64-msvc': 16.0.11 + '@next/swc-darwin-arm64': 16.1.6 + '@next/swc-darwin-x64': 16.1.6 + '@next/swc-linux-arm64-gnu': 16.1.6 + '@next/swc-linux-arm64-musl': 16.1.6 + '@next/swc-linux-x64-gnu': 16.1.6 + '@next/swc-linux-x64-musl': 16.1.6 + '@next/swc-win32-arm64-msvc': 16.1.6 + '@next/swc-win32-x64-msvc': 16.1.6 '@opentelemetry/api': 1.9.0 '@playwright/test': 1.56.1 sass: 1.77.4 @@ -33532,13 +33533,13 @@ snapshots: mitt: 3.0.1 next: 15.5.10(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(babel-plugin-macros@3.1.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) - nuqs@2.7.1(@tanstack/react-router@1.158.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(next@16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react-router@7.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1): + nuqs@2.7.1(@tanstack/react-router@1.158.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(next@16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react-router@7.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1): dependencies: '@standard-schema/spec': 1.0.0 react: 18.3.1 optionalDependencies: '@tanstack/react-router': 1.158.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - next: 16.0.11(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) + next: 16.1.6(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4) react-router: 7.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) 
nuqs@2.8.1(@tanstack/react-router@1.158.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(next@15.5.10(@babel/core@7.29.0(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react-router@7.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1):