//#region src/PostgrestError.d.ts /** * Error format * * {@link https://postgrest.org/en/stable/api.html?highlight=options#errors-and-http-status-codes} */ declare class PostgrestError extends Error { details: string; hint: string; code: string; /** * @example * ```ts * import PostgrestError from '@supabase/postgrest-js' * * throw new PostgrestError({ * message: 'Row level security prevented the request', * details: 'RLS denied the insert', * hint: 'Check your policies', * code: 'PGRST301', * }) * ``` */ constructor(context: { message: string; details: string; hint: string; code: string; }); } //#endregion //#region src/types/common/common.d.ts type Fetch = typeof fetch; type GenericRelationship = { foreignKeyName: string; columns: string[]; isOneToOne?: boolean; referencedRelation: string; referencedColumns: string[]; }; type GenericTable = { Row: Record; Insert: Record; Update: Record; Relationships: GenericRelationship[]; }; type GenericUpdatableView = { Row: Record; Insert: Record; Update: Record; Relationships: GenericRelationship[]; }; type GenericNonUpdatableView = { Row: Record; Relationships: GenericRelationship[]; }; type GenericView = GenericUpdatableView | GenericNonUpdatableView; type GenericSetofOption = { isSetofReturn?: boolean | undefined; isOneToOne?: boolean | undefined; isNotNullable?: boolean | undefined; to: string; from: string; }; type GenericFunction = { Args: Record | never; Returns: unknown; SetofOptions?: GenericSetofOption; }; type GenericSchema = { Tables: Record; Views: Record; Functions: Record; }; type ClientServerOptions = { PostgrestVersion?: string; }; //#endregion //#region src/select-query-parser/types.d.ts type AggregateWithoutColumnFunctions = 'count'; type AggregateWithColumnFunctions = 'sum' | 'avg' | 'min' | 'max' | AggregateWithoutColumnFunctions; type AggregateFunctions = AggregateWithColumnFunctions; type Json = string | number | boolean | null | { [key: string]: Json | undefined; } | Json[]; type PostgresSQLNumberTypes 
= 'int2' | 'int4' | 'int8' | 'float4' | 'float8' | 'numeric'; type PostgresSQLStringTypes = 'bytea' | 'bpchar' | 'varchar' | 'date' | 'text' | 'citext' | 'time' | 'timetz' | 'timestamp' | 'timestamptz' | 'uuid' | 'vector'; type SingleValuePostgreSQLTypes = PostgresSQLNumberTypes | PostgresSQLStringTypes | 'bool' | 'json' | 'jsonb' | 'void' | 'record' | string; type ArrayPostgreSQLTypes = `_${SingleValuePostgreSQLTypes}`; type TypeScriptSingleValueTypes = T extends 'bool' ? boolean : T extends PostgresSQLNumberTypes ? number : T extends PostgresSQLStringTypes ? string : T extends 'json' | 'jsonb' ? Json : T extends 'void' ? undefined : T extends 'record' ? Record : unknown; type StripUnderscore = T extends `_${infer U}` ? U : T; type PostgreSQLTypes = SingleValuePostgreSQLTypes | ArrayPostgreSQLTypes; type TypeScriptTypes = T extends ArrayPostgreSQLTypes ? TypeScriptSingleValueTypes>>[] : TypeScriptSingleValueTypes; type UnionToIntersection$1 = (U extends any ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never; type LastOf$1 = UnionToIntersection$1 T : never> extends (() => infer R) ? R : never; type Push = [...T, V]; type UnionToTuple, N = ([T] extends [never] ? true : false)> = N extends true ? [] : Push>, L$1>; type UnionToArray = UnionToTuple; type ExtractFirstProperty = T extends { [K in keyof T]: infer U } ? U : never; type ContainsNull = null extends T ? true : false; type IsNonEmptyArray = Exclude extends readonly [unknown, ...unknown[]] ? true : false; type TablesAndViews$2 = Schema['Tables'] & Exclude; //#endregion //#region src/select-query-parser/parser.d.ts /** * Parses a query. * A query is a sequence of nodes, separated by `,`, ensuring that there is * no remaining input after all nodes have been parsed. * * Returns an array of parsed nodes, or an error. */ type ParseQuery = string extends Query ? GenericStringError : ParseNodes> extends [infer Nodes, `${infer Remainder}`] ? Nodes extends Ast.Node[] ? EatWhitespace extends '' ? 
SimplifyDeep : ParserError<`Unexpected input: ${Remainder}`> : ParserError<'Invalid nodes array structure'> : ParseNodes>; /** * Notes: all `Parse*` types assume that their input strings have their whitespace * removed. They return tuples of ["Return Value", "Remainder of text"] or * a `ParserError`. */ /** * Parses a sequence of nodes, separated by `,`. * * Returns a tuple of ["Parsed fields", "Remainder of text"] or an error. */ type ParseNodes = string extends Input ? GenericStringError : ParseNodesHelper; type ParseNodesHelper = ParseNode extends [infer Node, `${infer Remainder}`] ? Node extends Ast.Node ? EatWhitespace extends `,${infer Remainder}` ? ParseNodesHelper, [...Nodes$1, Node]> : [[...Nodes$1, Node], EatWhitespace] : ParserError<'Invalid node type in nodes helper'> : ParseNode; /** * Parses a node. * A node is one of the following: * - `*` * - a field, as defined above * - a renamed field, `renamed_field:field` * - a spread field, `...field` */ type ParseNode = Input extends '' ? ParserError<'Empty string'> : Input extends `*${infer Remainder}` ? [Ast.StarNode, EatWhitespace] : Input extends `...${infer Remainder}` ? ParseField> extends [infer TargetField, `${infer Remainder}`] ? TargetField extends Ast.FieldNode ? [{ type: 'spread'; target: TargetField; }, EatWhitespace] : ParserError<'Invalid target field type in spread'> : ParserError<`Unable to parse spread resource at \`${Input}\``> : ParseIdentifier extends [infer NameOrAlias, `${infer Remainder}`] ? EatWhitespace extends `::${infer _}` ? ParseField : EatWhitespace extends `:${infer Remainder}` ? ParseField> extends [infer Field, `${infer Remainder}`] ? Field extends Ast.FieldNode ? [Omit & { alias: NameOrAlias; }, EatWhitespace] : ParserError<'Invalid field type in alias parsing'> : ParserError<`Unable to parse renamed field at \`${Input}\``> : ParseField : ParserError<`Expected identifier at \`${Input}\``>; /** * Parses a field without preceding alias. 
* A field is one of the following: * - a top-level `count` field: https://docs.postgrest.org/en/v12/references/api/aggregate_functions.html#the-case-of-count * - a field with an embedded resource * - `field(nodes)` * - `field!hint(nodes)` * - `field!inner(nodes)` * - `field!left(nodes)` * - `field!hint!inner(nodes)` * - `field!hint!left(nodes)` * - a field without an embedded resource (see {@link ParseNonEmbeddedResourceField}) */ type ParseField = Input extends '' ? ParserError<'Empty string'> : ParseIdentifier extends [infer Name, `${infer Remainder}`] ? Name extends 'count' ? ParseCountField : Remainder extends `!inner${infer Remainder}` ? ParseEmbeddedResource> extends [infer Children, `${infer Remainder}`] ? Children extends Ast.Node[] ? [{ type: 'field'; name: Name; innerJoin: true; children: Children; }, Remainder] : ParserError<'Invalid children array in inner join'> : CreateParserErrorIfRequired>, `Expected embedded resource after "!inner" at \`${Remainder}\``> : EatWhitespace extends `!left${infer Remainder}` ? ParseEmbeddedResource> extends [infer Children, `${infer Remainder}`] ? Children extends Ast.Node[] ? [{ type: 'field'; name: Name; children: Children; }, EatWhitespace] : ParserError<'Invalid children array in left join'> : CreateParserErrorIfRequired>, `Expected embedded resource after "!left" at \`${EatWhitespace}\``> : EatWhitespace extends `!${infer Remainder}` ? ParseIdentifier> extends [infer Hint, `${infer Remainder}`] ? EatWhitespace extends `!inner${infer Remainder}` ? ParseEmbeddedResource> extends [infer Children, `${infer Remainder}`] ? Children extends Ast.Node[] ? [{ type: 'field'; name: Name; hint: Hint; innerJoin: true; children: Children; }, EatWhitespace] : ParserError<'Invalid children array in hint inner join'> : ParseEmbeddedResource> : ParseEmbeddedResource> extends [infer Children, `${infer Remainder}`] ? Children extends Ast.Node[] ? 
[{ type: 'field'; name: Name; hint: Hint; children: Children; }, EatWhitespace] : ParserError<'Invalid children array in hint'> : ParseEmbeddedResource> : ParserError<`Expected identifier after "!" at \`${EatWhitespace}\``> : EatWhitespace extends `(${infer _}` ? ParseEmbeddedResource> extends [infer Children, `${infer Remainder}`] ? Children extends Ast.Node[] ? [{ type: 'field'; name: Name; children: Children; }, EatWhitespace] : ParserError<'Invalid children array in field'> : ParseEmbeddedResource> : ParseNonEmbeddedResourceField : ParserError<`Expected identifier at \`${Input}\``>; type ParseCountField = ParseIdentifier extends ['count', `${infer Remainder}`] ? (EatWhitespace extends `()${infer Remainder_}` ? EatWhitespace : EatWhitespace) extends `${infer Remainder}` ? Remainder extends `::${infer _}` ? ParseFieldTypeCast extends [infer CastType, `${infer Remainder}`] ? [{ type: 'field'; name: 'count'; aggregateFunction: 'count'; castType: CastType; }, Remainder] : ParseFieldTypeCast : [{ type: 'field'; name: 'count'; aggregateFunction: 'count'; }, Remainder] : never : ParserError<`Expected "count" at \`${Input}\``>; /** * Parses an embedded resource, which is an opening `(`, followed by a sequence of * 0 or more nodes separated by `,`, then a closing `)`. * * Returns a tuple of ["Parsed fields", "Remainder of text"], an error, * or the original string input indicating that no opening `(` was found. */ type ParseEmbeddedResource = Input extends `(${infer Remainder}` ? EatWhitespace extends `)${infer Remainder}` ? [[], EatWhitespace] : ParseNodes> extends [infer Nodes, `${infer Remainder}`] ? Nodes extends Ast.Node[] ? EatWhitespace extends `)${infer Remainder}` ? [Nodes, EatWhitespace] : ParserError<`Expected ")" at \`${EatWhitespace}\``> : ParserError<'Invalid nodes array in embedded resource'> : ParseNodes> : ParserError<`Expected "(" at \`${Input}\``>; /** * Parses a field excluding embedded resources, without preceding field renaming. 
* This is one of the following: * - `field` * - `field.aggregate()` * - `field.aggregate()::type` * - `field::type` * - `field::type.aggregate()` * - `field::type.aggregate()::type` * - `field->json...` * - `field->json.aggregate()` * - `field->json.aggregate()::type` * - `field->json::type` * - `field->json::type.aggregate()` * - `field->json::type.aggregate()::type` */ type ParseNonEmbeddedResourceField = ParseIdentifier extends [infer Name, `${infer Remainder}`] ? (Remainder extends `->${infer PathAndRest}` ? ParseJsonAccessor extends [infer PropertyName, infer PropertyType, `${infer Remainder}`] ? [{ type: 'field'; name: Name; alias: PropertyName; castType: PropertyType; jsonPath: JsonPathToAccessor; }, Remainder] : ParseJsonAccessor : [{ type: 'field'; name: Name; }, Remainder]) extends infer Parsed ? Parsed extends [infer Field, `${infer Remainder}`] ? (Remainder extends `::${infer _}` ? ParseFieldTypeCast extends [infer CastType, `${infer Remainder}`] ? [Omit & { castType: CastType; }, Remainder] : ParseFieldTypeCast : [Field, Remainder]) extends infer Parsed ? Parsed extends [infer Field, `${infer Remainder}`] ? Remainder extends `.${infer _}` ? ParseFieldAggregation extends [infer AggregateFunction, `${infer Remainder}`] ? Remainder extends `::${infer _}` ? ParseFieldTypeCast extends [infer CastType, `${infer Remainder}`] ? [Omit & { aggregateFunction: AggregateFunction; castType: CastType; }, Remainder] : ParseFieldTypeCast : [Field & { aggregateFunction: AggregateFunction; }, Remainder] : ParseFieldAggregation : [Field, Remainder] : Parsed : never : Parsed : never : ParserError<`Expected identifier at \`${Input}\``>; /** * Parses a JSON property accessor of the shape `->a->b->c`. The last accessor in * the series may convert to text by using the ->> operator instead of ->. * * Returns a tuple of ["Last property name", "Last property type", "Remainder of text"] */ type ParseJsonAccessor = Input extends `->${infer Remainder}` ? 
Remainder extends `>${infer Remainder}` ? ParseIdentifier extends [infer Name, `${infer Remainder}`] ? [Name, 'text', EatWhitespace] : ParserError<'Expected property name after `->>`'> : ParseIdentifier extends [infer Name, `${infer Remainder}`] ? ParseJsonAccessor extends [infer PropertyName, infer PropertyType, `${infer Remainder}`] ? [PropertyName, PropertyType, EatWhitespace] : [Name, 'json', EatWhitespace] : ParserError<'Expected property name after `->`'> : ParserError<'Expected ->'>; /** * Parses a field typecast (`::type`), returning a tuple of ["Type", "Remainder of text"]. */ type ParseFieldTypeCast = EatWhitespace extends `::${infer Remainder}` ? ParseIdentifier> extends [`${infer CastType}`, `${infer Remainder}`] ? [CastType, EatWhitespace] : ParserError<`Invalid type for \`::\` operator at \`${Remainder}\``> : ParserError<'Expected ::'>; /** * Parses a field aggregation (`.max()`), returning a tuple of ["Aggregate function", "Remainder of text"] */ type ParseFieldAggregation = EatWhitespace extends `.${infer Remainder}` ? ParseIdentifier> extends [`${infer FunctionName}`, `${infer Remainder}`] ? FunctionName extends Token.AggregateFunction ? EatWhitespace extends `()${infer Remainder}` ? [FunctionName, EatWhitespace] : ParserError<`Expected \`()\` after \`.\` operator \`${FunctionName}\``> : ParserError<`Invalid type for \`.\` operator \`${FunctionName}\``> : ParserError<`Invalid type for \`.\` operator at \`${Remainder}\``> : ParserError<'Expected .'>; /** * Parses a (possibly double-quoted) identifier. * Identifiers are sequences of 1 or more letters. */ type ParseIdentifier = ParseLetters extends [infer Name, `${infer Remainder}`] ? [Name, EatWhitespace] : ParseQuotedLetters extends [infer Name, `${infer Remainder}`] ? [Name, EatWhitespace] : ParserError<`No (possibly double-quoted) identifier at \`${Input}\``>; /** * Parse a consecutive sequence of 1 or more letter, where letters are `[0-9a-zA-Z_]`. */ type ParseLetters = string extends Input ? 
GenericStringError : ParseLettersHelper extends [`${infer Letters}`, `${infer Remainder}`] ? Letters extends '' ? ParserError<`Expected letter at \`${Input}\``> : [Letters, Remainder] : ParseLettersHelper; type ParseLettersHelper = string extends Input ? GenericStringError : Input extends `${infer L}${infer Remainder}` ? L extends Token.Letter ? ParseLettersHelper : [Acc, Input] : [Acc, '']; /** * Parse a consecutive sequence of 1 or more double-quoted letters, * where letters are `[^"]`. */ type ParseQuotedLetters = string extends Input ? GenericStringError : Input extends `"${infer Remainder}` ? ParseQuotedLettersHelper extends [`${infer Letters}`, `${infer Remainder}`] ? Letters extends '' ? ParserError<`Expected string at \`${Remainder}\``> : [Letters, Remainder] : ParseQuotedLettersHelper : ParserError<`Not a double-quoted string at \`${Input}\``>; type ParseQuotedLettersHelper = string extends Input ? GenericStringError : Input extends `${infer L}${infer Remainder}` ? L extends '"' ? [Acc, Remainder] : ParseQuotedLettersHelper : ParserError<`Missing closing double-quote in \`"${Acc}${Input}\``>; /** * Trims whitespace from the left of the input. */ type EatWhitespace = string extends Input ? GenericStringError : Input extends `${Token.Whitespace}${infer Remainder}` ? EatWhitespace : Input; /** * Creates a new {@link ParserError} if the given input is not already a parser error. */ type CreateParserErrorIfRequired = Input extends ParserError ? Input : ParserError; /** * Parser errors. 
*/ type ParserError = { error: true; } & Message; type GenericStringError = ParserError<'Received a generic string'>; declare namespace Ast { type Node = FieldNode | StarNode | SpreadNode; type FieldNode = { type: 'field'; name: string; alias?: string; hint?: string; innerJoin?: true; castType?: string; jsonPath?: string; aggregateFunction?: Token.AggregateFunction; children?: Node[]; }; type StarNode = { type: 'star'; }; type SpreadNode = { type: 'spread'; target: FieldNode & { children: Node[]; }; }; } declare namespace Token { export type Whitespace = ' ' | '\n' | '\t'; type LowerAlphabet = 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z'; type Alphabet = LowerAlphabet | Uppercase; type Digit = '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '0'; export type Letter = Alphabet | Digit | '_'; export type AggregateFunction = 'count' | 'sum' | 'avg' | 'min' | 'max'; export {}; } //#endregion //#region src/select-query-parser/utils.d.ts type IsAny$1 = 0 extends 1 & T ? true : false; type SelectQueryError = { error: true; } & Message; type DeduplicateRelationships = T extends readonly [infer First, ...infer Rest] ? First extends Rest[number] ? DeduplicateRelationships : [First, ...DeduplicateRelationships] : T; type GetFieldNodeResultName = Field$1['alias'] extends string ? Field$1['alias'] : Field$1['aggregateFunction'] extends AggregateFunctions ? Field$1['aggregateFunction'] : Field$1['name']; type FilterRelationNodes = UnionToArray<{ [K in keyof Nodes$1]: Nodes$1[K] extends Ast.SpreadNode ? Nodes$1[K]['target'] : Nodes$1[K] extends Ast.FieldNode ? IsNonEmptyArray extends true ? Nodes$1[K] : never : never }[number]>; type ResolveRelationships = UnionToArray<{ [K in keyof Nodes$1]: Nodes$1[K] extends Ast.FieldNode ? ResolveRelationship extends infer Relation ? 
Relation extends { relation: { referencedRelation: string; foreignKeyName: string; match: string; }; from: string; } ? { referencedTable: Relation['relation']['referencedRelation']; fkName: Relation['relation']['foreignKeyName']; from: Relation['from']; match: Relation['relation']['match']; fieldName: GetFieldNodeResultName; } : Relation : never : never }>[0]; /** * Checks if a relation is implicitly referenced twice, requiring disambiguation */ type IsDoubleReference = T extends { referencedTable: infer RT; fieldName: infer FN; match: infer M; } ? M extends 'col' | 'refrel' ? U extends { referencedTable: RT; fieldName: FN; match: M; } ? true : false : false : false; /** * Compares one element with all other elements in the array to find duplicates */ type CheckDuplicates = Arr extends [infer Head, ...infer Tail] ? IsDoubleReference extends true ? Head | CheckDuplicates : CheckDuplicates : never; /** * Iterates over the elements of the array to find duplicates */ type FindDuplicatesWithinDeduplicated = Arr extends [infer Head, ...infer Tail] ? CheckDuplicates | FindDuplicatesWithinDeduplicated : never; type FindDuplicates = FindDuplicatesWithinDeduplicated>; type CheckDuplicateEmbededReference = FilterRelationNodes extends infer RelationsNodes ? RelationsNodes extends Ast.FieldNode[] ? ResolveRelationships extends infer ResolvedRels ? ResolvedRels extends unknown[] ? FindDuplicates extends infer Duplicates ? Duplicates extends never ? false : Duplicates extends { fieldName: infer FieldName; } ? FieldName extends string ? { [K in FieldName]: SelectQueryError<`table "${RelationName}" specified more than once use hinting for desambiguation`> } : false : false : false : false : false : false : false; /** * Returns a boolean representing whether there is a foreign key referencing * a given relation. */ type HasFKeyToFRel = Relationships extends [infer R] ? R extends { referencedRelation: FRelName; } ? true : false : Relationships extends [infer R, ...infer Rest] ? 
HasFKeyToFRel extends true ? true : HasFKeyToFRel : false; /** * Checks if there is more than one relation to a given foreign relation name in the Relationships. */ type HasMultipleFKeysToFRelDeduplicated = Relationships extends [infer R, ...infer Rest] ? R extends { referencedRelation: FRelName; } ? HasFKeyToFRel extends true ? true : HasMultipleFKeysToFRelDeduplicated : HasMultipleFKeysToFRelDeduplicated : false; type HasMultipleFKeysToFRel = HasMultipleFKeysToFRelDeduplicated>; type CheckRelationshipError & string, FoundRelation$1> = FoundRelation$1 extends SelectQueryError ? FoundRelation$1 : FoundRelation$1 extends { relation: { referencedRelation: infer RelatedRelationName; name: string; }; direction: 'reverse'; } ? RelatedRelationName extends string ? HasMultipleFKeysToFRel extends true ? SelectQueryError<`Could not embed because more than one relationship was found for '${RelatedRelationName}' and '${CurrentTableOrView}' you need to hint the column with ${RelatedRelationName}! ?`> : FoundRelation$1 : never : FoundRelation$1 extends { relation: { referencedRelation: infer RelatedRelationName; name: string; }; direction: 'forward'; from: infer From; } ? RelatedRelationName extends string ? From extends keyof TablesAndViews$2 & string ? HasMultipleFKeysToFRel[From]['Relationships']> extends true ? SelectQueryError<`Could not embed because more than one relationship was found for '${From}' and '${RelatedRelationName}' you need to hint the column with ${From}! ?`> : FoundRelation$1 : never : never : FoundRelation$1; /** * Resolves relationships for embedded resources and retrieves the referenced Table */ type ResolveRelationship & string> = ResolveReverseRelationship extends infer ReverseRelationship ? ReverseRelationship extends false ? CheckRelationshipError> : CheckRelationshipError : never; /** * Resolves reverse relationships (from children to parent) */ type ResolveReverseRelationship & string> = FindFieldMatchingRelationships extends infer FoundRelation ? 
FoundRelation extends never ? false : FoundRelation extends { referencedRelation: infer RelatedRelationName; } ? RelatedRelationName extends string ? RelatedRelationName extends keyof TablesAndViews$2 ? FoundRelation extends { hint: string; } ? { referencedTable: TablesAndViews$2[RelatedRelationName]; relation: FoundRelation; direction: 'reverse'; from: CurrentTableOrView; } : HasMultipleFKeysToFRel extends true ? SelectQueryError<`Could not embed because more than one relationship was found for '${RelatedRelationName}' and '${CurrentTableOrView}' you need to hint the column with ${RelatedRelationName}! ?`> : { referencedTable: TablesAndViews$2[RelatedRelationName]; relation: FoundRelation; direction: 'reverse'; from: CurrentTableOrView; } : SelectQueryError<`Relation '${RelatedRelationName}' not found in schema.`> : false : false : false; type FindMatchingTableRelationships = Relationships extends [infer R, ...infer Rest] ? Rest extends GenericRelationship[] ? R extends { referencedRelation: infer ReferencedRelation; } ? ReferencedRelation extends keyof Schema['Tables'] ? R extends { foreignKeyName: value; } ? R & { match: 'fkname'; } : R extends { referencedRelation: value; } ? R & { match: 'refrel'; } : R extends { columns: [value]; } ? R & { match: 'col'; } : FindMatchingTableRelationships : FindMatchingTableRelationships : false : false : false; type FindMatchingViewRelationships = Relationships extends [infer R, ...infer Rest] ? Rest extends GenericRelationship[] ? R extends { referencedRelation: infer ReferencedRelation; } ? ReferencedRelation extends keyof Schema['Views'] ? R extends { foreignKeyName: value; } ? R & { match: 'fkname'; } : R extends { referencedRelation: value; } ? R & { match: 'refrel'; } : R extends { columns: [value]; } ? R & { match: 'col'; } : FindMatchingViewRelationships : FindMatchingViewRelationships : false : false : false; type FindMatchingHintTableRelationships = Relationships extends [infer R, ...infer Rest] ? 
Rest extends GenericRelationship[] ? R extends { referencedRelation: infer ReferencedRelation; } ? ReferencedRelation extends name ? R extends { foreignKeyName: hint; } ? R & { match: 'fkname'; } : R extends { referencedRelation: hint; } ? R & { match: 'refrel'; } : R extends { columns: [hint]; } ? R & { match: 'col'; } : FindMatchingHintTableRelationships : FindMatchingHintTableRelationships : false : false : false; type FindMatchingHintViewRelationships = Relationships extends [infer R, ...infer Rest] ? Rest extends GenericRelationship[] ? R extends { referencedRelation: infer ReferencedRelation; } ? ReferencedRelation extends name ? R extends { foreignKeyName: hint; } ? R & { match: 'fkname'; } : R extends { referencedRelation: hint; } ? R & { match: 'refrel'; } : R extends { columns: [hint]; } ? R & { match: 'col'; } : FindMatchingHintViewRelationships : FindMatchingHintViewRelationships : false : false : false; type IsColumnsNullable, Columns extends (keyof Table['Row'])[]> = Columns extends [infer Column, ...infer Rest] ? Column extends keyof Table['Row'] ? ContainsNull extends true ? true : IsColumnsNullable : false : false; type IsRelationNullable
= IsColumnsNullable; type TableForwardRelationships = TName extends keyof TablesAndViews$2 ? UnionToArray>> extends infer R ? R extends (GenericRelationship & { from: keyof TablesAndViews$2; })[] ? R : [] : [] : []; type RecursivelyFindRelationships> = Keys extends infer K ? K extends keyof TablesAndViews$2 ? FilterRelationships[K]['Relationships'], TName, K> extends never ? RecursivelyFindRelationships> : FilterRelationships[K]['Relationships'], TName, K> | RecursivelyFindRelationships> : false : false; type FilterRelationships = R$1 extends readonly (infer Rel)[] ? Rel extends { referencedRelation: TName; } ? Rel & { from: From$1; } : never : never; type ResolveForwardRelationship & string> = FindFieldMatchingRelationships[Field$1['name']]['Relationships'], Ast.FieldNode & { name: CurrentTableOrView; hint: Field$1['hint']; }> extends infer FoundByName ? FoundByName extends GenericRelationship ? { referencedTable: TablesAndViews$2[Field$1['name']]; relation: FoundByName; direction: 'forward'; from: Field$1['name']; type: 'found-by-name'; } : FindFieldMatchingRelationships, Field$1> extends infer FoundByMatch ? FoundByMatch extends GenericRelationship & { from: keyof TablesAndViews$2; } ? { referencedTable: TablesAndViews$2[FoundByMatch['from']]; relation: FoundByMatch; direction: 'forward'; from: CurrentTableOrView; type: 'found-by-match'; } : FindJoinTableRelationship extends infer FoundByJoinTable ? FoundByJoinTable extends GenericRelationship ? { referencedTable: TablesAndViews$2[FoundByJoinTable['referencedRelation']]; relation: FoundByJoinTable & { match: 'refrel'; }; direction: 'forward'; from: CurrentTableOrView; type: 'found-by-join-table'; } : ResolveEmbededFunctionJoinTableRelationship extends infer FoundEmbededFunctionJoinTableRelation ? FoundEmbededFunctionJoinTableRelation extends GenericSetofOption ? 
{ referencedTable: TablesAndViews$2[FoundEmbededFunctionJoinTableRelation['to']]; relation: { foreignKeyName: `${Field$1['name']}_${CurrentTableOrView}_${FoundEmbededFunctionJoinTableRelation['to']}_forward`; columns: []; isOneToOne: FoundEmbededFunctionJoinTableRelation['isOneToOne'] extends true ? true : false; referencedColumns: []; referencedRelation: FoundEmbededFunctionJoinTableRelation['to']; } & { match: 'func'; isNotNullable: FoundEmbededFunctionJoinTableRelation['isNotNullable'] extends true ? true : FoundEmbededFunctionJoinTableRelation['isSetofReturn'] extends true ? false : true; isSetofReturn: FoundEmbededFunctionJoinTableRelation['isSetofReturn']; }; direction: 'forward'; from: CurrentTableOrView; type: 'found-by-embeded-function'; } : SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field$1['name']}`> : SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field$1['name']}`> : SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field$1['name']}`> : SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field$1['name']}`> : SelectQueryError<`could not find the relation between ${CurrentTableOrView} and ${Field$1['name']}`>; /** * Given a CurrentTableOrView, finds all join tables to this relation. * For example, if products and categories are linked via product_categories table: * * @example Find join table relationship * Given: * - CurrentTableView = 'products' * - FieldName = "categories" * * It should return this relationship from product_categories: * { * foreignKeyName: "product_categories_category_id_fkey", * columns: ["category_id"], * isOneToOne: false, * referencedRelation: "categories", * referencedColumns: ["id"] * } */ type ResolveJoinTableRelationship & string, FieldName$1 extends string> = { [TableName in keyof TablesAndViews$2]: DeduplicateRelationships[TableName]['Relationships']> extends readonly (infer Rel)[] ? 
Rel extends { referencedRelation: CurrentTableOrView; } ? DeduplicateRelationships[TableName]['Relationships']> extends readonly (infer OtherRel)[] ? OtherRel extends { referencedRelation: FieldName$1; } ? OtherRel : never : never : never : never }[keyof TablesAndViews$2]; type ResolveEmbededFunctionJoinTableRelationship & string, FieldName$1 extends string> = FindMatchingFunctionBySetofFrom extends infer Fn ? Fn extends GenericFunction ? Fn['SetofOptions'] : false : false; type FindJoinTableRelationship & string, FieldName$1 extends string> = ResolveJoinTableRelationship extends infer Result ? [Result] extends [never] ? false : Result : never; /** * Finds a matching relationship based on the FieldNode's name and optional hint. */ type FindFieldMatchingRelationships = Field$1 extends { hint: string; } ? FindMatchingHintTableRelationships extends GenericRelationship ? FindMatchingHintTableRelationships & { branch: 'found-in-table-via-hint'; hint: Field$1['hint']; } : FindMatchingHintViewRelationships extends GenericRelationship ? FindMatchingHintViewRelationships & { branch: 'found-in-view-via-hint'; hint: Field$1['hint']; } : SelectQueryError<'Failed to find matching relation via hint'> : FindMatchingTableRelationships extends GenericRelationship ? FindMatchingTableRelationships & { branch: 'found-in-table-via-name'; name: Field$1['name']; } : FindMatchingViewRelationships extends GenericRelationship ? FindMatchingViewRelationships & { branch: 'found-in-view-via-name'; name: Field$1['name']; } : SelectQueryError<'Failed to find matching relation via name'>; type JsonPathToAccessor = Path extends `${infer P1}->${infer P2}` ? P2 extends `>${infer Rest}` ? JsonPathToAccessor<`${P1}.${Rest}`> : P2 extends string ? JsonPathToAccessor<`${P1}.${P2}`> : Path : Path extends `>${infer Rest}` ? JsonPathToAccessor : Path extends `${infer P1}::${infer _}` ? JsonPathToAccessor : Path extends `${infer P1}${')' | ','}${infer _}` ? P1 : Path; type JsonPathToType = Path extends '' ? 
T : ContainsNull extends true ? JsonPathToType, Path> : Path extends `${infer Key}.${infer Rest}` ? Key extends keyof T ? JsonPathToType : never : Path extends keyof T ? T[Path] : never; type IsStringUnion = string extends T ? false : T extends string ? [T] extends [never] ? false : true : false; type MatchingFunctionBySetofFrom = Fn$1['SetofOptions'] extends GenericSetofOption ? TableName$1 extends Fn$1['SetofOptions']['from'] ? Fn$1 : never : false; type FindMatchingFunctionBySetofFrom = FnUnion extends infer Fn extends GenericFunction ? MatchingFunctionBySetofFrom : false; type ComputedField, FieldName$1 extends keyof TablesAndViews$2[RelationName]['Row']> = FieldName$1 extends keyof Schema['Functions'] ? [Schema['Functions'][FieldName$1]['Args']] extends [never] ? never : Schema['Functions'][FieldName$1] extends { Args: { '': TablesAndViews$2[RelationName]['Row']; }; Returns: any; } ? FieldName$1 : never : never; type GetComputedFields> = { [K in keyof TablesAndViews$2[RelationName]['Row']]: ComputedField }[keyof TablesAndViews$2[RelationName]['Row']]; //#endregion //#region src/types/types.d.ts /** * Response format * * {@link https://github.com/supabase/supabase-js/issues/32} */ interface PostgrestResponseBase { status: number; statusText: string; } interface PostgrestResponseSuccess extends PostgrestResponseBase { error: null; data: T; count: number | null; } interface PostgrestResponseFailure extends PostgrestResponseBase { error: PostgrestError; data: null; count: null; } type PostgrestSingleResponse = PostgrestResponseSuccess | PostgrestResponseFailure; type PostgrestMaybeSingleResponse = PostgrestSingleResponse; type PostgrestResponse = PostgrestSingleResponse; type Prettify = { [K in keyof T]: T[K] } & {}; type SimplifyDeep = ConditionalSimplifyDeep | Map, object>; type ConditionalSimplifyDeep = Type extends ExcludeType ? Type : Type extends IncludeType ? 
{ [TypeKey in keyof Type]: ConditionalSimplifyDeep } : Type; type NonRecursiveType = BuiltIns | Function | (new (...arguments_: any[]) => unknown); type BuiltIns = Primitive | void | Date | RegExp; type Primitive = null | undefined | string | number | boolean | symbol | bigint; type IsValidResultOverride = Result$1 extends any[] ? NewResult extends any[] ? true : ErrorResult : NewResult extends any[] ? ErrorNewResult : true; /** * Utility type to check if array types match between Result and NewResult. * Returns either the valid NewResult type or an error message type. */ type CheckMatchingArrayTypes = Result$1 extends SelectQueryError ? NewResult : IsValidResultOverride> or .returns> (deprecated) for array results or .single() to convert the result to a single object'; }, { Error: 'Type mismatch: Cannot cast single object to array type. Remove Array wrapper from return type or make sure you are not using .single() up in the calling chain'; }> extends infer ValidationResult ? ValidationResult extends true ? ContainsNull extends true ? NewResult | null : NewResult : ValidationResult : never; type Simplify = T extends object ? { [K in keyof T]: T[K] } : T; type ExplicitKeys = { [K in keyof T]: string extends K ? never : K }[keyof T]; type MergeExplicit = { [K in ExplicitKeys | ExplicitKeys]: K extends keyof New ? K extends keyof Row ? Row[K] extends SelectQueryError ? New[K] : New[K] extends any[] ? Row[K] extends any[] ? Array, NonNullable>>> : New[K] : IsPlainObject> extends true ? IsPlainObject> extends true ? ContainsNull extends true ? // If the override wants to preserve optionality Simplify, NonNullable>> | null : Simplify>> : New[K] : New[K] : New[K] : K extends keyof Row ? Row[K] : never }; type MergeDeep = Simplify & (string extends keyof Row ? { [K: string]: Row[string]; } : {})>; type IsPlainObject = T extends any[] ? false : T extends object ? true : false; type MergePartialResult = Options extends { merge: true; } ? Result$1 extends any[] ? 
NewResult extends any[] ? Array>> : never : Simplify> : NewResult; //#endregion //#region src/PostgrestBuilder.d.ts declare abstract class PostgrestBuilder implements PromiseLike : PostgrestSingleResponse> { protected method: 'GET' | 'HEAD' | 'POST' | 'PATCH' | 'DELETE'; protected url: URL; protected headers: Headers; protected schema?: string; protected body?: unknown; protected shouldThrowOnError: boolean; protected signal?: AbortSignal; protected fetch: Fetch; protected isMaybeSingle: boolean; protected urlLengthLimit: number; /** * Creates a builder configured for a specific PostgREST request. * * @example * ```ts * import { PostgrestQueryBuilder } from '@supabase/postgrest-js' * * const builder = new PostgrestQueryBuilder( * new URL('https://xyzcompany.supabase.co/rest/v1/users'), * { headers: new Headers({ apikey: 'public-anon-key' }) } * ) * ``` * * @category Database * * @example Creating a Postgrest query builder * ```ts * import { PostgrestQueryBuilder } from '@supabase/postgrest-js' * * const builder = new PostgrestQueryBuilder( * new URL('https://xyzcompany.supabase.co/rest/v1/users'), * { headers: new Headers({ apikey: 'public-anon-key' }) } * ) * ``` */ constructor(builder: { method: 'GET' | 'HEAD' | 'POST' | 'PATCH' | 'DELETE'; url: URL; headers: HeadersInit; schema?: string; body?: unknown; shouldThrowOnError?: boolean; signal?: AbortSignal; fetch?: Fetch; isMaybeSingle?: boolean; urlLengthLimit?: number; }); /** * If there's an error with the query, throwOnError will reject the promise by * throwing the error instead of returning it as part of a successful response. * * {@link https://github.com/supabase/supabase-js/issues/92} * * @category Database */ throwOnError(): this & PostgrestBuilder; /** * Set an HTTP header for the request. * * @category Database */ setHeader(name: string, value: string): this; /** * * @category Database */ then : PostgrestSingleResponse), TResult2 = never>(onfulfilled?: ((value: ThrowOnError extends true ? 
PostgrestResponseSuccess : PostgrestSingleResponse) => TResult1 | PromiseLike) | undefined | null, onrejected?: ((reason: any) => TResult2 | PromiseLike) | undefined | null): PromiseLike; /** * Override the type of the returned `data`. * * @typeParam NewResult - The new result type to override with * @deprecated Use overrideTypes() method at the end of your call chain instead * * @category Database */ returns(): PostgrestBuilder, ThrowOnError>; /** * Override the type of the returned `data` field in the response. * * @typeParam NewResult - The new type to cast the response data to * @typeParam Options - Optional type configuration (defaults to { merge: true }) * @typeParam Options.merge - When true, merges the new type with existing return type. When false, replaces the existing types entirely (defaults to true) * @example * ```typescript * // Merge with existing types (default behavior) * const query = supabase * .from('users') * .select() * .overrideTypes<{ custom_field: string }>() * * // Replace existing types completely * const replaceQuery = supabase * .from('users') * .select() * .overrideTypes<{ id: number; name: string }, { merge: false }>() * ``` * @returns A PostgrestBuilder instance with the new type * * @category Database * * @example Complete Override type of successful response * ```ts * const { data } = await supabase * .from('countries') * .select() * .overrideTypes, { merge: false }>() * ``` * * @exampleResponse Complete Override type of successful response * ```ts * let x: typeof data // MyType[] * ``` * * @example Complete Override type of object response * ```ts * const { data } = await supabase * .from('countries') * .select() * .maybeSingle() * .overrideTypes() * ``` * * @exampleResponse Complete Override type of object response * ```ts * let x: typeof data // MyType | null * ``` * * @example Partial Override type of successful response * ```ts * const { data } = await supabase * .from('countries') * .select() * .overrideTypes>() * ``` * * 
@exampleResponse Partial Override type of successful response * ```ts * let x: typeof data // Array * ``` * * @example Partial Override type of object response * ```ts * const { data } = await supabase * .from('countries') * .select() * .maybeSingle() * .overrideTypes<{ status: "A" | "B" }>() * ``` * * @exampleResponse Partial Override type of object response * ```ts * let x: typeof data // CountryRowProperties & { status: "A" | "B" } | null * ``` * * @example Example 5 * ```typescript * // Merge with existing types (default behavior) * const query = supabase * .from('users') * .select() * .overrideTypes<{ custom_field: string }>() * * // Replace existing types completely * const replaceQuery = supabase * .from('users') * .select() * .overrideTypes<{ id: number; name: string }, { merge: false }>() * ``` */ overrideTypes(): PostgrestBuilder extends true ? ContainsNull extends true ? MergePartialResult, Options> | null : MergePartialResult : CheckMatchingArrayTypes, ThrowOnError>; } //#endregion //#region src/types/feature-flags.d.ts type IsPostgrest13 = PostgrestVersion extends `13${string}` ? true : false; type IsPostgrest14 = PostgrestVersion extends `14${string}` ? true : false; type IsPostgrestVersionGreaterThan12 = IsPostgrest13 extends true ? true : IsPostgrest14 extends true ? true : false; type MaxAffectedEnabled = IsPostgrestVersionGreaterThan12 extends true ? true : false; type SpreadOnManyEnabled = IsPostgrestVersionGreaterThan12 extends true ? true : false; //#endregion //#region src/select-query-parser/result.d.ts /** * Main entry point for constructing the result type of a PostgREST query. * * @param Schema - Database schema. * @param Row - The type of a row in the current table. * @param RelationName - The name of the current table or view. * @param Relationships - Relationships of the current table. * @param Query - The select query string literal to parse. 
*/ type GetResult, RelationName, Relationships, Query extends string, ClientOptions extends ClientServerOptions> = IsAny$1 extends true ? ParseQuery extends infer ParsedQuery ? ParsedQuery extends Ast.Node[] ? RelationName extends string ? ProcessNodesWithoutSchema : any : ParsedQuery : any : Relationships extends null ? ParseQuery extends infer ParsedQuery ? ParsedQuery extends Ast.Node[] ? RPCCallNodes : ParsedQuery : Row : ParseQuery extends infer ParsedQuery ? ParsedQuery extends Ast.Node[] ? RelationName extends string ? Relationships extends GenericRelationship[] ? ProcessNodes : SelectQueryError<'Invalid Relationships cannot infer result type'> : SelectQueryError<'Invalid RelationName cannot infer result type'> : ParsedQuery : never; type ProcessSimpleFieldWithoutSchema = Field$1['aggregateFunction'] extends AggregateFunctions ? { [K in GetFieldNodeResultName]: Field$1['castType'] extends PostgreSQLTypes ? TypeScriptTypes : number } : { [K in GetFieldNodeResultName]: Field$1['castType'] extends PostgreSQLTypes ? TypeScriptTypes : any }; type ProcessFieldNodeWithoutSchema = IsNonEmptyArray extends true ? { [K in GetFieldNodeResultName]: Node$1['children'] extends Ast.Node[] ? ProcessNodesWithoutSchema[] : ProcessSimpleFieldWithoutSchema } : ProcessSimpleFieldWithoutSchema; /** * Processes a single Node without schema and returns the resulting TypeScript type. */ type ProcessNodeWithoutSchema = Node$1 extends Ast.StarNode ? any : Node$1 extends Ast.SpreadNode ? Node$1['target']['children'] extends Ast.StarNode[] ? any : Node$1['target']['children'] extends Ast.FieldNode[] ? { [P in Node$1['target']['children'][number] as GetFieldNodeResultName

]: P['castType'] extends PostgreSQLTypes ? TypeScriptTypes : any } : any : Node$1 extends Ast.FieldNode ? ProcessFieldNodeWithoutSchema : any; /** * Processes nodes when Schema is any, providing basic type inference */ type ProcessNodesWithoutSchema = {}> = Nodes$1 extends [infer FirstNode, ...infer RestNodes] ? FirstNode extends Ast.Node ? RestNodes extends Ast.Node[] ? ProcessNodeWithoutSchema extends infer FieldResult ? FieldResult extends Record ? ProcessNodesWithoutSchema : FieldResult : any : any : any : Prettify; /** * Processes a single Node from a select chained after a rpc call * * @param Row - The type of a row in the current table. * @param RelationName - The name of the current rpc function * @param NodeType - The Node to process. */ type ProcessRPCNode, RelationName extends string, NodeType extends Ast.Node> = NodeType['type'] extends Ast.StarNode['type'] ? Row : NodeType['type'] extends Ast.FieldNode['type'] ? ProcessSimpleField> : SelectQueryError<'RPC Unsupported node type.'>; /** * Process select call that can be chained after an rpc call */ type RPCCallNodes, Acc extends Record = {}> = Nodes$1 extends [infer FirstNode, ...infer RestNodes] ? FirstNode extends Ast.Node ? RestNodes extends Ast.Node[] ? ProcessRPCNode extends infer FieldResult ? FieldResult extends Record ? RPCCallNodes : FieldResult extends SelectQueryError ? SelectQueryError : SelectQueryError<'Could not retrieve a valid record or error value'> : SelectQueryError<'Processing node failed.'> : SelectQueryError<'Invalid rest nodes array in RPC call'> : SelectQueryError<'Invalid first node in RPC call'> : Prettify; /** * Recursively processes an array of Nodes and accumulates the resulting TypeScript type. * * @param Schema - Database schema. * @param Row - The type of a row in the current table. * @param RelationName - The name of the current table or view. * @param Relationships - Relationships of the current table. * @param Nodes - An array of AST nodes to process. 
* @param Acc - Accumulator for the constructed type. */ type ProcessNodes, RelationName extends string, Relationships extends GenericRelationship[], Nodes$1 extends Ast.Node[], Acc extends Record = {}> = CheckDuplicateEmbededReference extends false ? Nodes$1 extends [infer FirstNode, ...infer RestNodes] ? FirstNode extends Ast.Node ? RestNodes extends Ast.Node[] ? ProcessNode extends infer FieldResult ? FieldResult extends Record ? ProcessNodes : FieldResult extends SelectQueryError ? SelectQueryError : SelectQueryError<'Could not retrieve a valid record or error value'> : SelectQueryError<'Processing node failed.'> : SelectQueryError<'Invalid rest nodes array type in ProcessNodes'> : SelectQueryError<'Invalid first node type in ProcessNodes'> : Prettify : Prettify>; /** * Processes a single Node and returns the resulting TypeScript type. * * @param Schema - Database schema. * @param Row - The type of a row in the current table. * @param RelationName - The name of the current table or view. * @param Relationships - Relationships of the current table. * @param NodeType - The Node to process. */ type ProcessNode, RelationName extends string, Relationships extends GenericRelationship[], NodeType extends Ast.Node> = NodeType['type'] extends Ast.StarNode['type'] ? GetComputedFields extends never ? Row : Omit> : NodeType['type'] extends Ast.SpreadNode['type'] ? ProcessSpreadNode> : NodeType['type'] extends Ast.FieldNode['type'] ? ProcessFieldNode> : SelectQueryError<'Unsupported node type.'>; /** * Processes a FieldNode and returns the resulting TypeScript type. * * @param Schema - Database schema. * @param Row - The type of a row in the current table. * @param RelationName - The name of the current table or view. * @param Relationships - Relationships of the current table. * @param Field - The FieldNode to process. 
*/ type ProcessFieldNode, RelationName extends string, Relationships extends GenericRelationship[], Field$1 extends Ast.FieldNode> = Field$1['children'] extends [] ? {} : IsNonEmptyArray extends true ? ProcessEmbeddedResource : ProcessSimpleField; type ResolveJsonPathType = Path extends string ? JsonPathToType extends never ? TypeScriptTypes : JsonPathToType extends infer PathResult ? PathResult extends string ? PathResult : IsStringUnion extends true ? PathResult : CastType$1 extends 'json' ? PathResult : TypeScriptTypes : TypeScriptTypes : TypeScriptTypes; /** * Processes a simple field (without embedded resources). * * @param Row - The type of a row in the current table. * @param RelationName - The name of the current table or view. * @param Field - The FieldNode to process. */ type ProcessSimpleField, RelationName extends string, Field$1 extends Ast.FieldNode> = Field$1['name'] extends keyof Row | 'count' ? Field$1['aggregateFunction'] extends AggregateFunctions ? { [K in GetFieldNodeResultName]: Field$1['castType'] extends PostgreSQLTypes ? TypeScriptTypes : number } : { [K in GetFieldNodeResultName]: Field$1['castType'] extends PostgreSQLTypes ? ResolveJsonPathType : Row[Field$1['name']] } : SelectQueryError<`column '${Field$1['name']}' does not exist on '${RelationName}'.`>; /** * Processes an embedded resource (relation). * * @param Schema - Database schema. * @param Row - The type of a row in the current table. * @param RelationName - The name of the current table or view. * @param Relationships - Relationships of the current table. * @param Field - The FieldNode to process. */ type ProcessEmbeddedResource & string> = ResolveRelationship extends infer Resolved ? Resolved extends { referencedTable: Pick; relation: GenericRelationship & { match: 'refrel' | 'col' | 'fkname' | 'func'; }; direction: string; } ? 
ProcessEmbeddedResourceResult : { [K in GetFieldNodeResultName]: Resolved } : { [K in GetFieldNodeResultName]: SelectQueryError<'Failed to resolve relationship.'> & string }; /** * Helper type to process the result of an embedded resource. */ type ProcessEmbeddedResourceResult; relation: GenericRelationship & { match: 'refrel' | 'col' | 'fkname' | 'func'; isNotNullable?: boolean; referencedRelation: string; isSetofReturn?: boolean; }; direction: string; }, Field$1 extends Ast.FieldNode, CurrentTableOrView extends keyof TablesAndViews$2> = ProcessNodes extends Ast.Node[] ? Exclude : []> extends infer ProcessedChildren ? { [K in GetFieldNodeResultName]: Resolved$1['direction'] extends 'forward' ? Field$1 extends { innerJoin: true; } ? Resolved$1['relation']['isOneToOne'] extends true ? ProcessedChildren : ProcessedChildren[] : Resolved$1['relation']['isOneToOne'] extends true ? Resolved$1['relation']['match'] extends 'func' ? Resolved$1['relation']['isNotNullable'] extends true ? Resolved$1['relation']['isSetofReturn'] extends true ? ProcessedChildren : { [P in keyof ProcessedChildren]: ProcessedChildren[P] | null } : ProcessedChildren | null : ProcessedChildren | null : ProcessedChildren[] : Resolved$1['relation']['referencedRelation'] extends CurrentTableOrView ? Resolved$1['relation']['match'] extends 'col' ? IsRelationNullable[CurrentTableOrView], Resolved$1['relation']> extends true ? ProcessedChildren | null : ProcessedChildren : ProcessedChildren[] : IsRelationNullable[CurrentTableOrView], Resolved$1['relation']> extends true ? Field$1 extends { innerJoin: true; } ? ProcessedChildren : ProcessedChildren | null : ProcessedChildren } : { [K in GetFieldNodeResultName]: SelectQueryError<'Failed to process embedded resource nodes.'> & string }; /** * Processes a SpreadNode by processing its target node. * * @param Schema - Database schema. * @param Row - The type of a row in the current table. * @param RelationName - The name of the current table or view. 
* @param Relationships - Relationships of the current table. * @param Spread - The SpreadNode to process. */ type ProcessSpreadNode, RelationName extends string, Relationships extends GenericRelationship[], Spread extends Ast.SpreadNode> = ProcessNode extends infer Result ? Result extends SelectQueryError ? SelectQueryError : ExtractFirstProperty extends unknown[] ? SpreadOnManyEnabled extends true ? ProcessManyToManySpreadNodeResult : { [K in Spread['target']['name']]: SelectQueryError<`"${RelationName}" and "${Spread['target']['name']}" do not form a many-to-one or one-to-one relationship spread not possible`> } : ProcessSpreadNodeResult : never; /** * Helper type to process the result of a many-to-many spread node. * Converts all fields in the spread object into arrays. */ type ProcessManyToManySpreadNodeResult = Result$1 extends Record | null> ? Result$1 : ExtractFirstProperty extends infer SpreadedObject ? SpreadedObject extends Array> ? { [K in keyof SpreadedObject[number]]: Array } : SelectQueryError<'An error occurred spreading the many-to-many object'> : SelectQueryError<'An error occurred spreading the many-to-many object'>; /** * Helper type to process the result of a spread node. */ type ProcessSpreadNodeResult = Result$1 extends Record | null> ? Result$1 : ExtractFirstProperty extends infer SpreadedObject ? ContainsNull extends true ? Exclude<{ [K in keyof SpreadedObject]: SpreadedObject[K] | null }, null> : Exclude<{ [K in keyof SpreadedObject]: SpreadedObject[K] }, null> : SelectQueryError<'An error occurred spreading the object'>; //#endregion //#region src/PostgrestTransformBuilder.d.ts declare class PostgrestTransformBuilder, Result$1, RelationName = unknown, Relationships = unknown, Method = unknown> extends PostgrestBuilder { /** * Perform a SELECT on the query result. * * By default, `.insert()`, `.update()`, `.upsert()`, and `.delete()` do not * return modified rows. By calling this method, modified rows are returned in * `data`. 
* * @param columns - The columns to retrieve, separated by commas * * @category Database * * @example With `upsert()` * ```ts * const { data, error } = await supabase * .from('characters') * .upsert({ id: 1, name: 'Han Solo' }) * .select() * ``` * * @exampleSql With `upsert()` * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Han'); * ``` * * @exampleResponse With `upsert()` * ```json * { * "data": [ * { * "id": 1, * "name": "Han Solo" * } * ], * "status": 201, * "statusText": "Created" * } * ``` */ select>(columns?: Query): PostgrestFilterBuilder; order(column: ColumnName, options?: { ascending?: boolean; nullsFirst?: boolean; referencedTable?: undefined; }): this; order(column: string, options?: { ascending?: boolean; nullsFirst?: boolean; referencedTable?: string; }): this; /** * @deprecated Use `options.referencedTable` instead of `options.foreignTable` */ order(column: ColumnName, options?: { ascending?: boolean; nullsFirst?: boolean; foreignTable?: undefined; }): this; /** * @deprecated Use `options.referencedTable` instead of `options.foreignTable` */ order(column: string, options?: { ascending?: boolean; nullsFirst?: boolean; foreignTable?: string; }): this; /** * Limit the query result by `count`. 
* * @param count - The maximum number of rows to return * @param options - Named parameters * @param options.referencedTable - Set this to limit rows of referenced * tables instead of the parent table * @param options.foreignTable - Deprecated, use `options.referencedTable` * instead * * @category Database * * @example With `select()` * ```ts * const { data, error } = await supabase * .from('characters') * .select('name') * .limit(1) * ``` * * @exampleSql With `select()` * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse With `select()` * ```json * { * "data": [ * { * "name": "Luke" * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @example On a referenced table * ```ts * const { data, error } = await supabase * .from('orchestral_sections') * .select(` * name, * instruments ( * name * ) * `) * .limit(1, { referencedTable: 'instruments' }) * ``` * * @exampleSql On a referenced table * ```sql * create table * orchestral_sections (id int8 primary key, name text); * create table * instruments ( * id int8 primary key, * section_id int8 not null references orchestral_sections, * name text * ); * * insert into * orchestral_sections (id, name) * values * (1, 'strings'); * insert into * instruments (id, section_id, name) * values * (1, 1, 'harp'), * (2, 1, 'violin'); * ``` * * @exampleResponse On a referenced table * ```json * { * "data": [ * { * "name": "strings", * "instruments": [ * { * "name": "violin" * } * ] * } * ], * "status": 200, * "statusText": "OK" * } * ``` */ limit(count: number, { foreignTable, referencedTable }?: { foreignTable?: string; referencedTable?: string; }): this; /** * Limit the query result by starting at an offset `from` and ending at the offset `to`. * Only records within this range are returned. * This respects the query order and if there is no order clause the range could behave unexpectedly. 
* The `from` and `to` values are 0-based and inclusive: `range(1, 3)` will include the second, third * and fourth rows of the query. * * @param from - The starting index from which to limit the result * @param to - The last index to which to limit the result * @param options - Named parameters * @param options.referencedTable - Set this to limit rows of referenced * tables instead of the parent table * @param options.foreignTable - Deprecated, use `options.referencedTable` * instead * * @category Database * * @example With `select()` * ```ts * const { data, error } = await supabase * .from('characters') * .select('name') * .range(0, 1) * ``` * * @exampleSql With `select()` * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse With `select()` * ```json * { * "data": [ * { * "name": "Luke" * }, * { * "name": "Leia" * } * ], * "status": 200, * "statusText": "OK" * } * ``` */ range(from: number, to: number, { foreignTable, referencedTable }?: { foreignTable?: string; referencedTable?: string; }): this; /** * Set the AbortSignal for the fetch request. * * @param signal - The AbortSignal to use for the fetch request * * @category Database * * @remarks * You can use this to set a timeout for the request. * * @exampleDescription Aborting requests in-flight * You can use an [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) to abort requests. * Note that `status` and `statusText` don't mean anything for aborted requests as the request wasn't fulfilled. 
* * @example Aborting requests in-flight * ```ts * const ac = new AbortController() * * const { data, error } = await supabase * .from('very_big_table') * .select() * .abortSignal(ac.signal) * * // Abort the request after 100 ms * setTimeout(() => ac.abort(), 100) * ``` * * @exampleResponse Aborting requests in-flight * ```json * { * "error": { * "message": "AbortError: The user aborted a request.", * "details": "", * "hint": "The request was aborted locally via the provided AbortSignal.", * "code": "" * }, * "status": 0, * "statusText": "" * } * * ``` * * @example Set a timeout * ```ts * const { data, error } = await supabase * .from('very_big_table') * .select() * .abortSignal(AbortSignal.timeout(1000 /* ms *\/)) * ``` * * @exampleResponse Set a timeout * ```json * { * "error": { * "message": "FetchError: The user aborted a request.", * "details": "", * "hint": "", * "code": "" * }, * "status": 400, * "statusText": "Bad Request" * } * * ``` */ abortSignal(signal: AbortSignal): this; /** * Return `data` as a single object instead of an array of objects. * * Query result must be one row (e.g. using `.limit(1)`), otherwise this * returns an error. * * @category Database * * @example With `select()` * ```ts * const { data, error } = await supabase * .from('characters') * .select('name') * .limit(1) * .single() * ``` * * @exampleSql With `select()` * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse With `select()` * ```json * { * "data": { * "name": "Luke" * }, * "status": 200, * "statusText": "OK" * } * ``` */ single(): PostgrestBuilder; /** * Return `data` as a single object instead of an array of objects. * * Query result must be zero or one row (e.g. using `.limit(1)`), otherwise * this returns an error. 
* * @category Database * * @example With `select()` * ```ts * const { data, error } = await supabase * .from('characters') * .select() * .eq('name', 'Katniss') * .maybeSingle() * ``` * * @exampleSql With `select()` * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse With `select()` * ```json * { * "status": 200, * "statusText": "OK" * } * ``` */ maybeSingle(): PostgrestBuilder; /** * Return `data` as a string in CSV format. * * @category Database * * @exampleDescription Return data as CSV * By default, the data is returned in JSON format, but can also be returned as Comma Separated Values. * * @example Return data as CSV * ```ts * const { data, error } = await supabase * .from('characters') * .select() * .csv() * ``` * * @exampleSql Return data as CSV * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse Return data as CSV * ```json * { * "data": "id,name\n1,Luke\n2,Leia\n3,Han", * "status": 200, * "statusText": "OK" * } * ``` */ csv(): PostgrestBuilder; /** * Return `data` as an object in [GeoJSON](https://geojson.org) format. * * @category Database */ geojson(): PostgrestBuilder>; /** * Return `data` as the EXPLAIN plan for the query. * * You need to enable the * [db_plan_enabled](https://supabase.com/docs/guides/database/debugging-performance#enabling-explain) * setting before using this method. 
* * @param options - Named parameters * * @param options.analyze - If `true`, the query will be executed and the * actual run time will be returned * * @param options.verbose - If `true`, the query identifier will be returned * and `data` will include the output columns of the query * * @param options.settings - If `true`, include information on configuration * parameters that affect query planning * * @param options.buffers - If `true`, include information on buffer usage * * @param options.wal - If `true`, include information on WAL record generation * * @param options.format - The format of the output, can be `"text"` (default) * or `"json"` * * @category Database * * @exampleDescription Get the execution plan * By default, the data is returned in TEXT format, but can also be returned as JSON by using the `format` parameter. * * @example Get the execution plan * ```ts * const { data, error } = await supabase * .from('characters') * .select() * .explain() * ``` * * @exampleSql Get the execution plan * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse Get the execution plan * ```js * Aggregate (cost=33.34..33.36 rows=1 width=112) * -> Limit (cost=0.00..18.33 rows=1000 width=40) * -> Seq Scan on characters (cost=0.00..22.00 rows=1200 width=40) * ``` * * @exampleDescription Get the execution plan with analyze and verbose * By default, the data is returned in TEXT format, but can also be returned as JSON by using the `format` parameter. 
* * @example Get the execution plan with analyze and verbose * ```ts * const { data, error } = await supabase * .from('characters') * .select() * .explain({analyze:true,verbose:true}) * ``` * * @exampleSql Get the execution plan with analyze and verbose * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse Get the execution plan with analyze and verbose * ```js * Aggregate (cost=33.34..33.36 rows=1 width=112) (actual time=0.041..0.041 rows=1 loops=1) * Output: NULL::bigint, count(ROW(characters.id, characters.name)), COALESCE(json_agg(ROW(characters.id, characters.name)), '[]'::json), NULLIF(current_setting('response.headers'::text, true), ''::text), NULLIF(current_setting('response.status'::text, true), ''::text) * -> Limit (cost=0.00..18.33 rows=1000 width=40) (actual time=0.005..0.006 rows=3 loops=1) * Output: characters.id, characters.name * -> Seq Scan on public.characters (cost=0.00..22.00 rows=1200 width=40) (actual time=0.004..0.005 rows=3 loops=1) * Output: characters.id, characters.name * Query Identifier: -4730654291623321173 * Planning Time: 0.407 ms * Execution Time: 0.119 ms * ``` */ explain({ analyze, verbose, settings, buffers, wal, format }?: { analyze?: boolean; verbose?: boolean; settings?: boolean; buffers?: boolean; wal?: boolean; format?: 'json' | 'text'; }): PostgrestBuilder[], false> | PostgrestBuilder; /** * Rollback the query. * * `data` will still be returned, but the query is not committed. * * @category Database */ rollback(): this; /** * Override the type of the returned `data`. 
* * @typeParam NewResult - The new result type to override with * @deprecated Use overrideTypes() method at the end of your call chain instead * * @category Database * * @remarks * - Deprecated: use overrideTypes method instead * * @example Override type of successful response * ```ts * const { data } = await supabase * .from('countries') * .select() * .returns>() * ``` * * @exampleResponse Override type of successful response * ```js * let x: typeof data // MyType[] * ``` * * @example Override type of object response * ```ts * const { data } = await supabase * .from('countries') * .select() * .maybeSingle() * .returns() * ``` * * @exampleResponse Override type of object response * ```js * let x: typeof data // MyType | null * ``` */ returns(): PostgrestTransformBuilder, RelationName, Relationships, Method>; /** * Set the maximum number of rows that can be affected by the query. * Only available in PostgREST v13+ and only works with PATCH and DELETE methods. * * @param value - The maximum number of rows that can be affected * * @category Database */ maxAffected(value: number): MaxAffectedEnabled extends true ? Method extends 'PATCH' | 'DELETE' | 'RPC' ? this : InvalidMethodError<'maxAffected method only available on update or delete'> : InvalidMethodError<'maxAffected method only available on postgrest 13+'>; } //#endregion //#region src/PostgrestFilterBuilder.d.ts type FilterOperator = 'eq' | 'neq' | 'gt' | 'gte' | 'lt' | 'lte' | 'like' | 'ilike' | 'is' | 'isdistinct' | 'in' | 'cs' | 'cd' | 'sl' | 'sr' | 'nxl' | 'nxr' | 'adj' | 'ov' | 'fts' | 'plfts' | 'phfts' | 'wfts' | 'match' | 'imatch'; type IsStringOperator = Path extends `${string}->>${string}` ? true : false; type ResolveFilterValue, ColumnName extends string> = ColumnName extends `${infer RelationshipTable}.${infer Remainder}` ? Remainder extends `${infer _}.${infer _}` ? ResolveFilterValue : ResolveFilterRelationshipValue : ColumnName extends keyof Row ? Row[ColumnName] : IsStringOperator extends true ? 
string : JsonPathToType> extends infer JsonPathValue ? JsonPathValue extends never ? never : JsonPathValue : never; type ResolveFilterRelationshipValue = Schema['Tables'] & Schema['Views'] extends infer TablesAndViews ? RelationshipTable$1 extends keyof TablesAndViews ? 'Row' extends keyof TablesAndViews[RelationshipTable$1] ? RelationshipColumn extends keyof TablesAndViews[RelationshipTable$1]['Row'] ? TablesAndViews[RelationshipTable$1]['Row'][RelationshipColumn] : unknown : unknown : unknown : never; type InvalidMethodError = { Error: S; }; declare class PostgrestFilterBuilder, Result$1, RelationName = unknown, Relationships = unknown, Method = unknown> extends PostgrestTransformBuilder { /** * Match only rows where `column` is equal to `value`. * * To check if the value of `column` is NULL, you should use `.is()` instead. * * @param column - The column to filter on * @param value - The value to filter with * * @category Database * * @example With `select()` * ```ts * const { data, error } = await supabase * .from('characters') * .select() * .eq('name', 'Leia') * ``` * * @exampleSql With `select()` * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse With `select()` * ```json * { * "data": [ * { * "id": 2, * "name": "Leia" * } * ], * "status": 200, * "statusText": "OK" * } * ``` */ eq(column: ColumnName extends keyof Row ? ColumnName : ColumnName extends `${string}.${string}` | `${string}->${string}` ? ColumnName : string extends ColumnName ? string : keyof Row, value: ResolveFilterValue extends never ? NonNullable : ResolveFilterValue extends infer ResolvedFilterValue ? NonNullable : never): this; /** * Match only rows where `column` is not equal to `value`. 
* @param column - The column to filter on
 * @param value - The value to filter with
 *
 * @category Database
 *
 * @example With `select()`
 * ```ts
 * const { data, error } = await supabase
 * .from('characters')
 * .select()
 * .neq('name', 'Leia')
 * ```
 *
 * @exampleSql With `select()`
 * ```sql
 * create table
 * characters (id int8 primary key, name text);
 *
 * insert into
 * characters (id, name)
 * values
 * (1, 'Luke'),
 * (2, 'Leia'),
 * (3, 'Han');
 * ```
 *
 * @exampleResponse With `select()`
 * ```json
 * {
 * "data": [
 * {
 * "id": 1,
 * "name": "Luke"
 * },
 * {
 * "id": 3,
 * "name": "Han"
 * }
 * ],
 * "status": 200,
 * "statusText": "OK"
 * }
 * ```
 */
neq(column: ColumnName extends keyof Row ? ColumnName : ColumnName extends `${string}.${string}` | `${string}->${string}` ? ColumnName : string extends ColumnName ? string : keyof Row, value: ResolveFilterValue extends never ? unknown : ResolveFilterValue extends infer Resolved ? Resolved : never): this;
/** Match only rows where `column` is greater than `value`. */
gt(column: ColumnName, value: Row[ColumnName]): this;
gt(column: string, value: unknown): this;
/** Match only rows where `column` is greater than or equal to `value`. */
gte(column: ColumnName, value: Row[ColumnName]): this;
gte(column: string, value: unknown): this;
/** Match only rows where `column` is less than `value`. */
lt(column: ColumnName, value: Row[ColumnName]): this;
lt(column: string, value: unknown): this;
/** Match only rows where `column` is less than or equal to `value`. */
lte(column: ColumnName, value: Row[ColumnName]): this;
lte(column: string, value: unknown): this;
/** Match only rows where `column` matches `pattern` case-sensitively (SQL `LIKE`). */
like(column: ColumnName, pattern: string): this;
like(column: string, pattern: string): this;
/** Match only rows where `column` matches all of `patterns` case-sensitively. */
likeAllOf(column: ColumnName, patterns: readonly string[]): this;
likeAllOf(column: string, patterns: readonly string[]): this;
/** Match only rows where `column` matches any of `patterns` case-sensitively. */
likeAnyOf(column: ColumnName, patterns: readonly string[]): this;
likeAnyOf(column: string, patterns: readonly string[]): this;
/** Match only rows where `column` matches `pattern` case-insensitively (SQL `ILIKE`). */
ilike(column: ColumnName, pattern: string): this;
ilike(column: string, pattern: string): this;
/** Match only rows where `column` matches all of `patterns` case-insensitively. */
ilikeAllOf(column: ColumnName, patterns: readonly string[]): this;
ilikeAllOf(column: string, patterns: readonly string[]): this;
/** Match only rows where `column` matches any of `patterns` case-insensitively. */
ilikeAnyOf(column: ColumnName, patterns: readonly string[]): this;
ilikeAnyOf(column: string, patterns: readonly string[]): this;
/** Match only rows where `column` matches the regular expression `pattern` case-sensitively (PostgREST `match` operator). */
regexMatch(column: ColumnName, pattern: string): this;
regexMatch(column: string, pattern: string): this;
/** Match only rows where `column` matches the regular expression `pattern` case-insensitively (PostgREST `imatch` operator). */
regexIMatch(column: ColumnName, pattern: string): this;
regexIMatch(column: string, pattern: string): this;
/** Match only rows where `column` IS `value` — use this (not `.eq()`) to check for `null` or booleans. */
is(column: ColumnName, value: Row[ColumnName] & (boolean | null)): this;
is(column: string, value: boolean | null): this;
/**
 * Match only rows where `column` IS DISTINCT FROM `value`.
 *
 * Unlike `.neq()`, this treats `NULL` as a comparable value. Two `NULL` values
 * are considered equal (not distinct), and comparing `NULL` with any non-NULL
 * value returns true (distinct).
 *
 * @param column - The column to filter on
 * @param value - The value to filter with
 */
isDistinct(column: ColumnName, value: ResolveFilterValue extends never ? unknown : ResolveFilterValue extends infer ResolvedFilterValue ? ResolvedFilterValue : never): this;
/**
 * Match only rows where `column` is included in the `values` array.
 *
 * @param column - The column to filter on
 * @param values - The values array to filter with
 *
 * @category Database
 *
 * @example With `select()`
 * ```ts
 * const { data, error } = await supabase
 * .from('characters')
 * .select()
 * .in('name', ['Leia', 'Han'])
 * ```
 *
 * @exampleSql With `select()`
 * ```sql
 * create table
 * characters (id int8 primary key, name text);
 *
 * insert into
 * characters (id, name)
 * values
 * (1, 'Luke'),
 * (2, 'Leia'),
 * (3, 'Han');
 * ```
 *
 * @exampleResponse With `select()`
 * ```json
 * {
 * "data": [
 * {
 * "id": 2,
 * "name": "Leia"
 * },
 * {
 * "id": 3,
 * "name": "Han"
 * }
 * ],
 * "status": 200,
 * "statusText": "OK"
 * }
 * ```
 */
in(column: ColumnName, values: ReadonlyArray extends never ? unknown : ResolveFilterValue extends infer ResolvedFilterValue ? ResolvedFilterValue : never>): this;
/**
 * Match only rows where `column` is NOT included in the `values` array.
* @param column - The column to filter on
 * @param values - The values array to filter with
 */
notIn(column: ColumnName, values: ReadonlyArray extends never ? unknown : ResolveFilterValue extends infer ResolvedFilterValue ? ResolvedFilterValue : never>): this;
/** Match only rows where `column` contains every element of `value` (PostgREST `cs`); applies to array, range, and jsonb columns. */
contains(column: ColumnName, value: string | ReadonlyArray | Record): this;
contains(column: string, value: string | readonly unknown[] | Record): this;
/** Match only rows where every element of `column` is contained in `value` (PostgREST `cd`). */
containedBy(column: ColumnName, value: string | ReadonlyArray | Record): this;
containedBy(column: string, value: string | readonly unknown[] | Record): this;
/** Match only rows where every element in the range `column` is greater than any element in `range` (a Postgres range literal, e.g. `[2,25)`). */
rangeGt(column: ColumnName, range: string): this;
rangeGt(column: string, range: string): this;
/** Match only rows where every element in the range `column` is either contained in `range` or greater than any element in it. */
rangeGte(column: ColumnName, range: string): this;
rangeGte(column: string, range: string): this;
/** Match only rows where every element in the range `column` is less than any element in `range`. */
rangeLt(column: ColumnName, range: string): this;
rangeLt(column: string, range: string): this;
/** Match only rows where every element in the range `column` is either contained in `range` or less than any element in it. */
rangeLte(column: ColumnName, range: string): this;
rangeLte(column: string, range: string): this;
/** Match only rows where the range `column` is adjacent to `range`: mutually exclusive with no element possible between the two. */
rangeAdjacent(column: ColumnName, range: string): this;
rangeAdjacent(column: string, range: string): this;
/** Match only rows where `column` and `value` have at least one element in common (PostgREST `ov`); applies to array and range columns. */
overlaps(column: ColumnName, value: string | ReadonlyArray): this;
overlaps(column: string, value: string | readonly unknown[]): this;
/** Match only rows where the text-search value of `column` matches `query`; `options.type` selects plain/phrase/websearch query parsing and `options.config` the text-search configuration. */
textSearch(column: ColumnName, query: string, options?: { config?: string; type?: 'plain' | 'phrase' | 'websearch'; }): this;
textSearch(column: string, query: string, options?: { config?: string; type?: 'plain' | 'phrase' | 'websearch'; }): this;
/** Match only rows where each column in `query` equals its associated value — shorthand for chaining multiple `.eq()` filters. */
match(query: Record): this;
match(query: Record): this;
/** Match only rows which do NOT satisfy the filter `column` `operator` `value` (negation of the given filter operator). */
not(column: ColumnName, operator: FilterOperator, value: Row[ColumnName]): this;
not(column: string, operator: string, value: unknown): this;
/**
 * Match only rows which satisfy at least one of the filters.
 *
 * Unlike most filters, `filters` is used as-is and needs to follow [PostgREST
 * syntax](https://postgrest.org/en/stable/api.html#operators). You also need
 * to make sure it's properly sanitized.
 *
 * It's currently not possible to do an `.or()` filter across multiple tables.
* * @param filters - The filters to use, following PostgREST syntax * @param options - Named parameters * @param options.referencedTable - Set this to filter on referenced tables * instead of the parent table * @param options.foreignTable - Deprecated, use `referencedTable` instead * * @category Database * * @remarks * or() expects you to use the raw PostgREST syntax for the filter names and values. * * ```ts * .or('id.in.(5,6,7), arraycol.cs.{"a","b"}') // Use `()` for `in` filter, `{}` for array values and `cs` for `contains()`. * .or('id.in.(5,6,7), arraycol.cd.{"a","b"}') // Use `cd` for `containedBy()` * ``` * * @example With `select()` * ```ts * const { data, error } = await supabase * .from('characters') * .select('name') * .or('id.eq.2,name.eq.Han') * ``` * * @exampleSql With `select()` * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse With `select()` * ```json * { * "data": [ * { * "name": "Leia" * }, * { * "name": "Han" * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @example Use `or` with `and` * ```ts * const { data, error } = await supabase * .from('characters') * .select('name') * .or('id.gt.3,and(id.eq.1,name.eq.Luke)') * ``` * * @exampleSql Use `or` with `and` * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse Use `or` with `and` * ```json * { * "data": [ * { * "name": "Luke" * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @example Use `or` on referenced tables * ```ts * const { data, error } = await supabase * .from('orchestral_sections') * .select(` * name, * instruments!inner ( * name * ) * `) * .or('section_id.eq.1,name.eq.guzheng', { referencedTable: 'instruments' }) * ``` * * @exampleSql Use `or` on referenced tables * ```sql * create table * 
orchestral_sections (id int8 primary key, name text); * create table * instruments ( * id int8 primary key, * section_id int8 not null references orchestral_sections, * name text * ); * * insert into * orchestral_sections (id, name) * values * (1, 'strings'), * (2, 'woodwinds'); * insert into * instruments (id, section_id, name) * values * (1, 2, 'flute'), * (2, 1, 'violin'); * ``` * * @exampleResponse Use `or` on referenced tables * ```json * { * "data": [ * { * "name": "strings", * "instruments": [ * { * "name": "violin" * } * ] * } * ], * "status": 200, * "statusText": "OK" * } * ``` */ or(filters: string, { foreignTable, referencedTable }?: { foreignTable?: string; referencedTable?: string; }): this; filter(column: ColumnName, operator: `${'' | 'not.'}${FilterOperator}`, value: unknown): this; filter(column: string, operator: string, value: unknown): this; } //#endregion //#region src/PostgrestQueryBuilder.d.ts declare class PostgrestQueryBuilder { url: URL; headers: Headers; schema?: string; signal?: AbortSignal; fetch?: Fetch; urlLengthLimit: number; /** * Creates a query builder scoped to a Postgres table or view. * * @category Database * * @example Creating a Postgrest query builder * ```ts * import { PostgrestQueryBuilder } from '@supabase/postgrest-js' * * const query = new PostgrestQueryBuilder( * new URL('https://xyzcompany.supabase.co/rest/v1/users'), * { headers: { apikey: 'public-anon-key' } } * ) * ``` */ constructor(url: URL, { headers, schema, fetch, urlLengthLimit }: { headers?: HeadersInit; schema?: string; fetch?: Fetch; urlLengthLimit?: number; }); /** * Clone URL and headers to prevent shared state between operations. */ private cloneRequestState; /** * Perform a SELECT query on the table or view. * * @param columns - The columns to retrieve, separated by commas. Columns can be renamed when returned with `customName:columnName` * * @param options - Named parameters * * @param options.head - When set to `true`, `data` will not be returned. 
* Useful if you only need the count. * * @param options.count - Count algorithm to use to count rows in the table or view. * * `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the * hood. * * `"planned"`: Approximated but fast count algorithm. Uses the Postgres * statistics under the hood. * * `"estimated"`: Uses exact count for low numbers and planned count for high * numbers. * * @remarks * When using `count` with `.range()` or `.limit()`, the returned `count` is the total number of rows * that match your filters, not the number of rows in the current page. Use this to build pagination UI. * - By default, Supabase projects return a maximum of 1,000 rows. This setting can be changed in your project's [API settings](/dashboard/project/_/settings/api). It's recommended that you keep it low to limit the payload size of accidental or malicious requests. You can use `range()` queries to paginate through your data. * - `select()` can be combined with [Filters](/docs/reference/javascript/using-filters) * - `select()` can be combined with [Modifiers](/docs/reference/javascript/using-modifiers) * - `apikey` is a reserved keyword if you're using the [Supabase Platform](/docs/guides/platform) and [should be avoided as a column name](https://github.com/supabase/supabase/issues/5465). 
* * @category Database * * @example Getting your data * ```js * const { data, error } = await supabase * .from('characters') * .select() * ``` * * @exampleSql Getting your data * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Harry'), * (2, 'Frodo'), * (3, 'Katniss'); * ``` * * @exampleResponse Getting your data * ```json * { * "data": [ * { * "id": 1, * "name": "Harry" * }, * { * "id": 2, * "name": "Frodo" * }, * { * "id": 3, * "name": "Katniss" * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @example Selecting specific columns * ```js * const { data, error } = await supabase * .from('characters') * .select('name') * ``` * * @exampleSql Selecting specific columns * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Frodo'), * (2, 'Harry'), * (3, 'Katniss'); * ``` * * @exampleResponse Selecting specific columns * ```json * { * "data": [ * { * "name": "Frodo" * }, * { * "name": "Harry" * }, * { * "name": "Katniss" * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @exampleDescription Query referenced tables * If your database has foreign key relationships, you can query related tables too. 
* * @example Query referenced tables * ```js * const { data, error } = await supabase * .from('orchestral_sections') * .select(` * name, * instruments ( * name * ) * `) * ``` * * @exampleSql Query referenced tables * ```sql * create table * orchestral_sections (id int8 primary key, name text); * create table * instruments ( * id int8 primary key, * section_id int8 not null references orchestral_sections, * name text * ); * * insert into * orchestral_sections (id, name) * values * (1, 'strings'), * (2, 'woodwinds'); * insert into * instruments (id, section_id, name) * values * (1, 2, 'flute'), * (2, 1, 'violin'); * ``` * * @exampleResponse Query referenced tables * ```json * { * "data": [ * { * "name": "strings", * "instruments": [ * { * "name": "violin" * } * ] * }, * { * "name": "woodwinds", * "instruments": [ * { * "name": "flute" * } * ] * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @exampleDescription Query referenced tables with spaces in their names * If your table name contains spaces, you must use double quotes in the `select` statement to reference the table. 
* * @example Query referenced tables with spaces in their names * ```js * const { data, error } = await supabase * .from('orchestral sections') * .select(` * name, * "musical instruments" ( * name * ) * `) * ``` * * @exampleSql Query referenced tables with spaces in their names * ```sql * create table * "orchestral sections" (id int8 primary key, name text); * create table * "musical instruments" ( * id int8 primary key, * section_id int8 not null references "orchestral sections", * name text * ); * * insert into * "orchestral sections" (id, name) * values * (1, 'strings'), * (2, 'woodwinds'); * insert into * "musical instruments" (id, section_id, name) * values * (1, 2, 'flute'), * (2, 1, 'violin'); * ``` * * @exampleResponse Query referenced tables with spaces in their names * ```json * { * "data": [ * { * "name": "strings", * "musical instruments": [ * { * "name": "violin" * } * ] * }, * { * "name": "woodwinds", * "musical instruments": [ * { * "name": "flute" * } * ] * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @exampleDescription Query referenced tables through a join table * If you're in a situation where your tables are **NOT** directly * related, but instead are joined by a _join table_, you can still use * the `select()` method to query the related data. The join table needs * to have the foreign keys as part of its composite primary key. 
* * @example Query referenced tables through a join table * ```ts * const { data, error } = await supabase * .from('users') * .select(` * name, * teams ( * name * ) * `) * * ``` * * @exampleSql Query referenced tables through a join table * ```sql * create table * users ( * id int8 primary key, * name text * ); * create table * teams ( * id int8 primary key, * name text * ); * -- join table * create table * users_teams ( * user_id int8 not null references users, * team_id int8 not null references teams, * -- both foreign keys must be part of a composite primary key * primary key (user_id, team_id) * ); * * insert into * users (id, name) * values * (1, 'Kiran'), * (2, 'Evan'); * insert into * teams (id, name) * values * (1, 'Green'), * (2, 'Blue'); * insert into * users_teams (user_id, team_id) * values * (1, 1), * (1, 2), * (2, 2); * ``` * * @exampleResponse Query referenced tables through a join table * ```json * { * "data": [ * { * "name": "Kiran", * "teams": [ * { * "name": "Green" * }, * { * "name": "Blue" * } * ] * }, * { * "name": "Evan", * "teams": [ * { * "name": "Blue" * } * ] * } * ], * "status": 200, * "statusText": "OK" * } * * ``` * * @exampleDescription Query the same referenced table multiple times * If you need to query the same referenced table twice, use the name of the * joined column to identify which join to use. You can also give each * column an alias. * * @example Query the same referenced table multiple times * ```ts * const { data, error } = await supabase * .from('messages') * .select(` * content, * from:sender_id(name), * to:receiver_id(name) * `) * * // To infer types, use the name of the table (in this case `users`) and * // the name of the foreign key constraint. 
* const { data, error } = await supabase * .from('messages') * .select(` * content, * from:users!messages_sender_id_fkey(name), * to:users!messages_receiver_id_fkey(name) * `) * ``` * * @exampleSql Query the same referenced table multiple times * ```sql * create table * users (id int8 primary key, name text); * * create table * messages ( * sender_id int8 not null references users, * receiver_id int8 not null references users, * content text * ); * * insert into * users (id, name) * values * (1, 'Kiran'), * (2, 'Evan'); * * insert into * messages (sender_id, receiver_id, content) * values * (1, 2, '👋'); * ``` * * @exampleResponse Query the same referenced table multiple times * ```json * { * "data": [ * { * "content": "👋", * "from": { * "name": "Kiran" * }, * "to": { * "name": "Evan" * } * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @exampleDescription Query nested foreign tables through a join table * You can use the result of a joined table to gather data in * another foreign table. With multiple references to the same foreign * table you must specify the column on which to conduct the join. 
* * @example Query nested foreign tables through a join table * ```ts * const { data, error } = await supabase * .from('games') * .select(` * game_id:id, * away_team:teams!games_away_team_fkey ( * users ( * id, * name * ) * ) * `) * * ``` * * @exampleSql Query nested foreign tables through a join table * ```sql * create table * users ( * id int8 primary key, * name text * ); * create table * teams ( * id int8 primary key, * name text * ); * -- join table * create table * users_teams ( * user_id int8 not null references users, * team_id int8 not null references teams, * * primary key (user_id, team_id) * ); * create table * games ( * id int8 primary key, * home_team int8 not null references teams, * away_team int8 not null references teams, * name text * ); * * insert into users (id, name) * values * (1, 'Kiran'), * (2, 'Evan'); * insert into * teams (id, name) * values * (1, 'Green'), * (2, 'Blue'); * insert into * users_teams (user_id, team_id) * values * (1, 1), * (1, 2), * (2, 2); * insert into * games (id, home_team, away_team, name) * values * (1, 1, 2, 'Green vs Blue'), * (2, 2, 1, 'Blue vs Green'); * ``` * * @exampleResponse Query nested foreign tables through a join table * ```json * { * "data": [ * { * "game_id": 1, * "away_team": { * "users": [ * { * "id": 1, * "name": "Kiran" * }, * { * "id": 2, * "name": "Evan" * } * ] * } * }, * { * "game_id": 2, * "away_team": { * "users": [ * { * "id": 1, * "name": "Kiran" * } * ] * } * } * ], * "status": 200, * "statusText": "OK" * } * * ``` * * @exampleDescription Filtering through referenced tables * If the filter on a referenced table's column is not satisfied, the referenced * table returns `[]` or `null` but the parent table is not filtered out. 
* If you want to filter out the parent table rows, use the `!inner` hint * * @example Filtering through referenced tables * ```ts * const { data, error } = await supabase * .from('instruments') * .select('name, orchestral_sections(*)') * .eq('orchestral_sections.name', 'percussion') * ``` * * @exampleSql Filtering through referenced tables * ```sql * create table * orchestral_sections (id int8 primary key, name text); * create table * instruments ( * id int8 primary key, * section_id int8 not null references orchestral_sections, * name text * ); * * insert into * orchestral_sections (id, name) * values * (1, 'strings'), * (2, 'woodwinds'); * insert into * instruments (id, section_id, name) * values * (1, 2, 'flute'), * (2, 1, 'violin'); * ``` * * @exampleResponse Filtering through referenced tables * ```json * { * "data": [ * { * "name": "flute", * "orchestral_sections": null * }, * { * "name": "violin", * "orchestral_sections": null * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @exampleDescription Querying referenced table with count * You can get the number of rows in a related table by using the * **count** property. 
* * @example Querying referenced table with count * ```ts * const { data, error } = await supabase * .from('orchestral_sections') * .select(`*, instruments(count)`) * ``` * * @exampleSql Querying referenced table with count * ```sql * create table orchestral_sections ( * "id" "uuid" primary key default "extensions"."uuid_generate_v4"() not null, * "name" text * ); * * create table instruments ( * "id" "uuid" primary key default "extensions"."uuid_generate_v4"() not null, * "name" text, * "section_id" "uuid" references public.orchestral_sections on delete cascade * ); * * with section as ( * insert into orchestral_sections (name) * values ('strings') returning id * ) * insert into instruments (name, section_id) values * ('violin', (select id from section)), * ('viola', (select id from section)), * ('cello', (select id from section)), * ('double bass', (select id from section)); * ``` * * @exampleResponse Querying referenced table with count * ```json * [ * { * "id": "693694e7-d993-4360-a6d7-6294e325d9b6", * "name": "strings", * "instruments": [ * { * "count": 4 * } * ] * } * ] * ``` * * @exampleDescription Querying with count option * You can get the number of rows by using the * [count](/docs/reference/javascript/select#parameters) option. * * @example Querying with count option * ```ts * const { count, error } = await supabase * .from('characters') * .select('*', { count: 'exact', head: true }) * ``` * * @exampleSql Querying with count option * ```sql * create table * characters (id int8 primary key, name text); * * insert into * characters (id, name) * values * (1, 'Luke'), * (2, 'Leia'), * (3, 'Han'); * ``` * * @exampleResponse Querying with count option * ```json * { * "count": 3, * "status": 200, * "statusText": "OK" * } * ``` * * @exampleDescription Querying JSON data * You can select and filter data inside of * [JSON](/docs/guides/database/json) columns. 
Postgres offers some * [operators](/docs/guides/database/json#query-the-jsonb-data) for * querying JSON data. * * @example Querying JSON data * ```ts * const { data, error } = await supabase * .from('users') * .select(` * id, name, * address->city * `) * ``` * * @exampleSql Querying JSON data * ```sql * create table * users ( * id int8 primary key, * name text, * address jsonb * ); * * insert into * users (id, name, address) * values * (1, 'Frodo', '{"city":"Hobbiton"}'); * ``` * * @exampleResponse Querying JSON data * ```json * { * "data": [ * { * "id": 1, * "name": "Frodo", * "city": "Hobbiton" * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @exampleDescription Querying referenced table with inner join * If you don't want to return the referenced table contents, you can leave the parenthesis empty. * Like `.select('name, orchestral_sections!inner()')`. * * @example Querying referenced table with inner join * ```ts * const { data, error } = await supabase * .from('instruments') * .select('name, orchestral_sections!inner(name)') * .eq('orchestral_sections.name', 'woodwinds') * .limit(1) * ``` * * @exampleSql Querying referenced table with inner join * ```sql * create table orchestral_sections ( * "id" "uuid" primary key default "extensions"."uuid_generate_v4"() not null, * "name" text * ); * * create table instruments ( * "id" "uuid" primary key default "extensions"."uuid_generate_v4"() not null, * "name" text, * "section_id" "uuid" references public.orchestral_sections on delete cascade * ); * * with section as ( * insert into orchestral_sections (name) * values ('woodwinds') returning id * ) * insert into instruments (name, section_id) values * ('flute', (select id from section)), * ('clarinet', (select id from section)), * ('bassoon', (select id from section)), * ('piccolo', (select id from section)); * ``` * * @exampleResponse Querying referenced table with inner join * ```json * { * "data": [ * { * "name": "flute", * "orchestral_sections": 
{"name": "woodwinds"} * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @exampleDescription Switching schemas per query * In addition to setting the schema during initialization, you can also switch schemas on a per-query basis. * Make sure you've set up your [database privileges and API settings](/docs/guides/api/using-custom-schemas). * * @example Switching schemas per query * ```ts * const { data, error } = await supabase * .schema('myschema') * .from('mytable') * .select() * ``` * * @exampleSql Switching schemas per query * ```sql * create schema myschema; * * create table myschema.mytable ( * id uuid primary key default gen_random_uuid(), * data text * ); * * insert into myschema.mytable (data) values ('mydata'); * ``` * * @exampleResponse Switching schemas per query * ```json * { * "data": [ * { * "id": "4162e008-27b0-4c0f-82dc-ccaeee9a624d", * "data": "mydata" * } * ], * "status": 200, * "statusText": "OK" * } * ``` */ select>(columns?: Query, options?: { head?: boolean; count?: 'exact' | 'planned' | 'estimated'; }): PostgrestFilterBuilder; insert(values: Row, options?: { count?: 'exact' | 'planned' | 'estimated'; }): PostgrestFilterBuilder; insert(values: Row[], options?: { count?: 'exact' | 'planned' | 'estimated'; defaultToNull?: boolean; }): PostgrestFilterBuilder; upsert(values: Row, options?: { onConflict?: string; ignoreDuplicates?: boolean; count?: 'exact' | 'planned' | 'estimated'; }): PostgrestFilterBuilder; upsert(values: Row[], options?: { onConflict?: string; ignoreDuplicates?: boolean; count?: 'exact' | 'planned' | 'estimated'; defaultToNull?: boolean; }): PostgrestFilterBuilder; /** * Perform an UPDATE on the table or view. * * By default, updated rows are not returned. To return it, chain the call * with `.select()` after filters. * * @param values - The values to update with * * @param options - Named parameters * * @param options.count - Count algorithm to use to count updated rows. * * `"exact"`: Exact but slow count algorithm. 
Performs a `COUNT(*)` under the * hood. * * `"planned"`: Approximated but fast count algorithm. Uses the Postgres * statistics under the hood. * * `"estimated"`: Uses exact count for low numbers and planned count for high * numbers. * * @category Database * * @remarks * - `update()` should always be combined with [Filters](/docs/reference/javascript/using-filters) to target the item(s) you wish to update. * * @example Updating your data * ```ts * const { error } = await supabase * .from('instruments') * .update({ name: 'piano' }) * .eq('id', 1) * ``` * * @exampleSql Updating your data * ```sql * create table * instruments (id int8 primary key, name text); * * insert into * instruments (id, name) * values * (1, 'harpsichord'); * ``` * * @exampleResponse Updating your data * ```json * { * "status": 204, * "statusText": "No Content" * } * ``` * * @example Update a record and return it * ```ts * const { data, error } = await supabase * .from('instruments') * .update({ name: 'piano' }) * .eq('id', 1) * .select() * ``` * * @exampleSql Update a record and return it * ```sql * create table * instruments (id int8 primary key, name text); * * insert into * instruments (id, name) * values * (1, 'harpsichord'); * ``` * * @exampleResponse Update a record and return it * ```json * { * "data": [ * { * "id": 1, * "name": "piano" * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @exampleDescription Updating JSON data * Postgres offers some * [operators](/docs/guides/database/json#query-the-jsonb-data) for * working with JSON data. Currently, it is only possible to update the entire JSON document. 
* * @example Updating JSON data * ```ts * const { data, error } = await supabase * .from('users') * .update({ * address: { * street: 'Melrose Place', * postcode: 90210 * } * }) * .eq('address->postcode', 90210) * .select() * ``` * * @exampleSql Updating JSON data * ```sql * create table * users ( * id int8 primary key, * name text, * address jsonb * ); * * insert into * users (id, name, address) * values * (1, 'Michael', '{ "postcode": 90210 }'); * ``` * * @exampleResponse Updating JSON data * ```json * { * "data": [ * { * "id": 1, * "name": "Michael", * "address": { * "street": "Melrose Place", * "postcode": 90210 * } * } * ], * "status": 200, * "statusText": "OK" * } * ``` */ update(values: Row, { count }?: { count?: 'exact' | 'planned' | 'estimated'; }): PostgrestFilterBuilder; /** * Perform a DELETE on the table or view. * * By default, deleted rows are not returned. To return it, chain the call * with `.select()` after filters. * * @param options - Named parameters * * @param options.count - Count algorithm to use to count deleted rows. * * `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the * hood. * * `"planned"`: Approximated but fast count algorithm. Uses the Postgres * statistics under the hood. * * `"estimated"`: Uses exact count for low numbers and planned count for high * numbers. * * @category Database * * @remarks * - `delete()` should always be combined with [filters](/docs/reference/javascript/using-filters) to target the item(s) you wish to delete. * - If you use `delete()` with filters and you have * [RLS](/docs/learn/auth-deep-dive/auth-row-level-security) enabled, only * rows visible through `SELECT` policies are deleted. Note that by default * no rows are visible, so you need at least one `SELECT`/`ALL` policy that * makes the rows visible. * - When using `delete().in()`, specify an array of values to target multiple rows with a single query. 
This is particularly useful for batch deleting entries that share common criteria, such as deleting users by their IDs. Ensure that the array you provide accurately represents all records you intend to delete to avoid unintended data removal. * * @example Delete a single record * ```ts * const response = await supabase * .from('countries') * .delete() * .eq('id', 1) * ``` * * @exampleSql Delete a single record * ```sql * create table * countries (id int8 primary key, name text); * * insert into * countries (id, name) * values * (1, 'Mordor'); * ``` * * @exampleResponse Delete a single record * ```json * { * "status": 204, * "statusText": "No Content" * } * ``` * * @example Delete a record and return it * ```ts * const { data, error } = await supabase * .from('countries') * .delete() * .eq('id', 1) * .select() * ``` * * @exampleSql Delete a record and return it * ```sql * create table * countries (id int8 primary key, name text); * * insert into * countries (id, name) * values * (1, 'Mordor'); * ``` * * @exampleResponse Delete a record and return it * ```json * { * "data": [ * { * "id": 1, * "name": "Mordor" * } * ], * "status": 200, * "statusText": "OK" * } * ``` * * @example Delete multiple records * ```ts * const response = await supabase * .from('countries') * .delete() * .in('id', [1, 2, 3]) * ``` * * @exampleSql Delete multiple records * ```sql * create table * countries (id int8 primary key, name text); * * insert into * countries (id, name) * values * (1, 'Rohan'), (2, 'The Shire'), (3, 'Mordor'); * ``` * * @exampleResponse Delete multiple records * ```json * { * "status": 204, * "statusText": "No Content" * } * ``` */ delete({ count }?: { count?: 'exact' | 'planned' | 'estimated'; }): PostgrestFilterBuilder; } //#endregion //#region src/types/common/rpc.d.ts type IsMatchingArgs = [FnArgs] extends [Record] ? PassedArgs extends Record ? true : false : keyof PassedArgs extends keyof FnArgs ? PassedArgs extends FnArgs ? 
true : false : false;
// Resolves to the overload whose declared Args accept the supplied args;
// otherwise never/false. NOTE(review): `<...>` generic parameter lists and
// type arguments in this region were stripped in extraction — regenerate the
// bundle rather than reconstructing them by hand.
type MatchingFunctionArgs = Fn$1 extends { Args: infer A extends GenericFunction['Args']; } ? IsMatchingArgs extends true ? Fn$1 : never : false;
// Distributes over a union of function definitions to find an args match.
type FindMatchingFunctionByArgs = FnUnion extends infer Fn extends GenericFunction ? MatchingFunctionArgs : false;
// Tables merged with views (the `Exclude` argument list was stripped in
// extraction — cannot tell from here what is excluded; verify against source).
type TablesAndViews$1 = Schema['Tables'] & Exclude;
// Standard union-to-intersection conversion via contravariant inference on a
// function parameter position.
type UnionToIntersection = (U extends any ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never;
// Picks one member of a union (the argument to UnionToIntersection was
// stripped in extraction).
type LastOf = UnionToIntersection T : never> extends (() => infer R) ? R : never;
// True only for `any`: `0 extends 1 & T` holds solely when T is any.
type IsAny = 0 extends 1 & T ? true : false;
// Mutual assignability in both directions — an exact match, not mere
// assignability; tuple wrapping prevents union distribution.
type ExactMatch = [T] extends [S] ? ([S] extends [T] ? true : false) : false;
type ExtractExactFunction = Fns extends infer F ? F extends GenericFunction ? ExactMatch extends true ? F : never : never : never;
// Tuple wrapping avoids distribution over a naked `never`.
type IsNever = [T] extends [never] ? true : false;
// Fallback descriptor surfaced when no RPC definition matches the call-site
// arguments; `Result` is branded with a human-readable error string.
type RpcFunctionNotFound = {
  Row: any;
  Result: { error: true; } & "Couldn't infer function definition matching provided arguments";
  RelationName: FnName;
  Relationships: null;
};
type CrossSchemaError = { error: true; } & `Function returns SETOF from a different schema ('${TableRef}'). Use .overrideTypes() to specify the return type explicitly.`;
// Selects the function definition matching the provided args and derives the
// Row / Result / RelationName / Relationships bundle consumed by the filter
// builder returned from .rpc().
type GetRpcFunctionFilterBuilderByArgs = {
  0: Schema['Functions'][FnName];
  1: IsAny extends true ? any : IsNever extends true ? IsNever> extends true ? LastOf : ExtractExactFunction : Args extends Record ? LastOf : Args extends GenericFunction['Args'] ? IsNever>> extends true ? LastOf : LastOf> : ExtractExactFunction extends GenericFunction ? ExtractExactFunction : any;
}[1] extends infer Fn ? IsAny extends true ? {
  Row: any;
  Result: any;
  RelationName: FnName;
  Relationships: null;
} : Fn extends GenericFunction ? {
  Row: Fn['SetofOptions'] extends GenericSetofOption ? Fn['SetofOptions']['to'] extends keyof TablesAndViews$1 ? TablesAndViews$1[Fn['SetofOptions']['to']]['Row'] : Fn['Returns'] extends any[] ? Fn['Returns'][number] extends Record ? Fn['Returns'][number] : CrossSchemaError : Fn['Returns'] extends Record ? Fn['Returns'] : CrossSchemaError : Fn['Returns'] extends any[] ? Fn['Returns'][number] extends Record ? Fn['Returns'][number] : never : Fn['Returns'] extends Record ? Fn['Returns'] : never;
  Result: Fn['SetofOptions'] extends GenericSetofOption ? Fn['SetofOptions']['isSetofReturn'] extends true ? Fn['SetofOptions']['isOneToOne'] extends true ? Fn['Returns'][] : Fn['Returns'] : Fn['Returns'] : Fn['Returns'];
  RelationName: Fn['SetofOptions'] extends GenericSetofOption ? Fn['SetofOptions']['to'] : FnName;
  Relationships: Fn['SetofOptions'] extends GenericSetofOption ? Fn['SetofOptions']['to'] extends keyof Schema['Tables'] ? Schema['Tables'][Fn['SetofOptions']['to']]['Relationships'] : Fn['SetofOptions']['to'] extends keyof Schema['Views'] ? Schema['Views'][Fn['SetofOptions']['to']]['Relationships'] : null : null;
} : Fn extends false ? RpcFunctionNotFound : RpcFunctionNotFound : RpcFunctionNotFound;
//#endregion
//#region src/PostgrestClient.d.ts
/**
 * PostgREST client.
 *
 * @typeParam Database - Types for the schema from the [type
 * generator](https://supabase.com/docs/reference/javascript/next/typescript-support)
 *
 * @typeParam SchemaName - Postgres schema to switch to. Must be a string
 * literal, the same one passed to the constructor. If the schema is not
 * `"public"`, this must be supplied manually.
 */
// NOTE(review): the class's generic parameter list was stripped in extraction
// — `PostgrestClient = ('public' extends …)` below is missing its leading
// `<Database …, SchemaName …` segment, leaving a dangling `>`. Do not attempt
// a hand repair; regenerate from the published package.
declare class PostgrestClient = ('public' extends keyof Omit ? 'public' : string & keyof Omit), Schema extends GenericSchema = (Omit[SchemaName] extends GenericSchema ? Omit[SchemaName] : any)> {
  url: string;
  headers: Headers;
  schemaName?: SchemaName;
  fetch?: Fetch;
  // Maximum URL length in characters before warnings/errors are triggered
  // (default 8000 per the constructor documentation below).
  urlLengthLimit: number;
  /**
   * Creates a PostgREST client.
*
 * @param url - URL of the PostgREST endpoint
 * @param options - Named parameters
 * @param options.headers - Custom headers
 * @param options.schema - Postgres schema to switch to
 * @param options.fetch - Custom fetch
 * @param options.timeout - Optional timeout in milliseconds for all requests. When set, requests will automatically abort after this duration to prevent indefinite hangs.
 * @param options.urlLengthLimit - Maximum URL length in characters before warnings/errors are triggered. Defaults to 8000.
 * @example
 * ```ts
 * import { PostgrestClient } from '@supabase/postgrest-js'
 *
 * const postgrest = new PostgrestClient('https://xyzcompany.supabase.co/rest/v1', {
 *   headers: { apikey: 'public-anon-key' },
 *   schema: 'public',
 *   timeout: 30000, // 30 second timeout
 * })
 * ```
 *
 * @category Database
 *
 * @remarks
 * - A `timeout` option (in milliseconds) can be set to automatically abort requests that take too long.
 * - A `urlLengthLimit` option (default: 8000) can be set to control when URL length warnings are included in error messages for aborted requests.
 *
 * @example Creating a Postgrest client
 * ```ts
 * import { PostgrestClient } from '@supabase/postgrest-js'
 *
 * const postgrest = new PostgrestClient('https://xyzcompany.supabase.co/rest/v1', {
 *   headers: { apikey: 'public-anon-key' },
 *   schema: 'public',
 * })
 * ```
 *
 * @example With timeout
 * ```ts
 * import { PostgrestClient } from '@supabase/postgrest-js'
 *
 * const postgrest = new PostgrestClient('https://xyzcompany.supabase.co/rest/v1', {
 *   headers: { apikey: 'public-anon-key' },
 *   schema: 'public',
 *   timeout: 30000, // 30 second timeout
 * })
 * ```
 */
constructor(url: string, { headers, schema, fetch, timeout, urlLengthLimit }?: {
  headers?: HeadersInit;
  schema?: SchemaName;
  fetch?: Fetch;
  timeout?: number;
  urlLengthLimit?: number;
});
// Overloads: resolve a relation name to a table or a view query builder.
// NOTE(review): the `<...>` type parameter lists/type arguments were stripped
// in extraction — `TableName$1`/`ViewName` have no visible binding here and
// `PostgrestQueryBuilder` carries no type arguments; verify against the
// published declarations.
from(relation: TableName$1): PostgrestQueryBuilder;
from(relation: ViewName): PostgrestQueryBuilder;
/**
 * Select a schema to query or perform a function (rpc) call.
*
 * The schema needs to be on the list of exposed schemas inside Supabase.
 *
 * @param schema - The schema to query
 *
 * @category Database
 */
schema>(schema: DynamicSchema): PostgrestClient;
// NOTE(review): `schema>` above is missing its `<DynamicSchema …` type
// parameter list (stripped in extraction), as are the type arguments on the
// `PostgrestClient` return type. Regenerate this bundle rather than repairing
// by hand.
/**
 * Perform a function call.
 *
 * @param fn - The function name to call
 * @param args - The arguments to pass to the function call
 * @param options - Named parameters
 * @param options.head - When set to `true`, `data` will not be returned.
 * Useful if you only need the count.
 * @param options.get - When set to `true`, the function will be called with
 * read-only access mode.
 * @param options.count - Count algorithm to use to count rows returned by the
 * function. Only applicable for [set-returning
 * functions](https://www.postgresql.org/docs/current/functions-srf.html).
 *
 * `"exact"`: Exact but slow count algorithm. Performs a `COUNT(*)` under the
 * hood.
 *
 * `"planned"`: Approximated but fast count algorithm. Uses the Postgres
 * statistics under the hood.
 *
 * `"estimated"`: Uses exact count for low numbers and planned count for high
 * numbers.
*
 * @example
 * ```ts
 * // For cross-schema functions where type inference fails, use overrideTypes:
 * const { data } = await supabase
 *   .schema('schema_b')
 *   .rpc('function_a', {})
 *   .overrideTypes<{ id: string; user_id: string }[]>()
 * ```
 *
 * @category Database
 *
 * @example Call a Postgres function without arguments
 * ```ts
 * const { data, error } = await supabase.rpc('hello_world')
 * ```
 *
 * @exampleSql Call a Postgres function without arguments
 * ```sql
 * create function hello_world() returns text as $$
 *   select 'Hello world';
 * $$ language sql;
 * ```
 *
 * @exampleResponse Call a Postgres function without arguments
 * ```json
 * {
 *   "data": "Hello world",
 *   "status": 200,
 *   "statusText": "OK"
 * }
 * ```
 *
 * @example Call a Postgres function with arguments
 * ```ts
 * const { data, error } = await supabase.rpc('echo', { say: '👋' })
 * ```
 *
 * @exampleSql Call a Postgres function with arguments
 * ```sql
 * create function echo(say text) returns text as $$
 *   select say;
 * $$ language sql;
 * ```
 *
 * @exampleResponse Call a Postgres function with arguments
 * ```json
 * {
 *   "data": "👋",
 *   "status": 200,
 *   "statusText": "OK"
 * }
 *
 * ```
 *
 * @exampleDescription Bulk processing
 * You can process large payloads by passing in an array as an argument.
 *
 * @example Bulk processing
 * ```ts
 * const { data, error } = await supabase.rpc('add_one_each', { arr: [1, 2, 3] })
 * ```
 *
 * @exampleSql Bulk processing
 * ```sql
 * create function add_one_each(arr int[]) returns int[] as $$
 *   select array_agg(n + 1) from unnest(arr) as n;
 * $$ language sql;
 * ```
 *
 * @exampleResponse Bulk processing
 * ```json
 * {
 *   "data": [
 *     2,
 *     3,
 *     4
 *   ],
 *   "status": 200,
 *   "statusText": "OK"
 * }
 * ```
 *
 * @exampleDescription Call a Postgres function with filters
 * Postgres functions that return tables can also be combined with [Filters](/docs/reference/javascript/using-filters) and [Modifiers](/docs/reference/javascript/using-modifiers).
*
 * @example Call a Postgres function with filters
 * ```ts
 * const { data, error } = await supabase
 *   .rpc('list_stored_countries')
 *   .eq('id', 1)
 *   .single()
 * ```
 *
 * @exampleSql Call a Postgres function with filters
 * ```sql
 * create table
 *   countries (id int8 primary key, name text);
 *
 * insert into
 *   countries (id, name)
 * values
 *   (1, 'Rohan'),
 *   (2, 'The Shire');
 *
 * create function list_stored_countries() returns setof countries as $$
 *   select * from countries;
 * $$ language sql;
 * ```
 *
 * @exampleResponse Call a Postgres function with filters
 * ```json
 * {
 *   "data": {
 *     "id": 1,
 *     "name": "Rohan"
 *   },
 *   "status": 200,
 *   "statusText": "OK"
 * }
 * ```
 *
 * @example Call a read-only Postgres function
 * ```ts
 * const { data, error } = await supabase.rpc('hello_world', undefined, { get: true })
 * ```
 *
 * @exampleSql Call a read-only Postgres function
 * ```sql
 * create function hello_world() returns text as $$
 *   select 'Hello world';
 * $$ language sql;
 * ```
 *
 * @exampleResponse Call a read-only Postgres function
 * ```json
 * {
 *   "data": "Hello world",
 *   "status": 200,
 *   "statusText": "OK"
 * }
 * ```
 */
// NOTE(review): `rpc = GetRpcFunctionFilterBuilderByArgs>` below is missing
// its `<FnName …, Args …, …` type parameter list (stripped in extraction), as
// are the type arguments on the `PostgrestFilterBuilder` return type.
// Regenerate the bundle instead of hand-repairing.
rpc = GetRpcFunctionFilterBuilderByArgs>(fn: FnName, args?: Args, { head, get, count }?: {
  head?: boolean;
  get?: boolean;
  count?: 'exact' | 'planned' | 'estimated';
}): PostgrestFilterBuilder;
}
//#endregion
//#region src/index.d.ts
// Default export bundling the public builder classes; the same symbols are
// also available as named exports below.
declare const _default: {
  PostgrestClient: typeof PostgrestClient;
  PostgrestQueryBuilder: typeof PostgrestQueryBuilder;
  PostgrestFilterBuilder: typeof PostgrestFilterBuilder;
  PostgrestTransformBuilder: typeof PostgrestTransformBuilder;
  PostgrestBuilder: typeof PostgrestBuilder;
  PostgrestError: typeof PostgrestError;
};
//#endregion
export {
  PostgrestBuilder,
  PostgrestClient,
  type ClientServerOptions as PostgrestClientOptions,
  PostgrestError,
  PostgrestFilterBuilder,
  type PostgrestMaybeSingleResponse,
  PostgrestQueryBuilder,
  type PostgrestResponse,
  type PostgrestResponseFailure,
  type PostgrestResponseSuccess,
  type PostgrestSingleResponse,
  PostgrestTransformBuilder,
  type GetResult as UnstableGetResult,
  _default as default
};
//# sourceMappingURL=index.d.mts.map