diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e6a930e4..702115fd 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -5,10 +5,10 @@ name: tests on: push: branches: - - master + - master pull_request: branches: - - master + - master # The service containers will use host port binding instead of container networking so you access them via localhost rather than the service name jobs: @@ -28,23 +28,23 @@ jobs: POSTGRES_PASSWORD: api POSTGRES_DB: prostgles_server_tests ports: - # will assign a random free host port - - 5432/tcp + # will assign a random free host port + - 5432/tcp # needed because the postgres container does not provide a healthcheck - options: >- + options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 10 steps: - - uses: actions/checkout@v1 - - run: npm ci - # working-directory: ./postgres - - run: npm test - # working-directory: ./postgres - env: - # use localhost for the host here because we are running the job on the VM. - # If we were running the job on in a container this would be postgres - POSTGRES_HOST: localhost - POSTGRES_PORT: ${{ job.services.postgres.ports[5432] }} # get randomly assigned published port + - uses: actions/checkout@v1 + - run: npm ci + # working-directory: ./postgres + - run: npm test + # working-directory: ./postgres + env: + # use localhost for the host here because we are running the job on the VM. + # If we were running the job on in a container this would be postgres + POSTGRES_HOST: localhost + POSTGRES_PORT: ${{ job.services.postgres.ports[5432] }} # get randomly assigned published port diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..0e4f5587 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,2 @@ +*.json +dist \ No newline at end of file diff --git a/README.md b/README.md index 4b8efec0..ce974806 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,7 @@ prostgles({ { orderBy: { created: -1 }, limit: 10, - } + }, ); }, }); diff --git a/docs/client.md b/docs/client.md index f1ded736..96738199 100644 --- a/docs/client.md +++ b/docs/client.md @@ -1,14 +1,19 @@ # Overview + Client-side API for interacting with a PostgreSQL database. ### Installation + To install the package, run: + ```bash npm install prostgles-client ``` ### Configuration + Example react configuration and usage: + ```typescript import prostgles from "prostgles-client"; import { DBGeneratedSchema } from "./DBGeneratedSchema"; @@ -23,6 +28,7 @@ export const App = () => { ``` Example configuration: + ```typescript import prostgles from "prostgles-client"; import { DBGeneratedSchema } from "./DBGeneratedSchema"; @@ -38,560 +44,711 @@ const prostglesClient = prostgles ``` ### Configuration options + InitOptions - - **socket** required Socket<DefaultEventsMap, DefaultEventsMap> - Socket.io client instance - - **onReload** optional () => void +- **socket** required Socket<DefaultEventsMap, DefaultEventsMap> + + Socket.io client instance + +- **onReload** optional () => void + + Execute this when requesting user reload (due to session expiring authGuard) + Otherwise window will reload + +- **onSchemaChange** optional () => void + + Callback called when schema changes. 
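For example, a minimal sketch of wiring this callback (assuming a socket.io-client connection, the generated `DBGeneratedSchema` from the configuration example above, and a hypothetical `items` table):

```typescript
import { io } from "socket.io-client";
import prostgles from "prostgles-client";
import { DBGeneratedSchema } from "./DBGeneratedSchema";

prostgles<DBGeneratedSchema>({
  socket: io(),
  onSchemaChange: () => {
    // fired when the database schema changes
    console.log("schema changed");
  },
  onReady: async (db) => {
    // re-runs after every schema change and reconnection
    const rows = await db.items.find({}, { limit: 10 });
    console.log(rows);
  },
});
```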
+ "onReady" will be called after this callback + +- **onReady** required OnReadyCallback + + Callback called when: - Execute this when requesting user reload (due to session expiring authGuard) - Otherwise window will reload - - **onSchemaChange** optional () => void + - the client connects for the first time + - the schema changes + - the client reconnects + - server requests a reload - Callback called when schema changes. - "onReady" will be called after this callback - - **onReady** required OnReadyCallback +- **onReconnect** optional (socket: any, error?: any) => void - Callback called when: - - the client connects for the first time - - the schema changes - - the client reconnects - - server requests a reload - - **onReconnect** optional (socket: any, error?: any) => void + Custom handler in case of websocket re-connection. + If not provided will fire onReady - Custom handler in case of websocket re-connection. - If not provided will fire onReady - - **onDisconnect** optional () => void +- **onDisconnect** optional () => void - On disconnect handler. - It is recommended to use this callback instead of socket.on("disconnect") - - **onDebug** optional (event: DebugEvent) => void | Promise<void> + On disconnect handler. + It is recommended to use this callback instead of socket.on("disconnect") - Awaited debug callback. - Allows greater granularity during debugging. +- **onDebug** optional (event: DebugEvent) => void | Promise<void> + + Awaited debug callback. + Allows greater granularity during debugging. # Client-only Methods The following table/view methods are available on the client. ## useSync(basicFilter: EqualityFilter, syncOptions: SyncOptions): AsyncResult + Retrieves rows matching the filter and keeps them in sync + - use { handlesOnData: true } to get optimistic updates method: $update - any changes to the row using the $update method will be reflected instantly - to all sync subscribers that were initiated with the same syncOptions + to all sync subscribers that were initiated with the same syncOptions + #### Parameters - - **basicFilter** required EqualityFilter +- **basicFilter** required EqualityFilter + + Filter used for data synchronization, where all specified columns must match the given values. + + Columns are combined using an AND condition. + + Example: `{ department: 'd1', name: 'abc' }` would match records where department is 'd1' AND name is 'abc'. + +- **syncOptions** required SyncOptions - Filter used for data synchronization, where all specified columns must match the given values. - - Columns are combined using an AND condition. - - Example: `{ department: 'd1', name: 'abc' }` would match records where department is 'd1' AND name is 'abc'. - - **syncOptions** required SyncOptions #### Return type + #### AsyncResult - Async result type: - - data: the expected data - - isLoading: true when data is being fetched (initially or on subsequent filter/option changes) - - error: any error that occurred +Async result type: + +- data: the expected data +- isLoading: true when data is being fetched (initially or on subsequent filter/option changes) +- error: any error that occurred ## sync(basicFilter: EqualityFilter, options: SyncOptions, onChange: OnChange, onError?: OnErrorHandler): Promise<SyncHandler> #### Parameters - - **basicFilter** required EqualityFilter +- **basicFilter** required EqualityFilter + + Filter used for data synchronization, where all specified columns must match the given values. + + Columns are combined using an AND condition. 
+ + Example: `{ department: 'd1', name: 'abc' }` would match records where department is 'd1' AND name is 'abc'. + +- **options** required SyncOptions +- **onChange** required OnChange - Filter used for data synchronization, where all specified columns must match the given values. - - Columns are combined using an AND condition. - - Example: `{ department: 'd1', name: 'abc' }` would match records where department is 'd1' AND name is 'abc'. - - **options** required SyncOptions - - **onChange** required OnChange + Creates a local synchronized table + +- **onError** optional OnErrorHandler - Creates a local synchronized table - - **onError** optional OnErrorHandler #### Return type + #### SyncHandler - - **$unsync** required () => void - - **$upsert** required (newData: T[]) => void | Promise<void> - - **getItems** required () => T[] + +- **$unsync** required () => void +- **$upsert** required (newData: T[]) => void | Promise<void> +- **getItems** required () => T[] ## syncOne(basicFilter: Partial, options: SyncOneOptions, onChange: OnchangeOne, onError?: OnErrorHandler): Promise<SingleSyncHandles> #### Parameters - - **basicFilter** required Partial +- **basicFilter** required Partial + + Make all properties in T optional + +- **options** required SyncOneOptions + + - **onChange** optional MultiChangeListener + + Data change listener. + Called on first sync and every time the data changes - Make all properties in T optional - - **options** required SyncOneOptions - - **onChange** optional MultiChangeListener + - **skipFirstTrigger** optional boolean - Data change listener. - Called on first sync and every time the data changes - - **skipFirstTrigger** optional boolean + If true then the first onChange trigger is skipped - If true then the first onChange trigger is skipped - - **select** optional AnyObject | "*" | undefined - - **storageType** optional "object" | "array" | "localStorage" | undefined + - **select** optional AnyObject | "\*" | undefined + - **storageType** optional "object" | "array" | "localStorage" | undefined - Default is "object". - "localStorage" will persist the data - - **patchText** optional boolean + Default is "object". + "localStorage" will persist the data + + - **patchText** optional boolean + + If true then only the delta of the text field is sent to server. + Full text is sent if an error occurs + + - **patchJSON** optional boolean + - **onReady** optional () => void + - **handlesOnData** optional boolean + +- **onChange** required OnchangeOne +- **onError** optional OnErrorHandler - If true then only the delta of the text field is sent to server. - Full text is sent if an error occurs - - **patchJSON** optional boolean - - **onReady** optional () => void - - **handlesOnData** optional boolean - - **onChange** required OnchangeOne - - **onError** optional OnErrorHandler #### Return type + #### SingleSyncHandles - CRUD handles added if initialised with handlesOnData = true - - **$get** required () => T | undefined - - **$find** required (idObj: Partial<T>) => T | undefined - - **$unsync** required () => any - - **$delete** required () => void - - **$update** required <OPTS extends $UpdateOpts>(newData: OPTS extends { deepMerge: true; } ? 
DeepPartial<T> : Partial<T>, opts?: OPTS | undefined) => any - - **$cloneSync** required CloneSync - - **$cloneMultiSync** required CloneMultiSync +CRUD handles added if initialised with handlesOnData = true + +- **$get** required () => T | undefined +- **$find** required (idObj: Partial<T>) => T | undefined +- **$unsync** required () => any +- **$delete** required () => void +- **$update** required <OPTS extends $UpdateOpts>(newData: OPTS extends { deepMerge: true; } ? DeepPartial<T> : Partial<T>, opts?: OPTS | undefined) => any +- **$cloneSync** required CloneSync +- **$cloneMultiSync** required CloneMultiSync ## useSyncOne(basicFilter: EqualityFilter, syncOptions: SyncOneOptions): AsyncResult + Retrieves the first row matching the filter and keeps it in sync + - use { handlesOnData: true } to get optimistic updates method: $update - any changes to the row using the $update method will be reflected instantly - to all sync subscribers that were initiated with the same syncOptions + to all sync subscribers that were initiated with the same syncOptions + #### Parameters - - **basicFilter** required EqualityFilter - - Filter used for data synchronization, where all specified columns must match the given values. - - Columns are combined using an AND condition. - - Example: `{ department: 'd1', name: 'abc' }` would match records where department is 'd1' AND name is 'abc'. - - **syncOptions** required SyncOneOptions - - **onChange** optional MultiChangeListener - - Data change listener. - Called on first sync and every time the data changes - - **skipFirstTrigger** optional boolean - - If true then the first onChange trigger is skipped - - **select** optional AnyObject | "*" | undefined - - **storageType** optional "object" | "array" | "localStorage" | undefined - - Default is "object". - "localStorage" will persist the data - - **patchText** optional boolean - - If true then only the delta of the text field is sent to server. - Full text is sent if an error occurs - - **patchJSON** optional boolean - - **onReady** optional () => void - - **handlesOnData** optional boolean -#### Return type -#### AsyncResult +- **basicFilter** required EqualityFilter + + Filter used for data synchronization, where all specified columns must match the given values. + + Columns are combined using an AND condition. + + Example: `{ department: 'd1', name: 'abc' }` would match records where department is 'd1' AND name is 'abc'. + +- **syncOptions** required SyncOneOptions + + - **onChange** optional MultiChangeListener + + Data change listener. + Called on first sync and every time the data changes + + - **skipFirstTrigger** optional boolean + + If true then the first onChange trigger is skipped + + - **select** optional AnyObject | "\*" | undefined + - **storageType** optional "object" | "array" | "localStorage" | undefined + + Default is "object". + "localStorage" will persist the data - Async result type: - - data: the expected data - - isLoading: true when data is being fetched (initially or on subsequent filter/option changes) - - error: any error that occurred + - **patchText** optional boolean + If true then only the delta of the text field is sent to server. 
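A hedged sketch of combining these options in a React component (assuming a `notes` table with a text `content` column and a `db` client instance in scope; with `handlesOnData: true` the returned row carries the `$update` handle):

```typescript
const Note = ({ id }: { id: string }) => {
  const { data: note, isLoading } = db.notes.useSyncOne(
    { id },
    { handlesOnData: true, patchText: true }, // send only text deltas where possible
  );
  if (isLoading || !note) return null;
  return (
    <textarea
      value={note.content ?? ""}
      onChange={(e) => note.$update({ content: e.target.value })}
    />
  );
};
```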
+ Full text is sent if an error occurs + - **patchJSON** optional boolean + - **onReady** optional () => void + - **handlesOnData** optional boolean +#### Return type +#### AsyncResult +Async result type: +- data: the expected data +- isLoading: true when data is being fetched (initially or on subsequent filter/option changes) +- error: any error that occurred ## useSubscribe(filter?: FullFilter, options?: SubscribeParams): AsyncResult + Retrieves a list of matching records from the view/table and subscribes to changes + #### Parameters - - **filter** optional FullFilter - - Data filter - - `{ status: 'live' }` - - `{ $or: [{ id: 1 }, { status: 'live' }] }` - - `{ $existsJoined: { referencedTable: { id: 1 } } }` - - `{ - $filter: [ - { $age: ["created_at"] }, - "<", - '1 year' - ] - }` - - **options** optional SubscribeParams - - **limit** optional number | null | undefined - - Max number of rows to return. Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) - - **throttle** optional number - - If true then the subscription will be throttled to the provided number of milliseconds - - **throttleOpts** optional { skipFirst?: boolean | undefined; } - - **skipFirst** optional boolean - - False by default. - If true then the first value will be emitted at the end of the interval. Instant otherwise +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **options** optional SubscribeParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000 + + - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) + + - **offset** optional number + + Number of rows to skip + + - **groupBy** optional boolean + + Will group by all non aggregated fields specified in select (or all fields by default) + + - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined + + Result data structure/type: + + - **row**: the first row as an object + - **value**: the first value from of first field + - **values**: array of values from the selected field + - **statement**: sql statement + - **statement-no-rls**: sql statement without row level security + - **statement-where**: sql statement where condition + + - **select** optional Select + + Fields/expressions/linked data to select + + - `"*"` or empty will return all fields + - `{ field: 0 }` - all fields except the specified field will be selected + - `{ field: 1 }` - only the specified field will be selected + - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied + - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected + - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable + + - **orderBy** optional OrderBy + + Order by options + + - Order is maintained in arrays + - `[{ key: "field", asc: true, nulls: "last" }]` + + - **having** optional FullFilter + + Filter applied after any aggregations (group by) + + - **throttle** optional number + + If true then the subscription will be throttled to the provided number of milliseconds + + - **throttleOpts** optional { skipFirst?: boolean | undefined; } + + - **skipFirst** optional boolean + + False by default. + If true then the first value will be emitted at the end of the interval. Instant otherwise + #### Return type + #### AsyncResult - Async result type: - - data: the expected data - - isLoading: true when data is being fetched (initially or on subsequent filter/option changes) - - error: any error that occurred +Async result type: + +- data: the expected data +- isLoading: true when data is being fetched (initially or on subsequent filter/option changes) +- error: any error that occurred ## useSubscribeOne(filter?: FullFilter, options?: SubscribeParams): AsyncResult + Retrieves a matching record from the view/table and subscribes to changes + #### Parameters - - **filter** optional FullFilter - - Data filter - - `{ status: 'live' }` - - `{ $or: [{ id: 1 }, { status: 'live' }] }` - - `{ $existsJoined: { referencedTable: { id: 1 } } }` - - `{ - $filter: [ - { $age: ["created_at"] }, - "<", - '1 year' - ] - }` - - **options** optional SubscribeParams - - **limit** optional number | null | undefined - - Max number of rows to return. 
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) - - **throttle** optional number - - If true then the subscription will be throttled to the provided number of milliseconds - - **throttleOpts** optional { skipFirst?: boolean | undefined; } - - **skipFirst** optional boolean - - False by default. - If true then the first value will be emitted at the end of the interval. Instant otherwise +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **options** optional SubscribeParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000 + + - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) + + - **offset** optional number + + Number of rows to skip + + - **groupBy** optional boolean + + Will group by all non aggregated fields specified in select (or all fields by default) + + - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined + + Result data structure/type: + + - **row**: the first row as an object + - **value**: the first value from of first field + - **values**: array of values from the selected field + - **statement**: sql statement + - **statement-no-rls**: sql statement without row level security + - **statement-where**: sql statement where condition + + - **select** optional Select + + Fields/expressions/linked data to select + + - `"*"` or empty will return all fields + - `{ field: 0 }` - all fields except the specified field will be selected + - `{ field: 1 }` - only the specified field will be selected + - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied + - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected + - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable + + - **orderBy** optional OrderBy + + Order by options + + - Order is maintained in arrays + - `[{ key: "field", asc: true, nulls: "last" }]` + + - **having** optional FullFilter + + Filter applied after any aggregations (group by) + + - **throttle** optional number + + If true then the subscription will be throttled to the provided number of milliseconds + + - **throttleOpts** optional { skipFirst?: boolean | undefined; } + + - **skipFirst** optional boolean + + False by default. + If true then the first value will be emitted at the end of the interval. Instant otherwise + #### Return type + #### AsyncResult - Async result type: - - data: the expected data - - isLoading: true when data is being fetched (initially or on subsequent filter/option changes) - - error: any error that occurred +Async result type: + +- data: the expected data +- isLoading: true when data is being fetched (initially or on subsequent filter/option changes) +- error: any error that occurred ## useFind(filter?: FullFilter, selectParams?: SelectParams): AsyncResult + Retrieves a list of matching records from the view/table + #### Parameters - - **filter** optional FullFilter - - Data filter - - `{ status: 'live' }` - - `{ $or: [{ id: 1 }, { status: 'live' }] }` - - `{ $existsJoined: { referencedTable: { id: 1 } } }` - - `{ - $filter: [ - { $age: ["created_at"] }, - "<", - '1 year' - ] - }` - - **selectParams** optional SelectParams - - **limit** optional number | null | undefined - - Max number of rows to return. 
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **selectParams** optional SelectParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000 + + - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) + + - **offset** optional number + + Number of rows to skip + + - **groupBy** optional boolean + + Will group by all non aggregated fields specified in select (or all fields by default) + + - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined + + Result data structure/type: + + - **row**: the first row as an object + - **value**: the first value from of first field + - **values**: array of values from the selected field + - **statement**: sql statement + - **statement-no-rls**: sql statement without row level security + - **statement-where**: sql statement where condition + + - **select** optional Select + + Fields/expressions/linked data to select + + - `"*"` or empty will return all fields + - `{ field: 0 }` - all fields except the specified field will be selected + - `{ field: 1 }` - only the specified field will be selected + - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied + - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected + - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable + + - **orderBy** optional OrderBy + + Order by options + + - Order is maintained in arrays + - `[{ key: "field", asc: true, nulls: "last" }]` + + - **having** optional FullFilter + + Filter applied after any aggregations (group by) + #### Return type + #### AsyncResult - Async result type: - - data: the expected data - - isLoading: true when data is being fetched (initially or on subsequent filter/option changes) - - error: any error that occurred +Async result type: + +- data: the expected data +- isLoading: true when data is being fetched (initially or on subsequent filter/option changes) +- error: any error that occurred ## useFindOne(filter?: FullFilter, selectParams?: SelectParams): AsyncResult + Retrieves first matching record from the view/table + #### Parameters - - **filter** optional FullFilter - - Data filter - - `{ status: 'live' }` - - `{ $or: [{ id: 1 }, { status: 'live' }] }` - - `{ $existsJoined: { referencedTable: { id: 1 } } }` - - `{ - $filter: [ - { $age: ["created_at"] }, - "<", - '1 year' - ] - }` - - **selectParams** optional SelectParams - - **limit** optional number | null | undefined - - Max number of rows to return. 
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **selectParams** optional SelectParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000 + + - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) + + - **offset** optional number + + Number of rows to skip + + - **groupBy** optional boolean + + Will group by all non aggregated fields specified in select (or all fields by default) + + - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined + + Result data structure/type: + + - **row**: the first row as an object + - **value**: the first value from of first field + - **values**: array of values from the selected field + - **statement**: sql statement + - **statement-no-rls**: sql statement without row level security + - **statement-where**: sql statement where condition + + - **select** optional Select + + Fields/expressions/linked data to select + + - `"*"` or empty will return all fields + - `{ field: 0 }` - all fields except the specified field will be selected + - `{ field: 1 }` - only the specified field will be selected + - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied + - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected + - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable + + - **orderBy** optional OrderBy + + Order by options + + - Order is maintained in arrays + - `[{ key: "field", asc: true, nulls: "last" }]` + + - **having** optional FullFilter + + Filter applied after any aggregations (group by) + #### Return type + #### AsyncResult - Async result type: - - data: the expected data - - isLoading: true when data is being fetched (initially or on subsequent filter/option changes) - - error: any error that occurred +Async result type: + +- data: the expected data +- isLoading: true when data is being fetched (initially or on subsequent filter/option changes) +- error: any error that occurred ## useCount(filter?: FullFilter, selectParams?: SelectParams): AsyncResult + Returns the total number of rows matching the filter + #### Parameters - - **filter** optional FullFilter - - Data filter - - `{ status: 'live' }` - - `{ $or: [{ id: 1 }, { status: 'live' }] }` - - `{ $existsJoined: { referencedTable: { id: 1 } } }` - - `{ - $filter: [ - { $age: ["created_at"] }, - "<", - '1 year' - ] - }` - - **selectParams** optional SelectParams - - **limit** optional number | null | undefined - - Max number of rows to return. 
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **selectParams** optional SelectParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000 + + - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) + + - **offset** optional number + + Number of rows to skip + + - **groupBy** optional boolean + + Will group by all non aggregated fields specified in select (or all fields by default) + + - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined + + Result data structure/type: + + - **row**: the first row as an object + - **value**: the first value from of first field + - **values**: array of values from the selected field + - **statement**: sql statement + - **statement-no-rls**: sql statement without row level security + - **statement-where**: sql statement where condition + + - **select** optional Select + + Fields/expressions/linked data to select + + - `"*"` or empty will return all fields + - `{ field: 0 }` - all fields except the specified field will be selected + - `{ field: 1 }` - only the specified field will be selected + - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied + - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected + - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable + + - **orderBy** optional OrderBy + + Order by options + + - Order is maintained in arrays + - `[{ key: "field", asc: true, nulls: "last" }]` + + - **having** optional FullFilter + + Filter applied after any aggregations (group by) + #### Return type + #### AsyncResult - Async result type: - - data: the expected data - - isLoading: true when data is being fetched (initially or on subsequent filter/option changes) - - error: any error that occurred +Async result type: + +- data: the expected data +- isLoading: true when data is being fetched (initially or on subsequent filter/option changes) +- error: any error that occurred ## useSize(filter?: FullFilter, selectParams?: SelectParams): AsyncResult + Returns result size in bits matching the filter and selectParams + #### Parameters - - **filter** optional FullFilter - - Data filter - - `{ status: 'live' }` - - `{ $or: [{ id: 1 }, { status: 'live' }] }` - - `{ $existsJoined: { referencedTable: { id: 1 } } }` - - `{ - $filter: [ - { $age: ["created_at"] }, - "<", - '1 year' - ] - }` - - **selectParams** optional SelectParams - - **limit** optional number | null | undefined - - Max number of rows to return. 
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **selectParams** optional SelectParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000 + + - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) + + - **offset** optional number + + Number of rows to skip + + - **groupBy** optional boolean + + Will group by all non aggregated fields specified in select (or all fields by default) + + - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined + + Result data structure/type: + + - **row**: the first row as an object + - **value**: the first value of the first field + - **values**: array of values from the selected field + - **statement**: sql statement + - **statement-no-rls**: sql statement without row level security + - **statement-where**: sql statement where condition + + - **select** optional Select + + Fields/expressions/linked data to select + + - `"*"` or empty will return all fields + - `{ field: 0 }` - all fields except the specified field will be selected + - `{ field: 1 }` - only the specified field will be selected + - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied + - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected + - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable + + - **orderBy** optional OrderBy + + Order by options + + - Order is maintained in arrays + - `[{ key: "field", asc: true, nulls: "last" }]` + + - **having** optional FullFilter + + Filter applied after any aggregations (group by) + #### Return type + #### AsyncResult Async result type: - data: the expected data - isLoading: true when data is being fetched (initially or on subsequent filter/option changes) - error: any error that occurred \ No newline at end of file +Async result type: + +- data: the expected data +- isLoading: true when data is being fetched (initially or on subsequent filter/option changes) +- error: any error that occurred diff --git a/docs/db-handler.md b/docs/db-handler.md index 8bb75fc7..aca88d55 100644 --- a/docs/db-handler.md +++ b/docs/db-handler.md @@ -3,788 +3,1028 @@ The following table/view methods are available on the client and server db object ## getInfo(lang?: string): Promise<TableInfo> + Retrieves the table/view info + #### Parameters - **lang** optional string + + Language code for i18n data. "en" by default - Language code for i18n data. "en" by default #### Return type + #### TableInfo - - **oid** required number - OID from the postgres database - Useful in handling renamed tables - - **comment** optional string +- **oid** required number + + OID from the postgres database + Useful in handling renamed tables + +- **comment** optional string + + Comment from the postgres database + +- **isFileTable** optional FileTableConfig + + Defined if this is the fileTable + + - **allowedNestedInserts** optional { table: string; column: string; } + +- **hasFiles** optional boolean - Comment from the postgres database - - **isFileTable** optional FileTableConfig + True if fileTable is enabled and this table references the fileTable + Used in UI - Defined if this is the fileTable - - **allowedNestedInserts** optional { table: string; column: string; } - - **hasFiles** optional boolean +- **isView** optional boolean - True if fileTable is enabled and this table references the fileTable - Used in UI - - **isView** optional boolean + True if this is a view.
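For instance, a client could check this flag before offering editing (a sketch; `my_table` and `disableEditing` are hypothetical):

```typescript
const info = await db.my_table.getInfo();
if (info.isView) {
  disableEditing(); // hypothetical UI helper
}
```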
+ Table methods (insert, update, delete) are undefined for views - True if this is a view. - Table methods (insert, update, delete) are undefined for views - - **fileTableName** optional string +- **fileTableName** optional string - Name of the fileTable (if enabled) - Used in UI - - **dynamicRules** optional { update?: boolean | undefined; } + Name of the fileTable (if enabled) + Used in UI - Used for getColumns in cases where the columns are dynamic based on the request. - See dynamicFields from Update rules - - **update** optional boolean - - **info** optional { label?: string | undefined; } +- **dynamicRules** optional { update?: boolean | undefined; } - Additional table info provided through TableConfig - - **label** optional string - - **uniqueColumnGroups** required string[][] | undefined + Used for getColumns in cases where the columns are dynamic based on the request. + See dynamicFields from Update rules - List of unique column indexes/constraints. - Column groups where at least a column is not allowed to be viewed (selected) are omitted. + - **update** optional boolean + +- **info** optional { label?: string | undefined; } + + Additional table info provided through TableConfig + + - **label** optional string + +- **uniqueColumnGroups** required string[][] | undefined + + List of unique column indexes/constraints. + Column groups where at least a column is not allowed to be viewed (selected) are omitted. ## getColumns(lang?: string, params?: GetColumnsParams): Promise<ValidatedColumnInfo[]> + Retrieves columns metadata of the table/view + #### Parameters - - **lang** optional string +- **lang** optional string + + Language code for i18n data. "en" by default + +- **params** optional GetColumnsParams + + Dynamic/filter based rules (dynamicFields) allow specifying which columns can be updated based on the target record. + Useful when the same user can update different fields based on the record state. - Language code for i18n data. "en" by default - - **params** optional GetColumnsParams + - **rule** required "update" - Dynamic/filter based rules (dynamicFields) allow specifying which columns can be updated based on the target record. - Useful when the same user can update different fields based on the record state. - - **rule** required "update" + Only "update" is supported at the moment - Only "update" is supported at the moment - - **filter** required FullFilter + - **filter** required FullFilter + + Filter specifying which records are to be updated - Filter specifying which records are to be updated #### Return type + #### ValidatedColumnInfo - - **name** required string - - **label** required string - Column display name. Will be first non empty value from i18n data, comment, name - - **comment** required string | undefined +- **name** required string +- **label** required string + + Column display name. Will be first non empty value from i18n data, comment, name + +- **comment** required string | undefined + + Column description (if provided) + +- **ordinal_position** required number + + Ordinal position of the column within the table (count starts at 1) + +- **is_nullable** required boolean + + True if column is nullable + +- **is_updatable** required boolean +- **is_generated** required boolean + + If the column is a generated column (converted to boolean from ALWAYS and NEVER) + +- **data_type** required string + + Simplified data type + +- **udt_name** required PG_COLUMN_UDT_DATA_TYPE + + Postgres data type name. 
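A brief sketch of inspecting this metadata (assuming a `db` handler and a hypothetical `my_table`):

```typescript
const columns = await db.my_table.getColumns();
for (const col of columns) {
  // e.g. udt_name "_int4" denotes an int4[] column
  const isArray = col.udt_name.startsWith("_");
  console.log(col.name, col.udt_name, { isArray });
}
```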
+ Array types start with an underscore + +- **element_type** required string | undefined + + Element data type + +- **element_udt_name** required string | undefined + + Element data type name + +- **is_pkey** required boolean - Column description (if provided) - - **ordinal_position** required number + PRIMARY KEY constraint on column. + A table can have a multi column primary key - Ordinal position of the column within the table (count starts at 1) - - **is_nullable** required boolean +- **references** optional ReferenceTable +- **has_default** required boolean - True if column is nullable - - **is_updatable** required boolean - - **is_generated** required boolean + true if column has a default value + Used for excluding pkey from insert - If the column is a generated column (converted to boolean from ALWAYS and NEVER) - - **data_type** required string +- **column_default** optional any - Simplified data type - - **udt_name** required PG_COLUMN_UDT_DATA_TYPE + Column default value - Postgres data type name. - Array types start with an underscore - - **element_type** required string | undefined +- **min** optional string | number | undefined - Element data type - - **element_udt_name** required string | undefined + Extracted from tableConfig + Used in SmartForm - Element data type name - - **is_pkey** required boolean +- **max** optional string | number | undefined +- **hint** optional string +- **jsonbSchema** optional JSONBSchema - PRIMARY KEY constraint on column. - A table can have a multi column primary key - - **references** optional ReferenceTable - - **has_default** required boolean + JSONB schema (a simplified version of json schema) for the column (if defined in the tableConfig) + A check constraint will use this schema for runtime data validation and appropriate TS types will be generated - true if column has a default value - Used for excluding pkey from insert - - **column_default** optional any +- **file** optional FileColumnConfig - Column default value - - **min** optional string | number | undefined + If defined then this column is referencing the file table + Extracted from FileTable config + Used in SmartForm - Extracted from tableConfig - Used in SmartForm - - **max** optional string | number | undefined - - **hint** optional string - - **jsonbSchema** optional JSONBSchema +- **tsDataType** required "string" | "number" | "boolean" | "any" | "number[]" | "boolean[]" | "string[]" | "any[]" - JSONB schema (a simplified version of json schema) for the column (if defined in the tableConfig) - A check constraint will use this schema for runtime data validation and apropriate TS types will be generated - - **file** optional FileColumnConfig + TypeScript data type - If degined then this column is referencing the file table - Extracted from FileTable config - Used in SmartForm - - **tsDataType** required "string" | "number" | "boolean" | "any" | "number[]" | "boolean[]" | "string[]" | "any[]" +- **select** required boolean - TypeScript data type - - **select** required boolean + Can be viewed/selected + Based on access rules and postgres policies - Can be viewed/selected - Based on access rules and postgres policies - - **orderBy** required boolean +- **orderBy** required boolean - Can be ordered by - Based on access rules - - **filter** required boolean + Can be ordered by + Based on access rules - Can be filtered by - Based on access rules - - **insert** required boolean +- **filter** required boolean - Can be inserted - Based on access rules and postgres policies - -
**update** required boolean + Can be filtered by + Based on access rules - Can be updated - Based on access rules and postgres policies - - **delete** required boolean +- **insert** required boolean - Can be used in the delete filter - Based on access rules + Can be inserted + Based on access rules and postgres policies + +- **update** required boolean + + Can be updated + Based on access rules and postgres policies + +- **delete** required boolean + + Can be used in the delete filter + Based on access rules ## find(filter?: FullFilter, selectParams?: SelectParams): Promise<SelectReturnType> + Retrieves a list of matching records from the view/table + #### Parameters - - **filter** optional FullFilter - - Data filter - - `{ status: 'live' }` - - `{ $or: [{ id: 1 }, { status: 'live' }] }` - - `{ $existsJoined: { referencedTable: { id: 1 } } }` - - `{ - $filter: [ - { $age: ["created_at"] }, - "<", - '1 year' - ] - }` - - **selectParams** optional SelectParams - - **limit** optional number | null | undefined - - Max number of rows to return. Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **selectParams** optional SelectParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000 + + - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) + + - **offset** optional number + + Number of rows to skip + + - **groupBy** optional boolean + + Will group by all non aggregated fields specified in select (or all fields by default) + + - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined + + Result data structure/type: + + - **row**: the first row as an object + - **value**: the first value from of first field + - **values**: array of values from the selected field + - **statement**: sql statement + - **statement-no-rls**: sql statement without row level security + - **statement-where**: sql statement where condition + + - **select** optional Select + + Fields/expressions/linked data to select + + - `"*"` or empty will return all fields + - `{ field: 0 }` - all fields except the specified field will be selected + - `{ field: 1 }` - only the specified field will be selected + - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied + - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected + - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable + + - **orderBy** optional OrderBy + + Order by options + + - Order is maintained in arrays + - `[{ key: "field", asc: true, nulls: "last" }]` + + - **having** optional FullFilter + + Filter applied after any aggregations (group by) + #### Return type + #### SelectReturnType ## findOne(filter?: FullFilter, selectParams?: SelectParams): Promise<SelectReturnType<S, P, T, false> | undefined> + Retrieves a record from the view/table + #### Parameters - - **filter** optional FullFilter - - Data filter - - `{ status: 'live' }` - - `{ $or: [{ id: 1 }, { status: 'live' }] }` - - `{ $existsJoined: { referencedTable: { id: 1 } } }` - - `{ - $filter: [ - { $age: ["created_at"] }, - "<", - '1 year' - ] - }` - - **selectParams** optional SelectParams - - **limit** optional number | null | undefined - - Max number of rows to return. 
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **selectParams** optional SelectParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000
+
+    - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
+
+  - **offset** optional number
+
+    Number of rows to skip
+
+  - **groupBy** optional boolean
+
+    Will group by all non aggregated fields specified in select (or all fields by default)
+
+  - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
+
+    Result data structure/type:
+
+    - **row**: the first row as an object
+    - **value**: the first value of the first field
+    - **values**: array of values from the selected field
+    - **statement**: sql statement
+    - **statement-no-rls**: sql statement without row level security
+    - **statement-where**: sql statement where condition
+
+  - **select** optional Select
+
+    Fields/expressions/linked data to select
+
+    - `"*"` or empty will return all fields
+    - `{ field: 0 }` - all fields except the specified field will be selected
+    - `{ field: 1 }` - only the specified field will be selected
+    - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
+    - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
+    - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
+
+  - **orderBy** optional OrderBy
+
+    Order by options
+
+    - Order is maintained in arrays
+    - `[{ key: "field", asc: true, nulls: "last" }]`
+
+  - **having** optional FullFilter
+
+    Filter applied after any aggregations (group by)
+
#### Return type
+
#### SelectReturnType<S, P, T, false> | undefined
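+
+For illustration, a minimal `findOne` call against a hypothetical `items` table (the table and column names are assumptions for the example, not part of the API):
+
+```typescript
+// Fetch the most recent live item, selecting only two fields
+const item = await db.items.findOne(
+  { status: "live" },
+  { select: { id: 1, name: 1 }, orderBy: [{ key: "created_at", asc: false }] },
+);
+```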
## subscribe(filter: FullFilter, params: SelectParams, onData: SubscribeCallback, onError?: SubscribeOnError): Promise<SubscriptionHandler>
+
Retrieves a list of matching records from the view/table and subscribes to changes
+
#### Parameters

+- **filter** required FullFilter
+
+  Data filter
+
+  - `{ status: 'live' }`
+  - `{ $or: [{ id: 1 }, { status: 'live' }] }`
+  - `{ $existsJoined: { referencedTable: { id: 1 } } }`
+  - `{
+    $filter: [
+      { $age: ["created_at"] },
+      "<",
+      '1 year'
+    ]
+}`
+
+- **params** required SelectParams
+
+  - **limit** optional number | null | undefined
+
+    Max number of rows to return.
Defaults to 1000
+
+    - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
+
+  - **offset** optional number
+
+    Number of rows to skip
+
+  - **groupBy** optional boolean
+
+    Will group by all non aggregated fields specified in select (or all fields by default)
+
+  - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
+
+    Result data structure/type:
+
+    - **row**: the first row as an object
+    - **value**: the first value of the first field
+    - **values**: array of values from the selected field
+    - **statement**: sql statement
+    - **statement-no-rls**: sql statement without row level security
+    - **statement-where**: sql statement where condition
+
+  - **select** optional Select
+
+    Fields/expressions/linked data to select
+
+    - `"*"` or empty will return all fields
+    - `{ field: 0 }` - all fields except the specified field will be selected
+    - `{ field: 1 }` - only the specified field will be selected
+    - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
+    - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
+    - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
+
+  - **orderBy** optional OrderBy
+
+    Order by options
+
+    - Order is maintained in arrays
+    - `[{ key: "field", asc: true, nulls: "last" }]`
+
+  - **having** optional FullFilter
+
+    Filter applied after any aggregations (group by)
+
+- **onData** required SubscribeCallback
+
+  Callback fired once after subscribing and then every time the data matching the filter changes
+
+- **onError** optional SubscribeOnError
+
+  Error handler that may fire due to schema changes or other post subscribe issues
+  Column or filter issues are thrown during the subscribe call

-    Data filter
-    - `{ status: 'live' }`
-    - `{ $or: [{ id: 1 }, { status: 'live' }] }`
-    - `{ $existsJoined: { referencedTable: { id: 1 } } }`
-    - `{
-      $filter: [
-        { $age: ["created_at"] },
-        "<",
-        '1 year'
-      ]
-    }`
-  - **params** required SelectParams
-    - **limit** optional number | null | undefined
-
-      Max number of rows to return.
Defaults to 1000
-
-      - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
-    - **offset** optional number
-
-      Number of rows to skip
-    - **groupBy** optional boolean
-
-      Will group by all non aggregated fields specified in select (or all fields by default)
-    - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
-
-      Result data structure/type:
-      - **row**: the first row as an object
-      - **value**: the first value from of first field
-      - **values**: array of values from the selected field
-      - **statement**: sql statement
-      - **statement-no-rls**: sql statement without row level security
-      - **statement-where**: sql statement where condition
-    - **select** optional Select
-
-      Fields/expressions/linked data to select
-      - `"*"` or empty will return all fields
-      - `{ field: 0 }` - all fields except the specified field will be selected
-      - `{ field: 1 }` - only the specified field will be selected
-      - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
-      - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
-      - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
-    - **orderBy** optional OrderBy
-
-      Order by options
-      - Order is maintained in arrays
-      - `[{ key: "field", asc: true, nulls: "last" }]`
-    - **having** optional FullFilter
-
-      Filter applied after any aggregations (group by)
-  - **onData** required SubscribeCallback
-
-    Callback fired once after subscribing and then every time the data matching the filter changes
-  - **onError** optional SubscribeOnError
-
-    Error handler that may fire due to schema changes or other post subscribe issues
-    Column or filter issues are thrown during the subscribe call

#### Return type
+
#### SubscriptionHandler
+
-  - **unsubscribe** required () => Promise<any>
-  - **filter** required {} | FullFilter<void, void>
+- **unsubscribe** required () => Promise<any>
+- **filter** required {} | FullFilter<void, void>
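+
+A small usage sketch (table and column names are hypothetical):
+
+```typescript
+// onData fires once on subscribe and then on every relevant data change
+const sub = await db.items.subscribe({ status: "live" }, {}, (items) => {
+  console.log("live items:", items.length);
+});
+// later, when no longer needed:
+await sub.unsubscribe();
+```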
## subscribeOne(filter: FullFilter, params: SelectParams, onData: SubscribeOneCallback, onError?: SubscribeOnError): Promise<SubscriptionHandler>
+
Retrieves the first matching record from the view/table and subscribes to changes
+
#### Parameters

+- **filter** required FullFilter
+
+  Data filter
+
+  - `{ status: 'live' }`
+  - `{ $or: [{ id: 1 }, { status: 'live' }] }`
+  - `{ $existsJoined: { referencedTable: { id: 1 } } }`
+  - `{
+    $filter: [
+      { $age: ["created_at"] },
+      "<",
+      '1 year'
+    ]
+}`
+
+- **params** required SelectParams
+
+  - **limit** optional number | null | undefined
+
+    Max number of rows to return.
Defaults to 1000
+
+    - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
+
+  - **offset** optional number
+
+    Number of rows to skip
+
+  - **groupBy** optional boolean
+
+    Will group by all non aggregated fields specified in select (or all fields by default)
+
+  - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
+
+    Result data structure/type:
+
+    - **row**: the first row as an object
+    - **value**: the first value of the first field
+    - **values**: array of values from the selected field
+    - **statement**: sql statement
+    - **statement-no-rls**: sql statement without row level security
+    - **statement-where**: sql statement where condition
+
+  - **select** optional Select
+
+    Fields/expressions/linked data to select
+
+    - `"*"` or empty will return all fields
+    - `{ field: 0 }` - all fields except the specified field will be selected
+    - `{ field: 1 }` - only the specified field will be selected
+    - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
+    - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
+    - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
+
+  - **orderBy** optional OrderBy
+
+    Order by options
+
+    - Order is maintained in arrays
+    - `[{ key: "field", asc: true, nulls: "last" }]`
+
+  - **having** optional FullFilter
+
+    Filter applied after any aggregations (group by)
+
+- **onData** required SubscribeOneCallback
+
+  Callback fired once after subscribing and then every time the data matching the filter changes
+
+- **onError** optional SubscribeOnError
+
+  Error handler that may fire due to schema changes or other post subscribe issues
+  Column or filter issues are thrown during the subscribe call

-    Data filter
-    - `{ status: 'live' }`
-    - `{ $or: [{ id: 1 }, { status: 'live' }] }`
-    - `{ $existsJoined: { referencedTable: { id: 1 } } }`
-    - `{
-      $filter: [
-        { $age: ["created_at"] },
-        "<",
-        '1 year'
-      ]
-    }`
-  - **params** required SelectParams
-    - **limit** optional number | null | undefined
-
-      Max number of rows to return.
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) - - **onData** required SubscribeOneCallback - - Callback fired once after subscribing and then every time the data matching the filter changes - - **onError** optional SubscribeOnError - - Error handler that may fire due to schema changes or other post subscribe issues - Column or filter issues are thrown during the subscribe call #### Return type + #### SubscriptionHandler - - **unsubscribe** required () => Promise<any> - - **filter** required {} | FullFilter<void, void> + +- **unsubscribe** required () => Promise<any> +- **filter** required {} | FullFilter<void, void> ## count(filter?: FullFilter, selectParams?: SelectParams): Promise<number> + Returns the number of rows that match the filter + #### Parameters - - **filter** optional FullFilter - - Data filter - - `{ status: 'live' }` - - `{ $or: [{ id: 1 }, { status: 'live' }] }` - - `{ $existsJoined: { referencedTable: { id: 1 } } }` - - `{ - $filter: [ - { $age: ["created_at"] }, - "<", - '1 year' - ] - }` - - **selectParams** optional SelectParams - - **limit** optional number | null | undefined - - Max number of rows to return. 
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **selectParams** optional SelectParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000
+
+    - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
+
+  - **offset** optional number
+
+    Number of rows to skip
+
+  - **groupBy** optional boolean
+
+    Will group by all non aggregated fields specified in select (or all fields by default)
+
+  - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
+
+    Result data structure/type:
+
+    - **row**: the first row as an object
+    - **value**: the first value of the first field
+    - **values**: array of values from the selected field
+    - **statement**: sql statement
+    - **statement-no-rls**: sql statement without row level security
+    - **statement-where**: sql statement where condition
+
+  - **select** optional Select
+
+    Fields/expressions/linked data to select
+
+    - `"*"` or empty will return all fields
+    - `{ field: 0 }` - all fields except the specified field will be selected
+    - `{ field: 1 }` - only the specified field will be selected
+    - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
+    - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
+    - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
+
+  - **orderBy** optional OrderBy
+
+    Order by options
+
+    - Order is maintained in arrays
+    - `[{ key: "field", asc: true, nulls: "last" }]`
+
+  - **having** optional FullFilter
+
+    Filter applied after any aggregations (group by)
+
#### Return type
+
#### number

## size(filter?: FullFilter, selectParams?: SelectParams): Promise<string>
+
Returns result size in bits
+
#### Parameters

-  - **filter** optional FullFilter
-
-    Data filter
-    - `{ status: 'live' }`
-    - `{ $or: [{ id: 1 }, { status: 'live' }] }`
-    - `{ $existsJoined: { referencedTable: { id: 1 } } }`
-    - `{
-      $filter: [
-        { $age: ["created_at"] },
-        "<",
-        '1 year'
-      ]
-    }`
-  - **selectParams** optional SelectParams
-    - **limit** optional number | null | undefined
-
-      Max number of rows to return.
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **selectParams** optional SelectParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000
+
+    - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
+
+  - **offset** optional number
+
+    Number of rows to skip
+
+  - **groupBy** optional boolean
+
+    Will group by all non aggregated fields specified in select (or all fields by default)
+
+  - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
+
+    Result data structure/type:
+
+    - **row**: the first row as an object
+    - **value**: the first value of the first field
+    - **values**: array of values from the selected field
+    - **statement**: sql statement
+    - **statement-no-rls**: sql statement without row level security
+    - **statement-where**: sql statement where condition
+
+  - **select** optional Select
+
+    Fields/expressions/linked data to select
+
+    - `"*"` or empty will return all fields
+    - `{ field: 0 }` - all fields except the specified field will be selected
+    - `{ field: 1 }` - only the specified field will be selected
+    - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
+    - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
+    - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
+
+  - **orderBy** optional OrderBy
+
+    Order by options
+
+    - Order is maintained in arrays
+    - `[{ key: "field", asc: true, nulls: "last" }]`
+
+  - **having** optional FullFilter
+
+    Filter applied after any aggregations (group by)
+
#### Return type
+
#### string
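+
+For example (hypothetical table name; note the size is returned as a string):
+
+```typescript
+const resultSize = await db.items.size({ status: "live" });
+console.log(resultSize); // e.g. "41624"
+```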
## update(filter: FullFilter, newData: Partial, params?: SelectParams): Promise<UpdateReturnType<P, T, S> | undefined>
+
Updates a record in the table based on the specified filter criteria
+
- Use { multi: false } to ensure no more than one row is updated
+
#### Parameters

+- **filter** required FullFilter
+
+  Data filter
+
+  - `{ status: 'live' }`
+  - `{ $or: [{ id: 1 }, { status: 'live' }] }`
+  - `{ $existsJoined: { referencedTable: { id: 1 } } }`
+  - `{
+    $filter: [
+      { $age: ["created_at"] },
+      "<",
+      '1 year'
+    ]
+}`
+
+- **newData** required Partial
+
+  Make all properties in T optional
+
+- **params** optional SelectParams
+
+  - **limit** optional number | null | undefined
+
+    Max number of rows to return.
Defaults to 1000
+
+    - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
+
+  - **offset** optional number
+
+    Number of rows to skip
+
+  - **groupBy** optional boolean
+
+    Will group by all non aggregated fields specified in select (or all fields by default)
+
+  - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
+
+    Result data structure/type:
+
+    - **row**: the first row as an object
+    - **value**: the first value of the first field
+    - **values**: array of values from the selected field
+    - **statement**: sql statement
+    - **statement-no-rls**: sql statement without row level security
+    - **statement-where**: sql statement where condition
+
+  - **select** optional Select
+
+    Fields/expressions/linked data to select
+
+    - `"*"` or empty will return all fields
+    - `{ field: 0 }` - all fields except the specified field will be selected
+    - `{ field: 1 }` - only the specified field will be selected
+    - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
+    - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
+    - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
+
+  - **orderBy** optional OrderBy
+
+    Order by options
+
+    - Order is maintained in arrays
+    - `[{ key: "field", asc: true, nulls: "last" }]`
+
+  - **having** optional FullFilter
+
+    Filter applied after any aggregations (group by)

-    Data filter
-    - `{ status: 'live' }`
-    - `{ $or: [{ id: 1 }, { status: 'live' }] }`
-    - `{ $existsJoined: { referencedTable: { id: 1 } } }`
-    - `{
-      $filter: [
-        { $age: ["created_at"] },
-        "<",
-        '1 year'
-      ]
-    }`
-  - **newData** required Partial
-
-    Make all properties in T optional
-  - **params** optional SelectParams
-    - **limit** optional number | null | undefined
-
-      Max number of rows to return.
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) #### Return type + #### UpdateReturnType<P, T, S> | undefined ## updateBatch(data: [FullFilter, Partial>][], params?: SelectParams): Promise<void | UpdateReturnType<P, T, S>> + Updates multiple records in the table in a batch operation. + - Each item in the `data` array contains a filter and the corresponding data to update. + #### Parameters - - **data** required [FullFilter<T, S>, Partial<UpsertDataToPGCast<T>>][] - - **params** optional SelectParams - - **limit** optional number | null | undefined - - Max number of rows to return. 
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) +- **data** required [FullFilter<T, S>, Partial<UpsertDataToPGCast<T>>][] +- **params** optional SelectParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000
+
+    - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
+
+  - **offset** optional number
+
+    Number of rows to skip
+
+  - **groupBy** optional boolean
+
+    Will group by all non aggregated fields specified in select (or all fields by default)
+
+  - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
+
+    Result data structure/type:
+
+    - **row**: the first row as an object
+    - **value**: the first value of the first field
+    - **values**: array of values from the selected field
+    - **statement**: sql statement
+    - **statement-no-rls**: sql statement without row level security
+    - **statement-where**: sql statement where condition
+
+  - **select** optional Select
+
+    Fields/expressions/linked data to select
+
+    - `"*"` or empty will return all fields
+    - `{ field: 0 }` - all fields except the specified field will be selected
+    - `{ field: 1 }` - only the specified field will be selected
+    - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
+    - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
+    - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
+
+  - **orderBy** optional OrderBy
+
+    Order by options
+
+    - Order is maintained in arrays
+    - `[{ key: "field", asc: true, nulls: "last" }]`
+
+  - **having** optional FullFilter
+
+    Filter applied after any aggregations (group by)
+
#### Return type
+
#### void | UpdateReturnType<P, T, S>
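+
+A sketch of a batch update (hypothetical table and columns); each tuple pairs a filter with the data to apply to the rows it matches:
+
+```typescript
+await db.items.updateBatch([
+  [{ id: 1 }, { status: "archived" }],
+  [{ id: 2 }, { name: "renamed" }],
+]);
+```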
## insert(data: UpsertDataToPGCast | UpsertDataToPGCast[], params?: SelectParams): Promise<InsertReturnType>
+
Inserts a new record into the table.
+
#### Parameters

-  - **data** required InsertData
-  - **params** optional SelectParams
-    - **limit** optional number | null | undefined
-
-      Max number of rows to return.
Defaults to 1000
-
-      - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
-    - **offset** optional number
-
-      Number of rows to skip
-    - **groupBy** optional boolean
-
-      Will group by all non aggregated fields specified in select (or all fields by default)
-    - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
-
-      Result data structure/type:
-      - **row**: the first row as an object
-      - **value**: the first value from of first field
-      - **values**: array of values from the selected field
-      - **statement**: sql statement
-      - **statement-no-rls**: sql statement without row level security
-      - **statement-where**: sql statement where condition
-    - **select** optional Select
-
-      Fields/expressions/linked data to select
-      - `"*"` or empty will return all fields
-      - `{ field: 0 }` - all fields except the specified field will be selected
-      - `{ field: 1 }` - only the specified field will be selected
-      - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
-      - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
-      - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
-    - **orderBy** optional OrderBy
-
-      Order by options
-      - Order is maintained in arrays
-      - `[{ key: "field", asc: true, nulls: "last" }]`
-    - **having** optional FullFilter
-
-      Filter applied after any aggregations (group by)
+- **data** required InsertData
+- **params** optional SelectParams
+
+  - **limit** optional number | null | undefined
+
+    Max number of rows to return. Defaults to 1000
+
+    - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
+
+  - **offset** optional number
+
+    Number of rows to skip
+
+  - **groupBy** optional boolean
+
+    Will group by all non aggregated fields specified in select (or all fields by default)
+
+  - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
+
+    Result data structure/type:
+
+    - **row**: the first row as an object
+    - **value**: the first value of the first field
+    - **values**: array of values from the selected field
+    - **statement**: sql statement
+    - **statement-no-rls**: sql statement without row level security
+    - **statement-where**: sql statement where condition
+
+  - **select** optional Select
+
+    Fields/expressions/linked data to select
+
+    - `"*"` or empty will return all fields
+    - `{ field: 0 }` - all fields except the specified field will be selected
+    - `{ field: 1 }` - only the specified field will be selected
+    - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
+    - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
+    - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
+
+  - **orderBy** optional OrderBy
+
+    Order by options
+
+    - Order is maintained in arrays
+    - `[{ key: "field", asc: true, nulls: "last" }]`
+
+  - **having** optional FullFilter
+
+    Filter applied after any aggregations (group by)
+
#### Return type
+
#### InsertReturnType
-  Nothing is returned by default.
-  `returning` must be specified to return the updated records.
-  If an array of records is inserted then an array of records will be returned
-  otherwise a single record will be returned.
+
+Nothing is returned by default.
+`returning` must be specified to return the updated records.
+If an array of records is inserted then an array of records will be returned
+otherwise a single record will be returned.
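+
+For example (hypothetical table and columns; `returning` asks for the inserted row back):
+
+```typescript
+const newItem = await db.items.insert(
+  { name: "a", status: "live" },
+  { returning: "*" },
+);
+```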
## upsert(filter: FullFilter, newData: Partial, params?: SelectParams): Promise<UpdateReturnType<P, T, S> | undefined>
+
Inserts or updates a record in the table.
+
- If a record matching the `filter` exists, it updates the record.
- If no matching record exists, it inserts a new record.
+
#### Parameters

+- **filter** required FullFilter
+
+  Data filter
+
+  - `{ status: 'live' }`
+  - `{ $or: [{ id: 1 }, { status: 'live' }] }`
+  - `{ $existsJoined: { referencedTable: { id: 1 } } }`
+  - `{
+    $filter: [
+      { $age: ["created_at"] },
+      "<",
+      '1 year'
+    ]
+}`
+
+- **newData** required Partial
+
+  Make all properties in T optional
+
+- **params** optional SelectParams
+
+  - **limit** optional number | null | undefined
+
+    Max number of rows to return. Defaults to 1000
+
+    - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
+
+  - **offset** optional number
+
+    Number of rows to skip
+
+  - **groupBy** optional boolean
+
+    Will group by all non aggregated fields specified in select (or all fields by default)
+
+  - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
+
+    Result data structure/type:
+
+    - **row**: the first row as an object
+    - **value**: the first value of the first field
+    - **values**: array of values from the selected field
+    - **statement**: sql statement
+    - **statement-no-rls**: sql statement without row level security
+    - **statement-where**: sql statement where condition
+
+  - **select** optional Select
+
+    Fields/expressions/linked data to select
+
+    - `"*"` or empty will return all fields
+    - `{ field: 0 }` - all fields except the specified field will be selected
+    - `{ field: 1 }` - only the specified field will be selected
+    - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
+    - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
+    - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
+
+  - **orderBy** optional OrderBy
+
+    Order by options
+
+    - Order is maintained in arrays
+    - `[{ key: "field", asc: true, nulls: "last" }]`
+
+  - **having** optional FullFilter
+
+    Filter applied after any aggregations (group by)

-    Data filter
-    - `{ status: 'live' }`
-    - `{ $or: [{ id: 1 }, { status: 'live' }] }`
-    - `{ $existsJoined: { referencedTable: { id: 1 } } }`
-    - `{
-      $filter: [
-        { $age: ["created_at"] },
-        "<",
-        '1 year'
-      ]
-    }`
-  - **newData** required Partial
-
-    Make all properties in T optional
-  - **params** optional SelectParams
-    - **limit** optional number | null | undefined
-
-      Max number of rows to return.
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) #### Return type + #### UpdateReturnType<P, T, S> | undefined ## delete(filter?: FullFilter, params?: SelectParams): Promise<UpdateReturnType<P, T, S> | undefined> + Deletes records from the table based on the specified filter criteria. + - If no filter is provided, all records may be deleted (use with caution). + #### Parameters - - **filter** optional FullFilter - - Data filter - - `{ status: 'live' }` - - `{ $or: [{ id: 1 }, { status: 'live' }] }` - - `{ $existsJoined: { referencedTable: { id: 1 } } }` - - `{ - $filter: [ - { $age: ["created_at"] }, - "<", - '1 year' - ] - }` - - **params** optional SelectParams - - **limit** optional number | null | undefined - - Max number of rows to return. 
Defaults to 1000 - - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present) - - **offset** optional number - - Number of rows to skip - - **groupBy** optional boolean - - Will group by all non aggregated fields specified in select (or all fields by default) - - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined - - Result data structure/type: - - **row**: the first row as an object - - **value**: the first value from of first field - - **values**: array of values from the selected field - - **statement**: sql statement - - **statement-no-rls**: sql statement without row level security - - **statement-where**: sql statement where condition - - **select** optional Select - - Fields/expressions/linked data to select - - `"*"` or empty will return all fields - - `{ field: 0 }` - all fields except the specified field will be selected - - `{ field: 1 }` - only the specified field will be selected - - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied - - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected - - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable - - **orderBy** optional OrderBy - - Order by options - - Order is maintained in arrays - - `[{ key: "field", asc: true, nulls: "last" }]` - - **having** optional FullFilter - - Filter applied after any aggregations (group by) +- **filter** optional FullFilter + + Data filter + + - `{ status: 'live' }` + - `{ $or: [{ id: 1 }, { status: 'live' }] }` + - `{ $existsJoined: { referencedTable: { id: 1 } } }` + - `{ + $filter: [ + { $age: ["created_at"] }, + "<", + '1 year' + ] +}` + +- **params** optional SelectParams + + - **limit** optional number | null | undefined + + Max number of rows to return. 
Defaults to 1000
+
+    - On client publish rules can affect this behaviour: cannot request more than the maxLimit (if present)
+
+  - **offset** optional number
+
+    Number of rows to skip
+
+  - **groupBy** optional boolean
+
+    Will group by all non aggregated fields specified in select (or all fields by default)
+
+  - **returnType** optional "row" | "value" | "values" | "statement" | "statement-no-rls" | "statement-where" | undefined
+
+    Result data structure/type:
+
+    - **row**: the first row as an object
+    - **value**: the first value of the first field
+    - **values**: array of values from the selected field
+    - **statement**: sql statement
+    - **statement-no-rls**: sql statement without row level security
+    - **statement-where**: sql statement where condition
+
+  - **select** optional Select
+
+    Fields/expressions/linked data to select
+
+    - `"*"` or empty will return all fields
+    - `{ field: 0 }` - all fields except the specified field will be selected
+    - `{ field: 1 }` - only the specified field will be selected
+    - `{ field: { $funcName: [args] } }` - the field will be selected with the specified function applied
+    - `{ field: 1, referencedTable: "*" }` - field together with all fields from referencedTable will be selected
+    - `{ linkedData: { referencedTable: { field: 1 } } }` - linkedData will contain the linked/joined records from referencedTable
+
+  - **orderBy** optional OrderBy
+
+    Order by options
+
+    - Order is maintained in arrays
+    - `[{ key: "field", asc: true, nulls: "last" }]`
+
+  - **having** optional FullFilter
+
+    Filter applied after any aggregations (group by)
+
#### Return type
+
-#### UpdateReturnType<P, T, S> | undefined
\ No newline at end of file
+#### UpdateReturnType<P, T, S> | undefined
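+
+A guarded usage sketch (hypothetical names). Passing an explicit filter avoids deleting every row, and `returning` is only needed if the deleted rows should be sent back:
+
+```typescript
+const deletedRows = await db.items.delete({ status: "expired" }, { returning: "*" });
+```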
diff --git a/docs/server.md b/docs/server.md
index e15b1850..a2d035a3 100644
--- a/docs/server.md
+++ b/docs/server.md
@@ -1,15 +1,22 @@
 # Overview
-Prostgles allows connecting to a PostgreSQL database to get a realtime view of the data and schema changes.
+
+Prostgles allows connecting to a PostgreSQL database to get a realtime view of the data and schema changes.
 By configuring "tsGeneratedTypesDir" the database schema types are generated automatically allowing full end-to-end type safety
+
 ### Installation
+
 To install the package, run:
+
 ```bash
 npm install prostgles-server
 ```
+
 ### Configuration
+
 To get started, you need to provide a configuration object to the server.

 Minimal configuration:
+
 ```typescript
 import prostgles from "prostgles-server";
 import { DBGeneratedSchema } from "./DBGeneratedSchema";
@@ -34,6 +41,7 @@ prostgles({
 ```

 To allow clients to connect, an express server with socket.io needs to be configured:
+
 ```typescript
 import prostgles from "prostgles-server";
 import { DBGeneratedSchema } from "./DBGeneratedSchema";
@@ -74,7 +82,9 @@ prostgles({
   },
 });
 ```
+
 ### Configuration options
+
 - **dbConnection** required DbConnection

   Database connection details and options
@@ -99,31 +109,32 @@ prostgles({
   Rest API configuration.
   The REST API allows interacting with the database similarly to the websocket connection,
   with the exception of subscriptions and realtime features.
-
+
+  POST Routes:
+
   - /api/db/:tableName/:command
   - /api/db/sql
   - /api/methods/:method
   - /api/schema
-
+
   Example request:
+
   ```typescript
-  const res = await fetch(
-    `http://127.0.0.1:3001/api/db/items/findOne`,
-    {
-      method: "POST",
-      headers: new Headers({
-        'Authorization': `Bearer ${Buffer.from(token, "utf-8").toString("base64")}`,
-        'Accept': 'application/json',
-        'Content-Type': 'application/json'
-      }),
-      body: JSON.stringify([{ id: 1 }]),
-    }
-  );
+  const res = await fetch(`http://127.0.0.1:3001/api/db/items/findOne`, {
+    method: "POST",
+    headers: new Headers({
+      Authorization: `Bearer ${Buffer.from(token, "utf-8").toString("base64")}`,
+      Accept: "application/json",
+      "Content-Type": "application/json",
+    }),
+    body: JSON.stringify([{ id: 1 }]),
+  });
   ```
+
 - **expressApp** required Express

   Express server instance
+
 - **routePrefix** required string

   Defaults to "/api"
@@ -139,7 +150,7 @@ prostgles({
   Data access rules applied to clients.
   By default, nothing is allowed.

-- **publishRawSQL** optional (params: PublishParams<S, SUser>) => boolean | "*" | Promise<boolean | "*">
+- **publishRawSQL** optional (params: PublishParams<S, SUser>) => boolean | "*" | Promise<boolean | "*">

   If defined and resolves to true then the connected client can run SQL queries
@@ -155,8 +166,9 @@ prostgles({
 - **joins** optional Joins

   Allows defining table relationships that can then be used in filters and data inserts:
-  - `infered` - uses the foreign keys to infer the joins
-  - `Join[]` - specifies the joins manually
+
+  - `infered` - uses the foreign keys to infer the joins
+  - `Join[]` - specifies the joins manually

 - **schemaFilter** optional Record<string, 1> | Record<string, 0> | undefined
@@ -170,11 +182,12 @@ prostgles({
 - **transactions** optional boolean | undefined

   If true then will allow transactions on the server through the db.tx method:
+
   ```typescript
-  db.tx(async t => {
-    await t.items.insert({ name: "a" });
-    throw new Error("rollback");
-  })
+  db.tx(async (t) => {
+    await t.items.insert({ name: "a" });
+    throw new Error("rollback");
+  });
   ```

 - **onSocketConnect** optional (args: AuthRequestParams<S, SUser> & { socket: PRGLIOSocket; }) => void | Promise<void>
@@ -190,52 +203,66 @@ prostgles({
   Auth configuration.
   Supports email and OAuth strategies
+
 - **sidKeyName** optional string

   Name of the cookie or socket handshake query param that represents the session id.
   Defaults to "session_id"
+
 - **responseThrottle** optional number

   Response time rounding in milliseconds to prevent timing attacks on login.
   Login response time should always be a multiple of this value.
   Defaults to 500 milliseconds
+
 - **expressConfig** optional ExpressConfig

   Will set up auth routes:
   /login
   /logout
   /magic-link/:id
+
 - **app** required Express

   Express app instance. If provided Prostgles will attempt to set sidKeyName to user cookie
+
 - **cookieOptions** optional AnyObject | undefined

   Options used in setting the cookie after a successful login
+
 - **disableSocketAuthGuard** optional boolean | undefined

   False by default. If false and userRoutes are provided then the socket will request window.location.reload if the current url is on a user route.
+
+- **publicRoutes** optional string[] | undefined
+
+  If provided, any client request to a route NOT in this list (or its subroutes) will be redirected to the login route and, after logging in, redirected back to the initial route
+  Logged in users are allowed to access these routes
+
+- **use** optional ExpressMiddleware<S, SUser> | undefined
+
+  Will attach an app.use listener and will expose getUser
+  Used in UI for blocking access
+
+- **onGetRequestOK** optional ((req: ExpressReq, res: ExpressRes, params: AuthRequestParams<S, SUser>) => any) | undefined
+
+  Will be called after a GET request is authorised
+
+- **magicLinks** optional { check: (magicId: string, dbo: DBOFullyTyped<S>, db: DB, client: LoginClientInfo) => Awaitable<BasicSession | undefined>; } | undefined
+
+  If defined, will check the magic link id and log in the user and redirect to the returnUrl if set
+
+- **registrations** optional AuthRegistrationConfig<S> | undefined
+
+- **getUser** required (sid: string | undefined, dbo: DBOFullyTyped<S>, db: DB, client: AuthClientRequest & LoginClientInfo) => Awaitable<AuthResult<...>>

   undefined sid is allowed to enable public users
+
 - **login** optional (params: LoginParams, dbo: DBOFullyTyped<S>, db: DB, client: LoginClientInfo) => Awaitable<BasicSession>
 - **logout** optional (sid: string | undefined, dbo: DBOFullyTyped<S>, db: DB) => any
 - **cacheSession** optional { getSession: (sid: string | undefined, dbo: DBOFullyTyped<S>, db: DB) => Awaitable<BasicSession>; }

-  If provided then session info will be saved on socket.__prglCache and reused from there
+  If provided then session info will be saved on socket.\_\_prglCache and reused from there
+
   - **getSession** required (sid: string | undefined, dbo: DBOFullyTyped<S>, db: DB) => Awaitable<BasicSession>
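+
+  A minimal `getUser` sketch, assuming hypothetical `sessions` and `users` tables (the exact AuthResult shape comes from the prostgles types):
+
+  ```typescript
+  auth: {
+    sidKeyName: "session_id",
+    getUser: async (sid, dbo) => {
+      // An undefined sid represents a public (not logged in) user
+      if (!sid) return undefined;
+      const session = await dbo.sessions.findOne({ id: sid });
+      return session && (await dbo.users.findOne({ id: session.user_id }));
+    },
+  },
+  ```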
 - **DEBUG_MODE** optional boolean | undefined
@@ -250,6 +277,7 @@ prostgles({
 - **watchSchemaType** optional "DDL_trigger" | "prostgles_queries" | undefined

   What schema change watcher to use when watchSchema is enabled:
+
   - `"DDL_trigger"` - (default) - Use a database event trigger for schema changes. Requires superuser.
   - `"prostgles_queries"` - Check db.sql() initiated queries for schema changes. Any other queries are ignored.
@@ -259,11 +287,12 @@ prostgles({
   Either calls the provided callback or triggers "onReady" on both the server and any connected clients when schema changes and also updates `DBGeneratedSchema.d.ts` if enabled.

   Options:
+
   - `true` - "onReady" call and "DBGeneratedSchema" rewrite
   - `EventTriggerTagFilter` - same as `true` but only on specified events
   - `"hotReloadMode"` - only rewrites `DBGeneratedSchema.d.ts`. Used in development when server restarts on file change.
   - `OnSchemaChangeCallback` - custom callback to be fired. Nothing else triggered
-    Useful for development
+    Useful for development

 - **onNotice** optional (notice: AnyObject, message?: string | undefined) => void
@@ -274,42 +303,52 @@ prostgles({
   Enables file storage and serving.
   Currently supports saving files locally or to AWS S3.
   By designating a file table files can be inserted through the table handler:
+
   ```typescript
   const file = await db.files.insert(
-    { file: new Buffer("file content"), name: "file.txt" },
-    { returnType: "*" }
+    { file: Buffer.from("file content"), name: "file.txt" },
+    { returnType: "*" },
   );
-
+
   const fileUrl = file.url;
   ```
+
 - **tableName** optional string

   Name of the table that will contain the file metadata.
   Defaults to "files"
+
 - **fileServeRoute** optional string

   GET path used in serving media.
   Defaults to `/${tableName}`
+
 - **delayedDelete** optional { deleteAfterNDays: number; checkIntervalHours?: number | undefined; }

   If defined, the files will not be deleted immediately.
   Instead, the "deleted" field will be updated to the current timestamp and after the day interval provided in "deleteAfterNDays" the files will be deleted.
   "checkIntervalHours" is the frequency in hours at which the files ready for deletion are deleted
+
   - **deleteAfterNDays** required number

     Minimum amount of time measured in days for which the files will not be deleted after requesting delete
+
   - **checkIntervalHours** optional number

     How frequently the files will be checked for deletion delay
+
 - **expressApp** required ExpressApp

   Express server instance
+
 - **referencedTables** optional { [tableName: string]: { type: "column"; referenceColumns: Record<string, FileColumnConfig>; }; }

   Used to specify which tables will have a file column and allowed file types.
-
+
   Specifying referencedTables will:
-  1. create a column in that table called media
-  2. create a lookup table lookup_media_{referencedTable} that joins referencedTable to the media table
+
+  1. create a column in that table called media
+  2. create a lookup table `lookup_media_{referencedTable}` that joins referencedTable to the media table
+
 - **imageOptions** optional ImageOptions
 - **keepMetadata** optional boolean | undefined
 - **compression** optional ImageCompressionOptions | undefined
@@ -317,17 +356,21 @@ prostgles({
   Callbacks for file upload and download.
   Used for custom file handling.
+
   - **upload** required (file: FileUploadArgs) => Promise<void>
   - **downloadAsStream** required (name: string) => Promise<Readable>
   - **delete** required (fileName: string) => Promise<void>
   - **getSignedUrlForDownload** required (fileName: string, expiresInSeconds: number) => Promise<string>
+
 - **localConfig** optional LocalConfig

   Local file storage configuration.
+
   - **localFolderPath** required string

-    example: path.join(__dirname+'/media')
+    example: path.join(\_\_dirname+'/media')
     note that this location will be relative to the compiled file location
+
   - **minFreeBytes** optional number

     Minimum amount of free bytes available to allow saving files
@@ -344,17 +387,21 @@ prostgles({
   Migration logic used when the new tableConfig version is higher than the one in the database.
   By default the server will fail to start if the tableConfig schema changes cannot be applied without errors
+
   - **silentFail** optional boolean | undefined

     If false then prostgles won't start on any tableConfig error.
     True by default
+
   - **version** required number

     Version number that must be increased on each schema change.
+
   - **versionTableName** optional string

     Table that will contain the schema version number and the tableConfig.
     Defaults to schema_version
+
   - **onMigrate** required OnMigrate

     Script executed before tableConfig is loaded and if an older schema_version is present.
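+
+    A sketch of a migration script (the argument shape and SQL are illustrative assumptions rather than the exact OnMigrate type):
+
+    ```typescript
+    onMigrate: async ({ db, oldVersion }) => {
+      // Runs before tableConfig is applied, only when an older schema_version exists
+      if ((oldVersion ?? 0) < 2) {
+        await db.any(`ALTER TABLE items ADD COLUMN IF NOT EXISTS status TEXT`);
+      }
+    },
+    ```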
@@ -362,4 +409,4 @@

 - **onLog** optional (evt: EventInfo) => Promise<void>

-  Usefull for logging or debugging
\ No newline at end of file
+  Useful for logging or debugging
diff --git a/docs/utils/generateClientDocs.ts b/docs/utils/generateClientDocs.ts
index d70c9b0a..5e8a43cf 100644
--- a/docs/utils/generateClientDocs.ts
+++ b/docs/utils/generateClientDocs.ts
@@ -11,7 +11,7 @@ const docsFolder = `${__dirname}/../../`;

 export const generateClientDocs = async () => {
   const clientFilePath = path.resolve(
-    `${testFolderPath}/client/node_modules/prostgles-client/dist/prostgles.d.ts`
+    `${testFolderPath}/client/node_modules/prostgles-client/dist/prostgles.d.ts`,
   );
   const excludedTypes = [
     "FullFilter",
@@ -51,13 +51,13 @@ export const generateClientDocs = async () => {

   const isomorphicMd = getMethodsDocs(
     getObjectEntries(tableHandler.properties).filter(
-      ([methodName]) => isomotphicMethodNames[methodName]
-    )
+      ([methodName]) => isomotphicMethodNames[methodName],
+    ),
   );
   const clientMd = getMethodsDocs(
     getObjectEntries(tableHandler.properties).filter(
-      ([methodName]) => !isomotphicMethodNames[methodName]
-    )
+      ([methodName]) => !isomotphicMethodNames[methodName],
+    ),
   );

   const result = [
diff --git a/docs/utils/generateServerDocs.ts b/docs/utils/generateServerDocs.ts
index 8162c391..cc24a345 100644
--- a/docs/utils/generateServerDocs.ts
+++ b/docs/utils/generateServerDocs.ts
@@ -10,7 +10,7 @@ const docsFolder = `${__dirname}/../../`;

 export const generateServerDocs = () => {
   const serverFilePath = path.resolve(
-    `${testFolderPath}/server/node_modules/prostgles-server/dist/ProstglesTypes.d.ts` // "ProstglesInitOptions",
+    `${testFolderPath}/server/node_modules/prostgles-server/dist/ProstglesTypes.d.ts`, // "ProstglesInitOptions",
   );
   const {
     resolvedTypes: [ProstglesInitOptions],
@@ -38,11 +38,11 @@ export const generateServerDocs = () => {
   if (!ProstglesInitOptions || !prostglesInitOpts)
     throw new Error("ProstglesInitOptions not found");

-  const configurationPropsMarkdown = getObjectEntries(prostglesInitOpts.properties).map(
-    ([propName, prop]) => {
-      return renderTsType(prop, 0, { name: propName, optional: prop.optional });
-    }
-  );
+  const configurationPropsMarkdown = getObjectEntries(
+    prostglesInitOpts.properties,
+  ).map(([propName, prop]) => {
+    return renderTsType(prop, 0, { name: propName, optional: prop.optional });
+  });

   const docs = [
     `# Overview`,
diff --git a/docs/utils/getMethodsDocs.ts b/docs/utils/getMethodsDocs.ts
index da004728..1fd969d7 100644
--- a/docs/utils/getMethodsDocs.ts
+++ b/docs/utils/getMethodsDocs.ts
@@ -1,23 +1,29 @@
-import { TS_Function, TS_Type } from "./getSerializableType/getSerializableType";
+import {
+  TS_Function,
+  TS_Type,
+} from "./getSerializableType/getSerializableType";
 import { renderTsType } from "./renderTsType";

 export const getMethodsDocs = (methods: [name: string, TS_Type][]) => {
   return methods.map(([methodName, _methodInfo]) => {
     const methodInfo = (
-      _methodInfo.type === "function" ?
-        (_methodInfo as TS_Function)
-        // : _methodInfo.type === "union" ? _methodInfo.types.find((t) => t.type === "function")
-      : undefined) as TS_Function | undefined;
+      _methodInfo.type === "function"
+        ? (_methodInfo as TS_Function)
+        : // : _methodInfo.type === "union" ? _methodInfo.types.find((t) => t.type === "function")
+          undefined
+    ) as TS_Function | undefined;
     if (!methodInfo) return "";
     const args = methodInfo.arguments.map(
-      (arg) => `${arg.name}${arg.optional ? "?"
: ""}: ${getAliasWithoutGenerics(arg)}` + (arg) => + `${arg.name}${arg.optional ? "?" : ""}: ${getAliasWithoutGenerics(arg)}`, ); - const escapedAliasFirst = (t: TS_Type) => t.aliasSymbolescapedName || t.alias || ""; + const escapedAliasFirst = (t: TS_Type) => + t.aliasSymbolescapedName || t.alias || ""; // const rType = `${methodInfo.returnType.aliasSymbolescapedName || methodInfo.returnType.alias}` const rType = replaceSigns( - methodInfo.returnType.type === "promise" ? - `Promise<${escapedAliasFirst(methodInfo.returnType.innerType)}>` - : escapedAliasFirst(methodInfo.returnType) + methodInfo.returnType.type === "promise" + ? `Promise<${escapedAliasFirst(methodInfo.returnType.innerType)}>` + : escapedAliasFirst(methodInfo.returnType), ); return [ `## ${methodName}(${args.join(", ")}): ${rType}`, @@ -25,15 +31,18 @@ export const getMethodsDocs = (methods: [name: string, TS_Type][]) => { // `\`\`\`typescript`, // `${methodName}: (): `, // `\`\`\``, - ...(methodInfo.arguments.length ? - [ - `#### Parameters`, - ``, - ...methodInfo.arguments.map((arg) => { - return renderTsType(arg, 2, { name: arg.name, optional: arg.optional }); - }), - ] - : []), + ...(methodInfo.arguments.length + ? [ + `#### Parameters`, + ``, + ...methodInfo.arguments.map((arg) => { + return renderTsType(arg, 2, { + name: arg.name, + optional: arg.optional, + }); + }), + ] + : []), `#### Return type`, `#### ` + renderTsType(methodInfo.returnType, 0, undefined), ].join("\n"); @@ -41,8 +50,10 @@ export const getMethodsDocs = (methods: [name: string, TS_Type][]) => { }; const getAliasWithoutGenerics = (type: TS_Type) => { - if (type.type === "union") return type.types.map(getAliasWithoutGenerics).join(" | "); + if (type.type === "union") + return type.types.map(getAliasWithoutGenerics).join(" | "); return type.aliasSymbolescapedName || type.alias; }; -const replaceSigns = (str: string) => str.replaceAll("<", "<").replaceAll(">", ">"); +const replaceSigns = (str: string) => + str.replaceAll("<", "<").replaceAll(">", ">"); diff --git a/docs/utils/getResolvedTypes.ts b/docs/utils/getResolvedTypes.ts index cd57f3e4..2c93aa00 100644 --- a/docs/utils/getResolvedTypes.ts +++ b/docs/utils/getResolvedTypes.ts @@ -18,7 +18,11 @@ type Args = { outputFilename?: string; }; -export const getResolvedTypes = ({ filePath, outputFilename, filter }: Args) => { +export const getResolvedTypes = ({ + filePath, + outputFilename, + filter, +}: Args) => { const { checker, sourceFile } = loadTsFile(filePath); const results: TS_Type[] = []; @@ -53,9 +57,13 @@ export const getResolvedTypes = ({ filePath, outputFilename, filter }: Args) => `export const definitions = ${JSON.stringify(results, null, 2)} as const satisfies TS_Type[];`, ].join("\n"); - fs.writeFileSync(`${docsFolder}/utils/${outputFilename}.ts`, serverTypesStr, { - encoding: "utf-8", - }); + fs.writeFileSync( + `${docsFolder}/utils/${outputFilename}.ts`, + serverTypesStr, + { + encoding: "utf-8", + }, + ); } return result; diff --git a/docs/utils/getSerializableType/getSerializableArrayOrTuple.ts b/docs/utils/getSerializableType/getSerializableArrayOrTuple.ts index c5f73c89..b5c9ca02 100644 --- a/docs/utils/getSerializableType/getSerializableArrayOrTuple.ts +++ b/docs/utils/getSerializableType/getSerializableArrayOrTuple.ts @@ -1,5 +1,9 @@ import * as ts from "typescript"; -import { getSerializableType, TS_Type, TsTypeParser } from "./getSerializableType"; +import { + getSerializableType, + TS_Type, + TsTypeParser, +} from "./getSerializableType"; export const 
getSerializableArrayOrTuple: TsTypeParser = ({ myType, @@ -9,35 +13,37 @@ export const getSerializableArrayOrTuple: TsTypeParser = ({ depth, nextUnresolvedParentAliases, }) => { - const arrayOrTuple = - checker.isArrayType(myType) ? "array" - : checker.isTupleType(myType) ? "tuple" - : undefined; + const arrayOrTuple = checker.isArrayType(myType) + ? "array" + : checker.isTupleType(myType) + ? "tuple" + : undefined; if (arrayOrTuple) { const itemType = - arrayOrTuple === "tuple" ? myType : checker.getTypeArguments(myType as ts.TypeReference)[0]; + arrayOrTuple === "tuple" + ? myType + : checker.getTypeArguments(myType as ts.TypeReference)[0]; if (itemType && checker.isTupleType(itemType)) { const tupleTypes = - (itemType as unknown as { resolvedTypeArguments: ts.Type[] }).resolvedTypeArguments.map( - (d: ts.Type) => { - return getSerializableType({ - myType: d, - checker, - visited, - parentAliases: nextUnresolvedParentAliases, - opts, - depth: depth + 1, - }).resolvedType; - } - ) ?? []; + ( + itemType as unknown as { resolvedTypeArguments: ts.Type[] } + ).resolvedTypeArguments.map((d: ts.Type) => { + return getSerializableType({ + myType: d, + checker, + visited, + parentAliases: nextUnresolvedParentAliases, + opts, + depth: depth + 1, + }).resolvedType; + }) ?? []; return { type: "tuple", itemTypes: tupleTypes, }; } - const resolvedItemType: TS_Type = - itemType ? - getSerializableType({ + const resolvedItemType: TS_Type = itemType + ? getSerializableType({ myType: itemType, checker, visited, diff --git a/docs/utils/getSerializableType/getSerializableConditional.ts b/docs/utils/getSerializableType/getSerializableConditional.ts index 9db158cb..534c9c4a 100644 --- a/docs/utils/getSerializableType/getSerializableConditional.ts +++ b/docs/utils/getSerializableType/getSerializableConditional.ts @@ -1,5 +1,9 @@ import * as ts from "typescript"; -import { getSerializableType, TS_Conditional, TsTypeParser } from "./getSerializableType"; +import { + getSerializableType, + TS_Conditional, + TsTypeParser, +} from "./getSerializableType"; /** * T = G extends true? string : number diff --git a/docs/utils/getSerializableType/getSerializableFunction.ts b/docs/utils/getSerializableType/getSerializableFunction.ts index 290102f1..f6cb881d 100644 --- a/docs/utils/getSerializableType/getSerializableFunction.ts +++ b/docs/utils/getSerializableType/getSerializableFunction.ts @@ -22,7 +22,10 @@ export const getSerializableFunction: TsTypeParser = ({ .map((param) => { const { valueDeclaration } = param; if (!valueDeclaration) return undefined; - const paramType = checker.getTypeOfSymbolAtLocation(param, valueDeclaration); + const paramType = checker.getTypeOfSymbolAtLocation( + param, + valueDeclaration, + ); const resolvedParamType = getSerializableType({ myType: paramType, checker, @@ -34,15 +37,18 @@ export const getSerializableFunction: TsTypeParser = ({ const paramComments = getSymbolComments(param, checker); const optional = Boolean( (ts as any).isParameterDeclaration(param.valueDeclaration) && - checker.isOptionalParameter(valueDeclaration as ts.ParameterDeclaration) + checker.isOptionalParameter( + valueDeclaration as ts.ParameterDeclaration, + ), ); const name = param.escapedName.toString() || param.name; // if (name === "selectParams") { // debugger; // } - const resolvedParam = - optional ? simplifyUnionForOptionalType(resolvedParamType) : resolvedParamType; + const resolvedParam = optional + ? 
simplifyUnionForOptionalType(resolvedParamType) + : resolvedParamType; return { name, optional, @@ -75,7 +81,7 @@ export const getSerializableFunction: TsTypeParser = ({ export const simplifyUnionForOptionalType = (resolvedType: TS_Type) => { if (resolvedType.type === "union") { const indexOfUndefined = resolvedType.types.findIndex( - (t) => t.type === "primitive" && t.subType === "undefined" + (t) => t.type === "primitive" && t.subType === "undefined", ); if (indexOfUndefined === -1) return resolvedType; @@ -84,7 +90,8 @@ export const simplifyUnionForOptionalType = (resolvedType: TS_Type) => { const nonUndefined = unionTypes.find((_, i) => indexOfUndefined !== i); if ( nonUndefined && - (nonUndefined.type !== "primitive" || nonUndefined.subType !== "undefined") + (nonUndefined.type !== "primitive" || + nonUndefined.subType !== "undefined") ) { return nonUndefined; } diff --git a/docs/utils/getSerializableType/getSerializableIntersection.ts b/docs/utils/getSerializableType/getSerializableIntersection.ts index 85b92a4e..6e4adcc1 100644 --- a/docs/utils/getSerializableType/getSerializableIntersection.ts +++ b/docs/utils/getSerializableType/getSerializableIntersection.ts @@ -1,4 +1,8 @@ -import { getSerializableType, TS_Object, TsTypeParser } from "./getSerializableType"; +import { + getSerializableType, + TS_Object, + TsTypeParser, +} from "./getSerializableType"; /** * A & B @@ -37,7 +41,7 @@ export const getSerializableIntersection: TsTypeParser = ({ ...acc, [k]: { ...v, intersectionParent: t.aliasSymbolescapedName }, }), - {} + {}, ); return { ...acc, ...propertiesWithParentObject }; }, {}); diff --git a/docs/utils/getSerializableType/getSerializableObjectOrRecord.ts b/docs/utils/getSerializableType/getSerializableObjectOrRecord.ts index 2d5763ad..f5386c63 100644 --- a/docs/utils/getSerializableType/getSerializableObjectOrRecord.ts +++ b/docs/utils/getSerializableType/getSerializableObjectOrRecord.ts @@ -38,7 +38,9 @@ export const getSerializableObjectOrRecord: TsTypeParser = ({ depth: depth + 1, }).resolvedType; const innerType: Exclude = - resolvedInnerType?.type === "promise" ? defaultType : (resolvedInnerType ?? defaultType); + resolvedInnerType?.type === "promise" + ? defaultType + : (resolvedInnerType ?? defaultType); return { type: "promise", innerType, @@ -48,9 +50,8 @@ export const getSerializableObjectOrRecord: TsTypeParser = ({ const properties: TS_Object["properties"] = {}; myType.getProperties().forEach((symbol) => { - const propertyType = - symbol.valueDeclaration ? - checker.getTypeOfSymbolAtLocation(symbol, symbol.valueDeclaration) + const propertyType = symbol.valueDeclaration + ? checker.getTypeOfSymbolAtLocation(symbol, symbol.valueDeclaration) : checker.getTypeOfSymbol(symbol); const resolvedPropertyType = getSerializableType({ @@ -65,10 +66,13 @@ export const getSerializableObjectOrRecord: TsTypeParser = ({ /** * Prioritise symbol comments over resolved property type comments */ - const propertyComments = getSymbolComments(symbol, checker) || resolvedPropertyType.comments; + const propertyComments = + getSymbolComments(symbol, checker) || resolvedPropertyType.comments; const optional = Boolean(symbol.flags & ts.SymbolFlags.Optional); properties[symbol.name] = { - ...(optional ? simplifyUnionForOptionalType(resolvedPropertyType) : resolvedPropertyType), + ...(optional + ? 
simplifyUnionForOptionalType(resolvedPropertyType) + : resolvedPropertyType), optional, comments: propertyComments || undefined, }; @@ -120,7 +124,9 @@ export const getSerializableObjectOrRecord: TsTypeParser = ({ } }; -const getNonInternalTSDeclarations = (declarations: ts.Declaration[]): ts.Declaration[] => { +const getNonInternalTSDeclarations = ( + declarations: ts.Declaration[], +): ts.Declaration[] => { return declarations.filter((d) => { return !d.getSourceFile().fileName.includes("/node_modules/typescript/"); }); diff --git a/docs/utils/getSerializableType/getSerializableType.ts b/docs/utils/getSerializableType/getSerializableType.ts index 2d794b8c..a3ce1ab3 100644 --- a/docs/utils/getSerializableType/getSerializableType.ts +++ b/docs/utils/getSerializableType/getSerializableType.ts @@ -117,13 +117,20 @@ type GetSerializableTypeArgs = { export type TsTypeParser = ( args: Omit & { nextUnresolvedParentAliases: string[]; - } + }, ) => TS_Type | undefined; export const getSerializableType = ( - args: GetSerializableTypeArgs + args: GetSerializableTypeArgs, ): { resolvedType: TS_Type; visited: VisitedTypesMap } => { - const { myType, checker, depth, opts, parentAliases, visited = new Map() } = args; + const { + myType, + checker, + depth, + opts, + parentAliases, + visited = new Map(), + } = args; let alias = "unknown"; const { escapedName } = myType.aliasSymbol ?? {}; const aliasSymbolescapedName = escapedName?.toString(); @@ -137,13 +144,17 @@ export const getSerializableType = ( const isTooDeep = opts?.maxDepth !== undefined && depth >= opts.maxDepth; - const unresolvedParentAliases = parentAliases?.filter((a) => !visited.get(a)) ?? []; + const unresolvedParentAliases = + parentAliases?.filter((a) => !visited.get(a)) ?? []; // console.log(unresolvedParentAliases, alias); /* Circular resolved type */ const visitedType = visited.get(alias); if (visitedType) { - return { resolvedType: visitedType.reference?.type ?? visitedType.resolvedType, visited }; + return { + resolvedType: visitedType.reference?.type ?? visitedType.resolvedType, + visited, + }; } const withAlias = (_type: TS_Type) => { const finalComments = _type.comments || comments; @@ -154,33 +165,32 @@ export const getSerializableType = ( ..._type, ...(finalComments && { comments: finalComments }), }, - ["type", "alias", "aliasSymbolescapedName", "comments"] + ["type", "alias", "aliasSymbolescapedName", "comments"], ); let referenceType: TS_Type | undefined; const fileName = symbol?.declarations?.[0]?.getSourceFile().fileName; const referenceReason = - ( - opts?.excludedTypes.includes(alias) || - (fileName && - opts?.excludedFilenameParts?.length && - opts.excludedFilenameParts.some((part) => fileName.includes(part))) || - (escapedName && opts?.excludedTypes.includes(escapedName)) - ) ? - // TODO: Add a way to exclude well known types (Date, Array, UInt8Array, etc) - "excluded" - : isTooDeep ? "depth" - : undefined; + opts?.excludedTypes.includes(alias) || + (fileName && + opts?.excludedFilenameParts?.length && + opts.excludedFilenameParts.some((part) => fileName.includes(part))) || + (escapedName && opts?.excludedTypes.includes(escapedName)) + ? // TODO: Add a way to exclude well known types (Date, Array, UInt8Array, etc) + "excluded" + : isTooDeep + ? "depth" + : undefined; if (referenceReason) { referenceType = - resolvedType.type === "primitive" ? - resolvedType - : { - type: "reference", - alias, - aliasSymbolescapedName, - comments: resolvedType.comments, - }; + resolvedType.type === "primitive" + ? 
resolvedType + : { + type: "reference", + alias, + aliasSymbolescapedName, + comments: resolvedType.comments, + }; } visited.set(alias, { @@ -255,7 +265,7 @@ export type ResolveTypeOptions = { export const resolveType = ( myType: ts.Type, checker: ts.TypeChecker, - opts: ResolveTypeOptions + opts: ResolveTypeOptions, ): { resolvedType: TS_Type; constituentTypes?: TS_Type[] } => { const { resolvedType } = getSerializableType({ myType, @@ -265,7 +275,10 @@ export const resolveType = ( opts, depth: 0, }); - if (resolvedType.type === "reference" && opts.excludedTypes.includes(resolvedType.alias)) { + if ( + resolvedType.type === "reference" && + opts.excludedTypes.includes(resolvedType.alias) + ) { return { resolvedType: { type: "primitive", subType: "any" } }; } return { resolvedType, constituentTypes: [] }; @@ -273,7 +286,7 @@ export const resolveType = ( const sortObjectsByKeyOrder = ( obj: T, - keyOrder: K[] + keyOrder: K[], ): T => { const newKeyOrder = arraySort(Object.keys(obj), keyOrder); @@ -282,7 +295,7 @@ const sortObjectsByKeyOrder = ( ...acc, [key]: obj[key], }), - {} as T + {} as T, ); }; @@ -297,7 +310,10 @@ const arraySort = (arrayToSort: string[], keyOrder: string[]): string[] => { }); }; -export const getSymbolComments = (symbol: ts.Symbol, checker: ts.TypeChecker): string => { +export const getSymbolComments = ( + symbol: ts.Symbol, + checker: ts.TypeChecker, +): string => { const comments = symbol.getDocumentationComment(checker); return comments .map((comment) => comment.text) diff --git a/docs/utils/getSerializableType/getSerializableUnion.ts b/docs/utils/getSerializableType/getSerializableUnion.ts index 2062161d..183211a6 100644 --- a/docs/utils/getSerializableType/getSerializableUnion.ts +++ b/docs/utils/getSerializableType/getSerializableUnion.ts @@ -1,4 +1,8 @@ -import { getSerializableType, TS_Union, TsTypeParser } from "./getSerializableType"; +import { + getSerializableType, + TS_Union, + TsTypeParser, +} from "./getSerializableType"; export const getSerializableUnion: TsTypeParser = ({ myType, @@ -14,7 +18,11 @@ export const getSerializableUnion: TsTypeParser = ({ * myType.types tends to unnest unions into bigger unions. * myType.origin is the original union which we want to keep for brevity. */ - if ("origin" in myType && myType.origin && (myType.origin as any).isUnion()) { + if ( + "origin" in myType && + myType.origin && + (myType.origin as any).isUnion() + ) { unionMembers = (myType.origin as any).types; } const unionTypes = unionMembers.map((t) => { @@ -35,14 +43,14 @@ export const getSerializableUnion: TsTypeParser = ({ * So we need to check for "true" and "false" and merge them into "boolean" */ const booleanTypes = unionTypes.filter( - (t) => t.type === "primitive" && t.subType === "boolean" + (t) => t.type === "primitive" && t.subType === "boolean", ); const dedupedTypes = - booleanTypes.length > 1 ? - unionTypes - .filter((t) => t.type !== "primitive" || t.subType !== "boolean") - .concat(booleanTypes[0]!) - : unionTypes; + booleanTypes.length > 1 + ? unionTypes + .filter((t) => t.type !== "primitive" || t.subType !== "boolean") + .concat(booleanTypes[0]!) 
+      : unionTypes;

   const result: TS_Union = {
     type: "union",
diff --git a/docs/utils/loadTsFile.ts b/docs/utils/loadTsFile.ts
index 12a9c06d..1f3069b9 100644
--- a/docs/utils/loadTsFile.ts
+++ b/docs/utils/loadTsFile.ts
@@ -7,7 +7,7 @@ export const loadTsFile = (filePath: string) => {
   const configPath = ts.findConfigFile(
     path.dirname(absolutePath),
     ts.sys.fileExists,
-    "tsconfig.json"
+    "tsconfig.json",
   );

   if (!configPath) {
@@ -18,7 +18,7 @@ export const loadTsFile = (filePath: string) => {
   const parsedConfig = ts.parseJsonConfigFileContent(
     configFile.config,
     ts.sys,
-    path.dirname(configPath)
+    path.dirname(configPath),
   );

   const program = ts.createProgram({
diff --git a/docs/utils/renderTsType.ts b/docs/utils/renderTsType.ts
index a3456b16..411f3cfa 100644
--- a/docs/utils/renderTsType.ts
+++ b/docs/utils/renderTsType.ts
@@ -6,13 +6,15 @@ const renderedAliases = new Set();
 export const renderTsType = (
   type: TS_Type,
   indent = 2,
-  argOrProp: ArgOrProp | undefined
+  argOrProp: ArgOrProp | undefined,
 ): string => {
   const indentText = " ".repeat(indent);
   const typeAlias = renderTypeAlias(type, argOrProp);
   const title = [
     `${indentText}${argOrProp?.name ? `- **${argOrProp.name}** ${argOrProp.optional ? "optional" : "required"} ` : ""}${typeAlias}`,
-    type.comments ? `${reIndentLineStarts(type.comments, indentText + "  ")}` : undefined,
+    type.comments
+      ? `${reIndentLineStarts(type.comments, indentText + "  ")}`
+      : undefined,
   ]
     .filter(isDefined)
     .join("\n\n");
@@ -34,7 +36,9 @@
       title +
       `\n` +
       getObjectEntries(type.properties)
-        .map(([name, p]) => renderTsType(p, indent + 2, { name, optional: p.optional }))
+        .map(([name, p]) =>
+          renderTsType(p, indent + 2, { name, optional: p.optional }),
+        )
         .join("\n")
     );
   }
@@ -64,11 +68,11 @@ const reIndentLineStarts = (str: string, indent: string) =>
 const renderTypeAlias = (type: TS_Type, argOrProp: ArgOrProp | undefined) => {
   const typeAlias =
-    type.type === "primitive" ?
-      type.subType
-    : (type.aliasSymbolescapedName || type.alias || type.type)
-        .replaceAll("<", "&lt;")
-        .replaceAll(">", "&gt;");
+    type.type === "primitive"
+      ? type.subType
+      : (type.aliasSymbolescapedName || type.alias || type.type)
+          .replaceAll("<", "&lt;")
+          .replaceAll(">", "&gt;");

   const color = type.type === "literal" ? 
"brown" : "green"; const style = `style="color: ${color};"`; diff --git a/examples/full-example-typescript/DBoGenerated.d.ts b/examples/full-example-typescript/DBoGenerated.d.ts index a9f4f925..7998005a 100644 --- a/examples/full-example-typescript/DBoGenerated.d.ts +++ b/examples/full-example-typescript/DBoGenerated.d.ts @@ -1,74 +1,132 @@ export declare type FieldFilter = object | string[] | "*" | ""; -export declare type OrderBy = { - key: string; - asc: boolean; -}[] | { - [key: string]: boolean; -}[] | string | string[]; +export declare type OrderBy = + | { + key: string; + asc: boolean; + }[] + | { + [key: string]: boolean; + }[] + | string + | string[]; export declare type SelectParams = { - select?: FieldFilter; - limit?: number; - offset?: number; - orderBy?: OrderBy; - expectOne?: boolean; + select?: FieldFilter; + limit?: number; + offset?: number; + orderBy?: OrderBy; + expectOne?: boolean; }; export declare type UpdateParams = { - returning?: FieldFilter; - onConflictDoNothing?: boolean; - removeDisallowedFields?: boolean; - multi?: boolean; + returning?: FieldFilter; + onConflictDoNothing?: boolean; + removeDisallowedFields?: boolean; + multi?: boolean; }; export declare type InsertParams = { - returning?: FieldFilter; - onConflictDoNothing?: boolean; - removeDisallowedFields?: boolean; + returning?: FieldFilter; + onConflictDoNothing?: boolean; + removeDisallowedFields?: boolean; }; export declare type DeleteParams = { - returning?: FieldFilter; + returning?: FieldFilter; }; declare type Airports = { - last_updated?: number; - id?: number; + last_updated?: number; + id?: number; }; declare type Planes = { - last_updated?: number; - manufacturer?: string; - model?: string; - id?: number; + last_updated?: number; + manufacturer?: string; + model?: string; + id?: number; }; declare type DBO_airports = { - find: (filter?: object, selectParams?: SelectParams, param3_unused?: any) => Promise; - findOne: (filter?: object, selectParams?: SelectParams, param3_unused?: any) => Promise; - subscribe: (filter: object, params: SelectParams, onData: (items: Airports[]) => any) => { - unsubscribe: () => any; - }; - subscribeOne: (filter: object, params: SelectParams, onData: (item: Airports) => any) => { - unsubscribe: () => any; - }; - count: (filter?: object) => Promise; - update: (filter: object, newData: Airports, params?: UpdateParams) => Promise; - upsert: (filter: object, newData: Airports, params?: UpdateParams) => Promise; - insert: (data: (Airports | Airports[]), params?: InsertParams) => Promise; - delete: (filter: object, params?: DeleteParams) => Promise; + find: ( + filter?: object, + selectParams?: SelectParams, + param3_unused?: any, + ) => Promise; + findOne: ( + filter?: object, + selectParams?: SelectParams, + param3_unused?: any, + ) => Promise; + subscribe: ( + filter: object, + params: SelectParams, + onData: (items: Airports[]) => any, + ) => { + unsubscribe: () => any; + }; + subscribeOne: ( + filter: object, + params: SelectParams, + onData: (item: Airports) => any, + ) => { + unsubscribe: () => any; + }; + count: (filter?: object) => Promise; + update: ( + filter: object, + newData: Airports, + params?: UpdateParams, + ) => Promise; + upsert: ( + filter: object, + newData: Airports, + params?: UpdateParams, + ) => Promise; + insert: ( + data: Airports | Airports[], + params?: InsertParams, + ) => Promise; + delete: (filter: object, params?: DeleteParams) => Promise; }; declare type DBO_planes = { - find: (filter?: object, selectParams?: SelectParams, 
param3_unused?: any) => Promise; - findOne: (filter?: object, selectParams?: SelectParams, param3_unused?: any) => Promise; - subscribe: (filter: object, params: SelectParams, onData: (items: Planes[]) => any) => { - unsubscribe: () => any; - }; - subscribeOne: (filter: object, params: SelectParams, onData: (item: Planes) => any) => { - unsubscribe: () => any; - }; - count: (filter?: object) => Promise; - update: (filter: object, newData: Planes, params?: UpdateParams) => Promise; - upsert: (filter: object, newData: Planes, params?: UpdateParams) => Promise; - insert: (data: (Planes | Planes[]), params?: InsertParams) => Promise; - delete: (filter: object, params?: DeleteParams) => Promise; + find: ( + filter?: object, + selectParams?: SelectParams, + param3_unused?: any, + ) => Promise; + findOne: ( + filter?: object, + selectParams?: SelectParams, + param3_unused?: any, + ) => Promise; + subscribe: ( + filter: object, + params: SelectParams, + onData: (items: Planes[]) => any, + ) => { + unsubscribe: () => any; + }; + subscribeOne: ( + filter: object, + params: SelectParams, + onData: (item: Planes) => any, + ) => { + unsubscribe: () => any; + }; + count: (filter?: object) => Promise; + update: ( + filter: object, + newData: Planes, + params?: UpdateParams, + ) => Promise; + upsert: ( + filter: object, + newData: Planes, + params?: UpdateParams, + ) => Promise; + insert: ( + data: Planes | Planes[], + params?: InsertParams, + ) => Promise; + delete: (filter: object, params?: DeleteParams) => Promise; }; export declare type DBObj = { - airports: DBO_airports; - planes: DBO_planes; + airports: DBO_airports; + planes: DBO_planes; }; export {}; -//# sourceMappingURL=DBoGenerated.d.ts.map \ No newline at end of file +//# sourceMappingURL=DBoGenerated.d.ts.map diff --git a/examples/full-example-typescript/DBoGenerated.js b/examples/full-example-typescript/DBoGenerated.js index c2532ede..5f5718c6 100644 --- a/examples/full-example-typescript/DBoGenerated.js +++ b/examples/full-example-typescript/DBoGenerated.js @@ -1,6 +1,6 @@ "use strict"; /* This file was generated by Prostgles -* Mon, 12 Oct 2020 08:18:01 GMT -*/ + * Mon, 12 Oct 2020 08:18:01 GMT + */ Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=DBoGenerated.js.map \ No newline at end of file +//# sourceMappingURL=DBoGenerated.js.map diff --git a/examples/full-example-typescript/home.html b/examples/full-example-typescript/home.html index a29b765c..4526739e 100644 --- a/examples/full-example-typescript/home.html +++ b/examples/full-example-typescript/home.html @@ -1,30 +1,32 @@ - - - + - - Prostgles - - - - - - + + Prostgles - + + + + - + prostgles({ + socket, + isReady: (db, methods) => { + db.planes.find().then(console.log); + }, + onDisconnect: (err, res) => { + // location.reload(); + }, + }); + + diff --git a/examples/full-example-typescript/index.d.ts b/examples/full-example-typescript/index.d.ts index e26a57a8..a5bcb021 100644 --- a/examples/full-example-typescript/index.d.ts +++ b/examples/full-example-typescript/index.d.ts @@ -1,2 +1,2 @@ export {}; -//# sourceMappingURL=index.d.ts.map \ No newline at end of file +//# sourceMappingURL=index.d.ts.map diff --git a/examples/full-example-typescript/index.js b/examples/full-example-typescript/index.js index f9278482..fdaef76f 100644 --- a/examples/full-example-typescript/index.js +++ b/examples/full-example-typescript/index.js @@ -1,66 +1,93 @@ "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { 
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } +var __awaiter = + (this && this.__awaiter) || + function (thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P + ? value + : new P(function (resolve) { + resolve(value); + }); + } return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done + ? resolve(result.value) + : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; + }; +var __importDefault = + (this && this.__importDefault) || + function (mod) { + return mod && mod.__esModule ? mod : { default: mod }; + }; Object.defineProperty(exports, "__esModule", { value: true }); const express_1 = __importDefault(require("express")); const app = express_1.default(); const path_1 = __importDefault(require("path")); -var http = require('http').createServer(app); +var http = require("http").createServer(app); var io = require("socket.io")(http); http.listen(30009); var prostgles = require("../../dist/index"); prostgles({ - dbConnection: { - host: "localhost", - port: 5432, - database: "example", - user: process.env.PRGL_USER, - password: process.env.PRGL_PWD - }, - // dbOptions: { - // application_name: "prostgles_api", - // max: 100, - // poolIdleTimeout: 10000 - // }, - sqlFilePath: path_1.default.join(__dirname + '/init.sql'), - io, - tsGeneratedTypesDir: path_1.default.join(__dirname + '/'), - publish: (socket, dbo) => { - // if(!socket || !socket._user.admin && !socket._user.id){ - // return false; - // } - return { - planes: "*" - }; - }, - // publishMethods: (socket, dbo: DBObj) => { - // return { - // insertPlanes: async (data) => { - // // let tl = Date.now(); - // let res = await (dbo.planes).insert(data); - // // console.log(Date.now() - tl, "ms"); - // return res; - // } - // } - // }, - onReady: (dbo) => __awaiter(void 0, void 0, void 0, function* () { - let plane = yield dboo.planes.findOne(); - app.get('/', (req, res) => { - res.sendFile(path_1.default.join(__dirname + '/home.html')); - }); - app.get('*', function (req, res) { - res.status(404).send('Page not found'); - }); + dbConnection: { + host: "localhost", + port: 5432, + database: "example", + user: process.env.PRGL_USER, + password: process.env.PRGL_PWD, + }, + // dbOptions: { + // application_name: "prostgles_api", + // max: 100, + // poolIdleTimeout: 10000 + // }, + sqlFilePath: path_1.default.join(__dirname + "/init.sql"), + io, + tsGeneratedTypesDir: path_1.default.join(__dirname + "/"), + publish: (socket, dbo) => { + // if(!socket || !socket._user.admin && !socket._user.id){ + // return false; + // } + return { + planes: "*", + }; + }, + // publishMethods: (socket, dbo: DBObj) => { + // 
return {
+  //       insertPlanes: async (data) => {
+  //         // let tl = Date.now();
+  //         let res = await (dbo.planes).insert(data);
+  //         // console.log(Date.now() - tl, "ms");
+  //         return res;
+  //       }
+  //     }
+  //   },
+  onReady: (dbo) =>
+    __awaiter(void 0, void 0, void 0, function* () {
+      let plane = yield dbo.planes.findOne();
+      app.get("/", (req, res) => {
+        res.sendFile(path_1.default.join(__dirname + "/home.html"));
+      });
+      app.get("*", function (req, res) {
+        res.status(404).send("Page not found");
+      });
     }),
 });
-//# sourceMappingURL=index.js.map
\ No newline at end of file
+//# sourceMappingURL=index.js.map
diff --git a/examples/full-example-vanilla/home.html b/examples/full-example-vanilla/home.html
index e6902545..cce414a1 100644
--- a/examples/full-example-vanilla/home.html
+++ b/examples/full-example-vanilla/home.html
@@ -1,131 +1,157 @@
- - - + - - Prostgles - - - - - - +

+ + - + }, + 0, + ); + + function rgb() { + color = "rgb("; + for (var i = 0; i < 3; i++) { + color += Math.floor(Math.random() * 255) + ","; + } + return color.replace(/\,$/, ")"); + } + } + + diff --git a/examples/server/javascript/index.js b/examples/server/javascript/index.js index 6f09d667..ea5b0e20 100644 --- a/examples/server/javascript/index.js +++ b/examples/server/javascript/index.js @@ -1,27 +1,25 @@ -const express = require('express'); +const express = require("express"); const app = express(); -const path = require('path'); -const { join } = require('path'); -var http = require('http').createServer(app); -var io = require('socket.io')(http); +const path = require("path"); +const { join } = require("path"); +var http = require("http").createServer(app); +var io = require("socket.io")(http); http.listen(3001); -const prostgles = require('prostgles-server'); +const prostgles = require("prostgles-server"); prostgles({ - dbConnection: { - host: "localhost", - port: "5432", + dbConnection: { + host: "localhost", + port: "5432", user: process.env.PRGL_USER, - password: process.env.PRGL_PWD - }, + password: process.env.PRGL_PWD, + }, - // Optional sql file to be run on each reload - sqlFilePath: path.join(__dirname+'/init.sql'), + // Optional sql file to be run on each reload + sqlFilePath: path.join(__dirname + "/init.sql"), - publish: () => "*", - io, - onReady: async (db, _db) => { - - }, -}); \ No newline at end of file + publish: () => "*", + io, + onReady: async (db, _db) => {}, +}); diff --git a/lib/Auth/AuthHandler.ts b/lib/Auth/AuthHandler.ts index 9ed9d924..380e1eca 100644 --- a/lib/Auth/AuthHandler.ts +++ b/lib/Auth/AuthHandler.ts @@ -1,30 +1,47 @@ -import { AnyObject, AuthGuardLocation, AuthGuardLocationResponse, CHANNELS, AuthSocketSchema } from "prostgles-types"; +import { + AnyObject, + AuthGuardLocation, + AuthGuardLocationResponse, + CHANNELS, + AuthSocketSchema, +} from "prostgles-types"; import { LocalParams, PRGLIOSocket } from "../DboBuilder/DboBuilder"; import { DBOFullyTyped } from "../DBSchemaBuilder"; import { removeExpressRoute } from "../FileManager/FileManager"; import { DB, DBHandlerServer, Prostgles } from "../Prostgles"; -import { Auth, AuthClientRequest, AuthResult, BasicSession, ExpressReq, ExpressRes, LoginClientInfo, LoginParams } from "./AuthTypes" +import { + Auth, + AuthClientRequest, + AuthResult, + BasicSession, + ExpressReq, + ExpressRes, + LoginClientInfo, + LoginParams, +} from "./AuthTypes"; import { getSafeReturnURL } from "./getSafeReturnURL"; import { setupAuthRoutes } from "./setupAuthRoutes"; import { getProviders } from "./setAuthProviders"; -export const HTTPCODES = { +export const HTTPCODES = { AUTH_ERROR: 401, NOT_FOUND: 404, BAD_REQUEST: 400, INTERNAL_SERVER_ERROR: 500, } as const; -export const getLoginClientInfo = (req: AuthClientRequest): AuthClientRequest & LoginClientInfo => { - if("httpReq" in req){ +export const getLoginClientInfo = ( + req: AuthClientRequest, +): AuthClientRequest & LoginClientInfo => { + if ("httpReq" in req) { const ip_address = req.httpReq.ip; - if(!ip_address) throw new Error("ip_address missing from req.httpReq"); + if (!ip_address) throw new Error("ip_address missing from req.httpReq"); const user_agent = req.httpReq.headers["user-agent"]; - return { - ...req, + return { + ...req, ip_address, ip_address_remote: req.httpReq.connection.remoteAddress, - x_real_ip: req.httpReq.headers['x-real-ip'] as any, + x_real_ip: req.httpReq.headers["x-real-ip"] as any, user_agent, }; } else { @@ -33,10 +50,10 @@ 
export const getLoginClientInfo = (req: AuthClientRequest): AuthClientRequest & ip_address: req.socket.handshake.address, ip_address_remote: req.socket.request.connection.remoteAddress, x_real_ip: req.socket.handshake.headers?.["x-real-ip"], - user_agent: req.socket.handshake.headers?.['user-agent'], - } + user_agent: req.socket.handshake.headers?.["user-agent"], + }; } -} +}; export const AUTH_ROUTES_AND_PARAMS = { login: "/login", @@ -61,7 +78,7 @@ export class AuthHandler { constructor(prostgles: Prostgles) { this.prostgles = prostgles; this.opts = prostgles.opts.auth as any; - if(!prostgles.dbo || !prostgles.db) throw "dbo or db missing"; + if (!prostgles.dbo || !prostgles.db) throw "dbo or db missing"; this.dbo = prostgles.dbo; this.db = prostgles.db; } @@ -74,65 +91,76 @@ export class AuthHandler { if (!sid) return undefined; if (typeof sid !== "string") throw "sid missing or not a string"; return sid; - } + }; matchesRoute = (route: string | undefined, clientFullRoute: string) => { - return route && clientFullRoute && ( - route === clientFullRoute || - clientFullRoute.startsWith(route) && ["/", "?", "#"].includes(clientFullRoute[route.length] ?? "") - ) - } + return ( + route && + clientFullRoute && + (route === clientFullRoute || + (clientFullRoute.startsWith(route) && + ["/", "?", "#"].includes(clientFullRoute[route.length] ?? ""))) + ); + }; isUserRoute = (pathname: string) => { - const { login, logoutGetPath, magicLinksRoute, loginWithProvider } = AUTH_ROUTES_AND_PARAMS; + const { login, logoutGetPath, magicLinksRoute, loginWithProvider } = + AUTH_ROUTES_AND_PARAMS; const pubRoutes = [ - ...this.opts?.expressConfig?.publicRoutes || [], - login, logoutGetPath, magicLinksRoute, loginWithProvider, - ].filter(publicRoute => publicRoute); - - return !pubRoutes.some(publicRoute => { + ...(this.opts?.expressConfig?.publicRoutes || []), + login, + logoutGetPath, + magicLinksRoute, + loginWithProvider, + ].filter((publicRoute) => publicRoute); + + return !pubRoutes.some((publicRoute) => { return this.matchesRoute(publicRoute, pathname); }); - } + }; - setCookieAndGoToReturnURLIFSet = (cookie: { sid: string; expires: number; }, r: { req: ExpressReq; res: ExpressRes }) => { + setCookieAndGoToReturnURLIFSet = ( + cookie: { sid: string; expires: number }, + r: { req: ExpressReq; res: ExpressRes }, + ) => { const { sid, expires } = cookie; const { res, req } = r; if (sid) { const maxAgeOneDay = 60 * 60 * 24; // 24 hours; - type CD = { maxAge: number } | { expires: Date } + type CD = { maxAge: number } | { expires: Date }; let cookieDuration: CD = { - maxAge: maxAgeOneDay - } - if(expires && Number.isFinite(expires) && !isNaN(+ new Date(expires))){ + maxAge: maxAgeOneDay, + }; + if (expires && Number.isFinite(expires) && !isNaN(+new Date(expires))) { // const maxAge = (+new Date(expires)) - Date.now(); cookieDuration = { expires: new Date(expires) }; - const days = (+cookieDuration.expires - Date.now())/(24 * 60 * 60e3); - if(days >= 400){ - console.warn(`Cookie expiration is higher than the Chrome 400 day limit: ${days}days`) + const days = (+cookieDuration.expires - Date.now()) / (24 * 60 * 60e3); + if (days >= 400) { + console.warn( + `Cookie expiration is higher than the Chrome 400 day limit: ${days}days`, + ); } } - - const cookieOpts = { - ...cookieDuration, + + const cookieOpts = { + ...cookieDuration, httpOnly: true, // The cookie only accessible by the web server //signed: true // Indicates if the cookie should be signed - secure: true, - sameSite: "strict" as const, - 
...(this.opts?.expressConfig?.cookieOptions || {}) + secure: true, + sameSite: "strict" as const, + ...(this.opts?.expressConfig?.cookieOptions || {}), }; const cookieData = sid; res.cookie(this.sidKeyName, cookieData, cookieOpts); const successURL = this.getReturnUrl(req) || "/"; res.redirect(successURL); - } else { - throw ("no user or session") + throw "no user or session"; } - } + }; - getUser = async (clientReq: { httpReq: ExpressReq; }): Promise => { - if(!this.opts?.getUser) { + getUser = async (clientReq: { httpReq: ExpressReq }): Promise => { + if (!this.opts?.getUser) { throw "this.opts.getUser missing"; } const sid = clientReq.httpReq?.cookies?.[this.sidKeyName]; @@ -140,37 +168,64 @@ export class AuthHandler { try { return this.throttledFunc(async () => { - return this.opts!.getUser(this.validateSid(sid), this.dbo as any, this.db, getLoginClientInfo(clientReq)); - }, 50) + return this.opts!.getUser( + this.validateSid(sid), + this.dbo as any, + this.db, + getLoginClientInfo(clientReq), + ); + }, 50); } catch (err) { console.error(err); } return undefined; - } + }; init = setupAuthRoutes.bind(this); getReturnUrl = (req: ExpressReq) => { const { returnUrlParamName } = AUTH_ROUTES_AND_PARAMS; if (returnUrlParamName && req?.query?.[returnUrlParamName]) { - const returnURL = decodeURIComponent(req?.query?.[returnUrlParamName] as string); - + const returnURL = decodeURIComponent( + req?.query?.[returnUrlParamName] as string, + ); + return getSafeReturnURL(returnURL, returnUrlParamName); } return null; - } + }; destroy = () => { const app = this.opts?.expressConfig?.app; - const { login, logoutGetPath, magicLinksExpressRoute, catchAll, loginWithProvider, emailSignup, magicLinksRoute, confirmEmail, confirmEmailExpressRoute } = AUTH_ROUTES_AND_PARAMS; - removeExpressRoute(app, [login, logoutGetPath, magicLinksExpressRoute, catchAll, loginWithProvider, emailSignup, magicLinksRoute, confirmEmail, confirmEmailExpressRoute]); - } + const { + login, + logoutGetPath, + magicLinksExpressRoute, + catchAll, + loginWithProvider, + emailSignup, + magicLinksRoute, + confirmEmail, + confirmEmailExpressRoute, + } = AUTH_ROUTES_AND_PARAMS; + removeExpressRoute(app, [ + login, + logoutGetPath, + magicLinksExpressRoute, + catchAll, + loginWithProvider, + emailSignup, + magicLinksRoute, + confirmEmail, + confirmEmailExpressRoute, + ]); + }; throttledFunc = (func: () => Promise, throttle = 500): Promise => { - return new Promise(async (resolve, reject) => { - - let result: any, error: any, finished = false; + let result: any, + error: any, + finished = false; /** * Throttle reject response times to prevent timing attacks @@ -181,51 +236,74 @@ export class AuthHandler { if (error) { reject(error); } else { - resolve(result) + resolve(result); } } }, throttle); - try { result = await func(); resolve(result); clearInterval(interval); } catch (err) { - console.log(err) + console.log(err); error = err; } finished = true; - }) - } + }); + }; - loginThrottled = async (params: LoginParams, client: LoginClientInfo): Promise => { + loginThrottled = async ( + params: LoginParams, + client: LoginClientInfo, + ): Promise => { if (!this.opts?.login) throw "Auth login config missing"; const { responseThrottle = 500 } = this.opts; return this.throttledFunc(async () => { - const result = await this.opts?.login?.(params, this.dbo as DBOFullyTyped, this.db, client); + const result = await this.opts?.login?.( + params, + this.dbo as DBOFullyTyped, + this.db, + client, + ); const err = { - msg: "Bad login result type. 
\nExpecting: undefined | null | { sid: string; expires: number } but got: " + JSON.stringify(result) - } - - if(!result) throw err; - if(result && (typeof result.sid !== "string" || typeof result.expires !== "number") || !result && ![undefined, null].includes(result)) { - throw err + msg: + "Bad login result type. \nExpecting: undefined | null | { sid: string; expires: number } but got: " + + JSON.stringify(result), + }; + + if (!result) throw err; + if ( + (result && + (typeof result.sid !== "string" || + typeof result.expires !== "number")) || + (!result && ![undefined, null].includes(result)) + ) { + throw err; } - if(result && result.expires < Date.now()){ - throw { msg: "auth.login() is returning an expired session. Can only login with a session.expires greater than Date.now()" } + if (result && result.expires < Date.now()) { + throw { + msg: "auth.login() is returning an expired session. Can only login with a session.expires greater than Date.now()", + }; } return result; }, responseThrottle); - }; - loginThrottledAndSetCookie = async (req: ExpressReq, res: ExpressRes, loginParams: LoginParams) => { + loginThrottledAndSetCookie = async ( + req: ExpressReq, + res: ExpressRes, + loginParams: LoginParams, + ) => { const start = Date.now(); - const { sid, expires } = await this.loginThrottled(loginParams, getLoginClientInfo({ httpReq: req })) || {}; + const { sid, expires } = + (await this.loginThrottled( + loginParams, + getLoginClientInfo({ httpReq: req }), + )) || {}; await this.prostgles.opts.onLog?.({ type: "auth", command: "login", @@ -233,21 +311,18 @@ export class AuthHandler { sid, socketId: undefined, }); - - if (sid) { + if (sid) { this.setCookieAndGoToReturnURLIFSet({ sid, expires }, { req, res }); - } else { - throw ("Internal error: no user or session") + throw "Internal error: no user or session"; } - } - + }; /** * Will return first sid value found in: - * Bearer header - * http cookie + * Bearer header + * http cookie * query params * Based on sid names in auth */ @@ -258,7 +333,8 @@ export class AuthHandler { const { sidKeyName } = this; if (localParams.socket) { const { handshake } = localParams.socket; - const querySid = handshake?.auth?.[sidKeyName] || handshake?.query?.[sidKeyName]; + const querySid = + handshake?.auth?.[sidKeyName] || handshake?.query?.[sidKeyName]; let rawSid = querySid; if (!rawSid) { const cookie_str = localParams.socket?.handshake?.headers?.cookie; @@ -266,29 +342,31 @@ export class AuthHandler { rawSid = cookie[sidKeyName]; } return this.validateSid(rawSid); - } else if (localParams.httpReq) { - const [tokenType, base64Token] = localParams.httpReq.headers.authorization?.split(' ') ?? []; + const [tokenType, base64Token] = + localParams.httpReq.headers.authorization?.split(" ") ?? []; let bearerSid: string | undefined; - if(tokenType && base64Token){ - if(tokenType.trim() !== "Bearer"){ + if (tokenType && base64Token) { + if (tokenType.trim() !== "Bearer") { throw "Only Bearer Authorization header allowed"; } - bearerSid = Buffer.from(base64Token, 'base64').toString(); + bearerSid = Buffer.from(base64Token, "base64").toString(); } - return this.validateSid(bearerSid ?? localParams.httpReq?.cookies?.[sidKeyName]); - + return this.validateSid( + bearerSid ?? 
localParams.httpReq?.cookies?.[sidKeyName], + ); } else throw "socket OR httpReq missing from localParams"; function parseCookieStr(cookie_str: string | undefined): any { if (!cookie_str || typeof cookie_str !== "string") { - return {} + return {}; } - return cookie_str.replace(/\s/g, '') + return cookie_str + .replace(/\s/g, "") .split(";") .reduce((prev, current) => { - const [name, value] = current.split('='); + const [name, value] = current.split("="); prev[name!] = value; return prev; }, {}); @@ -298,72 +376,86 @@ export class AuthHandler { /** * Used for logging */ - getSIDNoError = (localParams: LocalParams | undefined): string | undefined => { - if(!localParams) return undefined; + getSIDNoError = ( + localParams: LocalParams | undefined, + ): string | undefined => { + if (!localParams) return undefined; try { return this.getSID(localParams); } catch { return undefined; } - } + }; - async getClientInfo(localParams: Pick): Promise { + async getClientInfo( + localParams: Pick, + ): Promise { if (!this.opts) return {}; const getSession = this.opts.cacheSession?.getSession; const isSocket = "socket" in localParams; - if(isSocket){ - if(getSession && localParams.socket?.__prglCache){ + if (isSocket) { + if (getSession && localParams.socket?.__prglCache) { const { session, user, clientUser } = localParams.socket.__prglCache; - const isValid = this.isValidSocketSession(localParams.socket, session) - if(isValid){ - + const isValid = this.isValidSocketSession(localParams.socket, session); + if (isValid) { return { sid: session.sid, - user, + user, clientUser, - } - } else return { - sid: session.sid - }; - } + }; + } else + return { + sid: session.sid, + }; + } } const authStart = Date.now(); const res = await this.throttledFunc(async () => { - const { getUser } = this.opts ?? {}; - if (getUser && localParams && (localParams.httpReq || localParams.socket)) { + if ( + getUser && + localParams && + (localParams.httpReq || localParams.socket) + ) { const sid = this.getSID(localParams); - const clientReq = localParams.httpReq? { httpReq: localParams.httpReq } : { socket: localParams.socket! }; + const clientReq = localParams.httpReq + ? { httpReq: localParams.httpReq } + : { socket: localParams.socket! 
}; let user, clientUser; - if(sid){ - const res = await getUser(sid, this.dbo as any, this.db, getLoginClientInfo(clientReq)) as any; + if (sid) { + const res = (await getUser( + sid, + this.dbo as any, + this.db, + getLoginClientInfo(clientReq), + )) as any; user = res?.user; clientUser = res?.clientUser; } - if(getSession && isSocket){ - const session = await getSession(sid, this.dbo as any, this.db) - if(session?.expires && user && clientUser && localParams.socket){ - localParams.socket.__prglCache = { + if (getSession && isSocket) { + const session = await getSession(sid, this.dbo as any, this.db); + if (session?.expires && user && clientUser && localParams.socket) { + localParams.socket.__prglCache = { session, - user, + user, clientUser, - } + }; } } - if(sid) { - return { sid, user, clientUser } + if (sid) { + return { sid, user, clientUser }; } } - + return {}; }, 5); - await this.prostgles.opts.onLog?.({ - type: "auth", - command: "getClientInfo", + await this.prostgles.opts.onLog?.({ + type: "auth", + command: "getClientInfo", duration: Date.now() - authStart, sid: res.sid, socketId: localParams.socket?.id, @@ -371,55 +463,78 @@ export class AuthHandler { return res; } - isValidSocketSession = (socket: PRGLIOSocket, session: BasicSession): boolean => { - const hasExpired = Boolean(session && session.expires <= Date.now()) - if(this.opts?.expressConfig?.publicRoutes && !this.opts.expressConfig?.disableSocketAuthGuard){ + isValidSocketSession = ( + socket: PRGLIOSocket, + session: BasicSession, + ): boolean => { + const hasExpired = Boolean(session && session.expires <= Date.now()); + if ( + this.opts?.expressConfig?.publicRoutes && + !this.opts.expressConfig?.disableSocketAuthGuard + ) { const error = "Session has expired"; - if(hasExpired){ - if(session.onExpiration === "redirect") - socket.emit(CHANNELS.AUTHGUARD, { - shouldReload: session.onExpiration === "redirect", - error - }); + if (hasExpired) { + if (session.onExpiration === "redirect") + socket.emit(CHANNELS.AUTHGUARD, { + shouldReload: session.onExpiration === "redirect", + error, + }); throw error; } } return Boolean(session && !hasExpired); - } - - getClientAuth = async (clientReq: Pick): Promise<{ auth: AuthSocketSchema; userData: AuthResult; }> => { + }; + getClientAuth = async ( + clientReq: Pick, + ): Promise<{ auth: AuthSocketSchema; userData: AuthResult }> => { let pathGuard = false; - if (this.opts?.expressConfig?.publicRoutes && !this.opts.expressConfig?.disableSocketAuthGuard) { - + if ( + this.opts?.expressConfig?.publicRoutes && + !this.opts.expressConfig?.disableSocketAuthGuard + ) { pathGuard = true; - if("socket" in clientReq && clientReq.socket){ + if ("socket" in clientReq && clientReq.socket) { const { socket } = clientReq; - socket.removeAllListeners(CHANNELS.AUTHGUARD) - socket.on(CHANNELS.AUTHGUARD, async (params: AuthGuardLocation, cb = (_err: any, _res?: AuthGuardLocationResponse) => { /** EMPTY */ }) => { - - try { - - const { pathname, origin } = typeof params === "string" ? JSON.parse(params) : (params || {}); - if (pathname && typeof pathname !== "string") { - console.warn("Invalid pathname provided for AuthGuardLocation: ", pathname); + socket.removeAllListeners(CHANNELS.AUTHGUARD); + socket.on( + CHANNELS.AUTHGUARD, + async ( + params: AuthGuardLocation, + cb = (_err: any, _res?: AuthGuardLocationResponse) => { + /** EMPTY */ + }, + ) => { + try { + const { pathname, origin } = + typeof params === "string" ? 
JSON.parse(params) : params || {}; + if (pathname && typeof pathname !== "string") { + console.warn( + "Invalid pathname provided for AuthGuardLocation: ", + pathname, + ); + } + + /** These origins */ + const IGNORED_API_ORIGINS = ["file://"]; + if ( + !IGNORED_API_ORIGINS.includes(origin) && + pathname && + typeof pathname === "string" && + this.isUserRoute(pathname) && + !(await this.getClientInfo({ socket }))?.user + ) { + cb(null, { shouldReload: true }); + } else { + cb(null, { shouldReload: false }); + } + } catch (err) { + console.error("AUTHGUARD err: ", err); + cb(err); } - - /** These origins */ - const IGNORED_API_ORIGINS = ["file://"] - if (!IGNORED_API_ORIGINS.includes(origin) && pathname && typeof pathname === "string" && this.isUserRoute(pathname) && !(await this.getClientInfo({ socket }))?.user) { - cb(null, { shouldReload: true }); - } else { - cb(null, { shouldReload: false }); - } - - } catch (err) { - console.error("AUTHGUARD err: ", err); - cb(err) - } - }); - + }, + ); } } @@ -427,11 +542,14 @@ export class AuthHandler { const { email } = this.opts?.expressConfig?.registrations ?? {}; const auth: AuthSocketSchema = { providers: getProviders.bind(this)(), - register: email && { type: email.signupType, url: AUTH_ROUTES_AND_PARAMS.emailSignup }, + register: email && { + type: email.signupType, + url: AUTH_ROUTES_AND_PARAMS.emailSignup, + }, user: userData?.clientUser, loginType: email?.signupType ?? "withPassword", pathGuard, }; return { auth, userData }; - } -} \ No newline at end of file + }; +} diff --git a/lib/Auth/AuthTypes.ts b/lib/Auth/AuthTypes.ts index d72b268a..2eafe72e 100644 --- a/lib/Auth/AuthTypes.ts +++ b/lib/Auth/AuthTypes.ts @@ -1,5 +1,10 @@ import { Express, NextFunction, Request, Response } from "express"; -import { AnyObject, FieldFilter, IdentityProvider, UserLike } from "prostgles-types"; +import { + AnyObject, + FieldFilter, + IdentityProvider, + UserLike, +} from "prostgles-types"; import { DB } from "../Prostgles"; import { DBOFullyTyped } from "../DBSchemaBuilder"; import { PRGLIOSocket } from "../DboBuilder/DboBuilderTypes"; @@ -8,7 +13,10 @@ import type { StrategyOptions as GoogleStrategy, Profile as GoogleProfile, } from "passport-google-oauth20"; -import type { StrategyOptions as GitHubStrategy, Profile as GitHubProfile } from "passport-github2"; +import type { + StrategyOptions as GitHubStrategy, + Profile as GitHubProfile, +} from "passport-github2"; import type { MicrosoftStrategyOptions } from "passport-microsoft"; import type { StrategyOptions as FacebookStrategy, @@ -38,7 +46,9 @@ export type BasicSession = { /** On expired */ onExpiration: "redirect" | "show_error"; }; -export type AuthClientRequest = { socket: PRGLIOSocket } | { httpReq: ExpressReq }; +export type AuthClientRequest = + | { socket: PRGLIOSocket } + | { httpReq: ExpressReq }; type ThirdPartyProviders = { facebook?: Pick & { @@ -101,7 +111,10 @@ type EmailProvider = } | { signupType: "withPassword"; - onRegistered: (data: { username: string; password: string }) => void | Promise; + onRegistered: (data: { + username: string; + password: string; + }) => void | Promise; /** * Defaults to 8 */ @@ -115,7 +128,9 @@ type EmailProvider = confirmationUrlPath: string; }) => EmailWithoutTo | Promise; smtp: SMTPConfig; - onConfirmed: (data: { confirmationCode: string }) => void | Promise; + onConfirmed: (data: { + confirmationCode: string; + }) => void | Promise; }; }; @@ -252,22 +267,30 @@ export type Auth = { sid: string | undefined, dbo: DBOFullyTyped, db: DB, - client: 
AuthClientRequest & LoginClientInfo + client: AuthClientRequest & LoginClientInfo, ) => Awaitable>; login?: ( params: LoginParams, dbo: DBOFullyTyped, db: DB, - client: LoginClientInfo + client: LoginClientInfo, ) => Awaitable | BasicSession; - logout?: (sid: string | undefined, dbo: DBOFullyTyped, db: DB) => Awaitable; + logout?: ( + sid: string | undefined, + dbo: DBOFullyTyped, + db: DB, + ) => Awaitable; /** * If provided then session info will be saved on socket.__prglCache and reused from there */ cacheSession?: { - getSession: (sid: string | undefined, dbo: DBOFullyTyped, db: DB) => Awaitable; + getSession: ( + sid: string | undefined, + dbo: DBOFullyTyped, + db: DB, + ) => Awaitable; }; }; @@ -307,7 +330,11 @@ type ExpressConfig = { * Will be called after a GET request is authorised * This means that */ - onGetRequestOK?: (req: ExpressReq, res: ExpressRes, params: AuthRequestParams) => any; + onGetRequestOK?: ( + req: ExpressReq, + res: ExpressRes, + params: AuthRequestParams, + ) => any; /** * If defined, will check the magic link id and log in the user and redirect to the returnUrl if set @@ -320,7 +347,7 @@ type ExpressConfig = { magicId: string, dbo: DBOFullyTyped, db: DB, - client: LoginClientInfo + client: LoginClientInfo, ) => Awaitable; }; @@ -328,5 +355,9 @@ type ExpressConfig = { }; type ExpressMiddleware = ( - args: { req: ExpressReq; res: ExpressRes; next: NextFunction } & AuthRequestParams + args: { + req: ExpressReq; + res: ExpressRes; + next: NextFunction; + } & AuthRequestParams, ) => void | Promise; diff --git a/lib/Auth/getSafeReturnURL.ts b/lib/Auth/getSafeReturnURL.ts index fa27becd..dcb56bd2 100644 --- a/lib/Auth/getSafeReturnURL.ts +++ b/lib/Auth/getSafeReturnURL.ts @@ -1,14 +1,21 @@ -export const getSafeReturnURL = (returnURL: string, returnUrlParamName: string, quiet = false) => { +export const getSafeReturnURL = ( + returnURL: string, + returnUrlParamName: string, + quiet = false, +) => { /** Dissalow redirect to other domains */ - if(returnURL) { + if (returnURL) { const allowedOrigin = "https://localhost"; - const { origin, pathname, search, searchParams } = new URL(returnURL, allowedOrigin); - if( + const { origin, pathname, search, searchParams } = new URL( + returnURL, + allowedOrigin, + ); + if ( origin !== allowedOrigin || returnURL !== `${pathname}${search}` || searchParams.get(returnUrlParamName) - ){ - if(!quiet){ + ) { + if (!quiet) { console.error(`Unsafe returnUrl: ${returnURL}. 
Redirecting to /`);
       }
       return "/";
@@ -16,20 +23,27 @@ export const getSafeReturnURL = (returnURL: string, returnUrlParamName: string,
     return returnURL;
   }
-}
+};

-const issue = ([
-  ["https://localhost", "/"],
-  ["//localhost.bad.com", "/"],
-  ["//localhost.com", "/"],
-  ["/localhost/com", "/localhost/com"],
-  ["/localhost/com?here=there", "/localhost/com?here=there"],
-  ["/localhost/com?returnUrl=there", "/"],
-  ["//http://localhost.com", "/"],
-  ["//abc.com", "/"],
-  ["///abc.com", "/"],
-] as const).find(([returnURL, expected]) => getSafeReturnURL(returnURL, "returnUrl", true) !== expected);
+const issue = (
+  [
+    ["https://localhost", "/"],
+    ["//localhost.bad.com", "/"],
+    ["//localhost.com", "/"],
+    ["/localhost/com", "/localhost/com"],
+    ["/localhost/com?here=there", "/localhost/com?here=there"],
+    ["/localhost/com?returnUrl=there", "/"],
+    ["//http://localhost.com", "/"],
+    ["//abc.com", "/"],
+    ["///abc.com", "/"],
+  ] as const
+).find(
+  ([returnURL, expected]) =>
+    getSafeReturnURL(returnURL, "returnUrl", true) !== expected,
+);

-if(issue){
-  throw new Error(`getSafeReturnURL failed for ${issue[0]}. Expected: ${issue[1]}`);
-}
\ No newline at end of file
+if (issue) {
+  throw new Error(
+    `getSafeReturnURL failed for ${issue[0]}. Expected: ${issue[1]}`,
+  );
+}
diff --git a/lib/Auth/sendEmail.ts b/lib/Auth/sendEmail.ts
index dadb8b0c..82835e09 100644
--- a/lib/Auth/sendEmail.ts
+++ b/lib/Auth/sendEmail.ts
@@ -3,8 +3,14 @@ import * as nodemailer from "nodemailer";
 import * as aws from "@aws-sdk/client-ses";
 import SESTransport from "nodemailer/lib/ses-transport";

-type SESTransporter = nodemailer.Transporter<SESTransport.SentMessageInfo, SESTransport.Options>;
-type SMTPTransporter = nodemailer.Transporter<nodemailer.SentMessageInfo, nodemailer.TransportOptions>;
+type SESTransporter = nodemailer.Transporter<
+  SESTransport.SentMessageInfo,
+  SESTransport.Options
+>;
+type SMTPTransporter = nodemailer.Transporter<
+  nodemailer.SentMessageInfo,
+  nodemailer.TransportOptions
+>;
 type Transporter = SESTransporter | SMTPTransporter;

 const transporterCache: Map<string, Transporter> = new Map();
@@ -16,80 +22,77 @@ const transporterCache: Map<string, Transporter> = new Map();
 export const sendEmail = (smptConfig: SMTPConfig, email: Email) => {
   const transporter = getOrSetTransporter(smptConfig);
   return send(transporter, email);
-}
+};

 /**
  * Returns a transporter from cache or creates a new one
  */
 export const getOrSetTransporter = (smptConfig: SMTPConfig) => {
   const configStr = JSON.stringify(smptConfig);
-  const transporter = transporterCache.get(configStr) ?? getTransporter(smptConfig);
-  if(!transporterCache.has(configStr)){
+  const transporter =
+    transporterCache.get(configStr) ?? 
getTransporter(smptConfig); + if (!transporterCache.has(configStr)) { transporterCache.set(configStr, transporter); } return transporter; -} +}; const getTransporter = (smptConfig: SMTPConfig) => { let transporter: Transporter | undefined; - if(smptConfig.type === "aws-ses"){ - const { - region, - accessKeyId, + if (smptConfig.type === "aws-ses") { + const { + region, + accessKeyId, secretAccessKey, /** * max 1 messages/second */ - sendingRate = 1 + sendingRate = 1, } = smptConfig; const ses = new aws.SES({ apiVersion: "2010-12-01", region, credentials: { accessKeyId, - secretAccessKey - } + secretAccessKey, + }, }); transporter = nodemailer.createTransport({ SES: { ses, aws }, maxConnections: 1, - sendingRate + sendingRate, }); - } else { const { user, pass, host, port, secure } = smptConfig; transporter = nodemailer.createTransport({ host, port, secure, - auth: { user, pass } + auth: { user, pass }, }); } return transporter; -} +}; const send = (transporter: Transporter, email: Email) => { return new Promise((resolve, reject) => { const doSend = () => { if (transporter.isIdle()) { - transporter.sendMail( - email, - (err, info) => { - if(err){ - reject(err); - } else { - resolve(info); - } + transporter.sendMail(email, (err, info) => { + if (err) { + reject(err); + } else { + resolve(info); } - ); + }); } - } - if(transporter.isIdle()){ + }; + if (transporter.isIdle()) { doSend(); } else { - transporter.once('idle', doSend); + transporter.once("idle", doSend); } }); }; @@ -98,11 +101,11 @@ export const verifySMTPConfig = async (smptConfig: SMTPConfig) => { const transporter = getOrSetTransporter(smptConfig); return new Promise((resolve, reject) => { transporter.verify((err, success) => { - if(err){ + if (err) { reject(err); } else { resolve(success); } }); }); -} \ No newline at end of file +}; diff --git a/lib/Auth/setAuthProviders.ts b/lib/Auth/setAuthProviders.ts index f56ba8e6..e24c63f6 100644 --- a/lib/Auth/setAuthProviders.ts +++ b/lib/Auth/setAuthProviders.ts @@ -7,45 +7,67 @@ import { Strategy as MicrosoftStrategy } from "passport-microsoft"; import { AuthSocketSchema, getObjectEntries, isEmpty } from "prostgles-types"; import { getErrorAsObject } from "../DboBuilder/dboBuilderUtils"; import { removeExpressRouteByName } from "../FileManager/FileManager"; -import { AUTH_ROUTES_AND_PARAMS, AuthHandler, getLoginClientInfo } from "./AuthHandler"; -import { Auth } from './AuthTypes'; +import { + AUTH_ROUTES_AND_PARAMS, + AuthHandler, + getLoginClientInfo, +} from "./AuthHandler"; +import { Auth } from "./AuthTypes"; import { setEmailProvider } from "./setEmailProvider"; import * as passport from "passport"; -export const upsertNamedExpressMiddleware = (app: e.Express, handler: RequestHandler, name: string) => { +export const upsertNamedExpressMiddleware = ( + app: e.Express, + handler: RequestHandler, + name: string, +) => { const funcName = name; Object.defineProperty(handler, "name", { value: funcName }); removeExpressRouteByName(app, name); app.use(handler); -} +}; -export async function setAuthProviders (this: AuthHandler, { registrations, app }: Required["expressConfig"]) { - if(!registrations) return; - const { onProviderLoginFail, onProviderLoginStart, websiteUrl, OAuthProviders } = registrations; +export async function setAuthProviders( + this: AuthHandler, + { registrations, app }: Required["expressConfig"], +) { + if (!registrations) return; + const { + onProviderLoginFail, + onProviderLoginStart, + websiteUrl, + OAuthProviders, + } = registrations; await 
setEmailProvider.bind(this)(app); - if(!OAuthProviders || isEmpty(OAuthProviders)){ + if (!OAuthProviders || isEmpty(OAuthProviders)) { return; } - upsertNamedExpressMiddleware(app, passport.initialize(), "prostglesPassportMiddleware"); + upsertNamedExpressMiddleware( + app, + passport.initialize(), + "prostglesPassportMiddleware", + ); getObjectEntries(OAuthProviders).forEach(([providerName, providerConfig]) => { - - if(!providerConfig?.clientID){ + if (!providerConfig?.clientID) { return; } const { authOpts, ...config } = providerConfig; - - const strategy = providerName === "google" ? GoogleStrategy : - providerName === "github" ? GitHubStrategy : - providerName === "facebook" ? FacebookStrategy : - providerName === "microsoft" ? MicrosoftStrategy : - undefined - ; + const strategy = + providerName === "google" + ? GoogleStrategy + : providerName === "github" + ? GitHubStrategy + : providerName === "facebook" + ? FacebookStrategy + : providerName === "microsoft" + ? MicrosoftStrategy + : undefined; const callbackPath = `${AUTH_ROUTES_AND_PARAMS.loginWithProvider}/${providerName}/callback`; passport.use( new (strategy as typeof GoogleStrategy)( @@ -56,72 +78,73 @@ export async function setAuthProviders (this: AuthHandler, { registrations, app async (accessToken, refreshToken, profile, done) => { // This callback is where you would normally store or retrieve user info from the database return done(null, profile, { accessToken, refreshToken, profile }); - } - ) - ); - - app.get(`${AUTH_ROUTES_AND_PARAMS.loginWithProvider}/${providerName}`, - passport.authenticate(providerName, authOpts ?? {}) + }, + ), ); app.get( - callbackPath, - async (req, res) => { - try { - const clientInfo = getLoginClientInfo({ httpReq: req }); - const db = this.db; - const dbo = this.dbo as any; - const args = { provider: providerName, req, res, clientInfo, db, dbo }; - const startCheck = await onProviderLoginStart?.(args); - if(startCheck && "error" in startCheck){ - res.status(500).json({ error: startCheck.error }); - return; - } - passport.authenticate( - providerName, - { - session: false, - failureRedirect: "/login", - failWithError: true, - }, - async (error: any, _profile: any, authInfo: any) => { - if(error){ - await onProviderLoginFail?.({ ...args, error }); - res.status(500).json({ - error: "Failed to login with provider", - }); - } else { - this.loginThrottledAndSetCookie(req, res, { type: "provider", provider: providerName, ...authInfo }) - .catch((e: any) => { - res.status(500).json(getErrorAsObject(e)); - }); - } - } - )(req, res); + `${AUTH_ROUTES_AND_PARAMS.loginWithProvider}/${providerName}`, + passport.authenticate(providerName, authOpts ?? 
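      // no provider-specific options were configured; authenticate with passport's defaults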
{}), + ); - } catch (_e) { - res.status(500).json({ error: "Something went wrong" }); + app.get(callbackPath, async (req, res) => { + try { + const clientInfo = getLoginClientInfo({ httpReq: req }); + const db = this.db; + const dbo = this.dbo as any; + const args = { provider: providerName, req, res, clientInfo, db, dbo }; + const startCheck = await onProviderLoginStart?.(args); + if (startCheck && "error" in startCheck) { + res.status(500).json({ error: startCheck.error }); + return; } + passport.authenticate( + providerName, + { + session: false, + failureRedirect: "/login", + failWithError: true, + }, + async (error: any, _profile: any, authInfo: any) => { + if (error) { + await onProviderLoginFail?.({ ...args, error }); + res.status(500).json({ + error: "Failed to login with provider", + }); + } else { + this.loginThrottledAndSetCookie(req, res, { + type: "provider", + provider: providerName, + ...authInfo, + }).catch((e: any) => { + res.status(500).json(getErrorAsObject(e)); + }); + } + }, + )(req, res); + } catch (_e) { + res.status(500).json({ error: "Something went wrong" }); } - ); - + }); }); } -export function getProviders(this: AuthHandler): AuthSocketSchema["providers"] | undefined { - const { registrations } = this.opts?.expressConfig ?? {} - if(!registrations) return undefined; - const { OAuthProviders } = registrations; - if(!OAuthProviders || isEmpty(OAuthProviders)) return undefined; - - const result: AuthSocketSchema["providers"] = {} +export function getProviders( + this: AuthHandler, +): AuthSocketSchema["providers"] | undefined { + const { registrations } = this.opts?.expressConfig ?? {}; + if (!registrations) return undefined; + const { OAuthProviders } = registrations; + if (!OAuthProviders || isEmpty(OAuthProviders)) return undefined; + + const result: AuthSocketSchema["providers"] = {}; getObjectEntries(OAuthProviders).forEach(([providerName, config]) => { - if(config?.clientID){ + if (config?.clientID) { result[providerName] = { url: `${AUTH_ROUTES_AND_PARAMS.loginWithProvider}/${providerName}`, - } + }; } }); return result; -} \ No newline at end of file +} diff --git a/lib/Auth/setEmailProvider.ts b/lib/Auth/setEmailProvider.ts index 9693168f..091c4235 100644 --- a/lib/Auth/setEmailProvider.ts +++ b/lib/Auth/setEmailProvider.ts @@ -5,18 +5,17 @@ import { getOrSetTransporter, sendEmail } from "./sendEmail"; import { promises } from "node:dns"; export async function setEmailProvider(this: AuthHandler, app: e.Express) { - const { email, websiteUrl } = this.opts?.expressConfig?.registrations ?? 
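  // registrations may be absent; destructure from an empty object and bail out just below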
{}; - if(!email) return; - if(websiteUrl){ + if (!email) return; + if (websiteUrl) { await checkDmarc(websiteUrl); } /** * Setup nodemailer transporters */ - if(email.signupType === "withPassword"){ - if(email.emailConfirmation){ + if (email.signupType === "withPassword") { + if (email.emailConfirmation) { tryGetTransporter(email.emailConfirmation.smtp); } } else { @@ -26,79 +25,97 @@ export async function setEmailProvider(this: AuthHandler, app: e.Express) { app.post(AUTH_ROUTES_AND_PARAMS.emailSignup, async (req, res) => { const { username, password } = req.body; let validationError = ""; - if(typeof username !== "string"){ + if (typeof username !== "string") { validationError = "Invalid username"; } - if(email.signupType === "withPassword"){ + if (email.signupType === "withPassword") { const { minPasswordLength = 8 } = email; - if(typeof password !== "string"){ + if (typeof password !== "string") { validationError = "Invalid password"; - } else if(password.length < minPasswordLength){ + } else if (password.length < minPasswordLength) { validationError = `Password must be at least ${minPasswordLength} characters long`; } } - if(validationError){ - res.status(HTTPCODES.AUTH_ERROR).json({ success: false, error: validationError }); + if (validationError) { + res + .status(HTTPCODES.AUTH_ERROR) + .json({ success: false, error: validationError }); return; } try { let emailMessage: undefined | { message: Email; smtp: SMTPConfig }; - if(email.signupType === "withPassword"){ - if(email.emailConfirmation){ + if (email.signupType === "withPassword") { + if (email.emailConfirmation) { const { onSend, smtp } = email.emailConfirmation; - const message = await onSend({ email: username, confirmationUrlPath: `${websiteUrl}${AUTH_ROUTES_AND_PARAMS.confirmEmail}` }); + const message = await onSend({ + email: username, + confirmationUrlPath: `${websiteUrl}${AUTH_ROUTES_AND_PARAMS.confirmEmail}`, + }); emailMessage = { message: { ...message, to: username }, smtp }; } } else { const { emailMagicLink } = email; - const message = await emailMagicLink.onSend({ email: username, magicLinkPath: `${websiteUrl}${AUTH_ROUTES_AND_PARAMS.magicLinksRoute}` }); - emailMessage = { message: { ...message, to: username }, smtp: emailMagicLink.smtp }; + const message = await emailMagicLink.onSend({ + email: username, + magicLinkPath: `${websiteUrl}${AUTH_ROUTES_AND_PARAMS.magicLinksRoute}`, + }); + emailMessage = { + message: { ...message, to: username }, + smtp: emailMagicLink.smtp, + }; } - if(emailMessage){ + if (emailMessage) { await sendEmail(emailMessage.smtp, emailMessage.message); res.json({ success: true, message: "Email sent" }); } } catch { - res.status(HTTPCODES.AUTH_ERROR).json({ success: false, error: "Failed to send email" }); + res + .status(HTTPCODES.AUTH_ERROR) + .json({ success: false, error: "Failed to send email" }); } }); - if(email.signupType === "withPassword" && email.emailConfirmation){ - app.get(AUTH_ROUTES_AND_PARAMS.confirmEmailExpressRoute, async (req, res) => { - const { id } = req.params ?? {}; - try { - await email.emailConfirmation?.onConfirmed({ confirmationCode: id }); - res.json({ success: true, message: "Email confirmed" }); - } catch (_e) { - res.status(HTTPCODES.AUTH_ERROR).json({ success: false, error: "Failed to confirm email" }); - } - }); + if (email.signupType === "withPassword" && email.emailConfirmation) { + app.get( + AUTH_ROUTES_AND_PARAMS.confirmEmailExpressRoute, + async (req, res) => { + const { id } = req.params ?? 
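        // id is the confirmation code captured from the route parameter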
{}; + try { + await email.emailConfirmation?.onConfirmed({ confirmationCode: id }); + res.json({ success: true, message: "Email confirmed" }); + } catch (_e) { + res + .status(HTTPCODES.AUTH_ERROR) + .json({ success: false, error: "Failed to confirm email" }); + } + }, + ); } } const checkDmarc = async (websiteUrl: string) => { const { host, hostname } = new URL(websiteUrl); - const ignoredHosts = ["localhost", "127.0.0.1"] - if(!hostname || ignoredHosts.includes(hostname)){ + const ignoredHosts = ["localhost", "127.0.0.1"]; + if (!hostname || ignoredHosts.includes(hostname)) { return; } const dmarc = await promises.resolveTxt(`_dmarc.${host}`); const dmarkTxt = dmarc[0]?.[0]; - if( + if ( !dmarkTxt?.includes("v=DMARC1") || (!dmarkTxt?.includes("p=reject") && !dmarkTxt?.includes("p=quarantine")) - ){ + ) { throw new Error("DMARC not set to reject/quarantine"); } else { - console.log("DMARC set to reject") + console.log("DMARC set to reject"); } -} +}; const tryGetTransporter = (smtp: SMTPConfig) => { try { getOrSetTransporter(smtp); - } catch(err) { + } catch (err) { console.error("Failed to set email transporter", err); } -} \ No newline at end of file +}; diff --git a/lib/Auth/setupAuthRoutes.ts b/lib/Auth/setupAuthRoutes.ts index 1c45d0e8..280eb179 100644 --- a/lib/Auth/setupAuthRoutes.ts +++ b/lib/Auth/setupAuthRoutes.ts @@ -1,12 +1,25 @@ import { RequestHandler } from "express"; import { DBOFullyTyped } from "../DBSchemaBuilder"; -import { AUTH_ROUTES_AND_PARAMS, AuthHandler, getLoginClientInfo, HTTPCODES } from "./AuthHandler"; -import { AuthClientRequest, ExpressReq, ExpressRes, LoginParams } from "./AuthTypes"; -import { setAuthProviders, upsertNamedExpressMiddleware } from "./setAuthProviders"; +import { + AUTH_ROUTES_AND_PARAMS, + AuthHandler, + getLoginClientInfo, + HTTPCODES, +} from "./AuthHandler"; +import { + AuthClientRequest, + ExpressReq, + ExpressRes, + LoginParams, +} from "./AuthTypes"; +import { + setAuthProviders, + upsertNamedExpressMiddleware, +} from "./setAuthProviders"; export async function setupAuthRoutes(this: AuthHandler) { if (!this.opts) return; - + const { login, getUser, expressConfig } = this.opts; if (!login) { @@ -20,27 +33,37 @@ export async function setupAuthRoutes(this: AuthHandler) { if (!getUser) throw "getUser missing from auth config"; if (!expressConfig) { - return + return; } - const { app, publicRoutes = [], onGetRequestOK, magicLinks, use } = expressConfig; - if (publicRoutes.find(r => typeof r !== "string" || !r)) { - throw "Invalid or empty string provided within publicRoutes " + const { + app, + publicRoutes = [], + onGetRequestOK, + magicLinks, + use, + } = expressConfig; + if (publicRoutes.find((r) => typeof r !== "string" || !r)) { + throw "Invalid or empty string provided within publicRoutes "; } await setAuthProviders.bind(this)(expressConfig); - if(use){ + if (use) { const prostglesUseMiddleware: RequestHandler = (req, res, next) => { - use({ - req, - res, - next, + use({ + req, + res, + next, getUser: () => this.getUser({ httpReq: req }) as any, - dbo: this.dbo as DBOFullyTyped, + dbo: this.dbo as DBOFullyTyped, db: this.db, - }) + }); }; - upsertNamedExpressMiddleware(app, prostglesUseMiddleware, "prostglesUseMiddleware"); + upsertNamedExpressMiddleware( + app, + prostglesUseMiddleware, + "prostglesUseMiddleware", + ); } if (magicLinks) { @@ -49,111 +72,152 @@ export async function setupAuthRoutes(this: AuthHandler) { throw "Check must be defined for magicLinks"; } - app.get(AUTH_ROUTES_AND_PARAMS.magicLinksExpressRoute, async 
(req: ExpressReq, res: ExpressRes) => { - const { id } = req.params ?? {}; - - if (typeof id !== "string" || !id) { - res.status(HTTPCODES.BAD_REQUEST).json({ msg: "Invalid magic-link id. Expecting a string" }); - } else { - try { - const session = await this.throttledFunc(async () => { - return check(id, this.dbo as any, this.db, getLoginClientInfo({ httpReq: req })); - }); - if(!session) { - res.status(HTTPCODES.AUTH_ERROR).json({ error: "Invalid magic-link" }); - } else { - this.setCookieAndGoToReturnURLIFSet(session, { req, res }); + app.get( + AUTH_ROUTES_AND_PARAMS.magicLinksExpressRoute, + async (req: ExpressReq, res: ExpressRes) => { + const { id } = req.params ?? {}; + + if (typeof id !== "string" || !id) { + res + .status(HTTPCODES.BAD_REQUEST) + .json({ msg: "Invalid magic-link id. Expecting a string" }); + } else { + try { + const session = await this.throttledFunc(async () => { + return check( + id, + this.dbo as any, + this.db, + getLoginClientInfo({ httpReq: req }), + ); + }); + if (!session) { + res + .status(HTTPCODES.AUTH_ERROR) + .json({ error: "Invalid magic-link" }); + } else { + this.setCookieAndGoToReturnURLIFSet(session, { req, res }); + } + } catch (e) { + res.status(HTTPCODES.AUTH_ERROR).json({ error: e }); } - - } catch (e) { - res.status(HTTPCODES.AUTH_ERROR).json({ error: e }); } - } - }); + }, + ); } - app.post(AUTH_ROUTES_AND_PARAMS.login, async (req: ExpressReq, res: ExpressRes) => { - try { - const loginParams: LoginParams = { - type: "username", - ...req.body, - }; - - await this.loginThrottledAndSetCookie(req, res, loginParams); - } catch (error) { - res.status(HTTPCODES.AUTH_ERROR).json({ error }); - } - }); + app.post( + AUTH_ROUTES_AND_PARAMS.login, + async (req: ExpressReq, res: ExpressRes) => { + try { + const loginParams: LoginParams = { + type: "username", + ...req.body, + }; + + await this.loginThrottledAndSetCookie(req, res, loginParams); + } catch (error) { + res.status(HTTPCODES.AUTH_ERROR).json({ error }); + } + }, + ); const onLogout = async (req: ExpressReq, res: ExpressRes) => { const sid = this.validateSid(req?.cookies?.[this.sidKeyName]); if (sid) { try { await this.throttledFunc(() => { - return this.opts?.logout?.(req?.cookies?.[this.sidKeyName], this.dbo as any, this.db); - }) + return this.opts?.logout?.( + req?.cookies?.[this.sidKeyName], + this.dbo as any, + this.db, + ); + }); } catch (err) { console.error(err); } } - res.redirect("/") - } - + res.redirect("/"); + }; + /* Redirect if not logged in and requesting non public content */ - app.get(AUTH_ROUTES_AND_PARAMS.catchAll, async (req: ExpressReq, res: ExpressRes, next) => { - - const clientReq: AuthClientRequest = { httpReq: req }; - const getUser = this.getUser; - if(this.prostgles.restApi){ - if(Object.values(this.prostgles.restApi.routes).some(restRoute => this.matchesRoute(restRoute.split("/:")[0], req.path))){ - next(); - return; - } - } - try { - const returnURL = this.getReturnUrl(req); - - if(this.matchesRoute(AUTH_ROUTES_AND_PARAMS.logoutGetPath, req.path)){ - await onLogout(req, res); - return; + app.get( + AUTH_ROUTES_AND_PARAMS.catchAll, + async (req: ExpressReq, res: ExpressRes, next) => { + const clientReq: AuthClientRequest = { httpReq: req }; + const getUser = this.getUser; + if (this.prostgles.restApi) { + if ( + Object.values(this.prostgles.restApi.routes).some((restRoute) => + this.matchesRoute(restRoute.split("/:")[0], req.path), + ) + ) { + next(); + return; + } } + try { + const returnURL = this.getReturnUrl(req); - 
if(this.matchesRoute(AUTH_ROUTES_AND_PARAMS.loginWithProvider, req.path)){ - next(); - return; - } - /** - * Requesting a User route - */ - if (this.isUserRoute(req.path)) { - - /* Check auth. Redirect to login if unauthorized */ - const u = await getUser(clientReq); - if (!u) { - res.redirect(`${AUTH_ROUTES_AND_PARAMS.login}?returnURL=${encodeURIComponent(req.originalUrl)}`); + if (this.matchesRoute(AUTH_ROUTES_AND_PARAMS.logoutGetPath, req.path)) { + await onLogout(req, res); return; } - /* If authorized and going to returnUrl then redirect. Otherwise serve file */ - } else if (returnURL && (await getUser(clientReq))) { + if ( + this.matchesRoute(AUTH_ROUTES_AND_PARAMS.loginWithProvider, req.path) + ) { + next(); + return; + } + /** + * Requesting a User route + */ + if (this.isUserRoute(req.path)) { + /* Check auth. Redirect to login if unauthorized */ + const u = await getUser(clientReq); + if (!u) { + res.redirect( + `${AUTH_ROUTES_AND_PARAMS.login}?returnURL=${encodeURIComponent(req.originalUrl)}`, + ); + return; + } - res.redirect(returnURL); - return; + /* If authorized and going to returnUrl then redirect. Otherwise serve file */ + } else if (returnURL && (await getUser(clientReq))) { + res.redirect(returnURL); + return; - /** If Logged in and requesting login then redirect to main page */ - } else if (this.matchesRoute(AUTH_ROUTES_AND_PARAMS.login, req.path) && (await getUser(clientReq))) { + /** If Logged in and requesting login then redirect to main page */ + } else if ( + this.matchesRoute(AUTH_ROUTES_AND_PARAMS.login, req.path) && + (await getUser(clientReq)) + ) { + res.redirect("/"); + return; + } - res.redirect("/"); - return; + onGetRequestOK?.(req, res, { + getUser: () => getUser(clientReq), + dbo: this.dbo as DBOFullyTyped, + db: this.db, + }); + } catch (error) { + console.error(error); + const errorMessage = + typeof error === "string" + ? error + : error instanceof Error + ? error.message + : ""; + res + .status(HTTPCODES.AUTH_ERROR) + .json({ + error: + "Something went wrong when processing your request" + + (errorMessage ? ": " + errorMessage : ""), + }); } - - onGetRequestOK?.(req, res, { getUser: () => getUser(clientReq), dbo: this.dbo as DBOFullyTyped, db: this.db }) - - } catch (error) { - console.error(error); - const errorMessage = typeof error === "string" ? error : error instanceof Error ? error.message : ""; - res.status(HTTPCODES.AUTH_ERROR).json({ error: "Something went wrong when processing your request" + (errorMessage? 
(": " + errorMessage) : "") }); - } - - }); -} \ No newline at end of file + }, + ); +} diff --git a/lib/DBEventsManager.ts b/lib/DBEventsManager.ts index dfe19a0c..bacaef1b 100644 --- a/lib/DBEventsManager.ts +++ b/lib/DBEventsManager.ts @@ -1,17 +1,19 @@ -import { PostgresNotifListenManager, PrglNotifListener } from "./PostgresNotifListenManager"; +import { + PostgresNotifListenManager, + PrglNotifListener, +} from "./PostgresNotifListenManager"; import { DB, PGP } from "./Prostgles"; import { getKeys, CHANNELS } from "prostgles-types"; import { PRGLIOSocket } from "./DboBuilder/DboBuilder"; export class DBEventsManager { - - notifies: { + notifies: { [key: string]: { socketChannel: string; - sockets: any[]; + sockets: any[]; localFuncs: ((payload: string) => void)[]; notifMgr: PostgresNotifListenManager; - } + }; } = {}; notice: { @@ -21,73 +23,82 @@ export class DBEventsManager { } = { socketChannel: CHANNELS.NOTICE_EV, socketUnsubChannel: CHANNELS.NOTICE_EV + "unsubscribe", - sockets: [] + sockets: [], }; notifManager?: PostgresNotifListenManager; db_pg: DB; - pgp: PGP - constructor(db_pg: DB, pgp: PGP){ + pgp: PGP; + constructor(db_pg: DB, pgp: PGP) { this.db_pg = db_pg; this.pgp = pgp; } private onNotif: PrglNotifListener = ({ channel, payload }) => { - // console.log(36, { channel, payload }, Object.keys(this.notifies)); getKeys(this.notifies) - .filter(ch => ch === channel) - .map(ch => { + .filter((ch) => ch === channel) + .map((ch) => { const sub = this.notifies[ch]!; - - sub.sockets.map(s => { - s.emit(sub.socketChannel, payload) + + sub.sockets.map((s) => { + s.emit(sub.socketChannel, payload); }); - sub.localFuncs.map(lf => { + sub.localFuncs.map((lf) => { lf(payload); - }) + }); }); - } + }; onNotice = (notice: any) => { - if(this.notice && this.notice.sockets.length){ - this.notice.sockets.map(s => { + if (this.notice && this.notice.sockets.length) { + this.notice.sockets.map((s) => { s.emit(this.notice.socketChannel, notice); - }) + }); } - } + }; getNotifChannelName = async (channel: string) => { const c = await this.db_pg.one("SELECT quote_ident($1) as c", channel); return c.c; - } + }; - async addNotify(query: string, socket?: PRGLIOSocket, func?: any): Promise<{ + async addNotify( + query: string, + socket?: PRGLIOSocket, + func?: any, + ): Promise<{ socketChannel: string; socketUnsubChannel: string; notifChannel: string; unsubscribe?: () => void; }> { - if(typeof query !== "string" || (!socket && !func)){ - throw "Expecting (query: string, socket?, localFunc?) But received: " + JSON.stringify({ query, socket, func }); + if (typeof query !== "string" || (!socket && !func)) { + throw ( + "Expecting (query: string, socket?, localFunc?) 
But received: " + + JSON.stringify({ query, socket, func }) + ); } /* Remove comments */ - let q = query.trim() - .replace(/\/\*[\s\S]*?\*\/|\/\/.*/g,'\n') - .split("\n").map(v => v.trim()).filter(v => v && !v.startsWith("--")) + let q = query + .trim() + .replace(/\/\*[\s\S]*?\*\/|\/\/.*/g, "\n") + .split("\n") + .map((v) => v.trim()) + .filter((v) => v && !v.startsWith("--")) .join("\n"); /* Find the notify channel name */ - if(!q.toLowerCase().startsWith("listen")){ + if (!q.toLowerCase().startsWith("listen")) { throw "Expecting a LISTEN query but got: " + query; } q = q.slice(7).trim(); // Remove listen - if(q.endsWith(";")) q = q.slice(0, -1); + if (q.endsWith(";")) q = q.slice(0, -1); - if(q.startsWith('"') && q.endsWith('"')) { + if (q.startsWith('"') && q.endsWith('"')) { q = q.slice(1, -1); } else { /* Replicate PG by lowercasing identifier if not quoted */ @@ -95,35 +106,40 @@ export class DBEventsManager { } q = q.replace(/""/g, `"`); - const channel = q; - let notifChannel = await this.getNotifChannelName(channel) + const channel = q; + let notifChannel = await this.getNotifChannelName(channel); notifChannel = notifChannel.replace(/""/g, `"`); - if(notifChannel.startsWith('"')) notifChannel = notifChannel.slice(1, -1); + if (notifChannel.startsWith('"')) notifChannel = notifChannel.slice(1, -1); const socketChannel = CHANNELS.LISTEN_EV + notifChannel, socketUnsubChannel = socketChannel + "unsubscribe"; - if(!this.notifies[notifChannel]){ + if (!this.notifies[notifChannel]) { this.notifies[notifChannel] = { socketChannel, - sockets: socket? [socket] : [], - localFuncs: func? [func] : [], - notifMgr: await PostgresNotifListenManager.create(this.db_pg, this.onNotif, channel) - } - + sockets: socket ? [socket] : [], + localFuncs: func ? [func] : [], + notifMgr: await PostgresNotifListenManager.create( + this.db_pg, + this.onNotif, + channel, + ), + }; } else { - if(socket && !this.notifies[notifChannel]!.sockets.find(s => s.id === socket.id)) { + if ( + socket && + !this.notifies[notifChannel]!.sockets.find((s) => s.id === socket.id) + ) { this.notifies[notifChannel]!.sockets.push(socket); - - } else if(func) { + } else if (func) { this.notifies[notifChannel]!.localFuncs.push(func); } } - if(socket){ + if (socket) { socket.removeAllListeners(socketUnsubChannel); - socket.on(socketUnsubChannel, ()=>{ + socket.on(socketUnsubChannel, () => { this.removeNotify(notifChannel, socket); }); } @@ -132,32 +148,38 @@ export class DBEventsManager { socketChannel, socketUnsubChannel, notifChannel, - } + }; } - removeNotify(channel?: string, socket?: PRGLIOSocket, func?: any){ - const notifChannel = channel && this.notifies[channel] - if(notifChannel){ - if(socket){ - notifChannel.sockets = notifChannel.sockets.filter(s => s.id !== socket.id); - } else if(func){ - notifChannel.localFuncs = notifChannel.localFuncs.filter(f => f !== func); + removeNotify(channel?: string, socket?: PRGLIOSocket, func?: any) { + const notifChannel = channel && this.notifies[channel]; + if (notifChannel) { + if (socket) { + notifChannel.sockets = notifChannel.sockets.filter( + (s) => s.id !== socket.id, + ); + } else if (func) { + notifChannel.localFuncs = notifChannel.localFuncs.filter( + (f) => f !== func, + ); } /* UNLISTEN if no listeners ?? 
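       (i.e. once both sockets and localFuncs are empty, the underlying LISTEN could be dropped to free the channel)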
*/ } - if(socket){ - getKeys(this.notifies).forEach(channel => { - this.notifies[channel]!.sockets = this.notifies[channel]!.sockets.filter(s => s.id !== socket.id); - }) + if (socket) { + getKeys(this.notifies).forEach((channel) => { + this.notifies[channel]!.sockets = this.notifies[ + channel + ]!.sockets.filter((s) => s.id !== socket.id); + }); } } - addNotice(socket: PRGLIOSocket){ - if(!socket || !socket.id) throw "Expecting a socket obj with id"; + addNotice(socket: PRGLIOSocket) { + if (!socket || !socket.id) throw "Expecting a socket obj with id"; - if(!this.notice.sockets.find(s => s.id === socket.id)){ + if (!this.notice.sockets.find((s) => s.id === socket.id)) { this.notice.sockets.push(socket); } @@ -168,11 +190,11 @@ export class DBEventsManager { this.removeNotice(socket); }); - return { socketChannel, socketUnsubChannel, } + return { socketChannel, socketUnsubChannel }; } - removeNotice(socket: PRGLIOSocket){ - if(!socket || !socket.id) throw "Expecting a socket obj with id"; - this.notice.sockets = this.notice.sockets.filter(s => s.id !== socket.id) + removeNotice(socket: PRGLIOSocket) { + if (!socket || !socket.id) throw "Expecting a socket obj with id"; + this.notice.sockets = this.notice.sockets.filter((s) => s.id !== socket.id); } -} \ No newline at end of file +} diff --git a/lib/DBSchemaBuilder.ts b/lib/DBSchemaBuilder.ts index 5fdd08bc..508c4e32 100644 --- a/lib/DBSchemaBuilder.ts +++ b/lib/DBSchemaBuilder.ts @@ -10,7 +10,11 @@ import { } from "prostgles-types"; import prostgles from "."; import { Auth } from "./Auth/AuthTypes"; -import { DboBuilder, escapeTSNames, postgresToTsType } from "./DboBuilder/DboBuilder"; +import { + DboBuilder, + escapeTSNames, + postgresToTsType, +} from "./DboBuilder/DboBuilder"; import { PublishAllOrNothing, PublishParams, @@ -18,14 +22,28 @@ import { PublishViewRule, } from "./PublishParser/PublishParser"; import { getJSONBSchemaTSTypes } from "./JSONBValidation/validation"; -import { DBHandlerServer, TableSchemaColumn, TX } from "./DboBuilder/DboBuilderTypes"; +import { + DBHandlerServer, + TableSchemaColumn, + TX, +} from "./DboBuilder/DboBuilderTypes"; export const getDBSchema = (dboBuilder: DboBuilder): string => { const tables: string[] = []; - const getColTypeForDBSchema = (udt_name: TableSchemaColumn["udt_name"]): string => { + const getColTypeForDBSchema = ( + udt_name: TableSchemaColumn["udt_name"], + ): string => { if (udt_name === "interval") { - const units = ["years", "months", "days", "hours", "minutes", "seconds", "milliseconds"]; + const units = [ + "years", + "months", + "days", + "hours", + "minutes", + "seconds", + "milliseconds", + ]; return `{ ${units.map((u) => `${u}?: number;`).join(" ")} }`; } @@ -38,13 +56,23 @@ export const getDBSchema = (dboBuilder: DboBuilder): string => { ?.slice(0) .sort((a, b) => a.name.localeCompare(b.name)) .forEach((tov) => { - const cols = tov.columns.slice(0).sort((a, b) => a.name.localeCompare(b.name)); + const cols = tov.columns + .slice(0) + .sort((a, b) => a.name.localeCompare(b.name)); const getColType = (c: (typeof cols)[number]) => { let type: string = - (c.is_nullable ? "null | " : "") + getColTypeForDBSchema(c.udt_name) + ";"; - const colConf = dboBuilder.prostgles.tableConfigurator?.getColumnConfig(tov.name, c.name); + (c.is_nullable ? 
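          // nullable columns are emitted as a union with null in the generated schema type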
"null | " : "") + + getColTypeForDBSchema(c.udt_name) + + ";"; + const colConf = dboBuilder.prostgles.tableConfigurator?.getColumnConfig( + tov.name, + c.name, + ); if (colConf) { - if (isObject(colConf) && (colConf.jsonbSchema || colConf.jsonbSchemaType)) { + if ( + isObject(colConf) && + (colConf.jsonbSchema || colConf.jsonbSchemaType) + ) { const schema: JSONB.JSONBSchema = colConf.jsonbSchema || { ...colConf, type: colConf.jsonbSchemaType, @@ -54,11 +82,13 @@ export const getDBSchema = (dboBuilder: DboBuilder): string => { schema, { nullable: colConf.nullable }, " ", - dboBuilder.tablesOrViews ?? [] + dboBuilder.tablesOrViews ?? [], ); } else if (isObject(colConf) && "enum" in colConf) { if (!colConf.enum) throw "colConf.enum missing"; - const types = colConf.enum.map((t) => (typeof t === "number" ? t : JSON.stringify(t))); + const types = colConf.enum.map((t) => + typeof t === "number" ? t : JSON.stringify(t), + ); if (colConf.nullable) { types.unshift("null"); } @@ -80,7 +110,7 @@ export const getDBSchema = (dboBuilder: DboBuilder): string => { columns: {${cols .map( (c) => ` - ${getColType(c)}` + ${getColType(c)}`, ) .join("")} }; @@ -102,12 +132,11 @@ type ServerTableHandler< Schema extends DBSchema | void = void, > = TableHandler & { is_view: boolean }; -export type DBTableHandlersFromSchema = - Schema extends DBSchema ? - { - [tov_name in keyof Schema]: Schema[tov_name]["is_view"] extends true ? - ServerViewHandler - : ServerTableHandler; +export type DBTableHandlersFromSchema = Schema extends DBSchema + ? { + [tov_name in keyof Schema]: Schema[tov_name]["is_view"] extends true + ? ServerViewHandler + : ServerTableHandler; } : Record>; @@ -125,17 +154,22 @@ export type DBHandlerServerExtra< export type DBOFullyTyped = DBTableHandlersFromSchema & DBHandlerServerExtra>; -export type PublishFullyTyped = - Schema extends DBSchema ? - | PublishAllOrNothing - | { - [tov_name in keyof Partial]: - | PublishAllOrNothing - | (Schema[tov_name]["is_view"] extends true ? - PublishViewRule - : PublishTableRule); - } - : PublishAllOrNothing | Record; +export type PublishFullyTyped = Schema extends DBSchema + ? + | PublishAllOrNothing + | { + [tov_name in keyof Partial]: + | PublishAllOrNothing + | (Schema[tov_name]["is_view"] extends true + ? PublishViewRule + : PublishTableRule); + } + : + | PublishAllOrNothing + | Record< + string, + PublishViewRule | PublishTableRule | PublishAllOrNothing + >; /** Type checks */ () => { diff --git a/lib/DboBuilder/DboBuilder.ts b/lib/DboBuilder/DboBuilder.ts index 87eb73d4..59fb2b35 100644 --- a/lib/DboBuilder/DboBuilder.ts +++ b/lib/DboBuilder/DboBuilder.ts @@ -167,7 +167,9 @@ export class DboBuilder { _joins?: Join[]; get joins(): Join[] { - return clone(this._joins ?? []).filter((j) => j.tables[0] !== j.tables[1]) as Join[]; + return clone(this._joins ?? 
[]).filter( + (j) => j.tables[0] !== j.tables[1], + ) as Join[]; } set joins(j: Join[]) { @@ -179,7 +181,8 @@ export class DboBuilder { } prepareShortestJoinPaths = async () => { - const { joins, shortestJoinPaths, joinGraph } = await prepareShortestJoinPaths(this); + const { joins, shortestJoinPaths, joinGraph } = + await prepareShortestJoinPaths(this); this.joinGraph = joinGraph; this.joins = joins; this.shortestJoinPaths = shortestJoinPaths; @@ -189,12 +192,17 @@ export class DboBuilder { query: string, params: any, options: SQLOptions | undefined, - localParams?: LocalParams + localParams?: LocalParams, ) => { return runSQL .bind(this)(query, params, options, localParams) .catch((error) => - Promise.reject(getSerializedClientErrorFromPGError(error, { type: "sql", localParams })) + Promise.reject( + getSerializedClientErrorFromPGError(error, { + type: "sql", + localParams, + }), + ), ); }; @@ -207,7 +215,7 @@ export class DboBuilder { if (subscribeError) { console.error( "Could not initiate PubSubManager. Realtime data/Subscriptions will not work. Error: ", - subscribeError + subscribeError, ); this.canSubscribe = false; } else { @@ -218,7 +226,7 @@ export class DboBuilder { const start = Date.now(); const tablesOrViewsReq = await getTablesForSchemaPostgresSQL( this, - this.prostgles.opts.schemaFilter + this.prostgles.opts.schemaFilter, ); await this.prostgles.opts.onLog?.({ type: "debug", @@ -228,28 +236,31 @@ export class DboBuilder { }); this.tablesOrViews = tablesOrViewsReq.result; - this.constraints = await getConstraints(this.db, this.prostgles.opts.schemaFilter); + this.constraints = await getConstraints( + this.db, + this.prostgles.opts.schemaFilter, + ); await this.prepareShortestJoinPaths(); this.dbo = {}; this.tablesOrViews.map((tov) => { - const columnsForTypes = tov.columns.slice(0).sort((a, b) => a.name.localeCompare(b.name)); + const columnsForTypes = tov.columns + .slice(0) + .sort((a, b) => a.name.localeCompare(b.name)); const filterKeywords = Object.values(this.prostgles.keywords); - const $filterCol = columnsForTypes.find((c) => filterKeywords.includes(c.name)); + const $filterCol = columnsForTypes.find((c) => + filterKeywords.includes(c.name), + ); if ($filterCol) { throw `DboBuilder init error: \n\nTable ${JSON.stringify(tov.name)} column ${JSON.stringify($filterCol.name)} is colliding with Prostgles filtering functionality ($filter keyword) Please provide a replacement keyword name using the $filter_keyName init option. Alternatively you can rename the table column\n`; } - this.dbo[tov.escaped_identifier] = new (tov.is_view ? ViewHandler : TableHandler)( - this.db, - tov, - this, - undefined, - this.shortestJoinPaths - ); + this.dbo[tov.escaped_identifier] = new ( + tov.is_view ? 
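      // views get ViewHandler; tables get the full TableHandler (insert/update/delete)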
ViewHandler : TableHandler + )(this.db, tov, this, undefined, this.shortestJoinPaths); if ( this.shortestJoinPaths && @@ -282,7 +293,9 @@ export class DboBuilder { if (!this.dbo.sql) { this.dbo.sql = this.runSQL; } else { - console.warn(`Could not create dbo.sql handler because there is already a table named "sql"`); + console.warn( + `Could not create dbo.sql handler because there is already a table named "sql"`, + ); } this.tsTypesDefinition = [ @@ -296,7 +309,7 @@ export class DboBuilder { getShortestJoinPath = ( viewHandler: ViewHandler, - target: string + target: string, ): JoinPaths[number] | undefined => { const source = viewHandler.name; if (source === target) { @@ -315,7 +328,9 @@ export class DboBuilder { }; } - const jp = this.shortestJoinPaths.find((jp) => jp.t1 === source && jp.t2 === target); + const jp = this.shortestJoinPaths.find( + (jp) => jp.t1 === source && jp.t2 === target, + ); return jp; }; @@ -324,7 +339,13 @@ export class DboBuilder { const dbTX: DbTxTableHandlers & Pick = {}; this.tablesOrViews?.map((tov) => { const handlerClass = tov.is_view ? ViewHandler : TableHandler; - dbTX[tov.name] = new handlerClass(this.db, tov, this, { t, dbTX }, this.shortestJoinPaths); + dbTX[tov.name] = new handlerClass( + this.db, + tov, + this, + { t, dbTX }, + this.shortestJoinPaths, + ); }); dbTX.sql = (q, args, opts, localP) => this.runSQL(q, args, opts, { tx: { dbTX, t }, ...(localP ?? {}) }); @@ -338,6 +359,9 @@ export class DboBuilder { cacheDBTypes = cacheDBTypes.bind(this); runClientTransactionStatement = (statement: string) => { - return runClientTransactionStatement(statement, this.prostgles.opts.dbConnection as any); + return runClientTransactionStatement( + statement, + this.prostgles.opts.dbConnection as any, + ); }; } diff --git a/lib/DboBuilder/DboBuilderTypes.ts b/lib/DboBuilder/DboBuilderTypes.ts index 1e9de639..ed6ddb41 100644 --- a/lib/DboBuilder/DboBuilderTypes.ts +++ b/lib/DboBuilder/DboBuilderTypes.ts @@ -1,10 +1,9 @@ - /*--------------------------------------------------------------------------------------------- * Copyright (c) Stefan L. All rights reserved. * Licensed under the MIT License. See LICENSE in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import * as pgPromise from 'pg-promise'; +import * as pgPromise from "pg-promise"; import { AnyObject, ClientSchema, @@ -14,25 +13,23 @@ import { RawJoinPath, SQLHandler, TableInfo as TInfo, - UserLike + UserLike, } from "prostgles-types"; import { BasicSession, ExpressReq } from "../Auth/AuthTypes"; import { BasicCallback } from "../PubSubManager/PubSubManager"; -import { - PublishAllOrNothing -} from "../PublishParser/PublishParser"; -import { FieldSpec, } from "./QueryBuilder/Functions"; +import { PublishAllOrNothing } from "../PublishParser/PublishParser"; +import { FieldSpec } from "./QueryBuilder/Functions"; import { TableHandler } from "./TableHandler/TableHandler"; import { ParsedJoinPath } from "./ViewHandler/parseJoinPath"; -import pg = require('pg-promise/typescript/pg-subset'); - +import pg = require("pg-promise/typescript/pg-subset"); type PGP = pgPromise.IMain<{}, pg.IClient>; - export type TableSchemaColumn = ColumnInfo & { - privileges: Partial>; -} + privileges: Partial< + Record<"INSERT" | "REFERENCES" | "SELECT" | "UPDATE", true> + >; +}; export type TableSchema = Pick & { schema: string; @@ -52,7 +49,7 @@ export type TableSchema = Pick & { }; /** Cannot add triggers to hyperTables */ isHyperTable?: boolean; -} +}; export type SortItem = { asc: boolean; @@ -66,13 +63,16 @@ export type SortItem = { wrapperQuerySortItem: string; joinAlias: string; }; -} & ({ - type: "query"; - fieldQuery: string; -} | { - type: "position"; - fieldPosition: number; -}); +} & ( + | { + type: "query"; + fieldQuery: string; + } + | { + type: "position"; + fieldPosition: number; + } +); export type Media = { id?: string; @@ -94,26 +94,27 @@ export type Media = { export type ParsedMedia = Required>; export type TxCB = { - (t: TH & Pick, _t: pgPromise.ITask<{}>): (any | void); -} + (t: TH & Pick, _t: pgPromise.ITask<{}>): any | void; +}; export type TX = { - (t: TxCB): Promise<(any | void)>; -} + (t: TxCB): Promise; +}; export type TableHandlers = { [key: string]: Partial; -} +}; export type DbTxTableHandlers = { - [key: string]: Omit, "dbTx"> | Omit; -} - + [key: string]: + | Omit, "dbTx"> + | Omit; +}; -export type DBHandlerServerExtra = { +export type DBHandlerServerExtra< + TH = TableHandlers, + WithTransactions = true, +> = { sql: SQLHandler; -} & ( - WithTransactions extends true? { tx: TX } : - Record -); +} & (WithTransactions extends true ? 
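// the tx property is only present on the handler type when transactions are enabled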
{ tx: TX } : Record); // export type DBHandlerServer = // TH & @@ -123,14 +124,12 @@ export type DBHandlerServerExtra = // tx?: TX // } -export type DBHandlerServer = - TH & +export type DBHandlerServer = TH & Partial & { - sql?: SQLHandler + sql?: SQLHandler; } & { - tx?: TX - } - + tx?: TX; + }; export const pgp: PGP = pgPromise({ // ,query: function (e) { console.log({psql: e.query, params: e.params}); } @@ -142,15 +141,16 @@ export type TableInfo = TInfo & { oid: number; comment: string; columns: ColumnInfo[]; -} +}; export type ViewInfo = TableInfo & { - parent_tables: string[] -} + parent_tables: string[]; +}; -export type TableOrViewInfo = TableInfo & ViewInfo & { - is_view: boolean; -} +export type TableOrViewInfo = TableInfo & + ViewInfo & { + is_view: boolean; + }; export type PRGLIOSocket = { readonly id: string; @@ -161,15 +161,22 @@ export type PRGLIOSocket = { * IP Address */ address: string; - headers?: AnyObject & { cookie?: string; }; // e.g.: "some_arg=dwdaw; otherarg=23232" + headers?: AnyObject & { cookie?: string }; // e.g.: "some_arg=dwdaw; otherarg=23232" auth?: Record; - } + }; - readonly on: (channel: string, params: any, cb?: (err: any, res?: any) => void) => any;// Promise; + readonly on: ( + channel: string, + params: any, + cb?: (err: any, res?: any) => void, + ) => any; // Promise; readonly emit: (channel: string, message?: any, cb?: BasicCallback) => any; - readonly once: (channel: string, cb: (_data: any, cb: BasicCallback) => void) => void; + readonly once: ( + channel: string, + cb: (_data: any, cb: BasicCallback) => void, + ) => void; readonly removeAllListeners: (channel: string) => void; @@ -177,17 +184,17 @@ export type PRGLIOSocket = { readonly request: { url?: string; - connection: { remoteAddress?: string; } - } + connection: { remoteAddress?: string }; + }; /** Used for session caching */ __prglCache?: { session: BasicSession; user: UserLike; clientUser: UserLike; - } + }; - _user?: AnyObject + _user?: AnyObject; /** Used for publish error caching */ prostgles?: ClientSchema; @@ -207,7 +214,7 @@ export type LocalParams = { tx?: { dbTX: TableHandlers; t: pgPromise.ITask<{}>; - } + }; /** Used to exclude certain logs */ noLog?: boolean; @@ -222,14 +229,13 @@ export type LocalParams = { previousData: AnyObject; previousTable: string; referencingColumn?: string; - } -} - + }; +}; export type Aggregation = { - field: string, - query: string, - alias: string, + field: string; + query: string; + alias: string; getQuery: (alias: string) => string; }; @@ -239,50 +245,46 @@ export type JoinInfo = { /** * If true then all joins involve unique columns and the result is a 1 to 1 join */ - expectOne?: boolean, + expectOne?: boolean; paths: { - /** * The table that JOIN ON columns refer to. * columns in index = 1 refer to this table. index = 0 columns refer to previous JoinInfo.table */ - table: string, + table: string; /** * Source and target JOIN ON column groups for each existing constraint - * Each inner array group will be combined with AND and outer arrays with OR to allow multiple references to the same table + * Each inner array group will be combined with AND and outer arrays with OR to allow multiple references to the same table * e.g.: [[source_table_column: string, table_column: string]] */ - on: [string, string][][], + on: [string, string][][]; /** * Source table name */ - source: string, + source: string; /** * Target table name */ - target: string - }[] -} - + target: string; + }[]; +}; export type CommonTableRules = { - /** - * True by default. 
Allows clients to get column information on any columns that are allowed in (select, insert, update) field rules. + * True by default. Allows clients to get column information on any columns that are allowed in (select, insert, update) field rules. */ getColumns?: PublishAllOrNothing; /** - * True by default. Allows clients to get table information (oid, comment, label, has_media). + * True by default. Allows clients to get table information (oid, comment, label, has_media). */ - getInfo?: PublishAllOrNothing -} + getInfo?: PublishAllOrNothing; +}; export type ValidatedTableRules = CommonTableRules & { - /* All columns of the view/table. Includes computed fields as well */ allColumns: FieldSpec[]; @@ -301,7 +303,7 @@ export type ValidatedTableRules = CommonTableRules & { /* Max limit allowed for each select. 1000 by default. If null then an unlimited select is allowed when providing { limit: null } */ maxLimit: number | null; - }, + }; update: { /* Fields you can update */ fields: string[]; @@ -317,7 +319,7 @@ export type ValidatedTableRules = CommonTableRules & { /* Data applied to every update */ forcedData: any; - }, + }; insert: { /* Fields you can insert */ fields: string[]; @@ -327,7 +329,7 @@ export type ValidatedTableRules = CommonTableRules & { /* Data applied to every insert */ forcedData: any; - }, + }; delete: { /* Fields to filter by when deleting */ filterFields: string[]; @@ -337,8 +339,8 @@ export type ValidatedTableRules = CommonTableRules & { /* Fields you can return after deleting */ returningFields: string[]; - } -} + }; +}; export type ExistsFilterConfig = { existType: EXISTS_KEY; @@ -346,17 +348,19 @@ export type ExistsFilterConfig = { * Target table filter. target table is the last table from tables */ targetTableFilter: Filter; - -} & ({ - isJoined: true; - /** - * list of join tables in their order - * If table path starts with "**" then get shortest join to first table - * e.g.: "**.users" means finding the shortest join from root table to users table - */ - path: RawJoinPath; - parsedPath: ParsedJoinPath[] -} | { - isJoined: false; - targetTable: string; -}); \ No newline at end of file +} & ( + | { + isJoined: true; + /** + * list of join tables in their order + * If table path starts with "**" then get shortest join to first table + * e.g.: "**.users" means finding the shortest join from root table to users table + */ + path: RawJoinPath; + parsedPath: ParsedJoinPath[]; + } + | { + isJoined: false; + targetTable: string; + } +); diff --git a/lib/DboBuilder/QueryBuilder/Functions.ts b/lib/DboBuilder/QueryBuilder/Functions.ts index 3893fc21..761198e0 100644 --- a/lib/DboBuilder/QueryBuilder/Functions.ts +++ b/lib/DboBuilder/QueryBuilder/Functions.ts @@ -1,54 +1,84 @@ -import { asName, ColumnInfo, isEmpty, isObject, PG_COLUMN_UDT_DATA_TYPE, TextFilter_FullTextSearchFilterKeys } from "prostgles-types"; +import { + asName, + ColumnInfo, + isEmpty, + isObject, + PG_COLUMN_UDT_DATA_TYPE, + TextFilter_FullTextSearchFilterKeys, +} from "prostgles-types"; import { isPlainObject, pgp, postgresToTsType } from "../DboBuilder"; import { parseFieldFilter } from "../ViewHandler/parseFieldFilter"; import { asNameAlias } from "./QueryBuilder"; -export const parseFunction = (funcData: { func: string | FunctionSpec, args: any[], functions: FunctionSpec[]; allowedFields: string[]; }): FunctionSpec => { +export const parseFunction = (funcData: { + func: string | FunctionSpec; + args: any[]; + functions: FunctionSpec[]; + allowedFields: string[]; +}): FunctionSpec => { const { func, 
args, functions, allowedFields } = funcData; /* Function is computed column. No checks needed */ - if(typeof func !== "string"){ - const computedCol = COMPUTED_FIELDS.find(c => c.name === func.name); - if(!computedCol) throw `Unexpected function: computed column spec not found for ${JSON.stringify(func.name)}`; + if (typeof func !== "string") { + const computedCol = COMPUTED_FIELDS.find((c) => c.name === func.name); + if (!computedCol) + throw `Unexpected function: computed column spec not found for ${JSON.stringify(func.name)}`; return func; } const funcName = func; const makeErr = (msg: string): string => { - return `Issue with function ${JSON.stringify({ [funcName]: args })}: \n${msg}` - } + return `Issue with function ${JSON.stringify({ [funcName]: args })}: \n${msg}`; + }; /* Find function */ - const funcDef = functions.find(f => f.name === funcName); - - if(!funcDef) { - const sf = functions.filter(f => f.name.toLowerCase().slice(1).startsWith(funcName.toLowerCase())).sort((a, b) => (a.name.length - b.name.length)); - const hint = (sf.length? `. \n Maybe you meant: \n | ${sf.map(s => s.name + " " + (s.description || "")).join(" \n | ")} ?` : ""); - throw "\n Function " + funcName + " does not exist or is not allowed " + hint; + const funcDef = functions.find((f) => f.name === funcName); + + if (!funcDef) { + const sf = functions + .filter((f) => + f.name.toLowerCase().slice(1).startsWith(funcName.toLowerCase()), + ) + .sort((a, b) => a.name.length - b.name.length); + const hint = sf.length + ? `. \n Maybe you meant: \n | ${sf.map((s) => s.name + " " + (s.description || "")).join(" \n | ")} ?` + : ""; + throw ( + "\n Function " + funcName + " does not exist or is not allowed " + hint + ); } - + /* Validate fields */ const fields = funcDef.getFields(args); - if(fields !== "*"){ - fields.forEach(fieldKey => { - if(typeof fieldKey !== "string" || !allowedFields.includes(fieldKey)) { - throw makeErr(`getFields() => field name ${JSON.stringify(fieldKey)} is invalid or disallowed`) + if (fields !== "*") { + fields.forEach((fieldKey) => { + if (typeof fieldKey !== "string" || !allowedFields.includes(fieldKey)) { + throw makeErr( + `getFields() => field name ${JSON.stringify(fieldKey)} is invalid or disallowed`, + ); } }); - if((funcDef.minCols ?? 0) > fields.length){ - throw makeErr(`Less columns provided than necessary (minCols=${funcDef.minCols})`) + if ((funcDef.minCols ?? 0) > fields.length) { + throw makeErr( + `Less columns provided than necessary (minCols=${funcDef.minCols})`, + ); } } - if(funcDef.numArgs && funcDef.minCols !== 0 && fields !== "*" && Array.isArray(fields) && !fields.length) { + if ( + funcDef.numArgs && + funcDef.minCols !== 0 && + fields !== "*" && + Array.isArray(fields) && + !fields.length + ) { throw `\n Function "${funcDef.name}" expects at least a field name but has not been provided with one`; } return funcDef; -} - +}; -type GetQueryArgs = { +type GetQueryArgs = { allColumns: ColumnInfo[]; allowedFields: string[]; args: any[]; @@ -113,102 +143,142 @@ export type FunctionSpec = { const MAX_COL_NUM = 1600; const asValue = (v: any, castAs = "") => pgp.as.format("$1" + castAs, [v]); -const parseUnix = (colName: string, tableAlias: string | undefined, allColumns: ColumnInfo[], opts: { timeZone: boolean | string } | undefined) => { +const parseUnix = ( + colName: string, + tableAlias: string | undefined, + allColumns: ColumnInfo[], + opts: { timeZone: boolean | string } | undefined, +) => { let tz = ""; - if(opts){ + if (opts) { const { timeZone } = opts ?? 
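    // timeZone: true casts to TIMESTAMPTZ; a string value applies AT TIME ZONE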
{}; - if(timeZone && typeof timeZone !== "string" && typeof timeZone !== "boolean"){ + if ( + timeZone && + typeof timeZone !== "string" && + typeof timeZone !== "boolean" + ) { throw `Bad timeZone value. timeZone can be boolean or string`; } - if(timeZone === true){ + if (timeZone === true) { tz = "::TIMESTAMPTZ"; - } else if(typeof timeZone === "string"){ + } else if (typeof timeZone === "string") { tz = ` AT TIME ZONE ${asValue(timeZone)}`; } } - const col = allColumns.find(c => c.name === colName); - if(!col) throw `Unexpected: column ${colName} not found`; - const escapedName = asNameAlias(colName, tableAlias); - if(col.udt_name === "int8"){ - return `to_timestamp(${escapedName}/1000.0)${tz}` + const col = allColumns.find((c) => c.name === colName); + if (!col) throw `Unexpected: column ${colName} not found`; + const escapedName = asNameAlias(colName, tableAlias); + if (col.udt_name === "int8") { + return `to_timestamp(${escapedName}/1000.0)${tz}`; } return `${escapedName}${tz}`; -} +}; const JSON_Funcs: FunctionSpec[] = [ { name: "$jsonb_set", - description: "[columnName: string, path: (string | number)[], new_value?: any, create_missing?: boolean ] Returns target value (columnName) with the section designated by path replaced by new_value, or with new_value added if create_missing is true (default is true) and the item designated by path does not exist", + description: + "[columnName: string, path: (string | number)[], new_value?: any, create_missing?: boolean ] Returns target value (columnName) with the section designated by path replaced by new_value, or with new_value added if create_missing is true (default is true) and the item designated by path does not exist", singleColArg: false, numArgs: 4, type: "function", getFields: ([column]) => column, - getQuery: ({ + getQuery: ({ args: [colName, path = [], new_value, create_missing = true], - tableAlias, allowedFields + tableAlias, + allowedFields, }) => { - if(!allowedFields.includes(colName)) { + if (!allowedFields.includes(colName)) { throw `Unexpected: column ${colName} not found`; } - if(!path || !Array.isArray(path) || !path.every(v => ["number", "string"].includes(typeof v))){ - throw "Expecting: [columnName: string, path: (string | number)[], new_value?: any, create_missing?: boolean ]" + if ( + !path || + !Array.isArray(path) || + !path.every((v) => ["number", "string"].includes(typeof v)) + ) { + throw "Expecting: [columnName: string, path: (string | number)[], new_value?: any, create_missing?: boolean ]"; } - const escapedName = asNameAlias(colName, tableAlias); + const escapedName = asNameAlias(colName, tableAlias); return `jsonb_set(${escapedName}, ${asValue(path)}, ${asValue(new_value)}, ${create_missing})`; - } + }, }, { name: "$jsonb_path_query", - description: "[columnName: string, jsonPath: string, vars?: object, silent?: boolean]\n Returns all JSON items returned by the JSON path for the specified JSON value. The optional vars and silent arguments act the same as for jsonb_path_exists.", + description: + "[columnName: string, jsonPath: string, vars?: object, silent?: boolean]\n Returns all JSON items returned by the JSON path for the specified JSON value. 
The optional vars and silent arguments act the same as for jsonb_path_exists.", singleColArg: false, numArgs: 4, type: "function", getFields: ([column]) => column, - getQuery: ({ + getQuery: ({ args: [colName, jsonPath, ...otherArgs], - tableAlias, allowedFields + tableAlias, + allowedFields, }) => { - if(!allowedFields.includes(colName)) { + if (!allowedFields.includes(colName)) { throw `Unexpected: column ${colName} not found`; } - if(!jsonPath || typeof jsonPath !== "string"){ - throw "Expecting: [columnName: string, jsonPath: string, vars?: object, silent?: boolean]" + if (!jsonPath || typeof jsonPath !== "string") { + throw "Expecting: [columnName: string, jsonPath: string, vars?: object, silent?: boolean]"; } - const escapedName = asNameAlias(colName, tableAlias); + const escapedName = asNameAlias(colName, tableAlias); - return `jsonb_path_query(${escapedName}, ${[jsonPath, ...otherArgs].map(v => asValue(v)).join(", ")})`; - } + return `jsonb_path_query(${escapedName}, ${[jsonPath, ...otherArgs].map((v) => asValue(v)).join(", ")})`; + }, }, - ...([ - ["jsonb_array_length", "Returns the number of elements in the outermost JSON array"], - ["jsonb_each", "Expands the outermost JSON object into a set of key/value pairs"], - ["jsonb_each_text", "Expands the outermost JSON object into a set of key/value pairs. The returned values will be of type text"], - ["jsonb_object_keys", "Returns set of keys in the outermost JSON object"], - ["jsonb_strip_nulls", "Returns from_json with all object fields that have null values omitted. Other null values are untouched"], - ["jsonb_pretty", "Returns from_json as indented JSON text "], - ["jsonb_to_record", "Builds an arbitrary record from a JSON object"], - ["jsonb_array_elements", "Expands a JSON array to a set of JSON values"], - ["jsonb_array_elements_text", "Expands a JSON array to a set of text values "], - ["jsonb_typeof", "Returns the type of the outermost JSON value as a text string. Possible types are object, array, string, number, boolean, and null "], - ] as const).map(([ name, description]) => ({ - name: "$" + name, - description, - singleColArg: true, - numArgs: 1, - type: "function", - getFields: ([col]) => col, - getQuery: ({ args: [colName], tableAlias }) => { - const escapedName = asNameAlias(colName, tableAlias); - return `${name}(${escapedName})`; - } - } as FunctionSpec)) + ...( + [ + [ + "jsonb_array_length", + "Returns the number of elements in the outermost JSON array", + ], + [ + "jsonb_each", + "Expands the outermost JSON object into a set of key/value pairs", + ], + [ + "jsonb_each_text", + "Expands the outermost JSON object into a set of key/value pairs. The returned values will be of type text", + ], + ["jsonb_object_keys", "Returns set of keys in the outermost JSON object"], + [ + "jsonb_strip_nulls", + "Returns from_json with all object fields that have null values omitted. Other null values are untouched", + ], + ["jsonb_pretty", "Returns from_json as indented JSON text "], + ["jsonb_to_record", "Builds an arbitrary record from a JSON object"], + ["jsonb_array_elements", "Expands a JSON array to a set of JSON values"], + [ + "jsonb_array_elements_text", + "Expands a JSON array to a set of text values ", + ], + [ + "jsonb_typeof", + "Returns the type of the outermost JSON value as a text string. 
Possible types are object, array, string, number, boolean, and null ", + ], + ] as const + ).map( + ([name, description]) => + ({ + name: "$" + name, + description, + singleColArg: true, + numArgs: 1, + type: "function", + getFields: ([col]) => col, + getQuery: ({ args: [colName], tableAlias }) => { + const escapedName = asNameAlias(colName, tableAlias); + return `${name}(${escapedName})`; + }, + }) as FunctionSpec, + ), ]; -const FTS_Funcs: FunctionSpec[] = +const FTS_Funcs: FunctionSpec[] = /* Full text search https://www.postgresql.org/docs/current/textsearch-dictionaries.html#TEXTSEARCH-SIMPLE-DICTIONARY */ @@ -217,10 +287,10 @@ const FTS_Funcs: FunctionSpec[] = // "synonym", // replace word with a synonym "english", // "english_stem", - // "english_hunspell", - "" - ].map(type => ({ - name: "$ts_headline" + (type? ("_" + type) : ""), + // "english_hunspell", + "", + ].map((type) => ({ + name: "$ts_headline" + (type ? "_" + type : ""), description: ` :[column_name , search_term: ] -> sha512 hash of the of column content`, type: "function" as const, singleColArg: true, @@ -228,253 +298,291 @@ const FTS_Funcs: FunctionSpec[] = getFields: ([column]) => [column], getQuery: ({ args }) => { const col = asName(args[0]); - let qVal = args[1], qType = "to_tsquery"; - const _type = type? (asValue(type) + ",") : ""; + let qVal = args[1], + qType = "to_tsquery"; + const _type = type ? asValue(type) + "," : ""; const searchTypes = TextFilter_FullTextSearchFilterKeys; - + /* { to_tsquery: 'search term' } */ - if(isPlainObject(qVal)){ + if (isPlainObject(qVal)) { const keys = Object.keys(qVal); - if(!keys.length) throw "Bad arg"; - if(keys.length !==1 || !searchTypes.includes(keys[0] as any)) throw "Expecting a an object with a single key named one of: " + searchTypes.join(", "); + if (!keys.length) throw "Bad arg"; + if (keys.length !== 1 || !searchTypes.includes(keys[0] as any)) + throw ( + "Expecting a an object with a single key named one of: " + + searchTypes.join(", ") + ); qType = keys[0]!; qVal = asValue(qVal[qType]); - /* 'search term' */ - } else if(typeof qVal === "string") { - qVal = pgp.as.format(qType + "($1)", [qVal]) - } else throw "Bad second arg. Exepcting search string or { to_tsquery: 'search string' }"; + /* 'search term' */ + } else if (typeof qVal === "string") { + qVal = pgp.as.format(qType + "($1)", [qVal]); + } else + throw "Bad second arg. Exepcting search string or { to_tsquery: 'search string' }"; - const res = `ts_headline(${_type} ${col}::text, ${qVal}, 'ShortWord=1 ' )` + const res = `ts_headline(${_type} ${col}::text, ${qVal}, 'ShortWord=1 ' )`; // console.log(res) - - return res - } + + return res; + }, })); -let PostGIS_Funcs: FunctionSpec[] = ([ +let PostGIS_Funcs: FunctionSpec[] = ( + [ { fname: "ST_DWithin", description: `:[column_name, { lat?: number; lng?: number; geojson?: object; srid?: number; use_spheroid?: boolean; distance: number; }] -> Returns true if the geometries are within a given distance For geometry: The distance is specified in units defined by the spatial reference system of the geometries. For this function to make sense, the source geometries must be in the same coordinate system (have the same SRID). For geography: units are in meters and distance measurement defaults to use_spheroid=true. For faster evaluation use use_spheroid=false to measure on the sphere. 
- ` + `, }, { fname: "<->", description: `:[column_name, { lat?: number; lng?: number; geojson?: object; srid?: number; use_spheroid?: boolean }] - -> The <-> operator returns the 2D distance between two geometries. Used in the "ORDER BY" clause provides index-assisted nearest-neighbor result sets. For PostgreSQL below 9.5 only gives centroid distance of bounding boxes and for PostgreSQL 9.5+, does true KNN distance search giving true distance between geometries, and distance sphere for geographies.` + -> The <-> operator returns the 2D distance between two geometries. Used in the "ORDER BY" clause, it provides index-assisted nearest-neighbor result sets. For PostgreSQL below 9.5 it only gives the centroid distance of bounding boxes; for PostgreSQL 9.5+ it does a true KNN distance search, giving the true distance between geometries, and distance sphere for geographies.`, }, - { + { fname: "ST_Distance", description: ` :[column_name, { lat?: number; lng?: number; geojson?: object; srid?: number; use_spheroid?: boolean }] -> For geometry types returns the minimum 2D Cartesian (planar) distance between two geometries, in projected units (spatial ref units). -> For geography types defaults to returning the minimum geodesic distance between two geographies in meters, computed on the spheroid determined by the SRID. If use_spheroid is false, a faster spherical calculation is used. `, - },{ + }, + { fname: "ST_DistanceSpheroid", description: ` :[column_name, { lat?: number; lng?: number; geojson?: object; srid?: number; spheroid?: string; }] -> Returns the minimum distance in meters between two lon/lat geometries given a particular spheroid. See the explanation of spheroids given for ST_LengthSpheroid. `, - },{ + }, + { fname: "ST_DistanceSphere", description: ` :[column_name, { lat?: number; lng?: number; geojson?: object; srid?: number }] -> Returns linear distance in meters between two lon/lat points. Uses a spherical earth and radius of 6370986 meters. Faster than ST_DistanceSpheroid, but less accurate.
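Since `ST_DWithin` is the one distance helper flagged as usable in filters (`canBeUsedForFilter`, set further below), a proximity query might look roughly as follows. The `$filter` shape, table and column are assumptions for illustration; `distance` and `unit` come straight from the options validated below:

```typescript
// Inside an async context; `pois.location` is a hypothetical geography column.
const nearby = await db.pois.find(
  {
    $filter: [
      { $ST_DWithin: ["location", { lat: 51.5, lng: -0.12, distance: 2, unit: "km" }] },
    ],
  },
  {
    select: {
      id: 1,
      metres: { $ST_Distance: ["location", { lat: 51.5, lng: -0.12 }] },
    },
    orderBy: { metres: 1 },
  },
);
```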
Only implemented for points.`, + }, + ] as const +).map(({ fname, description }) => ({ + name: "$" + fname, + description, + type: "function" as const, + singleColArg: true, + numArgs: 1, + canBeUsedForFilter: fname === "ST_DWithin", + getFields: (args: any[]) => [args[0]], + getQuery: ({ allColumns, args: [columnName, arg2], tableAlias }) => { + const mErr = () => { + throw `${fname}: Expecting a second argument like: { lat?: number; lng?: number; geojson?: object; srid?: number; use_spheroid?: boolean }`; + }; + + if (!isObject(arg2)) { + mErr(); + } + const col = allColumns.find((c) => c.name === columnName); + if (!col) { + throw new Error("Col not found: " + columnName); } - ] as const).map(({ fname, description }) => ({ - name: "$" + fname, - description, - type: "function" as const, - singleColArg: true, - numArgs: 1, - canBeUsedForFilter: fname === "ST_DWithin", - getFields: (args: any[]) => [args[0]], - getQuery: ({ allColumns, args: [columnName, arg2], tableAlias }) => { - const mErr = () => { throw `${fname}: Expecting a second argument like: { lat?: number; lng?: number; geojson?: object; srid?: number; use_spheroid?: boolean }` }; - - if(!isObject(arg2)) { - mErr(); - } - const col = allColumns.find(c => c.name === columnName); - if(!col) { - throw new Error("Col not found: " + columnName) - } - - const { - lat, lng, srid = 4326, - geojson, text, use_spheroid, - distance, spheroid = 'SPHEROID["WGS 84", 6378137, 298.257223563]', - unit, - debug - } = arg2; - let geomQ = "", extraParams = ""; - - if(typeof text === "string"){ - geomQ = `ST_GeomFromText(${asValue(text)})`; - } else if([lat, lng].every(v => Number.isFinite(v))){ - geomQ = `ST_Point(${asValue(lng)}, ${asValue(lat)})`; - } else if(isPlainObject(geojson)){ - geomQ = `ST_GeomFromGeoJSON(${geojson})`; - } else mErr(); - - if(Number.isFinite(srid)){ - geomQ = `ST_SetSRID(${geomQ}, ${asValue(srid)})`; - } - - let colCast = ""; - const colIsGeog = col.udt_name === "geography"; - let geomQCast = colIsGeog? "::geography" : "::geometry"; - /** - * float ST_Distance(geometry g1, geometry g2); - * float ST_Distance(geography geog1, geography geog2, boolean use_spheroid=true); - */ - if(fname === "ST_Distance"){ + const { + lat, + lng, + srid = 4326, + geojson, + text, + use_spheroid, + distance, + spheroid = 'SPHEROID["WGS 84", 6378137, 298.257223563]', + unit, + debug, + } = arg2; + let geomQ = "", + extraParams = ""; + + if (typeof text === "string") { + geomQ = `ST_GeomFromText(${asValue(text)})`; + } else if ([lat, lng].every((v) => Number.isFinite(v))) { + geomQ = `ST_Point(${asValue(lng)}, ${asValue(lat)})`; + } else if (isPlainObject(geojson)) { + geomQ = `ST_GeomFromGeoJSON(${geojson})`; + } else mErr(); + + if (Number.isFinite(srid)) { + geomQ = `ST_SetSRID(${geomQ}, ${asValue(srid)})`; + } - if(typeof use_spheroid === "boolean"){ - extraParams = ", " + asValue(use_spheroid); - } + let colCast = ""; + const colIsGeog = col.udt_name === "geography"; + let geomQCast = colIsGeog ? "::geography" : "::geometry"; + + /** + * float ST_Distance(geometry g1, geometry g2); + * float ST_Distance(geography geog1, geography geog2, boolean use_spheroid=true); + */ + if (fname === "ST_Distance") { + if (typeof use_spheroid === "boolean") { + extraParams = ", " + asValue(use_spheroid); + } - colCast = (colIsGeog || use_spheroid)? "::geography" : "::geometry"; - geomQCast = (colIsGeog || use_spheroid)? "::geography" : "::geometry"; + colCast = colIsGeog || use_spheroid ? 
"::geography" : "::geometry"; + geomQCast = colIsGeog || use_spheroid ? "::geography" : "::geometry"; /** * boolean ST_DWithin(geometry g1, geometry g2, double precision distance_of_srid); * boolean ST_DWithin(geography gg1, geography gg2, double precision distance_meters, boolean use_spheroid = true); */ - } else if(fname === "ST_DWithin"){ - colCast = colIsGeog? "::geography" : "::geometry"; - geomQCast = colIsGeog? "::geography" : "::geometry"; - - if(typeof distance !== "number") { - throw `ST_DWithin: distance param missing or not a number`; - } - const allowedUnits = ["m", "km"]; - if(unit && !allowedUnits.includes(unit)){ - throw `ST_DWithin: unit can only be one of: ${allowedUnits}`; - } - extraParams = ", " + asValue(distance * (unit === "km"? 1000 : 1)); + } else if (fname === "ST_DWithin") { + colCast = colIsGeog ? "::geography" : "::geometry"; + geomQCast = colIsGeog ? "::geography" : "::geometry"; + if (typeof distance !== "number") { + throw `ST_DWithin: distance param missing or not a number`; + } + const allowedUnits = ["m", "km"]; + if (unit && !allowedUnits.includes(unit)) { + throw `ST_DWithin: unit can only be one of: ${allowedUnits}`; + } + extraParams = ", " + asValue(distance * (unit === "km" ? 1000 : 1)); /** * float ST_DistanceSpheroid(geometry geomlonlatA, geometry geomlonlatB, spheroid measurement_spheroid); */ - } else if(fname === "ST_DistanceSpheroid"){ - colCast = "::geometry"; - geomQCast = "::geometry"; - if(typeof spheroid !== "string") throw `ST_DistanceSpheroid: spheroid param must be string`; - extraParams = `, ${asValue(spheroid)}` - - + } else if (fname === "ST_DistanceSpheroid") { + colCast = "::geometry"; + geomQCast = "::geometry"; + if (typeof spheroid !== "string") + throw `ST_DistanceSpheroid: spheroid param must be string`; + extraParams = `, ${asValue(spheroid)}`; /** * float ST_DistanceSphere(geometry geomlonlatA, geometry geomlonlatB); */ - } else if(fname === "ST_DistanceSphere"){ - colCast = "::geometry"; - geomQCast = "::geometry"; - extraParams = ""; + } else if (fname === "ST_DistanceSphere") { + colCast = "::geometry"; + geomQCast = "::geometry"; + extraParams = ""; /** * double precision <->( geometry A , geometry B ); * double precision <->( geography A , geography B ); */ - } else if(fname === "<->"){ - colCast = colIsGeog? "::geography" : "::geometry"; - geomQCast = colIsGeog? "::geography" : "::geometry"; - const q = pgp.as.format(`${asNameAlias(columnName, tableAlias)}${colCast} <-> ${geomQ}${geomQCast}`); - if(debug) throw q; - return q; - } + } else if (fname === "<->") { + colCast = colIsGeog ? "::geography" : "::geometry"; + geomQCast = colIsGeog ? 
"::geography" : "::geometry"; + const q = pgp.as.format( + `${asNameAlias(columnName, tableAlias)}${colCast} <-> ${geomQ}${geomQCast}`, + ); + if (debug) throw q; + return q; + } - const query = pgp.as.format(`${fname}(${asNameAlias(columnName, tableAlias)}${colCast} , ${geomQ}${geomQCast} ${extraParams})`); - if(debug) { - throw query; - } - return query; + const query = pgp.as.format( + `${fname}(${asNameAlias(columnName, tableAlias)}${colCast} , ${geomQ}${geomQCast} ${extraParams})`, + ); + if (debug) { + throw query; } - })); + return query; + }, +})); - PostGIS_Funcs = PostGIS_Funcs.concat( - [ - "ST_AsText", "ST_AsEWKT", "ST_AsEWKB", "ST_AsBinary", "ST_AsMVT", "ST_AsMVTGeom", - "ST_AsGeoJSON", "ST_Simplify", - "ST_SnapToGrid", "ST_Centroid", - "st_aslatlontext", - ] - .map(fname => { - const res: FunctionSpec = { - name: "$" + fname, - description: ` :[column_name, precision?] -> json GeoJSON output of a geometry column`, - type: "function", - singleColArg: true, - numArgs: 1, - getFields: (args: any[]) => [args[0]], - getQuery: ({ args: [colName, ...otherArgs], tableAlias }) => { - let secondArg = ""; - if(otherArgs.length) secondArg = ", " + otherArgs.map(arg => asValue(arg)).join(", "); - const escTabelName = asNameAlias(colName, tableAlias) + "::geometry"; - const result = pgp.as.format(fname + "(" + escTabelName + secondArg + ( fname === "ST_AsGeoJSON"? ")::jsonb" : ")" )); - if(["ST_Centroid", "ST_SnapToGrid", "ST_Simplify"].includes(fname)){ - const r = `ST_AsGeoJSON(${result})::jsonb`; - return r; - } - return result; +PostGIS_Funcs = PostGIS_Funcs.concat( + [ + "ST_AsText", + "ST_AsEWKT", + "ST_AsEWKB", + "ST_AsBinary", + "ST_AsMVT", + "ST_AsMVTGeom", + "ST_AsGeoJSON", + "ST_Simplify", + "ST_SnapToGrid", + "ST_Centroid", + "st_aslatlontext", + ].map((fname) => { + const res: FunctionSpec = { + name: "$" + fname, + description: ` :[column_name, precision?] -> json GeoJSON output of a geometry column`, + type: "function", + singleColArg: true, + numArgs: 1, + getFields: (args: any[]) => [args[0]], + getQuery: ({ args: [colName, ...otherArgs], tableAlias }) => { + let secondArg = ""; + if (otherArgs.length) + secondArg = ", " + otherArgs.map((arg) => asValue(arg)).join(", "); + const escTabelName = asNameAlias(colName, tableAlias) + "::geometry"; + const result = pgp.as.format( + fname + + "(" + + escTabelName + + secondArg + + (fname === "ST_AsGeoJSON" ? ")::jsonb" : ")"), + ); + if (["ST_Centroid", "ST_SnapToGrid", "ST_Simplify"].includes(fname)) { + const r = `ST_AsGeoJSON(${result})::jsonb`; + return r; } - } - return res; - }), - ); - + return result; + }, + }; + return res; + }), +); - PostGIS_Funcs = PostGIS_Funcs.concat( - ["ST_Extent", "ST_3DExtent", "ST_XMin_Agg", "ST_XMax_Agg", "ST_YMin_Agg", "ST_YMax_Agg", "ST_ZMin_Agg", "ST_ZMax_Agg"] - .map(fname => { - const res: FunctionSpec = { - name: "$" + fname, - description: ` :[column_name] -> ST_Extent returns a bounding box that encloses a set of geometries. +PostGIS_Funcs = PostGIS_Funcs.concat( + [ + "ST_Extent", + "ST_3DExtent", + "ST_XMin_Agg", + "ST_XMax_Agg", + "ST_YMin_Agg", + "ST_YMax_Agg", + "ST_ZMin_Agg", + "ST_ZMax_Agg", + ].map((fname) => { + const res: FunctionSpec = { + name: "$" + fname, + description: ` :[column_name] -> ST_Extent returns a bounding box that encloses a set of geometries. The ST_Extent function is an "aggregate" function in the terminology of SQL. 
That means that it operates on lists of data, in the same way the SUM() and AVG() functions do.`, - type: "aggregation", - singleColArg: true, - numArgs: 1, - getFields: (args: any[]) => [args[0]], - getQuery: ({ args, tableAlias }) => { - const escTabelName = asNameAlias(args[0], tableAlias) + "::geometry"; - if(fname.includes("Extent")){ - return `${fname}(${escTabelName})`; - } - return `${fname.endsWith("_Agg")? fname.slice(0, -4) : fname}(ST_Collect(${escTabelName}))`; - } - } - return res; - }), - ); - - PostGIS_Funcs = PostGIS_Funcs.concat( - ["ST_Length", "ST_X", "ST_Y", "ST_Z"].map(fname => ({ - name: "$" + fname, - type: "function", + type: "aggregation", singleColArg: true, numArgs: 1, getFields: (args: any[]) => [args[0]], - getQuery: ({ allColumns, args, tableAlias }) => { - const colName = args[0]; - const escapedColName = asNameAlias(colName, tableAlias); - const col = allColumns.find(c => c.name === colName); - if(!col) throw new Error("Col not found: " + colName) - - return `${fname}(${escapedColName})`; - } - })) - ); - + getQuery: ({ args, tableAlias }) => { + const escTabelName = asNameAlias(args[0], tableAlias) + "::geometry"; + if (fname.includes("Extent")) { + return `${fname}(${escTabelName})`; + } + return `${fname.endsWith("_Agg") ? fname.slice(0, -4) : fname}(ST_Collect(${escTabelName}))`; + }, + }; + return res; + }), +); + +PostGIS_Funcs = PostGIS_Funcs.concat( + ["ST_Length", "ST_X", "ST_Y", "ST_Z"].map((fname) => ({ + name: "$" + fname, + type: "function", + singleColArg: true, + numArgs: 1, + getFields: (args: any[]) => [args[0]], + getQuery: ({ allColumns, args, tableAlias }) => { + const colName = args[0]; + const escapedColName = asNameAlias(colName, tableAlias); + const col = allColumns.find((c) => c.name === colName); + if (!col) throw new Error("Col not found: " + colName); + + return `${fname}(${escapedColName})`; + }, + })), +); + /** -* Each function expects a column at the very least -*/ + * Each function expects a column at the very least + */ export const FUNCTIONS: FunctionSpec[] = [ - // Hashing { name: "$md5_multi", @@ -484,9 +592,18 @@ export const FUNCTIONS: FunctionSpec[] = [ numArgs: MAX_COL_NUM, getFields: (args: any[]) => args, getQuery: ({ args, tableAlias }) => { - const q = pgp.as.format("md5(" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + "::text, '' )" ).join(" || ") + ")"); - return q - } + const q = pgp.as.format( + "md5(" + + args + .map( + (fname) => + "COALESCE( " + asNameAlias(fname, tableAlias) + "::text, '' )", + ) + .join(" || ") + + ")", + ); + return q; + }, }, { name: "$md5_multi_agg", @@ -496,9 +613,18 @@ export const FUNCTIONS: FunctionSpec[] = [ numArgs: MAX_COL_NUM, getFields: (args: any[]) => args, getQuery: ({ args, tableAlias }) => { - const q = pgp.as.format("md5(string_agg(" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + "::text, '' )" ).join(" || ") + ", ','))"); - return q - } + const q = pgp.as.format( + "md5(string_agg(" + + args + .map( + (fname) => + "COALESCE( " + asNameAlias(fname, tableAlias) + "::text, '' )", + ) + .join(" || ") + + ", ','))", + ); + return q; + }, }, { @@ -509,9 +635,18 @@ export const FUNCTIONS: FunctionSpec[] = [ numArgs: MAX_COL_NUM, getFields: (args: any[]) => args, getQuery: ({ args, tableAlias }) => { - const q = pgp.as.format("encode(sha256((" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )" ).join(" || ") + ")::text::bytea), 'hex')"); - return q - } + const q = pgp.as.format( + "encode(sha256((" + + args + 
.map( + (fname) => + "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )", + ) + .join(" || ") + + ")::text::bytea), 'hex')", + ); + return q; + }, }, { name: "$sha256_multi_agg", @@ -521,9 +656,18 @@ export const FUNCTIONS: FunctionSpec[] = [ numArgs: MAX_COL_NUM, getFields: (args: any[]) => args, getQuery: ({ args, tableAlias }) => { - const q = pgp.as.format("encode(sha256(string_agg(" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )" ).join(" || ") + ", ',')::text::bytea), 'hex')"); - return q - } + const q = pgp.as.format( + "encode(sha256(string_agg(" + + args + .map( + (fname) => + "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )", + ) + .join(" || ") + + ", ',')::text::bytea), 'hex')", + ); + return q; + }, }, { name: "$sha512_multi", @@ -533,9 +677,18 @@ export const FUNCTIONS: FunctionSpec[] = [ numArgs: MAX_COL_NUM, getFields: (args: any[]) => args, getQuery: ({ args, tableAlias }) => { - const q = pgp.as.format("encode(sha512((" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )" ).join(" || ") + ")::text::bytea), 'hex')"); - return q - } + const q = pgp.as.format( + "encode(sha512((" + + args + .map( + (fname) => + "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )", + ) + .join(" || ") + + ")::text::bytea), 'hex')", + ); + return q; + }, }, { name: "$sha512_multi_agg", @@ -545,9 +698,18 @@ export const FUNCTIONS: FunctionSpec[] = [ numArgs: MAX_COL_NUM, getFields: (args: any[]) => args, getQuery: ({ args, tableAlias }) => { - const q = pgp.as.format("encode(sha512(string_agg(" + args.map(fname => "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )" ).join(" || ") + ", ',')::text::bytea), 'hex')"); - return q - } + const q = pgp.as.format( + "encode(sha512(string_agg(" + + args + .map( + (fname) => + "COALESCE( " + asNameAlias(fname, tableAlias) + ", '' )", + ) + .join(" || ") + + ", ',')::text::bytea), 'hex')", + ); + return q; + }, }, ...FTS_Funcs, @@ -564,8 +726,11 @@ export const FUNCTIONS: FunctionSpec[] = [ singleColArg: false, getFields: (args: any[]) => [args[0]], getQuery: ({ allowedFields, args, tableAlias }) => { - return pgp.as.format("LEFT(" + asNameAlias(args[0], tableAlias) + ", $1)", [args[1]]); - } + return pgp.as.format( + "LEFT(" + asNameAlias(args[0], tableAlias) + ", $1)", + [args[1]], + ); + }, }, { name: "$unnest_words", @@ -575,8 +740,12 @@ export const FUNCTIONS: FunctionSpec[] = [ singleColArg: true, getFields: (args: any[]) => [args[0]], getQuery: ({ allowedFields, args, tableAlias }) => { - return pgp.as.format("unnest(string_to_array(" + asNameAlias(args[0], tableAlias) + "::TEXT , ' '))");//, [args[1]] - } + return pgp.as.format( + "unnest(string_to_array(" + + asNameAlias(args[0], tableAlias) + + "::TEXT , ' '))", + ); //, [args[1]] + }, }, { name: "$right", @@ -586,8 +755,11 @@ export const FUNCTIONS: FunctionSpec[] = [ singleColArg: false, getFields: (args: any[]) => [args[0]], getQuery: ({ allowedFields, args, tableAlias }) => { - return pgp.as.format("RIGHT(" + asNameAlias(args[0], tableAlias) + ", $1)", [args[1]]); - } + return pgp.as.format( + "RIGHT(" + asNameAlias(args[0], tableAlias) + ", $1)", + [args[1]], + ); + }, }, { @@ -598,11 +770,17 @@ export const FUNCTIONS: FunctionSpec[] = [ numArgs: 2, getFields: (args: any[]) => [args[0]], getQuery: ({ allowedFields, args, tableAlias }) => { - if(args.length === 3){ - return pgp.as.format("to_char(" + asNameAlias(args[0], tableAlias) + ", $2, $3)", [args[0], args[1], args[2]]); + if (args.length === 3) { + return 
pgp.as.format( + "to_char(" + asNameAlias(args[0], tableAlias) + ", $2, $3)", + [args[0], args[1], args[2]], + ); } - return pgp.as.format("to_char(" + asNameAlias(args[0], tableAlias) + ", $2)", [args[0], args[1]]); - } + return pgp.as.format( + "to_char(" + asNameAlias(args[0], tableAlias) + ", $2)", + [args[0], args[1]], + ); + }, }, /** @@ -621,88 +799,101 @@ export const FUNCTIONS: FunctionSpec[] = [ "year", "decade", "century", - "millennium" - ].map(k => ({ val: 0, unit: k })) - .concat([ - { val: 6, unit: 'month' }, - { val: 4, unit: 'month' }, - { val: 2, unit: 'month' }, - { val: 8, unit: 'hour' }, - { val: 4, unit: 'hour' }, - { val: 2, unit: 'hour' }, - { val: 30, unit: 'minute' }, - { val: 15, unit: 'minute' }, - { val: 6, unit: 'minute' }, - { val: 5, unit: 'minute' }, - { val: 4, unit: 'minute' }, - { val: 3, unit: 'minute' }, - { val: 2, unit: 'minute' }, - { val: 30, unit: 'second' }, - { val: 15, unit: 'second' }, - { val: 10, unit: 'second' }, - { val: 8, unit: 'second' }, - { val: 6, unit: 'second' }, - { val: 5, unit: 'second' }, - { val: 4, unit: 'second' }, - { val: 3, unit: 'second' }, - { val: 2, unit: 'second' }, - - { val: 500, unit: 'millisecond' }, - { val: 250, unit: 'millisecond' }, - { val: 100, unit: 'millisecond' }, - { val: 50, unit: 'millisecond' }, - { val: 25, unit: 'millisecond' }, - { val: 10, unit: 'millisecond' }, - { val: 5, unit: 'millisecond' }, - { val: 2, unit: 'millisecond' }, - ]).map(({ val, unit }) => ({ - name: "$date_trunc_" + (val || "") + unit, - type: "function", - description: ` :[column_name, opts?: { timeZone: true | 'TZ Name' }] -> round down timestamp to closest ${val || ""} ${unit} `, - singleColArg: true, - numArgs: 2, - getFields: (args: any[]) => [args[0]], - getQuery: ({ allColumns, args, tableAlias }) => { - /** Timestamp added to ensure filters work correctly (psql will loose the string value timezone when comparing to a non tz column) */ - const col = parseUnix(args[0], tableAlias, allColumns, args[1]); - if(!val) return `date_trunc(${asValue(unit)}, ${col})`; - const PreviousUnit = { - year: "decade", - month: "year", - hour: "day", - minute: "hour", - second: "minute", - millisecond: "second", - microsecond: "millisecond", - }; - - const prevUnit = PreviousUnit[unit as "month"]; - if(!prevUnit){ - throw "Not supported. 
prevUnit not found"; - } + "millennium", + ] + .map((k) => ({ val: 0, unit: k })) + .concat([ + { val: 6, unit: "month" }, + { val: 4, unit: "month" }, + { val: 2, unit: "month" }, + { val: 8, unit: "hour" }, + { val: 4, unit: "hour" }, + { val: 2, unit: "hour" }, + { val: 30, unit: "minute" }, + { val: 15, unit: "minute" }, + { val: 6, unit: "minute" }, + { val: 5, unit: "minute" }, + { val: 4, unit: "minute" }, + { val: 3, unit: "minute" }, + { val: 2, unit: "minute" }, + { val: 30, unit: "second" }, + { val: 15, unit: "second" }, + { val: 10, unit: "second" }, + { val: 8, unit: "second" }, + { val: 6, unit: "second" }, + { val: 5, unit: "second" }, + { val: 4, unit: "second" }, + { val: 3, unit: "second" }, + { val: 2, unit: "second" }, + + { val: 500, unit: "millisecond" }, + { val: 250, unit: "millisecond" }, + { val: 100, unit: "millisecond" }, + { val: 50, unit: "millisecond" }, + { val: 25, unit: "millisecond" }, + { val: 10, unit: "millisecond" }, + { val: 5, unit: "millisecond" }, + { val: 2, unit: "millisecond" }, + ]) + .map( + ({ val, unit }) => + ({ + name: "$date_trunc_" + (val || "") + unit, + type: "function", + description: ` :[column_name, opts?: { timeZone: true | 'TZ Name' }] -> round down timestamp to closest ${val || ""} ${unit} `, + singleColArg: true, + numArgs: 2, + getFields: (args: any[]) => [args[0]], + getQuery: ({ allColumns, args, tableAlias }) => { + /** Timestamp added to ensure filters work correctly (psql will loose the string value timezone when comparing to a non tz column) */ + const col = parseUnix(args[0], tableAlias, allColumns, args[1]); + if (!val) return `date_trunc(${asValue(unit)}, ${col})`; + const PreviousUnit = { + year: "decade", + month: "year", + hour: "day", + minute: "hour", + second: "minute", + millisecond: "second", + microsecond: "millisecond", + }; + + const prevUnit = PreviousUnit[unit as "month"]; + if (!prevUnit) { + throw "Not supported. prevUnit not found"; + } - let extractedUnit = `date_part(${asValue(unit, "::text")}, ${col})::int`; - if(unit === "microsecond" || unit === "millisecond"){ - extractedUnit = `(${extractedUnit} - 1000 * floor(${extractedUnit}/1000)::int)` - } - const res = `(date_trunc(${asValue(prevUnit)}, ${col}) + floor(${extractedUnit} / ${val}) * interval ${asValue(val + " " + unit)})`; - // console.log(res); - return res; - } - } as FunctionSpec)), + let extractedUnit = `date_part(${asValue(unit, "::text")}, ${col})::int`; + if (unit === "microsecond" || unit === "millisecond") { + extractedUnit = `(${extractedUnit} - 1000 * floor(${extractedUnit}/1000)::int)`; + } + const res = `(date_trunc(${asValue(prevUnit)}, ${col}) + floor(${extractedUnit} / ${val}) * interval ${asValue(val + " " + unit)})`; + // console.log(res); + return res; + }, + }) as FunctionSpec, + ), /* Date funcs date_part */ - ...["date_trunc", "date_part"].map(funcName => ({ - name: "$" + funcName, - type: "function", - numArgs: 3, - description: ` :[unit, column_name, opts?: { timeZone: true | string }] -> ` + (funcName === "date_trunc"? ` round down timestamp to closest unit value. ` : ` extract date unit as float8. ` ) + ` E.g. 
['hour', col] `, - singleColArg: false, - getFields: (args: any[]) => [args[1]], - getQuery: ({ allColumns, args, tableAlias }) => { - return `${funcName}(${asValue(args[0])}, ${parseUnix(args[1], tableAlias, allColumns, args[2])})`; - } - } as FunctionSpec)), + ...["date_trunc", "date_part"].map( + (funcName) => + ({ + name: "$" + funcName, + type: "function", + numArgs: 3, + description: + ` :[unit, column_name, opts?: { timeZone: true | string }] -> ` + + (funcName === "date_trunc" + ? ` round down timestamp to closest unit value. ` + : ` extract date unit as float8. `) + + ` E.g. ['hour', col] `, + singleColArg: false, + getFields: (args: any[]) => [args[1]], + getQuery: ({ allColumns, args, tableAlias }) => { + return `${funcName}(${asValue(args[0])}, ${parseUnix(args[1], tableAlias, allColumns, args[2])})`; + }, + }) as FunctionSpec, + ), /* Handy date funcs */ ...[ @@ -740,136 +931,186 @@ export const FUNCTIONS: FunctionSpec[] = [ ["yyyy", "yyyy"], ["yy", "yy"], ["yr", "yy"], - ].map(([funcName, txt]) => ({ - name: "$" + funcName, - type: "function", - description: ` :[column_name, opts?: { timeZone: true | string }] -> get timestamp formated as ` + txt, - singleColArg: true, - numArgs: 1, - getFields: (args: any[]) => [args[0]], - getQuery: ({ allColumns, args, tableAlias }) => { - return pgp.as.format("trim(to_char(" + parseUnix(args[0], tableAlias, allColumns, args[1]) + ", $2))", [args[0], txt]); - } - } as FunctionSpec)), + ].map( + ([funcName, txt]) => + ({ + name: "$" + funcName, + type: "function", + description: + ` :[column_name, opts?: { timeZone: true | string }] -> get timestamp formated as ` + + txt, + singleColArg: true, + numArgs: 1, + getFields: (args: any[]) => [args[0]], + getQuery: ({ allColumns, args, tableAlias }) => { + return pgp.as.format( + "trim(to_char(" + + parseUnix(args[0], tableAlias, allColumns, args[1]) + + ", $2))", + [args[0], txt], + ); + }, + }) as FunctionSpec, + ), /* Basic 1 arg col funcs */ ...[ - ...["TEXT"].flatMap(cast => [ - "upper", "lower", "length", "reverse", "trim", "initcap" - ].map(funcName => ({ cast, funcName }))), - ...[""].flatMap(cast => [ - "round", "ceil", "floor", "sign", "md5" - ].map(funcName => ({ cast, funcName }))), - ].map(({ funcName, cast }) => ({ - name: "$" + funcName, - type: "function", - numArgs: 1, - singleColArg: true, - getFields: (args: any[]) => [args[0]], - getQuery: ({ args, tableAlias }) => { - return `${funcName}(${asNameAlias(args[0], tableAlias)}${cast? `::${cast}`: ""})`; - } - } as FunctionSpec)), + ...["TEXT"].flatMap((cast) => + ["upper", "lower", "length", "reverse", "trim", "initcap"].map( + (funcName) => ({ cast, funcName }), + ), + ), + ...[""].flatMap((cast) => + ["round", "ceil", "floor", "sign", "md5"].map((funcName) => ({ + cast, + funcName, + })), + ), + ].map( + ({ funcName, cast }) => + ({ + name: "$" + funcName, + type: "function", + numArgs: 1, + singleColArg: true, + getFields: (args: any[]) => [args[0]], + getQuery: ({ args, tableAlias }) => { + return `${funcName}(${asNameAlias(args[0], tableAlias)}${cast ? 
`::${cast}` : ""})`; + }, + }) as FunctionSpec, + ), /** - * Interval funcs + * Interval funcs * (col1, col2?, trunc ) * */ - ...["age", "ageNow", "difference"].map(funcName => ({ - name: "$" + funcName, - type: "function", - numArgs: 2, - singleColArg: true, - getFields: (args: any[]) => args.slice(0, 2).filter(a => typeof a === "string"), // Filtered because the second arg is optional - getQuery: ({ allowedFields, args, tableAlias, allColumns }) => { - const validColCount = args.slice(0, 2).filter(a => typeof a === "string").length; - const trunc = args[2]; - const allowedTruncs = ["second", "minute", "hour", "day", "month", "year"]; - if(trunc && !allowedTruncs.includes(trunc)) throw new Error("Incorrect trunc provided. Allowed values: " + allowedTruncs) - if(funcName === "difference" && validColCount !== 2) throw new Error("Must have two column names") - if(![1,2].includes(validColCount)) throw new Error("Must have one or two column names") - const [leftField, rightField] = args as [string, string]; - const tzOpts = args[2]; - const leftQ = parseUnix(leftField, tableAlias, allColumns, tzOpts); - let rightQ = rightField? parseUnix(rightField, tableAlias, allColumns, tzOpts) : ""; - let query = ""; - if(funcName === "ageNow" && validColCount === 1){ - query = `age(now(), ${leftQ})`; - } else if(funcName === "age" || funcName === "ageNow"){ - if(rightQ) rightQ = ", " + rightQ; - query = `age(${leftQ} ${rightQ})`; - } else { - query = `${leftQ} - ${rightQ}`; - } - return trunc? `date_trunc(${asValue(trunc)}, ${query})` : query; - } - } as FunctionSpec)), + ...["age", "ageNow", "difference"].map( + (funcName) => + ({ + name: "$" + funcName, + type: "function", + numArgs: 2, + singleColArg: true, + getFields: (args: any[]) => + args.slice(0, 2).filter((a) => typeof a === "string"), // Filtered because the second arg is optional + getQuery: ({ allowedFields, args, tableAlias, allColumns }) => { + const validColCount = args + .slice(0, 2) + .filter((a) => typeof a === "string").length; + const trunc = args[2]; + const allowedTruncs = [ + "second", + "minute", + "hour", + "day", + "month", + "year", + ]; + if (trunc && !allowedTruncs.includes(trunc)) + throw new Error( + "Incorrect trunc provided. Allowed values: " + allowedTruncs, + ); + if (funcName === "difference" && validColCount !== 2) + throw new Error("Must have two column names"); + if (![1, 2].includes(validColCount)) + throw new Error("Must have one or two column names"); + const [leftField, rightField] = args as [string, string]; + const tzOpts = args[2]; + const leftQ = parseUnix(leftField, tableAlias, allColumns, tzOpts); + let rightQ = rightField + ? parseUnix(rightField, tableAlias, allColumns, tzOpts) + : ""; + let query = ""; + if (funcName === "ageNow" && validColCount === 1) { + query = `age(now(), ${leftQ})`; + } else if (funcName === "age" || funcName === "ageNow") { + if (rightQ) rightQ = ", " + rightQ; + query = `age(${leftQ} ${rightQ})`; + } else { + query = `${leftQ} - ${rightQ}`; + } + return trunc ? 
`date_trunc(${asValue(trunc)}, ${query})` : query; + }, + }) as FunctionSpec, + ), /* pgcrypto funcs */ - ...["crypt"].map(funcName => ({ - name: "$" + funcName, - type: "function", - numArgs: 1, - singleColArg: false, - getFields: (args: any[]) => [args[1]], - getQuery: ({ allowedFields, args, tableAlias }) => { - const value = asValue(args[0]) + "", - seedColumnName = asNameAlias(args[1], tableAlias); - - return `crypt(${value}, ${seedColumnName}::text)`; - } - } as FunctionSpec)), + ...["crypt"].map( + (funcName) => + ({ + name: "$" + funcName, + type: "function", + numArgs: 1, + singleColArg: false, + getFields: (args: any[]) => [args[1]], + getQuery: ({ allowedFields, args, tableAlias }) => { + const value = asValue(args[0]) + "", + seedColumnName = asNameAlias(args[1], tableAlias); + + return `crypt(${value}, ${seedColumnName}::text)`; + }, + }) as FunctionSpec, + ), /* Text col and value funcs */ - ...["position", "position_lower"].map(funcName => ({ - name: "$" + funcName, - type: "function", - numArgs: 1, - singleColArg: false, - getFields: (args: any[]) => [args[1]], - getQuery: ({ allowedFields, args, tableAlias }) => { - let a1 = asValue(args[0]), - a2 = asNameAlias(args[1], tableAlias); - if(funcName === "position_lower"){ - a1 = `LOWER(${a1}::text)`; - a2 = `LOWER(${a2}::text)`; - } - return `position( ${a1} IN ${a2} )`; - } - } as FunctionSpec)), - ...["template_string"].map(funcName => ({ - name: "$" + funcName, - type: "function", - numArgs: 1, - minCols: 0, - singleColArg: false, - getFields: (args: any[]) => [] as string[], // Fields not validated because we'll use the allowed ones anyway - getQuery: ({ allowedFields, args, tableAlias }) => { - if(typeof args[0] !== "string") throw "First argument must be a string. E.g.: '{col1} ..text {col2} ...' "; - - const rawValue = args[0]; - let finalValue = rawValue; - const usedColumns = allowedFields.filter(fName => rawValue.includes(`{${fName}}`)); - usedColumns.forEach((colName, idx) => { - finalValue = finalValue.split(`{${colName}}`).join(`%${idx + 1}$s`) - }); - finalValue = asValue(finalValue); - - if(usedColumns.length){ - return `format(${finalValue}, ${usedColumns.map(c => `${asNameAlias(c, tableAlias)}::TEXT`).join(", ")})`; - } - - return `format(${finalValue})`; - } - } as FunctionSpec)), + ...["position", "position_lower"].map( + (funcName) => + ({ + name: "$" + funcName, + type: "function", + numArgs: 1, + singleColArg: false, + getFields: (args: any[]) => [args[1]], + getQuery: ({ allowedFields, args, tableAlias }) => { + let a1 = asValue(args[0]), + a2 = asNameAlias(args[1], tableAlias); + if (funcName === "position_lower") { + a1 = `LOWER(${a1}::text)`; + a2 = `LOWER(${a2}::text)`; + } + return `position( ${a1} IN ${a2} )`; + }, + }) as FunctionSpec, + ), + ...["template_string"].map( + (funcName) => + ({ + name: "$" + funcName, + type: "function", + numArgs: 1, + minCols: 0, + singleColArg: false, + getFields: (args: any[]) => [] as string[], // Fields not validated because we'll use the allowed ones anyway + getQuery: ({ allowedFields, args, tableAlias }) => { + if (typeof args[0] !== "string") + throw "First argument must be a string. E.g.: '{col1} ..text {col2} ...' 
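A sketch of the `$template_string` call shape implied by the validation here: `{column}` placeholders referencing allowed fields are substituted, everything else is kept as literal text (column names hypothetical):

```typescript
// Inside an async context.
const users = await db.users.find(
  {},
  { select: { label: { $template_string: ["{first_name} {last_name} <{email}>"] } } },
);
```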
"; + + const rawValue = args[0]; + let finalValue = rawValue; + const usedColumns = allowedFields.filter((fName) => + rawValue.includes(`{${fName}}`), + ); + usedColumns.forEach((colName, idx) => { + finalValue = finalValue.split(`{${colName}}`).join(`%${idx + 1}$s`); + }); + finalValue = asValue(finalValue); + + if (usedColumns.length) { + return `format(${finalValue}, ${usedColumns.map((c) => `${asNameAlias(c, tableAlias)}::TEXT`).join(", ")})`; + } + + return `format(${finalValue})`; + }, + }) as FunctionSpec, + ), /** Custom highlight -> myterm => ['some text and', ['myterm'], ' and some other text'] - * (fields: "*" | string[], term: string, { edgeTruncate: number = -1; noFields: boolean = false }) => string | (string | [string])[] + * (fields: "*" | string[], term: string, { edgeTruncate: number = -1; noFields: boolean = false }) => string | (string | [string])[] * edgeTruncate = maximum extra characters left and right of matches * noFields = exclude field names in search - * */ + * */ { - name: "$term_highlight", /* */ + name: "$term_highlight" /* */, description: ` :[column_names, search_term, opts?<{ returnIndex?: number; edgeTruncate?: number; noFields?: boolean }>] -> get case-insensitive text match highlight`, type: "function", numArgs: 1, @@ -877,37 +1118,56 @@ export const FUNCTIONS: FunctionSpec[] = [ canBeUsedForFilter: true, getFields: (args: any[]) => args[0], getQuery: ({ allowedFields, args, tableAlias, allColumns }) => { - const cols = parseFieldFilter(args[0], false, allowedFields); let term = args[1]; const rawTerm = args[1]; - const { edgeTruncate, noFields = false, returnType, matchCase = false } = args[2] || {}; - if(!isEmpty(args[2])){ + const { + edgeTruncate, + noFields = false, + returnType, + matchCase = false, + } = args[2] || {}; + if (!isEmpty(args[2])) { const keys = Object.keys(args[2]); - const validKeys = ["edgeTruncate", "noFields", "returnType", "matchCase"]; - const bad_keys = keys.filter(k => !validKeys.includes(k)); - if(bad_keys.length) throw "Invalid options provided for $term_highlight. Expecting one of: " + validKeys.join(", "); + const validKeys = [ + "edgeTruncate", + "noFields", + "returnType", + "matchCase", + ]; + const bad_keys = keys.filter((k) => !validKeys.includes(k)); + if (bad_keys.length) + throw ( + "Invalid options provided for $term_highlight. Expecting one of: " + + validKeys.join(", ") + ); } - if(!cols.length) throw "Cols are empty/invalid"; - if(typeof term !== "string") throw "Non string term provided: " + term; - if(edgeTruncate !== undefined && (!Number.isInteger(edgeTruncate) || edgeTruncate < -1)) throw "Invalid edgeTruncate. expecting a positive integer"; - if(typeof noFields !== "boolean") throw "Invalid noFields. expecting boolean"; + if (!cols.length) throw "Cols are empty/invalid"; + if (typeof term !== "string") throw "Non string term provided: " + term; + if ( + edgeTruncate !== undefined && + (!Number.isInteger(edgeTruncate) || edgeTruncate < -1) + ) + throw "Invalid edgeTruncate. expecting a positive integer"; + if (typeof noFields !== "boolean") + throw "Invalid noFields. 
expecting boolean"; const RETURN_TYPES = ["index", "boolean", "object"]; - if(returnType && !RETURN_TYPES.includes(returnType)){ - throw `returnType can only be one of: ${RETURN_TYPES}` + if (returnType && !RETURN_TYPES.includes(returnType)) { + throw `returnType can only be one of: ${RETURN_TYPES}`; } const makeTextMatcherArray = (rawText: string, _term: string) => { - let matchText = rawText, term = _term; - if(!matchCase) { - matchText = `LOWER(${rawText})` - term = `LOWER(${term})` + let matchText = rawText, + term = _term; + if (!matchCase) { + matchText = `LOWER(${rawText})`; + term = `LOWER(${term})`; } let leftStr = `substr(${rawText}, 1, position(${term} IN ${matchText}) - 1 )`, rightStr = `substr(${rawText}, position(${term} IN ${matchText}) + length(${term}) )`; - if(edgeTruncate){ + if (edgeTruncate) { leftStr = `RIGHT(${leftStr}, ${asValue(edgeTruncate)})`; - rightStr = `LEFT(${rightStr}, ${asValue(edgeTruncate)})` + rightStr = `LEFT(${rightStr}, ${asValue(edgeTruncate)})`; } return ` CASE WHEN position(${term} IN ${matchText}) > 0 AND ${term} <> '' @@ -922,87 +1182,94 @@ export const FUNCTIONS: FunctionSpec[] = [ array_to_json(ARRAY[(${rawText})::TEXT]) END `; - } + }; - const colRaw = "( " + cols.map(c =>`${noFields? "" : (asValue(c + ": ") + " || ")} COALESCE(${asNameAlias(c, tableAlias)}::TEXT, '')`).join(" || ', ' || ") + " )"; + const colRaw = + "( " + + cols + .map( + (c) => + `${noFields ? "" : asValue(c + ": ") + " || "} COALESCE(${asNameAlias(c, tableAlias)}::TEXT, '')`, + ) + .join(" || ', ' || ") + + " )"; let col = colRaw; term = asValue(term); - if(!matchCase) { + if (!matchCase) { col = "LOWER" + col; - term = `LOWER(${term})` + term = `LOWER(${term})`; } let leftStr = `substr(${colRaw}, 1, position(${term} IN ${col}) - 1 )`, rightStr = `substr(${colRaw}, position(${term} IN ${col}) + length(${term}) )`; - if(edgeTruncate){ + if (edgeTruncate) { leftStr = `RIGHT(${leftStr}, ${asValue(edgeTruncate)})`; - rightStr = `LEFT(${rightStr}, ${asValue(edgeTruncate)})` + rightStr = `LEFT(${rightStr}, ${asValue(edgeTruncate)})`; } - + // console.log(col); - let res = "" - if(returnType === "index"){ + let res = ""; + if (returnType === "index") { res = `CASE WHEN position(${term} IN ${col}) > 0 THEN position(${term} IN ${col}) - 1 ELSE -1 END`; - // } else if(returnType === "boolean"){ - // res = `CASE WHEN position(${term} IN ${col}) > 0 THEN TRUE ELSE FALSE END`; - - } else if(returnType === "object" || returnType === "boolean"){ - const hasChars = Boolean(rawTerm && /[a-z]/i.test(rawTerm)); - const validCols = cols.map(c => { - const colInfo = allColumns.find(ac => ac.name === c); + // } else if(returnType === "boolean"){ + // res = `CASE WHEN position(${term} IN ${col}) > 0 THEN TRUE ELSE FALSE END`; + } else if (returnType === "object" || returnType === "boolean") { + const hasChars = Boolean(rawTerm && /[a-z]/i.test(rawTerm)); + const validCols = cols + .map((c) => { + const colInfo = allColumns.find((ac) => ac.name === c); return { key: c, - colInfo - } + colInfo, + }; }) - .filter(c => c.colInfo && c.colInfo.udt_name !== "bytea") - - const _cols = validCols.filter(c => - /** Exclude numeric columns when the search tern contains a character */ - !hasChars || - postgresToTsType(c.colInfo!.udt_name) !== "number" + .filter((c) => c.colInfo && c.colInfo.udt_name !== "bytea"); + + const _cols = validCols.filter( + (c) => + /** Exclude numeric columns when the search tern contains a character */ + !hasChars || postgresToTsType(c.colInfo!.udt_name) !== "number", ); /** 
This will break GROUP BY (non-integer constant in GROUP BY) */ - if(!_cols.length){ - if(validCols.length && hasChars) throw `You're searching the impossible: characters in numeric fields. Use this to prevent making such a request in future: /[a-z]/i.test(your_term) ` - return (returnType === "boolean")? "FALSE" : "NULL" + if (!_cols.length) { + if (validCols.length && hasChars) + throw `You're searching the impossible: characters in numeric fields. Use this to prevent making such a request in future: /[a-z]/i.test(your_term) `; + return returnType === "boolean" ? "FALSE" : "NULL"; } res = `CASE ${_cols - .map(c => { - const colNameEscaped = asNameAlias(c.key, tableAlias) - let colSelect = `${colNameEscaped}::TEXT`; - const isTstamp = c.colInfo?.udt_name.startsWith("timestamp"); - if(isTstamp || c.colInfo?.udt_name === "date"){ - colSelect = `( CASE WHEN ${colNameEscaped} IS NULL THEN '' + .map((c) => { + const colNameEscaped = asNameAlias(c.key, tableAlias); + let colSelect = `${colNameEscaped}::TEXT`; + const isTstamp = c.colInfo?.udt_name.startsWith("timestamp"); + if (isTstamp || c.colInfo?.udt_name === "date") { + colSelect = `( CASE WHEN ${colNameEscaped} IS NULL THEN '' ELSE concat_ws(' ', trim(to_char(${colNameEscaped}, 'YYYY-MM-DD HH24:MI:SS')), trim(to_char(${colNameEscaped}, 'Day Month')), 'Q' || trim(to_char(${colNameEscaped}, 'Q')), 'WK' || trim(to_char(${colNameEscaped}, 'WW')) - ) END)` - } - const colTxt = `COALESCE(${colSelect}, '')`; // position(${term} IN ${colTxt}) > 0 - if(returnType === "boolean"){ - return ` - WHEN ${colTxt} ${matchCase? "LIKE" : "ILIKE"} ${asValue('%' + rawTerm + '%')} + ) END)`; + } + const colTxt = `COALESCE(${colSelect}, '')`; // position(${term} IN ${colTxt}) > 0 + if (returnType === "boolean") { + return ` + WHEN ${colTxt} ${matchCase ? "LIKE" : "ILIKE"} ${asValue("%" + rawTerm + "%")} THEN TRUE - ` - } - return ` - WHEN ${colTxt} ${matchCase? "LIKE" : "ILIKE"} ${asValue('%' + rawTerm + '%')} + `; + } + return ` + WHEN ${colTxt} ${matchCase ? "LIKE" : "ILIKE"} ${asValue("%" + rawTerm + "%")} THEN json_build_object( ${asValue(c.key)}, - ${makeTextMatcherArray( - colTxt, - term - )} + ${makeTextMatcherArray(colTxt, term)} )::jsonb - ` - }).join(" ")} - ELSE ${(returnType === "boolean")? "FALSE" : "NULL"} + `; + }) + .join(" ")} + ELSE ${returnType === "boolean" ? 
"FALSE" : "NULL"} END`; @@ -1016,28 +1283,42 @@ export const FUNCTIONS: FunctionSpec[] = [ ), to_json(${rightStr}::TEXT ) ]) ELSE array_to_json(ARRAY[(${colRaw})::TEXT]) END`; - } return res; - } + }, }, /* Aggs */ - ...["max", "min", "count", "avg", "json_agg", "jsonb_agg", "string_agg", "array_agg", "sum"].map(aggName => ({ - name: "$" + aggName, - type: "aggregation", - numArgs: 1, - singleColArg: true, - getFields: (args: any[]) => [args[0]], - getQuery: ({ args, tableAlias }) => { - let extraArgs = ""; - if(args.length > 1){ - extraArgs = pgp.as.format(", $1:csv", args.slice(1)) - } - return aggName + "(" + asNameAlias(args[0], tableAlias) + `${extraArgs})`; - } - } satisfies FunctionSpec)), + ...[ + "max", + "min", + "count", + "avg", + "json_agg", + "jsonb_agg", + "string_agg", + "array_agg", + "sum", + ].map( + (aggName) => + ({ + name: "$" + aggName, + type: "aggregation", + numArgs: 1, + singleColArg: true, + getFields: (args: any[]) => [args[0]], + getQuery: ({ args, tableAlias }) => { + let extraArgs = ""; + if (args.length > 1) { + extraArgs = pgp.as.format(", $1:csv", args.slice(1)); + } + return ( + aggName + "(" + asNameAlias(args[0], tableAlias) + `${extraArgs})` + ); + }, + }) satisfies FunctionSpec, + ), { name: "$jsonb_build_object", @@ -1045,10 +1326,10 @@ export const FUNCTIONS: FunctionSpec[] = [ numArgs: 22, minCols: 1, singleColArg: false, - getFields: args => args, + getFields: (args) => args, getQuery: ({ args, tableAlias }) => { - return `jsonb_build_object(${args.flatMap(arg => [asValue(arg), asNameAlias(arg, tableAlias)]).join(", ")})`; - } + return `jsonb_build_object(${args.flatMap((arg) => [asValue(arg), asNameAlias(arg, tableAlias)]).join(", ")})`; + }, }, /* More aggs */ @@ -1061,7 +1342,7 @@ export const FUNCTIONS: FunctionSpec[] = [ getFields: (args: any[]) => [], getQuery: ({ allowedFields, args, tableAlias }) => { return "COUNT(*)"; - } + }, } as FunctionSpec, { name: "$diff_perc", @@ -1071,14 +1352,13 @@ export const FUNCTIONS: FunctionSpec[] = [ getFields: (args: any[]) => [args[0]], getQuery: ({ allowedFields, args, tableAlias }) => { const col = asNameAlias(args[0], tableAlias); - return `round( ( ( MAX(${col}) - MIN(${col}) )::float/MIN(${col}) ) * 100, 2)` - } - } as FunctionSpec + return `round( ( ( MAX(${col}) - MIN(${col}) )::float/MIN(${col}) ) * 100, 2)`; + }, + } as FunctionSpec, ]; /* The difference between a function and computed field is that the computed field does not require any arguments */ export const COMPUTED_FIELDS: FieldSpec[] = [ - /** * Used instead of row id. Must be used as a last resort. Use all non pseudo or domain data type columns first! */ @@ -1087,18 +1367,20 @@ export const COMPUTED_FIELDS: FieldSpec[] = [ type: "computed", // description: ` order hash of row content `, getQuery: ({ allowedFields, tableAlias, ctidField }) => { - return "md5(" + + return ( + "md5(" + allowedFields /* CTID not available in AFTER trigger */ // .concat(ctidField? 
[ctidField] : []) .sort() - .map(f => asNameAlias(f, tableAlias)) - .map(f => `md5(coalesce(${f}::text, 'dd'))`) - .join(" || ") + - `)`; - } - } + .map((f) => asNameAlias(f, tableAlias)) + .map((f) => `md5(coalesce(${f}::text, 'dd'))`) + .join(" || ") + + `)` + ); + }, + }, // ,{ // name: "ctid", // type: "computed", @@ -1150,4 +1432,4 @@ WHERE NOT EXISTS ( --Keep only leaf values AND starts_with(t2.path, t1.path) ); -*/ \ No newline at end of file +*/ diff --git a/lib/DboBuilder/QueryBuilder/QueryBuilder.ts b/lib/DboBuilder/QueryBuilder/QueryBuilder.ts index b080c1ac..5dfde73d 100644 --- a/lib/DboBuilder/QueryBuilder/QueryBuilder.ts +++ b/lib/DboBuilder/QueryBuilder/QueryBuilder.ts @@ -1,36 +1,53 @@ - /*--------------------------------------------------------------------------------------------- * Copyright (c) Stefan L. All rights reserved. * Licensed under the MIT License. See LICENSE in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { asName, ColumnInfo, getKeys, isEmpty, isObject, JoinSelect, PG_COLUMN_UDT_DATA_TYPE, Select, ValidatedColumnInfo } from "prostgles-types"; +import { + asName, + ColumnInfo, + getKeys, + isEmpty, + isObject, + JoinSelect, + PG_COLUMN_UDT_DATA_TYPE, + Select, + ValidatedColumnInfo, +} from "prostgles-types"; import { isPlainObject, postgresToTsType, SortItem } from "../DboBuilder"; import { ParsedJoinPath } from "../ViewHandler/parseJoinPath"; import { ViewHandler } from "../ViewHandler/ViewHandler"; -import { COMPUTED_FIELDS, FieldSpec, FunctionSpec, parseFunction } from "./Functions"; +import { + COMPUTED_FIELDS, + FieldSpec, + FunctionSpec, + parseFunction, +} from "./Functions"; export type SelectItem = { getFields: (args?: any[]) => string[] | "*"; getQuery: (tableAlias?: string) => string; columnPGDataType?: string; column_udt_type?: PG_COLUMN_UDT_DATA_TYPE; - tsDataType?: ValidatedColumnInfo["tsDataType"] + tsDataType?: ValidatedColumnInfo["tsDataType"]; alias: string; selected: boolean; -} & ({ - type: "column"; - columnName: string; -} | { - type: "function" | "aggregation" | "joinedColumn" | "computed"; - columnName?: undefined; -}); -export type SelectItemValidated = SelectItem & { fields: string[]; } +} & ( + | { + type: "column"; + columnName: string; + } + | { + type: "function" | "aggregation" | "joinedColumn" | "computed"; + columnName?: undefined; + } +); +export type SelectItemValidated = SelectItem & { fields: string[] }; export type WhereOptions = Awaited>; export type NewQueryRoot = { /** - * All fields from the table will be in nested SELECT and GROUP BY to allow order/filter by fields not in select + * All fields from the table will be in nested SELECT and GROUP BY to allow order/filter by fields not in select */ allFields: string[]; @@ -50,37 +67,38 @@ export type NewQueryRoot = { tableAlias?: string; }; -export type NewQueryJoin = (NewQuery & { +export type NewQueryJoin = NewQuery & { joinPath: ParsedJoinPath[]; joinAlias: string; -}); +}; export type NewQuery = NewQueryRoot & { joins?: NewQueryJoin[]; -} +}; export const asNameAlias = (field: string, tableAlias?: string) => { const result = asName(field); - if(tableAlias) return asName(tableAlias) + "." + result; + if (tableAlias) return asName(tableAlias) + "." + result; return result; -} +}; -export const parseFunctionObject = (funcData: any): { funcName: string; args: any[] } => { - const makeErr = (msg: string) => `Function not specified correctly. 
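The shape `parseFunctionObject` accepts is exactly one `$funcName` key mapped to an argument array, illustrated below:

```typescript
// Valid: a single key with an array value
parseFunctionObject({ $max: ["price"] }); // -> { funcName: "$max", args: ["price"] }
parseFunctionObject({ $left: ["name", 3] }); // -> { funcName: "$left", args: ["name", 3] }

// Invalid: non-array args or multiple keys -> throws the error built above
// parseFunctionObject({ $max: "price" });
// parseFunctionObject({ $max: ["price"], $min: ["price"] });
```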
Expecting { $funcName: ["columnName" | , ...args] } object but got: ${JSON.stringify(funcData)} \n ${msg}` - if(!isObject(funcData)) throw makeErr(""); +export const parseFunctionObject = ( + funcData: any, +): { funcName: string; args: any[] } => { + const makeErr = (msg: string) => + `Function not specified correctly. Expecting { $funcName: ["columnName" | , ...args] } object but got: ${JSON.stringify(funcData)} \n ${msg}`; + if (!isObject(funcData)) throw makeErr(""); const keys = getKeys(funcData); - if(keys.length !== 1) throw makeErr(""); + if (keys.length !== 1) throw makeErr(""); const funcName = keys[0]!; const args = funcData[funcName]; - if(!args || !Array.isArray(args)){ + if (!args || !Array.isArray(args)) { throw makeErr("Arguments missing or invalid"); } return { funcName, args }; -} - +}; export class SelectItemBuilder { - select: SelectItemValidated[] = []; private allFields: string[]; @@ -92,7 +110,15 @@ export class SelectItemBuilder { private isView: boolean; private columns: ColumnInfo[]; - constructor(params: { allowedFields: string[]; allowedOrderByFields: string[]; computedFields: FieldSpec[]; functions: FunctionSpec[]; allFields: string[]; isView: boolean; columns: ColumnInfo[]; }){ + constructor(params: { + allowedFields: string[]; + allowedOrderByFields: string[]; + computedFields: FieldSpec[]; + functions: FunctionSpec[]; + allFields: string[]; + isView: boolean; + columns: ColumnInfo[]; + }) { this.allFields = params.allFields; this.allowedFields = params.allowedFields; this.allowedOrderByFields = params.allowedOrderByFields; @@ -100,48 +126,72 @@ export class SelectItemBuilder { this.isView = params.isView; this.functions = params.functions; this.columns = params.columns; - this.allowedFieldsIncludingComputed = this.allowedFields.concat(this.computedFields? this.computedFields.map(cf => cf.name) : []); - if(!this.allowedFields.length){ - if(!this.columns.length){ + this.allowedFieldsIncludingComputed = this.allowedFields.concat( + this.computedFields ? this.computedFields.map((cf) => cf.name) : [], + ); + if (!this.allowedFields.length) { + if (!this.columns.length) { throw "This view/table has no columns. Cannot select anything"; } throw "allowedFields empty/missing"; } /* Check for conflicting computed column names */ - const conflictingCol = this.allFields.find(fieldName => this.computedFields.find(cf => cf.name === fieldName)); - if(conflictingCol){ - throw "INTERNAL ERROR: Cannot have duplicate column names ( " + conflictingCol + " ). One or more computed column names are colliding with table columns ones"; + const conflictingCol = this.allFields.find((fieldName) => + this.computedFields.find((cf) => cf.name === fieldName), + ); + if (conflictingCol) { + throw ( + "INTERNAL ERROR: Cannot have duplicate column names ( " + + conflictingCol + + " ). One or more computed column names are colliding with table columns ones" + ); } } private checkField = (f: string, isSelected: boolean) => { const allowedSelectedFields = this.allowedFieldsIncludingComputed; - const allowedNonSelectedFields = [...this.allowedFieldsIncludingComputed, ...this.allowedOrderByFields]; + const allowedNonSelectedFields = [ + ...this.allowedFieldsIncludingComputed, + ...this.allowedOrderByFields, + ]; /** Not selected items can be part of the orderBy fields */ - const allowedFields = isSelected? allowedSelectedFields : allowedNonSelectedFields; - if(!allowedFields.includes(f)){ - throw "Field " + f + " is invalid or dissallowed. 
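Computed fields such as `$rowhash` share the column namespace, which is why the constructor above rejects name collisions. Selecting one looks like selecting an ordinary column; a hedged sketch:

```typescript
// Inside an async context; $rowhash needs no arguments.
const rows = await db.users.find({}, { select: { id: 1, $rowhash: 1 } });
```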
\nAllowed fields: " + allowedFields.join(", "); + const allowedFields = isSelected + ? allowedSelectedFields + : allowedNonSelectedFields; + if (!allowedFields.includes(f)) { + throw ( + "Field " + + f + + " is invalid or dissallowed. \nAllowed fields: " + + allowedFields.join(", ") + ); } return f; - } + }; private addItem = (item: SelectItem) => { let fields = item.getFields(); // console.trace(fields) - if(fields === "*") fields = this.allowedFields.slice(0); - fields.map(f => this.checkField(f, item.selected)); + if (fields === "*") fields = this.allowedFields.slice(0); + fields.map((f) => this.checkField(f, item.selected)); - if(this.select.find(s => s.alias === item.alias)){ + if (this.select.find((s) => s.alias === item.alias)) { throw `Cannot specify duplicate columns ( ${item.alias} ). Perhaps you're using "*" with column names?`; } this.select.push({ ...item, fields }); - } + }; - private addFunction = (func: FunctionSpec | string, args: any[], alias: string) => { + private addFunction = ( + func: FunctionSpec | string, + args: any[], + alias: string, + ) => { const funcDef = parseFunction({ - func, args, functions: this.functions, + func, + args, + functions: this.functions, allowedFields: this.allowedFieldsIncludingComputed, }); @@ -149,36 +199,40 @@ export class SelectItemBuilder { type: funcDef.type, alias, getFields: () => funcDef.getFields(args), - getQuery: (tableAlias?: string) => funcDef.getQuery({ allColumns: this.columns, allowedFields: this.allowedFields, args, tableAlias, - ctidField: undefined, - - /* CTID not available in AFTER trigger */ - // ctidField: this.isView? undefined : "ctid" - }), - selected: true + getQuery: (tableAlias?: string) => + funcDef.getQuery({ + allColumns: this.columns, + allowedFields: this.allowedFields, + args, + tableAlias, + ctidField: undefined, + + /* CTID not available in AFTER trigger */ + // ctidField: this.isView? undefined : "ctid" + }), + selected: true, }); - } + }; addColumn = (fieldName: string, selected: boolean) => { - /* Check if computed col */ - if(selected){ - const compCol = COMPUTED_FIELDS.find(cf => cf.name === fieldName); - if(compCol && !this.select.find(s => s.alias === fieldName)){ - const cf: FunctionSpec = { + if (selected) { + const compCol = COMPUTED_FIELDS.find((cf) => cf.name === fieldName); + if (compCol && !this.select.find((s) => s.alias === fieldName)) { + const cf: FunctionSpec = { ...compCol, type: "computed", numArgs: 0, singleColArg: false, - getFields: (_args: any[]) => [] - } - this.addFunction(cf, [], compCol.name) + getFields: (_args: any[]) => [], + }; + this.addFunction(cf, [], compCol.name); return; } } - const colDef = this.columns.find(c => c.name === fieldName); - const alias = selected? fieldName : ("not_selected_" + fieldName); + const colDef = this.columns.find((c) => c.name === fieldName); + const alias = selected ? fieldName : "not_selected_" + fieldName; this.addItem({ type: "column", columnName: fieldName, @@ -188,101 +242,108 @@ export class SelectItemBuilder { alias, getQuery: (tableAlias) => asNameAlias(fieldName, tableAlias), getFields: () => [fieldName], - selected + selected, }); - } + }; + + parseUserSelect = async ( + userSelect: Select, + joinParse?: ( + key: string, + val: JoinSelect, + throwErr: (msg: string) => any, + ) => any, + ) => { + /* [col1, col2, col3] */ + if (Array.isArray(userSelect)) { + if (userSelect.find((key) => typeof key !== "string")) + throw "Invalid array select. 
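`parseUserSelect`, whose definition begins here, accepts several select shapes; roughly (table and columns hypothetical):

```typescript
// Inside an async context.
await db.users.find({}, { select: ["id", "name"] }); // array of column names
await db.users.find({}, { select: "*" }); // all allowed columns
await db.users.find({}, { select: { name: 0 } }); // exclude-only object
await db.users.find({}, { select: { id: 1, name: 1 } }); // include object
```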
Expecting an array of strings"; - parseUserSelect = async (userSelect: Select, joinParse?: (key: string, val: JoinSelect, throwErr: (msg: string) => any) => any) => { + userSelect.map((key) => this.addColumn(key, true)); - /* [col1, col2, col3] */ - if(Array.isArray(userSelect)){ - if(userSelect.find(key => typeof key !== "string")) throw "Invalid array select. Expecting an array of strings"; - - userSelect.map(key => this.addColumn(key, true)) - - /* Empty select */ - } else if(userSelect === ""){ + /* Empty select */ + } else if (userSelect === "") { return []; - - } else if(userSelect === "*"){ - this.allowedFields.map(key => this.addColumn(key, true) ); - - } else if(isPlainObject(userSelect) && !isEmpty(userSelect)){ + } else if (userSelect === "*") { + this.allowedFields.map((key) => this.addColumn(key, true)); + } else if (isPlainObject(userSelect) && !isEmpty(userSelect)) { const selectKeys = Object.keys(userSelect), selectValues = Object.values(userSelect); - + /* Cannot include and exclude at the same time */ - if( - selectValues.filter(v => [0, false].includes(v)).length - ){ - if(selectValues.filter(v => ![0, false].includes(v)).length ){ + if (selectValues.filter((v) => [0, false].includes(v)).length) { + if (selectValues.filter((v) => ![0, false].includes(v)).length) { throw "\nCannot include and exclude fields at the same time"; } - + /* Exclude only */ - this.allowedFields.filter(f => !selectKeys.includes(f)).map(key => this.addColumn(key, true) ) - + this.allowedFields + .filter((f) => !selectKeys.includes(f)) + .map((key) => this.addColumn(key, true)); } else { - await Promise.all(selectKeys.map(async key => { - const val: any = userSelect[key as keyof typeof userSelect], - throwErr = (extraErr = "") => { - console.trace(extraErr) - throw "Unexpected select -> " + JSON.stringify({ [key]: val }) + "\n" + extraErr; - }; - - /* Included fields */ - if([1, true].includes(val)){ - if(key === "*"){ - this.allowedFields.map(key => this.addColumn(key, true) ) - } else { - this.addColumn(key, true); - } - - /* Aggs and functions */ - } else if(typeof val === "string" || isObject(val)) { - - /* Function shorthand notation + await Promise.all( + selectKeys.map(async (key) => { + const val: any = userSelect[key as keyof typeof userSelect], + throwErr = (extraErr = "") => { + console.trace(extraErr); + throw ( + "Unexpected select -> " + + JSON.stringify({ [key]: val }) + + "\n" + + extraErr + ); + }; + + /* Included fields */ + if ([1, true].includes(val)) { + if (key === "*") { + this.allowedFields.map((key) => this.addColumn(key, true)); + } else { + this.addColumn(key, true); + } + + /* Aggs and functions */ + } else if (typeof val === "string" || isObject(val)) { + /* Function shorthand notation { id: "$max" } === { id: { $max: ["id"] } } === SELECT MAX(id) AS id - */ - if( - (typeof val === "string" && val !== "*") || - isPlainObject(val) && Object.keys(val).length === 1 && Array.isArray(Object.values(val)[0]) - ){ - - let funcName: string | undefined, args: any[] | undefined; - if(typeof val === "string") { - /* Shorthand notation -> it is expected that the key is the column name used as the only argument */ - try { - this.checkField(key, true) - } catch (err){ - throwErr(` Shorthand function notation error: the specifield column ( ${key} ) is invalid or dissallowed. 
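The shorthand branch handled above treats the key as the column name and the string value as the function, as the source comment notes; the full notation spells both out:

```typescript
// Shorthand: { column: "$funcName" }
await db.users.find({}, { select: { id: "$max" } });
// ...is equivalent to the full notation { alias: { $funcName: [column, ...args] } }:
await db.users.find({}, { select: { id: { $max: ["id"] } } });
```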
\n Use correct column name or full aliased function notation, e.g.: -> { alias: { $func_name: ["column_name"] } } `) + */ + if ( + (typeof val === "string" && val !== "*") || + (isPlainObject(val) && + Object.keys(val).length === 1 && + Array.isArray(Object.values(val)[0])) + ) { + let funcName: string | undefined, args: any[] | undefined; + if (typeof val === "string") { + /* Shorthand notation -> it is expected that the key is the column name used as the only argument */ + try { + this.checkField(key, true); + } catch (err) { + throwErr( + ` Shorthand function notation error: the specifield column ( ${key} ) is invalid or dissallowed. \n Use correct column name or full aliased function notation, e.g.: -> { alias: { $func_name: ["column_name"] } } `, + ); + } + funcName = val; + args = [key]; + + /** Function full notation { $funcName: ["colName", ...args] } */ + } else { + ({ funcName, args } = parseFunctionObject(val)); } - funcName = val; - args = [key]; - /** Function full notation { $funcName: ["colName", ...args] } */ + this.addFunction(funcName, args, key); + + /* Join */ } else { - ({ funcName, args } = parseFunctionObject(val)); - } - - this.addFunction(funcName, args, key); - - /* Join */ - } else { - - if(!joinParse) { - throw "Joins dissalowed"; + if (!joinParse) { + throw "Joins dissalowed"; + } + await joinParse(key, val as JoinSelect, throwErr); } - await joinParse(key, val as JoinSelect, throwErr); - - } - - } else throwErr(); - - })); + } else throwErr(); + }), + ); } } else throw "Unexpected select -> " + JSON.stringify(userSelect); - - } - + }; } diff --git a/lib/DboBuilder/QueryBuilder/getJoinQuery.ts b/lib/DboBuilder/QueryBuilder/getJoinQuery.ts index b0c6819c..5e6034cd 100644 --- a/lib/DboBuilder/QueryBuilder/getJoinQuery.ts +++ b/lib/DboBuilder/QueryBuilder/getJoinQuery.ts @@ -1,7 +1,16 @@ import { isDefined, asName } from "prostgles-types"; import { ParsedJoinPath, parseJoinPath } from "../ViewHandler/parseJoinPath"; -import { NewQuery, NewQueryJoin, SelectItem, asNameAlias } from "./QueryBuilder"; -import { ROOT_TABLE_ALIAS, ROOT_TABLE_ROW_NUM_ID, indentLines } from "./getSelectQuery"; +import { + NewQuery, + NewQueryJoin, + SelectItem, + asNameAlias, +} from "./QueryBuilder"; +import { + ROOT_TABLE_ALIAS, + ROOT_TABLE_ROW_NUM_ID, + indentLines, +} from "./getSelectQuery"; import { ViewHandler } from "../ViewHandler/ViewHandler"; import { getJoinOnCondition } from "../ViewHandler/getTableJoinQuery"; import { prepareOrderByQuery } from "../DboBuilder"; @@ -10,7 +19,7 @@ type Args = { q1: NewQuery; q2: NewQueryJoin; selectParamsGroupBy: boolean; -} +}; /** * Rename all join columns to prevent name clash @@ -20,8 +29,8 @@ export const getJoinCol = (colName: string) => { return { alias, rootSelect: `${asName(colName)} AS ${alias}`, - } -} + }; +}; export const JSON_AGG_FIELD_NAME = "prostgles_json_agg_result_field"; /** @@ -29,25 +38,29 @@ export const JSON_AGG_FIELD_NAME = "prostgles_json_agg_result_field"; */ export const NESTED_ROWID_FIELD_NAME = "prostgles_rowid_field"; -const getJoinTable = (tableName: string, pathIndex: number, isLastTableAlias: string | undefined) => { - const rawAlias = isLastTableAlias ?? `p${pathIndex} ${tableName}`; +const getJoinTable = ( + tableName: string, + pathIndex: number, + isLastTableAlias: string | undefined, +) => { + const rawAlias = isLastTableAlias ?? 
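/* Alias scheme sketch: intermediate hop tables get a path-indexed alias so a
   table joined through twice cannot collide, while the last hop keeps the
   caller-facing alias. With hypothetical tables:
     getJoinTable("user_groups", 0, undefined) -> rawAlias "p0 user_groups"
     getJoinTable("users", 1, "users")         -> rawAlias "users"        */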
`p${pathIndex} ${tableName}`; return { // name: asName(tableName), /** table names are already escaped */ name: tableName, alias: asName(rawAlias), rawAlias, - } -} + }; +}; type GetJoinQueryResult = { resultAlias: string; - // queryLines: string[]; + // queryLines: string[]; firstJoinTableJoinFields: string[]; isOrJoin: boolean; type: "cte"; joinLines: string[]; cteLines: string[]; -} +}; /** Returns join query. All inner join tables will be prefixed with path index unless it's the final target table which is aliased using the q2 tableAlias @@ -60,81 +73,109 @@ type GetJoinQueryResult = { ) target_table ON ...condition */ -export const getJoinQuery = (viewHandler: ViewHandler, { q1, q2 }: Args): GetJoinQueryResult => { - const paths = parseJoinPath({ - rootTable: q1.table, - rawPath: q2.joinPath, - viewHandler: viewHandler, - allowMultiOrJoin: true, - addShortestJoinIfMissing: true, +export const getJoinQuery = ( + viewHandler: ViewHandler, + { q1, q2 }: Args, +): GetJoinQueryResult => { + const paths = parseJoinPath({ + rootTable: q1.table, + rawPath: q2.joinPath, + viewHandler: viewHandler, + allowMultiOrJoin: true, + addShortestJoinIfMissing: true, }); const targetTableAliasRaw = q2.tableAlias || q2.table; const targetTableAlias = asName(targetTableAliasRaw); - + const firstJoinTablePath = paths[0]!; - const firstJoinTableJoinFields = firstJoinTablePath.on.flatMap(condObj => Object.entries(condObj).map(([source, target]) => target)); - const { rootSelectItems, jsonAggLimit } = getNestedSelectFields({ - q: q2, - firstJoinTableAlias: getJoinTable(firstJoinTablePath.table, 0, paths.length === 1? targetTableAliasRaw : undefined).rawAlias, - _joinFields: firstJoinTableJoinFields + const firstJoinTableJoinFields = firstJoinTablePath.on.flatMap((condObj) => + Object.entries(condObj).map(([source, target]) => target), + ); + const { rootSelectItems, jsonAggLimit } = getNestedSelectFields({ + q: q2, + firstJoinTableAlias: getJoinTable( + firstJoinTablePath.table, + 0, + paths.length === 1 ? targetTableAliasRaw : undefined, + ).rawAlias, + _joinFields: firstJoinTableJoinFields, }); - const joinType = q2.isLeftJoin? "LEFT" : "INNER"; + const joinType = q2.isLeftJoin ? 
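/* $leftJoin keeps root rows that have no nested matches (their nested result
   becomes '[]' via the COALESCE in getSelectQuery); $innerJoin drops them. */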
"LEFT" : "INNER"; const isOrJoin = firstJoinTablePath.on.length > 1; - const joinCondition = getJoinOnCondition({ - on: firstJoinTablePath.on, - leftAlias: asName(q1.tableAlias || q1.table), - rightAlias: targetTableAlias, - getRightColName: (col) => getJoinCol(col).alias + const joinCondition = getJoinOnCondition({ + on: firstJoinTablePath.on, + leftAlias: asName(q1.tableAlias || q1.table), + rightAlias: targetTableAlias, + getRightColName: (col) => getJoinCol(col).alias, }); - const joinFields = rootSelectItems.filter(s => s.isJoinCol).map(s => s.alias); - const selectedFields = rootSelectItems.filter(s => s.selected).map(s => asNameAlias(s.alias, targetTableAliasRaw)); - const rootNestedSort = q1.orderByItems.filter(d => d.nested?.joinAlias === q2.joinAlias); - const jsonAggSort = prepareOrderByQuery(q2.orderByItems, targetTableAliasRaw).join(", "); + const joinFields = rootSelectItems + .filter((s) => s.isJoinCol) + .map((s) => s.alias); + const selectedFields = rootSelectItems + .filter((s) => s.selected) + .map((s) => asNameAlias(s.alias, targetTableAliasRaw)); + const rootNestedSort = q1.orderByItems.filter( + (d) => d.nested?.joinAlias === q2.joinAlias, + ); + const jsonAggSort = prepareOrderByQuery( + q2.orderByItems, + targetTableAliasRaw, + ).join(", "); const jsonAgg = `json_agg((SELECT x FROM (SELECT ${selectedFields}) as x )${jsonAggSort}) ${jsonAggLimit} as ${JSON_AGG_FIELD_NAME}`; - - const { innerQuery } = getInnerJoinQuery({ paths, q1, q2, rootSelectItems, targetTableAliasRaw }); - const requiredJoinFields = joinFields.map(field => getJoinCol(field).alias); + const { innerQuery } = getInnerJoinQuery({ + paths, + q1, + q2, + rootSelectItems, + targetTableAliasRaw, + }); + + const requiredJoinFields = joinFields.map((field) => getJoinCol(field).alias); /** * Used to prevent duplicates in case of OR filters */ const rootTableIdField = `${ROOT_TABLE_ALIAS}.${ROOT_TABLE_ROW_NUM_ID}`; const wrappingQuery = [ `SELECT `, - ...indentLines([ - ...(isOrJoin? [rootTableIdField]: requiredJoinFields), - jsonAgg, - ...rootNestedSort.map(d => d.nested!.wrapperQuerySortItem) - ], { appendCommas: true }), + ...indentLines( + [ + ...(isOrJoin ? [rootTableIdField] : requiredJoinFields), + jsonAgg, + ...rootNestedSort.map((d) => d.nested!.wrapperQuerySortItem), + ], + { appendCommas: true }, + ), `FROM (`, ...indentLines(innerQuery), `) ${targetTableAlias}`, - ...(isOrJoin? [ - `LEFT JOIN ${q1.table} ${ROOT_TABLE_ALIAS}`, - `ON ${joinCondition}` - ] : []), - `GROUP BY ${isOrJoin? rootTableIdField : requiredJoinFields}`, - ]; - + ...(isOrJoin + ? [`LEFT JOIN ${q1.table} ${ROOT_TABLE_ALIAS}`, `ON ${joinCondition}`] + : []), + `GROUP BY ${isOrJoin ? rootTableIdField : requiredJoinFields}`, + ]; + /** * This is done to prevent join cte names clashing with actual table names */ - const targetTableAliasTempRename = asName(`${targetTableAlias}_prostgles_join_temp_rename`) + const targetTableAliasTempRename = asName( + `${targetTableAlias}_prostgles_join_temp_rename`, + ); const cteLines = [ `${targetTableAliasTempRename} AS (`, ...indentLines(wrappingQuery), - `)` + `)`, ]; const joinLines = [ `${joinType} JOIN ( SELECT * FROM ${targetTableAliasTempRename} ) as ${targetTableAlias}`, - isOrJoin? - `ON ${targetTableAlias}.${ROOT_TABLE_ROW_NUM_ID} = ${rootTableIdField}` : - `ON ${joinCondition}` + isOrJoin + ? 
`ON ${targetTableAlias}.${ROOT_TABLE_ROW_NUM_ID} = ${rootTableIdField}` + : `ON ${joinCondition}`, ]; return { @@ -145,119 +186,153 @@ export const getJoinQuery = (viewHandler: ViewHandler, { q1, q2 }: Args): GetJoi cteLines, isOrJoin, firstJoinTableJoinFields, - } -} - + }; +}; /** - * prepares the + * prepares the */ -const getInnerJoinQuery = ({ paths, q1, q2, targetTableAliasRaw, rootSelectItems }: { +const getInnerJoinQuery = ({ + paths, + q1, + q2, + targetTableAliasRaw, + rootSelectItems, +}: { paths: ParsedJoinPath[]; q1: NewQuery; q2: NewQueryJoin; targetTableAliasRaw: string; rootSelectItems: SelectItemNested[]; }) => { - const innerQuery = paths.flatMap((path, i) => { - const isLast = i === paths.length - 1; const targetQueryExtraQueries: string[] = []; - const prevTable = getJoinTable(!i? (q1.tableAlias? asName(q1.tableAlias) : q1.table) : paths[i-1]!.table, i-1, undefined); - - const table = getJoinTable(path.table, i, isLast? targetTableAliasRaw : undefined); + const prevTable = getJoinTable( + !i + ? q1.tableAlias + ? asName(q1.tableAlias) + : q1.table + : paths[i - 1]!.table, + i - 1, + undefined, + ); - if(isLast){ - if(q2.where){ + const table = getJoinTable( + path.table, + i, + isLast ? targetTableAliasRaw : undefined, + ); + + if (isLast) { + if (q2.where) { targetQueryExtraQueries.push(q2.where); } /* If aggs exist need to set groupBy add joinFields into select */ - const aggs = q2.select.filter(s => s.type === "aggregation") + const aggs = q2.select.filter((s) => s.type === "aggregation"); if (aggs.length) { - const groupByFields = rootSelectItems.map((c, i) => (c.isJoinCol || c.selected && c.type !== "aggregation")? `${i+1}` : undefined ).filter(isDefined); - if(groupByFields.length){ - targetQueryExtraQueries.push(`GROUP BY ${groupByFields}`) + const groupByFields = rootSelectItems + .map((c, i) => + c.isJoinCol || (c.selected && c.type !== "aggregation") + ? 
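/* Positional GROUP BY: every selected non-aggregated item is referenced by
   its 1-based index in the select list, producing e.g. "GROUP BY 1, 2". */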
`${i + 1}` + : undefined, + ) + .filter(isDefined); + if (groupByFields.length) { + targetQueryExtraQueries.push(`GROUP BY ${groupByFields}`); } - if(q2.having){ - targetQueryExtraQueries.push(`HAVING ${q2.having}`) + if (q2.having) { + targetQueryExtraQueries.push(`HAVING ${q2.having}`); } } } const isFirst = !i; - if(isFirst){ + if (isFirst) { return [ `SELECT `, - ` /* Join fields + select */`, - ...indentLines(rootSelectItems.map(s => s.query), { appendCommas: true }), + ` /* Join fields + select */`, + ...indentLines( + rootSelectItems.map((s) => s.query), + { appendCommas: true }, + ), `FROM ${table.name} ${table.alias}`, - ...targetQueryExtraQueries - ] + ...targetQueryExtraQueries, + ]; } return [ `INNER JOIN ${table.name} ${table.alias}`, - `ON ${getJoinOnCondition({ - on: path.on, - leftAlias: prevTable.alias, + `ON ${getJoinOnCondition({ + on: path.on, + leftAlias: prevTable.alias, rightAlias: table.alias, })}`, - ...targetQueryExtraQueries - ] + ...targetQueryExtraQueries, + ]; }); - return { innerQuery } -} - + return { innerQuery }; +}; type GetSelectFieldsArgs = { q: NewQueryJoin; firstJoinTableAlias: string; _joinFields: string[]; -} +}; + +export type SelectItemNested = SelectItem & { + query: string; + isJoinCol: boolean; +}; +const getNestedSelectFields = ({ + q, + firstJoinTableAlias, + _joinFields, +}: GetSelectFieldsArgs) => { + const targetTableAlias = q.tableAlias || q.table; -export type SelectItemNested = SelectItem & { query: string; isJoinCol: boolean; }; -const getNestedSelectFields = ({ q, firstJoinTableAlias, _joinFields }: GetSelectFieldsArgs) => { - const targetTableAlias = (q.tableAlias || q.table); - - const requiredJoinFields = Array.from(new Set(_joinFields)) - const selectedFields = q.select.filter(s => s.selected); - const rootSelectItems: SelectItemNested[] = selectedFields - .map(s => ({ + const requiredJoinFields = Array.from(new Set(_joinFields)); + const selectedFields = q.select.filter((s) => s.selected); + const rootSelectItems: SelectItemNested[] = selectedFields + .map((s) => ({ ...s, isJoinCol: false, - query: s.getQuery(targetTableAlias) + " AS " + asName(s.alias) + query: s.getQuery(targetTableAlias) + " AS " + asName(s.alias), })) - .concat(requiredJoinFields.map(f => ({ - type: "column", - columnName: f, - alias: f, - getFields: () => [f], - getQuery: (tableAlias) => asNameAlias(f, tableAlias), - selected: false, - isJoinCol: true, - query: `${asName(firstJoinTableAlias)}.${getJoinCol(f).rootSelect}`, - }))); - - const getQuery = (tableAlias?: string) => { - const partitionBy = `PARTITION BY ${requiredJoinFields.map(f => asNameAlias(f, tableAlias))}`; - return `ROW_NUMBER() OVER(${partitionBy}) AS ${NESTED_ROWID_FIELD_NAME}` - }; - rootSelectItems.push({ - type: "computed", - selected: false, - alias: NESTED_ROWID_FIELD_NAME, - getFields: () => [], - getQuery, - query: getQuery(firstJoinTableAlias), - isJoinCol: false, - }) - - return { - rootSelectItems, - jsonAggLimit: q.limit? 
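/* Per-parent limit sketch: ROW_NUMBER() is partitioned by the join fields,
   so this FILTER keeps at most q.limit nested rows for each root row,
   not q.limit rows overall. */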
`FILTER (WHERE ${NESTED_ROWID_FIELD_NAME} <= ${q.limit})` : "" + .concat( + requiredJoinFields.map((f) => ({ + type: "column", + columnName: f, + alias: f, + getFields: () => [f], + getQuery: (tableAlias) => asNameAlias(f, tableAlias), + selected: false, + isJoinCol: true, + query: `${asName(firstJoinTableAlias)}.${getJoinCol(f).rootSelect}`, + })), + ); + + const getQuery = (tableAlias?: string) => { + const partitionBy = `PARTITION BY ${requiredJoinFields.map((f) => asNameAlias(f, tableAlias))}`; + return `ROW_NUMBER() OVER(${partitionBy}) AS ${NESTED_ROWID_FIELD_NAME}`; + }; + rootSelectItems.push({ + type: "computed", + selected: false, + alias: NESTED_ROWID_FIELD_NAME, + getFields: () => [], + getQuery, + query: getQuery(firstJoinTableAlias), + isJoinCol: false, + }); + + return { + rootSelectItems, + jsonAggLimit: q.limit + ? `FILTER (WHERE ${NESTED_ROWID_FIELD_NAME} <= ${q.limit})` + : "", }; -} \ No newline at end of file +}; diff --git a/lib/DboBuilder/QueryBuilder/getNewQuery.ts b/lib/DboBuilder/QueryBuilder/getNewQuery.ts index c451dc71..b7ff90fb 100644 --- a/lib/DboBuilder/QueryBuilder/getNewQuery.ts +++ b/lib/DboBuilder/QueryBuilder/getNewQuery.ts @@ -1,4 +1,14 @@ -import { DetailedJoinSelect, JoinPath, JoinSelect, RawJoinPath, SelectParams, SimpleJoinSelect, getKeys, isEmpty, omitKeys } from "prostgles-types"; +import { + DetailedJoinSelect, + JoinPath, + JoinSelect, + RawJoinPath, + SelectParams, + SimpleJoinSelect, + getKeys, + isEmpty, + omitKeys, +} from "prostgles-types"; import { TableRule } from "../../PublishParser/PublishParser"; import { Filter, LocalParams, ValidatedTableRules } from "../DboBuilder"; import { ViewHandler } from "../ViewHandler/ViewHandler"; @@ -9,207 +19,242 @@ import { COMPUTED_FIELDS, FUNCTIONS } from "./Functions"; import { NewQuery, NewQueryJoin, SelectItemBuilder } from "./QueryBuilder"; const JOIN_KEYS = ["$innerJoin", "$leftJoin"] as const; -const JOIN_PARAM_KEYS = getKeys({ - $condition: 1, - filter: 1, - having: 1, - limit: 1, - offset: 1, - orderBy: 1, - select: 1 -} satisfies Record, 1>); +const JOIN_PARAM_KEYS = getKeys({ + $condition: 1, + filter: 1, + having: 1, + limit: 1, + offset: 1, + orderBy: 1, + select: 1, +} satisfies Record< + keyof Omit, + 1 +>); -type ParsedJoin = -| { type: "detailed"; params: DetailedJoinSelect & { table: DetailedJoinSelect["$leftJoin"]; path: RawJoinPath } } -| { type: "simple"; params: SimpleJoinSelect; } -| { type?: undefined; error: string; }; +type ParsedJoin = + | { + type: "detailed"; + params: DetailedJoinSelect & { + table: DetailedJoinSelect["$leftJoin"]; + path: RawJoinPath; + }; + } + | { type: "simple"; params: SimpleJoinSelect } + | { type?: undefined; error: string }; const parseJoinSelect = (joinParams: string | JoinSelect): ParsedJoin => { - if(!joinParams){ + if (!joinParams) { return { - error: "Empty join params" - } + error: "Empty join params", + }; } - if(typeof joinParams === "string"){ - if(joinParams !== "*"){ - throw "Join select can be * or { field: 1 }" + if (typeof joinParams === "string") { + if (joinParams !== "*") { + throw "Join select can be * or { field: 1 }"; } return { type: "simple", - params: joinParams - } + params: joinParams, + }; } - const [joinKey, ...otherKeys] = getKeys(joinParams).filter(k => JOIN_KEYS.includes(k as any)); - if(otherKeys.length) { + const [joinKey, ...otherKeys] = getKeys(joinParams).filter((k) => + JOIN_KEYS.includes(k as any), + ); + if (otherKeys.length) { return { - error: "Cannot specify more than one join type ( $innerJoin OR $leftJoin 
)" - } - } else if(joinKey) { - + error: + "Cannot specify more than one join type ( $innerJoin OR $leftJoin )", + }; + } else if (joinKey) { /* Full option join { field_name: db.innerJoin.table_name(filter, select) } */ - const invalidParams = Object.keys(joinParams).filter(k => ![ ...JOIN_PARAM_KEYS, ...JOIN_KEYS ].includes(k as any)); - if(invalidParams.length) { + const invalidParams = Object.keys(joinParams).filter( + (k) => ![...JOIN_PARAM_KEYS, ...JOIN_KEYS].includes(k as any), + ); + if (invalidParams.length) { throw "Invalid join params: " + invalidParams.join(", "); } const path = joinParams[joinKey] as string | JoinPath[]; - if(Array.isArray(path) && !path.length){ - throw `Cannot have an empty join path/tableName ${joinKey}` + if (Array.isArray(path) && !path.length) { + throw `Cannot have an empty join path/tableName ${joinKey}`; } - return { + return { type: "detailed", params: { ...(joinParams as DetailedJoinSelect), path, - table: typeof path === "string"? path : path.at(-1)!.table, + table: typeof path === "string" ? path : path.at(-1)!.table, }, }; } return { type: "simple", - params: joinParams as SimpleJoinSelect - } -} + params: joinParams as SimpleJoinSelect, + }; +}; export async function getNewQuery( _this: ViewHandler, - filter: Filter, - selectParams: (SelectParams & { alias?: string }) = {}, - param3_unused = null, - tableRules: TableRule | undefined, + filter: Filter, + selectParams: SelectParams & { alias?: string } = {}, + param3_unused = null, + tableRules: TableRule | undefined, localParams: LocalParams | undefined, ): Promise { const { columns } = _this; - if(localParams?.isRemoteRequest && !tableRules?.select?.fields){ + if (localParams?.isRemoteRequest && !tableRules?.select?.fields) { throw `INTERNAL ERROR: publish.${_this.name}.select.fields rule missing`; } - const allowedOrderByFields = !tableRules? _this.column_names.slice(0) : _this.parseFieldFilter(tableRules?.select?.orderByFields ?? tableRules?.select?.fields); - const allowedSelectFields = !tableRules? _this.column_names.slice(0) : _this.parseFieldFilter(tableRules?.select?.fields); + const allowedOrderByFields = !tableRules + ? _this.column_names.slice(0) + : _this.parseFieldFilter( + tableRules?.select?.orderByFields ?? tableRules?.select?.fields, + ); + const allowedSelectFields = !tableRules + ? 
_this.column_names.slice(0) + : _this.parseFieldFilter(tableRules?.select?.fields); const joinQueries: NewQueryJoin[] = []; const { select: userSelect = "*" } = selectParams, - sBuilder = new SelectItemBuilder({ - allowedFields: allowedSelectFields, + sBuilder = new SelectItemBuilder({ + allowedFields: allowedSelectFields, allowedOrderByFields, - computedFields: COMPUTED_FIELDS, - isView: _this.is_view, - functions: FUNCTIONS, - allFields: _this.column_names.slice(0), - columns + computedFields: COMPUTED_FIELDS, + isView: _this.is_view, + functions: FUNCTIONS, + allFields: _this.column_names.slice(0), + columns, }); - - await sBuilder.parseUserSelect(userSelect, async (fTable, _joinParams, throwErr) => { - - const j_selectParams: SelectParams = {}; - let j_filter: Filter = {}, + await sBuilder.parseUserSelect( + userSelect, + async (fTable, _joinParams, throwErr) => { + const j_selectParams: SelectParams = {}; + let j_filter: Filter = {}, j_isLeftJoin = true, j_tableRules: TableRule | undefined; - const j_alias = fTable; + const j_alias = fTable; - const parsedJoin = parseJoinSelect(_joinParams); + const parsedJoin = parseJoinSelect(_joinParams); - if(!parsedJoin.type){ - throwErr(parsedJoin.error); - return; - } - const j_path = parseJoinPath({ - rawPath: parsedJoin.type === "simple"? fTable : parsedJoin.params.path, - rootTable: _this.name, - viewHandler: _this, - allowMultiOrJoin: true, - addShortestJoinIfMissing: true, - }); + if (!parsedJoin.type) { + throwErr(parsedJoin.error); + return; + } + const j_path = parseJoinPath({ + rawPath: parsedJoin.type === "simple" ? fTable : parsedJoin.params.path, + rootTable: _this.name, + viewHandler: _this, + allowMultiOrJoin: true, + addShortestJoinIfMissing: true, + }); - if(parsedJoin.params === "*"){ + if (parsedJoin.params === "*") { j_selectParams.select = "*"; - } else if(parsedJoin.type === "detailed") { - const joinParams = parsedJoin.params; + } else if (parsedJoin.type === "detailed") { + const joinParams = parsedJoin.params; - j_isLeftJoin = !!joinParams.$leftJoin; - - j_selectParams.select = joinParams.select || "*"; - j_filter = joinParams.filter || {}; - j_selectParams.limit = joinParams.limit; - j_selectParams.offset = joinParams.offset; - j_selectParams.orderBy = joinParams.orderBy; - j_selectParams.having = joinParams.having; - } else { - j_selectParams.select = parsedJoin.params; - } - - const jTable = parsedJoin.type === "simple"? fTable : (typeof j_path === "string"? j_path : j_path?.at(-1)?.table); - if(!jTable) { - throw "jTable missing"; - } - const _thisJoinedTable: any = _this.dboBuilder.dbo[jTable]; - if(!_thisJoinedTable) { - throw `Joined table ${JSON.stringify(jTable)} is disallowed or inexistent \nOr you've forgot to put the function arguments into an array`; - } + j_isLeftJoin = !!joinParams.$leftJoin; - let isLocal = true; - if(localParams && (localParams.socket || localParams.httpReq)){ - isLocal = false; - j_tableRules = await _this.dboBuilder.publishParser?.getValidatedRequestRuleWusr({ tableName: jTable, command: "find", localParams }); - } - - const isAllowedAccessToTable = isLocal || j_tableRules; - if(isAllowedAccessToTable){ + j_selectParams.select = joinParams.select || "*"; + j_filter = joinParams.filter || {}; + j_selectParams.limit = joinParams.limit; + j_selectParams.offset = joinParams.offset; + j_selectParams.orderBy = joinParams.orderBy; + j_selectParams.having = joinParams.having; + } else { + j_selectParams.select = parsedJoin.params; + } + + const jTable = + parsedJoin.type === "simple" + ? 
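/* Shapes accepted for a nested select (names hypothetical):
     { messages: "*" }                                        // simple
     { messages: { $leftJoin: "messages", select: "*",
                   filter: {}, limit: 10, orderBy: {} } }     // detailed
   In the simple form the key itself is resolved as the join path. */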
fTable + : typeof j_path === "string" + ? j_path + : j_path?.at(-1)?.table; + if (!jTable) { + throw "jTable missing"; + } + const _thisJoinedTable: any = _this.dboBuilder.dbo[jTable]; + if (!_thisJoinedTable) { + throw `Joined table ${JSON.stringify(jTable)} is disallowed or inexistent \nOr you've forgot to put the function arguments into an array`; + } - const joinQuery: NewQuery = await getNewQuery( + let isLocal = true; + if (localParams && (localParams.socket || localParams.httpReq)) { + isLocal = false; + j_tableRules = + await _this.dboBuilder.publishParser?.getValidatedRequestRuleWusr({ + tableName: jTable, + command: "find", + localParams, + }); + } + + const isAllowedAccessToTable = isLocal || j_tableRules; + if (isAllowedAccessToTable) { + const joinQuery: NewQuery = await getNewQuery( _thisJoinedTable, - j_filter, - { ...j_selectParams, alias: j_alias }, - param3_unused, - j_tableRules, + j_filter, + { ...j_selectParams, alias: j_alias }, + param3_unused, + j_tableRules, localParams, ); - joinQuery.isLeftJoin = j_isLeftJoin; - joinQuery.tableAlias = j_alias; - joinQueries.push({ - ...joinQuery, - joinPath: j_path, - joinAlias: joinQuery.tableAlias ?? joinQuery.table, - }); - } - }) + joinQuery.isLeftJoin = j_isLeftJoin; + joinQuery.tableAlias = j_alias; + joinQueries.push({ + ...joinQuery, + joinPath: j_path, + joinAlias: joinQuery.tableAlias ?? joinQuery.table, + }); + } + }, + ); /** * Is this still needed?!!! * Add non selected columns * This ensures all fields are available for orderBy in case of nested select * */ - Array.from(new Set([...allowedSelectFields, ...allowedOrderByFields])).map(key => { - if(!sBuilder.select.find(s => s.alias === key && s.type === "column")){ - sBuilder.addColumn(key, false); - } - }); + Array.from(new Set([...allowedSelectFields, ...allowedOrderByFields])).map( + (key) => { + if ( + !sBuilder.select.find((s) => s.alias === key && s.type === "column") + ) { + sBuilder.addColumn(key, false); + } + }, + ); const select = sBuilder.select; - + const tableAlias = selectParams.alias; const commonWhereParams: PrepareWhereParams = { - filter, - select, - forcedFilter: tableRules?.select?.forcedFilter, - filterFields: tableRules?.select?.filterFields, - tableAlias, + filter, + select, + forcedFilter: tableRules?.select?.forcedFilter, + filterFields: tableRules?.select?.filterFields, + tableAlias, localParams, tableRule: tableRules, isHaving: false, - } + }; const filterOpts = await _this.prepareWhere({ ...commonWhereParams, isHaving: false, }); - const havingOpts = !isEmpty(selectParams.having)? await _this.prepareWhere({ - ...omitKeys(commonWhereParams, ["forcedFilter"]), - filter: selectParams.having, - isHaving: true, - }) : undefined; + const havingOpts = !isEmpty(selectParams.having) + ? 
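/* HAVING re-uses the WHERE builder minus forcedFilter: forced filters are
   applied to raw rows in the WHERE clause above, not to grouped results. */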
await _this.prepareWhere({ + ...omitKeys(commonWhereParams, ["forcedFilter"]), + filter: selectParams.having, + isHaving: true, + }) + : undefined; const where = filterOpts.where; const validatedRules = _this.getValidatedRules(tableRules, localParams); @@ -224,16 +269,19 @@ export async function getNewQuery( isLeftJoin: false, limit: prepareLimitQuery(selectParams.limit, validatedRules), orderByItems: prepareSortItems( - selectParams.orderBy, - allowedOrderByFields, - selectParams.alias, - select, - joinQueries + selectParams.orderBy, + allowedOrderByFields, + selectParams.alias, + select, + joinQueries, ), - offset: prepareOffsetQuery(selectParams.offset) + offset: prepareOffsetQuery(selectParams.offset), }; - if(resQuery.select.some(s => s.type === "aggregation") && resQuery.joins?.length){ + if ( + resQuery.select.some((s) => s.type === "aggregation") && + resQuery.joins?.length + ) { throw new Error(`Root query aggregation AND nested joins not allowed`); } @@ -246,26 +294,39 @@ const prepareOffsetQuery = (offset?: number) => { } return 0; -} - -const prepareLimitQuery = (limit: number | null | undefined = null, p: ValidatedTableRules): number | null => { +}; +const prepareLimitQuery = ( + limit: number | null | undefined = null, + p: ValidatedTableRules, +): number | null => { if (limit !== undefined && limit !== null && !Number.isInteger(limit)) { throw "Unexpected LIMIT. Must be null or an integer"; } let _limit = limit; /* If no limit then set as the lesser of (100, maxLimit) */ - if (_limit !== null && !Number.isInteger(_limit) && p.select.maxLimit !== null) { - _limit = [100, p.select.maxLimit].filter(Number.isInteger).sort((a, b) => a - b)[0]!; + if ( + _limit !== null && + !Number.isInteger(_limit) && + p.select.maxLimit !== null + ) { + _limit = [100, p.select.maxLimit] + .filter(Number.isInteger) + .sort((a, b) => a - b)[0]!; } else { - /* If a limit higher than maxLimit specified throw error */ - if (Number.isInteger(p.select.maxLimit) && _limit !== null && _limit > p.select.maxLimit!) { - throw `Unexpected LIMIT ${_limit}. Must be less than the published maxLimit: ` + p.select.maxLimit; + if ( + Number.isInteger(p.select.maxLimit) && + _limit !== null && + _limit > p.select.maxLimit! + ) { + throw ( + `Unexpected LIMIT ${_limit}. 
Must be less than the published maxLimit: ` + + p.select.maxLimit + ); } } - return _limit; -} \ No newline at end of file +}; diff --git a/lib/DboBuilder/QueryBuilder/getSelectQuery.ts b/lib/DboBuilder/QueryBuilder/getSelectQuery.ts index 33a42742..18c26f6b 100644 --- a/lib/DboBuilder/QueryBuilder/getSelectQuery.ts +++ b/lib/DboBuilder/QueryBuilder/getSelectQuery.ts @@ -1,19 +1,18 @@ - import { prepareOrderByQuery } from "../DboBuilder"; import { isDefined, asName } from "prostgles-types"; import { NewQuery } from "./QueryBuilder"; import { ViewHandler } from "../ViewHandler/ViewHandler"; -import { getJoinQuery } from "./getJoinQuery"; +import { getJoinQuery } from "./getJoinQuery"; /** * Used to prevent single row nested results in case of OR join conditions */ export const ROOT_TABLE_ROW_NUM_ID = "prostgles_root_table_row_id" as const; -export const ROOT_TABLE_ALIAS = 'prostgles_root_table_alias' as const; +export const ROOT_TABLE_ALIAS = "prostgles_root_table_alias" as const; /** * Creating the text query from the NewQuery spec - * No validation/authorisation at this point + * No validation/authorisation at this point * */ export function getSelectQuery( viewHandler: ViewHandler, @@ -21,106 +20,125 @@ export function getSelectQuery( depth = 0, selectParamsGroupBy: boolean, ): string { - - const rootSelect = q.select.filter(s => s.selected).map(s => [s.getQuery(ROOT_TABLE_ALIAS), " AS ", asName(s.alias)].join("")); - - const parsedJoins = q.joins?.flatMap(q2 => { - const parsed = getJoinQuery( - viewHandler, - { - q1: { ...q, tableAlias: ROOT_TABLE_ALIAS }, - q2: { ...q2 }, - selectParamsGroupBy - } + const rootSelect = q.select + .filter((s) => s.selected) + .map((s) => + [s.getQuery(ROOT_TABLE_ALIAS), " AS ", asName(s.alias)].join(""), ); - return { - ...q2, - ...parsed - } - }) ?? []; + + const parsedJoins = + q.joins?.flatMap((q2) => { + const parsed = getJoinQuery(viewHandler, { + q1: { ...q, tableAlias: ROOT_TABLE_ALIAS }, + q2: { ...q2 }, + selectParamsGroupBy, + }); + return { + ...q2, + ...parsed, + }; + }) ?? []; const selectItems = rootSelect.concat( - parsedJoins?.map(join => { + parsedJoins?.map((join) => { const { joinAlias } = join; - return `COALESCE(${asName(joinAlias)}.${join.resultAlias}, '[]') as ${asName(joinAlias)}` - }) ?? []); + return `COALESCE(${asName(joinAlias)}.${join.resultAlias}, '[]') as ${asName(joinAlias)}`; + }) ?? [], + ); /** OR joins cannot be easily aggregated to one-many with the root table. Must group by root table id */ - const hasOrJoins = parsedJoins.some(j => j.isOrJoin) - - let joinCtes = !parsedJoins.length? [] : [ - ...parsedJoins.flatMap((j, i) => { - const needsComma = parsedJoins.length > 1 && i < parsedJoins.length -1; - return j.cteLines.concat(needsComma? [","] : []); - }) - ]; - - - if(hasOrJoins){ - const pkey = viewHandler.columns.find(c => c.is_pkey); + const hasOrJoins = parsedJoins.some((j) => j.isOrJoin); + + let joinCtes = !parsedJoins.length + ? [] + : [ + ...parsedJoins.flatMap((j, i) => { + const needsComma = + parsedJoins.length > 1 && i < parsedJoins.length - 1; + return j.cteLines.concat(needsComma ? [","] : []); + }), + ]; + + if (hasOrJoins) { + const pkey = viewHandler.columns.find((c) => c.is_pkey); joinCtes = [ `${q.table} AS (`, - ` SELECT *, ${pkey? asName(pkey.name): "ROW_NUMBER() OVER()"} as ${ROOT_TABLE_ROW_NUM_ID}`, + ` SELECT *, ${pkey ? 
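/* falls back to a synthesized ROW_NUMBER() when the table has no pkey */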
asName(pkey.name) : "ROW_NUMBER() OVER()"} as ${ROOT_TABLE_ROW_NUM_ID}`, ` FROM ${q.table}`, `),`, - ...joinCtes - ] + ...joinCtes, + ]; } - if(joinCtes.length){ - joinCtes.unshift(`WITH `) + if (joinCtes.length) { + joinCtes.unshift(`WITH `); } const query = [ ...joinCtes, - `SELECT` - ,...indentLines(selectItems, { appendCommas: true }) - , `FROM ( ` - , ` SELECT *` - , ` FROM ${q.table}` - , ...(q.where? [` ${q.where}`] : []) - , `) ${ROOT_TABLE_ALIAS}` - , ...parsedJoins.flatMap(j => j.joinLines) - , ...getRootGroupBy(q, selectParamsGroupBy) - , ...prepareOrderByQuery(q.orderByItems) - , ...(q.having ? [`HAVING ${q.having} `] : []) - , ...(depth || q.limit === null ? [] : [`LIMIT ${q.limit || 0}`]) - , ...(q.offset? [`OFFSET ${q.offset || 0}`] : []) + `SELECT`, + ...indentLines(selectItems, { appendCommas: true }), + `FROM ( `, + ` SELECT *`, + ` FROM ${q.table}`, + ...(q.where ? [` ${q.where}`] : []), + `) ${ROOT_TABLE_ALIAS}`, + ...parsedJoins.flatMap((j) => j.joinLines), + ...getRootGroupBy(q, selectParamsGroupBy), + ...prepareOrderByQuery(q.orderByItems), + ...(q.having ? [`HAVING ${q.having} `] : []), + ...(depth || q.limit === null ? [] : [`LIMIT ${q.limit || 0}`]), + ...(q.offset ? [`OFFSET ${q.offset || 0}`] : []), ]; return indentLinesToString(query); } -const indentLine = (numberOfSpaces: number, str: string, indentStr = " "): string => new Array(numberOfSpaces).fill(indentStr).join("") + str; +const indentLine = ( + numberOfSpaces: number, + str: string, + indentStr = " ", +): string => new Array(numberOfSpaces).fill(indentStr).join("") + str; type IndentLinesOpts = { numberOfSpaces?: number; indentStr?: string; appendCommas?: boolean; -} -export const indentLines = (strArr: (string | undefined | null)[], { numberOfSpaces = 2, indentStr = " ", appendCommas = false }: IndentLinesOpts = {}): string[] => { - const nonEmptyLines = strArr - .filter(v => v); +}; +export const indentLines = ( + strArr: (string | undefined | null)[], + { + numberOfSpaces = 2, + indentStr = " ", + appendCommas = false, + }: IndentLinesOpts = {}, +): string[] => { + const nonEmptyLines = strArr.filter((v) => v); return nonEmptyLines.map((str, idx) => { - const res = indentLine(numberOfSpaces, str as string, indentStr); - if(appendCommas && idx < nonEmptyLines.length - 1){ - return `${res},`; - } - return res; - }); -} -const indentLinesToString = (strArr: (string | undefined | null)[], numberOfSpaces = 0, separator = " \n ", indentStr = " ") => indentLines(strArr, { numberOfSpaces, indentStr }).join(separator); -const getTableAlias = (q: NewQuery) => !q.tableAlias ? q.table : `${q.tableAlias || ""}_${q.table}`; + const res = indentLine(numberOfSpaces, str as string, indentStr); + if (appendCommas && idx < nonEmptyLines.length - 1) { + return `${res},`; + } + return res; + }); +}; +const indentLinesToString = ( + strArr: (string | undefined | null)[], + numberOfSpaces = 0, + separator = " \n ", + indentStr = " ", +) => indentLines(strArr, { numberOfSpaces, indentStr }).join(separator); +const getTableAlias = (q: NewQuery) => + !q.tableAlias ? 
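/* e.g. { table: "users", tableAlias: "author" } -> "author_users";
   without an alias the bare table name is returned. */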
q.table : `${q.tableAlias || ""}_${q.table}`; export const getTableAliasAsName = (q: NewQuery) => asName(getTableAlias(q)); - export const getRootGroupBy = (q: NewQuery, selectParamsGroupBy?: boolean) => { + const aggs = q.select.filter((s) => s.selected && s.type === "aggregation"); + const nonAggs = q.select.filter( + (s) => s.selected && s.type !== "aggregation", + ); - const aggs = q.select.filter(s => s.selected && s.type === "aggregation"); - const nonAggs = q.select.filter(s => s.selected && s.type !== "aggregation"); - if ((selectParamsGroupBy || aggs.length) && nonAggs.length) { - /** Add ORDER BY items not included in root select */ const orderByItems: string[] = []; // q.orderByItems.forEach(sortItem => { @@ -129,8 +147,16 @@ export const getRootGroupBy = (q: NewQuery, selectParamsGroupBy?: boolean) => { // } // }); - return [`GROUP BY ${q.select.map((s, i)=> s.selected && s.type !== "aggregation"? `${i+1}` : undefined).concat(orderByItems).filter(isDefined).join(", ")} `] + return [ + `GROUP BY ${q.select + .map((s, i) => + s.selected && s.type !== "aggregation" ? `${i + 1}` : undefined, + ) + .concat(orderByItems) + .filter(isDefined) + .join(", ")} `, + ]; } - return [] -} \ No newline at end of file + return []; +}; diff --git a/lib/DboBuilder/QueryBuilder/prepareHaving.ts b/lib/DboBuilder/QueryBuilder/prepareHaving.ts index 206a29b1..9a262820 100644 --- a/lib/DboBuilder/QueryBuilder/prepareHaving.ts +++ b/lib/DboBuilder/QueryBuilder/prepareHaving.ts @@ -8,10 +8,15 @@ type Args = { select: SelectItemValidated[]; tableAlias: string | undefined; filterFieldNames: string[]; -} -export const prepareHaving = ({ having, select, tableAlias, filterFieldNames }: Args) => { - if(!having || isEmpty(having)) return ""; - +}; +export const prepareHaving = ({ + having, + select, + tableAlias, + filterFieldNames, +}: Args) => { + if (!having || isEmpty(having)) return ""; + const havingStr = parseFilterItem({ filter: having, select, @@ -19,4 +24,4 @@ export const prepareHaving = ({ having, select, tableAlias, filterFieldNames }: allowedColumnNames: filterFieldNames, }); return havingStr; -} \ No newline at end of file +}; diff --git a/lib/DboBuilder/QueryStreamer.ts b/lib/DboBuilder/QueryStreamer.ts index 58a91cc2..63887b85 100644 --- a/lib/DboBuilder/QueryStreamer.ts +++ b/lib/DboBuilder/QueryStreamer.ts @@ -1,32 +1,42 @@ import * as pg from "pg"; -import CursorType from 'pg-cursor'; -import { CHANNELS, SQLOptions, SocketSQLStreamPacket, SocketSQLStreamServer, omitKeys, pickKeys } from "prostgles-types"; +import CursorType from "pg-cursor"; +import { + CHANNELS, + SQLOptions, + SocketSQLStreamPacket, + SocketSQLStreamServer, + omitKeys, + pickKeys, +} from "prostgles-types"; import { BasicCallback } from "../PubSubManager/PubSubManager"; import { VoidFunction } from "../SchemaWatch/SchemaWatch"; import { DB } from "../initProstgles"; import { DboBuilder } from "./DboBuilder"; import { PRGLIOSocket } from "./DboBuilderTypes"; -import { getErrorAsObject, getSerializedClientErrorFromPGError } from "./dboBuilderUtils"; +import { + getErrorAsObject, + getSerializedClientErrorFromPGError, +} from "./dboBuilderUtils"; import { getDetailedFieldInfo } from "./runSQL"; -const Cursor: typeof CursorType = require('pg-cursor'); +const Cursor: typeof CursorType = require("pg-cursor"); type ClientStreamedRequest = { socket: PRGLIOSocket; query: string; options: SQLOptions | undefined; persistConnection?: boolean; -} +}; type StreamedQuery = ClientStreamedRequest & { cursor: CursorType | undefined; 
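/* Lifecycle sketch: each streamed query owns a dedicated pg.Client plus a
   pg-cursor; batches are read with cursor.read(batchSize) and emitted to
   the socket as SocketSQLStreamPacket messages until exhausted. */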
client: pg.Client | undefined; stop?: VoidFunction; - onError: ((error: any) => void); -} -type Info = { - command: string; - fields: any[]; - rowCount: number; - duration: number; + onError: (error: any) => void; +}; +type Info = { + command: string; + fields: any[]; + rowCount: number; + duration: number; }; const shortSocketIds: Record = {}; @@ -49,41 +59,56 @@ export class QueryStreamer { const setAdminClient = () => { this.adminClient = this.getConnection(undefined, { keepAlive: true }); this.adminClient.connect(); - } - this.adminClient = this.getConnection((error) => { - if(error.message?.includes("database") && error.message?.includes("does not exist")) return; - console.log("Admin client error. Reconnecting...", error); - setAdminClient(); - }, { keepAlive: true }); + }; + this.adminClient = this.getConnection( + (error) => { + if ( + error.message?.includes("database") && + error.message?.includes("does not exist") + ) + return; + console.log("Admin client error. Reconnecting...", error); + setAdminClient(); + }, + { keepAlive: true }, + ); this.adminClient.connect(); } - getConnection = (onError: ((err: any) => void) | undefined, extraOptions?: pg.ClientConfig) => { - const connectionInfo = typeof this.db.$cn === "string"? { connectionString: this.db.$cn } : this.db.$cn as any; + getConnection = ( + onError: ((err: any) => void) | undefined, + extraOptions?: pg.ClientConfig, + ) => { + const connectionInfo = + typeof this.db.$cn === "string" + ? { connectionString: this.db.$cn } + : (this.db.$cn as any); const client = new pg.Client({ ...connectionInfo, ...extraOptions }); - client.on("error", (err) => { + client.on("error", (err) => { onError?.(err); }); return client; - } + }; onDisconnect = (socketId: string) => { const socketQueries = this.socketQueries[socketId]; - if(!socketQueries) return; + if (!socketQueries) return; Object.values(socketQueries).forEach(({ client, stop }) => { stop?.(); /** end does not stop active query?! 
*/ client?.end(); }); delete this.socketQueries[socketId]; - } + }; - create = async (query: ClientStreamedRequest): Promise => { + create = async ( + query: ClientStreamedRequest, + ): Promise => { const { socket, persistConnection } = query; const socketId = socket.id; const id = getSetShortSocketId(socketId); const channel = `${CHANNELS.SQL_STREAM}__${socketId}_${id}`; const unsubChannel = `${channel}.unsubscribe`; - if(this.socketQueries[id] && !persistConnection){ + if (this.socketQueries[id] && !persistConnection) { throw `Must stop existing query ${id} first`; } @@ -95,53 +120,90 @@ export class QueryStreamer { client: undefined, cursor: undefined, onError: (rawError: any) => { - if(errored) return; + if (errored) return; errored = true; - const errorWithoutQuery = getSerializedClientErrorFromPGError(rawError, { type: "sql", localParams: { socket } }); + const errorWithoutQuery = getSerializedClientErrorFromPGError( + rawError, + { type: "sql", localParams: { socket } }, + ); // For some reason query is not present on the error object from sql stream mode const error = { ...errorWithoutQuery, query: query.query }; - socket.emit(channel, { type: "error", error } satisfies SocketSQLStreamPacket); + socket.emit(channel, { + type: "error", + error, + } satisfies SocketSQLStreamPacket); }, }; this.socketQueries[socketId]![id] ??= socketQuery; let processID = -1; let streamState: "started" | "ended" | "errored" | undefined; - const startStream = async (client: pg.Client | undefined, query: ClientStreamedRequest) => { + const startStream = async ( + client: pg.Client | undefined, + query: ClientStreamedRequest, + ) => { const socketQuery = this.socketQueries[socketId]?.[id]; - if(!socketQuery){ + if (!socketQuery) { throw "socket query not found"; - } - + } + /** Only send fields on first request */ let fieldsWereSent = false; - const emit = ({ reachedEnd, rows, info }: { reachedEnd: true; rows: any[]; info: Info } | { reachedEnd: false; rows: any[]; info: Omit }) => { - if(!info?.fields) throw "No fields"; - const fields = getDetailedFieldInfo.bind(this.dboBuilder)(info.fields as any); - const packet: SocketSQLStreamPacket = { type: "data", rows, fields: fieldsWereSent? undefined : fields, info: reachedEnd? info : undefined, ended: reachedEnd, processId: processID }; + const emit = ({ + reachedEnd, + rows, + info, + }: + | { reachedEnd: true; rows: any[]; info: Info } + | { reachedEnd: false; rows: any[]; info: Omit }) => { + if (!info?.fields) throw "No fields"; + const fields = getDetailedFieldInfo.bind(this.dboBuilder)( + info.fields as any, + ); + const packet: SocketSQLStreamPacket = { + type: "data", + rows, + fields: fieldsWereSent ? undefined : fields, + info: reachedEnd ? info : undefined, + ended: reachedEnd, + processId: processID, + }; socket.emit(channel, packet); - if(reachedEnd){ - this.dboBuilder.prostgles.schemaWatch?.onSchemaChangeFallback?.({ command: info.command, query: query.query }); + if (reachedEnd) { + this.dboBuilder.prostgles.schemaWatch?.onSchemaChangeFallback?.({ + command: info.command, + query: query.query, + }); } fieldsWereSent = true; - } - const currentClient = client ?? this.getConnection(err => { - socketQuery.onError(err); - currentClient.end(); - }); + }; + const currentClient = + client ?? 
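/* A dedicated connection per stream keeps processID stable, so the exact
   backend can later be cancelled/terminated by pid; persisted-connection
   re-runs pass the previous client back in instead. */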
+ this.getConnection((err) => { + socketQuery.onError(err); + currentClient.end(); + }); this.socketQueries[socketId]![id]!.client = currentClient; try { - if(!client){ + if (!client) { await currentClient.connect(); } processID = (currentClient as any).processID; - if(query.options?.streamLimit && (!Number.isInteger(query.options.streamLimit) || query.options.streamLimit < 0)){ + if ( + query.options?.streamLimit && + (!Number.isInteger(query.options.streamLimit) || + query.options.streamLimit < 0) + ) { throw "streamLimit must be a positive integer"; } - const batchSize = query.options?.streamLimit? Math.min(1e3, query.options?.streamLimit) : 1e3; - const cursor = currentClient.query(new Cursor(query.query, undefined, { rowMode: "array" })); + const batchSize = query.options?.streamLimit + ? Math.min(1e3, query.options?.streamLimit) + : 1e3; + const cursor = currentClient.query( + new Cursor(query.query, undefined, { rowMode: "array" }), + ); this.socketQueries[socketId]![id]!.cursor = cursor; let streamLimitReached = false; let reachedEnd = false; @@ -151,100 +213,145 @@ export class QueryStreamer { let rowsSent = 0; do { rowChunk = await cursor.read(batchSize); - const info = pickKeys((cursor as any)._result, ["fields", "rowCount", "command", "duration"]) as Info; + const info = pickKeys((cursor as any)._result, [ + "fields", + "rowCount", + "command", + "duration", + ]) as Info; rowsSent += rowChunk.length; - streamLimitReached = Boolean(query.options?.streamLimit && rowsSent >= query.options.streamLimit); + streamLimitReached = Boolean( + query.options?.streamLimit && + rowsSent >= query.options.streamLimit, + ); reachedEnd = rowChunk.length < batchSize; - emit({ info, rows: rowChunk, reachedEnd: reachedEnd || streamLimitReached }); + emit({ + info, + rows: rowChunk, + reachedEnd: reachedEnd || streamLimitReached, + }); } while (!reachedEnd && !streamLimitReached); - - streamState = "ended"; - - if(!query.options?.persistStreamConnection){ + + streamState = "ended"; + + if (!query.options?.persistStreamConnection) { delete this.socketQueries[socketId]?.[id]; currentClient.end(); } cursor.close(); - } catch(error: any){ + } catch (error: any) { streamState = "errored"; - if(error.message === "cannot insert multiple commands into a prepared statement") { - this.dboBuilder.dbo.sql!(query.query, {}, { returnType: "arrayMode", hasParams: false }).then(res => { - emit({ info: omitKeys(res, ["rows"]), reachedEnd: true, rows: res.rows}); - }).catch(newError => { - socketQuery.onError(newError); - }); + if ( + error.message === + "cannot insert multiple commands into a prepared statement" + ) { + this.dboBuilder.dbo.sql!( + query.query, + {}, + { returnType: "arrayMode", hasParams: false }, + ) + .then((res) => { + emit({ + info: omitKeys(res, ["rows"]), + reachedEnd: true, + rows: res.rows, + }); + }) + .catch((newError) => { + socketQuery.onError(newError); + }); } else { socketQuery.onError(error); } - } - })() - } catch(err){ + } + })(); + } catch (err) { socketQuery.onError(err); await currentClient.end(); } - } + }; const cleanup = () => { socket.removeAllListeners(unsubChannel); socket.removeAllListeners(channel); delete this.socketQueries[socketId]?.[id]; - } - const stop = async (opts: { terminate?: boolean; } | undefined, cb: BasicCallback) => { + }; + const stop = async ( + opts: { terminate?: boolean } | undefined, + cb: BasicCallback, + ) => { const { client: queryClient } = this.socketQueries[socketId]?.[id] ?? 
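/* stop() first asks Postgres politely via pg_cancel_backend; with terminate
   it uses pg_terminate_backend and force-ends the client after 4 seconds. */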
{}; - if(!queryClient) return; - if(opts?.terminate){ + if (!queryClient) return; + if (opts?.terminate) { setTimeout(() => { queryClient.end(); }, 4e3); } try { - const stopFunction = opts?.terminate? "pg_terminate_backend" : "pg_cancel_backend"; - const rows = await this.adminClient.query(`SELECT ${stopFunction}(pid), pid, state, query FROM pg_stat_activity WHERE pid = $1`, [processID]); + const stopFunction = opts?.terminate + ? "pg_terminate_backend" + : "pg_cancel_backend"; + const rows = await this.adminClient.query( + `SELECT ${stopFunction}(pid), pid, state, query FROM pg_stat_activity WHERE pid = $1`, + [processID], + ); cleanup(); cb({ processID, info: rows.rows[0] }); - } catch (error){ + } catch (error) { cb(null, error); } - } - this.socketQueries[socketId]![id]!.stop = () => stop({ terminate: true }, () => { /* Empty */ }); + }; + this.socketQueries[socketId]![id]!.stop = () => + stop({ terminate: true }, () => { + /* Empty */ + }); socket.removeAllListeners(unsubChannel); socket.once(unsubChannel, stop); let runCount = 0; socket.removeAllListeners(channel); - socket.on(channel, async (_data: { query: string; params: any } | undefined, cb: BasicCallback) => { - if(streamState === "started"){ - return cb(processID, "Already started"); - } - streamState = "started"; - try { - /* Persisted connection query */ - if(runCount){ - const persistedClient = this.socketQueries[socketId]?.[id]; - if(!persistedClient) throw "Persisted query client not found"; - - await startStream(persistedClient.client, { ...query, query: _data!.query! }); - } else { - await startStream(undefined, query); + socket.on( + channel, + async ( + _data: { query: string; params: any } | undefined, + cb: BasicCallback, + ) => { + if (streamState === "started") { + return cb(processID, "Already started"); } - cb(processID); - } catch(err){ - console.error(err) - cb(processID, getErrorAsObject(err) ?? "Something went wrong"); - } - runCount++; - }); + streamState = "started"; + try { + /* Persisted connection query */ + if (runCount) { + const persistedClient = this.socketQueries[socketId]?.[id]; + if (!persistedClient) throw "Persisted query client not found"; + + await startStream(persistedClient.client, { + ...query, + query: _data!.query!, + }); + } else { + await startStream(undefined, query); + } + cb(processID); + } catch (err) { + console.error(err); + cb(processID, getErrorAsObject(err) ?? 
"Something went wrong"); + } + runCount++; + }, + ); /** If not started within 5 seconds then assume it will never happen */ setTimeout(() => { - if(streamState) return; + if (streamState) return; cleanup(); }, 5e3); return { channel, unsubChannel, - } - } -} \ No newline at end of file + }; + }; +} diff --git a/lib/DboBuilder/TableHandler/DataValidator.ts b/lib/DboBuilder/TableHandler/DataValidator.ts index a2598016..10af6f31 100644 --- a/lib/DboBuilder/TableHandler/DataValidator.ts +++ b/lib/DboBuilder/TableHandler/DataValidator.ts @@ -1,4 +1,15 @@ -import { AnyObject, ColumnInfo, FieldFilter, ValidatedColumnInfo, asName, getKeys, isEmpty, isObject, pickKeys, unpatchText } from "prostgles-types/dist"; +import { + AnyObject, + ColumnInfo, + FieldFilter, + ValidatedColumnInfo, + asName, + getKeys, + isEmpty, + isObject, + pickKeys, + unpatchText, +} from "prostgles-types/dist"; import { ValidateRowBasic } from "../../PublishParser/PublishParser"; import { DBHandlerServer } from "../../Prostgles"; import { asValue } from "../../PubSubManager/PubSubManager"; @@ -11,14 +22,14 @@ type RowFieldDataPlain = { type: "plain"; column: TableSchemaColumn; fieldValue: any; -} +}; type RowFieldDataFunction = { type: "function"; column: TableSchemaColumn; funcName: string; args: any[]; -} +}; type RowFieldData = RowFieldDataPlain | RowFieldDataFunction; type ParsedRowFieldData = { @@ -34,8 +45,8 @@ type ParseDataArgs = { validationOptions: { localParams: undefined | LocalParams; validate: undefined | ValidateRowBasic; - } -} + }; +}; export class DataValidator { rowFieldData?: RowFieldData[][]; @@ -47,67 +58,102 @@ export class DataValidator { parse = async (args: ParseDataArgs) => { const { command } = args; - const rowFieldData = await getValidatedRowFieldData(args, this.tableHandler); + const rowFieldData = await getValidatedRowFieldData( + args, + this.tableHandler, + ); const parsedRowFieldData = await getParsedRowFieldData(rowFieldData, args); if (command === "update") { - if (rowFieldData.some(rowParts => rowParts.length === 0)) { + if (rowFieldData.some((rowParts) => rowParts.length === 0)) { throw "Empty row. No data provided for update"; } } return { parsedRowFieldData, - getQuery: () => getQuery(command, parsedRowFieldData, this.tableHandler.escapedName), - } - } + getQuery: () => + getQuery(command, parsedRowFieldData, this.tableHandler.escapedName), + }; + }; } -const getQuery = (type: "insert" | "update", parsedRowFieldData: ParsedRowFieldData[][], escapedTableName: string): string => { +const getQuery = ( + type: "insert" | "update", + parsedRowFieldData: ParsedRowFieldData[][], + escapedTableName: string, +): string => { if (type === "insert") { - - const uniqueColumns = Array.from(new Set(parsedRowFieldData.flatMap(row => row.map(r => r.escapedCol)))) - const values = parsedRowFieldData.map(row => `(${uniqueColumns.map(colName => row.find(r => r.escapedCol === colName)?.escapedVal ?? 'DEFAULT')})`).join(",\n"); - const whatToInsert = !uniqueColumns.length ? "DEFAULT VALUES" : `(${uniqueColumns}) VALUES ${values}` + const uniqueColumns = Array.from( + new Set( + parsedRowFieldData.flatMap((row) => row.map((r) => r.escapedCol)), + ), + ); + const values = parsedRowFieldData + .map( + (row) => + `(${uniqueColumns.map((colName) => row.find((r) => r.escapedCol === colName)?.escapedVal ?? "DEFAULT")})`, + ) + .join(",\n"); + const whatToInsert = !uniqueColumns.length + ? 
"DEFAULT VALUES" + : `(${uniqueColumns}) VALUES ${values}`; return `INSERT INTO ${escapedTableName} ${whatToInsert} `; } else { - const query = parsedRowFieldData.map(rowParts => { - return `UPDATE ${escapedTableName} SET ` + rowParts.map(r => `${r.escapedCol} = ${r.escapedVal} `).join(",\n") - }).join(";\n") + " "; - + const query = + parsedRowFieldData + .map((rowParts) => { + return ( + `UPDATE ${escapedTableName} SET ` + + rowParts + .map((r) => `${r.escapedCol} = ${r.escapedVal} `) + .join(",\n") + ); + }) + .join(";\n") + " "; + return query; } -} +}; type PrepareFieldValuesArgs = { - row: AnyObject | undefined; - forcedData: AnyObject | undefined; + row: AnyObject | undefined; + forcedData: AnyObject | undefined; allowedCols: FieldFilter | undefined; removeDisallowedFields?: boolean; tableHandler: TableHandler; -} -/** -* Apply forcedData, remove disallowed columns, validate against allowed columns: -* @example ({ item_id: 1 }, { user_id: 32 }) => { item_id: 1, user_id: 32 } -* OR -* ({ a: 1 }, { b: 32 }, ["c", "d"]) => throw "a field is not allowed" -* @param {Object} obj - initial data -* @param {Object} forcedData - set/override property -* @param {string[]} allowed_cols - allowed columns (excluding forcedData) from table rules -*/ -const getValidatedRow = ({ row = {}, forcedData = {}, allowedCols, removeDisallowedFields = false, tableHandler }: PrepareFieldValuesArgs): AnyObject => { +}; +/** + * Apply forcedData, remove disallowed columns, validate against allowed columns: + * @example ({ item_id: 1 }, { user_id: 32 }) => { item_id: 1, user_id: 32 } + * OR + * ({ a: 1 }, { b: 32 }, ["c", "d"]) => throw "a field is not allowed" + * @param {Object} obj - initial data + * @param {Object} forcedData - set/override property + * @param {string[]} allowed_cols - allowed columns (excluding forcedData) from table rules + */ +const getValidatedRow = ({ + row = {}, + forcedData = {}, + allowedCols, + removeDisallowedFields = false, + tableHandler, +}: PrepareFieldValuesArgs): AnyObject => { const column_names = tableHandler.column_names.slice(0); if (!column_names.length) { throw "table column_names mising"; } - const validatedAllowedColumns = tableHandler.parseFieldFilter(allowedCols, false); - + const validatedAllowedColumns = tableHandler.parseFieldFilter( + allowedCols, + false, + ); + let finalRow = { ...row }; if (removeDisallowedFields && !isEmpty(finalRow)) { finalRow = pickKeys(finalRow, validatedAllowedColumns); } /* If has keys check against allowed_cols */ - validateObj(finalRow, validatedAllowedColumns) + validateObj(finalRow, validatedAllowedColumns); /** Apply forcedData */ if (!isEmpty(forcedData)) { @@ -117,14 +163,22 @@ const getValidatedRow = ({ row = {}, forcedData = {}, allowedCols, removeDisallo /** Validate forcedData */ validateObj(finalRow, column_names.slice(0)); return finalRow; -} +}; /** * Add synced_field value if missing * prepareFieldValues(): Apply forcedData, remove disallowed columns, validate against allowed columns * tableConfigurator?.checkColVal(): Validate column min/max/isText/lowerCased/trimmed values */ -export const prepareNewData = async ({ row, forcedData, allowedFields, tableRules, removeDisallowedFields = false, tableConfigurator, tableHandler }: ValidatedParams) => { +export const prepareNewData = async ({ + row, + forcedData, + allowedFields, + tableRules, + removeDisallowedFields = false, + tableConfigurator, + tableHandler, +}: ValidatedParams) => { const synced_field = (tableRules ?? 
{})?.sync?.synced_field; /* Update synced_field if sync is on and missing */ @@ -132,26 +186,45 @@ export const prepareNewData = async ({ row, forcedData, allowedFields, tableRule row[synced_field] = Date.now(); } - const data = getValidatedRow({ tableHandler, row, forcedData, allowedCols: allowedFields, removeDisallowedFields }); + const data = getValidatedRow({ + tableHandler, + row, + forcedData, + allowedCols: allowedFields, + removeDisallowedFields, + }); const dataKeys = getKeys(data); - dataKeys.forEach(col => { - tableConfigurator?.checkColVal({ table: tableHandler.name, col, value: data[col] }); - const colConfig = tableConfigurator?.getColumnConfig(tableHandler.name, col); - if (colConfig && isObject(colConfig) && "isText" in colConfig && data[col]) { + dataKeys.forEach((col) => { + tableConfigurator?.checkColVal({ + table: tableHandler.name, + col, + value: data[col], + }); + const colConfig = tableConfigurator?.getColumnConfig( + tableHandler.name, + col, + ); + if ( + colConfig && + isObject(colConfig) && + "isText" in colConfig && + data[col] + ) { if (colConfig.lowerCased) { - data[col] = data[col].toString().toLowerCase() + data[col] = data[col].toString().toLowerCase(); } if (colConfig.trimmed) { - data[col] = data[col].toString().trim() + data[col] = data[col].toString().trim(); } } - }) - - const allowedCols = tableHandler.columns.filter(c => dataKeys.includes(c.name)).map(c => c.name); - return { data, allowedCols } -} + }); + const allowedCols = tableHandler.columns + .filter((c) => dataKeys.includes(c.name)) + .map((c) => c.name); + return { data, allowedCols }; +}; /** * Ensures: @@ -160,76 +233,95 @@ export const prepareNewData = async ({ row, forcedData, allowedFields, tableRule * - update is not empty * - no duplicate column names ( could update with $func and plain value for same column ) */ -const getValidatedRowFieldData = async ({ allowedCols, rows, validationOptions, dbTx, command }: ParseDataArgs, tableHandler: TableHandler) => { +const getValidatedRowFieldData = async ( + { allowedCols, rows, validationOptions, dbTx, command }: ParseDataArgs, + tableHandler: TableHandler, +) => { if (!allowedCols.length && command === "update") { throw "allowedColumns cannot be empty"; } const rowFieldData = await Promise.all( - rows.map(async nonvalidatedRow => { - + rows.map(async (nonvalidatedRow) => { let row = pickKeys(nonvalidatedRow, allowedCols); const initialRowKeys = Object.keys(row); if (validationOptions.validate) { - if(!validationOptions.localParams){ + if (!validationOptions.localParams) { throw "localParams missing for validate"; } - row = await validationOptions.validate({ row, dbx: dbTx, localParams: validationOptions.localParams }); + row = await validationOptions.validate({ + row, + dbx: dbTx, + localParams: validationOptions.localParams, + }); } - const keysAddedDuringValidate = Object.keys(row).filter(newKey => !initialRowKeys.includes(newKey)); + const keysAddedDuringValidate = Object.keys(row).filter( + (newKey) => !initialRowKeys.includes(newKey), + ); const getColumn = (fieldName: string) => { if (!allowedCols.concat(keysAddedDuringValidate).includes(fieldName)) { throw `Unexpected/Dissallowed column name: ${fieldName}`; } - const column = tableHandler.columns.find(c => c.name === fieldName); + const column = tableHandler.columns.find((c) => c.name === fieldName); if (!column) { throw `Invalid column: ${fieldName}`; } return column; }; - const rowPartValues = Object.entries(row).map(([fieldName, fieldValue]) => { - const column = 
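/* A field value may be plain data or a single-key function object such as
   { name: { $trim: ["name"] } } ($trim is illustrative; the allowed set
   comes from convertionFuncs). Anything else is treated as plain data. */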
getColumn(fieldName); - if (isObject(fieldValue)) { - - // const textPatch = getTextPatch(column, fieldValue); - // if(textPatch){ - // return { - // type: "plain", - // column, - // fieldValue: textPatch, - // } satisfies RowFieldData; - // } - - const [firstKey, ...otherkeys] = Object.keys(fieldValue); - const func = firstKey && !otherkeys.length? convertionFuncs.some(f => `$${f.name}` === firstKey) : undefined; - if(func){ - const { funcName, args } = parseFunctionObject(fieldValue); - return { - type: "function", - column, - funcName, - args, - } satisfies RowFieldData + const rowPartValues = Object.entries(row).map( + ([fieldName, fieldValue]) => { + const column = getColumn(fieldName); + if (isObject(fieldValue)) { + // const textPatch = getTextPatch(column, fieldValue); + // if(textPatch){ + // return { + // type: "plain", + // column, + // fieldValue: textPatch, + // } satisfies RowFieldData; + // } + + const [firstKey, ...otherkeys] = Object.keys(fieldValue); + const func = + firstKey && !otherkeys.length + ? convertionFuncs.some((f) => `$${f.name}` === firstKey) + : undefined; + if (func) { + const { funcName, args } = parseFunctionObject(fieldValue); + return { + type: "function", + column, + funcName, + args, + } satisfies RowFieldData; + } } - } - return { - type: "plain", - column: getColumn(fieldName), - fieldValue, - } satisfies RowFieldData; - }); - + return { + type: "plain", + column: getColumn(fieldName), + fieldValue, + } satisfies RowFieldData; + }, + ); + return rowPartValues; - })); + }), + ); return rowFieldData; -} +}; const getTextPatch = async (c: TableSchemaColumn, fieldValue: any) => { - - if (c.data_type === "text" && fieldValue && isObject(fieldValue) && !["from", "to"].find(key => typeof fieldValue[key] !== "number")) { - const unrecProps = Object.keys(fieldValue).filter(k => !["from", "to", "text", "md5"].includes(k)); + if ( + c.data_type === "text" && + fieldValue && + isObject(fieldValue) && + !["from", "to"].find((key) => typeof fieldValue[key] !== "number") + ) { + const unrecProps = Object.keys(fieldValue).filter( + (k) => !["from", "to", "text", "md5"].includes(k), + ); if (unrecProps.length) { throw "Unrecognised params in textPatch field: " + unrecProps.join(", "); } @@ -238,10 +330,10 @@ const getTextPatch = async (c: TableSchemaColumn, fieldValue: any) => { from: number; to: number; text: string; - md5: string + md5: string; } = { - ...fieldValue, - fieldName: c.name + ...fieldValue, + fieldName: c.name, } as any; // if (tableRules && !tableRules.select) throw "Select needs to be permitted to patch data"; @@ -251,18 +343,20 @@ const getTextPatch = async (c: TableSchemaColumn, fieldValue: any) => { // throw "Cannot patch data within a filter that affects more/less than 1 row"; // } // return unpatchText(rows[0][p.fieldName], patchedTextData); - const rawValue = `OVERLAY(${asName(c.name)} PLACING ${asValue(patchedTextData.text)} FROM ${asValue(patchedTextData.from)} FOR ${asValue(patchedTextData.to - patchedTextData.from + 1)})` + const rawValue = `OVERLAY(${asName(c.name)} PLACING ${asValue(patchedTextData.text)} FROM ${asValue(patchedTextData.from)} FOR ${asValue(patchedTextData.to - patchedTextData.from + 1)})`; return rawValue; - } - - return undefined -} + } -const getParsedRowFieldDataFunction = async (rowPart: RowFieldDataFunction, args: ParseDataArgs) => { + return undefined; +}; - const func = convertionFuncs.find(f => `$${f.name}` === rowPart.funcName); +const getParsedRowFieldDataFunction = async ( + rowPart: RowFieldDataFunction, + 
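Two things happen in the row-value parsing above. First, an object value whose single key matches a registered conversion function (the `$`-prefixed names in `convertionFuncs`) is parsed as a function call rather than a plain value. Second, the currently disabled `getTextPatch` path would turn a `{ from, to, text }` object on a text column into a SQL `OVERLAY`. A sketch assuming a server-side handler `dbo` with hypothetical tables and columns:

```typescript
// Function value: single key `$ST_MakePoint` matches a conversion func.
await dbo.shapes.update({ id: 1 }, { geom: { $ST_MakePoint: [28.6, 45.2] } });
// -> SET "geom" = ST_MakePoint(28.6, 45.2)::geometry

// Plain value: the key matches no `$func`, so it is stored as-is.
await dbo.shapes.update({ id: 1 }, { meta: { zoom: 12 } });

// getTextPatch (its call site is commented out above) would emit, for
// { from: 5, to: 7, text: "abc" } on text column "descr":
//   OVERLAY("descr" PLACING 'abc' FROM 5 FOR 3)   -- FOR = to - from + 1
```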
args: ParseDataArgs, +) => { + const func = convertionFuncs.find((f) => `$${f.name}` === rowPart.funcName); if (!func) { - throw `Unknown function: ${rowPart.funcName}. Expecting one of: ${convertionFuncs.map(f => f.name).join(", ")}`; + throw `Unknown function: ${rowPart.funcName}. Expecting one of: ${convertionFuncs.map((f) => f.name).join(", ")}`; } if (func.onlyAllowedFor && func.onlyAllowedFor !== args.command) { throw `Function ${rowPart.funcName} is only allowed for ${func.onlyAllowedFor} but not ${args.command}`; @@ -270,35 +364,43 @@ const getParsedRowFieldDataFunction = async (rowPart: RowFieldDataFunction, args return func.getQuery(rowPart); }; -const getParsedRowFieldData = async (rowFieldData: RowFieldData[][], args: ParseDataArgs) => { - const parsedRowFieldData = Promise.all(rowFieldData.map(rowParts => { - return Promise.all(rowParts.map(async rowPart => { - let escapedVal: string; - if (rowPart.type === "function") { - escapedVal = await getParsedRowFieldDataFunction(rowPart, args); - } else { - - /** Prevent pg-promise formatting jsonb */ - const colIsJSON = ["json", "jsonb"].includes(rowPart.column.data_type); - escapedVal = pgp.as.format(colIsJSON ? "$1:json" : "$1", [rowPart.fieldValue]) - } +const getParsedRowFieldData = async ( + rowFieldData: RowFieldData[][], + args: ParseDataArgs, +) => { + const parsedRowFieldData = Promise.all( + rowFieldData.map((rowParts) => { + return Promise.all( + rowParts.map(async (rowPart) => { + let escapedVal: string; + if (rowPart.type === "function") { + escapedVal = await getParsedRowFieldDataFunction(rowPart, args); + } else { + /** Prevent pg-promise formatting jsonb */ + const colIsJSON = ["json", "jsonb"].includes( + rowPart.column.data_type, + ); + escapedVal = pgp.as.format(colIsJSON ? 
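The `colIsJSON` branch above matters because pg-promise's default formatting turns JS arrays into Postgres `array[...]` constructs, which is wrong for json/jsonb columns; the `:json` filter serializes the value as a JSON text literal instead, and the `::udt_name` cast appended afterwards lets array columns resolve without explicit casts in the caller's SQL:

```typescript
import pgPromise from "pg-promise";

const pgp = pgPromise();

pgp.as.format("$1", [["a", "b"]]); // array['a','b']  (breaks a jsonb column)
pgp.as.format("$1:json", [["a", "b"]]); // '["a","b"]'  (JSON text literal)
// the value is then suffixed with the column's type, e.g. '["a","b"]'::jsonb
```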
"$1:json" : "$1", [ + rowPart.fieldValue, + ]); + } - /** - * Cast to type to avoid array errors (they do not cast automatically) - */ - escapedVal += `::${rowPart.column.udt_name}`; + /** + * Cast to type to avoid array errors (they do not cast automatically) + */ + escapedVal += `::${rowPart.column.udt_name}`; - return { - escapedCol: asName(rowPart.column.name), - escapedVal, - }; - })); - })); + return { + escapedCol: asName(rowPart.column.name), + escapedVal, + }; + }), + ); + }), + ); return parsedRowFieldData; -} - - +}; type ConvertionFunc = { name: string; @@ -315,31 +417,36 @@ const convertionFuncs: ConvertionFunc[] = [ "ST_MakePointM", "ST_PointFromText", "ST_GeomFromEWKT", - "ST_GeomFromGeoJSON" - ].map(name => ({ - name, - getQuery: ({ args }) => { - const argList = args.map(arg => asValue(arg)).join(", "); - return `${name}(${argList})`; - } - } satisfies ConvertionFunc)), + "ST_GeomFromGeoJSON", + ].map( + (name) => + ({ + name, + getQuery: ({ args }) => { + const argList = args.map((arg) => asValue(arg)).join(", "); + return `${name}(${argList})`; + }, + }) satisfies ConvertionFunc, + ), { name: "to_timestamp", - getQuery: ({ args }) => `to_timestamp(${asValue(args[0])}::BIGINT/1000.0)::timestamp` - }, { + getQuery: ({ args }) => + `to_timestamp(${asValue(args[0])}::BIGINT/1000.0)::timestamp`, + }, + { name: "merge", - description: "Merge the provided jsonb objects into the existing column value, ensuring that a null source value will be coalesced with provided values", + description: + "Merge the provided jsonb objects into the existing column value, ensuring that a null source value will be coalesced with provided values", onlyAllowedFor: "update", getQuery: ({ args, column }) => { if (!args.length) throw "merge function requires at least one argument"; - const argVals = args.map(arg => asValue(arg)) + const argVals = args.map((arg) => asValue(arg)); const argList = argVals.join(" || "); return `COALESCE(${asName(column.name)}, ${argVals.join(", ")}) || ${argList}`; - } - } + }, + }, ]; - export class ColSet { opts: { columns: ColumnInfo[]; @@ -348,10 +455,9 @@ export class ColSet { }; constructor(columns: ColumnInfo[], tableName: string) { - this.opts = { columns, tableName, colNames: columns.map(c => c.name) } + this.opts = { columns, tableName, colNames: columns.map((c) => c.name) }; } - // private async getRow(data: any, allowedCols: string[], dbTx: DBHandlerServer, validate: ValidateRow | undefined, command: "update" | "insert", localParams: LocalParams | undefined): Promise { // const badCol = allowedCols.find(c => !this.opts.colNames.includes(c)) // if (!allowedCols || badCol) { @@ -388,7 +494,6 @@ export class ColSet { // escapedVal = func.getQuery(funcArgs); // } else if (col.udt_name === "text") { - // } else { // /** Prevent pg-promise formatting jsonb */ // const colIsJSON = ["json", "jsonb"].includes(col.data_type); @@ -425,4 +530,4 @@ export class ColSet { // }))).join(";\n") + " "; // return res; // } -} \ No newline at end of file +} diff --git a/lib/DboBuilder/TableHandler/TableHandler.ts b/lib/DboBuilder/TableHandler/TableHandler.ts index f5faa559..30918903 100644 --- a/lib/DboBuilder/TableHandler/TableHandler.ts +++ b/lib/DboBuilder/TableHandler/TableHandler.ts @@ -1,9 +1,24 @@ import pgPromise from "pg-promise"; -import { AnyObject, asName, DeleteParams, FieldFilter, InsertParams, Select, UpdateParams } from "prostgles-types"; +import { + AnyObject, + asName, + DeleteParams, + FieldFilter, + InsertParams, + Select, + UpdateParams, +} from 
"prostgles-types"; import { DB } from "../../Prostgles"; import { SyncRule, TableRule } from "../../PublishParser/PublishParser"; import TableConfigurator from "../../TableConfig/TableConfig"; -import { DboBuilder, Filter, getErrorAsObject, getSerializedClientErrorFromPGError, LocalParams, TableHandlers } from "../DboBuilder"; +import { + DboBuilder, + Filter, + getErrorAsObject, + getSerializedClientErrorFromPGError, + LocalParams, + TableHandlers, +} from "../DboBuilder"; import type { TableSchema } from "../DboBuilderTypes"; import { parseUpdateRules } from "../parseUpdateRules"; import { COMPUTED_FIELDS, FUNCTIONS } from "../QueryBuilder/Functions"; @@ -16,7 +31,6 @@ import { update } from "./update"; import { updateBatch } from "./updateBatch"; import { upsert } from "./upsert"; - export type ValidatedParams = { row: AnyObject; forcedData?: AnyObject; @@ -25,44 +39,58 @@ export type ValidatedParams = { removeDisallowedFields: boolean; tableConfigurator: TableConfigurator | undefined; tableHandler: TableHandler; -} - -export class TableHandler extends ViewHandler { +}; +export class TableHandler extends ViewHandler { dataValidator: DataValidator; - constructor(db: DB, tableOrViewInfo: TableSchema, dboBuilder: DboBuilder, tx?: {t: pgPromise.ITask<{}>, dbTX: TableHandlers}, joinPaths?: JoinPaths) { + constructor( + db: DB, + tableOrViewInfo: TableSchema, + dboBuilder: DboBuilder, + tx?: { t: pgPromise.ITask<{}>; dbTX: TableHandlers }, + joinPaths?: JoinPaths, + ) { super(db, tableOrViewInfo, dboBuilder, tx, joinPaths); this.remove = this.delete; this.dataValidator = new DataValidator(this); this.is_view = false; - this.is_media = dboBuilder.prostgles.isMedia(this.name) + this.is_media = dboBuilder.prostgles.isMedia(this.name); } getFinalDBtx = (localParams: LocalParams | undefined) => { return localParams?.tx?.dbTX ?? this.tx?.dbTX; - } + }; getFinalDbo = (localParams: LocalParams | undefined) => { return this.getFinalDBtx(localParams) ?? 
this.dboBuilder.dbo; - } + }; parseUpdateRules = parseUpdateRules.bind(this); - + update = update.bind(this); updateBatch = updateBatch.bind(this); - + async insert( - rowOrRows: AnyObject | AnyObject[], - param2?: InsertParams, - param3_unused?: undefined, - tableRules?: TableRule, - _localParams?: LocalParams - ): Promise { - return insert.bind(this)(rowOrRows, param2, param3_unused, tableRules, _localParams) + rowOrRows: AnyObject | AnyObject[], + param2?: InsertParams, + param3_unused?: undefined, + tableRules?: TableRule, + _localParams?: LocalParams, + ): Promise { + return insert.bind(this)( + rowOrRows, + param2, + param3_unused, + tableRules, + _localParams, + ); } - prepareReturning = async (returning: Select | undefined, allowedFields: string[]): Promise => { + prepareReturning = async ( + returning: Select | undefined, + allowedFields: string[], + ): Promise => { const result: SelectItem[] = []; if (returning) { const sBuilder = new SelectItemBuilder({ @@ -70,7 +98,9 @@ export class TableHandler extends ViewHandler { allowedFields, allowedOrderByFields: allowedFields, computedFields: COMPUTED_FIELDS, - functions: FUNCTIONS.filter(f => f.type === "function" && f.singleColArg), + functions: FUNCTIONS.filter( + (f) => f.type === "function" && f.singleColArg, + ), isView: this.is_view, columns: this.columns, }); @@ -80,97 +110,162 @@ export class TableHandler extends ViewHandler { } return result; - } + }; makeReturnQuery(items?: SelectItem[]) { if (items?.length) return " RETURNING " + getSelectItemQuery(items); return ""; } - async delete(filter?: Filter, params?: DeleteParams, param3_unused?: undefined, table_rules?: TableRule, localParams?: LocalParams): Promise { - return _delete.bind(this)(filter, params, param3_unused, table_rules, localParams); + async delete( + filter?: Filter, + params?: DeleteParams, + param3_unused?: undefined, + table_rules?: TableRule, + localParams?: LocalParams, + ): Promise { + return _delete.bind(this)( + filter, + params, + param3_unused, + table_rules, + localParams, + ); } - remove(filter: Filter, params?: UpdateParams, param3_unused?: undefined, tableRules?: TableRule, localParams?: LocalParams) { + remove( + filter: Filter, + params?: UpdateParams, + param3_unused?: undefined, + tableRules?: TableRule, + localParams?: LocalParams, + ) { return this.delete(filter, params, param3_unused, tableRules, localParams); } - upsert = upsert.bind(this); + upsert = upsert.bind(this); /* External request. Cannot sync from server */ - async sync(filter: Filter, params: { select?: FieldFilter }, param3_unused: undefined, table_rules: TableRule, localParams: LocalParams) { + async sync( + filter: Filter, + params: { select?: FieldFilter }, + param3_unused: undefined, + table_rules: TableRule, + localParams: LocalParams, + ) { const start = Date.now(); try { - - if(!this.dboBuilder.canSubscribe){ + if (!this.dboBuilder.canSubscribe) { throw "Cannot subscribe. 
PubSubManager not initiated"; } if (!localParams) throw "Sync not allowed within the server code"; const { socket } = localParams; if (!socket) throw "socket missing"; - - - if (!table_rules || !table_rules.sync || !table_rules.select) throw "sync or select table rules missing"; - + + if (!table_rules || !table_rules.sync || !table_rules.select) + throw "sync or select table rules missing"; + if (this.tx) throw "Sync not allowed within transactions"; - + const ALLOWED_PARAMS = ["select"]; - const invalidParams = Object.keys(params || {}).filter(k => !ALLOWED_PARAMS.includes(k)); - if (invalidParams.length) throw "Invalid or dissallowed params found: " + invalidParams.join(", "); - + const invalidParams = Object.keys(params || {}).filter( + (k) => !ALLOWED_PARAMS.includes(k), + ); + if (invalidParams.length) + throw ( + "Invalid or dissallowed params found: " + invalidParams.join(", ") + ); + const { synced_field, allow_delete }: SyncRule = table_rules.sync; if (!table_rules.sync.id_fields.length || !synced_field) { - const err = "INTERNAL ERROR: id_fields OR synced_field missing from publish"; + const err = + "INTERNAL ERROR: id_fields OR synced_field missing from publish"; console.error(err); throw err; } - const id_fields = this.parseFieldFilter(table_rules.sync.id_fields, false); + const id_fields = this.parseFieldFilter( + table_rules.sync.id_fields, + false, + ); const syncFields = [...id_fields, synced_field]; - const allowedSelect = this.parseFieldFilter(table_rules?.select.fields ?? false); - if (syncFields.find(f => !allowedSelect.includes(f))) { + const allowedSelect = this.parseFieldFilter( + table_rules?.select.fields ?? false, + ); + if (syncFields.find((f) => !allowedSelect.includes(f))) { throw `INTERNAL ERROR: sync field missing from publish.${this.name}.select.fields`; } const select = this.getAllowedSelectFields( params?.select ?? 
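The checks above mean a usable `sync` rule must supply non-empty `id_fields` plus a `synced_field`, and all of them must also be selectable under the same table's `select.fields`. A publish-rule shape satisfying them (field names hypothetical; shape follows the `SyncRule` destructuring above):

```typescript
const publishSketch = {
  tasks: {
    select: { fields: "*" },
    sync: {
      id_fields: ["id"],
      synced_field: "last_updated",
      allow_delete: false,
    },
  },
};
```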
"*", allowedSelect, - false + false, ); if (!select.length) throw "Empty select not allowed"; /* Add sync fields if missing */ - syncFields.map(sf => { + syncFields.map((sf) => { if (!select.includes(sf)) select.push(sf); }); /* Step 1: parse command and params */ - const result = await this.find(filter, { select, limit: 0 }, undefined, table_rules, localParams) - .then(async _isValid => { - - const { filterFields, forcedFilter } = table_rules?.select || {}; - const condition = (await this.prepareWhere({ select: undefined, filter, forcedFilter, filterFields, addWhere: false, localParams, tableRule: table_rules })).where; - - const pubSubManager = await this.dboBuilder.getPubSubManager(); - return pubSubManager.addSync({ + const result = await this.find( + filter, + { select, limit: 0 }, + undefined, + table_rules, + localParams, + ).then(async (_isValid) => { + const { filterFields, forcedFilter } = table_rules?.select || {}; + const condition = ( + await this.prepareWhere({ + select: undefined, + filter, + forcedFilter, + filterFields, + addWhere: false, + localParams, + tableRule: table_rules, + }) + ).where; + + const pubSubManager = await this.dboBuilder.getPubSubManager(); + return pubSubManager + .addSync({ table_info: this.tableOrViewInfo, condition, - id_fields, + id_fields, synced_field, allow_delete, socket, table_rules, filter: { ...filter }, - params: { select } - }).then(channelName => ({ channelName, id_fields, synced_field })); - }); - await this._log({ command: "sync", localParams, data: { filter, params }, duration: Date.now() - start }); + params: { select }, + }) + .then((channelName) => ({ channelName, id_fields, synced_field })); + }); + await this._log({ + command: "sync", + localParams, + data: { filter, params }, + duration: Date.now() - start, + }); return result; } catch (e) { - await this._log({ command: "sync", localParams, data: { filter, params }, duration: Date.now() - start, error: getErrorAsObject(e) }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + await this._log({ + command: "sync", + localParams, + data: { filter, params }, + duration: Date.now() - start, + error: getErrorAsObject(e), + }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } /* @@ -199,7 +294,7 @@ export class TableHandler extends ViewHandler { { data } -> WHERE synced >= from_synced */ } - } -export const getSelectItemQuery = (items: SelectItem[]) => items.map(s => s.getQuery() + " AS " + asName(s.alias)).join(", ") \ No newline at end of file +export const getSelectItemQuery = (items: SelectItem[]) => + items.map((s) => s.getQuery() + " AS " + asName(s.alias)).join(", "); diff --git a/lib/DboBuilder/TableHandler/delete.ts b/lib/DboBuilder/TableHandler/delete.ts index 1ce98510..fdb01bb0 100644 --- a/lib/DboBuilder/TableHandler/delete.ts +++ b/lib/DboBuilder/TableHandler/delete.ts @@ -1,12 +1,25 @@ import pgPromise from "pg-promise"; import { AnyObject, DeleteParams, FieldFilter } from "prostgles-types"; import { DeleteRule, TableRule } from "../../PublishParser/PublishParser"; -import { Filter, LocalParams, getErrorAsObject, getSerializedClientErrorFromPGError, withUserRLS } from "../DboBuilder"; +import { + Filter, + LocalParams, + getErrorAsObject, + getSerializedClientErrorFromPGError, + withUserRLS, +} from "../DboBuilder"; import { runQueryReturnType } from "../ViewHandler/find"; import { TableHandler } from "./TableHandler"; import { onDeleteFromFileTable } from 
"./onDeleteFromFileTable"; -export async function _delete(this: TableHandler, filter?: Filter, params?: DeleteParams, param3_unused?: undefined, tableRules?: TableRule, localParams?: LocalParams): Promise { +export async function _delete( + this: TableHandler, + filter?: Filter, + params?: DeleteParams, + param3_unused?: undefined, + tableRules?: TableRule, + localParams?: LocalParams, +): Promise { const start = Date.now(); try { const { returning } = params || {}; @@ -29,32 +42,49 @@ export async function _delete(this: TableHandler, filter?: Filter, params?: Dele if (!returningFields) returningFields = tableRules?.select?.fields; if (!returningFields) returningFields = tableRules?.delete?.filterFields; - if (!filterFields) throw ` Invalid delete rule for ${this.name}. filterFields missing `; + if (!filterFields) + throw ` Invalid delete rule for ${this.name}. filterFields missing `; /* Safely test publish rules */ if (testRule) { - await this.validateViewRules({ filterFields, returningFields, forcedFilter, rule: "delete" }); + await this.validateViewRules({ + filterFields, + returningFields, + forcedFilter, + rule: "delete", + }); return true; } } if (params) { - const good_paramsObj: Record = { returning: 1, returnType: 1 }; + const good_paramsObj: Record = { + returning: 1, + returnType: 1, + }; const good_params = Object.keys(good_paramsObj); - const bad_params = Object.keys(params).filter(k => !good_params.includes(k)); - if (bad_params && bad_params.length) throw "Invalid params: " + bad_params.join(", ") + " \n Expecting: " + good_params.join(", "); + const bad_params = Object.keys(params).filter( + (k) => !good_params.includes(k), + ); + if (bad_params && bad_params.length) + throw ( + "Invalid params: " + + bad_params.join(", ") + + " \n Expecting: " + + good_params.join(", ") + ); } - let queryType: keyof pgPromise.ITask<{}> = 'none'; + let queryType: keyof pgPromise.ITask<{}> = "none"; let queryWithoutRLS = `DELETE FROM ${this.escapedName} `; - const filterOpts = (await this.prepareWhere({ + const filterOpts = await this.prepareWhere({ select: undefined, filter, forcedFilter, filterFields, localParams, - tableRule: tableRules - })) + tableRule: tableRules, + }); queryWithoutRLS += filterOpts.where; if (validate) { const _filter = filterOpts.filter; @@ -67,8 +97,13 @@ export async function _delete(this: TableHandler, filter?: Filter, params?: Dele if (!returningFields) { throw "Returning dissallowed"; } - returningQuery = this.makeReturnQuery(await this.prepareReturning(returning, this.parseFieldFilter(returningFields))); - queryWithoutRLS += returningQuery + returningQuery = this.makeReturnQuery( + await this.prepareReturning( + returning, + this.parseFieldFilter(returningFields), + ), + ); + queryWithoutRLS += returningQuery; } // TODO - delete orphaned files @@ -86,30 +121,48 @@ export async function _delete(this: TableHandler, filter?: Filter, params?: Dele * Delete file */ if (this.is_media) { - const result = await onDeleteFromFileTable.bind(this)({ - localParams, - queryType, - returningQuery: returnQuery? returnQuery : undefined, + const result = await onDeleteFromFileTable.bind(this)({ + localParams, + queryType, + returningQuery: returnQuery ? 
returnQuery : undefined, filterOpts, }); - await this._log({ command: "delete", localParams, data: { filter, params }, duration: Date.now() - start }); + await this._log({ + command: "delete", + localParams, + data: { filter, params }, + duration: Date.now() - start, + }); return result; } - const result = await runQueryReturnType({ + const result = await runQueryReturnType({ queryWithoutRLS, queryWithRLS, - newQuery: undefined, - returnType: params?.returnType, - handler: this, - localParams + newQuery: undefined, + returnType: params?.returnType, + handler: this, + localParams, + }); + await this._log({ + command: "delete", + localParams, + data: { filter, params }, + duration: Date.now() - start, }); - await this._log({ command: "delete", localParams, data: { filter, params }, duration: Date.now() - start }); return result; - } catch (e) { - await this._log({ command: "delete", localParams, data: { filter, params }, duration: Date.now() - start, error: getErrorAsObject(e) }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + await this._log({ + command: "delete", + localParams, + data: { filter, params }, + duration: Date.now() - start, + error: getErrorAsObject(e), + }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } -} - +} diff --git a/lib/DboBuilder/TableHandler/insert.ts b/lib/DboBuilder/TableHandler/insert.ts index 7d3f6667..57d72935 100644 --- a/lib/DboBuilder/TableHandler/insert.ts +++ b/lib/DboBuilder/TableHandler/insert.ts @@ -1,123 +1,186 @@ import { AnyObject, InsertParams, asName, isObject } from "prostgles-types"; import { TableRule, ValidateRowBasic } from "../../PublishParser/PublishParser"; -import { LocalParams, getErrorAsObject, getSerializedClientErrorFromPGError, withUserRLS } from "../DboBuilder"; +import { + LocalParams, + getErrorAsObject, + getSerializedClientErrorFromPGError, + withUserRLS, +} from "../DboBuilder"; import { insertNestedRecords } from "../insertNestedRecords"; import { prepareNewData } from "./DataValidator"; import { TableHandler } from "./TableHandler"; import { insertTest } from "./insertTest"; import { runInsertUpdateQuery } from "./runInsertUpdateQuery"; -export async function insert(this: TableHandler, rowOrRows: AnyObject | AnyObject[] = {}, insertParams?: InsertParams, param3_unused?: undefined, tableRules?: TableRule, localParams?: LocalParams): Promise { - +export async function insert( + this: TableHandler, + rowOrRows: AnyObject | AnyObject[] = {}, + insertParams?: InsertParams, + param3_unused?: undefined, + tableRules?: TableRule, + localParams?: LocalParams, +): Promise { const ACTION = "insert"; const start = Date.now(); try { - const { removeDisallowedFields = false } = insertParams || {}; const { returnQuery = false, nestedInsert } = localParams || {}; - + const finalDBtx = this.getFinalDBtx(localParams); const rule = tableRules?.[ACTION]; - const { postValidate, checkFilter, validate, allowedNestedInserts } = rule ?? {}; + const { postValidate, checkFilter, validate, allowedNestedInserts } = + rule ?? 
{}; /** Post validate and checkFilter require a transaction dbo handler because they happen after the insert */ - if(postValidate || checkFilter){ - if(!finalDBtx){ - return this.dboBuilder.getTX(_dbtx => _dbtx[this.name]?.[ACTION]?.(rowOrRows, insertParams, param3_unused, tableRules, localParams)) + if (postValidate || checkFilter) { + if (!finalDBtx) { + return this.dboBuilder.getTX((_dbtx) => + _dbtx[this.name]?.[ACTION]?.( + rowOrRows, + insertParams, + param3_unused, + tableRules, + localParams, + ), + ); } } - const { testOnly, fields, forcedData, returningFields } = await insertTest.bind(this)({ tableRules, localParams }); - if(testOnly){ + const { testOnly, fields, forcedData, returningFields } = + await insertTest.bind(this)({ tableRules, localParams }); + if (testOnly) { return true; } - if(allowedNestedInserts){ - if(!nestedInsert || !allowedNestedInserts.some(ai => ai.table === nestedInsert?.previousTable && ai.column === nestedInsert.referencingColumn)){ - throw `Direct inserts not allowed. Only nested inserts from these tables: ${JSON.stringify(allowedNestedInserts)} ` + if (allowedNestedInserts) { + if ( + !nestedInsert || + !allowedNestedInserts.some( + (ai) => + ai.table === nestedInsert?.previousTable && + ai.column === nestedInsert.referencingColumn, + ) + ) { + throw `Direct inserts not allowed. Only nested inserts from these tables: ${JSON.stringify(allowedNestedInserts)} `; } } validateInsertParams(insertParams); const isMultiInsert = Array.isArray(rowOrRows); - const preValidatedRows = await Promise.all((isMultiInsert? rowOrRows : [rowOrRows]).map(async nonValidated => { - const { preValidate, validate } = tableRules?.insert ?? {}; - const { tableConfigurator } = this.dboBuilder.prostgles; - if(!tableConfigurator) throw "tableConfigurator missing"; - let row = await tableConfigurator.getPreInsertRow(this, { dbx: this.getFinalDbo(localParams), validate, localParams, row: nonValidated }) - if (preValidate) { - if(!localParams) throw "localParams missing for insert preValidate"; - row = await preValidate({ row, dbx: (this.tx?.dbTX || this.dboBuilder.dbo) as any, localParams }); - } + const preValidatedRows = await Promise.all( + (isMultiInsert ? rowOrRows : [rowOrRows]).map(async (nonValidated) => { + const { preValidate, validate } = tableRules?.insert ?? {}; + const { tableConfigurator } = this.dboBuilder.prostgles; + if (!tableConfigurator) throw "tableConfigurator missing"; + let row = await tableConfigurator.getPreInsertRow(this, { + dbx: this.getFinalDbo(localParams), + validate, + localParams, + row: nonValidated, + }); + if (preValidate) { + if (!localParams) throw "localParams missing for insert preValidate"; + row = await preValidate({ + row, + dbx: (this.tx?.dbTX || this.dboBuilder.dbo) as any, + localParams, + }); + } - return row; - })); - const preValidatedrowOrRows = isMultiInsert? preValidatedRows : preValidatedRows[0]!; + return row; + }), + ); + const preValidatedrowOrRows = isMultiInsert + ? 
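With `allowedNestedInserts`, the rule above rejects direct inserts and only admits rows arriving as nested inserts from a permitted (table, column) pair. A sketch of the rule and its effect (tables hypothetical):

```typescript
const customersInsertRuleSketch = {
  fields: "*",
  allowedNestedInserts: [{ table: "orders", column: "customer_id" }],
};

// dbo.customers.insert({ name: "x" })
//   -> throws: Direct inserts not allowed. Only nested inserts from these tables: ...
// dbo.orders.insert({ customer_id: { name: "x" } })
//   -> allowed: arrives as a nested insert referenced from orders.customer_id
```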
preValidatedRows + : preValidatedRows[0]!; /** * If media it will: upload file and continue insert * If nested insert it will: make separate inserts and not continue main insert */ - const mediaOrNestedInsert = await insertNestedRecords.bind(this)({ data: preValidatedrowOrRows, param2: insertParams, tableRules, localParams }); + const mediaOrNestedInsert = await insertNestedRecords.bind(this)({ + data: preValidatedrowOrRows, + param2: insertParams, + tableRules, + localParams, + }); const { data, insertResult } = mediaOrNestedInsert; if ("insertResult" in mediaOrNestedInsert) { return insertResult; } - const pkeyNames = this.columns.filter(c => c.is_pkey).map(c => c.name); + const pkeyNames = this.columns.filter((c) => c.is_pkey).map((c) => c.name); const getInsertQuery = async (_rows: AnyObject[]) => { - const validatedData = await Promise.all(_rows.map(async _row => { + const validatedData = await Promise.all( + _rows.map(async (_row) => { + const row = { ..._row }; - const row = { ..._row }; + if (!isObject(row)) { + throw ( + "\nInvalid insert data provided. Expected an object but received: " + + JSON.stringify(row) + ); + } - if (!isObject(row)) { - throw "\nInvalid insert data provided. Expected an object but received: " + JSON.stringify(row); - } - - const { data: validatedRow, allowedCols } = await prepareNewData({ - row, - forcedData, - allowedFields: fields, - tableRules, - removeDisallowedFields, - tableConfigurator: this.dboBuilder.prostgles.tableConfigurator, - tableHandler: this, - }); - return { validatedRow, allowedCols }; - })); - const validatedRows = validatedData.map(d => d.validatedRow); - const allowedCols = Array.from(new Set(validatedData.flatMap(d => d.allowedCols))); + const { data: validatedRow, allowedCols } = await prepareNewData({ + row, + forcedData, + allowedFields: fields, + tableRules, + removeDisallowedFields, + tableConfigurator: this.dboBuilder.prostgles.tableConfigurator, + tableHandler: this, + }); + return { validatedRow, allowedCols }; + }), + ); + const validatedRows = validatedData.map((d) => d.validatedRow); + const allowedCols = Array.from( + new Set(validatedData.flatMap((d) => d.allowedCols)), + ); const dbTx = finalDBtx || this.dboBuilder.dbo; - const validationOptions = { validate: validate as ValidateRowBasic, localParams }; + const validationOptions = { + validate: validate as ValidateRowBasic, + localParams, + }; // const query = await this.colSet.getInsertQuery(validatedRows, allowedCols, dbTx, validate, localParams); - const query = (await this.dataValidator.parse({ command: "insert", rows: validatedRows, allowedCols, dbTx, validationOptions })).getQuery(); + const query = ( + await this.dataValidator.parse({ + command: "insert", + rows: validatedRows, + allowedCols, + dbTx, + validationOptions, + }) + ).getQuery(); const { onConflict } = insertParams ?? 
{}; let conflict_query = ""; if (onConflict === "DoNothing") { conflict_query = " ON CONFLICT DO NOTHING "; - } else if(onConflict === "DoUpdate"){ - if(!pkeyNames.length) { + } else if (onConflict === "DoUpdate") { + if (!pkeyNames.length) { throw "Cannot do DoUpdate on a table without a primary key"; } - const nonPkeyCols = allowedCols.filter(c => !pkeyNames.includes(c)).map(v => asName(v)); - if(!nonPkeyCols.length){ + const nonPkeyCols = allowedCols + .filter((c) => !pkeyNames.includes(c)) + .map((v) => asName(v)); + if (!nonPkeyCols.length) { throw "Cannot on conflict DoUpdate on a table with only primary key columns"; } - conflict_query = ` ON CONFLICT (${pkeyNames.join(", ")}) DO UPDATE SET ${nonPkeyCols.map(k => `${k} = EXCLUDED.${k}`).join(", ")}`; + conflict_query = ` ON CONFLICT (${pkeyNames.join(", ")}) DO UPDATE SET ${nonPkeyCols.map((k) => `${k} = EXCLUDED.${k}`).join(", ")}`; } return query + conflict_query; }; - - let query = ""; - if (Array.isArray(data)) { - if(!data.length){ + let query = ""; + if (Array.isArray(data)) { + if (!data.length) { throw "Empty insert. Provide data"; } - query = await getInsertQuery(data); + query = await getInsertQuery(data); } else { - query = await getInsertQuery([data ?? {}]); + query = await getInsertQuery([data ?? {}]); } const queryWithoutUserRLS = query; @@ -129,55 +192,88 @@ export async function insert(this: TableHandler, rowOrRows: AnyObject | AnyObjec } const result = await runInsertUpdateQuery({ - rule, - localParams, - queryWithoutUserRLS, - tableHandler: this, - returningFields, + rule, + localParams, + queryWithoutUserRLS, + tableHandler: this, + returningFields, data: preValidatedrowOrRows, fields, params: insertParams, type: "insert", isMultiInsert, }); - await this._log({ command: "insert", localParams, data: { rowOrRows, param2: insertParams }, duration: Date.now() - start }); + await this._log({ + command: "insert", + localParams, + data: { rowOrRows, param2: insertParams }, + duration: Date.now() - start, + }); return result; } catch (e) { - await this._log({ command: "insert", localParams, data: { rowOrRows, param2: insertParams }, duration: Date.now() - start, error: getErrorAsObject(e) }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + await this._log({ + command: "insert", + localParams, + data: { rowOrRows, param2: insertParams }, + duration: Date.now() - start, + error: getErrorAsObject(e), + }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } -} +} const validateInsertParams = (params: InsertParams | undefined) => { - const { onConflict, returnType, returning } = params ?? {}; - if(![undefined, "DoNothing", "DoUpdate"].includes(onConflict)){ + if (![undefined, "DoNothing", "DoUpdate"].includes(onConflict)) { throw `Invalid onConflict: ${onConflict}. Expecting one of: DoNothing, DoUpdate`; } - const allowedReturnTypes: InsertParams["returnType"][] = ["row", "value", "values", "statement", undefined] - if(!allowedReturnTypes.includes(returnType)){ - throw `Invalid return type ${returnType}. Expecting one of: ${allowedReturnTypes}` + const allowedReturnTypes: InsertParams["returnType"][] = [ + "row", + "value", + "values", + "statement", + undefined, + ]; + if (!allowedReturnTypes.includes(returnType)) { + throw `Invalid return type ${returnType}. 
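The `onConflict` options above compile to plain SQL conflict clauses; `DoUpdate` additionally requires a primary key and at least one non-key inserted column. Assuming a hypothetical table `items` with primary key `id`:

```typescript
await dbo.items.insert({ id: 1, name: "a" }, { onConflict: "DoNothing" });
// -> INSERT ... ON CONFLICT DO NOTHING

await dbo.items.insert({ id: 1, name: "a" }, { onConflict: "DoUpdate" });
// -> INSERT ... ON CONFLICT (id) DO UPDATE SET "name" = EXCLUDED."name"
```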
Expecting one of: ${allowedReturnTypes}`; } - if(returnType && returnType !== "statement" && !returning){ + if (returnType && returnType !== "statement" && !returning) { throw `Must specify returning when using a non statement returnType: ${returnType}`; } if (params) { - const good_paramsObj: Record = { returning: 1, returnType: 1, removeDisallowedFields: 1, onConflict: 1 }; + const good_paramsObj: Record = { + returning: 1, + returnType: 1, + removeDisallowedFields: 1, + onConflict: 1, + }; const good_params = Object.keys(good_paramsObj); - const bad_params = Object.keys(params).filter(k => !good_params.includes(k)); - if (bad_params && bad_params.length) throw "Invalid params: " + bad_params.join(", ") + " \n Expecting: " + good_params.join(", "); + const bad_params = Object.keys(params).filter( + (k) => !good_params.includes(k), + ); + if (bad_params && bad_params.length) + throw ( + "Invalid params: " + + bad_params.join(", ") + + " \n Expecting: " + + good_params.join(", ") + ); } -} +}; // const removeBuffers = (o: any) => { // if(isPlainObject(o)){ // return JSON.stringify(getKeys(o).reduce((a, k) => { // const value = o[k] -// return { ...a, [k]: Buffer.isBuffer(value)? `Buffer[${value.byteLength}][...REMOVED]` : value +// return { ...a, [k]: Buffer.isBuffer(value)? `Buffer[${value.byteLength}][...REMOVED]` : value // } // }, {})); // } -// } \ No newline at end of file +// } diff --git a/lib/DboBuilder/TableHandler/insertTest.ts b/lib/DboBuilder/TableHandler/insertTest.ts index 39b4c058..00591807 100644 --- a/lib/DboBuilder/TableHandler/insertTest.ts +++ b/lib/DboBuilder/TableHandler/insertTest.ts @@ -8,7 +8,10 @@ type InsertTestArgs = { tableRules: TableRule | undefined; localParams: LocalParams | undefined; }; -export async function insertTest(this: TableHandler, { localParams, tableRules }: InsertTestArgs) { +export async function insertTest( + this: TableHandler, + { localParams, tableRules }: InsertTestArgs, +) { const { testRule } = localParams || {}; const ACTION = "insert"; @@ -27,9 +30,11 @@ export async function insertTest(this: TableHandler, { localParams, tableRules } fields = tableRules[ACTION].fields; /* If no returning fields specified then take select fields as returning or the allowed insert fields */ - if (!returningFields) returningFields = tableRules?.select?.fields || tableRules?.insert.fields; + if (!returningFields) + returningFields = tableRules?.select?.fields || tableRules?.insert.fields; - if (!fields) throw ` invalid insert rule for ${this.name} -> fields missing `; + if (!fields) + throw ` invalid insert rule for ${this.name} -> fields missing `; /* Safely test publish rules */ if (testRule) { @@ -51,13 +56,14 @@ export async function insertTest(this: TableHandler, { localParams, tableRules } if (keys.length) { const dataCols = keys.filter((k) => this.column_names.includes(k)); const nestedInsertCols = keys.filter( - (k) => !this.column_names.includes(k) && this.dboBuilder.dbo[k]?.insert + (k) => + !this.column_names.includes(k) && this.dboBuilder.dbo[k]?.insert, ); if (nestedInsertCols.length) { throw `Nested insert not supported for forcedData rule: ${nestedInsertCols}`; } const badCols = keys.filter( - (k) => !dataCols.includes(k) && !nestedInsertCols.includes(k) + (k) => !dataCols.includes(k) && !nestedInsertCols.includes(k), ); if (badCols.length) { throw `Invalid columns found in forced filter: ${badCols.join(", ")}`; @@ -70,7 +76,7 @@ export async function insertTest(this: TableHandler, { localParams, tableRules } (k) => asValue(forcedData![k]) 
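`validateInsertParams` above pins `returnType` to a fixed list and requires `returning` for every non-`statement` variant:

```typescript
const row = { name: "a" };

await dbo.items.insert(row, { returning: "*", returnType: "row" }); // single row back
await dbo.items.insert(row, { returnType: "statement" }); // only variant not requiring `returning`
await dbo.items.insert(row, { returnType: "value" });
// -> Must specify returning when using a non statement returnType: value
```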
+ "::" + - this.columns.find((c) => c.name === k)!.udt_name + this.columns.find((c) => c.name === k)!.udt_name, ) .join(", ") + ")", @@ -79,7 +85,7 @@ export async function insertTest(this: TableHandler, { localParams, tableRules } "EXPLAIN INSERT INTO " + this.escapedName + " (${colNames:raw}) SELECT * FROM ( VALUES ${values:raw} ) t WHERE FALSE;", - { colNames, values } + { colNames, values }, ); await this.db.any(query); } catch (e) { diff --git a/lib/DboBuilder/TableHandler/onDeleteFromFileTable.ts b/lib/DboBuilder/TableHandler/onDeleteFromFileTable.ts index 65e28049..c0f80b87 100644 --- a/lib/DboBuilder/TableHandler/onDeleteFromFileTable.ts +++ b/lib/DboBuilder/TableHandler/onDeleteFromFileTable.ts @@ -10,29 +10,44 @@ type OnDeleteFromFileTableArgs = { filterOpts: { where: string; filter: AnyObject; - } -} -export async function onDeleteFromFileTable(this: TableHandler, { localParams, queryType, returningQuery, filterOpts }: OnDeleteFromFileTableArgs){ - - if (!this.dboBuilder.prostgles.fileManager) throw new Error("fileManager missing") + }; +}; +export async function onDeleteFromFileTable( + this: TableHandler, + { + localParams, + queryType, + returningQuery, + filterOpts, + }: OnDeleteFromFileTableArgs, +) { + if (!this.dboBuilder.prostgles.fileManager) + throw new Error("fileManager missing"); if (this.dboBuilder.prostgles.opts.fileTable?.delayedDelete) { - return this.dbHandler[queryType](`UPDATE ${asName(this.name)} SET deleted = now() ${filterOpts.where} ${returningQuery ?? ""};`) + return this.dbHandler[queryType]( + `UPDATE ${asName(this.name)} SET deleted = now() ${filterOpts.where} ${returningQuery ?? ""};`, + ); } else { - const txDelete = async (tbl: TableHandler) => { if (!tbl.tx) throw new Error("Missing transaction object tx"); let files: { id: string; name: string }[] = []; const totalFiles = await tbl.count(filterOpts.filter); do { - const batch = await tbl.find(filterOpts.filter, { limit: 100, offset: files.length }); + const batch = await tbl.find(filterOpts.filter, { + limit: 100, + offset: files.length, + }); files = files.concat(batch); - } while(files.length < totalFiles) - - const fileManager = tbl.dboBuilder.prostgles.fileManager + } while (files.length < totalFiles); + + const fileManager = tbl.dboBuilder.prostgles.fileManager; if (!fileManager) throw new Error("fileManager missing"); for await (const file of files) { - await tbl.tx.t.any(`DELETE FROM ${asName(this.name)} WHERE id = \${id}`, file); + await tbl.tx.t.any( + `DELETE FROM ${asName(this.name)} WHERE id = \${id}`, + file, + ); } /** If any table delete fails then do not delete files */ for await (const file of files) { @@ -42,21 +57,20 @@ export async function onDeleteFromFileTable(this: TableHandler, { localParams, q } if (returningQuery) { - return files.map(f => pickKeys(f, ["id", "name"])); + return files.map((f) => pickKeys(f, ["id", "name"])); } return undefined; - } + }; if (localParams?.tx?.dbTX) { - return txDelete(localParams.tx.dbTX[this.name] as TableHandler) + return txDelete(localParams.tx.dbTX[this.name] as TableHandler); } else if (this.tx) { - return txDelete(this) + return txDelete(this); } else { - - return this.dboBuilder.getTX(tx => { - return txDelete(tx[this.name] as TableHandler) - }) + return this.dboBuilder.getTX((tx) => { + return txDelete(tx[this.name] as TableHandler); + }); } } -} \ No newline at end of file +} diff --git a/lib/DboBuilder/TableHandler/runInsertUpdateQuery.ts b/lib/DboBuilder/TableHandler/runInsertUpdateQuery.ts index 542e4243..8f84446b 100644 --- 
a/lib/DboBuilder/TableHandler/runInsertUpdateQuery.ts +++ b/lib/DboBuilder/TableHandler/runInsertUpdateQuery.ts @@ -1,37 +1,62 @@ -import { AnyObject, asName, FieldFilter, InsertParams, UpdateParams } from "prostgles-types"; +import { + AnyObject, + asName, + FieldFilter, + InsertParams, + UpdateParams, +} from "prostgles-types"; import { InsertRule, UpdateRule } from "../../PublishParser/PublishParser"; -import { getClientErrorFromPGError, LocalParams, withUserRLS } from "../DboBuilder"; +import { + getClientErrorFromPGError, + LocalParams, + withUserRLS, +} from "../DboBuilder"; import { getSelectItemQuery, TableHandler } from "./TableHandler"; type RunInsertUpdateQueryArgs = { tableHandler: TableHandler; - queryWithoutUserRLS: string; + queryWithoutUserRLS: string; localParams: LocalParams | undefined; fields: FieldFilter | undefined; returningFields: FieldFilter | undefined; -} & ({ - type: "insert"; - params: InsertParams | undefined - rule: InsertRule | undefined; - data: AnyObject | AnyObject[]; - isMultiInsert: boolean; - nestedInsertsResultsObj?: undefined; -} | { - type: "update"; - nestedInsertsResultsObj: Record; - params: UpdateParams | undefined - rule: UpdateRule | undefined; - data: undefined; -}); +} & ( + | { + type: "insert"; + params: InsertParams | undefined; + rule: InsertRule | undefined; + data: AnyObject | AnyObject[]; + isMultiInsert: boolean; + nestedInsertsResultsObj?: undefined; + } + | { + type: "update"; + nestedInsertsResultsObj: Record; + params: UpdateParams | undefined; + rule: UpdateRule | undefined; + data: undefined; + } +); export const runInsertUpdateQuery = async (args: RunInsertUpdateQueryArgs) => { - const { tableHandler, queryWithoutUserRLS, rule, localParams, fields, returningFields, params, nestedInsertsResultsObj } = args; + const { + tableHandler, + queryWithoutUserRLS, + rule, + localParams, + fields, + returningFields, + params, + nestedInsertsResultsObj, + } = args; const { name } = tableHandler; - const returningSelectItems = await tableHandler.prepareReturning(params?.returning, tableHandler.parseFieldFilter(returningFields)) + const returningSelectItems = await tableHandler.prepareReturning( + params?.returning, + tableHandler.parseFieldFilter(returningFields), + ); const { checkFilter, postValidate } = rule ?? {}; let checkCondition = "WHERE FALSE"; - if(checkFilter){ + if (checkFilter) { const checkCond = await tableHandler.prepareWhere({ select: undefined, localParams: undefined, @@ -43,7 +68,7 @@ export const runInsertUpdateQuery = async (args: RunInsertUpdateQueryArgs) => { } const hasReturning = !!returningSelectItems.length; const userRLS = withUserRLS(localParams, ""); - const escapedTableName = asName(name) + const escapedTableName = asName(name); const query = ` ${userRLS} WITH ${escapedTableName} AS ( @@ -62,9 +87,9 @@ export const runInsertUpdateQuery = async (args: RunInsertUpdateQueryArgs) => { ( SELECT json_agg(item) FROM ( - SELECT ${!hasReturning? "1" : getSelectItemQuery(returningSelectItems)} + SELECT ${!hasReturning ? "1" : getSelectItemQuery(returningSelectItems)} FROM ${escapedTableName} - WHERE ${hasReturning? "TRUE" : "FALSE"} + WHERE ${hasReturning ? 
"TRUE" : "FALSE"} ) item ) as modified_returning, ( @@ -80,55 +105,80 @@ export const runInsertUpdateQuery = async (args: RunInsertUpdateQueryArgs) => { `; const allowedFieldKeys = tableHandler.parseFieldFilter(fields); - let result: { - row_count: number | null; - modified: AnyObject[] | null; + let result: { + row_count: number | null; + modified: AnyObject[] | null; failed_check: AnyObject[] | null; modified_returning: AnyObject[] | null; }; - + const queryType = "one"; const tx = localParams?.tx?.t || tableHandler.tx?.t; if (tx) { - result = await tx[queryType](query).catch((err: any) => getClientErrorFromPGError(err, { type: "tableMethod", localParams, view: tableHandler, allowedKeys: allowedFieldKeys })); + result = await tx[queryType](query).catch((err: any) => + getClientErrorFromPGError(err, { + type: "tableMethod", + localParams, + view: tableHandler, + allowedKeys: allowedFieldKeys, + }), + ); } else { - result = await tableHandler.db.tx(t => (t as any)[queryType](query)).catch(err => getClientErrorFromPGError(err, { type: "tableMethod", localParams, view: tableHandler, allowedKeys: allowedFieldKeys })); + result = await tableHandler.db + .tx((t) => (t as any)[queryType](query)) + .catch((err) => + getClientErrorFromPGError(err, { + type: "tableMethod", + localParams, + view: tableHandler, + allowedKeys: allowedFieldKeys, + }), + ); } - if(checkFilter && result.failed_check?.length){ - throw new Error(`Insert ${name} records failed the check condition: ${JSON.stringify(checkFilter, null, 2)}`); + if (checkFilter && result.failed_check?.length) { + throw new Error( + `Insert ${name} records failed the check condition: ${JSON.stringify(checkFilter, null, 2)}`, + ); } const finalDBtx = tableHandler.getFinalDBtx(localParams); - if(postValidate){ - if(!finalDBtx) throw new Error("Unexpected: no dbTX for postValidate"); - if(!localParams) throw new Error("Unexpected: no localParams for postValidate"); + if (postValidate) { + if (!finalDBtx) throw new Error("Unexpected: no dbTX for postValidate"); + if (!localParams) + throw new Error("Unexpected: no localParams for postValidate"); const rows = result.modified ?? []; - for await (const row of rows){ - await postValidate({ row: row ?? {}, dbx: finalDBtx as any, localParams }) + for await (const row of rows) { + await postValidate({ + row: row ?? {}, + dbx: finalDBtx as any, + localParams, + }); } } let returnMany = false; - if(args.type === "update"){ + if (args.type === "update") { const { multi = true } = args.params || {}; - if(!multi && result.row_count && +result.row_count > 1){ + if (!multi && result.row_count && +result.row_count > 1) { throw `More than 1 row modified: ${result.row_count} rows affected`; } - if(hasReturning){ + if (hasReturning) { returnMany = multi; } - } else { - returnMany = args.isMultiInsert + returnMany = args.isMultiInsert; } - if(!hasReturning) return undefined; + if (!hasReturning) return undefined; - const modified_returning = result.modified_returning?.map(d => ({ ...d, ...nestedInsertsResultsObj })) + const modified_returning = result.modified_returning?.map((d) => ({ + ...d, + ...nestedInsertsResultsObj, + })); - return returnMany? modified_returning : modified_returning?.[0]; -} + return returnMany ? 
modified_returning : modified_returning?.[0]; +}; diff --git a/lib/DboBuilder/TableHandler/update.ts b/lib/DboBuilder/TableHandler/update.ts index 04523540..0587458c 100644 --- a/lib/DboBuilder/TableHandler/update.ts +++ b/lib/DboBuilder/TableHandler/update.ts @@ -7,7 +7,10 @@ import { getSerializedClientErrorFromPGError, withUserRLS, } from "../DboBuilder"; -import { getInsertTableRules, getReferenceColumnInserts } from "../insertNestedRecords"; +import { + getInsertTableRules, + getReferenceColumnInserts, +} from "../insertNestedRecords"; import { prepareNewData } from "./DataValidator"; import { runInsertUpdateQuery } from "./runInsertUpdateQuery"; import { TableHandler } from "./TableHandler"; @@ -19,7 +22,7 @@ export async function update( _newData: AnyObject, params?: UpdateParams, tableRules?: TableRule, - localParams?: LocalParams + localParams?: LocalParams, ): Promise { const ACTION = "update"; const start = Date.now(); @@ -28,7 +31,13 @@ export async function update( const finalDBtx = this.getFinalDBtx(localParams); const wrapInTx = () => this.dboBuilder.getTX((_dbtx) => - _dbtx[this.name]?.[ACTION]?.(filter, _newData, params, tableRules, localParams) + _dbtx[this.name]?.[ACTION]?.( + filter, + _newData, + params, + tableRules, + localParams, + ), ); const rule = tableRules?.[ACTION]; if (rule?.postValidate && !finalDBtx) { @@ -37,10 +46,20 @@ export async function update( let newData = _newData; if (this.is_media) { - ({ newData } = await updateFile.bind(this)({ newData, filter, localParams, tableRules })); + ({ newData } = await updateFile.bind(this)({ + newData, + filter, + localParams, + tableRules, + })); } - const parsedRules = await this.parseUpdateRules(filter, params, tableRules, localParams); + const parsedRules = await this.parseUpdateRules( + filter, + params, + tableRules, + localParams, + ); if (localParams?.testRule) { return parsedRules; } @@ -49,8 +68,14 @@ export async function update( throw "no update data provided\nEXPECTING db.table.update(filter, updateData, options)"; } - const { fields, validateRow, forcedData, returningFields, forcedFilter, filterFields } = - parsedRules; + const { + fields, + validateRow, + forcedData, + returningFields, + forcedFilter, + filterFields, + } = parsedRules; const { removeDisallowedFields = false } = params || {}; const { returnQuery = false } = localParams ?? {}; @@ -62,10 +87,15 @@ export async function update( multi: 1, }; const good_params = Object.keys(good_paramsObj); - const bad_params = Object.keys(params).filter((k) => !good_params.includes(k)); + const bad_params = Object.keys(params).filter( + (k) => !good_params.includes(k), + ); if (bad_params && bad_params.length) throw ( - "Invalid params: " + bad_params.join(", ") + " \n Expecting: " + good_params.join(", ") + "Invalid params: " + + bad_params.join(", ") + + " \n Expecting: " + + good_params.join(", ") ); } @@ -104,12 +134,18 @@ export async function update( } await Promise.all( nestedInserts.map(async (nestedInsert) => { - const nesedTableHandler = finalDBtx[nestedInsert.tableName] as TableHandler | undefined; + const nesedTableHandler = finalDBtx[nestedInsert.tableName] as + | TableHandler + | undefined; if (!nesedTableHandler) throw `nestedInsert Tablehandler not found for ${nestedInsert.tableName}`; const refTableRules = !localParams ? 
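`update` above also resolves reference-column inserts: an object value on a referencing column is first inserted into the referenced table (inside the same transaction, under that table's insert rules obtained via `getInsertTableRules`), and its result is folded back into the returned row. A sketch with hypothetical tables, assuming `orders.customer_id` references `customers`:

```typescript
await dbo.orders.update(
  { id: 1 },
  // the object on FK column "customer_id" triggers a nested insert into "customers"
  { status: "assigned", customer_id: { name: "New customer" } },
);
```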
undefined - : await getInsertTableRules(this, nestedInsert.tableName, localParams); + : await getInsertTableRules( + this, + nestedInsert.tableName, + localParams, + ); const nestedLocalParams: LocalParams = { ...localParams, nestedInsert: { @@ -124,7 +160,7 @@ export async function update( { returning: "*" }, undefined, refTableRules, - nestedLocalParams + nestedLocalParams, ); nestedInsertsResultsObj[nestedInsert.col] = nestedInsertResult; @@ -133,7 +169,7 @@ export async function update( ...nestedInsert, result: nestedInsertResult, }; - }) + }), ); } @@ -179,6 +215,10 @@ export async function update( duration: Date.now() - start, error: getErrorAsObject(e), }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } } diff --git a/lib/DboBuilder/TableHandler/updateBatch.ts b/lib/DboBuilder/TableHandler/updateBatch.ts index a3b38d1e..0b301fa9 100644 --- a/lib/DboBuilder/TableHandler/updateBatch.ts +++ b/lib/DboBuilder/TableHandler/updateBatch.ts @@ -1,49 +1,87 @@ import { AnyObject, UpdateParams } from "prostgles-types"; import { TableRule } from "../../PublishParser/PublishParser"; -import { Filter, LocalParams, getClientErrorFromPGError, getErrorAsObject, getSerializedClientErrorFromPGError, withUserRLS } from "../DboBuilder"; +import { + Filter, + LocalParams, + getClientErrorFromPGError, + getErrorAsObject, + getSerializedClientErrorFromPGError, + withUserRLS, +} from "../DboBuilder"; import { TableHandler } from "./TableHandler"; - -export async function updateBatch(this: TableHandler, updates: [Filter, AnyObject][], params?: UpdateParams, _?: undefined, tableRules?: TableRule, localParams?: LocalParams): Promise { +export async function updateBatch( + this: TableHandler, + updates: [Filter, AnyObject][], + params?: UpdateParams, + _?: undefined, + tableRules?: TableRule, + localParams?: LocalParams, +): Promise { const start = Date.now(); try { const { checkFilter, postValidate } = tableRules?.update ?? {}; - if(checkFilter || postValidate){ - throw `updateBatch not allowed for tables with checkFilter or postValidate rules` + if (checkFilter || postValidate) { + throw `updateBatch not allowed for tables with checkFilter or postValidate rules`; } const updateQueries: string[] = await Promise.all( - updates.map(async ([filter, data]) => { + updates.map(async ([filter, data]) => { const query = (await this.update( filter, data, { ...(params ?? {}), returning: undefined }, tableRules, - { ...(localParams ?? {}), returnQuery: "noRLS" } + { ...(localParams ?? {}), returnQuery: "noRLS" }, )) as unknown as string; return query; - }) + }), ); - const queries = [ - withUserRLS(localParams, ""), - ...updateQueries - ]; - + const queries = [withUserRLS(localParams, ""), ...updateQueries]; + const t = localParams?.tx?.t ?? 
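`updateBatch` above compiles each `[filter, data]` pair into an UPDATE (reusing `update` with `returnQuery: "noRLS"`) and executes them in a single transaction; it is rejected outright for tables with `checkFilter` or `postValidate` rules, which need per-statement results:

```typescript
await dbo.items.updateBatch([
  [{ id: 1 }, { status: "done" }],
  [{ id: 2 }, { status: "failed" }],
]);
// runs as: <RLS setup>; UPDATE "items" SET ...; UPDATE "items" SET ...
// (one transaction, no return value)
```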
this.tx?.t; - if(t){ + if (t) { const result = await t.none(queries.join(";\n")); - await this._log({ command: "updateBatch", localParams, data: { data: updates, params }, duration: Date.now() - start }); + await this._log({ + command: "updateBatch", + localParams, + data: { data: updates, params }, + duration: Date.now() - start, + }); return result; } - const result = await this.db.tx(t => { + const result = await this.db + .tx((t) => { return t.none(queries.join(";\n")); }) - .catch(err => getClientErrorFromPGError(err, { type: "tableMethod", localParams, view: this, allowedKeys: [] })); + .catch((err) => + getClientErrorFromPGError(err, { + type: "tableMethod", + localParams, + view: this, + allowedKeys: [], + }), + ); - await this._log({ command: "updateBatch", localParams, data: { data: updates, params }, duration: Date.now() - start }); + await this._log({ + command: "updateBatch", + localParams, + data: { data: updates, params }, + duration: Date.now() - start, + }); return result; } catch (e) { - await this._log({ command: "updateBatch", localParams, data: { data: updates, params }, duration: Date.now() - start, error: getErrorAsObject(e) }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + await this._log({ + command: "updateBatch", + localParams, + data: { data: updates, params }, + duration: Date.now() - start, + error: getErrorAsObject(e), + }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } -} \ No newline at end of file +} diff --git a/lib/DboBuilder/TableHandler/updateFile.ts b/lib/DboBuilder/TableHandler/updateFile.ts index d8ff3f57..3c7b3917 100644 --- a/lib/DboBuilder/TableHandler/updateFile.ts +++ b/lib/DboBuilder/TableHandler/updateFile.ts @@ -1,48 +1,79 @@ import { AnyObject, getKeys, isObject } from "prostgles-types"; import { LocalParams, Media } from "../DboBuilder"; -import { TableRule, ValidateRow, ValidateRowBasic } from "../../PublishParser/PublishParser"; +import { + TableRule, + ValidateRow, + ValidateRowBasic, +} from "../../PublishParser/PublishParser"; import { omitKeys } from "../../PubSubManager/PubSubManager"; import { isFile, uploadFile } from "../uploadFile"; import { TableHandler } from "./TableHandler"; import { DBOFullyTyped } from "../../DBSchemaBuilder"; type Args = { - newData: AnyObject; + newData: AnyObject; filter: AnyObject; - tableRules: TableRule | undefined; + tableRules: TableRule | undefined; localParams: LocalParams | undefined; -} -export const updateFile = async function(this: TableHandler, { filter, newData, tableRules, localParams }: Args): Promise<{ newData: AnyObject }> { - +}; +export const updateFile = async function ( + this: TableHandler, + { filter, newData, tableRules, localParams }: Args, +): Promise<{ newData: AnyObject }> { const rule = tableRules?.update; - if(tableRules && !tableRules.update){ - throw "Not allowed" + if (tableRules && !tableRules.update) { + throw "Not allowed"; } - if(localParams?.testRule){ + if (localParams?.testRule) { return { newData: {} }; } - const existingMediaId: string = !(!filter || !isObject(filter) || getKeys(filter).join() !== "id" || typeof (filter as any).id !== "string")? (filter as any).id : undefined - if(!existingMediaId){ - throw new Error(`Updating the file table with file data can only be done by providing a single id filter. E.g. 
{ id: "9ea4e23c-2b1a-4e33-8ec0-c15919bb45ec" } `); + const existingMediaId: string = !( + !filter || + !isObject(filter) || + getKeys(filter).join() !== "id" || + typeof (filter as any).id !== "string" + ) + ? (filter as any).id + : undefined; + if (!existingMediaId) { + throw new Error( + `Updating the file table with file data can only be done by providing a single id filter. E.g. { id: "9ea4e23c-2b1a-4e33-8ec0-c15919bb45ec" } `, + ); } - if(!isFile(newData)){ - throw new Error("Expecting { data: Buffer, name: string } but received " + JSON.stringify(newData)) + if (!isFile(newData)) { + throw new Error( + "Expecting { data: Buffer, name: string } but received " + + JSON.stringify(newData), + ); } - const fileManager = this.dboBuilder.prostgles.fileManager - if(!fileManager) throw new Error("fileManager missing"); - if(rule?.validate && !localParams) throw new Error("localParams missing"); - const validate: ValidateRowBasic | undefined = rule?.validate? async (row) => { - return rule.validate!({ update: row, filter, dbx: (this.tx?.dbTX || this.dboBuilder.dbo) as any, localParams: localParams! }) - } : undefined; + const fileManager = this.dboBuilder.prostgles.fileManager; + if (!fileManager) throw new Error("fileManager missing"); + if (rule?.validate && !localParams) throw new Error("localParams missing"); + const validate: ValidateRowBasic | undefined = rule?.validate + ? async (row) => { + return rule.validate!({ + update: row, + filter, + dbx: (this.tx?.dbTX || this.dboBuilder.dbo) as any, + localParams: localParams!, + }); + } + : undefined; + + const existingFile: Media | undefined = await ( + (localParams?.tx?.dbTX?.[this.name] as TableHandler) || this + ).findOne({ id: existingMediaId }); - const existingFile: Media | undefined = await (localParams?.tx?.dbTX?.[this.name] as TableHandler || this).findOne({ id: existingMediaId }); - - if(!existingFile?.name) throw new Error("Existing file record not found"); + if (!existingFile?.name) throw new Error("Existing file record not found"); await fileManager.deleteFile(existingFile.name); - const newFile = await uploadFile.bind(this)({ row: newData, validate, localParams, mediaId: existingFile.id }) + const newFile = await uploadFile.bind(this)({ + row: newData, + validate, + localParams, + mediaId: existingFile.id, + }); return { newData: omitKeys(newFile, ["id"]) }; - -} \ No newline at end of file +}; diff --git a/lib/DboBuilder/TableHandler/upsert.ts b/lib/DboBuilder/TableHandler/upsert.ts index 8f26d855..7dc0887a 100644 --- a/lib/DboBuilder/TableHandler/upsert.ts +++ b/lib/DboBuilder/TableHandler/upsert.ts @@ -1,33 +1,78 @@ import { AnyObject, UpdateParams } from "prostgles-types"; import { TableRule } from "../../PublishParser/publishTypesAndUtils"; import { Filter, LocalParams } from "../DboBuilderTypes"; -import { getErrorAsObject, getSerializedClientErrorFromPGError } from "../dboBuilderUtils"; +import { + getErrorAsObject, + getSerializedClientErrorFromPGError, +} from "../dboBuilderUtils"; import { TableHandler } from "./TableHandler"; -export const upsert = async function(this: TableHandler, filter: Filter, newData: AnyObject, params?: UpdateParams, table_rules?: TableRule, localParams?: LocalParams): Promise { +export const upsert = async function ( + this: TableHandler, + filter: Filter, + newData: AnyObject, + params?: UpdateParams, + table_rules?: TableRule, + localParams?: LocalParams, +): Promise { const start = Date.now(); try { const _upsert = async function (tblH: TableHandler) { - return tblH.find(filter, { select: 
"", limit: 1 }, undefined, table_rules, localParams) - .then(exists => { + return tblH + .find( + filter, + { select: "", limit: 1 }, + undefined, + table_rules, + localParams, + ) + .then((exists) => { if (exists && exists.length) { - return tblH.update(filter, newData, params, table_rules, localParams); + return tblH.update( + filter, + newData, + params, + table_rules, + localParams, + ); } else { - return tblH.insert({ ...newData, ...filter }, params, undefined, table_rules, localParams); + return tblH.insert( + { ...newData, ...filter }, + params, + undefined, + table_rules, + localParams, + ); } }); - } + }; /* Do it within a transaction to ensure consisency */ if (!this.tx) { - return this.dboBuilder.getTX(dbTX => _upsert(dbTX[this.name] as TableHandler)) + return this.dboBuilder.getTX((dbTX) => + _upsert(dbTX[this.name] as TableHandler), + ); } const result = await _upsert(this); - await this._log({ command: "upsert", localParams, data: { filter, newData, params }, duration: Date.now() - start }); + await this._log({ + command: "upsert", + localParams, + data: { filter, newData, params }, + duration: Date.now() - start, + }); return result; - } catch (e) { - await this._log({ command: "upsert", localParams, data: { filter, newData, params }, duration: Date.now() - start, error: getErrorAsObject(e) }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + await this._log({ + command: "upsert", + localParams, + data: { filter, newData, params }, + duration: Date.now() - start, + error: getErrorAsObject(e), + }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } -} \ No newline at end of file +}; diff --git a/lib/DboBuilder/ViewHandler/ViewHandler.ts b/lib/DboBuilder/ViewHandler/ViewHandler.ts index 9c43bc00..80b0e831 100644 --- a/lib/DboBuilder/ViewHandler/ViewHandler.ts +++ b/lib/DboBuilder/ViewHandler/ViewHandler.ts @@ -1,11 +1,13 @@ -import * as pgPromise from 'pg-promise'; +import * as pgPromise from "pg-promise"; import { AnyObject, - ColumnInfo, FieldFilter, SelectParams, + ColumnInfo, + FieldFilter, + SelectParams, SubscribeParams, asName, isEmpty, - isObject + isObject, } from "prostgles-types"; import { TableEvent } from "../../Logging"; import { DB } from "../../Prostgles"; @@ -16,12 +18,13 @@ import { DboBuilder, Filter, LocalParams, - TableHandlers, ValidatedTableRules, + TableHandlers, + ValidatedTableRules, escapeTSNames, getSerializedClientErrorFromPGError, - postgresToTsType + postgresToTsType, } from "../DboBuilder"; -import { TableSchema } from '../DboBuilderTypes'; +import { TableSchema } from "../DboBuilderTypes"; import { COMPUTED_FIELDS, FieldSpec } from "../QueryBuilder/Functions"; import { asNameAlias } from "../QueryBuilder/QueryBuilder"; import { getColumns } from "../getColumns"; @@ -57,7 +60,7 @@ export class ViewHandler { tx?: { t: pgPromise.ITask<{}>; dbTX: TableHandlers; - } + }; get dbHandler() { return this.tx?.t ?? 
this.db; } @@ -65,7 +68,13 @@ export class ViewHandler { is_view = true; filterDef = ""; is_media = false; - constructor(db: DB, tableOrViewInfo: TableSchema, dboBuilder: DboBuilder, tx?: { t: pgPromise.ITask<{}>, dbTX: TableHandlers }, joinPaths?: JoinPaths) { + constructor( + db: DB, + tableOrViewInfo: TableSchema, + dboBuilder: DboBuilder, + tx?: { t: pgPromise.ITask<{}>; dbTX: TableHandlers }, + joinPaths?: JoinPaths + ) { if (!db || !tableOrViewInfo) throw ""; this.db = db; @@ -76,115 +85,161 @@ export class ViewHandler { this.escapedName = tableOrViewInfo.escaped_identifier; this.columns = tableOrViewInfo.columns; /* cols are sorted by name to reduce .d.ts schema rewrites */ - this.columnsForTypes = tableOrViewInfo.columns.slice(0).sort((a, b) => a.name.localeCompare(b.name)); + this.columnsForTypes = tableOrViewInfo.columns + .slice(0) + .sort((a, b) => a.name.localeCompare(b.name)); + + this.column_names = tableOrViewInfo.columns.map((c) => c.name); - this.column_names = tableOrViewInfo.columns.map(c => c.name); - this.dboBuilder = dboBuilder; - this.joins = this.dboBuilder.joins ?? []; + this.joins = this.dboBuilder.joins ?? []; this.columnsForTypes.map(({ name, udt_name, is_nullable }) => { - this.tsColumnDefs.push(`${escapeTSNames(name)}?: ${postgresToTsType(udt_name) as string} ${is_nullable ? " | null " : ""};`); + this.tsColumnDefs.push( + `${escapeTSNames(name)}?: ${postgresToTsType(udt_name) as string} ${is_nullable ? " | null " : ""};` + ); }); - } + } - _log = ({ command, data, localParams, duration, error }: Pick & { duration: number; error?: any; }) => { - if(localParams?.noLog){ - if(localParams?.socket || localParams.httpReq) { + _log = ({ + command, + data, + localParams, + duration, + error, + }: Pick & { + duration: number; + error?: any; + }) => { + if (localParams?.noLog) { + if (localParams?.socket || localParams.httpReq) { throw new Error("noLog option is not allowed from a remote client"); } return; } const sid = this.dboBuilder.prostgles.authHandler?.getSIDNoError(localParams); - return this.dboBuilder.prostgles.opts.onLog?.({ - type: "table", - command, - duration, + return this.dboBuilder.prostgles.opts.onLog?.({ + type: "table", + command, + duration, error, txInfo: this.tx?.t.ctx, - sid, - socketId: localParams?.socket?.id, - tableName: this.name, - data, - localParams, + sid, + socketId: localParams?.socket?.id, + tableName: this.name, + data, + localParams, }); - } + }; getRowHashSelect(allowedFields: FieldFilter, alias?: string, tableAlias?: string): string { let allowed_cols = this.column_names; if (allowedFields) allowed_cols = this.parseFieldFilter(allowedFields); - return "md5(" + + return ( + "md5(" + allowed_cols /* CTID not available in AFTER trigger */ // .concat(this.is_view? [] : ["ctid"]) .sort() - .map(f => (tableAlias ? (asName(tableAlias) + ".") : "") + asName(f)) - .map(f => `md5(coalesce(${f}::text, 'dd'))`) + .map((f) => (tableAlias ? asName(tableAlias) + "." : "") + asName(f)) + .map((f) => `md5(coalesce(${f}::text, 'dd'))`) .join(" || ") + - `)` + (alias ? ` as ${asName(alias)}` : ""); + `)` + + (alias ? 
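// With columns a, b and alias "h" (illustrative) this yields:
//   md5(md5(coalesce("a"::text, 'dd')) || md5(coalesce("b"::text, 'dd'))) as "h"
// Columns are sorted first so the hash is stable across column ordering.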
` as ${asName(alias)}` : "") + ); } validateViewRules = validateViewRules.bind(this); - getShortestJoin(table1: string, table2: string, startAlias: number, isInner = false): { query: string, toOne: boolean } { - const getJoinCondition = (on: Record[], leftTable: string, rightTable: string) => { - return on.map(cond => Object.keys(cond).map(lKey => `${leftTable}.${lKey} = ${rightTable}.${cond[lKey]}`).join("\nAND ")).join(" OR ") - } + getShortestJoin( + table1: string, + table2: string, + startAlias: number, + isInner = false + ): { query: string; toOne: boolean } { + const getJoinCondition = ( + on: Record[], + leftTable: string, + rightTable: string + ) => { + return on + .map((cond) => + Object.keys(cond) + .map((lKey) => `${leftTable}.${lKey} = ${rightTable}.${cond[lKey]}`) + .join("\nAND ") + ) + .join(" OR "); + }; // let toOne = true; - const query = this.joins.map(({ tables, on, type }, i) => { + const query = this.joins + .map(({ tables, on, type }, i) => { if (type.split("-")[1] === "many") { // toOne = false; } const tl = `tl${startAlias + i}`, tr = `tr${startAlias + i}`; return `FROM ${tables[0]} ${tl} ${isInner ? "INNER" : "LEFT"} JOIN ${tables[1]} ${tr} ON ${getJoinCondition(on, tl, tr)}`; - }).join("\n"); - return { query, toOne: false } + }) + .join("\n"); + return { query, toOne: false }; } checkFilter(filter: any) { - if (filter === null || filter && !isObject(filter)) throw `invalid filter -> ${JSON.stringify(filter)} \nExpecting: undefined | {} | { field_name: "value" } | { field: { $gt: 22 } } ... `; + if (filter === null || (filter && !isObject(filter))) + throw `invalid filter -> ${JSON.stringify(filter)} \nExpecting: undefined | {} | { field_name: "value" } | { field: { $gt: 22 } } ... `; } - getInfo = getInfo.bind(this) + getInfo = getInfo.bind(this); getColumns = getColumns.bind(this); getValidatedRules(tableRules?: TableRule, localParams?: LocalParams): ValidatedTableRules { - if (localParams?.socket && !tableRules) { throw "INTERNAL ERROR: Unexpected case -> localParams && !tableRules"; } /* Computed fields are allowed only if select is allowed */ - const allColumns: FieldSpec[] = this.column_names.slice(0).map(fieldName => ({ - type: "column", - name: fieldName, - getQuery: ({ tableAlias }) => asNameAlias(fieldName, tableAlias), - selected: false - } as FieldSpec)).concat(COMPUTED_FIELDS.map(c => ({ - type: c.type, - name: c.name, - getQuery: ({ tableAlias, allowedFields }) => c.getQuery({ - allowedFields, - ctidField: undefined, - allColumns: this.columns, - - /* CTID not available in AFTER trigger */ - // ctidField: this.is_view? undefined : "ctid", - tableAlias - }), - selected: false - }))); + const allColumns: FieldSpec[] = this.column_names + .slice(0) + .map( + (fieldName) => + ({ + type: "column", + name: fieldName, + getQuery: ({ tableAlias }) => asNameAlias(fieldName, tableAlias), + selected: false, + }) as FieldSpec + ) + .concat( + COMPUTED_FIELDS.map((c) => ({ + type: c.type, + name: c.name, + getQuery: ({ tableAlias, allowedFields }) => + c.getQuery({ + allowedFields, + ctidField: undefined, + allColumns: this.columns, + + /* CTID not available in AFTER trigger */ + // ctidField: this.is_view? 
undefined : "ctid", + tableAlias, + }), + selected: false, + })) + ); if (tableRules) { - if (isEmpty(tableRules)) throw "INTERNAL ERROR: Unexpected case -> Empty table rules for " + this.name; - const throwFieldsErr = (command: "select" | "update" | "delete" | "insert", fieldType = "fields") => { - throw `Invalid publish.${this.name}.${command} rule -> ${fieldType} setting is missing.\nPlease specify allowed ${fieldType} in this format: "*" | { col_name: false } | { col1: true, col2: true }`; - }, + if (isEmpty(tableRules)) + throw "INTERNAL ERROR: Unexpected case -> Empty table rules for " + this.name; + const throwFieldsErr = ( + command: "select" | "update" | "delete" | "insert", + fieldType = "fields" + ) => { + throw `Invalid publish.${this.name}.${command} rule -> ${fieldType} setting is missing.\nPlease specify allowed ${fieldType} in this format: "*" | { col_name: false } | { col1: true, col2: true }`; + }, getFirstSpecified = (...fieldParams: (FieldFilter | undefined)[]): string[] => { - const firstValid = fieldParams.find(fp => fp !== undefined); - return this.parseFieldFilter(firstValid) + const firstValid = fieldParams.find((fp) => fp !== undefined); + return this.parseFieldFilter(firstValid); }; const res: ValidatedTableRules = { @@ -197,19 +252,30 @@ export class ViewHandler { if (!tableRules.select.fields) return throwFieldsErr("select"); let maxLimit: number | null = null; - if (!localParams?.bypassLimit && tableRules.select.maxLimit !== undefined && tableRules.select.maxLimit !== maxLimit) { + if ( + !localParams?.bypassLimit && + tableRules.select.maxLimit !== undefined && + tableRules.select.maxLimit !== maxLimit + ) { const ml = tableRules.select.maxLimit; - if (ml !== null && (!Number.isInteger(ml) || ml < 0)) throw ` Invalid publish.${this.name}.select.maxLimit -> expecting a positive integer OR null but got ` + ml; + if (ml !== null && (!Number.isInteger(ml) || ml < 0)) + throw ( + ` Invalid publish.${this.name}.select.maxLimit -> expecting a positive integer OR null but got ` + + ml + ); maxLimit = ml; } - const fields = this.parseFieldFilter(tableRules.select.fields) + const fields = this.parseFieldFilter(tableRules.select.fields); res.select = { fields, - orderByFields: tableRules.select.orderByFields ? this.parseFieldFilter(tableRules.select.orderByFields) : fields, + orderByFields: + tableRules.select.orderByFields ? 
+ this.parseFieldFilter(tableRules.select.orderByFields) + : fields, forcedFilter: { ...tableRules.select.forcedFilter }, filterFields: this.parseFieldFilter(tableRules.select.filterFields), - maxLimit + maxLimit, }; } @@ -220,9 +286,13 @@ export class ViewHandler { fields: this.parseFieldFilter(tableRules.update.fields), forcedData: { ...tableRules.update.forcedData }, forcedFilter: { ...tableRules.update.forcedFilter }, - returningFields: getFirstSpecified(tableRules.update?.returningFields, tableRules?.select?.fields, tableRules.update.fields), - filterFields: this.parseFieldFilter(tableRules.update.filterFields) - } + returningFields: getFirstSpecified( + tableRules.update?.returningFields, + tableRules?.select?.fields, + tableRules.update.fields + ), + filterFields: this.parseFieldFilter(tableRules.update.filterFields), + }; } if (tableRules.insert) { @@ -231,8 +301,12 @@ export class ViewHandler { res.insert = { fields: this.parseFieldFilter(tableRules.insert.fields), forcedData: { ...tableRules.insert.forcedData }, - returningFields: getFirstSpecified(tableRules.insert.returningFields, tableRules?.select?.fields, tableRules.insert.fields) - } + returningFields: getFirstSpecified( + tableRules.insert.returningFields, + tableRules?.select?.fields, + tableRules.insert.fields + ), + }; } if (tableRules.delete) { @@ -241,8 +315,12 @@ export class ViewHandler { res.delete = { forcedFilter: { ...tableRules.delete.forcedFilter }, filterFields: this.parseFieldFilter(tableRules.delete.filterFields), - returningFields: getFirstSpecified(tableRules.delete.returningFields, tableRules?.select?.fields, tableRules.delete.filterFields) - } + returningFields: getFirstSpecified( + tableRules.delete.returningFields, + tableRules?.select?.fields, + tableRules.delete.filterFields + ), + }; } if (!tableRules.select && !tableRules.update && !tableRules.delete && !tableRules.insert) { @@ -269,65 +347,125 @@ export class ViewHandler { filterFields: allCols, forcedFilter: {}, forcedData: {}, - returningFields: allCols + returningFields: allCols, }, insert: { fields: allCols, forcedData: {}, - returningFields: allCols + returningFields: allCols, }, delete: { filterFields: allCols, forcedFilter: {}, - returningFields: allCols - } + returningFields: allCols, + }, }; - } - } find = find.bind(this); - - async findOne(filter?: Filter, selectParams?: SelectParams, _param3_unused?: undefined, table_rules?: TableRule, localParams?: LocalParams): Promise { + async findOne( + filter?: Filter, + selectParams?: SelectParams, + _param3_unused?: undefined, + table_rules?: TableRule, + localParams?: LocalParams + ): Promise { try { const { limit, ...params } = selectParams ?? 
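// findOne sketch (names illustrative): delegates to find() with
// { limit: 1, returnType: "row" }, so passing a limit here throws:
//   const user = await dbo.users.findOne({ id: 1 }, { select: { name: 1 } });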
{}; if (limit) { throw "limit not allowed in findOne()"; } const start = Date.now(); - const result = await this.find(filter, { ...params, limit: 1, returnType: "row" }, undefined, table_rules, localParams); - await this._log({ command: "find", localParams, data: { filter, selectParams }, duration: Date.now() - start }); + const result = await this.find( + filter, + { ...params, limit: 1, returnType: "row" }, + undefined, + table_rules, + localParams + ); + await this._log({ + command: "find", + localParams, + data: { filter, selectParams }, + duration: Date.now() - start, + }); return result; } catch (e) { - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } } - async subscribe(filter: Filter, params: SubscribeParams, localFuncs: LocalFuncs): Promise<{ unsubscribe: () => any }> - async subscribe(filter: Filter, params: SubscribeParams, localFuncs: undefined, table_rules: TableRule | undefined, localParams: LocalParams): Promise - async subscribe(filter: Filter, params: SubscribeParams, localFuncs?: LocalFuncs, table_rules?: TableRule, localParams?: LocalParams): - Promise<{ unsubscribe: () => any } | string> { + async subscribe( + filter: Filter, + params: SubscribeParams, + localFuncs: LocalFuncs + ): Promise<{ unsubscribe: () => any }>; + + async subscribe( + filter: Filter, + params: SubscribeParams, + localFuncs: undefined, + table_rules: TableRule | undefined, + localParams: LocalParams + ): Promise; + + async subscribe( + filter: Filter, + params: SubscribeParams, + localFuncs?: LocalFuncs, + table_rules?: TableRule, + localParams?: LocalParams + ): Promise<{ unsubscribe: () => any } | string> { + //@ts-ignore + return subscribe.bind(this)( + filter, + params, //@ts-ignore - return subscribe.bind(this)(filter, params, localFuncs, table_rules, localParams); + localFuncs, + table_rules, + localParams + ); } /* This should only be called from server */ - subscribeOne(filter: Filter, params: SubscribeParams, localFunc: (item: AnyObject) => any): Promise<{ unsubscribe: () => any }> - subscribeOne(filter: Filter, params: SubscribeParams, localFunc: undefined, table_rules: TableRule, localParams: LocalParams): Promise - subscribeOne(filter: Filter, params: SubscribeParams = {}, localFunc?: (item: AnyObject) => any, table_rules?: TableRule, localParams?: LocalParams): - Promise any }> { - - //@ts-ignore - const func = localParams? undefined : (rows: AnyObject[]) => localFunc(rows[0]); - //@ts-ignore - return this.subscribe(filter, { ...params, limit: 2 }, func, table_rules, localParams); + subscribeOne( + filter: Filter, + params: SubscribeParams, + localFunc: (item: AnyObject) => any + ): Promise<{ unsubscribe: () => any }>; + subscribeOne( + filter: Filter, + params: SubscribeParams, + localFunc: undefined, + table_rules: TableRule, + localParams: LocalParams + ): Promise; + subscribeOne( + filter: Filter, + params: SubscribeParams = {}, + localFunc?: (item: AnyObject) => any, + table_rules?: TableRule, + localParams?: LocalParams + ): Promise any }> { + //@ts-ignore + const func = localParams ? 
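// subscribeOne sketch (server-side; names illustrative): wraps subscribe()
// and hands the first matching row to the callback:
//   const sub = await dbo.users.subscribeOne({ id: 1 }, {}, (user) =>
//     console.log(user?.status),
//   );
//   // later: sub.unsubscribe();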
undefined : (rows: AnyObject[]) => localFunc(rows[0]); + //@ts-ignore + return this.subscribe(filter, { ...params, limit: 2 }, func, table_rules, localParams); } count = count.bind(this); size = size.bind(this); - getAllowedSelectFields(selectParams: FieldFilter = "*", allowed_cols: FieldFilter, allow_empty = true): string[] { + getAllowedSelectFields( + selectParams: FieldFilter = "*", + allowed_cols: FieldFilter, + allow_empty = true + ): string[] { const all_columns = this.column_names.slice(0); let allowedFields = all_columns.slice(0), resultFields: string[] = []; @@ -338,22 +476,26 @@ export class ViewHandler { if (allowed_cols) { allowedFields = this.parseFieldFilter(allowed_cols, allow_empty); } - let col_names = (resultFields || []).filter(f => !allowedFields || allowedFields.includes(f)); + let col_names = (resultFields || []).filter((f) => !allowedFields || allowedFields.includes(f)); /* Maintain allowed cols order */ - if (selectParams === "*" && allowedFields && allowedFields.length){ + if (selectParams === "*" && allowedFields && allowedFields.length) { col_names = allowedFields; } return col_names; - } + } /** * Parses group or simple filter */ - prepareWhere = prepareWhere.bind(this); + prepareWhere = prepareWhere.bind(this); - intersectColumns(allowedFields: FieldFilter, dissallowedFields: FieldFilter, removeDisallowedFields = false): string[] { + intersectColumns( + allowedFields: FieldFilter, + dissallowedFields: FieldFilter, + removeDisallowedFields = false + ): string[] { let result: string[] = []; if (allowedFields) { result = this.parseFieldFilter(allowedFields); @@ -362,32 +504,33 @@ export class ViewHandler { const _dissalowed = this.parseFieldFilter(dissallowedFields); if (!removeDisallowedFields) { - - throw `dissallowed/invalid field found for ${this.name}: ` + throw `dissallowed/invalid field found for ${this.name}: `; } - result = result.filter(key => !_dissalowed.includes(key)); + result = result.filter((key) => !_dissalowed.includes(key)); } return result; } - parseFieldFilter(fieldParams: FieldFilter = "*", allow_empty = true, allowed_cols?: string[]): string[] { - return parseFieldFilter(fieldParams, allow_empty, allowed_cols ?? this.column_names.slice(0)) + parseFieldFilter( + fieldParams: FieldFilter = "*", + allow_empty = true, + allowed_cols?: string[] + ): string[] { + return parseFieldFilter(fieldParams, allow_empty, allowed_cols ?? 
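// Accepted FieldFilter shapes, resolved against this table's columns
// (column names illustrative):
//   "*"                   -> all columns
//   ["name", "email"]     -> only the listed columns
//   { name: 1, email: 1 } -> only the listed columns
//   { password: 0 }       -> all columns except the listed ones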
this.column_names.slice(0)); } - } - -/** -* Throw error if illegal keys found in object -*/ +/** + * Throw error if illegal keys found in object + */ export const validateObj = >(obj: T, allowedKeys: string[]): T => { if (obj && Object.keys(obj).length) { - const invalid_keys = Object.keys(obj).filter(k => !allowedKeys.includes(k)); + const invalid_keys = Object.keys(obj).filter((k) => !allowedKeys.includes(k)); if (invalid_keys.length) { throw "Invalid/Illegal fields found: " + invalid_keys.join(", "); } } return obj; -} \ No newline at end of file +}; diff --git a/lib/DboBuilder/ViewHandler/count.ts b/lib/DboBuilder/ViewHandler/count.ts index b1d7a20b..6d735e31 100644 --- a/lib/DboBuilder/ViewHandler/count.ts +++ b/lib/DboBuilder/ViewHandler/count.ts @@ -1,38 +1,69 @@ import { SelectParams } from "prostgles-types"; import { TableRule } from "../../PublishParser/publishTypesAndUtils"; import { Filter, LocalParams } from "../DboBuilder"; -import { getErrorAsObject, getSerializedClientErrorFromPGError, withUserRLS } from "../dboBuilderUtils"; +import { + getErrorAsObject, + getSerializedClientErrorFromPGError, + withUserRLS, +} from "../dboBuilderUtils"; import { ViewHandler } from "./ViewHandler"; -export async function count(this: ViewHandler, _filter?: Filter, selectParams?: SelectParams, _param3_unused?: undefined, table_rules?: TableRule, localParams?: LocalParams): Promise { +export async function count( + this: ViewHandler, + _filter?: Filter, + selectParams?: SelectParams, + _param3_unused?: undefined, + table_rules?: TableRule, + localParams?: LocalParams, +): Promise { const filter = _filter || {}; const { limit: _limit, ...selectParamsWithoutLimit } = selectParams ?? {}; const start = Date.now(); try { - const result = await this.find(filter, { select: selectParamsWithoutLimit?.select ?? "", limit: 0 }, undefined, table_rules, localParams) - .then(async _allowed => { - const findQuery = await this.find( - filter, - selectParamsWithoutLimit, - undefined, - table_rules, - { ...localParams, returnQuery: "noRLS", bypassLimit: true } - ) as unknown as string; - const query = [ - withUserRLS(localParams, ""), - "SELECT COUNT(*)", - "FROM (", - findQuery, - ") t" - ].join("\n"); - const handler = this.tx?.t ?? this.db; - return handler.one(query).then(({ count }) => +count); - }); + const result = await this.find( + filter, + { select: selectParamsWithoutLimit?.select ?? "", limit: 0 }, + undefined, + table_rules, + localParams, + ).then(async (_allowed) => { + const findQuery = (await this.find( + filter, + selectParamsWithoutLimit, + undefined, + table_rules, + { ...localParams, returnQuery: "noRLS", bypassLimit: true }, + )) as unknown as string; + const query = [ + withUserRLS(localParams, ""), + "SELECT COUNT(*)", + "FROM (", + findQuery, + ") t", + ].join("\n"); + const handler = this.tx?.t ?? 
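// count() runs SELECT COUNT(*) over the query built by find(), so publish
// rules (forcedFilter, filterFields) still constrain the total, e.g.
// (names illustrative):
//   const active = await dbo.users.count({ status: "active" });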
this.db; + return handler.one(query).then(({ count }) => +count); + }); - await this._log({ command: "count", localParams, data: { filter }, duration: Date.now() - start }); + await this._log({ + command: "count", + localParams, + data: { filter }, + duration: Date.now() - start, + }); return result; } catch (e) { - await this._log({ command: "count", localParams, data: { filter }, duration: Date.now() - start, error: getErrorAsObject(e) }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + await this._log({ + command: "count", + localParams, + data: { filter }, + duration: Date.now() - start, + error: getErrorAsObject(e), + }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } -} \ No newline at end of file +} diff --git a/lib/DboBuilder/ViewHandler/find.ts b/lib/DboBuilder/ViewHandler/find.ts index 61f6f19c..cb2e8801 100644 --- a/lib/DboBuilder/ViewHandler/find.ts +++ b/lib/DboBuilder/ViewHandler/find.ts @@ -1,7 +1,13 @@ - import { SelectParams, isObject } from "prostgles-types"; import { TableRule } from "../../PublishParser/PublishParser"; -import { Filter, LocalParams, getClientErrorFromPGError, getErrorAsObject, getSerializedClientErrorFromPGError, withUserRLS } from "../DboBuilder"; +import { + Filter, + LocalParams, + getClientErrorFromPGError, + getErrorAsObject, + getSerializedClientErrorFromPGError, + withUserRLS, +} from "../DboBuilder"; import { getNewQuery } from "../QueryBuilder/getNewQuery"; import { getSelectQuery } from "../QueryBuilder/getSelectQuery"; import { NewQuery } from "../QueryBuilder/QueryBuilder"; @@ -9,68 +15,104 @@ import { canRunSQL } from "../runSQL"; import { TableHandler } from "../TableHandler/TableHandler"; import { ViewHandler } from "./ViewHandler"; -export const find = async function(this: ViewHandler, filter?: Filter, selectParams?: SelectParams, _?: undefined, tableRules?: TableRule, localParams?: LocalParams): Promise { +export const find = async function ( + this: ViewHandler, + filter?: Filter, + selectParams?: SelectParams, + _?: undefined, + tableRules?: TableRule, + localParams?: LocalParams, +): Promise { const start = Date.now(); - const command = selectParams?.limit === 1 && selectParams?.returnType === "row"? "findOne" : "find"; + const command = + selectParams?.limit === 1 && selectParams?.returnType === "row" + ? 
"findOne" + : "find"; try { filter = filter || {}; - const allowedReturnTypes = Object.keys({ - row: 1, statement: 1, value: 1, values: 1, - "statement-no-rls": 1, "statement-where": 1, + const allowedReturnTypes = Object.keys({ + row: 1, + statement: 1, + value: 1, + values: 1, + "statement-no-rls": 1, + "statement-where": 1, } satisfies Record["returnType"], 1>); const { returnType } = selectParams || {}; if (returnType && !allowedReturnTypes.includes(returnType)) { - throw `returnType (${returnType}) can only be ${allowedReturnTypes.join(" OR ")}` + throw `returnType (${returnType}) can only be ${allowedReturnTypes.join(" OR ")}`; } const { testRule = false } = localParams || {}; if (testRule) return []; if (selectParams) { - const validParamNames = Object.keys({ - "select": 1, "orderBy": 1, "offset": 1, "limit": 1, - "returnType": 1, "groupBy": 1, "having": 1 + const validParamNames = Object.keys({ + select: 1, + orderBy: 1, + offset: 1, + limit: 1, + returnType: 1, + groupBy: 1, + having: 1, } satisfies Record); - - const invalidParams = Object.keys(selectParams).filter(k => !validParamNames.includes(k as any)); - if (invalidParams && invalidParams.length) throw "Invalid params: " + invalidParams.join(", ") + " \n Expecting: " + validParamNames.join(", "); + + const invalidParams = Object.keys(selectParams).filter( + (k) => !validParamNames.includes(k as any), + ); + if (invalidParams && invalidParams.length) + throw ( + "Invalid params: " + + invalidParams.join(", ") + + " \n Expecting: " + + validParamNames.join(", ") + ); } /* Validate publish */ - if (tableRules) { - + if (tableRules) { if (!tableRules.select) throw "select rules missing for " + this.name; - const fields = tableRules.select.fields; + const fields = tableRules.select.fields; const maxLimit = tableRules.select.maxLimit; - if (tableRules.select !== "*" && typeof tableRules.select !== "boolean" && !isObject(tableRules.select)) { + if ( + tableRules.select !== "*" && + typeof tableRules.select !== "boolean" && + !isObject(tableRules.select) + ) { throw `\nInvalid publish.${this.name}.select\nExpecting any of: "*" | { fields: "*" } | true | false`; } if (!fields) { throw ` invalid ${this.name}.select rule -> fields (required) setting missing.\nExpecting any of: "*" | { col_name: false } | { col1: true, col2: true }`; } if (maxLimit && !Number.isInteger(maxLimit)) { - throw ` invalid publish.${this.name}.select.maxLimit -> expecting integer but got ` + maxLimit; + throw ( + ` invalid publish.${this.name}.select.maxLimit -> expecting integer but got ` + + maxLimit + ); } } - const _selectParams = selectParams ?? {} - const selectParamsLimitCheck = localParams?.bypassLimit && !Number.isFinite(_selectParams.limit)? { ..._selectParams, limit: null } : { limit: 1000, ..._selectParams } + const _selectParams = selectParams ?? {}; + const selectParamsLimitCheck = + localParams?.bypassLimit && !Number.isFinite(_selectParams.limit) + ? 
{ ..._selectParams, limit: null } + : { limit: 1000, ..._selectParams }; const newQuery = await getNewQuery( - this, - filter, - selectParamsLimitCheck, - _, - tableRules, - localParams, + this, + filter, + selectParamsLimitCheck, + _, + tableRules, + localParams, ); const queryWithoutRLS = getSelectQuery( - this, - newQuery, - undefined, - !!selectParamsLimitCheck?.groupBy + this, + newQuery, + undefined, + !!selectParamsLimitCheck?.groupBy, ); const queryWithRLS = withUserRLS(localParams, queryWithoutRLS); @@ -80,74 +122,117 @@ export const find = async function(this: ViewHandler, filter?: Filter, selectPar return []; } catch (e) { console.error(e); - throw `Internal error: publish config is not valid for publish.${this.name}.select ` + throw `Internal error: publish config is not valid for publish.${this.name}.select `; } } /** Used for subscribe */ - if(localParams?.returnNewQuery) return (newQuery as unknown as any); + if (localParams?.returnNewQuery) return newQuery as unknown as any; if (localParams?.returnQuery) { - if(localParams?.returnQuery === "where-condition"){ + if (localParams?.returnQuery === "where-condition") { return newQuery.whereOpts.condition as any; } - return ((localParams?.returnQuery === "noRLS"? queryWithoutRLS : queryWithRLS) as unknown as any[]); + return (localParams?.returnQuery === "noRLS" + ? queryWithoutRLS + : queryWithRLS) as unknown as any[]; } - const result = await runQueryReturnType({ - queryWithoutRLS, - queryWithRLS, - returnType, - handler: this, + const result = await runQueryReturnType({ + queryWithoutRLS, + queryWithRLS, + returnType, + handler: this, localParams, newQuery, }); - await this._log({ command, localParams, data: { filter, selectParams }, duration: Date.now() - start }); + await this._log({ + command, + localParams, + data: { filter, selectParams }, + duration: Date.now() - start, + }); return result; } catch (e) { - this._log({ command, localParams, data: { filter, selectParams }, duration: Date.now() - start, error: getErrorAsObject(e) }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + this._log({ + command, + localParams, + data: { filter, selectParams }, + duration: Date.now() - start, + error: getErrorAsObject(e), + }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } -} +}; type RunQueryReturnTypeArgs = { queryWithRLS: string; queryWithoutRLS: string; - returnType: SelectParams["returnType"]; - handler: ViewHandler | TableHandler; + returnType: SelectParams["returnType"]; + handler: ViewHandler | TableHandler; localParams: LocalParams | undefined; newQuery: NewQuery | undefined; }; -export const runQueryReturnType = async ({ newQuery, handler, localParams, queryWithRLS, queryWithoutRLS, returnType,}: RunQueryReturnTypeArgs) => { - +export const runQueryReturnType = async ({ + newQuery, + handler, + localParams, + queryWithRLS, + queryWithoutRLS, + returnType, +}: RunQueryReturnTypeArgs) => { const query = queryWithRLS; - const sqlTypes = ["statement", "statement-no-rls", "statement-where"] as const; - if(!returnType || returnType === "values"){ - - return handler.dbHandler.any(query).then(data => { - if (returnType === "values") { - return data.map(d => Object.values(d)[0]); - } - return data; - }).catch(err => getClientErrorFromPGError(err, { type: "tableMethod", localParams, view: handler, })); - - } else if (sqlTypes.some(v => v === returnType)) { + const sqlTypes = [ + "statement", + "statement-no-rls", + 
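// The "statement*" return types emit SQL text instead of rows and require
// execute-sql privileges (checked via canRunSQL below), e.g. (names illustrative):
//   const sql = await dbo.users.find({}, { returnType: "statement" });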
"statement-where", + ] as const; + if (!returnType || returnType === "values") { + return handler.dbHandler + .any(query) + .then((data) => { + if (returnType === "values") { + return data.map((d) => Object.values(d)[0]); + } + return data; + }) + .catch((err) => + getClientErrorFromPGError(err, { + type: "tableMethod", + localParams, + view: handler, + }), + ); + } else if (sqlTypes.some((v) => v === returnType)) { if (!(await canRunSQL(handler.dboBuilder.prostgles, localParams))) { - throw `Not allowed: { returnType: ${JSON.stringify(returnType)} } requires execute sql privileges ` + throw `Not allowed: { returnType: ${JSON.stringify(returnType)} } requires execute sql privileges `; } - if(returnType === "statement-no-rls"){ + if (returnType === "statement-no-rls") { return queryWithoutRLS as any; } - if(returnType === "statement-where"){ - if(!newQuery) throw `returnType ${returnType} not possible for this command type`; + if (returnType === "statement-where") { + if (!newQuery) + throw `returnType ${returnType} not possible for this command type`; return newQuery.whereOpts.condition as any; } return query as unknown as any[]; - } else if (["row", "value"].includes(returnType)) { - return handler.dbHandler.oneOrNone(query).then(data => { - return (data && returnType === "value") ? Object.values(data)[0] : data; - }).catch(err => getClientErrorFromPGError(err, { type: "tableMethod", localParams, view: handler, })); + return handler.dbHandler + .oneOrNone(query) + .then((data) => { + return data && returnType === "value" ? Object.values(data)[0] : data; + }) + .catch((err) => + getClientErrorFromPGError(err, { + type: "tableMethod", + localParams, + view: handler, + }), + ); } -} \ No newline at end of file +}; diff --git a/lib/DboBuilder/ViewHandler/getExistsCondition.ts b/lib/DboBuilder/ViewHandler/getExistsCondition.ts index ae162dd2..f941ebe3 100644 --- a/lib/DboBuilder/ViewHandler/getExistsCondition.ts +++ b/lib/DboBuilder/ViewHandler/getExistsCondition.ts @@ -1,22 +1,40 @@ -import { AnyObject, EXISTS_KEY, EXISTS_KEYS, FieldFilter, asName } from "prostgles-types"; +import { + AnyObject, + EXISTS_KEY, + EXISTS_KEYS, + FieldFilter, + asName, +} from "prostgles-types"; import { LocalParams, ExistsFilterConfig } from "../DboBuilder"; import { ViewHandler } from "./ViewHandler"; import { TableRule } from "../../PublishParser/PublishParser"; import { TableHandler } from "../TableHandler/TableHandler"; import { getTableJoinQuery } from "./getTableJoinQuery"; - -export async function getExistsCondition(this: ViewHandler, eConfig: ExistsFilterConfig, localParams: LocalParams | undefined): Promise { - +export async function getExistsCondition( + this: ViewHandler, + eConfig: ExistsFilterConfig, + localParams: LocalParams | undefined, +): Promise { const thisTable = this.name; - const isNotExists = ["$notExists", "$notExistsJoined"].includes(eConfig.existType); + const isNotExists = ["$notExists", "$notExistsJoined"].includes( + eConfig.existType, + ); const { targetTableFilter } = eConfig; /* Nested $exists is not allowed */ - if (targetTableFilter && Object.keys(targetTableFilter).find(fk => EXISTS_KEYS.includes(fk as EXISTS_KEY))) { - throw { stack: ["prepareExistCondition()"], message: "Nested exists dissallowed" }; - } + if ( + targetTableFilter && + Object.keys(targetTableFilter).find((fk) => + EXISTS_KEYS.includes(fk as EXISTS_KEY), + ) + ) { + throw { + stack: ["prepareExistCondition()"], + message: "Nested exists dissallowed", + }; + } let t2Rules: TableRule | undefined = undefined, 
forcedFilter: AnyObject | undefined, @@ -24,41 +42,51 @@ export async function getExistsCondition(this: ViewHandler, eConfig: ExistsFilte tableAlias; /* Check if allowed to view data - forcedFilters will bypass this check through isForcedFilterBypass */ - if (localParams?.isRemoteRequest && !localParams?.socket && !localParams?.httpReq) { + if ( + localParams?.isRemoteRequest && + !localParams?.socket && + !localParams?.httpReq + ) { throw "Unexpected: localParams isRemoteRequest and missing socket/httpReq: "; } - const targetTable = eConfig.isJoined? eConfig.parsedPath.at(-1)!.table : eConfig.targetTable; - if ((localParams?.socket || localParams?.httpReq) && this.dboBuilder.publishParser) { - - t2Rules = await this.dboBuilder.publishParser.getValidatedRequestRuleWusr({ - tableName: targetTable, - command: "find", - localParams - }) as TableRule; + const targetTable = eConfig.isJoined + ? eConfig.parsedPath.at(-1)!.table + : eConfig.targetTable; + if ( + (localParams?.socket || localParams?.httpReq) && + this.dboBuilder.publishParser + ) { + t2Rules = (await this.dboBuilder.publishParser.getValidatedRequestRuleWusr({ + tableName: targetTable, + command: "find", + localParams, + })) as TableRule; if (!t2Rules || !t2Rules.select) throw "Dissallowed"; ({ forcedFilter, filterFields } = t2Rules.select); } - const tableHandler = this.dboBuilder.dbo[targetTable] as TableHandler - const finalWhere = (await tableHandler.prepareWhere({ - select: undefined, - filter: targetTableFilter, - forcedFilter, - filterFields, - addWhere: false, - tableAlias, - localParams, - tableRule: t2Rules - })).where + const tableHandler = this.dboBuilder.dbo[targetTable] as TableHandler; + const finalWhere = ( + await tableHandler.prepareWhere({ + select: undefined, + filter: targetTableFilter, + forcedFilter, + filterFields, + addWhere: false, + tableAlias, + localParams, + tableRule: t2Rules, + }) + ).where; let innerQuery = [ `SELECT 1`, `FROM ${asName(targetTable)}`, - `${finalWhere ? `WHERE ${finalWhere}` : ""}` + `${finalWhere ? `WHERE ${finalWhere}` : ""}`, ].join("\n"); - if(eConfig.isJoined){ + if (eConfig.isJoined) { const { query } = getTableJoinQuery({ path: eConfig.parsedPath, rootTableAlias: thisTable, @@ -69,5 +97,4 @@ export async function getExistsCondition(this: ViewHandler, eConfig: ExistsFilte } return `${isNotExists ? " NOT " : " "} EXISTS ( \n${innerQuery} \n) `; - -} \ No newline at end of file +} diff --git a/lib/DboBuilder/ViewHandler/getExistsFilters.ts b/lib/DboBuilder/ViewHandler/getExistsFilters.ts index 655658f4..234d4387 100644 --- a/lib/DboBuilder/ViewHandler/getExistsFilters.ts +++ b/lib/DboBuilder/ViewHandler/getExistsFilters.ts @@ -3,46 +3,53 @@ import { ExistsFilterConfig } from "../DboBuilder"; import { ViewHandler } from "./ViewHandler"; import { parseJoinPath } from "./parseJoinPath"; -export const getExistsFilters = (filter: any, viewHandler: ViewHandler): ExistsFilterConfig[] => { - +export const getExistsFilters = ( + filter: any, + viewHandler: ViewHandler, +): ExistsFilterConfig[] => { /* Exists join filter */ - const ERR = "Invalid exists filter. \nExpecting something like: \n | { $exists: { tableName.tableName2: Filter } } \n | { $exists: { \"**.tableName3\": Filter } }\n | { path: string[]; filter: AnyObject }" + const ERR = + 'Invalid exists filter. 
\nExpecting something like: \n | { $exists: { tableName.tableName2: Filter } } \n | { $exists: { "**.tableName3": Filter } }\n | { path: string[]; filter: AnyObject }'; const existsConfigs: ExistsFilterConfig[] = getKeys(filter) - .filter((k ): k is typeof EXISTS_KEYS[number] => EXISTS_KEYS.includes(k as EXISTS_KEY) && !!Object.keys(filter[k] ?? {}).length) - .map(key => { - + .filter( + (k): k is (typeof EXISTS_KEYS)[number] => + EXISTS_KEYS.includes(k as EXISTS_KEY) && + !!Object.keys(filter[k] ?? {}).length, + ) + .map((key) => { const isJoined = key.toLowerCase().includes("join"); const filterValue = filter[key]; - /** - * type ExistsJoined = + * type ExistsJoined = * | { "table1.table2": { column: filterValue } } * | { path: string[]; filter: AnyObject } */ const dataKeys = Object.keys(filterValue); - const isDetailed = dataKeys.length === 2 && dataKeys.every(key => ["path", "filter"].includes(key)); + const isDetailed = + dataKeys.length === 2 && + dataKeys.every((key) => ["path", "filter"].includes(key)); const firstKey = dataKeys[0]!; /** * Non joined exists are never detailed */ - if(!isJoined){ - const format = `Expecting single table in exists filter. Example: { $exists: { tableName: Filter } }` - if(isDetailed){ - throw `Exists filters cannot be detailed. ${format}` + if (!isJoined) { + const format = `Expecting single table in exists filter. Example: { $exists: { tableName: Filter } }`; + if (isDetailed) { + throw `Exists filters cannot be detailed. ${format}`; } const targetTable = firstKey; if (!viewHandler.dboBuilder.dbo[targetTable]) { - throw `Table ${JSON.stringify(targetTable)} not found. ${format}` + throw `Table ${JSON.stringify(targetTable)} not found. ${format}`; } const res: ExistsFilterConfig = { isJoined: false, existType: key as EXISTS_KEY, targetTableFilter: filterValue[firstKey], targetTable: firstKey, - } + }; return res; } @@ -50,12 +57,17 @@ export const getExistsFilters = (filter: any, viewHandler: ViewHandler): ExistsF * Prevent some errors with table names that contain "." */ const firstKeyIsATable = !!viewHandler.dboBuilder.dbo[firstKey]; - const [path, targetTableFilter] = isDetailed? [filterValue.path, filterValue.filter] : [(firstKeyIsATable? [firstKey] : firstKey.split(".")), filterValue[firstKey]]; + const [path, targetTableFilter] = isDetailed + ? [filterValue.path, filterValue.filter] + : [ + firstKeyIsATable ? 
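// Accepted joined-exists shapes (tables and columns illustrative):
//   { $existsJoined: { "orders.items": { qty: { $gt: 0 } } } }
//   { $existsJoined: { path: ["orders", "items"], filter: { qty: { $gt: 0 } } } }
// A first key that is itself a table name is not split on ".".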
[firstKey] : firstKey.split("."), + filterValue[firstKey], + ]; if (!path.length) { throw ERR + "\nBut got: " + JSON.stringify(filterValue); } - + return { isJoined: true, existType: key as EXISTS_KEY, @@ -67,8 +79,8 @@ export const getExistsFilters = (filter: any, viewHandler: ViewHandler): ExistsF allowMultiOrJoin: true, }), targetTableFilter, - } + }; }); return existsConfigs; -} \ No newline at end of file +}; diff --git a/lib/DboBuilder/ViewHandler/getInfo.ts b/lib/DboBuilder/ViewHandler/getInfo.ts index b357fb0c..ab13a6ed 100644 --- a/lib/DboBuilder/ViewHandler/getInfo.ts +++ b/lib/DboBuilder/ViewHandler/getInfo.ts @@ -1,37 +1,68 @@ -import { - TableInfo as TInfo -} from "prostgles-types/dist"; +import { TableInfo as TInfo } from "prostgles-types/dist"; import { TableRule } from "../../PublishParser/PublishParser"; import { LocalParams } from "../DboBuilder"; import { ViewHandler } from "./ViewHandler"; -export async function getInfo(this: ViewHandler, lang?: string, param2?: any, param3?: any, tableRules?: TableRule, localParams?: LocalParams): Promise { +export async function getInfo( + this: ViewHandler, + lang?: string, + param2?: any, + param3?: any, + tableRules?: TableRule, + localParams?: LocalParams, +): Promise { const p = this.getValidatedRules(tableRules, localParams); if (!p.getInfo) { - await this._log({ command: "getInfo", localParams, data: { lang }, duration: 0, error: "Not allowed" }); + await this._log({ + command: "getInfo", + localParams, + data: { lang }, + duration: 0, + error: "Not allowed", + }); throw "Not allowed"; } const fileTableName = this.dboBuilder.prostgles?.opts?.fileTable?.tableName; - await this._log({ command: "getInfo", localParams, data: { lang }, duration: 0 }); - const allowedFieldsToSelect = this.parseFieldFilter(tableRules?.select?.fields); + await this._log({ + command: "getInfo", + localParams, + data: { lang }, + duration: 0, + }); + const allowedFieldsToSelect = this.parseFieldFilter( + tableRules?.select?.fields, + ); return { oid: this.tableOrViewInfo.oid, comment: this.tableOrViewInfo.comment, - info: this.dboBuilder.prostgles?.tableConfigurator?.getTableInfo({ tableName: this.name, lang }), - isFileTable: !this.is_media? undefined : { - allowedNestedInserts: tableRules?.insert?.allowedNestedInserts - }, + info: this.dboBuilder.prostgles?.tableConfigurator?.getTableInfo({ + tableName: this.name, + lang, + }), + isFileTable: !this.is_media + ? 
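// getInfo() surfaces client-facing table metadata: oid, comment, isView,
// file-table flags, and uniqueColumnGroups filtered down to the column
// groups the caller is allowed to select.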
undefined + : { + allowedNestedInserts: tableRules?.insert?.allowedNestedInserts, + }, isView: this.is_view, - hasFiles: Boolean(!this.is_media && fileTableName && this.columns.some(c => c.references?.some(r => r.ftable === fileTableName))), + hasFiles: Boolean( + !this.is_media && + fileTableName && + this.columns.some((c) => + c.references?.some((r) => r.ftable === fileTableName), + ), + ), fileTableName, dynamicRules: { - update: Boolean(tableRules?.update?.dynamicFields?.length) + update: Boolean(tableRules?.update?.dynamicFields?.length), }, /** * Only show column groups that are fully allowed to be selected by the user */ - uniqueColumnGroups: this.tableOrViewInfo.uniqueColumnGroups?.filter(g => !localParams || g.every(c => allowedFieldsToSelect.includes(c))), - } -} \ No newline at end of file + uniqueColumnGroups: this.tableOrViewInfo.uniqueColumnGroups?.filter( + (g) => !localParams || g.every((c) => allowedFieldsToSelect.includes(c)), + ), + }; +} diff --git a/lib/DboBuilder/ViewHandler/getTableJoinQuery.ts b/lib/DboBuilder/ViewHandler/getTableJoinQuery.ts index 859a6ba4..0080438f 100644 --- a/lib/DboBuilder/ViewHandler/getTableJoinQuery.ts +++ b/lib/DboBuilder/ViewHandler/getTableJoinQuery.ts @@ -6,79 +6,113 @@ type getTableJoinsArgs = { type: "INNER" | "LEFT" | "EXISTS"; finalWhere?: string; path: ParsedJoinPath[]; -} -export const getTableJoinQuery = ({ path, type, rootTableAlias, finalWhere }: getTableJoinsArgs): { targetAlias: string; query: string } => { - +}; +export const getTableJoinQuery = ({ + path, + type, + rootTableAlias, + finalWhere, +}: getTableJoinsArgs): { targetAlias: string; query: string } => { const [firstPath] = path; - if(!firstPath){ + if (!firstPath) { throw `Cannot create join query for empty path`; } const aliasSufix = "jd"; - const getTableAlias = (table: string, pathIndex: number) => asName(`${aliasSufix}_${pathIndex}_${table}`); - - const query = path.map(({ table, on }, i) => { - if(!on) throw "on missing"; - const tableName = table; - const tableAlias = getTableAlias(table, i); - const prevTableAlias = i === 0? rootTableAlias : getTableAlias(path[i-1]!.table, i-1); + const getTableAlias = (table: string, pathIndex: number) => + asName(`${aliasSufix}_${pathIndex}_${table}`); - const onCondition = getJoinOnCondition({ on, leftAlias: prevTableAlias, rightAlias: tableAlias }); + const query = path + .map(({ table, on }, i) => { + if (!on) throw "on missing"; + const tableName = table; + const tableAlias = getTableAlias(table, i); + const prevTableAlias = + i === 0 ? rootTableAlias : getTableAlias(path[i - 1]!.table, i - 1); - const isExists = type === "EXISTS" - const joinType = isExists? "INNER" : type; - const keyword = `${joinType} JOIN`; - const isLast = i === path.length - 1; - const isFirst = !i; + const onCondition = getJoinOnCondition({ + on, + leftAlias: prevTableAlias, + rightAlias: tableAlias, + }); - /** - * rootTable joins to first path - * first path joins to target table through inner joins - */ - const whereJoinCondition = (isLast && isExists) ? - `WHERE (${getJoinOnCondition({ - on: firstPath.on, - leftAlias: rootTableAlias, - rightAlias: getTableAlias(firstPath.table, 0) - })})` : ""; + const isExists = type === "EXISTS"; + const joinType = isExists ? "INNER" : type; + const keyword = `${joinType} JOIN`; + const isLast = i === path.length - 1; + const isFirst = !i; - const tableSelect = (isExists && isLast)? [ - `(`, - ` SELECT *`, - ` FROM ${tableName}`, - (finalWhere? 
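// For type "EXISTS" the last hop becomes a filtered sub-select and the first
// hop is joined back to the root table in the final WHERE, producing roughly:
//   SELECT 1 FROM <t1> jd_0_<t1> INNER JOIN ... WHERE (<root join condition>)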
` WHERE ${finalWhere}` : ""), - `)` - ].filter(v=>v).join("\n") : tableName; - if(isExists && isFirst){ - return [ - `SELECT 1`, - `FROM ${tableSelect} ${tableAlias}`, - whereJoinCondition - ].filter(v=>v).join("\n"); - } + /** + * rootTable joins to first path + * first path joins to target table through inner joins + */ + const whereJoinCondition = + isLast && isExists + ? `WHERE (${getJoinOnCondition({ + on: firstPath.on, + leftAlias: rootTableAlias, + rightAlias: getTableAlias(firstPath.table, 0), + })})` + : ""; - return [ - `${keyword} ${tableSelect} ${tableAlias}`, - ` ON ${onCondition}`, - whereJoinCondition - ].filter(v=>v).join("\n"); + const tableSelect = + isExists && isLast + ? [ + `(`, + ` SELECT *`, + ` FROM ${tableName}`, + finalWhere ? ` WHERE ${finalWhere}` : "", + `)`, + ] + .filter((v) => v) + .join("\n") + : tableName; + if (isExists && isFirst) { + return [ + `SELECT 1`, + `FROM ${tableSelect} ${tableAlias}`, + whereJoinCondition, + ] + .filter((v) => v) + .join("\n"); + } - }).join("\n"); + return [ + `${keyword} ${tableSelect} ${tableAlias}`, + ` ON ${onCondition}`, + whereJoinCondition, + ] + .filter((v) => v) + .join("\n"); + }) + .join("\n"); return { query, - targetAlias: getTableAlias(path.at(-1)!.table, path.length - 1) - } -} + targetAlias: getTableAlias(path.at(-1)!.table, path.length - 1), + }; +}; type GetJoinOnConditionArgs = { on: Record[]; - leftAlias: string; + leftAlias: string; rightAlias: string; getLeftColName?: (col: string) => string; getRightColName?: (col: string) => string; -} -export const getJoinOnCondition = ({ on, leftAlias, rightAlias, getLeftColName = asName, getRightColName = asName }: GetJoinOnConditionArgs ) => { - return on.map(constraint => Object.entries(constraint).map(([leftCol, rightCol]) => { - return `${leftAlias}.${getLeftColName(leftCol)} = ${rightAlias}.${getRightColName(rightCol)}`; - }).join(" AND ")).join(" OR ") -} +}; +export const getJoinOnCondition = ({ + on, + leftAlias, + rightAlias, + getLeftColName = asName, + getRightColName = asName, +}: GetJoinOnConditionArgs) => { + return on + .map((constraint) => + Object.entries(constraint) + .map(([leftCol, rightCol]) => { + return `${leftAlias}.${getLeftColName(leftCol)} = ${rightAlias}.${getRightColName(rightCol)}`; + }) + .join(" AND "), + ) + .join(" OR "); +}; diff --git a/lib/DboBuilder/ViewHandler/parseComplexFilter.ts b/lib/DboBuilder/ViewHandler/parseComplexFilter.ts index 32353903..8c0347a8 100644 --- a/lib/DboBuilder/ViewHandler/parseComplexFilter.ts +++ b/lib/DboBuilder/ViewHandler/parseComplexFilter.ts @@ -1,5 +1,9 @@ import { AnyObject, isObject } from "prostgles-types"; -import { FILTER_OPERANDS, FILTER_OPERAND_TO_SQL_OPERAND, parseFilterRightValue } from "../../Filtering"; +import { + FILTER_OPERANDS, + FILTER_OPERAND_TO_SQL_OPERAND, + parseFilterRightValue, +} from "../../Filtering"; import { FUNCTIONS, parseFunction } from "../QueryBuilder/Functions"; import { asNameAlias, parseFunctionObject } from "../QueryBuilder/QueryBuilder"; import { TableSchemaColumn } from "../DboBuilderTypes"; @@ -12,7 +16,7 @@ type Args = { tableAlias: string | undefined; allowed_colnames: string[]; columns: TableSchemaColumn[]; -} +}; /* Parse complex filters { @@ -30,67 +34,85 @@ export const parseComplexFilter = ({ allowed_colnames, columns, }: Args) => { - /** * { $funcName: [arg1, arg2] } * { $column: "column_name" } */ const getFuncQuery = (funcData: AnyObject): string => { - if(isObject(funcData) && "$column" in funcData){ - const column = funcData["$column"] - if(typeof 
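// $filter compares two computed values; the left side is a function call or
// a column reference (shapes per the parser below, names illustrative):
//   { $filter: [{ $column: "price" }, "$gt", 100] }
//   { $filter: [{ $funcName: [...args] }, "=", { $funcName2: [...args] }] }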
column !== "string"){ + if (isObject(funcData) && "$column" in funcData) { + const column = funcData["$column"]; + if (typeof column !== "string") { throw `expecting: \n { $column: "column_name" } received:\n ${JSON.stringify(funcData)}`; } - if(!allowed_colnames.includes(column)){ + if (!allowed_colnames.includes(column)) { throw `Dissallowed or Invalid column ${column}. Allowed columns: ${allowed_colnames}`; } - return asNameAlias(column, tableAlias) + return asNameAlias(column, tableAlias); } const { funcName, args } = parseFunctionObject(funcData); - const funcDef = parseFunction({ func: funcName, args, functions: FUNCTIONS, allowedFields: allowed_colnames }); - return funcDef.getQuery({ args, tableAlias, allColumns: columns, allowedFields: allowed_colnames }); - } + const funcDef = parseFunction({ + func: funcName, + args, + functions: FUNCTIONS, + allowedFields: allowed_colnames, + }); + return funcDef.getQuery({ + args, + tableAlias, + allColumns: columns, + allowedFields: allowed_colnames, + }); + }; const complexFilter = filter[complexFilterKey]; if (!Array.isArray(complexFilter)) { - throw `Invalid $filter. Must contain an array of at least element but got: ${JSON.stringify(complexFilter)} ` - } + throw `Invalid $filter. Must contain an array of at least element but got: ${JSON.stringify(complexFilter)} `; + } const [leftFilter, comparator, rightFilterOrValue] = complexFilter; const leftVal = getFuncQuery(leftFilter); let result = leftVal; if (comparator) { - if (typeof comparator !== "string" || !allowedComparators.includes(comparator as any)) { + if ( + typeof comparator !== "string" || + !allowedComparators.includes(comparator as any) + ) { throw `Invalid $filter. comparator ${JSON.stringify(comparator)} is not valid. Expecting one of: ${allowedComparators}`; } if (!rightFilterOrValue) { throw "Invalid $filter. Expecting a value or function after the comparator"; } - const maybeValidComparator = comparator as keyof typeof FILTER_OPERAND_TO_SQL_OPERAND; + const maybeValidComparator = + comparator as keyof typeof FILTER_OPERAND_TO_SQL_OPERAND; const sqlOperand = FILTER_OPERAND_TO_SQL_OPERAND[maybeValidComparator]; - if(!sqlOperand){ + if (!sqlOperand) { throw `Invalid $filter. comparator ${comparator} is not valid. Expecting one of: ${allowedComparators}`; } - let rightVal = isObject(rightFilterOrValue) ? - getFuncQuery(rightFilterOrValue) : - parseFilterRightValue(rightFilterOrValue, { - selectItem: undefined, - expect: ["$in", "$nin"].includes(comparator)? "csv" : undefined - }); - if(maybeValidComparator === "$between" || maybeValidComparator === "$notBetween"){ - - if(!Array.isArray(rightVal) || rightVal.length !== 2){ + let rightVal = isObject(rightFilterOrValue) + ? getFuncQuery(rightFilterOrValue) + : parseFilterRightValue(rightFilterOrValue, { + selectItem: undefined, + expect: ["$in", "$nin"].includes(comparator) ? "csv" : undefined, + }); + if ( + maybeValidComparator === "$between" || + maybeValidComparator === "$notBetween" + ) { + if (!Array.isArray(rightVal) || rightVal.length !== 2) { throw "Between filter expects an array of two values"; } rightVal = asValue(rightVal[0]) + " AND " + asValue(rightVal[1]); } - if (leftVal === rightVal){ - throw "Invalid $filter. Cannot compare two identical function signatures: " + JSON.stringify(leftFilter); + if (leftVal === rightVal) { + throw ( + "Invalid $filter. 
Cannot compare two identical function signatures: " +
+        JSON.stringify(leftFilter)
+      );
    }
-
+
    result += ` ${sqlOperand} ${rightVal}`;
  }
  return result;
-}
\ No newline at end of file
+};
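Editor's note, not part of the diff: a minimal sketch of the `$filter` tuple this parser accepts, based on the doc comment and branches above. The `products` table handler and its columns are hypothetical; `$between` and `$in` are taken verbatim from the code.

```typescript
// Shape: { $filter: [leftColumnOrFunc, comparator, rightValueOrColumnOrFunc] }
// price BETWEEN 10 AND 100 (the right side must be an array of exactly two values):
await db.products.find({
  $filter: [{ $column: "price" }, "$between", [10, 100]],
});

// status IN ('active', 'archived') ("$in"/"$nin" turn the right side into a csv list):
await db.products.find({
  $filter: [{ $column: "status" }, "$in", ["active", "archived"]],
});
```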
diff --git a/lib/DboBuilder/ViewHandler/parseFieldFilter.ts b/lib/DboBuilder/ViewHandler/parseFieldFilter.ts
index 2b1c813d..6fd1c0bf 100644
--- a/lib/DboBuilder/ViewHandler/parseFieldFilter.ts
+++ b/lib/DboBuilder/ViewHandler/parseFieldFilter.ts
@@ -1,33 +1,34 @@
 import { FieldFilter, getKeys } from "prostgles-types";
 import { isPlainObject } from "../DboBuilder";
 
-/** 
-* Filter string array
-* @param {FieldFilter} fieldParams - { col1: 0, col2: 0 } | { col1: true, col2: true } | "*" | ["key1", "key2"] | [] 
-* @param {boolean} allow_empty - allow empty select. defaults to true
-*/
+/**
+ * Filter string array
+ * @param {FieldFilter} fieldParams - { col1: 0, col2: 0 } | { col1: true, col2: true } | "*" | ["key1", "key2"] | []
+ * @param {boolean} allow_empty - allow empty select. defaults to true
+ */
 export const parseFieldFilter = (
-  fieldParams: FieldFilter> = "*",
-  allow_empty = true,
-  all_cols: AllowedKeys
+  fieldParams: FieldFilter> = "*",
+  allow_empty = true,
+  all_cols: AllowedKeys,
 ): AllowedKeys | [""] => {
-
-  if (!all_cols) throw "all_cols missing"
-  const all_fields = all_cols;// || this.column_names.slice(0);
+  if (!all_cols) throw "all_cols missing";
+  const all_fields = all_cols; // || this.column_names.slice(0);
   let colNames: AllowedKeys = [] as any;
   const initialParams = JSON.stringify(fieldParams);
 
   if (fieldParams) {
-
     /* "field1, field2, field4" | "*" */
     if (typeof fieldParams === "string") {
-      fieldParams = fieldParams.split(",").map(k => k.trim());
+      fieldParams = fieldParams.split(",").map((k) => k.trim());
     }
 
     /* string[] */
-    if (Array.isArray(fieldParams) && !fieldParams.find(f => typeof f !== "string")) {
+    if (
+      Array.isArray(fieldParams) &&
+      !fieldParams.find((f) => typeof f !== "string")
+    ) {
       /* ["*"] */
@@ -55,14 +56,15 @@ export const parseFieldFilter = (
     { field1: false, field2: false } = all fields except field1 and field2
     */
     } else if (isPlainObject(fieldParams)) {
-
       if (!getKeys(fieldParams).length) {
         return [] as unknown as typeof all_fields; //all_fields.slice(0) as typeof all_fields;
       }
-      const keys = getKeys(fieldParams as {
-        [key: string]: boolean | 0 | 1;
-      }) as AllowedKeys;
+      const keys = getKeys(
+        fieldParams as {
+          [key: string]: boolean | 0 | 1;
+        },
+      ) as AllowedKeys;
       if (keys[0] === "") {
         if (allow_empty) {
           return [""];
@@ -73,23 +75,27 @@ export const parseFieldFilter = (
 
       validate(keys);
 
-      keys.forEach(key => {
+      keys.forEach((key) => {
         const allowedVals = [true, false, 0, 1];
-        if (!allowedVals.includes((fieldParams as any)[key])) throw `Invalid field selection value for: { ${key}: ${(fieldParams as any)[key]} }. \n Allowed values: ${allowedVals.join(" OR ")}`
-      })
-
-      const allowed = keys.filter(key => (fieldParams as any)[key]),
-        disallowed = keys.filter(key => !(fieldParams as any)[key]);
+        if (!allowedVals.includes((fieldParams as any)[key]))
+          throw `Invalid field selection value for: { ${key}: ${(fieldParams as any)[key]} }. \n Allowed values: ${allowedVals.join(" OR ")}`;
+      });
+      const allowed = keys.filter((key) => (fieldParams as any)[key]),
+        disallowed = keys.filter((key) => !(fieldParams as any)[key]);
 
       if (disallowed && disallowed.length) {
-        return all_fields.filter(col => !disallowed.includes(col)) as typeof all_fields;
+        return all_fields.filter(
+          (col) => !disallowed.includes(col),
+        ) as typeof all_fields;
       } else {
         return [...allowed] as any;
       }
-
     } else {
-      throw " Unrecognised field filter.\nExpecting any of: string | string[] | { [field]: boolean } \n Received -> " + initialParams;
+      throw (
+        " Unrecognised field filter.\nExpecting any of: string | string[] | { [field]: boolean } \n Received -> " +
+        initialParams
+      );
     }
 
     validate(colNames);
@@ -97,9 +103,9 @@ export const parseFieldFilter = (
   return colNames as any;
 
   function validate(cols: AllowedKeys) {
-    const bad_keys = cols.filter(col => !all_fields.includes(col));
+    const bad_keys = cols.filter((col) => !all_fields.includes(col));
     if (bad_keys && bad_keys.length) {
       throw "\nUnrecognised or illegal fields: " + bad_keys.join(", ");
     }
   }
-}
\ No newline at end of file
+};
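Editor's note, not part of the diff: the `fieldParams` shapes from the JSDoc above, restated as calls. `allCols` and the column names are hypothetical.

```typescript
const allCols = ["col1", "col2", "col3"];

parseFieldFilter("*", true, allCols); // every column
parseFieldFilter("col1, col2", true, allCols); // csv string -> ["col1", "col2"]
parseFieldFilter(["col1", "col2"], true, allCols); // string[]
parseFieldFilter({ col1: true, col2: 1 }, true, allCols); // only the listed columns
parseFieldFilter({ col1: false, col2: 0 }, true, allCols); // every column except these
```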
diff --git a/lib/DboBuilder/ViewHandler/parseJoinPath.ts b/lib/DboBuilder/ViewHandler/parseJoinPath.ts
index 418d6cd9..a39dd8b1 100644
--- a/lib/DboBuilder/ViewHandler/parseJoinPath.ts
+++ b/lib/DboBuilder/ViewHandler/parseJoinPath.ts
@@ -7,76 +7,93 @@ type parseJoinPathArgs = {
   viewHandler: ViewHandler;
   allowMultiOrJoin?: boolean;
   addShortestJoinIfMissing?: boolean;
-}
+};
 export type ParsedJoinPath = { table: string; on: Record<string, string>[] };
 
 /**
  * Return a valid join path
 */
-export const parseJoinPath = ({ rawPath, rootTable, viewHandler, allowMultiOrJoin = false, addShortestJoinIfMissing }: parseJoinPathArgs): ParsedJoinPath[] => {
+export const parseJoinPath = ({
+  rawPath,
+  rootTable,
+  viewHandler,
+  allowMultiOrJoin = false,
+  addShortestJoinIfMissing,
+}: parseJoinPathArgs): ParsedJoinPath[] => {
   const result: ParsedJoinPath[] = [];
-  let cleanPath = typeof rawPath === "string"? [{ table: rawPath }] : rawPath;
-  if(addShortestJoinIfMissing && cleanPath[0] !== "**"){
-    cleanPath = ["**", ...cleanPath]
+  let cleanPath = typeof rawPath === "string" ? [{ table: rawPath }] : rawPath;
+  if (addShortestJoinIfMissing && cleanPath[0] !== "**") {
+    cleanPath = ["**", ...cleanPath];
   }
 
   cleanPath.forEach((item, i) => {
    const prevTable = result.at(-1)?.table ?? rootTable;
-    if(!prevTable) throw `prevTable missing`;
+    if (!prevTable) throw `prevTable missing`;
 
    const pushJoinPath = (targetPath: JoinPath) => {
      const getShortestJoin = i === 1 && cleanPath[0] === "**";
-      const joinInfo = getJoins(viewHandler, prevTable, [targetPath], { allowMultiOrJoin, getShortestJoin });
-
-      joinInfo.paths.forEach(path => {
+      const joinInfo = getJoins(viewHandler, prevTable, [targetPath], {
+        allowMultiOrJoin,
+        getShortestJoin,
+      });
+      joinInfo.paths.forEach((path) => {
        /** Check if join tables are valid */
        if (!viewHandler.dboBuilder.dbo[path.table]) {
-          throw { stack: ["prepareExistCondition()"], message: `Invalid or disallowed table in join path: ${path.table}` };
+          throw {
+            stack: ["prepareExistCondition()"],
+            message: `Invalid or disallowed table in join path: ${path.table}`,
+          };
        }
 
        result.push({
          table: path.table,
-          on: path.on.map(constraint => Object.fromEntries(constraint))
+          on: path.on.map((constraint) => Object.fromEntries(constraint)),
        });
-      })
-    }
+      });
+    };
 
    /** Shortest join */
-    if(item === "**"){
-
-    } else if(typeof item === "string"){
+    if (item === "**") {
+    } else if (typeof item === "string") {
      const table = item;
-      pushJoinPath({ table })
+      pushJoinPath({ table });
    } else {
      pushJoinPath(item);
    }
  });
 
-  const missingPath = result.find(r => !r.on.length || r.on.some(v => !Object.keys(v).length));
-  if(missingPath){
-    throw `Missing join on condition for: ${missingPath.table}`
+  const missingPath = result.find(
+    (r) => !r.on.length || r.on.some((v) => !Object.keys(v).length),
+  );
+  if (missingPath) {
+    throw `Missing join on condition for: ${missingPath.table}`;
  }
  return result;
-}
+};
 
 import { JoinInfo } from "../DboBuilder";
 type Opts = {
  allowMultiOrJoin?: boolean;
  getShortestJoin?: boolean;
-}
+};
 
 /**
 * Returns all tables and fields required to join from source table to target table
 * Respecting the path.on condition
 */
-function getJoins(viewHandler: ViewHandler, source: string, path: JoinPath[], { allowMultiOrJoin = true, getShortestJoin }: Opts = {}): JoinInfo {
+function getJoins(
+  viewHandler: ViewHandler,
+  source: string,
+  path: JoinPath[],
+  { allowMultiOrJoin = true, getShortestJoin }: Opts = {},
+): JoinInfo {
  const [lastItem] = path;
-  if(!lastItem){
+  if (!lastItem) {
    throw `Empty path`;
  }
-  if(getShortestJoin && path.length !== 1){
-    throw `getShortestJoin requires exactly 1 path item`
+  if (getShortestJoin && path.length !== 1) {
+    throw `getShortestJoin requires exactly 1 path item`;
  }
  const target = lastItem.table;
  if (!viewHandler.joinPaths) {
@@ -85,52 +102,77 @@ function getJoins(viewHandler: ViewHandler, source: string, path: JoinPath[], {
 
  /* Self join */
  if (source === target) {
-    const tableHandler = viewHandler.dboBuilder.tablesOrViews?.find(t => t.name === source);
+    const tableHandler = viewHandler.dboBuilder.tablesOrViews?.find(
+      (t) => t.name === source,
+    );
    if (!tableHandler) throw `Table not found for joining ${source}`;
 
-    const fcols = tableHandler.columns.filter(c => c.references?.some(({ ftable }) => ftable === viewHandler.name));
-    if(!fcols.length){
-      throw `There is no self-join foreign key relationship for table ${JSON.stringify(target)}`
+    const fcols = tableHandler.columns.filter((c) =>
+      c.references?.some(({ ftable }) => ftable === viewHandler.name),
+    );
+    if (!fcols.length) {
+      throw `There is no self-join foreign key relationship for table ${JSON.stringify(target)}`;
    }
    let allOnJoins: [string, string][][] = [];
-    fcols.forEach(fc => {
+    fcols.forEach((fc) => {
      fc.references!.forEach(({ 
fcols, cols }) => {
-        const fieldArr = fcols.map((fcol, i) => [fcol, cols[i]!] as [string, string]);
+        const fieldArr = fcols.map(
+          (fcol, i) => [fcol, cols[i]!] as [string, string],
+        );
        allOnJoins.push(fieldArr);
-      })
+      });
    });
 
    allOnJoins = [
      ...allOnJoins,
      /** Reverse as well */
-      ...allOnJoins.map(constraint => (constraint).map(([left, right]) => [right, left] as [string, string]))
-    ]
+      ...allOnJoins.map((constraint) =>
+        constraint.map(([left, right]) => [right, left] as [string, string]),
+      ),
+    ];
 
    return {
-      paths: [{
-        source,
-        target,
-        table: target,
-        on: getValidOn(lastItem.on, allOnJoins.map(v => Object.fromEntries(v)))
-      }],
-      expectOne: false
-    }
+      paths: [
+        {
+          source,
+          target,
+          table: target,
+          on: getValidOn(
+            lastItem.on,
+            allOnJoins.map((v) => Object.fromEntries(v)),
+          ),
+        },
+      ],
+      expectOne: false,
+    };
  }
 
  /* Find the join path between tables */
-  const tableConfigJoinInfo = viewHandler.dboBuilder?.prostgles?.tableConfigurator?.getJoinInfo(source, target);
+  const tableConfigJoinInfo =
+    viewHandler.dboBuilder?.prostgles?.tableConfigurator?.getJoinInfo(
+      source,
+      target,
+    );
  if (tableConfigJoinInfo) return tableConfigJoinInfo;
 
-  const actualPath = getShortestJoin? 
-    viewHandler.joinPaths.find(j => {
-      return j.t1 === source && j.t2 === target
-    })?.path.map(table => ({ table, on: undefined })).slice(1) : 
-    viewHandler.joinPaths.find(j => {
-      return j.path.join() === [{ table: source }, ...path].map(p => p.table).join()
-    })? path : undefined;
+  const actualPath = getShortestJoin
+    ? viewHandler.joinPaths
+        .find((j) => {
+          return j.t1 === source && j.t2 === target;
+        })
+        ?.path.map((table) => ({ table, on: undefined }))
+        .slice(1)
+    : viewHandler.joinPaths.find((j) => {
+          return (
+            j.path.join() ===
+            [{ table: source }, ...path].map((p) => p.table).join()
+          );
+        })
+      ? path
+      : undefined;
 
-  if(getShortestJoin && actualPath?.length && lastItem.on?.length){
-    actualPath[actualPath.length-1]!.on = lastItem.on;
+  if (getShortestJoin && actualPath?.length && lastItem.on?.length) {
+    actualPath[actualPath.length - 1]!.on = lastItem.on;
  }
-  
+
  if (!actualPath) {
    throw `Joining ${source} <-...-> ${target} disallowed or missing`;
  }
@@ -144,19 +186,21 @@ function getJoins(viewHandler: ViewHandler, source: string, path: JoinPath[], {
    viewHandler.joins ??= viewHandler.dboBuilder.joins;
 
    /* Get join options */
-    const join = viewHandler.joins.find(j => j.tables.includes(t1) && j.tables.includes(tablePath.table));
+    const join = viewHandler.joins.find(
+      (j) => j.tables.includes(t1) && j.tables.includes(tablePath.table),
+    );
    if (!join) {
      throw `Joining ${t1} <-> ${tablePath} disallowed or missing`;
    }
 
    const isLtr = join.tables[0] === t1;
-    const joinOn = isLtr? join.on : reverseJoinOn(join.on);
+    const joinOn = isLtr ? join.on : reverseJoinOn(join.on);
    const on = getValidOn(tablePath.on, joinOn);
    paths.push({
      source,
      target,
      table: tablePath.table,
-      on
+      on,
    });
  });
  const expectOne = false;
@@ -172,37 +216,47 @@ function getJoins(viewHandler: ViewHandler, source: string, path: JoinPath[], {
  //     }
  //   })
 
-  const isMultiOrJoin = paths.find(p => p.on.length > 1);
-  if(!allowMultiOrJoin && isMultiOrJoin){
-    throw `Table ${JSON.stringify(source)} can join to ${JSON.stringify(target)} through multiple constraints. 
Must choose one of ${JSON.stringify(isMultiOrJoin.on)}`
+  const isMultiOrJoin = paths.find((p) => p.on.length > 1);
+  if (!allowMultiOrJoin && isMultiOrJoin) {
+    throw `Table ${JSON.stringify(source)} can join to ${JSON.stringify(target)} through multiple constraints. Must choose one of ${JSON.stringify(isMultiOrJoin.on)}`;
  }
 
  return {
    paths,
-    expectOne
+    expectOne,
  };
}
 
-const getValidOn = (requested: JoinPath["on"], possible: ParsedJoinPath["on"]) => {
-
-  if(!requested){
-    return possible.map(v => Object.entries(v));
+const getValidOn = (
+  requested: JoinPath["on"],
+  possible: ParsedJoinPath["on"],
+) => {
+  if (!requested) {
+    return possible.map((v) => Object.entries(v));
  }
-  if(!requested.length){
-    throw `Invalid requested "tablePath.on". Cannot be empty`
+  if (!requested.length) {
+    throw `Invalid requested "tablePath.on". Cannot be empty`;
  }
-  const isValid = requested.every(requestedConstraint => {
-    return possible.some(possibleConstraint => conditionsMatch(possibleConstraint, requestedConstraint));
+  const isValid = requested.every((requestedConstraint) => {
+    return possible.some((possibleConstraint) =>
+      conditionsMatch(possibleConstraint, requestedConstraint),
+    );
  });
-  if(!isValid){
-    throw `Invalid path specified for join: ${JSON.stringify(requested)}. Allowed paths: ${JSON.stringify(possible)}`
+  if (!isValid) {
+    throw `Invalid path specified for join: ${JSON.stringify(requested)}. Allowed paths: ${JSON.stringify(possible)}`;
  }
-  return requested.map(v => Object.entries(v));
-}
+  return requested.map((v) => Object.entries(v));
+};
 
-const conditionsMatch = (c1: Record<string, string>, c2: Record<string, string>) => {
+const conditionsMatch = (
+  c1: Record<string, string>,
+  c2: Record<string, string>,
+) => {
  const keys1 = Object.keys(c1);
  const keys2 = Object.keys(c2);
-  return keys1.sort().join() === keys2.sort().join() && keys1.every(key => c1[key] === c2[key]);
-}
\ No newline at end of file
+  return (
+    keys1.sort().join() === keys2.sort().join() &&
+    keys1.every((key) => c1[key] === c2[key])
+  );
+};
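Editor's note, not part of the diff: the `rawPath` shapes parseJoinPath normalises, per the branches above. Table and column names are hypothetical; `viewHandler` is assumed to be in scope.

```typescript
// A bare table name becomes a single step: [{ table: "orders" }]
parseJoinPath({ rawPath: "orders", rootTable: "customers", viewHandler });

// Explicit steps; inside an `on` object the column pairs are ANDed,
// separate objects in the array are ORed:
parseJoinPath({
  rawPath: [{ table: "orders", on: [{ id: "customer_id" }] }],
  rootTable: "customers",
  viewHandler,
});

// A leading "**" requests the shortest available join path:
parseJoinPath({
  rawPath: ["**", { table: "order_items" }],
  rootTable: "customers",
  viewHandler,
});
```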
diff --git a/lib/DboBuilder/ViewHandler/prepareSortItems.ts b/lib/DboBuilder/ViewHandler/prepareSortItems.ts
index fa1b0349..8dbd20f5 100644
--- a/lib/DboBuilder/ViewHandler/prepareSortItems.ts
+++ b/lib/DboBuilder/ViewHandler/prepareSortItems.ts
@@ -1,6 +1,16 @@
-import { OrderBy, asName, isDefined, isEmpty, isObject } from "prostgles-types/dist";
+import {
+  OrderBy,
+  asName,
+  isDefined,
+  isEmpty,
+  isObject,
+} from "prostgles-types/dist";
 import { SortItem } from "../DboBuilder";
-import { NewQueryJoin, SelectItemValidated, asNameAlias } from "../QueryBuilder/QueryBuilder";
+import {
+  NewQueryJoin,
+  SelectItemValidated,
+  asNameAlias,
+} from "../QueryBuilder/QueryBuilder";
 
 /* This relates only to SELECT */
 export const prepareSortItems = (
@@ -10,127 +20,160 @@ export const prepareSortItems = (
  select: SelectItemValidated[],
  joinQueries: NewQueryJoin[],
): SortItem[] => {
-
  if (!rawOrderBy) return [];
 
-  let orderBy: { key: string, asc: boolean, nulls?: "first" | "last", nullEmpty?: boolean }[] = [];
+  let orderBy: {
+    key: string;
+    asc: boolean;
+    nulls?: "first" | "last";
+    nullEmpty?: boolean;
+  }[] = [];
  if (isObject(rawOrderBy)) {
    orderBy = parseOrderObj(rawOrderBy);
  } else if (typeof rawOrderBy === "string") {
    /* string */
    orderBy = [{ key: rawOrderBy, asc: true }];
  } else if (Array.isArray(rawOrderBy)) {
-
    /* Order by is formed of a list of ascending field names */
-    const _orderBy = (rawOrderBy as any[]);
-    if (_orderBy && !_orderBy.find(v => typeof v !== "string")) {
+    const _orderBy = rawOrderBy as any[];
+    if (_orderBy && !_orderBy.find((v) => typeof v !== "string")) {
      /* [string] */
-      orderBy = _orderBy.map(key => ({ key, asc: true }));
-    } else if (_orderBy.find(v => isObject(v) && !isEmpty(v))) {
-      orderBy = _orderBy.map(v => parseOrderObj(v, true)[0]!);
+      orderBy = _orderBy.map((key) => ({ key, asc: true }));
+    } else if (_orderBy.find((v) => isObject(v) && !isEmpty(v))) {
+      orderBy = _orderBy.map((v) => parseOrderObj(v, true)[0]!);
    } else return throwErr(rawOrderBy);
  } else return throwErr(rawOrderBy);
 
  if (!orderBy || !orderBy.length) return [];
 
-  const validatedAggAliases = select.filter(s =>
-    s.type !== "joinedColumn" &&
-    (!s.fields.length || s.fields.every(f => allowed_cols.includes(f)))
-  ).map(s => s.alias)
-
-  const sortableNestedColumns = joinQueries.flatMap(jq => jq.select.map(selectItem => {
-    const joinAlias = jq.tableAlias ?? jq.table;
-    return {
-      ...jq,
-      selectItem,
-      joinAlias,
-      key: `${joinAlias}.${selectItem.alias}`
-    }
-  }));
-  const bad_param = orderBy.find(({ key }) =>
-    !sortableNestedColumns.some(v => v.key === key) &&
-    !validatedAggAliases.includes(key) &&
-    !allowed_cols.includes(key)
+  const validatedAggAliases = select
+    .filter(
+      (s) =>
+        s.type !== "joinedColumn" &&
+        (!s.fields.length || s.fields.every((f) => allowed_cols.includes(f))),
+    )
+    .map((s) => s.alias);
+
+  const sortableNestedColumns = joinQueries.flatMap((jq) =>
+    jq.select.map((selectItem) => {
+      const joinAlias = jq.tableAlias ?? jq.table;
+      return {
+        ...jq,
+        selectItem,
+        joinAlias,
+        key: `${joinAlias}.${selectItem.alias}`,
+      };
+    }),
+  );
+  const bad_param = orderBy.find(
+    ({ key }) =>
+      !sortableNestedColumns.some((v) => v.key === key) &&
+      !validatedAggAliases.includes(key) &&
+      !allowed_cols.includes(key),
  );
  if (bad_param) {
    throw "Invalid/disallowed orderBy fields or params: " + bad_param.key;
  }
 
-  const selectedAliases = select.filter(s => s.selected).map(s => s.alias);
-
-  const result: SortItem[] = orderBy.map(({ key, asc, nulls, nullEmpty = false }) => {
-
-    const nestedField = sortableNestedColumns.find(f => f.key === key);
-    if (nestedField) {
-      const { table, selectItem, joinAlias } = nestedField;
-
-      const comparableDataTypeCast = ["uuid", "xml"].includes(selectItem.column_udt_type ?? "")? "::TEXT" : "";
-      const sortItemAlias = asName(`prostgles_nested_sort_${selectItem.alias}`)
-
-      return {
-        key,
-        type: "query",
-        asc,
-        nulls,
-        nullEmpty,
-        nested: {
-          table,
-          joinAlias,
-          selectItemAlias: selectItem.alias,
-          isNumeric: selectItem.tsDataType === "number",
-          wrapperQuerySortItem: `${asc? "MIN" : "MAX"}(${asNameAlias(selectItem.alias, joinAlias)}${comparableDataTypeCast}) as ${sortItemAlias}`,
-        },
-        fieldQuery: `${asName(joinAlias)}.${sortItemAlias + (asc? "" : " DESC")} ${nulls? `NULLS ${nulls === "last"? "LAST" : "FIRST" }` : ""}`,
+  const selectedAliases = select.filter((s) => s.selected).map((s) => s.alias);
+
+  const result: SortItem[] = orderBy.map(
+    ({ key, asc, nulls, nullEmpty = false }) => {
+      const nestedField = sortableNestedColumns.find((f) => f.key === key);
+      if (nestedField) {
+        const { table, selectItem, joinAlias } = nestedField;
+
+        const comparableDataTypeCast = ["uuid", "xml"].includes(
+          selectItem.column_udt_type ?? "",
+        )
+          ? "::TEXT"
+          : "";
+        const sortItemAlias = asName(
+          `prostgles_nested_sort_${selectItem.alias}`,
+        );
+
+        return {
+          key,
+          type: "query",
+          asc,
+          nulls,
+          nullEmpty,
+          nested: {
+            table,
+            joinAlias,
+            selectItemAlias: selectItem.alias,
+            isNumeric: selectItem.tsDataType === "number",
+            wrapperQuerySortItem: `${asc ? 
"MIN" : "MAX"}(${asNameAlias(selectItem.alias, joinAlias)}${comparableDataTypeCast}) as ${sortItemAlias}`, + }, + fieldQuery: `${asName(joinAlias)}.${sortItemAlias + (asc ? "" : " DESC")} ${nulls ? `NULLS ${nulls === "last" ? "LAST" : "FIRST"}` : ""}`, + }; } - } - /* Order by column index when possible to bypass name collision when ordering by a computed column. + /* Order by column index when possible to bypass name collision when ordering by a computed column. (Postgres will sort by existing columns wheundefined possible) */ - - const index = selectedAliases.indexOf(key) + 1; - let colKey = (index > 0 && !nullEmpty) ? index : [tableAlias, key].filter(isDefined).map(asName).join("."); - if (nullEmpty) { - colKey = `nullif(trim(${colKey}::text), '')` - } - if (typeof colKey === "number") { + const index = selectedAliases.indexOf(key) + 1; + let colKey = + index > 0 && !nullEmpty + ? index + : [tableAlias, key].filter(isDefined).map(asName).join("."); + if (nullEmpty) { + colKey = `nullif(trim(${colKey}::text), '')`; + } + + if (typeof colKey === "number") { + return { + key, + type: "position", + asc, + nulls, + nullEmpty, + fieldPosition: colKey, + }; + } + return { key, - type: "position", - asc, + type: "query", + fieldQuery: colKey, nulls, nullEmpty, - fieldPosition: colKey - } - } - - return { - key, - type: "query", - fieldQuery: colKey, - nulls, - nullEmpty, - asc, - } - }); + asc, + }; + }, + ); return result; -} +}; const throwErr = (rawOrderBy: any) => { - throw "\nInvalid orderBy option -> " + JSON.stringify(rawOrderBy) + - "Expecting: \ + throw ( + "\nInvalid orderBy option -> " + + JSON.stringify(rawOrderBy) + + "Expecting: \ { key2: false, \"nested.key2\": false, key1: true } \ { key1: 1, key2: -1 } \ [{ key1: true }, { key2: false }] \ [{ key: 'colName', asc: true, nulls: 'first', nullEmpty: true }]" + ); }; -const parseOrderObj = (orderBy: any, expectOne = false): { key: string, asc: boolean, nulls?: "first" | "last", nullEmpty?: boolean }[] => { +const parseOrderObj = ( + orderBy: any, + expectOne = false, +): { + key: string; + asc: boolean; + nulls?: "first" | "last"; + nullEmpty?: boolean; +}[] => { if (!isObject(orderBy)) return throwErr(orderBy); const keys = Object.keys(orderBy); - if (keys.length && keys.find(k => ["key", "asc", "nulls", "nullEmpty"].includes(k))) { + if ( + keys.length && + keys.find((k) => ["key", "asc", "nulls", "nullEmpty"].includes(k)) + ) { const { key, asc, nulls, nullEmpty = false } = orderBy; if ( !["string"].includes(typeof key) || @@ -139,25 +182,30 @@ const parseOrderObj = (orderBy: any, expectOne = false): { key: string, asc: boo !["boolean"].includes(typeof nullEmpty) ) { throw `Invalid orderBy option (${JSON.stringify(orderBy, null, 2)}) \n - Expecting { key: string, asc?: boolean, nulls?: 'first' | 'last' | null | undefined, nullEmpty?: boolean } ` + Expecting { key: string, asc?: boolean, nulls?: 'first' | 'last' | null | undefined, nullEmpty?: boolean } `; } return [{ key, asc, nulls, nullEmpty }]; } if (expectOne && keys.length > 1) { - throw "\nInvalid orderBy " + JSON.stringify(orderBy) + - "\nEach orderBy array element cannot have more than one key"; + throw ( + "\nInvalid orderBy " + + JSON.stringify(orderBy) + + "\nEach orderBy array element cannot have more than one key" + ); } /* { key2: true, key1: false } */ - if (!Object.values(orderBy).find(v => ![true, false].includes(v))) { - return keys.map(key => ({ key, asc: Boolean(orderBy[key]) })) + if (!Object.values(orderBy).find((v) => ![true, false].includes(v))) { + return 
keys.map((key) => ({ key, asc: Boolean(orderBy[key]) }));
    /* { key2: -1, key1: 1 } */
-  } else if (!Object.values(orderBy).find(v => ![-1, 1].includes(v))) {
-    return keys.map(key => ({ key, asc: orderBy[key] === 1 }))
+  } else if (!Object.values(orderBy).find((v) => ![-1, 1].includes(v))) {
+    return keys.map((key) => ({ key, asc: orderBy[key] === 1 }));
    /* { key2: "asc", key1: "desc" } */
-  } else if (!Object.values(orderBy).find(v => !["asc", "desc"].includes(v))) {
-    return keys.map(key => ({ key, asc: orderBy[key] === "asc" }))
+  } else if (
+    !Object.values(orderBy).find((v) => !["asc", "desc"].includes(v))
+  ) {
+    return keys.map((key) => ({ key, asc: orderBy[key] === "asc" }));
  } else return throwErr(orderBy);
-};
\ No newline at end of file
+};
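Editor's note, not part of the diff: the `orderBy` shapes accepted above, restated from the `throwErr` message. `db.items` and the column names are hypothetical.

```typescript
await db.items.find({}, { orderBy: "created_at" }); // single key, ascending
await db.items.find({}, { orderBy: { created_at: -1, name: 1 } }); // 1 / -1
await db.items.find({}, { orderBy: { created_at: "desc" } }); // "asc" / "desc"
await db.items.find({}, { orderBy: [{ name: true }, { created_at: false }] });
await db.items.find(
  {},
  { orderBy: [{ key: "name", asc: true, nulls: "first", nullEmpty: true }] },
);
```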
diff --git a/lib/DboBuilder/ViewHandler/prepareWhere.ts b/lib/DboBuilder/ViewHandler/prepareWhere.ts
index d6558c83..c8e587f2 100644
--- a/lib/DboBuilder/ViewHandler/prepareWhere.ts
+++ b/lib/DboBuilder/ViewHandler/prepareWhere.ts
@@ -1,4 +1,10 @@
-import { AnyObject, FieldFilter, getKeys, isDefined, isObject } from "prostgles-types/dist";
+import {
+  AnyObject,
+  FieldFilter,
+  getKeys,
+  isDefined,
+  isObject,
+} from "prostgles-types/dist";
 import { ViewHandler } from "./ViewHandler";
 import { ExistsFilterConfig, Filter, LocalParams } from "../DboBuilder";
 import { SelectItem } from "../QueryBuilder/QueryBuilder";
@@ -11,14 +17,31 @@ export type PrepareWhereParams = {
  forcedFilter?: AnyObject;
  filterFields?: FieldFilter;
  addWhere?: boolean;
-  tableAlias?: string,
-  localParams: LocalParams | undefined,
-  tableRule: TableRule | undefined,
+  tableAlias?: string;
+  localParams: LocalParams | undefined;
+  tableRule: TableRule | undefined;
  isHaving?: boolean;
};
 
-export async function prepareWhere(this: ViewHandler, params: PrepareWhereParams): Promise<{ condition: string; where: string; filter: AnyObject; exists: ExistsFilterConfig[]; }> {
-  const { filter, select, forcedFilter, filterFields: ff, addWhere: addKeywords = true, tableAlias, localParams, tableRule } = params;
+export async function prepareWhere(
+  this: ViewHandler,
+  params: PrepareWhereParams,
+): Promise<{
+  condition: string;
+  where: string;
+  filter: AnyObject;
+  exists: ExistsFilterConfig[];
+}> {
+  const {
+    filter,
+    select,
+    forcedFilter,
+    filterFields: ff,
+    addWhere: addKeywords = true,
+    tableAlias,
+    localParams,
+    tableRule,
+  } = params;
  const { $and: $and_key, $or: $or_key } = this.dboBuilder.prostgles.keywords;
 
  let filterFields = ff;
@@ -27,17 +50,26 @@ export async function prepareWhere(this: ViewHandler, params: PrepareWhereParams
 
  const exists: ExistsFilterConfig[] = [];
 
-  const parseFullFilter = async (f: any, parentFilter: any = null, isForcedFilterBypass: boolean): Promise<string> => {
+  const parseFullFilter = async (
+    f: any,
+    parentFilter: any = null,
+    isForcedFilterBypass: boolean,
+  ): Promise<string> => {
    if (!f) throw "Invalid/missing group filter provided";
-    if (!isObject(f)) throw "\nInvalid filter\nExpecting an object but got -> " + JSON.stringify(f);
+    if (!isObject(f))
+      throw (
+        "\nInvalid filter\nExpecting an object but got -> " + JSON.stringify(f)
+      );
    let result = "";
    const keys = getKeys(f);
    if (!keys.length) {
      return result;
    }
-    if ((keys.includes($and_key) || keys.includes($or_key))) {
-      if (keys.length > 1) throw `\ngroup filter must contain only one array property. e.g.: { ${$and_key}: [...] } OR { ${$or_key}: [...] } `;
-      if (parentFilter && Object.keys(parentFilter).includes("")) throw "group filter ($and/$or) can only be placed at the root or within another group filter";
+    if (keys.includes($and_key) || keys.includes($or_key)) {
+      if (keys.length > 1)
+        throw `\ngroup filter must contain only one array property. e.g.: { ${$and_key}: [...] } OR { ${$or_key}: [...] } `;
+      if (parentFilter && Object.keys(parentFilter).includes(""))
+        throw "group filter ($and/$or) can only be placed at the root or within another group filter";
    }
 
    const { [$and_key]: $and, [$or_key]: $or } = f,
@@ -45,21 +77,27 @@ export async function prepareWhere(this: ViewHandler, params: PrepareWhereParams
 
    if (group && group.length) {
      const operand = $and ? " AND " : " OR ";
-      const conditions = (await Promise.all(
-        group.map(async gf => await parseFullFilter(gf, group, isForcedFilterBypass))
-      )).filter(c => c);
-
+      const conditions = (
+        await Promise.all(
+          group.map(
+            async (gf) =>
+              await parseFullFilter(gf, group, isForcedFilterBypass),
+          ),
+        )
+      ).filter((c) => c);
+
      if (conditions?.length) {
        if (conditions.length === 1) return conditions.join(operand);
        else return ` ( ${conditions.sort().join(operand)} ) `;
      }
    } else if (!group) {
-
      /** forcedFilters do not get checked against publish and are treated as server-side requests */
      const cond = await getCondition.bind(this)({
        filter: { ...f },
        select,
-        allowed_colnames: isForcedFilterBypass ? this.column_names.slice(0) : this.parseFieldFilter(filterFields),
+        allowed_colnames: isForcedFilterBypass
+          ? this.column_names.slice(0)
+          : this.parseFieldFilter(filterFields),
        tableAlias,
        localParams: isForcedFilterBypass ? undefined : localParams,
        tableRules: isForcedFilterBypass ? undefined : tableRule,
@@ -69,22 +107,24 @@ export async function prepareWhere(this: ViewHandler, params: PrepareWhereParams
      exists.push(...cond.exists);
    }
    return result;
-  }
+  };
 
  /* A forced filter condition will not check if the existsJoined filter tables have been published */
-  const forcedFilterCond = forcedFilter ? await parseFullFilter(forcedFilter, null, true) : undefined;
+  const forcedFilterCond = forcedFilter
+    ? await parseFullFilter(forcedFilter, null, true)
+    : undefined;
  const filterCond = await parseFullFilter(filter, null, false);
-  let cond = [
-    forcedFilterCond, filterCond
-  ].filter(c => c).join(" AND ");
+  let cond = [forcedFilterCond, filterCond].filter((c) => c).join(" AND ");
 
-  const finalFilter = forcedFilter ? {
-    [$and_key]: [forcedFilter, filter].filter(isDefined)
-  } : { ...filter };
+  const finalFilter = forcedFilter
+    ? 
{ + [$and_key]: [forcedFilter, filter].filter(isDefined), + } + : { ...filter }; const condition = cond; if (cond && addKeywords) { cond = `WHERE ${cond}`; } return { condition, where: cond || "", filter: finalFilter, exists }; -} \ No newline at end of file +} diff --git a/lib/DboBuilder/ViewHandler/size.ts b/lib/DboBuilder/ViewHandler/size.ts index 19c9bbd6..9d97bd1c 100644 --- a/lib/DboBuilder/ViewHandler/size.ts +++ b/lib/DboBuilder/ViewHandler/size.ts @@ -1,37 +1,71 @@ import { SelectParams } from "prostgles-types"; import { TableRule } from "../../PublishParser/publishTypesAndUtils"; import { Filter, LocalParams } from "../DboBuilderTypes"; -import { getErrorAsObject, getSerializedClientErrorFromPGError, withUserRLS } from "../dboBuilderUtils"; +import { + getErrorAsObject, + getSerializedClientErrorFromPGError, + withUserRLS, +} from "../dboBuilderUtils"; import { ViewHandler } from "./ViewHandler"; -export async function size(this: ViewHandler, _filter?: Filter, selectParams?: SelectParams, param3_unused?: undefined, table_rules?: TableRule, localParams?: LocalParams): Promise { +export async function size( + this: ViewHandler, + _filter?: Filter, + selectParams?: SelectParams, + param3_unused?: undefined, + table_rules?: TableRule, + localParams?: LocalParams, +): Promise { const filter = _filter || {}; const start = Date.now(); try { - const result = await this.find(filter, { ...selectParams, limit: 2 }, undefined, table_rules, localParams) - .then(async _allowed => { - - const q: string = await this.find( - filter, { ...selectParams, limit: selectParams?.limit ?? Number.MAX_SAFE_INTEGER }, - undefined, - table_rules, - { ...localParams, returnQuery: "noRLS", bypassLimit: true } - ) as any; - const query = withUserRLS( - localParams, - `${withUserRLS(localParams, "")} + const result = await this.find( + filter, + { ...selectParams, limit: 2 }, + undefined, + table_rules, + localParams, + ).then(async (_allowed) => { + const q: string = (await this.find( + filter, + { + ...selectParams, + limit: selectParams?.limit ?? 
Number.MAX_SAFE_INTEGER, + }, + undefined, + table_rules, + { ...localParams, returnQuery: "noRLS", bypassLimit: true }, + )) as any; + const query = withUserRLS( + localParams, + `${withUserRLS(localParams, "")} SELECT sum(pg_column_size((prgl_size_query.*))) as size FROM ( ${q} ) prgl_size_query - ` - ); + `, + ); - return (this.tx?.t || this.db).one(query).then(({ size }) => size || '0'); - }); - await this._log({ command: "size", localParams, data: { filter, selectParams }, duration: Date.now() - start }); + return (this.tx?.t || this.db).one(query).then(({ size }) => size || "0"); + }); + await this._log({ + command: "size", + localParams, + data: { filter, selectParams }, + duration: Date.now() - start, + }); return result; } catch (e) { - await this._log({ command: "size", localParams, data: { filter, selectParams }, duration: Date.now() - start, error: getErrorAsObject(e) }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + await this._log({ + command: "size", + localParams, + data: { filter, selectParams }, + duration: Date.now() - start, + error: getErrorAsObject(e), + }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } -} \ No newline at end of file +} diff --git a/lib/DboBuilder/ViewHandler/subscribe.ts b/lib/DboBuilder/ViewHandler/subscribe.ts index fa9f3658..cef2bd91 100644 --- a/lib/DboBuilder/ViewHandler/subscribe.ts +++ b/lib/DboBuilder/ViewHandler/subscribe.ts @@ -1,36 +1,80 @@ -import { AnyObject, SubscribeParams, SubscriptionChannels } from "prostgles-types"; +import { + AnyObject, + SubscribeParams, + SubscriptionChannels, +} from "prostgles-types"; import { TableRule } from "../../PublishParser/PublishParser"; -import { Filter, LocalParams, getClientErrorFromPGError, getErrorAsObject, getSerializedClientErrorFromPGError } from "../DboBuilder"; +import { + Filter, + LocalParams, + getClientErrorFromPGError, + getErrorAsObject, + getSerializedClientErrorFromPGError, +} from "../DboBuilder"; import { getSubscribeRelatedTables } from "../getSubscribeRelatedTables"; import { NewQuery } from "../QueryBuilder/QueryBuilder"; import { ViewHandler } from "./ViewHandler"; -type OnData = (items: AnyObject[]) => any; -export type LocalFuncs = { - onData: OnData - onError?: (error: any) => void; -} | OnData; +type OnData = (items: AnyObject[]) => any; +export type LocalFuncs = + | { + onData: OnData; + onError?: (error: any) => void; + } + | OnData; -export const getOnDataFunc = (localFuncs: LocalFuncs | undefined): OnData | undefined => { - return typeof localFuncs === "function"? localFuncs : localFuncs?.onData; -} -export const matchesLocalFuncs = (localFuncs1: LocalFuncs | undefined, localFuncs2: LocalFuncs | undefined) => { - return localFuncs1 && localFuncs2 && getOnDataFunc(localFuncs1) === getOnDataFunc(localFuncs2); -} -export const parseLocalFuncs = (localFuncs1: LocalFuncs | undefined): Extract | undefined=> { - return !localFuncs1? undefined : typeof localFuncs1 === "function"? { - onData: localFuncs1 - } : localFuncs1; -} +export const getOnDataFunc = ( + localFuncs: LocalFuncs | undefined, +): OnData | undefined => { + return typeof localFuncs === "function" ? 
localFuncs : localFuncs?.onData; +}; +export const matchesLocalFuncs = ( + localFuncs1: LocalFuncs | undefined, + localFuncs2: LocalFuncs | undefined, +) => { + return ( + localFuncs1 && + localFuncs2 && + getOnDataFunc(localFuncs1) === getOnDataFunc(localFuncs2) + ); +}; +export const parseLocalFuncs = ( + localFuncs1: LocalFuncs | undefined, +): Extract | undefined => { + return !localFuncs1 + ? undefined + : typeof localFuncs1 === "function" + ? { + onData: localFuncs1, + } + : localFuncs1; +}; -async function subscribe(this: ViewHandler, filter: Filter, params: SubscribeParams, localFuncs: LocalFuncs): Promise<{ unsubscribe: () => any }> -async function subscribe(this: ViewHandler, filter: Filter, params: SubscribeParams, localFuncs: undefined, table_rules: TableRule | undefined, localParams: LocalParams): Promise -async function subscribe(this: ViewHandler, filter: Filter, params: SubscribeParams, localFuncs?: LocalFuncs, table_rules?: TableRule, localParams?: LocalParams): Promise<{ unsubscribe: () => any } | SubscriptionChannels> -{ +async function subscribe( + this: ViewHandler, + filter: Filter, + params: SubscribeParams, + localFuncs: LocalFuncs, +): Promise<{ unsubscribe: () => any }>; +async function subscribe( + this: ViewHandler, + filter: Filter, + params: SubscribeParams, + localFuncs: undefined, + table_rules: TableRule | undefined, + localParams: LocalParams, +): Promise; +async function subscribe( + this: ViewHandler, + filter: Filter, + params: SubscribeParams, + localFuncs?: LocalFuncs, + table_rules?: TableRule, + localParams?: LocalParams, +): Promise<{ unsubscribe: () => any } | SubscriptionChannels> { const start = Date.now(); try { - - if(!this.dboBuilder.canSubscribe){ + if (!this.dboBuilder.canSubscribe) { throw "Cannot subscribe. PubSubManager not initiated"; } @@ -41,25 +85,39 @@ async function subscribe(this: ViewHandler, filter: Filter, params: SubscribePar throw " missing data. provide -> localFunc | localParams { socket } "; } if (localParams?.socket && localFuncs) { - console.error({ localParams, localFuncs }) + console.error({ localParams, localFuncs }); throw " Cannot have localFunc AND socket "; } const { throttle = 0, throttleOpts, ...selectParams } = params; /** Ensure request is valid */ - await this.find(filter, { ...selectParams, limit: 0 }, undefined, table_rules, localParams); + await this.find( + filter, + { ...selectParams, limit: 0 }, + undefined, + table_rules, + localParams, + ); // TODO: Implement comprehensive canSubscribe check // if (!this.dboBuilder.prostgles.isSuperUser) { // throw "Subscribe not possible. Must be superuser"; // } - const newQuery: NewQuery = await this.find(filter, { ...selectParams, limit: 0 }, undefined, table_rules, { ...localParams, returnNewQuery: true }) as any; - const viewOptions = await getSubscribeRelatedTables.bind(this)({ - filter, selectParams, - table_rules, localParams, - newQuery + const newQuery: NewQuery = (await this.find( + filter, + { ...selectParams, limit: 0 }, + undefined, + table_rules, + { ...localParams, returnNewQuery: true }, + )) as any; + const viewOptions = await getSubscribeRelatedTables.bind(this)({ + filter, + selectParams, + table_rules, + localParams, + newQuery, }); const commonSubOpts = { @@ -76,32 +134,40 @@ async function subscribe(this: ViewHandler, filter: Filter, params: SubscribePar } as const; const pubSubManager = await this.dboBuilder.getPubSubManager(); - if (!localFuncs) { - + if (!localFuncs) { const { socket } = localParams ?? 
{}; const result = await pubSubManager.addSub({ ...commonSubOpts, - socket, + socket, localFuncs: undefined, - socket_id: socket?.id, + socket_id: socket?.id, }); - await this._log({ command: "subscribe", localParams, data: { filter, params }, duration: Date.now() - start }); + await this._log({ + command: "subscribe", + localParams, + data: { filter, params }, + duration: Date.now() - start, + }); return result; } else { - - const { channelName, sendFirstData } = await pubSubManager.addSub({ + const { channelName, sendFirstData } = await pubSubManager.addSub({ ...commonSubOpts, - socket: undefined, - localFuncs, - socket_id: undefined, + socket: undefined, + localFuncs, + socket_id: undefined, }); - + const unsubscribe = async () => { const pubSubManager = await this.dboBuilder.getPubSubManager(); - pubSubManager.removeLocalSub(channelName, localFuncs) + pubSubManager.removeLocalSub(channelName, localFuncs); }; - await this._log({ command: "subscribe", localParams, data: { filter, params }, duration: Date.now() - start }); + await this._log({ + command: "subscribe", + localParams, + data: { filter, params }, + duration: Date.now() - start, + }); const res: { unsubscribe: () => any } = Object.freeze({ unsubscribe }); /** Send first data after subscription is initialised to prevent race conditions */ setTimeout(() => { @@ -110,8 +176,18 @@ async function subscribe(this: ViewHandler, filter: Filter, params: SubscribePar return res; } } catch (e) { - await this._log({ command: "subscribe", localParams, data: { filter, params }, duration: Date.now() - start, error: getErrorAsObject(e) }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + await this._log({ + command: "subscribe", + localParams, + data: { filter, params }, + duration: Date.now() - start, + error: getErrorAsObject(e), + }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } } diff --git a/lib/DboBuilder/ViewHandler/validateViewRules.ts b/lib/DboBuilder/ViewHandler/validateViewRules.ts index 60f5364e..a4b40bc2 100644 --- a/lib/DboBuilder/ViewHandler/validateViewRules.ts +++ b/lib/DboBuilder/ViewHandler/validateViewRules.ts @@ -5,13 +5,13 @@ import { ViewHandler } from "./ViewHandler"; export async function validateViewRules( this: ViewHandler, args: { - fields?: FieldFilter, - filterFields?: FieldFilter, - returningFields?: FieldFilter, - forcedFilter?: AnyObject, - dynamicFields?: UpdateRule["dynamicFields"], - rule: "update" | "select" | "insert" | "delete" - } + fields?: FieldFilter; + filterFields?: FieldFilter; + returningFields?: FieldFilter; + forcedFilter?: AnyObject; + dynamicFields?: UpdateRule["dynamicFields"]; + rule: "update" | "select" | "insert" | "delete"; + }, ) { const { fields, @@ -27,31 +27,51 @@ export async function validateViewRules( try { const _fields = this.parseFieldFilter(fields); if (this.is_media && rule === "insert" && !_fields.includes("id")) { - throw "Must allow id insert for media table" + throw "Must allow id insert for media table"; } } catch (e) { - throw ` issue with publish.${this.name}.${rule}.fields: \nVALUE: ` + JSON.stringify(fields, null, 2) + "\nERROR: " + JSON.stringify(e, null, 2); + throw ( + ` issue with publish.${this.name}.${rule}.fields: \nVALUE: ` + + JSON.stringify(fields, null, 2) + + "\nERROR: " + + JSON.stringify(e, null, 2) + ); } } if (filterFields) { try { this.parseFieldFilter(filterFields); } catch (e) { - throw ` issue with 
publish.${this.name}.${rule}.filterFields: \nVALUE: ` + JSON.stringify(filterFields, null, 2) + "\nERROR: " + JSON.stringify(e, null, 2); + throw ( + ` issue with publish.${this.name}.${rule}.filterFields: \nVALUE: ` + + JSON.stringify(filterFields, null, 2) + + "\nERROR: " + + JSON.stringify(e, null, 2) + ); } } if (returningFields) { try { this.parseFieldFilter(returningFields); } catch (e) { - throw ` issue with publish.${this.name}.${rule}.returningFields: \nVALUE: ` + JSON.stringify(returningFields, null, 2) + "\nERROR: " + JSON.stringify(e, null, 2); + throw ( + ` issue with publish.${this.name}.${rule}.returningFields: \nVALUE: ` + + JSON.stringify(returningFields, null, 2) + + "\nERROR: " + + JSON.stringify(e, null, 2) + ); } } if (forcedFilter) { try { await this.find(forcedFilter, { limit: 0 }); } catch (e) { - throw ` issue with publish.${this.name}.${rule}.forcedFilter: \nVALUE: ` + JSON.stringify(forcedFilter, null, 2) + "\nERROR: " + JSON.stringify(e, null, 2); + throw ( + ` issue with publish.${this.name}.${rule}.forcedFilter: \nVALUE: ` + + JSON.stringify(forcedFilter, null, 2) + + "\nERROR: " + + JSON.stringify(e, null, 2) + ); } } if (dynamicFields) { @@ -61,10 +81,15 @@ export async function validateViewRules( this.parseFieldFilter(fields); await this.find(filter, { limit: 0 }); } catch (e) { - throw ` issue with publish.${this.name}.${rule}.dynamicFields: \nVALUE: ` + JSON.stringify(dfieldRule, null, 2) + "\nERROR: " + JSON.stringify(e, null, 2); + throw ( + ` issue with publish.${this.name}.${rule}.dynamicFields: \nVALUE: ` + + JSON.stringify(dfieldRule, null, 2) + + "\nERROR: " + + JSON.stringify(e, null, 2) + ); } } } return true; -} \ No newline at end of file +} diff --git a/lib/DboBuilder/dboBuilderUtils.ts b/lib/DboBuilder/dboBuilderUtils.ts index 9c2acb9d..fabbb32c 100644 --- a/lib/DboBuilder/dboBuilderUtils.ts +++ b/lib/DboBuilder/dboBuilderUtils.ts @@ -34,7 +34,9 @@ export const getErrorAsObject = (rawError: any, includeStack = false) => { return { message: rawError }; } if (rawError instanceof Error) { - const result = JSON.parse(JSON.stringify(rawError, Object.getOwnPropertyNames(rawError))); + const result = JSON.parse( + JSON.stringify(rawError, Object.getOwnPropertyNames(rawError)), + ); if (!includeStack) { return omitKeys(result, ["stack"]); } @@ -76,7 +78,7 @@ const otherKeys = [ export function getSerializedClientErrorFromPGError( rawError: any, - args: GetSerializedClientErrorFromPGErrorArgs + args: GetSerializedClientErrorFromPGErrorArgs, ): AnyObject { const err = getErrorAsObject(rawError); if (err.code) { @@ -89,7 +91,9 @@ export function getSerializedClientErrorFromPGError( const isServerSideRequest = !args.localParams; //TODO: add a rawSQL check for HTTP requests const showFullError = - isServerSideRequest || args.type === "sql" || args.localParams?.socket?.prostgles?.rawSQL; + isServerSideRequest || + args.type === "sql" || + args.localParams?.socket?.prostgles?.rawSQL; if (showFullError) { return err; } @@ -102,15 +106,19 @@ export function getSerializedClientErrorFromPGError( ]; const errObject = pickKeys(err, finalKeys); - if (view?.dboBuilder?.constraints && errObject.constraint && !errObject.column) { + if ( + view?.dboBuilder?.constraints && + errObject.constraint && + !errObject.column + ) { const constraint = view.dboBuilder.constraints.find( - (c) => c.conname === errObject.constraint && c.relname === view.name + (c) => c.conname === errObject.constraint && c.relname === view.name, ); if (constraint) { const cols = 
view.columns?.filter( (c) => (!allowedKeys || allowedKeys.includes(c.name)) && - constraint.conkey.includes(c.ordinal_position) + constraint.conkey.includes(c.ordinal_position), ); const [firstCol] = cols ?? []; if (firstCol) { @@ -123,7 +131,7 @@ export function getSerializedClientErrorFromPGError( } export function getClientErrorFromPGError( rawError: any, - args: GetSerializedClientErrorFromPGErrorArgs + args: GetSerializedClientErrorFromPGErrorArgs, ) { const errorObj = getSerializedClientErrorFromPGError(rawError, args); return Promise.reject(errorObj); @@ -135,11 +143,16 @@ export function getClientErrorFromPGError( export function parseError(e: any, _caller: string): ProstglesError { const errorObject = isObject(e) ? e : undefined; const message = - typeof e === "string" ? e - : e instanceof Error ? e.message - : isObject(errorObject) ? - (errorObject.message ?? errorObject.txt ?? JSON.stringify(errorObject) ?? "") - : ""; + typeof e === "string" + ? e + : e instanceof Error + ? e.message + : isObject(errorObject) + ? (errorObject.message ?? + errorObject.txt ?? + JSON.stringify(errorObject) ?? + "") + : ""; const result: ProstglesError = { ...errorObject, @@ -175,7 +188,7 @@ export type PGConstraint = { export const getConstraints = async ( db: DB, - schema: ProstglesInitOptions["schemaFilter"] + schema: ProstglesInitOptions["schemaFilter"], ): Promise => { const { sql, schemaNames } = getSchemaFilter(schema); return db.any( @@ -188,7 +201,7 @@ export const getConstraints = async ( ON nsp.oid = connamespace WHERE nsp.nspname ${sql} `, - { schemaNames } + { schemaNames }, ); }; @@ -200,7 +213,9 @@ export function isPlainObject(o: any): o is Record { return Object(o) === o && Object.getPrototypeOf(o) === Object.prototype; } -export function postgresToTsType(udt_data_type: PG_COLUMN_UDT_DATA_TYPE): keyof typeof TS_PG_Types { +export function postgresToTsType( + udt_data_type: PG_COLUMN_UDT_DATA_TYPE, +): keyof typeof TS_PG_Types { return ( getKeys(TS_PG_Types).find((k) => { // @ts-ignore @@ -209,14 +224,19 @@ export function postgresToTsType(udt_data_type: PG_COLUMN_UDT_DATA_TYPE): keyof ); } -export const prepareOrderByQuery = (items: SortItem[], tableAlias?: string): string[] => { +export const prepareOrderByQuery = ( + items: SortItem[], + tableAlias?: string, +): string[] => { if (!items.length) return []; return [ "ORDER BY " + items .map((d) => { const orderType = d.asc ? " ASC " : " DESC "; - const nullOrder = d.nulls ? ` NULLS ${d.nulls === "first" ? " FIRST " : " LAST "}` : ""; + const nullOrder = d.nulls + ? ` NULLS ${d.nulls === "first" ? 
" FIRST " : " LAST "}` + : ""; if (d.type === "query" && d.nested) { return d.fieldQuery; } @@ -237,7 +257,10 @@ export const getCanExecute = async (db: DB) => { return false; }; -export const withUserRLS = (localParams: LocalParams | undefined, query: string) => { +export const withUserRLS = ( + localParams: LocalParams | undefined, + query: string, +) => { const user = localParams?.isRemoteRequest?.user; const queryPrefix = `SET SESSION "prostgles.user" \nTO`; let firstQuery = `${queryPrefix} '';`; diff --git a/lib/DboBuilder/getColumns.ts b/lib/DboBuilder/getColumns.ts index 412ce424..8333a62a 100644 --- a/lib/DboBuilder/getColumns.ts +++ b/lib/DboBuilder/getColumns.ts @@ -15,7 +15,8 @@ import { import { TableHandler } from "./TableHandler/TableHandler"; import { ViewHandler } from "./ViewHandler/ViewHandler"; -export const isTableHandler = (v: any): v is TableHandler => "parseUpdateRules" in v; +export const isTableHandler = (v: any): v is TableHandler => + "parseUpdateRules" in v; export async function getColumns( this: ViewHandler, @@ -23,7 +24,7 @@ export async function getColumns( params?: { rule: "update"; filter: AnyObject }, _param3?: undefined, tableRules?: TableRule, - localParams?: LocalParams + localParams?: LocalParams, ): Promise { const start = Date.now(); try { @@ -34,7 +35,11 @@ export async function getColumns( let dynamicUpdateFields = this.column_names; if (params && tableRules && isTableHandler(this)) { - if (!isObject(params) || !isObject(params.filter) || params.rule !== "update") { + if ( + !isObject(params) || + !isObject(params.filter) || + params.rule !== "update" + ) { throw ( "params must be { rule: 'update', filter: object } but received: " + JSON.stringify(params) @@ -45,7 +50,12 @@ export async function getColumns( dynamicUpdateFields = []; } else { const { filter } = params; - const updateRules = await this.parseUpdateRules(filter, undefined, tableRules, localParams); + const updateRules = await this.parseUpdateRules( + filter, + undefined, + tableRules, + localParams, + ); dynamicUpdateFields = updateRules.fields; } } @@ -79,11 +89,17 @@ export async function getColumns( }); /** Do not allow updates to file table unless it's to delete fields */ - if (prostgles.fileManager?.config && prostgles.fileManager.tableName === this.name) { + if ( + prostgles.fileManager?.config && + prostgles.fileManager.tableName === this.name + ) { update = false; } - const nonOrderableUD_Types: PG_COLUMN_UDT_DATA_TYPE[] = [..._PG_geometric, "xml" as any]; + const nonOrderableUD_Types: PG_COLUMN_UDT_DATA_TYPE[] = [ + ..._PG_geometric, + "xml" as any, + ]; const result: ValidatedColumnInfo = { ...c, @@ -97,7 +113,9 @@ export async function getColumns( select: select && Boolean(p.select?.fields?.includes(c.name)), orderBy: select && - Boolean(p.select?.fields && p.select.orderByFields.includes(c.name)) && + Boolean( + p.select?.fields && p.select.orderByFields.includes(c.name), + ) && !nonOrderableUD_Types.includes(c.udt_name), filter: Boolean(p.select?.filterFields?.includes(c.name)), update: @@ -108,9 +126,16 @@ export async function getColumns( dynamicUpdateFields.includes(c.name), delete: _delete && - Boolean(p.delete && p.delete.filterFields && p.delete.filterFields.includes(c.name)), - ...(prostgles?.tableConfigurator?.getColInfo({ table: this.name, col: c.name, lang }) || - {}), + Boolean( + p.delete && + p.delete.filterFields && + p.delete.filterFields.includes(c.name), + ), + ...(prostgles?.tableConfigurator?.getColInfo({ + table: this.name, + col: c.name, + lang, + }) 
|| {}), ...(fileConfig && { file: fileConfig }), }; @@ -133,14 +158,21 @@ export async function getColumns( duration: Date.now() - start, error: getErrorAsObject(e), }); - throw getSerializedClientErrorFromPGError(e, { type: "tableMethod", localParams, view: this }); + throw getSerializedClientErrorFromPGError(e, { + type: "tableMethod", + localParams, + view: this, + }); } } function replaceNonAlphaNumeric(string: string, replacement = "_"): string { return string.replace(/[\W_]+/g, replacement); } -function capitalizeFirstLetter(string: string, nonalpha_replacement?: string): string { +function capitalizeFirstLetter( + string: string, + nonalpha_replacement?: string, +): string { const str = replaceNonAlphaNumeric(string, nonalpha_replacement); return str.charAt(0).toUpperCase() + str.slice(1); } diff --git a/lib/DboBuilder/getCondition.ts b/lib/DboBuilder/getCondition.ts index 83e06e8a..d5848313 100644 --- a/lib/DboBuilder/getCondition.ts +++ b/lib/DboBuilder/getCondition.ts @@ -9,7 +9,7 @@ import { getExistsCondition } from "./ViewHandler/getExistsCondition"; import { getExistsFilters } from "./ViewHandler/getExistsFilters"; import { parseComplexFilter } from "./ViewHandler/parseComplexFilter"; -const FILTER_FUNCS = FUNCTIONS.filter(f => f.canBeUsedForFilter); +const FILTER_FUNCS = FUNCTIONS.filter((f) => f.canBeUsedForFilter); /** * parses a single filter @@ -18,52 +18,79 @@ const FILTER_FUNCS = FUNCTIONS.filter(f => f.canBeUsedForFilter); * { fff: { $ilike: 'abc' } } => "fff" ilike 'abc' */ export async function getCondition( - this: ViewHandler, - params: { - filter: any, - select: SelectItem[] | undefined, - allowed_colnames: string[], - tableAlias?: string, - localParams?: LocalParams, - tableRules?: TableRule, - isHaving?: boolean, + this: ViewHandler, + params: { + filter: any; + select: SelectItem[] | undefined; + allowed_colnames: string[]; + tableAlias?: string; + localParams?: LocalParams; + tableRules?: TableRule; + isHaving?: boolean; }, ): Promise<{ exists: ExistsFilterConfig[]; condition: string }> { - const { filter: rawFilter, select, allowed_colnames, tableAlias, localParams, tableRules, isHaving } = params; - - const filter = { ... (rawFilter as any) } as any; + const { + filter: rawFilter, + select, + allowed_colnames, + tableAlias, + localParams, + tableRules, + isHaving, + } = params; + + const filter = { ...(rawFilter as any) } as any; const existsConfigs = getExistsFilters(filter, this); const funcConds: string[] = []; - const funcFilter = FILTER_FUNCS.filter(f => f.name in filter); + const funcFilter = FILTER_FUNCS.filter((f) => f.name in filter); - funcFilter.map(f => { + funcFilter.map((f) => { const funcArgs = filter[f.name]; if (!Array.isArray(funcArgs)) { throw `A function filter must contain an array. 
E.g: { $funcFilterName: ["col1"] } \n but got: ${JSON.stringify(pickKeys(filter, [f.name]))} `; } - const fields = this.parseFieldFilter(f.getFields(funcArgs), true, allowed_colnames); + const fields = this.parseFieldFilter( + f.getFields(funcArgs), + true, + allowed_colnames, + ); - const dissallowedCols = fields.filter(fname => !allowed_colnames.includes(fname)) + const dissallowedCols = fields.filter( + (fname) => !allowed_colnames.includes(fname), + ); if (dissallowedCols.length) { - throw `Invalid/disallowed columns found in function filter: ${dissallowedCols}` + throw `Invalid/disallowed columns found in function filter: ${dissallowedCols}`; } - funcConds.push(f.getQuery({ args: funcArgs, allColumns: this.columns, allowedFields: allowed_colnames, tableAlias })); + funcConds.push( + f.getQuery({ + args: funcArgs, + allColumns: this.columns, + allowedFields: allowed_colnames, + tableAlias, + }), + ); }); - let existsCond = ""; if (existsConfigs.length) { - existsCond = (await Promise.all(existsConfigs.map(async existsConfig => await getExistsCondition.bind(this)(existsConfig, localParams)))).join(" AND "); + existsCond = ( + await Promise.all( + existsConfigs.map( + async (existsConfig) => + await getExistsCondition.bind(this)(existsConfig, localParams), + ), + ) + ).join(" AND "); } /* Computed field queries ($rowhash) */ const p = this.getValidatedRules(tableRules, localParams); - const computedFields = p.allColumns.filter(c => c.type === "computed"); + const computedFields = p.allColumns.filter((c) => c.type === "computed"); const computedColConditions: string[] = []; - Object.keys(filter || {}).map(key => { - const compCol = computedFields.find(cf => cf.name === key); + Object.keys(filter || {}).map((key) => { + const compCol = computedFields.find((cf) => cf.name === key); if (compCol) { computedColConditions.push( compCol.getQuery({ @@ -75,7 +102,7 @@ export async function getCondition( // ctidField: this.is_view? undefined : "ctid" ctidField: undefined, - }) + ` = ${pgp.as.format("$1", [(filter as any)[key]])}` + }) + ` = ${pgp.as.format("$1", [(filter as any)[key]])}`, ); delete (filter as any)[key]; } @@ -85,41 +112,48 @@ export async function getCondition( /* Select aliases take precedence over col names. This is to ensure filters work correctly even on computed cols*/ if (select) { /* Allow filtering by selected fields/funcs */ - allowedSelect = select.filter(s => { - if (["function", "computed", "column"].includes(s.type) || isHaving && s.type === "aggregation") { + allowedSelect = select.filter((s) => { + if ( + ["function", "computed", "column"].includes(s.type) || + (isHaving && s.type === "aggregation") + ) { /** Selected computed cols are allowed for filtering without checking. Why not allow all?! */ if (s.type !== "column" || allowed_colnames.includes(s.alias)) { return true; } } return false; - }) + }); } /* Add remaining allowed fields */ - const remainingNonSelectedColumns: SelectItem[] = p.allColumns.filter(c => - allowed_colnames.includes(c.name) && - !allowedSelect.find(s => s.alias === c.name) - ).map(f => ({ - type: f.type, - alias: f.name, - columnName: f.type === "column"? f.name : undefined as any, - getQuery: (tableAlias) => f.getQuery({ - tableAlias, - allColumns: this.columns, - allowedFields: allowed_colnames - }), - selected: false, - getFields: () => [f.name], - column_udt_type: f.type === "column" ? 
this.columns.find(c => c.name === f.name)?.udt_name : undefined - })) - allowedSelect = allowedSelect.concat( - remainingNonSelectedColumns - ); + const remainingNonSelectedColumns: SelectItem[] = p.allColumns + .filter( + (c) => + allowed_colnames.includes(c.name) && + !allowedSelect.find((s) => s.alias === c.name), + ) + .map((f) => ({ + type: f.type, + alias: f.name, + columnName: f.type === "column" ? f.name : (undefined as any), + getQuery: (tableAlias) => + f.getQuery({ + tableAlias, + allColumns: this.columns, + allowedFields: allowed_colnames, + }), + selected: false, + getFields: () => [f.name], + column_udt_type: + f.type === "column" + ? this.columns.find((c) => c.name === f.name)?.udt_name + : undefined, + })); + allowedSelect = allowedSelect.concat(remainingNonSelectedColumns); const complexFilters: string[] = []; const complexFilterKey = "$filter"; if (complexFilterKey in filter) { - const complexFilterCondition = parseComplexFilter({ filter, complexFilterKey, @@ -135,39 +169,43 @@ export async function getCondition( will make an exists filter */ - const filterKeys = Object.keys(filter) - .filter(k => - k !== complexFilterKey && - !funcFilter.find(ek => ek.name === k) && - !computedFields.find(cf => cf.name === k) && - !existsConfigs.find(ek => ek.existType === k) - ); + const filterKeys = Object.keys(filter).filter( + (k) => + k !== complexFilterKey && + !funcFilter.find((ek) => ek.name === k) && + !computedFields.find((cf) => cf.name === k) && + !existsConfigs.find((ek) => ek.existType === k), + ); - const validFieldNames = allowedSelect.map(s => s.alias); - const invalidColumn = filterKeys - .find(fName => !validFieldNames.find(c => - c === fName || - ( - fName.startsWith(c) && ( - fName.slice(c.length).includes("->") || - fName.slice(c.length).includes(".") - ) - ) - )); + const validFieldNames = allowedSelect.map((s) => s.alias); + const invalidColumn = filterKeys.find( + (fName) => + !validFieldNames.find( + (c) => + c === fName || + (fName.startsWith(c) && + (fName.slice(c.length).includes("->") || + fName.slice(c.length).includes("."))), + ), + ); if (invalidColumn) { - const selItem = select?.find(s => s.alias === invalidColumn); + const selItem = select?.find((s) => s.alias === invalidColumn); let isComplexFilter = false; - if(selItem?.type === "aggregation"){ - if(!params.isHaving){ - throw new Error(`Filtering by ${invalidColumn} is not allowed. Aggregations cannot be filtered. Use HAVING clause instead.`); + if (selItem?.type === "aggregation") { + if (!params.isHaving) { + throw new Error( + `Filtering by ${invalidColumn} is not allowed. Aggregations cannot be filtered. Use HAVING clause instead.`, + ); } else { isComplexFilter = true; } } - if(!isComplexFilter){ - const allowedCols = allowedSelect.map(s => s.type === "column" ? s.getQuery() : s.alias).join(", "); + if (!isComplexFilter) { + const allowedCols = allowedSelect + .map((s) => (s.type === "column" ? s.getQuery() : s.alias)) + .join(", "); const errMessage = `Table: ${this.name} -> disallowed/inexistent columns in filter: ${invalidColumn} \n Expecting one of: ${allowedCols}`; throw errMessage; } @@ -182,10 +220,14 @@ export async function getCondition( filter: f, tableAlias, select: allowedSelect, - allowedColumnNames: !tableRules? this.column_names.slice(0) : this.parseFieldFilter(tableRules.select?.filterFields ?? tableRules.select?.fields), + allowedColumnNames: !tableRules + ? this.column_names.slice(0) + : this.parseFieldFilter( + tableRules.select?.filterFields ?? 
tableRules.select?.fields, + ), }); - let templates: string[] = [q].filter(q => q); + let templates: string[] = [q].filter((q) => q); if (existsCond) templates.push(existsCond); templates = templates.concat(funcConds); @@ -193,9 +235,8 @@ export async function getCondition( templates = templates.concat(complexFilters); /* sorted to ensure duplicate subscription channels are not created due to different condition order */ - return { - exists: existsConfigs, - condition: templates.sort().join(" AND \n") + return { + exists: existsConfigs, + condition: templates.sort().join(" AND \n"), }; - -} \ No newline at end of file +} diff --git a/lib/DboBuilder/getSubscribeRelatedTables.ts b/lib/DboBuilder/getSubscribeRelatedTables.ts index 04d15395..32ec9ada 100644 --- a/lib/DboBuilder/getSubscribeRelatedTables.ts +++ b/lib/DboBuilder/getSubscribeRelatedTables.ts @@ -1,7 +1,17 @@ -import { AnyObject, asName, ParsedJoinPath, reverseParsedPath, SubscribeParams } from "prostgles-types"; +import { + AnyObject, + asName, + ParsedJoinPath, + reverseParsedPath, + SubscribeParams, +} from "prostgles-types"; import { TableRule } from "../PublishParser/PublishParser"; import { log, ViewSubscriptionOptions } from "../PubSubManager/PubSubManager"; -import { Filter, getSerializedClientErrorFromPGError, LocalParams } from "./DboBuilder"; +import { + Filter, + getSerializedClientErrorFromPGError, + LocalParams, +} from "./DboBuilder"; import { NewQuery } from "./QueryBuilder/QueryBuilder"; import { ViewHandler } from "./ViewHandler/ViewHandler"; @@ -11,69 +21,100 @@ type Args = { table_rules: TableRule | undefined; localParams: LocalParams | undefined; newQuery: NewQuery; -} +}; /** * When subscribing to a view: we identify underlying tables to subscribe to them * When subscribing to a table: we identify joined tables to subscribe to them */ -export async function getSubscribeRelatedTables(this: ViewHandler, { filter, localParams, newQuery }: Args){ - +export async function getSubscribeRelatedTables( + this: ViewHandler, + { filter, localParams, newQuery }: Args, +) { let viewOptions: ViewSubscriptionOptions | undefined = undefined; const { condition } = newQuery.whereOpts; if (this.is_view) { /** TODO: this needs to be memoized on schema fetch */ const viewName = this.name; const viewNameEscaped = this.escapedName; - const { current_schema } = await this.db.oneOrNone("SELECT current_schema") + const { current_schema } = await this.db.oneOrNone("SELECT current_schema"); /** Get list of used columns and their parent tables */ - let { def } = (await this.db.oneOrNone("SELECT pg_get_viewdef(${viewName}) as def", { viewName })) as { def: string }; + let { def } = (await this.db.oneOrNone( + "SELECT pg_get_viewdef(${viewName}) as def", + { viewName }, + )) as { def: string }; def = def.trim(); if (def.endsWith(";")) { def = def.slice(0, -1); } if (!def || typeof def !== "string") { - throw getSerializedClientErrorFromPGError("Could get view definition", { type: "tableMethod", localParams, view: this, }); + throw getSerializedClientErrorFromPGError("Could get view definition", { + type: "tableMethod", + localParams, + view: this, + }); } - const { fields } = await this.dboBuilder.dbo.sql!(`SELECT * FROM ( \n ${def} \n ) prostgles_subscribe_view_definition LIMIT 0`, {}); - const tableColumns = fields.filter(f => f.tableName && f.columnName); + const { fields } = await this.dboBuilder.dbo.sql!( + `SELECT * FROM ( \n ${def} \n ) prostgles_subscribe_view_definition LIMIT 0`, + {}, + ); + const tableColumns = fields.filter((f) 
=> f.tableName && f.columnName);

    /** Create exists filters for each table */
-    const tableIds: string[] = Array.from(new Set(tableColumns.map(tc => tc.tableID!.toString())));
+    const tableIds: string[] = Array.from(
+      new Set(tableColumns.map((tc) => tc.tableID!.toString())),
+    );
    viewOptions = {
      type: "view",
      viewName,
      definition: def,
-      relatedTables: []
-    }
-    viewOptions.relatedTables = await Promise.all(tableIds.map(async tableID => {
-      const table = this.dboBuilder.USER_TABLES!.find(t => t.relid === +tableID)!;
-      let tableCols = tableColumns.filter(tc => tc.tableID!.toString() === tableID);
-
-      /** If table has primary keys and they are all in this view then use only primary keys */
-      if (table?.pkey_columns?.every(pkey => tableCols.some(c => c.columnName === pkey))) {
-        tableCols = tableCols.filter(c => table?.pkey_columns?.includes(c.columnName!))
-      } else {
-        /** Exclude non comparable data types */
-        tableCols = tableCols.filter(c => !["json", "xml"].includes(c.udt_name))
-      }
-
-      const { relname: tableName, schemaname: tableSchema } = table;
+      relatedTables: [],
+    };
+    viewOptions.relatedTables = await Promise.all(
+      tableIds.map(async (tableID) => {
+        const table = this.dboBuilder.USER_TABLES!.find(
+          (t) => t.relid === +tableID,
+        )!;
+        let tableCols = tableColumns.filter(
+          (tc) => tc.tableID!.toString() === tableID,
+        );
+
+        /** If table has primary keys and they are all in this view then use only primary keys */
+        if (
+          table?.pkey_columns?.every((pkey) =>
+            tableCols.some((c) => c.columnName === pkey),
+          )
+        ) {
+          tableCols = tableCols.filter((c) =>
+            table?.pkey_columns?.includes(c.columnName!),
+          );
+        } else {
+          /** Exclude non comparable data types */
+          tableCols = tableCols.filter(
+            (c) => !["json", "xml"].includes(c.udt_name),
+          );
+        }

-      if (tableCols.length) {
+        const { relname: tableName, schemaname: tableSchema } = table;

-        const tableNameEscaped = tableSchema === current_schema ? table.relname : [tableSchema, tableName].map(v => JSON.stringify(v)).join(".");
+        if (tableCols.length) {
+          const tableNameEscaped =
+            tableSchema === current_schema
+              ? table.relname
+              : [tableSchema, tableName]
+                  .map((v) => JSON.stringify(v))
+                  .join(".");

-        const fullCondition = `EXISTS (
+          const fullCondition = `EXISTS (
            SELECT 1
            FROM ${viewNameEscaped}
-          WHERE ${tableCols.map(c => `${tableNameEscaped}.${JSON.stringify(c.columnName)} = ${viewNameEscaped}.${JSON.stringify(c.name)}`).join(" AND \n")}
+          WHERE ${tableCols.map((c) => `${tableNameEscaped}.${JSON.stringify(c.columnName)} = ${viewNameEscaped}.${JSON.stringify(c.name)}`).join(" AND \n")}
            AND ${condition || "TRUE"}
          )`;
-        try {
-          const { count } = await this.db.oneOrNone(`
+          try {
+            const { count } = await this.db.oneOrNone(`
             WITH ${asName(tableName)} AS (
               SELECT *
               FROM ${asName(tableName)}
@@ -86,43 +127,56 @@ export async function getSubscribeRelatedTables(this: ViewHandler, { filter, loc
             ) prostgles_view_ref_table_test
           `);

-          const relatedTableSubscription = {
-            tableName: tableName!,
-            tableNameEscaped,
-            condition: fullCondition,
-          }
+            const relatedTableSubscription = {
+              tableName: tableName!,
+              tableNameEscaped,
+              condition: fullCondition,
+            };

-          if (count.toString() === '0') {
-            return relatedTableSubscription;
+            if (count.toString() === "0") {
+              return relatedTableSubscription;
+            }
+          } catch (e) {
+            log(
+              `Could not override subscribed view (${this.name}) table (${tableName}). Will not check condition`,
+              e,
+            );
          }
-        } catch (e) {
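In plain terms: subscribing to a view walks its definition, finds the underlying tables, and attaches a trigger per table with the narrowest EXISTS condition it can verify. A minimal consumer-side sketch (the `user_stats` view and its `users` base table are hypothetical):

```typescript
// Hypothetical: CREATE VIEW user_stats AS SELECT id, name FROM users;
// The callback re-fires when rows of the underlying "users" table
// that are visible through the view change.
const sub = await dbo.user_stats.subscribe({}, {}, (rows) => {
  console.log("view rows changed:", rows.length);
});
// later: await sub.unsubscribe();
```

-          log(`Could not not override subscribed view (${this.name}) table (${tableName}). 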
Will not check condition`, e); } - } - - return { - tableName, - tableNameEscaped: JSON.stringify(tableName),// [table.schemaname, table.relname].map(v => JSON.stringify(v)).join("."), - condition: "TRUE" - } - })) + return { + tableName, + tableNameEscaped: JSON.stringify(tableName), // [table.schemaname, table.relname].map(v => JSON.stringify(v)).join("."), + condition: "TRUE", + }; + }), + ); /** Get list of remaining used inner tables */ - const allUsedTables: { table_name: string; table_schema: string; }[] = await this.db.any( - "SELECT distinct table_name, table_schema FROM information_schema.view_column_usage WHERE view_name = ${viewName}", - { viewName } - ); + const allUsedTables: { table_name: string; table_schema: string }[] = + await this.db.any( + "SELECT distinct table_name, table_schema FROM information_schema.view_column_usage WHERE view_name = ${viewName}", + { viewName }, + ); /** Remaining tables will have listeners on all records (condition = "TRUE") */ - const remainingInnerTables = allUsedTables.filter(at => !tableColumns.some(dc => dc.tableName === at.table_name && dc.tableSchema === at.table_schema)); + const remainingInnerTables = allUsedTables.filter( + (at) => + !tableColumns.some( + (dc) => + dc.tableName === at.table_name && + dc.tableSchema === at.table_schema, + ), + ); viewOptions.relatedTables = [ ...viewOptions.relatedTables, - ...remainingInnerTables.map(t => ({ + ...remainingInnerTables.map((t) => ({ tableName: t.table_name, - tableNameEscaped: [t.table_name, t.table_schema].map(v => JSON.stringify(v)).join("."), - condition: "TRUE" - })) + tableNameEscaped: [t.table_name, t.table_schema] + .map((v) => JSON.stringify(v)) + .join("."), + condition: "TRUE", + })), ]; if (!viewOptions.relatedTables.length) { @@ -133,51 +187,58 @@ export async function getSubscribeRelatedTables(this: ViewHandler, { filter, loc } else { viewOptions = { type: "table", - relatedTables: [] - } + relatedTables: [], + }; const nonExistsFilter = newQuery.whereOpts.exists.length ? {} : filter; - const pushRelatedTable = async (relatedTableName: string, joinPath: ParsedJoinPath[]) => { + const pushRelatedTable = async ( + relatedTableName: string, + joinPath: ParsedJoinPath[], + ) => { const relatedTableOrViewHandler = this.dboBuilder.dbo[relatedTableName]; if (!relatedTableOrViewHandler) { throw `Table ${relatedTableName} not found`; } - const alreadyPushed = viewOptions?.relatedTables.find(rt => rt.tableName === relatedTableName) - if(alreadyPushed || relatedTableOrViewHandler.is_view){ - return + const alreadyPushed = viewOptions?.relatedTables.find( + (rt) => rt.tableName === relatedTableName, + ); + if (alreadyPushed || relatedTableOrViewHandler.is_view) { + return; } viewOptions ??= { type: "table", - relatedTables: [] - } + relatedTables: [], + }; viewOptions.relatedTables.push({ tableName: relatedTableName, tableNameEscaped: asName(relatedTableName), - condition: (await relatedTableOrViewHandler!.prepareWhere!({ - select: undefined, - filter: { - $existsJoined: { - path: reverseParsedPath(joinPath, this.name), - filter: nonExistsFilter - } - }, - addWhere: false, - localParams: undefined, - tableRule: undefined - })).where + condition: ( + await relatedTableOrViewHandler!.prepareWhere!({ + select: undefined, + filter: { + $existsJoined: { + path: reverseParsedPath(joinPath, this.name), + filter: nonExistsFilter, + }, + }, + addWhere: false, + localParams: undefined, + tableRule: undefined, + }) + ).where, }); - } + }; /** * Avoid nested exists error. 
Will affect performance */ - for await (const j of (newQuery.joins ?? [])) { + for await (const j of newQuery.joins ?? []) { await pushRelatedTable(j.table, j.joinPath); } - for await (const e of newQuery.whereOpts.exists.filter(e => e.isJoined)) { - if(!e.isJoined) throw `Not possible`; + for await (const e of newQuery.whereOpts.exists.filter((e) => e.isJoined)) { + if (!e.isJoined) throw `Not possible`; const targetTable = e.parsedPath.at(-1)!.table; await pushRelatedTable(targetTable, e.parsedPath); } @@ -187,4 +248,4 @@ export async function getSubscribeRelatedTables(this: ViewHandler, { filter, loc } return viewOptions; -} \ No newline at end of file +} diff --git a/lib/DboBuilder/getTablesForSchemaPostgresSQL.ts b/lib/DboBuilder/getTablesForSchemaPostgresSQL.ts index 06cce631..374b1373 100644 --- a/lib/DboBuilder/getTablesForSchemaPostgresSQL.ts +++ b/lib/DboBuilder/getTablesForSchemaPostgresSQL.ts @@ -6,7 +6,10 @@ import { clone } from "../utils"; import { TableSchema, TableSchemaColumn } from "./DboBuilderTypes"; import { ProstglesInitOptions } from "../ProstglesTypes"; -const getMaterialViews = (db: DBorTx, schema: ProstglesInitOptions["schemaFilter"]) => { +const getMaterialViews = ( + db: DBorTx, + schema: ProstglesInitOptions["schemaFilter"], +) => { const { sql, schemaNames } = getSchemaFilter(schema); const query = ` @@ -97,7 +100,9 @@ where nspname = 'public' and relname = 'test_view'; return db.any(query, { schemaNames }); }; -export const getSchemaFilter = (schema: ProstglesInitOptions["schemaFilter"] = { public: 1 }) => { +export const getSchemaFilter = ( + schema: ProstglesInitOptions["schemaFilter"] = { public: 1 }, +) => { const schemaNames = Object.keys(schema); const isInclusive = Object.values(schema).every((v) => v); if (!schemaNames.length) { @@ -114,7 +119,7 @@ export const getSchemaFilter = (schema: ProstglesInitOptions["schemaFilter"] = { // Reason: this query gets blocked by prostgles.app_triggers from PubSubManager.addTrigger in some cases (pg_dump locks that table) export async function getTablesForSchemaPostgresSQL( { db, runSQL }: DboBuilder, - schema: ProstglesInitOptions["schemaFilter"] + schema: ProstglesInitOptions["schemaFilter"], ): Promise<{ result: TableSchema[]; durations: Record; @@ -160,7 +165,7 @@ export async function getTablesForSchemaPostgresSQL( ) SELECT * FROM fk `, - { schemaNames } + { schemaNames }, ); return { fkeys }; @@ -212,8 +217,9 @@ export async function getTablesForSchemaPostgresSQL( } const getTVColumns = await tryCatch(async () => { - const columns: (TableSchemaColumn & { table_oid: number })[] = await t.any( - ` + const columns: (TableSchemaColumn & { table_oid: number })[] = + await t.any( + ` SELECT table_oid , ccc.column_name as name , @@ -275,8 +281,8 @@ export async function getTablesForSchemaPostgresSQL( WHERE table_schema ${sql} ORDER BY table_oid, ordinal_position `, - { schemaNames } - ); + { schemaNames }, + ); return { columns }; }); @@ -285,7 +291,8 @@ export async function getTablesForSchemaPostgresSQL( } const getViewParentTables = await tryCatch(async () => { - const parent_tables: { oid: number; table_names: string[] }[] = await t.any(` + const parent_tables: { oid: number; table_names: string[] }[] = + await t.any(` SELECT cl_r.oid, cl_r.relname as view_name, array_agg(DISTINCT cl_d.relname) AS table_names FROM pg_rewrite AS r JOIN pg_class AS cl_r ON r.ev_class = cl_r.oid @@ -331,18 +338,18 @@ export async function getTablesForSchemaPostgresSQL( --GROUP BY t.table_schema, t.table_name, t.is_view, 
t.view_definition, t.oid ORDER BY schema, name `; - const tablesAndViews = ((await t.any(query, { schemaNames })) as TableSchema[]).map( - (table) => { - table.columns = - clone(getTVColumns.columns) - .filter((c) => c.table_oid === table.oid) - .map((c) => omitKeys(c, ["table_oid"])) ?? []; - table.parent_tables = - getViewParentTables.parent_tables?.find((vr) => vr.oid === table.oid)?.table_names ?? - []; - return table; - } - ); + const tablesAndViews = ( + (await t.any(query, { schemaNames })) as TableSchema[] + ).map((table) => { + table.columns = + clone(getTVColumns.columns) + .filter((c) => c.table_oid === table.oid) + .map((c) => omitKeys(c, ["table_oid"])) ?? []; + table.parent_tables = + getViewParentTables.parent_tables?.find((vr) => vr.oid === table.oid) + ?.table_names ?? []; + return table; + }); return { tablesAndViews }; }); if (getTablesAndViews.error || !getTablesAndViews.tablesAndViews) { @@ -365,7 +372,9 @@ export async function getTablesForSchemaPostgresSQL( console.error(getHyperTablesReq.error); } - let result = getTablesAndViews.tablesAndViews.concat(getMaterialViewsReq.materialViews); + let result = getTablesAndViews.tablesAndViews.concat( + getMaterialViewsReq.materialViews, + ); result = await Promise.all( result.map(async (table) => { table.name = table.escaped_identifier; @@ -379,7 +388,7 @@ export async function getTablesForSchemaPostgresSQL( allowAllIfNoColumns ?? table.columns.some((c) => c.privileges.UPDATE); table.columns = table.columns.map((c) => { const refs = getFkeys.fkeys!.filter( - (fc) => fc.oid === table.oid && fc.cols.includes(c.name) + (fc) => fc.oid === table.oid && fc.cols.includes(c.name), ); if (refs.length) c.references = refs.map((_ref) => { @@ -395,25 +404,30 @@ export async function getTablesForSchemaPostgresSQL( let viewFCols: Pick[] = []; if (table.is_view) { try { - const view_definition = - table.view_definition?.endsWith(";") ? - table.view_definition.slice(0, -1) + const view_definition = table.view_definition?.endsWith(";") + ? table.view_definition.slice(0, -1) : table.view_definition; const { fields } = (await runSQL( `SELECT * FROM \n ( ${view_definition!} \n) t LIMIT 0`, {}, {}, - undefined + undefined, )) as SQLResult; - const ftables = result.filter((r) => fields.some((f) => f.tableID === r.oid)); + const ftables = result.filter((r) => + fields.some((f) => f.tableID === r.oid), + ); ftables.forEach((ft) => { const fFields = fields.filter((f) => f.tableID === ft.oid); const pkeys = ft.columns.filter((c) => c.is_pkey); - const fFieldPK = fFields.filter((ff) => pkeys.some((p) => p.name === ff.columnName)); + const fFieldPK = fFields.filter((ff) => + pkeys.some((p) => p.name === ff.columnName), + ); const refCols = - pkeys.length && fFieldPK.length === pkeys.length ? - fFieldPK - : fFields.filter((ff) => !["json", "jsonb", "xml"].includes(ff.udt_name)); + pkeys.length && fFieldPK.length === pkeys.length + ? fFieldPK + : fFields.filter( + (ff) => !["json", "jsonb", "xml"].includes(ff.udt_name), + ); const _fcols: typeof viewFCols = refCols.map((ff) => { const d: Pick = { name: ff.columnName!, @@ -438,13 +452,11 @@ export async function getTablesForSchemaPostgresSQL( if (col.has_default) { /** Hide pkey default value */ col.column_default = - ( - col.udt_name !== "uuid" && - !col.is_pkey && - !col.column_default.startsWith("nextval(") - ) ? - col.column_default - : null; + col.udt_name !== "uuid" && + !col.is_pkey && + !col.column_default.startsWith("nextval(") + ? 
col.column_default
+              : null;
        }

        const viewFCol = viewFCols?.find((fc) => fc.name === col.name);
@@ -454,14 +466,19 @@
          return col;
        });

-        table.isHyperTable = getHyperTablesReq.hyperTables?.includes(table.name);
+        table.isHyperTable = getHyperTablesReq.hyperTables?.includes(
+          table.name,
+        );

        table.uniqueColumnGroups = uniqueColsReq.data
-          ?.filter((r) => r.table_name === table.name && r.table_schema === table.schema)
+          ?.filter(
+            (r) =>
+              r.table_name === table.name && r.table_schema === table.schema,
+          )
          .map((r) => r.column_names);

        return table;
-      })
+      }),
    );

    const res = {
@@ -493,11 +510,11 @@ const getHyperTables = async (db: DBorTx): Promise => {
      AND    table_schema = ${schema} \
      AND    table_name  = 'hypertable' \
    );",
-    { schema }
+    { schema },
  );
  if (res.exists) {
    const tables: { table_name: string }[] = await db.any(
-      "SELECT table_name FROM " + asName(schema) + ".hypertable;"
+      "SELECT table_name FROM " + asName(schema) + ".hypertable;",
    );
    return tables.map((t) => t.table_name);
  }
diff --git a/lib/DboBuilder/insertNestedRecords.ts b/lib/DboBuilder/insertNestedRecords.ts
index c854346d..78147e7d 100644
--- a/lib/DboBuilder/insertNestedRecords.ts
+++ b/lib/DboBuilder/insertNestedRecords.ts
@@ -1,11 +1,17 @@
-import { AnyObject, getKeys, InsertParams, isDefined, isObject } from "prostgles-types";
+import {
+  AnyObject,
+  getKeys,
+  InsertParams,
+  isDefined,
+  isObject,
+} from "prostgles-types";
 import { LocalParams, TableHandlers } from "./DboBuilder";
 import { TableRule } from "../PublishParser/PublishParser";
 import { omitKeys } from "../PubSubManager/PubSubManager";
 import { TableHandler } from "./TableHandler/TableHandler";

 type InsertNestedRecordsArgs = {
-  data: (AnyObject | AnyObject[]);
+  data: AnyObject | AnyObject[];
   param2?: InsertParams;
   tableRules?: TableRule;
   localParams?: LocalParams;
@@ -16,34 +22,29 @@
  */
 export async function insertNestedRecords(
   this: TableHandler,
-  {
-    data,
-    param2,
-    tableRules,
-    localParams = {},
-  }: InsertNestedRecordsArgs
+  { data, param2, tableRules, localParams = {} }: InsertNestedRecordsArgs,
 ): Promise<{
   data?: AnyObject | AnyObject[];
   insertResult?: AnyObject | AnyObject[];
-}> {
+}> {
   const MEDIA_COL_NAMES = ["data", "name"];

-  const getExtraKeys = (row: AnyObject) => getKeys(row).filter(fieldName => {
-    /* If media then use file insert columns */
-    if (this.is_media) {
-      return !this.column_names.concat(MEDIA_COL_NAMES).includes(fieldName)
-    } else if (!this.columns.find(c => c.name === fieldName)) {
-      if (!isObject(row[fieldName]) && !Array.isArray(row[fieldName])) {
-        throw new Error("Invalid/Dissalowed field in data: " + fieldName)
-      } else if (!this.dboBuilder.dbo[fieldName]) {
-        return false;
-        // throw new Error("Invalid/Dissalowed nested insert table name in data: " + fieldName)
+  const getExtraKeys = (row: AnyObject) =>
+    getKeys(row).filter((fieldName) => {
+      /* If media then use file insert columns */
+      if (this.is_media) {
+        return !this.column_names.concat(MEDIA_COL_NAMES).includes(fieldName);
+      } else if (!this.columns.find((c) => c.name === fieldName)) {
+        if (!isObject(row[fieldName]) && !Array.isArray(row[fieldName])) {
+          throw new Error("Invalid/Disallowed field in data: " + fieldName);
+        } else if (!this.dboBuilder.dbo[fieldName]) {
+          return false;
+          // throw new Error("Invalid/Disallowed nested insert table name in data: " + fieldName)
+        }
+        return true;
       }
-      return true;
-    }
-    return false;
-  });
-
+      return false;
+    });
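Client-side, the two nested-insert shapes that getExtraKeys and getReferenceColumnInserts detect look like this (a sketch; the `users`, `companies` and `posts` tables and the `company_id` foreign key are hypothetical):

```typescript
await dbo.users.insert({
  name: "Alice",
  // referencing-column shape: a row in place of the foreign key value
  company_id: { name: "Acme" },
  // related-table shape: a key naming a joined table
  posts: [{ title: "hello" }, { title: "world" }],
});
```

 /**
 * True 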
when: nested table data is provided within @@ -51,10 +52,15 @@ export async function insertNestedRecords( * OR * [referencing_column] property key * If true then will do the full insert within this function - * Nested insert is not allowed for the file table + * Nested insert is not allowed for the file table * */ const isMultiInsert = Array.isArray(data); - const hasNestedInserts = this.is_media ? false : (isMultiInsert ? data : [data]).some(d => getExtraKeys(d).length || getReferenceColumnInserts(this, d).length); + const hasNestedInserts = this.is_media + ? false + : (isMultiInsert ? data : [data]).some( + (d) => + getExtraKeys(d).length || getReferenceColumnInserts(this, d).length, + ); /** * Make sure nested insert uses a transaction @@ -69,242 +75,363 @@ export async function insertNestedRecords( param2, undefined, tableRules, - { tx: { dbTX, t: _t }, ...localParams } - ) - ) - } + { tx: { dbTX, t: _t }, ...localParams }, + ), + ), + }; } - - - const _data = await Promise.all((isMultiInsert ? data : [data]).map(async row => { - // const { preValidate, validate } = tableRules?.insert ?? {}; - // const { tableConfigurator } = this.dboBuilder.prostgles; - // if(!tableConfigurator) throw "tableConfigurator missing"; - // let row = await tableConfigurator.getPreInsertRow(this, { dbx: this.getFinalDbo(localParams), validate, localParams, row: _row }) - // if (preValidate) { - // row = await preValidate({ row, dbx: this.tx?.dbTX || this.dboBuilder.dbo, localParams }); - // } - - /* Potentially a nested join */ - if (hasNestedInserts) { - const extraKeys = getExtraKeys(row); - const colInserts = getReferenceColumnInserts(this, row); - - /* Ensure we're using the same transaction */ - const _this = this.tx ? this : dbTX![this.name] as TableHandler; - - const omitedKeys = extraKeys.concat(colInserts.map(c => c.col)); - - const rootData: AnyObject = omitKeys(row, omitedKeys); - - let insertedChildren: AnyObject[]; - let targetTableRules: TableRule; - - const colInsertsResult = colInserts.map(ci => ({ - ...ci, - inserted: undefined as AnyObject[] | undefined - })); - /** Insert referenced first and then populate root data with referenced keys */ - if (colInserts.length) { - for await (const colInsert of colInsertsResult) { - const newLocalParams: LocalParams = { - ...(localParams ?? {}), - nestedInsert: { - depth: (localParams.nestedInsert?.depth ?? 0) + 1, - previousData: rootData, - previousTable: this.name, - referencingColumn: colInsert.col - } - } - const colRows = await referencedInsert(_this, dbTX, newLocalParams, colInsert.tableName, row[colInsert.col]); - if (!Array.isArray(colRows) || colRows.length !== 1 || [null, undefined].includes(colRows[0]![colInsert.fcol])) { - throw new Error("Could not do nested column insert: Unexpected return " + JSON.stringify(colRows)) - } - colInsert.inserted = colRows; - - const foreignKey = colRows[0]![colInsert.fcol]; - rootData[colInsert.col] = foreignKey; - } - } - - const fullRootResult = await _this.insert(rootData, { returning: "*" }, undefined, tableRules, localParams); - let returnData: AnyObject | undefined; - const returning = param2?.returning; - if (returning) { - returnData = {} - const returningItems = await this.prepareReturning(returning, this.parseFieldFilter(tableRules?.insert?.returningFields)); - returningItems.filter(s => s.selected).map(rs => { - const colInsertResult = colInsertsResult.find(({ col }) => col === rs.columnName); - const inserted = colInsertResult?.singleInsert? 
colInsertResult.inserted?.[0] : colInsertResult?.inserted; - returnData![rs.alias] = inserted ?? fullRootResult[rs.alias]; - }) - } - - await Promise.all(extraKeys.map(async targetTable => { - const childDataItems = Array.isArray(row[targetTable]) ? row[targetTable] : [row[targetTable]]; - - const childInsert = async (cdata: AnyObject | AnyObject[], tableName: string) => { - - return referencedInsert(this, dbTX, localParams, tableName, cdata); - } - - const joinPath = await getJoinPath(this, targetTable); - const { path } = joinPath; - const [tbl1, tbl2, tbl3] = path; - targetTableRules = await getInsertTableRules(this, targetTable, localParams); // tbl3 - - const cols2 = this.dboBuilder.dbo[tbl2!]!.columns || []; - if (!this.dboBuilder.dbo[tbl2!]) throw "Invalid/disallowed table: " + tbl2; - const colsRefT1 = cols2?.filter(c => c.references?.some(rc => rc.cols.length === 1 && rc.ftable === tbl1)); - - - if (!path.length) { - throw "Nested inserts join path not found for " + [this.name, targetTable]; - } else if (path.length === 2) { - if (targetTable !== tbl2) throw "Did not expect this"; - - if (!colsRefT1.length) { - throw `Target table ${tbl2} does not reference any columns from the root table ${this.name}. Cannot insert nested data`; - } - - insertedChildren = await childInsert( - childDataItems.map((d: AnyObject) => { - const result = { ...d }; - colsRefT1.map(col => { - result[col.references![0]!.cols[0]!] = fullRootResult[col.references![0]!.fcols[0]!] - }) - return result; - }), - targetTable - ); - - } else if (path.length === 3) { - if (targetTable !== tbl3) throw "Did not expect this"; - const colsRefT3 = cols2?.filter(c => c.references?.some(rc => rc.cols.length === 1 && rc.ftable === tbl3)); - if (!colsRefT1.length || !colsRefT3.length) throw "Incorrectly referenced or missing columns for nested insert"; - - const fileTable = this.dboBuilder.prostgles.fileManager?.tableName; - if (targetTable !== fileTable) { - throw "Only media allowed to have nested inserts more than 2 tables apart" - } + const _data = await Promise.all( + (isMultiInsert ? data : [data]).map(async (row) => { + // const { preValidate, validate } = tableRules?.insert ?? {}; + // const { tableConfigurator } = this.dboBuilder.prostgles; + // if(!tableConfigurator) throw "tableConfigurator missing"; + // let row = await tableConfigurator.getPreInsertRow(this, { dbx: this.getFinalDbo(localParams), validate, localParams, row: _row }) + // if (preValidate) { + // row = await preValidate({ row, dbx: this.tx?.dbTX || this.dboBuilder.dbo, localParams }); + // } + + /* Potentially a nested join */ + if (hasNestedInserts) { + const extraKeys = getExtraKeys(row); + const colInserts = getReferenceColumnInserts(this, row); + + /* Ensure we're using the same transaction */ + const _this = this.tx ? this : (dbTX![this.name] as TableHandler); + + const omitedKeys = extraKeys.concat(colInserts.map((c) => c.col)); + + const rootData: AnyObject = omitKeys(row, omitedKeys); + + let insertedChildren: AnyObject[]; + let targetTableRules: TableRule; + + const colInsertsResult = colInserts.map((ci) => ({ + ...ci, + inserted: undefined as AnyObject[] | undefined, + })); + /** Insert referenced first and then populate root data with referenced keys */ + if (colInserts.length) { + for await (const colInsert of colInsertsResult) { + const newLocalParams: LocalParams = { + ...(localParams ?? {}), + nestedInsert: { + depth: (localParams.nestedInsert?.depth ?? 
0) + 1, + previousData: rootData, + previousTable: this.name, + referencingColumn: colInsert.col, + }, + }; + const colRows = await referencedInsert( + _this, + dbTX, + newLocalParams, + colInsert.tableName, + row[colInsert.col], + ); + if ( + !Array.isArray(colRows) || + colRows.length !== 1 || + [null, undefined].includes(colRows[0]![colInsert.fcol]) + ) { + throw new Error( + "Could not do nested column insert: Unexpected return " + + JSON.stringify(colRows), + ); + } + colInsert.inserted = colRows; - /* We expect tbl2 to have only 2 columns (media_id and foreign_id) */ - if (!cols2 || !( - cols2.filter(c => c.references?.[0]?.ftable === fileTable).length === 1 && - cols2.filter(c => c.references?.[0]?.ftable === _this.name).length === 1 - )) { - console.log({ tbl1, tbl2, tbl3, name: _this.name, tthisName: this.name }) - throw "Second joining table (" + tbl2 + ") not of expected format. Must contain exactly one reference column for each table (file table and target table) "; + const foreignKey = colRows[0]![colInsert.fcol]; + rootData[colInsert.col] = foreignKey; } - - insertedChildren = await childInsert(childDataItems, targetTable); - - /* Insert in key_lookup table */ - await Promise.all(insertedChildren.map(async t3Child => { - const tbl2Row: AnyObject = {}; - - colsRefT3.map(col => { - tbl2Row[col.name] = t3Child[col.references![0]!.fcols[0]!]; - }) - colsRefT1.map(col => { - tbl2Row[col.name] = fullRootResult[col.references![0]!.fcols[0]!]; - }) - - await childInsert(tbl2Row, tbl2!);//.then(() => {}); - })); - - } else { - console.error(JSON.stringify({ path, thisTable: this.name, targetTable }, null, 2)); - throw "Unexpected path for Nested inserts"; } - /* Return also the nested inserted data */ - if (targetTableRules && insertedChildren?.length && returning) { - const targetTableHandler = dbTX![targetTable] as TableHandler; - const targetReturning = await targetTableHandler.prepareReturning("*", targetTableHandler.parseFieldFilter(targetTableRules?.insert?.returningFields)); - const clientTargetInserts = insertedChildren.map(d => { - const _d = { ...d }; - const res: AnyObject = {}; - targetReturning.map(r => { - res[r.alias] = _d[r.alias] + const fullRootResult = await _this.insert( + rootData, + { returning: "*" }, + undefined, + tableRules, + localParams, + ); + let returnData: AnyObject | undefined; + const returning = param2?.returning; + if (returning) { + returnData = {}; + const returningItems = await this.prepareReturning( + returning, + this.parseFieldFilter(tableRules?.insert?.returningFields), + ); + returningItems + .filter((s) => s.selected) + .map((rs) => { + const colInsertResult = colInsertsResult.find( + ({ col }) => col === rs.columnName, + ); + const inserted = colInsertResult?.singleInsert + ? colInsertResult.inserted?.[0] + : colInsertResult?.inserted; + returnData![rs.alias] = inserted ?? fullRootResult[rs.alias]; }); - return res; - }); - - returnData![targetTable] = clientTargetInserts.length === 1 ? clientTargetInserts[0] : clientTargetInserts; } - })); - return returnData - } + await Promise.all( + extraKeys.map(async (targetTable) => { + const childDataItems = Array.isArray(row[targetTable]) + ? 
row[targetTable] + : [row[targetTable]]; + + const childInsert = async ( + cdata: AnyObject | AnyObject[], + tableName: string, + ) => { + return referencedInsert( + this, + dbTX, + localParams, + tableName, + cdata, + ); + }; + + const joinPath = await getJoinPath(this, targetTable); + + const { path } = joinPath; + const [tbl1, tbl2, tbl3] = path; + targetTableRules = await getInsertTableRules( + this, + targetTable, + localParams, + ); // tbl3 + + const cols2 = this.dboBuilder.dbo[tbl2!]!.columns || []; + if (!this.dboBuilder.dbo[tbl2!]) + throw "Invalid/disallowed table: " + tbl2; + const colsRefT1 = cols2?.filter((c) => + c.references?.some( + (rc) => rc.cols.length === 1 && rc.ftable === tbl1, + ), + ); + + if (!path.length) { + throw ( + "Nested inserts join path not found for " + + [this.name, targetTable] + ); + } else if (path.length === 2) { + if (targetTable !== tbl2) throw "Did not expect this"; + + if (!colsRefT1.length) { + throw `Target table ${tbl2} does not reference any columns from the root table ${this.name}. Cannot insert nested data`; + } + + insertedChildren = await childInsert( + childDataItems.map((d: AnyObject) => { + const result = { ...d }; + colsRefT1.map((col) => { + result[col.references![0]!.cols[0]!] = + fullRootResult[col.references![0]!.fcols[0]!]; + }); + return result; + }), + targetTable, + ); + } else if (path.length === 3) { + if (targetTable !== tbl3) throw "Did not expect this"; + const colsRefT3 = cols2?.filter((c) => + c.references?.some( + (rc) => rc.cols.length === 1 && rc.ftable === tbl3, + ), + ); + if (!colsRefT1.length || !colsRefT3.length) + throw "Incorrectly referenced or missing columns for nested insert"; + + const fileTable = + this.dboBuilder.prostgles.fileManager?.tableName; + if (targetTable !== fileTable) { + throw "Only media allowed to have nested inserts more than 2 tables apart"; + } + + /* We expect tbl2 to have only 2 columns (media_id and foreign_id) */ + if ( + !cols2 || + !( + cols2.filter((c) => c.references?.[0]?.ftable === fileTable) + .length === 1 && + cols2.filter((c) => c.references?.[0]?.ftable === _this.name) + .length === 1 + ) + ) { + console.log({ + tbl1, + tbl2, + tbl3, + name: _this.name, + tthisName: this.name, + }); + throw ( + "Second joining table (" + + tbl2 + + ") not of expected format. 
Must contain exactly one reference column for each table (file table and target table) "
+            );
+          }
+
+          insertedChildren = await childInsert(childDataItems, targetTable);
+
+          /* Insert in key_lookup table */
+          await Promise.all(
+            insertedChildren.map(async (t3Child) => {
+              const tbl2Row: AnyObject = {};
+
+              colsRefT3.map((col) => {
+                tbl2Row[col.name] = t3Child[col.references![0]!.fcols[0]!];
+              });
+              colsRefT1.map((col) => {
+                tbl2Row[col.name] =
+                  fullRootResult[col.references![0]!.fcols[0]!];
+              });
+
+              await childInsert(tbl2Row, tbl2!); //.then(() => {});
+            }),
+          );
+        } else {
+          console.error(
+            JSON.stringify(
+              { path, thisTable: this.name, targetTable },
+              null,
+              2,
+            ),
+          );
+          throw "Unexpected path for Nested inserts";
+        }
+
+        /* Return also the nested inserted data */
+        if (targetTableRules && insertedChildren?.length && returning) {
+          const targetTableHandler = dbTX![targetTable] as TableHandler;
+          const targetReturning = await targetTableHandler.prepareReturning(
+            "*",
+            targetTableHandler.parseFieldFilter(
+              targetTableRules?.insert?.returningFields,
+            ),
+          );
+          const clientTargetInserts = insertedChildren.map((d) => {
+            const _d = { ...d };
+            const res: AnyObject = {};
+            targetReturning.map((r) => {
+              res[r.alias] = _d[r.alias];
+            });
+            return res;
+          });
+
+          returnData![targetTable] =
+            clientTargetInserts.length === 1
+              ? clientTargetInserts[0]
+              : clientTargetInserts;
+        }
+      }),
+    );
+
+    return returnData;
+  }

-    return row;
-  }));
+      return row;
+    }),
+  );

   const result = isMultiInsert ? _data : _data[0];
-  const res = hasNestedInserts ?
-    { insertResult: result } :
-    { data: result };
+  const res = hasNestedInserts ? { insertResult: result } : { data: result };
   return res;
 }

 /* Must be allowed to insert into referenced table */
-export const getInsertTableRules = async (tableHandler: TableHandler, targetTable: string, localParams: LocalParams) => {
-  const childRules = await tableHandler.dboBuilder.publishParser?.getValidatedRequestRuleWusr({ tableName: targetTable, command: "insert", localParams });
-  if (!childRules || !childRules.insert) throw "Dissallowed nested insert into table " + childRules;
+export const getInsertTableRules = async (
+  tableHandler: TableHandler,
+  targetTable: string,
+  localParams: LocalParams,
+) => {
+  const childRules =
+    await tableHandler.dboBuilder.publishParser?.getValidatedRequestRuleWusr({
+      tableName: targetTable,
+      command: "insert",
+      localParams,
+    });
+  if (!childRules || !childRules.insert)
+    throw "Disallowed nested insert into table " + targetTable;
   return childRules;
-}
+};

-const getJoinPath = async (tableHandler: TableHandler, targetTable: string): Promise<{
+const getJoinPath = async (
+  tableHandler: TableHandler,
+  targetTable: string,
+): Promise<{
   t1: string;
   t2: string;
   path: string[];
 }> => {
-
-  const jp = tableHandler.dboBuilder.getShortestJoinPath(tableHandler, targetTable);
+  const jp = tableHandler.dboBuilder.getShortestJoinPath(
+    tableHandler,
+    targetTable,
+  );
   if (!jp) {
-    const pref = tableHandler.dboBuilder.prostgles.opts.joins !== "inferred" ? "Joins are not inferred! " : ""
-    throw new Error(`${pref}Could not find a single join path for the nested data ( sourceTable: ${tableHandler.name} targetTable: ${targetTable} ) `);
+    const pref =
+      tableHandler.dboBuilder.prostgles.opts.joins !== "inferred"
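As getInsertTableRules enforces, a nested insert only succeeds when the publish rules allow inserting into every table it touches. A sketch of a permissive publish config for the hypothetical tables above (rule shape assumed):

```typescript
const publish = () => ({
  users: { insert: "*" }, // root table of the nested insert
  companies: { insert: "*" }, // referenced via users.company_id
  posts: { insert: "*" }, // inserted through the related-table key
});
```

+        ? "Joins are not inferred! 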
" + : ""; + throw new Error( + `${pref}Could not find a single join path for the nested data ( sourceTable: ${tableHandler.name} targetTable: ${targetTable} ) `, + ); } return jp; -} - -const referencedInsert = async (tableHandler: TableHandler, dbTX: TableHandlers | undefined, localParams: LocalParams, targetTable: string, targetData: AnyObject | AnyObject[]): Promise => { - +}; +const referencedInsert = async ( + tableHandler: TableHandler, + dbTX: TableHandlers | undefined, + localParams: LocalParams, + targetTable: string, + targetData: AnyObject | AnyObject[], +): Promise => { // const thisInfo = await tableHandler.getInfo(); await getJoinPath(tableHandler, targetTable); - if (!targetData || !dbTX?.[targetTable] || !("insert" in dbTX[targetTable]!)) { - throw new Error("childInsertErr: Table handler missing for referenced table: " + targetTable); + if ( + !targetData || + !dbTX?.[targetTable] || + !("insert" in dbTX[targetTable]!) + ) { + throw new Error( + "childInsertErr: Table handler missing for referenced table: " + + targetTable, + ); } - const childRules = await getInsertTableRules(tableHandler, targetTable, localParams); + const childRules = await getInsertTableRules( + tableHandler, + targetTable, + localParams, + ); // if (thisInfo.has_media === "one" && thisInfo.media_table_name === targetTable && Array.isArray(targetData) && targetData.length > 1) { // throw "Constraint check fail: Cannot insert more than one record into " + JSON.stringify(targetTable); // } return Promise.all( - (Array.isArray(targetData) ? targetData : [targetData]) - .map(m => (dbTX![targetTable] as TableHandler) + (Array.isArray(targetData) ? targetData : [targetData]).map((m) => + (dbTX![targetTable] as TableHandler) .insert(m, { returning: "*" }, undefined, childRules, localParams) - .catch(e => { - return Promise.reject(e); - }) - ) + .catch((e) => { + return Promise.reject(e); + }), + ), ); - -} +}; type ReferenceColumnInsert = { tableName: string; col: string; fcol: string; singleInsert: boolean; - data: ExpectSingleInsert extends true? AnyObject : (AnyObject | AnyObject[]); -} + data: ExpectSingleInsert extends true ? AnyObject : AnyObject | AnyObject[]; +}; /** * Insert through the reference column. e.g.: @@ -313,34 +440,41 @@ type ReferenceColumnInsert = { * fkey_column: { ...referenced_table_data } * } */ -export const getReferenceColumnInserts = (tableHandler: TableHandler, parentRow: AnyObject, expectSingleInsert?: ExpectSingleInsert): ReferenceColumnInsert[] => { +export const getReferenceColumnInserts = ( + tableHandler: TableHandler, + parentRow: AnyObject, + expectSingleInsert?: ExpectSingleInsert, +): ReferenceColumnInsert[] => { return Object.entries(parentRow) .map(([insertedFieldName, insertedFieldValue]) => { - if(insertedFieldValue && isObject(insertedFieldValue)){ - const insertedRefCol = tableHandler.columns.find(c => c.name === insertedFieldName && c.references?.length); - if(!insertedRefCol) return undefined; + if (insertedFieldValue && isObject(insertedFieldValue)) { + const insertedRefCol = tableHandler.columns.find( + (c) => c.name === insertedFieldName && c.references?.length, + ); + if (!insertedRefCol) return undefined; return { insertedRefCol, - insertedRefColRef: insertedRefCol.references! 
-  }
-
+          insertedRefColRef: insertedRefCol.references!,
+        };
       }
-
+
       return undefined;
     })
     .filter(isDefined)
     .map(({ insertedRefCol, insertedRefColRef }) => {
-
-      if(insertedRefColRef.length !== 1){
-        throw "Cannot do a nested insert on column that references multiple tables"
+      if (insertedRefColRef.length !== 1) {
+        throw "Cannot do a nested insert on column that references multiple tables";
       }
-
-      const referencesMultipleColumns = insertedRefColRef?.some(refs => refs.fcols.length !== 1);
-      if(referencesMultipleColumns){
-        throw "Cannot do a nested insert on multi-column foreign key reference"
+
+      const referencesMultipleColumns = insertedRefColRef?.some(
+        (refs) => refs.fcols.length !== 1,
+      );
+      if (referencesMultipleColumns) {
+        throw "Cannot do a nested insert on multi-column foreign key reference";
       }

       const singleInsert = !Array.isArray(parentRow[insertedRefCol.name]);
-      if(expectSingleInsert && !singleInsert){
+      if (expectSingleInsert && !singleInsert) {
         throw "Expected singleInsert";
       }
       const res = {
@@ -349,7 +483,8 @@ export const getReferenceColumnInserts = (ta
         fcol: insertedRefCol.references![0]!.fcols[0]!,
         singleInsert,
         data: parentRow[insertedRefCol.name],
-      }
+      };
       return res;
-    }).filter(isDefined);
-}
+    })
+    .filter(isDefined);
+};
diff --git a/lib/DboBuilder/parseUpdateRules.ts b/lib/DboBuilder/parseUpdateRules.ts
index d546fa4a..7460ac9f 100644
--- a/lib/DboBuilder/parseUpdateRules.ts
+++ b/lib/DboBuilder/parseUpdateRules.ts
@@ -1,6 +1,16 @@
-import { AnyObject, FieldFilter, isDefined, UpdateParams } from "prostgles-types";
+import {
+  AnyObject,
+  FieldFilter,
+  isDefined,
+  UpdateParams,
+} from "prostgles-types";
 import { Filter, LocalParams } from "./DboBuilder";
-import { TableRule, UpdateRule, ValidateRowBasic, ValidateUpdateRowBasic } from "../PublishParser/PublishParser";
+import {
+  TableRule,
+  UpdateRule,
+  ValidateRowBasic,
+  ValidateUpdateRowBasic,
+} from "../PublishParser/PublishParser";
 import { TableHandler } from "./TableHandler/TableHandler";
 import { prepareNewData } from "./TableHandler/DataValidator";

@@ -13,7 +23,7 @@ export async function parseUpdateRules(
   filter: Filter,
   params?: UpdateParams,
   tableRules?: TableRule,
-  localParams?: LocalParams
+  localParams?: LocalParams,
 ): Promise<{
   fields: string[];
   validateRow?: ValidateRowBasic;
@@ -39,25 +49,29 @@
   if (tableRules) {
     if (!tableRules.update) throw "update rules missing for " + this.name;
-    ({ forcedFilter, forcedData, fields, filterFields, validate } = tableRules.update);
+    ({ forcedFilter, forcedData, fields, filterFields, validate } =
+      tableRules.update);

-    returningFields = tableRules.update.returningFields ?? tableRules?.select?.fields ?? "";
+    returningFields =
+      tableRules.update.returningFields ?? tableRules?.select?.fields ?? "";

     if (!returningFields && params?.returning) {
-      throw "You are not allowed to return any fields from the update"
+      throw "You are not allowed to return any fields from the update";
     }

     if (!fields) {
       throw ` Invalid update rule for ${this.name}. 
fields missing `;
     }

-    finalUpdateFilter = (await this.prepareWhere({
-      select: undefined,
-      filter,
-      forcedFilter,
-      filterFields,
-      localParams,
-      tableRule: tableRules
-    })).filter;
+    finalUpdateFilter = (
+      await this.prepareWhere({
+        select: undefined,
+        filter,
+        forcedFilter,
+        filterFields,
+        localParams,
+        tableRule: tableRules,
+      })
+    ).filter;

     if (tableRules.update.dynamicFields?.length) {
       /**
        * dynamicFields.fields used to allow a custom list of fields for specific records
@@ -65,37 +79,48 @@ export async function parseUpdateRules(
        * updates must target records from a specific dynamicFields.filter or not match any dynamicFields.filter
        */
       if (testRule) {
-        for await (const [dfIndex, dfRule] of tableRules.update.dynamicFields.entries()) {
-
+        for await (const [
+          dfIndex,
+          dfRule,
+        ] of tableRules.update.dynamicFields.entries()) {
           /**
            * Validated filter and fields
            */
           const condition = await this.prepareWhere({
-            select: undefined,
-            filterFields: this.column_names,
-            filter: dfRule.filter,
-            localParams,
-            tableRule: tableRules
+            select: undefined,
+            filterFields: this.column_names,
+            filter: dfRule.filter,
+            localParams,
+            tableRule: tableRules,
           });
           if (!condition.where) {
-            throw "dynamicFields.filter cannot be empty: " + JSON.stringify(dfRule);
+            throw (
+              "dynamicFields.filter cannot be empty: " + JSON.stringify(dfRule)
+            );
           }

-          await this.validateViewRules({
-            fields: dfRule.fields,
-            filterFields,
-            returningFields,
-            forcedFilter,
-            dynamicFields: tableRules.update.dynamicFields,
-            rule: "update"
+          await this.validateViewRules({
+            fields: dfRule.fields,
+            filterFields,
+            returningFields,
+            forcedFilter,
+            dynamicFields: tableRules.update.dynamicFields,
+            rule: "update",
           });
-
           await this.find(dfRule.filter, { limit: 0 });

           /** Ensure dynamicFields filters do not overlap */
-          for await (const [_dfIndex, _dfRule] of tableRules.update.dynamicFields.entries()) {
+          for await (const [
+            _dfIndex,
+            _dfRule,
+          ] of tableRules.update.dynamicFields.entries()) {
             if (dfIndex !== _dfIndex) {
-              if (await this.findOne({ $and: [dfRule.filter, _dfRule.filter] }, { select: "" })) {
+              if (
+                await this.findOne(
+                  { $and: [dfRule.filter, _dfRule.filter] },
+                  { select: "" },
+                )
+              ) {
                 throw `dynamicFields.filter cannot overlap each other. \n
                 Overlapping dynamicFields rules:
                 ${JSON.stringify(dfRule)}
@@ -109,12 +134,17 @@ export async function parseUpdateRules(
   }

   /** Pick dynamicFields.fields if matching filter */
-  let matchedRule: Required<UpdateRule>["dynamicFields"][number] | undefined;
+  let matchedRule:
+    | Required<UpdateRule>["dynamicFields"][number]
+    | undefined;
   for await (const dfRule of tableRules.update.dynamicFields) {
-    const match = await this.findOne({ $and: ([finalUpdateFilter, dfRule.filter] as AnyObject[]).filter(isDefined) });
+    const match = await this.findOne({
+      $and: ([finalUpdateFilter, dfRule.filter] as AnyObject[]).filter(
+        isDefined,
+      ),
+    });
     if (match) {
-
       /** Ensure it doesn't overlap with other dynamicFields.filter */
       if (matchedRule && !testRule) {
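A sketch of the update rule shape being validated here (hypothetical `users` table): each dynamicFields entry grants its own column list to rows matching its filter, and the filters must not overlap.

```typescript
users: {
  update: {
    fields: { name: 1 },
    dynamicFields: [
      { filter: { status: "draft" }, fields: "*" },
      { filter: { status: "published" }, fields: { name: 1 } },
    ],
  },
},
```

        throw "Your update is targeting multiple tableRules.update.dynamicFields. 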
Restrict update filter to only target one rule";
       }
       matchedRule = dfRule;
@@ -128,37 +158,57 @@

   /* Safely test publish rules */
   if (testRule) {
-    await this.validateViewRules({ fields, filterFields, returningFields, forcedFilter, dynamicFields: tableRules.update.dynamicFields, rule: "update" });
+    await this.validateViewRules({
+      fields,
+      filterFields,
+      returningFields,
+      forcedFilter,
+      dynamicFields: tableRules.update.dynamicFields,
+      rule: "update",
+    });

     if (forcedData) {
       try {
-        const { data, allowedCols } = await prepareNewData({
-          row: forcedData,
-          forcedData: undefined,
-          allowedFields: "*",
-          tableRules,
-          removeDisallowedFields: false,
+        const { data, allowedCols } = await prepareNewData({
+          row: forcedData,
+          forcedData: undefined,
+          allowedFields: "*",
+          tableRules,
+          removeDisallowedFields: false,
           tableConfigurator: this.dboBuilder.prostgles.tableConfigurator,
           tableHandler: this,
         });
         let updateValidate: ValidateRowBasic | undefined;
-        if(validate){
-          if(!localParams) throw "localParams missing";
-          updateValidate = (args) => validate!({ update: args.row, filter: {}, dbx: this.getFinalDbo(localParams), localParams })
+        if (validate) {
+          if (!localParams) throw "localParams missing";
+          updateValidate = (args) =>
+            validate!({
+              update: args.row,
+              filter: {},
+              dbx: this.getFinalDbo(localParams),
+              localParams,
+            });
         }
-        const updateQ = (await this.dataValidator.parse({
-          command: "update",
-          rows: [data],
-          allowedCols,
-          dbTx: this.tx?.dbTX || this.dboBuilder.dbo,
-          validationOptions: {
-            validate: updateValidate,
-            localParams,
-          }
-        })).getQuery();
+        const updateQ = (
+          await this.dataValidator.parse({
+            command: "update",
+            rows: [data],
+            allowedCols,
+            dbTx: this.tx?.dbTX || this.dboBuilder.dbo,
+            validationOptions: {
+              validate: updateValidate,
+              localParams,
+            },
+          })
+        ).getQuery();
         const query = updateQ + " WHERE FALSE ";
         await this.db.any("EXPLAIN " + query);
       } catch (e) {
-        throw " issue with forcedData: \nVALUE: " + JSON.stringify(forcedData, null, 2) + "\nERROR: " + e;
+        throw (
+          " issue with forcedData: \nVALUE: " +
+          JSON.stringify(forcedData, null, 2) +
+          "\nERROR: " +
+          e
+        );
       }
     }

@@ -170,9 +220,15 @@
   const _fields = this.parseFieldFilter(fields);
   let validateRow: ValidateRowBasic | undefined;
-  if(validate){
-    if(!localParams) throw "localParams missing";
-    validateRow = ({ row }) => validate!({ update: row, filter: finalUpdateFilter, localParams, dbx: this.getFinalDbo(localParams) });
+  if (validate) {
+    if (!localParams) throw "localParams missing";
+    validateRow = ({ row }) =>
+      validate!({
+        update: row,
+        filter: finalUpdateFilter,
+        localParams,
+        dbx: this.getFinalDbo(localParams),
+      });
   }

   return {
@@ -183,5 +239,5 @@
     forcedFilter,
     returningFields,
     filterFields,
-  }
-}
+  };
+}
diff --git a/lib/DboBuilder/prepareShortestJoinPaths.ts b/lib/DboBuilder/prepareShortestJoinPaths.ts
index 42c84d76..93ae2c0c 100644
--- a/lib/DboBuilder/prepareShortestJoinPaths.ts
+++ b/lib/DboBuilder/prepareShortestJoinPaths.ts
@@ -8,34 +8,45 @@
 type Result = {
   joinGraph?: Graph | undefined;
   joins: Join[];
   shortestJoinPaths: JoinPaths;
-}
-export async function prepareShortestJoinPaths(dboBuilder: DboBuilder): Promise<Result> {
-
+};
+export async function prepareShortestJoinPaths(
+  dboBuilder: DboBuilder,
+): Promise<Result> {
   if (dboBuilder.prostgles.opts.joins) {
-
     let joinConfig = await dboBuilder.prostgles.opts.joins;

     if (!dboBuilder.tablesOrViews) {
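The `joins` option consumed here accepts either the string "inferred" or an explicit array of this shape (sketch; tables and columns hypothetical):

```typescript
const joins: Join[] = [
  {
    tables: ["users", "posts"],
    on: [{ id: "user_id" }], // users.id = posts.user_id
    type: "one-many",
  },
];
```

-      throw new 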
Error("Could not create join config. this.tablesOrViews missing"); + throw new Error( + "Could not create join config. this.tablesOrViews missing", + ); } const inferredJoins = await getInferredJoins2(dboBuilder.tablesOrViews); if (joinConfig === "inferred") { - joinConfig = inferredJoins + joinConfig = inferredJoins; /* If joins are specified then include inferred joins except the explicit tables */ } else if (Array.isArray(joinConfig)) { - const joinTables = joinConfig.map(j => j.tables).flat(); - joinConfig = joinConfig.concat(inferredJoins.filter(j => !j.tables.find(t => joinTables.includes(t)))) + const joinTables = joinConfig.map((j) => j.tables).flat(); + joinConfig = joinConfig.concat( + inferredJoins.filter( + (j) => !j.tables.find((t) => joinTables.includes(t)), + ), + ); } else if (joinConfig) { - throw new Error("Unexpected joins init param. Expecting 'inferred' OR joinConfig but got: " + JSON.stringify(joinConfig)) + throw new Error( + "Unexpected joins init param. Expecting 'inferred' OR joinConfig but got: " + + JSON.stringify(joinConfig), + ); } const joins = JSON.parse(JSON.stringify(joinConfig)) as Join[]; // Validate joins try { - const tovNames = dboBuilder.tablesOrViews!.map(t => t.name); + const tovNames = dboBuilder.tablesOrViews!.map((t) => t.name); // 2 find incorrect tables - const missing = joins.flatMap(j => j.tables).find(t => !tovNames.includes(t)); + const missing = joins + .flatMap((j) => j.tables) + .find((t) => !tovNames.includes(t)); if (missing) { throw "Table not found: " + missing; } @@ -44,34 +55,52 @@ export async function prepareShortestJoinPaths(dboBuilder: DboBuilder): Promise< joins.map(({ tables, on }) => { const t1 = tables[0], t2 = tables[1]; - on.map(cond => { - + on.map((cond) => { const f1s = Object.keys(cond), f2s = Object.values(cond); - [[t1, f1s], [t2, f2s]].map(v => { + [ + [t1, f1s], + [t2, f2s], + ].map((v) => { const t = v[0], f = v[1]; - const tov = dboBuilder.tablesOrViews!.find(_t => _t.name === t); + const tov = dboBuilder.tablesOrViews!.find((_t) => _t.name === t); if (!tov) throw "Table not found: " + t; - const m1 = f.filter(k => !tov!.columns.map(c => c.name).includes(k)) + const m1 = f.filter( + (k) => !tov!.columns.map((c) => c.name).includes(k), + ); if (m1 && m1.length) { - throw `Table ${t}(${tov.columns.map(c => c.name).join()}) has no fields named: ${m1.join()}`; + throw `Table ${t}(${tov.columns.map((c) => c.name).join()}) has no fields named: ${m1.join()}`; } }); - }) + }); }); // 4 find incorrect/missing join types - const expected_types = " \n\n-> Expecting: " + JOIN_TYPES.map(t => JSON.stringify(t)).join(` | `) - const mt = joins.find(j => !j.type); - if (mt) throw "Join type missing for: " + JSON.stringify(mt, null, 2) + expected_types; - - const it = joins.find(j => !JOIN_TYPES.includes(j.type)); - if (it) throw "Incorrect join type for: " + JSON.stringify(it, null, 2) + expected_types; - + const expected_types = + " \n\n-> Expecting: " + + JOIN_TYPES.map((t) => JSON.stringify(t)).join(` | `); + const mt = joins.find((j) => !j.type); + if (mt) + throw ( + "Join type missing for: " + + JSON.stringify(mt, null, 2) + + expected_types + ); + + const it = joins.find((j) => !JOIN_TYPES.includes(j.type)); + if (it) + throw ( + "Incorrect join type for: " + + JSON.stringify(it, null, 2) + + expected_types + ); } catch (e) { - const errMsg = ((joinConfig as any) === "inferred"? "INFERRED " : "") + "JOINS VALIDATION ERROR \n-> " + e; + const errMsg = + ((joinConfig as any) === "inferred" ? 
"INFERRED " : "") + + "JOINS VALIDATION ERROR \n-> " + + e; throw errMsg; } @@ -90,15 +119,14 @@ export async function prepareShortestJoinPaths(dboBuilder: DboBuilder): Promise< joinGraph![t2] ??= {}; joinGraph![t2]![t1] = 1; }); - const tables = Array.from(new Set(joins.flatMap(t => t.tables))); + const tables = Array.from(new Set(joins.flatMap((t) => t.tables))); const shortestJoinPaths: JoinPaths = []; tables.forEach((t1, i1) => { tables.forEach((t2, i2) => { - /** Prevent recursion */ if ( t1 === t2 || - shortestJoinPaths.some(jp => { + shortestJoinPaths.some((jp) => { if (arrayValuesMatch([jp.t1, jp.t2], [t1, t2])) { const spath = findShortestPath(joinGraph, t1, t2); if (spath && arrayValuesMatch(spath.path, jp.path)) { @@ -113,20 +141,30 @@ export async function prepareShortestJoinPaths(dboBuilder: DboBuilder): Promise< const spath = findShortestPath(joinGraph, t1, t2); if (!(spath && spath.distance < Infinity)) return; - const existing1 = shortestJoinPaths.find(j => j.t1 === t1 && j.t2 === t2) + const existing1 = shortestJoinPaths.find( + (j) => j.t1 === t1 && j.t2 === t2, + ); if (!existing1) { shortestJoinPaths.push({ t1, t2, path: spath.path.slice() }); } - const existing2 = shortestJoinPaths.find(j => j.t2 === t1 && j.t1 === t2); + const existing2 = shortestJoinPaths.find( + (j) => j.t2 === t1 && j.t1 === t2, + ); if (!existing2) { - shortestJoinPaths.push({ t1: t2, t2: t1, path: spath.path.slice().reverse() }); + shortestJoinPaths.push({ + t1: t2, + t2: t1, + path: spath.path.slice().reverse(), + }); } }); }); return { - joins, shortestJoinPaths, joinGraph - } + joins, + shortestJoinPaths, + joinGraph, + }; } return { @@ -135,27 +173,40 @@ export async function prepareShortestJoinPaths(dboBuilder: DboBuilder): Promise< }; } - const arrayValuesMatch = (arr1: T[], arr2: T[]): boolean => { - return arr1.slice().sort().join() === arr2.slice().sort().join() -} + return arr1.slice().sort().join() === arr2.slice().sort().join(); +}; async function getInferredJoins2(schema: TableSchema[]): Promise { const joins: Join[] = []; - const upsertJoin = (t1: string, t2: string, cols: { col1: string; col2: string }[], type: Join["type"]) => { - const existingIdx = joins.findIndex(j => arrayValuesMatch(j.tables.slice(0), [t1, t2])); + const upsertJoin = ( + t1: string, + t2: string, + cols: { col1: string; col2: string }[], + type: Join["type"], + ) => { + const existingIdx = joins.findIndex((j) => + arrayValuesMatch(j.tables.slice(0), [t1, t2]), + ); const existing = joins[existingIdx]; const normalCond = cols.reduce((a, v) => ({ ...a, [v.col1]: v.col2 }), {}); - const revertedCond = cols.reduce((a, v) => ({ ...a, [v.col2]: v.col1 }), {}); + const revertedCond = cols.reduce( + (a, v) => ({ ...a, [v.col2]: v.col1 }), + {}, + ); if (existing) { - const isLTR = existing.tables[0] === t1 + const isLTR = existing.tables[0] === t1; const cond = isLTR ? normalCond : revertedCond; /** At some point we should add relationship type to EACH JOIN CONDITION GROUP */ // const fixedType = isLTR? 
type : type.split("").reverse().join("") as Join["type"];
       /** Avoid duplicates */
-      if (!existing.on.some(_cond => JSON.stringify(_cond) === JSON.stringify(cond))) {
+      if (
+        !existing.on.some(
+          (_cond) => JSON.stringify(_cond) === JSON.stringify(cond),
+        )
+      ) {
         existing.on.push(cond);
         joins[existingIdx] = existing;
       }
@@ -163,24 +214,32 @@ async function getInferredJoins2(schema: TableSchema[]): Promise<Join[]> {
       joins.push({
         tables: [t1, t2],
         on: [normalCond],
-        type
-      })
+        type,
+      });
     }
-  }
-  schema.map(tov => {
-    tov.columns.map(col => {
+  };
+  schema.map((tov) => {
+    tov.columns.map((col) => {
       if (col.references) {
-        col.references.forEach(r => {
-          const joinCols = r.cols.map((c, i) => ({ col1: c, col2: r.fcols[i]! }));
+        col.references.forEach((r) => {
+          const joinCols = r.cols.map((c, i) => ({
+            col1: c,
+            col2: r.fcols[i]!,
+          }));
           let type: Join["type"] = "one-many";
-          const ftablePkeys = schema.find(_tov => _tov.name === r.ftable)?.columns.filter(fcol => fcol.is_pkey);
-          if (ftablePkeys?.length && ftablePkeys.every(fkey => r.fcols.includes(fkey.name))) {
+          const ftablePkeys = schema
+            .find((_tov) => _tov.name === r.ftable)
+            ?.columns.filter((fcol) => fcol.is_pkey);
+          if (
+            ftablePkeys?.length &&
+            ftablePkeys.every((fkey) => r.fcols.includes(fkey.name))
+          ) {
             type = "one-one";
           }
-          upsertJoin(tov.name, r.ftable, joinCols, type)
-        })
+          upsertJoin(tov.name, r.ftable, joinCols, type);
+        });
       }
-    })
-  })
+    });
+  });
   return joins;
-}
\ No newline at end of file
+}
diff --git a/lib/DboBuilder/runSQL.ts b/lib/DboBuilder/runSQL.ts
index af86a184..ecac501e 100644
--- a/lib/DboBuilder/runSQL.ts
+++ b/lib/DboBuilder/runSQL.ts
@@ -1,17 +1,39 @@
-import pgPromise, { ParameterizedQuery as PQ, ParameterizedQuery } from 'pg-promise';
+import pgPromise, {
+  ParameterizedQuery as PQ,
+  ParameterizedQuery,
+} from "pg-promise";
 import pg from "pg-promise/typescript/pg-subset";
-import { AnyObject, SQLOptions, SQLResult, SQLResultInfo } from "prostgles-types";
+import {
+  AnyObject,
+  SQLOptions,
+  SQLResult,
+  SQLResultInfo,
+} from "prostgles-types";
 import { DB, Prostgles } from "../Prostgles";
 import { DboBuilder, LocalParams, pgp, postgresToTsType } from "./DboBuilder";
-
-export async function runSQL(this: DboBuilder, queryWithoutRLS: string, args: undefined | AnyObject | any[], options: SQLOptions | undefined, localParams?: LocalParams) {
+export async function runSQL(
+  this: DboBuilder,
+  queryWithoutRLS: string,
+  args: undefined | AnyObject | any[],
+  options: SQLOptions | undefined,
+  localParams?: LocalParams,
+) {
   const queryWithRLS = queryWithoutRLS;
-  if(queryWithRLS?.replace(/\s\s+/g, ' ').toLowerCase().includes("create extension pg_stat_statements")){
-    const { shared_preload_libraries } = await this.db.oneOrNone('SHOW shared_preload_libraries');
-    if(!(shared_preload_libraries || "").includes("pg_stat_statements")){
-      throw "This query will crash the server (pg_stat_statements must be loaded via shared_preload_libraries). 
Need to: \n ALTER SYSTEM SET shared_preload_libraries = 'pg_stat_statements' \n" + - " AND restart server: \n (linux) sudo service postgresql restart\n (mac) brew services restart postgres\n " + if ( + queryWithRLS + ?.replace(/\s\s+/g, " ") + .toLowerCase() + .includes("create extension pg_stat_statements") + ) { + const { shared_preload_libraries } = await this.db.oneOrNone( + "SHOW shared_preload_libraries", + ); + if (!(shared_preload_libraries || "").includes("pg_stat_statements")) { + throw ( + "This query will crash the server (pg_stat_statements must be loaded via shared_preload_libraries). Need to: \n ALTER SYSTEM SET shared_preload_libraries = 'pg_stat_statements' \n" + + " AND restart server: \n (linux) sudo service postgresql restart\n (mac) brew services restart postgres\n " + ); } } @@ -21,80 +43,90 @@ export async function runSQL(this: DboBuilder, queryWithoutRLS: string, args: un throw "Not allowed to run SQL"; } - const { returnType, allowListen, hasParams = true }: SQLOptions = options || ({} as SQLOptions); + const { + returnType, + allowListen, + hasParams = true, + }: SQLOptions = options || ({} as SQLOptions); const { socket } = localParams || {}; - const db = localParams?.tx?.t || this.db; if (returnType === "stream") { - if(localParams?.tx) throw "Cannot use stream with localParams transaction"; + if (localParams?.tx) throw "Cannot use stream with localParams transaction"; if (!socket) throw "Only allowed with client socket"; - const streamInfo = await this.queryStreamer.create({ socket, query: pgp.as.format(queryWithRLS, args), options }); + const streamInfo = await this.queryStreamer.create({ + socket, + query: pgp.as.format(queryWithRLS, args), + options, + }); return streamInfo; - } else if (returnType === "noticeSubscription") { - if (!socket) throw "Only allowed with client socket" + if (!socket) throw "Only allowed with client socket"; return await this.prostgles.dbEventsManager?.addNotice(socket); - } else if (returnType === "statement") { try { return pgp.as.format(queryWithoutRLS, args); } catch (err) { throw (err as any).toString(); } - } - + if (!db) { - throw "db is missing" + throw "db is missing"; } let finalQuery: string | ParameterizedQuery = queryWithRLS + ""; - const isNotListenOrNotify = (returnType === "arrayMode") && !["listen ", "notify "].find(c => queryWithoutRLS.toLowerCase().trim().startsWith(c)) + const isNotListenOrNotify = + returnType === "arrayMode" && + !["listen ", "notify "].find((c) => + queryWithoutRLS.toLowerCase().trim().startsWith(c), + ); if (isNotListenOrNotify) { - finalQuery = new PQ({ + finalQuery = new PQ({ rowMode: "array", - text: hasParams ? pgp.as.format(queryWithRLS, args) : queryWithRLS, + text: hasParams ? pgp.as.format(queryWithRLS, args) : queryWithRLS, }); } const params = hasParams ? 
args : undefined;
   let queryResult: pgPromise.IResultExt | undefined;
-
-  if(returnType === "default-with-rollback"){
+
+  if (returnType === "default-with-rollback") {
     const ROLLBACK = "Rollback";
-    await db.tx(async t => {
-      queryResult = await t.result(finalQuery, params);
-      /** Rollback */
-      return Promise.reject(new Error(ROLLBACK));
-    }).catch(e => {
-      if(!(e instanceof Error && e.message === ROLLBACK)) throw e;
-    });
-  } else {
+    await db
+      .tx(async (t) => {
+        queryResult = await t.result(finalQuery, params);
+        /** Rollback */
+        return Promise.reject(new Error(ROLLBACK));
+      })
+      .catch((e) => {
+        if (!(e instanceof Error && e.message === ROLLBACK)) throw e;
+      });
+  } else {
     queryResult = await db.result(finalQuery, params);
   }
-  if(!queryResult) throw "No query result";
+  if (!queryResult) throw "No query result";
   const { fields, rows } = queryResult;
 
-  const listenHandlers = await onSQLResult.bind(this)(queryWithoutRLS, queryResult, allowListen, localParams);
-  if(listenHandlers) {
+  const listenHandlers = await onSQLResult.bind(this)(
+    queryWithoutRLS,
+    queryResult,
+    allowListen,
+    localParams,
+  );
+  if (listenHandlers) {
     return listenHandlers;
   }
 
   if (returnType === "rows") {
     return rows;
-
   } else if (returnType === "row") {
     return rows[0];
-
   } else if (returnType === "value") {
     return Object.values(rows?.[0] ?? {})?.[0];
-
   } else if (returnType === "values") {
-    return rows.map(r => Object.values(r ?? {})[0]);
-
+    return rows.map((r) => Object.values(r ?? {})[0]);
   } else {
-
     const qres: SQLResult = {
       duration: 0,
       ...queryResult,
@@ -104,26 +136,42 @@ export async function runSQL(this: DboBuilder, queryWithoutRLS: string, args: un
   }
 }
 
-const onSQLResult = async function(this: DboBuilder, queryWithoutRLS: string, { command }: Omit<pgPromise.IResultExt, "duration">, allowListen: boolean | undefined, localParams?: LocalParams) {
-
-  this.prostgles.schemaWatch?.onSchemaChangeFallback?.({ command, query: queryWithoutRLS });
+const onSQLResult = async function (
+  this: DboBuilder,
+  queryWithoutRLS: string,
+  { command }: Omit<pgPromise.IResultExt, "duration">,
+  allowListen: boolean | undefined,
+  localParams?: LocalParams,
+) {
+  this.prostgles.schemaWatch?.onSchemaChangeFallback?.({
+    command,
+    query: queryWithoutRLS,
+  });
   if (command === "LISTEN") {
     const { socket } = localParams || {};
 
-    if (!allowListen) throw new Error(`Your query contains a LISTEN command. Set { allowListen: true } to get subscription hooks. Or ignore this message`)
-    if (!socket) throw "Only allowed with client socket"
-    return await this.prostgles.dbEventsManager?.addNotify(queryWithoutRLS, socket);
+    if (!allowListen)
+      throw new Error(
+        `Your query contains a LISTEN command. Set { allowListen: true } to get subscription hooks. Or ignore this message`,
+      );
+    if (!socket) throw "Only allowed with client socket";
+    return await this.prostgles.dbEventsManager?.addNotify(
+      queryWithoutRLS,
+      socket,
+    );
   }
-}
+};
 
 export async function cacheDBTypes(this: DboBuilder, force = false) {
-  if(force){
+  if (force) {
     this.DATA_TYPES = undefined;
     this.USER_TABLES = undefined;
     this.USER_TABLE_COLUMNS = undefined;
   }
 
-  this.DATA_TYPES ??= await this.db.any("SELECT oid, typname FROM pg_type") ?? [];
-  this.USER_TABLES ??= await this.db.any(`
+  this.DATA_TYPES ??=
+    (await this.db.any("SELECT oid, typname FROM pg_type")) ?? [];
+  this.USER_TABLES ??=
+    (await this.db.any(`
     SELECT relid,
       relname,
@@ -139,7 +187,7 @@ export async function cacheDBTypes(this: DboBuilder, force = false) {
     ) c ON t.relid = c.table_oid
     GROUP BY relid, relname, schemaname
-    `) ?? [];
+    `)) ?? 
[];
   this.USER_TABLE_COLUMNS ??= await this.db.any(`
    SELECT t.relid, t.schemaname,t.relname, c.column_name, c.udt_name, c.ordinal_position
    FROM information_schema.columns c
@@ -149,34 +197,47 @@ export async function cacheDBTypes(this: DboBuilder, force = false) {
 }
 
 export function getDetailedFieldInfo(this: DboBuilder, fields: pg.IColumn[]) {
-  return fields?.map(f => {
-    const dataType = this.DATA_TYPES!.find(dt => +dt.oid === +f.dataTypeID)?.typname ?? "text",
-      table = this.USER_TABLES!.find(t => +t.relid === +f.tableID),
-      column = this.USER_TABLE_COLUMNS!.find(c => +c.relid === +f.tableID && c.ordinal_position === f.columnID),
-      tsDataType = postgresToTsType(dataType);
-
-    return {
-      ...f,
-      tsDataType,
-      dataType,
-      udt_name: dataType,
-      tableName: table?.relname,
-      tableSchema: table?.schemaname,
-      columnName: column?.column_name
-    }
-  }) ?? [];
+  return (
+    fields?.map((f) => {
+      const dataType =
+        this.DATA_TYPES!.find((dt) => +dt.oid === +f.dataTypeID)?.typname ??
+        "text",
+        table = this.USER_TABLES!.find((t) => +t.relid === +f.tableID),
+        column = this.USER_TABLE_COLUMNS!.find(
+          (c) => +c.relid === +f.tableID && c.ordinal_position === f.columnID,
+        ),
+        tsDataType = postgresToTsType(dataType);
+
+      return {
+        ...f,
+        tsDataType,
+        dataType,
+        udt_name: dataType,
+        tableName: table?.relname,
+        tableSchema: table?.schemaname,
+        columnName: column?.column_name,
+      };
+    }) ?? []
+  );
 }
 
-export const canRunSQL = async (prostgles: Prostgles, localParams?: LocalParams): Promise<boolean> => {
+export const canRunSQL = async (
+  prostgles: Prostgles,
+  localParams?: LocalParams,
+): Promise<boolean> => {
   if (!localParams?.socket || !localParams?.httpReq) return true;
 
   const { socket } = localParams;
-  const publishParams = await prostgles.publishParser!.getPublishParams({ socket });
+  const publishParams = await prostgles.publishParser!.getPublishParams({
+    socket,
+  });
   //@ts-ignore
   const res = await prostgles.opts.publishRawSQL?.(publishParams);
-  return Boolean(res && typeof res === "boolean" || res === "*");
-}
+  return Boolean((res && typeof res === "boolean") || res === "*");
+};
 
 export const canCreateTables = async (db: DB): Promise<boolean> => {
-  return db.any(`SELECT has_database_privilege(current_database(), 'create') as yes`).then(rows => rows?.[0].yes === true)
-}
\ No newline at end of file
+  return db
+    .any(`SELECT has_database_privilege(current_database(), 'create') as yes`)
+    .then((rows) => rows?.[0].yes === true);
+};
diff --git a/lib/DboBuilder/runTransaction.ts b/lib/DboBuilder/runTransaction.ts
index 78ad9285..b47ef02a 100644
--- a/lib/DboBuilder/runTransaction.ts
+++ b/lib/DboBuilder/runTransaction.ts
@@ -1,34 +1,45 @@
 import * as pg from "pg";
 const { Client } = pg;
-export type PGTX = ((query: pg.Client["query"]) => Promise<void>);
+export type PGTX = (query: pg.Client["query"]) => Promise<void>;
 export type RunTransactionOpts = {
   begin?: boolean;
   onSuccess: "COMMIT" | "ROLLBACK" | undefined;
-}
-export const runClientTransaction = async (handler: PGTX, { onSuccess, begin = true }: RunTransactionOpts, dbConn: pg.ClientConfig) => {
-  const client = new Client(dbConn)
+};
+export const runClientTransaction = async (
+  handler: PGTX,
+  { onSuccess, begin = true }: RunTransactionOpts,
+  dbConn: pg.ClientConfig,
+) => {
+  const client = new Client(dbConn);
   try {
-    if(begin){
-      await client.query('BEGIN')
+    if (begin) {
+      await client.query("BEGIN");
     }
     await handler(client.query);
-    if(onSuccess){
+    if (onSuccess) {
       await client.query(onSuccess);
     }
   } catch (e) {
-    await client.query('ROLLBACK');
-    throw e
+    
await client.query("ROLLBACK"); + throw e; } finally { client.end(); } -} +}; -export const runClientTransactionStatement = async (statement: string, dbConn: pg.ClientConfig) => { - return runClientTransaction(async (query) => { - await query(statement); - }, { onSuccess: undefined, begin: false }, dbConn); -} +export const runClientTransactionStatement = async ( + statement: string, + dbConn: pg.ClientConfig, +) => { + return runClientTransaction( + async (query) => { + await query(statement); + }, + { onSuccess: undefined, begin: false }, + dbConn, + ); +}; // let pool: pg.Pool | undefined; // export const runPoolTransaction = async (handler: PGTX, { onSuccess, begin = true }: RunTransactionOpts, dbConn: pg.ClientConfig) => { @@ -47,4 +58,4 @@ export const runClientTransactionStatement = async (statement: string, dbConn: p // } finally { // client.release(); // } -// } \ No newline at end of file +// } diff --git a/lib/DboBuilder/sqlErrCodeToMsg.ts b/lib/DboBuilder/sqlErrCodeToMsg.ts index dac19e72..6d419d01 100644 --- a/lib/DboBuilder/sqlErrCodeToMsg.ts +++ b/lib/DboBuilder/sqlErrCodeToMsg.ts @@ -1,247 +1,506 @@ export const sqlErrCodeToMsg = (code: string) => { const errs = { - "00000": "successful_completion", - "01000": "warning", - "0100C": "dynamic_result_sets_returned", - "01008": "implicit_zero_bit_padding", - "01003": "null_value_eliminated_in_set_function", - "01007": "privilege_not_granted", - "01006": "privilege_not_revoked", - "01004": "string_data_right_truncation", - "01P01": "deprecated_feature", - "02000": "no_data", - "02001": "no_additional_dynamic_result_sets_returned", - "03000": "sql_statement_not_yet_complete", - "08000": "connection_exception", - "08003": "connection_does_not_exist", - "08006": "connection_failure", - "08001": "sqlclient_unable_to_establish_sqlconnection", - "08004": "sqlserver_rejected_establishment_of_sqlconnection", - "08007": "transaction_resolution_unknown", - "08P01": "protocol_violation", - "09000": "triggered_action_exception", - "0A000": "feature_not_supported", - "0B000": "invalid_transaction_initiation", - "0F000": "locator_exception", - "0F001": "invalid_locator_specification", - "0L000": "invalid_grantor", - "0LP01": "invalid_grant_operation", - "0P000": "invalid_role_specification", - "0Z000": "diagnostics_exception", - "0Z002": "stacked_diagnostics_accessed_without_active_handler", - "20000": "case_not_found", - "21000": "cardinality_violation", - "22000": "data_exception", - "2202E": "array_subscript_error", - "22021": "character_not_in_repertoire", - "22008": "datetime_field_overflow", - "22012": "division_by_zero", - "22005": "error_in_assignment", - "2200B": "escape_character_conflict", - "22022": "indicator_overflow", - "22015": "interval_field_overflow", - "2201E": "invalid_argument_for_logarithm", - "22014": "invalid_argument_for_ntile_function", - "22016": "invalid_argument_for_nth_value_function", - "2201F": "invalid_argument_for_power_function", - "2201G": "invalid_argument_for_width_bucket_function", - "22018": "invalid_character_value_for_cast", - "22007": "invalid_datetime_format", - "22019": "invalid_escape_character", - "2200D": "invalid_escape_octet", - "22025": "invalid_escape_sequence", - "22P06": "nonstandard_use_of_escape_character", - "22010": "invalid_indicator_parameter_value", - "22023": "invalid_parameter_value", - "2201B": "invalid_regular_expression", - "2201W": "invalid_row_count_in_limit_clause", - "2201X": "invalid_row_count_in_result_offset_clause", - "2202H": "invalid_tablesample_argument", - "2202G": 
"invalid_tablesample_repeat", - "22009": "invalid_time_zone_displacement_value", - "2200C": "invalid_use_of_escape_character", - "2200G": "most_specific_type_mismatch", - "22004": "null_value_not_allowed", - "22002": "null_value_no_indicator_parameter", - "22003": "numeric_value_out_of_range", - "2200H": "sequence_generator_limit_exceeded", - "22026": "string_data_length_mismatch", - "22001": "string_data_right_truncation", - "22011": "substring_error", - "22027": "trim_error", - "22024": "unterminated_c_string", - "2200F": "zero_length_character_string", - "22P01": "floating_point_exception", - "22P02": "invalid_text_representation", - "22P03": "invalid_binary_representation", - "22P04": "bad_copy_file_format", - "22P05": "untranslatable_character", - "2200L": "not_an_xml_document", - "2200M": "invalid_xml_document", - "2200N": "invalid_xml_content", - "2200S": "invalid_xml_comment", - "2200T": "invalid_xml_processing_instruction", - "23000": "integrity_constraint_violation", - "23001": "restrict_violation", - "23502": "not_null_violation", - "23503": "foreign_key_violation", - "23505": "unique_violation", - "23514": "check_violation", - "23P01": "exclusion_violation", - "24000": "invalid_cursor_state", - "25000": "invalid_transaction_state", - "25001": "active_sql_transaction", - "25002": "branch_transaction_already_active", - "25008": "held_cursor_requires_same_isolation_level", - "25003": "inappropriate_access_mode_for_branch_transaction", - "25004": "inappropriate_isolation_level_for_branch_transaction", - "25005": "no_active_sql_transaction_for_branch_transaction", - "25006": "read_only_sql_transaction", - "25007": "schema_and_data_statement_mixing_not_supported", - "25P01": "no_active_sql_transaction", - "25P02": "in_failed_sql_transaction", - "25P03": "idle_in_transaction_session_timeout", - "26000": "invalid_sql_statement_name", - "27000": "triggered_data_change_violation", - "28000": "invalid_authorization_specification", - "28P01": "invalid_password", - "2B000": "dependent_privilege_descriptors_still_exist", - "2BP01": "dependent_objects_still_exist", - "2D000": "invalid_transaction_termination", - "2F000": "sql_routine_exception", - "2F005": "function_executed_no_return_statement", - "2F002": "modifying_sql_data_not_permitted", - "2F003": "prohibited_sql_statement_attempted", - "2F004": "reading_sql_data_not_permitted", - "34000": "invalid_cursor_name", - "38000": "external_routine_exception", - "38001": "containing_sql_not_permitted", - "38002": "modifying_sql_data_not_permitted", - "38003": "prohibited_sql_statement_attempted", - "38004": "reading_sql_data_not_permitted", - "39000": "external_routine_invocation_exception", - "39001": "invalid_sqlstate_returned", - "39004": "null_value_not_allowed", - "39P01": "trigger_protocol_violated", - "39P02": "srf_protocol_violated", - "39P03": "event_trigger_protocol_violated", - "3B000": "savepoint_exception", - "3B001": "invalid_savepoint_specification", - "3D000": "invalid_catalog_name", - "3F000": "invalid_schema_name", - "40000": "transaction_rollback", - "40002": "transaction_integrity_constraint_violation", - "40001": "serialization_failure", - "40003": "statement_completion_unknown", - "40P01": "deadlock_detected", - "42000": "syntax_error_or_access_rule_violation", - "42601": "syntax_error", - "42501": "insufficient_privilege", - "42846": "cannot_coerce", - "42803": "grouping_error", - "42P20": "windowing_error", - "42P19": "invalid_recursion", - "42830": "invalid_foreign_key", - "42602": "invalid_name", - "42622": 
"name_too_long", - "42939": "reserved_name", - "42804": "datatype_mismatch", - "42P18": "indeterminate_datatype", - "42P21": "collation_mismatch", - "42P22": "indeterminate_collation", - "42809": "wrong_object_type", - "428C9": "generated_always", - "42703": "undefined_column", - "42883": "undefined_function", - "42P01": "undefined_table", - "42P02": "undefined_parameter", - "42704": "undefined_object", - "42701": "duplicate_column", - "42P03": "duplicate_cursor", - "42P04": "duplicate_database", - "42723": "duplicate_function", - "42P05": "duplicate_prepared_statement", - "42P06": "duplicate_schema", - "42P07": "duplicate_table", - "42712": "duplicate_alias", - "42710": "duplicate_object", - "42702": "ambiguous_column", - "42725": "ambiguous_function", - "42P08": "ambiguous_parameter", - "42P09": "ambiguous_alias", - "42P10": "invalid_column_reference", - "42611": "invalid_column_definition", - "42P11": "invalid_cursor_definition", - "42P12": "invalid_database_definition", - "42P13": "invalid_function_definition", - "42P14": "invalid_prepared_statement_definition", - "42P15": "invalid_schema_definition", - "42P16": "invalid_table_definition", - "42P17": "invalid_object_definition", - "44000": "with_check_option_violation", - "53000": "insufficient_resources", - "53100": "disk_full", - "53200": "out_of_memory", - "53300": "too_many_connections", - "53400": "configuration_limit_exceeded", - "54000": "program_limit_exceeded", - "54001": "statement_too_complex", - "54011": "too_many_columns", - "54023": "too_many_arguments", - "55000": "object_not_in_prerequisite_state", - "55006": "object_in_use", - "55P02": "cant_change_runtime_param", - "55P03": "lock_not_available", - "57000": "operator_intervention", - "57014": "query_canceled", - "57P01": "admin_shutdown", - "57P02": "crash_shutdown", - "57P03": "cannot_connect_now", - "57P04": "database_dropped", - "58000": "system_error", - "58030": "io_error", - "58P01": "undefined_file", - "58P02": "duplicate_file", - "72000": "snapshot_too_old", - "F0000": "config_file_error", - "F0001": "lock_file_exists", - "HV000": "fdw_error", - "HV005": "fdw_column_name_not_found", - "HV002": "fdw_dynamic_parameter_value_needed", - "HV010": "fdw_function_sequence_error", - "HV021": "fdw_inconsistent_descriptor_information", - "HV024": "fdw_invalid_attribute_value", - "HV007": "fdw_invalid_column_name", - "HV008": "fdw_invalid_column_number", - "HV004": "fdw_invalid_data_type", - "HV006": "fdw_invalid_data_type_descriptors", - "HV091": "fdw_invalid_descriptor_field_identifier", - "HV00B": "fdw_invalid_handle", - "HV00C": "fdw_invalid_option_index", - "HV00D": "fdw_invalid_option_name", - "HV090": "fdw_invalid_string_length_or_buffer_length", - "HV00A": "fdw_invalid_string_format", - "HV009": "fdw_invalid_use_of_null_pointer", - "HV014": "fdw_too_many_handles", - "HV001": "fdw_out_of_memory", - "HV00P": "fdw_no_schemas", - "HV00J": "fdw_option_name_not_found", - "HV00K": "fdw_reply_handle", - "HV00Q": "fdw_schema_not_found", - "HV00R": "fdw_table_not_found", - "HV00L": "fdw_unable_to_create_execution", - "HV00M": "fdw_unable_to_create_reply", - "HV00N": "fdw_unable_to_establish_connection", - "P0000": "plpgsql_error", - "P0001": "raise_exception", - "P0002": "no_data_found", - "P0003": "too_many_rows", - "P0004": "assert_failure", - "XX000": "internal_error", - "XX001": "data_corrupted", - "XX002": "index_corrupted" - }, - c2 = { "20000": "case_not_found", "21000": "cardinality_violation", "22000": "data_exception", "22001": "string_data_right_truncation", 
"22002": "null_value_no_indicator_parameter", "22003": "numeric_value_out_of_range", "22004": "null_value_not_allowed", "22005": "error_in_assignment", "22007": "invalid_datetime_format", "22008": "datetime_field_overflow", "22009": "invalid_time_zone_displacement_value", "22010": "invalid_indicator_parameter_value", "22011": "substring_error", "22012": "division_by_zero", "22013": "invalid_preceding_or_following_size", "22014": "invalid_argument_for_ntile_function", "22015": "interval_field_overflow", "22016": "invalid_argument_for_nth_value_function", "22018": "invalid_character_value_for_cast", "22019": "invalid_escape_character", "22021": "character_not_in_repertoire", "22022": "indicator_overflow", "22023": "invalid_parameter_value", "22024": "unterminated_c_string", "22025": "invalid_escape_sequence", "22026": "string_data_length_mismatch", "22027": "trim_error", "22030": "duplicate_json_object_key_value", "22031": "invalid_argument_for_sql_json_datetime_function", "22032": "invalid_json_text", "22033": "invalid_sql_json_subscript", "22034": "more_than_one_sql_json_item", "22035": "no_sql_json_item", "22036": "non_numeric_sql_json_item", "22037": "non_unique_keys_in_a_json_object", "22038": "singleton_sql_json_item_required", "22039": "sql_json_array_not_found", "23000": "integrity_constraint_violation", "23001": "restrict_violation", "23502": "not_null_violation", "23503": "foreign_key_violation", "23505": "unique_violation", "23514": "check_violation", "24000": "invalid_cursor_state", "25000": "invalid_transaction_state", "25001": "active_sql_transaction", "25002": "branch_transaction_already_active", "25003": "inappropriate_access_mode_for_branch_transaction", "25004": "inappropriate_isolation_level_for_branch_transaction", "25005": "no_active_sql_transaction_for_branch_transaction", "25006": "read_only_sql_transaction", "25007": "schema_and_data_statement_mixing_not_supported", "25008": "held_cursor_requires_same_isolation_level", "26000": "invalid_sql_statement_name", "27000": "triggered_data_change_violation", "28000": "invalid_authorization_specification", "34000": "invalid_cursor_name", "38000": "external_routine_exception", "38001": "containing_sql_not_permitted", "38002": "modifying_sql_data_not_permitted", "38003": "prohibited_sql_statement_attempted", "38004": "reading_sql_data_not_permitted", "39000": "external_routine_invocation_exception", "39001": "invalid_sqlstate_returned", "39004": "null_value_not_allowed", "40000": "transaction_rollback", "40001": "serialization_failure", "40002": "transaction_integrity_constraint_violation", "40003": "statement_completion_unknown", "42000": "syntax_error_or_access_rule_violation", "42501": "insufficient_privilege", "42601": "syntax_error", "42602": "invalid_name", "42611": "invalid_column_definition", "42622": "name_too_long", "42701": "duplicate_column", "42702": "ambiguous_column", "42703": "undefined_column", "42704": "undefined_object", "42710": "duplicate_object", "42712": "duplicate_alias", "42723": "duplicate_function", "42725": "ambiguous_function", "42803": "grouping_error", "42804": "datatype_mismatch", "42809": "wrong_object_type", "42830": "invalid_foreign_key", "42846": "cannot_coerce", "42883": "undefined_function", "42939": "reserved_name", "44000": "with_check_option_violation", "53000": "insufficient_resources", "53100": "disk_full", "53200": "out_of_memory", "53300": "too_many_connections", "53400": "configuration_limit_exceeded", "54000": "program_limit_exceeded", "54001": "statement_too_complex", "54011": 
"too_many_columns", "54023": "too_many_arguments", "55000": "object_not_in_prerequisite_state", "55006": "object_in_use", "57000": "operator_intervention", "57014": "query_canceled", "58000": "system_error", "58030": "io_error", "72000": "snapshot_too_old", "00000": "successful_completion", "01000": "warning", "0100C": "dynamic_result_sets_returned", "01008": "implicit_zero_bit_padding", "01003": "null_value_eliminated_in_set_function", "01007": "privilege_not_granted", "01006": "privilege_not_revoked", "01004": "string_data_right_truncation", "01P01": "deprecated_feature", "02000": "no_data", "02001": "no_additional_dynamic_result_sets_returned", "03000": "sql_statement_not_yet_complete", "08000": "connection_exception", "08003": "connection_does_not_exist", "08006": "connection_failure", "08001": "sqlclient_unable_to_establish_sqlconnection", "08004": "sqlserver_rejected_establishment_of_sqlconnection", "08007": "transaction_resolution_unknown", "08P01": "protocol_violation", "09000": "triggered_action_exception", "0A000": "feature_not_supported", "0B000": "invalid_transaction_initiation", "0F000": "locator_exception", "0F001": "invalid_locator_specification", "0L000": "invalid_grantor", "0LP01": "invalid_grant_operation", "0P000": "invalid_role_specification", "0Z000": "diagnostics_exception", "0Z002": "stacked_diagnostics_accessed_without_active_handler", "2202E": "array_subscript_error", "2200B": "escape_character_conflict", "2201E": "invalid_argument_for_logarithm", "2201F": "invalid_argument_for_power_function", "2201G": "invalid_argument_for_width_bucket_function", "2200D": "invalid_escape_octet", "22P06": "nonstandard_use_of_escape_character", "2201B": "invalid_regular_expression", "2201W": "invalid_row_count_in_limit_clause", "2201X": "invalid_row_count_in_result_offset_clause", "2202H": "invalid_tablesample_argument", "2202G": "invalid_tablesample_repeat", "2200C": "invalid_use_of_escape_character", "2200G": "most_specific_type_mismatch", "2200H": "sequence_generator_limit_exceeded", "2200F": "zero_length_character_string", "22P01": "floating_point_exception", "22P02": "invalid_text_representation", "22P03": "invalid_binary_representation", "22P04": "bad_copy_file_format", "22P05": "untranslatable_character", "2200L": "not_an_xml_document", "2200M": "invalid_xml_document", "2200N": "invalid_xml_content", "2200S": "invalid_xml_comment", "2200T": "invalid_xml_processing_instruction", "2203A": "sql_json_member_not_found", "2203B": "sql_json_number_not_found", "2203C": "sql_json_object_not_found", "2203D": "too_many_json_array_elements", "2203E": "too_many_json_object_members", "2203F": "sql_json_scalar_required", "23P01": "exclusion_violation", "25P01": "no_active_sql_transaction", "25P02": "in_failed_sql_transaction", "25P03": "idle_in_transaction_session_timeout", "28P01": "invalid_password", "2B000": "dependent_privilege_descriptors_still_exist", "2BP01": "dependent_objects_still_exist", "2D000": "invalid_transaction_termination", "2F000": "sql_routine_exception", "2F005": "function_executed_no_return_statement", "2F002": "modifying_sql_data_not_permitted", "2F003": "prohibited_sql_statement_attempted", "2F004": "reading_sql_data_not_permitted", "39P01": "trigger_protocol_violated", "39P02": "srf_protocol_violated", "39P03": "event_trigger_protocol_violated", "3B000": "savepoint_exception", "3B001": "invalid_savepoint_specification", "3D000": "invalid_catalog_name", "3F000": "invalid_schema_name", "40P01": "deadlock_detected", "42P20": "windowing_error", "42P19": 
"invalid_recursion", "42P18": "indeterminate_datatype", "42P21": "collation_mismatch", "42P22": "indeterminate_collation", "428C9": "generated_always", "42P01": "undefined_table", "42P02": "undefined_parameter", "42P03": "duplicate_cursor", "42P04": "duplicate_database", "42P05": "duplicate_prepared_statement", "42P06": "duplicate_schema", "42P07": "duplicate_table", "42P08": "ambiguous_parameter", "42P09": "ambiguous_alias", "42P10": "invalid_column_reference", "42P11": "invalid_cursor_definition", "42P12": "invalid_database_definition", "42P13": "invalid_function_definition", "42P14": "invalid_prepared_statement_definition", "42P15": "invalid_schema_definition", "42P16": "invalid_table_definition", "42P17": "invalid_object_definition", "55P02": "cant_change_runtime_param", "55P03": "lock_not_available", "55P04": "unsafe_new_enum_value_usage", "57P01": "admin_shutdown", "57P02": "crash_shutdown", "57P03": "cannot_connect_now", "57P04": "database_dropped", "58P01": "undefined_file", "58P02": "duplicate_file", "F0000": "config_file_error", "F0001": "lock_file_exists", "HV000": "fdw_error", "HV005": "fdw_column_name_not_found", "HV002": "fdw_dynamic_parameter_value_needed", "HV010": "fdw_function_sequence_error", "HV021": "fdw_inconsistent_descriptor_information", "HV024": "fdw_invalid_attribute_value", "HV007": "fdw_invalid_column_name", "HV008": "fdw_invalid_column_number", "HV004": "fdw_invalid_data_type", "HV006": "fdw_invalid_data_type_descriptors", "HV091": "fdw_invalid_descriptor_field_identifier", "HV00B": "fdw_invalid_handle", "HV00C": "fdw_invalid_option_index", "HV00D": "fdw_invalid_option_name", "HV090": "fdw_invalid_string_length_or_buffer_length", "HV00A": "fdw_invalid_string_format", "HV009": "fdw_invalid_use_of_null_pointer", "HV014": "fdw_too_many_handles", "HV001": "fdw_out_of_memory", "HV00P": "fdw_no_schemas", "HV00J": "fdw_option_name_not_found", "HV00K": "fdw_reply_handle", "HV00Q": "fdw_schema_not_found", "HV00R": "fdw_table_not_found", "HV00L": "fdw_unable_to_create_execution", "HV00M": "fdw_unable_to_create_reply", "HV00N": "fdw_unable_to_establish_connection", "P0000": "plpgsql_error", "P0001": "raise_exception", "P0002": "no_data_found", "P0003": "too_many_rows", "P0004": "assert_failure", "XX000": "internal_error", "XX001": "data_corrupted", "XX002": "index_corrupted" } + "00000": "successful_completion", + "01000": "warning", + "0100C": "dynamic_result_sets_returned", + "01008": "implicit_zero_bit_padding", + "01003": "null_value_eliminated_in_set_function", + "01007": "privilege_not_granted", + "01006": "privilege_not_revoked", + "01004": "string_data_right_truncation", + "01P01": "deprecated_feature", + "02000": "no_data", + "02001": "no_additional_dynamic_result_sets_returned", + "03000": "sql_statement_not_yet_complete", + "08000": "connection_exception", + "08003": "connection_does_not_exist", + "08006": "connection_failure", + "08001": "sqlclient_unable_to_establish_sqlconnection", + "08004": "sqlserver_rejected_establishment_of_sqlconnection", + "08007": "transaction_resolution_unknown", + "08P01": "protocol_violation", + "09000": "triggered_action_exception", + "0A000": "feature_not_supported", + "0B000": "invalid_transaction_initiation", + "0F000": "locator_exception", + "0F001": "invalid_locator_specification", + "0L000": "invalid_grantor", + "0LP01": "invalid_grant_operation", + "0P000": "invalid_role_specification", + "0Z000": "diagnostics_exception", + "0Z002": "stacked_diagnostics_accessed_without_active_handler", + "20000": "case_not_found", + 
"21000": "cardinality_violation", + "22000": "data_exception", + "2202E": "array_subscript_error", + "22021": "character_not_in_repertoire", + "22008": "datetime_field_overflow", + "22012": "division_by_zero", + "22005": "error_in_assignment", + "2200B": "escape_character_conflict", + "22022": "indicator_overflow", + "22015": "interval_field_overflow", + "2201E": "invalid_argument_for_logarithm", + "22014": "invalid_argument_for_ntile_function", + "22016": "invalid_argument_for_nth_value_function", + "2201F": "invalid_argument_for_power_function", + "2201G": "invalid_argument_for_width_bucket_function", + "22018": "invalid_character_value_for_cast", + "22007": "invalid_datetime_format", + "22019": "invalid_escape_character", + "2200D": "invalid_escape_octet", + "22025": "invalid_escape_sequence", + "22P06": "nonstandard_use_of_escape_character", + "22010": "invalid_indicator_parameter_value", + "22023": "invalid_parameter_value", + "2201B": "invalid_regular_expression", + "2201W": "invalid_row_count_in_limit_clause", + "2201X": "invalid_row_count_in_result_offset_clause", + "2202H": "invalid_tablesample_argument", + "2202G": "invalid_tablesample_repeat", + "22009": "invalid_time_zone_displacement_value", + "2200C": "invalid_use_of_escape_character", + "2200G": "most_specific_type_mismatch", + "22004": "null_value_not_allowed", + "22002": "null_value_no_indicator_parameter", + "22003": "numeric_value_out_of_range", + "2200H": "sequence_generator_limit_exceeded", + "22026": "string_data_length_mismatch", + "22001": "string_data_right_truncation", + "22011": "substring_error", + "22027": "trim_error", + "22024": "unterminated_c_string", + "2200F": "zero_length_character_string", + "22P01": "floating_point_exception", + "22P02": "invalid_text_representation", + "22P03": "invalid_binary_representation", + "22P04": "bad_copy_file_format", + "22P05": "untranslatable_character", + "2200L": "not_an_xml_document", + "2200M": "invalid_xml_document", + "2200N": "invalid_xml_content", + "2200S": "invalid_xml_comment", + "2200T": "invalid_xml_processing_instruction", + "23000": "integrity_constraint_violation", + "23001": "restrict_violation", + "23502": "not_null_violation", + "23503": "foreign_key_violation", + "23505": "unique_violation", + "23514": "check_violation", + "23P01": "exclusion_violation", + "24000": "invalid_cursor_state", + "25000": "invalid_transaction_state", + "25001": "active_sql_transaction", + "25002": "branch_transaction_already_active", + "25008": "held_cursor_requires_same_isolation_level", + "25003": "inappropriate_access_mode_for_branch_transaction", + "25004": "inappropriate_isolation_level_for_branch_transaction", + "25005": "no_active_sql_transaction_for_branch_transaction", + "25006": "read_only_sql_transaction", + "25007": "schema_and_data_statement_mixing_not_supported", + "25P01": "no_active_sql_transaction", + "25P02": "in_failed_sql_transaction", + "25P03": "idle_in_transaction_session_timeout", + "26000": "invalid_sql_statement_name", + "27000": "triggered_data_change_violation", + "28000": "invalid_authorization_specification", + "28P01": "invalid_password", + "2B000": "dependent_privilege_descriptors_still_exist", + "2BP01": "dependent_objects_still_exist", + "2D000": "invalid_transaction_termination", + "2F000": "sql_routine_exception", + "2F005": "function_executed_no_return_statement", + "2F002": "modifying_sql_data_not_permitted", + "2F003": "prohibited_sql_statement_attempted", + "2F004": "reading_sql_data_not_permitted", + "34000": "invalid_cursor_name", + 
"38000": "external_routine_exception", + "38001": "containing_sql_not_permitted", + "38002": "modifying_sql_data_not_permitted", + "38003": "prohibited_sql_statement_attempted", + "38004": "reading_sql_data_not_permitted", + "39000": "external_routine_invocation_exception", + "39001": "invalid_sqlstate_returned", + "39004": "null_value_not_allowed", + "39P01": "trigger_protocol_violated", + "39P02": "srf_protocol_violated", + "39P03": "event_trigger_protocol_violated", + "3B000": "savepoint_exception", + "3B001": "invalid_savepoint_specification", + "3D000": "invalid_catalog_name", + "3F000": "invalid_schema_name", + "40000": "transaction_rollback", + "40002": "transaction_integrity_constraint_violation", + "40001": "serialization_failure", + "40003": "statement_completion_unknown", + "40P01": "deadlock_detected", + "42000": "syntax_error_or_access_rule_violation", + "42601": "syntax_error", + "42501": "insufficient_privilege", + "42846": "cannot_coerce", + "42803": "grouping_error", + "42P20": "windowing_error", + "42P19": "invalid_recursion", + "42830": "invalid_foreign_key", + "42602": "invalid_name", + "42622": "name_too_long", + "42939": "reserved_name", + "42804": "datatype_mismatch", + "42P18": "indeterminate_datatype", + "42P21": "collation_mismatch", + "42P22": "indeterminate_collation", + "42809": "wrong_object_type", + "428C9": "generated_always", + "42703": "undefined_column", + "42883": "undefined_function", + "42P01": "undefined_table", + "42P02": "undefined_parameter", + "42704": "undefined_object", + "42701": "duplicate_column", + "42P03": "duplicate_cursor", + "42P04": "duplicate_database", + "42723": "duplicate_function", + "42P05": "duplicate_prepared_statement", + "42P06": "duplicate_schema", + "42P07": "duplicate_table", + "42712": "duplicate_alias", + "42710": "duplicate_object", + "42702": "ambiguous_column", + "42725": "ambiguous_function", + "42P08": "ambiguous_parameter", + "42P09": "ambiguous_alias", + "42P10": "invalid_column_reference", + "42611": "invalid_column_definition", + "42P11": "invalid_cursor_definition", + "42P12": "invalid_database_definition", + "42P13": "invalid_function_definition", + "42P14": "invalid_prepared_statement_definition", + "42P15": "invalid_schema_definition", + "42P16": "invalid_table_definition", + "42P17": "invalid_object_definition", + "44000": "with_check_option_violation", + "53000": "insufficient_resources", + "53100": "disk_full", + "53200": "out_of_memory", + "53300": "too_many_connections", + "53400": "configuration_limit_exceeded", + "54000": "program_limit_exceeded", + "54001": "statement_too_complex", + "54011": "too_many_columns", + "54023": "too_many_arguments", + "55000": "object_not_in_prerequisite_state", + "55006": "object_in_use", + "55P02": "cant_change_runtime_param", + "55P03": "lock_not_available", + "57000": "operator_intervention", + "57014": "query_canceled", + "57P01": "admin_shutdown", + "57P02": "crash_shutdown", + "57P03": "cannot_connect_now", + "57P04": "database_dropped", + "58000": "system_error", + "58030": "io_error", + "58P01": "undefined_file", + "58P02": "duplicate_file", + "72000": "snapshot_too_old", + F0000: "config_file_error", + F0001: "lock_file_exists", + HV000: "fdw_error", + HV005: "fdw_column_name_not_found", + HV002: "fdw_dynamic_parameter_value_needed", + HV010: "fdw_function_sequence_error", + HV021: "fdw_inconsistent_descriptor_information", + HV024: "fdw_invalid_attribute_value", + HV007: "fdw_invalid_column_name", + HV008: "fdw_invalid_column_number", + HV004: 
"fdw_invalid_data_type", + HV006: "fdw_invalid_data_type_descriptors", + HV091: "fdw_invalid_descriptor_field_identifier", + HV00B: "fdw_invalid_handle", + HV00C: "fdw_invalid_option_index", + HV00D: "fdw_invalid_option_name", + HV090: "fdw_invalid_string_length_or_buffer_length", + HV00A: "fdw_invalid_string_format", + HV009: "fdw_invalid_use_of_null_pointer", + HV014: "fdw_too_many_handles", + HV001: "fdw_out_of_memory", + HV00P: "fdw_no_schemas", + HV00J: "fdw_option_name_not_found", + HV00K: "fdw_reply_handle", + HV00Q: "fdw_schema_not_found", + HV00R: "fdw_table_not_found", + HV00L: "fdw_unable_to_create_execution", + HV00M: "fdw_unable_to_create_reply", + HV00N: "fdw_unable_to_establish_connection", + P0000: "plpgsql_error", + P0001: "raise_exception", + P0002: "no_data_found", + P0003: "too_many_rows", + P0004: "assert_failure", + XX000: "internal_error", + XX001: "data_corrupted", + XX002: "index_corrupted", + }, + c2 = { + "20000": "case_not_found", + "21000": "cardinality_violation", + "22000": "data_exception", + "22001": "string_data_right_truncation", + "22002": "null_value_no_indicator_parameter", + "22003": "numeric_value_out_of_range", + "22004": "null_value_not_allowed", + "22005": "error_in_assignment", + "22007": "invalid_datetime_format", + "22008": "datetime_field_overflow", + "22009": "invalid_time_zone_displacement_value", + "22010": "invalid_indicator_parameter_value", + "22011": "substring_error", + "22012": "division_by_zero", + "22013": "invalid_preceding_or_following_size", + "22014": "invalid_argument_for_ntile_function", + "22015": "interval_field_overflow", + "22016": "invalid_argument_for_nth_value_function", + "22018": "invalid_character_value_for_cast", + "22019": "invalid_escape_character", + "22021": "character_not_in_repertoire", + "22022": "indicator_overflow", + "22023": "invalid_parameter_value", + "22024": "unterminated_c_string", + "22025": "invalid_escape_sequence", + "22026": "string_data_length_mismatch", + "22027": "trim_error", + "22030": "duplicate_json_object_key_value", + "22031": "invalid_argument_for_sql_json_datetime_function", + "22032": "invalid_json_text", + "22033": "invalid_sql_json_subscript", + "22034": "more_than_one_sql_json_item", + "22035": "no_sql_json_item", + "22036": "non_numeric_sql_json_item", + "22037": "non_unique_keys_in_a_json_object", + "22038": "singleton_sql_json_item_required", + "22039": "sql_json_array_not_found", + "23000": "integrity_constraint_violation", + "23001": "restrict_violation", + "23502": "not_null_violation", + "23503": "foreign_key_violation", + "23505": "unique_violation", + "23514": "check_violation", + "24000": "invalid_cursor_state", + "25000": "invalid_transaction_state", + "25001": "active_sql_transaction", + "25002": "branch_transaction_already_active", + "25003": "inappropriate_access_mode_for_branch_transaction", + "25004": "inappropriate_isolation_level_for_branch_transaction", + "25005": "no_active_sql_transaction_for_branch_transaction", + "25006": "read_only_sql_transaction", + "25007": "schema_and_data_statement_mixing_not_supported", + "25008": "held_cursor_requires_same_isolation_level", + "26000": "invalid_sql_statement_name", + "27000": "triggered_data_change_violation", + "28000": "invalid_authorization_specification", + "34000": "invalid_cursor_name", + "38000": "external_routine_exception", + "38001": "containing_sql_not_permitted", + "38002": "modifying_sql_data_not_permitted", + "38003": "prohibited_sql_statement_attempted", + "38004": "reading_sql_data_not_permitted", + 
"39000": "external_routine_invocation_exception", + "39001": "invalid_sqlstate_returned", + "39004": "null_value_not_allowed", + "40000": "transaction_rollback", + "40001": "serialization_failure", + "40002": "transaction_integrity_constraint_violation", + "40003": "statement_completion_unknown", + "42000": "syntax_error_or_access_rule_violation", + "42501": "insufficient_privilege", + "42601": "syntax_error", + "42602": "invalid_name", + "42611": "invalid_column_definition", + "42622": "name_too_long", + "42701": "duplicate_column", + "42702": "ambiguous_column", + "42703": "undefined_column", + "42704": "undefined_object", + "42710": "duplicate_object", + "42712": "duplicate_alias", + "42723": "duplicate_function", + "42725": "ambiguous_function", + "42803": "grouping_error", + "42804": "datatype_mismatch", + "42809": "wrong_object_type", + "42830": "invalid_foreign_key", + "42846": "cannot_coerce", + "42883": "undefined_function", + "42939": "reserved_name", + "44000": "with_check_option_violation", + "53000": "insufficient_resources", + "53100": "disk_full", + "53200": "out_of_memory", + "53300": "too_many_connections", + "53400": "configuration_limit_exceeded", + "54000": "program_limit_exceeded", + "54001": "statement_too_complex", + "54011": "too_many_columns", + "54023": "too_many_arguments", + "55000": "object_not_in_prerequisite_state", + "55006": "object_in_use", + "57000": "operator_intervention", + "57014": "query_canceled", + "58000": "system_error", + "58030": "io_error", + "72000": "snapshot_too_old", + "00000": "successful_completion", + "01000": "warning", + "0100C": "dynamic_result_sets_returned", + "01008": "implicit_zero_bit_padding", + "01003": "null_value_eliminated_in_set_function", + "01007": "privilege_not_granted", + "01006": "privilege_not_revoked", + "01004": "string_data_right_truncation", + "01P01": "deprecated_feature", + "02000": "no_data", + "02001": "no_additional_dynamic_result_sets_returned", + "03000": "sql_statement_not_yet_complete", + "08000": "connection_exception", + "08003": "connection_does_not_exist", + "08006": "connection_failure", + "08001": "sqlclient_unable_to_establish_sqlconnection", + "08004": "sqlserver_rejected_establishment_of_sqlconnection", + "08007": "transaction_resolution_unknown", + "08P01": "protocol_violation", + "09000": "triggered_action_exception", + "0A000": "feature_not_supported", + "0B000": "invalid_transaction_initiation", + "0F000": "locator_exception", + "0F001": "invalid_locator_specification", + "0L000": "invalid_grantor", + "0LP01": "invalid_grant_operation", + "0P000": "invalid_role_specification", + "0Z000": "diagnostics_exception", + "0Z002": "stacked_diagnostics_accessed_without_active_handler", + "2202E": "array_subscript_error", + "2200B": "escape_character_conflict", + "2201E": "invalid_argument_for_logarithm", + "2201F": "invalid_argument_for_power_function", + "2201G": "invalid_argument_for_width_bucket_function", + "2200D": "invalid_escape_octet", + "22P06": "nonstandard_use_of_escape_character", + "2201B": "invalid_regular_expression", + "2201W": "invalid_row_count_in_limit_clause", + "2201X": "invalid_row_count_in_result_offset_clause", + "2202H": "invalid_tablesample_argument", + "2202G": "invalid_tablesample_repeat", + "2200C": "invalid_use_of_escape_character", + "2200G": "most_specific_type_mismatch", + "2200H": "sequence_generator_limit_exceeded", + "2200F": "zero_length_character_string", + "22P01": "floating_point_exception", + "22P02": "invalid_text_representation", + "22P03": 
"invalid_binary_representation", + "22P04": "bad_copy_file_format", + "22P05": "untranslatable_character", + "2200L": "not_an_xml_document", + "2200M": "invalid_xml_document", + "2200N": "invalid_xml_content", + "2200S": "invalid_xml_comment", + "2200T": "invalid_xml_processing_instruction", + "2203A": "sql_json_member_not_found", + "2203B": "sql_json_number_not_found", + "2203C": "sql_json_object_not_found", + "2203D": "too_many_json_array_elements", + "2203E": "too_many_json_object_members", + "2203F": "sql_json_scalar_required", + "23P01": "exclusion_violation", + "25P01": "no_active_sql_transaction", + "25P02": "in_failed_sql_transaction", + "25P03": "idle_in_transaction_session_timeout", + "28P01": "invalid_password", + "2B000": "dependent_privilege_descriptors_still_exist", + "2BP01": "dependent_objects_still_exist", + "2D000": "invalid_transaction_termination", + "2F000": "sql_routine_exception", + "2F005": "function_executed_no_return_statement", + "2F002": "modifying_sql_data_not_permitted", + "2F003": "prohibited_sql_statement_attempted", + "2F004": "reading_sql_data_not_permitted", + "39P01": "trigger_protocol_violated", + "39P02": "srf_protocol_violated", + "39P03": "event_trigger_protocol_violated", + "3B000": "savepoint_exception", + "3B001": "invalid_savepoint_specification", + "3D000": "invalid_catalog_name", + "3F000": "invalid_schema_name", + "40P01": "deadlock_detected", + "42P20": "windowing_error", + "42P19": "invalid_recursion", + "42P18": "indeterminate_datatype", + "42P21": "collation_mismatch", + "42P22": "indeterminate_collation", + "428C9": "generated_always", + "42P01": "undefined_table", + "42P02": "undefined_parameter", + "42P03": "duplicate_cursor", + "42P04": "duplicate_database", + "42P05": "duplicate_prepared_statement", + "42P06": "duplicate_schema", + "42P07": "duplicate_table", + "42P08": "ambiguous_parameter", + "42P09": "ambiguous_alias", + "42P10": "invalid_column_reference", + "42P11": "invalid_cursor_definition", + "42P12": "invalid_database_definition", + "42P13": "invalid_function_definition", + "42P14": "invalid_prepared_statement_definition", + "42P15": "invalid_schema_definition", + "42P16": "invalid_table_definition", + "42P17": "invalid_object_definition", + "55P02": "cant_change_runtime_param", + "55P03": "lock_not_available", + "55P04": "unsafe_new_enum_value_usage", + "57P01": "admin_shutdown", + "57P02": "crash_shutdown", + "57P03": "cannot_connect_now", + "57P04": "database_dropped", + "58P01": "undefined_file", + "58P02": "duplicate_file", + F0000: "config_file_error", + F0001: "lock_file_exists", + HV000: "fdw_error", + HV005: "fdw_column_name_not_found", + HV002: "fdw_dynamic_parameter_value_needed", + HV010: "fdw_function_sequence_error", + HV021: "fdw_inconsistent_descriptor_information", + HV024: "fdw_invalid_attribute_value", + HV007: "fdw_invalid_column_name", + HV008: "fdw_invalid_column_number", + HV004: "fdw_invalid_data_type", + HV006: "fdw_invalid_data_type_descriptors", + HV091: "fdw_invalid_descriptor_field_identifier", + HV00B: "fdw_invalid_handle", + HV00C: "fdw_invalid_option_index", + HV00D: "fdw_invalid_option_name", + HV090: "fdw_invalid_string_length_or_buffer_length", + HV00A: "fdw_invalid_string_format", + HV009: "fdw_invalid_use_of_null_pointer", + HV014: "fdw_too_many_handles", + HV001: "fdw_out_of_memory", + HV00P: "fdw_no_schemas", + HV00J: "fdw_option_name_not_found", + HV00K: "fdw_reply_handle", + HV00Q: "fdw_schema_not_found", + HV00R: "fdw_table_not_found", + HV00L: "fdw_unable_to_create_execution", + 
HV00M: "fdw_unable_to_create_reply",
+      HV00N: "fdw_unable_to_establish_connection",
+      P0000: "plpgsql_error",
+      P0001: "raise_exception",
+      P0002: "no_data_found",
+      P0003: "too_many_rows",
+      P0004: "assert_failure",
+      XX000: "internal_error",
+      XX001: "data_corrupted",
+      XX002: "index_corrupted",
+    };
 
   //@ts-ignore
   return c2[code] || errs[code] || code;
@@ -250,5 +509,4 @@ export const sqlErrCodeToMsg = (code: string) => {
 
   https://www.postgresql.org/docs/13/errcodes-appendix.html
   JSON.stringify([...THE_table_$0.rows].map(t => [...t.children].map(u => u.innerText)).filter((d, i) => i && d.length > 1).reduce((a, v)=>({ ...a, [v[0]]: v[1] }), {}))
 */
-}
-
+};
diff --git a/lib/DboBuilder/uploadFile.ts b/lib/DboBuilder/uploadFile.ts
index 7172461e..143de655 100644
--- a/lib/DboBuilder/uploadFile.ts
+++ b/lib/DboBuilder/uploadFile.ts
@@ -4,61 +4,97 @@ import { ValidateRowBasic } from "../PublishParser/PublishParser";
 import { TableHandler } from "./TableHandler/TableHandler";
 
 export const isFile = (row: AnyObject) => {
-  return Boolean(row && isObject(row) && getKeys(row).sort().join() === ["name", "data"].sort().join() && row.data && (typeof row.data === "string" || Buffer.isBuffer(row.data)) && typeof row.name === "string")
-}
+  return Boolean(
+    row &&
+      isObject(row) &&
+      getKeys(row).sort().join() === ["name", "data"].sort().join() &&
+      row.data &&
+      (typeof row.data === "string" || Buffer.isBuffer(row.data)) &&
+      typeof row.name === "string",
+  );
+};
 
 type UploadFileArgs = {
   row: AnyObject;
-  validate: ValidateRowBasic | undefined; 
+  validate: ValidateRowBasic | undefined;
   localParams: LocalParams | undefined;
   /**
    * Used to update an existing file
    */
   mediaId?: string;
-}
+};
 
-export async function uploadFile(this: TableHandler, { row, localParams, validate, mediaId }: UploadFileArgs): Promise<Media> {
+export async function uploadFile(
+  this: TableHandler,
+  { row, localParams, validate, mediaId }: UploadFileArgs,
+): Promise<Media> {
   if (!this.dboBuilder.prostgles?.fileManager) throw "fileManager not set up";
 
-  if (!isFile(row)) throw "Expecting only two properties for file upload: { name: string; data: File | string | Buffer }; but got: " + Object.entries(row).map(([k, v]) => `${k}: ${typeof v}`).join(", ");
+  if (!isFile(row))
+    throw (
+      "Expecting only two properties for file upload: { name: string; data: File | string | Buffer }; but got: " +
+      Object.entries(row)
+        .map(([k, v]) => `${k}: ${typeof v}`)
+        .join(", ")
+    );
 
   const { data, name } = row;
 
-  const media_id = mediaId ?? (await this.db.oneOrNone("SELECT gen_random_uuid() as name")).name;
+  const media_id =
+    mediaId ?? 
+ (await this.db.oneOrNone("SELECT gen_random_uuid() as name")).name; const nestedInsert = localParams?.nestedInsert; - const type = await this.dboBuilder.prostgles.fileManager.getValidatedFileType({ file: data, fileName: name, tableName: nestedInsert?.previousTable, colName: nestedInsert?.referencingColumn }); + const type = await this.dboBuilder.prostgles.fileManager.getValidatedFileType( + { + file: data, + fileName: name, + tableName: nestedInsert?.previousTable, + colName: nestedInsert?.referencingColumn, + }, + ); const media_name = `${media_id}.${type.ext}`; - const parsedMediaKeys = ["id", "name", "original_name", "extension", "content_type"] as const - const media: Required> = { + const parsedMediaKeys = [ + "id", + "name", + "original_name", + "extension", + "content_type", + ] as const; + const media: Required> = { id: media_id, name: media_name, original_name: name, extension: type.ext, - content_type: type.mime - } + content_type: type.mime, + }; if (validate) { - if(!localParams) throw "localParams missing"; - const parsedMedia = await validate({ row: media, dbx: this.getFinalDbo(localParams), localParams }); - const missingKeys = parsedMediaKeys.filter(k => !parsedMedia[k]) - if(missingKeys.length){ + if (!localParams) throw "localParams missing"; + const parsedMedia = await validate({ + row: media, + dbx: this.getFinalDbo(localParams), + localParams, + }); + const missingKeys = parsedMediaKeys.filter((k) => !parsedMedia[k]); + if (missingKeys.length) { throw `Some keys are missing from file insert validation: ${missingKeys}`; } } - const _media: Media = await this.dboBuilder.prostgles.fileManager.uploadAsMedia({ - item: { - data, - name: media.name ?? "????", - content_type: media.content_type as any, - extension: media.extension - }, - // imageCompression: { - // inside: { - // width: 1100, - // height: 630 - // } - // } - }); + const _media: Media = + await this.dboBuilder.prostgles.fileManager.uploadAsMedia({ + item: { + data, + name: media.name ?? 
"????", + content_type: media.content_type as any, + extension: media.extension, + }, + // imageCompression: { + // inside: { + // width: 1100, + // height: 630 + // } + // } + }); const mediaRow = { ...media, @@ -66,4 +102,4 @@ export async function uploadFile(this: TableHandler, { row, localParams, validat }; return mediaRow; -} \ No newline at end of file +} diff --git a/lib/Event_Trigger_Tags.ts b/lib/Event_Trigger_Tags.ts index 32e49732..fc42fa5a 100644 --- a/lib/Event_Trigger_Tags.ts +++ b/lib/Event_Trigger_Tags.ts @@ -1,4 +1,3 @@ - export const EVENT_TRIGGER_TAGS = [ "ALTER AGGREGATE", "ALTER COLLATION", @@ -110,9 +109,12 @@ export const EVENT_TRIGGER_TAGS = [ "REFRESH MATERIALIZED VIEW", "REVOKE", "SECURITY LABEL", - "SELECT INTO" + "SELECT INTO", ] as const; -export type EventTriggerTag = typeof EVENT_TRIGGER_TAGS[number]; +export type EventTriggerTag = (typeof EVENT_TRIGGER_TAGS)[number]; -export type EventTriggerTagFilter = Partial> | Partial> | "*"; +export type EventTriggerTagFilter = + | Partial> + | Partial> + | "*"; diff --git a/lib/FileManager/FileManager.ts b/lib/FileManager/FileManager.ts index 8edf1ed4..8722e3a9 100644 --- a/lib/FileManager/FileManager.ts +++ b/lib/FileManager/FileManager.ts @@ -23,7 +23,10 @@ import { ExpressApp } from "../RestApi"; export const HOUR = 3600 * 1000; -export const asSQLIdentifier = async (name: string, db: DB): Promise => { +export const asSQLIdentifier = async ( + name: string, + db: DB, +): Promise => { return (await db.one("select format('%I', $1) as name", [name]))?.name; }; @@ -51,7 +54,9 @@ export type FileUploadArgs = { contentType: string; file: string | Buffer | stream.PassThrough; onFinish: ( - ...args: [error: Error, result: undefined] | [error: undefined, result: UploadedCloudFile] + ...args: + | [error: Error, result: undefined] + | [error: undefined, result: UploadedCloudFile] ) => void; onProgress?: (bytesUploaded: number) => void; }; @@ -59,7 +64,10 @@ export type CloudClient = { upload: (file: FileUploadArgs) => Promise; downloadAsStream: (name: string) => Promise; delete: (fileName: string) => Promise; - getSignedUrlForDownload: (fileName: string, expiresInSeconds: number) => Promise; + getSignedUrlForDownload: ( + fileName: string, + expiresInSeconds: number, + ) => Promise; }; export type LocalConfig = { @@ -153,11 +161,14 @@ export class FileManager { } } else { console.error( - "FileManager checkInterval delayedDelete FAIL: Could not access file table tableHandler.delete()" + "FileManager checkInterval delayedDelete FAIL: Could not access file table tableHandler.delete()", ); } }, - Math.max(10000, (fullConfig.delayedDelete.checkIntervalHours || 0) * HOUR) + Math.max( + 10000, + (fullConfig.delayedDelete.checkIntervalHours || 0) * HOUR, + ), ); } } @@ -191,7 +202,8 @@ export class FileManager { getValidatedFileType = getValidatedFileType.bind(this); - getLocalFileUrl = (name: string) => (this.fileRoute ? `${this.fileRoute}/${name}` : ""); + getLocalFileUrl = (name: string) => + this.fileRoute ? 
`${this.fileRoute}/${name}` : ""; checkFreeSpace = async (folderPath: string, fileSize = 0) => { if (!this.cloudClient && "localFolderPath" in this.config) { @@ -268,24 +280,32 @@ export class FileManager { return res; }; - async getFileCloudDownloadURL(fileName: string, expiresInSecondsRaw: number = 30 * 60) { + async getFileCloudDownloadURL( + fileName: string, + expiresInSecondsRaw: number = 30 * 60, + ) { const expiresInSeconds = Math.max(1, Math.round(expiresInSecondsRaw)); - return await this.cloudClient?.getSignedUrlForDownload(fileName, expiresInSeconds); + return await this.cloudClient?.getSignedUrlForDownload( + fileName, + expiresInSeconds, + ); } - parseSQLIdentifier = async (name: string) => asSQLIdentifier(name, this.prostgles!.db!); // this.prostgles.dbo.sql<"value">("select format('%I', $1)", [name], { returnType: "value" } ) + parseSQLIdentifier = async (name: string) => + asSQLIdentifier(name, this.prostgles!.db!); // this.prostgles.dbo.sql<"value">("select format('%I', $1)", [name], { returnType: "value" } ) getColInfo = (args: { tableName: string; colName: string; }): ValidatedColumnInfo["file"] | undefined => { const { colName, tableName } = args; - const tableConfig = this.prostgles?.opts.fileTable?.referencedTables?.[tableName]; + const tableConfig = + this.prostgles?.opts.fileTable?.referencedTables?.[tableName]; const isReferencingFileTable = this.dbo[tableName]?.columns?.some( (c) => c.name === colName && c.references && - c.references?.some(({ ftable }) => ftable === this.tableName) + c.references?.some(({ ftable }) => ftable === this.tableName), ); const allowAllFiles = { acceptedContent: "*" } as const; if (isReferencingFileTable) { @@ -300,13 +320,15 @@ export class FileManager { init = initFileManager.bind(this); destroy = () => { - removeExpressRoute(this.prostgles?.opts.fileTable?.expressApp, [this.fileRouteExpress]); + removeExpressRoute(this.prostgles?.opts.fileTable?.expressApp, [ + this.fileRouteExpress, + ]); }; } export const removeExpressRoute = ( app: ExpressApp | undefined, - routePaths: (string | undefined)[] + routePaths: (string | undefined)[], ) => { const routes = app?._router?.stack; if (routes) { @@ -318,7 +340,10 @@ export const removeExpressRoute = ( } }; -export const removeExpressRouteByName = (app: ExpressApp | undefined, name: string) => { +export const removeExpressRouteByName = ( + app: ExpressApp | undefined, + name: string, +) => { const routes = app?._router?.stack; if (routes) { routes.forEach((route, i) => { @@ -330,15 +355,17 @@ export const removeExpressRouteByName = (app: ExpressApp | undefined, name: stri }; export const getFileTypeFromFilename = ( - fileName: string -): { mime: ALLOWED_CONTENT_TYPE; ext: ALLOWED_EXTENSION | string } | undefined => { + fileName: string, +): + | { mime: ALLOWED_CONTENT_TYPE; ext: ALLOWED_EXTENSION | string } + | undefined => { const nameParts = fileName.split("."); if (nameParts.length < 2) return undefined; const nameExt = nameParts.at(-1)!.toLowerCase(), mime = getKeys(CONTENT_TYPE_TO_EXT).find((k) => - (CONTENT_TYPE_TO_EXT[k] as readonly string[]).includes(nameExt) + (CONTENT_TYPE_TO_EXT[k] as readonly string[]).includes(nameExt), ); if (!mime) return undefined; @@ -354,7 +381,7 @@ export const getFileTypeFromFilename = ( export const getFileType = async ( file: Buffer | string, - fileName: string + fileName: string, ): Promise<{ mime: ALLOWED_CONTENT_TYPE; ext: ALLOWED_EXTENSION }> => { const { fileTypeFromBuffer } = await (eval('import("file-type")') as Promise< typeof 
import("file-type") @@ -362,12 +389,17 @@ export const getFileType = async ( const fileNameMime = getFileTypeFromFilename(fileName); if (!fileNameMime?.ext) throw new Error("File name must contain extenion"); - const res = await fileTypeFromBuffer(typeof file === "string" ? Buffer.from(file, "utf8") : file); + const res = await fileTypeFromBuffer( + typeof file === "string" ? Buffer.from(file, "utf8") : file, + ); if (!res) { /* Set correct/missing extension */ const nameExt = fileNameMime?.ext; - if (["xml", "txt", "csv", "tsv", "svg", "sql"].includes(nameExt) && fileNameMime.mime) { + if ( + ["xml", "txt", "csv", "tsv", "svg", "sql"].includes(nameExt) && + fileNameMime.mime + ) { return fileNameMime as any; } @@ -375,7 +407,7 @@ export const getFileType = async ( } else { if (!res.ext || fileNameMime?.ext.toLowerCase() !== res.ext.toLowerCase()) { throw new Error( - `There is a mismatch between file name extension and actual buffer extension: ${fileNameMime?.ext} vs ${res.ext}` + `There is a mismatch between file name extension and actual buffer extension: ${fileNameMime?.ext} vs ${res.ext}`, ); } } diff --git a/lib/FileManager/getValidatedFileType.ts b/lib/FileManager/getValidatedFileType.ts index d194d283..c73246e5 100644 --- a/lib/FileManager/getValidatedFileType.ts +++ b/lib/FileManager/getValidatedFileType.ts @@ -1,6 +1,16 @@ -import { ALLOWED_CONTENT_TYPE, ALLOWED_EXTENSION, CONTENT_TYPE_TO_EXT, getKeys, isObject } from "prostgles-types"; +import { + ALLOWED_CONTENT_TYPE, + ALLOWED_EXTENSION, + CONTENT_TYPE_TO_EXT, + getKeys, + isObject, +} from "prostgles-types"; import { parseFieldFilter } from "../DboBuilder/ViewHandler/parseFieldFilter"; -import { FileManager, getFileType, getFileTypeFromFilename } from "./FileManager"; +import { + FileManager, + getFileType, + getFileTypeFromFilename, +} from "./FileManager"; type Args = { file: Buffer | string; @@ -8,7 +18,10 @@ type Args = { colName?: string; tableName?: string; }; -export async function getValidatedFileType(this: FileManager, args: Args): Promise<{ +export async function getValidatedFileType( + this: FileManager, + args: Args, +): Promise<{ mime: string | ALLOWED_CONTENT_TYPE; ext: string | ALLOWED_EXTENSION; @@ -17,53 +30,84 @@ export async function getValidatedFileType(this: FileManager, args: Args): Promi }> { const { file, fileName, tableName, colName } = args; const config = this.prostgles?.opts.fileTable; - if(!config) throw new Error("File table config missing"); + if (!config) throw new Error("File table config missing"); - const buffer = typeof file === "string"? Buffer.from(file, 'utf8') : file; + const buffer = typeof file === "string" ? 
Buffer.from(file, "utf8") : file; const result = await getFileTypeFromFilename(fileName); - if(tableName && colName){ + if (tableName && colName) { const tableConfig = config.referencedTables?.[tableName]; - if(tableConfig && isObject(tableConfig) && tableConfig.referenceColumns[colName]){ + if ( + tableConfig && + isObject(tableConfig) && + tableConfig.referenceColumns[colName] + ) { const colConfig = tableConfig.referenceColumns[colName]!; - if(colConfig.maxFileSizeMB){ + if (colConfig.maxFileSizeMB) { const actualBufferSize = Buffer.byteLength(buffer); - if((actualBufferSize/1e6) > colConfig.maxFileSizeMB){ - throw new Error(`Provided file is larger than the ${colConfig.maxFileSizeMB}MB limit`); + if (actualBufferSize / 1e6 > colConfig.maxFileSizeMB) { + throw new Error( + `Provided file is larger than the ${colConfig.maxFileSizeMB}MB limit`, + ); } } - - if("acceptedContent" in colConfig && colConfig.acceptedContent && colConfig.acceptedContent !== "*"){ + + if ( + "acceptedContent" in colConfig && + colConfig.acceptedContent && + colConfig.acceptedContent !== "*" + ) { const mime = await getFileType(buffer, fileName); - const CONTENTS = [ - "image", - "audio", - "video", - "text", - "application", - ]; - const allowedContent = parseFieldFilter(colConfig.acceptedContent, false, CONTENTS); - if(!allowedContent.some(c => mime.mime.startsWith(c))){ - throw new Error(`Dissallowed content type provided: ${mime.mime.split("/")[0]}. Allowed content types: ${allowedContent} `) + const CONTENTS = ["image", "audio", "video", "text", "application"]; + const allowedContent = parseFieldFilter( + colConfig.acceptedContent, + false, + CONTENTS, + ); + if (!allowedContent.some((c) => mime.mime.startsWith(c))) { + throw new Error( + `Disallowed content type provided: ${mime.mime.split("/")[0]}. Allowed content types: ${allowedContent} `, + ); } - } else if("acceptedContentType" in colConfig && colConfig.acceptedContentType && colConfig.acceptedContentType !== "*"){ + } else if ( + "acceptedContentType" in colConfig && + colConfig.acceptedContentType && + colConfig.acceptedContentType !== "*" + ) { const mime = await getFileType(buffer, fileName); - const allowedContentTypes = parseFieldFilter(colConfig.acceptedContentType, false, getKeys(CONTENT_TYPE_TO_EXT)); - - if(!allowedContentTypes.some(c => c === mime.mime)){ - throw new Error(`Dissallowed MIME provided: ${mime.mime}. Allowed MIME values: ${allowedContentTypes} `) + const allowedContentTypes = parseFieldFilter( + colConfig.acceptedContentType, + false, + getKeys(CONTENT_TYPE_TO_EXT), + ); + + if (!allowedContentTypes.some((c) => c === mime.mime)) { + throw new Error( + `Disallowed MIME provided: ${mime.mime}. Allowed MIME values: ${allowedContentTypes} `, + ); } - } else if("acceptedFileTypes" in colConfig && colConfig.acceptedFileTypes && colConfig.acceptedFileTypes !== "*"){ + } else if ( + "acceptedFileTypes" in colConfig && + colConfig.acceptedFileTypes && + colConfig.acceptedFileTypes !== "*" + ) { const mime = await getFileType(buffer, fileName); - const allowedExtensions = parseFieldFilter(colConfig.acceptedFileTypes, false, Object.values(CONTENT_TYPE_TO_EXT).flat()); - - if(!allowedExtensions.some(c => c === mime.ext)){ - throw new Error(`Dissallowed extension provided: ${mime.ext}. 
Allowed extension values: ${allowedExtensions} `) + const allowedExtensions = parseFieldFilter( + colConfig.acceptedFileTypes, + false, + Object.values(CONTENT_TYPE_TO_EXT).flat(), + ); + + if (!allowedExtensions.some((c) => c === mime.ext)) { + throw new Error( + `Disallowed extension provided: ${mime.ext}. Allowed extension values: ${allowedExtensions} `, + ); } } - } + } } - if(!result?.mime) throw `File MIME type not found for the provided extension: ${result?.ext}`; + if (!result?.mime) + throw `File MIME type not found for the provided extension: ${result?.ext}`; return result; -} \ No newline at end of file +} diff --git a/lib/FileManager/initFileManager.ts b/lib/FileManager/initFileManager.ts index 34b5f3ff..51749cdc 100644 --- a/lib/FileManager/initFileManager.ts +++ b/lib/FileManager/initFileManager.ts @@ -17,9 +17,10 @@ export async function initFileManager(this: FileManager, prg: Prostgles) { const { tableName = "files", referencedTables = {} } = fileTable; this.tableName = tableName; - const maxBfSizeMB = (prg.opts.io?.engine?.opts?.maxHttpBufferSize || 1e6) / 1e6; + const maxBfSizeMB = + (prg.opts.io?.engine?.opts?.maxHttpBufferSize || 1e6) / 1e6; console.log( - `Prostgles: Initiated file manager. Max allowed file size: ${maxBfSizeMB}MB (maxHttpBufferSize = 1e6). To increase this set maxHttpBufferSize in socket.io server init options` + `Prostgles: Initiated file manager. Max allowed file size: ${maxBfSizeMB}MB (maxHttpBufferSize = 1e6). To increase this set maxHttpBufferSize in socket.io server init options`, ); const canCreate = await canCreateTables(this.db); @@ -65,7 +66,7 @@ export async function initFileManager(this: FileManager, prg: Prostgles) { UNIQUE(id), UNIQUE(name) )`, - `Create fileTable ${asName(tableName)}` + `Create fileTable ${asName(tableName)}`, ); await prg.refreshDBO(); } @@ -84,7 +85,9 @@ export async function initFileManager(this: FileManager, prg: Prostgles) { for (const [colName] of Object.entries(tableConfig.referenceColumns)) { const existingCol = cols.find((c) => c.name === colName); if (existingCol) { - if (existingCol.references?.some(({ ftable }) => ftable === tableName)) { + if ( + existingCol.references?.some(({ ftable }) => ftable === tableName) + ) { // All ok } else { if (existingCol.udt_name === "uuid") { @@ -94,12 +97,12 @@ export async function initFileManager(this: FileManager, prg: Prostgles) { await runQuery(query, msg); } catch (e) { console.error( - `Could not add constraing. Err: ${e instanceof Error ? e.message : JSON.stringify(e)}` + `Could not add constraint. Err: ${e instanceof Error ? e.message : JSON.stringify(e)}`, ); } } else { console.error( - `Referenced file column ${refTable} (${colName}) exists but is not of required type (UUID). Choose a different column name or ALTER the existing column to match the type and the data found in file table ${tableName}(id)` + `Referenced file column ${refTable} (${colName}) exists but is not of required type (UUID). Choose a different column name or ALTER the existing column to match the type and the data found in file table ${tableName}(id)`, ); } } @@ -115,7 +118,7 @@ export async function initFileManager(this: FileManager, prg: Prostgles) { // } // await createColumn(); console.error( - `Referenced file column ${refTable} (${colName}) does not exist. Create it using this query:\n${query}` + `Referenced file column ${refTable} (${colName}) does not exist. 
Create it using this query:\n${query}`, ); } } @@ -154,7 +157,13 @@ export async function initFileManager(this: FileManager, prg: Prostgles) { } const id = name.slice(0, 36); const selectParams = { - select: { id: 1, name: 1, signed_url: 1, signed_url_expires: 1, content_type: 1 }, + select: { + id: 1, + name: 1, + signed_url: 1, + signed_url_expires: 1, + content_type: 1, + }, }; const media = await runClientRequest.bind(this.prostgles)({ type: "http", @@ -178,7 +187,10 @@ export async function initFileManager(this: FileManager, prg: Prostgles) { const EXPIRES = Date.now() + HOUR; if (!url || expires < EXPIRES) { url = await this.getFileCloudDownloadURL(media.name, 60 * 60); - await mediaTable.update({ name }, { signed_url: url, signed_url_expires: EXPIRES }); + await mediaTable.update( + { name }, + { signed_url: url, signed_url_expires: EXPIRES }, + ); } res.redirect(url); diff --git a/lib/FileManager/upload.ts b/lib/FileManager/upload.ts index 911ba23d..4d5fc73e 100644 --- a/lib/FileManager/upload.ts +++ b/lib/FileManager/upload.ts @@ -1,26 +1,30 @@ -import { FileManager, LocalConfig, OnProgress, UploadedItem } from "./FileManager"; -import * as stream from 'stream'; -import * as fs from 'fs'; +import { + FileManager, + LocalConfig, + OnProgress, + UploadedItem, +} from "./FileManager"; +import * as stream from "stream"; +import * as fs from "fs"; export async function upload( this: FileManager, - file: Buffer | string | stream.PassThrough, + file: Buffer | string | stream.PassThrough, name: string, mime: string, - onProgress?: OnProgress + onProgress?: OnProgress, ): Promise { - return new Promise(async (resolve, reject) => { - if(!file){ + if (!file) { throw "No file. Expecting: Buffer | String | stream.PassThrough"; } - if(!name){ + if (!name) { throw "No file name. Expecting: String"; } const url = this.getLocalFileUrl(name); - if(!this.cloudClient){ - if(file instanceof stream.PassThrough){ + if (!this.cloudClient) { + if (file instanceof stream.PassThrough) { throw new Error("S3 config missing. 
Can only upload streams to S3"); } const config = this.config as LocalConfig; @@ -31,32 +35,30 @@ export async function upload( resolve({ url, etag: `none`, - content_length: fs.statSync(filePath).size + content_length: fs.statSync(filePath).size, }); - } catch(err){ + } catch (err) { console.error("Error saving file locally", err); - reject("Internal error") + reject("Internal error"); } } else { - let content_length = 0; this.cloudClient.upload({ fileName: name, contentType: mime, file, onFinish: (err, uploaded) => { - if(err){ + if (err) { reject(err.toString()); } else { resolve({ ...uploaded, url }); } }, - onProgress: loaded => { + onProgress: (loaded) => { content_length = loaded; onProgress?.({ loaded, total: content_length }); - } + }, }); } - }); -} \ No newline at end of file +} diff --git a/lib/FileManager/uploadStream.ts b/lib/FileManager/uploadStream.ts index f7f9208a..e88e9cb3 100644 --- a/lib/FileManager/uploadStream.ts +++ b/lib/FileManager/uploadStream.ts @@ -1,54 +1,55 @@ import { FileManager, OnProgress, UploadedItem } from "./FileManager"; -import * as fs from 'fs'; -import * as stream from 'stream'; -import * as path from "path"; +import * as fs from "fs"; +import * as stream from "stream"; +import * as path from "path"; export function uploadStream( this: FileManager, name: string, mime: string, onProgress?: OnProgress, - onError?: (error: any)=>void, - onEnd?: (item: UploadedItem)=>void, - expectedSizeBytes?: number -){ + onError?: (error: any) => void, + onEnd?: (item: UploadedItem) => void, + expectedSizeBytes?: number, +) { const passThrough = new stream.PassThrough(); - if(!this.cloudClient && "localFolderPath" in this.config) { - + if (!this.cloudClient && "localFolderPath" in this.config) { try { - this.checkFreeSpace(this.config.localFolderPath, expectedSizeBytes).catch(err => { - onError?.(err) - passThrough.end(); - }); - const url = this.getLocalFileUrl(name) + this.checkFreeSpace(this.config.localFolderPath, expectedSizeBytes).catch( + (err) => { + onError?.(err); + passThrough.end(); + }, + ); + const url = this.getLocalFileUrl(name); fs.mkdirSync(this.config.localFolderPath, { recursive: true }); const filePath = path.resolve(`${this.config.localFolderPath}/${name}`); const writeStream = fs.createWriteStream(filePath); let errored = false; let loaded = 0; - writeStream.on('error', err => { + writeStream.on("error", (err) => { errored = true; - onError?.(err) + onError?.(err); }); let lastProgress = Date.now(); const throttle = 1000; - if(onProgress){ - passThrough.on('data', function(chunk){ + if (onProgress) { + passThrough.on("data", function (chunk) { loaded += chunk.length; const now = Date.now(); - if(now - lastProgress > throttle){ + if (now - lastProgress > throttle) { lastProgress = now; onProgress?.({ loaded, total: expectedSizeBytes ?? 
0 }); } }); } - if(onEnd) { - writeStream.on('finish', () => { - if(errored) return; + if (onEnd) { + writeStream.on("finish", () => { + if (errored) return; let content_length = 0; try { content_length = fs.statSync(filePath).size; @@ -57,23 +58,21 @@ export function uploadStream( url, filePath, etag: `none`, - content_length - }) - } catch (err){ - onError?.(err) + content_length, + }); + } catch (err) { + onError?.(err); } - }) + }); } - passThrough.pipe(writeStream); - } catch(err){ - onError?.(err) + } catch (err) { + onError?.(err); } } else { - this.upload(passThrough, name, mime, onProgress).then(onEnd) - .catch(onError) + this.upload(passThrough, name, mime, onProgress).then(onEnd).catch(onError); } - + return passThrough; -} \ No newline at end of file +} diff --git a/lib/Filtering.ts b/lib/Filtering.ts index 1cc1adfe..6977be51 100644 --- a/lib/Filtering.ts +++ b/lib/Filtering.ts @@ -1,33 +1,31 @@ - - import { BetweenFilterKeys, CompareFilterKeys, CompareInFilterKeys, FilterDataType, FullFilter, - GeomFilterKeys, GeomFilter_Funcs, + GeomFilterKeys, + GeomFilter_Funcs, JsonbFilterKeys, TextFilterKeys, TextFilter_FullTextSearchFilterKeys, getKeys, isEmpty, - isObject + isObject, } from "prostgles-types"; import { SelectItem } from "./DboBuilder/QueryBuilder/QueryBuilder"; import { pgp } from "./DboBuilder/DboBuilderTypes"; - export const FILTER_OPERANDS = [ ...TextFilterKeys, ...JsonbFilterKeys, ...CompareFilterKeys, ...BetweenFilterKeys, - ...CompareInFilterKeys + ...CompareInFilterKeys, ] as const; export const FILTER_OPERAND_TO_SQL_OPERAND = Object.fromEntries( - FILTER_OPERANDS.map(filterOperand => { + FILTER_OPERANDS.map((filterOperand) => { let sqlOperand = filterOperand as string; if (filterOperand === "$eq") sqlOperand = "="; else if (filterOperand === "$gt") sqlOperand = ">"; @@ -43,18 +41,20 @@ export const FILTER_OPERAND_TO_SQL_OPERAND = Object.fromEntries( else if (filterOperand === "$nin") sqlOperand = "NOT IN"; else if (filterOperand === "$between") sqlOperand = "BETWEEN"; else if (filterOperand === "$notBetween") sqlOperand = "NOT BETWEEN"; - else if (filterOperand === "$isDistinctFrom") sqlOperand = "IS DISTINCT FROM"; - else if (filterOperand === "$isNotDistinctFrom") sqlOperand = "IS NOT DISTINCT FROM"; + else if (filterOperand === "$isDistinctFrom") + sqlOperand = "IS DISTINCT FROM"; + else if (filterOperand === "$isNotDistinctFrom") + sqlOperand = "IS NOT DISTINCT FROM"; return [filterOperand, sqlOperand]; - }) -) as Record; + }), +) as Record<(typeof FILTER_OPERANDS)[number], string>; /** -* Parse a single filter -* Ensure only single key objects reach this point -*/ -type ParseFilterItemArgs = { - filter: FullFilter; + * Parse a single filter + * Ensure only single key objects reach this point + */ +type ParseFilterItemArgs = { + filter: FullFilter; select: SelectItem[] | undefined; tableAlias: string | undefined; allowedColumnNames: string[]; @@ -63,401 +63,453 @@ type ParseFilterItemArgs = { export const parseFilterItem = (args: ParseFilterItemArgs): string => { const { filter: _f, select, tableAlias, allowedColumnNames } = args; - if(!_f || isEmpty(_f)) return ""; + if (!_f || isEmpty(_f)) return ""; const mErr = (msg: string) => { - throw `${msg}: ${JSON.stringify(_f, null, 2)}` + throw `${msg}: ${JSON.stringify(_f, null, 2)}`; }; const asValue = (v: any) => pgp.as.format("$1", [v]); - const fKeys = getKeys(_f) - if(fKeys.length === 0){ + const fKeys = getKeys(_f); + if (fKeys.length === 0) { return ""; - - /** - * { field1: cond1, field2: cond2 } - */ - } 
else if(fKeys.length > 1){ - return fKeys.map(fk => parseFilterItem({ - filter: { [fk]: _f[fk] }, - select, - tableAlias, - allowedColumnNames, - })) - .sort() /* sorted to ensure duplicate subscription channels are not created due to different condition order */ - .join(" AND ") + + /** + * { field1: cond1, field2: cond2 } + */ + } else if (fKeys.length > 1) { + return fKeys + .map((fk) => + parseFilterItem({ + filter: { [fk]: _f[fk] }, + select, + tableAlias, + allowedColumnNames, + }), + ) + .sort() /* sorted to ensure duplicate subscription channels are not created due to different condition order */ + .join(" AND "); } const fKey: string = fKeys[0]!; let selItem: SelectItem | undefined; - if(select) { - selItem = select.find(s => fKey === s.alias); + if (select) { + selItem = select.find((s) => fKey === s.alias); } let rightF: FilterDataType = (_f as any)[fKey]; const validateSelectedItemFilter = (selectedItem: SelectItem | undefined) => { const fields = selectedItem?.getFields(); - if(Array.isArray(fields) && fields.length > 1) { - const dissallowedFields = fields.filter(fname => !allowedColumnNames.includes(fname)); - if(dissallowedFields.length){ - throw new Error(`Invalid/disallowed columns found in filter: ${dissallowedFields}`) + if (Array.isArray(fields) && fields.length > 1) { + const dissallowedFields = fields.filter( + (fname) => !allowedColumnNames.includes(fname), + ); + if (dissallowedFields.length) { + throw new Error( + `Invalid/disallowed columns found in filter: ${dissallowedFields}`, + ); } } - } + }; const getLeftQ = (selItm: SelectItem) => { validateSelectedItemFilter(selItem); - if(selItm.type === "function" || selItm.type === "aggregation") return selItm.getQuery(); + if (selItm.type === "function" || selItm.type === "aggregation") + return selItm.getQuery(); return selItm.getQuery(tableAlias); - } + }; /** - * Parsed left side of the query - */ - let leftQ: string | undefined;// = asName(selItem.alias); + * Parsed left side of the query + */ + let leftQ: string | undefined; // = asName(selItem.alias); /* Select item not found. Check if dot/json notation. Build obj if necessary */ const dot_notation_delims = ["->", "."]; - if(!selItem){ - + if (!selItem) { /* See if dot notation. Pick the best matching starting string */ - if(select){ - selItem = select.find(s => - dot_notation_delims.find(delimiter => fKey.startsWith(s.alias + delimiter)) + if (select) { + selItem = select.find((s) => + dot_notation_delims.find((delimiter) => + fKey.startsWith(s.alias + delimiter), + ), ); validateSelectedItemFilter(selItem); } - if(!selItem) { - return mErr("Bad filter. Could not match to a column or alias or dot notation" + select?.map(s => s.alias)); + if (!selItem) { + return mErr( + "Bad filter. 
Could not match to a column or alias or dot notation" + + select?.map((s) => s.alias), + ); } let remainingStr = fKey.slice(selItem.alias.length); /* Is json path spec */ - if(remainingStr.startsWith("->")){ - + if (remainingStr.startsWith("->")) { /** Has shorthand operand 'col->>key.<>' */ - const matchingOperand = CompareFilterKeys.find(operand => remainingStr.endsWith(`.${operand}`)); - if(matchingOperand){ - remainingStr = remainingStr.slice(0, -matchingOperand.length - 1) - rightF = { [matchingOperand]: rightF } + const matchingOperand = CompareFilterKeys.find((operand) => + remainingStr.endsWith(`.${operand}`), + ); + if (matchingOperand) { + remainingStr = remainingStr.slice(0, -matchingOperand.length - 1); + rightF = { [matchingOperand]: rightF }; } leftQ = getLeftQ(selItem); - + /** - * get json path separators. Expecting -> to come first - */ - type GetSepRes = { idx: number; sep: string } | undefined + * get json path separators. Expecting -> to come first + */ + type GetSepRes = { idx: number; sep: string } | undefined; const getSep = (fromIdx = 0): GetSepRes => { - const strPart = remainingStr.slice(fromIdx) + const strPart = remainingStr.slice(fromIdx); let idx = strPart.indexOf("->"); const idxx = strPart.indexOf("->>"); - if(idx > -1) { + if (idx > -1) { /* if -> matches then check if it's the last separator */ - if(idx === idxx) return { idx: idx + fromIdx, sep: "->>" } - return { idx: idx + fromIdx, sep: "->" } + if (idx === idxx) return { idx: idx + fromIdx, sep: "->>" }; + return { idx: idx + fromIdx, sep: "->" }; } idx = strPart.indexOf("->>"); - if(idx > -1) { - return { idx: idx + fromIdx, sep: "->>" } + if (idx > -1) { + return { idx: idx + fromIdx, sep: "->>" }; } return undefined; - } - + }; let currSep = getSep(); - while(currSep){ + while (currSep) { let nextSep = getSep(currSep.idx + currSep.sep.length); - let nextIdx = nextSep? nextSep.idx : remainingStr.length; + let nextIdx = nextSep ? nextSep.idx : remainingStr.length; /* If ending in set then add set as well into key */ - if(nextSep && nextIdx + nextSep.sep.length === remainingStr.length) { + if (nextSep && nextIdx + nextSep.sep.length === remainingStr.length) { nextIdx = remainingStr.length; - nextSep = undefined; + nextSep = undefined; } - leftQ += currSep.sep + asValue(remainingStr.slice(currSep.idx + currSep.sep.length, nextIdx)); + leftQ += + currSep.sep + + asValue( + remainingStr.slice(currSep.idx + currSep.sep.length, nextIdx), + ); currSep = nextSep; } - /* + /* Is collapsed filter spec e.g. { "col.$ilike": 'text' } will transform into { col: { $ilike: ['text'] } } */ - } else if(remainingStr.startsWith(".")){ + } else if (remainingStr.startsWith(".")) { leftQ = getLeftQ(selItem); const getSep = (fromIdx = 0) => { const idx = remainingStr.slice(fromIdx).indexOf("."); - if(idx > -1) return fromIdx + idx; - return idx; - } + if (idx > -1) return fromIdx + idx; + return idx; + }; let currIdx = getSep(); const res: any = {}; let curObj = res; - while(currIdx > -1){ + while (currIdx > -1) { let nextIdx = getSep(currIdx + 1); - let nIdx = nextIdx > -1? nextIdx : remainingStr.length; + let nIdx = nextIdx > -1 ? nextIdx : remainingStr.length; /* If ending in dot then add dot as well into key */ - if(nextIdx + 1 === remainingStr.length) { + if (nextIdx + 1 === remainingStr.length) { nIdx = remainingStr.length; nextIdx = -1; } const key = remainingStr.slice(currIdx + 1, nIdx); - curObj[key] = nextIdx > -1? {} : (_f as any)[fKey]; + curObj[key] = nextIdx > -1 ? 
{} : (_f as any)[fKey]; curObj = curObj[key]; currIdx = nextIdx; } - + rightF = res; } else { // console.trace(141, select, selItem, remainingStr) - mErr("Bad filter. Could not find the valid col name or alias or col json path") + mErr( + "Bad filter. Could not find the valid col name or alias or col json path", + ); } - } else { leftQ = getLeftQ(selItem); } - if(!leftQ) mErr("Internal error: leftQ missing?!"); + if (!leftQ) mErr("Internal error: leftQ missing?!"); - const parseRightVal = (val: any, expect?: "csv" | "array" | "json" | "jsonb") => { + const parseRightVal = ( + val: any, + expect?: "csv" | "array" | "json" | "jsonb", + ) => { try { return parseFilterRightValue(val, { selectItem: selItem, expect }); - } catch(e: any){ + } catch (e: any) { return mErr(e); } - } + }; /* Matching sel item */ - if(isObject(rightF)){ - + if (isObject(rightF)) { const filterKeys = Object.keys(rightF); - let filterOperand: typeof FILTER_OPERANDS[number] = filterKeys[0] as any; + let filterOperand: (typeof FILTER_OPERANDS)[number] = filterKeys[0] as any; /** JSON cannot be compared so we'll cast it to TEXT */ - if(selItem?.column_udt_type === "json" || TextFilterKeys.includes(filterOperand as any)){ - leftQ += "::TEXT " + if ( + selItem?.column_udt_type === "json" || + TextFilterKeys.includes(filterOperand as any) + ) { + leftQ += "::TEXT "; } /** It's an object key which means it's an equality comparison against a json object */ - if(selItem?.column_udt_type?.startsWith("json") && !FILTER_OPERANDS.includes(filterOperand)){ + if ( + selItem?.column_udt_type?.startsWith("json") && + !FILTER_OPERANDS.includes(filterOperand) + ) { return leftQ + " = " + parseRightVal(rightF); - } + } let filterValue = rightF[filterOperand]; - const ALLOWED_FUNCS = [ ...GeomFilter_Funcs, ...TextFilter_FullTextSearchFilterKeys] as const; - let funcName: undefined | typeof ALLOWED_FUNCS[number]; + const ALLOWED_FUNCS = [ + ...GeomFilter_Funcs, + ...TextFilter_FullTextSearchFilterKeys, + ] as const; + let funcName: undefined | (typeof ALLOWED_FUNCS)[number]; let funcArgs: undefined | any[]; - if(selItem.column_udt_type === "interval" && isObject(rightF) && Object.values(rightF).every(v => Number.isFinite(v))){ + if ( + selItem.column_udt_type === "interval" && + isObject(rightF) && + Object.values(rightF).every((v) => Number.isFinite(v)) + ) { filterOperand = "="; - filterValue = Object.entries(rightF).map(([k, v]) => `${v}${k}`).join(" "); - - } else if(filterKeys.length !== 1 && selItem.column_udt_type !== "jsonb") { + filterValue = Object.entries(rightF) + .map(([k, v]) => `${v}${k}`) + .join(" "); + } else if (filterKeys.length !== 1 && selItem.column_udt_type !== "jsonb") { return mErr("Bad filter. 
Expecting one key only"); - - } else if(isObject(filterValue) && !(filterValue instanceof Date)){ - + } else if (isObject(filterValue) && !(filterValue instanceof Date)) { /** * Filter notation * geom && st_makeenvelope(funcArgs) */ const filterValueKeys = Object.keys(filterValue); funcName = filterValueKeys[0] as any; - if(ALLOWED_FUNCS.includes(funcName as any)){ + if (ALLOWED_FUNCS.includes(funcName as any)) { funcArgs = filterValue[funcName as any]; } else { funcName = undefined; } - } + } /** st_makeenvelope */ - if(GeomFilterKeys.includes(filterOperand as any) && funcName && GeomFilter_Funcs.includes(funcName as any)){ - - /** + if ( + GeomFilterKeys.includes(filterOperand as any) && + funcName && + GeomFilter_Funcs.includes(funcName as any) + ) { + /** * If leftQ is geography then: - * - err can happen: 'Antipodal (180 degrees long) edge detected!' + * - err can happen: 'Antipodal (180 degrees long) edge detected!' * - inaccurate results at large envelopes due to the curvature of the earth * https://gis.stackexchange.com/questions/78816/maximum-size-on-the-bounding-box-with-st-makeenvelope-and-and-geography-colum - */ - if(funcName.toLowerCase() === "st_makeenvelope") { + */ + if (funcName.toLowerCase() === "st_makeenvelope") { leftQ += "::geometry"; } return `${leftQ} ${filterOperand} ${funcName}${parseRightVal(funcArgs, "csv")}`; - - } else if(["=", "$eq"].includes(filterOperand) && !funcName){ - if(filterValue === null) return leftQ + " IS NULL "; + } else if (["=", "$eq"].includes(filterOperand) && !funcName) { + if (filterValue === null) return leftQ + " IS NULL "; return leftQ + " = " + parseRightVal(filterValue); - - } else if(["<>", "$ne"].includes(filterOperand)){ - if(filterValue === null) return leftQ + " IS NOT NULL "; + } else if (["<>", "$ne"].includes(filterOperand)) { + if (filterValue === null) return leftQ + " IS NOT NULL "; return leftQ + " <> " + parseRightVal(filterValue); - - } else if([">", "$gt"].includes(filterOperand)){ + } else if ([">", "$gt"].includes(filterOperand)) { return leftQ + " > " + parseRightVal(filterValue); - - } else if(["<", "$lt"].includes(filterOperand)){ + } else if (["<", "$lt"].includes(filterOperand)) { return leftQ + " < " + parseRightVal(filterValue); - - } else if([">=", "$gte"].includes(filterOperand)){ + } else if ([">=", "$gte"].includes(filterOperand)) { return leftQ + " >= " + parseRightVal(filterValue); - - } else if(["<=", "$lte"].includes(filterOperand)){ + } else if (["<=", "$lte"].includes(filterOperand)) { return leftQ + " <= " + parseRightVal(filterValue); - - } else if(["$in"].includes(filterOperand)){ - if(!filterValue?.length) { + } else if (["$in"].includes(filterOperand)) { + if (!filterValue?.length) { return " FALSE "; } - const filterNonNullValues: any[] = filterValue.filter((v: any) => v !== null); - let c1 = "", c2 = ""; - if(filterNonNullValues.length) { + const filterNonNullValues: any[] = filterValue.filter( + (v: any) => v !== null, + ); + let c1 = "", + c2 = ""; + if (filterNonNullValues.length) { c1 = leftQ + " IN " + parseRightVal(filterNonNullValues, "csv"); } - if(filterValue.includes(null)) { + if (filterValue.includes(null)) { c2 = ` ${leftQ} IS NULL `; } - return [c1, c2].filter(c => c).join(" OR "); - - } else if(["$nin"].includes(filterOperand)){ - if(!filterValue?.length) { + return [c1, c2].filter((c) => c).join(" OR "); + } else if (["$nin"].includes(filterOperand)) { + if (!filterValue?.length) { return " TRUE "; } - const nonNullFilterValues: any[] = filterValue.filter((v: any) => v !== 
null); - let c1 = "", c2 = ""; - if(nonNullFilterValues.length) c1 = leftQ + " NOT IN " + parseRightVal(nonNullFilterValues, "csv"); - if(filterValue.includes(null)) c2 = ` ${leftQ} IS NOT NULL `; - return [c1, c2].filter(c => c).join(" AND "); - - } else if(["$between"].includes(filterOperand)){ - if(!Array.isArray(filterValue) || filterValue.length !== 2){ + const nonNullFilterValues: any[] = filterValue.filter( + (v: any) => v !== null, + ); + let c1 = "", + c2 = ""; + if (nonNullFilterValues.length) + c1 = leftQ + " NOT IN " + parseRightVal(nonNullFilterValues, "csv"); + if (filterValue.includes(null)) c2 = ` ${leftQ} IS NOT NULL `; + return [c1, c2].filter((c) => c).join(" AND "); + } else if (["$between"].includes(filterOperand)) { + if (!Array.isArray(filterValue) || filterValue.length !== 2) { return mErr("Between filter expects an array of two values"); } - return leftQ + " BETWEEN " + asValue(filterValue[0]) + " AND " + asValue(filterValue[1]); - - } else if(["$ilike"].includes(filterOperand)){ + return ( + leftQ + + " BETWEEN " + + asValue(filterValue[0]) + + " AND " + + asValue(filterValue[1]) + ); + } else if (["$ilike"].includes(filterOperand)) { return leftQ + " ILIKE " + asValue(filterValue); - - } else if(["$like"].includes(filterOperand)){ + } else if (["$like"].includes(filterOperand)) { return leftQ + " LIKE " + asValue(filterValue); - - } else if(["$nilike"].includes(filterOperand)){ + } else if (["$nilike"].includes(filterOperand)) { return leftQ + " NOT ILIKE " + asValue(filterValue); - - } else if(["$nlike"].includes(filterOperand)){ + } else if (["$nlike"].includes(filterOperand)) { return leftQ + " NOT LIKE " + asValue(filterValue); - - } else if(filterOperand === "$isDistinctFrom" || filterOperand === "$isNotDistinctFrom"){ + } else if ( + filterOperand === "$isDistinctFrom" || + filterOperand === "$isNotDistinctFrom" + ) { const operator = FILTER_OPERAND_TO_SQL_OPERAND[filterOperand]; return leftQ + ` ${operator} ` + asValue(filterValue); - /* MAYBE TEXT OR MAYBE ARRAY */ - } else if(["@>", "<@", "$contains", "$containedBy", "$overlaps", "&&", "@@"].includes(filterOperand)){ - const operand = filterOperand === "@@"? "@@": - ["@>", "$contains"].includes(filterOperand)? "@>" : - ["&&", "$overlaps"].includes(filterOperand)? "&&" : - "<@"; + /* MAYBE TEXT OR MAYBE ARRAY */ + } else if ( + [ + "@>", + "<@", + "$contains", + "$containedBy", + "$overlaps", + "&&", + "@@", + ].includes(filterOperand) + ) { + const operand = + filterOperand === "@@" + ? "@@" + : ["@>", "$contains"].includes(filterOperand) + ? "@>" + : ["&&", "$overlaps"].includes(filterOperand) + ? "&&" + : "<@"; /* Array for sure */ - if(Array.isArray(filterValue)){ + if (Array.isArray(filterValue)) { return leftQ + operand + parseRightVal(filterValue, "array"); - - /* FTSQuery */ - } else if(["@@"].includes(filterOperand) && TextFilter_FullTextSearchFilterKeys.includes(funcName! as any)) { + + /* FTSQuery */ + } else if ( + ["@@"].includes(filterOperand) && + TextFilter_FullTextSearchFilterKeys.includes(funcName! 
as any) + ) { let lq = `to_tsvector(${leftQ}::text)`; - if(selItem && selItem.columnPGDataType === "tsvector") lq = leftQ!; + if (selItem && selItem.columnPGDataType === "tsvector") lq = leftQ!; - const res = `${lq} ${operand} ` + `${funcName}${parseRightVal(funcArgs, "csv")}`; + const res = + `${lq} ${operand} ` + `${funcName}${parseRightVal(funcArgs, "csv")}`; return res; } else { return mErr("Unrecognised filter operand: " + filterOperand + " "); } - } else { - return mErr("Unrecognised filter operand: " + filterOperand + " "); } - - } else { - /* Is an equal filter */ - if(rightF === null){ + if (rightF === null) { return leftQ + " IS NULL "; } else { - /** * Ensure that when comparing an array to a json column, the array is cast to json */ let valueStr = asValue(rightF); - if(selItem?.column_udt_type?.startsWith("json") && Array.isArray(rightF)){ + if ( + selItem?.column_udt_type?.startsWith("json") && + Array.isArray(rightF) + ) { valueStr = pgp.as.format(`$1::jsonb`, [JSON.stringify(rightF)]); - } + } return `${leftQ} = ${valueStr}`; } } -} +}; type ParseRightValOpts = { expect?: "csv" | "array" | "json" | "jsonb"; selectItem: SelectItem | undefined; -} -export const parseFilterRightValue = (val: any, { expect, selectItem }: ParseRightValOpts) => { +}; +export const parseFilterRightValue = ( + val: any, + { expect, selectItem }: ParseRightValOpts, +) => { const asValue = (v: any) => pgp.as.format("$1", [v]); const checkIfArr = () => { - if(!Array.isArray(val)) { + if (!Array.isArray(val)) { throw "This type of filter/column expects an Array of items"; } - } - if(expect === "csv" || expect?.startsWith("json")){ + }; + if (expect === "csv" || expect?.startsWith("json")) { checkIfArr(); return pgp.as.format(`($1:${expect})`, [val]); - - } else if(expect === "array" || selectItem?.columnPGDataType === "ARRAY"){ - checkIfArr(); + } else if (expect === "array" || selectItem?.columnPGDataType === "ARRAY") { + checkIfArr(); return pgp.as.format(" ARRAY[$1:csv]", [val]); - } return asValue(val); -} +}; // ensure pgp is not NULL!!! 
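For context on the filter parsing reformatted above, here is a minimal, hypothetical usage sketch of the `FILTER_OPERAND_TO_SQL_OPERAND` map exported by `lib/Filtering.ts`. It is not part of this patch; the import path and the example filter are illustrative assumptions, but the lookups match the mappings built earlier in the file:

```typescript
// Hypothetical sketch (not in this diff): the exported map resolves
// client-side filter keys to their SQL operators.
import { FILTER_OPERAND_TO_SQL_OPERAND } from "./Filtering";

console.log(FILTER_OPERAND_TO_SQL_OPERAND["$eq"]); // "="
console.log(FILTER_OPERAND_TO_SQL_OPERAND["$nin"]); // "NOT IN"
console.log(FILTER_OPERAND_TO_SQL_OPERAND["$isDistinctFrom"]); // "IS DISTINCT FROM"

// parseFilterItem (above) applies these mappings when rendering a filter such
// as { status: { $nin: ["archived", null] } }; note its special null handling,
// which yields: status NOT IN ('archived') AND status IS NOT NULL
```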
// const asValue = v => v;// pgp.as.value; // const filters: FilterSpec[] = [ -// ...(["ilike", "like"].map(op => ({ +// ...(["ilike", "like"].map(op => ({ // operands: ["$" + op], // tsDataTypes: ["any"] as TSDataType[], // tsDefinition: ` { $${op}: string } `, -// // data_types: +// // data_types: // getQuery: (leftQuery: string, rightVal: any) => { -// return `${leftQuery}::text ${op.toUpperCase()} ${asValue(rightVal)}::text` +// return `${leftQuery}::text ${op.toUpperCase()} ${asValue(rightVal)}::text` // } // }))), -// { +// { // operands: ["", "="], // tsDataTypes: ["any"], // tsDefinition: ` { "=": any } | any `, -// // data_types: +// // data_types: // getQuery: (leftQuery: string, rightVal: any) => { // if(rightVal === null) return`${leftQuery} IS NULL `; // return `${leftQuery} = ${asValue(rightVal)}`; // } // } -// ]; \ No newline at end of file +// ]; diff --git a/lib/JSONBValidation/validate_jsonb_schema_sql.ts b/lib/JSONBValidation/validate_jsonb_schema_sql.ts index 0648e29f..115cbcaf 100644 --- a/lib/JSONBValidation/validate_jsonb_schema_sql.ts +++ b/lib/JSONBValidation/validate_jsonb_schema_sql.ts @@ -1,20 +1,16 @@ import { DATA_TYPES } from "prostgles-types"; import { PubSubManager } from "../PubSubManager/PubSubManager"; - const raiseException = (err: string) => ` IF (context->'silent')::BOOLEAN = TRUE THEN RETURN FALSE; ELSE RAISE EXCEPTION ${err} USING HINT = path, COLUMN = colname, TABLE = tablename, CONSTRAINT = 'validate_jsonb_schema: ' || jsonb_pretty(jsonb_schema::JSONB); END IF; -` +`; export const VALIDATE_SCHEMA_FUNCNAME = "validate_jsonb_schema" as const; -export const JSONB_DATA_TYPES = [ - ...DATA_TYPES, - "Lookup","Lookup[]" -] as const; +export const JSONB_DATA_TYPES = [...DATA_TYPES, "Lookup", "Lookup[]"] as const; export const validate_jsonb_schema_sql = ` @@ -499,4 +495,3 @@ SELECT ${VALIDATE_SCHEMA_FUNCNAME}('{ "type": "Date"}', '"2222-22-22"'); SELECT ${VALIDATE_SCHEMA_FUNCNAME}('{ "oneOf": ["number"]}','2'); `; - \ No newline at end of file diff --git a/lib/JSONBValidation/validation.ts b/lib/JSONBValidation/validation.ts index d3c81aae..5279c68f 100644 --- a/lib/JSONBValidation/validation.ts +++ b/lib/JSONBValidation/validation.ts @@ -1,53 +1,81 @@ -import { getKeys, isEmpty, isObject, JSONB, TableSchema } from "prostgles-types"; +import { + getKeys, + isEmpty, + isObject, + JSONB, + TableSchema, +} from "prostgles-types"; import { postgresToTsType } from "../DboBuilder/DboBuilder"; import { asValue } from "../PubSubManager/PubSubManager"; - - const getFieldTypeObj = (rawFieldType: JSONB.FieldType): JSONB.FieldTypeObj => { - if(typeof rawFieldType === "string") return { type: rawFieldType }; + if (typeof rawFieldType === "string") return { type: rawFieldType }; return rawFieldType; -} +}; -export function validate(obj: T, key: keyof T, rawFieldType: JSONB.FieldType): boolean { +export function validate( + obj: T, + key: keyof T, + rawFieldType: JSONB.FieldType, +): boolean { let err = `The provided value for ${JSON.stringify(key)} is of invalid type. Expecting `; const val = obj[key]; const fieldType = getFieldTypeObj(rawFieldType); if ("type" in fieldType && fieldType.type) { if (typeof fieldType.type !== "string") { - getKeys(fieldType.type).forEach(subKey => { - validate(val, subKey as any, (fieldType.type as JSONB.ObjectType["type"])[subKey]!) 
+ getKeys(fieldType.type).forEach((subKey) => { + validate( + val, + subKey as any, + (fieldType.type as JSONB.ObjectType["type"])[subKey]!, + ); }); } err += fieldType.type; - if (fieldType.type === "boolean" && typeof val !== fieldType.type) throw new Error(err) - if (fieldType.type === "string" && typeof val !== fieldType.type) throw new Error(err) - if (fieldType.type === "number" && !Number.isFinite(val)) throw new Error(err) - if (fieldType.type === "integer" && !Number.isInteger(val)) throw new Error(err); - + if (fieldType.type === "boolean" && typeof val !== fieldType.type) + throw new Error(err); + if (fieldType.type === "string" && typeof val !== fieldType.type) + throw new Error(err); + if (fieldType.type === "number" && !Number.isFinite(val)) + throw new Error(err); + if (fieldType.type === "integer" && !Number.isInteger(val)) + throw new Error(err); } else if (fieldType.enum) { err += `one of: ${fieldType.enum}`; - if (!fieldType.enum.includes(val)) throw new Error(err) + if (!fieldType.enum.includes(val)) throw new Error(err); } - return true + return true; } -export function validateSchema(schema: S, obj: JSONB.GetObjectType, objName?: string, optional = false) { - if ((!schema || isEmpty(schema)) && !optional) throw new Error(`Expecting ${objName} to be defined`); - getKeys(schema).forEach(k => validate(obj as any, k, schema[k]!)); +export function validateSchema( + schema: S, + obj: JSONB.GetObjectType, + objName?: string, + optional = false, +) { + if ((!schema || isEmpty(schema)) && !optional) + throw new Error(`Expecting ${objName} to be defined`); + getKeys(schema).forEach((k) => validate(obj as any, k, schema[k]!)); } - -type ColOpts = { nullable?: boolean }; - - -export function getJSONBSchemaTSTypes(schema: JSONB.JSONBSchema, colOpts: ColOpts, outerLeading = "", tables: TableSchema[]): string { - - const getFieldType = (rawFieldType: JSONB.FieldType, isOneOf = false, innerLeading = "", depth = 0): string => { +type ColOpts = { nullable?: boolean }; + +export function getJSONBSchemaTSTypes( + schema: JSONB.JSONBSchema, + colOpts: ColOpts, + outerLeading = "", + tables: TableSchema[], +): string { + const getFieldType = ( + rawFieldType: JSONB.FieldType, + isOneOf = false, + innerLeading = "", + depth = 0, + ): string => { const fieldType = getFieldTypeObj(rawFieldType); - const nullType = (fieldType.nullable ? `null | ` : ""); - + const nullType = fieldType.nullable ? `null | ` : ""; + /** Primitives */ if (typeof fieldType?.type === "string") { const correctType = fieldType.type @@ -55,89 +83,126 @@ export function getJSONBSchemaTSTypes(schema: JSONB.JSONBSchema, colOpts: ColOpt .replace("time", "string") .replace("timestamp", "string") .replace("Date", "string"); - + if (fieldType.allowedValues && fieldType.type.endsWith("[]")) { - return nullType + ` (${fieldType.allowedValues.map(v => JSON.stringify(v)).join(" | ")})[]` + return ( + nullType + + ` (${fieldType.allowedValues.map((v) => JSON.stringify(v)).join(" | ")})[]` + ); } return nullType + correctType; - /** Object */ + /** Object */ } else if (isObject(fieldType.type)) { - const addSemicolonIfMissing = (v: string) => v.trim().endsWith(";")? v : (v.trim() + ";"); + const addSemicolonIfMissing = (v: string) => + v.trim().endsWith(";") ? v : v.trim() + ";"; const { type } = fieldType; const spacing = isOneOf ? " " : " "; - let objDef = ` {${spacing}` + getKeys(type).map(key => { - const fieldType = getFieldTypeObj(type[key]!); - const escapedKey = isValidIdentifier(key) ? 
key : JSON.stringify(key); - return `${spacing}${escapedKey}${fieldType.optional ? "?" : ""}: ` + addSemicolonIfMissing(getFieldType(fieldType, true, undefined, depth + 1)); - }).join(" ") + `${spacing}}`; - if(!isOneOf){ + let objDef = + ` {${spacing}` + + getKeys(type) + .map((key) => { + const fieldType = getFieldTypeObj(type[key]!); + const escapedKey = isValidIdentifier(key) + ? key + : JSON.stringify(key); + return ( + `${spacing}${escapedKey}${fieldType.optional ? "?" : ""}: ` + + addSemicolonIfMissing( + getFieldType(fieldType, true, undefined, depth + 1), + ) + ); + }) + .join(" ") + + `${spacing}}`; + if (!isOneOf) { objDef = addSemicolonIfMissing(objDef); } - + /** Keep single line */ - if (isOneOf){ + if (isOneOf) { objDef = objDef.split("\n").join(""); } return nullType + objDef; - } else if (fieldType?.enum) { - return nullType + fieldType.enum.map(v => asValue(v)).join(" | "); - + return nullType + fieldType.enum.map((v) => asValue(v)).join(" | "); } else if (fieldType?.oneOf || fieldType?.oneOfType) { - const oneOf = fieldType?.oneOf || fieldType?.oneOfType.map(type => ({ type })); - return (fieldType.nullable ? `\n${innerLeading} | null` : "") + oneOf.map(v => `\n${innerLeading} | ` + getFieldType(v, true, undefined, depth + 1)).join(""); - + const oneOf = + fieldType?.oneOf || fieldType?.oneOfType.map((type) => ({ type })); + return ( + (fieldType.nullable ? `\n${innerLeading} | null` : "") + + oneOf + .map( + (v) => + `\n${innerLeading} | ` + + getFieldType(v, true, undefined, depth + 1), + ) + .join("") + ); } else if (fieldType?.arrayOf || fieldType?.arrayOfType) { const arrayOf = fieldType?.arrayOf || { type: fieldType?.arrayOfType }; return `${fieldType.nullable ? `null | ` : ""} ( ${getFieldType(arrayOf, true, undefined, depth + 1)} )[]`; - } else if (fieldType?.record) { const { keysEnum, values, partial } = fieldType.record; // TODO: ensure props with undefined values are not allowed in the TS type (strict union) - const getRecord = (v: string) => partial? `Partial>` : `Record<${v}>`; - return `${fieldType.nullable ? `null |` : ""} ${getRecord(`${keysEnum?.map(v => asValue(v)).join(" | ") ?? "string"}, ${!values? "any" : getFieldType(values, true, undefined, depth + 1)}`)}` - - } else if(fieldType?.lookup){ - + const getRecord = (v: string) => + partial ? `Partial>` : `Record<${v}>`; + return `${fieldType.nullable ? `null |` : ""} ${getRecord(`${keysEnum?.map((v) => asValue(v)).join(" | ") ?? "string"}, ${!values ? "any" : getFieldType(values, true, undefined, depth + 1)}`)}`; + } else if (fieldType?.lookup) { const l = fieldType.lookup; - if(l.type === "data-def"){ - return `${fieldType.nullable ? `null |` : ""} ${ - getFieldType({ - type: { - table: "string", - column: "string", - filter: { record: {}, optional: true }, - isArray: { type: "boolean", optional: true }, - searchColumns: { type: "string[]", optional: true }, - isFullRow: { optional: true, type: { - displayColumns:{ type: "string[]", optional: true } - }}, - showInRowCard: { optional: true, record: {} } - } - }) - }`; - } - + if (l.type === "data-def") { + return `${fieldType.nullable ? 
`null |` : ""} ${getFieldType({ + type: { + table: "string", + column: "string", + filter: { record: {}, optional: true }, + isArray: { type: "boolean", optional: true }, + searchColumns: { type: "string[]", optional: true }, + isFullRow: { + optional: true, + type: { + displayColumns: { type: "string[]", optional: true }, + }, + }, + showInRowCard: { optional: true, record: {} }, + }, + })}`; + } + const isSChema = l.type === "schema"; - let type = isSChema? (l.object === "table"? "string" : `{ "table": string; "column": string; }`) : ""; - if(!isSChema){ - const cols = tables.find(t => t.name === l.table)?.columns - if(!l.isFullRow){ - type = postgresToTsType(cols?.find(c => c.name === l.column)?.udt_name ?? "text"); + let type = isSChema + ? l.object === "table" + ? "string" + : `{ "table": string; "column": string; }` + : ""; + if (!isSChema) { + const cols = tables.find((t) => t.name === l.table)?.columns; + if (!l.isFullRow) { + type = postgresToTsType( + cols?.find((c) => c.name === l.column)?.udt_name ?? "text", + ); } else { - type = !cols? "any" : `{ ${cols.map(c => `${JSON.stringify(c.name)}: ${c.is_nullable? "null | " : "" } ${postgresToTsType(c.udt_name)}; `).join(" ")} }` + type = !cols + ? "any" + : `{ ${cols.map((c) => `${JSON.stringify(c.name)}: ${c.is_nullable ? "null | " : ""} ${postgresToTsType(c.udt_name)}; `).join(" ")} }`; } } - return `${fieldType.nullable ? `null | ` : ""}${type}${l.isArray? "[]" : ""}`; - - } else throw "Unexpected getSchemaTSTypes: " + JSON.stringify({ fieldType, schema }, null, 2) - } - - return getFieldType({ ...schema as any, nullable: colOpts.nullable }, undefined, outerLeading); + return `${fieldType.nullable ? `null | ` : ""}${type}${l.isArray ? "[]" : ""}`; + } else + throw ( + "Unexpected getSchemaTSTypes: " + + JSON.stringify({ fieldType, schema }, null, 2) + ); + }; + + return getFieldType( + { ...(schema as any), nullable: colOpts.nullable }, + undefined, + outerLeading, + ); } const isValidIdentifier = (str: string) => { const identifierRegex = /^[A-Za-z$_][A-Za-z0-9$_]*$/; return identifierRegex.test(str); -} +}; diff --git a/lib/Logging.ts b/lib/Logging.ts index 8bb805ed..0791f251 100644 --- a/lib/Logging.ts +++ b/lib/Logging.ts @@ -5,127 +5,137 @@ import { TableHandler } from "./DboBuilder/TableHandler/TableHandler"; type ClientInfo = { socketId: string | undefined; sid: string | undefined; -} +}; export namespace EventTypes { type DebugInfo = { duration: number; error?: any; - } - - export type Table = ClientInfo & DebugInfo & { - type: "table"; - tableName: string; - command: keyof TableHandler; - txInfo: AnyObject | undefined; - data: AnyObject; - localParams: LocalParams | undefined; }; - type SyncOneClient = ClientInfo & DebugInfo & { - type: "sync"; - tableName: string; - localParams?: LocalParams; - connectedSocketIds: string[]; - } & ( - { - command: "syncData"; - source: "client" | "trigger"; - connectedSocketIds: string[]; - lr: string; - } | { - command: "upsertData" | "pushData"; - rows: number; - socketId: string; - } | { - command: "addTrigger"; - state: "ok" | "fail"; - /** If no socket id then it's a local subscribe */ - socketId: string | undefined; - condition: string; - } | { - command: "addSync" | "unsync"; - socketId: string; - condition: string; - } | { - command: "upsertSocket.disconnect"; - socketId: string; - remainingSyncs: string; - remainingSubs: string; + export type Table = ClientInfo & + DebugInfo & { + type: "table"; + tableName: string; + command: keyof TableHandler; + txInfo: AnyObject | undefined; 
+ data: AnyObject; + localParams: LocalParams | undefined; + }; + + type SyncOneClient = ClientInfo & + DebugInfo & { + type: "sync"; + tableName: string; + localParams?: LocalParams; connectedSocketIds: string[]; - } - ); + } & ( + | { + command: "syncData"; + source: "client" | "trigger"; + connectedSocketIds: string[]; + lr: string; + } + | { + command: "upsertData" | "pushData"; + rows: number; + socketId: string; + } + | { + command: "addTrigger"; + state: "ok" | "fail"; + /** If no socket id then it's a local subscribe */ + socketId: string | undefined; + condition: string; + } + | { + command: "addSync" | "unsync"; + socketId: string; + condition: string; + } + | { + command: "upsertSocket.disconnect"; + socketId: string; + remainingSyncs: string; + remainingSubs: string; + connectedSocketIds: string[]; + } + ); export type SyncMultiClient = { type: "sync"; tableName: string; localParams?: LocalParams; connectedSocketIds: string[]; - } & ( - { - command: "notifListener"; - op_name: string | undefined; - condition_ids_str: string | undefined; - tableTriggers: string[] | undefined; - tableSyncs: string; - state: "ok" | "error" | "no-triggers" | "invalid_condition_ids"; - } - ); + } & { + command: "notifListener"; + op_name: string | undefined; + condition_ids_str: string | undefined; + tableTriggers: string[] | undefined; + tableSyncs: string; + state: "ok" | "error" | "no-triggers" | "invalid_condition_ids"; + }; export type Sync = SyncOneClient | SyncMultiClient; - export type Connection = (ClientInfo & { - type: "connect" | "disconnect"; - socketId: string; - connectedSocketIds: string[]; - }) | (ClientInfo & DebugInfo & { - type: "connect.getClientSchema"; - }); + export type Connection = + | (ClientInfo & { + type: "connect" | "disconnect"; + socketId: string; + connectedSocketIds: string[]; + }) + | (ClientInfo & + DebugInfo & { + type: "connect.getClientSchema"; + }); - export type Method = ClientInfo & DebugInfo & { - type: "method"; - args: any[]; - localParams: LocalParams; - }; - export type Auth = ClientInfo & DebugInfo & { - type: "auth"; - } & ( - | { command: "getClientInfo"; } - | { command: "login"; } - ); + export type Method = ClientInfo & + DebugInfo & { + type: "method"; + args: any[]; + localParams: LocalParams; + }; + export type Auth = ClientInfo & + DebugInfo & { + type: "auth"; + } & ({ command: "getClientInfo" } | { command: "login" }); - export type Debug = DebugInfo & ({ - type: "debug"; - command: - | "initFileTable" - | "initTableConfig" - | "runSQLFile" - | "schemaChangeNotif" - | "TableConfig.runQueries.start" - | "TableConfig.runQueries.end" - | "refreshDBO.start" - | "refreshDBO.end" - | "tableConfigurator.init.start" - | "tableConfigurator.init.end" - | "initFileTable.start" - | "initFileTable.end" - | "initFileManager.runQuery" - | "DboBuilder.getTablesForSchemaPostgresSQL" - | "PubSubManager.create" - data?: AnyObject; - } | { - type: "debug"; - command: "pushSocketSchema"; - data: { socketId: string, clientSchema: ClientSchema; } - }) + export type Debug = DebugInfo & + ( + | { + type: "debug"; + command: + | "initFileTable" + | "initTableConfig" + | "runSQLFile" + | "schemaChangeNotif" + | "TableConfig.runQueries.start" + | "TableConfig.runQueries.end" + | "refreshDBO.start" + | "refreshDBO.end" + | "tableConfigurator.init.start" + | "tableConfigurator.init.end" + | "initFileTable.start" + | "initFileTable.end" + | "initFileManager.runQuery" + | "DboBuilder.getTablesForSchemaPostgresSQL" + | "PubSubManager.create"; + data?: AnyObject; + } + | { 
+ type: "debug"; + command: "pushSocketSchema"; + data: { socketId: string; clientSchema: ClientSchema }; + } + ); } export type EventInfo = | EventTypes.Auth | EventTypes.Table | EventTypes.Method - | EventTypes.Sync - | EventTypes.SyncMultiClient + | EventTypes.Sync + | EventTypes.SyncMultiClient | EventTypes.Connection | EventTypes.Debug; -export type TableEvent = EventTypes.Table; \ No newline at end of file +export type TableEvent = EventTypes.Table; diff --git a/lib/PostgresNotifListenManager.ts b/lib/PostgresNotifListenManager.ts index 69d29d02..8b4f282d 100644 --- a/lib/PostgresNotifListenManager.ts +++ b/lib/PostgresNotifListenManager.ts @@ -23,22 +23,36 @@ export class PostgresNotifListenManager { isListening: any; client?: pg.IClient; - static create = (db_pg: DB, notifListener: PrglNotifListener, db_channel_name: string): Promise => { - const res = new PostgresNotifListenManager(db_pg, notifListener, db_channel_name, true); + static create = ( + db_pg: DB, + notifListener: PrglNotifListener, + db_channel_name: string, + ): Promise => { + const res = new PostgresNotifListenManager( + db_pg, + notifListener, + db_channel_name, + true, + ); return res.init(); - } - - constructor(db_pg: DB, notifListener: PrglNotifListener, db_channel_name: string, noInit = false) { - if (!db_pg || !notifListener || !db_channel_name) throw "PostgresNotifListenManager: db_pg OR notifListener OR db_channel_name MISSING"; + }; + + constructor( + db_pg: DB, + notifListener: PrglNotifListener, + db_channel_name: string, + noInit = false, + ) { + if (!db_pg || !notifListener || !db_channel_name) + throw "PostgresNotifListenManager: db_pg OR notifListener OR db_channel_name MISSING"; this.db_pg = db_pg; this.notifListener = notifListener; this.db_channel_name = db_channel_name; - if (!noInit) this.init() + if (!noInit) this.init(); } async init(): Promise { - this.connection = undefined; this.isListening = await this.startListening(); @@ -50,12 +64,15 @@ export class PostgresNotifListenManager { } startListening() { - if (!this.db_pg || !this.notifListener) throw "PostgresNotifListenManager: db_pg OR notifListener missing"; - - return this.reconnect() - .catch(error => { - console.log('PostgresNotifListenManager: Failed Initial Connection:', error); - }); + if (!this.db_pg || !this.notifListener) + throw "PostgresNotifListenManager: db_pg OR notifListener missing"; + + return this.reconnect().catch((error) => { + console.log( + "PostgresNotifListenManager: Failed Initial Connection:", + error, + ); + }); } destroyed = false; @@ -64,75 +81,91 @@ export class PostgresNotifListenManager { await this.stopListening(); this.connection?.done(); this.connection = undefined; - } + }; stopListening = async () => { if (this.db_channel_name) { try { - await this.connection?.none('UNLISTEN $1~', [this.db_channel_name]) - await this.client?.query('UNLISTEN $1~', [this.db_channel_name]) - } catch(error){ - - } + await this.connection?.none("UNLISTEN $1~", [this.db_channel_name]); + await this.client?.query("UNLISTEN $1~", [this.db_channel_name]); + } catch (error) {} } - } + }; - reconnect(delay: number | undefined = 0, maxAttempts: number | undefined = 0) { - if (!this.db_pg || !this.notifListener) throw "db_pg OR notifListener missing"; + reconnect( + delay: number | undefined = 0, + maxAttempts: number | undefined = 0, + ) { + if (!this.db_pg || !this.notifListener) + throw "db_pg OR notifListener missing"; if (this.destroyed) { - return Promise.reject("Destroyed") + return Promise.reject("Destroyed"); } delay = 
delay > 0 ? parseInt(delay + "") : 0; maxAttempts = maxAttempts > 0 ? parseInt(maxAttempts + "") : 1; - const setListeners = (client: pg.IClient, notifListener: PrglNotifListener, db_channel_name: string) => { - client.on('notification', notifListener); - this.client = client; - if (!this.connection) throw "Connection missing"; - return this.connection.none( - `/* prostgles-server internal query used for subscriptions and schema hot reload */ \nLISTEN $1~`, db_channel_name) - .catch(error => { - console.log("PostgresNotifListenManager: unexpected error: ", error); // unlikely to ever happen - }); - }, - removeListeners = (client: pg.IClient) => { - client.removeListener('notification', this.notifListener); - }, - onConnectionLost = (err: any, e: pgPromise.ILostContext) => { - - console.log('PostgresNotifListenManager: Connectivity Problem:', err); - this.connection = undefined; // prevent use of the broken connection - removeListeners(e.client); - - this.reconnect(5000, 10) // retry 10 times, with 5-second intervals - .then(() => { - console.log('PostgresNotifListenManager: Successfully Reconnected'); - }) - .catch(() => { - // failed after 10 attempts - console.log('PostgresNotifListenManager: Connection Lost Permanently. No more retryies'); - // process.exit(); // exiting the process - }); - } + const setListeners = ( + client: pg.IClient, + notifListener: PrglNotifListener, + db_channel_name: string, + ) => { + client.on("notification", notifListener); + this.client = client; + if (!this.connection) throw "Connection missing"; + return this.connection + .none( + `/* prostgles-server internal query used for subscriptions and schema hot reload */ \nLISTEN $1~`, + db_channel_name, + ) + .catch((error) => { + console.log( + "PostgresNotifListenManager: unexpected error: ", + error, + ); // unlikely to ever happen + }); + }, + removeListeners = (client: pg.IClient) => { + client.removeListener("notification", this.notifListener); + }, + onConnectionLost = (err: any, e: pgPromise.ILostContext) => { + console.log("PostgresNotifListenManager: Connectivity Problem:", err); + this.connection = undefined; // prevent use of the broken connection + removeListeners(e.client); + + this.reconnect(5000, 10) // retry 10 times, with 5-second intervals + .then(() => { + console.log("PostgresNotifListenManager: Successfully Reconnected"); + }) + .catch(() => { + // failed after 10 attempts + console.log( + "PostgresNotifListenManager: Connection Lost Permanently. 
No more retryies", + ); + // process.exit(); // exiting the process + }); + }; return new Promise((resolve, reject) => { setTimeout(() => { - this.db_pg.connect({ direct: true, onLost: onConnectionLost }) - .then(obj => { + this.db_pg + .connect({ direct: true, onLost: onConnectionLost }) + .then((obj) => { this.connection = obj; // global connection is now available resolve(obj); - return setListeners(obj.client, this.notifListener, this.db_channel_name); + return setListeners( + obj.client, + this.notifListener, + this.db_channel_name, + ); }) - .catch(error => { + .catch((error) => { /** Database was destroyed */ - if(this.destroyed || error && error.code === "3D000") return; - console.log('PostgresNotifListenManager: Error Connecting:', error); + if (this.destroyed || (error && error.code === "3D000")) return; + console.log("PostgresNotifListenManager: Error Connecting:", error); if (--maxAttempts) { - this.reconnect(delay, maxAttempts) - .then(resolve) - .catch(reject); + this.reconnect(delay, maxAttempts).then(resolve).catch(reject); } else { reject(error); } @@ -140,4 +173,4 @@ export class PostgresNotifListenManager { }, delay); }); } -} \ No newline at end of file +} diff --git a/lib/Prostgles.ts b/lib/Prostgles.ts index ad7a7431..78879773 100644 --- a/lib/Prostgles.ts +++ b/lib/Prostgles.ts @@ -9,7 +9,10 @@ import { FileManager } from "./FileManager/FileManager"; import { SchemaWatch } from "./SchemaWatch/SchemaWatch"; import { OnInitReason, initProstgles } from "./initProstgles"; import { makeSocketError, onSocketConnected } from "./onSocketConnected"; -import { clientCanRunSqlRequest, runClientSqlRequest } from "./runClientRequest"; +import { + clientCanRunSqlRequest, + runClientSqlRequest, +} from "./runClientRequest"; import pg = require("pg-promise/typescript/pg-subset"); const { version } = require("../package.json"); @@ -27,15 +30,33 @@ import { export { DBHandlerServer }; export type PGP = pgPromise.IMain<{}, pg.IClient>; -import { CHANNELS, ClientSchema, SQLRequest, isObject, omitKeys, tryCatch } from "prostgles-types"; +import { + CHANNELS, + ClientSchema, + SQLRequest, + isObject, + omitKeys, + tryCatch, +} from "prostgles-types"; import { DBEventsManager } from "./DBEventsManager"; import { PublishParser } from "./PublishParser/PublishParser"; -export { getOrSetTransporter, sendEmail, verifySMTPConfig } from "./Auth/sendEmail"; +export { + getOrSetTransporter, + sendEmail, + verifySMTPConfig, +} from "./Auth/sendEmail"; export type DB = pgPromise.IDatabase<{}, pg.IClient>; export type DBorTx = DB | pgPromise.ITask<{}>; -export const TABLE_METHODS = ["update", "find", "findOne", "insert", "delete", "upsert"] as const; +export const TABLE_METHODS = [ + "update", + "find", + "findOne", + "insert", + "delete", + "upsert", +] as const; /* 1. 
Connect to db
@@ -146,10 +167,12 @@ export class Prostgles {
      testRulesOnConnect: 1,
    };
    const unknownParams = Object.keys(params).filter(
-      (key: string) => !Object.keys(config).includes(key)
+      (key: string) => !Object.keys(config).includes(key),
    );
    if (unknownParams.length) {
-      console.error(`Unrecognised ProstglesInitOptions params: ${unknownParams.join()}`);
+      console.error(
+        `Unrecognised ProstglesInitOptions params: ${unknownParams.join()}`,
+      );
    }

    Object.assign(this.opts, params);
@@ -168,7 +191,8 @@ export class Prostgles {
  destroyed = false;

  checkDb() {
-    if (!this.db || !this.db.connect) throw "something went wrong getting a db connection";
+    if (!this.db || !this.db.connect)
+      throw "something went wrong getting a db connection";
  }

  getTSFileName() {
@@ -206,11 +230,16 @@ export class Prostgles {
      fs.readFile(fullPath, "utf8", function (err, data) {
        if (err || force || data !== fileContent) {
          fs.writeFileSync(fullPath, fileContent);
-          console.log("Prostgles: Created typescript schema definition file: \n " + fileName);
+          console.log(
+            "Prostgles: Created typescript schema definition file: \n " +
+              fileName,
+          );
        }
      });
    } else if (force) {
-      console.error("Schema changed. tsGeneratedTypesDir needs to be set to reload server");
+      console.error(
+        "Schema changed. tsGeneratedTypesDir needs to be set to reload server",
+      );
    }
  }

@@ -283,7 +312,11 @@ export class Prostgles {
        }
      }
    });
-    await this.opts.onLog?.({ type: "debug", command: "initTableConfig", ...res });
+    await this.opts.onLog?.({
+      type: "debug",
+      command: "initTableConfig",
+      ...res,
+    });
    if (res.hasError) throw res.error;
    return res.data;
  };
@@ -297,7 +330,10 @@ export class Prostgles {
    if (!cloudClient && !localConfig)
      throw "fileTable missing param: Must provide awsS3Config OR localConfig";

-    this.fileManager = new FileManager(cloudClient || localConfig!, imageOptions);
+    this.fileManager = new FileManager(
+      cloudClient || localConfig!,
+      imageOptions,
+    );

    try {
      await this.fileManager.init(this);
@@ -332,7 +368,9 @@ export class Prostgles {
    const result = await this.db
      ?.multi(fileContent)
      .then((data) => {
-        console.log("Prostgles: SQL file executed successfuly \n -> " + filePath);
+        console.log(
+          "Prostgles: SQL file executed successfully \n -> " + filePath,
+        );
        return data;
      })
      .catch((err) => {
@@ -343,14 +381,17 @@ export class Prostgles {
        if (position && length && fileContent) {
          const startLine = Math.max(
              0,
-              fileContent.substring(0, position).split("\n").length - 2
+              fileContent.substring(0, position).split("\n").length - 2,
            ),
            endLine = startLine + 3;

          errMsg += "\n\n";
          errMsg += lines
            .slice(startLine, endLine)
-            .map((txt, i) => `${startLine + i + 1} ${i === 1 ? "->" : " "} ${txt}`)
+            .map(
+              (txt, i) =>
+                `${startLine + i + 1} ${i === 1 ? 
"->" : " "} ${txt}`, + ) .join("\n"); errMsg += "\n\n"; } @@ -377,7 +418,7 @@ export class Prostgles { this.opts.publishRawSQL, this.dbo, this.db!, - this + this, ); this.publishParser = publishParser; @@ -396,33 +437,49 @@ export class Prostgles { this.opts.io.removeAllListeners("connection"); this.opts.io.on("connection", this.onSocketConnected); /** In some cases io will re-init with already connected sockets */ - this.opts.io?.sockets.sockets.forEach((socket) => this.onSocketConnected(socket)); + this.opts.io?.sockets.sockets.forEach((socket) => + this.onSocketConnected(socket), + ); } onSocketConnected = onSocketConnected.bind(this); - getClientSchema = async (clientReq: Pick) => { + getClientSchema = async ( + clientReq: Pick, + ) => { const result = await tryCatch(async () => { - const clientInfo = - clientReq.socket ? { type: "socket" as const, socket: clientReq.socket } - : clientReq.httpReq ? { type: "http" as const, httpReq: clientReq.httpReq } - : undefined; + const clientInfo = clientReq.socket + ? { type: "socket" as const, socket: clientReq.socket } + : clientReq.httpReq + ? { type: "http" as const, httpReq: clientReq.httpReq } + : undefined; if (!clientInfo) throw "Invalid client"; if (!this.authHandler) throw "this.authHandler missing"; const userData = await this.authHandler.getClientInfo(clientInfo); const { publishParser } = this; - let fullSchema: Awaited> | undefined; + let fullSchema: + | Awaited> + | undefined; let publishValidationError; try { if (!publishParser) throw "publishParser undefined"; - fullSchema = await publishParser.getSchemaFromPublish({ ...clientInfo, userData }); + fullSchema = await publishParser.getSchemaFromPublish({ + ...clientInfo, + userData, + }); } catch (e) { publishValidationError = e; - console.error(`\nProstgles Publish validation failed (after socket connected):\n ->`, e); + console.error( + `\nProstgles Publish validation failed (after socket connected):\n ->`, + e, + ); } let rawSQL = false; - if (this.opts.publishRawSQL && typeof this.opts.publishRawSQL === "function") { + if ( + this.opts.publishRawSQL && + typeof this.opts.publishRawSQL === "function" + ) { const { allowed } = await clientCanRunSqlRequest.bind(this)(clientInfo); rawSQL = allowed; } @@ -436,7 +493,10 @@ export class Prostgles { if (this.opts.joins) { const _joinTables2 = this.dboBuilder .getAllJoinPaths() - .filter((jp) => ![jp.t1, jp.t2].find((t) => !schema[t] || !schema[t]?.findOne)) + .filter( + (jp) => + ![jp.t1, jp.t2].find((t) => !schema[t] || !schema[t]?.findOne), + ) .map((jp) => [jp.t1, jp.t2].sort()); _joinTables2.map((jt) => { if (!joinTables2.find((_jt) => _jt.join() === jt.join())) { @@ -445,11 +505,13 @@ export class Prostgles { }); } - const methods = await publishParser?.getAllowedMethods(clientInfo, userData); + const methods = await publishParser?.getAllowedMethods( + clientInfo, + userData, + ); - const methodSchema: ClientSchema["methods"] = - !methods ? - [] + const methodSchema: ClientSchema["methods"] = !methods + ? [] : Object.entries(methods) .map(([methodName, method]) => { if (isObject(method) && "run" in method) { @@ -477,7 +539,9 @@ export class Prostgles { tableSchemaErrors, auth, version, - err: publishValidationError ? "Server Error: User publish validation failed." : undefined, + err: publishValidationError + ? "Server Error: User publish validation failed." + : undefined, }; return { @@ -486,7 +550,8 @@ export class Prostgles { userData, }; }); - const sid = result.userData?.sid ?? 
this.authHandler?.getSIDNoError(clientReq); + const sid = + result.userData?.sid ?? this.authHandler?.getSIDNoError(clientReq); await this.opts.onLog?.({ type: "connect.getClientSchema", duration: result.duration, @@ -510,17 +575,23 @@ export class Prostgles { { query, params, options }: SQLRequest, cb = (..._callback: any) => { /* Empty */ - } + }, ) => { runClientSqlRequest - .bind(this)({ type: "socket", socket, query, args: params, options }) + .bind(this)({ + type: "socket", + socket, + query, + args: params, + options, + }) .then((res) => { cb(null, res); }) .catch((err) => { makeSocketError(cb, err); }); - } + }, ); } await this.dboBuilder.prostgles.opts.onLog?.({ diff --git a/lib/ProstglesTypes.ts b/lib/ProstglesTypes.ts index 01c880a2..e1ed67b6 100644 --- a/lib/ProstglesTypes.ts +++ b/lib/ProstglesTypes.ts @@ -1,7 +1,11 @@ import { FileColumnConfig } from "prostgles-types"; import { Auth, AuthRequestParams, SessionUser } from "./Auth/AuthTypes"; import { EventTriggerTagFilter } from "./Event_Trigger_Tags"; -import { CloudClient, ImageOptions, LocalConfig } from "./FileManager/FileManager"; +import { + CloudClient, + ImageOptions, + LocalConfig, +} from "./FileManager/FileManager"; import { DbConnection, OnReadyCallback } from "./initProstgles"; import { EventInfo } from "./Logging"; import { ExpressApp, RestApiConfig } from "./RestApi"; @@ -16,7 +20,11 @@ import pg from "pg-promise/typescript/pg-subset"; import { AnyObject } from "prostgles-types"; import type { Server } from "socket.io"; import { DB } from "./Prostgles"; -import { Publish, PublishMethods, PublishParams } from "./PublishParser/PublishParser"; +import { + Publish, + PublishMethods, + PublishParams, +} from "./PublishParser/PublishParser"; /** * Allows uploading and downloading files. 
@@ -94,7 +102,12 @@ export type FileTableConfig = { localConfig?: LocalConfig; }; -export const JOIN_TYPES = ["one-many", "many-one", "one-one", "many-many"] as const; +export const JOIN_TYPES = [ + "one-many", + "many-one", + "one-one", + "many-many", +] as const; export type Join = { tables: [string, string]; on: { [key: string]: string }[]; // Allow multi references to table @@ -102,7 +115,10 @@ export type Join = { }; type Joins = Join[] | "inferred"; -export type ProstglesInitOptions = { +export type ProstglesInitOptions< + S = void, + SUser extends SessionUser = SessionUser, +> = { /** * Database connection details and options */ @@ -171,7 +187,9 @@ export type ProstglesInitOptions): (boolean | "*") | Promise; + publishRawSQL?( + params: PublishParams, + ): (boolean | "*") | Promise; /** * Server-side functions that can be invoked by the client @@ -218,14 +236,14 @@ export type ProstglesInitOptions & { socket: PRGLIOSocket } + args: AuthRequestParams & { socket: PRGLIOSocket }, ) => void | Promise; /** * Called when a socket disconnects */ onSocketDisconnect?: ( - args: AuthRequestParams & { socket: PRGLIOSocket } + args: AuthRequestParams & { socket: PRGLIOSocket }, ) => void | Promise; /** @@ -263,7 +281,11 @@ export type ProstglesInitOptions Promise; }) => void; diff --git a/lib/PubSubManager/PubSubManager.ts b/lib/PubSubManager/PubSubManager.ts index ab63d220..58a1ec2b 100644 --- a/lib/PubSubManager/PubSubManager.ts +++ b/lib/PubSubManager/PubSubManager.ts @@ -29,7 +29,11 @@ import { } from "prostgles-types"; import { find, pickKeys, tryCatch } from "prostgles-types/dist/util"; -import { LocalFuncs, getOnDataFunc, matchesLocalFuncs } from "../DboBuilder/ViewHandler/subscribe"; +import { + LocalFuncs, + getOnDataFunc, + matchesLocalFuncs, +} from "../DboBuilder/ViewHandler/subscribe"; import { EVENT_TRIGGER_TAGS } from "../Event_Trigger_Tags"; import { EventTypes } from "../Logging"; import { TableRule } from "../PublishParser/PublishParser"; @@ -109,7 +113,10 @@ export type ViewSubscriptionOptions = ( }[]; }; -export type SubscriptionParams = Pick & { +export type SubscriptionParams = Pick< + SubscribeParams, + "throttle" | "throttleOpts" +> & { socket_id?: string; channel_name: string; @@ -230,16 +237,17 @@ export class PubSubManager { const { watchSchema } = this.dboBuilder.prostgles.opts; if (watchSchema && !(await getIsSuperUser(this.db))) { console.warn( - "prostgles watchSchema requires superuser db user. Will not watch using event triggers" + "prostgles watchSchema requires superuser db user. Will not watch using event triggers", ); } try { /** We use these names because they include schema where necessary */ - const allTableNames = Object.keys(this.dbo).filter((k) => this.dbo[k]?.tableOrViewInfo); - const tableFilterQuery = - allTableNames.length ? - `OR table_name NOT IN (${allTableNames.map((tblName) => asValue(tblName)).join(", ")})` + const allTableNames = Object.keys(this.dbo).filter( + (k) => this.dbo[k]?.tableOrViewInfo, + ); + const tableFilterQuery = allTableNames.length + ? 
`OR table_name NOT IN (${allTableNames.map((tblName) => asValue(tblName)).join(", ")})` : ""; const query = pgp.as.format( ` @@ -295,7 +303,7 @@ export class PubSubManager { COMMIT; `, - { EVENT_TRIGGER_TAGS } + { EVENT_TRIGGER_TAGS }, ); await this.db @@ -316,45 +324,58 @@ export class PubSubManager { channel_name, localFuncs, socket_id, - }: Pick): Subscription[] { + }: Pick< + Subscription, + "localFuncs" | "socket_id" | "channel_name" + >): Subscription[] { return this.subs.filter((s) => { return ( s.channel_name === channel_name && - (matchesLocalFuncs(localFuncs, s.localFuncs) || (socket_id && s.socket_id === socket_id)) + (matchesLocalFuncs(localFuncs, s.localFuncs) || + (socket_id && s.socket_id === socket_id)) ); }); } getTriggerSubs(table_name: string, condition: string): Subscription[] { - const subs = this.subs.filter((s) => find(s.triggers, { table_name, condition })); + const subs = this.subs.filter((s) => + find(s.triggers, { table_name, condition }), + ); return subs; } removeLocalSub(channelName: string, localFuncs: LocalFuncs) { const matchingSubIdx = this.subs.findIndex( (s) => - s.channel_name === channelName && getOnDataFunc(localFuncs) === getOnDataFunc(s.localFuncs) + s.channel_name === channelName && + getOnDataFunc(localFuncs) === getOnDataFunc(s.localFuncs), ); if (matchingSubIdx > -1) { this.subs.splice(matchingSubIdx, 1); } else { - console.error("Could not unsubscribe. Subscription might not have initialised yet", { - channelName, - }); + console.error( + "Could not unsubscribe. Subscription might not have initialised yet", + { + channelName, + }, + ); } } getSyncs(table_name: string, condition: string) { return (this.syncs || []).filter( - (s: SyncParams) => s.table_name === table_name && s.condition === condition + (s: SyncParams) => + s.table_name === table_name && s.condition === condition, ); } notifListener = notifListener.bind(this); getSubData = async ( - sub: Subscription - ): Promise<{ data: any[]; err?: undefined } | { data?: undefined; err: any }> => { + sub: Subscription, + ): Promise< + { data: any[]; err?: undefined } | { data?: undefined; err: any } + > => { const { table_info, filter, params, table_rules } = sub; //, subOne = false const { name: table_name } = table_info; @@ -363,7 +384,12 @@ export class PubSubManager { } try { - const data = await this.dbo?.[table_name]!.find!(filter, params, undefined, table_rules); + const data = await this.dbo?.[table_name]!.find!( + filter, + params, + undefined, + table_rules, + ); return { data }; } catch (err) { return { err }; @@ -395,10 +421,13 @@ export class PubSubManager { socketId: socket.id, connectedSocketIds: this.connectedSocketIds, remainingSubs: JSON.stringify( - this.subs.map((s) => ({ tableName: s.table_info.name, triggers: s.triggers })) + this.subs.map((s) => ({ + tableName: s.table_info.name, + triggers: s.triggers, + })), ), remainingSyncs: JSON.stringify( - this.syncs.map((s) => pickKeys(s, ["table_name", "condition"])) + this.syncs.map((s) => pickKeys(s, ["table_name", "condition"])), ), }); @@ -424,7 +453,9 @@ export class PubSubManager { getActiveListeners = (): { table_name: string; condition: string }[] => { const activeListeners: { table_name: string; condition: string }[] = []; const upsert = (t: string, c: string) => { - if (!activeListeners.find((r) => r.table_name === t && r.condition === c)) { + if ( + !activeListeners.find((r) => r.table_name === t && r.condition === c) + ) { activeListeners.push({ table_name: t, condition: c }); } }; @@ -455,7 +486,7 @@ export class 
PubSubManager { WHERE app_id = $1 ORDER BY table_name, condition `, - [this.dboBuilder.prostgles.appId] + [this.dboBuilder.prostgles.appId], ); this._triggers = {}; @@ -473,7 +504,7 @@ export class PubSubManager { async addTrigger( params: { table_name: string; condition: string }, viewOptions: ViewSubscriptionOptions | undefined, - socket: PRGLIOSocket | undefined + socket: PRGLIOSocket | undefined, ) { const addedTrigger = await tryCatch(async () => { const { table_name } = { ...params }; @@ -491,7 +522,9 @@ export class PubSubManager { const trgVals = { tbl: asValue(table_name), cond: asValue(condition), - condHash: asValue(crypto.createHash("md5").update(condition).digest("hex")), + condHash: asValue( + crypto.createHash("md5").update(condition).digest("hex"), + ), }; await this.db.tx((t) => @@ -530,7 +563,7 @@ export class PubSubManager { ON CONFLICT DO NOTHING; COMMIT WORK; - `) + `), ); /** This might be redundant due to trigger on app_triggers */ @@ -549,7 +582,9 @@ export class PubSubManager { error: addedTrigger.error, sid: this.dboBuilder.prostgles.authHandler?.getSIDNoError({ socket }), tableName: addedTrigger.tbl ?? params.table_name, - connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map((s) => s.id), + connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map( + (s) => s.id, + ), localParams: { socket }, }); diff --git a/lib/PubSubManager/addSub.ts b/lib/PubSubManager/addSub.ts index 2d0f4b10..3ff378a5 100644 --- a/lib/PubSubManager/addSub.ts +++ b/lib/PubSubManager/addSub.ts @@ -1,20 +1,41 @@ import { SubscriptionChannels } from "prostgles-types"; -import { BasicCallback, parseCondition, PubSubManager, Subscription, SubscriptionParams } from "./PubSubManager"; +import { + BasicCallback, + parseCondition, + PubSubManager, + Subscription, + SubscriptionParams, +} from "./PubSubManager"; import { VoidFunction } from "../SchemaWatch/SchemaWatch"; type AddSubscriptionParams = SubscriptionParams & { condition: string; -} +}; -type AddSubResult = SubscriptionChannels & { sendFirstData: VoidFunction | undefined } +type AddSubResult = SubscriptionChannels & { + sendFirstData: VoidFunction | undefined; +}; /* Must return a channel for socket */ /* The distinct list of {table_name, condition} must have a corresponding trigger in the database */ -export async function addSub(this: PubSubManager, subscriptionParams: Omit): Promise { +export async function addSub( + this: PubSubManager, + subscriptionParams: Omit< + AddSubscriptionParams, + "channel_name" | "parentSubParams" + >, +): Promise { const { - socket, localFuncs, table_rules, filter = {}, - params = {}, condition = "", throttle = 0, //subOne = false, - viewOptions, table_info, throttleOpts, + socket, + localFuncs, + table_rules, + filter = {}, + params = {}, + condition = "", + throttle = 0, //subOne = false, + viewOptions, + table_info, + throttleOpts, } = subscriptionParams || {}; const table_name = table_info.name; @@ -55,71 +76,70 @@ export async function addSub(this: PubSubManager, subscriptionParams: Omit { this.pushSubData(newSub); - } - + }; } else if (socket) { const removeListeners = () => { socket.removeAllListeners(channel_name); socket.removeAllListeners(result.channelNameReady); socket.removeAllListeners(result.channelNameUnsubscribe); - } + }; removeListeners(); socket.once(result.channelNameReady, () => { this.pushSubData(newSub); }); - socket.once(result.channelNameUnsubscribe, (_data: any, cb: BasicCallback) => { - const res = "ok"; - this.subs = this.subs.filter(s => { - const isMatch = 
s.socket?.id === socket.id && s.channel_name === channel_name; - return !isMatch; - }); - removeListeners(); - cb(null, { res }); - }); + socket.once( + result.channelNameUnsubscribe, + (_data: any, cb: BasicCallback) => { + const res = "ok"; + this.subs = this.subs.filter((s) => { + const isMatch = + s.socket?.id === socket.id && s.channel_name === channel_name; + return !isMatch; + }); + removeListeners(); + cb(null, { res }); + }, + ); } this.subs.push(newSub); @@ -129,7 +149,6 @@ export async function addSub(this: PubSubManager, subscriptionParams: Omit { - const { - socket = null, table_info = null, table_rules, synced_field = null, - allow_delete = false, id_fields = [], filter = {}, - params, condition = "", throttle = 0 - } = syncParams || {} as AddSyncParams; + socket = null, + table_info = null, + table_rules, + synced_field = null, + allow_delete = false, + id_fields = [], + filter = {}, + params, + condition = "", + throttle = 0, + } = syncParams || ({} as AddSyncParams); const conditionParsed = parseCondition(condition); if (!socket || !table_info) throw "socket or table_info missing"; - + const { name: table_name } = table_info; const channel_name = `${this.socketChannelPreffix}.${table_name}.${JSON.stringify(filter)}.sync`; - + if (!synced_field) throw "synced_field missing from table_rules"; this.upsertSocket(socket); - + const upsertSync = () => { const newSync = { channel_name, @@ -46,7 +59,7 @@ export async function addSync(this: PubSubManager, syncParams: AddSyncParams) { is_syncing: false, wal: undefined, socket, - params + params, }; /* Only a sync per socket per table per condition allowed */ @@ -54,15 +67,15 @@ export async function addSync(this: PubSubManager, syncParams: AddSyncParams) { const existing = find(this.syncs, { socket_id: socket.id, channel_name }); if (!existing) { this.syncs.push(newSync); - + const unsyncChn = channel_name + "unsync"; socket.removeAllListeners(unsyncChn); socket.once(unsyncChn, (_data: any, cb: BasicCallback) => { - this._log({ - type: "sync", - command: "unsync", - socketId: socket.id, - tableName: table_name, + this._log({ + type: "sync", + command: "unsync", + socketId: socket.id, + tableName: table_name, condition, sid, connectedSocketIds: this.connectedSocketIds, @@ -70,8 +83,11 @@ export async function addSync(this: PubSubManager, syncParams: AddSyncParams) { }); socket.removeAllListeners(channel_name); socket.removeAllListeners(unsyncChn); - this.syncs = this.syncs.filter(s => { - const isMatch = s.socket_id && s.socket_id === socket.id && s.channel_name === channel_name; + this.syncs = this.syncs.filter((s) => { + const isMatch = + s.socket_id && + s.socket_id === socket.id && + s.channel_name === channel_name; return !isMatch; }); cb(null, { res: "ok" }); @@ -79,14 +95,13 @@ export async function addSync(this: PubSubManager, syncParams: AddSyncParams) { socket.removeAllListeners(channel_name); socket.on(channel_name, (data: any, cb: BasicCallback) => { - if (!data) { cb({ err: "Unexpected request. Need data or onSyncRequest" }); return; } /* - */ + */ /* Server will: 1. 
Ask for last_synced emit(onSyncRequest) @@ -103,12 +118,10 @@ export async function addSync(this: PubSubManager, syncParams: AddSyncParams) { if (data.onSyncRequest) { this.syncData(newSync, data.onSyncRequest, "client"); - } else { - console.error("Unexpected sync request data from client: ", data) + console.error("Unexpected sync request data from client: ", data); } }); - } else { console.warn("UNCLOSED DUPLICATE SYNC FOUND", existing.channel_name); } @@ -118,16 +131,20 @@ export async function addSync(this: PubSubManager, syncParams: AddSyncParams) { upsertSync(); - await this.addTrigger({ table_name, condition: conditionParsed }, undefined, socket); + await this.addTrigger( + { table_name, condition: conditionParsed }, + undefined, + socket, + ); return { result: channel_name }; }); - await this._log({ - type: "sync", - command: "addSync", - tableName: syncParams.table_info.name, - condition: syncParams.condition, + await this._log({ + type: "sync", + command: "addSync", + tableName: syncParams.table_info.name, + condition: syncParams.condition, socketId: syncParams.socket.id, connectedSocketIds: this.connectedSocketIds, duration: res.duration, @@ -135,7 +152,7 @@ export async function addSync(this: PubSubManager, syncParams: AddSyncParams) { sid, }); - if(res.error !== undefined) throw res.error; + if (res.error !== undefined) throw res.error; return res.result; -} \ No newline at end of file +} diff --git a/lib/PubSubManager/getCreatePubSubManagerError.ts b/lib/PubSubManager/getCreatePubSubManagerError.ts index ea91e22d..fa67cc3f 100644 --- a/lib/PubSubManager/getCreatePubSubManagerError.ts +++ b/lib/PubSubManager/getCreatePubSubManagerError.ts @@ -3,10 +3,12 @@ import { getPubSubManagerInitQuery } from "./getPubSubManagerInitQuery"; import { getCanExecute } from "../DboBuilder/dboBuilderUtils"; import { DboBuilder } from "../DboBuilder/DboBuilder"; -export const getCreatePubSubManagerError = async (dboBuilder: DboBuilder): Promise => { +export const getCreatePubSubManagerError = async ( + dboBuilder: DboBuilder, +): Promise => { const db = dboBuilder.db; - - const canExecute = await getCanExecute(db) + + const canExecute = await getCanExecute(db); if (!canExecute) return "Cannot run EXECUTE statements on this connection"; /** Check if prostgles schema exists */ @@ -16,25 +18,26 @@ export const getCreatePubSubManagerError = async (dboBuilder: DboBuilder): Promi WHERE nspname = 'prostgles' `); - const checkIfCanCreateProstglesSchema = () => tryCatch(async () => { - const allGood = await db.task(async t => { - try { - await t.none(` + const checkIfCanCreateProstglesSchema = () => + tryCatch(async () => { + const allGood = await db.task(async (t) => { + try { + await t.none(` BEGIN; DROP SCHEMA IF EXISTS prostgles CASCADE; CREATE SCHEMA IF NOT EXISTS prostgles; ROLLBACK; `); - } catch (e) { - await t.none(`ROLLBACK`); - return false; - } + } catch (e) { + await t.none(`ROLLBACK`); + return false; + } - return true; - }); + return true; + }); - return allGood; - }); + return allGood; + }); if (!prglSchema.length) { const canCreate = await checkIfCanCreateProstglesSchema(); @@ -52,21 +55,26 @@ export const getCreatePubSubManagerError = async (dboBuilder: DboBuilder): Promi return { ok: true }; }); - if(!canCheckVersion.ok){ - console.error("prostgles schema exists but cannot check version. Check logs", canCheckVersion.error); + if (!canCheckVersion.ok) { + console.error( + "prostgles schema exists but cannot check version. 
Check logs", + canCheckVersion.error, + ); return "prostgles schema exists but cannot check version. Check logs"; } } - const initQuery = await tryCatch(async () => ({ query: await getPubSubManagerInitQuery.bind(dboBuilder)() })); - if(initQuery.hasError){ + const initQuery = await tryCatch(async () => ({ + query: await getPubSubManagerInitQuery.bind(dboBuilder)(), + })); + if (initQuery.hasError) { console.error(initQuery.error); return "Could not get initQuery. Check logs"; } - if(!initQuery.query){ + if (!initQuery.query) { return undefined; } return undefined; -} \ No newline at end of file +}; diff --git a/lib/PubSubManager/getPubSubManagerInitQuery.ts b/lib/PubSubManager/getPubSubManagerInitQuery.ts index 98c8f099..2ab80fe0 100644 --- a/lib/PubSubManager/getPubSubManagerInitQuery.ts +++ b/lib/PubSubManager/getPubSubManagerInitQuery.ts @@ -1,7 +1,11 @@ - import { tryCatch } from "prostgles-types"; import { pgp } from "../DboBuilder/DboBuilderTypes"; -import { asValue, NOTIF_CHANNEL, NOTIF_TYPE, PubSubManager } from "./PubSubManager"; +import { + asValue, + NOTIF_CHANNEL, + NOTIF_TYPE, + PubSubManager, +} from "./PubSubManager"; const { version } = require("../../package.json"); import { getAppCheckQuery } from "./orphanTriggerCheck"; import { DboBuilder } from "../DboBuilder/DboBuilder"; @@ -11,7 +15,7 @@ export const DB_OBJ_NAMES = { data_watch_func: "prostgles.prostgles_trigger_function", schema_watch_func: "prostgles.schema_watch_func", schema_watch_trigger: "prostgles_schema_watch_trigger_new", - schema_watch_trigger_drop: "prostgles_schema_watch_trigger_new_drop" + schema_watch_trigger_drop: "prostgles_schema_watch_trigger_new_drop", } as const; const PROSTGLES_SCHEMA_EXISTS_QUERY = ` @@ -30,11 +34,10 @@ const PROSTGLES_SCHEMA_VERSION_OK_QUERY = ` `; const getInitQuery = (debugMode: boolean | undefined, pgVersion: number) => { - const canReplaceTriggers = pgVersion >= 140006; - const createTriggerQuery = canReplaceTriggers ? - `CREATE OR REPLACE TRIGGER %1$I`: - ` + const createTriggerQuery = canReplaceTriggers + ? `CREATE OR REPLACE TRIGGER %1$I` + : ` DROP TRIGGER IF EXISTS %1$I ON %2$s; CREATE TRIGGER %1$I `; @@ -334,7 +337,7 @@ BEGIN THEN concat_ws('; ', 'error', err_text, err_detail, err_hint, 'query: ' || query ) ELSE COALESCE(v_trigger.cids, '') END - ${debugMode? (", COALESCE(current_query(), 'current_query ??'), ' ', query") : ""} + ${debugMode ? ", COALESCE(current_query(), 'current_query ??'), ' ', query" : ""} ), 7999/4) -- Some chars are 2bytes -> 'Ω' ); END LOOP; @@ -504,7 +507,7 @@ BEGIN $q$ DO $e$ BEGIN - /* ${ PubSubManager.EXCLUDE_QUERY_FROM_SCHEMA_WATCH_ID} */ + /* ${PubSubManager.EXCLUDE_QUERY_FROM_SCHEMA_WATCH_ID} */ %s END $e$; @@ -532,7 +535,7 @@ BEGIN json_build_object( 'TG_OP', TG_OP, 'duration', (EXTRACT(EPOCH FROM now()) * 1000) - start_time, - 'query', ${debugMode? 'LEFT(current_query(), 400)' : "'Only shown in debug mode'"} + 'query', ${debugMode ? "LEFT(current_query(), 400)" : "'Only shown in debug mode'"} ) )::TEXT, 7999/4) ); @@ -609,7 +612,7 @@ BEGIN ${asValue(NOTIF_TYPE.schema)}, tg_tag , TG_event, - ${debugMode? 'curr_query' : "'Only shown in debug mode'"} + ${debugMode ? 
"curr_query" : "'Only shown in debug mode'"} ), 7999/4) ); END LOOP; @@ -629,34 +632,49 @@ $do$; COMMIT; `; - -} +}; /** * Initialize the prostgles schema and functions needed for realtime data and schema changes * undefined returned if the database contains the apropriate prostgles schema */ -export const getPubSubManagerInitQuery = async function(this: DboBuilder): Promise { - - const versionNum = await this.db.one("SELECT current_setting('server_version_num')::int as val"); - const initQuery = getInitQuery(this.prostgles.opts.DEBUG_MODE, versionNum.val); - const { schema_md5 = "none" } = await this.db.oneOrNone("SELECT md5($1) as schema_md5", [initQuery.trim()]); +export const getPubSubManagerInitQuery = async function ( + this: DboBuilder, +): Promise { + const versionNum = await this.db.one( + "SELECT current_setting('server_version_num')::int as val", + ); + const initQuery = getInitQuery( + this.prostgles.opts.DEBUG_MODE, + versionNum.val, + ); + const { schema_md5 = "none" } = await this.db.oneOrNone( + "SELECT md5($1) as schema_md5", + [initQuery.trim()], + ); const query = pgp.as.format(initQuery, { schema_md5, version }); const existingSchema = await this.db.any(PROSTGLES_SCHEMA_EXISTS_QUERY); - if(!existingSchema.length){ - console.log("getPubSubManagerInitQuery: No prostgles.versions table found. Creating..."); + if (!existingSchema.length) { + console.log( + "getPubSubManagerInitQuery: No prostgles.versions table found. Creating...", + ); return query; } const { existingSchemaVersions } = await tryCatch(async () => { - const existingSchemaVersions = await this.db.any(PROSTGLES_SCHEMA_VERSION_OK_QUERY, { schema_md5, version }); + const existingSchemaVersions = await this.db.any( + PROSTGLES_SCHEMA_VERSION_OK_QUERY, + { schema_md5, version }, + ); return { - existingSchemaVersions - } + existingSchemaVersions, + }; }); - if(!existingSchemaVersions?.length){ - console.log("getPubSubManagerInitQuery: Outdated prostgles schema. Re-creating..."); + if (!existingSchemaVersions?.length) { + console.log( + "getPubSubManagerInitQuery: Outdated prostgles schema. 
Re-creating...", + ); return query; } - + return undefined; -} \ No newline at end of file +}; diff --git a/lib/PubSubManager/initPubSubManager.ts b/lib/PubSubManager/initPubSubManager.ts index e204558a..28dbc0a4 100644 --- a/lib/PubSubManager/initPubSubManager.ts +++ b/lib/PubSubManager/initPubSubManager.ts @@ -3,11 +3,14 @@ import { PostgresNotifListenManager } from "../PostgresNotifListenManager"; import { getWatchSchemaTagList } from "../SchemaWatch/getWatchSchemaTagList"; import { NOTIF_CHANNEL, PubSubManager, asValue } from "./PubSubManager"; import { getPubSubManagerInitQuery } from "./getPubSubManagerInitQuery"; -export const REALTIME_TRIGGER_CHECK_QUERY = "prostgles-server internal query used to manage realtime triggers" as const; +export const REALTIME_TRIGGER_CHECK_QUERY = + "prostgles-server internal query used to manage realtime triggers" as const; -export const tout = (ms: number) => new Promise(res => setTimeout(res, ms)); +export const tout = (ms: number) => new Promise((res) => setTimeout(res, ms)); -export async function initPubSubManager(this: PubSubManager): Promise { +export async function initPubSubManager( + this: PubSubManager, +): Promise { if (!this.getIsDestroyed()) return undefined; const initQuery = await getPubSubManagerInitQuery.bind(this.dboBuilder)(); @@ -15,7 +18,7 @@ export async function initPubSubManager(this: PubSubManager): Promise t.any(initQuery)); - await this.db.tx(t => t.any(initQuery)); + await this.db.tx((t) => t.any(initQuery)); error = undefined; tries = 0; } catch (e: any) { - if(!didDeadlock && isObject(e) && e.code === "40P01"){ + if (!didDeadlock && isObject(e) && e.code === "40P01") { didDeadlock = true; tries = 5; console.error("Deadlock detected. Retrying..."); } error = e; - tries --; + tries--; } } - if(error){ + if (error) { throw error; } @@ -48,19 +51,21 @@ export async function initPubSubManager(this: PubSubManager): Promise +v); - - - if(!table_name) { + const condition_ids = condition_ids_str?.split(",").map((v) => +v); + + if (!table_name) { throw "table_name undef"; } - const tableTriggerConditions = this._triggers?.[table_name]?.map((condition, idx) => ({ - idx, - condition, - subs: this.getTriggerSubs(table_name, condition), - syncs: this.getSyncs(table_name, condition), - })); + const tableTriggerConditions = this._triggers?.[table_name]?.map( + (condition, idx) => ({ + idx, + condition, + subs: this.getTriggerSubs(table_name, condition), + syncs: this.getSyncs(table_name, condition), + }), + ); let state: "error" | "no-triggers" | "ok" | "invalid_condition_ids" = "ok"; - + // const triggers = await this.db.any("SELECT * FROM prostgles.triggers WHERE table_name = $1 AND id IN ($2:csv)", [table_name, condition_ids_str.split(",").map(v => +v)]); // const conditions: string[] = triggers.map(t => t.condition); - - if(!tableTriggerConditions?.length){ + if (!tableTriggerConditions?.length) { state = "no-triggers"; /* Trigger error */ - } else if ( - condition_ids_str?.startsWith("error") - ) { + } else if (condition_ids_str?.startsWith("error")) { state = "error"; const pref = "INTERNAL ERROR"; - console.error(`${pref}: condition_ids_str: ${condition_ids_str}`) + console.error(`${pref}: condition_ids_str: ${condition_ids_str}`); tableTriggerConditions.map(({ condition }) => { const subs = this.getTriggerSubs(table_name, condition); - subs.map(s => { - this.pushSubData(s, pref + ". Check server logs. Schema might have changed"); - }) + subs.map((s) => { + this.pushSubData( + s, + pref + ". Check server logs. 
Schema might have changed", + ); + }); }); /* Trigger ok */ - } else if ( - condition_ids?.every(id => Number.isInteger(id)) - ) { - + } else if (condition_ids?.every((id) => Number.isInteger(id))) { state = "ok"; - const firedTableConditions = tableTriggerConditions.filter(({ idx }) => condition_ids.includes(idx)); + const firedTableConditions = tableTriggerConditions.filter(({ idx }) => + condition_ids.includes(idx), + ); const orphanedTableConditions = condition_ids.filter((condId) => { const tc = tableTriggerConditions.at(condId); return !tc || (tc.subs.length === 0 && tc.syncs.length === 0); }); - if(orphanedTableConditions.length){ + if (orphanedTableConditions.length) { this.db - .any(` + .any( + ` /* Delete removed subscriptions */ /* ${PubSubManager.EXCLUDE_QUERY_FROM_SCHEMA_WATCH_ID} */ DELETE FROM prostgles.app_triggers at @@ -105,52 +115,57 @@ export async function notifListener(this: PubSubManager, data: { payload: string AND at.table_name = t.table_name AND at.condition = t.condition ) - `, - [table_name, orphanedTableConditions, this.appId] + `, + [table_name, orphanedTableConditions, this.appId], ) .then(() => { this.refreshTriggers(); }) - .catch(e => { + .catch((e) => { console.error("Error deleting orphaned triggers", e); }); } firedTableConditions.map(({ subs, syncs }) => { - - log("notifListener", subs.map(s => s.channel_name), syncs.map(s => s.channel_name)) + log( + "notifListener", + subs.map((s) => s.channel_name), + syncs.map((s) => s.channel_name), + ); syncs.map((s) => { this.syncData(s, undefined, "trigger"); }); /* Throttle the subscriptions */ - const activeAndReadySubs = subs.filter(sub => - sub.triggers.some(trg => - this.dbo[trg.table_name] && - sub.is_ready && - (sub.socket_id && this.sockets[sub.socket_id] || sub.localFuncs) - ) + const activeAndReadySubs = subs.filter((sub) => + sub.triggers.some( + (trg) => + this.dbo[trg.table_name] && + sub.is_ready && + ((sub.socket_id && this.sockets[sub.socket_id]) || sub.localFuncs), + ), ); - activeAndReadySubs.forEach(sub => { + activeAndReadySubs.forEach((sub) => { const { throttle = 0, throttleOpts } = sub; - if (!throttleOpts?.skipFirst && sub.last_throttled <= Date.now() - throttle) { + if ( + !throttleOpts?.skipFirst && + sub.last_throttled <= Date.now() - throttle + ) { sub.last_throttled = Date.now(); /* It is assumed the policy was checked before this point */ this.pushSubData(sub); } else if (!sub.is_throttling) { - - log("throttling sub") + log("throttling sub"); sub.is_throttling = setTimeout(() => { - log("throttling finished. pushSubData...") + log("throttling finished. 
pushSubData..."); sub.is_throttling = null; sub.last_throttled = Date.now(); this.pushSubData(sub); }, throttle); } }); - }); /* Trigger unknown issue */ @@ -158,16 +173,19 @@ export async function notifListener(this: PubSubManager, data: { payload: string state = "invalid_condition_ids"; } - await this._log({ + await this._log({ type: "sync", command: "notifListener", state, - tableName: table_name, - op_name, + tableName: table_name, + op_name, condition_ids_str, tableTriggers: this._triggers?.[table_name], - tableSyncs: JSON.stringify(this.syncs.filter(s => s.table_name === table_name).map(s => pickKeys(s, ["condition", "socket_id"]))), + tableSyncs: JSON.stringify( + this.syncs + .filter((s) => s.table_name === table_name) + .map((s) => pickKeys(s, ["condition", "socket_id"])), + ), connectedSocketIds: this.connectedSocketIds, }); - -} \ No newline at end of file +} diff --git a/lib/PubSubManager/orphanTriggerCheck.ts b/lib/PubSubManager/orphanTriggerCheck.ts index 53c28fa4..6feb3316 100644 --- a/lib/PubSubManager/orphanTriggerCheck.ts +++ b/lib/PubSubManager/orphanTriggerCheck.ts @@ -1,13 +1,14 @@ -import { PubSubManager } from './PubSubManager'; +import { PubSubManager } from "./PubSubManager"; import { REALTIME_TRIGGER_CHECK_QUERY } from "./initPubSubManager"; /** - * Schema and Data watch triggers (DB_OBJ_NAMES.schema_watch_func, DB_OBJ_NAMES.data_watch_func) + * Schema and Data watch triggers (DB_OBJ_NAMES.schema_watch_func, DB_OBJ_NAMES.data_watch_func) * survive and continue to user resources even after the client disconnects. * We must therefore delete apps that do not have active connections */ -const queryIdentifier = "prostgles query used to keep track of which prgl backend clients are still connected"; +const queryIdentifier = + "prostgles query used to keep track of which prgl backend clients are still connected"; const connectedApplicationNamesQuery = ` SELECT DISTINCT application_name FROM prostgles.apps @@ -15,7 +16,7 @@ const connectedApplicationNamesQuery = ` SELECT application_name FROM pg_catalog.pg_stat_activity ) -`; +`; export const DELETE_DISCONNECTED_APPS_QUERY = ` DELETE FROM prostgles.apps a diff --git a/lib/PubSubManager/pushSubData.ts b/lib/PubSubManager/pushSubData.ts index 1f042a81..3ba10741 100644 --- a/lib/PubSubManager/pushSubData.ts +++ b/lib/PubSubManager/pushSubData.ts @@ -1,11 +1,15 @@ import { parseLocalFuncs } from "../DboBuilder/ViewHandler/subscribe"; import { log, PubSubManager, Subscription } from "./PubSubManager"; -export async function pushSubData(this: PubSubManager, sub: Subscription, err?: any) { +export async function pushSubData( + this: PubSubManager, + sub: Subscription, + err?: any, +) { if (!sub) throw "pushSubData: invalid sub"; const { socket_id, channel_name } = sub; - if(!this.subs.some(s => s.channel_name === channel_name)){ + if (!this.subs.some((s) => s.channel_name === channel_name)) { // Might be throttling a sub that was removed return; } @@ -20,13 +24,12 @@ export async function pushSubData(this: PubSubManager, sub: Subscription, err?: return new Promise(async (resolve, reject) => { /* TODO: Retire subOne -> it's redundant */ - - const { data, err } = await this.getSubData(sub); - if(data){ + const { data, err } = await this.getSubData(sub); + if (data) { if (socket_id && this.sockets[socket_id]) { - log("Pushed " + data.length + " records to sub") + log("Pushed " + data.length + " records to sub"); this.sockets[socket_id].emit(channel_name, { data }, () => { resolve(data); }); @@ -38,13 +41,12 @@ export async function 
pushSubData(this: PubSubManager, sub: Subscription, err?: resolve(data); } // sub.last_throttled = Date.now(); - } else { const errObj = { _err_msg: err.toString(), err }; if (socket_id && this.sockets[socket_id]) { this.sockets[socket_id].emit(channel_name, { err: errObj }); } else if (localFuncs) { - if(!localFuncs.onError){ + if (!localFuncs.onError) { console.error("Uncaught subscription error", err); } localFuncs.onError?.(errObj); @@ -52,4 +54,4 @@ export async function pushSubData(this: PubSubManager, sub: Subscription, err?: reject(errObj); } }); -} \ No newline at end of file +} diff --git a/lib/PublishParser/PublishParser.ts b/lib/PublishParser/PublishParser.ts index fd6eb330..01eaa4d5 100644 --- a/lib/PublishParser/PublishParser.ts +++ b/lib/PublishParser/PublishParser.ts @@ -1,4 +1,4 @@ -import { Method, isObject } from "prostgles-types"; +import { AnyObject, Method, getObjectEntries, isObject } from "prostgles-types"; import { AuthResult, SessionUser } from "../Auth/AuthTypes"; import { LocalParams } from "../DboBuilder/DboBuilder"; import { DB, DBHandlerServer, Prostgles } from "../Prostgles"; @@ -6,17 +6,33 @@ import { VoidFunction } from "../SchemaWatch/SchemaWatch"; import { getFileTableRules } from "./getFileTableRules"; import { getSchemaFromPublish } from "./getSchemaFromPublish"; import { getTableRulesWithoutFileTable } from "./getTableRulesWithoutFileTable"; -import { DboTable, DboTableCommand, ParsedPublishTable, PublishMethods, PublishObject, PublishParams, RULE_TO_METHODS, TableRule } from "./publishTypesAndUtils"; +import { + DboTable, + DboTableCommand, + ParsedPublishTable, + PublishMethods, + PublishObject, + PublishParams, + RULE_TO_METHODS, + TableRule, +} from "./publishTypesAndUtils"; export class PublishParser { publish: any; publishMethods?: PublishMethods | undefined; publishRawSQL?: any; dbo: DBHandlerServer; - db: DB + db: DB; prostgles: Prostgles; - constructor(publish: any, publishMethods: PublishMethods | undefined, publishRawSQL: any, dbo: DBHandlerServer, db: DB, prostgles: Prostgles) { + constructor( + publish: any, + publishMethods: PublishMethods | undefined, + publishRawSQL: any, + dbo: DBHandlerServer, + db: DB, + prostgles: Prostgles + ) { this.publish = publish; this.publishMethods = publishMethods; this.publishRawSQL = publishRawSQL; @@ -27,32 +43,42 @@ export class PublishParser { if (!this.dbo || !this.publish) throw "INTERNAL ERROR: dbo and/or publish missing"; } - async getPublishParams(localParams: LocalParams, clientInfo?: AuthResult): Promise { - if (!this.dbo) throw "dbo missing" + async getPublishParams( + localParams: LocalParams, + clientInfo?: AuthResult + ): Promise { + if (!this.dbo) throw "dbo missing"; return { - ...(clientInfo || await this.prostgles.authHandler?.getClientInfo(localParams)), + ...(clientInfo || (await this.prostgles.authHandler?.getClientInfo(localParams))), dbo: this.dbo as any, db: this.db, socket: localParams.socket!, tables: this.prostgles.dboBuilder.tables, - } + }; } - async getAllowedMethods(reqInfo: Pick, userData?: AuthResult): Promise<{ [key: string]: Method; }> { - const methods: { [key: string]: Method; } = {}; + async getAllowedMethods( + reqInfo: Pick, + userData?: AuthResult + ): Promise<{ [key: string]: Method }> { + const methods: { [key: string]: Method } = {}; const publishParams = await this.getPublishParams(reqInfo, userData); const _methods = await applyParamsIfFunc(this.publishMethods, publishParams); if (_methods && Object.keys(_methods).length) { - 
Object.entries(_methods).map(([key, method]) => { - const isFuncLike = (maybeFunc: VoidFunction | Promise) => (typeof maybeFunc === "function" || maybeFunc && typeof maybeFunc.then === "function"); - //@ts-ignore - if (method && (isFuncLike(method) || isObject(method) && isFuncLike(method.run))) { - //@ts-ignore - methods[key] = _methods[key]; + getObjectEntries(_methods).map(([key, method]) => { + const isFuncLike = (maybeFunc: VoidFunction | Promise | Promise) => + typeof maybeFunc === "function" || (maybeFunc && typeof maybeFunc.then === "function"); + if ( + method && + (isFuncLike(method as Extract>) || + // @ts-ignore + (isObject(method) && isFuncLike(method.run))) + ) { + methods[key] = _methods[key]!; } else { - throw `invalid publishMethods item -> ${key} \n Expecting a function or promise` + throw `invalid publishMethods item -> ${key} \n Expecting a function or promise`; } }); } @@ -62,16 +88,16 @@ export class PublishParser { /** * Parses the first level of publish. (If false then nothing if * then all tables and views) - * @param socket - * @param user + * @param socket + * @param user */ async getPublish(localParams: LocalParams, clientInfo?: AuthResult): Promise { - const publishParams = await this.getPublishParams(localParams, clientInfo) + const publishParams = await this.getPublishParams(localParams, clientInfo); const _publish = await applyParamsIfFunc(this.publish, publishParams); if (_publish === "*") { const publish: PublishObject = {}; - this.prostgles.dboBuilder.tablesOrViews?.map(tov => { + this.prostgles.dboBuilder.tablesOrViews?.map((tov) => { publish[tov.name] = "*"; }); return publish; @@ -79,29 +105,41 @@ export class PublishParser { return _publish; } - async getValidatedRequestRuleWusr({ tableName, command, localParams }: DboTableCommand): Promise { - + async getValidatedRequestRuleWusr({ + tableName, + command, + localParams, + }: DboTableCommand): Promise { const clientInfo = await this.prostgles.authHandler!.getClientInfo(localParams); - const rules = await this.getValidatedRequestRule({ tableName, command, localParams }, clientInfo); + const rules = await this.getValidatedRequestRule( + { tableName, command, localParams }, + clientInfo + ); return rules; } - async getValidatedRequestRule({ tableName, command, localParams }: DboTableCommand, clientInfo?: AuthResult): Promise { + async getValidatedRequestRule( + { tableName, command, localParams }: DboTableCommand, + clientInfo?: AuthResult + ): Promise { if (!this.dbo) throw "INTERNAL ERROR: dbo is missing"; if (!command || !tableName) throw "command OR tableName are missing"; - const rtm = RULE_TO_METHODS.find(rtms => (rtms.methods as any).includes(command)); + const rtm = RULE_TO_METHODS.find((rtms) => (rtms.methods as any).includes(command)); if (!rtm) { throw "Invalid command: " + command; } /* Must be local request -> allow everything */ if (!localParams || (!localParams.socket && !localParams.httpReq)) { - return RULE_TO_METHODS.reduce((a, v) => ({ - ...a, - [v.rule]: v.no_limits - }), {}) + return RULE_TO_METHODS.reduce( + (a, v) => ({ + ...a, + [v.rule]: v.no_limits, + }), + {} + ); } /* Must be from socket. 
Must have a publish */ @@ -113,44 +151,64 @@ export class PublishParser { if (errorInfo) throw errorInfo.error; const table_rule = await this.getTableRules({ tableName, localParams }, clientInfo); - if (!table_rule) throw { stack: ["getValidatedRequestRule()"], message: "Invalid or disallowed table: " + tableName }; - + if (!table_rule) + throw { + stack: ["getValidatedRequestRule()"], + message: "Invalid or disallowed table: " + tableName, + }; if (command === "upsert") { if (!table_rule.update || !table_rule.insert) { - throw { stack: ["getValidatedRequestRule()"], message: `Invalid or disallowed command: upsert` }; + throw { + stack: ["getValidatedRequestRule()"], + message: `Invalid or disallowed command: upsert`, + }; } } if (rtm && table_rule && table_rule[rtm.rule]) { return table_rule; - } else throw { stack: ["getValidatedRequestRule()"], message: `Invalid or disallowed command: ${tableName}.${command}` }; + } else + throw { + stack: ["getValidatedRequestRule()"], + message: `Invalid or disallowed command: ${tableName}.${command}`, + }; } - async getTableRules(args: DboTable, clientInfo?: AuthResult): Promise { - - if(this.dbo[args.tableName]?.is_media){ - const fileTablePublishRules = await this.getTableRulesWithoutFileTable(args, clientInfo) - const { rules } = await getFileTableRules.bind(this)(args.tableName, fileTablePublishRules, args.localParams, clientInfo); + async getTableRules( + args: DboTable, + clientInfo?: AuthResult + ): Promise { + if (this.dbo[args.tableName]?.is_media) { + const fileTablePublishRules = await this.getTableRulesWithoutFileTable(args, clientInfo); + const { rules } = await getFileTableRules.bind(this)( + args.tableName, + fileTablePublishRules, + args.localParams, + clientInfo + ); return rules; } - return await this.getTableRulesWithoutFileTable(args, clientInfo) + return await this.getTableRulesWithoutFileTable(args, clientInfo); } getTableRulesWithoutFileTable = getTableRulesWithoutFileTable.bind(this); /* Prepares schema for client. Only allowed views and commands will be present */ getSchemaFromPublish = getSchemaFromPublish.bind(this); - } export * from "./publishTypesAndUtils"; -type FunctionWithArguments = (...args: any) => any -function applyParamsIfFunc(maybeFunc: T, ...params: any): T extends FunctionWithArguments ? ReturnType : T { +type FunctionWithArguments = (...args: any) => any; +function applyParamsIfFunc( + maybeFunc: T, + ...params: any +): T extends FunctionWithArguments ? 
ReturnType : T { if ( - (maybeFunc !== null && maybeFunc !== undefined) && + maybeFunc !== null && + maybeFunc !== undefined && //@ts-ignore (typeof maybeFunc === "function" || typeof maybeFunc.then === "function") ) { diff --git a/lib/PublishParser/getFileTableRules.ts b/lib/PublishParser/getFileTableRules.ts index b9267da3..d84bd3ea 100644 --- a/lib/PublishParser/getFileTableRules.ts +++ b/lib/PublishParser/getFileTableRules.ts @@ -7,72 +7,102 @@ import { ParsedPublishTable, UpdateRule } from "./publishTypesAndUtils"; /** * Permissions for referencedTables columns are propagated to the file table (even if file table has no permissions) - * File table existing permissions that include the referenced column resulting permissions are left as they are + * File table existing permissions that include the referenced column resulting permissions are left as they are * Select on a referenced column allows selecting from file table any records that join the referenced table and the select filters - * Insert on a referenced column allows inserting a file (according to any file type/size rules) only if it is a nested from that table - * Update on a referenced column allows updating a file (delete and insert) only if it is a nested update from that table - * Delete on a referenced column table allows deleting any referenced file - */ -export async function getFileTableRules (this: PublishParser, fileTableName: string, fileTablePublishRules: ParsedPublishTable | undefined, localParams: LocalParams, clientInfo: AuthResult | undefined) { + * Insert on a referenced column allows inserting a file (according to any file type/size rules) only if it is a nested from that table + * Update on a referenced column allows updating a file (delete and insert) only if it is a nested update from that table + * Delete on a referenced column table allows deleting any referenced file + */ +export async function getFileTableRules( + this: PublishParser, + fileTableName: string, + fileTablePublishRules: ParsedPublishTable | undefined, + localParams: LocalParams, + clientInfo: AuthResult | undefined, +) { const forcedDeleteFilters: FullFilter[] = []; const forcedSelectFilters: FullFilter[] = []; - const forcedUpdateFilters: FullFilter[] = []; + const forcedUpdateFilters: FullFilter[] = []; const allowedNestedInserts: { table: string; column: string }[] = []; - const referencedColumns = this.prostgles.dboBuilder.tablesOrViews?.filter(t => !t.is_view && t.name !== fileTableName).map(t => { - const refCols = t.columns.filter(c => c.references?.some(r => r.ftable === fileTableName)); - if(!refCols.length) return undefined; - return { - tableName: t.name, - fileColumns: refCols.map(c => c.name), - allColumns: t.columns.map(c => c.name), - } - }).filter(isDefined) - if(referencedColumns?.length){ - for await (const { tableName, fileColumns, allColumns } of referencedColumns){ - const table_rules = await this.getTableRules({ localParams, tableName }, clientInfo); - if(table_rules){ - fileColumns.map(column => { + const referencedColumns = this.prostgles.dboBuilder.tablesOrViews + ?.filter((t) => !t.is_view && t.name !== fileTableName) + .map((t) => { + const refCols = t.columns.filter((c) => + c.references?.some((r) => r.ftable === fileTableName), + ); + if (!refCols.length) return undefined; + return { + tableName: t.name, + fileColumns: refCols.map((c) => c.name), + allColumns: t.columns.map((c) => c.name), + }; + }) + .filter(isDefined); + if (referencedColumns?.length) { + for await (const { + tableName, + fileColumns, + 
allColumns, + } of referencedColumns) { + const table_rules = await this.getTableRules( + { localParams, tableName }, + clientInfo, + ); + if (table_rules) { + fileColumns.map((column) => { const path = [{ table: tableName, on: [{ id: column }] }]; - if(table_rules.delete){ + if (table_rules.delete) { forcedDeleteFilters.push({ $existsJoined: { path, filter: table_rules.delete.forcedFilter ?? {}, - } - }) + }, + }); } - if(table_rules.select){ - const parsedFields = parseFieldFilter(table_rules.select.fields, false, allColumns); + if (table_rules.select) { + const parsedFields = parseFieldFilter( + table_rules.select.fields, + false, + allColumns, + ); /** Must be allowed to view this column */ - if(parsedFields.includes(column as any)){ + if (parsedFields.includes(column as any)) { forcedSelectFilters.push({ $existsJoined: { path, filter: table_rules.select.forcedFilter ?? {}, - } + }, }); } } - if(table_rules.insert){ - const parsedFields = parseFieldFilter(table_rules.insert.fields, false, allColumns); + if (table_rules.insert) { + const parsedFields = parseFieldFilter( + table_rules.insert.fields, + false, + allColumns, + ); /** Must be allowed to view this column */ - if(parsedFields.includes(column as any)){ + if (parsedFields.includes(column as any)) { allowedNestedInserts.push({ table: tableName, column }); } } - if(table_rules.update){ - const parsedFields = parseFieldFilter(table_rules.update.fields, false, allColumns); + if (table_rules.update) { + const parsedFields = parseFieldFilter( + table_rules.update.fields, + false, + allColumns, + ); /** Must be allowed to view this column */ - if(parsedFields.includes(column as any)){ + if (parsedFields.includes(column as any)) { forcedUpdateFilters.push({ $existsJoined: { path, filter: table_rules.update.forcedFilter ?? {}, - } + }, }); } } - }) + }); } } } @@ -81,44 +111,57 @@ export async function getFileTableRules (this: PublishParser, fileTableName: str ...fileTablePublishRules, }; - const getForcedFilter = (rule: Pick | undefined, forcedFilters: FullFilter[]) => { - return (rule && !rule.forcedFilter)? {} : { - forcedFilter: { - $or: forcedFilters.concat(rule?.forcedFilter? [rule?.forcedFilter] : []), - } - } - } - if(forcedSelectFilters.length || fileTablePublishRules?.select){ + const getForcedFilter = ( + rule: Pick | undefined, + forcedFilters: FullFilter[], + ) => { + return rule && !rule.forcedFilter + ? {} + : { + forcedFilter: { + $or: forcedFilters.concat( + rule?.forcedFilter ? 
[rule?.forcedFilter] : [], + ), + }, + }; + }; + if (forcedSelectFilters.length || fileTablePublishRules?.select) { fileTableRule.select = { fields: "*", ...fileTablePublishRules?.select, ...getForcedFilter(fileTablePublishRules?.select, forcedSelectFilters), - } + }; } - if(forcedDeleteFilters.length || fileTablePublishRules?.delete){ + if (forcedDeleteFilters.length || fileTablePublishRules?.delete) { fileTableRule.delete = { filterFields: "*", ...fileTablePublishRules?.delete, ...getForcedFilter(fileTablePublishRules?.delete, forcedDeleteFilters), - } + }; } - if(forcedUpdateFilters.length || fileTablePublishRules?.update){ + if (forcedUpdateFilters.length || fileTablePublishRules?.update) { fileTableRule.update = { fields: "*", ...fileTablePublishRules?.update, ...getForcedFilter(fileTablePublishRules?.update, forcedUpdateFilters), - } + }; } - if(allowedNestedInserts.length || fileTablePublishRules?.insert){ + if (allowedNestedInserts.length || fileTablePublishRules?.insert) { fileTableRule.insert = { fields: "*", ...fileTablePublishRules?.insert, - allowedNestedInserts: fileTablePublishRules?.insert? undefined : allowedNestedInserts, - } + allowedNestedInserts: fileTablePublishRules?.insert + ? undefined + : allowedNestedInserts, + }; } /** Add missing implied methods (getColumns, getInfo) */ - const rules = await this.getTableRulesWithoutFileTable.bind(this)({ localParams, tableName: fileTableName }, clientInfo, { [fileTableName]: fileTableRule }) + const rules = await this.getTableRulesWithoutFileTable.bind(this)( + { localParams, tableName: fileTableName }, + clientInfo, + { [fileTableName]: fileTableRule }, + ); return { rules, allowedInserts: allowedNestedInserts }; -} \ No newline at end of file +} diff --git a/lib/PublishParser/getSchemaFromPublish.ts b/lib/PublishParser/getSchemaFromPublish.ts index f8cc94e5..edf7bd0f 100644 --- a/lib/PublishParser/getSchemaFromPublish.ts +++ b/lib/PublishParser/getSchemaFromPublish.ts @@ -1,63 +1,96 @@ -import { DBSchemaTable, MethodKey, TableInfo, TableSchemaErrors, TableSchemaForClient, getKeys, pickKeys } from "prostgles-types"; +import { + DBSchemaTable, + MethodKey, + TableInfo, + TableSchemaErrors, + TableSchemaForClient, + getKeys, + pickKeys, +} from "prostgles-types"; import { AuthResult, ExpressReq } from "../Auth/AuthTypes"; import { getErrorAsObject, PRGLIOSocket } from "../DboBuilder/DboBuilder"; -import { PublishObject, PublishParser } from "./PublishParser" +import { PublishObject, PublishParser } from "./PublishParser"; import { TABLE_METHODS } from "../Prostgles"; -type Args = ({ - socket: PRGLIOSocket; - httpReq?: undefined; -} | { - httpReq: ExpressReq; - socket?: undefined; -}) & { +type Args = ( + | { + socket: PRGLIOSocket; + httpReq?: undefined; + } + | { + httpReq: ExpressReq; + socket?: undefined; + } +) & { userData: AuthResult | undefined; -} - -export async function getSchemaFromPublish(this: PublishParser, { userData, ...clientReq }: Args): Promise<{ schema: TableSchemaForClient; tables: DBSchemaTable[]; tableSchemaErrors: TableSchemaErrors }> { +}; + +export async function getSchemaFromPublish( + this: PublishParser, + { userData, ...clientReq }: Args, +): Promise<{ + schema: TableSchemaForClient; + tables: DBSchemaTable[]; + tableSchemaErrors: TableSchemaErrors; +}> { const schema: TableSchemaForClient = {}; const tableSchemaErrors: TableSchemaErrors = {}; - let tables: DBSchemaTable[] = [] + let tables: DBSchemaTable[] = []; try { /* Publish tables and views based on socket */ - const clientInfo = userData ?? 
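
Written out for a single referencing table, the `$or` filter that `getForcedFilter` assembles above could look like this sketch (table and column names are assumed):

```typescript
// File-table select rule produced when tasks.attachment_id references the
// file table and tasks is published with select: { forcedFilter: { owner_id: "me" } }.
// A file row is visible only if it joins a tasks row the user may select.
const fileTableSelectRule = {
  fields: "*",
  forcedFilter: {
    $or: [
      {
        $existsJoined: {
          path: [{ table: "tasks", on: [{ id: "attachment_id" }] }],
          filter: { owner_id: "me" }, // tasks' own select.forcedFilter
        },
      },
      // ...one $existsJoined entry per referencing column, plus the file
      // table's own published forcedFilter when one exists
    ],
  },
};
```
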
await this.prostgles.authHandler?.getClientInfo(clientReq); + const clientInfo = + userData ?? (await this.prostgles.authHandler?.getClientInfo(clientReq)); let _publish: PublishObject | undefined; try { _publish = await this.getPublish(clientReq, clientInfo); - } catch(err){ - console.error("Error within then Publish function ", err) + } catch (err) { + console.error("Error within then Publish function ", err); throw err; } - if (_publish && Object.keys(_publish).length) { let txKey = "tx"; if (!this.prostgles.opts.transactions) txKey = ""; - if (typeof this.prostgles.opts.transactions === "string") txKey = this.prostgles.opts.transactions; + if (typeof this.prostgles.opts.transactions === "string") + txKey = this.prostgles.opts.transactions; - const tableNames = Object.keys(_publish).filter(k => !txKey || txKey !== k); + const tableNames = Object.keys(_publish).filter( + (k) => !txKey || txKey !== k, + ); const fileTableName = this.prostgles.fileManager?.tableName; - if(fileTableName && this.dbo[fileTableName]?.is_media && !tableNames.includes(fileTableName)){ - const isReferenced = this.prostgles.dboBuilder.tablesOrViews?.some(t => t.columns.some(c => c.references?.some(r => r.ftable === fileTableName))) - if(isReferenced){ + if ( + fileTableName && + this.dbo[fileTableName]?.is_media && + !tableNames.includes(fileTableName) + ) { + const isReferenced = this.prostgles.dboBuilder.tablesOrViews?.some( + (t) => + t.columns.some((c) => + c.references?.some((r) => r.ftable === fileTableName), + ), + ); + if (isReferenced) { tableNames.unshift(fileTableName); } } - await Promise.all(tableNames - .map(async tableName => { + await Promise.all( + tableNames.map(async (tableName) => { if (!this.dbo[tableName]) { const errMsg = [ `Table ${tableName} does not exist`, - `Expecting one of: ${JSON.stringify(this.prostgles.dboBuilder.tablesOrViews?.map(tov => tov.name))}`, - `DBO tables: ${JSON.stringify(Object.keys(this.dbo).filter(k => (this.dbo[k] as any).find))}`, + `Expecting one of: ${JSON.stringify(this.prostgles.dboBuilder.tablesOrViews?.map((tov) => tov.name))}`, + `DBO tables: ${JSON.stringify(Object.keys(this.dbo).filter((k) => (this.dbo[k] as any).find))}`, ].join("\n"); throw errMsg; } - const table_rules = await this.getTableRules({ localParams: clientReq, tableName }, clientInfo); + const table_rules = await this.getTableRules( + { localParams: clientReq, tableName }, + clientInfo, + ); if (table_rules && Object.keys(table_rules).length) { schema[tableName] = {}; @@ -70,68 +103,107 @@ export async function getSchemaFromPublish(this: PublishParser, { userData, ...c methods = getKeys(table_rules) as any; } - if(!this.prostgles.dboBuilder.canSubscribe){ - methods = methods.filter(m => !["subscribe", "subscribeOne", "sync", "unsubscribe", "unsync"].includes(m)); + if (!this.prostgles.dboBuilder.canSubscribe) { + methods = methods.filter( + (m) => + ![ + "subscribe", + "subscribeOne", + "sync", + "unsubscribe", + "unsync", + ].includes(m), + ); } - await Promise.all(methods.filter(m => m !== "select" as any) - .map(async method => { - if (method === "sync" && table_rules[method]) { - - /* Pass sync info */ - tableSchema[method] = table_rules[method]; - } else if ((table_rules as any)[method]) { - - tableSchema[method] = method === "insert"? 
pickKeys(table_rules.insert!, ["allowedNestedInserts"]) : {}; - - /* Test for issues with the common table CRUD methods () */ - if (TABLE_METHODS.includes(method as any)) { - - try { - const valid_table_command_rules = await this.getValidatedRequestRule({ tableName, command: method, localParams: clientReq }, clientInfo); - if(this.prostgles.opts.testRulesOnConnect){ - await (this.dbo[tableName] as any)[method]({}, {}, {}, valid_table_command_rules, { ...clientReq, isRemoteRequest: true, testRule: true }); + await Promise.all( + methods + .filter((m) => m !== ("select" as any)) + .map(async (method) => { + if (method === "sync" && table_rules[method]) { + /* Pass sync info */ + tableSchema[method] = table_rules[method]; + } else if ((table_rules as any)[method]) { + tableSchema[method] = + method === "insert" + ? pickKeys(table_rules.insert!, [ + "allowedNestedInserts", + ]) + : {}; + + /* Test for issues with the common table CRUD methods () */ + if (TABLE_METHODS.includes(method as any)) { + try { + const valid_table_command_rules = + await this.getValidatedRequestRule( + { + tableName, + command: method, + localParams: clientReq, + }, + clientInfo, + ); + if (this.prostgles.opts.testRulesOnConnect) { + await (this.dbo[tableName] as any)[method]( + {}, + {}, + {}, + valid_table_command_rules, + { + ...clientReq, + isRemoteRequest: true, + testRule: true, + }, + ); + } + } catch (e) { + tableSchemaErrors[tableName] ??= {}; + tableSchemaErrors[tableName]![method] = { + error: "Internal publish error. Check server logs", + }; + + throw { + ...getErrorAsObject(e), + publish_path: `publish.${tableName}.${method}: \n -> ${e}`, + }; + } } - } catch (e) { - tableSchemaErrors[tableName] ??= {}; - tableSchemaErrors[tableName]![method] = { error: "Internal publish error. 
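
The per-method loop above ultimately fills the three values returned to the client. A plausible result for one published table, with assumed names and values:

```typescript
// Sketch of getSchemaFromPublish output. Allowed methods map to {} except
// sync (rule passed through) and insert (allowedNestedInserts only);
// rule-validation failures are reported per table/method in tableSchemaErrors.
const result = {
  schema: {
    tasks: {
      select: {},
      insert: { allowedNestedInserts: undefined },
      getInfo: {},
      getColumns: {},
      sync: { id_fields: ["id"], synced_field: "last_modified", batch_size: 50 },
    },
  },
  tables: [] as unknown[], // DBSchemaTable[]: name, info and columns per table
  tableSchemaErrors: {
    orders: { update: { error: "Internal publish error. Check server logs" } },
  },
};
```
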
Check server logs" }; - - throw { - ...getErrorAsObject(e), - publish_path: `publish.${tableName}.${method}: \n -> ${e}` - }; - } - } - - if (method === "getInfo" || method === "getColumns") { - const tableRules = await this.getValidatedRequestRule({ tableName, command: method, localParams: clientReq }, clientInfo); - const res = await (this.dbo[tableName] as any)[method](undefined, undefined, undefined, tableRules, { ...clientReq, isRemoteRequest: true }); - if (method === "getInfo") { - tableInfo = res; - } else if (method === "getColumns") { - tableColumns = res; + if (method === "getInfo" || method === "getColumns") { + const tableRules = await this.getValidatedRequestRule( + { tableName, command: method, localParams: clientReq }, + clientInfo, + ); + const res = await (this.dbo[tableName] as any)[method]( + undefined, + undefined, + undefined, + tableRules, + { ...clientReq, isRemoteRequest: true }, + ); + if (method === "getInfo") { + tableInfo = res; + } else if (method === "getColumns") { + tableColumns = res; + } + } } - } - } - })); + }), + ); if (tableInfo && tableColumns) { - tables.push({ name: tableName, info: tableInfo, - columns: tableColumns - }) + columns: tableColumns, + }); } } return true; - }) + }), ); } - - } catch (e) { console.error("Prostgles \nERRORS IN PUBLISH: ", JSON.stringify(e)); throw e; @@ -139,4 +211,4 @@ export async function getSchemaFromPublish(this: PublishParser, { userData, ...c tables = tables.sort((a, b) => a.name.localeCompare(b.name)); return { schema, tables, tableSchemaErrors }; -} \ No newline at end of file +} diff --git a/lib/PublishParser/getTableRulesWithoutFileTable.ts b/lib/PublishParser/getTableRulesWithoutFileTable.ts index a79765fc..11e8fc3a 100644 --- a/lib/PublishParser/getTableRulesWithoutFileTable.ts +++ b/lib/PublishParser/getTableRulesWithoutFileTable.ts @@ -5,41 +5,65 @@ import { ViewHandler } from "../DboBuilder/ViewHandler/ViewHandler"; import { DEFAULT_SYNC_BATCH_SIZE } from "../PubSubManager/PubSubManager"; import { PublishParser } from "./PublishParser"; import { - DboTable, ParsedPublishTable, PublishObject, PublishTableRule, - PublishViewRule, RULE_TO_METHODS, SubscribeRule + DboTable, + ParsedPublishTable, + PublishObject, + PublishTableRule, + PublishViewRule, + RULE_TO_METHODS, + SubscribeRule, } from "./publishTypesAndUtils"; -export async function getTableRulesWithoutFileTable(this: PublishParser, { tableName, localParams }: DboTable, clientInfo?: AuthResult, overridenPublish?: PublishObject): Promise { - - if (!localParams || !tableName) throw { stack: ["getTableRules()"], message: "publish OR socket OR dbo OR tableName are missing" }; - - const _publish = overridenPublish ?? await this.getPublish(localParams, clientInfo); +export async function getTableRulesWithoutFileTable( + this: PublishParser, + { tableName, localParams }: DboTable, + clientInfo?: AuthResult, + overridenPublish?: PublishObject, +): Promise { + if (!localParams || !tableName) + throw { + stack: ["getTableRules()"], + message: "publish OR socket OR dbo OR tableName are missing", + }; + + const _publish = + overridenPublish ?? 
(await this.getPublish(localParams, clientInfo)); const raw_table_rules = _publish[tableName]; - if (!raw_table_rules || isObject(raw_table_rules) && Object.values(raw_table_rules).every(v => !v)) { + if ( + !raw_table_rules || + (isObject(raw_table_rules) && + Object.values(raw_table_rules).every((v) => !v)) + ) { return undefined; } let parsed_table: ParsedPublishTable = {}; /* Get view or table specific rules */ - const tHandler = (this.dbo[tableName] as TableHandler | ViewHandler); + const tHandler = this.dbo[tableName] as TableHandler | ViewHandler; const is_view = tHandler.is_view; /** * Allow subscribing to a view if it has primary key columns from other tables */ - const canSubscribe = (!is_view || tHandler.columns.some(c => c.references)); + const canSubscribe = !is_view || tHandler.columns.some((c) => c.references); if (!tHandler) { - throw { stack: ["getTableRules()"], message: `${tableName} could not be found in dbo` }; + throw { + stack: ["getTableRules()"], + message: `${tableName} could not be found in dbo`, + }; } - - const MY_RULES = RULE_TO_METHODS.filter(r => { + const MY_RULES = RULE_TO_METHODS.filter((r) => { /** Check PG User privileges */ const pgUserIsAllowedThis = tHandler.tableOrViewInfo.privileges[r.sqlRule]; let result = (!is_view || !r.table_only) && pgUserIsAllowedThis; - if (!pgUserIsAllowedThis && isObject(raw_table_rules) && (raw_table_rules as PublishTableRule)[r.sqlRule]) { + if ( + !pgUserIsAllowedThis && + isObject(raw_table_rules) && + (raw_table_rules as PublishTableRule)[r.sqlRule] + ) { throw `Your postgres user is not allowed ${r.sqlRule} on table ${tableName}`; } @@ -51,63 +75,84 @@ export async function getTableRulesWithoutFileTable(this: PublishParser, { table // } // } - if(r.rule === "subscribe" && !canSubscribe){ + if (r.rule === "subscribe" && !canSubscribe) { result = false; } return result; }); - - /* All methods allowed. 
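
For orientation, the two publish shapes this parser accepts, sketched with assumed table and column names. `"*"`/`true` expands to every rule's `no_limits` defaults (handled just below), while object rules are validated key by key against each rule's `allowed_params`:

```typescript
// Full access vs granular rules. The parser throws for any rule the
// connected Postgres role lacks the matching privilege for, and drops
// subscribe on plain views that have no referenced-table columns.
export const publish = async () => ({
  tasks: "*", // every permitted rule, with no_limits defaults
  projects: {
    select: { fields: "*", forcedFilter: { deleted: false } },
    update: { fields: { name: 1 } }, // only `name` may be updated
    // getInfo / getColumns remain allowed unless explicitly set to false
  },
});
```
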
Add no limits for table rules */ if ([true, "*"].includes(raw_table_rules as any)) { parsed_table = {}; - MY_RULES.filter(r => r.no_limits).forEach(r => { - parsed_table[r.rule] = { ...r.no_limits as object } as any; + MY_RULES.filter((r) => r.no_limits).forEach((r) => { + parsed_table[r.rule] = { ...(r.no_limits as object) } as any; }); /** Specific rules allowed */ } else if (isObject(raw_table_rules) && getKeys(raw_table_rules).length) { - const allRuleKeys: (keyof PublishViewRule | keyof PublishTableRule)[] = getKeys(raw_table_rules); - const dissallowedRuleKeys = allRuleKeys.filter(m => !(raw_table_rules as PublishTableRule)[m]) + const allRuleKeys: (keyof PublishViewRule | keyof PublishTableRule)[] = + getKeys(raw_table_rules); + const dissallowedRuleKeys = allRuleKeys.filter( + (m) => !(raw_table_rules as PublishTableRule)[m], + ); - MY_RULES.map(r => { + MY_RULES.map((r) => { /** Unless specifically disabled these are allowed */ - if (["getInfo", "getColumns"].includes(r.rule) && !dissallowedRuleKeys.includes(r.rule as any)) { + if ( + ["getInfo", "getColumns"].includes(r.rule) && + !dissallowedRuleKeys.includes(r.rule as any) + ) { parsed_table[r.rule] = r.no_limits as any; return; } /** Add no_limit values for implied/ fully allowed methods */ - if ([true, "*"].includes((raw_table_rules as PublishTableRule)[r.rule] as any) && r.no_limits) { + if ( + [true, "*"].includes( + (raw_table_rules as PublishTableRule)[r.rule] as any, + ) && + r.no_limits + ) { parsed_table[r.rule] = Object.assign({}, r.no_limits) as any; /** Carry over detailed config */ } else if (isObject((raw_table_rules as any)[r.rule])) { - parsed_table[r.rule] = (raw_table_rules as any)[r.rule] + parsed_table[r.rule] = (raw_table_rules as any)[r.rule]; } }); - allRuleKeys.filter(m => parsed_table[m]) + allRuleKeys + .filter((m) => parsed_table[m]) .forEach((method) => { const rule = parsed_table[method]; - - const rm = MY_RULES.find(r => r.rule === method || (r.methods as readonly string[]).includes(method)); + + const rm = MY_RULES.find( + (r) => + r.rule === method || + (r.methods as readonly string[]).includes(method), + ); if (!rm) { let extraInfo = ""; - if (is_view && RULE_TO_METHODS.find(r => !is_view && r.rule === method || (r.methods as any).includes(method))) { + if ( + is_view && + RULE_TO_METHODS.find( + (r) => + (!is_view && r.rule === method) || + (r.methods as any).includes(method), + ) + ) { extraInfo = "You've specified table rules to a view\n"; } - throw `Invalid rule in publish.${tableName} -> ${method} \n${extraInfo}Expecting any of: ${MY_RULES.flatMap(r => [r.rule, ...r.methods]).join(", ")}`; + throw `Invalid rule in publish.${tableName} -> ${method} \n${extraInfo}Expecting any of: ${MY_RULES.flatMap((r) => [r.rule, ...r.methods]).join(", ")}`; } /* Check RULES for invalid params */ /* Methods do not have params -> They use them from rules */ if (method === rm.rule && isObject(rule)) { const method_params = Object.keys(rule); - const allowed_params = Object.keys(rm?.allowed_params) - const iparam = method_params.find(p => !allowed_params.includes(p)); + const allowed_params = Object.keys(rm?.allowed_params); + const iparam = method_params.find((p) => !allowed_params.includes(p)); if (iparam) { throw `Invalid setting in publish.${tableName}.${method} -> ${iparam}. 
\n Expecting any of: ${allowed_params.join(", ")}`; } @@ -115,7 +160,6 @@ export async function getTableRulesWithoutFileTable(this: PublishParser, { table /* Add default params (if missing) */ if (method === "sync") { - if ([true, "*"].includes(parsed_table[method] as any)) { throw "Invalid sync rule. Expecting { id_fields: string[], synced_field: string } "; } @@ -132,35 +176,41 @@ export async function getTableRulesWithoutFileTable(this: PublishParser, { table const subKey = "subscribe" as const; if (method === "select" && !dissallowedRuleKeys.includes(subKey)) { - const sr = MY_RULES.find(r => r.rule === subKey); + const sr = MY_RULES.find((r) => r.rule === subKey); if (sr && canSubscribe) { - parsed_table[subKey] = { ...sr.no_limits as SubscribeRule }; - parsed_table.subscribeOne = { ...sr.no_limits as SubscribeRule }; + parsed_table[subKey] = { ...(sr.no_limits as SubscribeRule) }; + parsed_table.subscribeOne = { ...(sr.no_limits as SubscribeRule) }; } } }); - } else { - throw "Unexpected publish" + throw "Unexpected publish"; } - const getImpliedMethods = (tableRules: ParsedPublishTable): ParsedPublishTable => { + const getImpliedMethods = ( + tableRules: ParsedPublishTable, + ): ParsedPublishTable => { const res = { ...tableRules }; /* Add implied methods if not specifically dissallowed */ - MY_RULES.map(r => { - + MY_RULES.map((r) => { /** THIS IS A MESS -> some methods cannot be dissallowed (unsync, unsubscribe...) */ - r.methods.forEach(method => { - const isAllowed = tableRules[r.rule] && (tableRules as any)[method] === undefined; + r.methods.forEach((method) => { + const isAllowed = + tableRules[r.rule] && (tableRules as any)[method] === undefined; if (isAllowed) { - - if (method === "updateBatch" && (!tableRules.update || tableRules.update.checkFilter || tableRules.update.postValidate)) { + if ( + method === "updateBatch" && + (!tableRules.update || + tableRules.update.checkFilter || + tableRules.update.postValidate) + ) { // not allowed - - } else if (method === "upsert" && (!tableRules.update || !tableRules.insert)) { + } else if ( + method === "upsert" && + (!tableRules.update || !tableRules.insert) + ) { // not allowed - } else { (res as any)[method] ??= true; } @@ -169,9 +219,9 @@ export async function getTableRulesWithoutFileTable(this: PublishParser, { table }); return res; - } + }; parsed_table = getImpliedMethods(parsed_table); - return parsed_table; -} \ No newline at end of file + return parsed_table; +} diff --git a/lib/PublishParser/publishTypesAndUtils.ts b/lib/PublishParser/publishTypesAndUtils.ts index a7519278..7434e597 100644 --- a/lib/PublishParser/publishTypesAndUtils.ts +++ b/lib/PublishParser/publishTypesAndUtils.ts @@ -1,4 +1,10 @@ -import { AnyObject, DBSchema, FullFilter, Method, RULE_METHODS } from "prostgles-types"; +import { + AnyObject, + DBSchema, + FullFilter, + Method, + RULE_METHODS, +} from "prostgles-types"; import type { DBOFullyTyped, PublishFullyTyped } from "../DBSchemaBuilder"; import { CommonTableRules, @@ -9,8 +15,11 @@ import { } from "../DboBuilder/DboBuilder"; import { DB, DBHandlerServer } from "../Prostgles"; -export type PublishMethods = ( - params: PublishParams +export type PublishMethods< + S = void, + SUser extends SessionUser = SessionUser, +> = ( + params: PublishParams, ) => { [key: string]: Method } | Promise<{ [key: string]: Method } | null>; export type Awaitable = T | Promise; @@ -71,7 +80,11 @@ export const RULE_TO_METHODS = [ rule: "update", sqlRule: "update", methods: RULE_METHODS.update, - no_limits: { fields: "*", 
filterFields: "*", returningFields: "*" }, + no_limits: { + fields: "*", + filterFields: "*", + returningFields: "*", + }, table_only: true, allowed_params: { checkFilter: 1, @@ -158,7 +171,10 @@ export type DeleteRequestData = { filter: object; returning: FieldFilter; }; -export type UpdateRequestDataOne = { +export type UpdateRequestDataOne< + R extends AnyObject, + S extends DBSchema | void = void, +> = { filter: FullFilter; data: Partial; returning: FieldFilter; @@ -179,28 +195,42 @@ export type ValidateRowArgs = { dbx: DBX; localParams: LocalParams; }; -export type ValidateUpdateRowArgs, F = Filter, DBX = DBHandlerServer> = { +export type ValidateUpdateRowArgs< + U = Partial, + F = Filter, + DBX = DBHandlerServer, +> = { update: U; filter: F; dbx: DBX; localParams: LocalParams; }; export type ValidateRow = ( - args: ValidateRowArgs> + args: ValidateRowArgs>, ) => R | Promise; export type PostValidateRow = ( - args: ValidateRowArgs> + args: ValidateRowArgs>, ) => void | Promise; -export type PostValidateRowBasic = (args: ValidateRowArgs) => void | Promise; -export type ValidateRowBasic = (args: ValidateRowArgs) => AnyObject | Promise; -export type ValidateUpdateRow = ( - args: ValidateUpdateRowArgs, FullFilter, DBOFullyTyped> +export type PostValidateRowBasic = ( + args: ValidateRowArgs, +) => void | Promise; +export type ValidateRowBasic = ( + args: ValidateRowArgs, +) => AnyObject | Promise; +export type ValidateUpdateRow< + R extends AnyObject = AnyObject, + S extends DBSchema | void = void, +> = ( + args: ValidateUpdateRowArgs, FullFilter, DBOFullyTyped>, ) => Partial | Promise>; export type ValidateUpdateRowBasic = ( - args: ValidateUpdateRowArgs + args: ValidateUpdateRowArgs, ) => AnyObject | Promise; -export type SelectRule = { +export type SelectRule< + Cols extends AnyObject = AnyObject, + S extends DBSchema | void = void, +> = { /** * Fields allowed to be selected. * Tip: Use false to exclude field @@ -231,7 +261,9 @@ export type SelectRule; + validate?( + args: SelectRequestData, + ): SelectRequestData | Promise; }; export type CommonInsertUpdateRule< @@ -279,7 +311,9 @@ export type InsertRule< * Validation logic to check/update data after the insert. * Happens in the same transaction so upon throwing an error the record will be deleted (not committed) */ - postValidate?: S extends DBSchema ? PostValidateRow, S> : PostValidateRowBasic; + postValidate?: S extends DBSchema + ? PostValidateRow, S> + : PostValidateRowBasic; /** * If defined then only nested inserts from these tables are allowed @@ -331,16 +365,23 @@ export type UpdateRule< /** * Validation logic to check/update data for each request */ - validate?: S extends DBSchema ? ValidateUpdateRow : ValidateUpdateRowBasic; + validate?: S extends DBSchema + ? ValidateUpdateRow + : ValidateUpdateRowBasic; /** * Validation logic to check/update data after the insert. * Happens in the same transaction so upon throwing an error the record will be deleted (not committed) */ - postValidate?: S extends DBSchema ? PostValidateRow, S> : PostValidateRowBasic; + postValidate?: S extends DBSchema + ? PostValidateRow, S> + : PostValidateRowBasic; }; -export type DeleteRule = { +export type DeleteRule< + Cols extends AnyObject = AnyObject, + S extends DBSchema | void = void, +> = { /** * Filter added to every query (e.g. 
user_id) to restrict access */ @@ -407,7 +448,10 @@ export type TableRule< sync?: SyncRule; subscribe?: SubscribeRule; }; -export type PublishViewRule = { +export type PublishViewRule< + Col extends AnyObject = AnyObject, + S extends DBSchema | void = void, +> = { select?: SelectRule | PublishAllOrNothing; getColumns?: PublishAllOrNothing; getInfo?: PublishAllOrNothing; @@ -451,12 +495,19 @@ export type PublishParams = { export type RequestParams = { dbo?: DBHandlerServer; socket?: any }; export type PublishAllOrNothing = boolean | "*" | null; export type PublishObject = { - [table_name: string]: PublishTableRule | PublishViewRule | PublishAllOrNothing; + [table_name: string]: + | PublishTableRule + | PublishViewRule + | PublishAllOrNothing; }; export type ParsedPublishTables = { [table_name: string]: ParsedPublishTable; }; -export type PublishedResult = PublishAllOrNothing | PublishFullyTyped; +export type PublishedResult = + | PublishAllOrNothing + | PublishFullyTyped; export type Publish = | PublishedResult - | ((params: PublishParams) => Awaitable>); + | (( + params: PublishParams, + ) => Awaitable>); diff --git a/lib/RestApi.ts b/lib/RestApi.ts index 01e58e17..499f3acd 100644 --- a/lib/RestApi.ts +++ b/lib/RestApi.ts @@ -4,7 +4,11 @@ import { HTTPCODES } from "./Auth/AuthHandler"; import { ExpressReq, ExpressRes } from "./Auth/AuthTypes"; import { getSerializedClientErrorFromPGError } from "./DboBuilder/DboBuilder"; import { Prostgles } from "./Prostgles"; -import { runClientMethod, runClientRequest, runClientSqlRequest } from "./runClientRequest"; +import { + runClientMethod, + runClientRequest, + runClientSqlRequest, +} from "./runClientRequest"; import { VoidFunction } from "./SchemaWatch/SchemaWatch"; const jsonParser = bodyParser.json(); @@ -49,7 +53,11 @@ export class RestApi { schema: string; }; expressApp: Express; - constructor({ expressApp, routePrefix, prostgles }: RestApiConfig & { prostgles: Prostgles }) { + constructor({ + expressApp, + routePrefix, + prostgles, + }: RestApiConfig & { prostgles: Prostgles }) { this.prostgles = prostgles; this.routes = { db: `${routePrefix}/db/:tableName/:command`, diff --git a/lib/SchemaWatch/SchemaWatch.ts b/lib/SchemaWatch/SchemaWatch.ts index bbc7995d..12df43dc 100644 --- a/lib/SchemaWatch/SchemaWatch.ts +++ b/lib/SchemaWatch/SchemaWatch.ts @@ -1,67 +1,83 @@ import type { DboBuilder } from "../DboBuilder/DboBuilder"; import { EVENT_TRIGGER_TAGS } from "../Event_Trigger_Tags"; import { PubSubManager, log } from "../PubSubManager/PubSubManager"; -import { ValidatedWatchSchemaType, getValidatedWatchSchemaType } from "./getValidatedWatchSchemaType"; -const COMMAND_FIRST_KEYWORDS = EVENT_TRIGGER_TAGS - .map(tag => tag.split(" ")[0]!) 
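
A hypothetical call against the `db` route registered above, assuming `routePrefix` is `"/api"` and bearer-token auth; the JSON payload shape is an assumption, not confirmed API:

```typescript
declare const token: string; // assumed auth token

// Routes follow this.routes.db: `${routePrefix}/db/:tableName/:command`
const res = await fetch("/api/db/tasks/find", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${token}`,
  },
  body: JSON.stringify([{ completed: false }, { limit: 10 }]), // assumed arg shape
});
const rows = await res.json();
```
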
- .filter(tag => tag !== "SELECT"); /** SELECT INTO is not easily detectable with pg-node (command = "SELECT") */ +import { + ValidatedWatchSchemaType, + getValidatedWatchSchemaType, +} from "./getValidatedWatchSchemaType"; +const COMMAND_FIRST_KEYWORDS = EVENT_TRIGGER_TAGS.map( + (tag) => tag.split(" ")[0]!, +).filter( + (tag) => tag !== "SELECT", +); /** SELECT INTO is not easily detectable with pg-node (command = "SELECT") */ -const DB_FALLBACK_COMMANDS = Array.from(new Set(COMMAND_FIRST_KEYWORDS)) - .concat([ - "DO", // Do statement - "COMMIT" // Transaction block - ]); +const DB_FALLBACK_COMMANDS = Array.from(new Set(COMMAND_FIRST_KEYWORDS)).concat( + [ + "DO", // Do statement + "COMMIT", // Transaction block + ], +); export type VoidFunction = () => void; -export type OnSchemaChangeCallback = ((event: { command: string; query: string }) => void); +export type OnSchemaChangeCallback = (event: { + command: string; + query: string; +}) => void; export class SchemaWatch { - dboBuilder: DboBuilder; type: ValidatedWatchSchemaType; - private constructor(dboBuilder: DboBuilder){ + private constructor(dboBuilder: DboBuilder) { this.dboBuilder = dboBuilder; this.type = getValidatedWatchSchemaType(dboBuilder); - if(this.type.watchType === "NONE") { + if (this.type.watchType === "NONE") { this.onSchemaChange = undefined; this.onSchemaChangeFallback = undefined; } - if(this.type.watchType === "DDL_trigger") { + if (this.type.watchType === "DDL_trigger") { this.onSchemaChangeFallback = undefined; } } static create = async (dboBuilder: DboBuilder) => { const instance = new SchemaWatch(dboBuilder); - if(instance.type.watchType === "DDL_trigger") { + if (instance.type.watchType === "DDL_trigger") { await dboBuilder.getPubSubManager(); // TODO finish createSchemaWatchEventTrigger to ensure the query is not used in NOTIFY and exclude happens inside Postgres } return instance; - } + }; /** * Fallback for watchSchema in case of not a superuser (cannot add db event listener) */ - onSchemaChangeFallback: OnSchemaChangeCallback | undefined = async ({ command, query }) => { - - if (typeof query === "string" && query.includes(PubSubManager.EXCLUDE_QUERY_FROM_SCHEMA_WATCH_ID)) { - log("Schema change event excluded from triggers due to EXCLUDE_QUERY_FROM_SCHEMA_WATCH_ID"); + onSchemaChangeFallback: OnSchemaChangeCallback | undefined = async ({ + command, + query, + }) => { + if ( + typeof query === "string" && + query.includes(PubSubManager.EXCLUDE_QUERY_FROM_SCHEMA_WATCH_ID) + ) { + log( + "Schema change event excluded from triggers due to EXCLUDE_QUERY_FROM_SCHEMA_WATCH_ID", + ); return; } - if( - this.type.watchType !== "prostgles_queries" || - !this.onSchemaChange || + if ( + this.type.watchType !== "prostgles_queries" || + !this.onSchemaChange || !DB_FALLBACK_COMMANDS.includes(command) - ) return; + ) + return; + + this.onSchemaChange({ command, query }); + }; - this.onSchemaChange({ command, query }) - } - onSchemaChange: OnSchemaChangeCallback | undefined = async (event) => { - - const { watchSchema, onReady, tsGeneratedTypesDir } = this.dboBuilder.prostgles.opts; + const { watchSchema, onReady, tsGeneratedTypesDir } = + this.dboBuilder.prostgles.opts; if (watchSchema && this.dboBuilder.prostgles.loaded) { log("Schema changed"); const { query, command } = event; @@ -70,7 +86,6 @@ export class SchemaWatch { if (typeof watchSchema === "function") { /* Only call the provided func */ watchSchema(event); - } else if (watchSchema === "hotReloadMode") { if (tsGeneratedTypesDir) { /* Hot reload integration. 
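
A minimal re-derivation of `DB_FALLBACK_COMMANDS` from a small sample of event-trigger tags, mirroring the logic above (`SELECT INTO` is dropped because pg-node reports its command as plain `SELECT`):

```typescript
const SAMPLE_TAGS = ["CREATE TABLE", "ALTER TABLE", "DROP VIEW", "SELECT INTO"] as const;

const firstKeywords = SAMPLE_TAGS.map((tag) => tag.split(" ")[0]!).filter(
  (kw) => kw !== "SELECT",
);
const fallbackCommands = Array.from(new Set(firstKeywords)).concat([
  "DO", // DO blocks can contain DDL
  "COMMIT", // DDL may be hidden inside a transaction block
]);
// => ["CREATE", "ALTER", "DROP", "DO", "COMMIT"]
```

Queries tagged with `EXCLUDE_QUERY_FROM_SCHEMA_WATCH_ID` are skipped before this command check runs.
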
Will only touch tsGeneratedTypesDir */ @@ -79,12 +94,15 @@ export class SchemaWatch { await this.dboBuilder.prostgles.refreshDBO(); this.dboBuilder.prostgles.writeDBSchema(true); } - } else if (watchSchema) { /* Full re-init. Sockets must reconnect */ - console.log("watchSchema: Full re-initialisation", { query }) - this.dboBuilder.prostgles.init(onReady as any, { type: "schema change", query, command }); + console.log("watchSchema: Full re-initialisation", { query }); + this.dboBuilder.prostgles.init(onReady as any, { + type: "schema change", + query, + command, + }); } } }; -} \ No newline at end of file +} diff --git a/lib/SchemaWatch/createSchemaWatchEventTrigger.ts b/lib/SchemaWatch/createSchemaWatchEventTrigger.ts index 366105b5..6dd883bc 100644 --- a/lib/SchemaWatch/createSchemaWatchEventTrigger.ts +++ b/lib/SchemaWatch/createSchemaWatchEventTrigger.ts @@ -1,3 +1,3 @@ // export const createSchemaWatchEventTrigger = () => { -// } \ No newline at end of file +// } diff --git a/lib/SchemaWatch/getValidatedWatchSchemaType.ts b/lib/SchemaWatch/getValidatedWatchSchemaType.ts index fb749c0e..535144fa 100644 --- a/lib/SchemaWatch/getValidatedWatchSchemaType.ts +++ b/lib/SchemaWatch/getValidatedWatchSchemaType.ts @@ -1,45 +1,55 @@ import type { DboBuilder } from "../DboBuilder/DboBuilder"; import { OnSchemaChangeCallback } from "./SchemaWatch"; -export type ValidatedWatchSchemaType = -| { watchType: "NONE" } -| { watchType: "DDL_trigger"; onChange?: OnSchemaChangeCallback; } -| { watchType: "prostgles_queries"; onChange?: OnSchemaChangeCallback; isFallbackFromDDL: boolean; } +export type ValidatedWatchSchemaType = + | { watchType: "NONE" } + | { watchType: "DDL_trigger"; onChange?: OnSchemaChangeCallback } + | { + watchType: "prostgles_queries"; + onChange?: OnSchemaChangeCallback; + isFallbackFromDDL: boolean; + }; + +export const getValidatedWatchSchemaType = ( + dboBuilder: DboBuilder, +): ValidatedWatchSchemaType => { + const { watchSchema, watchSchemaType, tsGeneratedTypesDir, disableRealtime } = + dboBuilder.prostgles.opts; + if (!watchSchema) return { watchType: "NONE" }; -export const getValidatedWatchSchemaType = (dboBuilder: DboBuilder): ValidatedWatchSchemaType => { - const {watchSchema, watchSchemaType, tsGeneratedTypesDir, disableRealtime } = dboBuilder.prostgles.opts; - if(!watchSchema) return { watchType: "NONE" }; - if (watchSchema === "hotReloadMode" && !tsGeneratedTypesDir) { throw "tsGeneratedTypesDir option is needed for watchSchema: hotReloadMode to work "; } - const onChange = typeof watchSchema === "function"? watchSchema : undefined; - - if(watchSchemaType === "DDL_trigger" || !watchSchemaType){ - if(!dboBuilder.prostgles.isSuperUser || disableRealtime){ + const onChange = typeof watchSchema === "function" ? watchSchema : undefined; - if(watchSchemaType === "DDL_trigger"){ - console.error(`watchSchemaType "DDL_trigger" cannot be used because db user is not a superuser. Will fallback to watchSchemaType "prostgles_queries" `) + if (watchSchemaType === "DDL_trigger" || !watchSchemaType) { + if (!dboBuilder.prostgles.isSuperUser || disableRealtime) { + if (watchSchemaType === "DDL_trigger") { + console.error( + `watchSchemaType "DDL_trigger" cannot be used because db user is not a superuser. Will fallback to watchSchemaType "prostgles_queries" `, + ); } else { - console.warn(`watchSchema fallback to watchSchemaType "prostgles_queries" due to ${disableRealtime? 
"disableRealtime setting" : "non-superuser"}`) + console.warn( + `watchSchema fallback to watchSchemaType "prostgles_queries" due to ${disableRealtime ? "disableRealtime setting" : "non-superuser"}`, + ); } return { watchType: "prostgles_queries", onChange, - isFallbackFromDDL: true - } + isFallbackFromDDL: true, + }; } return { watchType: "DDL_trigger", - onChange + onChange, }; } - + return { watchType: watchSchemaType, isFallbackFromDDL: false, - onChange - } -} + onChange, + }; +}; diff --git a/lib/SchemaWatch/getWatchSchemaTagList.ts b/lib/SchemaWatch/getWatchSchemaTagList.ts index 1360eec4..5d75d9cf 100644 --- a/lib/SchemaWatch/getWatchSchemaTagList.ts +++ b/lib/SchemaWatch/getWatchSchemaTagList.ts @@ -2,26 +2,34 @@ import { getKeys, isObject } from "prostgles-types"; import { EVENT_TRIGGER_TAGS } from "../Event_Trigger_Tags"; import { ProstglesInitOptions } from "../ProstglesTypes"; -export const getWatchSchemaTagList = (watchSchema: ProstglesInitOptions["watchSchema"]) => { - if(!watchSchema) return undefined; +export const getWatchSchemaTagList = ( + watchSchema: ProstglesInitOptions["watchSchema"], +) => { + if (!watchSchema) return undefined; - if(watchSchema === "*"){ + if (watchSchema === "*") { return EVENT_TRIGGER_TAGS.slice(0); - } - if (isObject(watchSchema) && typeof watchSchema !== "function"){ + } + if (isObject(watchSchema) && typeof watchSchema !== "function") { const watchSchemaKeys = getKeys(watchSchema); - const isInclusive = Object.values(watchSchema).every(v => v); - return EVENT_TRIGGER_TAGS - .slice(0) - .filter(v => { - const matches = watchSchemaKeys.includes(v); - return isInclusive? matches : !matches; - }); + const isInclusive = Object.values(watchSchema).every((v) => v); + return EVENT_TRIGGER_TAGS.slice(0).filter((v) => { + const matches = watchSchemaKeys.includes(v); + return isInclusive ? 
matches : !matches; + }); } - const coreTags: typeof EVENT_TRIGGER_TAGS[number][] = [ - 'COMMENT', 'CREATE TABLE', 'ALTER TABLE', 'DROP TABLE', 'CREATE VIEW', - 'DROP VIEW', 'ALTER VIEW', 'CREATE TABLE AS', 'SELECT INTO', 'CREATE POLICY' + const coreTags: (typeof EVENT_TRIGGER_TAGS)[number][] = [ + "COMMENT", + "CREATE TABLE", + "ALTER TABLE", + "DROP TABLE", + "CREATE VIEW", + "DROP VIEW", + "ALTER VIEW", + "CREATE TABLE AS", + "SELECT INTO", + "CREATE POLICY", ]; return coreTags; -} \ No newline at end of file +}; diff --git a/lib/SyncReplication.ts b/lib/SyncReplication.ts index a55d5ebb..535d9e1c 100644 --- a/lib/SyncReplication.ts +++ b/lib/SyncReplication.ts @@ -1,5 +1,10 @@ - -import { PubSubManager, SyncParams, pickKeys, omitKeys, log } from "./PubSubManager/PubSubManager"; +import { + PubSubManager, + SyncParams, + pickKeys, + omitKeys, + log, +} from "./PubSubManager/PubSubManager"; import { OrderBy, WAL, AnyObject, SyncBatchParams } from "prostgles-types"; import { TableHandler } from "./DboBuilder/TableHandler/TableHandler"; @@ -19,7 +24,7 @@ export type ServerSyncInfo = Partial<{ * PG count is ussually string due to bigint */ s_count: number | string; -}> +}>; export type SyncBatchInfo = Partial<{ from_synced: number | null; @@ -27,44 +32,65 @@ export type SyncBatchInfo = Partial<{ end_offset: number | null; }>; -export type onSyncRequestResponse = { - onSyncRequest?: ClientSyncInfo -} | { - err: AnyObject | string; -}; +export type onSyncRequestResponse = + | { + onSyncRequest?: ClientSyncInfo; + } + | { + err: AnyObject | string; + }; export type ClientExpressData = ClientSyncInfo & { data?: AnyObject[]; deleted?: AnyObject[]; -} +}; -function getNumbers(numberArr: (null | undefined | string | number)[]): number[] { - return numberArr.filter(v => v !== null && v !== undefined && Number.isFinite(+v)) as any; +function getNumbers( + numberArr: (null | undefined | string | number)[], +): number[] { + return numberArr.filter( + (v) => v !== null && v !== undefined && Number.isFinite(+v), + ) as any; } /** * Server or client requested data sync */ -export async function syncData (this: PubSubManager, sync: SyncParams, clientData: ClientExpressData | undefined, source: "trigger" | "client"){ - await this._log({ +export async function syncData( + this: PubSubManager, + sync: SyncParams, + clientData: ClientExpressData | undefined, + source: "trigger" | "client", +) { + await this._log({ type: "sync", - command: "syncData", + command: "syncData", tableName: sync.table_name, sid: sync.sid, source, ...pickKeys(sync, ["socket_id", "condition", "last_synced", "is_syncing"]), lr: JSON.stringify(sync.lr), - connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map(s => s.id), + connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map( + (s) => s.id, + ), localParams: undefined, duration: -1, - socketId: sync.socket_id + socketId: sync.socket_id, }); const { - socket_id, channel_name, table_name, filter, - table_rules, allow_delete = false, params, - synced_field, id_fields = [], batch_size, - wal, throttle = 0 + socket_id, + channel_name, + table_name, + filter, + table_rules, + allow_delete = false, + params, + synced_field, + id_fields = [], + batch_size, + wal, + throttle = 0, } = sync; const socket = this.sockets[socket_id]; @@ -73,93 +99,149 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat } const sync_fields = [synced_field, ...id_fields.sort()], - orderByAsc: OrderBy = sync_fields.reduce((a, v) => ({ ...a, [v]: true }), {}), + 
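
The `SyncParams` destructured above originate from a publish `sync` rule, which must supply at least `id_fields` and `synced_field`. A sketch with assumed table and column names (the exact set of optional keys is an assumption):

```typescript
// batch_size falls back to DEFAULT_SYNC_BATCH_SIZE when omitted;
// allow_delete defaults to false, so client deletes are ignored.
const publish = {
  tasks: {
    select: "*",
    sync: {
      id_fields: ["id"],
      synced_field: "last_modified", // numeric timestamp column
      batch_size: 50,
      allow_delete: false,
    },
  },
};
```
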
orderByAsc: OrderBy = sync_fields.reduce( + (a, v) => ({ ...a, [v]: true }), + {}, + ), rowsIdsMatch = (a?: AnyObject, b?: AnyObject) => { - return a && b && !id_fields.find(key => (a[key]).toString() !== (b[key]).toString()) + return ( + a && + b && + !id_fields.find((key) => a[key].toString() !== b[key].toString()) + ); }, rowsFullyMatch = (a?: AnyObject, b?: AnyObject) => { - return rowsIdsMatch(a, b) && a?.[synced_field].toString() === b?.[synced_field].toString(); + return ( + rowsIdsMatch(a, b) && + a?.[synced_field].toString() === b?.[synced_field].toString() + ); }, - getServerRowInfo = async (args: SyncBatchParams = {}): Promise => { + getServerRowInfo = async ( + args: SyncBatchParams = {}, + ): Promise => { const { from_synced = null, to_synced = null, offset = 0, limit } = args; const _filter: AnyObject = { ...filter }; if (from_synced || to_synced) { _filter[synced_field] = { ...(from_synced ? { $gte: from_synced } : {}), - ...(to_synced ? { $lte: to_synced } : {}) - } + ...(to_synced ? { $lte: to_synced } : {}), + }; } - if (this.dbo?.[table_name]?.find === undefined || this?.dbo?.[table_name]?.count === undefined) { + if ( + this.dbo?.[table_name]?.find === undefined || + this?.dbo?.[table_name]?.count === undefined + ) { throw `dbo.${table_name}.find or .count are missing or not allowed`; } - const first_rows = await this.dbo?.[table_name]?.find?.(_filter, { orderBy: orderByAsc, select: sync_fields, limit, offset }, undefined, table_rules); + const first_rows = await this.dbo?.[table_name]?.find?.( + _filter, + { orderBy: orderByAsc, select: sync_fields, limit, offset }, + undefined, + table_rules, + ); const last_rows = first_rows?.slice(-1); // Why not logic below? // const last_rows = await _this?.dbo[table_name]?.find?.(_filter, { orderBy: (orderByDesc as OrderBy), select: sync_fields, limit: 1, offset: -offset || 0 }, null, table_rules); - const count = await this.dbo?.[table_name]?.count?.(_filter, undefined, undefined, table_rules); - - return { s_fr: first_rows?.[0] || null, s_lr: last_rows?.[0] || null, s_count: count } + const count = await this.dbo?.[table_name]?.count?.( + _filter, + undefined, + undefined, + table_rules, + ); + + return { + s_fr: first_rows?.[0] || null, + s_lr: last_rows?.[0] || null, + s_count: count, + }; }, getClientRowInfo = (args: SyncBatchInfo = {}) => { const { from_synced = null, to_synced = null, end_offset = null } = args; const res = new Promise((resolve, reject) => { - const onSyncRequest = { from_synced, to_synced, end_offset };//, forReal: true }; - socket.emit(channel_name, { onSyncRequest }, (resp?: onSyncRequestResponse) => { - if (resp && "onSyncRequest" in resp && resp?.onSyncRequest) { - const c_fr = resp.onSyncRequest.c_fr, - c_lr = resp.onSyncRequest.c_lr, - c_count = resp.onSyncRequest.c_count; - - // console.log(onSyncRequest, { c_fr, c_lr, c_count }, socket._user); - return resolve({ c_fr, c_lr, c_count }); - } else if (resp && "err" in resp && resp?.err) { - reject(resp.err); - } - }); + const onSyncRequest = { from_synced, to_synced, end_offset }; //, forReal: true }; + socket.emit( + channel_name, + { onSyncRequest }, + (resp?: onSyncRequestResponse) => { + if (resp && "onSyncRequest" in resp && resp?.onSyncRequest) { + const c_fr = resp.onSyncRequest.c_fr, + c_lr = resp.onSyncRequest.c_lr, + c_count = resp.onSyncRequest.c_count; + + // console.log(onSyncRequest, { c_fr, c_lr, c_count }, socket._user); + return resolve({ c_fr, c_lr, c_count }); + } else if (resp && "err" in resp && resp?.err) { + reject(resp.err); + 
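
The client's acknowledgement to the `onSyncRequest` probe above carries its own first/last rows and count, which `getLastSynced` later compares against `s_fr`/`s_lr`/`s_count`. A sketch of the reply, with assumed values:

```typescript
// c_fr / c_lr: the client's first and last rows when ordered by
// [synced_field, ...id_fields]; c_count: total rows matching the filter.
const onSyncRequestReply = {
  onSyncRequest: {
    c_fr: { id: "a1", last_modified: 1700000000001 },
    c_lr: { id: "z9", last_modified: 1700000000555 },
    c_count: 42,
  },
};
// An { err } reply instead rejects the getClientRowInfo promise above.
```
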
} + }, + ); }); return res; }, getClientData = (from_synced = 0, offset = 0): Promise => { return new Promise((resolve, reject) => { - const onPullRequest = { from_synced: from_synced || 0, offset: offset || 0, limit: batch_size }; - socket.emit(channel_name, { onPullRequest }, async (resp?: { data?: AnyObject[] }) => { - if (resp && resp.data && Array.isArray(resp.data)) { - // console.log({ onPullRequest, resp }, socket._user) - resolve(sortClientData(resp.data)); - } else { - reject("unexpected onPullRequest response: " + JSON.stringify(resp)); - } - }); + const onPullRequest = { + from_synced: from_synced || 0, + offset: offset || 0, + limit: batch_size, + }; + socket.emit( + channel_name, + { onPullRequest }, + async (resp?: { data?: AnyObject[] }) => { + if (resp && resp.data && Array.isArray(resp.data)) { + // console.log({ onPullRequest, resp }, socket._user) + resolve(sortClientData(resp.data)); + } else { + reject( + "unexpected onPullRequest response: " + JSON.stringify(resp), + ); + } + }, + ); }); function sortClientData(data: AnyObject[]) { return data.sort((a, b) => { /* Order by increasing synced and ids (sorted alphabetically) */ - return (+a[synced_field] - +b[synced_field]) || id_fields.sort().map(idKey => a[idKey] < b[idKey] ? -1 : a[idKey] > b[idKey] ? 1 : 0).find(v => v) || 0; + return ( + +a[synced_field] - +b[synced_field] || + id_fields + .sort() + .map((idKey) => + a[idKey] < b[idKey] ? -1 : a[idKey] > b[idKey] ? 1 : 0, + ) + .find((v) => v) || + 0 + ); }); } }, - getServerData = async (from_synced = 0, offset = 0): Promise => { + getServerData = async ( + from_synced = 0, + offset = 0, + ): Promise => { const _filter = { ...filter, - [synced_field]: { $gte: from_synced || 0 } + [synced_field]: { $gte: from_synced || 0 }, }; - if (!this?.dbo?.[table_name]?.find) throw "_this?.dbo?.[table_name]?.find is missing"; + if (!this?.dbo?.[table_name]?.find) + throw "_this?.dbo?.[table_name]?.find is missing"; try { const res = this?.dbo?.[table_name]?.find?.( _filter, { select: params.select, - orderBy: (orderByAsc as OrderBy), + orderBy: orderByAsc as OrderBy, offset: offset || 0, - limit: batch_size + limit: batch_size, }, undefined, - table_rules + table_rules, ); if (!res) throw "_this?.dbo?.[table_name]?.find is missing"; @@ -167,140 +249,196 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat return res; } catch (e) { console.error("Sync getServerData failed: ", e); - throw "INTERNAL ERROR" + throw "INTERNAL ERROR"; } }, deleteData = async (deleted: AnyObject[]) => { // console.log("deleteData deleteData deleteData " + deleted.length); if (allow_delete) { - return Promise.all(deleted.map(async d => { - const id_filter = pickKeys(d, id_fields); - try { - await (this.dbo[table_name] as TableHandler).delete(id_filter, undefined, undefined, table_rules); - return 1; - } catch (e) { - console.error(e) - } - return 0; - })) + return Promise.all( + deleted.map(async (d) => { + const id_filter = pickKeys(d, id_fields); + try { + await (this.dbo[table_name] as TableHandler).delete( + id_filter, + undefined, + undefined, + table_rules, + ); + return 1; + } catch (e) { + console.error(e); + } + return 0; + }), + ); } else { - console.warn("client tried to delete data without permission (allow_delete is false)") + console.warn( + "client tried to delete data without permission (allow_delete is false)", + ); } return false; }, - /** - * Upserts the given client data where synced_field is higher than on server + * Upserts the given client data 
where synced_field is higher than on server */ upsertData = async (data: AnyObject[]) => { - const start = Date.now(); - const result = await this.dboBuilder.getTX(async (dbTX) => { - const tbl = dbTX[table_name] as TableHandler; - const existingData = await tbl.find( - { $or: data.map(d => pickKeys(d, id_fields)) }, - { - select: [synced_field, ...id_fields], - orderBy: (orderByAsc as OrderBy), - }, - undefined, - table_rules - ); - let inserts = data.filter(d => !existingData.find(ed => rowsIdsMatch(ed, d))); - let updates = data.filter(d => existingData.find(ed => rowsIdsMatch(ed, d) && +ed[synced_field] < +d[synced_field])); - try { - if (!table_rules) throw "table_rules missing"; - - if (table_rules.update && updates.length) { - const updateData: [any, any][] = []; - await Promise.all(updates.map(upd => { - const id_filter = pickKeys(upd, id_fields); - const syncSafeFilter = { $and: [id_filter, { [synced_field]: { "<": upd[synced_field] } }] } - - updateData.push([syncSafeFilter, omitKeys(upd, id_fields)]) - })); - await tbl.updateBatch(updateData, { removeDisallowedFields: true }, undefined, table_rules); - } else { - updates = []; - } - - if (table_rules.insert && inserts.length) { - await tbl.insert(inserts, { removeDisallowedFields: true }, undefined, table_rules); - } else { - inserts = []; + const result = await this.dboBuilder + .getTX(async (dbTX) => { + const tbl = dbTX[table_name] as TableHandler; + const existingData = await tbl.find( + { $or: data.map((d) => pickKeys(d, id_fields)) }, + { + select: [synced_field, ...id_fields], + orderBy: orderByAsc as OrderBy, + }, + undefined, + table_rules, + ); + let inserts = data.filter( + (d) => !existingData.find((ed) => rowsIdsMatch(ed, d)), + ); + let updates = data.filter((d) => + existingData.find( + (ed) => + rowsIdsMatch(ed, d) && +ed[synced_field] < +d[synced_field], + ), + ); + try { + if (!table_rules) throw "table_rules missing"; + + if (table_rules.update && updates.length) { + const updateData: [any, any][] = []; + await Promise.all( + updates.map((upd) => { + const id_filter = pickKeys(upd, id_fields); + const syncSafeFilter = { + $and: [ + id_filter, + { [synced_field]: { "<": upd[synced_field] } }, + ], + }; + + updateData.push([syncSafeFilter, omitKeys(upd, id_fields)]); + }), + ); + await tbl.updateBatch( + updateData, + { removeDisallowedFields: true }, + undefined, + table_rules, + ); + } else { + updates = []; + } + + if (table_rules.insert && inserts.length) { + await tbl.insert( + inserts, + { removeDisallowedFields: true }, + undefined, + table_rules, + ); + } else { + inserts = []; + } + + return { inserts, updates }; + } catch (e) { + console.trace(e); + throw e; } + }) + .then(({ inserts, updates }) => { + log( + `upsertData: inserted( ${inserts.length} ) updated( ${updates.length} ) total( ${data.length} ) \n last insert ${JSON.stringify(inserts.at(-1))} \n last update ${JSON.stringify(updates.at(-1))}`, + ); + return { + inserted: inserts.length, + updated: updates.length, + total: data.length, + }; + }) + .catch((err) => { + console.trace( + "Something went wrong with syncing to server: \n ->", + err, + data.length, + id_fields, + ); + return Promise.reject( + "Something went wrong with syncing to server: ", + ); + }); - return { inserts, updates }; - } catch (e) { - console.trace(e); - throw e; - } - - }).then(({ inserts, updates }) => { - log(`upsertData: inserted( ${inserts.length} ) updated( ${updates.length} ) total( ${data.length} ) \n last insert ${JSON.stringify(inserts.at(-1))} \n last update 
${JSON.stringify(updates.at(-1))}`); - return { inserted: inserts.length, updated: updates.length, total: data.length }; - }) - .catch(err => { - console.trace("Something went wrong with syncing to server: \n ->", err, data.length, id_fields); - return Promise.reject("Something went wrong with syncing to server: ") - }); - - await this._log({ - type: "sync", - command: "upsertData", - tableName: sync.table_name, - rows: data.length, + await this._log({ + type: "sync", + command: "upsertData", + tableName: sync.table_name, + rows: data.length, socketId: socket_id, sid: sync.sid, duration: Date.now() - start, - connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map(s => s.id) + connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map( + (s) => s.id, + ), }); return result; }, - /** * Pushes the given data to client - * @param isSynced = true if + * @param isSynced = true if */ - pushData = async (data?: AnyObject[], isSynced = false, err: any = null) => { + pushData = async ( + data?: AnyObject[], + isSynced = false, + err: any = null, + ) => { const start = Date.now(); const result = await new Promise((resolve, reject) => { - socket.emit(channel_name, { data, isSynced }, (resp?: { ok: boolean }) => { - - if (resp && resp.ok) { - // console.log("PUSHED to client: fr/lr", data[0], data[data.length - 1]); - resolve({ pushed: data?.length, resp }) - } else { - reject(resp); - console.error("Unexpected response"); - } - }); + socket.emit( + channel_name, + { data, isSynced }, + (resp?: { ok: boolean }) => { + if (resp && resp.ok) { + // console.log("PUSHED to client: fr/lr", data[0], data[data.length - 1]); + resolve({ pushed: data?.length, resp }); + } else { + reject(resp); + console.error("Unexpected response"); + } + }, + ); }); - await this._log({ - type: "sync", - command: "pushData", - tableName: sync.table_name, - rows: data?.length ?? 0, + await this._log({ + type: "sync", + command: "pushData", + tableName: sync.table_name, + rows: data?.length ?? 0, socketId: socket_id, duration: Date.now() - start, sid: sync.sid, - connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map(s => s.id) + connectedSocketIds: this.dboBuilder.prostgles.connectedSockets.map( + (s) => s.id, + ), }); return result; }, - /** - * Returns the lowest synced_field between server and client by checking client and server sync data. + * Returns the lowest synced_field between server and client by checking client and server sync data. 
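   * A minimal illustration (hypothetical values, not part of this diff):
   * client rows  [{ id: 1, synced: 10 }, { id: 2, synced: 95 }]
   * server rows  [{ id: 1, synced: 10 }, { id: 2, synced: 120 }]
   * First rows match but last rows differ, so syncing resumes from 10 instead of from 0.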
* If last rows don't match it will find an earlier matching last row and use that last matching from_synced * If no rows or fully synced (c_lr and s_lr match) then returns null */ - getLastSynced = async (clientSyncInfo?: ClientSyncInfo): Promise => { - + getLastSynced = async ( + clientSyncInfo?: ClientSyncInfo, + ): Promise => { // Get latest row info - const { c_fr, c_lr, c_count } = clientSyncInfo || await getClientRowInfo(); + const { c_fr, c_lr, c_count } = + clientSyncInfo || (await getClientRowInfo()); const { s_fr, s_lr, s_count } = await getServerRowInfo(); // console.log("getLastSynced", clientData, socket._user ) @@ -308,7 +446,8 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat let result = null; /* Nothing to sync */ - if (!c_fr && !s_fr || rowsFullyMatch(c_lr, s_lr)) { // c_count === s_count && + if ((!c_fr && !s_fr) || rowsFullyMatch(c_lr, s_lr)) { + // c_count === s_count && // sync.last_synced = null; result = null; @@ -316,38 +455,49 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat } else if (!rowsFullyMatch(c_fr, s_fr)) { if (c_fr && s_fr) { result = Math.min(c_fr[synced_field], s_fr[synced_field]); - } else if (c_fr || s_fr) { result = (c_fr || s_fr)[synced_field]; } /* Sync from last matching synced value */ } else if (rowsFullyMatch(c_fr, s_fr)) { - if (s_lr && c_lr) { - result = Math.min(...getNumbers([c_lr[synced_field], s_lr[synced_field]])); + result = Math.min( + ...getNumbers([c_lr[synced_field], s_lr[synced_field]]), + ); } else { - result = Math.min(...getNumbers([c_fr[synced_field], s_fr?.[synced_field]])); + result = Math.min( + ...getNumbers([c_fr[synced_field], s_fr?.[synced_field]]), + ); } const min_count = Math.min(...getNumbers([c_count, s_count])); - let end_offset = 1;// Math.min(s_count, c_count) - 1; + let end_offset = 1; // Math.min(s_count, c_count) - 1; let step = 0; while (min_count > 5 && end_offset < min_count) { - const { c_lr = null } = await getClientRowInfo({ from_synced: 0, to_synced: result, end_offset }); + const { c_lr = null } = await getClientRowInfo({ + from_synced: 0, + to_synced: result, + end_offset, + }); // console.log("getLastSynced... 
end_offset > " + end_offset); let server_row; if (c_lr) { const _filter: AnyObject = {}; - sync_fields.map(key => { + sync_fields.map((key) => { _filter[key] = c_lr[key]; }); - server_row = await this?.dbo?.[table_name]?.find?.(_filter, { select: sync_fields, limit: 1 }, undefined, table_rules); + server_row = await this?.dbo?.[table_name]?.find?.( + _filter, + { select: sync_fields, limit: 1 }, + undefined, + table_rules, + ); } - // if(rowsFullyMatch(c_lr, s_lr)){ //c_count === s_count && + // if(rowsFullyMatch(c_lr, s_lr)){ //c_count === s_count && if (server_row && server_row.length) { server_row = server_row[0]; @@ -365,18 +515,25 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat return result; }, - updateSyncLR = (data: AnyObject) => { if (data.length) { const lastRow = data[data.length - 1]; - if (sync.lr?.[synced_field] && +sync.lr?.[synced_field] > +lastRow[synced_field]) { - console.error({ syncIssue: "sync.lr[synced_field] is greater than lastRow[synced_field]" }, sync.table_name) + if ( + sync.lr?.[synced_field] && + +sync.lr?.[synced_field] > +lastRow[synced_field] + ) { + console.error( + { + syncIssue: + "sync.lr[synced_field] is greater than lastRow[synced_field]", + }, + sync.table_name, + ); } sync.lr = lastRow; sync.last_synced = +sync.lr?.[synced_field]; } }, - /** * Will push pull sync between client and server from a given from_synced value */ @@ -387,14 +544,17 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat min_synced = from_synced || 0, max_synced = from_synced; - let inserted = 0, updated = 0, pushed = 0, deleted = 0, total = 0; + let inserted = 0, + updated = 0, + pushed = 0, + deleted = 0, + total = 0; // console.log("syncBatch", from_synced) while (canContinue) { const cData = await getClientData(min_synced, offset); - if (cData.length) { const res = await upsertData(cData); inserted += res.inserted; @@ -406,30 +566,42 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat sData = await getServerData(min_synced, offset); } catch (e) { console.trace("sync getServerData err", e); - await pushData(undefined, undefined, "Internal error. Check server logs"); - throw " d" + await pushData( + undefined, + undefined, + "Internal error. 
Check server logs",
+        );
+        throw " d";
       }

       // console.log("allow_delete", table_rules.delete);
       if (allow_delete && table_rules?.delete) {
-        const to_delete = sData.filter(d => {
-          !cData.find(c => rowsIdsMatch(c, d))
+        const to_delete = sData.filter((d) => {
+          // the callback was missing a `return`, so it matched nothing and no rows were ever deleted
+          return !cData.find((c) => rowsIdsMatch(c, d));
         });
-        await Promise.all(to_delete.map(d => {
-          deleted++;
-          return (this.dbo[table_name] as TableHandler).delete(pickKeys(d, id_fields), {}, undefined, table_rules);
-        }));
+        await Promise.all(
+          to_delete.map((d) => {
+            deleted++;
+            return (this.dbo[table_name] as TableHandler).delete(
+              pickKeys(d, id_fields),
+              {},
+              undefined,
+              table_rules,
+            );
+          }),
+        );
         sData = await getServerData(min_synced, offset);
       }

-      const forClient = sData.filter(s => {
-        return !cData.find(c =>
-          rowsIdsMatch(c, s) &&
-          +c[synced_field] >= +s[synced_field]
+      const forClient = sData.filter((s) => {
+        return !cData.find(
+          (c) => rowsIdsMatch(c, s) && +c[synced_field] >= +s[synced_field],
         );
       });

       if (forClient.length) {
-        const res: any = await pushData(forClient.filter(d => !sync.wal || !sync.wal.isInHistory(d)));
+        const res: any = await pushData(
+          forClient.filter((d) => !sync.wal || !sync.wal.isInHistory(d)),
+        );
         pushed += res.pushed;
       }

@@ -443,7 +615,10 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat
       canContinue = sData.length >= limit;
       // console.log(`sData ${sData.length} limit ${limit}`);
     }
-    log(`server.syncBatch ${table_name}: inserted( ${inserted} ) updated( ${updated} ) deleted( ${deleted} ) pushed to client( ${pushed} ) total( ${total} )`, socket._user );
+    log(
+      `server.syncBatch ${table_name}: inserted( ${inserted} ) updated( ${updated} ) deleted( ${deleted} ) pushed to client( ${pushed} ) total( ${total} )`,
+      socket._user,
+    );

     return true;
   };

@@ -451,7 +626,10 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat
   if (!wal) {
     /* Used to throttle and merge incoming updates */
     sync.wal = new WAL({
-      id_fields, synced_field, throttle, batch_size,
+      id_fields,
+      synced_field,
+      throttle,
+      batch_size,
       DEBUG_MODE: this.dboBuilder.prostgles.opts.DEBUG_MODE,
       onSendStart: () => {
         sync.is_syncing = true;
@@ -484,7 +662,7 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat
         */
        this.syncData(sync, undefined, source);
       },
-    })
+    });
   }

   /* Debounce sync requests */
@@ -495,7 +673,7 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat
       this.syncTimeout = undefined;
       // console.log("SYNC FROM TIMEOUT")
       this.syncData(sync, undefined, source);
-    }, throttle)
+    }, throttle);
   }
   // console.log("SYNC THROTTLE")
   return;
@@ -508,15 +686,23 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat
   * Add to WAL manager which will sync at the end
   */
   if (clientData) {
-    if (clientData.data && Array.isArray(clientData.data) && clientData.data.length) {
+    if (
+      clientData.data &&
+      Array.isArray(clientData.data) &&
+      clientData.data.length
+    ) {
       if (!sync.wal) throw "sync.wal missing";
-      sync.wal.addData(clientData.data.map(d => ({ current: d })));
+      sync.wal.addData(clientData.data.map((d) => ({ current: d })));
       return;
       // await upsertData(clientData.data, true);

       /* Not expecting this anymore.
use normal db.table.delete channel */ - } else if (clientData.deleted && Array.isArray(clientData.deleted) && clientData.deleted.length) { + } else if ( + clientData.deleted && + Array.isArray(clientData.deleted) && + clientData.deleted.length + ) { await deleteData(clientData.deleted); } } else { @@ -554,4 +740,4 @@ export async function syncData (this: PubSubManager, sync: SyncParams, clientDat sync.is_syncing = false; // console.log(`Finished sync for ${table_name}`, socket._user); -} \ No newline at end of file +} diff --git a/lib/TableConfig/TableConfig.ts b/lib/TableConfig/TableConfig.ts index 2050aa36..dfeb6b14 100644 --- a/lib/TableConfig/TableConfig.ts +++ b/lib/TableConfig/TableConfig.ts @@ -1,4 +1,13 @@ -import { asName as _asName, AnyObject, TableInfo, ALLOWED_EXTENSION, ALLOWED_CONTENT_TYPE, isObject, JSONB, ColumnInfo } from "prostgles-types"; +import { + asName as _asName, + AnyObject, + TableInfo, + ALLOWED_EXTENSION, + ALLOWED_CONTENT_TYPE, + isObject, + JSONB, + ColumnInfo, +} from "prostgles-types"; import { isPlainObject, JoinInfo, LocalParams } from "../DboBuilder/DboBuilder"; import { DB, DBHandlerServer, Prostgles } from "../Prostgles"; import { InsertRule, ValidateRowArgs } from "../PublishParser/PublishParser"; @@ -14,39 +23,41 @@ type ColExtraInfo = { export type I18N_Config = { [lang_id in keyof LANG_IDS]: string; -} +}; export const parseI18N = (params: { - config?: I18N_Config | string; - lang?: keyof LANG_IDS | string; + config?: I18N_Config | string; + lang?: keyof LANG_IDS | string; defaultLang: keyof LANG_IDS | string; defaultValue: Def; }): Def | string => { const { config, lang, defaultLang, defaultValue } = params; - if(config){ - if(isPlainObject(config)){ + if (config) { + if (isPlainObject(config)) { //@ts-ignore return config[lang] ?? config[defaultLang]; - } else if(typeof config === "string"){ + } else if (typeof config === "string") { return config; } } return defaultValue; -} +}; type BaseTableDefinition = { info?: { label?: string | I18N_Config; - } + }; dropIfExistsCascade?: boolean; dropIfExists?: boolean; hooks?: { /** - * Hook used to run custom logic before inserting a row. + * Hook used to run custom logic before inserting a row. 
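     * A hypothetical hook (illustrative only, names not from this diff):
     *   getPreInsertRow: async ({ row }) => ({
     *     row: { ...row, created_at: new Date().toISOString() },
     *     onInserted: Promise.resolve(),
     *   })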
* The returned row must satisfy the table schema */ - getPreInsertRow?: (args: GetPreInsertRowArgs) => Promise<{ row: AnyObject; onInserted: Promise; }>; + getPreInsertRow?: ( + args: GetPreInsertRowArgs, + ) => Promise<{ row: AnyObject; onInserted: Promise }>; }; triggers?: { [triggerName: string]: { @@ -77,19 +88,21 @@ type BaseTableDefinition = { END; */ query: string; - } + }; }; -} +}; type LookupTableDefinition = { isLookupTable: { values: { - [id_value: string]: {} | { - [lang_id in keyof LANG_IDS]: string - } - } - } -} + [id_value: string]: + | {} + | { + [lang_id in keyof LANG_IDS]: string; + }; + }; + }; +}; export type BaseColumn = { /** @@ -97,21 +110,20 @@ export type BaseColumn = { */ info?: ColExtraInfo; - label?: string | Partial<{ [lang_id in keyof LANG_IDS]: string; }>; -} + label?: string | Partial<{ [lang_id in keyof LANG_IDS]: string }>; +}; type SQLDefColumn = { - /** * Raw sql statement used in creating/adding column */ sqlDefinition?: string; -} +}; export type BaseColumnTypes = { defaultValue?: any; nullable?: boolean; -} +}; type TextColumn = BaseColumnTypes & { isText: true; @@ -124,44 +136,51 @@ type TextColumn = BaseColumnTypes & { * Value will be lower cased before update/insert */ lowerCased?: boolean; -} +}; export type JSONBColumnDef = (BaseColumnTypes & { /** * If the new schema CHECK fails old rows the update old rows using this function */ // onMigrationFail?: (failedRow: T) => AnyObject | Promise; -}) & ({ - jsonbSchema: JSONB.JSONBSchema; - jsonbSchemaType?: undefined; -} | { - jsonbSchema?: undefined; - jsonbSchemaType: JSONB.ObjectType["type"]; -}) +}) & + ( + | { + jsonbSchema: JSONB.JSONBSchema; + jsonbSchemaType?: undefined; + } + | { + jsonbSchema?: undefined; + jsonbSchemaType: JSONB.ObjectType["type"]; + } + ); /** * Allows referencing media to this table. * Requires this table to have a primary key AND a valid fileTable config */ -type MediaColumn = ({ - +type MediaColumn = { name: string; label?: string; files: "one" | "many"; } & ( - { - + | { /** * https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/accept */ - allowedContentType?: Record, 1> - } | - { - allowedExtensions?: Record, 1> + allowedContentType?: Record< + Partial< + "audio/*" | "video/*" | "image/*" | "text/*" | ALLOWED_CONTENT_TYPE + >, + 1 + >; + } + | { + allowedExtensions?: Record, 1>; } - )); +); -type ReferencedColumn = { +type ReferencedColumn = { /** * Will create a lookup table that this column will reference */ @@ -171,14 +190,14 @@ type ReferencedColumn = { * Defaults to id */ columnName?: string; - } -} + }; +}; type JoinDef = { sourceTable: string; targetTable: string; on: JoinInfo["paths"][number]["on"]; -} +}; /** * Used in specifying a join path to a table. 
This column name can then be used in select @@ -186,54 +205,72 @@ type JoinDef = { type NamedJoinColumn = { label?: string; joinDef: JoinDef[]; -} +}; -type Enum = { +type Enum = { enum: T[] | readonly T[]; - nullable?: boolean; - defaultValue?: T; + nullable?: boolean; + defaultValue?: T; }; -export type ColumnConfig = string | StrictUnion & (SQLDefColumn | ReferencedColumn | TextColumn | JSONBColumnDef | Enum))>; +export type ColumnConfig = + | string + | StrictUnion< + | NamedJoinColumn + | MediaColumn + | (BaseColumn & + ( + | SQLDefColumn + | ReferencedColumn + | TextColumn + | JSONBColumnDef + | Enum + )) + >; export type ColumnConfigs = { - sql: string | BaseColumn & SQLDefColumn; + sql: string | (BaseColumn & SQLDefColumn); join: BaseColumn & NamedJoinColumn; media: BaseColumn & MediaColumn; referenced: BaseColumn & ReferencedColumn; text: BaseColumn & TextColumn; jsonb: BaseColumn & JSONBColumnDef; enum: BaseColumn & Enum; -} +}; type UnionKeys = T extends T ? keyof T : never; -type StrictUnionHelper = T extends any ? T & Partial, keyof T>, never>> : never; -export type StrictUnion = StrictUnionHelper +type StrictUnionHelper = T extends any + ? T & Partial, keyof T>, never>> + : never; +export type StrictUnion = StrictUnionHelper; -export const CONSTRAINT_TYPES = ["PRIMARY KEY", "UNIQUE", "CHECK"] as const; +export const CONSTRAINT_TYPES = ["PRIMARY KEY", "UNIQUE", "CHECK"] as const; export type TableDefinition = { - onMount?: (params: { dbo: DBHandlerServer; _db: DB }) => Promise void; }>; + onMount?: (params: { + dbo: DBHandlerServer; + _db: DB; + }) => Promise void }>; columns?: { - [column_name: string]: ColumnConfig - }, - constraints?: - | string[] + [column_name: string]: ColumnConfig; + }; + constraints?: + | string[] | { - [constraint_name: string]: - | string - | { - type: typeof CONSTRAINT_TYPES[number]; - dropIfExists?: boolean; - /** - * E.g.: - * colname - * col1, col2 - * col1 > col3 - */ - content: string; - } + [constraint_name: string]: + | string + | { + type: (typeof CONSTRAINT_TYPES)[number]; + dropIfExists?: boolean; + /** + * E.g.: + * colname + * col1, col2 + * col1 > col3 + */ + content: string; + }; // & ({ - // } + // } // | { // type: "FOREIGN KEY", // columns: string[]; @@ -241,7 +278,7 @@ export type TableDefinition = { // fcols: string[]; // } // ) - }, + }; /** * Similar to unique constraints but expressions are allowed inside definition @@ -249,7 +286,6 @@ export type TableDefinition = { replaceUniqueIndexes?: boolean; indexes?: { [index_name: string]: { - /** * If true then will drop any existing index with this name * Overrides replaceUniqueIndexes @@ -257,14 +293,14 @@ export type TableDefinition = { replace?: boolean; /** - * Causes the system to check for duplicate values in the table when the index is created (if data already exist) and each time data is added. + * Causes the system to check for duplicate values in the table when the index is created (if data already exist) and each time data is added. * Attempts to insert or update data which would result in duplicate entries will generate an error. */ unique?: boolean; /** - * When this option is used, PostgreSQL will build the index without taking any locks that prevent - * concurrent inserts, updates, or deletes on the table; whereas a standard index build locks out writes (but not reads) on the table until it's done. 
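+     * A hypothetical index entry (illustrative only, not part of this diff):
+     *   indexes: { users_email_idx: { columns: "lower(email)", unique: true, concurrently: true } }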
+ * When this option is used, PostgreSQL will build the index without taking any locks that prevent + * concurrent inserts, updates, or deletes on the table; whereas a standard index build locks out writes (but not reads) on the table until it's done. * There are several caveats to be aware of when using this option — see Building Indexes Concurrently. */ concurrently?: boolean; @@ -288,44 +324,44 @@ export type TableDefinition = { where?: string; /** - * The name of the index method to be used. + * The name of the index method to be used. * Choices are btree, hash, gist, and gin. The default method is btree. */ using?: "btree" | "hash" | "gist" | "gin"; - } - } -} + }; + }; +}; type GetPreInsertRowArgs = Omit & { // preValidate: InsertRule["preValidate"]; validate: InsertRule["validate"]; localParams: LocalParams | undefined; -} +}; /** * Helper utility to create lookup tables for TEXT columns */ export type TableConfig = { - [table_name: string]: BaseTableDefinition & (TableDefinition | LookupTableDefinition); -} + [table_name: string]: BaseTableDefinition & + (TableDefinition | LookupTableDefinition); +}; /** * Will be run between initSQL and fileTable */ export default class TableConfigurator { - instanceId = Date.now() + Math.random(); - + config: TableConfig = {}; get dbo(): DBHandlerServer { - if (!this.prostgles.dbo) throw "this.prostgles.dbo missing" - return this.prostgles.dbo - } + if (!this.prostgles.dbo) throw "this.prostgles.dbo missing"; + return this.prostgles.dbo; + } get db(): DB { - if (!this.prostgles.db) throw "this.prostgles.db missing" - return this.prostgles.db - } - prostgles: Prostgles + if (!this.prostgles.db) throw "this.prostgles.db missing"; + return this.prostgles.db; + } + prostgles: Prostgles; constructor(prostgles: Prostgles) { this.config = (prostgles.opts.tableConfig as any) ?? 
{}; @@ -333,56 +369,80 @@ export default class TableConfigurator { } destroy = async () => { - for await(const { onUnmount } of Object.values(this.tableOnMounts)){ + for await (const { onUnmount } of Object.values(this.tableOnMounts)) { try { await onUnmount(); } catch (error) { console.error(error); } } - } + }; - tableOnMounts: Record void; }> = {}; + tableOnMounts: Record void }> = {}; setTableOnMounts = async () => { this.tableOnMounts = {}; for (const [tableName, tableConfig] of Object.entries(this.config)) { - if("onMount" in tableConfig && tableConfig.onMount){ - const cleanup = await tableConfig.onMount({ dbo: this.dbo, _db: this.db }); - if(cleanup){ + if ("onMount" in tableConfig && tableConfig.onMount) { + const cleanup = await tableConfig.onMount({ + dbo: this.dbo, + _db: this.db, + }); + if (cleanup) { this.tableOnMounts[tableName] = cleanup; } } } - } + }; - getColumnConfig = (tableName: string, colName: string): ColumnConfig | undefined => { + getColumnConfig = ( + tableName: string, + colName: string, + ): ColumnConfig | undefined => { const tconf = this.config?.[tableName]; if (tconf && "columns" in tconf) { return tconf.columns?.[colName]; } return undefined; - } + }; - getTableInfo = (params: { tableName: string; lang?: string }): TableInfo["info"] | undefined => { + getTableInfo = (params: { + tableName: string; + lang?: string; + }): TableInfo["info"] | undefined => { const tconf = this.config?.[params.tableName]; - + return { - label: parseI18N({ config: tconf?.info?.label, lang: params.lang, defaultLang: "en", defaultValue: params.tableName }) - } - } + label: parseI18N({ + config: tconf?.info?.label, + lang: params.lang, + defaultLang: "en", + defaultValue: params.tableName, + }), + }; + }; - getColInfo = (params: { col: string, table: string, lang?: string }): (ColExtraInfo & { label?: string; } & Pick) | undefined => { + getColInfo = (params: { + col: string; + table: string; + lang?: string; + }): + | (ColExtraInfo & { label?: string } & Pick) + | undefined => { const colConf = this.getColumnConfig(params.table, params.col); let result: Partial> = undefined; if (colConf) { - if (isObject(colConf)) { const { jsonbSchema, jsonbSchemaType, info } = colConf; result = { ...(result ?? 
{}), ...info, - ...((jsonbSchema || jsonbSchemaType) && { jsonbSchema: { nullable: colConf.nullable, ...(jsonbSchema || { type: jsonbSchemaType }) } }) - } + ...((jsonbSchema || jsonbSchemaType) && { + jsonbSchema: { + nullable: colConf.nullable, + ...(jsonbSchema || { type: jsonbSchemaType }), + }, + }), + }; /** * Get labels from TableConfig if specified @@ -393,34 +453,35 @@ export default class TableConfigurator { if (["string", "object"].includes(typeof lbl)) { if (typeof lbl === "string") { result ??= {}; - result.label = lbl + result.label = lbl; } else if (lang && (lbl?.[lang as "en"] || lbl?.en)) { result ??= {}; - result.label = (lbl?.[lang as "en"]) || lbl?.en; + result.label = lbl?.[lang as "en"] || lbl?.en; } } - } - } - } - return result; - } + }; - checkColVal = (params: { col: string, table: string, value: any }): void => { + checkColVal = (params: { col: string; table: string; value: any }): void => { const conf = this.getColInfo(params); if (conf) { const { value } = params; const { min, max } = conf; - if (min !== undefined && value !== undefined && value < min) throw `${params.col} must be greater than ${min}` - if (max !== undefined && value !== undefined && value > max) throw `${params.col} must be less than ${max}` + if (min !== undefined && value !== undefined && value < min) + throw `${params.col} must be greater than ${min}`; + if (max !== undefined && value !== undefined && value > max) + throw `${params.col} must be less than ${max}`; } - } + }; - getJoinInfo = (sourceTable: string, targetTable: string): JoinInfo | undefined => { + getJoinInfo = ( + sourceTable: string, + targetTable: string, + ): JoinInfo | undefined => { if ( this.config && sourceTable in this.config && @@ -431,7 +492,7 @@ export default class TableConfigurator { if ("columns" in td && td.columns?.[targetTable]) { const cd = td.columns[targetTable]; if (isObject(cd) && "joinDef" in cd) { - if(!cd.joinDef) throw "cd.joinDef missing" + if (!cd.joinDef) throw "cd.joinDef missing"; const { joinDef } = cd; const res: JoinInfo = { expectOne: false, @@ -439,30 +500,33 @@ export default class TableConfigurator { source: sourceTable, target: targetTable, table, - on + on, })), - } + }; return res; } } } return undefined; - } + }; - getPreInsertRow = async (tableHandler: TableHandler, args: Pick): Promise => { + getPreInsertRow = async ( + tableHandler: TableHandler, + args: Pick, + ): Promise => { const tableHook = this.config?.[tableHandler.name]?.hooks?.getPreInsertRow; - if(tableHandler.is_media){ + if (tableHandler.is_media) { return uploadFile.bind(tableHandler)(args) as Promise; - } - if(tableHook){ - return tableHook(args) + } + if (tableHook) { + return tableHook(args); } return args.row; - } + }; prevInitQueryHistory?: string[]; initialising = false; init = initTableConfig.bind(this); -} \ No newline at end of file +} diff --git a/lib/TableConfig/getColumnDefinitionQuery.ts b/lib/TableConfig/getColumnDefinitionQuery.ts index 018420af..8e0bf242 100644 --- a/lib/TableConfig/getColumnDefinitionQuery.ts +++ b/lib/TableConfig/getColumnDefinitionQuery.ts @@ -1,71 +1,81 @@ -import { asName, pickKeys } from "prostgles-types"; +import { asName, pickKeys } from "prostgles-types"; import { DB } from "../Prostgles"; import { asValue } from "../PubSubManager/PubSubManager"; import { VALIDATE_SCHEMA_FUNCNAME } from "../JSONBValidation/validate_jsonb_schema_sql"; import { BaseColumnTypes, ColumnConfig } from "./TableConfig"; -import pgPromise from "pg-promise"; +import pgPromise from "pg-promise"; type Args 
= { - column: string; + column: string; colConf: ColumnConfig; - db: DB; + db: DB; table: string; }; /** * Column create statement for a given config */ -export const getColumnDefinitionQuery = async ({ colConf: colConfRaw, column, db, table }: Args): Promise => { - const colConf = typeof colConfRaw === "string"? { sqlDefinition: colConfRaw } : colConfRaw; +export const getColumnDefinitionQuery = async ({ + colConf: colConfRaw, + column, + db, + table, +}: Args): Promise => { + const colConf = + typeof colConfRaw === "string" ? { sqlDefinition: colConfRaw } : colConfRaw; const colNameEsc = asName(column); - const getColTypeDef = (colConf: BaseColumnTypes, pgType: "TEXT" | "JSONB") => { + const getColTypeDef = ( + colConf: BaseColumnTypes, + pgType: "TEXT" | "JSONB", + ) => { const { nullable, defaultValue } = colConf; - return `${pgType} ${!nullable ? " NOT NULL " : ""} ${defaultValue ? ` DEFAULT ${asValue(defaultValue)} ` : ""}` - } + return `${pgType} ${!nullable ? " NOT NULL " : ""} ${defaultValue ? ` DEFAULT ${asValue(defaultValue)} ` : ""}`; + }; const jsonbSchema = - ("jsonbSchema" in colConf && colConf.jsonbSchema) ? { jsonbSchema: colConf.jsonbSchema, jsonbSchemaType: undefined } : - ("jsonbSchemaType" in colConf && colConf.jsonbSchemaType) ? { jsonbSchema: undefined, jsonbSchemaType: colConf.jsonbSchemaType } : - undefined; - + "jsonbSchema" in colConf && colConf.jsonbSchema + ? { jsonbSchema: colConf.jsonbSchema, jsonbSchemaType: undefined } + : "jsonbSchemaType" in colConf && colConf.jsonbSchemaType + ? { jsonbSchema: undefined, jsonbSchemaType: colConf.jsonbSchemaType } + : undefined; if ("references" in colConf && colConf.references) { - - const { tableName: lookupTable, columnName: lookupCol = "id" } = colConf.references; + const { tableName: lookupTable, columnName: lookupCol = "id" } = + colConf.references; return ` ${colNameEsc} ${getColTypeDef(colConf.references, "TEXT")} REFERENCES ${lookupTable} (${lookupCol}) `; - } else if ("sqlDefinition" in colConf && colConf.sqlDefinition) { - return ` ${colNameEsc} ${colConf.sqlDefinition} `; - } else if ("isText" in colConf && colConf.isText) { let checks = ""; const colChecks: string[] = []; if (colConf.lowerCased) { - colChecks.push(`${colNameEsc} = LOWER(${colNameEsc})`) + colChecks.push(`${colNameEsc} = LOWER(${colNameEsc})`); } if (colConf.trimmed) { - colChecks.push(`${colNameEsc} = BTRIM(${colNameEsc})`) + colChecks.push(`${colNameEsc} = BTRIM(${colNameEsc})`); } if (colChecks.length) { - checks = `CHECK (${colChecks.join(" AND ")})` + checks = `CHECK (${colChecks.join(" AND ")})`; } return ` ${colNameEsc} ${getColTypeDef(colConf, "TEXT")} ${checks}`; - } else if (jsonbSchema) { - - const jsonbSchemaStr = asValue({ - ...pickKeys(colConf, ["enum", "nullable", "info"]), - ...(jsonbSchema.jsonbSchemaType ? { type: jsonbSchema.jsonbSchemaType } : jsonbSchema.jsonbSchema) - }) + "::TEXT"; + const jsonbSchemaStr = + asValue({ + ...pickKeys(colConf, ["enum", "nullable", "info"]), + ...(jsonbSchema.jsonbSchemaType + ? 
{ type: jsonbSchema.jsonbSchemaType }
+          : jsonbSchema.jsonbSchema),
+      }) + "::TEXT";

     /** Validate default value against jsonbSchema */
     const validationQuery = `SELECT ${VALIDATE_SCHEMA_FUNCNAME}(${jsonbSchemaStr}, ${asValue(colConf.defaultValue) + "::JSONB"}, ${asValue({ table, column })}) as v`;
     if (colConf.defaultValue) {
-      const failedDefault = (err?: any) => {
-        return { msg: `Default value (${colConf.defaultValue}) for ${table}.${column} does not satisfy the jsonb constraint check: ${validationQuery}`, err };
-      }
+      const failedDefault = (err?: any) => {
+        return {
+          msg: `Default value (${colConf.defaultValue}) for ${table}.${column} does not satisfy the jsonb constraint check: ${validationQuery}`,
+          err,
+        };
+      };
       try {
         const row = await db.oneOrNone(validationQuery);
         if (!row?.v) {
@@ -74,38 +84,49 @@ export const getColumnDefinitionQuery = async ({ colConf: colConfRaw, column, db
       } catch (e) {
         throw failedDefault(e);
       }
-    }
+    }

     return ` ${colNameEsc} ${getColTypeDef(colConf, "JSONB")} CHECK(${VALIDATE_SCHEMA_FUNCNAME}(${jsonbSchemaStr}, ${colNameEsc}, ${asValue({ table, column })} ))`;
-
   } else if ("enum" in colConf) {
-    if (!colConf.enum?.length) throw new Error("colConf.enum Must not be empty");
-    const type = colConf.enum.every(v => Number.isFinite(v)) ? "NUMERIC" : "TEXT";
-    const checks = colConf.enum.map(v => `${colNameEsc} = ${asValue(v)}`).join(" OR ");
+    if (!colConf.enum?.length)
+      throw new Error("colConf.enum Must not be empty");
+    const type = colConf.enum.every((v) => Number.isFinite(v))
+      ? "NUMERIC"
+      : "TEXT";
+    const checks = colConf.enum
+      .map((v) => `${colNameEsc} = ${asValue(v)}`)
+      .join(" OR ");
     return ` ${colNameEsc} ${type} ${colConf.nullable ? "" : "NOT NULL"} ${"defaultValue" in colConf ? ` DEFAULT ${asValue(colConf.defaultValue)}` : ""} CHECK(${checks})`;
-
   } else {
     return undefined;
     // throw "Unknown column config: " + JSON.stringify(colConf);
   }
-}
-
+};

-export type ColumnMinimalInfo = {
+export type ColumnMinimalInfo = {
   table_name: string;
   table_schema: string;
   column_name: string;
   column_default: string | null;
   udt_name: string;
-  nullable: boolean;
+  nullable: boolean;
 };

-export const getTableColumns = ({ db, table }: { db: DB | pgPromise.ITask<{}>; table: string;}): Promise<ColumnMinimalInfo[]> => {
-  return db.manyOrNone(`
+export const getTableColumns = ({
+  db,
+  table,
+}: {
+  db: DB | pgPromise.ITask<{}>;
+  table: string;
+}): Promise<ColumnMinimalInfo[]> => {
+  return db.manyOrNone(
+    `
     SELECT table_name, table_schema, column_name, column_default, udt_name, is_nullable = 'YES' as nullable
     FROM information_schema.columns
     WHERE table_name = $1
-  `, [table]);
-}
\ No newline at end of file
+  `,
+    [table],
+  );
+};
diff --git a/lib/TableConfig/getConstraintDefinitionQueries.ts b/lib/TableConfig/getConstraintDefinitionQueries.ts
index a2be02d3..624e7e3e 100644
--- a/lib/TableConfig/getConstraintDefinitionQueries.ts
+++ b/lib/TableConfig/getConstraintDefinitionQueries.ts
@@ -6,48 +6,55 @@ import { TableConfig } from "./TableConfig";

 type Args = {
   tableName: string;
-  tableConf: TableConfig[string]
+  tableConf: TableConfig[string];
   // tableConf: BaseTableDefinition & (TableDefinition | LookupTableDefinition)
 };

-export type ConstraintDef = {
+export type ConstraintDef = {
   /**
    * Named constraints are used to show a relevant error message
    */
-  name?: string;
-  content: string;
-  alterQuery: string;
+  name?: string;
+  content: string;
+  alterQuery: string;
 };

-export const getConstraintDefinitionQueries = ({ tableConf, tableName }: Args): ConstraintDef[] | undefined => {
-
+export const getConstraintDefinitionQueries = ({
+
tableConf, + tableName, +}: Args): ConstraintDef[] | undefined => { if ("constraints" in tableConf && tableConf.constraints) { const { constraints } = tableConf; - if(!constraints){ + if (!constraints) { return undefined; } - - if(Array.isArray(constraints)) { - return constraints.map(c => ({ content: c, alterQuery: `ALTER TABLE ${asName(tableName)} ADD ${c}`})); - + + if (Array.isArray(constraints)) { + return constraints.map((c) => ({ + content: c, + alterQuery: `ALTER TABLE ${asName(tableName)} ADD ${c}`, + })); } else { const constraintNames = Object.keys(constraints); - return constraintNames.map(constraintName => { + return constraintNames.map((constraintName) => { const _cnstr = constraints[constraintName]!; - const constraintDef = typeof _cnstr === "string"? _cnstr : `${_cnstr.type} (${_cnstr.content})`; - + const constraintDef = + typeof _cnstr === "string" + ? _cnstr + : `${_cnstr.type} (${_cnstr.content})`; + /** Drop constraints with the same name */ // const existingConstraint = constraints.some(c => c.conname === constraintName); // if(existingConstraint){ // if(canDrop) queries.push(`ALTER TABLE ${asName(tableName)} DROP CONSTRAINT ${asName(constraintName)};`); // } - + const alterQuery = `ALTER TABLE ${asName(tableName)} ADD CONSTRAINT ${asName(constraintName)} ${constraintDef};`; return { name: constraintName, alterQuery, content: constraintDef }; }); } } -} +}; export type ColConstraint = { name: string; @@ -56,14 +63,18 @@ export type ColConstraint = { cols: Array; definition: string; schema: string; -} +}; type ColConstraintsArgs = { db: DB | pgPromise.ITask<{}>; table?: string; column?: string; types?: ColConstraint["type"][]; -} -export const getColConstraintsQuery = ({ column, table, types }: Omit) => { +}; +export const getColConstraintsQuery = ({ + column, + table, + types, +}: Omit) => { let query = ` SELECT * FROM ( @@ -86,10 +97,15 @@ export const getColConstraintsQuery = ({ column, table, types }: Omit ARRAY[${asValue(column)}]`; - if (types?.length) query += `\nAND type IN (${types.map(v => asValue(v)).join(", ")})`; + if (types?.length) + query += `\nAND type IN (${types.map((v) => asValue(v)).join(", ")})`; return query; -} -export const getColConstraints = ({ db, column, table, types }: ColConstraintsArgs ): Promise => { - +}; +export const getColConstraints = ({ + db, + column, + table, + types, +}: ColConstraintsArgs): Promise => { return db.manyOrNone(getColConstraintsQuery({ column, table, types })); -} \ No newline at end of file +}; diff --git a/lib/TableConfig/getFutureTableSchema.ts b/lib/TableConfig/getFutureTableSchema.ts index 9b2786f1..895e3520 100644 --- a/lib/TableConfig/getFutureTableSchema.ts +++ b/lib/TableConfig/getFutureTableSchema.ts @@ -2,42 +2,53 @@ import { asName } from "prostgles-types"; import { pgp } from "../DboBuilder/DboBuilder"; import { DB } from "../Prostgles"; import { ColumnMinimalInfo, getTableColumns } from "./getColumnDefinitionQuery"; -import { ColConstraint, ConstraintDef, getColConstraints } from "./getConstraintDefinitionQueries"; +import { + ColConstraint, + ConstraintDef, + getColConstraints, +} from "./getConstraintDefinitionQueries"; type Args = { - db: DB, - columnDefs: string[]; + db: DB; + columnDefs: string[]; tableName: string; constraintDefs?: ConstraintDef[]; }; /** - * Given a table name, column definitions and constraint definitions, + * Given a table name, column definitions and constraint definitions, * returns structured resulting column definitions and constraints of the table */ -export const 
getFutureTableSchema = async ({ columnDefs, tableName, constraintDefs = [], db }: Args): Promise<{ +export const getFutureTableSchema = async ({ + columnDefs, + tableName, + constraintDefs = [], + db, +}: Args): Promise<{ constraints: ColConstraint[]; cols: ColumnMinimalInfo[]; -}> => { +}> => { const { TransactionMode, isolationLevel } = pgp.txMode; - + let constraints: ColConstraint[] = []; let cols: ColumnMinimalInfo[] = []; const ROLLBACK = "Rollback"; try { const txMode = new TransactionMode({ - tiLevel: isolationLevel.serializable + tiLevel: isolationLevel.serializable, }); - await db.tx({ mode: txMode }, async t => { - + await db.tx({ mode: txMode }, async (t) => { /** To prevent deadlocks we use a random table name -> Not feasible because named constraints cannot be recreated without dropping the existing ones from actual table */ - // const tableEsc = asName(tableName.slice(0, 12) + (await t.oneOrNone(`SELECT md5(now()::text) as md5`)).md5); - + // const tableEsc = asName(tableName.slice(0, 12) + (await t.oneOrNone(`SELECT md5(now()::text) as md5`)).md5); + const tableEsc = asName(tableName); - const consQueries = constraintDefs.map(c => - `ALTER TABLE ${tableEsc} ADD ${c.name? ` CONSTRAINT ${asName(c.name)}` : ""} ${c.content};` - ).join("\n"); + const consQueries = constraintDefs + .map( + (c) => + `ALTER TABLE ${tableEsc} ADD ${c.name ? ` CONSTRAINT ${asName(c.name)}` : ""} ${c.content};`, + ) + .join("\n"); const query = ` DROP TABLE IF EXISTS ${tableEsc} CASCADE; @@ -51,18 +62,17 @@ export const getFutureTableSchema = async ({ columnDefs, tableName, constraintDe constraints = await getColConstraints({ db: t, table: tableName }); cols = await getTableColumns({ db: t, table: tableName }); - + /** Rollback */ return Promise.reject(new Error(ROLLBACK)); }); - - } catch(e: any){ - if(e instanceof Error && e.message === ROLLBACK) { + } catch (e: any) { + if (e instanceof Error && e.message === ROLLBACK) { // Ignore } else { throw e; } } - + return { cols, constraints }; -} \ No newline at end of file +}; diff --git a/lib/TableConfig/getPGIndexes.ts b/lib/TableConfig/getPGIndexes.ts index 8f35804a..7ea5a5b0 100644 --- a/lib/TableConfig/getPGIndexes.ts +++ b/lib/TableConfig/getPGIndexes.ts @@ -13,8 +13,11 @@ type PGIndex = { size: string; description: string | null; }; -export const getPGIndexes = async (db: DB, tableName: string, schema: string): Promise => { - +export const getPGIndexes = async ( + db: DB, + tableName: string, + schema: string, +): Promise => { const indexQuery = ` SELECT n.nspname as schemaname, c.relname as indexname, @@ -48,6 +51,6 @@ export const getPGIndexes = async (db: DB, tableName: string, schema: string): P AND c2.relname = \${tableName} AND n.nspname = \${schema} ORDER BY 1,2; - ` + `; return db.any(indexQuery, { tableName, schema }); -}; \ No newline at end of file +}; diff --git a/lib/TableConfig/getTableColumnQueries.ts b/lib/TableConfig/getTableColumnQueries.ts index f3f8ab9c..ae56b999 100644 --- a/lib/TableConfig/getTableColumnQueries.ts +++ b/lib/TableConfig/getTableColumnQueries.ts @@ -1,8 +1,17 @@ -import { getKeys, asName as _asName, isObject, asName, getObjectEntries } from "prostgles-types"; +import { + getKeys, + asName as _asName, + isObject, + asName, + getObjectEntries, +} from "prostgles-types"; import { DB, DBHandlerServer } from "../Prostgles"; import { validate_jsonb_schema_sql } from "../JSONBValidation/validate_jsonb_schema_sql"; -import { getColumnDefinitionQuery, getTableColumns } from "./getColumnDefinitionQuery"; -import { 
TableConfig } from "./TableConfig"; +import { + getColumnDefinitionQuery, + getTableColumns, +} from "./getColumnDefinitionQuery"; +import { TableConfig } from "./TableConfig"; import { getFutureTableSchema } from "./getFutureTableSchema"; type Args = { @@ -10,19 +19,26 @@ type Args = { tableConf: TableConfig[string]; tableName: string; tableHandler: DBHandlerServer[string] | undefined; -} +}; /** * Given a tableHandler, table name, column definitions and constraint definitions, * returns the queries to align any existing table with the given column definitions */ -export const getTableColumnQueries = async ({ db, tableConf, tableName, tableHandler }: Args): Promise => { - +export const getTableColumnQueries = async ({ + db, + tableConf, + tableName, + tableHandler, +}: Args): Promise< + | undefined + | { + columnDefs: string[]; + newColumnDefs: string[]; + fullQuery: string; + isCreate: boolean; + } +> => { let newColumnDefs: string[] = []; const droppedColNames: string[] = []; const alteredColQueries: string[] = []; @@ -32,89 +48,121 @@ export const getTableColumnQueries = async ({ db, tableConf, tableName, tableHan if (!("columns" in tableConf && tableConf.columns)) { return undefined; } - - const hasJSONBValidation = getKeys(tableConf.columns).some(c => { + + const hasJSONBValidation = getKeys(tableConf.columns).some((c) => { const cConf = tableConf.columns?.[c]; - return cConf && isObject(cConf) && (cConf.jsonbSchema || cConf.jsonbSchemaType) + return ( + cConf && isObject(cConf) && (cConf.jsonbSchema || cConf.jsonbSchemaType) + ); }); /** Must install validation function */ - if(hasJSONBValidation){ + if (hasJSONBValidation) { try { await db.any(validate_jsonb_schema_sql); - } catch(err: any){ - console.error("Could not install the jsonb validation function due to error: ", err); + } catch (err: any) { + console.error( + "Could not install the jsonb validation function due to error: ", + err, + ); throw err; } } const columns = getObjectEntries(tableConf.columns).filter(([_, colDef]) => { /** Exclude NamedJoinColumn */ - return typeof colDef === "string" || !("joinDef" in colDef) + return typeof colDef === "string" || !("joinDef" in colDef); }); const colDefs: { name: string; def: string }[] = []; - - for (const [colName, colConf] of columns) { + for (const [colName, colConf] of columns) { /* Get column definition */ - const colDef = await getColumnDefinitionQuery({ colConf, column: colName.toString(), db, table: tableName }); - if(colDef){ - colDefs.push({ name: colName.toString(), def: colDef}); + const colDef = await getColumnDefinitionQuery({ + colConf, + column: colName.toString(), + db, + table: tableName, + }); + if (colDef) { + colDefs.push({ name: colName.toString(), def: colDef }); } } - const columnDefs = colDefs.map(c => c.def); + const columnDefs = colDefs.map((c) => c.def); - if(!colDefs.length){ + if (!colDefs.length) { return undefined; } - const ALTERQ = `ALTER TABLE ${asName(tableName)}`; if (!tableHandler) { - newColumnDefs.push(...colDefs.map(c => c.def)); - + newColumnDefs.push(...colDefs.map((c) => c.def)); } else if (tableHandler) { const currCols = await getTableColumns({ db, table: tableName }); /** Add new columns */ - newColumnDefs = colDefs.filter(nc => !tableHandler.columns?.some(c => nc.name === c.name)).map(c => c.def); - + newColumnDefs = colDefs + .filter((nc) => !tableHandler.columns?.some((c) => nc.name === c.name)) + .map((c) => c.def); + /** Altered/Dropped columns */ - const { cols: futureCols } = await getFutureTableSchema({ tableName, 
columnDefs, constraintDefs: [], db }); - currCols.forEach(c => { - const newCol = futureCols.find(nc => nc.column_name === c.column_name); - if(!newCol){ + const { cols: futureCols } = await getFutureTableSchema({ + tableName, + columnDefs, + constraintDefs: [], + db, + }); + currCols.forEach((c) => { + const newCol = futureCols.find((nc) => nc.column_name === c.column_name); + if (!newCol) { droppedColNames.push(c.column_name); - } else if(newCol.nullable !== c.nullable){ - alteredColQueries.push(`${ALTERQ} ALTER COLUMN ${asName(c.column_name)} ${newCol.nullable? "DROP" : "SET"} NOT NULL;`) - } else if(newCol.udt_name !== c.udt_name){ - alteredColQueries.push(`${ALTERQ} ALTER COLUMN ${asName(c.column_name)} TYPE ${newCol.udt_name} USING ${asName(c.column_name)}::${newCol.udt_name};`) - } else if(newCol.column_default !== c.column_default){ - const colConfig = colDefs.find(cd => cd.name === c.column_name); - if(["serial", "bigserial"].some(t => colConfig?.def.toLowerCase().includes(` ${t}`)) && c.column_default?.toLowerCase().includes("nextval")){ + } else if (newCol.nullable !== c.nullable) { + alteredColQueries.push( + `${ALTERQ} ALTER COLUMN ${asName(c.column_name)} ${newCol.nullable ? "DROP" : "SET"} NOT NULL;`, + ); + } else if (newCol.udt_name !== c.udt_name) { + alteredColQueries.push( + `${ALTERQ} ALTER COLUMN ${asName(c.column_name)} TYPE ${newCol.udt_name} USING ${asName(c.column_name)}::${newCol.udt_name};`, + ); + } else if (newCol.column_default !== c.column_default) { + const colConfig = colDefs.find((cd) => cd.name === c.column_name); + if ( + ["serial", "bigserial"].some((t) => + colConfig?.def.toLowerCase().includes(` ${t}`), + ) && + c.column_default?.toLowerCase().includes("nextval") + ) { /** Ignore SERIAL/BIGSERIAL <> nextval mismatch */ } else { - alteredColQueries.push(`${ALTERQ} ALTER COLUMN ${asName(c.column_name)} ${newCol.column_default === null? "DROP DEFAULT" : `SET DEFAULT ${newCol.column_default}`};`) + alteredColQueries.push( + `${ALTERQ} ALTER COLUMN ${asName(c.column_name)} ${newCol.column_default === null ? "DROP DEFAULT" : `SET DEFAULT ${newCol.column_default}`};`, + ); } } }); } - if (!tableHandler || tableConf.dropIfExists || tableConf.dropIfExistsCascade) { + if ( + !tableHandler || + tableConf.dropIfExists || + tableConf.dropIfExistsCascade + ) { isCreate = true; const DROPQ = `DROP TABLE IF EXISTS ${asName(tableName)}`; - fullQuery = ([ - ...(tableConf.dropIfExists? [`${DROPQ};`] : tableConf.dropIfExistsCascade? [`${DROPQ} CASCADE;`] : []), + fullQuery = [ + ...(tableConf.dropIfExists + ? [`${DROPQ};`] + : tableConf.dropIfExistsCascade + ? 
[`${DROPQ} CASCADE;`] + : []), `CREATE TABLE ${asName(tableName)} (`, - columnDefs.join(", \n"), - `);` - ].join("\n")); - + columnDefs.join(", \n"), + `);`, + ].join("\n"); } else { fullQuery = [ - ...droppedColNames.map(c => `${ALTERQ} DROP COLUMN ${asName(c)};`), - ...newColumnDefs.map(c => `${ALTERQ} ADD COLUMN ${c};`), + ...droppedColNames.map((c) => `${ALTERQ} DROP COLUMN ${asName(c)};`), + ...newColumnDefs.map((c) => `${ALTERQ} ADD COLUMN ${c};`), ...alteredColQueries, ].join("\n"); } @@ -124,6 +172,5 @@ export const getTableColumnQueries = async ({ db, tableConf, tableName, tableHan columnDefs, isCreate, newColumnDefs, - } - -} \ No newline at end of file + }; +}; diff --git a/lib/TableConfig/initTableConfig.ts b/lib/TableConfig/initTableConfig.ts index 07d84f06..7279b84f 100644 --- a/lib/TableConfig/initTableConfig.ts +++ b/lib/TableConfig/initTableConfig.ts @@ -1,68 +1,99 @@ - import { asName as _asName } from "prostgles-types"; import { PubSubManager, asValue, log } from "../PubSubManager/PubSubManager"; import TableConfigurator from "./TableConfig"; -import { getColConstraints, getConstraintDefinitionQueries } from "./getConstraintDefinitionQueries"; +import { + getColConstraints, + getConstraintDefinitionQueries, +} from "./getConstraintDefinitionQueries"; import { getFutureTableSchema } from "./getFutureTableSchema"; import { getTableColumnQueries } from "./getTableColumnQueries"; import { getPGIndexes } from "./getPGIndexes"; export const initTableConfig = async function (this: TableConfigurator) { - let changedSchema = false; const failedQueries: { query: string; error: any }[] = []; this.initialising = true; const queryHistory: string[] = []; let queries: string[] = []; - const makeQuery = (q: string[]) => q.filter(v => v.trim().length).map(v => v.trim().endsWith(";") ? v : `${v};`).join("\n"); + const makeQuery = (q: string[]) => + q + .filter((v) => v.trim().length) + .map((v) => (v.trim().endsWith(";") ? 
v : `${v};`)) + .join("\n"); const runQueries = async (_queries = queries) => { let q = makeQuery(queries); - if (!_queries.some(q => q.trim().length)) { + if (!_queries.some((q) => q.trim().length)) { return 0; } q = `/* ${PubSubManager.EXCLUDE_QUERY_FROM_SCHEMA_WATCH_ID} */ \n\n` + q; queryHistory.push(q); - this.prostgles.opts.onLog?.({ type: "debug", command: "TableConfig.runQueries.start", data: { q }, duration: -1 }); + this.prostgles.opts.onLog?.({ + type: "debug", + command: "TableConfig.runQueries.start", + data: { q }, + duration: -1, + }); const now = Date.now(); - await this.db.multi(q).catch(err => { + await this.db.multi(q).catch((err) => { log({ err, q }); failedQueries.push({ query: q, error: err }); return Promise.reject(err); }); - this.prostgles.opts.onLog?.({ type: "debug", command: "TableConfig.runQueries.end", duration: Date.now() - now, data: { q } }); + this.prostgles.opts.onLog?.({ + type: "debug", + command: "TableConfig.runQueries.end", + duration: Date.now() - now, + data: { q }, + }); changedSchema = true; _queries = []; queries = []; return 1; - } + }; if (!this.prostgles.pgp) { throw "pgp missing"; } - const MAX_IDENTIFIER_LENGTH = +(await this.db.one("SHOW max_identifier_length;") as any).max_identifier_length; - if (!Number.isFinite(MAX_IDENTIFIER_LENGTH)) throw `Could not obtain a valid max_identifier_length`; + const MAX_IDENTIFIER_LENGTH = +( + (await this.db.one("SHOW max_identifier_length;")) as any + ).max_identifier_length; + if (!Number.isFinite(MAX_IDENTIFIER_LENGTH)) + throw `Could not obtain a valid max_identifier_length`; const asName = (v: string) => { if (v.length > MAX_IDENTIFIER_LENGTH - 1) { - throw `The identifier name provided (${v}) is longer than the allowed limit (max_identifier_length - 1 = ${MAX_IDENTIFIER_LENGTH - 1} characters )\n Longest allowed: ${_asName(v.slice(0, MAX_IDENTIFIER_LENGTH - 1))} ` + throw `The identifier name provided (${v}) is longer than the allowed limit (max_identifier_length - 1 = ${MAX_IDENTIFIER_LENGTH - 1} characters )\n Longest allowed: ${_asName(v.slice(0, MAX_IDENTIFIER_LENGTH - 1))} `; } return _asName(v); - } + }; - let migrations: { version: number; table: string; } | undefined; + let migrations: { version: number; table: string } | undefined; if (this.prostgles.opts.tableConfigMigrations) { - const { onMigrate, version, versionTableName = "schema_version" } = this.prostgles.opts.tableConfigMigrations; + const { + onMigrate, + version, + versionTableName = "schema_version", + } = this.prostgles.opts.tableConfigMigrations; await this.db.any(` /* ${PubSubManager.EXCLUDE_QUERY_FROM_SCHEMA_WATCH_ID} */ CREATE TABLE IF NOT EXISTS ${asName(versionTableName)}(id NUMERIC PRIMARY KEY, table_config JSONB NOT NULL) `); migrations = { version, table: versionTableName }; - const maxVersion = +(await this.db.oneOrNone(`SELECT MAX(id) as v FROM ${asName(versionTableName)}`)).v + const maxVersion = +( + await this.db.oneOrNone( + `SELECT MAX(id) as v FROM ${asName(versionTableName)}`, + ) + ).v; const latestVersion = Number.isFinite(maxVersion) ? 
maxVersion : undefined; if (latestVersion === version) { - const isLatest = (await this.db.oneOrNone(`SELECT table_config = \${table_config} as v FROM ${asName(versionTableName)} WHERE id = \${version}`, { version, table_config: this.config })).v; + const isLatest = ( + await this.db.oneOrNone( + `SELECT table_config = \${table_config} as v FROM ${asName(versionTableName)} WHERE id = \${version}`, + { version, table_config: this.config }, + ) + ).v; if (isLatest) { /** * If the table config is the same as the latest version then we can skip all schema checks and changes @@ -71,7 +102,12 @@ export const initTableConfig = async function (this: TableConfigurator) { } } if (latestVersion !== undefined && latestVersion < version) { - await onMigrate({ db: this.db, oldVersion: latestVersion, getConstraints: (table, col, types) => getColConstraints({ db: this.db, table, column: col, types }) }) + await onMigrate({ + db: this.db, + oldVersion: latestVersion, + getConstraints: (table, col, types) => + getColConstraints({ db: this.db, table, column: col, types }), + }); } } @@ -79,7 +115,10 @@ export const initTableConfig = async function (this: TableConfigurator) { for (const [tableNameRaw, tableConf] of Object.entries(this.config)) { const tableName = asName(tableNameRaw); - if ("isLookupTable" in tableConf && Object.keys(tableConf.isLookupTable?.values).length) { + if ( + "isLookupTable" in tableConf && + Object.keys(tableConf.isLookupTable?.values).length + ) { const { dropIfExists = false, dropIfExistsCascade = false } = tableConf; const isDropped = dropIfExists || dropIfExistsCascade; @@ -89,26 +128,38 @@ export const initTableConfig = async function (this: TableConfigurator) { queries.push(`DROP TABLE IF EXISTS ${tableName};`); } - const rows = Object.entries(tableConf.isLookupTable?.values).map(([id, otherColumns])=> ({ id, ...otherColumns })); + const rows = Object.entries(tableConf.isLookupTable?.values).map( + ([id, otherColumns]) => ({ id, ...otherColumns }), + ); const lookupTableHandler = this.dbo?.[tableNameRaw]; - const columnNames = Object.keys(rows[0]!).filter(k => k !== "id"); + const columnNames = Object.keys(rows[0]!).filter((k) => k !== "id"); if (isDropped || !lookupTableHandler) { queries.push( `CREATE TABLE IF NOT EXISTS ${tableName} ( id TEXT PRIMARY KEY - ${columnNames.length ? (", " + columnNames.map(k => asName(k) + " TEXT ").join(", ")) : ""} - );` + ${columnNames.length ? ", " + columnNames.map((k) => asName(k) + " TEXT ").join(", ") : ""} + );`, ); } - if(rows.length){ - const existingValues: { id: any }[] = !lookupTableHandler? [] : await this.db.any(`SELECT id FROM ${tableName} WHERE id IN (${rows.map(r => asValue(r.id)).join(", ")});`); + if (rows.length) { + const existingValues: { id: any }[] = !lookupTableHandler + ? 
[] + : await this.db.any( + `SELECT id FROM ${tableName} WHERE id IN (${rows.map((r) => asValue(r.id)).join(", ")});`, + ); rows - .filter(r => !existingValues.some(ev => ev.id === r.id)) - .map(row => { - const allColumns = ["id", ...columnNames] - const values = allColumns.map(key => (row as any)[key]); - queries.push(this.prostgles.pgp!.as.format(`INSERT INTO ${tableName} (${allColumns.map(t => asName(t)).join(", ")}) ` + " VALUES (${values:csv});", { values })) - }); + .filter((r) => !existingValues.some((ev) => ev.id === r.id)) + .map((row) => { + const allColumns = ["id", ...columnNames]; + const values = allColumns.map((key) => (row as any)[key]); + queries.push( + this.prostgles.pgp!.as.format( + `INSERT INTO ${tableName} (${allColumns.map((t) => asName(t)).join(", ")}) ` + + " VALUES (${values:csv});", + { values }, + ), + ); + }); } } } @@ -125,56 +176,79 @@ export const initTableConfig = async function (this: TableConfigurator) { const ALTER_TABLE_Q = `ALTER TABLE ${asName(tableName)}`; /* isLookupTable table has already been created */ - const coldef = "isLookupTable" in tableConf? undefined : await getTableColumnQueries({ db: this.db, tableConf, tableHandler, tableName }); + const coldef = + "isLookupTable" in tableConf + ? undefined + : await getTableColumnQueries({ + db: this.db, + tableConf, + tableHandler, + tableName, + }); if (coldef) { queries.push(coldef.fullQuery); } /** CONSTRAINTS */ - const constraintDefs = getConstraintDefinitionQueries({ tableName, tableConf }); + const constraintDefs = getConstraintDefinitionQueries({ + tableName, + tableConf, + }); if (coldef?.isCreate) { - queries.push(...constraintDefs?.map(c => c.alterQuery) ?? []); - + queries.push(...(constraintDefs?.map((c) => c.alterQuery) ?? [])); } else if (coldef) { - const fullSchema = await getFutureTableSchema({ db: this.db, tableName, columnDefs: coldef.columnDefs, constraintDefs }); - const futureCons = fullSchema.constraints.map(nc => ({ + const fullSchema = await getFutureTableSchema({ + db: this.db, + tableName, + columnDefs: coldef.columnDefs, + constraintDefs, + }); + const futureCons = fullSchema.constraints.map((nc) => ({ ...nc, - isNamed: constraintDefs?.some(c => c.name === nc.name) + isNamed: constraintDefs?.some((c) => c.name === nc.name), })); /** Run this first to ensure any dropped cols drop their constraints as well */ await runQueries(queries); - const currCons = await getColConstraints({ db: this.db, table: tableName }); + const currCons = await getColConstraints({ + db: this.db, + table: tableName, + }); /** Drop removed/modified */ - currCons.forEach(c => { - if (!futureCons.some(nc => nc.definition === c.definition && (!nc.isNamed || nc.name === c.name))) { - queries.push(`${ALTER_TABLE_Q} DROP CONSTRAINT ${asName(c.name)};`) + currCons.forEach((c) => { + if ( + !futureCons.some( + (nc) => + nc.definition === c.definition && + (!nc.isNamed || nc.name === c.name), + ) + ) { + queries.push(`${ALTER_TABLE_Q} DROP CONSTRAINT ${asName(c.name)};`); } }); /** Add missing named constraints */ - constraintDefs?.forEach(c => { - if (c.name && !currCons.some(cc => cc.name === c.name)) { - const fc = futureCons.find(nc => nc.name === c.name); + constraintDefs?.forEach((c) => { + if (c.name && !currCons.some((cc) => cc.name === c.name)) { + const fc = futureCons.find((nc) => nc.name === c.name); if (fc) { - queries.push(`${ALTER_TABLE_Q} ADD CONSTRAINT ${asName(c.name)} ${c.content};`); + queries.push( + `${ALTER_TABLE_Q} ADD CONSTRAINT ${asName(c.name)} ${c.content};`, + ); } } }); /** 
Add remaining missing constraints */ futureCons - .filter(nc => - !currCons.some(c => c.definition === nc.definition) - ) - .forEach(c => { - queries.push(`${ALTER_TABLE_Q} ADD ${c.definition};`) + .filter((nc) => !currCons.some((c) => c.definition === nc.definition)) + .forEach((c) => { + queries.push(`${ALTER_TABLE_Q} ADD ${c.definition};`); }); } - if ("indexes" in tableConf && tableConf.indexes) { /* CREATE [ UNIQUE ] INDEX [ CONCURRENTLY ] [ [ IF NOT EXISTS ] name ] ON [ ONLY ] table_name [ USING method ] @@ -186,40 +260,42 @@ export const initTableConfig = async function (this: TableConfigurator) { [ WHERE predicate ] */ const currIndexes = await getPGIndexes(this.db, tableName, "public"); - Object.entries(tableConf.indexes).forEach(([ - indexName, - { - columns, - concurrently, - replace, - unique, - using, - where = "" - } - ]) => { - - if (replace || typeof replace !== "boolean" && tableConf.replaceUniqueIndexes) { - queries.push(`DROP INDEX IF EXISTS ${asName(indexName)};`); - } - if (!currIndexes.some(idx => idx.indexname === indexName)) { - queries.push([ - "CREATE", - unique && "UNIQUE", - concurrently && "CONCURRENTLY", - `INDEX ${asName(indexName)} ON ${asName(tableName)}`, - using && ("USING " + using), - `(${columns})`, - where && `WHERE ${where}` - ].filter(v => v).join(" ") + ";"); - } - }); + Object.entries(tableConf.indexes).forEach( + ([ + indexName, + { columns, concurrently, replace, unique, using, where = "" }, + ]) => { + if ( + replace || + (typeof replace !== "boolean" && tableConf.replaceUniqueIndexes) + ) { + queries.push(`DROP INDEX IF EXISTS ${asName(indexName)};`); + } + if (!currIndexes.some((idx) => idx.indexname === indexName)) { + queries.push( + [ + "CREATE", + unique && "UNIQUE", + concurrently && "CONCURRENTLY", + `INDEX ${asName(indexName)} ON ${asName(tableName)}`, + using && "USING " + using, + `(${columns})`, + where && `WHERE ${where}`, + ] + .filter((v) => v) + .join(" ") + ";", + ); + } + }, + ); } const { triggers, dropIfExists, dropIfExistsCascade } = tableConf; if (triggers) { const isDropped = dropIfExists || dropIfExistsCascade; - const existingTriggers = await this.dbo.sql!(` + const existingTriggers = (await this.dbo.sql!( + ` SELECT event_object_table ,trigger_name FROM information_schema.triggers @@ -227,18 +303,17 @@ export const initTableConfig = async function (this: TableConfigurator) { ORDER BY event_object_table `, { tableName }, - { returnType: "rows" } - ) as { trigger_name: string }[]; + { returnType: "rows" }, + )) as { trigger_name: string }[]; // const existingTriggerFuncs = await this.dbo.sql!(` // SELECT p.oid,proname,prosrc,u.usename - // FROM pg_proc p - // JOIN pg_user u ON u.usesysid = p.proowner + // FROM pg_proc p + // JOIN pg_user u ON u.usesysid = p.proowner // WHERE prorettype = 2279; // `, {}, { returnType: "rows" }) as { proname: string }[]; Object.entries(triggers).forEach(([triggerFuncName, trigger]) => { - const funcNameParsed = asName(triggerFuncName); let addedFunc = false; @@ -256,21 +331,31 @@ export const initTableConfig = async function (this: TableConfigurator) { $$; `); - } + }; - trigger.actions.forEach(action => { + trigger.actions.forEach((action) => { const triggerActionName = triggerFuncName + "_" + action; - const triggerActionNameParsed = asName(triggerActionName) + const triggerActionNameParsed = asName(triggerActionName); if (isDropped) { - queries.push(`DROP TRIGGER IF EXISTS ${triggerActionNameParsed} ON ${tableName};`) + queries.push( + `DROP TRIGGER IF EXISTS ${triggerActionNameParsed} 
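The index hunk above builds its DDL with the compact truthy-filter-then-join idiom; a standalone sketch of the same pattern (`escapeIdent` is a stand-in for the real `asName` escaper):

```typescript
// Sketch of the filter/join pattern used in the index hunk above.
const escapeIdent = (s: string) => `"${s.replace(/"/g, '""')}"`;

type IndexDef = {
  columns: string;
  unique?: boolean;
  concurrently?: boolean;
  using?: string;
  where?: string;
};

const buildCreateIndex = (
  indexName: string,
  tableName: string,
  { columns, unique, concurrently, using, where }: IndexDef,
): string =>
  [
    "CREATE",
    unique && "UNIQUE",
    concurrently && "CONCURRENTLY",
    `INDEX ${escapeIdent(indexName)} ON ${escapeIdent(tableName)}`,
    using && `USING ${using}`,
    `(${columns})`,
    where && `WHERE ${where}`,
  ]
    .filter(Boolean)
    .join(" ") + ";";

// buildCreateIndex("idx_users_email", "users", { columns: "email", unique: true })
// => CREATE UNIQUE INDEX "idx_users_email" ON "users" (email);
```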
ON ${tableName};`, + ); } - if (isDropped || !existingTriggers.some(t => t.trigger_name === triggerActionName)) { + if ( + isDropped || + !existingTriggers.some((t) => t.trigger_name === triggerActionName) + ) { addFuncDef(); - const newTableName = action !== "delete" ? "NEW TABLE AS new_table" : ""; - const oldTableName = action !== "insert" ? "OLD TABLE AS old_table" : ""; - const transitionTables = trigger.forEach === "row" ? "" : `REFERENCING ${newTableName} ${oldTableName}`; + const newTableName = + action !== "delete" ? "NEW TABLE AS new_table" : ""; + const oldTableName = + action !== "insert" ? "OLD TABLE AS old_table" : ""; + const transitionTables = + trigger.forEach === "row" + ? "" + : `REFERENCING ${newTableName} ${oldTableName}`; queries.push(` CREATE TRIGGER ${triggerActionNameParsed} ${trigger.type} ${action} ON ${tableName} @@ -279,8 +364,8 @@ export const initTableConfig = async function (this: TableConfigurator) { EXECUTE PROCEDURE ${funcNameParsed}(); `); } - }) - }) + }); + }); } } @@ -296,7 +381,9 @@ export const initTableConfig = async function (this: TableConfigurator) { if (err.position) { const pos = +err.position; if (Number.isInteger(pos)) { - return Promise.reject(err.toString() + "\n At:" + q.slice(pos - 50, pos + 50)); + return Promise.reject( + err.toString() + "\n At:" + q.slice(pos - 50, pos + 50), + ); } } @@ -305,22 +392,26 @@ export const initTableConfig = async function (this: TableConfigurator) { } if (migrations) { - await this.db.any(`INSERT INTO ${migrations.table}(id, table_config) VALUES (${asValue(migrations.version)}, ${asValue(this.config)}) ON CONFLICT DO NOTHING;`) + await this.db.any( + `INSERT INTO ${migrations.table}(id, table_config) VALUES (${asValue(migrations.version)}, ${asValue(this.config)}) ON CONFLICT DO NOTHING;`, + ); } this.initialising = false; if (changedSchema && !failedQueries.length) { if (!this.prevInitQueryHistory) { this.prevInitQueryHistory = queryHistory; } else if (this.prevInitQueryHistory.join() !== queryHistory.join()) { - this.prostgles.init(this.prostgles.opts.onReady as any, { type: "TableConfig" }); + this.prostgles.init(this.prostgles.opts.onReady as any, { + type: "TableConfig", + }); } else { - console.error("TableConfig loop bug", queryHistory) + console.error("TableConfig loop bug", queryHistory); } } if (failedQueries.length) { - console.error("Table config failed queries: ", failedQueries) + console.error("Table config failed queries: ", failedQueries); } await this.prostgles.refreshDBO(); this.setTableOnMounts(); -} \ No newline at end of file +}; diff --git a/lib/index.ts b/lib/index.ts index f40281eb..7715b148 100644 --- a/lib/index.ts +++ b/lib/index.ts @@ -5,9 +5,10 @@ import { ProstglesInitOptions } from "./ProstglesTypes"; import { testDboTypes } from "./typeTests/dboTypeCheck"; testDboTypes(); -function prostgles(params: ProstglesInitOptions){ - - const prgl = new Prostgles(params as any); - return prgl.init(params.onReady as any, { type: "init" }); +function prostgles( + params: ProstglesInitOptions, +) { + const prgl = new Prostgles(params as any); + return prgl.init(params.onReady as any, { type: "init" }); } -export = prostgles; \ No newline at end of file +export = prostgles; diff --git a/lib/initProstgles.ts b/lib/initProstgles.ts index a22c011e..10909abb 100644 --- a/lib/initProstgles.ts +++ b/lib/initProstgles.ts @@ -70,12 +70,14 @@ export type InitResult = { options: ProstglesInitOptions; }; -const clientOnlyUpdateKeys = ["auth"] as const satisfies (keyof UpdateableOptions)[]; +const 
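A small aside on the error handling above: Postgres reports a 1-based character `position` for many syntax errors, and the `q.slice(pos - 50, pos + 50)` trick surfaces the offending region of a long generated query. Extracted as a helper (the name and the 1-based adjustment are mine):

```typescript
// Sketch: show ~50 characters either side of Postgres's reported error
// position, as the runQueries rejection above does for failed queries.
const getErrorContext = (query: string, position: number, radius = 50) => {
  const pos = Math.max(0, position - 1); // err.position is 1-based
  return query.slice(Math.max(0, pos - radius), pos + radius);
};

// Typical use with a pg error (err.position arrives as a string):
// const pos = +err.position;
// if (Number.isInteger(pos)) console.error(`${err}\n At: ${getErrorContext(q, pos)}`);
```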
clientOnlyUpdateKeys = [ + "auth", +] as const satisfies (keyof UpdateableOptions)[]; export const initProstgles = async function ( this: Prostgles, onReady: OnReadyCallbackBasic, - reason: OnInitReason + reason: OnInitReason, ): Promise { this.loaded = false; @@ -94,14 +96,16 @@ export const initProstgles = async function ( try { const url = new URL(connString); existingAppName = - url.searchParams.get("application_name") ?? url.searchParams.get("ApplicationName") ?? ""; + url.searchParams.get("application_name") ?? + url.searchParams.get("ApplicationName") ?? + ""; } catch (e) {} } const conObj = - typeof this.opts.dbConnection === "string" ? - { connectionString: this.opts.dbConnection } - : this.opts.dbConnection; + typeof this.opts.dbConnection === "string" + ? { connectionString: this.opts.dbConnection } + : this.opts.dbConnection; const application_name = `prostgles ${this.appId} ${existingAppName}`; /* 1. Connect to db */ @@ -151,7 +155,7 @@ export const initProstgles = async function ( this.opts.publishRawSQL, this.dbo!, this.db, - this as any + this as any, ); this.dboBuilder.publishParser = this.publishParser; @@ -217,7 +221,9 @@ export const initProstgles = async function ( * While others also affect the server and onReady should be called */ if ( - getKeys(newOpts).every((updatedKey) => clientOnlyUpdateKeys.includes(updatedKey as any)) + getKeys(newOpts).every((updatedKey) => + clientOnlyUpdateKeys.includes(updatedKey as any), + ) ) { await this.setSocketEvents(); } else { @@ -273,52 +279,52 @@ const getDbConnection = function ({ const onQueryOrError: | undefined | ((error: any, ctx: pgPromise.IEventContext) => void) = - !onQuery && !DEBUG_MODE ? - undefined - : (error, ctx) => { - if (onQuery) { - onQuery(error, ctx); - } else if (DEBUG_MODE) { - if (error) { - console.error(error, ctx); - } else { - console.log(ctx); + !onQuery && !DEBUG_MODE + ? undefined + : (error, ctx) => { + if (onQuery) { + onQuery(error, ctx); + } else if (DEBUG_MODE) { + if (error) { + console.error(error, ctx); + } else { + console.log(ctx); + } } - } - }; + }; const pgp: PGP = pgPromise({ - ...(onQueryOrError ? - { - query: (ctx) => onQueryOrError(undefined, ctx), - error: onQueryOrError, - } - : {}), - ...(onNotice || DEBUG_MODE ? - { - connect: function ({ client, useCount }) { - const isFresh = !useCount; - if (isFresh && !client.listeners("notice").length) { - client.on("notice", function (msg) { - if (onNotice) { - onNotice(msg, msg?.message); - } else { - console.log("notice: %j", msg?.message); - } - }); - } - if (isFresh && !client.listeners("error").length) { - client.on("error", function (msg) { - if (onNotice) { - onNotice(msg, msg?.message); - } else { - console.log("error: %j", msg?.message); - } - }); - } - }, - } - : {}), + ...(onQueryOrError + ? { + query: (ctx) => onQueryOrError(undefined, ctx), + error: onQueryOrError, + } + : {}), + ...(onNotice || DEBUG_MODE + ? 
{ + connect: function ({ client, useCount }) { + const isFresh = !useCount; + if (isFresh && !client.listeners("notice").length) { + client.on("notice", function (msg) { + if (onNotice) { + onNotice(msg, msg?.message); + } else { + console.log("notice: %j", msg?.message); + } + }); + } + if (isFresh && !client.listeners("error").length) { + client.on("error", function (msg) { + if (onNotice) { + onNotice(msg, msg?.message); + } else { + console.log("error: %j", msg?.message); + } + }); + } + }, + } + : {}), }); // pgp.pg.defaults.max = 70; diff --git a/lib/onSocketConnected.ts b/lib/onSocketConnected.ts index 24a3d07a..decb420c 100644 --- a/lib/onSocketConnected.ts +++ b/lib/onSocketConnected.ts @@ -8,16 +8,16 @@ export async function onSocketConnected(this: Prostgles, socket: PRGLIOSocket) { if (this.destroyed) { console.log("Socket connected to destroyed instance"); socket.disconnect(); - return + return; } this.connectedSockets.push(socket); try { await this.opts.onLog?.({ - type: "connect", + type: "connect", sid: this.authHandler?.getSID({ socket }), socketId: socket.id, - connectedSocketIds: this.connectedSockets.map(s => s.id) + connectedSocketIds: this.connectedSockets.map((s) => s.id), }); if (!this.db || !this.dbo) throw new Error("db/dbo missing"); @@ -25,78 +25,113 @@ export async function onSocketConnected(this: Prostgles, socket: PRGLIOSocket) { if (this.opts.onSocketConnect) { try { - const getUser = async () => { return await this.authHandler?.getClientInfo({ socket }); } - await this.opts.onSocketConnect({ socket, dbo: dbo as any, db, getUser }); - } catch(error) { - const connectionError = error instanceof Error? error.message : typeof error === "string"? error : JSON.stringify(error); + const getUser = async () => { + return await this.authHandler?.getClientInfo({ socket }); + }; + await this.opts.onSocketConnect({ + socket, + dbo: dbo as any, + db, + getUser, + }); + } catch (error) { + const connectionError = + error instanceof Error + ? error.message + : typeof error === "string" + ? 
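Worth calling out in the `getDbConnection` hunk above: the `connect` event fires on every checkout from the pool, so the `useCount`/`listeners` guard is what keeps `notice` handlers from stacking up on reused clients. A trimmed sketch of just that guard (the logging body is illustrative):

```typescript
// Sketch only: the attach-once guard from the hunk above. useCount === 0
// means the physical connection was just created rather than reused.
import pgPromise from "pg-promise";

const pgp = pgPromise({
  connect({ client, useCount }) {
    const isFresh = !useCount;
    if (isFresh && !client.listeners("notice").length) {
      client.on("notice", (msg: any) => {
        console.log("notice: %j", msg?.message);
      });
    }
  },
});
```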
error + : JSON.stringify(error); socket.emit(CHANNELS.CONNECTION, { connectionError }); socket.disconnect(); return; } } - socket.removeAllListeners(CHANNELS.DEFAULT) - socket.on(CHANNELS.DEFAULT, async (args: SocketRequestParams, cb = (..._callback: any[]) => { /* Empty */}) => { - runClientRequest.bind(this)({ ...args, type: "socket", socket }) - .then(res => { - cb(null, res) - }).catch(err => { - cb(err); - }); - }); + socket.removeAllListeners(CHANNELS.DEFAULT); + socket.on( + CHANNELS.DEFAULT, + async ( + args: SocketRequestParams, + cb = (..._callback: any[]) => { + /* Empty */ + }, + ) => { + runClientRequest + .bind(this)({ ...args, type: "socket", socket }) + .then((res) => { + cb(null, res); + }) + .catch((err) => { + cb(err); + }); + }, + ); socket.on("disconnect", () => { - this.dbEventsManager?.removeNotice(socket); this.dbEventsManager?.removeNotify(undefined, socket); - this.connectedSockets = this.connectedSockets.filter(s => s.id !== socket.id); + this.connectedSockets = this.connectedSockets.filter( + (s) => s.id !== socket.id, + ); this.dboBuilder.queryStreamer.onDisconnect(socket.id); - this.opts.onLog?.({ - type: "disconnect", + this.opts.onLog?.({ + type: "disconnect", sid: this.authHandler?.getSID({ socket }), socketId: socket.id, - connectedSocketIds: this.connectedSockets.map(s => s.id) + connectedSocketIds: this.connectedSockets.map((s) => s.id), }); if (this.opts.onSocketDisconnect) { - const getUser = async () => { return await this.authHandler?.getClientInfo({ socket }); } + const getUser = async () => { + return await this.authHandler?.getClientInfo({ socket }); + }; this.opts.onSocketDisconnect({ socket, dbo: dbo as any, db, getUser }); } }); - socket.removeAllListeners(CHANNELS.METHOD) - socket.on(CHANNELS.METHOD, async ({ method, params }: SocketMethodRequest, cb = (..._callback: any) => { /* Empty */ }) => { - runClientMethod.bind(this)({ - type: "socket", - socket, - method, - params - }).then(res => { - cb(null, res) - }).catch(err => { - makeSocketError(cb, err) - }); - }); + socket.removeAllListeners(CHANNELS.METHOD); + socket.on( + CHANNELS.METHOD, + async ( + { method, params }: SocketMethodRequest, + cb = (..._callback: any) => { + /* Empty */ + }, + ) => { + runClientMethod + .bind(this)({ + type: "socket", + socket, + method, + params, + }) + .then((res) => { + cb(null, res); + }) + .catch((err) => { + makeSocketError(cb, err); + }); + }, + ); this.pushSocketSchema(socket); } catch (e) { - console.trace("setSocketEvents: ", e) + console.trace("setSocketEvents: ", e); } } - export function makeSocketError(cb: (err: AnyObject) => void, err: any) { cb(getErrorAsObject(err)); } type SocketRequestParams = { tableName: string; - command: typeof TABLE_METHODS[number]; + command: (typeof TABLE_METHODS)[number]; param1: any; param2: any; param3: any; -} +}; type SocketMethodRequest = { method: string; params: any; -} \ No newline at end of file +}; diff --git a/lib/runClientRequest.ts b/lib/runClientRequest.ts index 51857a64..3fdce705 100644 --- a/lib/runClientRequest.ts +++ b/lib/runClientRequest.ts @@ -1,6 +1,10 @@ -import { AnyObject, - TableHandler, - UserLike, getKeys, pickKeys } from "prostgles-types"; +import { + AnyObject, + TableHandler, + UserLike, + getKeys, + pickKeys, +} from "prostgles-types"; import { ExpressReq } from "./Auth/AuthTypes"; import { LocalParams, PRGLIOSocket } from "./DboBuilder/DboBuilder"; import { parseFieldFilter } from "./DboBuilder/ViewHandler/parseFieldFilter"; @@ -8,44 +12,55 @@ import { canRunSQL } from 
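One more note on the `onSocketConnect` catch above: the ternary chain is just error-message normalisation. Pulled out as a helper (the name is mine), it reads:

```typescript
// Sketch: the normalisation used before emitting the connection error above.
const toErrorMessage = (error: unknown): string =>
  error instanceof Error ? error.message
  : typeof error === "string" ? error
  : JSON.stringify(error);

// toErrorMessage(new Error("boom"))  -> "boom"
// toErrorMessage("denied")           -> "denied"
// toErrorMessage({ code: 42 })       -> '{"code":42}'
```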
"./DboBuilder/runSQL"; import { Prostgles } from "./Prostgles"; import { TableHandler as TableHandlerServer } from "./DboBuilder/TableHandler/TableHandler"; import { TableRule } from "./PublishParser/publishTypesAndUtils"; - -type ReqInfo = { - type: "socket"; - socket: PRGLIOSocket; - httpReq?: undefined; -} | { - type: "http"; - httpReq: ExpressReq; - socket?: undefined; -} -type ReqInfoClient = { - socket: PRGLIOSocket; -} | { - httpReq: ExpressReq; -} + +type ReqInfo = + | { + type: "socket"; + socket: PRGLIOSocket; + httpReq?: undefined; + } + | { + type: "http"; + httpReq: ExpressReq; + socket?: undefined; + }; +type ReqInfoClient = + | { + socket: PRGLIOSocket; + } + | { + httpReq: ExpressReq; + }; const TABLE_METHODS = { - find: 1, - findOne: 1, - count: 1, - size: 1, + find: 1, + findOne: 1, + count: 1, + size: 1, update: 1, - updateBatch: 1, - delete: 1, - upsert: 1, - insert: 1, - subscribe: 1, - subscribeOne: 1, - getColumns: 1, + updateBatch: 1, + delete: 1, + upsert: 1, + insert: 1, + subscribe: 1, + subscribeOne: 1, + getColumns: 1, getInfo: 1, sync: 1, -} as const satisfies Record<(keyof (TableHandler & Pick)), 1>; +} as const satisfies Record< + keyof (TableHandler & Pick), + 1 +>; const TABLE_METHODS_KEYS = getKeys(TABLE_METHODS); -const SOCKET_ONLY_COMMANDS = ["subscribe", "subscribeOne", "sync"] as const satisfies typeof TABLE_METHODS_KEYS; +const SOCKET_ONLY_COMMANDS = [ + "subscribe", + "subscribeOne", + "sync", +] as const satisfies typeof TABLE_METHODS_KEYS; type Args = ReqInfo & { - tableName: string; + tableName: string; command: string; param1: any; param2: any; @@ -53,114 +68,168 @@ type Args = ReqInfo & { }; const getReqInfoClient = (reqInfo: ReqInfo): ReqInfoClient => { - if(reqInfo.type === "socket"){ + if (reqInfo.type === "socket") { return { socket: reqInfo.socket }; } return { httpReq: reqInfo.httpReq }; -} +}; -type TableMethodFunctionWithRulesAndLocalParams = ((arg1: any, arg2: any, arg3: any, tableRule: TableRule, localParams: LocalParams) => any); +type TableMethodFunctionWithRulesAndLocalParams = ( + arg1: any, + arg2: any, + arg3: any, + tableRule: TableRule, + localParams: LocalParams, +) => any; -export const runClientRequest = async function(this: Prostgles, args: Args){ +export const runClientRequest = async function (this: Prostgles, args: Args) { /* Channel name will only include client-sent params so we ignore table_rules enforced params */ - if ((args.type === "socket" && !args.socket) || (args.type === "http" && !args.httpReq) || !this.authHandler || !this.publishParser || !this.dbo) { + if ( + (args.type === "socket" && !args.socket) || + (args.type === "http" && !args.httpReq) || + !this.authHandler || + !this.publishParser || + !this.dbo + ) { throw "socket/httpReq or authhandler missing"; } - const { tableName, command: nonValidatedCommand, param1, param2, param3 } = args; - if(!TABLE_METHODS_KEYS.some(v => v === nonValidatedCommand)){ - throw `Invalid command: ${nonValidatedCommand}. Expecting one of: ${TABLE_METHODS_KEYS};` + const { + tableName, + command: nonValidatedCommand, + param1, + param2, + param3, + } = args; + if (!TABLE_METHODS_KEYS.some((v) => v === nonValidatedCommand)) { + throw `Invalid command: ${nonValidatedCommand}. 
Expecting one of: ${TABLE_METHODS_KEYS};`; } const command = nonValidatedCommand as keyof TableHandler; - if(args.type !== "socket" && SOCKET_ONLY_COMMANDS.some(v => v === command)){ - throw "The following commands cannot be completed over a non-websocket connection: " + SOCKET_ONLY_COMMANDS; + if ( + args.type !== "socket" && + SOCKET_ONLY_COMMANDS.some((v) => v === command) + ) { + throw ( + "The following commands cannot be completed over a non-websocket connection: " + + SOCKET_ONLY_COMMANDS + ); } const reqInfo = getReqInfoClient(args); const clientInfo = await this.authHandler.getClientInfo(args); - const validRules = await this.publishParser.getValidatedRequestRule({ tableName, command, localParams: reqInfo }, clientInfo); + const validRules = await this.publishParser.getValidatedRequestRule( + { tableName, command, localParams: reqInfo }, + clientInfo, + ); if (!validRules) { throw `Invalid OR disallowed request: ${tableName}.${command} `; } - const sessionUser: UserLike | undefined = !clientInfo?.user? undefined : { - ...parseFieldFilter(clientInfo.sessionFields ?? [] as any, false, Object.keys(clientInfo.user)), - ...pickKeys(clientInfo.user, ["id", "type"]) as UserLike, - } - const localParams: LocalParams = { ...reqInfo, isRemoteRequest: { user: sessionUser } } - if(param3 && (param3 as LocalParams).returnQuery){ + const sessionUser: UserLike | undefined = !clientInfo?.user + ? undefined + : { + ...parseFieldFilter( + clientInfo.sessionFields ?? ([] as any), + false, + Object.keys(clientInfo.user), + ), + ...(pickKeys(clientInfo.user, ["id", "type"]) as UserLike), + }; + const localParams: LocalParams = { + ...reqInfo, + isRemoteRequest: { user: sessionUser }, + }; + if (param3 && (param3 as LocalParams).returnQuery) { const isAllowed = await canRunSQL(this, localParams); - if(isAllowed){ + if (isAllowed) { localParams.returnQuery = (param3 as LocalParams).returnQuery; } else { throw "Must be allowed to run sql to use returnQuery"; } } const tableHandler = this.dbo[tableName]; - if(!tableHandler || !tableHandler.column_names) throw `Invalid tableName ${tableName} provided`; + if (!tableHandler || !tableHandler.column_names) + throw `Invalid tableName ${tableName} provided`; /** * satisfies check is used to ensure rules arguments are correctly passed to each method */ - const tableCommand = tableHandler[command]?.bind(tableHandler) satisfies undefined | TableMethodFunctionWithRulesAndLocalParams; - if(!tableCommand) throw `Invalid or disallowed command provided: ${command}`; - return (this.dbo[tableName]![command] as TableMethodFunctionWithRulesAndLocalParams)(param1, param2, param3, validRules, localParams); + const tableCommand = tableHandler[command]?.bind(tableHandler) satisfies + | undefined + | TableMethodFunctionWithRulesAndLocalParams; + if (!tableCommand) throw `Invalid or disallowed command provided: ${command}`; + return ( + this.dbo[tableName]![command] as TableMethodFunctionWithRulesAndLocalParams + )(param1, param2, param3, validRules, localParams); // This approach is breaking context // const result = await (tableCommand as TableMethodFunctionWithRulesAndLocalParams)(param1, param2, param3, validRules, localParams); // return result; -} +}; -export const clientCanRunSqlRequest = async function(this: Prostgles, args: ReqInfo){ +export const clientCanRunSqlRequest = async function ( + this: Prostgles, + args: ReqInfo, +) { const reqInfo = getReqInfoClient(args); - if(!this.opts.publishRawSQL || typeof this.opts.publishRawSQL !== "function"){ - return { allowed: 
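The `TABLE_METHODS ... as const satisfies Record<..., 1>` pattern above earns a quick illustration: `satisfies` makes the compiler reject a missing or misspelled command at build time, while the derived key list powers the runtime guard for untrusted input. A self-contained sketch with a cut-down command set:

```typescript
// Sketch: compile-time exhaustiveness via `satisfies`, runtime narrowing via
// an assertion function. The command set is cut down for illustration.
type TableMethodName = "find" | "findOne" | "update" | "delete";

const TABLE_METHODS = {
  find: 1,
  findOne: 1,
  update: 1,
  delete: 1, // removing any key here is a compile error
} as const satisfies Record<TableMethodName, 1>;

const TABLE_METHODS_KEYS = Object.keys(
  TABLE_METHODS,
) as (keyof typeof TABLE_METHODS)[];

function assertCommand(cmd: string): asserts cmd is TableMethodName {
  if (!TABLE_METHODS_KEYS.some((k) => k === cmd)) {
    throw `Invalid command: ${cmd}. Expecting one of: ${TABLE_METHODS_KEYS}`;
  }
}

// assertCommand(userInput); // after this line, userInput is TableMethodName
```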
false, reqInfo } - } + if ( + !this.opts.publishRawSQL || + typeof this.opts.publishRawSQL !== "function" + ) { + return { allowed: false, reqInfo }; + } const canRunSQL = async () => { - if(!this.authHandler){ + if (!this.authHandler) { throw "authHandler missing"; - } + } const publishParams = await this.publishParser?.getPublishParams(reqInfo); const res = await this.opts.publishRawSQL?.(publishParams as any); - return Boolean(res && typeof res === "boolean" || res === "*"); - } + return Boolean((res && typeof res === "boolean") || res === "*"); + }; const allowed = await canRunSQL(); return { allowed, reqInfo }; -} +}; type ArgsSql = ReqInfo & { query: string; args?: AnyObject | any[]; options?: any; -} -export const runClientSqlRequest = async function(this: Prostgles, params: ArgsSql){ +}; +export const runClientSqlRequest = async function ( + this: Prostgles, + params: ArgsSql, +) { const { allowed, reqInfo } = await clientCanRunSqlRequest.bind(this)(params); - if(!allowed){ + if (!allowed) { throw "Not allowed to execute sql"; } if (!this.dbo?.sql) throw "Internal error: sql handler missing"; const { query, args, options } = params; return this.dbo.sql(query, args, options, reqInfo); -} - - +}; type ArgsMethod = ReqInfo & { method: string; - params?: any[] -} -export const runClientMethod = async function(this: Prostgles, reqArgs: ArgsMethod){ - + params?: any[]; +}; +export const runClientMethod = async function ( + this: Prostgles, + reqArgs: ArgsMethod, +) { const reqInfo = getReqInfoClient(reqArgs); const { method, params = [] } = reqArgs; const methods = await this.publishParser?.getAllowedMethods(reqInfo); if (!methods || !methods[method]) { - throw ("Disallowed/missing method " + JSON.stringify(method)); - } + throw "Disallowed/missing method " + JSON.stringify(method); + } const methodDef = methods[method]!; - const onRun = (typeof methodDef === "function" || typeof (methodDef as any).then === "function")? (methodDef as (...args: any) => Promise) : methodDef.run; + const onRun = + typeof methodDef === "function" || + typeof (methodDef as any).then === "function" + ? 
(methodDef as (...args: any) => Promise) + : methodDef.run; const res = await onRun(...params); - return res; -} \ No newline at end of file + return res; +}; diff --git a/lib/shortestPath.ts b/lib/shortestPath.ts index 073a7f81..883588db 100644 --- a/lib/shortestPath.ts +++ b/lib/shortestPath.ts @@ -1,85 +1,89 @@ import { AnyObject } from "prostgles-types"; const shortestDistanceNode = (distances: AnyObject, visited: AnyObject) => { - let shortest = null; - - for (const node in distances) { - const currentIsShortest = - shortest === null || distances[node] < distances[shortest]; - if (currentIsShortest && !visited.includes(node)) { - shortest = node; - } - } - return shortest; + let shortest = null; + + for (const node in distances) { + const currentIsShortest = + shortest === null || distances[node] < distances[shortest]; + if (currentIsShortest && !visited.includes(node)) { + shortest = node; + } + } + return shortest; }; export type Graph = { - [key: string]: { [key: string]: number } + [key: string]: { [key: string]: number }; }; -export const findShortestPath = (graph: Graph, startNode: string, endNode: string): { distance: number, path: string[] } => { - // establish object for recording distances from the start node - let distances: AnyObject = {}; - distances[endNode] = "Infinity"; - distances = Object.assign(distances, graph[startNode]); - - // track paths - const parents: AnyObject = { endNode: null }; - for (const child in graph[startNode]) { - parents[child] = startNode; - } - - // track nodes that have already been visited - const visited: AnyObject = []; - - // find the nearest node - let node = shortestDistanceNode(distances, visited); - - // for that node - while (node) { - // find its distance from the start node & its child nodes - const distance = distances[node]; - const children = graph[node]; - // for each of those child nodes - for (const child in children) { - // make sure each child node is not the start node - if (String(child) === String(startNode)) { - continue; - } else { - // save the distance from the start node to the child node - const newdistance = distance + children[child]; - // if there's no recorded distance from the start node to the child node in the distances object - // or if the recorded distance is shorter than the previously stored distance from the start node to the child node - // save the distance to the object - // record the path - if (!distances[child] || distances[child] > newdistance) { - distances[child] = newdistance; - parents[child] = node; - } - } - } - // move the node to the visited set - visited.push(node); - // move to the nearest neighbor node - node = shortestDistanceNode(distances, visited); - } - - // using the stored paths from start node to end node - // record the shortest path - const shortestPath = [endNode]; - let parent = parents[endNode]; - while (parent) { - shortestPath.push(parent); - parent = parents[parent]; - } - shortestPath.reverse(); - - // return the shortest path from start node to end node & its distance - const results = { - distance: distances[endNode], - path: shortestPath, - }; - - return results; +export const findShortestPath = ( + graph: Graph, + startNode: string, + endNode: string, +): { distance: number; path: string[] } => { + // establish object for recording distances from the start node + let distances: AnyObject = {}; + distances[endNode] = "Infinity"; + distances = Object.assign(distances, graph[startNode]); + + // track paths + const parents: AnyObject = { endNode: null }; + for (const 
child in graph[startNode]) { + parents[child] = startNode; + } + + // track nodes that have already been visited + const visited: AnyObject = []; + + // find the nearest node + let node = shortestDistanceNode(distances, visited); + + // for that node + while (node) { + // find its distance from the start node & its child nodes + const distance = distances[node]; + const children = graph[node]; + // for each of those child nodes + for (const child in children) { + // make sure each child node is not the start node + if (String(child) === String(startNode)) { + continue; + } else { + // save the distance from the start node to the child node + const newdistance = distance + children[child]; + // if there's no recorded distance from the start node to the child node in the distances object + // or if the recorded distance is shorter than the previously stored distance from the start node to the child node + // save the distance to the object + // record the path + if (!distances[child] || distances[child] > newdistance) { + distances[child] = newdistance; + parents[child] = node; + } + } + } + // move the node to the visited set + visited.push(node); + // move to the nearest neighbor node + node = shortestDistanceNode(distances, visited); + } + + // using the stored paths from start node to end node + // record the shortest path + const shortestPath = [endNode]; + let parent = parents[endNode]; + while (parent) { + shortestPath.push(parent); + parent = parents[parent]; + } + shortestPath.reverse(); + + // return the shortest path from start node to end node & its distance + const results = { + distance: distances[endNode], + path: shortestPath, + }; + + return results; }; /* Usage: @@ -119,4 +123,4 @@ const graph = { findShortestPath(graph, 'start', 'end'); -*/ \ No newline at end of file +*/ diff --git a/lib/typeTests/DBoGenerated.d.ts b/lib/typeTests/DBoGenerated.d.ts index 2c574701..2aaa64dc 100644 --- a/lib/typeTests/DBoGenerated.d.ts +++ b/lib/typeTests/DBoGenerated.d.ts @@ -266,7 +266,11 @@ export type DBGeneratedSchema = { columns: { email: string; id?: number; - preferences: { showIntro?: boolean; theme?: "light" | "dark" | "auto"; others: any[] }; + preferences: { + showIntro?: boolean; + theme?: "light" | "dark" | "auto"; + others: any[]; + }; status: "active" | "disabled" | "pending"; }; }; diff --git a/lib/typeTests/dboTypeCheck.ts b/lib/typeTests/dboTypeCheck.ts index 109d40a2..e1f7f2f2 100644 --- a/lib/typeTests/dboTypeCheck.ts +++ b/lib/typeTests/dboTypeCheck.ts @@ -37,7 +37,7 @@ export const testDboTypes = () => { orderBy: { id: 1, }, - } + }, ); r[0]?.id; @@ -51,7 +51,7 @@ export const testDboTypes = () => { { select: { id: 1 }, orderBy: { tr1_id: 1 }, - } + }, ); tr2.find( @@ -59,7 +59,7 @@ export const testDboTypes = () => { { //@ts-expect-error select: { bad_col: 1 }, - } + }, ); tr2.find( @@ -67,13 +67,13 @@ export const testDboTypes = () => { { //@ts-expect-error orderBy: { bad_col: 1 }, - } + }, ); - (await db.items2.find({}, { select: { items_id: 1 }, returnType: "values" })) satisfies ( - | number - | null - )[]; + (await db.items2.find( + {}, + { select: { items_id: 1 }, returnType: "values" }, + )) satisfies (number | null)[]; const publish: Publish = { items: { diff --git a/package-lock.json b/package-lock.json index 8a551741..24edaede 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "prostgles-server", - "version": "4.2.182", + "version": "4.2.183", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": 
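Returning briefly to `lib/shortestPath.ts` above: the reformat leaves behaviour untouched, and a worked call makes the contract easy to verify. A small graph where the cheap first hop loses (values checked by hand; the import path is assumed):

```typescript
// start -> B costs 2 but B -> end costs 7 (total 9);
// start -> A costs 5 and A -> end costs 1 (total 6), so A wins.
import { findShortestPath, Graph } from "./shortestPath";

const graph: Graph = {
  start: { A: 5, B: 2 },
  A: { end: 1 },
  B: { A: 8, end: 7 },
  end: {},
};

const { distance, path } = findShortestPath(graph, "start", "end");
console.log(distance, path); // 6 [ 'start', 'A', 'end' ]
```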
"prostgles-server", - "version": "4.2.182", + "version": "4.2.183", "license": "MIT", "dependencies": { "@aws-sdk/client-ses": "^3.699.0", diff --git a/package.json b/package.json index 54d8e0fd..8bbff123 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "prostgles-server", - "version": "4.2.182", + "version": "4.2.183", "description": "", "main": "dist/index.js", "types": "dist/index.d.ts", diff --git a/tests/client/hooks.spec.ts b/tests/client/hooks.spec.ts index 69a8ddb1..79fb29fd 100644 --- a/tests/client/hooks.spec.ts +++ b/tests/client/hooks.spec.ts @@ -3,129 +3,122 @@ import { describe, test } from "node:test"; import { AnyObject, pickKeys } from "prostgles-types"; import type { DBHandlerClient } from "./index"; import { renderReactHook } from "./renderReactHook"; - -export const clientHooks = async (db: DBHandlerClient, getSocketOptions: (watchSchema?: boolean) => AnyObject) => { +export const clientHooks = async ( + db: DBHandlerClient, + getSocketOptions: (watchSchema?: boolean) => AnyObject, +) => { const resultLoading = { data: undefined, isLoading: true, error: undefined }; await describe("React hooks", async (t) => { - const defaultFilter = { name: "abc" }; - await Promise.all([ - "useFind", - "useSubscribe", - "useFindOne", - "useSubscribeOne", - ].map(async hookName => { - await test(hookName, async (t) => { - const expectsOne = hookName.includes("One"); - const options = { - select: { added: "$Mon" }, - limit: expectsOne? undefined : 1 - }; - const expectedData = expectsOne? { added: "Dec" } : [{ added: "Dec" }]; - const { rerender, results } = await renderReactHook({ - hook: db.items4[hookName]!, - props: [{ name: "abc" }, options], - expectedRerenders: 2 - }); - - assert.deepStrictEqual( - results, [ - resultLoading, - { data: expectedData, isLoading:false, error: undefined} - ] - ); - - const { results: errorResults } = await rerender({ - props: [{ named: "error" }, options], - expectedRerenders: 2, + await Promise.all( + ["useFind", "useSubscribe", "useFindOne", "useSubscribeOne"].map( + async (hookName) => { + await test(hookName, async (t) => { + const expectsOne = hookName.includes("One"); + const options = { + select: { added: "$Mon" }, + limit: expectsOne ? undefined : 1, + }; + const expectedData = expectsOne + ? 
{ added: "Dec" } + : [{ added: "Dec" }]; + const { rerender, results } = await renderReactHook({ + hook: db.items4[hookName]!, + props: [{ name: "abc" }, options], + expectedRerenders: 2, + }); + + assert.deepStrictEqual(results, [ + resultLoading, + { data: expectedData, isLoading: false, error: undefined }, + ]); + + const { results: errorResults } = await rerender({ + props: [{ named: "error" }, options], + expectedRerenders: 2, + }); + + assert.deepStrictEqual(errorResults, [ + resultLoading, + { + data: undefined, + isLoading: false, + error: { + message: + "Table: items4 -> disallowed/inexistent columns in filter: named \n" + + ' Expecting one of: added, "id", "public", "name"', + }, + }, + ]); + }); + }, + ), + ); + + await Promise.all( + [ + { + hookName: "useCount", + result1: { data: 2, error: undefined, isLoading: false }, + result2: { data: 0, error: undefined, isLoading: false }, + }, + { + hookName: "useSize", + result1: { data: "93", error: undefined, isLoading: false }, + result2: { data: "0", error: undefined, isLoading: false }, + }, + ].map(async ({ hookName, result1, result2 }) => { + await test(hookName, async (t) => { + const { results, rerender } = await renderReactHook({ + hook: db.items4[hookName]!, + props: [defaultFilter], + expectedRerenders: 2, + }); + + // Initial state + assert.deepStrictEqual(results, [resultLoading, result1]); + + // Rerender with different filter + const { results: noResults } = await rerender({ + props: [{ id: -1 }], + expectedRerenders: 2, + }); + + // New results + assert.deepStrictEqual(noResults, [resultLoading, result2]); }); - - assert.deepStrictEqual( - errorResults, [ - resultLoading, - { data: undefined, isLoading: false, error: { - message: 'Table: items4 -> disallowed/inexistent columns in filter: named \n' + - ' Expecting one of: added, "id", "public", "name"', - } } - ] - ); - }); - })); - - await Promise.all([ - { - hookName: "useCount", - result1: { data: 2, error: undefined, isLoading: false }, - result2: { data: 0, error: undefined, isLoading: false }, - }, - { - hookName: "useSize", - result1: { data: "93", error: undefined, isLoading: false }, - result2: { data: "0", error: undefined, isLoading: false } - }, - ].map(async ({ hookName, result1, result2 }) => { - await test(hookName, async (t) => { - const { results, rerender } = await renderReactHook({ - hook: db.items4[hookName]!, - props: [defaultFilter], - expectedRerenders: 2 - }); - - // Initial state - assert.deepStrictEqual( - results, [ - resultLoading, - result1 - ] - ); - - // Rerender with different filter - const { results: noResults } = await rerender({ - props: [{ id: -1 }], - expectedRerenders: 2, - }); - - // New results - assert.deepStrictEqual( - noResults, - [resultLoading, result2] - ); - }); - - })); + }), + ); await test("useCount planes", async (t) => { const { results } = await renderReactHook({ hook: db.planes.useCount!, props: [{}], - expectedRerenders: 2 + expectedRerenders: 2, }); - assert.deepStrictEqual( - results, - [ - { data: undefined, isLoading: true, error: undefined }, - { data: 100, error: undefined, isLoading: false } - ] - ); + assert.deepStrictEqual(results, [ + { data: undefined, isLoading: true, error: undefined }, + { data: 100, error: undefined, isLoading: false }, + ]); }); // // TODO fix useSync test await test("useSync", async (t) => { const funcHandles = { - '$cloneMultiSync': 1, - '$cloneSync': 1, - '$delete': 1, - '$find': 1, - '$get': 1, - '$unsync': 1, - '$update': 1, + $cloneMultiSync: 1, + $cloneSync: 1, + $delete: 
1, + $find: 1, + $get: 1, + $unsync: 1, + $update: 1, }; const plane0 = { id: 0, x: 20, - y: 0 - } + y: 0, + }; // await db.planes.insert({ name: "abc" }); // const { results: firstPlaneResults } = await renderReactHook({ @@ -142,13 +135,14 @@ export const clientHooks = async (db: DBHandlerClient, getSocketOptions: (watchS const { results, rerender } = await renderReactHook({ hook: db.planes.useSync!, props, - expectedRerenders: 3 + expectedRerenders: 3, }); assert.equal(results.length, 3); - assert.deepStrictEqual( - results[0], - { data: undefined, isLoading: true, error: undefined } - ); + assert.deepStrictEqual(results[0], { + data: undefined, + isLoading: true, + error: undefined, + }); /** This fails from time to time */ // assert.deepStrictEqual( // results[1], @@ -160,19 +154,19 @@ export const clientHooks = async (db: DBHandlerClient, getSocketOptions: (watchS assert.equal(lastData.length, 1); const lastDataItem = lastData[0]; assert.deepStrictEqual( - pickKeys(lastDataItem, Object.keys(plane0)), - plane0 + pickKeys(lastDataItem, Object.keys(plane0)), + plane0, ); // Update item db.planes.update({ id: 0 }, { x: 230 }); const { results: deletedResults } = await rerender({ props, - expectedRerenders: 3, + expectedRerenders: 3, }); assert.deepStrictEqual( - deletedResults.map(({ data }) => data?.[0]?.x), + deletedResults.map(({ data }) => data?.[0]?.x), [ undefined, // TODO - should be defined and 20 20, @@ -182,7 +176,7 @@ export const clientHooks = async (db: DBHandlerClient, getSocketOptions: (watchS // // Rerender with different filter // rerender({ id: -1 }); - + // await expectValues( // result, // [ @@ -190,7 +184,7 @@ export const clientHooks = async (db: DBHandlerClient, getSocketOptions: (watchS // { data: [], error: undefined, isLoading: false }, // ] // ); - + // await expectValues( // result, // [ @@ -198,8 +192,6 @@ export const clientHooks = async (db: DBHandlerClient, getSocketOptions: (watchS // { data: [], error: undefined, isLoading: false } // ] // ); - }); - }); -} \ No newline at end of file +}; diff --git a/tests/client/index.ts b/tests/client/index.ts index 1b2e62e2..67085798 100644 --- a/tests/client/index.ts +++ b/tests/client/index.ts @@ -3,7 +3,10 @@ import io from "socket.io-client"; import { AuthHandler } from "prostgles-client/dist/Auth"; export { AuthHandler } from "prostgles-client/dist/Auth"; -import type { DBHandlerClient, MethodHandler } from "prostgles-client/dist/prostgles"; +import type { + DBHandlerClient, + MethodHandler, +} from "prostgles-client/dist/prostgles"; import { DBSchemaTable } from "prostgles-types"; import { clientFileTests } from "../clientFileTests.spec"; import { clientOnlyQueries } from "../clientOnlyQueries.spec"; @@ -15,9 +18,14 @@ export { DBHandlerClient } from "prostgles-client/dist/prostgles"; const start = Date.now(); const log = (msgOrObj: any, extra?: any) => { - const msg = msgOrObj && typeof msgOrObj === "object"? JSON.stringify(msgOrObj) : msgOrObj; - console.log(...[`(client) t+ ${(Date.now() - start)}ms ` + msg, extra].filter(v => v)); -} + const msg = + msgOrObj && typeof msgOrObj === "object" + ? JSON.stringify(msgOrObj) + : msgOrObj; + console.log( + ...[`(client) t+ ${Date.now() - start}ms ` + msg, extra].filter((v) => v), + ); +}; log("Started client..."); const { TEST_NAME } = process.env; @@ -26,7 +34,7 @@ const path = "/teztz/s"; const pathWatchSchema = "/teztz/sWatchSchema"; const getSocketOptions = (watchSchema = false) => ({ uri: url, - path: watchSchema? pathWatchSchema : path, + path: watchSchema ? 
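An aside on the `funcHandles` list in the useSync test above: each synced row carries those `$`-prefixed handles, so a component can mutate through the row itself. A sketch component in the repo's `React.createElement` style; only the handle names come from the test, while the `handlesOnData` option and the loose typing are assumptions:

```typescript
// Sketch only: using the $update handle from a synced row. Not the library's
// documented example; option names beyond the filter are assumptions.
import React from "react";
import type { DBHandlerClient } from "./index";

export const PlaneNudger = ({ db }: { db: DBHandlerClient }) => {
  const { data, isLoading } = db.planes.useSync!(
    { id: 0 },
    { handlesOnData: true },
  );
  if (isLoading || !data?.length) return null;
  const plane = data[0];
  // $update pushes the change to every subscriber sharing these sync options
  return React.createElement(
    "button",
    { onClick: () => plane.$update({ x: plane.x + 1 }) },
    "nudge plane",
  );
};
```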
pathWatchSchema : path, query: { token: TEST_NAME }, }); const { uri, ...socketOpts } = getSocketOptions(); @@ -41,7 +49,7 @@ type ClientTestSpecV2 = (args: { }) => Promise; const tests: Record = { - main: async ({ db, methods, tableSchema, auth }) => { + main: async ({ db, methods, tableSchema, auth }) => { await db.sql(`DROP TABLE IF EXISTS ${newly_created_table}`); await isomorphicQueries(db, log); await clientOnlyQueries(db, auth, log, methods, tableSchema, TEST_NAME); @@ -51,52 +59,52 @@ const tests: Record = { await useProstglesTest(db, getSocketOptions); }, files: async ({ db, methods, tableSchema, auth }) => { - await clientFileTests(db, auth, log, methods, tableSchema) - }, + await clientFileTests(db, auth, log, methods, tableSchema); + }, rest_api: async ({ db, methods, tableSchema, auth }) => { - await clientRestApi(db, auth, log, methods, tableSchema, TEST_NAME); - } + await clientRestApi(db, auth, log, methods, tableSchema, TEST_NAME); + }, }; const test = tests[TEST_NAME]; -if(!test){ +if (!test) { throw `Invalid TEST_NAME env var provided (${TEST_NAME}). Expecting one of: ${Object.keys(tests)}`; } -const stopTest = (args?: { err: any; }) => { +const stopTest = (args?: { err: any }) => { const { err } = args ?? {}; - if(args) { + if (args) { log(`TEST_NAME: ${TEST_NAME} Error: ${JSON.stringify(err)}`, err); } else { log(`TEST_NAME: ${TEST_NAME} Finished OK`); } setTimeout(() => { - socket.emit("stop-test", !args? undefined : { err: (err ?? "Unknown").toString(), error: err }, cb => { - - log("Stopping client..."); - if(err) console.trace(err); - - }); + socket.emit( + "stop-test", + !args ? undefined : { err: (err ?? "Unknown").toString(), error: err }, + (cb) => { + log("Stopping client..."); + if (err) console.trace(err); + }, + ); setTimeout(() => { - process.exit(err? 1 : 0) + process.exit(err ? 1 : 0); }, 1000); }, 1000); - }; - + try { socket.on("connected", () => { - log("Client connected.") + log("Client connected."); }); socket.on("connect", () => { - log("Client connect.") + log("Client connect."); }); socket.on("connect_failed", (err) => { - log("connect_failed", err) - }) + log("connect_failed", err); + }); socket.on("start-test", (data) => { - //@ts-ignore prostgles({ socket, @@ -104,36 +112,41 @@ try { log("Reconnected"); }, onReady: async (db, methods, tableSchema, auth, isReconnect) => { - log(`TEST_NAME: ${TEST_NAME} Started`) + log(`TEST_NAME: ${TEST_NAME} Started`); try { //@ts-ignore - if(typeof window !== "undefined"){ + if (typeof window !== "undefined") { const onLog = (...args: any[]) => { - socket.emit("log", args.map(v => typeof v === "object"? JSON.stringify(v) : v).join(" ")); - } + socket.emit( + "log", + args + .map((v) => (typeof v === "object" ? 
JSON.stringify(v) : v)) + .join(" "), + ); + }; //@ts-ignore - window.onerror = function myErrorHandler(errorMsg, url, lineNumber) { + window.onerror = function myErrorHandler( + errorMsg, + url, + lineNumber, + ) { console.error("Error occured: " + errorMsg); stopTest({ err: errorMsg }); return false; - } + }; console.log = onLog; } await test({ db, methods, tableSchema, auth, isReconnect }); - - stopTest(); - } catch (err){ + stopTest(); + } catch (err) { stopTest({ err }); } - } - }); - - }) - -} catch(e) { - console.trace(e) + }, + }); + }); +} catch (e) { + console.trace(e); stopTest(e); throw e; } - \ No newline at end of file diff --git a/tests/client/renderReactHook.ts b/tests/client/renderReactHook.ts index 86505d6b..9d35835c 100644 --- a/tests/client/renderReactHook.ts +++ b/tests/client/renderReactHook.ts @@ -14,7 +14,7 @@ const { window } = new JSDOM(` `); global.window = window; global.navigator = window.navigator; -global.document = window.document; +global.document = window.document; import React from "react"; import { createRoot } from "react-dom/client"; @@ -23,155 +23,178 @@ type Hook = (...args: any[]) => any; // TODO: add hook result types type RenderHookArgs = { - hook: Hook; - props: any[]; - onResult?: (result: any) => void; - expectedRerenders: number; - timeout?: number; - /** - * Time to wait after the last render to resolve the promise - * Used to catch any extra unwanted renders - */ - lastRenderWait?: number; + hook: Hook; + props: any[]; + onResult?: (result: any) => void; + expectedRerenders: number; + timeout?: number; + /** + * Time to wait after the last render to resolve the promise + * Used to catch any extra unwanted renders + */ + lastRenderWait?: number; }; let testedHook: Function; -const root = createRoot(window.document.getElementById('root')); -const reactRender = ({ hook, props, onResult, onUnmount }: Pick, "hook" | "props" | "onResult"> & { onUnmount: () => void; }) => { - const BasicComponent = ({ props }) => { - const result = hook(...props); - React.useEffect(() => { - return onUnmount; - }, []); - onResult(result); - return React.createElement('h1', null, `Hello`); - } - root.render( - React.createElement(BasicComponent, { props }, null) - ); +const root = createRoot(window.document.getElementById("root")); +const reactRender = ({ + hook, + props, + onResult, + onUnmount, +}: Pick, "hook" | "props" | "onResult"> & { + onUnmount: () => void; +}) => { + const BasicComponent = ({ props }) => { + const result = hook(...props); + React.useEffect(() => { + return onUnmount; + }, []); + onResult(result); + return React.createElement("h1", null, `Hello`); + }; + root.render(React.createElement(BasicComponent, { props }, null)); }; type RenderResult = { - results: any[]; - rerender: (args: Omit) => Promise; -} + results: any[]; + rerender: (args: Omit) => Promise; +}; const resetBasicComponent = () => { - const OtherBasicComponent = ({ props }) => { - return React.createElement('div', null, `Goodbye`); - } - root.render( - React.createElement(OtherBasicComponent, { props: {} }, null) - ); -} + const OtherBasicComponent = ({ props }) => { + return React.createElement("div", null, `Goodbye`); + }; + root.render(React.createElement(OtherBasicComponent, { props: {} }, null)); +}; type OnEnd = (results: ReturnType[]) => Promise | void; export const renderReactHookManual = async (rootArgs: { - hook: H; - initialProps: Parameters; - onUnmount?: () => void; - /** - * Time to wait after the last render to resolve the promise - * default: 250 - */ - 
renderDuration?: number; - onEnd?: OnEnd; - onRender?: OnEnd; + hook: H; + initialProps: Parameters; + onUnmount?: () => void; + /** + * Time to wait after the last render to resolve the promise + * default: 250 + */ + renderDuration?: number; + onEnd?: OnEnd; + onRender?: OnEnd; }): Promise<{ - setProps: (props: Parameters, opts: { waitFor?: number; onEnd?: OnEnd; }) => void; - getResults: () => ReturnType[]; + setProps: ( + props: Parameters, + opts: { waitFor?: number; onEnd?: OnEnd }, + ) => void; + getResults: () => ReturnType[]; }> => { - const { hook, onUnmount, renderDuration = 250, onEnd, onRender } = rootArgs; - let lastRenderWaitTimeout: NodeJS.Timeout | null = null; - let didResolve = false; - let setProps: (props: any[]) => void; - resetBasicComponent(); - return new Promise((resolve, reject) => { - const results = []; - const onCompRender = (result) => { - results.push(result); - if(didResolve) return; - onRender?.(results); - clearTimeout(lastRenderWaitTimeout); - lastRenderWaitTimeout = setTimeout(async () => { - - if(!setProps) { - reject("setProps not set"); - return; - } - await onEnd?.(results); - didResolve = true; - return resolve({ - setProps: async (props, { waitFor = 250, onEnd } = {}) => { - setProps(props); - await tout(waitFor); - await onEnd?.(results); - }, - getResults: () => results - }); - }, renderDuration); - } - const BasicComponent = ({ props: initialProps }) => { - const [props, _setProps] = React.useState(initialProps); - setProps = _setProps; - const result = hook(...props); - React.useEffect(() => { - return () => { - onUnmount?.(); - }; - }, []); - onCompRender(result); - return React.createElement('h1', null, `Hello`); - } - root.render( - React.createElement(BasicComponent, { props: rootArgs.initialProps }, null) - ); - }); -} + const { hook, onUnmount, renderDuration = 250, onEnd, onRender } = rootArgs; + let lastRenderWaitTimeout: NodeJS.Timeout | null = null; + let didResolve = false; + let setProps: (props: any[]) => void; + resetBasicComponent(); + return new Promise((resolve, reject) => { + const results = []; + const onCompRender = (result) => { + results.push(result); + if (didResolve) return; + onRender?.(results); + clearTimeout(lastRenderWaitTimeout); + lastRenderWaitTimeout = setTimeout(async () => { + if (!setProps) { + reject("setProps not set"); + return; + } + await onEnd?.(results); + didResolve = true; + return resolve({ + setProps: async (props, { waitFor = 250, onEnd } = {}) => { + setProps(props); + await tout(waitFor); + await onEnd?.(results); + }, + getResults: () => results, + }); + }, renderDuration); + }; + const BasicComponent = ({ props: initialProps }) => { + const [props, _setProps] = React.useState(initialProps); + setProps = _setProps; + const result = hook(...props); + React.useEffect(() => { + return () => { + onUnmount?.(); + }; + }, []); + onCompRender(result); + return React.createElement("h1", null, `Hello`); + }; + root.render( + React.createElement( + BasicComponent, + { props: rootArgs.initialProps }, + null, + ), + ); + }); +}; -export const renderReactHook = (rootArgs: RenderHookArgs): Promise => { - const { hook, props, onResult, expectedRerenders, timeout = 5000, lastRenderWait = 250 } = rootArgs; - const isRerender = testedHook && testedHook === hook; - if(testedHook && testedHook !== hook) { - resetBasicComponent(); - } - testedHook = hook; - let lastRenderWaitTimeout: NodeJS.Timeout | null = null; - return new Promise((resolve, reject) => { - const results: any[] = []; - let resolved = false; - 
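The promise resolution in `renderReactHookManual` above is a debounce in disguise: every render clears and re-arms a timer, and the hook run is considered settled once `renderDuration` passes with no new render. The same mechanism on its own (names are mine):

```typescript
// Sketch: resolve once no event has arrived for `quietMs`, mirroring the
// clearTimeout/setTimeout settle detection in renderReactHookManual above.
const makeSettleDetector = <T>(quietMs: number) => {
  const events: T[] = [];
  let timer: NodeJS.Timeout | undefined;
  let resolveSettled!: (events: T[]) => void;
  const settled = new Promise<T[]>((res) => (resolveSettled = res));
  return {
    settled,
    onEvent: (event: T) => {
      events.push(event);
      clearTimeout(timer);
      timer = setTimeout(() => resolveSettled(events), quietMs);
    },
  };
};

// const { onEvent, settled } = makeSettleDetector<unknown>(250);
// wire onEvent into the component's render callback, then: await settled;
```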
const onRender = (result) => { - results.push(result); - onResult?.(result); - clearTimeout(lastRenderWaitTimeout); - resolved = expectedRerenders === results.length; - if(resolved){ - lastRenderWaitTimeout = setTimeout(() => { - resolve({ - results, - rerender: (args: Omit) => renderReactHook({ - hook, - ...args, - }) - }); - }, lastRenderWait); - } - } - reactRender({ - hook, - props, - onResult: onRender, - onUnmount: () => { - if(isRerender){ - reject(new Error("Unmounted before expected rerenders")); - } - } - }); - setTimeout(() => { - if(!resolved){ - reject(new Error(`Expected ${expectedRerenders} rerenders, got ${results.length}:\n${JSON.stringify(results)}`)); - } - }, timeout); - }); -} +export const renderReactHook = ( + rootArgs: RenderHookArgs, +): Promise => { + const { + hook, + props, + onResult, + expectedRerenders, + timeout = 5000, + lastRenderWait = 250, + } = rootArgs; + const isRerender = testedHook && testedHook === hook; + if (testedHook && testedHook !== hook) { + resetBasicComponent(); + } + testedHook = hook; + let lastRenderWaitTimeout: NodeJS.Timeout | null = null; + return new Promise((resolve, reject) => { + const results: any[] = []; + let resolved = false; + const onRender = (result) => { + results.push(result); + onResult?.(result); + clearTimeout(lastRenderWaitTimeout); + resolved = expectedRerenders === results.length; + if (resolved) { + lastRenderWaitTimeout = setTimeout(() => { + resolve({ + results, + rerender: (args: Omit) => + renderReactHook({ + hook, + ...args, + }), + }); + }, lastRenderWait); + } + }; + reactRender({ + hook, + props, + onResult: onRender, + onUnmount: () => { + if (isRerender) { + reject(new Error("Unmounted before expected rerenders")); + } + }, + }); + setTimeout(() => { + if (!resolved) { + reject( + new Error( + `Expected ${expectedRerenders} rerenders, got ${results.length}:\n${JSON.stringify(results)}`, + ), + ); + } + }, timeout); + }); +}; -export const tout = (ms: number) => new Promise(res => setTimeout(res, ms)); \ No newline at end of file +export const tout = (ms: number) => new Promise((res) => setTimeout(res, ms)); diff --git a/tests/client/useProstgles.spec.ts b/tests/client/useProstgles.spec.ts index 12284f00..606ac085 100644 --- a/tests/client/useProstgles.spec.ts +++ b/tests/client/useProstgles.spec.ts @@ -6,30 +6,26 @@ import type { DBHandlerClient } from "./index"; import { renderReactHook, renderReactHookManual } from "./renderReactHook"; export const newly_created_table = "newly_created_table"; -export const useProstglesTest = async (db: DBHandlerClient, getSocketOptions: (watchSchema?: boolean) => AnyObject) => { +export const useProstglesTest = async ( + db: DBHandlerClient, + getSocketOptions: (watchSchema?: boolean) => AnyObject, +) => { await db.sql(`DROP TABLE IF EXISTS ${newly_created_table};`); await describe("useProstgles hook", async (t) => { const socketOptions = getSocketOptions(); await test("useProstglesClient", async (t) => { - const { results: [res1, res2] } = await renderReactHook({ + const { + results: [res1, res2], + } = await renderReactHook({ hook: useProstglesClient, props: [{ socketOptions }], - expectedRerenders: 2 + expectedRerenders: 2, }); - assert.deepStrictEqual( - res1, - { isLoading: true } - ); - assert.equal( - typeof (res2 as any)?.dbo.items4.useFind, - "function" - ); - assert.equal( - typeof (res2 as any)?.dbo[newly_created_table], - "undefined" - ); + assert.deepStrictEqual(res1, { isLoading: true }); + assert.equal(typeof (res2 as any)?.dbo.items4.useFind, 
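For reviewers unfamiliar with the harness, here is roughly how `renderReactHook` above is driven; `useTick` is a made-up hook that renders exactly twice, which is what the `expectedRerenders: 2` contract expects before the promise resolves:

```typescript
// Sketch only: exercising the harness above with a hypothetical two-render hook.
import { useEffect, useState } from "react";
import { renderReactHook } from "./renderReactHook";

const useTick = () => {
  const [tick, setTick] = useState(0);
  useEffect(() => {
    setTick(1); // triggers exactly one extra render after mount
  }, []);
  return tick;
};

const run = async () => {
  const { results } = await renderReactHook({
    hook: useTick,
    props: [],
    expectedRerenders: 2, // rejects after `timeout` if the count never matches
  });
  console.log(results); // [0, 1]
};
run();
```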
"function"); + assert.equal(typeof (res2 as any)?.dbo[newly_created_table], "undefined"); }); - + await test("useProstglesClient with schema reload", async (t) => { await db.sql(`select pg_sleep(1)`); await renderReactHookManual({ @@ -37,35 +33,26 @@ export const useProstglesTest = async (db: DBHandlerClient, getSocketOptions: (w initialProps: [{ socketOptions: getSocketOptions(true) }], renderDuration: 1000, onRender: async (results) => { - if(results.length !== 1) return; + if (results.length !== 1) return; db.sql(`CREATE TABLE ${newly_created_table}(id integer);`); }, onEnd: async (results) => { const [res1, res2, res3] = results; - assert.deepStrictEqual( - res1, - { isLoading: true } - ); - assert.equal( - res2.isLoading, - false - ); + assert.deepStrictEqual(res1, { isLoading: true }); + assert.equal(res2.isLoading, false); assert.equal( typeof (res2 as any)?.dbo[newly_created_table]?.useFind, - "undefined" + "undefined", ); assert.equal( typeof (res3 as any)?.dbo[newly_created_table].useFind, - "function" + "function", ); - assert.equal( - results.length, 3 - ); - + assert.equal(results.length, 3); + const count = await (res3 as any)?.dbo[newly_created_table].count(); assert.equal(count, 0); - - } + }, }); }); @@ -75,23 +62,26 @@ export const useProstglesTest = async (db: DBHandlerClient, getSocketOptions: (w initialProps: [{ socketOptions, skip: true }], onEnd: async (results) => { assert.deepStrictEqual(results, [{ isLoading: true }]); - } + }, }); await setProps([{ socketOptions }], { onEnd: async (results) => { assert.equal(results.length, 3); const [res1, res2, res3] = results; - assert.deepStrictEqual([res1, res2], [{ isLoading: true }, { isLoading: true }]); + assert.deepStrictEqual( + [res1, res2], + [{ isLoading: true }, { isLoading: true }], + ); const count = await (res3 as any)?.dbo.items4.count(); assert.equal(count, 0); assert.equal(res3.isLoading, false); - if("error" in res3) throw res3.error; + if ("error" in res3) throw res3.error; assert.equal( typeof res3.dbo[newly_created_table]?.useFind, - "undefined" + "undefined", ); - } + }, }); await setProps([{ socketOptions: getSocketOptions(true) }], { @@ -100,21 +90,19 @@ export const useProstglesTest = async (db: DBHandlerClient, getSocketOptions: (w const [res1, res2, res3, res4, res5] = results; assert.equal(results.length, 5); assert.equal(res5.isLoading, false); - if("error" in res5) throw res5.error; - + if ("error" in res5) throw res5.error; + const count = await res5.dbo.items4.count(); assert.equal(count, 0); - + assert.equal( typeof res5.dbo[newly_created_table].useFind, - "function" + "function", ); const count0 = await res5.dbo[newly_created_table].count(); assert.equal(count0, 0); - } + }, }); }); - }); - -} \ No newline at end of file +}; diff --git a/tests/clientFileTests.spec.ts b/tests/clientFileTests.spec.ts index 71b1e6d1..90fa76d1 100644 --- a/tests/clientFileTests.spec.ts +++ b/tests/clientFileTests.spec.ts @@ -1,17 +1,22 @@ - import { DBSchemaTable } from "prostgles-types"; import type { AuthHandler, DBHandlerClient } from "./client"; -import { strict as assert } from 'assert'; +import { strict as assert } from "assert"; import fs from "fs"; import { describe, test } from "node:test"; -export const clientFileTests = async (db: DBHandlerClient, auth: AuthHandler, log: (...args: any[]) => any, methods, tableSchema: DBSchemaTable[]) => { - +export const clientFileTests = async ( + db: DBHandlerClient, + auth: AuthHandler, + log: (...args: any[]) => any, + methods, + tableSchema: DBSchemaTable[], +) => 
{ await describe("clientFileTests", async () => { - const fileFolder = `${__dirname}/../../server/dist/server/media/`; - const getFiles = () => db.sql("SELECT id, original_name FROM files", { }, { returnType: "rows" }) - await db.sql(` + const getFiles = () => + db.sql("SELECT id, original_name FROM files", {}, { returnType: "rows" }); + await db.sql( + ` ALTER TABLE users_public_info DROP CONSTRAINT "users_public_info_avatar_fkey"; @@ -19,45 +24,54 @@ export const clientFileTests = async (db: DBHandlerClient, auth: AuthHandler, lo ADD FOREIGN KEY ("avatar") REFERENCES "files" ("id") ON DELETE SET NULL - `, { }) + `, + {}, + ); const initialFiles = await getFiles(); await test("Files table is present", async () => { const files = await db.files.find!(); - assert.deepStrictEqual(files, []); + assert.deepStrictEqual(files, []); }); - - const file = { - data: Buffer.from("This is a string", "utf-8"), - name: "sample_file.txt" + const file = { + data: Buffer.from("This is a string", "utf-8"), + name: "sample_file.txt", }; let insertedFile; await test("Insert file from nested insert", async () => { - const nestedInsert = await db.users_public_info.insert!({ name: "somename.txt", avatar: file }, { returning: "*" }); + const nestedInsert = await db.users_public_info.insert!( + { name: "somename.txt", avatar: file }, + { returning: "*" }, + ); const files = await db.files.find!(); assert.equal(files.length, 1); assert.equal(files[0].id, nestedInsert.avatar.id); assert.equal(files[0].original_name, file.name); - const initialFileStr = fs.readFileSync(fileFolder + files[0].name).toString('utf8'); + const initialFileStr = fs + .readFileSync(fileFolder + files[0].name) + .toString("utf8"); assert.equal(file.data.toString(), initialFileStr); - insertedFile = files[0] + insertedFile = files[0]; }); await test("Cannot Insert file directly", async () => { try { await db.files.insert!(file, { returning: "*" }); - throw "Should not be able to insert files directly" - } catch (err){ - assert.equal(err.message.startsWith("Direct inserts not allowed"), true); + throw "Should not be able to insert files directly"; + } catch (err) { + assert.equal( + err.message.startsWith("Direct inserts not allowed"), + true, + ); } }); await test("Can update allowed files directly", async () => { - const newData = { - data: Buffer.from("aa", "utf-8"), - name: "a.txt" - } + const newData = { + data: Buffer.from("aa", "utf-8"), + name: "a.txt", + }; await db.files.update!({ id: insertedFile.id }, newData); const newFiles = await db.files.find!(); assert.equal(newFiles.length, 1); @@ -65,28 +79,36 @@ export const clientFileTests = async (db: DBHandlerClient, auth: AuthHandler, lo assert.equal(newFile?.original_name, newData.name); assert.equal(newFile.id, insertedFile.id); assert.equal( - fs.readFileSync(fileFolder + newFile.name).toString('utf8').toString(), - newData.data.toString() + fs + .readFileSync(fileFolder + newFile.name) + .toString("utf8") + .toString(), + newData.data.toString(), ); }); await test("Can insert allowed files through a nested update", async () => { - await db.files.delete!(); const user = await db.users_public_info.findOne!(); - const newData = { - data: Buffer.from("nestedupdate", "utf-8"), - name: "nestedupdate.txt" - } - const d = await db.users_public_info.update!({ id: user?.id }, { avatar: newData }, { returning: "*" }); + const newData = { + data: Buffer.from("nestedupdate", "utf-8"), + name: "nestedupdate.txt", + }; + const d = await db.users_public_info.update!( + { id: user?.id }, + { avatar: 
newData }, + { returning: "*" }, + ); const avatarFile = await db.files.findOne!({ id: d?.at(0).avatar.id }); - const initialFileStr = fs.readFileSync(fileFolder + avatarFile!.name).toString('utf8'); + const initialFileStr = fs + .readFileSync(fileFolder + avatarFile!.name) + .toString("utf8"); assert.equal(newData.data.toString(), initialFileStr); }); await test("Can delete only allowed files directly", async () => { const users = await db.users_public_info.find!(); - for await(const user of users){ + for await (const user of users) { await db.files.delete!({ id: user.avatar.id }); await db.users_public_info.delete!({ id: user.id }); } @@ -94,9 +116,8 @@ export const clientFileTests = async (db: DBHandlerClient, auth: AuthHandler, lo await db.users_public_info.delete!(); const files = await db.files.find!(); assert.deepStrictEqual(files, []); - const latestFiles = await getFiles() + const latestFiles = await getFiles(); assert.equal(initialFiles.length, latestFiles.length); }); - }); -} \ No newline at end of file +}; diff --git a/tests/clientOnlyQueries.spec.ts b/tests/clientOnlyQueries.spec.ts index a67be22d..821b57e3 100644 --- a/tests/clientOnlyQueries.spec.ts +++ b/tests/clientOnlyQueries.spec.ts @@ -1,6 +1,11 @@ import { strict as assert } from "assert"; import type { DBHandlerClient, AuthHandler } from "./client"; -import { AnyObject, DBSchemaTable, SocketSQLStreamPacket, isDefined } from "prostgles-types"; +import { + AnyObject, + DBSchemaTable, + SocketSQLStreamPacket, + isDefined, +} from "prostgles-types"; import { tryRun, tryRunP } from "./isomorphicQueries.spec"; import { describe, test } from "node:test"; @@ -10,7 +15,7 @@ export const clientOnlyQueries = async ( log: (...args: any[]) => any, methods, tableSchema: DBSchemaTable[], - token: string + token: string, ) => { await describe("Client only queries", async (t) => { // await test("Social auth redirect routes work", async ( ) => { @@ -26,7 +31,7 @@ export const clientOnlyQueries = async ( const res = await db.sql!( `SELECT * FROM generate_series(1, ${expectedRowCount})`, {}, - { returnType: "stream" } + { returnType: "stream" }, ); const listener = async (packet: SocketSQLStreamPacket) => { if (packet.type === "error") { @@ -54,7 +59,7 @@ export const clientOnlyQueries = async ( const res = await db.sql!( query, {}, - { returnType: "stream", persistStreamConnection: true, streamLimit } + { returnType: "stream", persistStreamConnection: true, streamLimit }, ); const listener = async (packet: SocketSQLStreamPacket) => { try { @@ -89,7 +94,7 @@ export const clientOnlyQueries = async ( const res = await db.sql!( query, {}, - { returnType: "stream", persistStreamConnection: true } + { returnType: "stream", persistStreamConnection: true }, ); const pids: number[] = []; const listener = async (packet: SocketSQLStreamPacket) => { @@ -123,11 +128,14 @@ export const clientOnlyQueries = async ( const queryState = await db.sql!( "SELECT * FROM pg_stat_activity WHERE query = $1", [query], - { returnType: "rows" } + { returnType: "rows" }, ); assert.equal(queryState.length, 1); assert.equal(queryState[0].state, "idle"); - assert.equal(packet.error.message, "canceling statement due to user request"); + assert.equal( + packet.error.message, + "canceling statement due to user request", + ); resolve("ok"); } else { assert.equal(packet.type, "data"); @@ -148,7 +156,7 @@ export const clientOnlyQueries = async ( const res = await db.sql!( "SELECT * FROM generate_series(1, 1e5)", {}, - { returnType: "stream", streamLimit: 10 } + { 
returnType: "stream", streamLimit: 10 }, ); const listener = async (packet: SocketSQLStreamPacket) => { if (packet.type === "error") { @@ -175,7 +183,7 @@ export const clientOnlyQueries = async ( const queryState = await db.sql!( "SELECT * FROM pg_stat_activity WHERE query = $1", [query], - { returnType: "rows" } + { returnType: "rows" }, ); assert.equal(queryState.length, 0); resolve("ok"); @@ -204,7 +212,7 @@ export const clientOnlyQueries = async ( const res = await db.sql!( `SELECT v.* FROM generate_series(1, ${numberOfRows}) v`, {}, - { returnType: "stream" } + { returnType: "stream" }, ); let rows: any[] = []; const listener = async (packet: SocketSQLStreamPacket) => { @@ -220,7 +228,7 @@ export const clientOnlyQueries = async ( }; await res.start(listener); }); - }) + }), ); }); @@ -228,7 +236,11 @@ export const clientOnlyQueries = async ( await tryRunP("", async (resolve, reject) => { const getExpected = (val: string) => new Promise(async (resolve, reject) => { - const res = await db.sql!("SELECT ${val} as val", { val }, { returnType: "stream" }); + const res = await db.sql!( + "SELECT ${val} as val", + { val }, + { returnType: "stream" }, + ); const listener = async (packet: SocketSQLStreamPacket) => { try { assert.equal(packet.type, "data"); @@ -261,9 +273,14 @@ export const clientOnlyQueries = async ( const res = await db.sql!(badQuery, {}, { returnType: "stream" }); const listener = async (packet: SocketSQLStreamPacket) => { try { - const normalSqlError = await db.sql!(badQuery, {}).catch((err) => err); + const normalSqlError = await db.sql!(badQuery, {}).catch( + (err) => err, + ); assert.equal(packet.type, "error"); - assert.equal(packet.error.message, 'relation "not_existing_table" does not exist'); + assert.equal( + packet.error.message, + 'relation "not_existing_table" does not exist', + ); assert.deepEqual(packet.error, normalSqlError); resolve("ok"); } catch (err) { @@ -276,7 +293,11 @@ export const clientOnlyQueries = async ( await test("SQL Stream streamLimit", async () => { await tryRunP("", async (resolve, reject) => { const generate_series = "SELECT * FROM generate_series(1, 100)"; - const res = await db.sql!(generate_series, {}, { returnType: "stream", streamLimit: 10 }); + const res = await db.sql!( + generate_series, + {}, + { returnType: "stream", streamLimit: 10 }, + ); const listener = async (packet: SocketSQLStreamPacket) => { if (packet.type === "error") { reject(packet.error); @@ -296,7 +317,7 @@ export const clientOnlyQueries = async ( assert.deepStrictEqual( packet.rows.flat(), - Array.from({ length: 10 }, (_, i) => i + 1).flat() + Array.from({ length: 10 }, (_, i) => i + 1).flat(), ); resolve("ok"); } @@ -309,7 +330,11 @@ export const clientOnlyQueries = async ( await tryRunP("", async (resolve, reject) => { await db.sql!("TRUNCATE planes RESTART IDENTITY CASCADE;", {}); await db.sql!("INSERT INTO planes (last_updated) VALUES (56789);", {}); - const res = await db.sql!("SELECT * FROM planes", {}, { returnType: "stream" }); + const res = await db.sql!( + "SELECT * FROM planes", + {}, + { returnType: "stream" }, + ); const listener = async (packet: SocketSQLStreamPacket) => { if (packet.type === "error") { reject(packet.error); @@ -332,7 +357,7 @@ export const clientOnlyQueries = async ( const res = await db.sql!( "SELECT * FROM planes; SELECT 1 as a", {}, - { returnType: "stream" } + { returnType: "stream" }, ); const listener = async (packet: SocketSQLStreamPacket) => { if (packet.type === "error") { @@ -362,7 +387,9 @@ export const clientOnlyQueries = 
async ( return !!(h.getColumns && h.getInfo) ? k : undefined; }) .filter(isDefined); - const missingTbl = dbTables.find((t) => !tableSchema.some((st) => st.name === t)); + const missingTbl = dbTables.find( + (t) => !tableSchema.some((st) => st.name === t), + ); if (missingTbl) throw `${missingTbl} is missing from tableSchema: ${JSON.stringify(tableSchema)}`; const missingscTbl = tableSchema.find((t) => !dbTables.includes(t.name)); @@ -374,7 +401,7 @@ export const clientOnlyQueries = async ( const info = await db[tbl.name]?.getInfo?.(); assert.deepStrictEqual(tbl.columns, cols); assert.deepStrictEqual(tbl.info, info); - }) + }), ); }); @@ -388,27 +415,47 @@ export const clientOnlyQueries = async ( /* RAWSQL */ await tryRun("SQL Full result", async () => { if (!db.sql) throw "db.sql missing"; - const sqlStatement = await db.sql("SELECT $1", [1], { returnType: "statement" }); - assert.equal(sqlStatement, "SELECT 1", "db.sql statement query failed"); + const sqlStatement = await db.sql("SELECT $1", [1], { + returnType: "statement", + }); + assert.equal( + sqlStatement, + "SELECT 1", + "db.sql statement query failed", + ); await db.sql("SELECT 1 -- ${param}", {}, { hasParams: false }); const arrayMode = await db.sql("SELECT 1 as a, 2 as a", undefined, { returnType: "arrayMode", }); - assert.equal(arrayMode.rows?.[0].join("."), "1.2", "db.sql statement arrayMode failed"); + assert.equal( + arrayMode.rows?.[0].join("."), + "1.2", + "db.sql statement arrayMode failed", + ); assert.equal( arrayMode.fields?.map((f) => f.name).join("."), "a.a", - "db.sql statement arrayMode failed" + "db.sql statement arrayMode failed", ); - const select1 = await db.sql("SELECT $1 as col1", [1], { returnType: "rows" }); - assert.deepStrictEqual(select1[0], { col1: 1 }, "db.sql justRows query failed"); + const select1 = await db.sql("SELECT $1 as col1", [1], { + returnType: "rows", + }); + assert.deepStrictEqual( + select1[0], + { col1: 1 }, + "db.sql justRows query failed", + ); const fullResult = await db.sql("SELECT $1 as col1", [1]); // console.log(fullResult) - assert.deepStrictEqual(fullResult.rows[0], { col1: 1 }, "db.sql query failed"); + assert.deepStrictEqual( + fullResult.rows[0], + { col1: 1 }, + "db.sql query failed", + ); assert.deepStrictEqual( fullResult.fields, [ @@ -425,7 +472,7 @@ export const clientOnlyQueries = async ( tsDataType: "number", }, ], - "db.sql query failed" + "db.sql query failed", ); }); @@ -436,7 +483,7 @@ export const clientOnlyQueries = async ( const sub = await db.sql( "LISTEN chnl ", {}, - { allowListen: true, returnType: "arrayMode" } + { allowListen: true, returnType: "arrayMode" }, ); if (!("addListener" in sub)) { reject("addListener missing"); @@ -448,7 +495,7 @@ export const clientOnlyQueries = async ( if (notif === expected) resolve(true); else reject( - `Notif value is not what we expect: ${JSON.stringify(notif)} is not ${JSON.stringify(expected)} (expected) ` + `Notif value is not what we expect: ${JSON.stringify(notif)} is not ${JSON.stringify(expected)} (expected) `, ); }); db.sql("NOTIFY chnl , 'hello'; "); @@ -462,14 +509,18 @@ export const clientOnlyQueries = async ( async (resolve, reject) => { if (!db.sql) throw "db.sql missing"; - const sub = await db.sql("", {}, { returnType: "noticeSubscription" }); + const sub = await db.sql( + "", + {}, + { returnType: "noticeSubscription" }, + ); sub.addListener((notice) => { const expected = "hello2"; if (notice.message === expected) resolve(true); else reject( - `Notice value is not what we expect: 
${JSON.stringify(notice)} is not ${JSON.stringify(expected)} (expected) ` + `Notice value is not what we expect: ${JSON.stringify(notice)} is not ${JSON.stringify(expected)} (expected) `, ); }); db.sql(` @@ -481,7 +532,7 @@ export const clientOnlyQueries = async ( END $$; `); }, - { log } + { log }, ); /* REPLICATION */ @@ -492,7 +543,12 @@ export const clientOnlyQueries = async ( await db.sql!("TRUNCATE planes RESTART IDENTITY CASCADE;", {}); let inserts = new Array(100) .fill(null) - .map((d, i) => ({ id: i, flight_number: `FN${i}`, x: Math.random(), y: i })); + .map((d, i) => ({ + id: i, + flight_number: `FN${i}`, + x: Math.random(), + y: i, + })); await db.planes.insert!(inserts); const CLOCK_DRIFT = 2000; @@ -510,34 +566,46 @@ export const clientOnlyQueries = async ( */ /* After all sync records are updated to x10 here we'll update them to x20 */ - const sP = await db.planes.subscribe!({ x: 10 }, {}, async (planes) => { - const p10 = planes.filter((p) => p.x == 10); - log( - Date.now() + - ": sub stats: x10 -> " + - p10.length + - " x20 ->" + - planes.filter((p) => p.x == 20).length - ); + const sP = await db.planes.subscribe!( + { x: 10 }, + {}, + async (planes) => { + const p10 = planes.filter((p) => p.x == 10); + log( + Date.now() + + ": sub stats: x10 -> " + + p10.length + + " x20 ->" + + planes.filter((p) => p.x == 20).length, + ); - if (p10.length === 100) { - /** 2 second delay to account for client-server clock drift */ - setTimeout(async () => { - // db.planes.findOne({}, { select: { last_updated: "$max"}}).then(log); - - await sP.unsubscribe(); - log(Date.now() + ": sub: db.planes.update({}, { x: 20, last_updated });"); - const dLastUpdated = Math.max(...p10.map((v) => +v.last_updated)); - const last_updated = Date.now(); - if (dLastUpdated >= last_updated) - throw "dLastUpdated >= last_updated should not happen"; - await db.planes.update!({}, { x: 20, last_updated }); - log(Date.now() + ": sub: Updated to x20", await db.planes.count!({ x: 20 })); - - // db.planes.findOne({}, { select: { last_updated: "$max"}}).then(log) - }, CLOCK_DRIFT); - } - }); + if (p10.length === 100) { + /** 2 second delay to account for client-server clock drift */ + setTimeout(async () => { + // db.planes.findOne({}, { select: { last_updated: "$max"}}).then(log); + + await sP.unsubscribe(); + log( + Date.now() + + ": sub: db.planes.update({}, { x: 20, last_updated });", + ); + const dLastUpdated = Math.max( + ...p10.map((v) => +v.last_updated), + ); + const last_updated = Date.now(); + if (dLastUpdated >= last_updated) + throw "dLastUpdated >= last_updated should not happen"; + await db.planes.update!({}, { x: 20, last_updated }); + log( + Date.now() + ": sub: Updated to x20", + await db.planes.count!({ x: 20 }), + ); + + // db.planes.findOne({}, { select: { last_updated: "$max"}}).then(log) + }, CLOCK_DRIFT); + } + }, + ); let updt = 0; const sync = await db.planes.sync!( @@ -546,7 +614,10 @@ export const clientOnlyQueries = async ( (planes, deltas) => { const x20 = planes.filter((p) => p.x == 20).length; const x10 = planes.filter((p) => p.x == 10); - log(Date.now() + `: sync stats: x10 -> ${x10.length} x20 -> ${x20}`); + log( + Date.now() + + `: sync stats: x10 -> ${x10.length} x20 -> ${x20}`, + ); let update = false; planes.map((p) => { @@ -556,7 +627,9 @@ export const clientOnlyQueries = async ( updt++; update = true; p.$update!({ x: 10 }); - log(Date.now() + `: sync: p.$update({ x: 10 }); (id: ${p.id})`); + log( + Date.now() + `: sync: p.$update({ x: 10 }); (id: ${p.id})`, + ); } }); // 
if(update) log("$update({ x: 10 })", updt) @@ -568,11 +641,11 @@ export const clientOnlyQueries = async ( Date.now() + ": sync end: Finished replication test. Inserting 100 rows then updating two times took: " + (Date.now() - start - CLOCK_DRIFT) + - "ms" + "ms", ); resolveTest(true); } - } + }, ); const msLimit = 20000; @@ -580,12 +653,17 @@ export const clientOnlyQueries = async ( const dbCounts = { x10: await db.planes.count!({ x: 10 }), x20: await db.planes.count!({ x: 20 }), - latest: await db.planes.findOne!({}, { orderBy: { last_updated: -1 } }), + latest: await db.planes.findOne!( + {}, + { orderBy: { last_updated: -1 } }, + ), }; const syncCounts = { x10: sync?.getItems().filter((d) => d.x == 10).length, x20: sync?.getItems().filter((d) => d.x == 20).length, - latest: sync?.getItems()?.sort((a, b) => +b.last_updated - +a.last_updated)[0], + latest: sync + ?.getItems() + ?.sort((a, b) => +b.last_updated - +a.last_updated)[0], }; const msg = "Replication test failed due to taking longer than " + @@ -606,7 +684,11 @@ export const clientOnlyQueries = async ( /* TODO: SECURITY */ log("auth.user:", auth.user); - assert.equal(!!auth.login?.withPassword, true, "auth.login.withPassword should be defined"); + assert.equal( + !!auth.login?.withPassword, + true, + "auth.login.withPassword should be defined", + ); const isUser = !!auth.user; // Public data @@ -620,14 +702,19 @@ export const clientOnlyQueries = async ( const cols = await db.insert_rules.getColumns!(); assert.equal( - cols.filter(({ insert, update: u, select: s, delete: d }) => insert && !u && s && !d) - .length, + cols.filter( + ({ insert, update: u, select: s, delete: d }) => + insert && !u && s && !d, + ).length, 2, - "Validated getColumns failed" + "Validated getColumns failed", ); /* Validated insert */ - const expectB = await db.insert_rules.insert!({ name: "a" }, { returning: "*" }); + const expectB = await db.insert_rules.insert!( + { name: "a" }, + { returning: "*" }, + ); assert.deepStrictEqual(expectB, { name: "b" }, "Validated insert failed"); /* forced UUID insert */ @@ -637,19 +724,26 @@ export const clientOnlyQueries = async ( try { await db.insert_rules.insert!({ name: "notfail" }, { returning: "*" }); await db.insert_rules.insert!({ name: "fail" }, { returning: "*" }); - await db.insert_rules.insert!({ name: "fail-check" }, { returning: "*" }); + await db.insert_rules.insert!( + { name: "fail-check" }, + { returning: "*" }, + ); throw "post insert checks should have failed"; } catch (err) {} - assert.equal(0, +(await db.insert_rules.count!({ name: "fail" })), "postValidation failed"); + assert.equal( + 0, + +(await db.insert_rules.count!({ name: "fail" })), + "postValidation failed", + ); assert.equal( 0, +(await db.insert_rules.count!({ name: "fail-check" })), - "checkFilter failed" + "checkFilter failed", ); assert.equal( 1, +(await db.insert_rules.count!({ name: "notfail" })), - "postValidation failed" + "postValidation failed", ); }); @@ -689,24 +783,39 @@ export const clientOnlyQueries = async ( some_table.sync({}, { handlesOnData: false }, console.log); return no data items */ - await test("sync handlesOnData true -> false no data bug", { skip: isUser }, async () => { - let sync1Planes: AnyObject[] = []; - let sync2Planes: AnyObject[] = []; - const sync1 = await db.planes.sync!({}, { handlesOnData: true }, async (planes1, deltas) => { - sync1Planes = planes1; - log("sync handlesOnData true", planes1.length); - }); - await tout(1000); - const sync2 = await db.planes.sync!({}, { handlesOnData: false }, 
(planes2, deltas) => { - sync2Planes = planes2; - }); - await tout(1000); - if (sync1Planes.length !== sync2Planes.length || sync1Planes.length === 0) { - throw `sync2Planes.length !== 100: ${sync1Planes.length} vs ${sync2Planes.length}`; - } - await sync1.$unsync(); - await sync2.$unsync(); - }); + await test( + "sync handlesOnData true -> false no data bug", + { skip: isUser }, + async () => { + let sync1Planes: AnyObject[] = []; + let sync2Planes: AnyObject[] = []; + const sync1 = await db.planes.sync!( + {}, + { handlesOnData: true }, + async (planes1, deltas) => { + sync1Planes = planes1; + log("sync handlesOnData true", planes1.length); + }, + ); + await tout(1000); + const sync2 = await db.planes.sync!( + {}, + { handlesOnData: false }, + (planes2, deltas) => { + sync2Planes = planes2; + }, + ); + await tout(1000); + if ( + sync1Planes.length !== sync2Planes.length || + sync1Planes.length === 0 + ) { + throw `sync2Planes.length !== 100: ${sync1Planes.length} vs ${sync2Planes.length}`; + } + await sync1.$unsync(); + await sync2.$unsync(); + }, + ); // User data await test("Security rules example", { skip: !isUser }, async () => { diff --git a/tests/clientRestApi.spec.ts b/tests/clientRestApi.spec.ts index 4fd92bec..a36410e8 100644 --- a/tests/clientRestApi.spec.ts +++ b/tests/clientRestApi.spec.ts @@ -1,17 +1,40 @@ -import { strict as assert } from 'assert'; +import { strict as assert } from "assert"; import type { DBHandlerClient, AuthHandler } from "./client"; import { DBSchemaTable } from "prostgles-types"; import { describe, test } from "node:test"; -export const clientRestApi = async(db: DBHandlerClient, auth: AuthHandler, log: (...args: any[]) => any, methods, tableSchema: DBSchemaTable[], token: string) => { - +export const clientRestApi = async ( + db: DBHandlerClient, + auth: AuthHandler, + log: (...args: any[]) => any, + methods, + tableSchema: DBSchemaTable[], + token: string, +) => { await describe("clientRestApi", async () => { - - const rest = async ({ tableName, command, noAuth }: { tableName: string; command: string; noAuth?: boolean; }, ...params: any[]) => post({ path: `db/${tableName}/${command}`, noAuth, token }, ...(params ?? [])) - const dbRest = (tableName: string, command: string, ...params: any[]) => rest({ tableName, command }, ...(params ?? [])) - const dbRestNoAuth = (tableName: string, command: string, ...params: any[]) => rest({ tableName, command, noAuth: true }, ...(params ?? [])); - const sqlRest = (query: string, ...params: any[]) => post({ path: `db/sql`, token }, query, ...(params ?? [])) - const sqlMethods = (methodName: string, ...params: any[]) => post({ path: `methods/${methodName}`, token }, ...(params ?? [])) + const rest = async ( + { + tableName, + command, + noAuth, + }: { tableName: string; command: string; noAuth?: boolean }, + ...params: any[] + ) => + post( + { path: `db/${tableName}/${command}`, noAuth, token }, + ...(params ?? []), + ); + const dbRest = (tableName: string, command: string, ...params: any[]) => + rest({ tableName, command }, ...(params ?? [])); + const dbRestNoAuth = ( + tableName: string, + command: string, + ...params: any[] + ) => rest({ tableName, command, noAuth: true }, ...(params ?? [])); + const sqlRest = (query: string, ...params: any[]) => + post({ path: `db/sql`, token }, query, ...(params ?? [])); + const sqlMethods = (methodName: string, ...params: any[]) => + post({ path: `methods/${methodName}`, token }, ...(params ?? 
[])); await test("Rest api test", async () => { const dataFilter = { id: 123123123, last_updated: Date.now() }; @@ -19,14 +42,14 @@ export const clientRestApi = async(db: DBHandlerClient, auth: AuthHandler, log: await db.planes.insert?.(dataFilter); const item = await db.planes.findOne?.(dataFilter); const itemR = await dbRest("planes", "findOne", dataFilter); - const itemRNA = await dbRestNoAuth("planes", "findOne", dataFilter); + const itemRNA = await dbRestNoAuth("planes", "findOne", dataFilter); assert.deepStrictEqual(item, itemR); const { last_updated, ...allowedData } = item!; assert.deepStrictEqual(allowedData, itemRNA); - + await dbRest("planes", "insert", dataFilter1); - const filter = { "id.>=": dataFilter.id } - const count = await db.planes.count?.(filter) + const filter = { "id.>=": dataFilter.id }; + const count = await db.planes.count?.(filter); const restCount = await dbRest("planes", "count", filter); assert.equal(count, 2); assert.equal(restCount, 2); @@ -36,47 +59,49 @@ export const clientRestApi = async(db: DBHandlerClient, auth: AuthHandler, log: const restTableSchema = await post({ path: "schema", token }); assert.deepStrictEqual(tableSchema, restTableSchema.tableSchema); - await Promise.all(tableSchema.map(async tbl => { - const cols = await db[tbl.name]?.getColumns?.(); - const info = await db[tbl.name]?.getInfo?.(); - if(db[tbl.name]?.getColumns){ - const restCols = await dbRest(tbl.name, "getColumns", {}); - assert.deepStrictEqual(tbl.columns, cols); - assert.deepStrictEqual(tbl.columns, restCols); - assert.deepStrictEqual(tbl.info, info); - } - })); + await Promise.all( + tableSchema.map(async (tbl) => { + const cols = await db[tbl.name]?.getColumns?.(); + const info = await db[tbl.name]?.getInfo?.(); + if (db[tbl.name]?.getColumns) { + const restCols = await dbRest(tbl.name, "getColumns", {}); + assert.deepStrictEqual(tbl.columns, cols); + assert.deepStrictEqual(tbl.columns, restCols); + assert.deepStrictEqual(tbl.info, info); + } + }), + ); const two22 = await sqlMethods("get", {}); assert.equal(two22, 222); }); - - }); -} +}; -const post = async ({ path, noAuth, token }: { path: string; token: string; noAuth?: boolean}, ...params: any[]) => { +const post = async ( + { path, noAuth, token }: { path: string; token: string; noAuth?: boolean }, + ...params: any[] +) => { const headers = new Headers({ - 'Authorization': `Bearer ${Buffer.from(noAuth? "noAuth" : token, "utf-8").toString("base64")}`, - 'Accept': 'application/json', - 'Content-Type': 'application/json' + Authorization: `Bearer ${Buffer.from(noAuth ? "noAuth" : token, "utf-8").toString("base64")}`, + Accept: "application/json", + "Content-Type": "application/json", + }); + const res = await fetch(`http://127.0.0.1:3001/api/${path}`, { + method: "POST", + headers, + body: !params?.length ? undefined : JSON.stringify(params), + }); + const resBodyJson = await res.text().then((text) => { + try { + return JSON.parse(text); + } catch { + return text; + } }); - const res = await fetch(`http://127.0.0.1:3001/api/${path}`, { - method: "POST", - headers, - body: !params?.length? 
undefined : JSON.stringify(params)
-  });
-  const resBodyJson = await res.text()
-    .then(text => {
-      try {
-        return JSON.parse(text);
-      } catch {
-        return text;
-      }
-    });
-  if(res.status !== 200){
+  if (res.status !== 200) {
   return Promise.reject(resBodyJson);
  }
  return resBodyJson;
-} \ No newline at end of file
+};
diff --git a/tests/config_test/index.html b/tests/config_test/index.html
index 6af273e9..81706c7e 100644
--- a/tests/config_test/index.html
+++ b/tests/config_test/index.html
@@ -1,109 +1,111 @@
+ + + - - \ No newline at end of file + db.media.subscribe({}, {}, (d) => { + if (d1) { + d1.innerText = JSON.stringify(d, null, 2); + d1.onclick = () => (window.location = d[0].url); + } + }); + // db.various.subscribe({ }, { select: { id: 1, media: "*"} }, d => { + // if(d1){ + // d1.innerText = JSON.stringify(d, null, 2); + // d1.onclick = () => window.location = (d[0].url); + // } + // }); + } catch (e) { + console.error(e); + } + }, + }); + + + diff --git a/tests/isomorphicQueries.spec.ts b/tests/isomorphicQueries.spec.ts index b03c05b8..6ac77af0 100644 --- a/tests/isomorphicQueries.spec.ts +++ b/tests/isomorphicQueries.spec.ts @@ -11,7 +11,7 @@ import { SubscriptionHandler, pickKeys } from "prostgles-types"; export const isomorphicQueries = async ( db: DBOFullyTyped | DBHandlerClient, - log: (msg: string, extra?: any) => void + log: (msg: string, extra?: any) => void, ) => { log("Starting isomorphic queries"); const isServer = !!(db.items as any).dboBuilder; @@ -36,11 +36,19 @@ export const isomorphicQueries = async ( const errFind = await db.items.find?.({ h: "a" }).catch((err) => err); const errCount = await db.items.count?.({ h: "a" }).catch((err) => err); const errSize = await db.items.size?.({ h: "a" }).catch((err) => err); - const errFindOne = await db.items.findOne?.({ h: "a" }).catch((err) => err); + const errFindOne = await db.items + .findOne?.({ h: "a" }) + .catch((err) => err); const errDelete = await db.items.delete?.({ h: "a" }).catch((err) => err); - const errUpdate = await db.items.update?.({}, { h: "a" }).catch((err) => err); - const errUpdateBatch = await db.items.updateBatch?.([[{}, { h: "a" }]]).catch((err) => err); - const errUpsert = await db.items.upsert?.({}, { h: "a" }).catch((err) => err); + const errUpdate = await db.items + .update?.({}, { h: "a" }) + .catch((err) => err); + const errUpdateBatch = await db.items + .updateBatch?.([[{}, { h: "a" }]]) + .catch((err) => err); + const errUpsert = await db.items + .upsert?.({}, { h: "a" }) + .catch((err) => err); const errInsert = await db.items.insert?.({ h: "a" }).catch((err) => err); const errSubscribe = await db.items .subscribe?.({ h: "a" }, {}, console.warn) @@ -84,8 +92,15 @@ export const isomorphicQueries = async ( "routine", "code_info", ]; - assert.deepStrictEqual(Object.keys(err ?? {}).sort(), allKeys.sort(), "index: " + index); - assert.deepStrictEqual(pickKeys(err, Object.keys(clientOnlyError)), clientOnlyError); + assert.deepStrictEqual( + Object.keys(err ?? 
{}).sort(), + allKeys.sort(), + "index: " + index, + ); + assert.deepStrictEqual( + pickKeys(err, Object.keys(clientOnlyError)), + clientOnlyError, + ); assert.equal(typeof err.detail, "string"); assert.equal(typeof err.query, "string"); assert.equal(typeof err.length, "number"); @@ -126,9 +141,12 @@ export const isomorphicQueries = async ( await test("Prepare data", async () => { if (!db.sql) throw "db.sql missing"; - const res = await db.items.insert!([{ name: "a" }, { name: "a" }, { name: "b" }], { - returning: "*", - }); + const res = await db.items.insert!( + [{ name: "a" }, { name: "a" }, { name: "b" }], + { + returning: "*", + }, + ); assert.equal(res.length, 3); const added1 = "04 Dec 1995 00:12:00"; const added2 = "04 Dec 1996 00:12:00"; @@ -148,12 +166,16 @@ export const isomorphicQueries = async ( await db.sql(`REFRESH MATERIALIZED VIEW prostgles_test.mv_basic1;`); assert.deepStrictEqual( await db["prostgles_test.mv_basic1"].find!(), - await db["prostgles_test.basic1"].find!() + await db["prostgles_test.basic1"].find!(), ); /* Ensure */ await db[`"*"`].insert!([{ "*": "a" }, { "*": "a" }, { "*": "b" }]); - await db[`"""*"""`].insert!([{ [`"*"`]: "a" }, { [`"*"`]: "a" }, { [`"*"`]: "b" }]); + await db[`"""*"""`].insert!([ + { [`"*"`]: "a" }, + { [`"*"`]: "a" }, + { [`"*"`]: "b" }, + ]); await db.various.insert!([ { name: "abc9", added: added1, jsn: { a: { b: 2 } } }, @@ -164,13 +186,22 @@ export const isomorphicQueries = async ( await db.sql("TRUNCATE files CASCADE"); }); - const json = { a: true, arr: "2", arr1: 3, arr2: [1], arrStr: ["1123.string"] }; + const json = { + a: true, + arr: "2", + arr1: 3, + arr2: [1], + arrStr: ["1123.string"], + }; await test("merge json", async () => { - const inserted = await db.tjson.insert!({ colOneOf: "a", json }, { returning: "*" }); + const inserted = await db.tjson.insert!( + { colOneOf: "a", json }, + { returning: "*" }, + ); const res = await db.tjson.update!( { colOneOf: "a" }, { json: { $merge: [{ a: false }] } }, - { returning: "*" } + { returning: "*" }, ); assert.deepStrictEqual(res?.[0].json, { ...json, a: false }); }); @@ -183,11 +214,11 @@ export const isomorphicQueries = async ( await test("onConflict do update", async () => { const initial = await db.items4.insert!( { id: -99, name: "onConflict", public: "onConflict" }, - { returning: "*" } + { returning: "*" }, ); const updated = await db.items4.insert!( { id: -99, name: "onConflict", public: "onConflict2" }, - { onConflict: "DoUpdate", returning: "*" } + { onConflict: "DoUpdate", returning: "*" }, ); assert.equal(initial.id, -99); assert.equal(initial.public, "onConflict"); @@ -210,7 +241,7 @@ export const isomorphicQueries = async ( await tryRun("Nested insert", async () => { const nestedInsert = await db.users_public_info.insert!( { name: "somename.txt", avatar: mediaFile }, - { returning: "*" } + { returning: "*" }, ); const { name, avatar } = nestedInsert; const { extension, content_type, original_name } = avatar; @@ -220,7 +251,7 @@ export const isomorphicQueries = async ( extension: "txt", content_type: "text/plain", original_name: "sample_file.txt", - } + }, ); assert.equal(name, "somename.txt"); @@ -258,12 +289,16 @@ export const isomorphicQueries = async ( const originals = await db.files.find!({ original_name: file.name }); assert.equal(originals.length, 1); const [original] = originals; - const initialFileStr = fs.readFileSync(fileFolder + original.name).toString("utf8"); + const initialFileStr = fs + .readFileSync(fileFolder + original.name) + .toString("utf8"); 
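     // the content read back from disk should match the originally written file data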
assert.equal(initialStr, initialFileStr); await db.files.update!({ id: original.id }, newFile); - const newFileStr = fs.readFileSync(fileFolder + original.name).toString("utf8"); + const newFileStr = fs + .readFileSync(fileFolder + original.name) + .toString("utf8"); assert.equal(newStr, newFileStr); const newF = await db.files.findOne!({ id: original.id }); @@ -382,12 +417,18 @@ export const isomorphicQueries = async ( ]; assert.deepStrictEqual(res, expected); - const resDynamic = await db.tr2.getColumns!("fr", { rule: "update", filter: {} }); + const resDynamic = await db.tr2.getColumns!("fr", { + rule: "update", + filter: {}, + }); assert.deepStrictEqual(resDynamic, expected); }); await test("returnType", async () => { - const whereStatement = await db.tr1.find!({ t1: "a" }, { returnType: "statement-where" }); + const whereStatement = await db.tr1.find!( + { t1: "a" }, + { returnType: "statement-where" }, + ); assert.equal(whereStatement, `"t1" = 'a'`); }); @@ -414,7 +455,7 @@ export const isomorphicQueries = async ( await test("$unnest_words", async () => { const res = await db.various.find!( {}, - { returnType: "values", select: { name: "$unnest_words" } } + { returnType: "values", select: { name: "$unnest_words" } }, ); assert.deepStrictEqual(res, ["abc9", "abc1", "abc81", "here"]); @@ -424,10 +465,13 @@ export const isomorphicQueries = async ( * Group by/Distinct */ await test("Group by/Distinct", async () => { - const res = await db.items.find!({}, { select: { name: 1 }, groupBy: true }); + const res = await db.items.find!( + {}, + { select: { name: 1 }, groupBy: true }, + ); const resV = await db.items.find!( {}, - { select: { name: 1 }, groupBy: true, returnType: "values" } + { select: { name: 1 }, groupBy: true, returnType: "values" }, ); assert.deepStrictEqual(res, [{ name: "a" }, { name: "b" }]); @@ -439,7 +483,7 @@ export const isomorphicQueries = async ( const res = await db.sql!( "delete from items2 returning name; ", {}, - { returnType: "default-with-rollback" } + { returnType: "default-with-rollback" }, ); assert.deepEqual(res.rows, [item]); const count = await db.items2.count!(); @@ -452,7 +496,7 @@ export const isomorphicQueries = async ( await test("returnType: value", async () => { const resVl = await db.items.find!( {}, - { select: { name: { $array_agg: ["name"] } }, returnType: "value" } + { select: { name: { $array_agg: ["name"] } }, returnType: "value" }, ); assert.deepStrictEqual(resVl, ["a", "a", "b"]); @@ -474,7 +518,7 @@ export const isomorphicQueries = async ( added: "$year", addedY: { $date: ["added"] }, }, - } + }, ); // console.log(d); await db.various.findOne!( @@ -486,7 +530,7 @@ export const isomorphicQueries = async ( added: "$year", addedY: { $date: ["added"] }, }, - } + }, ); /* @@ -510,13 +554,19 @@ export const isomorphicQueries = async ( h: { $term_highlight: [["name"], term, {}] }, hFull: { $term_highlight: ["*", "81", {}] }, hOrdered: { $term_highlight: [["name", "id"], "81", {}] }, - hIdx: { $term_highlight: [["name"], term, { returnType: "index" }] }, - hBool: { $term_highlight: [["name"], term, { returnType: "boolean" }] }, - hObj: { $term_highlight: [["name"], term, { returnType: "object" }] }, + hIdx: { + $term_highlight: [["name"], term, { returnType: "index" }], + }, + hBool: { + $term_highlight: [["name"], term, { returnType: "boolean" }], + }, + hObj: { + $term_highlight: [["name"], term, { returnType: "object" }], + }, hObjAll: { $term_highlight: ["*", term, { returnType: "object" }] }, }, orderBy: { hIdx: -1 }, - } + }, ); 
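     // each $term_highlight alias should surface the match in the shape set by its returnType (index / boolean / object)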
assert.deepStrictEqual(res[0], { @@ -552,7 +602,7 @@ export const isomorphicQueries = async ( const testToEnsureTriggersAreDisabled = async ( sub: SubscriptionHandler, - table_name: string + table_name: string, ) => { const getTableTriggers = async (table_name: string) => { return (await db.sql?.( @@ -562,7 +612,7 @@ export const isomorphicQueries = async ( WHERE tgname like format('prostgles_triggers_%s_', \${table_name}) || '%' `, { table_name }, - { returnType: "rows" } + { returnType: "rows" }, )) as { tgname: string; enabled: boolean }[]; }; let validTriggers = await getTableTriggers(table_name); @@ -579,12 +629,16 @@ export const isomorphicQueries = async ( validTriggers, }, null, - 2 - )}` + 2, + )}`, ); await db.sql?.(`DELETE FROM prostgles.app_triggers`, []); validTriggers = await getTableTriggers(table_name); - assert.equal(validTriggers.length, 3, "3 Triggers should exist but be disabled"); + assert.equal( + validTriggers.length, + 3, + "3 Triggers should exist but be disabled", + ); assert.equal(validTriggers.filter((t) => t.enabled).length, 0); }; @@ -593,47 +647,55 @@ export const isomorphicQueries = async ( "subscribe", async (resolve, reject) => { await db.various.insert!({ id: 99 }); - const sub = await db.various.subscribe!({ id: 99 }, {}, async (items) => { - const item = items[0]; + const sub = await db.various.subscribe!( + { id: 99 }, + {}, + async (items) => { + const item = items[0]; - if (item && item.name === "zz3zz3") { - await db.various.delete!({ name: "zz3zz3" }); - await testToEnsureTriggersAreDisabled(sub, "various"); - resolve(true); - } - }); + if (item && item.name === "zz3zz3") { + await db.various.delete!({ name: "zz3zz3" }); + await testToEnsureTriggersAreDisabled(sub, "various"); + resolve(true); + } + }, + ); await db.various.update!({ id: 99 }, { name: "zz3zz1" }); await db.various.update!({ id: 99 }, { name: "zz3zz2" }); await db.various.update!({ id: 99 }, { name: "zz3zz3" }); }, - { timeout: 4000 } + { timeout: 4000 }, ); }); await test("subscribe to schema.table", async () => { await tryRunP("subscribe to schema.table", async (resolve, reject) => { let runs = 0; - const sub = await db[`prostgles_test.basic1`].subscribe!({}, {}, async (items) => { - runs++; - if (runs === 1) { - if (items.length !== 1) { - reject("Should have 1 item"); - } else { - await db[`prostgles_test.basic1`].insert!({ - txt: "basic12", - }); - } - } else if (runs === 2) { - if (items.length !== 2) { - reject("Should have 2 items"); + const sub = await db[`prostgles_test.basic1`].subscribe!( + {}, + {}, + async (items) => { + runs++; + if (runs === 1) { + if (items.length !== 1) { + reject("Should have 1 item"); + } else { + await db[`prostgles_test.basic1`].insert!({ + txt: "basic12", + }); + } + } else if (runs === 2) { + if (items.length !== 2) { + reject("Should have 2 items"); + } else { + await sub.unsubscribe(); + resolve(true); + } } else { - await sub.unsubscribe(); - resolve(true); + reject("Expecting only 2 runs"); } - } else { - reject("Expecting only 2 runs"); - } - }); + }, + ); }); }); @@ -642,19 +704,25 @@ export const isomorphicQueries = async ( let callbacksFired = {}; const subscriptions = await Promise.all( [1, 2, 3, 4, 5].map(async (subId) => { - const handler = await db.various.subscribe!({ id: 99 }, {}, async (items) => { - callbacksFired[subId] = true; - }); + const handler = await db.various.subscribe!( + { id: 99 }, + {}, + async (items) => { + callbacksFired[subId] = true; + }, + ); return { handler, }; - }) + }), ).catch(reject); await 
tout(2000); await Promise.all( - Object.values(subscriptions).map(async ({ handler }) => handler.unsubscribe()) + Object.values(subscriptions).map(async ({ handler }) => + handler.unsubscribe(), + ), ); assert.equal(Object.keys(callbacksFired).length, 5); resolve(true); @@ -672,17 +740,22 @@ export const isomorphicQueries = async ( { throttle: 1700 }, async (item) => { const now = Date.now(); - if (item && item.name === "zz3zz2" && now - start > 1600 && now - start < 1800) { + if ( + item && + item.name === "zz3zz2" && + now - start > 1600 && + now - start < 1800 + ) { await db.various.delete!({ name: "zz3zz2" }); sub.unsubscribe(); resolve(true); } - } + }, ); await db.various.update!({ id: 99 }, { name: "zz3zz1" }); await db.various.update!({ id: 99 }, { name: "zz3zz2" }); }, - { timeout: 4000 } + { timeout: 4000 }, ); }); @@ -708,11 +781,11 @@ export const isomorphicQueries = async ( await test("template_string function", async () => { const res = await db.various.findOne!( { name: "abc9" }, - { select: { tstr: { $template_string: ["{name} is hehe"] } } } + { select: { tstr: { $template_string: ["{name} is hehe"] } } }, ); const res2 = await db.various.findOne!( { name: "abc9" }, - { select: { tstr: { $template_string: ["is hehe"] } } } + { select: { tstr: { $template_string: ["is hehe"] } } }, ); assert.equal(res?.tstr, "abc9 is hehe"); assert.equal(res2?.tstr, "is hehe"); @@ -725,7 +798,9 @@ export const isomorphicQueries = async ( assert.equal(res, 1); }); await test("In filtering", async () => { - const res = await db.various.count!({ added: { $in: ["04 Dec 1996 00:12:00"] } }); + const res = await db.various.count!({ + added: { $in: ["04 Dec 1996 00:12:00"] }, + }); assert.equal(res, 1); }); @@ -734,10 +809,16 @@ export const isomorphicQueries = async ( {}, { select: { name: 1 }, - orderBy: [{ key: "name", asc: false, nulls: "first", nullEmpty: true }], - } + orderBy: [ + { key: "name", asc: false, nulls: "first", nullEmpty: true }, + ], + }, ); - assert.deepStrictEqual(res, [{ name: "b" }, { name: "a" }, { name: "a" }]); + assert.deepStrictEqual(res, [ + { name: "b" }, + { name: "a" }, + { name: "a" }, + ]); }); await test("Order by aliased func", async () => { const res = await db.items.find!( @@ -745,7 +826,7 @@ export const isomorphicQueries = async ( { select: { uname: { $upper: ["name"] }, count: { $countAll: [] } }, orderBy: { uname: -1 }, - } + }, ); assert.deepStrictEqual(res, [ { uname: "B", count: "1" }, @@ -755,21 +836,21 @@ export const isomorphicQueries = async ( await test("Filter by aliased func", async () => { const res = await db.items.find!( { uname: "B" }, - { select: { uname: { $upper: ["name"] }, count: { $countAll: [] } } } + { select: { uname: { $upper: ["name"] }, count: { $countAll: [] } } }, ); assert.deepStrictEqual(res, [{ uname: "B", count: "1" }]); }); await test("Count with Filter by aliased func ", async () => { const res = await db.items.count!( { uname: "A" }, - { select: { uname: { $upper: ["name"] } } } + { select: { uname: { $upper: ["name"] } } }, ); assert.deepStrictEqual(res, 2); }); await test("Count with Aggregate and Filter by aliased func ", async () => { const res = await db.items.count!( { uname: "A" }, - { select: { uname: { $upper: ["name"] }, count: { $countAll: [] } } } + { select: { uname: { $upper: ["name"] }, count: { $countAll: [] } } }, ); assert.deepStrictEqual(res, 1); }); @@ -782,7 +863,10 @@ export const isomorphicQueries = async ( await test("Order by aggregation", async () => { const res = await db.items.find!( {}, - { 
select: { name: 1, count: { $countAll: [] } }, orderBy: { count: -1 } } + { + select: { name: 1, count: { $countAll: [] } }, + orderBy: { count: -1 }, + }, ); assert.deepStrictEqual(res, [ { name: "a", count: "2" }, @@ -792,7 +876,10 @@ export const isomorphicQueries = async ( await test("Order by colliding alias name", async () => { const res = await db.items.find!( {}, - { select: { name: { $countAll: [] }, n: { $left: ["name", 1] } }, orderBy: { name: -1 } } + { + select: { name: { $countAll: [] }, n: { $left: ["name", 1] } }, + orderBy: { name: -1 }, + }, ); assert.deepStrictEqual(res, [ { name: "2", n: "a" }, @@ -811,7 +898,7 @@ export const isomorphicQueries = async ( await test("Function example", async () => { const f = await db.items4.findOne!( {}, - { select: { public: 1, p_5: { $left: ["public", 3] } } } + { select: { public: 1, p_5: { $left: ["public", 3] } } }, ); assert.equal(f?.p_5.length, 3); assert.equal(f?.p_5, f.public.substr(0, 3)); @@ -819,28 +906,41 @@ export const isomorphicQueries = async ( // Nested function const fg = await db.items2.findOne!( {}, - { select: { id: 1, name: 1, items3: { name: "$upper" } } } + { select: { id: 1, name: 1, items3: { name: "$upper" } } }, ); // { $upper: ["public"] } } }); assert.deepStrictEqual(fg, { id: 1, name: "a", items3: [{ name: "A" }] }); // Date utils - const Mon = await db.items4.findOne!({ name: "abc" }, { select: { added: "$Mon" } }); + const Mon = await db.items4.findOne!( + { name: "abc" }, + { select: { added: "$Mon" } }, + ); assert.deepStrictEqual(Mon, { added: "Dec" }); // Date + agg const MonAgg = await db.items4.find!( { name: "abc" }, - { select: { added: "$Mon", public: "$count" } } + { select: { added: "$Mon", public: "$count" } }, ); assert.deepStrictEqual(MonAgg, [{ added: "Dec", public: "2" }]); // Returning const returningParam = { - returning: { id: 1, name: 1, public: 1, $rowhash: 1, added_day: { $day: ["added"] } }, + returning: { + id: 1, + name: 1, + public: 1, + $rowhash: 1, + added_day: { $day: ["added"] }, + }, } as const; // ctid: 1, let i = await db.items4_pub.insert!( - { name: "abc123", public: "public data", added: "04 Dec 1995 00:12:00" }, - returningParam + { + name: "abc123", + public: "public data", + added: "04 Dec 1995 00:12:00", + }, + returningParam, ); assert.deepStrictEqual(i, { id: 1, @@ -853,7 +953,7 @@ export const isomorphicQueries = async ( let u = await db.items4_pub.update!( { name: "abc123" }, { public: "public data2" }, - returningParam + returningParam, ); assert.deepStrictEqual(u, [ { @@ -915,7 +1015,7 @@ export const isomorphicQueries = async ( geomGeo: { $ST_AsGeoJSON: ["geom"] }, }, orderBy: "geom", - } + }, ); assert.deepStrictEqual(f, { geomGeo: { @@ -939,7 +1039,7 @@ export const isomorphicQueries = async ( extent: { $ST_Extent: ["geom"] }, // extent3D: { "$ST_3DExtent": ["geom"] }, }, - } + }, ); assert.deepStrictEqual(aggs, { xMax: -1, @@ -968,9 +1068,15 @@ export const isomorphicQueries = async ( }, */ - const fo = await db.tjson.insert!({ colOneOf: "a", json }, { returning: "*" }); + const fo = await db.tjson.insert!( + { colOneOf: "a", json }, + { returning: "*" }, + ); // assert.deepStrictEqual(fo.json, json); - await db.tjson.insert!({ colOneOf: "a", json: { ...json, o: { o1: 2 } } }); + await db.tjson.insert!({ + colOneOf: "a", + json: { ...json, o: { o1: 2 } }, + }); try { await db.tjson.insert!({ colOneOf: "a", json: { a: true, arr: "22" } }); throw "Should have failed"; @@ -993,7 +1099,10 @@ export const isomorphicQueries = async ( await test("Basic exists", 
async () => { const expect0 = await db.items.count!({ - $and: [{ $exists: { items2: { name: "a" } } }, { $exists: { items3: { name: "b" } } }], + $and: [ + { $exists: { items2: { name: "a" } } }, + { $exists: { items3: { name: "b" } } }, + ], }); assert.equal(expect0, 0, "$exists query failed"); }); @@ -1048,7 +1157,11 @@ export const isomorphicQueries = async ( await test("Upsert example", async () => { await db.items.upsert!({ name: "tx" }, { name: "tx" }); await db.items.upsert!({ name: "tx" }, { name: "tx" }); - assert.equal(await db.items.count!({ name: "tx" }), 1, "upsert command failed"); + assert.equal( + await db.items.count!({ name: "tx" }), + 1, + "upsert command failed", + ); }); /* Joins example */ @@ -1061,12 +1174,14 @@ export const isomorphicQueries = async ( items3: "*", items22: db.leftJoin?.items2({}, "*"), }, - } + }, ); if ( !items.length || - !items.every((it) => Array.isArray(it.items3) && Array.isArray(it.items22)) + !items.every( + (it) => Array.isArray(it.items3) && Array.isArray(it.items22), + ) ) { console.log(items[0].items3); throw "Joined select query failed"; @@ -1082,7 +1197,7 @@ export const isomorphicQueries = async ( "*": 1, items2: "*", }, - } + }, ); const items2j = await db.items.find!( {}, @@ -1092,48 +1207,60 @@ export const isomorphicQueries = async ( items2: "*", items2j: db.leftJoin?.items2({}, "*"), }, - } + }, ); items2.forEach((d, i) => { assert.deepStrictEqual( d.items2, items2j[i].items2, - "Joins duplicate aliased table query failed" + "Joins duplicate aliased table query failed", ); assert.deepStrictEqual( d.items2, items2j[i].items2j, - "Joins duplicate aliased table query failed" + "Joins duplicate aliased table query failed", ); }); }); await test("Join aggregate functions example", async () => { - const singleShortHandAgg = await db.items.findOne!({}, { select: { id: "$max" } }); - const singleAgg = await db.items.findOne!({}, { select: { id: { $max: ["id"] } } }); + const singleShortHandAgg = await db.items.findOne!( + {}, + { select: { id: "$max" } }, + ); + const singleAgg = await db.items.findOne!( + {}, + { select: { id: { $max: ["id"] } } }, + ); assert.deepStrictEqual(singleShortHandAgg, { id: 4 }); assert.deepStrictEqual(singleAgg, { id: 4 }); const shortHandAggJoined = await db.items.findOne!( { id: 4 }, - { select: { id: 1, items2: { name: "$max" } } } + { select: { id: 1, items2: { name: "$max" } } }, ); assert.deepStrictEqual(shortHandAggJoined, { id: 4, items2: [] }); }); /* $rowhash -> Custom column that returms md5(ctid + allowed select columns). 
Used in joins & CRUD to bypass PKey details */ await test("$rowhash example", async () => { - const rowhash = await db.items.findOne!({}, { select: { $rowhash: 1, "*": 1 } }); + const rowhash = await db.items.findOne!( + {}, + { select: { $rowhash: 1, "*": 1 } }, + ); const f = { $rowhash: rowhash?.$rowhash }; - const rowhashView = await db.v_items.findOne!({}, { select: { $rowhash: 1 } }); + const rowhashView = await db.v_items.findOne!( + {}, + { select: { $rowhash: 1 } }, + ); const rh1 = await db.items.findOne!( { $rowhash: rowhash?.$rowhash }, - { select: { $rowhash: 1 } } + { select: { $rowhash: 1 } }, ); const rhView = await db.v_items.findOne!( { $rowhash: rowhashView?.$rowhash }, - { select: { $rowhash: 1 } } + { select: { $rowhash: 1 } }, ); // console.log({ rowhash, f }); @@ -1154,7 +1281,10 @@ export const isomorphicQueries = async ( await test("Reference column nested insert", async () => { const nestedRow = { name: "nested_insert" }; const parentRow = { name: "parent insert" }; - const pr = await db.items2.insert!({ items_id: nestedRow, ...parentRow }, { returning: "*" }); + const pr = await db.items2.insert!( + { items_id: nestedRow, ...parentRow }, + { returning: "*" }, + ); const childRows = await db.items.find!(nestedRow); assert.equal(childRows.length, 1); @@ -1190,7 +1320,7 @@ export const isomorphicQueries = async ( [`id2 max`]: { $max: [`"id2"`] }, }, }, - } + }, ); assert.deepStrictEqual(res[0], { @@ -1222,7 +1352,7 @@ export const isomorphicQueries = async ( }, }, }, - } + }, ); assert.deepStrictEqual(aliasedQuotedJoin, [ @@ -1249,78 +1379,94 @@ export const isomorphicQueries = async ( }, }, }, - { select: "*" } + { select: "*" }, ); /** Duplicated tables */ const exists2 = await db[`"""quoted0"""`].find!( { $existsJoined: { - path: ['"""quoted1"""', '"""quoted2"""', '"""quoted1"""', '"""quoted2"""'], + path: [ + '"""quoted1"""', + '"""quoted2"""', + '"""quoted1"""', + '"""quoted2"""', + ], filter: { '"id2"': 1, }, }, }, - { select: "*" } + { select: "*" }, ); assert.deepStrictEqual(exists1, exists2); }); await test("subscribe to escaped table name", async () => { - await tryRunP("subscribe to escaped table name", async (resolve, reject) => { - const filter = { [`"text_col0"`]: "0" }; - let runs = 0; - setTimeout(async () => { - /** Used for debugging */ - if (runs < 2) { - const appName = await db.sql?.( - ` + await tryRunP( + "subscribe to escaped table name", + async (resolve, reject) => { + const filter = { [`"text_col0"`]: "0" }; + let runs = 0; + setTimeout(async () => { + /** Used for debugging */ + if (runs < 2) { + const appName = await db.sql?.( + ` SELECT application_name FROM pg_catalog.pg_stat_activity WHERE pid = pg_backend_pid() `, - [], - { returnType: "rows" } - ); - const apps = await db.sql?.(`SELECT * FROM prostgles.apps`, [], { - returnType: "value", - }); - const app_triggers = await db.sql?.(`SELECT * FROM prostgles.app_triggers`, [], { - returnType: "rows", - }); - log("show-logs"); - log(JSON.stringify({ appName, apps, app_triggers }, null, 2)); - } - }, 2000); - const sub = await db[`"""quoted0"""`].subscribe!(filter, {}, async (items) => { - const item = items[0]; - log( - JSON.stringify( - { - item, - runs, - }, - null, - 2 - ) - ); - if (item && item[`"text_col0"`] === "0") { - runs++; - if (runs === 1) { - await db[`"""quoted0"""`].update!(filter, filter); - } - if (runs < 2) { - return; - } - try { - await testToEnsureTriggersAreDisabled(sub, `"""quoted0"""`); - resolve(true); - } catch (e) { - reject(e); + [], + { returnType: "rows" 
}, + ); + const apps = await db.sql?.(`SELECT * FROM prostgles.apps`, [], { + returnType: "value", + }); + const app_triggers = await db.sql?.( + `SELECT * FROM prostgles.app_triggers`, + [], + { + returnType: "rows", + }, + ); + log("show-logs"); + log(JSON.stringify({ appName, apps, app_triggers }, null, 2)); } - } - }).catch(reject); - }); + }, 2000); + const sub = await db[`"""quoted0"""`].subscribe!( + filter, + {}, + async (items) => { + const item = items[0]; + log( + JSON.stringify( + { + item, + runs, + }, + null, + 2, + ), + ); + if (item && item[`"text_col0"`] === "0") { + runs++; + if (runs === 1) { + await db[`"""quoted0"""`].update!(filter, filter); + } + if (runs < 2) { + return; + } + try { + await testToEnsureTriggersAreDisabled(sub, `"""quoted0"""`); + resolve(true); + } catch (e) { + reject(e); + } + } + }, + ).catch(reject); + }, + ); }); const testName = "subscribe using a filter bigger than block_size"; await test(testName, async () => { @@ -1334,13 +1480,16 @@ export const isomorphicQueries = async ( await sub.unsubscribe(); resolve(true); }, 10); - } + }, ); }); }); await test("Reverse join with agg", async () => { - const inserted = await db.tr1.insert!({ tr2: { t1: "a", t2: "b" } }, { returning: "*" }); + const inserted = await db.tr1.insert!( + { tr2: { t1: "a", t2: "b" } }, + { returning: "*" }, + ); const idAggSelect = { ids: { @@ -1351,18 +1500,33 @@ export const isomorphicQueries = async ( {}, { orderBy: { id: true }, - select: { "*": 1, tr2: { $innerJoin: "tr2", filter: { t1: "a" }, select: idAggSelect } }, - } + select: { + "*": 1, + tr2: { + $innerJoin: "tr2", + filter: { t1: "a" }, + select: idAggSelect, + }, + }, + }, ); const reverseJoin = await db.tr2.find!( { t1: "a" }, { orderBy: { id: true }, select: { "*": 1, tr1: { $innerJoin: "tr1", select: idAggSelect } }, - } + }, ); - assert.deepStrictEqual(normalJoin[0], { id: 1, t1: null, tr2: [{ ids: [1] }] }); - assert.deepStrictEqual(normalJoin[1], { id: 2, t1: null, tr2: [{ ids: [2] }] }); + assert.deepStrictEqual(normalJoin[0], { + id: 1, + t1: null, + tr2: [{ ids: [1] }], + }); + assert.deepStrictEqual(normalJoin[1], { + id: 2, + t1: null, + tr2: [{ ids: [2] }], + }); assert.deepStrictEqual(reverseJoin[0], { id: 1, tr1_id: 1, @@ -1400,7 +1564,7 @@ export const isomorphicQueries = async ( await sub.unsubscribe(); resolve(true); } - } + }, ); }); }); @@ -1419,7 +1583,7 @@ export const isomorphicQueries = async ( orderBy: { "tr2.maxId": asc, }, - } + }, ); const sortedAsc = await getSorted(true); const sortedDesc = await getSorted(false); @@ -1428,7 +1592,7 @@ export const isomorphicQueries = async ( .map((d) => d.tr2[0].maxId) .slice(0) .reverse(), - sortedDesc.map((d) => d.tr2[0].maxId) + sortedDesc.map((d) => d.tr2[0].maxId), ); }); @@ -1445,14 +1609,14 @@ export const isomorphicQueries = async ( orderBy: { id: true, }, - } + }, ); assert.deepStrictEqual( res.map((row) => [row.id, row.tr2[0]!.sign]), [ [1, 1], [2, 1], - ] + ], ); }); @@ -1463,7 +1627,7 @@ export const isomorphicQueries = async ( items2_id: { name: "it2", items_id: { name: "it" } }, name: "it4a", }, - { returning: "*" } + { returning: "*" }, ); const itemsCount = await db.items.count!({ name: "it" }); const items2Count = await db.items2.count!({ name: "it2" }); @@ -1483,17 +1647,20 @@ export const isomorphicQueries = async ( items3_id: { name: "multi" }, name: "root_multi", }, - { returning: "*" } + { returning: "*" }, ); const itemsCount = await db.items.count!({ name: "multi" }); assert.equal(+itemsCount, 4); const multiItem = await 
db.items_multi.findOne!( { name: "root_multi" }, - { select: { "*": 1, items: "*" } } + { select: { "*": 1, items: "*" } }, ); assert.equal(multiItem?.name, "root_multi"); - assert.equal(multiItem?.items.filter((d) => d.name === "multi").length, 4); + assert.equal( + multiItem?.items.filter((d) => d.name === "multi").length, + 4, + ); }); await test("Join path with order by nested", async () => { @@ -1503,7 +1670,7 @@ export const isomorphicQueries = async ( items1_id: { name: "multi1" }, name: "root_multi", }, - { returning: "*" } + { returning: "*" }, ); const res = await db.items_multi.find!( @@ -1518,7 +1685,7 @@ export const isomorphicQueries = async ( orderBy: { "i0.name": -1, }, - } + }, ); assert.equal(res.length, 1); assert.equal(res[0].i0[0].name, "multi0"); @@ -1529,8 +1696,14 @@ export const isomorphicQueries = async ( await test("Self join", async () => { await db.self_join.delete!(); const a = await db.self_join.insert!({ name: "a" }); - const a1 = await db.self_join.insert!({ name: "a", my_id: { name: "b" } }); - const a2 = await db.self_join.insert!({ name: "a", my_id1: { name: "b1" } }); + const a1 = await db.self_join.insert!({ + name: "a", + my_id: { name: "b" }, + }); + const a2 = await db.self_join.insert!({ + name: "a", + my_id1: { name: "b1" }, + }); const one = await db.self_join.find!( {}, @@ -1544,7 +1717,7 @@ export const isomorphicQueries = async ( orderBy: "name", }, }, - } + }, ); assert.equal(one.length, 1); assert.equal(one[0].my.length, 1); @@ -1577,7 +1750,7 @@ export const isomorphicQueries = async ( select: "*", }, }, - } + }, ); assert.equal(res.length, 3); res.forEach((row) => { @@ -1606,7 +1779,7 @@ export const isomorphicQueries = async ( orderBy: { price: 1 }, }, }, - } + }, ); assert.equal(resSortedInnerJoin.length, 2); resSortedInnerJoin.forEach((row) => { @@ -1624,7 +1797,7 @@ export const isomorphicQueries = async ( having: { c: 4, }, - } + }, ); assert.deepStrictEqual(res, [ { @@ -1642,7 +1815,7 @@ export const isomorphicQueries = async ( having: { $filter: [{ $countAll: [] }, "=", 4], }, - } + }, ); assert.deepStrictEqual(res, [ { @@ -1666,7 +1839,7 @@ export const isomorphicQueries = async ( having: { c: 1 }, }, }, - } + }, ); assert.deepStrictEqual(res, [ { @@ -1695,12 +1868,11 @@ export async function tryRun(desc: string, func: () => any, log?: Function) { export function tryRunP( desc: string, func: (resolve: any, reject: any) => any, - opts?: { log?: Function; timeout?: number } + opts?: { log?: Function; timeout?: number }, ) { return new Promise(async (rv, rj) => { - const testTimeout = - Number.isFinite(opts?.timeout) ? - setTimeout(() => { + const testTimeout = Number.isFinite(opts?.timeout) + ? setTimeout(() => { const errMsg = `${desc} failed. 
Reason: Timeout reached: ${opts!.timeout}ms`; opts?.log?.(errMsg); rj(errMsg); diff --git a/tests/server/index.html b/tests/server/index.html index 8549e276..0917de73 100644 --- a/tests/server/index.html +++ b/tests/server/index.html @@ -1,73 +1,95 @@ [hunk body lost in extraction: the page's HTML tags were stripped. What survives of the reformatted "Prostgles" test page is the re-indented inline script (db.planes .findOne({}, { select: { last_updated: "$max" } }) .then(console.log); sP.unsubscribe();), the commented-out db.items.findOne() / db.items.getColumns() calls, and a newline added at end of file.] diff --git a/tests/server/index.ts b/tests/server/index.ts index 4fa3b5b2..df931fbc 100644 --- a/tests/server/index.ts +++ b/tests/server/index.ts @@ -10,9 +10,12 @@ import { testTableConfig } from "./testTableConfig"; testPublishTypes(); const isClientTest = process.env.TEST_TYPE === "client"; -const io = !isClientTest ? undefined : require("socket.io")(http, { path: "/teztz/s" }); -const ioWatchSchema = - !isClientTest ? undefined : require("socket.io")(http, { path: "/teztz/sWatchSchema" }); +const io = !isClientTest + ? undefined + : require("socket.io")(http, { path: "/teztz/s" }); +const ioWatchSchema = !isClientTest + ? undefined + : require("socket.io")(http, { path: "/teztz/sWatchSchema" }); http.listen(3001); @@ -28,7 +31,9 @@ export type { DBHandlerServer } from "prostgles-server/dist/Prostgles"; let logs = []; export const log = (msg: string, extra?: any, trace?: boolean) => { - const msgs = msg.includes("show-logs") ? logs : ["(server): " + msg, extra].filter((v) => v); + const msgs = msg.includes("show-logs") + ? logs + : ["(server): " + msg, extra].filter((v) => v); if (trace) { console.trace(...msgs); } else { @@ -50,7 +55,9 @@ type USER = { password: string; type: string; }; -const users: USER[] = [{ id: "1a", username: "john", password: "secret", type: "default" }]; +const users: USER[] = [ + { id: "1a", username: "john", password: "secret", type: "default" }, +]; process.on("unhandledRejection", (reason, p) => { console.trace("Unhandled Rejection at:", p, "reason:", reason); @@ -71,8 +78,9 @@ const dbConnection = { }; function dd() { - const dbo: DBOFullyTyped<{ tbl: { is_view: true; columns: { col1: { type: number } } } }> = - 1 as any; + const dbo: DBOFullyTyped<{ + tbl: { is_view: true; columns: { col1: { type: number } } }; + }> = 1 as any; if (!dbo) return; dbo.tbl.find; } @@ -99,7 +107,11 @@ function dd() { onLog: async (ev) => { logs.push(ev); logs = logs.slice(-10); - if (ev.type === "debug" || ev.type === "connect" || ev.type === "disconnect") { + if ( + ev.type === "debug" || + ev.type === "connect" || + ev.type === "disconnect" + ) { // log("onLog", ev); } }, @@ -131,7 +143,10 @@ function dd() { onSocketConnect: ({ socket, db }) => { console.log("onSocketConnect", socket.id); if (isClientTest) { - log("Client connected -> console does not work. use log function. socket.id:", socket.id); + log( + "Client connected -> console does not work. use log function. 
socket.id:", + socket.id, + ); socket.emit("start-test", { server_id: Math.random() }); socket.on("log", async (data, cb) => { console.log("Client log ", data); @@ -167,7 +182,12 @@ function dd() { return { sid: s.id, user, - clientUser: { sid: s.id, uid: user.id, id: user.id, type: user.type }, + clientUser: { + sid: s.id, + uid: user.id, + id: user.id, + type: user.type, + }, }; } } @@ -175,10 +195,16 @@ function dd() { return undefined; }, login: async (loginData) => { - if (loginData.type !== "username") throw "Only username login is supported"; + if (loginData.type !== "username") + throw "Only username login is supported"; const { username, password } = loginData; - const u = users.find((u) => u.username === username && u.password === password); - if (!u) throw "something went wrong: " + JSON.stringify({ username, password }); + const u = users.find( + (u) => u.username === username && u.password === password, + ); + if (!u) + throw ( + "something went wrong: " + JSON.stringify({ username, password }) + ); let s = sessions.find((s) => s.user_id === u.id); if (!s) { s = { id: "SID" + Date.now(), user_id: u.id }; @@ -190,7 +216,9 @@ function dd() { cacheSession: { getSession: async (sid) => { const s = sessions.find((s) => s.id === sid); - return s ? { sid: s.id, expires: Infinity, onExpiration: "redirect" } : undefined; + return s + ? { sid: s.id, expires: Infinity, onExpiration: "redirect" } + : undefined; }, }, expressConfig: { @@ -241,7 +269,12 @@ function dd() { }, { tables: ["items_multi", "items"], - on: [{ items0_id: "id" }, { items1_id: "id" }, { items2_id: "id" }, { items3_id: "id" }], + on: [ + { items0_id: "id" }, + { items1_id: "id" }, + { items2_id: "id" }, + { items3_id: "id" }, + ], type: "many-many", }, ], @@ -260,7 +293,7 @@ function dd() { // "--inspect-brk", "dist/client/index.js", ], - { cwd: execPath, stdio: "inherit" } + { cwd: execPath, stdio: "inherit" }, ); log("Waiting for client..."); diff --git a/tests/server/package-lock.json b/tests/server/package-lock.json index 5817c2ae..627aafdf 100644 --- a/tests/server/package-lock.json +++ b/tests/server/package-lock.json @@ -21,7 +21,7 @@ }, "../..": { "name": "prostgles-server", - "version": "4.2.182", + "version": "4.2.183", "license": "MIT", "dependencies": { "@aws-sdk/client-ses": "^3.699.0", diff --git a/tests/server/publishTypeCheck.ts b/tests/server/publishTypeCheck.ts index 8b5dff88..8875f4e0 100644 --- a/tests/server/publishTypeCheck.ts +++ b/tests/server/publishTypeCheck.ts @@ -63,13 +63,12 @@ export const testPublishTypes = () => { }, items4: { - select: - Math.random() ? "*" : ( - { + select: Math.random() + ? 
"*" + : { fields: { name: 0 }, forcedFilter: { name: "abc" }, - } - ), + }, insert: "*", update: "*", delete: "*", diff --git a/tests/server/testPublish.ts b/tests/server/testPublish.ts index 7c31b029..7e3c9fe6 100644 --- a/tests/server/testPublish.ts +++ b/tests/server/testPublish.ts @@ -1,8 +1,14 @@ -import { Publish, PublishTableRule } from "prostgles-server/dist/PublishParser/PublishParser"; +import { + Publish, + PublishTableRule, +} from "prostgles-server/dist/PublishParser/PublishParser"; import { DBGeneratedSchema } from "./DBGeneratedSchema"; import type { PublishFullyTyped } from "prostgles-server/dist/DBSchemaBuilder"; -export const testPublish: Publish = async ({ user, sid }) => { +export const testPublish: Publish = async ({ + user, + sid, +}) => { if (sid === "noAuth") { return { planes: { @@ -67,14 +73,13 @@ export const testPublish: Publish = async ({ user, sid }) => }, items4: { - select: - user ? "*" : ( - { + select: user + ? "*" + : { fields: { name: 0 }, orderByFields: { added: 1 }, forcedFilter: { name: "abc" }, - } - ), + }, insert: "*", update: "*", delete: "*", @@ -110,9 +115,13 @@ export const testPublish: Publish = async ({ user, sid }) => }, postValidate: async ({ row, dbx: dboTx }) => { /** Records must exist in this transaction */ - const exists = await dboTx.sql("SELECT * FROM insert_rules WHERE id = ${id}", row, { - returnType: "row", - }); + const exists = await dboTx.sql( + "SELECT * FROM insert_rules WHERE id = ${id}", + row, + { + returnType: "row", + }, + ); const existsd = await dboTx.insert_rules.findOne({ id: row.id }); if (row.id !== exists.id || row.id !== existsd.id) { console.error("postValidate failed"); diff --git a/tests/server/testTableConfig.ts b/tests/server/testTableConfig.ts index f058f76f..bf8cc7e1 100644 --- a/tests/server/testTableConfig.ts +++ b/tests/server/testTableConfig.ts @@ -1,11 +1,13 @@ -import type { TableConfig } from 'prostgles-server/dist/TableConfig/TableConfig'; - -export const testTableConfig: TableConfig<{ en: 1, fr: 1 }> = { +import type { TableConfig } from "prostgles-server/dist/TableConfig/TableConfig"; +export const testTableConfig: TableConfig<{ en: 1; fr: 1 }> = { tr2: { // dropIfExists: true, columns: { - t1: { label: { fr: "fr_t1" }, info: { hint: "hint...", min: "a", max: "b" } }, + t1: { + label: { fr: "fr_t1" }, + info: { hint: "hint...", min: "a", max: "b" }, + }, t2: { label: { en: "en_t2" } }, }, triggers: { @@ -24,9 +26,9 @@ export const testTableConfig: TableConfig<{ en: 1, fr: 1 }> = { RETURN NULL; END; - ` - } - } + `, + }, + }, }, users_public_info: { // dropIfExists: true, @@ -36,84 +38,97 @@ export const testTableConfig: TableConfig<{ en: 1, fr: 1 }> = { // avatar: `UUID REFERENCES media ON DELETE CASCADE` avatar: `UUID`, sid: `TEXT`, - } + }, }, users: { dropIfExists: true, columns: { - id: { sqlDefinition: `SERIAL PRIMARY KEY ` }, - email: { sqlDefinition: `TEXT NOT NULL` }, - status: { enum: ["active", "disabled", "pending"] }, - preferences: { - jsonbSchemaType: { - showIntro: { type: "boolean", optional: true }, - theme: { enum: ["light", "dark", "auto"], optional: true }, - others: { type: "any[]" } - } + id: { sqlDefinition: `SERIAL PRIMARY KEY ` }, + email: { sqlDefinition: `TEXT NOT NULL` }, + status: { enum: ["active", "disabled", "pending"] }, + preferences: { + jsonbSchemaType: { + showIntro: { type: "boolean", optional: true }, + theme: { enum: ["light", "dark", "auto"], optional: true }, + others: { type: "any[]" }, + }, }, - } + }, }, tjson: { dropIfExists: true, columns: { - json: 
{ jsonbSchemaType: { + json: { + jsonbSchemaType: { a: { type: "boolean" }, arr: { enum: ["1", "2", "3"] }, arr1: { enum: [1, 2, 3] }, arr2: { type: "integer[]" }, arrStr: { type: "string[]", optional: true, nullable: true }, - o: { optional: true, nullable: true, oneOfType: [ - { o1: "integer" }, - { o2: "boolean" } - ] }, - } + o: { + optional: true, + nullable: true, + oneOfType: [{ o1: "integer" }, { o2: "boolean" }], + }, + }, }, colOneOf: { enum: ["a", "b", "c"] }, - status: { - nullable: true, + status: { + nullable: true, jsonbSchema: { oneOfType: [ { ok: { type: "string" } }, { err: { type: "string" } }, - { - loading: { type: { - loaded: { type: "number" }, - total: { type: "number" } - } - } - } - ] - } + { + loading: { + type: { + loaded: { type: "number" }, + total: { type: "number" }, + }, + }, + }, + ], + }, }, - jsonOneOf: { - nullable: true, - jsonbSchema: { + jsonOneOf: { + nullable: true, + jsonbSchema: { oneOfType: [ { command: { enum: ["a"] } }, - { + { command: { enum: ["b"] }, - option: { type: "integer[]" } - } - ] - } + option: { type: "integer[]" }, + }, + ], + }, }, - table_config: { nullable: true, jsonbSchemaType: { - referencedTables: { optional: true, arrayOfType: { name: "string", minFiles: "number" } }, + table_config: { + nullable: true, + jsonbSchemaType: { + referencedTables: { + optional: true, + arrayOfType: { name: "string", minFiles: "number" }, + }, recType: { - nullable: true, optional: true, record: { keysEnum: ["a", "b"], values: { type: { bools: "boolean[]" } } } - } - } - } - } + nullable: true, + optional: true, + record: { + keysEnum: ["a", "b"], + values: { type: { bools: "boolean[]" } }, + }, + }, + }, + }, + }, }, lookup_col1: { - dropIfExistsCascade: true, + dropIfExistsCascade: true, isLookupTable: { values: { a: { description: "desc" }, - b: {} + b: {}, }, - } + }, }, uuid_text: { columns: { @@ -122,15 +137,15 @@ export const testTableConfig: TableConfig<{ en: 1, fr: 1 }> = { references: { tableName: "lookup_col1", nullable: true, - } + }, }, col2: { references: { tableName: "lookup_col1", nullable: true, - } - } - } + }, + }, + }, }, api_table: { dropIfExists: true, @@ -139,18 +154,18 @@ export const testTableConfig: TableConfig<{ en: 1, fr: 1 }> = { }, onMount: async ({ _db, dbo }) => { await _db.any(`ALTER TABLE api_table ADD COLUMN col1 TEXT`); - } + }, }, rec_ref: { columns: { id: "SERIAL PRIMARY KEY", - } + }, }, rec: { columns: { id: "SERIAL PRIMARY KEY", parent_id: "INTEGER REFERENCES rec", recf: "INTEGER REFERENCES rec_ref", - } - } -} \ No newline at end of file + }, + }, +}; diff --git a/tests/serverOnlyQueries.spec.ts b/tests/serverOnlyQueries.spec.ts index 7eca91c4..3468f0a8 100644 --- a/tests/serverOnlyQueries.spec.ts +++ b/tests/serverOnlyQueries.spec.ts @@ -3,30 +3,32 @@ import type { DBHandlerServer } from "../dist/DboBuilder"; import { test, describe } from "node:test"; export const serverOnlyQueries = async (db: DBHandlerServer) => { - - await describe('Server Only Queries', async () => { - await test('Self reference recursion bug', async () => { + await describe("Server Only Queries", async () => { + await test("Self reference recursion bug", async () => { await db.rec.findOne!({ id: 1 }, { select: { "*": 1, rec_ref: "*" } }); }); - await test('Transactions', async () => { - await db.tx!(async t => { + await test("Transactions", async () => { + await db.tx!(async (t) => { await t.items.insert!({ name: "tx_" }); const expect1 = await t.items.count!({ name: "tx_" }); const expect0 = await db.items.count!({ name: "tx_" 
}); - if(expect0 !== 0 || expect1 !== 1) throw "db.tx failed"; - + if (expect0 !== 0 || expect1 !== 1) throw "db.tx failed"; + //throw "err"; // Any errors will revert all data-changing commands using the transaction object ( t ) }); const expect1 = await db.items.count!({ name: "tx_" }); - if(expect1 !== 1) throw "db.tx failed"; + if (expect1 !== 1) throw "db.tx failed"; }); - - await test('TableConfig onMount works', async () => { + + await test("TableConfig onMount works", async () => { await db.api_table.findOne!({ id: 1 }); - const newRow = await db.api_table.insert!({ }, { returning: "*" }); - if(newRow.col1 !== null) { - throw "api_table onMount failed: col1 missing. Got: " + JSON.stringify(newRow); + const newRow = await db.api_table.insert!({}, { returning: "*" }); + if (newRow.col1 !== null) { + throw ( + "api_table onMount failed: col1 missing. Got: " + + JSON.stringify(newRow) + ); } - }); + }); }); -} \ No newline at end of file +};
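For context on the db.tx assertions reformatted above: any throw inside the callback reverts every statement issued through the transaction handle t, while a clean return commits them. Below is a minimal sketch of that contract on the failure path; it is not part of the patch, txRollbackSketch is a hypothetical helper, and it assumes the same DBHandlerServer handle and items table the tests use.

import type { DBHandlerServer } from "../dist/DboBuilder";

// Hypothetical sketch, not part of this diff: demonstrates that a throw
// inside db.tx rolls back work done through the transaction handle `t`.
export const txRollbackSketch = async (db: DBHandlerServer) => {
  try {
    await db.tx!(async (t) => {
      // Visible only through `t` until the callback resolves
      await t.items.insert!({ name: "tx_sketch_" });
      throw new Error("abort"); // reverts the insert above
    });
  } catch {
    // The thrown error surfaces here, after the rollback
  }
  // Nothing was committed, so the outer handle sees no row
  const count = await db.items.count!({ name: "tx_sketch_" });
  if (+count !== 0) throw "rollback was expected";
};

This mirrors the comment kept in the test above ("Any errors will revert all data-changing commands using the transaction object ( t )"), exercised on the failure path instead of the happy path.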