diff --git a/.github/workflows/TypeScriptWorkspace.yml b/.github/workflows/TypeScriptWorkspace.yml index da58696..7156946 100644 --- a/.github/workflows/TypeScriptWorkspace.yml +++ b/.github/workflows/TypeScriptWorkspace.yml @@ -13,9 +13,38 @@ on: workflow_dispatch: jobs: - hello: - name: "Hello" + build_and_test: + name: Build & Test runs-on: ubuntu-latest steps: - - name: Say Hello - run: echo Hello from TypeScript Workspace job! + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + package_json_file: ts/package.json + + - name: Setup Node with pnpm cache + uses: actions/setup-node@v4 + with: + cache: 'pnpm' + cache-dependency-path: ts/pnpm-lock.yaml + + # Src files are built using preinstall + - name: Install dependencies & build src + working-directory: ts + run: pnpm install + + # This step is needed to type-check test files. (Src files are built during install.) + - name: Build src & test (to type-check test) + working-directory: ts + run: pnpm build + + - name: Check formatting & linting rules + working-directory: ts + run: pnpm check + + - name: Test + working-directory: ts + run: pnpm test diff --git a/ts/.gitignore b/ts/.gitignore new file mode 100644 index 0000000..fc688a9 --- /dev/null +++ b/ts/.gitignore @@ -0,0 +1,3 @@ +node_modules +**/out/* +**/test/tsconfig.tsbuildinfo diff --git a/ts/.prettierignore b/ts/.prettierignore new file mode 100644 index 0000000..1c30098 --- /dev/null +++ b/ts/.prettierignore @@ -0,0 +1,3 @@ +out +**/*.json +README.md diff --git a/ts/.prettierrc b/ts/.prettierrc new file mode 100644 index 0000000..544138b --- /dev/null +++ b/ts/.prettierrc @@ -0,0 +1,3 @@ +{ + "singleQuote": true +} diff --git a/ts/README.md b/ts/README.md index dc6b64d..89886d2 100644 --- a/ts/README.md +++ b/ts/README.md @@ -1 +1,55 @@ # TypeScript Workspace + +## Structure + +This directory is a [pnpm workspace](https://pnpm.io/workspaces). 
Use the [pnpm](https://pnpm.io/) package manager, not npm or yarn. + +One (recommended) way to install pnpm is using [corepack](https://pnpm.io/installation#using-corepack). + +## Build + +Run `pnpm install` (or just `pnpm i`) in a package directory to install dependencies and build. Note that this will also build dependent packages in this workspace. This builds src files, but not test files. + +Run `pnpm build` to just run the build. This will not build dependencies. It will build both src and test files. To build just src or just test, use `pnpm build:src` or `pnpm build:test`. + +Run `pnpm build:watch` in a package to rebuild (both src and test files) when source files are changed. + +Run `pnpm check` in a package to check formatting and linting rules. To just check formatting, run `pnpm format:check`. To correct formatting, run `pnpm format:write`. To just check linting rules, run `pnpm lint`. + +Run `pnpm clean` in a package to remove built output files for that package. + +Run `pnpm build` at the root of the workspace to build all packages (both src and test files). + +Run `pnpm build:watch` at the root to rebuild (only) relevant packages when source files are changed. + +Run `pnpm check` at the root of the workspace to check formatting and linting rules for all packages. + +## Test + +Run `pnpm test` in a package directory to run its tests. + +Run `pnpm test:watch` in a package directory to run its tests and rerun when source files change. + +Tests use [vitest](https://vitest.dev/), either in Node or in [Browser Mode](https://vitest.dev/guide/browser.html) (using Chrome), depending on the package. + +Run `pnpm test` at the root of the workspace to test all packages. + +## Create + +To create a new package, add a directory under `pkgs`. + +Add a `package.json` file following the conventions of other packages. + +The `package.json` should have `preinstall`, `build`, `clean`, and `test` scripts, as well as `check`, `format`, and `lint` scripts. 
See existing packages for details. +It should have a `name`, `version`, and `description`, set `"type": "module"`, and set `main`, `module`, and `types` appropriately. + +Production source code should go in a `src` subdirectory. +Put a `tsconfig.json` in this directory that extends `tsconfig.library.json` and sets the `outDir` to `../out`. + +Test source code should go in a `test` subdirectory. +Put a `tsconfig.json` in this directory that extends `tsconfig.test.json` and references `../src`. + +For browser-based tests, create a `vite.config.js` file, and enable `browser` mode, set the `headless` option to `true`, and set the `type` to `chrome`. +Note that `crossOriginIsolated` can be enabled by setting server headers. See example in `wasm-extension`. + +Add references to both the `src` and `test` directories of your new package to the root `tsconfig.json` of the workspace. diff --git a/ts/eslint.config.mjs b/ts/eslint.config.mjs new file mode 100644 index 0000000..3fad129 --- /dev/null +++ b/ts/eslint.config.mjs @@ -0,0 +1,21 @@ +// @ts-check + +import eslint from '@eslint/js'; +import tseslint from 'typescript-eslint'; + +export default tseslint.config( + eslint.configs.recommended, + ...tseslint.configs.recommended, + { + rules: { + '@typescript-eslint/no-unused-vars': [ + 'error', + { + argsIgnorePattern: '^_', + varsIgnorePattern: '^_', + caughtErrorsIgnorePattern: '^_', + }, + ], + }, + }, +); diff --git a/ts/package.json b/ts/package.json new file mode 100644 index 0000000..07aacf8 --- /dev/null +++ b/ts/package.json @@ -0,0 +1,19 @@ +{ + "private": true, + "scripts": { + "build": "tsc -b", + "build:watch": "tsc -b --watch", + "check": "pnpm -r check", + "test": "pnpm -r test" + }, + "devDependencies": { + "typescript": "^5.8.3" + }, + "pnpm": { + "overrides": { + "tar-fs": "^3.0.8", + "ws": "^8.18.1" + } + }, + "packageManager": 
"pnpm@9.15.2+sha512.93e57b0126f0df74ce6bff29680394c0ba54ec47246b9cf321f0121d8d9bb03f750a705f24edc3c1180853afd7c2c3b94196d0a3d53d3e069d9e2793ef11f321" +} diff --git a/ts/pkgs/duckdb-data-reader/package.json b/ts/pkgs/duckdb-data-reader/package.json new file mode 100644 index 0000000..86e1868 --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/package.json @@ -0,0 +1,38 @@ +{ + "name": "@duckdb/data-reader", + "version": "0.0.1", + "description": "Utilities for representing and reading tabular data returned by DuckDB", + "type": "module", + "main": "./out/index.js", + "module": "./out/index.js", + "types": "./out/index.d.ts", + "scripts": { + "preinstall": "pnpm build:src", + "build": "tsc -b src test", + "build:src": "tsc -b src", + "build:test": "tsc -b test", + "build:watch": "tsc -b src test --watch", + "check": "pnpm format:check && pnpm lint", + "clean": "rimraf out", + "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check", + "format:write": "prettier . --ignore-path $(find-up .prettierignore) --write", + "lint": "pnpm eslint src test", + "test": "vitest run", + "test:watch": "vitest" + }, + "dependencies": { + "@duckdb/data-types": "workspace:*", + "@duckdb/data-values": "workspace:*" + }, + "devDependencies": { + "@eslint/js": "^9.24.0", + "eslint": "^9.24.0", + "find-up-cli": "^6.0.0", + "prettier": "^3.5.3", + "rimraf": "^6.0.1", + "typescript": "^5.8.3", + "typescript-eslint": "^8.30.1", + "vite": "^6.2.6", + "vitest": "^3.1.1" + } +} diff --git a/ts/pkgs/duckdb-data-reader/src/AsyncDuckDBDataBatchIterator.ts b/ts/pkgs/duckdb-data-reader/src/AsyncDuckDBDataBatchIterator.ts new file mode 100644 index 0000000..3f36e7b --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/src/AsyncDuckDBDataBatchIterator.ts @@ -0,0 +1,11 @@ +import { DuckDBData } from './DuckDBData.js'; + +export type DuckDBDataBatchIteratorResult = IteratorResult< + DuckDBData, + DuckDBData | undefined +>; + +export type AsyncDuckDBDataBatchIterator = AsyncIterator< + DuckDBData, 
+ DuckDBData | undefined +>; diff --git a/ts/pkgs/duckdb-data-reader/src/ColumnFilteredDuckDBData.ts b/ts/pkgs/duckdb-data-reader/src/ColumnFilteredDuckDBData.ts new file mode 100644 index 0000000..123bb53 --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/src/ColumnFilteredDuckDBData.ts @@ -0,0 +1,55 @@ +import { DuckDBType } from '@duckdb/data-types'; +import { DuckDBValue } from '@duckdb/data-values'; +import { DuckDBData } from './DuckDBData.js'; + +export class ColumnFilteredDuckDBData extends DuckDBData { + private readonly inputColumnIndexForOutputColumnIndex: readonly number[]; + + constructor( + private data: DuckDBData, + columnVisibility: readonly boolean[], + ) { + super(); + + const inputColumnIndexForOutputColumnIndex: number[] = []; + const inputColumnCount = data.columnCount; + let inputIndex = 0; + while (inputIndex < inputColumnCount) { + while (inputIndex < inputColumnCount && !columnVisibility[inputIndex]) { + inputIndex++; + } + if (inputIndex < inputColumnCount) { + inputColumnIndexForOutputColumnIndex.push(inputIndex++); + } + } + this.inputColumnIndexForOutputColumnIndex = + inputColumnIndexForOutputColumnIndex; + } + + get columnCount() { + return this.inputColumnIndexForOutputColumnIndex.length; + } + + get rowCount() { + return this.data.rowCount; + } + + columnName(columnIndex: number): string { + return this.data.columnName( + this.inputColumnIndexForOutputColumnIndex[columnIndex], + ); + } + + columnType(columnIndex: number): DuckDBType { + return this.data.columnType( + this.inputColumnIndexForOutputColumnIndex[columnIndex], + ); + } + + value(columnIndex: number, rowIndex: number): DuckDBValue { + return this.data.value( + this.inputColumnIndexForOutputColumnIndex[columnIndex], + rowIndex, + ); + } +} diff --git a/ts/pkgs/duckdb-data-reader/src/DuckDBData.ts b/ts/pkgs/duckdb-data-reader/src/DuckDBData.ts new file mode 100644 index 0000000..13ead1f --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/src/DuckDBData.ts @@ -0,0 +1,114 @@ +import 
{ DuckDBType } from '@duckdb/data-types'; +import { DuckDBValue } from '@duckdb/data-values'; +import { DuckDBRow } from './DuckDBRow.js'; + +/** + * A two-dimensional table of data along with column metadata. + * + * May represent either a partial or full result set, or a batch of rows read from a result stream. + * */ +export abstract class DuckDBData { + /** + * Number of columns. + * + * May be zero until the first part of the result is read. Will not change after the initial read. + */ + abstract get columnCount(): number; + + /** + * Current number of rows. + * + * For a partial result set, this may change as more rows are read. + * For a full result, or a batch, this will not change. + */ + abstract get rowCount(): number; + + /** + * Returns the name of column at the given index (starting at zero). + * + * Note that duplicate column names are possible. + */ + abstract columnName(columnIndex: number): string; + + /** + * Returns the type of the column at the given index (starting at zero). + */ + abstract columnType(columnIndex: number): DuckDBType; + + /** + * Returns the value for the given column and row. Both are zero-indexed. + */ + abstract value(columnIndex: number, rowIndex: number): DuckDBValue; + + /** + * Returns the single value, assuming exactly one column and row. Throws otherwise. + */ + singleValue(): DuckDBValue { + const { columnCount, rowCount } = this; + if (columnCount === 0) { + throw Error('no column data'); + } + if (rowCount === 0) { + throw Error('no rows'); + } + if (columnCount > 1) { + throw Error('more than one column'); + } + if (rowCount > 1) { + throw Error('more than one row'); + } + return this.value(0, 0); + } + + /** + * Returns the column names as an array. 
+ */ + columnNames(): readonly string[] { + const { columnCount } = this; + const outputColumnNames: string[] = []; + for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) { + outputColumnNames.push(this.columnName(columnIndex)); + } + return outputColumnNames; + } + + /** + * Returns the column names as an array, deduplicated following DuckDB's "Auto-Increment Duplicate Column Names" + * behavior. + */ + deduplicatedColumnNames(): readonly string[] { + const { columnCount } = this; + const outputColumnNames: string[] = []; + const columnNameCount: { [columnName: string]: number } = {}; + for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) { + const inputColumnName = this.columnName(columnIndex); + const nameCount = (columnNameCount[inputColumnName] || 0) + 1; + columnNameCount[inputColumnName] = nameCount; + if (nameCount > 1) { + outputColumnNames.push(`${inputColumnName}:${nameCount - 1}`); + } else { + outputColumnNames.push(inputColumnName); + } + } + return outputColumnNames; + } + + /** + * Returns the data as an array of row objects, keyed by column names. + * + * The column names are deduplicated following DuckDB's "Auto-Increment Duplicate Column Names" behavior. 
+ */ + toRows(): readonly DuckDBRow[] { + const { rowCount, columnCount } = this; + const outputColumnNames = this.deduplicatedColumnNames(); + const outputRows: DuckDBRow[] = []; + for (let rowIndex = 0; rowIndex < rowCount; rowIndex++) { + const row: { [columnName: string]: DuckDBValue } = {}; + for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) { + row[outputColumnNames[columnIndex]] = this.value(columnIndex, rowIndex); + } + outputRows.push(row); + } + return outputRows; + } +} diff --git a/ts/pkgs/duckdb-data-reader/src/DuckDBDataReader.ts b/ts/pkgs/duckdb-data-reader/src/DuckDBDataReader.ts new file mode 100644 index 0000000..38fdd2a --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/src/DuckDBDataReader.ts @@ -0,0 +1,179 @@ +import { DuckDBType } from '@duckdb/data-types'; +import { DuckDBValue } from '@duckdb/data-values'; +import { AsyncDuckDBDataBatchIterator } from './AsyncDuckDBDataBatchIterator.js'; +import { DuckDBData } from './DuckDBData.js'; + +// Stores information about a run of similarly-sized batches. +interface BatchSizeRun { + batchCount: number; + batchSize: number; + rowCount: number; // Always equal to batchCount * batchSize. Precalculated for efficiency. +} + +/** + * A result set that can be read incrementally. + * + * Represents either a partial or full result. + * For full results, the `done` property will be true. + * To read more rows into a partial result, use the `readUntil` or `readAll` methods. + */ +export class DuckDBDataReader extends DuckDBData { + private readonly iterator: AsyncDuckDBDataBatchIterator; + + private iteratorDone: boolean = false; + + private totalRowsRead: number = 0; + + private readonly batches: DuckDBData[] = []; + + // Stores the sizes of the batches using run-length encoding to make lookup efficient. + // Since batches before the last should be a consistent size, this array is not expected to grow beyond length 2. 
+ // (One run for the N-1 batches of consistent size, plus one run for the differently-sized last batch, if any.) + private readonly batchSizeRuns: BatchSizeRun[] = []; + + constructor(iterator: AsyncDuckDBDataBatchIterator) { + super(); + this.iterator = iterator; + } + + /** + * Number of columns. + * + * Will be zero until the first part of the result is read. Will not change after the initial read. + */ + public get columnCount(): number { + if (this.batches.length === 0) { + return 0; + } + return this.batches[0].columnCount; + } + + /** + * Current number of rows. + * + * For a partial result set, with `done` false, this may change as more rows are read. + * For a full result, with `done` true, this will not change. + */ + public get rowCount(): number { + return this.totalRowsRead; + } + + /** + * Returns the name of the column at the given index (starting at zero). + * + * Note that duplicate column names are possible. + * + * Will throw an error if no part of the result has been read yet. + */ + public columnName(columnIndex: number): string { + if (this.batches.length === 0) { + throw Error('no column data'); + } + return this.batches[0].columnName(columnIndex); + } + + /** + * Returns the type of the column at the given index (starting at zero). + * + * Will throw an error if no part of the result has been read yet. + */ + public columnType(columnIndex: number): DuckDBType { + if (this.batches.length === 0) { + throw Error('no column data'); + } + return this.batches[0].columnType(columnIndex); + } + + /** + * Returns the value for the given column and row. Both are zero-indexed. + * + * Will throw an error if `rowIndex` is not less than the current `rowCount`. + */ + public value(columnIndex: number, rowIndex: number): DuckDBValue { + if (this.totalRowsRead === 0) { + throw Error('no data'); + } + let batchIndex = 0; + let currentRowIndex = rowIndex; + // Find which run of batches our row is in. 
+ // Since batchSizeRuns shouldn't ever be longer than 2, this should be O(1). + for (const run of this.batchSizeRuns) { + if (currentRowIndex < run.rowCount) { + // The row we're looking for is in this run. + // Calculate the batch index and the row index in that batch. + batchIndex += Math.floor(currentRowIndex / run.batchSize); + const rowIndexInBatch = currentRowIndex % run.batchSize; + const batch = this.batches[batchIndex]; + return batch.value(columnIndex, rowIndexInBatch); + } + // The row we're looking for is not in this run. + // Update our counts for this run and move to the next one. + batchIndex += run.batchCount; + currentRowIndex -= run.rowCount; + } + // We didn't find our row. It must have been out of range. + throw Error( + `Row index ${rowIndex} requested, but only ${this.totalRowsRead} row have been read so far.`, + ); + } + + /** + * Returns true if all rows have been read. + */ + public get done(): boolean { + return this.iteratorDone; + } + + /** + * Read all rows. + */ + public async readAll(): Promise { + return this.read(); + } + + /** + * Read rows until at least the given target row count has been met. + * + * Note that the resulting row count could be greater than the target, since rows are read in batches, typically of 2048 rows each. 
+ */ + public async readUntil(targetRowCount: number): Promise { + return this.read(targetRowCount); + } + + private async read(targetRowCount?: number): Promise { + while ( + !( + this.iteratorDone || + (targetRowCount !== undefined && this.totalRowsRead >= targetRowCount) + ) + ) { + const { value, done } = await this.iterator.next(); + if (value) { + this.updateBatchSizeRuns(value); + this.batches.push(value); + this.totalRowsRead += value.rowCount; + } + if (done) { + this.iteratorDone = done; + } + } + } + + private updateBatchSizeRuns(batch: DuckDBData) { + if (this.batchSizeRuns.length > 0) { + const lastRun = this.batchSizeRuns[this.batchSizeRuns.length - 1]; + if (lastRun.batchSize === batch.rowCount) { + // If the new batch is the same size as the last one, just update our last run. + lastRun.batchCount += 1; + lastRun.rowCount += lastRun.batchSize; + return; + } + } + // If this is our first batch, or it's a different size, create a new run. + this.batchSizeRuns.push({ + batchCount: 1, + batchSize: batch.rowCount, + rowCount: batch.rowCount, + }); + } +} diff --git a/ts/pkgs/duckdb-data-reader/src/DuckDBRow.ts b/ts/pkgs/duckdb-data-reader/src/DuckDBRow.ts new file mode 100644 index 0000000..4bc0007 --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/src/DuckDBRow.ts @@ -0,0 +1,5 @@ +import { DuckDBValue } from '@duckdb/data-values'; + +export interface DuckDBRow { + readonly [columnName: string]: DuckDBValue; +} diff --git a/ts/pkgs/duckdb-data-reader/src/MemoryDuckDBData.ts b/ts/pkgs/duckdb-data-reader/src/MemoryDuckDBData.ts new file mode 100644 index 0000000..cdc3c25 --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/src/MemoryDuckDBData.ts @@ -0,0 +1,32 @@ +import { DuckDBType } from '@duckdb/data-types'; +import { DuckDBValue } from '@duckdb/data-values'; +import { DuckDBData } from './DuckDBData.js'; + +export class MemoryDuckDBData extends DuckDBData { + constructor( + private columns: { name: string; type: DuckDBType }[], + private values: 
DuckDBValue[][], + ) { + super(); + } + + get columnCount() { + return this.columns.length; + } + + get rowCount() { + return this.values.length > 0 ? this.values[0].length : 0; + } + + columnName(columnIndex: number): string { + return this.columns[columnIndex].name; + } + + columnType(columnIndex: number): DuckDBType { + return this.columns[columnIndex].type; + } + + value(columnIndex: number, rowIndex: number): DuckDBValue { + return this.values[columnIndex][rowIndex]; + } +} diff --git a/ts/pkgs/duckdb-data-reader/src/index.ts b/ts/pkgs/duckdb-data-reader/src/index.ts new file mode 100644 index 0000000..b60e425 --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/src/index.ts @@ -0,0 +1,6 @@ +export * from './AsyncDuckDBDataBatchIterator.js'; +export * from './ColumnFilteredDuckDBData.js'; +export * from './DuckDBData.js'; +export * from './DuckDBDataReader.js'; +export * from './DuckDBRow.js'; +export * from './MemoryDuckDBData.js'; diff --git a/ts/pkgs/duckdb-data-reader/src/tsconfig.json b/ts/pkgs/duckdb-data-reader/src/tsconfig.json new file mode 100644 index 0000000..f970179 --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/src/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "../../../tsconfig.library.json", + "compilerOptions": { + "outDir": "../out" + } +} diff --git a/ts/pkgs/duckdb-data-reader/test/DuckDBDataReader.test.ts b/ts/pkgs/duckdb-data-reader/test/DuckDBDataReader.test.ts new file mode 100644 index 0000000..2de8554 --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/test/DuckDBDataReader.test.ts @@ -0,0 +1,177 @@ +import { DuckDBType, INTEGER, VARCHAR } from '@duckdb/data-types'; +import { DuckDBValue } from '@duckdb/data-values'; +import { expect, suite, test } from 'vitest'; +import { + AsyncDuckDBDataBatchIterator, + DuckDBData, + DuckDBDataReader, + MemoryDuckDBData, +} from '../src'; + +const ITERATOR_DONE = Object.freeze({ done: true, value: undefined }); + +class TestAsyncDuckDBDataBatchIterator implements AsyncDuckDBDataBatchIterator { + private 
batches: readonly DuckDBData[]; + + private nextBatchIndex: number | null; + + constructor(batches: readonly DuckDBData[]) { + this.batches = batches; + this.nextBatchIndex = this.batches.length > 0 ? 0 : null; + } + + async next(): Promise> { + if (this.nextBatchIndex == null) { + return ITERATOR_DONE; + } + const nextBatch = this.batches[this.nextBatchIndex++]; + if (this.nextBatchIndex >= this.batches.length) { + this.nextBatchIndex = null; + } + return { + done: this.nextBatchIndex == null, + value: nextBatch, + } as IteratorResult; + } + + async return(): Promise> { + return ITERATOR_DONE; + } + + async throw(_err: Error): Promise> { + return ITERATOR_DONE; + } + + [Symbol.asyncIterator](): AsyncDuckDBDataBatchIterator { + return this; + } +} + +function expectColumns( + data: DuckDBData, + columns: { name: string; type: DuckDBType }[], +) { + expect(data.columnCount).toBe(columns.length); + for (let columnIndex = 0; columnIndex < columns.length; columnIndex++) { + const column = columns[columnIndex]; + expect(data.columnName(columnIndex)).toBe(column.name); + expect(data.columnType(columnIndex)).toStrictEqual(column.type); + } +} + +function expectValues(data: DuckDBData, values: DuckDBValue[][]) { + for (let columnIndex = 0; columnIndex < values.length; columnIndex++) { + const column = values[columnIndex]; + for (let rowIndex = 0; rowIndex < column.length; rowIndex++) { + expect(data.value(columnIndex, rowIndex)).toBe(column[rowIndex]); + } + } +} + +suite('DuckDBDataReader', () => { + test('should work for an empty batch list', async () => { + const batches: DuckDBData[] = []; + const iterator = new TestAsyncDuckDBDataBatchIterator(batches); + const reader = new DuckDBDataReader(iterator); + expect(reader.done).toBe(false); + expect(reader.columnCount).toBe(0); + expect(reader.rowCount).toBe(0); + await reader.readAll(); + expect(reader.done).toBe(true); + expect(reader.columnCount).toBe(0); + expect(reader.rowCount).toBe(0); + }); + test('should work for 
a single batch', async () => { + const columns = [ + { name: 'num', type: INTEGER }, + { name: 'str', type: VARCHAR }, + ]; + const values = [ + [2, 3, 5], + ['z', 'y', 'x'], + ]; + const batches: DuckDBData[] = [new MemoryDuckDBData(columns, values)]; + const iterator = new TestAsyncDuckDBDataBatchIterator(batches); + const reader = new DuckDBDataReader(iterator); + expect(reader.done).toBe(false); + expect(reader.columnCount).toBe(0); + expect(reader.rowCount).toBe(0); + await reader.readAll(); + expect(reader.done).toBe(true); + expectColumns(reader, columns); + expect(reader.rowCount).toBe(3); + expectValues(reader, values); + }); + test('should work for multiple batches', async () => { + const columns = [ + { name: 'num', type: INTEGER }, + { name: 'str', type: VARCHAR }, + ]; + const values = [ + [12, 13, 15, 22, 23, 25, 32, 33, 35], + ['z1', 'y1', 'x1', 'z2', 'y2', 'x2', 'z3', 'y3', 'x3'], + ]; + const batches: DuckDBData[] = [ + new MemoryDuckDBData(columns, [ + values[0].slice(0, 3), + values[1].slice(0, 3), + ]), + new MemoryDuckDBData(columns, [ + values[0].slice(3, 6), + values[1].slice(3, 6), + ]), + new MemoryDuckDBData(columns, [ + values[0].slice(6, 9), + values[1].slice(6, 9), + ]), + ]; + const iterator = new TestAsyncDuckDBDataBatchIterator(batches); + const reader = new DuckDBDataReader(iterator); + expect(reader.done).toBe(false); + expect(reader.columnCount).toBe(0); + expect(reader.rowCount).toBe(0); + await reader.readAll(); + expect(reader.done).toBe(true); + expectColumns(reader, columns); + expect(reader.rowCount).toBe(9); + expectValues(reader, values); + }); + test('should work for partial reads of multiple batches', async () => { + const columns = [ + { name: 'num', type: INTEGER }, + { name: 'str', type: VARCHAR }, + ]; + const values = [ + [12, 13, 15, 22, 23, 25, 32, 33], + ['z1', 'y1', 'x1', 'z2', 'y2', 'x2', 'z3', 'y3'], + ]; + const batches: DuckDBData[] = [ + new MemoryDuckDBData(columns, [ + values[0].slice(0, 3), + 
values[1].slice(0, 3), + ]), + new MemoryDuckDBData(columns, [ + values[0].slice(3, 6), + values[1].slice(3, 6), + ]), + new MemoryDuckDBData(columns, [ + values[0].slice(6, 8), + values[1].slice(6, 8), + ]), + ]; + const iterator = new TestAsyncDuckDBDataBatchIterator(batches); + const reader = new DuckDBDataReader(iterator); + expect(reader.done).toBe(false); + expect(reader.columnCount).toBe(0); + expect(reader.rowCount).toBe(0); + await reader.readUntil(5); + expect(reader.done).toBe(false); + expectColumns(reader, columns); + expect(reader.rowCount).toBe(6); + expectValues(reader, [values[0].slice(0, 6), values[1].slice(0, 6)]); + await reader.readUntil(10); + expect(reader.done).toBe(true); + expect(reader.rowCount).toBe(8); + expectValues(reader, values); + }); +}); diff --git a/ts/pkgs/duckdb-data-reader/test/tsconfig.json b/ts/pkgs/duckdb-data-reader/test/tsconfig.json new file mode 100644 index 0000000..8c93c98 --- /dev/null +++ b/ts/pkgs/duckdb-data-reader/test/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "../../../tsconfig.test.json", + "references": [ + { "path": "../src" } + ] +} diff --git a/ts/pkgs/duckdb-data-types/package.json b/ts/pkgs/duckdb-data-types/package.json new file mode 100644 index 0000000..6cee53b --- /dev/null +++ b/ts/pkgs/duckdb-data-types/package.json @@ -0,0 +1,37 @@ +{ + "name": "@duckdb/data-types", + "version": "0.0.1", + "description": "Utilities for representing DuckDB types", + "type": "module", + "main": "./out/index.js", + "module": "./out/index.js", + "types": "./out/index.d.ts", + "scripts": { + "preinstall": "pnpm build:src", + "build": "tsc -b src test", + "build:src": "tsc -b src", + "build:test": "tsc -b test", + "build:watch": "tsc -b src test --watch", + "check": "pnpm format:check && pnpm lint", + "clean": "rimraf out", + "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check", + "format:write": "prettier . 
--ignore-path $(find-up .prettierignore) --write", + "lint": "pnpm eslint src test", + "test": "vitest run", + "test:watch": "vitest" + }, + "dependencies": { + "@duckdb/data-values": "workspace:*" + }, + "devDependencies": { + "@eslint/js": "^9.24.0", + "eslint": "^9.24.0", + "find-up-cli": "^6.0.0", + "prettier": "^3.5.3", + "rimraf": "^6.0.1", + "typescript": "^5.8.3", + "typescript-eslint": "^8.30.1", + "vite": "^6.2.6", + "vitest": "^3.1.1" + } +} diff --git a/ts/pkgs/duckdb-data-types/src/DuckDBType.ts b/ts/pkgs/duckdb-data-types/src/DuckDBType.ts new file mode 100644 index 0000000..fcfa40e --- /dev/null +++ b/ts/pkgs/duckdb-data-types/src/DuckDBType.ts @@ -0,0 +1,989 @@ +import { Json } from '@duckdb/data-values'; +import { DuckDBTypeId } from './DuckDBTypeId.js'; +import { quotedIdentifier, quotedString } from './sql.js'; + +export interface DuckDBTypeToStringOptions { + short?: boolean; +} +export abstract class BaseDuckDBType { + public readonly typeId: T; + public readonly alias?: string; + protected constructor(typeId: T, alias?: string) { + this.typeId = typeId; + this.alias = alias; + } + public toString(_options?: DuckDBTypeToStringOptions): string { + return this.alias ?? DuckDBTypeId[this.typeId]; + } + public toJson(): Json { + return { + typeId: this.typeId, + ...(this.alias ? { alias: this.alias } : {}), + }; + } +} + +export class DuckDBBooleanType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.BOOLEAN, alias); + } + public static readonly instance = new DuckDBBooleanType(); + public static create(alias?: string): DuckDBBooleanType { + return alias ? 
new DuckDBBooleanType(alias) : DuckDBBooleanType.instance; + } +} +export const BOOLEAN = DuckDBBooleanType.instance; + +export class DuckDBTinyIntType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.TINYINT, alias); + } + public static readonly instance = new DuckDBTinyIntType(); + public static create(alias?: string): DuckDBTinyIntType { + return alias ? new DuckDBTinyIntType(alias) : DuckDBTinyIntType.instance; + } + public static readonly Max = 2 ** 7 - 1; + public static readonly Min = -(2 ** 7); + public get max() { + return DuckDBTinyIntType.Max; + } + public get min() { + return DuckDBTinyIntType.Min; + } +} +export const TINYINT = DuckDBTinyIntType.instance; + +export class DuckDBSmallIntType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.SMALLINT, alias); + } + public static readonly instance = new DuckDBSmallIntType(); + public static create(alias?: string): DuckDBSmallIntType { + return alias ? new DuckDBSmallIntType(alias) : DuckDBSmallIntType.instance; + } + public static readonly Max = 2 ** 15 - 1; + public static readonly Min = -(2 ** 15); + public get max() { + return DuckDBSmallIntType.Max; + } + public get min() { + return DuckDBSmallIntType.Min; + } +} +export const SMALLINT = DuckDBSmallIntType.instance; + +export class DuckDBIntegerType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.INTEGER, alias); + } + public static readonly instance = new DuckDBIntegerType(); + public static create(alias?: string): DuckDBIntegerType { + return alias ? 
new DuckDBIntegerType(alias) : DuckDBIntegerType.instance; + } + public static readonly Max = 2 ** 31 - 1; + public static readonly Min = -(2 ** 31); + public get max() { + return DuckDBIntegerType.Max; + } + public get min() { + return DuckDBIntegerType.Min; + } +} +export const INTEGER = DuckDBIntegerType.instance; + +export class DuckDBBigIntType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.BIGINT, alias); + } + public static readonly instance = new DuckDBBigIntType(); + public static create(alias?: string): DuckDBBigIntType { + return alias ? new DuckDBBigIntType(alias) : DuckDBBigIntType.instance; + } + public static readonly Max: bigint = 2n ** 63n - 1n; + public static readonly Min: bigint = -(2n ** 63n); + public get max() { + return DuckDBBigIntType.Max; + } + public get min() { + return DuckDBBigIntType.Min; + } +} +export const BIGINT = DuckDBBigIntType.instance; + +export class DuckDBUTinyIntType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.UTINYINT, alias); + } + public static readonly instance = new DuckDBUTinyIntType(); + public static create(alias?: string): DuckDBUTinyIntType { + return alias ? new DuckDBUTinyIntType(alias) : DuckDBUTinyIntType.instance; + } + public static readonly Max = 2 ** 8 - 1; + public static readonly Min = 0; + public get max() { + return DuckDBUTinyIntType.Max; + } + public get min() { + return DuckDBUTinyIntType.Min; + } +} +export const UTINYINT = DuckDBUTinyIntType.instance; + +export class DuckDBUSmallIntType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.USMALLINT, alias); + } + public static readonly instance = new DuckDBUSmallIntType(); + public static create(alias?: string): DuckDBUSmallIntType { + return alias + ? 
new DuckDBUSmallIntType(alias) + : DuckDBUSmallIntType.instance; + } + public static readonly Max = 2 ** 16 - 1; + public static readonly Min = 0; + public get max() { + return DuckDBUSmallIntType.Max; + } + public get min() { + return DuckDBUSmallIntType.Min; + } +} +export const USMALLINT = DuckDBUSmallIntType.instance; + +export class DuckDBUIntegerType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.UINTEGER, alias); + } + public static readonly instance = new DuckDBUIntegerType(); + public static create(alias?: string): DuckDBUIntegerType { + return alias ? new DuckDBUIntegerType(alias) : DuckDBUIntegerType.instance; + } + public static readonly Max = 2 ** 32 - 1; + public static readonly Min = 0; + public get max() { + return DuckDBUIntegerType.Max; + } + public get min() { + return DuckDBUIntegerType.Min; + } +} +export const UINTEGER = DuckDBUIntegerType.instance; + +export class DuckDBUBigIntType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.UBIGINT, alias); + } + public static readonly instance = new DuckDBUBigIntType(); + public static create(alias?: string): DuckDBUBigIntType { + return alias ? new DuckDBUBigIntType(alias) : DuckDBUBigIntType.instance; + } + public static readonly Max: bigint = 2n ** 64n - 1n; + public static readonly Min: bigint = 0n; + public get max() { + return DuckDBUBigIntType.Max; + } + public get min() { + return DuckDBUBigIntType.Min; + } +} +export const UBIGINT = DuckDBUBigIntType.instance; + +export class DuckDBFloatType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.FLOAT, alias); + } + public static readonly instance = new DuckDBFloatType(); + public static create(alias?: string): DuckDBFloatType { + return alias ? 
new DuckDBFloatType(alias) : DuckDBFloatType.instance; + } + public static readonly Max = Math.fround(3.4028235e38); + public static readonly Min = Math.fround(-3.4028235e38); + public get max() { + return DuckDBFloatType.Max; + } + public get min() { + return DuckDBFloatType.Min; + } +} +export const FLOAT = DuckDBFloatType.instance; + +export class DuckDBDoubleType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.DOUBLE, alias); + } + public static readonly instance = new DuckDBDoubleType(); + public static create(alias?: string): DuckDBDoubleType { + return alias ? new DuckDBDoubleType(alias) : DuckDBDoubleType.instance; + } + public static readonly Max = Number.MAX_VALUE; + public static readonly Min = -Number.MAX_VALUE; + public get max() { + return DuckDBDoubleType.Max; + } + public get min() { + return DuckDBDoubleType.Min; + } +} +export const DOUBLE = DuckDBDoubleType.instance; + +export class DuckDBTimestampType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.TIMESTAMP, alias); + } + public static readonly instance = new DuckDBTimestampType(); + public static create(alias?: string): DuckDBTimestampType { + return alias + ? 
new DuckDBTimestampType(alias) + : DuckDBTimestampType.instance; + } + // TODO: common DuckDBValues on type objects + // public get epoch() { + // return DuckDBTimestampValue.Epoch; + // } + // public get max() { + // return DuckDBTimestampValue.Max; + // } + // public get min() { + // return DuckDBTimestampValue.Min; + // } + // public get posInf() { + // return DuckDBTimestampValue.PosInf; + // } + // public get negInf() { + // return DuckDBTimestampValue.NegInf; + // } +} +export const TIMESTAMP = DuckDBTimestampType.instance; + +export type DuckDBTimestampMicrosecondsType = DuckDBTimestampType; +export const DuckDBTimestampMicrosecondsType = DuckDBTimestampType; + +export class DuckDBDateType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.DATE, alias); + } + public static readonly instance = new DuckDBDateType(); + public static create(alias?: string): DuckDBDateType { + return alias ? new DuckDBDateType(alias) : DuckDBDateType.instance; + } + // TODO: common DuckDBValues on type objects + // public get epoch() { + // return DuckDBDateValue.Epoch; + // } + // public get max() { + // return DuckDBDateValue.Max; + // } + // public get min() { + // return DuckDBDateValue.Min; + // } + // public get posInf() { + // return DuckDBDateValue.PosInf; + // } + // public get negInf() { + // return DuckDBDateValue.NegInf; + // } +} +export const DATE = DuckDBDateType.instance; + +export class DuckDBTimeType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.TIME, alias); + } + public static readonly instance = new DuckDBTimeType(); + public static create(alias?: string): DuckDBTimeType { + return alias ? 
new DuckDBTimeType(alias) : DuckDBTimeType.instance; + } + // TODO: common DuckDBValues on type objects + // public get max() { + // return DuckDBTimeValue.Max; + // } + // public get min() { + // return DuckDBTimeValue.Min; + // } +} +export const TIME = DuckDBTimeType.instance; + +export class DuckDBIntervalType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.INTERVAL, alias); + } + public static readonly instance = new DuckDBIntervalType(); + public static create(alias?: string): DuckDBIntervalType { + return alias ? new DuckDBIntervalType(alias) : DuckDBIntervalType.instance; + } +} +export const INTERVAL = DuckDBIntervalType.instance; + +export class DuckDBHugeIntType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.HUGEINT, alias); + } + public static readonly instance = new DuckDBHugeIntType(); + public static create(alias?: string): DuckDBHugeIntType { + return alias ? new DuckDBHugeIntType(alias) : DuckDBHugeIntType.instance; + } + public static readonly Max: bigint = 2n ** 127n - 1n; + public static readonly Min: bigint = -(2n ** 127n); + public get max() { + return DuckDBHugeIntType.Max; + } + public get min() { + return DuckDBHugeIntType.Min; + } +} +export const HUGEINT = DuckDBHugeIntType.instance; + +export class DuckDBUHugeIntType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.UHUGEINT, alias); + } + public static readonly instance = new DuckDBUHugeIntType(); + public static create(alias?: string): DuckDBUHugeIntType { + return alias ? 
new DuckDBUHugeIntType(alias) : DuckDBUHugeIntType.instance; + } + public static readonly Max: bigint = 2n ** 128n - 1n; + public static readonly Min: bigint = 0n; + public get max() { + return DuckDBUHugeIntType.Max; + } + public get min() { + return DuckDBUHugeIntType.Min; + } +} +export const UHUGEINT = DuckDBUHugeIntType.instance; + +export class DuckDBVarCharType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.VARCHAR, alias); + } + public static readonly instance = new DuckDBVarCharType(); + public static create(alias?: string): DuckDBVarCharType { + return alias ? new DuckDBVarCharType(alias) : DuckDBVarCharType.instance; + } +} +export const VARCHAR = DuckDBVarCharType.instance; + +export class DuckDBBlobType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.BLOB, alias); + } + public static readonly instance = new DuckDBBlobType(); + public static create(alias?: string): DuckDBBlobType { + return alias ? new DuckDBBlobType(alias) : DuckDBBlobType.instance; + } +} +export const BLOB = DuckDBBlobType.instance; + +export class DuckDBDecimalType extends BaseDuckDBType { + public readonly width: number; + public readonly scale: number; + public constructor(width: number, scale: number, alias?: string) { + super(DuckDBTypeId.DECIMAL, alias); + this.width = width; + this.scale = scale; + } + public toString(_options?: DuckDBTypeToStringOptions): string { + return this.alias ?? `DECIMAL(${this.width},${this.scale})`; + } + public override toJson(): Json { + return { + typeId: this.typeId, + width: this.width, + scale: this.scale, + ...(this.alias ? 
{ alias: this.alias } : {}), + }; + } + public static readonly default = new DuckDBDecimalType(18, 3); +} +export function DECIMAL( + width?: number, + scale?: number, + alias?: string, +): DuckDBDecimalType { + if (width === undefined) { + return DuckDBDecimalType.default; + } + if (scale === undefined) { + return new DuckDBDecimalType(width, 0, alias); + } + return new DuckDBDecimalType(width, scale, alias); +} + +export class DuckDBTimestampSecondsType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.TIMESTAMP_S, alias); + } + public static readonly instance = new DuckDBTimestampSecondsType(); + public static create(alias?: string): DuckDBTimestampSecondsType { + return alias + ? new DuckDBTimestampSecondsType(alias) + : DuckDBTimestampSecondsType.instance; + } + // TODO: common DuckDBValues on type objects + // public get epoch() { + // return DuckDBTimestampSecondsValue.Epoch; + // } + // public get max() { + // return DuckDBTimestampSecondsValue.Max; + // } + // public get min() { + // return DuckDBTimestampSecondsValue.Min; + // } + // public get posInf() { + // return DuckDBTimestampSecondsValue.PosInf; + // } + // public get negInf() { + // return DuckDBTimestampSecondsValue.NegInf; + // } +} +export const TIMESTAMP_S = DuckDBTimestampSecondsType.instance; + +export class DuckDBTimestampMillisecondsType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.TIMESTAMP_MS, alias); + } + public static readonly instance = new DuckDBTimestampMillisecondsType(); + public static create(alias?: string): DuckDBTimestampMillisecondsType { + return alias + ? 
new DuckDBTimestampMillisecondsType(alias) + : DuckDBTimestampMillisecondsType.instance; + } + // TODO: common DuckDBValues on type objects + // public get epoch() { + // return DuckDBTimestampMillisecondsValue.Epoch; + // } + // public get max() { + // return DuckDBTimestampMillisecondsValue.Max; + // } + // public get min() { + // return DuckDBTimestampMillisecondsValue.Min; + // } + // public get posInf() { + // return DuckDBTimestampMillisecondsValue.PosInf; + // } + // public get negInf() { + // return DuckDBTimestampMillisecondsValue.NegInf; + // } +} +export const TIMESTAMP_MS = DuckDBTimestampMillisecondsType.instance; + +export class DuckDBTimestampNanosecondsType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.TIMESTAMP_NS, alias); + } + public static readonly instance = new DuckDBTimestampNanosecondsType(); + public static create(alias?: string): DuckDBTimestampNanosecondsType { + return alias + ? new DuckDBTimestampNanosecondsType(alias) + : DuckDBTimestampNanosecondsType.instance; + } + // TODO: common DuckDBValues on type objects + // public get epoch() { + // return DuckDBTimestampNanosecondsValue.Epoch; + // } + // public get max() { + // return DuckDBTimestampNanosecondsValue.Max; + // } + // public get min() { + // return DuckDBTimestampNanosecondsValue.Min; + // } + // public get posInf() { + // return DuckDBTimestampNanosecondsValue.PosInf; + // } + // public get negInf() { + // return DuckDBTimestampNanosecondsValue.NegInf; + // } +} +export const TIMESTAMP_NS = DuckDBTimestampNanosecondsType.instance; + +export class DuckDBEnumType extends BaseDuckDBType { + public readonly values: readonly string[]; + public readonly valueIndexes: Readonly>; + public readonly internalTypeId: DuckDBTypeId; + public constructor( + values: readonly string[], + internalTypeId: DuckDBTypeId, + alias?: string, + ) { + super(DuckDBTypeId.ENUM, alias); + this.values = values; + const valueIndexes: Record = {}; + for (let i = 0; i 
< values.length; i++) { + valueIndexes[values[i]] = i; + } + this.valueIndexes = valueIndexes; + this.internalTypeId = internalTypeId; + } + public indexForValue(value: string): number { + return this.valueIndexes[value]; + } + public toString(options?: DuckDBTypeToStringOptions): string { + if (this.alias) { + return this.alias; + } + if (options?.short) { + return `ENUM(…)`; + } + return `ENUM(${this.values.map(quotedString).join(', ')})`; + } + public override toJson(): Json { + return { + typeId: this.typeId, + values: [...this.values], + internalTypeId: this.internalTypeId, + ...(this.alias ? { alias: this.alias } : {}), + }; + } +} +export function ENUM8( + values: readonly string[], + alias?: string, +): DuckDBEnumType { + return new DuckDBEnumType(values, DuckDBTypeId.UTINYINT, alias); +} +export function ENUM16( + values: readonly string[], + alias?: string, +): DuckDBEnumType { + return new DuckDBEnumType(values, DuckDBTypeId.USMALLINT, alias); +} +export function ENUM32( + values: readonly string[], + alias?: string, +): DuckDBEnumType { + return new DuckDBEnumType(values, DuckDBTypeId.UINTEGER, alias); +} +export function ENUM( + values: readonly string[], + alias?: string, +): DuckDBEnumType { + if (values.length < 256) { + return ENUM8(values, alias); + } else if (values.length < 65536) { + return ENUM16(values, alias); + } else if (values.length < 4294967296) { + return ENUM32(values, alias); + } else { + throw new Error( + `ENUM types cannot have more than 4294967295 values; received ${values.length}`, + ); + } +} + +export class DuckDBListType extends BaseDuckDBType { + public readonly valueType: DuckDBType; + public constructor(valueType: DuckDBType, alias?: string) { + super(DuckDBTypeId.LIST, alias); + this.valueType = valueType; + } + public toString(options?: DuckDBTypeToStringOptions): string { + return this.alias ?? 
`${this.valueType.toString(options)}[]`; + } + public override toJson(): Json { + return { + typeId: this.typeId, + valueType: this.valueType.toJson(), + ...(this.alias ? { alias: this.alias } : {}), + }; + } +} +export function LIST(valueType: DuckDBType, alias?: string): DuckDBListType { + return new DuckDBListType(valueType, alias); +} + +export class DuckDBStructType extends BaseDuckDBType { + public readonly entryNames: readonly string[]; + public readonly entryTypes: readonly DuckDBType[]; + public readonly entryIndexes: Readonly>; + public constructor( + entryNames: readonly string[], + entryTypes: readonly DuckDBType[], + alias?: string, + ) { + super(DuckDBTypeId.STRUCT, alias); + if (entryNames.length !== entryTypes.length) { + throw new Error(`Could not create DuckDBStructType: \ + entryNames length (${entryNames.length}) does not match entryTypes length (${entryTypes.length})`); + } + this.entryNames = entryNames; + this.entryTypes = entryTypes; + const entryIndexes: Record = {}; + for (let i = 0; i < entryNames.length; i++) { + entryIndexes[entryNames[i]] = i; + } + this.entryIndexes = entryIndexes; + } + public get entryCount() { + return this.entryNames.length; + } + public indexForEntry(entryName: string): number { + return this.entryIndexes[entryName]; + } + public typeForEntry(entryName: string): DuckDBType { + return this.entryTypes[this.entryIndexes[entryName]]; + } + public toString(options?: DuckDBTypeToStringOptions): string { + if (this.alias) { + return this.alias; + } + if (options?.short) { + return `STRUCT(…)`; + } + const parts: string[] = []; + for (let i = 0; i < this.entryNames.length; i++) { + parts.push( + `${quotedIdentifier(this.entryNames[i])} ${this.entryTypes[i]}`, + ); + } + return `STRUCT(${parts.join(', ')})`; + } + public override toJson(): Json { + return { + typeId: this.typeId, + entryNames: [...this.entryNames], + entryTypes: this.entryTypes.map((t) => t.toJson()), + ...(this.alias ? 
{ alias: this.alias } : {}), + }; + } +} +export function STRUCT( + entries: Record, + alias?: string, +): DuckDBStructType { + const entryNames = Object.keys(entries); + const entryTypes = Object.values(entries); + return new DuckDBStructType(entryNames, entryTypes, alias); +} + +export class DuckDBMapType extends BaseDuckDBType { + public readonly keyType: DuckDBType; + public readonly valueType: DuckDBType; + public constructor( + keyType: DuckDBType, + valueType: DuckDBType, + alias?: string, + ) { + super(DuckDBTypeId.MAP, alias); + this.keyType = keyType; + this.valueType = valueType; + } + public toString(options?: DuckDBTypeToStringOptions): string { + if (this.alias) { + return this.alias; + } + if (options?.short) { + return `MAP(…)`; + } + return `MAP(${this.keyType}, ${this.valueType})`; + } + public override toJson(): Json { + return { + typeId: this.typeId, + keyType: this.keyType.toJson(), + valueType: this.valueType.toJson(), + ...(this.alias ? { alias: this.alias } : {}), + }; + } +} +export function MAP( + keyType: DuckDBType, + valueType: DuckDBType, + alias?: string, +): DuckDBMapType { + return new DuckDBMapType(keyType, valueType, alias); +} + +export class DuckDBArrayType extends BaseDuckDBType { + public readonly valueType: DuckDBType; + public readonly length: number; + public constructor(valueType: DuckDBType, length: number, alias?: string) { + super(DuckDBTypeId.ARRAY, alias); + this.valueType = valueType; + this.length = length; + } + public toString(options?: DuckDBTypeToStringOptions): string { + return this.alias ?? `${this.valueType.toString(options)}[${this.length}]`; + } + public override toJson(): Json { + return { + typeId: this.typeId, + valueType: this.valueType.toJson(), + length: this.length, + ...(this.alias ? 
{ alias: this.alias } : {}), + }; + } +} +export function ARRAY( + valueType: DuckDBType, + length: number, + alias?: string, +): DuckDBArrayType { + return new DuckDBArrayType(valueType, length, alias); +} + +export class DuckDBUUIDType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.UUID, alias); + } + public static readonly instance = new DuckDBUUIDType(); + public static create(alias?: string): DuckDBUUIDType { + return alias ? new DuckDBUUIDType(alias) : DuckDBUUIDType.instance; + } + // TODO: common DuckDBValues on type objects + // public get max() { + // return DuckDBUUIDValue.Max; + // } + // public get min() { + // return DuckDBUUIDValue.Min; + // } +} +export const UUID = DuckDBUUIDType.instance; + +export class DuckDBUnionType extends BaseDuckDBType { + public readonly memberTags: readonly string[]; + public readonly tagMemberIndexes: Readonly>; + public readonly memberTypes: readonly DuckDBType[]; + public constructor( + memberTags: readonly string[], + memberTypes: readonly DuckDBType[], + alias?: string, + ) { + super(DuckDBTypeId.UNION, alias); + if (memberTags.length !== memberTypes.length) { + throw new Error(`Could not create DuckDBUnionType: \ + tags length (${memberTags.length}) does not match valueTypes length (${memberTypes.length})`); + } + this.memberTags = memberTags; + const tagMemberIndexes: Record = {}; + for (let i = 0; i < memberTags.length; i++) { + tagMemberIndexes[memberTags[i]] = i; + } + this.tagMemberIndexes = tagMemberIndexes; + this.memberTypes = memberTypes; + } + public memberIndexForTag(tag: string): number { + return this.tagMemberIndexes[tag]; + } + public memberTypeForTag(tag: string): DuckDBType { + return this.memberTypes[this.tagMemberIndexes[tag]]; + } + public get memberCount() { + return this.memberTags.length; + } + public toString(options?: DuckDBTypeToStringOptions): string { + if (this.alias) { + return this.alias; + } + if (options?.short) { + return `UNION(…)`; + } + const 
parts: string[] = []; + for (let i = 0; i < this.memberTags.length; i++) { + parts.push( + `${quotedIdentifier(this.memberTags[i])} ${this.memberTypes[i]}`, + ); + } + return `UNION(${parts.join(', ')})`; + } + public override toJson(): Json { + return { + typeId: this.typeId, + memberTags: [...this.memberTags], + memberTypes: this.memberTypes.map((t) => t.toJson()), + ...(this.alias ? { alias: this.alias } : {}), + }; + } +} +export function UNION( + members: Record, + alias?: string, +): DuckDBUnionType { + const memberTags = Object.keys(members); + const memberTypes = Object.values(members); + return new DuckDBUnionType(memberTags, memberTypes, alias); +} + +export class DuckDBBitType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.BIT, alias); + } + public static readonly instance = new DuckDBBitType(); + public static create(alias?: string): DuckDBBitType { + return alias ? new DuckDBBitType(alias) : DuckDBBitType.instance; + } +} +export const BIT = DuckDBBitType.instance; + +export class DuckDBTimeTZType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.TIME_TZ, alias); + } + public toString(options?: DuckDBTypeToStringOptions): string { + if (this.alias) { + return this.alias; + } + if (options?.short) { + return 'TIMETZ'; + } + return 'TIME WITH TIME ZONE'; + } + public static readonly instance = new DuckDBTimeTZType(); + public static create(alias?: string): DuckDBTimeTZType { + return alias ? 
new DuckDBTimeTZType(alias) : DuckDBTimeTZType.instance; + } + // TODO: common DuckDBValues on type objects + // public get max() { + // return DuckDBTimeTZValue.Max; + // } + // public get min() { + // return DuckDBTimeTZValue.Min; + // } +} +export const TIMETZ = DuckDBTimeTZType.instance; + +export class DuckDBTimestampTZType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.TIMESTAMP_TZ, alias); + } + public toString(options?: DuckDBTypeToStringOptions): string { + if (this.alias) { + return this.alias; + } + if (options?.short) { + return 'TIMESTAMPTZ'; + } + return 'TIMESTAMP WITH TIME ZONE'; + } + public static readonly instance = new DuckDBTimestampTZType(); + public static create(alias?: string): DuckDBTimestampTZType { + return alias + ? new DuckDBTimestampTZType(alias) + : DuckDBTimestampTZType.instance; + } + // TODO: common DuckDBValues on type objects + // public get epoch() { + // return DuckDBTimestampTZValue.Epoch; + // } + // public get max() { + // return DuckDBTimestampTZValue.Max; + // } + // public get min() { + // return DuckDBTimestampTZValue.Min; + // } + // public get posInf() { + // return DuckDBTimestampTZValue.PosInf; + // } + // public get negInf() { + // return DuckDBTimestampTZValue.NegInf; + // } +} +export const TIMESTAMPTZ = DuckDBTimestampTZType.instance; + +export class DuckDBAnyType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.ANY, alias); + } + public static readonly instance = new DuckDBAnyType(); + public static create(alias?: string): DuckDBAnyType { + return alias ? new DuckDBAnyType(alias) : DuckDBAnyType.instance; + } +} +export const ANY = DuckDBAnyType.instance; + +export class DuckDBVarIntType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.VARINT, alias); + } + public static readonly instance = new DuckDBVarIntType(); + public static create(alias?: string): DuckDBVarIntType { + return alias ? 
new DuckDBVarIntType(alias) : DuckDBVarIntType.instance; + } + public static readonly Max: bigint = + 179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n; + public static readonly Min: bigint = + -179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n; + public get max() { + return DuckDBVarIntType.Max; + } + public get min() { + return DuckDBVarIntType.Min; + } +} +export const VARINT = DuckDBVarIntType.instance; + +export class DuckDBSQLNullType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.SQLNULL, alias); + } + public static readonly instance = new DuckDBSQLNullType(); + public static create(alias?: string): DuckDBSQLNullType { + return alias ? new DuckDBSQLNullType(alias) : DuckDBSQLNullType.instance; + } +} +export const SQLNULL = DuckDBSQLNullType.instance; + +export class DuckDBStringLiteralType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.STRING_LITERAL, alias); + } + public static readonly instance = new DuckDBStringLiteralType(); + public static create(alias?: string): DuckDBStringLiteralType { + return alias + ? 
new DuckDBStringLiteralType(alias) + : DuckDBStringLiteralType.instance; + } +} +export const STRING_LITERAL = DuckDBStringLiteralType.instance; + +export class DuckDBIntegerLiteralType extends BaseDuckDBType { + public constructor(alias?: string) { + super(DuckDBTypeId.INTEGER_LITERAL, alias); + } + public static readonly instance = new DuckDBIntegerLiteralType(); + public static create(alias?: string): DuckDBIntegerLiteralType { + return alias + ? new DuckDBIntegerLiteralType(alias) + : DuckDBIntegerLiteralType.instance; + } +} +export const INTEGER_LITERAL = DuckDBIntegerLiteralType.instance; + +export type DuckDBType = + | DuckDBBooleanType + | DuckDBTinyIntType + | DuckDBSmallIntType + | DuckDBIntegerType + | DuckDBBigIntType + | DuckDBUTinyIntType + | DuckDBUSmallIntType + | DuckDBUIntegerType + | DuckDBUBigIntType + | DuckDBFloatType + | DuckDBDoubleType + | DuckDBTimestampType + | DuckDBDateType + | DuckDBTimeType + | DuckDBIntervalType + | DuckDBHugeIntType + | DuckDBUHugeIntType + | DuckDBVarCharType + | DuckDBBlobType + | DuckDBDecimalType + | DuckDBTimestampSecondsType + | DuckDBTimestampMillisecondsType + | DuckDBTimestampNanosecondsType + | DuckDBEnumType + | DuckDBListType + | DuckDBStructType + | DuckDBMapType + | DuckDBArrayType + | DuckDBUUIDType + | DuckDBUnionType + | DuckDBBitType + | DuckDBTimeTZType + | DuckDBTimestampTZType + | DuckDBAnyType + | DuckDBVarIntType + | DuckDBSQLNullType + | DuckDBStringLiteralType + | DuckDBIntegerLiteralType; diff --git a/ts/pkgs/duckdb-data-types/src/DuckDBTypeId.ts b/ts/pkgs/duckdb-data-types/src/DuckDBTypeId.ts new file mode 100644 index 0000000..073cdab --- /dev/null +++ b/ts/pkgs/duckdb-data-types/src/DuckDBTypeId.ts @@ -0,0 +1,42 @@ +// copy of DUCKDB_TYPE from the C API, with names shortened +export enum DuckDBTypeId { + INVALID = 0, + BOOLEAN = 1, + TINYINT = 2, + SMALLINT = 3, + INTEGER = 4, + BIGINT = 5, + UTINYINT = 6, + USMALLINT = 7, + UINTEGER = 8, + UBIGINT = 9, + FLOAT = 10, + DOUBLE = 11, + 
TIMESTAMP = 12, + DATE = 13, + TIME = 14, + INTERVAL = 15, + HUGEINT = 16, + UHUGEINT = 32, + VARCHAR = 17, + BLOB = 18, + DECIMAL = 19, + TIMESTAMP_S = 20, + TIMESTAMP_MS = 21, + TIMESTAMP_NS = 22, + ENUM = 23, + LIST = 24, + STRUCT = 25, + MAP = 26, + ARRAY = 33, + UUID = 27, + UNION = 28, + BIT = 29, + TIME_TZ = 30, + TIMESTAMP_TZ = 31, + ANY = 34, + VARINT = 35, + SQLNULL = 36, + STRING_LITERAL = 37, + INTEGER_LITERAL = 38, +} diff --git a/ts/pkgs/duckdb-data-types/src/extensionTypes.ts b/ts/pkgs/duckdb-data-types/src/extensionTypes.ts new file mode 100644 index 0000000..ec94f65 --- /dev/null +++ b/ts/pkgs/duckdb-data-types/src/extensionTypes.ts @@ -0,0 +1,46 @@ +import { + DOUBLE, + DuckDBBlobType, + DuckDBVarCharType, + FLOAT, + HUGEINT, + LIST, + STRUCT, + USMALLINT, + UTINYINT, +} from './DuckDBType.js'; + +// see https://github.com/duckdb/duckdb-inet/blob/main/src/inet_extension.cpp +export const INET = STRUCT( + { ip_type: UTINYINT, address: HUGEINT, mask: USMALLINT }, + 'INET', +); + +// see LogicalType::JSON() in https://github.com/duckdb/duckdb/blob/main/src/common/types.cpp +export const JSONType = DuckDBVarCharType.create('JSON'); + +// see https://github.com/duckdb/duckdb-spatial/blob/main/src/spatial/spatial_types.cpp +export const BOX_2D = STRUCT( + { min_x: DOUBLE, min_y: DOUBLE, max_x: DOUBLE, max_y: DOUBLE }, + 'BOX_2D', +); +export const BOX_2DF = STRUCT( + { min_x: FLOAT, min_y: FLOAT, max_x: FLOAT, max_y: FLOAT }, + 'BOX_2DF', +); +export const GEOMETRY = DuckDBBlobType.create('GEOMETRY'); +export const LINESTRING_2D = LIST( + STRUCT({ x: DOUBLE, y: DOUBLE }), + 'LINESTRING_2D', +); +export const POINT_2D = STRUCT({ x: DOUBLE, y: DOUBLE }, 'POINT_2D'); +export const POINT_3D = STRUCT({ x: DOUBLE, y: DOUBLE, z: DOUBLE }, 'POINT_3D'); +export const POINT_4D = STRUCT( + { x: DOUBLE, y: DOUBLE, z: DOUBLE, m: DOUBLE }, + 'POINT_4D', +); +export const POLYGON_2D = LIST( + LIST(STRUCT({ x: DOUBLE, y: DOUBLE })), + 'POLYGON_2D', +); +export const 
WKB_BLOB = DuckDBBlobType.create('WKB_BLOB'); diff --git a/ts/pkgs/duckdb-data-types/src/index.ts b/ts/pkgs/duckdb-data-types/src/index.ts new file mode 100644 index 0000000..1e2b881 --- /dev/null +++ b/ts/pkgs/duckdb-data-types/src/index.ts @@ -0,0 +1,4 @@ +export * from './DuckDBType.js'; +export * from './DuckDBTypeId.js'; +export * from './extensionTypes.js'; +export * from './parseLogicalTypeString.js'; diff --git a/ts/pkgs/duckdb-data-types/src/parseLogicalTypeString.ts b/ts/pkgs/duckdb-data-types/src/parseLogicalTypeString.ts new file mode 100644 index 0000000..47d48c3 --- /dev/null +++ b/ts/pkgs/duckdb-data-types/src/parseLogicalTypeString.ts @@ -0,0 +1,286 @@ +import { + ARRAY, + BIGINT, + BIT, + BLOB, + BOOLEAN, + DATE, + DECIMAL, + DOUBLE, + DuckDBMapType, + DuckDBStructType, + DuckDBType, + DuckDBUnionType, + ENUM, + FLOAT, + HUGEINT, + INTEGER, + INTERVAL, + LIST, + MAP, + SMALLINT, + SQLNULL, + STRUCT, + TIME, + TIMESTAMP, + TIMESTAMP_MS, + TIMESTAMP_NS, + TIMESTAMP_S, + TIMESTAMPTZ, + TIMETZ, + TINYINT, + UBIGINT, + UHUGEINT, + UINTEGER, + UNION, + USMALLINT, + UTINYINT, + UUID, + VARCHAR, + VARINT, +} from './DuckDBType.js'; +import { + BOX_2D, + BOX_2DF, + GEOMETRY, + INET, + JSONType, + LINESTRING_2D, + POINT_2D, + POINT_3D, + POINT_4D, + POLYGON_2D, + WKB_BLOB, +} from './extensionTypes.js'; + +const simpleTypeMap: Record = { + BIGINT: BIGINT, + BIT: BIT, + BOOLEAN: BOOLEAN, + BLOB: BLOB, + BOX_2D: BOX_2D, + BOX_2DF: BOX_2DF, + DATE: DATE, + DOUBLE: DOUBLE, + FLOAT: FLOAT, + GEOMETRY: GEOMETRY, + HUGEINT: HUGEINT, + INET: INET, + INTEGER: INTEGER, + INTERVAL: INTERVAL, + JSON: JSONType, + LINESTRING_2D: LINESTRING_2D, + POINT_2D: POINT_2D, + POINT_3D: POINT_3D, + POINT_4D: POINT_4D, + POLYGON_2D: POLYGON_2D, + SMALLINT: SMALLINT, + SQLNULL: SQLNULL, + TIME: TIME, + 'TIME WITH TIME ZONE': TIMETZ, + TIMESTAMP: TIMESTAMP, + 'TIMESTAMP WITH TIME ZONE': TIMESTAMPTZ, + TIMESTAMP_S: TIMESTAMP_S, + TIMESTAMP_MS: TIMESTAMP_MS, + TIMESTAMP_NS: 
TIMESTAMP_NS, + TINYINT: TINYINT, + UBIGINT: UBIGINT, + UHUGEINT: UHUGEINT, + UINTEGER: UINTEGER, + USMALLINT: USMALLINT, + UTINYINT: UTINYINT, + UUID: UUID, + VARCHAR: VARCHAR, + VARINT: VARINT, + WKB_BLOB: WKB_BLOB, +}; + +function matchStructMapOrUnion( + typeString: string, +): DuckDBStructType | DuckDBMapType | DuckDBUnionType | undefined { + typeString = typeString.trim(); + + const fields = parseStructLike(typeString); + if (!fields) { + return undefined; + } + + if (typeString.startsWith('STRUCT')) { + const entries: Record = {}; + for (const field of fields) { + if (field.key && field.type) { + entries[field.key] = field.type; + } + } + return STRUCT(entries); + } + if (typeString.startsWith('MAP')) { + const keyType = fields[0]?.type; + const valueType = fields[1]?.type; + if (keyType && valueType) { + return MAP(keyType, valueType); + } + } + if (typeString.startsWith('UNION')) { + const members: Record = {}; + for (const field of fields) { + if (field.key && field.type) { + members[field.key] = field.type; + } + } + return UNION(members); + } + return undefined; +} + +function parseStructLike(typeString: string): ParsedField[] | undefined { + const structPattern = /^(STRUCT|MAP|UNION)\s*\((.*)\)$/; + const match = structPattern.exec(typeString); + if (!match) { + return undefined; + } + + const fieldsString = match[2]; + return parseFields(fieldsString); +} + +/** Parse the fields substring. We do this by counting parens and double quotes. + * When checking for double-quotes, we only need to count an even number of them + * to count brackets, since in cases where there escaped double quotes inside + * a double-quoted string, the double quotes appear adjacent to each other, + * always incrementing the count by 2 before there could theoretically be another + * paren. 
+ */ +function parseFields(fieldsString: string): ParsedField[] { + const fields: ParsedField[] = []; + let currentFieldStartIndex: number | null = null; + let parenCount = 0; + let quoteCount = 0; + + for (let i = 0; i < fieldsString.length; i++) { + const char = fieldsString[i]; + + if ( + currentFieldStartIndex === null && + char !== '(' && + char !== ')' && + char !== ',' + ) { + currentFieldStartIndex = i; + } + + if (char === '"') { + quoteCount++; + } + + if ( + char === ',' && + parenCount === 0 && + quoteCount % 2 === 0 && + currentFieldStartIndex !== null + ) { + const field = fieldsString.slice(currentFieldStartIndex, i); + fields.push(parseField(field.trim())); + currentFieldStartIndex = null; + } else { + if (char === '(' && quoteCount % 2 === 0) parenCount++; + if (char === ')' && quoteCount % 2 === 0) parenCount--; + } + } + + if (currentFieldStartIndex !== null) { + const lastField = fieldsString.slice(currentFieldStartIndex); + fields.push(parseField(lastField.trim())); + } + return fields; +} + +interface ParsedField { + key?: string; + type?: DuckDBType; +} + +function parseField(fieldString: string): ParsedField { + const fieldPattern = /^(".*?"|\w+)\s+(.+)$/; + const match = fieldPattern.exec(fieldString); + if (match) { + const key = match[1]; + const type = parseLogicalTypeString(match[2].trim()); + return { key, type }; + } else { + const type = parseLogicalTypeString(fieldString); + return { type }; + } +} + +function matchDecimal(typeString: string) { + const match = typeString.match(/^DECIMAL\((\d+),(\d+)\)$/); + if (match) { + return DECIMAL(Number(match[1]), Number(match[2])); + } + return undefined; +} + +function matchEnum(typeString: string) { + const match = /ENUM\(([^)]*)\)/i.exec(typeString); + if (match) { + const matches = match[1].matchAll(/'((?:[^']|'')*)'/g); + const values: string[] = []; + for (const match of matches) { + values.push(match[1].replace(/''/, `'`)); + } + return ENUM(values); + } + return undefined; +} + 
+function matchList(typeString: string) { + if (typeString.endsWith('[]')) { + const innerType = typeString.slice(0, -2); + return LIST(parseLogicalTypeString(innerType)); + } + return undefined; +} + +function matchArray(typeString: string) { + const match = typeString.match(/\[(\d+)\]$/); + if (match) { + const innerType = typeString.slice(0, -match[0].length); + const length = match[1]; + return ARRAY(parseLogicalTypeString(innerType), Number(length)); + } + return undefined; +} + +export function parseLogicalTypeString(typeString: string): DuckDBType { + if (typeString in simpleTypeMap) { + return simpleTypeMap[typeString]; + } + + const listType = matchList(typeString); + if (listType) { + return listType; + } + + const arrayType = matchArray(typeString); + if (arrayType) { + return arrayType; + } + + const decimalType = matchDecimal(typeString); + if (decimalType) { + return decimalType; + } + + const enumType = matchEnum(typeString); + if (enumType) { + return enumType; + } + + const structMapOrUnionType = matchStructMapOrUnion(typeString); + if (structMapOrUnionType) { + return structMapOrUnionType; + } + + throw Error(`unimplemented type match: ${typeString}`); +} diff --git a/ts/pkgs/duckdb-data-types/src/sql.ts b/ts/pkgs/duckdb-data-types/src/sql.ts new file mode 100644 index 0000000..91460da --- /dev/null +++ b/ts/pkgs/duckdb-data-types/src/sql.ts @@ -0,0 +1,7 @@ +export function quotedString(input: string): string { + return `'${input.replace(`'`, `''`)}'`; +} + +export function quotedIdentifier(input: string): string { + return `"${input.replace(`"`, `""`)}"`; +} diff --git a/ts/pkgs/duckdb-data-types/src/tsconfig.json b/ts/pkgs/duckdb-data-types/src/tsconfig.json new file mode 100644 index 0000000..f970179 --- /dev/null +++ b/ts/pkgs/duckdb-data-types/src/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "../../../tsconfig.library.json", + "compilerOptions": { + "outDir": "../out" + } +} diff --git a/ts/pkgs/duckdb-data-types/test/DuckDBType.test.ts 
b/ts/pkgs/duckdb-data-types/test/DuckDBType.test.ts new file mode 100644 index 0000000..f074ab1 --- /dev/null +++ b/ts/pkgs/duckdb-data-types/test/DuckDBType.test.ts @@ -0,0 +1,1025 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBTypeId } from '../src'; +import { + ANY, + ARRAY, + BIGINT, + BIT, + BLOB, + BOOLEAN, + DATE, + DECIMAL, + DOUBLE, + DuckDBAnyType, + DuckDBBigIntType, + DuckDBBitType, + DuckDBBlobType, + DuckDBBooleanType, + DuckDBDateType, + DuckDBDoubleType, + DuckDBFloatType, + DuckDBHugeIntType, + DuckDBIntegerLiteralType, + DuckDBIntegerType, + DuckDBIntervalType, + DuckDBSmallIntType, + DuckDBSQLNullType, + DuckDBStringLiteralType, + DuckDBTimestampMillisecondsType, + DuckDBTimestampNanosecondsType, + DuckDBTimestampSecondsType, + DuckDBTimestampType, + DuckDBTimestampTZType, + DuckDBTimeType, + DuckDBTimeTZType, + DuckDBTinyIntType, + DuckDBUBigIntType, + DuckDBUHugeIntType, + DuckDBUIntegerType, + DuckDBUSmallIntType, + DuckDBUTinyIntType, + DuckDBUUIDType, + DuckDBVarCharType, + DuckDBVarIntType, + ENUM, + FLOAT, + HUGEINT, + INTEGER, + INTEGER_LITERAL, + INTERVAL, + LIST, + MAP, + SMALLINT, + SQLNULL, + STRING_LITERAL, + STRUCT, + TIME, + TIMESTAMP, + TIMESTAMP_MS, + TIMESTAMP_NS, + TIMESTAMP_S, + TIMESTAMPTZ, + TIMETZ, + TINYINT, + UBIGINT, + UHUGEINT, + UINTEGER, + UNION, + USMALLINT, + UTINYINT, + UUID, + VARCHAR, + VARINT, +} from '../src/DuckDBType'; + +suite('DuckDBBooleanType', () => { + test('toString', () => { + expect(BOOLEAN.toString()).toBe('BOOLEAN'); + }); + test('toString short', () => { + expect(BOOLEAN.toString({ short: true })).toBe('BOOLEAN'); + }); + test('toString with alias', () => { + expect(DuckDBBooleanType.create('mybool').toString()).toBe('mybool'); + }); + test('toJson', () => { + expect(BOOLEAN.toJson()).toStrictEqual({ typeId: DuckDBTypeId.BOOLEAN }); + }); + test('toJson with alias', () => { + expect(DuckDBBooleanType.create('mybool').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.BOOLEAN, + 
alias: 'mybool', + }); + }); +}); + +suite('DuckDBTinyIntType', () => { + test('toString', () => { + expect(TINYINT.toString()).toBe('TINYINT'); + }); + test('toString short', () => { + expect(TINYINT.toString({ short: true })).toBe('TINYINT'); + }); + test('toString with alias', () => { + expect(DuckDBTinyIntType.create('mytinyint').toString()).toBe('mytinyint'); + }); + test('toJson', () => { + expect(TINYINT.toJson()).toStrictEqual({ typeId: DuckDBTypeId.TINYINT }); + }); + test('toJson with alias', () => { + expect(DuckDBTinyIntType.create('mytinyint').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.TINYINT, + alias: 'mytinyint', + }); + }); +}); + +suite('DuckDBSmallIntType', () => { + test('toString', () => { + expect(SMALLINT.toString()).toBe('SMALLINT'); + }); + test('toString short', () => { + expect(SMALLINT.toString({ short: true })).toBe('SMALLINT'); + }); + test('toString with alias', () => { + expect(DuckDBSmallIntType.create('mysmallint').toString()).toBe( + 'mysmallint', + ); + }); + test('toJson', () => { + expect(SMALLINT.toJson()).toStrictEqual({ typeId: DuckDBTypeId.SMALLINT }); + }); + test('toJson with alias', () => { + expect(DuckDBSmallIntType.create('mysmallint').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.SMALLINT, + alias: 'mysmallint', + }); + }); +}); + +suite('DuckDBIntegerType', () => { + test('toString', () => { + expect(INTEGER.toString()).toBe('INTEGER'); + }); + test('toString short', () => { + expect(INTEGER.toString({ short: true })).toBe('INTEGER'); + }); + test('toString with alias', () => { + expect(DuckDBIntegerType.create('myinteger').toString()).toBe('myinteger'); + }); + test('toJson', () => { + expect(INTEGER.toJson()).toStrictEqual({ typeId: DuckDBTypeId.INTEGER }); + }); + test('toJson with alias', () => { + expect(DuckDBIntegerType.create('myinteger').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.INTEGER, + alias: 'myinteger', + }); + }); +}); + +suite('DuckDBBigIntType', () => { + test('toString', () => { + 
expect(BIGINT.toString()).toBe('BIGINT'); + }); + test('toString short', () => { + expect(BIGINT.toString({ short: true })).toBe('BIGINT'); + }); + test('toString with alias', () => { + expect(DuckDBBigIntType.create('mybigint').toString()).toBe('mybigint'); + }); + test('toJson', () => { + expect(BIGINT.toJson()).toStrictEqual({ typeId: DuckDBTypeId.BIGINT }); + }); + test('toJson with alias', () => { + expect(DuckDBBigIntType.create('mybigint').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.BIGINT, + alias: 'mybigint', + }); + }); +}); + +suite('DuckDBUTinyIntType', () => { + test('toString', () => { + expect(UTINYINT.toString()).toBe('UTINYINT'); + }); + test('toString short', () => { + expect(UTINYINT.toString({ short: true })).toBe('UTINYINT'); + }); + test('toString with alias', () => { + expect(DuckDBUTinyIntType.create('myutinyint').toString()).toBe( + 'myutinyint', + ); + }); + test('toJson', () => { + expect(UTINYINT.toJson()).toStrictEqual({ typeId: DuckDBTypeId.UTINYINT }); + }); + test('toJson with alias', () => { + expect(DuckDBUTinyIntType.create('myutinyint').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.UTINYINT, + alias: 'myutinyint', + }); + }); +}); + +suite('DuckDBUSmallIntType', () => { + test('toString', () => { + expect(USMALLINT.toString()).toBe('USMALLINT'); + }); + test('toString short', () => { + expect(USMALLINT.toString({ short: true })).toBe('USMALLINT'); + }); + test('toString with alias', () => { + expect(DuckDBUSmallIntType.create('myusmallint').toString()).toBe( + 'myusmallint', + ); + }); + test('toJson', () => { + expect(USMALLINT.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.USMALLINT, + }); + }); + test('toJson with alias', () => { + expect(DuckDBUSmallIntType.create('myusmallint').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.USMALLINT, + alias: 'myusmallint', + }); + }); +}); + +suite('DuckDBUIntegerType', () => { + test('toString', () => { + expect(UINTEGER.toString()).toBe('UINTEGER'); + }); + test('toString 
short', () => { + expect(UINTEGER.toString({ short: true })).toBe('UINTEGER'); + }); + test('toString with alias', () => { + expect(DuckDBUIntegerType.create('myuinteger').toString()).toBe( + 'myuinteger', + ); + }); + test('toJson', () => { + expect(UINTEGER.toJson()).toStrictEqual({ typeId: DuckDBTypeId.UINTEGER }); + }); + test('toJson with alias', () => { + expect(DuckDBUIntegerType.create('myuinteger').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.UINTEGER, + alias: 'myuinteger', + }); + }); +}); + +suite('DuckDBUBigIntType', () => { + test('toString', () => { + expect(UBIGINT.toString()).toBe('UBIGINT'); + }); + test('toString short', () => { + expect(UBIGINT.toString({ short: true })).toBe('UBIGINT'); + }); + test('toString with alias', () => { + expect(DuckDBUBigIntType.create('myubigint').toString()).toBe('myubigint'); + }); + test('toJson', () => { + expect(UBIGINT.toJson()).toStrictEqual({ typeId: DuckDBTypeId.UBIGINT }); + }); + test('toJson with alias', () => { + expect(DuckDBUBigIntType.create('myubigint').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.UBIGINT, + alias: 'myubigint', + }); + }); +}); + +suite('DuckDBFloatType', () => { + test('toString', () => { + expect(FLOAT.toString()).toBe('FLOAT'); + }); + test('toString short', () => { + expect(FLOAT.toString({ short: true })).toBe('FLOAT'); + }); + test('toString with alias', () => { + expect(DuckDBFloatType.create('myfloat').toString()).toBe('myfloat'); + }); + test('toJson', () => { + expect(FLOAT.toJson()).toStrictEqual({ typeId: DuckDBTypeId.FLOAT }); + }); + test('toJson with alias', () => { + expect(DuckDBFloatType.create('myfloat').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.FLOAT, + alias: 'myfloat', + }); + }); +}); + +suite('DuckDBDoubleType', () => { + test('toString', () => { + expect(DOUBLE.toString()).toBe('DOUBLE'); + }); + test('toString short', () => { + expect(DOUBLE.toString({ short: true })).toBe('DOUBLE'); + }); + test('toString with alias', () => { + 
expect(DuckDBDoubleType.create('mydouble').toString()).toBe('mydouble'); + }); + test('toJson', () => { + expect(DOUBLE.toJson()).toStrictEqual({ typeId: DuckDBTypeId.DOUBLE }); + }); + test('toJson with alias', () => { + expect(DuckDBDoubleType.create('mydouble').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.DOUBLE, + alias: 'mydouble', + }); + }); +}); + +suite('DuckDBTimestampType', () => { + test('toString', () => { + expect(TIMESTAMP.toString()).toBe('TIMESTAMP'); + }); + test('toString short', () => { + expect(TIMESTAMP.toString({ short: true })).toBe('TIMESTAMP'); + }); + test('toString with alias', () => { + expect(DuckDBTimestampType.create('mytimestamp').toString()).toBe( + 'mytimestamp', + ); + }); + test('toJson', () => { + expect(TIMESTAMP.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.TIMESTAMP, + }); + }); + test('toJson with alias', () => { + expect(DuckDBTimestampType.create('mytimestamp').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.TIMESTAMP, + alias: 'mytimestamp', + }); + }); +}); + +suite('DuckDBDateType', () => { + test('toString', () => { + expect(DATE.toString()).toBe('DATE'); + }); + test('toString short', () => { + expect(DATE.toString({ short: true })).toBe('DATE'); + }); + test('toString with alias', () => { + expect(DuckDBDateType.create('mydate').toString()).toBe('mydate'); + }); + test('toJson', () => { + expect(DATE.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.DATE, + }); + }); + test('toJson with alias', () => { + expect(DuckDBDateType.create('mydate').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.DATE, + alias: 'mydate', + }); + }); +}); + +suite('DuckDBTimeType', () => { + test('toString', () => { + expect(TIME.toString()).toBe('TIME'); + }); + test('toString short', () => { + expect(TIME.toString({ short: true })).toBe('TIME'); + }); + test('toString with alias', () => { + expect(DuckDBTimeType.create('mytime').toString()).toBe('mytime'); + }); + test('toJson', () => { + expect(TIME.toJson()).toStrictEqual({ + 
typeId: DuckDBTypeId.TIME, + }); + }); + test('toJson with alias', () => { + expect(DuckDBTimeType.create('mytime').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.TIME, + alias: 'mytime', + }); + }); +}); + +suite('DuckDBIntervalType', () => { + test('toString', () => { + expect(INTERVAL.toString()).toBe('INTERVAL'); + }); + test('toString short', () => { + expect(INTERVAL.toString({ short: true })).toBe('INTERVAL'); + }); + test('toString with alias', () => { + expect(DuckDBIntervalType.create('myinterval').toString()).toBe( + 'myinterval', + ); + }); + test('toJson', () => { + expect(INTERVAL.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.INTERVAL, + }); + }); + test('toJson with alias', () => { + expect(DuckDBIntervalType.create('myinterval').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.INTERVAL, + alias: 'myinterval', + }); + }); +}); + +suite('DuckDBHugeIntType', () => { + test('toString', () => { + expect(HUGEINT.toString()).toBe('HUGEINT'); + }); + test('toString short', () => { + expect(HUGEINT.toString({ short: true })).toBe('HUGEINT'); + }); + test('toString with alias', () => { + expect(DuckDBHugeIntType.create('myhugeint').toString()).toBe('myhugeint'); + }); + test('toJson', () => { + expect(HUGEINT.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.HUGEINT, + }); + }); + test('toJson with alias', () => { + expect(DuckDBHugeIntType.create('myhugeint').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.HUGEINT, + alias: 'myhugeint', + }); + }); +}); + +suite('DuckDBUHugeIntType', () => { + test('toString', () => { + expect(UHUGEINT.toString()).toBe('UHUGEINT'); + }); + test('toString short', () => { + expect(UHUGEINT.toString({ short: true })).toBe('UHUGEINT'); + }); + test('toString with alias', () => { + expect(DuckDBUHugeIntType.create('myuhugeint').toString()).toBe( + 'myuhugeint', + ); + }); + test('toJson', () => { + expect(UHUGEINT.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.UHUGEINT, + }); + }); + test('toJson with alias', () => { + 
expect(DuckDBUHugeIntType.create('myuhugeint').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.UHUGEINT, + alias: 'myuhugeint', + }); + }); +}); + +suite('DuckDBVarCharType', () => { + test('toString', () => { + expect(VARCHAR.toString()).toBe('VARCHAR'); + }); + test('toString short', () => { + expect(VARCHAR.toString({ short: true })).toBe('VARCHAR'); + }); + test('toString with alias', () => { + expect(DuckDBVarCharType.create('myvarchar').toString()).toBe('myvarchar'); + }); + test('toJson', () => { + expect(VARCHAR.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.VARCHAR, + }); + }); + test('toJson with alias', () => { + expect(DuckDBVarCharType.create('myvarchar').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.VARCHAR, + alias: 'myvarchar', + }); + }); +}); + +suite('DuckDBBlobType', () => { + test('toString', () => { + expect(BLOB.toString()).toBe('BLOB'); + }); + test('toString short', () => { + expect(BLOB.toString({ short: true })).toBe('BLOB'); + }); + test('toString with alias', () => { + expect(DuckDBBlobType.create('myblob').toString()).toBe('myblob'); + }); + test('toJson', () => { + expect(BLOB.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.BLOB, + }); + }); + test('toJson with alias', () => { + expect(DuckDBBlobType.create('myblob').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.BLOB, + alias: 'myblob', + }); + }); +}); + +suite('DuckDBDecimalType', () => { + test('toString default', () => { + expect(DECIMAL().toString()).toBe('DECIMAL(18,3)'); + }); + test('toString explicit width and scale', () => { + expect(DECIMAL(38, 10).toString()).toBe('DECIMAL(38,10)'); + }); + test('toString short', () => { + expect(DECIMAL().toString({ short: true })).toBe('DECIMAL(18,3)'); + }); + test('toString with alias', () => { + expect(DECIMAL(38, 10, 'mydecimal').toString()).toBe('mydecimal'); + }); + test('toJson', () => { + expect(DECIMAL().toJson()).toStrictEqual({ + typeId: DuckDBTypeId.DECIMAL, + width: 18, + scale: 3, + }); + }); + test('toJson with 
alias', () => { + expect(DECIMAL(38, 10, 'mydecimal').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.DECIMAL, + width: 38, + scale: 10, + alias: 'mydecimal', + }); + }); +}); + +suite('DuckDBTimestampSecondsType', () => { + test('toString', () => { + expect(TIMESTAMP_S.toString()).toBe('TIMESTAMP_S'); + }); + test('toString short', () => { + expect(TIMESTAMP_S.toString({ short: true })).toBe('TIMESTAMP_S'); + }); + test('toString with alias', () => { + expect(DuckDBTimestampSecondsType.create('mytimestamp_s').toString()).toBe( + 'mytimestamp_s', + ); + }); + test('toJson', () => { + expect(TIMESTAMP_S.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.TIMESTAMP_S, + }); + }); + test('toJson with alias', () => { + expect( + DuckDBTimestampSecondsType.create('mytimestamp_s').toJson(), + ).toStrictEqual({ + typeId: DuckDBTypeId.TIMESTAMP_S, + alias: 'mytimestamp_s', + }); + }); +}); + +suite('DuckDBTimestampMillisecondsType', () => { + test('toString', () => { + expect(TIMESTAMP_MS.toString()).toBe('TIMESTAMP_MS'); + }); + test('toString short', () => { + expect(TIMESTAMP_MS.toString({ short: true })).toBe('TIMESTAMP_MS'); + }); + test('toString with alias', () => { + expect( + DuckDBTimestampMillisecondsType.create('mytimestamp_ms').toString(), + ).toBe('mytimestamp_ms'); + }); + test('toJson', () => { + expect(TIMESTAMP_MS.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.TIMESTAMP_MS, + }); + }); + test('toJson with alias', () => { + expect( + DuckDBTimestampMillisecondsType.create('mytimestamp_ms').toJson(), + ).toStrictEqual({ + typeId: DuckDBTypeId.TIMESTAMP_MS, + alias: 'mytimestamp_ms', + }); + }); +}); + +suite('DuckDBTimestampNanosecondsType', () => { + test('toString', () => { + expect(TIMESTAMP_NS.toString()).toBe('TIMESTAMP_NS'); + }); + test('toString short', () => { + expect(TIMESTAMP_NS.toString({ short: true })).toBe('TIMESTAMP_NS'); + }); + test('toString with alias', () => { + expect( + 
DuckDBTimestampNanosecondsType.create('mytimestamp_ns').toString(), + ).toBe('mytimestamp_ns'); + }); + test('toJson', () => { + expect(TIMESTAMP_NS.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.TIMESTAMP_NS, + }); + }); + test('toJson with alias', () => { + expect( + DuckDBTimestampNanosecondsType.create('mytimestamp_ns').toJson(), + ).toStrictEqual({ + typeId: DuckDBTypeId.TIMESTAMP_NS, + alias: 'mytimestamp_ns', + }); + }); +}); + +suite('DuckDBEnumType', () => { + test('toString', () => { + expect(ENUM(['abc', 'def']).toString()).toBe(`ENUM('abc', 'def')`); + }); + test('toString short', () => { + expect(ENUM(['abc', 'def']).toString({ short: true })).toBe('ENUM(…)'); + }); + test('toString with alias', () => { + expect(ENUM(['abc', 'def'], 'myenum').toString()).toBe('myenum'); + }); + test('toJson', () => { + expect(ENUM(['abc', 'def']).toJson()).toStrictEqual({ + typeId: DuckDBTypeId.ENUM, + values: ['abc', 'def'], + internalTypeId: DuckDBTypeId.UTINYINT, + }); + }); + test('toJson with alias', () => { + expect(ENUM(['abc', 'def'], 'myenum').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.ENUM, + values: ['abc', 'def'], + internalTypeId: DuckDBTypeId.UTINYINT, + alias: 'myenum', + }); + }); + test('indexForValue', () => { + expect(ENUM(['abc', 'def']).indexForValue('def')).toBe(1); + }); +}); + +suite('DuckDBListType', () => { + test('toString', () => { + expect(LIST(INTEGER).toString()).toBe('INTEGER[]'); + }); + test('toString short', () => { + expect(LIST(INTEGER).toString({ short: true })).toBe('INTEGER[]'); + }); + test('toString with alias', () => { + expect(LIST(INTEGER, 'mylist').toString()).toBe('mylist'); + }); + test('toJson', () => { + expect(LIST(INTEGER).toJson()).toStrictEqual({ + typeId: DuckDBTypeId.LIST, + valueType: INTEGER.toJson(), + }); + }); + test('toJson with alias', () => { + expect(LIST(INTEGER, 'mylist').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.LIST, + valueType: INTEGER.toJson(), + alias: 'mylist', + }); + }); +}); + 
+suite('DuckDBStructType', () => { + test('toString', () => { + expect(STRUCT({ a: INTEGER, 'b"c': VARCHAR }).toString()).toBe( + 'STRUCT("a" INTEGER, "b""c" VARCHAR)', + ); + }); + test('toString short', () => { + expect( + STRUCT({ a: INTEGER, 'b"c': VARCHAR }).toString({ short: true }), + ).toBe('STRUCT(…)'); + }); + test('toString with alias', () => { + expect(STRUCT({ a: INTEGER, 'b"c': VARCHAR }, 'mystruct').toString()).toBe( + 'mystruct', + ); + }); + test('toJson', () => { + expect(STRUCT({ a: INTEGER, 'b"c': VARCHAR }).toJson()).toStrictEqual({ + typeId: DuckDBTypeId.STRUCT, + entryNames: ['a', 'b"c'], + entryTypes: [INTEGER.toJson(), VARCHAR.toJson()], + }); + }); + test('toJson with alias', () => { + expect( + STRUCT({ a: INTEGER, 'b"c': VARCHAR }, 'mystruct').toJson(), + ).toStrictEqual({ + typeId: DuckDBTypeId.STRUCT, + entryNames: ['a', 'b"c'], + entryTypes: [INTEGER.toJson(), VARCHAR.toJson()], + alias: 'mystruct', + }); + }); + test('indexForEntry', () => { + expect(STRUCT({ a: INTEGER, 'b"c': VARCHAR }).indexForEntry('b"c')).toBe(1); + }); + test('typeForEntry', () => { + expect( + STRUCT({ a: INTEGER, 'b"c': VARCHAR }).typeForEntry('b"c'), + ).toStrictEqual(VARCHAR); + }); +}); + +suite('DuckDBMapType', () => { + test('toString', () => { + expect(MAP(INTEGER, VARCHAR).toString()).toBe('MAP(INTEGER, VARCHAR)'); + }); + test('toString short', () => { + expect(MAP(INTEGER, VARCHAR).toString({ short: true })).toBe('MAP(…)'); + }); + test('toString with alias', () => { + expect(MAP(INTEGER, VARCHAR, 'mymap').toString()).toBe('mymap'); + }); + test('toJson', () => { + expect(MAP(INTEGER, VARCHAR).toJson()).toStrictEqual({ + typeId: DuckDBTypeId.MAP, + keyType: INTEGER.toJson(), + valueType: VARCHAR.toJson(), + }); + }); + test('toJson with alias', () => { + expect(MAP(INTEGER, VARCHAR, 'mymap').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.MAP, + keyType: INTEGER.toJson(), + valueType: VARCHAR.toJson(), + alias: 'mymap', + }); + }); +}); + 
+suite('DuckDBArrayType', () => { + test('toString', () => { + expect(ARRAY(INTEGER, 3).toString()).toBe('INTEGER[3]'); + }); + test('toString short', () => { + expect(ARRAY(INTEGER, 3).toString({ short: true })).toBe('INTEGER[3]'); + }); + test('toString with alias', () => { + expect(ARRAY(INTEGER, 3, 'myarray').toString()).toBe('myarray'); + }); + test('toJson', () => { + expect(ARRAY(INTEGER, 3).toJson()).toStrictEqual({ + typeId: DuckDBTypeId.ARRAY, + valueType: INTEGER.toJson(), + length: 3, + }); + }); + test('toJson with alias', () => { + expect(ARRAY(INTEGER, 3, 'myarray').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.ARRAY, + valueType: INTEGER.toJson(), + length: 3, + alias: 'myarray', + }); + }); +}); + +suite('DuckDBUUIDType', () => { + test('toString', () => { + expect(UUID.toString()).toBe('UUID'); + }); + test('toString short', () => { + expect(UUID.toString({ short: true })).toBe('UUID'); + }); + test('toString with alias', () => { + expect(DuckDBUUIDType.create('myuuid').toString()).toBe('myuuid'); + }); + test('toJson', () => { + expect(UUID.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.UUID, + }); + }); + test('toJson with alias', () => { + expect(DuckDBUUIDType.create('myuuid').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.UUID, + alias: 'myuuid', + }); + }); +}); + +suite('DuckDBUnionType', () => { + test('toString', () => { + expect(UNION({ a: INTEGER, 'b"c': VARCHAR }).toString()).toBe( + 'UNION("a" INTEGER, "b""c" VARCHAR)', + ); + }); + test('toString short', () => { + expect( + UNION({ a: INTEGER, 'b"c': VARCHAR }).toString({ short: true }), + ).toBe('UNION(…)'); + }); + test('toString with alias', () => { + expect(UNION({ a: INTEGER, 'b"c': VARCHAR }, 'myunion').toString()).toBe( + 'myunion', + ); + }); + test('toJson', () => { + expect(UNION({ a: INTEGER, 'b"c': VARCHAR }).toJson()).toStrictEqual({ + typeId: DuckDBTypeId.UNION, + memberTags: ['a', 'b"c'], + memberTypes: [INTEGER.toJson(), VARCHAR.toJson()], + }); + }); + 
test('toJson with alias', () => { + expect( + UNION({ a: INTEGER, 'b"c': VARCHAR }, 'myunion').toJson(), + ).toStrictEqual({ + typeId: DuckDBTypeId.UNION, + memberTags: ['a', 'b"c'], + memberTypes: [INTEGER.toJson(), VARCHAR.toJson()], + alias: 'myunion', + }); + }); + test('memberIndexForTag', () => { + expect(UNION({ a: INTEGER, 'b"c': VARCHAR }).memberIndexForTag('b"c')).toBe( + 1, + ); + }); + test('memberTypeForTag', () => { + expect( + UNION({ a: INTEGER, 'b"c': VARCHAR }).memberTypeForTag('b"c'), + ).toStrictEqual(VARCHAR); + }); +}); + +suite('DuckDBBitType', () => { + test('toString', () => { + expect(BIT.toString()).toBe('BIT'); + }); + test('toString short', () => { + expect(BIT.toString({ short: true })).toBe('BIT'); + }); + test('toString with alias', () => { + expect(DuckDBBitType.create('mybit').toString()).toBe('mybit'); + }); + test('toJson', () => { + expect(BIT.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.BIT, + }); + }); + test('toJson with alias', () => { + expect(DuckDBBitType.create('mybit').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.BIT, + alias: 'mybit', + }); + }); +}); + +suite('DuckDBTimeTZType', () => { + test('toString', () => { + expect(TIMETZ.toString()).toBe('TIME WITH TIME ZONE'); + }); + test('toString short', () => { + expect(TIMETZ.toString({ short: true })).toBe('TIMETZ'); + }); + test('toString with alias', () => { + expect(DuckDBTimeTZType.create('mytimetz').toString()).toBe('mytimetz'); + }); + test('toJson', () => { + expect(TIMETZ.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.TIME_TZ, + }); + }); + test('toJson with alias', () => { + expect(DuckDBTimeTZType.create('mytimetz').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.TIME_TZ, + alias: 'mytimetz', + }); + }); +}); + +suite('DuckDBTimestampTZType', () => { + test('toString', () => { + expect(TIMESTAMPTZ.toString()).toBe('TIMESTAMP WITH TIME ZONE'); + }); + test('toString short', () => { + expect(TIMESTAMPTZ.toString({ short: true })).toBe('TIMESTAMPTZ'); 
+ }); + test('toString with alias', () => { + expect(DuckDBTimestampTZType.create('mytimestamptz').toString()).toBe( + 'mytimestamptz', + ); + }); + test('toJson', () => { + expect(TIMESTAMPTZ.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.TIMESTAMP_TZ, + }); + }); + test('toJson with alias', () => { + expect( + DuckDBTimestampTZType.create('mytimestamptz').toJson(), + ).toStrictEqual({ + typeId: DuckDBTypeId.TIMESTAMP_TZ, + alias: 'mytimestamptz', + }); + }); +}); + +suite('DuckDBAnyType', () => { + test('toString', () => { + expect(ANY.toString()).toBe('ANY'); + }); + test('toString short', () => { + expect(ANY.toString({ short: true })).toBe('ANY'); + }); + test('toString with alias', () => { + expect(DuckDBAnyType.create('myany').toString()).toBe('myany'); + }); + test('toJson', () => { + expect(ANY.toJson()).toStrictEqual({ typeId: DuckDBTypeId.ANY }); + }); + test('toJson with alias', () => { + expect(DuckDBAnyType.create('myany').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.ANY, + alias: 'myany', + }); + }); +}); + +suite('DuckDBVarIntType', () => { + test('toString', () => { + expect(VARINT.toString()).toBe('VARINT'); + }); + test('toString short', () => { + expect(VARINT.toString({ short: true })).toBe('VARINT'); + }); + test('toString with alias', () => { + expect(DuckDBVarIntType.create('myvarint').toString()).toBe('myvarint'); + }); + test('toJson', () => { + expect(VARINT.toJson()).toStrictEqual({ typeId: DuckDBTypeId.VARINT }); + }); + test('toJson with alias', () => { + expect(DuckDBVarIntType.create('myvarint').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.VARINT, + alias: 'myvarint', + }); + }); +}); + +suite('DuckDBSQLNullType', () => { + test('toString', () => { + expect(SQLNULL.toString()).toBe('SQLNULL'); + }); + test('toString short', () => { + expect(SQLNULL.toString({ short: true })).toBe('SQLNULL'); + }); + test('toString with alias', () => { + expect(DuckDBSQLNullType.create('mysqlnull').toString()).toBe('mysqlnull'); + }); + 
test('toJson', () => { + expect(SQLNULL.toJson()).toStrictEqual({ typeId: DuckDBTypeId.SQLNULL }); + }); + test('toJson with alias', () => { + expect(DuckDBSQLNullType.create('mysqlnull').toJson()).toStrictEqual({ + typeId: DuckDBTypeId.SQLNULL, + alias: 'mysqlnull', + }); + }); +}); + +suite('DuckDBStringLiteralType', () => { + test('toString', () => { + expect(STRING_LITERAL.toString()).toBe('STRING_LITERAL'); + }); + test('toString short', () => { + expect(STRING_LITERAL.toString({ short: true })).toBe('STRING_LITERAL'); + }); + test('toString with alias', () => { + expect(DuckDBStringLiteralType.create('mystringliteral').toString()).toBe( + 'mystringliteral', + ); + }); + test('toJson', () => { + expect(STRING_LITERAL.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.STRING_LITERAL, + }); + }); + test('toJson with alias', () => { + expect( + DuckDBStringLiteralType.create('mystringliteral').toJson(), + ).toStrictEqual({ + typeId: DuckDBTypeId.STRING_LITERAL, + alias: 'mystringliteral', + }); + }); +}); + +suite('DuckDBIntegerLiteralType', () => { + test('toString', () => { + expect(INTEGER_LITERAL.toString()).toBe('INTEGER_LITERAL'); + }); + test('toString short', () => { + expect(INTEGER_LITERAL.toString({ short: true })).toBe('INTEGER_LITERAL'); + }); + test('toString with alias', () => { + expect(DuckDBIntegerLiteralType.create('myintegerliteral').toString()).toBe( + 'myintegerliteral', + ); + }); + test('toJson', () => { + expect(INTEGER_LITERAL.toJson()).toStrictEqual({ + typeId: DuckDBTypeId.INTEGER_LITERAL, + }); + }); + test('toJson with alias', () => { + expect( + DuckDBIntegerLiteralType.create('myintegerliteral').toJson(), + ).toStrictEqual({ + typeId: DuckDBTypeId.INTEGER_LITERAL, + alias: 'myintegerliteral', + }); + }); +}); diff --git a/ts/pkgs/duckdb-data-types/test/parseLogicalTypeString.test.ts b/ts/pkgs/duckdb-data-types/test/parseLogicalTypeString.test.ts new file mode 100644 index 0000000..d055af6 --- /dev/null +++ 
b/ts/pkgs/duckdb-data-types/test/parseLogicalTypeString.test.ts @@ -0,0 +1,326 @@ +import { expect, suite, test } from 'vitest'; +import { + ARRAY, + BIGINT, + BIT, + BLOB, + BOOLEAN, + DATE, + DECIMAL, + DOUBLE, + ENUM, + FLOAT, + HUGEINT, + INTEGER, + INTERVAL, + LIST, + MAP, + SMALLINT, + STRUCT, + TIME, + TIMESTAMP, + TIMESTAMP_MS, + TIMESTAMP_NS, + TIMESTAMP_S, + TIMESTAMPTZ, + TIMETZ, + TINYINT, + UBIGINT, + UHUGEINT, + UINTEGER, + UNION, + USMALLINT, + UTINYINT, + UUID, + VARCHAR, + VARINT, +} from '../src/DuckDBType'; +import { + BOX_2D, + BOX_2DF, + GEOMETRY, + INET, + JSONType, + LINESTRING_2D, + POINT_2D, + POINT_3D, + POINT_4D, + POLYGON_2D, + WKB_BLOB, +} from '../src/extensionTypes'; +import { parseLogicalTypeString } from '../src/parseLogicalTypeString'; + +suite('parseLogicalTypeString', () => { + test('BOOLEAN', () => { + expect(parseLogicalTypeString('BOOLEAN')).toStrictEqual(BOOLEAN); + }); + test('TINYINT', () => { + expect(parseLogicalTypeString('TINYINT')).toStrictEqual(TINYINT); + }); + test('GEOMETRY', () => { + expect(parseLogicalTypeString('GEOMETRY')).toStrictEqual(GEOMETRY); + }); + test('LINESTRING_2D', () => { + expect(parseLogicalTypeString('LINESTRING_2D')).toStrictEqual( + LINESTRING_2D, + ); + }); + test('BOX_2D', () => { + expect(parseLogicalTypeString('BOX_2D')).toStrictEqual(BOX_2D); + }); + test('BOX_2DF', () => { + expect(parseLogicalTypeString('BOX_2DF')).toStrictEqual(BOX_2DF); + }); + test('POINT_2D', () => { + expect(parseLogicalTypeString('POINT_2D')).toStrictEqual(POINT_2D); + }); + test('POINT_3D', () => { + expect(parseLogicalTypeString('POINT_3D')).toStrictEqual(POINT_3D); + }); + test('POINT_4D', () => { + expect(parseLogicalTypeString('POINT_4D')).toStrictEqual(POINT_4D); + }); + test('POLYGON_2D', () => { + expect(parseLogicalTypeString('POLYGON_2D')).toStrictEqual(POLYGON_2D); + }); + test('INET', () => { + expect(parseLogicalTypeString('INET')).toStrictEqual(INET); + }); + test('JSON', () => { + 
expect(parseLogicalTypeString('JSON')).toStrictEqual(JSONType); + }); + test('WKB_BLOB', () => { + expect(parseLogicalTypeString('WKB_BLOB')).toStrictEqual(WKB_BLOB); + }); + test('SMALLINT', () => { + expect(parseLogicalTypeString('SMALLINT')).toStrictEqual(SMALLINT); + }); + test('INTEGER', () => { + expect(parseLogicalTypeString('INTEGER')).toStrictEqual(INTEGER); + }); + test('BIGINT', () => { + expect(parseLogicalTypeString('BIGINT')).toStrictEqual(BIGINT); + }); + test('HUGEINT', () => { + expect(parseLogicalTypeString('HUGEINT')).toStrictEqual(HUGEINT); + }); + test('UTINYINT', () => { + expect(parseLogicalTypeString('UTINYINT')).toStrictEqual(UTINYINT); + }); + test('UHUGEINT', () => { + expect(parseLogicalTypeString('UHUGEINT')).toStrictEqual(UHUGEINT); + }); + test('USMALLINT', () => { + expect(parseLogicalTypeString('USMALLINT')).toStrictEqual(USMALLINT); + }); + test('UINTEGER', () => { + expect(parseLogicalTypeString('UINTEGER')).toStrictEqual(UINTEGER); + }); + test('UBIGINT', () => { + expect(parseLogicalTypeString('UBIGINT')).toStrictEqual(UBIGINT); + }); + test('DATE', () => { + expect(parseLogicalTypeString('DATE')).toStrictEqual(DATE); + }); + test('TIME', () => { + expect(parseLogicalTypeString('TIME')).toStrictEqual(TIME); + }); + test('TIMESTAMP', () => { + expect(parseLogicalTypeString('TIMESTAMP')).toStrictEqual(TIMESTAMP); + }); + test('TIMESTAMP_S', () => { + expect(parseLogicalTypeString('TIMESTAMP_S')).toStrictEqual(TIMESTAMP_S); + }); + test('TIMESTAMP_MS', () => { + expect(parseLogicalTypeString('TIMESTAMP_MS')).toStrictEqual(TIMESTAMP_MS); + }); + test('TIMESTAMP_NS', () => { + expect(parseLogicalTypeString('TIMESTAMP_NS')).toStrictEqual(TIMESTAMP_NS); + }); + test('TIME WITH TIME ZONE', () => { + expect(parseLogicalTypeString('TIME WITH TIME ZONE')).toStrictEqual(TIMETZ); + }); + test('TIMESTAMP WITH TIME ZONE', () => { + expect(parseLogicalTypeString('TIMESTAMP WITH TIME ZONE')).toStrictEqual( + TIMESTAMPTZ, + ); + }); + 
test('FLOAT', () => { + expect(parseLogicalTypeString('FLOAT')).toStrictEqual(FLOAT); + }); + test('DOUBLE', () => { + expect(parseLogicalTypeString('DOUBLE')).toStrictEqual(DOUBLE); + }); + + test('DECIMAL(18,6)', () => { + expect(parseLogicalTypeString('DECIMAL(18,6)')).toStrictEqual( + DECIMAL(18, 6), + ); + }); + + test(`ENUM('DUCK_DUCK_ENUM', 'GOOSE')`, () => { + expect( + parseLogicalTypeString(`ENUM('DUCK_DUCK_ENUM', 'GOOSE')`), + ).toStrictEqual(ENUM(['DUCK_DUCK_ENUM', 'GOOSE'])); + }); + + test('DOUBLE[]', () => { + expect(parseLogicalTypeString('DOUBLE[]')).toStrictEqual(LIST(DOUBLE)); + }); + + test('STRUCT(a INTEGER, b VARCHAR)', () => { + expect( + parseLogicalTypeString('STRUCT(a INTEGER, b VARCHAR)'), + ).toStrictEqual( + STRUCT({ + a: INTEGER, + b: VARCHAR, + }), + ); + }); + + test('STRUCT(a INTEGER[], b VARCHAR[])', () => { + expect( + parseLogicalTypeString('STRUCT(a INTEGER[], b VARCHAR[])'), + ).toStrictEqual( + STRUCT({ + a: LIST(INTEGER), + b: LIST(VARCHAR), + }), + ); + }); + + test('STRUCT(a INTEGER, b VARCHAR)[]', () => { + expect( + parseLogicalTypeString('STRUCT(a INTEGER, b VARCHAR)[]'), + ).toStrictEqual( + LIST( + STRUCT({ + a: INTEGER, + b: VARCHAR, + }), + ), + ); + }); + + // addition: nested struct + test('STRUCT(a STRUCT(b INTEGER), b VARCHAR)', () => { + expect( + parseLogicalTypeString('STRUCT(a STRUCT(b INTEGER), b VARCHAR)'), + ).toStrictEqual( + STRUCT({ + a: STRUCT({ b: INTEGER }), + b: VARCHAR, + }), + ); + }); + test('STRUCT("my weird ""key" INTEGER, b VARCHAR)', () => { + expect( + parseLogicalTypeString('STRUCT("my weird ""key" INTEGER, b VARCHAR)'), + ).toStrictEqual( + STRUCT({ + '"my weird ""key"': INTEGER, + b: VARCHAR, + }), + ); + }); + test('STRUCT("my weird ""key" STRUCT("my other ""weird key" INTEGER), b VARCHAR)', () => { + expect( + parseLogicalTypeString( + 'STRUCT("my weird ""key" STRUCT("my other ""weird key" INTEGER), b VARCHAR)', + ), + ).toStrictEqual( + STRUCT({ + '"my weird ""key"': STRUCT({ + '"my 
other ""weird key"': INTEGER, + }), + b: VARCHAR, + }), + ); + }); + + test('MAP(INTEGER, VARCHAR)', () => { + expect(parseLogicalTypeString('MAP(INTEGER, VARCHAR)')).toStrictEqual( + MAP(INTEGER, VARCHAR), + ); + }); + + test('MAP(VARCHAR, STRUCT(b INTEGER))', () => { + expect( + parseLogicalTypeString('MAP(VARCHAR, STRUCT(b INTEGER))'), + ).toStrictEqual(MAP(VARCHAR, STRUCT({ b: INTEGER }))); + }); + + test('UNION("name" VARCHAR, age SMALLINT)', () => { + expect( + parseLogicalTypeString('UNION("name" VARCHAR, age SMALLINT)'), + ).toStrictEqual( + UNION({ + '"name"': VARCHAR, + age: SMALLINT, + }), + ); + }); + + test('INTEGER[3]', () => { + expect(parseLogicalTypeString('INTEGER[3]')).toStrictEqual( + ARRAY(INTEGER, 3), + ); + }); + + test('STRUCT(a INTEGER, b VARCHAR)[3]', () => { + expect( + parseLogicalTypeString('STRUCT(a INTEGER, b VARCHAR)[3]'), + ).toStrictEqual( + ARRAY( + STRUCT({ + a: INTEGER, + b: VARCHAR, + }), + 3, + ), + ); + }); + + test('STRUCT(a INTEGER[3], b VARCHAR[3])', () => { + expect( + parseLogicalTypeString('STRUCT(a INTEGER[3], b VARCHAR[3])'), + ).toStrictEqual( + STRUCT({ + a: ARRAY(INTEGER, 3), + b: ARRAY(VARCHAR, 3), + }), + ); + }); + + test('INTEGER[][3]', () => { + expect(parseLogicalTypeString('INTEGER[][3]')).toStrictEqual( + ARRAY(LIST(INTEGER), 3), + ); + }); + + test('INTEGER[3][]', () => { + expect(parseLogicalTypeString('INTEGER[3][]')).toStrictEqual( + LIST(ARRAY(INTEGER, 3)), + ); + }); + + test('UUID', () => { + expect(parseLogicalTypeString('UUID')).toStrictEqual(UUID); + }); + test('INTERVAL', () => { + expect(parseLogicalTypeString('INTERVAL')).toStrictEqual(INTERVAL); + }); + test('VARCHAR', () => { + expect(parseLogicalTypeString('VARCHAR')).toStrictEqual(VARCHAR); + }); + test('VARINT', () => { + expect(parseLogicalTypeString('VARINT')).toStrictEqual(VARINT); + }); + test('BLOB', () => { + expect(parseLogicalTypeString('BLOB')).toStrictEqual(BLOB); + }); + test('BIT', () => { + 
expect(parseLogicalTypeString('BIT')).toStrictEqual(BIT); + }); +}); diff --git a/ts/pkgs/duckdb-data-types/test/tsconfig.json b/ts/pkgs/duckdb-data-types/test/tsconfig.json new file mode 100644 index 0000000..8c93c98 --- /dev/null +++ b/ts/pkgs/duckdb-data-types/test/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "../../../tsconfig.test.json", + "references": [ + { "path": "../src" } + ] +} diff --git a/ts/pkgs/duckdb-data-values/package.json b/ts/pkgs/duckdb-data-values/package.json new file mode 100644 index 0000000..7806638 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/package.json @@ -0,0 +1,34 @@ +{ + "name": "@duckdb/data-values", + "version": "0.0.1", + "description": "Utilities for representing DuckDB values", + "type": "module", + "main": "./out/index.js", + "module": "./out/index.js", + "types": "./out/index.d.ts", + "scripts": { + "preinstall": "pnpm build:src", + "build": "tsc -b src test", + "build:src": "tsc -b src", + "build:test": "tsc -b test", + "build:watch": "tsc -b src test --watch", + "check": "pnpm format:check && pnpm lint", + "clean": "rimraf out", + "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check", + "format:write": "prettier . 
--ignore-path $(find-up .prettierignore) --write", + "lint": "pnpm eslint src test", + "test": "vitest run", + "test:watch": "vitest" + }, + "devDependencies": { + "@eslint/js": "^9.24.0", + "eslint": "^9.24.0", + "find-up-cli": "^6.0.0", + "prettier": "^3.5.3", + "rimraf": "^6.0.1", + "typescript": "^5.8.3", + "typescript-eslint": "^8.30.1", + "vite": "^6.2.6", + "vitest": "^3.1.1" + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBArrayValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBArrayValue.ts new file mode 100644 index 0000000..c10f7d0 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBArrayValue.ts @@ -0,0 +1,23 @@ +import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js'; +import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js'; +import { DuckDBValue } from './DuckDBValue.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBArrayValue extends SpecialDuckDBValue { + public readonly values: readonly DuckDBValue[]; + + constructor(values: readonly DuckDBValue[]) { + super(); + this.values = values; + } + + public toDuckDBString(): string { + const valueStrings = this.values.map(displayStringForDuckDBValue); + return `[${valueStrings.join(', ')}]`; + } + + public toJson(): Json { + return this.values.map(jsonFromDuckDBValue); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBBitValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBBitValue.ts new file mode 100644 index 0000000..c8823a8 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBBitValue.ts @@ -0,0 +1,123 @@ +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBBitValue extends SpecialDuckDBValue { + public readonly data: Uint8Array; + + constructor(data: Uint8Array) { + super(); + this.data = data; + } + + public padding(): number { + return this.data[0]; + } + + public get length(): number { + 
return (this.data.length - 1) * 8 - this.padding(); + } + + public getBool(index: number): boolean { + const offset = index + this.padding(); + const dataIndex = Math.floor(offset / 8) + 1; + const byte = this.data[dataIndex] >> (7 - (offset % 8)); + return (byte & 1) !== 0; + } + + public toBools(): boolean[] { + const bools: boolean[] = []; + const length = this.length; + for (let i = 0; i < length; i++) { + bools.push(this.getBool(i)); + } + return bools; + } + + public getBit(index: number): 0 | 1 { + return this.getBool(index) ? 1 : 0; + } + + public toBits(): number[] { + const bits: number[] = []; + const length = this.length; + for (let i = 0; i < length; i++) { + bits.push(this.getBit(i)); + } + return bits; + } + + public toDuckDBString(): string { + const length = this.length; + const chars = Array.from({ length }); + for (let i = 0; i < length; i++) { + chars[i] = this.getBool(i) ? '1' : '0'; + } + return chars.join(''); + } + + public toJson(): Json { + return this.toDuckDBString(); + } + + public static fromString(str: string, on: string = '1'): DuckDBBitValue { + return DuckDBBitValue.fromLengthAndPredicate( + str.length, + (i) => str[i] === on, + ); + } + + public static fromBits( + bits: readonly number[], + on: number = 1, + ): DuckDBBitValue { + return DuckDBBitValue.fromLengthAndPredicate( + bits.length, + (i) => bits[i] === on, + ); + } + + public static fromBools(bools: readonly boolean[]): DuckDBBitValue { + return DuckDBBitValue.fromLengthAndPredicate(bools.length, (i) => bools[i]); + } + + public static fromLengthAndPredicate( + length: number, + predicate: (index: number) => boolean, + ): DuckDBBitValue { + const byteCount = Math.ceil(length / 8) + 1; + const paddingBitCount = (8 - (length % 8)) % 8; + + const data = new Uint8Array(byteCount); + let byteIndex = 0; + + // first byte contains count of padding bits + data[byteIndex++] = paddingBitCount; + + let byte = 0; + let byteBit = 0; + + // padding consists of 1s in MSB of second byte + 
while (byteBit < paddingBitCount) { + byte <<= 1; + byte |= 1; + byteBit++; + } + + let bitIndex = 0; + + while (byteIndex < byteCount) { + while (byteBit < 8) { + byte <<= 1; + if (predicate(bitIndex++)) { + byte |= 1; + } + byteBit++; + } + data[byteIndex++] = byte; + byte = 0; + byteBit = 0; + } + + return new DuckDBBitValue(data); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBBlobValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBBlobValue.ts new file mode 100644 index 0000000..f2a8913 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBBlobValue.ts @@ -0,0 +1,20 @@ +import { stringFromBlob } from './conversion/stringFromBlob.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBBlobValue extends SpecialDuckDBValue { + public readonly bytes: Uint8Array; + + constructor(bytes: Uint8Array) { + super(); + this.bytes = bytes; + } + + public toDuckDBString(): string { + return stringFromBlob(this.bytes); + } + + public toJson(): Json { + return this.toDuckDBString(); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBDateValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBDateValue.ts new file mode 100644 index 0000000..d40f8d5 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBDateValue.ts @@ -0,0 +1,20 @@ +import { getDuckDBDateStringFromDays } from './conversion/dateTimeStringConversion.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBDateValue extends SpecialDuckDBValue { + public readonly days: number; + + constructor(days: number) { + super(); + this.days = days; + } + + public toDuckDBString(): string { + return getDuckDBDateStringFromDays(this.days); + } + + public toJson(): Json { + return this.toDuckDBString(); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBDecimalValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBDecimalValue.ts new file mode 100644 index 0000000..a434121 --- 
/dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBDecimalValue.ts @@ -0,0 +1,38 @@ +import { + DuckDBDecimalFormatOptions, + stringFromDecimal, +} from './conversion/stringFromDecimal.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBDecimalValue extends SpecialDuckDBValue { + public readonly scaledValue: bigint; + + public readonly scale: number; + + constructor(scaledValue: bigint, scale: number) { + super(); + this.scaledValue = scaledValue; + this.scale = scale; + } + + public toDuckDBString(): string { + return stringFromDecimal(this.scaledValue, this.scale); + } + + /** Returns a string representation appropriate to the host environment's current locale. */ + + public toLocaleString( + locales?: string | string[], + options?: DuckDBDecimalFormatOptions, + ): string { + return stringFromDecimal(this.scaledValue, this.scale, { + locales, + options, + }); + } + + public toJson(): Json { + return this.toDuckDBString(); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBIntervalValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBIntervalValue.ts new file mode 100644 index 0000000..68a6938 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBIntervalValue.ts @@ -0,0 +1,26 @@ +import { getDuckDBIntervalString } from './conversion/dateTimeStringConversion.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBIntervalValue extends SpecialDuckDBValue { + public readonly months: number; + + public readonly days: number; + + public readonly microseconds: bigint; + + constructor(months: number, days: number, microseconds: bigint) { + super(); + this.months = months; + this.days = days; + this.microseconds = microseconds; + } + + public toDuckDBString(): string { + return getDuckDBIntervalString(this.months, this.days, this.microseconds); + } + + public toJson(): Json { + return this.toDuckDBString(); + } +} diff --git 
a/ts/pkgs/duckdb-data-values/src/DuckDBListValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBListValue.ts new file mode 100644 index 0000000..518a33d --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBListValue.ts @@ -0,0 +1,23 @@ +import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js'; +import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js'; +import { DuckDBValue } from './DuckDBValue.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBListValue extends SpecialDuckDBValue { + public readonly values: readonly DuckDBValue[]; + + constructor(values: readonly DuckDBValue[]) { + super(); + this.values = values; + } + + public toDuckDBString(): string { + const valueStrings = this.values.map(displayStringForDuckDBValue); + return `[${valueStrings.join(', ')}]`; + } + + public toJson(): Json { + return this.values.map(jsonFromDuckDBValue); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBMapEntry.ts b/ts/pkgs/duckdb-data-values/src/DuckDBMapEntry.ts new file mode 100644 index 0000000..665505e --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBMapEntry.ts @@ -0,0 +1,6 @@ +import { DuckDBValue } from './DuckDBValue.js'; + +export interface DuckDBMapEntry { + readonly key: DuckDBValue; + readonly value: DuckDBValue; +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBMapValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBMapValue.ts new file mode 100644 index 0000000..d837bc1 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBMapValue.ts @@ -0,0 +1,33 @@ +import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js'; +import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js'; +import { DuckDBMapEntry } from './DuckDBMapEntry.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBMapValue extends SpecialDuckDBValue { 
+ public readonly entries: readonly DuckDBMapEntry[]; + + constructor(entries: readonly DuckDBMapEntry[]) { + super(); + this.entries = entries; + } + + public toDuckDBString(): string { + const entryStrings = this.entries.map( + ({ key, value }) => + `${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue( + value, + )}`, + ); + return `{${entryStrings.join(', ')}}`; + } + + public toJson(): Json { + const result: Json = {}; + for (const { key, value } of this.entries) { + const keyString = displayStringForDuckDBValue(key); + result[keyString] = jsonFromDuckDBValue(value); + } + return result; + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBStructEntry.ts b/ts/pkgs/duckdb-data-values/src/DuckDBStructEntry.ts new file mode 100644 index 0000000..b4289e3 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBStructEntry.ts @@ -0,0 +1,6 @@ +import { DuckDBValue } from './DuckDBValue.js'; + +export interface DuckDBStructEntry { + readonly key: string; + readonly value: DuckDBValue; +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBStructValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBStructValue.ts new file mode 100644 index 0000000..4adc651 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBStructValue.ts @@ -0,0 +1,33 @@ +import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js'; +import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js'; +import { DuckDBStructEntry } from './DuckDBStructEntry.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBStructValue extends SpecialDuckDBValue { + public readonly entries: readonly DuckDBStructEntry[]; + + constructor(entries: readonly DuckDBStructEntry[]) { + super(); + this.entries = entries; + } + + public toDuckDBString(): string { + const entryStrings = this.entries.map( + ({ key, value }) => + `${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue( + value, + 
)}`, + ); + return `{${entryStrings.join(', ')}}`; + } + + public toJson(): Json { + const result: Json = {}; + for (const { key, value } of this.entries) { + const keyString = displayStringForDuckDBValue(key); + result[keyString] = jsonFromDuckDBValue(value); + } + return result; + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBTimeTZValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBTimeTZValue.ts new file mode 100644 index 0000000..f8d7a1e --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBTimeTZValue.ts @@ -0,0 +1,42 @@ +import { + getDuckDBTimeStringFromMicrosecondsInDay, + getOffsetStringFromSeconds, +} from './conversion/dateTimeStringConversion.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBTimeTZValue extends SpecialDuckDBValue { + public readonly micros: bigint; + public readonly offset: number; + + constructor(micros: bigint, offset: number) { + super(); + this.micros = micros; + this.offset = offset; + } + + public toDuckDBString(): string { + return `${getDuckDBTimeStringFromMicrosecondsInDay( + this.micros, + )}${getOffsetStringFromSeconds(this.offset)}`; + } + + public toJson(): Json { + return this.toDuckDBString(); + } + + private static TimeBits = 40; + private static OffsetBits = 24; + private static MaxOffset = 16 * 60 * 60 - 1; // ±15:59:59 = 57599 seconds + + public static fromBits(bits: bigint): DuckDBTimeTZValue { + const micros = BigInt.asUintN( + DuckDBTimeTZValue.TimeBits, + bits >> BigInt(DuckDBTimeTZValue.OffsetBits), + ); + const offset = + DuckDBTimeTZValue.MaxOffset - + Number(BigInt.asUintN(DuckDBTimeTZValue.OffsetBits, bits)); + return new DuckDBTimeTZValue(micros, offset); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBTimeValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBTimeValue.ts new file mode 100644 index 0000000..22d71b2 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBTimeValue.ts @@ -0,0 +1,20 @@ +import { 
getDuckDBTimeStringFromMicrosecondsInDay } from './conversion/dateTimeStringConversion.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBTimeValue extends SpecialDuckDBValue { + public readonly microseconds: bigint; + + constructor(microseconds: bigint) { + super(); + this.microseconds = microseconds; + } + + public toDuckDBString(): string { + return getDuckDBTimeStringFromMicrosecondsInDay(this.microseconds); + } + + public toJson(): Json { + return this.toDuckDBString(); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBTimestampMicrosecondsValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBTimestampMicrosecondsValue.ts new file mode 100644 index 0000000..790b482 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBTimestampMicrosecondsValue.ts @@ -0,0 +1,22 @@ +import { getDuckDBTimestampStringFromMicroseconds } from './conversion/dateTimeStringConversion.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBTimestampMicrosecondsValue extends SpecialDuckDBValue { + public readonly microseconds: bigint; + + constructor(microseconds: bigint) { + super(); + this.microseconds = microseconds; + } + + public toDuckDBString(): string { + return getDuckDBTimestampStringFromMicroseconds(this.microseconds); + } + + public toJson(): Json { + return this.toDuckDBString(); + } +} + +export type DuckDBTimestamp = DuckDBTimestampMicrosecondsValue; diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBTimestampMillisecondsValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBTimestampMillisecondsValue.ts new file mode 100644 index 0000000..5e5429a --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBTimestampMillisecondsValue.ts @@ -0,0 +1,20 @@ +import { getDuckDBTimestampStringFromMilliseconds } from './conversion/dateTimeStringConversion.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from 
'./SpecialDuckDBValue.js'; + +export class DuckDBTimestampMillisecondsValue extends SpecialDuckDBValue { + public readonly milliseconds: bigint; + + constructor(milliseconds: bigint) { + super(); + this.milliseconds = milliseconds; + } + + public toDuckDBString(): string { + return getDuckDBTimestampStringFromMilliseconds(this.milliseconds); + } + + public toJson(): Json { + return this.toDuckDBString(); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBTimestampNanosecondsValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBTimestampNanosecondsValue.ts new file mode 100644 index 0000000..f65745d --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBTimestampNanosecondsValue.ts @@ -0,0 +1,20 @@ +import { getDuckDBTimestampStringFromNanoseconds } from './conversion/dateTimeStringConversion.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBTimestampNanosecondsValue extends SpecialDuckDBValue { + public readonly nanoseconds: bigint; + + constructor(nanoseconds: bigint) { + super(); + this.nanoseconds = nanoseconds; + } + + public toDuckDBString(): string { + return getDuckDBTimestampStringFromNanoseconds(this.nanoseconds); + } + + public toJson(): Json { + return this.toDuckDBString(); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBTimestampSecondsValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBTimestampSecondsValue.ts new file mode 100644 index 0000000..99989b8 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBTimestampSecondsValue.ts @@ -0,0 +1,20 @@ +import { getDuckDBTimestampStringFromSeconds } from './conversion/dateTimeStringConversion.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBTimestampSecondsValue extends SpecialDuckDBValue { + public readonly seconds: bigint; + + constructor(seconds: bigint) { + super(); + this.seconds = seconds; + } + + public toDuckDBString(): string { + return 
getDuckDBTimestampStringFromSeconds(this.seconds); + } + + public toJson(): Json { + return this.toDuckDBString(); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBTimestampTZValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBTimestampTZValue.ts new file mode 100644 index 0000000..e16c5c8 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBTimestampTZValue.ts @@ -0,0 +1,24 @@ +import { getDuckDBTimestampStringFromMicroseconds } from './conversion/dateTimeStringConversion.js'; +import { DuckDBToStringOptions } from './DuckDBToStringOptions.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBTimestampTZValue extends SpecialDuckDBValue { + public readonly microseconds: bigint; + + constructor(microseconds: bigint) { + super(); + this.microseconds = microseconds; + } + + public toDuckDBString(toStringOptions?: DuckDBToStringOptions): string { + return getDuckDBTimestampStringFromMicroseconds( + this.microseconds, + toStringOptions?.timezoneOffsetInMinutes || 0, + ); + } + + public toJson(): Json { + return this.toDuckDBString(); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBToStringOptions.ts b/ts/pkgs/duckdb-data-values/src/DuckDBToStringOptions.ts new file mode 100644 index 0000000..0a6a25b --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBToStringOptions.ts @@ -0,0 +1,3 @@ +export interface DuckDBToStringOptions { + timezoneOffsetInMinutes?: number; +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBUUIDValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBUUIDValue.ts new file mode 100644 index 0000000..3b0d473 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBUUIDValue.ts @@ -0,0 +1,48 @@ +import { hexFromBlob } from './conversion/hexFromBlob.js'; +import { Json } from './Json.js'; +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export class DuckDBUUIDValue extends SpecialDuckDBValue { + public readonly bytes: Uint8Array; + + constructor(bytes: 
Uint8Array) { + super(); + this.bytes = bytes; + } + + public toDuckDBString(): string { + if (this.bytes.length !== 16) { + throw new Error('Invalid UUID bytes length'); + } + + // Insert dashes to format the UUID + return `${hexFromBlob(this.bytes, 0, 4)}-${hexFromBlob(this.bytes, 4, 6)}-${hexFromBlob(this.bytes, 6, 8)}-${hexFromBlob(this.bytes, 8, 10)}-${hexFromBlob(this.bytes, 10, 16)}`; + } + + public toJson(): Json { + return this.toDuckDBString(); + } + + /** + * Create a DuckDBUUIDValue value from a HUGEINT as stored by DuckDB. + * + * UUID values are stored with their MSB flipped so their numeric ordering matches their string ordering. + */ + public static fromStoredHugeint(hugeint: bigint): DuckDBUUIDValue { + // Flip the MSB and truncate to 128 bits to extract the represented unsigned 128-bit value. + const uint128 = + (hugeint ^ 0x80000000000000000000000000000000n) & + 0xffffffffffffffffffffffffffffffffn; + return DuckDBUUIDValue.fromUint128(uint128); + } + + /** Create a DuckDBUUIDValue value from an unsigned 128-bit integer in a JS BigInt. */ + public static fromUint128(uint128: bigint): DuckDBUUIDValue { + const bytes = new Uint8Array(16); + const dv = new DataView(bytes.buffer); + // Write the unsigned 128-bit integer to the buffer in big endian format. + dv.setBigUint64(0, BigInt.asUintN(64, uint128 >> BigInt(64)), false); + dv.setBigUint64(8, BigInt.asUintN(64, uint128), false); + return new DuckDBUUIDValue(bytes); + } +} diff --git a/ts/pkgs/duckdb-data-values/src/DuckDBValue.ts b/ts/pkgs/duckdb-data-values/src/DuckDBValue.ts new file mode 100644 index 0000000..4835b45 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/DuckDBValue.ts @@ -0,0 +1,9 @@ +import { SpecialDuckDBValue } from './SpecialDuckDBValue.js'; + +export type DuckDBValue = + | null + | boolean + | number + | string + | bigint // TODO: Should types requiring bigint be SpecialDBValues? 
+ | SpecialDuckDBValue; diff --git a/ts/pkgs/duckdb-data-values/src/Json.ts b/ts/pkgs/duckdb-data-values/src/Json.ts new file mode 100644 index 0000000..288d735 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/Json.ts @@ -0,0 +1,7 @@ +export type Json = + | null + | boolean + | number + | string + | Json[] + | { [key: string]: Json }; diff --git a/ts/pkgs/duckdb-data-values/src/SpecialDuckDBValue.ts b/ts/pkgs/duckdb-data-values/src/SpecialDuckDBValue.ts new file mode 100644 index 0000000..1130f1b --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/SpecialDuckDBValue.ts @@ -0,0 +1,15 @@ +import { DuckDBToStringOptions } from './DuckDBToStringOptions.js'; +import { Json } from './Json.js'; + +export abstract class SpecialDuckDBValue { + // The presence of this function can be used to identify SpecialDuckDBValue objects. + public abstract toDuckDBString( + toStringOptions?: DuckDBToStringOptions, + ): string; + + public toString(): string { + return this.toDuckDBString(); + } + + public abstract toJson(): Json; +} diff --git a/ts/pkgs/duckdb-data-values/src/conversion/dateTimeStringConversion.ts b/ts/pkgs/duckdb-data-values/src/conversion/dateTimeStringConversion.ts new file mode 100644 index 0000000..83d82ca --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/conversion/dateTimeStringConversion.ts @@ -0,0 +1,264 @@ +const DAYS_IN_400_YEARS = 146097; // (((365 * 4 + 1) * 25) - 1) * 4 + 1 +const MILLISECONDS_PER_DAY_NUM = 86400000; // 1000 * 60 * 60 * 24 + +const MICROSECONDS_PER_SECOND = BigInt(1000000); +const MICROSECONDS_PER_MILLISECOND = BigInt(1000); +const NANOSECONDS_PER_MICROSECOND = BigInt(1000); +const SECONDS_PER_MINUTE = BigInt(60); +const MINUTES_PER_HOUR = BigInt(60); +const MICROSECONDS_PER_DAY = BigInt(86400000000); // 24 * 60 * 60 * 1000000 + +const NEGATIVE_INFINITY_TIMESTAMP = BigInt('-9223372036854775807'); // -(2^63-1) +const POSITIVE_INFINITY_TIMESTAMP = BigInt('9223372036854775807'); // 2^63-1 + +export function 
getDuckDBDateStringFromYearMonthDay( + year: number, + month: number, + dayOfMonth: number, +): string { + const yearStr = String(Math.abs(year)).padStart(4, '0'); + const monthStr = String(month).padStart(2, '0'); + const dayOfMonthStr = String(dayOfMonth).padStart(2, '0'); + return `${yearStr}-${monthStr}-${dayOfMonthStr}${year < 0 ? ' (BC)' : ''}`; +} + +export function getDuckDBDateStringFromDays(days: number): string { + const absDays = Math.abs(days); + const sign = days < 0 ? -1 : 1; + // 400 years is the shortest interval with a fixed number of days. (Leap years and different length months can result + // in shorter intervals having different number of days.) By separating the number of 400 year intervals from the + // interval covered by the remaining days, we can guarantee that the date resulting from shifting the epoch by the + // remaining interval is within the valid range of the JS Date object. This allows us to use JS Date to calculate the + // year, month, and day of month for the date represented by the remaining interval, thus accounting for leap years + // and different length months. We can then safely add back the years from the 400 year intervals, because the month + // and day of month won't change when a date is shifted by a whole number of such intervals. 
+ const num400YearIntervals = Math.floor(absDays / DAYS_IN_400_YEARS); + const yearsFrom400YearIntervals = sign * num400YearIntervals * 400; + const absDaysFromRemainingInterval = absDays % DAYS_IN_400_YEARS; + const millisecondsFromRemainingInterval = + sign * absDaysFromRemainingInterval * MILLISECONDS_PER_DAY_NUM; + const date = new Date(millisecondsFromRemainingInterval); + let year = yearsFrom400YearIntervals + date.getUTCFullYear(); + if (year < 0) { + year--; // correct for non-existence of year zero + } + const month = date.getUTCMonth() + 1; // getUTCMonth returns zero-indexed month, but we want a one-index month for display + const dayOfMonth = date.getUTCDate(); // getUTCDate returns one-indexed day-of-month + return getDuckDBDateStringFromYearMonthDay(year, month, dayOfMonth); +} + +export function getTimezoneOffsetString( + timezoneOffsetInMinutes?: number, +): string | undefined { + if (timezoneOffsetInMinutes === undefined) { + return undefined; + } + const negative = timezoneOffsetInMinutes < 0; + const positiveMinutes = Math.abs(timezoneOffsetInMinutes); + const minutesPart = positiveMinutes % 60; + const hoursPart = Math.floor(positiveMinutes / 60); + const minutesStr = + minutesPart !== 0 ? String(minutesPart).padStart(2, '0') : ''; + const hoursStr = String(hoursPart).padStart(2, '0'); + return `${negative ? '-' : '+'}${hoursStr}${minutesStr ? `:${minutesStr}` : ''}`; +} + +export function getAbsoluteOffsetStringFromParts( + hoursPart: number, + minutesPart: number, + secondsPart: number, +): string { + const hoursStr = String(hoursPart).padStart(2, '0'); + const minutesStr = + minutesPart !== 0 || secondsPart !== 0 + ? String(minutesPart).padStart(2, '0') + : ''; + const secondsStr = + secondsPart !== 0 ? 
String(secondsPart).padStart(2, '0') : ''; + let result = hoursStr; + if (minutesStr) { + result += `:${minutesStr}`; + if (secondsStr) { + result += `:${secondsStr}`; + } + } + return result; +} + +export function getOffsetStringFromAbsoluteSeconds( + absoluteOffsetInSeconds: number, +): string { + const secondsPart = absoluteOffsetInSeconds % 60; + const minutes = Math.floor(absoluteOffsetInSeconds / 60); + const minutesPart = minutes % 60; + const hoursPart = Math.floor(minutes / 60); + return getAbsoluteOffsetStringFromParts(hoursPart, minutesPart, secondsPart); +} + +export function getOffsetStringFromSeconds(offsetInSeconds: number): string { + const negative = offsetInSeconds < 0; + const absoluteOffsetInSeconds = negative ? -offsetInSeconds : offsetInSeconds; + const absoluteString = getOffsetStringFromAbsoluteSeconds( + absoluteOffsetInSeconds, + ); + return `${negative ? '-' : '+'}${absoluteString}`; +} + +export function getDuckDBTimeStringFromParts( + hoursPart: bigint, + minutesPart: bigint, + secondsPart: bigint, + microsecondsPart: bigint, +): string { + const hoursStr = String(hoursPart).padStart(2, '0'); + const minutesStr = String(minutesPart).padStart(2, '0'); + const secondsStr = String(secondsPart).padStart(2, '0'); + const microsecondsStr = String(microsecondsPart) + .padStart(6, '0') + .replace(/0+$/, ''); + return `${hoursStr}:${minutesStr}:${secondsStr}${ + microsecondsStr.length > 0 ? 
`.${microsecondsStr}` : '' }`; +} + +export function getDuckDBTimeStringFromPositiveMicroseconds( + positiveMicroseconds: bigint, +): string { + const microsecondsPart = positiveMicroseconds % MICROSECONDS_PER_SECOND; + const seconds = positiveMicroseconds / MICROSECONDS_PER_SECOND; + const secondsPart = seconds % SECONDS_PER_MINUTE; + const minutes = seconds / SECONDS_PER_MINUTE; + const minutesPart = minutes % MINUTES_PER_HOUR; + const hoursPart = minutes / MINUTES_PER_HOUR; + return getDuckDBTimeStringFromParts( + hoursPart, + minutesPart, + secondsPart, + microsecondsPart, + ); +} + +export function getDuckDBTimeStringFromMicrosecondsInDay( + microsecondsInDay: bigint, +): string { + const positiveMicroseconds = + microsecondsInDay < 0 + ? microsecondsInDay + MICROSECONDS_PER_DAY + : microsecondsInDay; + return getDuckDBTimeStringFromPositiveMicroseconds(positiveMicroseconds); +} + +export function getDuckDBTimeStringFromMicroseconds( + microseconds: bigint, +): string { + const negative = microseconds < 0; + const positiveMicroseconds = negative ? -microseconds : microseconds; + const positiveString = + getDuckDBTimeStringFromPositiveMicroseconds(positiveMicroseconds); + return negative ? `-${positiveString}` : positiveString; +} + +export function getDuckDBTimestampStringFromDaysAndMicroseconds( + days: bigint, + microsecondsInDay: bigint, + timezonePart?: string, +): string { + // This conversion of BigInt to Number is safe, because the largest absolute value that `days` can have is 106751991, + // which fits without loss of precision in a JS Number. (106751991 = (2^63-1) / MICROSECONDS_PER_DAY) + const dateStr = getDuckDBDateStringFromDays(Number(days)); + const timeStr = getDuckDBTimeStringFromMicrosecondsInDay(microsecondsInDay); + return `${dateStr} ${timeStr}${timezonePart ?? 
''}`; +} + +export function getDuckDBTimestampStringFromMicroseconds( + microseconds: bigint, + timezoneOffsetInMinutes?: number, +): string { + // Note that -infinity and infinity are only representable in TIMESTAMP (and TIMESTAMPTZ), not the other timestamp + // variants. This is by-design and matches DuckDB. + if (microseconds === NEGATIVE_INFINITY_TIMESTAMP) { + return '-infinity'; + } + if (microseconds === POSITIVE_INFINITY_TIMESTAMP) { + return 'infinity'; + } + const offsetMicroseconds = + timezoneOffsetInMinutes !== undefined + ? microseconds + + BigInt(timezoneOffsetInMinutes) * + MICROSECONDS_PER_SECOND * + SECONDS_PER_MINUTE + : microseconds; + let days = offsetMicroseconds / MICROSECONDS_PER_DAY; + let microsecondsPart = offsetMicroseconds % MICROSECONDS_PER_DAY; + if (microsecondsPart < 0) { + days--; + microsecondsPart += MICROSECONDS_PER_DAY; + } + return getDuckDBTimestampStringFromDaysAndMicroseconds( + days, + microsecondsPart, + getTimezoneOffsetString(timezoneOffsetInMinutes), + ); +} + +export function getDuckDBTimestampStringFromSeconds(seconds: bigint): string { + return getDuckDBTimestampStringFromMicroseconds( + seconds * MICROSECONDS_PER_SECOND, + ); +} + +export function getDuckDBTimestampStringFromMilliseconds( + milliseconds: bigint, +): string { + return getDuckDBTimestampStringFromMicroseconds( + milliseconds * MICROSECONDS_PER_MILLISECOND, + ); +} + +export function getDuckDBTimestampStringFromNanoseconds( + nanoseconds: bigint, +): string { + // Note that this division causes loss of precision. This matches the behavior of the DuckDB. It's important that this + // precision loss happen before the negative correction in getTimestampStringFromMicroseconds, otherwise off-by-one + // errors can occur. + return getDuckDBTimestampStringFromMicroseconds( + nanoseconds / NANOSECONDS_PER_MICROSECOND, + ); +} + +// Assumes baseUnit can be pluralized by adding an 's'. 
+function numberAndUnit(value: number, baseUnit: string): string { + return `${value} ${baseUnit}${value !== 1 ? 's' : ''}`; +} + +export function getDuckDBIntervalString( + months: number, + days: number, + microseconds: bigint, +): string { + const parts: string[] = []; + if (months !== 0) { + const sign = months < 0 ? -1 : 1; + const absMonths = Math.abs(months); + const absYears = Math.floor(absMonths / 12); + const years = sign * absYears; + const extraMonths = sign * (absMonths - absYears * 12); + if (years !== 0) { + parts.push(numberAndUnit(years, 'year')); + } + if (extraMonths !== 0) { + parts.push(numberAndUnit(extraMonths, 'month')); + } + } + if (days !== 0) { + parts.push(numberAndUnit(days, 'day')); + } + if (microseconds !== BigInt(0)) { + parts.push(getDuckDBTimeStringFromMicroseconds(microseconds)); + } + if (parts.length > 0) { + return parts.join(' '); + } + return '00:00:00'; +} diff --git a/ts/pkgs/duckdb-data-values/src/conversion/displayStringForDuckDBValue.ts b/ts/pkgs/duckdb-data-values/src/conversion/displayStringForDuckDBValue.ts new file mode 100644 index 0000000..1b8a76e --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/conversion/displayStringForDuckDBValue.ts @@ -0,0 +1,11 @@ +import { DuckDBValue } from '../DuckDBValue.js'; + +export function displayStringForDuckDBValue(value: DuckDBValue): string { + if (value == null) { + return 'NULL'; + } + if (typeof value === 'string') { + return `'${value.replace(`'`, `''`)}'`; + } + return String(value); +} diff --git a/ts/pkgs/duckdb-data-values/src/conversion/getVarIntFromBytes.ts b/ts/pkgs/duckdb-data-values/src/conversion/getVarIntFromBytes.ts new file mode 100644 index 0000000..a3ca01e --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/conversion/getVarIntFromBytes.ts @@ -0,0 +1,34 @@ +/** + * Returns the JS bigint value represented by the byte array a VARINT in DuckDB's internal format. 
+ * + * DuckDB stores VARINTs as an array of bytes consisting of a three-byte header followed by a variable number of bytes + * (at least one). The header specifies the number of bytes after the header, and whether the number is positive or + * negative. The bytes after the header specify the absolute value of the number, in big endian format. + * + * The sign of the number is determined by the MSB of the header, which is 1 for positive and 0 for negative. Negative + * numbers also have all bytes of both the header and value inverted. (For negative numbers, the MSB is 0 after this + * inversion. Put another way: the MSB of the header is always 1, but it's inverted for negative numbers.) + */ +export function getVarIntFromBytes(bytes: Uint8Array): bigint { + const firstByte = bytes[0]; + const positive = (firstByte & 0x80) > 0; + const uint64Mask = positive ? 0n : 0xffffffffffffffffn; + const uint8Mask = positive ? 0 : 0xff; + const dv = new DataView( + bytes.buffer, + bytes.byteOffset + 3, + bytes.byteLength - 3, + ); + const lastUint64Offset = dv.byteLength - 8; + let offset = 0; + let result = 0n; + while (offset <= lastUint64Offset) { + result = (result << 64n) | (dv.getBigUint64(offset) ^ uint64Mask); + offset += 8; + } + while (offset < dv.byteLength) { + result = (result << 8n) | BigInt(dv.getUint8(offset) ^ uint8Mask); + offset += 1; + } + return positive ? 
result : -result; +} diff --git a/ts/pkgs/duckdb-data-values/src/conversion/hexFromBlob.ts b/ts/pkgs/duckdb-data-values/src/conversion/hexFromBlob.ts new file mode 100644 index 0000000..0844335 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/conversion/hexFromBlob.ts @@ -0,0 +1,20 @@ +export function hexFromBlob( + blob: Uint8Array, + start: number | undefined, + end: number | undefined, +): string { + if (start === undefined) { + start = 0; + } + if (end === undefined) { + end = blob.length; + } + let hex = ''; + + for (let i = start; i < end; i++) { + const byte = blob[i]; + // Ensure each byte is 2 hex characters + hex += (byte < 16 ? '0' : '') + byte.toString(16); + } + return hex; +} diff --git a/ts/pkgs/duckdb-data-values/src/conversion/jsonFromDuckDBValue.ts b/ts/pkgs/duckdb-data-values/src/conversion/jsonFromDuckDBValue.ts new file mode 100644 index 0000000..24c4451 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/conversion/jsonFromDuckDBValue.ts @@ -0,0 +1,16 @@ +import { DuckDBValue } from '../DuckDBValue.js'; +import { Json } from '../Json.js'; +import { SpecialDuckDBValue } from '../SpecialDuckDBValue.js'; + +export function jsonFromDuckDBValue(value: DuckDBValue): Json { + if (value === null) { + return null; + } + if (typeof value === 'bigint') { + return String(value); + } + if (value instanceof SpecialDuckDBValue) { + return value.toJson(); + } + return value; +} diff --git a/ts/pkgs/duckdb-data-values/src/conversion/stringFromBlob.ts b/ts/pkgs/duckdb-data-values/src/conversion/stringFromBlob.ts new file mode 100644 index 0000000..62a79b4 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/conversion/stringFromBlob.ts @@ -0,0 +1,17 @@ +/** Matches BLOB-to-VARCHAR conversion behavior of DuckDB. 
*/ +export function stringFromBlob(bytes: Uint8Array): string { + let result = ''; + for (const byte of bytes) { + if ( + byte <= 0x1f || + byte === 0x22 /* double quote */ || + byte === 0x27 /* single quote */ || + byte >= 0x7f + ) { + result += `\\x${byte.toString(16).toUpperCase().padStart(2, '0')}`; + } else { + result += String.fromCharCode(byte); + } + } + return result; +} diff --git a/ts/pkgs/duckdb-data-values/src/conversion/stringFromDecimal.ts b/ts/pkgs/duckdb-data-values/src/conversion/stringFromDecimal.ts new file mode 100644 index 0000000..5ef6496 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/conversion/stringFromDecimal.ts @@ -0,0 +1,129 @@ +/** + * Decimal string formatting. + * + * Supports a subset of the functionality of `BigInt.prototype.toLocaleString` for locale-specific formatting. + */ + +/* + * Locale formatting options for DuckDBDecimalValue. + * + * This is a subset of the options available for `BigInt.prototype.toLocaleString` + */ +export interface DuckDBDecimalFormatOptions { + useGrouping?: boolean; + minimumFractionDigits?: number; + maximumFractionDigits?: number; +} + +export interface LocaleOptions { + locales?: string | string[]; + options?: DuckDBDecimalFormatOptions; +} + +/* + * Get the decimal separator for a given locale. + * Somewhat expensive, so use getCachedDecimalSeparator if you need to call this multiple times. + */ + +function getDecimalSeparator(locales?: string | string[]): string { + const decimalSeparator = + new Intl.NumberFormat(locales, { useGrouping: false }) + .formatToParts(0.1) + .find((part) => part.type === 'decimal')?.value ?? '.'; + return decimalSeparator; +} + +/* + * Get the decimal separator for a given locale, and cache the result. 
+ */ +const cachedDecimalSeparators: { [localeKey: string]: string } = {}; + +function getCachedDecimalSeparator(locales?: string | string[]): string { + const cacheKey = JSON.stringify(locales); + if (cacheKey in cachedDecimalSeparators) { + return cachedDecimalSeparators[cacheKey]; + } + const decimalSeparator = getDecimalSeparator(locales); + cachedDecimalSeparators[cacheKey] = decimalSeparator; + return decimalSeparator; +} + +// Helper function to format whole part of a decimal value. +// Note that we explicitly omit 'minimumFractionDigits' and 'maximumFractionDigits' from the options +// passed to toLocaleString, because they are only relevant for the fractional part of the number, and +// would result in formatting the whole part as a real number, which we don't want. +function formatWholePart( + localeOptions: LocaleOptions | undefined, + val: bigint, +): string { + if (localeOptions) { + const { + minimumFractionDigits: _minFD, + maximumFractionDigits: _maxFD, + ...restOptions + } = localeOptions.options ?? {}; + return val.toLocaleString(localeOptions?.locales, restOptions); + } + return String(val); +} + +// Format the fractional part of a decimal value +// Note that we must handle minimumFractionDigits and maximumFractionDigits ourselves, and that +// we don't apply `useGrouping` because that only applies to the whole part of the number. +function formatFractionalPart( + localeOptions: LocaleOptions | undefined, + val: bigint, + scale: number, +): string { + const fractionalPartStr = String(val).padStart(scale, '0'); + if (!localeOptions) { + return fractionalPartStr; + } + const minFracDigits = localeOptions?.options?.minimumFractionDigits ?? 0; + const maxFracDigits = localeOptions?.options?.maximumFractionDigits ?? 20; + + return fractionalPartStr.padEnd(minFracDigits, '0').slice(0, maxFracDigits); +} + +/** + * Convert a scaled decimal value to a string, possibly using locale-specific formatting. 
+ */ +export function stringFromDecimal( + scaledValue: bigint, + scale: number, + localeOptions?: LocaleOptions, +): string { + // Decimal values are represented as integers that have been scaled up by a power of ten. The `scale` property of + // the type is the exponent of the scale factor. For a scale greater than zero, we need to separate out the + // fractional part by reversing this scaling. + if (scale > 0) { + const scaleFactor = BigInt(10) ** BigInt(scale); + const absScaledValue = scaledValue < 0 ? -scaledValue : scaledValue; + + const prefix = scaledValue < 0 ? '-' : ''; + + const wholePartNum = absScaledValue / scaleFactor; + const wholePartStr = formatWholePart(localeOptions, wholePartNum); + + const fractionalPartNum = absScaledValue % scaleFactor; + const fractionalPartStr = formatFractionalPart( + localeOptions, + fractionalPartNum, + scale, + ); + + const decimalSeparatorStr = localeOptions + ? getCachedDecimalSeparator(localeOptions.locales) + : '.'; + + return `${prefix}${wholePartStr}${decimalSeparatorStr}${fractionalPartStr}`; + } + // For a scale of zero, there is no fractional part, so a direct string conversion works. 
+ if (localeOptions) { + return scaledValue.toLocaleString( + localeOptions?.locales, + localeOptions?.options as BigIntToLocaleStringOptions | undefined, + ); + } + return String(scaledValue); +} diff --git a/ts/pkgs/duckdb-data-values/src/index.ts b/ts/pkgs/duckdb-data-values/src/index.ts new file mode 100644 index 0000000..af07eec --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/index.ts @@ -0,0 +1,25 @@ +export { getVarIntFromBytes } from './conversion/getVarIntFromBytes.js'; +export { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js'; +export { DuckDBArrayValue } from './DuckDBArrayValue.js'; +export { DuckDBBitValue } from './DuckDBBitValue.js'; +export { DuckDBBlobValue } from './DuckDBBlobValue.js'; +export { DuckDBDateValue } from './DuckDBDateValue.js'; +export { DuckDBDecimalValue } from './DuckDBDecimalValue.js'; +export { DuckDBIntervalValue } from './DuckDBIntervalValue.js'; +export { DuckDBListValue } from './DuckDBListValue.js'; +export { DuckDBMapEntry } from './DuckDBMapEntry.js'; +export { DuckDBMapValue } from './DuckDBMapValue.js'; +export { DuckDBStructEntry } from './DuckDBStructEntry.js'; +export { DuckDBStructValue } from './DuckDBStructValue.js'; +export { DuckDBTimestampMicrosecondsValue } from './DuckDBTimestampMicrosecondsValue.js'; +export { DuckDBTimestampMillisecondsValue } from './DuckDBTimestampMillisecondsValue.js'; +export { DuckDBTimestampNanosecondsValue } from './DuckDBTimestampNanosecondsValue.js'; +export { DuckDBTimestampSecondsValue } from './DuckDBTimestampSecondsValue.js'; +export { DuckDBTimestampTZValue } from './DuckDBTimestampTZValue.js'; +export { DuckDBTimeTZValue } from './DuckDBTimeTZValue.js'; +export { DuckDBTimeValue } from './DuckDBTimeValue.js'; +export { DuckDBToStringOptions } from './DuckDBToStringOptions.js'; +export { DuckDBUUIDValue } from './DuckDBUUIDValue.js'; +export { DuckDBValue } from './DuckDBValue.js'; +export { Json } from './Json.js'; +export { SpecialDuckDBValue } from 
'./SpecialDuckDBValue.js'; diff --git a/ts/pkgs/duckdb-data-values/src/tsconfig.json b/ts/pkgs/duckdb-data-values/src/tsconfig.json new file mode 100644 index 0000000..f970179 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/src/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "../../../tsconfig.library.json", + "compilerOptions": { + "outDir": "../out" + } +} diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBArrayValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBArrayValue.test.ts new file mode 100644 index 0000000..d918da5 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBArrayValue.test.ts @@ -0,0 +1,49 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBArrayValue } from '../src/DuckDBArrayValue'; +import { DuckDBMapValue } from '../src/DuckDBMapValue'; + +suite('DuckDBArrayValue', () => { + test('should render an empty array to the correct string', () => { + expect(new DuckDBArrayValue([]).toString()).toStrictEqual('[]'); + }); + test('should render a single element array to the correct string', () => { + expect(new DuckDBArrayValue([123]).toString()).toStrictEqual('[123]'); + }); + test('should render a multi-element array to the correct string', () => { + expect( + new DuckDBArrayValue(['abc', null, true, '']).toString(), + ).toStrictEqual(`['abc', NULL, true, '']`); + }); + test('should render an array with nested arrays to the correct string', () => { + expect( + new DuckDBArrayValue([ + new DuckDBArrayValue([]), + null, + new DuckDBArrayValue([123, null, 'xyz']), + ]).toString(), + ).toStrictEqual(`[[], NULL, [123, NULL, 'xyz']]`); + }); + test('toJson array with basic values', () => { + expect(new DuckDBArrayValue([123, 'abc', null]).toJson()).toStrictEqual([ + 123, + 'abc', + null, + ]); + }); + test('toJson array with complex values', () => { + expect( + new DuckDBArrayValue([ + new DuckDBMapValue([ + { key: 'foo', value: 123 }, + { key: 'bar', value: 'abc' }, + ]), + new DuckDBArrayValue([123, null, 'xyz']), + null, + 
]).toJson(), + ).toStrictEqual([ + { "'foo'": 123, "'bar'": 'abc' }, + [123, null, 'xyz'], + null, + ]); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBBitValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBBitValue.test.ts new file mode 100644 index 0000000..9d18aec --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBBitValue.test.ts @@ -0,0 +1,33 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBBitValue } from '../src/DuckDBBitValue'; + +suite('DuckDBBitValue', () => { + test('should render an empty byte array to the correct string', () => { + expect(new DuckDBBitValue(new Uint8Array([])).toString()).toStrictEqual(''); + }); + test('should render bit string with no padding to the correct string', () => { + expect( + new DuckDBBitValue(new Uint8Array([0x00, 0xf1, 0xe2, 0xd3])).toString(), + ).toStrictEqual('111100011110001011010011'); + }); + test('should render bit string with padding to the correct string', () => { + expect( + new DuckDBBitValue(new Uint8Array([0x03, 0xf1, 0xe2, 0xd3])).toString(), + ).toStrictEqual('100011110001011010011'); + }); + test('should round-trip bit string with no padding', () => { + expect( + DuckDBBitValue.fromString('111100011110001011010011').toString(), + ).toStrictEqual('111100011110001011010011'); + }); + test('should round-trip bit string with padding', () => { + expect( + DuckDBBitValue.fromString('100011110001011010011').toString(), + ).toStrictEqual('100011110001011010011'); + }); + test('toJson', () => { + expect( + DuckDBBitValue.fromString('100011110001011010011').toJson(), + ).toStrictEqual('100011110001011010011'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBBlobValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBBlobValue.test.ts new file mode 100644 index 0000000..5c983ef --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBBlobValue.test.ts @@ -0,0 +1,92 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBBlobValue } from 
'../src/DuckDBBlobValue'; + +suite('DuckDBBlobValue', () => { + test('should render an empty byte array to the correct string', () => { + expect(new DuckDBBlobValue(new Uint8Array([])).toString()).toStrictEqual( + '', + ); + }); + test('should render a byte array to the correct string', () => { + expect( + new DuckDBBlobValue( + new Uint8Array([0x41, 0x42, 0x43, 0x31, 0x32, 0x33]), + ).toString(), + ).toStrictEqual('ABC123'); + }); + test('should render a byte array containing single-digit non-printables to the correct string', () => { + expect( + new DuckDBBlobValue( + new Uint8Array([ + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, + 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + ]), + ).toString(), + ).toStrictEqual( + '\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F', + ); + }); + test('should render a byte array containing double-digit non-printables to the correct string', () => { + expect( + new DuckDBBlobValue( + new Uint8Array([ + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, + 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, + ]), + ).toString(), + ).toStrictEqual( + '\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F', + ); + }); + test('should render a byte array containing min printables (including single and double quotes) to the correct string', () => { + expect( + new DuckDBBlobValue( + new Uint8Array([ + 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, + 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, + ]), + ).toString(), + ).toStrictEqual(' !\\x22#$%&\\x27()*+,-./'); + }); + test('should render a byte array containing max printables (including backspace) to the correct string', () => { + expect( + new DuckDBBlobValue( + new Uint8Array([ + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, + 0x7b, 0x7c, 0x7d, 0x7e, 0x7f, + ]), + ).toString(), + ).toStrictEqual('pqrstuvwxyz{|}~\\x7F'); + }); + test('should render a byte array containing high non-printables to the 
correct string', () => { + expect( + new DuckDBBlobValue( + new Uint8Array([ + 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, + 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, + ]), + ).toString(), + ).toStrictEqual( + '\\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F', + ); + }); + test('should render a byte array containing max non-printables to the correct string', () => { + expect( + new DuckDBBlobValue( + new Uint8Array([ + 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, + 0xfb, 0xfc, 0xfd, 0xfe, 0xff, + ]), + ).toString(), + ).toStrictEqual( + '\\xF0\\xF1\\xF2\\xF3\\xF4\\xF5\\xF6\\xF7\\xF8\\xF9\\xFA\\xFB\\xFC\\xFD\\xFE\\xFF', + ); + }); + test('toJson', () => { + expect( + new DuckDBBlobValue( + new Uint8Array([0x41, 0x42, 0x43, 0x31, 0x32, 0x33]), + ).toJson(), + ).toStrictEqual('ABC123'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBDateValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBDateValue.test.ts new file mode 100644 index 0000000..af1edd5 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBDateValue.test.ts @@ -0,0 +1,18 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBDateValue } from '../src/DuckDBDateValue'; + +suite('DuckDBDateValue', () => { + test('should render a normal date value to the correct string', () => { + expect(new DuckDBDateValue(19643).toString()).toStrictEqual('2023-10-13'); + }); + test('should render the max date value to the correct string', () => { + expect(new DuckDBDateValue(2 ** 31 - 2).toString()).toStrictEqual( + '5881580-07-10', + ); + }); + test('should render the min date value to the correct string', () => { + expect(new DuckDBDateValue(-(2 ** 31) + 2).toString()).toStrictEqual( + '5877642-06-25 (BC)', + ); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBDecimalValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBDecimalValue.test.ts new file mode 100644 index 0000000..dca24a4 --- /dev/null +++ 
b/ts/pkgs/duckdb-data-values/test/DuckDBDecimalValue.test.ts @@ -0,0 +1,150 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBDecimalValue } from '../src/DuckDBDecimalValue'; + +suite('DuckDBDecimalValue', () => { + test('should render a scaled value of zero with a scale of zero to the correct string', () => { + expect(new DuckDBDecimalValue(0n, 0).toString()).toStrictEqual('0'); + }); + test('should render a small positive scaled value with a scale of zero to the correct string', () => { + expect(new DuckDBDecimalValue(7n, 0).toString()).toStrictEqual('7'); + }); + test('should render a small negative scaled value with a scale of zero to the correct string', () => { + expect(new DuckDBDecimalValue(-7n, 0).toString()).toStrictEqual('-7'); + }); + test('should render a large positive scaled value with a scale of zero to the correct string', () => { + expect( + new DuckDBDecimalValue(987654321098765432109876543210n, 0).toString(), + ).toStrictEqual('987654321098765432109876543210'); + }); + test('should render a large negative scaled value with a scale of zero to the correct string', () => { + expect( + new DuckDBDecimalValue(-987654321098765432109876543210n, 0).toString(), + ).toStrictEqual('-987654321098765432109876543210'); + }); + test('should render the maximum positive scaled value with a scale of zero to the correct string', () => { + expect( + new DuckDBDecimalValue( + 99999999999999999999999999999999999999n, + 0, + ).toString(), + ).toStrictEqual('99999999999999999999999999999999999999'); + }); + test('should render the maximum negative scaled value with a scale of zero to the correct string', () => { + expect( + new DuckDBDecimalValue( + -99999999999999999999999999999999999999n, + 0, + ).toString(), + ).toStrictEqual('-99999999999999999999999999999999999999'); + }); + + test('should render a scaled value of zero with a non-zero scale to the correct string', () => { + expect(new DuckDBDecimalValue(0n, 3).toString()).toStrictEqual('0.000'); + 
}); + test('should render a small positive scaled value with a non-zero scale to the correct string', () => { + expect(new DuckDBDecimalValue(12345n, 3).toString()).toStrictEqual( + '12.345', + ); + }); + test('should render a small negative scaled value with a non-zero scale to the correct string', () => { + expect(new DuckDBDecimalValue(-12345n, 3).toString()).toStrictEqual( + '-12.345', + ); + }); + test('should render a large positive scaled value with a non-zero scale to the correct string', () => { + expect( + new DuckDBDecimalValue(987654321098765432109876543210n, 10).toString(), + ).toStrictEqual('98765432109876543210.9876543210'); + }); + test('should render a large negative scaled value with a non-zero scale to the correct string', () => { + expect( + new DuckDBDecimalValue(-987654321098765432109876543210n, 10).toString(), + ).toStrictEqual('-98765432109876543210.9876543210'); + }); + test('should render leading and trailing zeros in the fractional part of value greater than one correctly', () => { + expect(new DuckDBDecimalValue(120034500n, 7).toString()).toStrictEqual( + '12.0034500', + ); + }); + test('should render leading and trailing zeros in the fractional part of value less than negative one correctly', () => { + expect(new DuckDBDecimalValue(-120034500n, 7).toString()).toStrictEqual( + '-12.0034500', + ); + }); + test('should render leading and trailing zeros in the fractional part of value between zero and one correctly', () => { + expect(new DuckDBDecimalValue(34500n, 7).toString()).toStrictEqual( + '0.0034500', + ); + }); + test('should render leading and trailing zeros in the fractional part of value between zero and negative one correctly', () => { + expect(new DuckDBDecimalValue(-34500n, 7).toString()).toStrictEqual( + '-0.0034500', + ); + }); + test('should render a small positive scaled value with a the maximum scale to the correct string', () => { + expect(new DuckDBDecimalValue(1n, 38).toString()).toStrictEqual( + 
'0.00000000000000000000000000000000000001', + ); + }); + test('should render a small negative scaled value with a the maximum scale to the correct string', () => { + expect(new DuckDBDecimalValue(-1n, 38).toString()).toStrictEqual( + '-0.00000000000000000000000000000000000001', + ); + }); + test('should render the maximum positive scaled value with a the maximum scale to the correct string', () => { + expect( + new DuckDBDecimalValue( + 99999999999999999999999999999999999999n, + 38, + ).toString(), + ).toStrictEqual('0.99999999999999999999999999999999999999'); + }); + test('should render the maximum negative scaled value with a the maximum scale to the correct string', () => { + expect( + new DuckDBDecimalValue( + -99999999999999999999999999999999999999n, + 38, + ).toString(), + ).toStrictEqual('-0.99999999999999999999999999999999999999'); + }); + + test('should render a locale string with grouping by default', () => { + expect( + new DuckDBDecimalValue(9876543210n, 0).toLocaleString(), + ).toStrictEqual('9,876,543,210'); + }); + + test('should render a European locale with . 
for grouping', () => { + expect( + new DuckDBDecimalValue(9876543210n, 0).toLocaleString('de-DE'), + ).toStrictEqual('9.876.543.210'); + }); + + test('should render a locale string with a specified minimum fraction digits', () => { + expect( + new DuckDBDecimalValue(12345n, 3).toLocaleString(undefined, { + minimumFractionDigits: 5, + }), + ).toStrictEqual('12.34500'); + }); + + test('should render a locale string with a specified maximum fraction digits', () => { + expect( + new DuckDBDecimalValue(12345n, 3).toLocaleString(undefined, { + maximumFractionDigits: 1, + }), + ).toStrictEqual('12.3'); + }); + + test('should render a decimal with a large whole part and fractional part in a European locale with the correct grouping and decimal', () => { + expect( + new DuckDBDecimalValue(98765432109876543210n, 10).toLocaleString( + 'de-DE', + { + useGrouping: true, + maximumFractionDigits: 5, + }, + ), + ).toStrictEqual('9.876.543.210,98765'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBIntervalValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBIntervalValue.test.ts new file mode 100644 index 0000000..a72927d --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBIntervalValue.test.ts @@ -0,0 +1,219 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBIntervalValue } from '../src/DuckDBIntervalValue'; + +const MICROS_IN_SEC = 1000000n; +const MICROS_IN_MIN = 60n * MICROS_IN_SEC; +const MICROS_IN_HR = 60n * MICROS_IN_MIN; +const MAX_INT32 = 2n ** 31n - 1n; + +suite('DuckDBIntervalValue', () => { + test('should render an empty interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, 0, 0n).toString()).toStrictEqual( + '00:00:00', + ); + }); + + test('should render a one month interval to the correct string', () => { + expect(new DuckDBIntervalValue(1, 0, 0n).toString()).toStrictEqual( + '1 month', + ); + }); + test('should render a negative one month interval to the correct string', () => { + expect(new 
DuckDBIntervalValue(-1, 0, 0n).toString()).toStrictEqual( + '-1 months', + ); + }); + test('should render a two month interval to the correct string', () => { + expect(new DuckDBIntervalValue(2, 0, 0n).toString()).toStrictEqual( + '2 months', + ); + }); + test('should render a negative two month interval to the correct string', () => { + expect(new DuckDBIntervalValue(-2, 0, 0n).toString()).toStrictEqual( + '-2 months', + ); + }); + test('should render a one year interval to the correct string', () => { + expect(new DuckDBIntervalValue(12, 0, 0n).toString()).toStrictEqual( + '1 year', + ); + }); + test('should render a negative one year interval to the correct string', () => { + expect(new DuckDBIntervalValue(-12, 0, 0n).toString()).toStrictEqual( + '-1 years', + ); + }); + test('should render a two year interval to the correct string', () => { + expect(new DuckDBIntervalValue(24, 0, 0n).toString()).toStrictEqual( + '2 years', + ); + }); + test('should render a negative two year interval to the correct string', () => { + expect(new DuckDBIntervalValue(-24, 0, 0n).toString()).toStrictEqual( + '-2 years', + ); + }); + test('should render a two year, three month interval to the correct string', () => { + expect(new DuckDBIntervalValue(24 + 3, 0, 0n).toString()).toStrictEqual( + '2 years 3 months', + ); + }); + test('should render a negative two year, three month interval to the correct string', () => { + expect(new DuckDBIntervalValue(-(24 + 3), 0, 0n).toString()).toStrictEqual( + '-2 years -3 months', + ); + }); + + test('should render a one day interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, 1, 0n).toString()).toStrictEqual('1 day'); + }); + test('should render a negative one day interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, -1, 0n).toString()).toStrictEqual( + '-1 days', + ); + }); + test('should render a two day interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, 2, 
0n).toString()).toStrictEqual( + '2 days', + ); + }); + test('should render a negative two day interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, -2, 0n).toString()).toStrictEqual( + '-2 days', + ); + }); + test('should render a 30 day interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, 30, 0n).toString()).toStrictEqual( + '30 days', + ); + }); + test('should render a 365 day interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, 365, 0n).toString()).toStrictEqual( + '365 days', + ); + }); + + test('should render a one microsecond interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, 0, 1n).toString()).toStrictEqual( + '00:00:00.000001', + ); + }); + test('should render a negative one microsecond interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, 0, -1n).toString()).toStrictEqual( + '-00:00:00.000001', + ); + }); + test('should render a large microsecond interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, 0, 987654n).toString()).toStrictEqual( + '00:00:00.987654', + ); + }); + test('should render a large negative microsecond interval to the correct string', () => { + expect(new DuckDBIntervalValue(0, 0, -987654n).toString()).toStrictEqual( + '-00:00:00.987654', + ); + }); + test('should render a one second interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, MICROS_IN_SEC).toString(), + ).toStrictEqual('00:00:01'); + }); + test('should render a negative one second interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, -MICROS_IN_SEC).toString(), + ).toStrictEqual('-00:00:01'); + }); + test('should render a 59 second interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, 59n * MICROS_IN_SEC).toString(), + ).toStrictEqual('00:00:59'); + }); + test('should render a -59 second interval to the correct string', () => { + expect( + new 
DuckDBIntervalValue(0, 0, -59n * MICROS_IN_SEC).toString(), + ).toStrictEqual('-00:00:59'); + }); + test('should render a one minute interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, MICROS_IN_MIN).toString(), + ).toStrictEqual('00:01:00'); + }); + test('should render a negative one minute interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, -MICROS_IN_MIN).toString(), + ).toStrictEqual('-00:01:00'); + }); + test('should render a 59 minute interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, 59n * MICROS_IN_MIN).toString(), + ).toStrictEqual('00:59:00'); + }); + test('should render a -59 minute interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, -59n * MICROS_IN_MIN).toString(), + ).toStrictEqual('-00:59:00'); + }); + test('should render a one hour interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, MICROS_IN_HR).toString(), + ).toStrictEqual('01:00:00'); + }); + test('should render a negative one hour interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, -MICROS_IN_HR).toString(), + ).toStrictEqual('-01:00:00'); + }); + test('should render a 24 hour interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, 24n * MICROS_IN_HR).toString(), + ).toStrictEqual('24:00:00'); + }); + test('should render a -24 hour interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, -24n * MICROS_IN_HR).toString(), + ).toStrictEqual('-24:00:00'); + }); + test('should render a very large interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, MAX_INT32 * MICROS_IN_HR).toString(), + ).toStrictEqual('2147483647:00:00'); + }); + test('should render a very large negative interval to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, -MAX_INT32 * MICROS_IN_HR).toString(), + ).toStrictEqual('-2147483647:00:00'); + 
}); + test('should render a very large interval with microseconds to the correct string', () => { + expect( + new DuckDBIntervalValue(0, 0, MAX_INT32 * MICROS_IN_HR + 1n).toString(), + ).toStrictEqual('2147483647:00:00.000001'); + }); + test('should render a very large negative interval with microseconds to the correct string', () => { + expect( + new DuckDBIntervalValue( + 0, + 0, + -(MAX_INT32 * MICROS_IN_HR + 1n), + ).toString(), + ).toStrictEqual('-2147483647:00:00.000001'); + }); + + test('should render a interval with multiple parts to the correct string', () => { + expect( + new DuckDBIntervalValue( + 24 + 3, + 5, + 7n * MICROS_IN_HR + 11n * MICROS_IN_MIN + 13n * MICROS_IN_SEC + 17n, + ).toString(), + ).toStrictEqual('2 years 3 months 5 days 07:11:13.000017'); + }); + test('should render a negative interval with multiple parts to the correct string', () => { + expect( + new DuckDBIntervalValue( + -(24 + 3), + -5, + -(7n * MICROS_IN_HR + 11n * MICROS_IN_MIN + 13n * MICROS_IN_SEC + 17n), + ).toString(), + ).toStrictEqual('-2 years -3 months -5 days -07:11:13.000017'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBListValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBListValue.test.ts new file mode 100644 index 0000000..34cad6e --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBListValue.test.ts @@ -0,0 +1,45 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBMapValue } from '../src'; +import { DuckDBListValue } from '../src/DuckDBListValue'; + +suite('DuckDBListValue', () => { + test('should render an empty list to the correct string', () => { + expect(new DuckDBListValue([]).toString()).toStrictEqual('[]'); + }); + test('should render a single element list to the correct string', () => { + expect(new DuckDBListValue([123]).toString()).toStrictEqual('[123]'); + }); + test('should render a multi-element list to the correct string', () => { + expect( + new DuckDBListValue(['abc', null, true, '']).toString(), + 
).toStrictEqual(`['abc', NULL, true, '']`); + }); + test('should render a list with nested lists to the correct string', () => { + expect( + new DuckDBListValue([ + new DuckDBListValue([]), + null, + new DuckDBListValue([123, null, 'xyz']), + ]).toString(), + ).toStrictEqual(`[[], NULL, [123, NULL, 'xyz']]`); + }); + test('toJson with complex values', () => { + expect( + new DuckDBListValue([ + new DuckDBMapValue([ + { key: 'foo', value: 123 }, + { key: 'bar', value: 'abc' }, + ]), + null, + new DuckDBMapValue([ + { key: 'foo', value: null }, + { key: 'bar', value: 'xyz' }, + ]), + ]).toJson(), + ).toStrictEqual([ + { "'foo'": 123, "'bar'": 'abc' }, + null, + { "'foo'": null, "'bar'": 'xyz' }, + ]); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBMapValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBMapValue.test.ts new file mode 100644 index 0000000..7d3cf90 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBMapValue.test.ts @@ -0,0 +1,77 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBListValue } from '../src/DuckDBListValue'; +import { DuckDBMapValue } from '../src/DuckDBMapValue'; + +suite('DuckDBMapValue', () => { + test('should render an empty map to the correct string', () => { + expect(new DuckDBMapValue([]).toString()).toStrictEqual('{}'); + }); + test('should render a single-entry map to the correct string', () => { + expect( + new DuckDBMapValue([{ key: 'x', value: 1 }]).toString(), + ).toStrictEqual(`{'x': 1}`); + }); + test('should render a multi-entry map to the correct string', () => { + expect( + new DuckDBMapValue([ + { key: 1, value: 42.001 }, + { key: 5, value: -32.1 }, + { key: 3, value: null }, + ]).toString(), + ).toStrictEqual(`{1: 42.001, 5: -32.1, 3: NULL}`); + }); + test('should render a multi-entry map with complex key types to the correct string', () => { + expect( + new DuckDBMapValue([ + { + key: new DuckDBListValue(['a', 'b']), + value: new DuckDBListValue([1.1, 2.2]), + }, + { + key: new 
DuckDBListValue(['c', 'd']), + value: new DuckDBListValue([3.3, 4.4]), + }, + ]).toString(), + ).toStrictEqual(`{['a', 'b']: [1.1, 2.2], ['c', 'd']: [3.3, 4.4]}`); + }); + test('should render a map with nested maps to the correct string', () => { + expect( + new DuckDBMapValue([ + { key: new DuckDBMapValue([]), value: new DuckDBMapValue([]) }, + { + key: new DuckDBMapValue([{ key: 'key1', value: 'value1' }]), + value: new DuckDBMapValue([ + { key: 1, value: 42.001 }, + { key: 5, value: -32.1 }, + { key: 3, value: null }, + ]), + }, + ]).toString(), + ).toStrictEqual( + `{{}: {}, {'key1': 'value1'}: {1: 42.001, 5: -32.1, 3: NULL}}`, + ); + }); + test('toJson basics', () => { + expect( + new DuckDBMapValue([ + { key: 'a', value: 1 }, + { key: 'b', value: 2 }, + { key: 'c', value: 3 }, + ]).toJson(), + ).toStrictEqual({ "'a'": 1, "'b'": 2, "'c'": 3 }); + }); + test('toJson with complex keys and values', () => { + expect( + new DuckDBMapValue([ + { + key: new DuckDBListValue(['a', 'b']), + value: new DuckDBListValue([1.1, 2.2]), + }, + { + key: new DuckDBListValue(['c', 'd']), + value: new DuckDBListValue([3.3, 4.4]), + }, + ]).toJson(), + ).toStrictEqual({ "['a', 'b']": [1.1, 2.2], "['c', 'd']": [3.3, 4.4] }); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBStructValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBStructValue.test.ts new file mode 100644 index 0000000..7b46aa8 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBStructValue.test.ts @@ -0,0 +1,110 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBMapValue } from '../src/DuckDBMapValue'; +import { DuckDBStructValue } from '../src/DuckDBStructValue'; + +suite('DuckDBStructValue', () => { + test('should render an empty struct to the correct string', () => { + expect(new DuckDBStructValue([]).toString()).toStrictEqual('{}'); + }); + test('should render a single-entry struct to the correct string', () => { + expect( + new DuckDBStructValue([{ key: 'x', value: 1 
}]).toString(), + ).toStrictEqual(`{'x': 1}`); + }); + test('should render a multi-entry struct to the correct string', () => { + expect( + new DuckDBStructValue([ + { key: 'x', value: 1 }, + { key: 'y', value: 2 }, + { key: 'z', value: 3 }, + ]).toString(), + ).toStrictEqual(`{'x': 1, 'y': 2, 'z': 3}`); + }); + test('should render a multi-entry struct with different value types to the correct string', () => { + expect( + new DuckDBStructValue([ + { key: 'key1', value: 'string' }, + { key: 'key2', value: 1 }, + { key: 'key3', value: 12.345 }, + { key: 'key0', value: null }, + ]).toString(), + ).toStrictEqual( + `{'key1': 'string', 'key2': 1, 'key3': 12.345, 'key0': NULL}`, + ); + }); + test('should render a multi-entry struct with empty keys to the correct string', () => { + expect( + new DuckDBStructValue([ + { key: '', value: 2 }, + { key: '', value: 1 }, + { key: '', value: 3 }, + ]).toString(), + ).toStrictEqual(`{'': 2, '': 1, '': 3}`); + }); + test('should render a struct with nested structs to the correct string', () => { + expect( + new DuckDBStructValue([ + { key: 'empty_struct', value: new DuckDBStructValue([]) }, + { + key: 'struct', + value: new DuckDBStructValue([ + { key: 'key1', value: 'string' }, + { key: 'key2', value: 1 }, + { key: 'key3', value: 12.345 }, + ]), + }, + ]).toString(), + ).toStrictEqual( + `{'empty_struct': {}, 'struct': {'key1': 'string', 'key2': 1, 'key3': 12.345}}`, + ); + }); + test('toJson with simple keys and values', () => { + expect( + new DuckDBStructValue([ + { key: 'x', value: 1 }, + { key: 'y', value: 2 }, + { key: 'z', value: 3 }, + ]).toJson(), + ).toStrictEqual({ "'x'": 1, "'y'": 2, "'z'": 3 }); + }); + test('toJson with nested struct values', () => { + expect( + new DuckDBStructValue([ + { key: 'empty_struct', value: new DuckDBStructValue([]) }, + { + key: 'struct', + value: new DuckDBStructValue([ + { key: 'key1', value: 'string' }, + { key: 'key2', value: 1 }, + { key: 'key3', value: 12.345 }, + ]), + }, + 
]).toJson(), + ).toStrictEqual({ + "'empty_struct'": {}, + "'struct'": { "'key1'": 'string', "'key2'": 1, "'key3'": 12.345 }, + }); + }); + test('toJson with nested complex values', () => { + expect( + new DuckDBStructValue([ + { key: 'empty_struct', value: new DuckDBStructValue([]) }, + { + key: 'struct', + value: new DuckDBStructValue([ + { + key: 'key1', + value: new DuckDBMapValue([ + { key: 'foo', value: null }, + { key: 'bar', value: 'xyz' }, + ]), + }, + ]), + }, + ]).toJson(), + ).toStrictEqual({ + "'empty_struct'": {}, + "'struct'": { "'key1'": { "'foo'": null, "'bar'": 'xyz' } }, + }); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBTimeTZValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBTimeTZValue.test.ts new file mode 100644 index 0000000..4356487 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBTimeTZValue.test.ts @@ -0,0 +1,60 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBTimeTZValue } from '../src/DuckDBTimeTZValue'; + +suite('DuckDBTimeTZValue', () => { + test('should render a normal time value with a positive offset to the correct string', () => { + expect( + new DuckDBTimeTZValue( + ((12n * 60n + 34n) * 60n + 56n) * 1000000n + 789012n, + (13 * 60 + 24) * 60 + 57, + ).toString(), + ).toStrictEqual('12:34:56.789012+13:24:57'); + }); + test('should render a normal time value with millisecond precision with an offset in minutes to the correct string', () => { + expect( + new DuckDBTimeTZValue( + ((12n * 60n + 34n) * 60n + 56n) * 1000000n + 789000n, + (13 * 60 + 24) * 60, + ).toString(), + ).toStrictEqual('12:34:56.789+13:24'); + }); + test('should render a normal time value with second precision with an offset in hours to the correct string', () => { + expect( + new DuckDBTimeTZValue( + ((12n * 60n + 34n) * 60n + 56n) * 1000000n, + (13 * 60 + 0) * 60, + ).toString(), + ).toStrictEqual('12:34:56+13'); + }); + test('should render a zero time value with a zero offset to the correct string', () => { + 
expect(new DuckDBTimeTZValue(0n, 0).toString()).toStrictEqual( + '00:00:00+00', + ); + }); + test('should render the max value to the correct string', () => { + expect( + new DuckDBTimeTZValue( + ((24n * 60n + 0n) * 60n + 0n) * 1000000n, + -((15 * 60 + 59) * 60 + 59), + ).toString(), + ).toStrictEqual('24:00:00-15:59:59'); + }); + test('should render the min value to the correct string', () => { + expect( + new DuckDBTimeTZValue(0n, (15 * 60 + 59) * 60 + 59).toString(), + ).toStrictEqual('00:00:00+15:59:59'); + }); + test('should construct the correct value from bits', () => { + expect(DuckDBTimeTZValue.fromBits(0n).toString()).toStrictEqual( + '00:00:00+15:59:59', + ); + }); + test('should construct the correct value from bits', () => { + expect( + DuckDBTimeTZValue.fromBits( + (BigInt.asUintN(40, ((24n * 60n + 0n) * 60n + 0n) * 1000000n) << 24n) | + BigInt.asUintN(24, (31n * 60n + 59n) * 60n + 58n), + ).toString(), + ).toStrictEqual('24:00:00-15:59:59'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBTimeValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBTimeValue.test.ts new file mode 100644 index 0000000..1e7fadd --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBTimeValue.test.ts @@ -0,0 +1,18 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBTimeValue } from '../src/DuckDBTimeValue'; + +suite('DuckDBTimeValue', () => { + test('should render a normal time value to the correct string', () => { + expect(new DuckDBTimeValue(45296000000n).toString()).toStrictEqual( + '12:34:56', + ); + }); + test('should render the max time value to the correct string', () => { + expect(new DuckDBTimeValue(86399999999n).toString()).toStrictEqual( + '23:59:59.999999', + ); + }); + test('should render the min time value to the correct string', () => { + expect(new DuckDBTimeValue(0n).toString()).toStrictEqual('00:00:00'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBTimestampMicrosecondsValue.test.ts 
b/ts/pkgs/duckdb-data-values/test/DuckDBTimestampMicrosecondsValue.test.ts new file mode 100644 index 0000000..ffde37a --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBTimestampMicrosecondsValue.test.ts @@ -0,0 +1,55 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBTimestampMicrosecondsValue } from '../src/DuckDBTimestampMicrosecondsValue'; + +suite('DuckDBTimestampMicrosecondsValue', () => { + test('should render a normal timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMicrosecondsValue(1612325106007800n).toString(), + ).toStrictEqual('2021-02-03 04:05:06.0078'); + }); + test('should render a zero timestamp value to the correct string', () => { + expect(new DuckDBTimestampMicrosecondsValue(0n).toString()).toStrictEqual( + '1970-01-01 00:00:00', + ); + }); + test('should render a negative timestamp value to the correct string', () => { + expect(new DuckDBTimestampMicrosecondsValue(-7n).toString()).toStrictEqual( + '1969-12-31 23:59:59.999993', + ); + }); + test('should render a large positive timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMicrosecondsValue(2353318271999999000n).toString(), + ).toStrictEqual('76543-09-08 23:59:59.999'); + }); + test('should render a large negative (AD) timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMicrosecondsValue(-58261244276543211n).toString(), + ).toStrictEqual('0123-10-11 01:02:03.456789'); + }); + test('should render a large negative (BC) timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMicrosecondsValue(-65992661876543211n).toString(), + ).toStrictEqual('0123-10-11 (BC) 01:02:03.456789'); + }); + test('should render the max timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMicrosecondsValue(9223372036854775806n).toString(), + ).toStrictEqual('294247-01-10 04:00:54.775806'); + }); + test('should render the min timestamp value to the correct string', 
() => { + expect( + new DuckDBTimestampMicrosecondsValue(-9223372022400000000n).toString(), + ).toStrictEqual('290309-12-22 (BC) 00:00:00'); + }); + test('should render the positive infinity timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMicrosecondsValue(9223372036854775807n).toString(), + ).toStrictEqual('infinity'); + }); + test('should render the negative infinity timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMicrosecondsValue(-9223372036854775807n).toString(), + ).toStrictEqual('-infinity'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBTimestampMillisecondsValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBTimestampMillisecondsValue.test.ts new file mode 100644 index 0000000..d9f8eda --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBTimestampMillisecondsValue.test.ts @@ -0,0 +1,45 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBTimestampMillisecondsValue } from '../src/DuckDBTimestampMillisecondsValue'; + +suite('DuckDBTimestampMillisecondsValue', () => { + test('should render a normal timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMillisecondsValue(1612325106007n).toString(), + ).toStrictEqual('2021-02-03 04:05:06.007'); + }); + test('should render a zero timestamp value to the correct string', () => { + expect(new DuckDBTimestampMillisecondsValue(0n).toString()).toStrictEqual( + '1970-01-01 00:00:00', + ); + }); + test('should render a negative timestamp value to the correct string', () => { + expect(new DuckDBTimestampMillisecondsValue(-7n).toString()).toStrictEqual( + '1969-12-31 23:59:59.993', + ); + }); + test('should render a large positive timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMillisecondsValue(2353318271999999n).toString(), + ).toStrictEqual('76543-09-08 23:59:59.999'); + }); + test('should render a large negative (AD) timestamp value to the correct string', () => { + 
expect( + new DuckDBTimestampMillisecondsValue(-58261244276544n).toString(), + ).toStrictEqual('0123-10-11 01:02:03.456'); + }); + test('should render a large negative (BC) timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMillisecondsValue(-65992661876544n).toString(), + ).toStrictEqual('0123-10-11 (BC) 01:02:03.456'); + }); + test('should render the max timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMillisecondsValue(9223372036854775n).toString(), + ).toStrictEqual('294247-01-10 04:00:54.775'); + }); + test('should render the min timestamp value to the correct string', () => { + expect( + new DuckDBTimestampMillisecondsValue(-9223372022400000n).toString(), + ).toStrictEqual('290309-12-22 (BC) 00:00:00'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBTimestampNanosecondsValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBTimestampNanosecondsValue.test.ts new file mode 100644 index 0000000..16c8923 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBTimestampNanosecondsValue.test.ts @@ -0,0 +1,40 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBTimestampNanosecondsValue } from '../src/DuckDBTimestampNanosecondsValue'; + +suite('DuckDBTimestampNanosecondsValue', () => { + test('should render a normal timestamp value to the correct string', () => { + expect( + new DuckDBTimestampNanosecondsValue(1612325106007891000n).toString(), + ).toStrictEqual('2021-02-03 04:05:06.007891'); + }); + test('should render a zero timestamp value to the correct string', () => { + expect(new DuckDBTimestampNanosecondsValue(0n).toString()).toStrictEqual( + '1970-01-01 00:00:00', + ); + }); + test('should render a negative timestamp value to the correct string', () => { + expect( + new DuckDBTimestampNanosecondsValue(-7000n).toString(), + ).toStrictEqual('1969-12-31 23:59:59.999993'); + }); + test('should render a large positive timestamp value to the correct string', () => { + expect( + 
new DuckDBTimestampNanosecondsValue(8857641599999123000n).toString(), + ).toStrictEqual('2250-09-08 23:59:59.999123'); + }); + test('should render a large negative timestamp value to the correct string', () => { + expect( + new DuckDBTimestampNanosecondsValue(-8495881076543211000n).toString(), + ).toStrictEqual('1700-10-11 01:02:03.456789'); + }); + test('should render the max timestamp value to the correct string', () => { + expect( + new DuckDBTimestampNanosecondsValue(9223372036854775806n).toString(), + ).toStrictEqual('2262-04-11 23:47:16.854775'); + }); + test('should render the min timestamp value to the correct string', () => { + expect( + new DuckDBTimestampNanosecondsValue(-9223372036854775806n).toString(), + ).toStrictEqual('1677-09-21 00:12:43.145225'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBTimestampSecondsValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBTimestampSecondsValue.test.ts new file mode 100644 index 0000000..09933a6 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBTimestampSecondsValue.test.ts @@ -0,0 +1,45 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBTimestampSecondsValue } from '../src/DuckDBTimestampSecondsValue'; + +suite('DuckDBTimestampSecondsValue', () => { + test('should render a normal timestamp value to the correct string', () => { + expect( + new DuckDBTimestampSecondsValue(1612325106n).toString(), + ).toStrictEqual('2021-02-03 04:05:06'); + }); + test('should render a zero timestamp value to the correct string', () => { + expect(new DuckDBTimestampSecondsValue(0n).toString()).toStrictEqual( + '1970-01-01 00:00:00', + ); + }); + test('should render a negative timestamp value to the correct string', () => { + expect(new DuckDBTimestampSecondsValue(-7n).toString()).toStrictEqual( + '1969-12-31 23:59:53', + ); + }); + test('should render a large positive timestamp value to the correct string', () => { + expect( + new DuckDBTimestampSecondsValue(2353318271999n).toString(), + 
).toStrictEqual('76543-09-08 23:59:59'); + }); + test('should render a large negative (AD) timestamp value to the correct string', () => { + expect( + new DuckDBTimestampSecondsValue(-58261244277n).toString(), + ).toStrictEqual('0123-10-11 01:02:03'); + }); + test('should render a large negative (BC) timestamp value to the correct string', () => { + expect( + new DuckDBTimestampSecondsValue(-65992661877n).toString(), + ).toStrictEqual('0123-10-11 (BC) 01:02:03'); + }); + test('should render the max timestamp value to the correct string', () => { + expect( + new DuckDBTimestampSecondsValue(9223372036854n).toString(), + ).toStrictEqual('294247-01-10 04:00:54'); + }); + test('should render the min timestamp value to the correct string', () => { + expect( + new DuckDBTimestampSecondsValue(-9223372022400n).toString(), + ).toStrictEqual('290309-12-22 (BC) 00:00:00'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBTimestampTZValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBTimestampTZValue.test.ts new file mode 100644 index 0000000..7071283 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBTimestampTZValue.test.ts @@ -0,0 +1,38 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBTimestampTZValue } from '../src/DuckDBTimestampTZValue'; + +suite('DuckDBTimestampTZValue', () => { + test('should render a timestamp tz value with no timezone offset to the correct string', () => { + expect( + new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString(), + ).toStrictEqual('2021-02-03 04:05:06.0078+00'); // defaults to UTC + }); + test('should render a timestamp tz value with a zero timezone offset to the correct string', () => { + expect( + new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({ + timezoneOffsetInMinutes: 0, + }), + ).toStrictEqual('2021-02-03 04:05:06.0078+00'); + }); + test('should render a timestamp tz value with a positive timezone offset to the correct string', () => { + expect( + new 
DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({ + timezoneOffsetInMinutes: 300, + }), + ).toStrictEqual('2021-02-03 09:05:06.0078+05'); + }); + test('should render a timestamp tz value with a negative timezone offset to the correct string', () => { + expect( + new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({ + timezoneOffsetInMinutes: -300, + }), + ).toStrictEqual('2021-02-02 23:05:06.0078-05'); + }); + test('should render a timestamp tz value with a timezone offset containing minutes to the correct string', () => { + expect( + new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({ + timezoneOffsetInMinutes: 330, + }), + ).toStrictEqual('2021-02-03 09:35:06.0078+05:30'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/DuckDBUUIDValue.test.ts b/ts/pkgs/duckdb-data-values/test/DuckDBUUIDValue.test.ts new file mode 100644 index 0000000..73e9665 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/DuckDBUUIDValue.test.ts @@ -0,0 +1,49 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBUUIDValue } from '../src/DuckDBUUIDValue'; + +suite('DuckDBUUIDValue', () => { + test('should render all zero bytes to the correct string', () => { + expect( + new DuckDBUUIDValue( + new Uint8Array([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, + ]), + ).toString(), + ).toStrictEqual('00000000-0000-0000-0000-000000000000'); + }); + test('should render all max bytes to the correct string', () => { + expect( + new DuckDBUUIDValue( + new Uint8Array([ + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, + ]), + ).toString(), + ).toStrictEqual('ffffffff-ffff-ffff-ffff-ffffffffffff'); + }); + test('should render arbitrary bytes to the correct string', () => { + expect( + new DuckDBUUIDValue( + new Uint8Array([ + 0xf0, 0xe1, 0xd2, 0xc3, 0xb4, 0xa5, 0x96, 0x87, 0xfe, 0xdc, 0xba, + 0x98, 0x76, 0x54, 0x32, 0x10, + ]), + ).toString(), + 
).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210'); + }); + test('should render a uint128 to the correct string', () => { + expect( + DuckDBUUIDValue.fromUint128( + 0xf0e1d2c3b4a59687fedcba9876543210n, + ).toString(), + ).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210'); + }); + test('should render a stored hugeint to the correct string', () => { + expect( + DuckDBUUIDValue.fromStoredHugeint( + 0x70e1d2c3b4a59687fedcba9876543210n, // note the flipped MSB + ).toString(), + ).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210'); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/conversion/getVarIntFromBytes.test.ts b/ts/pkgs/duckdb-data-values/test/conversion/getVarIntFromBytes.test.ts new file mode 100644 index 0000000..e805e62 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/conversion/getVarIntFromBytes.test.ts @@ -0,0 +1,61 @@ +import { expect, suite, test } from 'vitest'; +import { getVarIntFromBytes } from '../../src/conversion/getVarIntFromBytes'; + +suite('getVarIntFromBytes', () => { + test('should return correct value for varint representation of 0', () => { + expect( + getVarIntFromBytes(new Uint8Array([0x80, 0x00, 0x01, 0x00])), + ).toEqual(0n); + }); + test('should return correct value for varint representation of 1', () => { + expect( + getVarIntFromBytes(new Uint8Array([0x80, 0x00, 0x01, 0x01])), + ).toEqual(1n); + }); + test('should return correct value for varint representation of -1', () => { + expect( + getVarIntFromBytes(new Uint8Array([0x7f, 0xff, 0xfe, 0xfe])), + ).toEqual(-1n); + }); + test('should return correct value for max varint', () => { + // max VARINT = max IEEE double = 2^1023 * (1 + (1 − 2^−52)) ~= 1.7976931348623157 * 10^308 + // Note that the storage format supports much larger than this, but DuckDB specifies this max to support conversion to/from DOUBLE. 
+ expect( + getVarIntFromBytes( + // prettier-ignore + new Uint8Array([0x80, 0x00, 0x80, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + ]), + ), + ).toEqual( + 179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n, + ); + }); + test('should return correct value for min varint', () => { + // min VARINT = -max VARINT + expect( + getVarIntFromBytes( + // prettier-ignore + new Uint8Array([0x7F, 0xFF, 0x7F, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + ]), + ), + ).toEqual( + -179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n, + ); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/conversion/jsonFromDuckDBValue.test.ts b/ts/pkgs/duckdb-data-values/test/conversion/jsonFromDuckDBValue.test.ts new file mode 100644 index 0000000..86fb17f --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/conversion/jsonFromDuckDBValue.test.ts @@ -0,0 +1,26 @@ +import { expect, suite, test } from 'vitest'; +import { DuckDBListValue } from '../../src'; +import { jsonFromDuckDBValue } from '../../src/conversion/jsonFromDuckDBValue'; + +suite('jsonFromDuckDBValue', () => { + test('null', () => { + expect(jsonFromDuckDBValue(null)).toBe(null); + }); + test('boolean', () => { + expect(jsonFromDuckDBValue(true)).toBe(true); + }); + test('number', () => { + expect(jsonFromDuckDBValue(42)).toBe(42); + }); + test('bigint', () => { + expect(jsonFromDuckDBValue(12345n)).toBe('12345'); + }); + test('string', () => { + expect(jsonFromDuckDBValue('foo')).toBe('foo'); + }); + test('special', () => { + expect(jsonFromDuckDBValue(new DuckDBListValue([1, 2, 3]))).toStrictEqual([ + 1, 2, 3, + ]); + }); +}); diff --git a/ts/pkgs/duckdb-data-values/test/tsconfig.json b/ts/pkgs/duckdb-data-values/test/tsconfig.json new file mode 100644 index 0000000..8c93c98 --- /dev/null +++ b/ts/pkgs/duckdb-data-values/test/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "../../../tsconfig.test.json", + "references": [ + { "path": "../src" } + ] +} diff --git a/ts/pkgs/duckdb-ui-client/package.json 
b/ts/pkgs/duckdb-ui-client/package.json new file mode 100644 index 0000000..e2d4afe --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/package.json @@ -0,0 +1,41 @@ +{ + "name": "@duckdb/ui-client", + "version": "0.0.1", + "description": "Client for communicating with the DuckDB UI server", + "type": "module", + "main": "./out/index.js", + "module": "./out/index.js", + "types": "./out/index.d.ts", + "scripts": { + "preinstall": "pnpm build:src", + "build": "tsc -b src test", + "build:src": "tsc -b src", + "build:test": "tsc -b test", + "build:watch": "tsc -b src test --watch", + "check": "pnpm format:check && pnpm lint", + "clean": "rimraf out", + "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check", + "format:write": "prettier . --ignore-path $(find-up .prettierignore) --write", + "lint": "pnpm eslint src test", + "test": "vitest run", + "test:watch": "vitest" + }, + "dependencies": { + "@duckdb/data-reader": "workspace:*", + "@duckdb/data-types": "workspace:*", + "@duckdb/data-values": "workspace:*", + "core-js": "^3.41.0" + }, + "devDependencies": { + "@eslint/js": "^9.24.0", + "eslint": "^9.24.0", + "find-up-cli": "^6.0.0", + "msw": "^2.10.2", + "prettier": "^3.5.3", + "rimraf": "^6.0.1", + "typescript": "^5.8.3", + "typescript-eslint": "^8.30.1", + "vite": "^6.2.6", + "vitest": "^3.1.1" + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/client/classes/DuckDBUIClient.ts b/ts/pkgs/duckdb-ui-client/src/client/classes/DuckDBUIClient.ts new file mode 100644 index 0000000..152aa9c --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/client/classes/DuckDBUIClient.ts @@ -0,0 +1,72 @@ +import { sendDuckDBUIHttpRequest } from '../../http/functions/sendDuckDBUIHttpRequest.js'; +import { tokenizeResultFromBuffer } from '../../serialization/functions/tokenizeResultFromBuffer.js'; +import type { TokenizeResult } from '../../serialization/types/TokenizeResult.js'; +import { DuckDBUIClientConnection } from './DuckDBUIClientConnection.js'; + +export { 
DuckDBUIClientConnection }; +export type { TokenizeResult }; + +export class DuckDBUIClient { + private readonly eventSource: EventSource; + + private defaultConnection: DuckDBUIClientConnection | undefined; + + private constructor() { + this.eventSource = new EventSource('/localEvents'); + } + + public addOpenEventListener(listener: (event: Event) => void) { + this.eventSource.addEventListener('open', listener); + } + + public removeOpenEventListener(listener: (event: Event) => void) { + this.eventSource.removeEventListener('open', listener); + } + + public addErrorEventListener(listener: (event: Event) => void) { + this.eventSource.addEventListener('error', listener); + } + + public removeErrorEventListener(listener: (event: Event) => void) { + this.eventSource.removeEventListener('error', listener); + } + + public addMessageEventListener( + type: string, + listener: (event: MessageEvent) => void, + ) { + this.eventSource.addEventListener(type, listener); + } + + public removeMessageEventListener( + type: string, + listener: (event: MessageEvent) => void, + ) { + this.eventSource.removeEventListener(type, listener); + } + + public connect() { + return new DuckDBUIClientConnection(); + } + + public get connection(): DuckDBUIClientConnection { + if (!this.defaultConnection) { + this.defaultConnection = this.connect(); + } + return this.defaultConnection; + } + + public async tokenize(text: string): Promise { + const buffer = await sendDuckDBUIHttpRequest('/ddb/tokenize', text); + return tokenizeResultFromBuffer(buffer); + } + + private static singletonInstance: DuckDBUIClient; + + public static get singleton(): DuckDBUIClient { + if (!DuckDBUIClient.singletonInstance) { + DuckDBUIClient.singletonInstance = new DuckDBUIClient(); + } + return DuckDBUIClient.singletonInstance; + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/client/classes/DuckDBUIClientConnection.ts b/ts/pkgs/duckdb-ui-client/src/client/classes/DuckDBUIClientConnection.ts new file mode 100644 index 
0000000..ef68812 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/client/classes/DuckDBUIClientConnection.ts @@ -0,0 +1,70 @@ +import { DuckDBUIHttpRequestQueue } from '../../http/classes/DuckDBUIHttpRequestQueue.js'; +import { + DuckDBUIHttpRequestHeaderOptions, + makeDuckDBUIHttpRequestHeaders, +} from '../../http/functions/makeDuckDBUIHttpRequestHeaders.js'; +import { sendDuckDBUIHttpRequest } from '../../http/functions/sendDuckDBUIHttpRequest.js'; +import { randomString } from '../../util/functions/randomString.js'; +import { materializedRunResultFromQueueResult } from '../functions/materializedRunResultFromQueueResult.js'; +import { MaterializedRunResult } from '../types/MaterializedRunResult.js'; + +export class DuckDBUIClientConnection { + private readonly connectionName = `connection_${randomString()}`; + + private readonly requestQueue: DuckDBUIHttpRequestQueue = + new DuckDBUIHttpRequestQueue(); + + public async run( + sql: string, + args?: unknown[], + ): Promise { + const queueResult = await this.requestQueue.enqueueAndWait( + '/ddb/run', + sql, + this.makeHeaders({ parameters: args }), + ); + return materializedRunResultFromQueueResult(queueResult); + } + + public enqueue(sql: string, args?: unknown[]): string { + return this.requestQueue.enqueue( + '/ddb/run', + sql, + this.makeHeaders({ parameters: args }), + ); + } + + public cancel( + id: string, + errorMessage?: string, + failure?: (reason: unknown) => void, + ) { + this.requestQueue.cancel(id, errorMessage); + // If currently running, then interrupt it. + if (this.requestQueue.isCurrent(id)) { + // Don't await (but report any unexpected errors). Canceling should return synchronously. 
+ sendDuckDBUIHttpRequest('/ddb/interrupt', '', this.makeHeaders()).catch( + failure, + ); + } + return true; + } + + public async runQueued(id: string): Promise { + const queueResult = await this.requestQueue.enqueuedResult(id); + return materializedRunResultFromQueueResult(queueResult); + } + + public get queuedCount(): number { + return this.requestQueue.length; + } + + private makeHeaders( + options: Omit = {}, + ): Headers { + return makeDuckDBUIHttpRequestHeaders({ + ...options, + connectionName: this.connectionName, + }); + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/client/functions/materializedRunResultFromQueueResult.ts b/ts/pkgs/duckdb-ui-client/src/client/functions/materializedRunResultFromQueueResult.ts new file mode 100644 index 0000000..27c6796 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/client/functions/materializedRunResultFromQueueResult.ts @@ -0,0 +1,20 @@ +import { DuckDBDataReader } from '@duckdb/data-reader'; +import { DuckDBDataChunkIterator } from '../../data-chunk/classes/DuckDBDataChunkIterator.js'; +import { DuckDBUIHttpRequestQueueResult } from '../../http/classes/DuckDBUIHttpRequestQueue.js'; +import { deserializerFromBuffer } from '../../serialization/functions/deserializeFromBuffer.js'; +import { readQueryResult } from '../../serialization/functions/resultReaders.js'; +import { MaterializedRunResult } from '../types/MaterializedRunResult.js'; + +export async function materializedRunResultFromQueueResult( + queueResult: DuckDBUIHttpRequestQueueResult, +): Promise { + const { buffer, startTimeMs, endTimeMs } = queueResult; + const deserializer = deserializerFromBuffer(buffer); + const result = readQueryResult(deserializer); + if (!result.success) { + throw new Error(result.error); + } + const dataReader = new DuckDBDataReader(new DuckDBDataChunkIterator(result)); + await dataReader.readAll(); + return { data: dataReader, startTimeMs, endTimeMs }; +} diff --git a/ts/pkgs/duckdb-ui-client/src/client/types/MaterializedRunResult.ts 
b/ts/pkgs/duckdb-ui-client/src/client/types/MaterializedRunResult.ts new file mode 100644 index 0000000..b6caf37 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/client/types/MaterializedRunResult.ts @@ -0,0 +1,14 @@ +import { DuckDBData } from '@duckdb/data-reader'; + +export interface MaterializedRunResult { + /** + * Full result set. + * + * Includes column metadata, such as types. Supports duplicate column names without renaming. + * + * See the `DuckDBData` interface for details. + */ + data: DuckDBData; + startTimeMs: number; + endTimeMs: number; +} diff --git a/ts/pkgs/duckdb-ui-client/src/conversion/functions/dataViewReaders.ts b/ts/pkgs/duckdb-ui-client/src/conversion/functions/dataViewReaders.ts new file mode 100644 index 0000000..c1bcf16 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/conversion/functions/dataViewReaders.ts @@ -0,0 +1,58 @@ +// DuckDB's physical storage and binary serialization format is little endian. +const littleEndian = true; + +export function getInt8(dataView: DataView, offset: number): number { + return dataView.getInt8(offset); +} + +export function getUInt8(dataView: DataView, offset: number): number { + return dataView.getUint8(offset); +} + +export function getInt16(dataView: DataView, offset: number): number { + return dataView.getInt16(offset, littleEndian); +} + +export function getUInt16(dataView: DataView, offset: number): number { + return dataView.getUint16(offset, littleEndian); +} + +export function getInt32(dataView: DataView, offset: number): number { + return dataView.getInt32(offset, littleEndian); +} + +export function getUInt32(dataView: DataView, offset: number): number { + return dataView.getUint32(offset, littleEndian); +} + +export function getInt64(dataView: DataView, offset: number): bigint { + return dataView.getBigInt64(offset, littleEndian); +} + +export function getUInt64(dataView: DataView, offset: number): bigint { + return dataView.getBigUint64(offset, littleEndian); +} + +export function 
getFloat32(dataView: DataView, offset: number): number { + return dataView.getFloat32(offset, littleEndian); +} + +export function getFloat64(dataView: DataView, offset: number): number { + return dataView.getFloat64(offset, littleEndian); +} + +export function getInt128(dataView: DataView, offset: number): bigint { + const lower = getUInt64(dataView, offset); + const upper = getInt64(dataView, offset + 8); + return (upper << BigInt(64)) + lower; +} + +export function getUInt128(dataView: DataView, offset: number): bigint { + const lower = getUInt64(dataView, offset); + const upper = getUInt64(dataView, offset + 8); + return (BigInt.asUintN(64, upper) << BigInt(64)) | BigInt.asUintN(64, lower); +} + +export function getBoolean(dataView: DataView, offset: number): boolean { + return getUInt8(dataView, offset) !== 0; +} diff --git a/ts/pkgs/duckdb-ui-client/src/conversion/functions/duckDBTypeFromTypeIdAndInfo.ts b/ts/pkgs/duckdb-ui-client/src/conversion/functions/duckDBTypeFromTypeIdAndInfo.ts new file mode 100644 index 0000000..e41668c --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/conversion/functions/duckDBTypeFromTypeIdAndInfo.ts @@ -0,0 +1,178 @@ +import { + ARRAY, + DECIMAL, + DuckDBBigIntType, + DuckDBBitType, + DuckDBBlobType, + DuckDBBooleanType, + DuckDBDateType, + DuckDBDoubleType, + DuckDBFloatType, + DuckDBHugeIntType, + DuckDBIntegerType, + DuckDBIntervalType, + DuckDBSmallIntType, + DuckDBTimestampMillisecondsType, + DuckDBTimestampNanosecondsType, + DuckDBTimestampSecondsType, + DuckDBTimestampType, + DuckDBTimestampTZType, + DuckDBTimeType, + DuckDBTimeTZType, + DuckDBTinyIntType, + DuckDBType, + DuckDBUBigIntType, + DuckDBUHugeIntType, + DuckDBUIntegerType, + DuckDBUSmallIntType, + DuckDBUTinyIntType, + DuckDBUUIDType, + DuckDBVarCharType, + DuckDBVarIntType, + ENUM, + JSONType, + LIST, + MAP, + STRUCT, + UNION, +} from '@duckdb/data-types'; +import { LogicalTypeId } from '../../serialization/constants/LogicalTypeId.js'; +import { TypeIdAndInfo } 
from '../../serialization/types/TypeInfo.js'; +import { + getArrayTypeInfo, + getDecimalTypeInfo, + getEnumTypeInfo, + getListTypeInfo, + getMapTypeInfos, + getStructTypeInfo, +} from './typeInfoGetters.js'; + +/** Return the DuckDBType corresponding to the given TypeIdAndInfo. */ +export function duckDBTypeFromTypeIdAndInfo( + typeIdAndInfo: TypeIdAndInfo, +): DuckDBType { + const { id, typeInfo } = typeIdAndInfo; + const alias = typeInfo?.alias; + switch (id) { + case LogicalTypeId.BOOLEAN: + return DuckDBBooleanType.create(alias); + + case LogicalTypeId.TINYINT: + return DuckDBTinyIntType.create(alias); + case LogicalTypeId.SMALLINT: + return DuckDBSmallIntType.create(alias); + case LogicalTypeId.INTEGER: + return DuckDBIntegerType.create(alias); + case LogicalTypeId.BIGINT: + return DuckDBBigIntType.create(alias); + + case LogicalTypeId.DATE: + return DuckDBDateType.create(alias); + case LogicalTypeId.TIME: + return DuckDBTimeType.create(alias); + case LogicalTypeId.TIMESTAMP_SEC: + return DuckDBTimestampSecondsType.create(alias); + case LogicalTypeId.TIMESTAMP_MS: + return DuckDBTimestampMillisecondsType.create(alias); + case LogicalTypeId.TIMESTAMP: + return DuckDBTimestampType.create(alias); + case LogicalTypeId.TIMESTAMP_NS: + return DuckDBTimestampNanosecondsType.create(alias); + + case LogicalTypeId.DECIMAL: { + const { width, scale } = getDecimalTypeInfo(typeInfo); + return DECIMAL(width, scale, alias); + } + + case LogicalTypeId.FLOAT: + return DuckDBFloatType.create(alias); + case LogicalTypeId.DOUBLE: + return DuckDBDoubleType.create(alias); + + case LogicalTypeId.CHAR: + case LogicalTypeId.VARCHAR: + // Minor optimization for JSON type to avoid creating new type object. 
+ if (alias === JSONType.alias) { + return JSONType; + } + return DuckDBVarCharType.create(alias); + case LogicalTypeId.BLOB: + return DuckDBBlobType.create(alias); + + case LogicalTypeId.INTERVAL: + return DuckDBIntervalType.create(alias); + + case LogicalTypeId.UTINYINT: + return DuckDBUTinyIntType.create(alias); + case LogicalTypeId.USMALLINT: + return DuckDBUSmallIntType.create(alias); + case LogicalTypeId.UINTEGER: + return DuckDBUIntegerType.create(alias); + case LogicalTypeId.UBIGINT: + return DuckDBUBigIntType.create(alias); + + case LogicalTypeId.TIMESTAMP_TZ: + return DuckDBTimestampTZType.create(alias); + case LogicalTypeId.TIME_TZ: + return DuckDBTimeTZType.create(alias); + + case LogicalTypeId.BIT: + return DuckDBBitType.create(alias); + + case LogicalTypeId.VARINT: + return DuckDBVarIntType.create(alias); + + case LogicalTypeId.UHUGEINT: + return DuckDBUHugeIntType.create(alias); + case LogicalTypeId.HUGEINT: + return DuckDBHugeIntType.create(alias); + + case LogicalTypeId.UUID: + return DuckDBUUIDType.create(alias); + + case LogicalTypeId.STRUCT: { + const { childTypes } = getStructTypeInfo(typeInfo); + const entries: Record = {}; + for (const [key, valueTypeIdAndInfo] of childTypes) { + entries[key] = duckDBTypeFromTypeIdAndInfo(valueTypeIdAndInfo); + } + return STRUCT(entries, alias); + } + + case LogicalTypeId.LIST: { + const { childType } = getListTypeInfo(typeInfo); + return LIST(duckDBTypeFromTypeIdAndInfo(childType), alias); + } + + case LogicalTypeId.MAP: { + const { keyType, valueType } = getMapTypeInfos(typeInfo); + return MAP( + duckDBTypeFromTypeIdAndInfo(keyType), + duckDBTypeFromTypeIdAndInfo(valueType), + alias, + ); + } + + case LogicalTypeId.ENUM: { + const { values } = getEnumTypeInfo(typeInfo); + return ENUM(values, alias); + } + + case LogicalTypeId.UNION: { + const { childTypes } = getStructTypeInfo(typeInfo); + const members: Record = {}; + for (const [key, valueTypeIdAndInfo] of childTypes) { + members[key] = 
duckDBTypeFromTypeIdAndInfo(valueTypeIdAndInfo); + } + return UNION(members, alias); + } + + case LogicalTypeId.ARRAY: { + const { childType, size } = getArrayTypeInfo(typeInfo); + return ARRAY(duckDBTypeFromTypeIdAndInfo(childType), size, alias); + } + + default: + throw new Error(`type id not implemented: ${typeIdAndInfo.id}`); + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/conversion/functions/duckDBValueFromVector.ts b/ts/pkgs/duckdb-ui-client/src/conversion/functions/duckDBValueFromVector.ts new file mode 100644 index 0000000..90322c1 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/conversion/functions/duckDBValueFromVector.ts @@ -0,0 +1,271 @@ +import { + DuckDBArrayValue, + DuckDBBitValue, + DuckDBBlobValue, + DuckDBDateValue, + DuckDBDecimalValue, + DuckDBIntervalValue, + DuckDBListValue, + DuckDBMapValue, + DuckDBStructValue, + DuckDBTimeTZValue, + DuckDBTimeValue, + DuckDBTimestampMicrosecondsValue, + DuckDBTimestampMillisecondsValue, + DuckDBTimestampNanosecondsValue, + DuckDBTimestampSecondsValue, + DuckDBTimestampTZValue, + DuckDBUUIDValue, + DuckDBValue, + getVarIntFromBytes, +} from '@duckdb/data-values'; +import { LogicalTypeId } from '../../serialization/constants/LogicalTypeId.js'; +import { TypeIdAndInfo } from '../../serialization/types/TypeInfo.js'; +import { Vector } from '../../serialization/types/Vector.js'; +import { + getBoolean, + getFloat32, + getFloat64, + getInt128, + getInt16, + getInt32, + getInt64, + getInt8, + getUInt128, + getUInt16, + getUInt32, + getUInt64, + getUInt8, +} from './dataViewReaders.js'; +import { isRowValid } from './isRowValid.js'; +import { + getArrayTypeInfo, + getDecimalTypeInfo, + getEnumTypeInfo, + getListTypeInfo, + getMapTypeInfos, + getStructTypeInfo, +} from './typeInfoGetters.js'; +import { + getArrayVector, + getDataListVector, + getDataVector, + getListVector, + getStringVector, + getVectorListVector, +} from './vectorGetters.js'; + +/** Return the DuckDBValue at the given index in the given Vector 
with the type described by the given TypeIdAndInfo. */ +export function duckDBValueFromVector( + typeIdAndInfo: TypeIdAndInfo, + vector: Vector, + rowIndex: number, +): DuckDBValue { + if (!isRowValid(vector.validity, rowIndex)) return null; + + const { id, typeInfo } = typeIdAndInfo; + switch (id) { + case LogicalTypeId.BOOLEAN: + return getBoolean(getDataVector(vector).data, rowIndex); + + case LogicalTypeId.TINYINT: + return getInt8(getDataVector(vector).data, rowIndex); + case LogicalTypeId.SMALLINT: + return getInt16(getDataVector(vector).data, rowIndex * 2); + case LogicalTypeId.INTEGER: + return getInt32(getDataVector(vector).data, rowIndex * 4); + case LogicalTypeId.BIGINT: + return getInt64(getDataVector(vector).data, rowIndex * 8); + + case LogicalTypeId.DATE: + return new DuckDBDateValue( + getInt32(getDataVector(vector).data, rowIndex * 4), + ); + case LogicalTypeId.TIME: + return new DuckDBTimeValue( + getInt64(getDataVector(vector).data, rowIndex * 8), + ); + case LogicalTypeId.TIMESTAMP_SEC: + return new DuckDBTimestampSecondsValue( + getInt64(getDataVector(vector).data, rowIndex * 8), + ); + case LogicalTypeId.TIMESTAMP_MS: + return new DuckDBTimestampMillisecondsValue( + getInt64(getDataVector(vector).data, rowIndex * 8), + ); + case LogicalTypeId.TIMESTAMP: + return new DuckDBTimestampMicrosecondsValue( + getInt64(getDataVector(vector).data, rowIndex * 8), + ); + case LogicalTypeId.TIMESTAMP_NS: + return new DuckDBTimestampNanosecondsValue( + getInt64(getDataVector(vector).data, rowIndex * 8), + ); + + case LogicalTypeId.DECIMAL: { + const { width, scale } = getDecimalTypeInfo(typeInfo); + if (width <= 4) { + return new DuckDBDecimalValue( + BigInt(getInt16(getDataVector(vector).data, rowIndex * 2)), + scale, + ); + } else if (width <= 9) { + return new DuckDBDecimalValue( + BigInt(getInt32(getDataVector(vector).data, rowIndex * 4)), + scale, + ); + } else if (width <= 18) { + return new DuckDBDecimalValue( + getInt64(getDataVector(vector).data, 
rowIndex * 8), + scale, + ); + } else if (width <= 38) { + return new DuckDBDecimalValue( + getInt128(getDataVector(vector).data, rowIndex * 16), + scale, + ); + } + throw new Error(`unsupported decimal width: ${width}`); + } + + case LogicalTypeId.FLOAT: + return getFloat32(getDataVector(vector).data, rowIndex * 4); + case LogicalTypeId.DOUBLE: + return getFloat64(getDataVector(vector).data, rowIndex * 8); + + case LogicalTypeId.CHAR: + case LogicalTypeId.VARCHAR: + return getStringVector(vector).data[rowIndex]; + + case LogicalTypeId.BLOB: { + const dv = getDataListVector(vector).data[rowIndex]; + return new DuckDBBlobValue( + new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength), + ); + } + + case LogicalTypeId.INTERVAL: { + const { data } = getDataVector(vector); + const months = getInt32(data, rowIndex * 16 + 0); + const days = getInt32(data, rowIndex * 16 + 4); + const micros = getInt64(data, rowIndex * 16 + 8); + return new DuckDBIntervalValue(months, days, micros); + } + + case LogicalTypeId.UTINYINT: + return getUInt8(getDataVector(vector).data, rowIndex); + case LogicalTypeId.USMALLINT: + return getUInt16(getDataVector(vector).data, rowIndex * 2); + case LogicalTypeId.UINTEGER: + return getUInt32(getDataVector(vector).data, rowIndex * 4); + case LogicalTypeId.UBIGINT: + return getUInt64(getDataVector(vector).data, rowIndex * 8); + + case LogicalTypeId.TIMESTAMP_TZ: + return new DuckDBTimestampTZValue( + getInt64(getDataVector(vector).data, rowIndex * 8), + ); + case LogicalTypeId.TIME_TZ: + return DuckDBTimeTZValue.fromBits( + getUInt64(getDataVector(vector).data, rowIndex * 8), + ); + + case LogicalTypeId.BIT: { + const dv = getDataListVector(vector).data[rowIndex]; + return new DuckDBBitValue( + new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength), + ); + } + + case LogicalTypeId.VARINT: { + const dv = getDataListVector(vector).data[rowIndex]; + return getVarIntFromBytes( + new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength), + ); + } + + case 
LogicalTypeId.UHUGEINT: + return getUInt128(getDataVector(vector).data, rowIndex * 16); + case LogicalTypeId.HUGEINT: + return getInt128(getDataVector(vector).data, rowIndex * 16); + + case LogicalTypeId.UUID: + return DuckDBUUIDValue.fromStoredHugeint( + getInt128(getDataVector(vector).data, rowIndex * 16), + ); + + case LogicalTypeId.STRUCT: { + const { childTypes } = getStructTypeInfo(typeInfo); + const { data } = getVectorListVector(vector); + return new DuckDBStructValue( + Array.from({ length: childTypes.length }).map((_, i) => ({ + key: childTypes[i][0], + value: duckDBValueFromVector(childTypes[i][1], data[i], rowIndex), + })), + ); + } + + case LogicalTypeId.LIST: { + const { childType } = getListTypeInfo(typeInfo); + const { child, entries } = getListVector(vector); + const { offset, length } = entries[rowIndex]; + return new DuckDBListValue( + Array.from({ length }).map((_, i) => + duckDBValueFromVector(childType, child, offset + i), + ), + ); + } + + case LogicalTypeId.MAP: { + const { keyType, valueType } = getMapTypeInfos(typeInfo); + const { child, entries } = getListVector(vector); + const { offset, length } = entries[rowIndex]; + const { data } = getVectorListVector(child); + return new DuckDBMapValue( + Array.from({ length }).map((_, i) => ({ + key: duckDBValueFromVector(keyType, data[0], offset + i), + value: duckDBValueFromVector(valueType, data[1], offset + i), + })), + ); + } + + case LogicalTypeId.ENUM: { + const { values } = getEnumTypeInfo(typeInfo); + if (values.length < 256) { + return values[getUInt8(getDataVector(vector).data, rowIndex)]; + } else if (values.length < 65536) { + return values[getUInt16(getDataVector(vector).data, rowIndex * 2)]; + } else if (values.length < 4294967296) { + return values[getUInt32(getDataVector(vector).data, rowIndex * 4)]; + } + throw new Error(`unsupported enum size: values.length=${values.length}`); + } + + case LogicalTypeId.UNION: { + const { childTypes } = getStructTypeInfo(typeInfo); + const { data 
} = getVectorListVector(vector); + const tag = Number( + duckDBValueFromVector(childTypes[0][1], data[0], rowIndex), + ); + const altIndex = tag + 1; + return duckDBValueFromVector( + childTypes[altIndex][1], + data[altIndex], + rowIndex, + ); + } + + case LogicalTypeId.ARRAY: { + const { childType, size } = getArrayTypeInfo(typeInfo); + const { child } = getArrayVector(vector); + return new DuckDBArrayValue( + Array.from({ length: size }).map((_, i) => + duckDBValueFromVector(childType, child, rowIndex * size + i), + ), + ); + } + + default: + throw new Error(`type not implemented: ${id}`); + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/conversion/functions/isRowValid.ts b/ts/pkgs/duckdb-ui-client/src/conversion/functions/isRowValid.ts new file mode 100644 index 0000000..1bd2a0b --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/conversion/functions/isRowValid.ts @@ -0,0 +1,7 @@ +import { getUInt64 } from './dataViewReaders.js'; + +export function isRowValid(validity: DataView | null, row: number): boolean { + if (!validity) return true; + const bigint = getUInt64(validity, Math.floor(row / 64) * 8); + return (bigint & (1n << BigInt(row % 64))) !== 0n; +} diff --git a/ts/pkgs/duckdb-ui-client/src/conversion/functions/typeInfoGetters.ts b/ts/pkgs/duckdb-ui-client/src/conversion/functions/typeInfoGetters.ts new file mode 100644 index 0000000..3545ab1 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/conversion/functions/typeInfoGetters.ts @@ -0,0 +1,93 @@ +import { + ArrayTypeInfo, + DecimalTypeInfo, + EnumTypeInfo, + ListTypeInfo, + StructTypeInfo, + TypeIdAndInfo, + TypeInfo, +} from '../../serialization/types/TypeInfo.js'; + +export function getArrayTypeInfo( + typeInfo: TypeInfo | undefined, +): ArrayTypeInfo { + if (!typeInfo) { + throw new Error(`ARRAY has no typeInfo!`); + } + if (typeInfo.kind !== 'array') { + throw new Error(`ARRAY has unexpected typeInfo.kind: ${typeInfo.kind}`); + } + return typeInfo; +} + +export function getDecimalTypeInfo( + typeInfo: 
TypeInfo | undefined, +): DecimalTypeInfo { + if (!typeInfo) { + throw new Error(`DECIMAL has no typeInfo!`); + } + if (typeInfo.kind !== 'decimal') { + throw new Error(`DECIMAL has unexpected typeInfo.kind: ${typeInfo.kind}`); + } + return typeInfo; +} + +export function getEnumTypeInfo(typeInfo: TypeInfo | undefined): EnumTypeInfo { + if (!typeInfo) { + throw new Error(`ENUM has no typeInfo!`); + } + if (typeInfo.kind !== 'enum') { + throw new Error(`ENUM has unexpected typeInfo.kind: ${typeInfo.kind}`); + } + return typeInfo; +} + +export function getListTypeInfo(typeInfo: TypeInfo | undefined): ListTypeInfo { + if (!typeInfo) { + throw new Error(`LIST has no typeInfo!`); + } + if (typeInfo.kind !== 'list') { + throw new Error(`LIST has unexpected typeInfo.kind: ${typeInfo.kind}`); + } + return typeInfo; +} + +export function getStructTypeInfo( + typeInfo: TypeInfo | undefined, +): StructTypeInfo { + if (!typeInfo) { + throw new Error(`STRUCT has no typeInfo!`); + } + if (typeInfo.kind !== 'struct') { + throw new Error(`STRUCT has unexpected typeInfo.kind: ${typeInfo.kind}`); + } + return typeInfo; +} + +export function getMapTypeInfos(typeInfo: TypeInfo | undefined): { + keyType: TypeIdAndInfo; + valueType: TypeIdAndInfo; +} { + // MAP = LIST(STRUCT(key KEY_TYPE, value VALUE_TYPE)) + const { childType } = getListTypeInfo(typeInfo); + const { childTypes } = getStructTypeInfo(childType.typeInfo); + if (childTypes.length !== 2) { + throw new Error( + `MAP childType has unexpected childTypes length: ${childTypes.length}`, + ); + } + if (childTypes[0].length !== 2) { + throw new Error( + `MAP childType has unexpected childTypes[0] length: ${childTypes[0].length}`, + ); + } + if (childTypes[1].length !== 2) { + throw new Error( + `MAP childType has unexpected childTypes[1] length: ${childTypes[1].length}`, + ); + } + return { + keyType: childTypes[0][1], + valueType: childTypes[1][1], + }; +} diff --git 
a/ts/pkgs/duckdb-ui-client/src/conversion/functions/vectorGetters.ts b/ts/pkgs/duckdb-ui-client/src/conversion/functions/vectorGetters.ts new file mode 100644 index 0000000..612712a --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/conversion/functions/vectorGetters.ts @@ -0,0 +1,51 @@ +import { + ArrayVector, + DataListVector, + DataVector, + ListVector, + StringVector, + Vector, + VectorListVector, +} from '../../serialization/types/Vector.js'; + +export function getDataVector(vector: Vector): DataVector { + if (vector.kind !== 'data') { + throw new Error(`Unexpected vector.kind: ${vector.kind}`); + } + return vector; +} + +export function getStringVector(vector: Vector): StringVector { + if (vector.kind !== 'string') { + throw new Error(`Unexpected vector.kind: ${vector.kind}`); + } + return vector; +} + +export function getDataListVector(vector: Vector): DataListVector { + if (vector.kind !== 'datalist') { + throw new Error(`Unexpected vector.kind: ${vector.kind}`); + } + return vector; +} + +export function getVectorListVector(vector: Vector): VectorListVector { + if (vector.kind !== 'vectorlist') { + throw new Error(`Unexpected vector.kind: ${vector.kind}`); + } + return vector; +} + +export function getListVector(vector: Vector): ListVector { + if (vector.kind !== 'list') { + throw new Error(`Unexpected vector.kind: ${vector.kind}`); + } + return vector; +} + +export function getArrayVector(vector: Vector): ArrayVector { + if (vector.kind !== 'array') { + throw new Error(`Unexpected vector.kind: ${vector.kind}`); + } + return vector; +} diff --git a/ts/pkgs/duckdb-ui-client/src/data-chunk/classes/DuckDBDataChunk.ts b/ts/pkgs/duckdb-ui-client/src/data-chunk/classes/DuckDBDataChunk.ts new file mode 100644 index 0000000..7aeba23 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/data-chunk/classes/DuckDBDataChunk.ts @@ -0,0 +1,42 @@ +import { DuckDBData } from '@duckdb/data-reader'; +import { DuckDBType } from '@duckdb/data-types'; +import { DuckDBValue } from 
'@duckdb/data-values'; +import { duckDBTypeFromTypeIdAndInfo } from '../../conversion/functions/duckDBTypeFromTypeIdAndInfo.js'; +import { duckDBValueFromVector } from '../../conversion/functions/duckDBValueFromVector.js'; +import { ColumnNamesAndTypes } from '../../serialization/types/ColumnNamesAndTypes.js'; +import { DataChunk } from '../../serialization/types/DataChunk.js'; + +export class DuckDBDataChunk extends DuckDBData { + constructor( + private columnNamesAndTypes: ColumnNamesAndTypes, + private chunk: DataChunk, + ) { + super(); + } + + get columnCount() { + return this.columnNamesAndTypes.names.length; + } + + get rowCount() { + return this.chunk.rowCount; + } + + columnName(columnIndex: number): string { + return this.columnNamesAndTypes.names[columnIndex]; + } + + columnType(columnIndex: number): DuckDBType { + return duckDBTypeFromTypeIdAndInfo( + this.columnNamesAndTypes.types[columnIndex], + ); + } + + value(columnIndex: number, rowIndex: number): DuckDBValue { + return duckDBValueFromVector( + this.columnNamesAndTypes.types[columnIndex], + this.chunk.vectors[columnIndex], + rowIndex, + ); + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/data-chunk/classes/DuckDBDataChunkIterator.ts b/ts/pkgs/duckdb-ui-client/src/data-chunk/classes/DuckDBDataChunkIterator.ts new file mode 100644 index 0000000..5485048 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/data-chunk/classes/DuckDBDataChunkIterator.ts @@ -0,0 +1,51 @@ +import { + AsyncDuckDBDataBatchIterator, + DuckDBData, + DuckDBDataBatchIteratorResult, +} from '@duckdb/data-reader'; +import { SuccessQueryResult } from '../../serialization/types/QueryResult.js'; +import { DuckDBDataChunk } from './DuckDBDataChunk.js'; + +const ITERATOR_DONE: DuckDBDataBatchIteratorResult = Object.freeze({ + done: true, + value: undefined, +}); + +export class DuckDBDataChunkIterator implements AsyncDuckDBDataBatchIterator { + private result: SuccessQueryResult; + + private index: number; + + constructor(result: 
SuccessQueryResult) { + this.result = result; + this.index = 0; + } + + async next(): Promise { + if (this.index < this.result.chunks.length) { + return { + done: false, + value: new DuckDBDataChunk( + this.result.columnNamesAndTypes, + this.result.chunks[this.index++], + ), + }; + } + return ITERATOR_DONE; + } + + async return(value?: DuckDBData): Promise { + if (value) { + return { done: true, value }; + } + return ITERATOR_DONE; + } + + async throw(_e?: unknown): Promise { + return ITERATOR_DONE; + } + + [Symbol.asyncIterator](): AsyncDuckDBDataBatchIterator { + return this; + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/http/classes/DuckDBUIHttpRequestQueue.ts b/ts/pkgs/duckdb-ui-client/src/http/classes/DuckDBUIHttpRequestQueue.ts new file mode 100644 index 0000000..39346e5 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/http/classes/DuckDBUIHttpRequestQueue.ts @@ -0,0 +1,155 @@ +import 'core-js/actual/promise/with-resolvers.js'; +import { randomString } from '../../util/functions/randomString.js'; +import { sendDuckDBUIHttpRequest } from '../functions/sendDuckDBUIHttpRequest.js'; + +export interface DuckDBUIHttpRequestQueueResult { + buffer: ArrayBuffer; + startTimeMs: number; + endTimeMs: number; +} + +export interface DuckDBUIHttpRequestQueueEntry { + id: string; + url: string; + body: string; + headers?: Headers; + deferredResult: PromiseWithResolvers; + canceled?: boolean; +} + +export class DuckDBUIHttpRequestQueue { + /** + * Invariants: The first entry in the queue has been sent and we're waiting for its response. If the first entry is + * canceled, it remains in the queue until its response is received. If an entry other than the first is canceled, it + * remains in the queue until it comes to the front, at which point it is removed without being sent. 
+ */ + private entries: DuckDBUIHttpRequestQueueEntry[] = []; + + public get length() { + return this.entries.length; + } + + public enqueueAndWait( + url: string, + body: string, + headers?: Headers, + ): Promise { + return this.internalEnqueue(url, body, headers).deferredResult.promise; + } + + public enqueue(url: string, body: string, headers?: Headers): string { + return this.internalEnqueue(url, body, headers).id; + } + + public enqueuedResult(id: string): Promise { + const index = this.entries.findIndex((entry) => entry.id === id); + if (index < 0) { + throw new Error(`Invalid id: ${id}`); + } + return this.entries[index].deferredResult.promise; + } + + public cancel(id: string, errorMessage?: string) { + const index = this.entries.findIndex((entry) => entry.id === id); + if (index >= 0) { + // Mark the entry as canceled and reject its promise. If it was already sent, then we'll remove it from the queue + // when we get its response. If not, then we'll remove it when the (non-canceled) request before it completes. The + // caller may or may not arrange for the response to return early with an error, for example, by interrupting it; + // whether that happens doesn't change how the queue operates. + this.entries[index].canceled = true; + this.entries[index].deferredResult.reject( + new Error(errorMessage ?? 'query was canceled'), + ); + } else { + console.warn(`Couldn't cancel; no entry found for id: ${id}`); + } + } + + /** + * Returns true if the given entry id is the front of the queue. + * Note that it may be canceled. 
+ */ + public isCurrent(id: string): boolean { + return this.entries.length > 0 && this.entries[0].id === id; + } + + private internalEnqueue( + url: string, + body: string, + headers?: Headers, + ): DuckDBUIHttpRequestQueueEntry { + const id = randomString(); + const deferredResult = + Promise.withResolvers(); + const entry: DuckDBUIHttpRequestQueueEntry = { + id, + url, + body, + headers, + deferredResult, + }; + this.entries.push(entry); + // If the new entry is the first in our queue, then send it. + if (this.entries.length === 1) { + this.sendRequest(this.entries[0]); + } + return entry; + } + + private handleResponse( + entryId: string, + startTimeMs: number, + buffer: ArrayBuffer | undefined, + reason?: unknown, + ) { + if (this.entries.length === 0) { + console.warn( + `DuckDBUIHttpRequestQueue.handleResponse(entryId=${entryId}): queue unexpectedly empty`, + ); + return; + } + if (this.entries[0].id !== entryId) { + console.warn( + `DuckDBUIHttpRequestQueue.handleResponse(entryId=${entryId}): front of queue doesn't match response`, + ); + return; + } + // Remove the entry corresponding to this response. + const entry = this.entries.shift(); + // There should always be an entry because of the length check above, but we need to appease the compiler. + // If the entry was canceled, we've already rejected the promise, so there's nothing more to do. + if (entry && !entry.canceled) { + if (buffer) { + const endTimeMs = performance.now(); + // If the entry has a valid buffer, then resolve its promise to it. + entry.deferredResult.resolve({ buffer, startTimeMs, endTimeMs }); + } else { + // Otherwise, reject it with the provided reason. + entry.deferredResult.reject(reason); + } + } + // Send the next request (if there are any). + this.sendNextInQueue(); + } + + /** If there are any entries in our queue that aren't canceled, send the first one. */ + private sendNextInQueue() { + // Remove any unsent canceled entries from the front of the queue. 
+ while (this.entries.length > 0 && this.entries[0].canceled) { + this.entries.shift(); + } + // If there's an uncanceled entry left, send it. + if (this.entries.length > 0) { + this.sendRequest(this.entries[0]); + } + } + + private sendRequest(entry: DuckDBUIHttpRequestQueueEntry) { + const startTimeMs = performance.now(); + sendDuckDBUIHttpRequest(entry.url, entry.body, entry.headers) + .then((buffer) => this.handleResponse(entry.id, startTimeMs, buffer)) + .catch((reason) => + this.handleResponse(entry.id, startTimeMs, undefined, reason), + ); + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/http/functions/makeDuckDBUIHttpRequestHeaders.ts b/ts/pkgs/duckdb-ui-client/src/http/functions/makeDuckDBUIHttpRequestHeaders.ts new file mode 100644 index 0000000..53ced44 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/http/functions/makeDuckDBUIHttpRequestHeaders.ts @@ -0,0 +1,39 @@ +import { toBase64 } from '../../util/functions/toBase64.js'; + +export interface DuckDBUIHttpRequestHeaderOptions { + description?: string; + connectionName?: string; + databaseName?: string; + parameters?: unknown[]; +} + +export function makeDuckDBUIHttpRequestHeaders({ + description, + connectionName, + databaseName, + parameters, +}: DuckDBUIHttpRequestHeaderOptions): Headers { + const headers = new Headers(); + if (description) { + headers.append('X-DuckDB-UI-Request-Description', description); + } + if (connectionName) { + headers.append('X-DuckDB-UI-Connection-Name', connectionName); + } + if (databaseName) { + // base64 encode the value because it can contain characters invalid in an HTTP header + headers.append('X-DuckDB-UI-Database-Name', toBase64(databaseName)); + } + if (parameters) { + headers.append('X-DuckDB-UI-Parameter-Count', String(parameters.length)); + for (let i = 0; i < parameters.length; i++) { + // base64 encode the value because it can contain characters invalid in an HTTP header + // TODO: support non-string parameters? 
+ headers.append( + `X-DuckDB-UI-Parameter-Value-${i}`, + toBase64(String(parameters[i])), + ); + } + } + return headers; +} diff --git a/ts/pkgs/duckdb-ui-client/src/http/functions/sendDuckDBUIHttpRequest.ts b/ts/pkgs/duckdb-ui-client/src/http/functions/sendDuckDBUIHttpRequest.ts new file mode 100644 index 0000000..82ce598 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/http/functions/sendDuckDBUIHttpRequest.ts @@ -0,0 +1,13 @@ +export async function sendDuckDBUIHttpRequest( + url: string, + body: string, + headers?: Headers, +): Promise { + const response = await fetch(url, { + method: 'POST', + headers, + body, + }); + const buffer = await response.arrayBuffer(); + return buffer; +} diff --git a/ts/pkgs/duckdb-ui-client/src/index.ts b/ts/pkgs/duckdb-ui-client/src/index.ts new file mode 100644 index 0000000..50405e1 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/index.ts @@ -0,0 +1 @@ +export * from './client/classes/DuckDBUIClient.js'; diff --git a/ts/pkgs/duckdb-ui-client/src/serialization/classes/BinaryDeserializer.ts b/ts/pkgs/duckdb-ui-client/src/serialization/classes/BinaryDeserializer.ts new file mode 100644 index 0000000..35a376c --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/serialization/classes/BinaryDeserializer.ts @@ -0,0 +1,130 @@ +import { BinaryStreamReader } from './BinaryStreamReader.js'; + +export type Reader = (deserializer: BinaryDeserializer) => T; +export type ListReader = ( + deserializer: BinaryDeserializer, + index: number, +) => T; + +const decoder = new TextDecoder(); + +/** + * An implementation of a subset of DuckDB's BinaryDeserializer. 
+ * + * See: + * - https://github.com/duckdb/duckdb/blob/main/src/include/duckdb/common/serializer/binary_deserializer.hpp + * - https://github.com/duckdb/duckdb/blob/main/src/common/serializer/binary_deserializer.cpp + */ +export class BinaryDeserializer { + private reader: BinaryStreamReader; + + public constructor(reader: BinaryStreamReader) { + this.reader = reader; + } + + private peekFieldId() { + return this.reader.peekUint16(true); + } + + private consumeFieldId() { + this.reader.consume(2); + } + + private checkFieldId(possibleFieldId: number) { + const fieldId = this.peekFieldId(); + if (fieldId === possibleFieldId) { + this.consumeFieldId(); + return true; + } + return false; + } + + private expectFieldId(expectedFieldId: number) { + const fieldId = this.peekFieldId(); + if (fieldId === expectedFieldId) { + this.consumeFieldId(); + } else { + throw new Error( + `Expected field id ${expectedFieldId} but got ${fieldId} (offset=${this.reader.getOffset()})`, + ); + } + } + + public expectObjectEnd() { + this.expectFieldId(0xffff); + } + + public throwUnsupported() { + throw new Error(`unsupported type, offset=${this.reader.getOffset()}`); + } + + public readUint8() { + return this.reader.readUint8(); + } + + public readVarInt() { + let result = 0; + let byte = 0; + let shift = 0; + do { + byte = this.reader.readUint8(); + result |= (byte & 0x7f) << shift; + shift += 7; + } while (byte & 0x80); + return result; + } + + public readNullable(reader: Reader) { + const present = this.readUint8(); + if (present) { + return reader(this); + } + return null; + } + + public readData() { + const length = this.readVarInt(); + return this.reader.readData(length); + } + + public readString() { + const length = this.readVarInt(); + const dv = this.reader.readData(length); + return decoder.decode(dv); + } + + public readList(reader: ListReader) { + const count = this.readVarInt(); + const items: T[] = []; + for (let i = 0; i < count; i++) { + items.push(reader(this, i)); + } 
+ return items; + } + + public readPair( + firstReader: Reader, + secondReader: Reader, + ): [T, U] { + const first = this.readProperty(0, firstReader); + const second = this.readProperty(1, secondReader); + this.expectObjectEnd(); + return [first, second]; + } + + public readProperty(expectedFieldId: number, reader: Reader) { + this.expectFieldId(expectedFieldId); + return reader(this); + } + + public readPropertyWithDefault( + possibleFieldId: number, + reader: Reader, + defaultValue: T, + ): T { + if (this.checkFieldId(possibleFieldId)) { + return reader(this); + } + return defaultValue; + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/serialization/classes/BinaryStreamReader.ts b/ts/pkgs/duckdb-ui-client/src/serialization/classes/BinaryStreamReader.ts new file mode 100644 index 0000000..5a0fc56 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/serialization/classes/BinaryStreamReader.ts @@ -0,0 +1,48 @@ +/** + * Enables reading or peeking at values of a binary buffer. + * Subsequent reads start from the end of the previous one. 
+ */ +export class BinaryStreamReader { + private dv: DataView; + + private offset: number; + + public constructor(buffer: ArrayBuffer) { + this.dv = new DataView(buffer); + this.offset = 0; + } + + public getOffset() { + return this.offset; + } + + public peekUint8() { + return this.dv.getUint8(this.offset); + } + + public peekUint16(le: boolean) { + return this.dv.getUint16(this.offset, le); + } + + public consume(byteCount: number) { + this.offset += byteCount; + } + + private offsetBeforeConsume(byteCount: number) { + const offsetBefore = this.offset; + this.consume(byteCount); + return offsetBefore; + } + + public readUint8() { + return this.dv.getUint8(this.offsetBeforeConsume(1)); + } + + public readData(length: number) { + return new DataView( + this.dv.buffer, + this.offsetBeforeConsume(length), + length, + ); + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/serialization/constants/LogicalTypeId.ts b/ts/pkgs/duckdb-ui-client/src/serialization/constants/LogicalTypeId.ts new file mode 100644 index 0000000..e52d909 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/serialization/constants/LogicalTypeId.ts @@ -0,0 +1,42 @@ +/** + * Copy of DuckDB's LogicalTypeId. 
+ * + * See LogicalTypeId in https://github.com/duckdb/duckdb/blob/main/src/include/duckdb/common/types.hpp + */ +export const LogicalTypeId = { + BOOLEAN: 10, + TINYINT: 11, + SMALLINT: 12, + INTEGER: 13, + BIGINT: 14, + DATE: 15, + TIME: 16, + TIMESTAMP_SEC: 17, + TIMESTAMP_MS: 18, + TIMESTAMP: 19, + TIMESTAMP_NS: 20, + DECIMAL: 21, + FLOAT: 22, + DOUBLE: 23, + CHAR: 24, + VARCHAR: 25, + BLOB: 26, + INTERVAL: 27, + UTINYINT: 28, + USMALLINT: 29, + UINTEGER: 30, + UBIGINT: 31, + TIMESTAMP_TZ: 32, + TIME_TZ: 34, + BIT: 36, + VARINT: 39, + UHUGEINT: 49, + HUGEINT: 50, + UUID: 54, + STRUCT: 100, + LIST: 101, + MAP: 102, + ENUM: 104, + UNION: 107, + ARRAY: 108, +}; diff --git a/ts/pkgs/duckdb-ui-client/src/serialization/functions/basicReaders.ts b/ts/pkgs/duckdb-ui-client/src/serialization/functions/basicReaders.ts new file mode 100644 index 0000000..e7677c9 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/serialization/functions/basicReaders.ts @@ -0,0 +1,63 @@ +import { + BinaryDeserializer, + ListReader, + Reader, +} from '../classes/BinaryDeserializer.js'; + +export function readUnsupported(deserializer: BinaryDeserializer): void { + deserializer.throwUnsupported(); +} + +export function readNullable( + deserializer: BinaryDeserializer, + reader: Reader, +): T | null { + return deserializer.readNullable(reader); +} + +export function readUint8(deserializer: BinaryDeserializer): number { + return deserializer.readUint8(); +} + +export function readBoolean(deserializer: BinaryDeserializer): boolean { + return deserializer.readUint8() !== 0; +} + +export function readVarInt(deserializer: BinaryDeserializer): number { + return deserializer.readVarInt(); +} + +export function readVarIntList(deserializer: BinaryDeserializer): number[] { + return readList(deserializer, readVarInt); +} + +export function readData(deserializer: BinaryDeserializer): DataView { + return deserializer.readData(); +} + +export function readDataList(deserializer: BinaryDeserializer): DataView[] 
{ + return readList(deserializer, readData); +} + +export function readString(deserializer: BinaryDeserializer): string { + return deserializer.readString(); +} + +export function readList( + deserializer: BinaryDeserializer, + reader: ListReader, +): T[] { + return deserializer.readList(reader); +} + +export function readStringList(deserializer: BinaryDeserializer): string[] { + return readList(deserializer, readString); +} + +export function readPair( + deserializer: BinaryDeserializer, + firstReader: Reader, + secondReader: Reader, +): [T, U] { + return deserializer.readPair(firstReader, secondReader); +} diff --git a/ts/pkgs/duckdb-ui-client/src/serialization/functions/deserializeFromBuffer.ts b/ts/pkgs/duckdb-ui-client/src/serialization/functions/deserializeFromBuffer.ts new file mode 100644 index 0000000..0cac1ee --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/serialization/functions/deserializeFromBuffer.ts @@ -0,0 +1,10 @@ +import { BinaryDeserializer } from '../classes/BinaryDeserializer.js'; +import { BinaryStreamReader } from '../classes/BinaryStreamReader.js'; + +export function deserializerFromBuffer( + buffer: ArrayBuffer, +): BinaryDeserializer { + const streamReader = new BinaryStreamReader(buffer); + const deserializer = new BinaryDeserializer(streamReader); + return deserializer; +} diff --git a/ts/pkgs/duckdb-ui-client/src/serialization/functions/resultReaders.ts b/ts/pkgs/duckdb-ui-client/src/serialization/functions/resultReaders.ts new file mode 100644 index 0000000..2e8de2c --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/serialization/functions/resultReaders.ts @@ -0,0 +1,85 @@ +import { BinaryDeserializer } from '../classes/BinaryDeserializer.js'; +import { ColumnNamesAndTypes } from '../types/ColumnNamesAndTypes.js'; +import { DataChunk } from '../types/DataChunk.js'; +import { + ErrorQueryResult, + QueryResult, + SuccessQueryResult, +} from '../types/QueryResult.js'; +import { TokenizeResult } from '../types/TokenizeResult.js'; +import { 
import { BinaryDeserializer } from '../classes/BinaryDeserializer.js';
import { ColumnNamesAndTypes } from '../types/ColumnNamesAndTypes.js';
import { DataChunk } from '../types/DataChunk.js';
import {
  ErrorQueryResult,
  QueryResult,
  SuccessQueryResult,
} from '../types/QueryResult.js';
import { TokenizeResult } from '../types/TokenizeResult.js';
import { TypeIdAndInfo } from '../types/TypeInfo.js';
import {
  readBoolean,
  readList,
  readString,
  readStringList,
  readVarInt,
  readVarIntList,
} from './basicReaders.js';
import { readTypeList } from './typeReaders.js';
import { readVectorList } from './vectorReaders.js';

/** Reads a tokenize result: parallel varint lists of token offsets (field 100) and types (field 101). */
export function readTokenizeResult(
  deserializer: BinaryDeserializer,
): TokenizeResult {
  const offsets = deserializer.readProperty(100, readVarIntList);
  const types = deserializer.readProperty(101, readVarIntList);
  deserializer.expectObjectEnd();
  return { offsets, types };
}

/** Reads column names (field 100) and their serialized types (field 101). */
export function readColumnNamesAndTypes(
  deserializer: BinaryDeserializer,
): ColumnNamesAndTypes {
  const names = deserializer.readProperty(100, readStringList);
  const types = deserializer.readProperty(101, readTypeList);
  deserializer.expectObjectEnd();
  return { names, types };
}

/** Reads one data chunk: row count (field 100) and one vector per column (field 101). */
export function readChunk(
  deserializer: BinaryDeserializer,
  types: TypeIdAndInfo[],
): DataChunk {
  const rowCount = deserializer.readProperty(100, readVarInt);
  const vectors = deserializer.readProperty(101, (d) =>
    readVectorList(d, types),
  );
  deserializer.expectObjectEnd();
  return { rowCount, vectors };
}

export function readDataChunkList(
  deserializer: BinaryDeserializer,
  types: TypeIdAndInfo[],
): DataChunk[] {
  return readList(deserializer, (d) => readChunk(d, types));
}

/**
 * Reads the success branch of a query result: column metadata (field 101)
 * followed by the chunks (field 102), decoded using those column types.
 *
 * NOTE(review): unlike the other readers here, this does not call
 * expectObjectEnd() after the last field — presumably the top-level result
 * object's end marker is not consumed; confirm against the serializer.
 */
export function readSuccessQueryResult(
  deserializer: BinaryDeserializer,
): SuccessQueryResult {
  const columnNamesAndTypes = deserializer.readProperty(
    101,
    readColumnNamesAndTypes,
  );
  const chunks = deserializer.readProperty(102, (d) =>
    readDataChunkList(d, columnNamesAndTypes.types),
  );
  return { success: true, columnNamesAndTypes, chunks };
}

/** Reads the error branch: the error message is field 101. */
export function readErrorQueryResult(
  deserializer: BinaryDeserializer,
): ErrorQueryResult {
  const error = deserializer.readProperty(101, readString);
  return { success: false, error };
}

/** Reads a query result: field 100 is the success flag; the remaining fields depend on it. */
export function readQueryResult(deserializer: BinaryDeserializer): QueryResult {
  const success = deserializer.readProperty(100, readBoolean);
  if (success) {
    return readSuccessQueryResult(deserializer);
  }
  return readErrorQueryResult(deserializer);
}

// ===== file: src/serialization/functions/tokenizeResultFromBuffer.ts =====

import { deserializerFromBuffer } from './deserializeFromBuffer.js';

/** Deserializes a TokenizeResult from a raw response buffer. */
export function tokenizeResultFromBuffer(buffer: ArrayBuffer): TokenizeResult {
  const deserializer = deserializerFromBuffer(buffer);
  return readTokenizeResult(deserializer);
}
import { BinaryDeserializer } from '../classes/BinaryDeserializer.js';
import { BaseTypeInfo, TypeIdAndInfo, TypeInfo } from '../types/TypeInfo.js';
import {
  readList,
  readNullable,
  readPair,
  readString,
  readStringList,
  readUint8,
  readUnsupported,
  readVarInt,
} from './basicReaders.js';

/** Reads one struct field: a (name, type) pair. */
export function readStructEntry(
  deserializer: BinaryDeserializer,
): [string, TypeIdAndInfo] {
  return readPair(deserializer, readString, readType);
}

export function readStructEntryList(
  deserializer: BinaryDeserializer,
): [string, TypeIdAndInfo][] {
  return readList(deserializer, readStructEntry);
}

/** See ExtraTypeInfo::Deserialize in https://github.com/duckdb/duckdb/blob/main/src/storage/serialization/serialize_types.cpp */
export function readTypeInfo(deserializer: BinaryDeserializer): TypeInfo {
  // Field 100: ExtraTypeInfoType discriminant; 101/102: optional alias/modifiers.
  const typeInfoType = deserializer.readProperty(100, readUint8);
  const alias = deserializer.readPropertyWithDefault(101, readString, null);
  // Modifiers are not supported: readUnsupported throws if field 102 is present.
  const modifiers = deserializer.readPropertyWithDefault(
    102,
    readUnsupported,
    null,
  );
  // Only include the optional properties that were actually present.
  const baseInfo: BaseTypeInfo = {
    ...(alias ? { alias } : {}),
    ...(modifiers ? { modifiers } : {}),
  };
  let typeInfo: TypeInfo | undefined;
  switch (typeInfoType) {
    case 1: // GENERIC_TYPE_INFO
      typeInfo = {
        ...baseInfo,
        kind: 'generic',
      };
      break;
    case 2: // DECIMAL_TYPE_INFO
      {
        // Fields 200/201: decimal width and scale.
        const width = deserializer.readPropertyWithDefault(200, readUint8, 0);
        const scale = deserializer.readPropertyWithDefault(201, readUint8, 0);
        typeInfo = {
          ...baseInfo,
          kind: 'decimal',
          width,
          scale,
        };
      }
      break;
    case 4: // LIST_TYPE_INFO
      {
        // Field 200: element type.
        const childType = deserializer.readProperty(200, readType);
        typeInfo = {
          ...baseInfo,
          kind: 'list',
          childType,
        };
      }
      break;
    case 5: // STRUCT_TYPE_INFO
      {
        // Field 200: named child fields.
        const childTypes = deserializer.readProperty(200, readStructEntryList);
        typeInfo = {
          ...baseInfo,
          kind: 'struct',
          childTypes,
        };
      }
      break;
    case 6: // ENUM_TYPE_INFO
      {
        // Fields 200/201: declared count and the enum value strings.
        const valuesCount = deserializer.readProperty(200, readVarInt);
        const values = deserializer.readProperty(201, readStringList);
        typeInfo = {
          ...baseInfo,
          kind: 'enum',
          valuesCount,
          values,
        };
      }
      break;
    case 9: // ARRAY_TYPE_INFO
      {
        // Fields 200/201: element type and fixed array size.
        const childType = deserializer.readProperty(200, readType);
        const size = deserializer.readPropertyWithDefault(201, readVarInt, 0);
        typeInfo = {
          ...baseInfo,
          kind: 'array',
          childType,
          size,
        };
      }
      break;
    default:
      throw new Error(`unsupported type info: ${typeInfoType}`);
  }
  deserializer.expectObjectEnd();
  // Defensive fallback to satisfy the compiler; every case above assigns or throws.
  if (!typeInfo) {
    typeInfo = {
      ...baseInfo,
      kind: 'generic',
    };
  }
  return typeInfo;
}
export function readNullableTypeInfo(
  deserializer: BinaryDeserializer,
): TypeInfo | null {
  return readNullable(deserializer, readTypeInfo);
}

/** Reads a serialized LogicalType: field 100 = type id, optional field 101 = extra info. */
export function readType(deserializer: BinaryDeserializer): TypeIdAndInfo {
  const id = deserializer.readProperty(100, readUint8);
  const typeInfo = deserializer.readPropertyWithDefault(
    101,
    readNullableTypeInfo,
    null,
  );
  deserializer.expectObjectEnd();
  return { id, ...(typeInfo ? { typeInfo } : {}) };
}

export function readTypeList(
  deserializer: BinaryDeserializer,
): TypeIdAndInfo[] {
  return readList(deserializer, readType);
}

// ===== file: src/serialization/functions/vectorReaders.ts =====

import { LogicalTypeId } from '../constants/LogicalTypeId.js';
import { BaseVector, ListEntry, Vector } from '../types/Vector.js';
import { readData, readDataList, readUint8 } from './basicReaders.js';

/** Reads one list entry: offset (field 100) and length (field 101) into the child vector. */
export function readListEntry(deserializer: BinaryDeserializer): ListEntry {
  const offset = deserializer.readProperty(100, readVarInt);
  const length = deserializer.readProperty(101, readVarInt);
  deserializer.expectObjectEnd();
  return { offset, length };
}

export function readListEntryList(
  deserializer: BinaryDeserializer,
): ListEntry[] {
  return readList(deserializer, readListEntry);
}

/** See Vector::Deserialize in https://github.com/duckdb/duckdb/blob/main/src/common/types/vector.cpp */
export function readVector(
  deserializer: BinaryDeserializer,
  type: TypeIdAndInfo,
): Vector {
  const allValid = deserializer.readProperty(100, readUint8);
  // NOTE(review): despite its name, a nonzero `allValid` flag means a validity
  // mask (field 101) follows; this mirrors the flag DuckDB's Vector::Serialize
  // writes — confirm against the serializer.
  const validity = allValid ? deserializer.readProperty(101, readData) : null;
  const baseVector: BaseVector = { allValid, validity };
  let vector: Vector | undefined;
  switch (type.id) {
    // Fixed-width types: payload is one raw byte run (field 102).
    case LogicalTypeId.BOOLEAN:
    case LogicalTypeId.TINYINT:
    case LogicalTypeId.SMALLINT:
    case LogicalTypeId.INTEGER:
    case LogicalTypeId.BIGINT:
    case LogicalTypeId.DATE:
    case LogicalTypeId.TIME:
    case LogicalTypeId.TIMESTAMP_SEC:
    case LogicalTypeId.TIMESTAMP_MS:
    case LogicalTypeId.TIMESTAMP:
    case LogicalTypeId.TIMESTAMP_NS:
    case LogicalTypeId.DECIMAL:
    case LogicalTypeId.FLOAT:
    case LogicalTypeId.DOUBLE:
    case LogicalTypeId.INTERVAL:
    case LogicalTypeId.UTINYINT:
    case LogicalTypeId.USMALLINT:
    case LogicalTypeId.UINTEGER:
    case LogicalTypeId.UBIGINT:
    case LogicalTypeId.TIMESTAMP_TZ:
    case LogicalTypeId.TIME_TZ:
    case LogicalTypeId.UHUGEINT:
    case LogicalTypeId.HUGEINT:
    case LogicalTypeId.UUID:
    case LogicalTypeId.ENUM:
      {
        const data = deserializer.readProperty(102, readData);
        vector = {
          ...baseVector,
          kind: 'data',
          data,
        };
      }
      break;
    // Text types: payload is a list of strings (field 102).
    case LogicalTypeId.CHAR:
    case LogicalTypeId.VARCHAR:
      {
        const data = deserializer.readProperty(102, readStringList);
        vector = {
          ...baseVector,
          kind: 'string',
          data,
        };
      }
      break;
    // Variable-width binary types: payload is a list of byte runs (field 102).
    case LogicalTypeId.BLOB:
    case LogicalTypeId.BIT:
    case LogicalTypeId.VARINT:
      {
        const data = deserializer.readProperty(102, readDataList);
        vector = {
          ...baseVector,
          kind: 'datalist',
          data,
        };
      }
      break;
    // Nested types with one child vector per field (field 103).
    case LogicalTypeId.STRUCT:
    case LogicalTypeId.UNION:
      {
        const { typeInfo } = type;
        if (!typeInfo) {
          throw new Error(`STRUCT or UNION without typeInfo`);
        }
        if (typeInfo.kind !== 'struct') {
          throw new Error(
            `STRUCT or UNION with wrong typeInfo kind: ${typeInfo.kind}`,
          );
        }
        // Child vectors are decoded with the struct field types, in order.
        const types = typeInfo.childTypes.map((e) => e[1]);
        const data = deserializer.readProperty(103, (d) =>
          readVectorList(d, types),
        );
        vector = {
          ...baseVector,
          kind: 'vectorlist',
          data,
        };
      }
      break;
    // List-shaped types: size (104), per-row entries (105), one child vector (106).
    case LogicalTypeId.LIST:
    case LogicalTypeId.MAP:
      {
        const { typeInfo } = type;
        if (!typeInfo) {
          throw new Error(`LIST or MAP without typeInfo`);
        }
        if (typeInfo.kind !== 'list') {
          throw new Error(
            `LIST or MAP with wrong typeInfo kind: ${typeInfo.kind}`,
          );
        }
        const listSize = deserializer.readProperty(104, readVarInt);
        const entries = deserializer.readProperty(105, readListEntryList);
        const child = deserializer.readProperty(106, (d) =>
          readVector(d, typeInfo.childType),
        );
        vector = {
          ...baseVector,
          kind: 'list',
          listSize,
          entries,
          child,
        };
      }
      break;
    // Fixed-size arrays: size (103) and one child vector (104).
    case LogicalTypeId.ARRAY:
      {
        const { typeInfo } = type;
        if (!typeInfo) {
          throw new Error(`ARRAY without typeInfo`);
        }
        if (typeInfo.kind !== 'array') {
          throw new Error(`ARRAY with wrong typeInfo kind: ${typeInfo.kind}`);
        }
        const arraySize = deserializer.readProperty(103, readVarInt);
        const child = deserializer.readProperty(104, (d) =>
          readVector(d, typeInfo.childType),
        );
        vector = {
          ...baseVector,
          kind: 'array',
          arraySize,
          child,
        };
      }
      break;
    default:
      throw new Error(`unrecognized type id: ${type.id}`);
  }
  deserializer.expectObjectEnd();
  // Defensive fallback to satisfy the compiler; every case assigns or throws.
  if (!vector) {
    throw new Error('unknown vector type');
  }
  return vector;
}

/** Reads one vector per entry in `types`, decoding the i-th vector with types[i]. */
export function readVectorList(
  deserializer: BinaryDeserializer,
  types: TypeIdAndInfo[],
): Vector[] {
  return readList(deserializer, (d: BinaryDeserializer, i: number) =>
    readVector(d, types[i]),
  );
}

// ===== file: src/serialization/types/ColumnNamesAndTypes.ts =====

/** Parallel arrays of column names and their serialized types. */
export interface ColumnNamesAndTypes {
  names: string[];
  types: TypeIdAndInfo[];
}
// ===== file: src/serialization/types/DataChunk.ts =====

import { Vector } from './Vector.js';

/** One deserialized chunk: `rowCount` rows across one Vector per column. */
export interface DataChunk {
  rowCount: number;
  vectors: Vector[];
}

// ===== file: src/serialization/types/QueryResult.ts =====

import { ColumnNamesAndTypes } from './ColumnNamesAndTypes.js';
import { DataChunk } from './DataChunk.js';

/** A query that succeeded: column metadata plus the data chunks. */
export interface SuccessQueryResult {
  success: true;
  columnNamesAndTypes: ColumnNamesAndTypes;
  chunks: DataChunk[];
}

/** A query that failed: the server's error message. */
export interface ErrorQueryResult {
  success: false;
  error: string;
}

/** Discriminated on `success`. */
export type QueryResult = SuccessQueryResult | ErrorQueryResult;

// ===== file: src/serialization/types/TokenizeResult.ts =====

/** Parallel arrays: the i-th token starts at offsets[i] and has kind types[i]. */
export interface TokenizeResult {
  offsets: number[];
  types: number[];
}

// ===== file: src/serialization/types/TypeInfo.ts =====

/** Fields shared by all extra-type-info variants. */
export interface BaseTypeInfo {
  alias?: string;
  modifiers?: unknown[]; // TODO
}

export interface GenericTypeInfo extends BaseTypeInfo {
  kind: 'generic';
}

export interface DecimalTypeInfo extends BaseTypeInfo {
  kind: 'decimal';
  width: number;
  scale: number;
}

export interface ListTypeInfo extends BaseTypeInfo {
  kind: 'list';
  childType: TypeIdAndInfo;
}

export interface StructTypeInfo extends BaseTypeInfo {
  kind: 'struct';
  childTypes: [string, TypeIdAndInfo][];
}

export interface EnumTypeInfo extends BaseTypeInfo {
  kind: 'enum';
  valuesCount: number;
  values: string[];
}

export interface ArrayTypeInfo extends BaseTypeInfo {
  kind: 'array';
  childType: TypeIdAndInfo;
  size: number;
}

/** See https://github.com/duckdb/duckdb/blob/main/src/include/duckdb/common/extra_type_info.hpp */
export type TypeInfo =
  | GenericTypeInfo
  | DecimalTypeInfo
  | ListTypeInfo
  | StructTypeInfo
  | EnumTypeInfo
  | ArrayTypeInfo;

export interface TypeIdAndInfo {
  /** LogicalTypeId */
  id: number;

  /** Extra info for some types. */
  typeInfo?: TypeInfo;
}

// ===== file: src/serialization/types/Vector.ts =====

/** One row's slice of a list vector's child: offset and length. */
export interface ListEntry {
  offset: number;
  length: number;
}

/** Fields shared by all vector variants; `validity` is present iff `allValid` is nonzero. */
export interface BaseVector {
  allValid: number;
  validity: DataView | null;
}

export interface DataVector extends BaseVector {
  kind: 'data';
  data: DataView;
}

export interface StringVector extends BaseVector {
  kind: 'string';
  data: string[];
}

export interface DataListVector extends BaseVector {
  kind: 'datalist';
  data: DataView[];
}

export interface VectorListVector extends BaseVector {
  kind: 'vectorlist';
  data: Vector[];
}

export interface ListVector extends BaseVector {
  kind: 'list';
  listSize: number;
  entries: ListEntry[];
  child: Vector;
}

export interface ArrayVector extends BaseVector {
  kind: 'array';
  arraySize: number;
  child: Vector;
}

/** See https://github.com/duckdb/duckdb/blob/main/src/include/duckdb/common/types/vector.hpp */
export type Vector =
  | DataVector
  | StringVector
  | DataListVector
  | VectorListVector
  | ListVector
  | ArrayVector;
type Vector = + | DataVector + | StringVector + | DataListVector + | VectorListVector + | ListVector + | ArrayVector; diff --git a/ts/pkgs/duckdb-ui-client/src/tsconfig.json b/ts/pkgs/duckdb-ui-client/src/tsconfig.json new file mode 100644 index 0000000..f970179 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "../../../tsconfig.library.json", + "compilerOptions": { + "outDir": "../out" + } +} diff --git a/ts/pkgs/duckdb-ui-client/src/util/functions/randomString.ts b/ts/pkgs/duckdb-ui-client/src/util/functions/randomString.ts new file mode 100644 index 0000000..9667cdb --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/util/functions/randomString.ts @@ -0,0 +1,8 @@ +export function randomString( + length: number = 12, + chars: string = '$0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz', +): string { + return Array.from({ length }) + .map((_) => chars[Math.floor(Math.random() * chars.length)]) + .join(''); +} diff --git a/ts/pkgs/duckdb-ui-client/src/util/functions/toBase64.ts b/ts/pkgs/duckdb-ui-client/src/util/functions/toBase64.ts new file mode 100644 index 0000000..03fab1c --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/src/util/functions/toBase64.ts @@ -0,0 +1,10 @@ +const encoder = new TextEncoder(); + +export function toBase64(input: string): string { + const encoded = encoder.encode(input); + // For the reason behind this step, see https://developer.mozilla.org/en-US/docs/Web/API/Window/btoa#unicode_strings + const binaryString = Array.from(encoded, (codePoint) => + String.fromCodePoint(codePoint), + ).join(''); + return btoa(binaryString); +} diff --git a/ts/pkgs/duckdb-ui-client/test/helpers/makeBuffer.ts b/ts/pkgs/duckdb-ui-client/test/helpers/makeBuffer.ts new file mode 100644 index 0000000..24cadae --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/test/helpers/makeBuffer.ts @@ -0,0 +1,8 @@ +export function makeBuffer(bytes: number[]): ArrayBuffer { + const buffer = new ArrayBuffer(bytes.length); + 
const dv = new DataView(buffer); + for (let offset = 0; offset < bytes.length; offset++) { + dv.setUint8(offset, bytes[offset]); + } + return buffer; +} diff --git a/ts/pkgs/duckdb-ui-client/test/helpers/mockRequests.ts b/ts/pkgs/duckdb-ui-client/test/helpers/mockRequests.ts new file mode 100644 index 0000000..ee29756 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/test/helpers/mockRequests.ts @@ -0,0 +1,15 @@ +import { RequestHandler } from 'msw'; +import { setupServer } from 'msw/node'; + +export async function mockRequests( + handlers: RequestHandler[], + func: () => Promise, +) { + const server = setupServer(...handlers); + try { + server.listen(); + await func(); + } finally { + server.close(); + } +} diff --git a/ts/pkgs/duckdb-ui-client/test/http/classes/DuckDBUIHttpRequestQueue.test.ts b/ts/pkgs/duckdb-ui-client/test/http/classes/DuckDBUIHttpRequestQueue.test.ts new file mode 100644 index 0000000..4bc2c86 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/test/http/classes/DuckDBUIHttpRequestQueue.test.ts @@ -0,0 +1,138 @@ +import { http, HttpResponse } from 'msw'; +import { expect, suite, test } from 'vitest'; +import { DuckDBUIHttpRequestQueue } from '../../../src/http/classes/DuckDBUIHttpRequestQueue'; +import { makeBuffer } from '../../helpers/makeBuffer'; +import { mockRequests } from '../../helpers/mockRequests'; + +suite('DuckDBUIHttpRequestQueue', () => { + test('single request', () => { + return mockRequests( + [ + http.post('http://localhost/example/path', () => { + return HttpResponse.arrayBuffer(makeBuffer([17, 42])); + }), + ], + async () => { + const queue = new DuckDBUIHttpRequestQueue(); + const id = queue.enqueue( + 'http://localhost/example/path', + 'example body', + ); + expect(queue.length).toBe(1); + expect(queue.isCurrent(id)).toBe(true); + + const result = await queue.enqueuedResult(id); + expect(result.buffer).toEqual(makeBuffer([17, 42])); + }, + ); + }); + test('multiple requests', () => { + return mockRequests( + [ + 
http.post('http://localhost/example/path', async ({ request }) => { + const body = await request.text(); + const value = parseInt(body.split(' ')[0], 10); + return HttpResponse.arrayBuffer(makeBuffer([value])); + }), + ], + async () => { + const queue = new DuckDBUIHttpRequestQueue(); + const id1 = queue.enqueue( + 'http://localhost/example/path', + '11 example body', + ); + const id2 = queue.enqueue( + 'http://localhost/example/path', + '22 example body', + ); + expect(queue.length).toBe(2); + expect(queue.isCurrent(id1)).toBe(true); + + const result1 = await queue.enqueuedResult(id1); + expect(result1.buffer).toEqual(makeBuffer([11])); + + expect(queue.length).toBe(1); + expect(queue.isCurrent(id2)).toBe(true); + + const result2 = await queue.enqueuedResult(id2); + expect(result2.buffer).toEqual(makeBuffer([22])); + }, + ); + }); + test('cancel (first request)', () => { + return mockRequests( + [ + http.post('http://localhost/example/path', async ({ request }) => { + const body = await request.text(); + const value = parseInt(body.split(' ')[0], 10); + return HttpResponse.arrayBuffer(makeBuffer([value])); + }), + ], + async () => { + const queue = new DuckDBUIHttpRequestQueue(); + const id1 = queue.enqueue( + 'http://localhost/example/path', + '11 example body', + ); + const id2 = queue.enqueue( + 'http://localhost/example/path', + '22 example body', + ); + expect(queue.length).toBe(2); + expect(queue.isCurrent(id1)).toBe(true); + + queue.cancel(id1); + await expect(queue.enqueuedResult(id1)).rejects.toEqual( + new Error('query was canceled'), + ); + + const result2 = await queue.enqueuedResult(id2); + expect(result2.buffer).toEqual(makeBuffer([22])); + }, + ); + }); + test('cancel (second request)', () => { + return mockRequests( + [ + http.post('http://localhost/example/path', async ({ request }) => { + const body = await request.text(); + const value = parseInt(body.split(' ')[0], 10); + return HttpResponse.arrayBuffer(makeBuffer([value])); + }), + ], + async 
() => { + const queue = new DuckDBUIHttpRequestQueue(); + const id1 = queue.enqueue( + 'http://localhost/example/path', + '11 example body', + ); + const id2 = queue.enqueue( + 'http://localhost/example/path', + '22 example body', + ); + const id3 = queue.enqueue( + 'http://localhost/example/path', + '33 example body', + ); + expect(queue.length).toBe(3); + expect(queue.isCurrent(id1)).toBe(true); + + const promise2 = queue.enqueuedResult(id2); + queue.cancel(id2, 'example error message'); + + const result1 = await queue.enqueuedResult(id1); + expect(result1.buffer).toEqual(makeBuffer([11])); + + expect(queue.length).toBe(1); + expect(queue.isCurrent(id3)).toBe(true); + + await expect(promise2).rejects.toEqual( + new Error('example error message'), + ); + + const result3 = await queue.enqueuedResult(id3); + expect(result3.buffer).toEqual(makeBuffer([33])); + }, + ); + }); +}); diff --git a/ts/pkgs/duckdb-ui-client/test/http/functions/makeDuckDBUIHttpRequestHeaders.test.ts b/ts/pkgs/duckdb-ui-client/test/http/functions/makeDuckDBUIHttpRequestHeaders.test.ts new file mode 100644 index 0000000..ede4bac --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/test/http/functions/makeDuckDBUIHttpRequestHeaders.test.ts @@ -0,0 +1,39 @@ +import { expect, suite, test } from 'vitest'; +import { makeDuckDBUIHttpRequestHeaders } from '../../../src/http/functions/makeDuckDBUIHttpRequestHeaders'; + +suite('makeDuckDBUIHttpRequestHeaders', () => { + test('description', () => { + expect([ + ...makeDuckDBUIHttpRequestHeaders({ + description: 'example description', + }).entries(), + ]).toEqual([['x-duckdb-ui-request-description', 'example description']]); + }); + test('connection name', () => { + expect([ + ...makeDuckDBUIHttpRequestHeaders({ + connectionName: 'example connection name', + }).entries(), + ]).toEqual([['x-duckdb-ui-connection-name', 'example connection name']]); + }); + test('database name', () => { + // should be base64 encoded + expect([ + ...makeDuckDBUIHttpRequestHeaders({ + 
databaseName: 'example database name', + }).entries(), + ]).toEqual([['x-duckdb-ui-database-name', 'ZXhhbXBsZSBkYXRhYmFzZSBuYW1l']]); + }); + test('parameters', () => { + // values should be base64 encoded + expect([ + ...makeDuckDBUIHttpRequestHeaders({ + parameters: ['first', 'second'], + }).entries(), + ]).toEqual([ + ['x-duckdb-ui-parameter-count', '2'], + ['x-duckdb-ui-parameter-value-0', 'Zmlyc3Q='], + ['x-duckdb-ui-parameter-value-1', 'c2Vjb25k'], + ]); + }); +}); diff --git a/ts/pkgs/duckdb-ui-client/test/http/functions/sendDuckDBUIHttpRequest.test.ts b/ts/pkgs/duckdb-ui-client/test/http/functions/sendDuckDBUIHttpRequest.test.ts new file mode 100644 index 0000000..8c347a2 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/test/http/functions/sendDuckDBUIHttpRequest.test.ts @@ -0,0 +1,54 @@ +import { http, HttpResponse } from 'msw'; +import { expect, suite, test } from 'vitest'; +import { sendDuckDBUIHttpRequest } from '../../../src/http/functions/sendDuckDBUIHttpRequest'; +import { makeBuffer } from '../../helpers/makeBuffer'; +import { mockRequests } from '../../helpers/mockRequests'; + +suite('sendDuckDBUIHttpRequest', () => { + test('basic', async () => { + return mockRequests( + [ + http.post('http://localhost/example/path', () => { + return HttpResponse.arrayBuffer(makeBuffer([17, 42])); + }), + ], + async () => { + await expect( + sendDuckDBUIHttpRequest( + 'http://localhost/example/path', + 'example body', + ), + ).resolves.toEqual(makeBuffer([17, 42])); + }, + ); + }); + test('headers', async () => { + return mockRequests( + [ + http.post('http://localhost/example/path', ({ request }) => { + if ( + request.headers.get('X-Example-Header-1') !== + 'example-header-1-value' || + request.headers.get('X-Example-Header-2') !== + 'example-header-2-value' + ) { + return HttpResponse.error(); + } + return HttpResponse.arrayBuffer(makeBuffer([17, 42])); + }), + ], + async () => { + const headers = new Headers(); + headers.append('X-Example-Header-1', 
'example-header-1-value'); + headers.append('X-Example-Header-2', 'example-header-2-value'); + await expect( + sendDuckDBUIHttpRequest( + 'http://localhost/example/path', + 'example body', + headers, + ), + ).resolves.toEqual(makeBuffer([17, 42])); + }, + ); + }); +}); diff --git a/ts/pkgs/duckdb-ui-client/test/serialization/classes/BinaryDeserializer.test.ts b/ts/pkgs/duckdb-ui-client/test/serialization/classes/BinaryDeserializer.test.ts new file mode 100644 index 0000000..1ba8e9a --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/test/serialization/classes/BinaryDeserializer.test.ts @@ -0,0 +1,87 @@ +import { expect, suite, test } from 'vitest'; +import { BinaryDeserializer } from '../../../src/serialization/classes/BinaryDeserializer'; +import { BinaryStreamReader } from '../../../src/serialization/classes/BinaryStreamReader'; +import { + readString, + readUint8, +} from '../../../src/serialization/functions/basicReaders'; +import { makeBuffer } from '../../helpers/makeBuffer'; + +suite('BinaryDeserializer', () => { + test('read uint8', () => { + const deserializer = new BinaryDeserializer( + new BinaryStreamReader(makeBuffer([17, 42])), + ); + expect(deserializer.readUint8()).toBe(17); + expect(deserializer.readUint8()).toBe(42); + }); + test('read varint', () => { + const deserializer = new BinaryDeserializer( + new BinaryStreamReader(makeBuffer([0x81, 0x82, 0x03])), + ); + expect(deserializer.readVarInt()).toBe((3 << 14) | (2 << 7) | 1); + }); + test('read nullable', () => { + const deserializer = new BinaryDeserializer( + new BinaryStreamReader(makeBuffer([0, 1, 17])), + ); + expect(deserializer.readNullable(readUint8)).toBe(null); + expect(deserializer.readNullable(readUint8)).toBe(17); + }); + test('read data', () => { + const deserializer = new BinaryDeserializer( + new BinaryStreamReader(makeBuffer([3, 0xa, 0xb, 0xc])), + ); + const dv = deserializer.readData(); + expect(dv.byteLength).toBe(3); + expect(dv.getUint8(0)).toBe(0xa); + 
expect(dv.getUint8(1)).toBe(0xb); + expect(dv.getUint8(2)).toBe(0xc); + }); + test('read string', () => { + const deserializer = new BinaryDeserializer( + new BinaryStreamReader(makeBuffer([4, 0x64, 0x75, 0x63, 0x6b])), + ); + expect(deserializer.readString()).toBe('duck'); + }); + test('read list (of string)', () => { + const deserializer = new BinaryDeserializer( + new BinaryStreamReader( + makeBuffer([ + 3, 4, 0x77, 0x61, 0x6c, 0x6b, 4, 0x73, 0x77, 0x69, 0x6d, 3, 0x66, + 0x6c, 0x79, + ]), + ), + ); + expect(deserializer.readList(readString)).toEqual(['walk', 'swim', 'fly']); + }); + test('read pair', () => { + const deserializer = new BinaryDeserializer( + new BinaryStreamReader( + makeBuffer([0, 0, 4, 0x64, 0x75, 0x63, 0x6b, 1, 0, 42, 0xff, 0xff]), + ), + ); + expect(deserializer.readPair(readString, readUint8)).toEqual(['duck', 42]); + }); + test('read property', () => { + const deserializer = new BinaryDeserializer( + new BinaryStreamReader(makeBuffer([100, 0, 4, 0x64, 0x75, 0x63, 0x6b])), + ); + expect(deserializer.readProperty(100, readString)).toEqual('duck'); + }); + test('read property (not present)', () => { + const deserializer = new BinaryDeserializer( + new BinaryStreamReader(makeBuffer([100, 0, 4, 0x64, 0x75, 0x63, 0x6b])), + ); + expect(() => deserializer.readProperty(101, readString)).toThrowError( + 'Expected field id 101 but got 100 (offset=0)', + ); + }); + test('read property with default', () => { + const deserializer = new BinaryDeserializer( + new BinaryStreamReader(makeBuffer([101, 0, 42])), + ); + expect(deserializer.readPropertyWithDefault(100, readUint8, 17)).toBe(17); + expect(deserializer.readPropertyWithDefault(101, readUint8, 17)).toBe(42); + }); +}); diff --git a/ts/pkgs/duckdb-ui-client/test/serialization/classes/BinaryStreamReader.test.ts b/ts/pkgs/duckdb-ui-client/test/serialization/classes/BinaryStreamReader.test.ts new file mode 100644 index 0000000..0dc65bc --- /dev/null +++ 
b/ts/pkgs/duckdb-ui-client/test/serialization/classes/BinaryStreamReader.test.ts @@ -0,0 +1,30 @@ +import { expect, suite, test } from 'vitest'; +import { BinaryStreamReader } from '../../../src/serialization/classes/BinaryStreamReader'; +import { makeBuffer } from '../../helpers/makeBuffer'; + +suite('BinaryStreamReader', () => { + test('basic', () => { + const reader = new BinaryStreamReader( + makeBuffer([11, 22, 33, 44, 0x12, 0x34]), + ); + + expect(reader.getOffset()).toBe(0); + expect(reader.peekUint8()).toBe(11); + expect(reader.readUint8()).toBe(11); + + expect(reader.getOffset()).toBe(1); + expect(reader.peekUint8()).toBe(22); + expect(reader.readUint8()).toBe(22); + + expect(reader.getOffset()).toBe(2); + reader.consume(2); + expect(reader.getOffset()).toBe(4); + expect(reader.peekUint16(false)).toBe(0x1234); + expect(reader.peekUint16(true)).toBe(0x3412); + + const dv = reader.readData(2); + expect(dv.byteLength).toBe(2); + expect(dv.getUint8(0)).toBe(0x12); + expect(dv.getUint8(1)).toBe(0x34); + }); +}); diff --git a/ts/pkgs/duckdb-ui-client/test/tsconfig.json b/ts/pkgs/duckdb-ui-client/test/tsconfig.json new file mode 100644 index 0000000..8c93c98 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/test/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "../../../tsconfig.test.json", + "references": [ + { "path": "../src" } + ] +} diff --git a/ts/pkgs/duckdb-ui-client/test/util/functions/randomString.test.ts b/ts/pkgs/duckdb-ui-client/test/util/functions/randomString.test.ts new file mode 100644 index 0000000..2bfbde7 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/test/util/functions/randomString.test.ts @@ -0,0 +1,14 @@ +import { expect, suite, test } from 'vitest'; +import { randomString } from '../../../src/util/functions/randomString'; + +suite('randomString', () => { + test('default length', () => { + expect(randomString().length).toBe(12); + }); + test('custom length', () => { + expect(randomString(5).length).toBe(5); + }); + test('custom chars', () => { + 
expect(randomString(3, 'xy')).toMatch(/[xy][xy][xy]/); + }); +}); diff --git a/ts/pkgs/duckdb-ui-client/test/util/functions/toBase64.test.ts b/ts/pkgs/duckdb-ui-client/test/util/functions/toBase64.test.ts new file mode 100644 index 0000000..22f72c1 --- /dev/null +++ b/ts/pkgs/duckdb-ui-client/test/util/functions/toBase64.test.ts @@ -0,0 +1,11 @@ +import { expect, suite, test } from 'vitest'; +import { toBase64 } from '../../../src/util/functions/toBase64'; + +suite('toBase64', () => { + test('basic', () => { + expect(atob(toBase64('duck'))).toBe('duck'); + }); + test('unicode', () => { + expect(atob(toBase64('🦆'))).toBe('\xF0\x9F\xA6\x86'); + }); +}); diff --git a/ts/pnpm-lock.yaml b/ts/pnpm-lock.yaml new file mode 100644 index 0000000..faafd85 --- /dev/null +++ b/ts/pnpm-lock.yaml @@ -0,0 +1,2689 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +overrides: + tar-fs: ^3.0.8 + ws: ^8.18.1 + +importers: + + .: + devDependencies: + typescript: + specifier: ^5.8.3 + version: 5.8.3 + + pkgs/duckdb-data-reader: + dependencies: + '@duckdb/data-types': + specifier: workspace:* + version: link:../duckdb-data-types + '@duckdb/data-values': + specifier: workspace:* + version: link:../duckdb-data-values + devDependencies: + '@eslint/js': + specifier: ^9.24.0 + version: 9.28.0 + eslint: + specifier: ^9.24.0 + version: 9.28.0 + find-up-cli: + specifier: ^6.0.0 + version: 6.0.0 + prettier: + specifier: ^3.5.3 + version: 3.5.3 + rimraf: + specifier: ^6.0.1 + version: 6.0.1 + typescript: + specifier: ^5.8.3 + version: 5.8.3 + typescript-eslint: + specifier: ^8.30.1 + version: 8.34.0(eslint@9.28.0)(typescript@5.8.3) + vite: + specifier: ^6.2.6 + version: 6.3.5 + vitest: + specifier: ^3.1.1 + version: 3.2.3(msw@2.10.2(typescript@5.8.3)) + + pkgs/duckdb-data-types: + dependencies: + '@duckdb/data-values': + specifier: workspace:* + version: link:../duckdb-data-values + devDependencies: + '@eslint/js': + specifier: ^9.24.0 + version: 
9.28.0 + eslint: + specifier: ^9.24.0 + version: 9.28.0 + find-up-cli: + specifier: ^6.0.0 + version: 6.0.0 + prettier: + specifier: ^3.5.3 + version: 3.5.3 + rimraf: + specifier: ^6.0.1 + version: 6.0.1 + typescript: + specifier: ^5.8.3 + version: 5.8.3 + typescript-eslint: + specifier: ^8.30.1 + version: 8.34.0(eslint@9.28.0)(typescript@5.8.3) + vite: + specifier: ^6.2.6 + version: 6.3.5 + vitest: + specifier: ^3.1.1 + version: 3.2.3(msw@2.10.2(typescript@5.8.3)) + + pkgs/duckdb-data-values: + devDependencies: + '@eslint/js': + specifier: ^9.24.0 + version: 9.28.0 + eslint: + specifier: ^9.24.0 + version: 9.28.0 + find-up-cli: + specifier: ^6.0.0 + version: 6.0.0 + prettier: + specifier: ^3.5.3 + version: 3.5.3 + rimraf: + specifier: ^6.0.1 + version: 6.0.1 + typescript: + specifier: ^5.8.3 + version: 5.8.3 + typescript-eslint: + specifier: ^8.30.1 + version: 8.34.0(eslint@9.28.0)(typescript@5.8.3) + vite: + specifier: ^6.2.6 + version: 6.3.5 + vitest: + specifier: ^3.1.1 + version: 3.2.3(msw@2.10.2(typescript@5.8.3)) + + pkgs/duckdb-ui-client: + dependencies: + '@duckdb/data-reader': + specifier: workspace:* + version: link:../duckdb-data-reader + '@duckdb/data-types': + specifier: workspace:* + version: link:../duckdb-data-types + '@duckdb/data-values': + specifier: workspace:* + version: link:../duckdb-data-values + core-js: + specifier: ^3.41.0 + version: 3.43.0 + devDependencies: + '@eslint/js': + specifier: ^9.24.0 + version: 9.28.0 + eslint: + specifier: ^9.24.0 + version: 9.28.0 + find-up-cli: + specifier: ^6.0.0 + version: 6.0.0 + msw: + specifier: ^2.10.2 + version: 2.10.2(typescript@5.8.3) + prettier: + specifier: ^3.5.3 + version: 3.5.3 + rimraf: + specifier: ^6.0.1 + version: 6.0.1 + typescript: + specifier: ^5.8.3 + version: 5.8.3 + typescript-eslint: + specifier: ^8.30.1 + version: 8.34.0(eslint@9.28.0)(typescript@5.8.3) + vite: + specifier: ^6.2.6 + version: 6.3.5 + vitest: + specifier: ^3.1.1 + version: 3.2.3(msw@2.10.2(typescript@5.8.3)) + 
+packages: + + '@bundled-es-modules/cookie@2.0.1': + resolution: {integrity: sha512-8o+5fRPLNbjbdGRRmJj3h6Hh1AQJf2dk3qQ/5ZFb+PXkRNiSoMGGUKlsgLfrxneb72axVJyIYji64E2+nNfYyw==} + + '@bundled-es-modules/statuses@1.0.1': + resolution: {integrity: sha512-yn7BklA5acgcBr+7w064fGV+SGIFySjCKpqjcWgBAIfrAkY+4GQTJJHQMeT3V/sgz23VTEVV8TtOmkvJAhFVfg==} + + '@bundled-es-modules/tough-cookie@0.1.6': + resolution: {integrity: sha512-dvMHbL464C0zI+Yqxbz6kZ5TOEp7GLW+pry/RWndAR8MJQAXZ2rPmIs8tziTZjeIyhSNZgZbCePtfSbdWqStJw==} + + '@esbuild/aix-ppc64@0.25.5': + resolution: {integrity: sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.25.5': + resolution: {integrity: sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.25.5': + resolution: {integrity: sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.25.5': + resolution: {integrity: sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.25.5': + resolution: {integrity: sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.25.5': + resolution: {integrity: sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.25.5': + resolution: {integrity: sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw==} + engines: {node: '>=18'} + cpu: [arm64] + os: 
[freebsd] + + '@esbuild/freebsd-x64@0.25.5': + resolution: {integrity: sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.25.5': + resolution: {integrity: sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.25.5': + resolution: {integrity: sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.25.5': + resolution: {integrity: sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.25.5': + resolution: {integrity: sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.25.5': + resolution: {integrity: sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.25.5': + resolution: {integrity: sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.25.5': + resolution: {integrity: sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.25.5': + resolution: {integrity: sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.25.5': + resolution: {integrity: 
sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.25.5': + resolution: {integrity: sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.25.5': + resolution: {integrity: sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.25.5': + resolution: {integrity: sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.25.5': + resolution: {integrity: sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/sunos-x64@0.25.5': + resolution: {integrity: sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.25.5': + resolution: {integrity: sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.25.5': + resolution: {integrity: sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.25.5': + resolution: {integrity: sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@eslint-community/eslint-utils@4.7.0': + resolution: {integrity: 
sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + + '@eslint-community/regexpp@4.12.1': + resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + + '@eslint/config-array@0.20.1': + resolution: {integrity: sha512-OL0RJzC/CBzli0DrrR31qzj6d6i6Mm3HByuhflhl4LOBiWxN+3i6/t/ZQQNii4tjksXi8r2CRW1wMpWA2ULUEw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/config-helpers@0.2.3': + resolution: {integrity: sha512-u180qk2Um1le4yf0ruXH3PYFeEZeYC3p/4wCTKrr2U1CmGdzGi3KtY0nuPDH48UJxlKCC5RDzbcbh4X0XlqgHg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/core@0.14.0': + resolution: {integrity: sha512-qIbV0/JZr7iSDjqAc60IqbLdsj9GDt16xQtWD+B78d/HAlvysGdZZ6rpJHGAc2T0FQx1X6thsSPdnoiGKdNtdg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/core@0.15.0': + resolution: {integrity: sha512-b7ePw78tEWWkpgZCDYkbqDOP8dmM6qe+AOC6iuJqlq1R/0ahMAeH3qynpnqKFGkMltrp44ohV4ubGyvLX28tzw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/eslintrc@3.3.1': + resolution: {integrity: sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/js@9.28.0': + resolution: {integrity: sha512-fnqSjGWd/CoIp4EXIxWVK/sHA6DOHN4+8Ix2cX5ycOY7LG0UY8nHCU5pIp2eaE1Mc7Qd8kHspYNzYXT2ojPLzg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/object-schema@2.1.6': + resolution: {integrity: sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/plugin-kit@0.3.2': + resolution: {integrity: 
sha512-4SaFZCNfJqvk/kenHpI8xvN42DMaoycy4PzKc5otHxRswww1kAt82OlBuwRVLofCACCTZEcla2Ydxv8scMXaTg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@humanfs/core@0.19.1': + resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} + engines: {node: '>=18.18.0'} + + '@humanfs/node@0.16.6': + resolution: {integrity: sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==} + engines: {node: '>=18.18.0'} + + '@humanwhocodes/module-importer@1.0.1': + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + + '@humanwhocodes/retry@0.3.1': + resolution: {integrity: sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==} + engines: {node: '>=18.18'} + + '@humanwhocodes/retry@0.4.3': + resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} + engines: {node: '>=18.18'} + + '@inquirer/confirm@5.1.12': + resolution: {integrity: sha512-dpq+ielV9/bqgXRUbNH//KsY6WEw9DrGPmipkpmgC1Y46cwuBTNx7PXFWTjc3MQ+urcc0QxoVHcMI0FW4Ok0hg==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/core@10.1.13': + resolution: {integrity: sha512-1viSxebkYN2nJULlzCxES6G9/stgHSepZ9LqqfdIGPHj5OHhiBUXVS0a6R0bEC2A+VL4D9w6QB66ebCr6HGllA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/figures@1.0.12': + resolution: {integrity: sha512-MJttijd8rMFcKJC8NYmprWr6hD3r9Gd9qUC0XwPNwoEPWSMVJwA2MlXxF+nhZZNMY+HXsWa+o7KY2emWYIn0jQ==} + engines: {node: '>=18'} + + '@inquirer/type@3.0.7': + resolution: {integrity: sha512-PfunHQcjwnju84L+ycmcMKB/pTPIngjUJvfnRhKY6FKPuYXlM4aQCb/nIdTFR6BEhMjFvngzvng/vBAJMZpLSA==} + engines: {node: 
'>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + + '@jridgewell/sourcemap-codec@1.5.0': + resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} + + '@mswjs/interceptors@0.39.2': + resolution: {integrity: sha512-RuzCup9Ct91Y7V79xwCb146RaBRHZ7NBbrIUySumd1rpKqHL5OonaqrGIbug5hNwP/fRyxFMA6ISgw4FTtYFYg==} + engines: {node: '>=18'} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@open-draft/deferred-promise@2.2.0': + resolution: {integrity: sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==} + + '@open-draft/logger@0.3.0': + resolution: {integrity: sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==} + + '@open-draft/until@2.1.0': + resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==} + + '@rollup/rollup-android-arm-eabi@4.43.0': + resolution: {integrity: sha512-Krjy9awJl6rKbruhQDgivNbD1WuLb8xAclM4IR4cN5pHGAs2oIMMQJEiC3IC/9TZJ+QZkmZhlMO/6MBGxPidpw==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.43.0': + resolution: {integrity: 
sha512-ss4YJwRt5I63454Rpj+mXCXicakdFmKnUNxr1dLK+5rv5FJgAxnN7s31a5VchRYxCFWdmnDWKd0wbAdTr0J5EA==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.43.0': + resolution: {integrity: sha512-eKoL8ykZ7zz8MjgBenEF2OoTNFAPFz1/lyJ5UmmFSz5jW+7XbH1+MAgCVHy72aG59rbuQLcJeiMrP8qP5d/N0A==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.43.0': + resolution: {integrity: sha512-SYwXJgaBYW33Wi/q4ubN+ldWC4DzQY62S4Ll2dgfr/dbPoF50dlQwEaEHSKrQdSjC6oIe1WgzosoaNoHCdNuMg==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-freebsd-arm64@4.43.0': + resolution: {integrity: sha512-SV+U5sSo0yujrjzBF7/YidieK2iF6E7MdF6EbYxNz94lA+R0wKl3SiixGyG/9Klab6uNBIqsN7j4Y/Fya7wAjQ==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.43.0': + resolution: {integrity: sha512-J7uCsiV13L/VOeHJBo5SjasKiGxJ0g+nQTrBkAsmQBIdil3KhPnSE9GnRon4ejX1XDdsmK/l30IYLiAaQEO0Cg==} + cpu: [x64] + os: [freebsd] + + '@rollup/rollup-linux-arm-gnueabihf@4.43.0': + resolution: {integrity: sha512-gTJ/JnnjCMc15uwB10TTATBEhK9meBIY+gXP4s0sHD1zHOaIh4Dmy1X9wup18IiY9tTNk5gJc4yx9ctj/fjrIw==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.43.0': + resolution: {integrity: sha512-ZJ3gZynL1LDSIvRfz0qXtTNs56n5DI2Mq+WACWZ7yGHFUEirHBRt7fyIk0NsCKhmRhn7WAcjgSkSVVxKlPNFFw==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.43.0': + resolution: {integrity: sha512-8FnkipasmOOSSlfucGYEu58U8cxEdhziKjPD2FIa0ONVMxvl/hmONtX/7y4vGjdUhjcTHlKlDhw3H9t98fPvyA==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-arm64-musl@4.43.0': + resolution: {integrity: sha512-KPPyAdlcIZ6S9C3S2cndXDkV0Bb1OSMsX0Eelr2Bay4EsF9yi9u9uzc9RniK3mcUGCLhWY9oLr6er80P5DE6XA==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-loongarch64-gnu@4.43.0': + resolution: {integrity: sha512-HPGDIH0/ZzAZjvtlXj6g+KDQ9ZMHfSP553za7o2Odegb/BEfwJcR0Sw0RLNpQ9nC6Gy8s+3mSS9xjZ0n3rhcYg==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-powerpc64le-gnu@4.43.0': + resolution: {integrity: 
sha512-gEmwbOws4U4GLAJDhhtSPWPXUzDfMRedT3hFMyRAvM9Mrnj+dJIFIeL7otsv2WF3D7GrV0GIewW0y28dOYWkmw==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.43.0': + resolution: {integrity: sha512-XXKvo2e+wFtXZF/9xoWohHg+MuRnvO29TI5Hqe9xwN5uN8NKUYy7tXUG3EZAlfchufNCTHNGjEx7uN78KsBo0g==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-riscv64-musl@4.43.0': + resolution: {integrity: sha512-ruf3hPWhjw6uDFsOAzmbNIvlXFXlBQ4nk57Sec8E8rUxs/AI4HD6xmiiasOOx/3QxS2f5eQMKTAwk7KHwpzr/Q==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-s390x-gnu@4.43.0': + resolution: {integrity: sha512-QmNIAqDiEMEvFV15rsSnjoSmO0+eJLoKRD9EAa9rrYNwO/XRCtOGM3A5A0X+wmG+XRrw9Fxdsw+LnyYiZWWcVw==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.43.0': + resolution: {integrity: sha512-jAHr/S0iiBtFyzjhOkAics/2SrXE092qyqEg96e90L3t9Op8OTzS6+IX0Fy5wCt2+KqeHAkti+eitV0wvblEoQ==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-linux-x64-musl@4.43.0': + resolution: {integrity: sha512-3yATWgdeXyuHtBhrLt98w+5fKurdqvs8B53LaoKD7P7H7FKOONLsBVMNl9ghPQZQuYcceV5CDyPfyfGpMWD9mQ==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-win32-arm64-msvc@4.43.0': + resolution: {integrity: sha512-wVzXp2qDSCOpcBCT5WRWLmpJRIzv23valvcTwMHEobkjippNf+C3ys/+wf07poPkeNix0paTNemB2XrHr2TnGw==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.43.0': + resolution: {integrity: sha512-fYCTEyzf8d+7diCw8b+asvWDCLMjsCEA8alvtAutqJOJp/wL5hs1rWSqJ1vkjgW0L2NB4bsYJrpKkiIPRR9dvw==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-msvc@4.43.0': + resolution: {integrity: sha512-SnGhLiE5rlK0ofq8kzuDkM0g7FN1s5VYY+YSMTibP7CqShxCQvqtNxTARS4xX4PFJfHjG0ZQYX9iGzI3FQh5Aw==} + cpu: [x64] + os: [win32] + + '@types/chai@5.2.2': + resolution: {integrity: sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==} + + '@types/cookie@0.6.0': + resolution: {integrity: sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==} 
+ + '@types/deep-eql@4.0.2': + resolution: {integrity: sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==} + + '@types/estree@1.0.7': + resolution: {integrity: sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==} + + '@types/estree@1.0.8': + resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + + '@types/json-schema@7.0.15': + resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + + '@types/statuses@2.0.6': + resolution: {integrity: sha512-xMAgYwceFhRA2zY+XbEA7mxYbA093wdiW8Vu6gZPGWy9cmOyU9XesH1tNcEWsKFd5Vzrqx5T3D38PWx1FIIXkA==} + + '@types/tough-cookie@4.0.5': + resolution: {integrity: sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==} + + '@typescript-eslint/eslint-plugin@8.34.0': + resolution: {integrity: sha512-QXwAlHlbcAwNlEEMKQS2RCgJsgXrTJdjXT08xEgbPFa2yYQgVjBymxP5DrfrE7X7iodSzd9qBUHUycdyVJTW1w==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + '@typescript-eslint/parser': ^8.34.0 + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/parser@8.34.0': + resolution: {integrity: sha512-vxXJV1hVFx3IXz/oy2sICsJukaBrtDEQSBiV48/YIV5KWjX1dO+bcIr/kCPrW6weKXvsaGKFNlwH0v2eYdRRbA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/project-service@8.34.0': + resolution: {integrity: sha512-iEgDALRf970/B2YExmtPMPF54NenZUf4xpL3wsCRx/lgjz6ul/l13R81ozP/ZNuXfnLCS+oPmG7JIxfdNYKELw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/scope-manager@8.34.0': + resolution: {integrity: sha512-9Ac0X8WiLykl0aj1oYQNcLZjHgBojT6cW68yAgZ19letYu+Hxd0rE0veI1XznSSst1X5lwnxhPbVdwjDRIomRw==} + 
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/tsconfig-utils@8.34.0': + resolution: {integrity: sha512-+W9VYHKFIzA5cBeooqQxqNriAP0QeQ7xTiDuIOr71hzgffm3EL2hxwWBIIj4GuofIbKxGNarpKqIq6Q6YrShOA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/type-utils@8.34.0': + resolution: {integrity: sha512-n7zSmOcUVhcRYC75W2pnPpbO1iwhJY3NLoHEtbJwJSNlVAZuwqu05zY3f3s2SDWWDSo9FdN5szqc73DCtDObAg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/types@8.34.0': + resolution: {integrity: sha512-9V24k/paICYPniajHfJ4cuAWETnt7Ssy+R0Rbcqo5sSFr3QEZ/8TSoUi9XeXVBGXCaLtwTOKSLGcInCAvyZeMA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/typescript-estree@8.34.0': + resolution: {integrity: sha512-rOi4KZxI7E0+BMqG7emPSK1bB4RICCpF7QD3KCLXn9ZvWoESsOMlHyZPAHyG04ujVplPaHbmEvs34m+wjgtVtg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/utils@8.34.0': + resolution: {integrity: sha512-8L4tWatGchV9A1cKbjaavS6mwYwp39jql8xUmIIKJdm+qiaeHy5KMKlBrf30akXAWBzn2SqKsNOtSENWUwg7XQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <5.9.0' + + '@typescript-eslint/visitor-keys@8.34.0': + resolution: {integrity: sha512-qHV7pW7E85A0x6qyrFn+O+q1k1p3tQCsqIZ1KZ5ESLXY57aTvUd3/a4rdPTeXisvhXn2VQG0VSKUqs8KHF2zcA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@vitest/expect@3.2.3': + resolution: {integrity: sha512-W2RH2TPWVHA1o7UmaFKISPvdicFJH+mjykctJFoAkUw+SPTJTGjUNdKscFBrqM7IPnCVu6zihtKYa7TkZS1dkQ==} + + '@vitest/mocker@3.2.3': + resolution: {integrity: sha512-cP6fIun+Zx8he4rbWvi+Oya6goKQDZK+Yq4hhlggwQBbrlOQ4qtZ+G4nxB6ZnzI9lyIb+JnvyiJnPC2AGbKSPA==} + peerDependencies: + msw: ^2.4.9 + vite: ^5.0.0 || ^6.0.0 || ^7.0.0-0 + 
peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + + '@vitest/pretty-format@3.2.3': + resolution: {integrity: sha512-yFglXGkr9hW/yEXngO+IKMhP0jxyFw2/qys/CK4fFUZnSltD+MU7dVYGrH8rvPcK/O6feXQA+EU33gjaBBbAng==} + + '@vitest/runner@3.2.3': + resolution: {integrity: sha512-83HWYisT3IpMaU9LN+VN+/nLHVBCSIUKJzGxC5RWUOsK1h3USg7ojL+UXQR3b4o4UBIWCYdD2fxuzM7PQQ1u8w==} + + '@vitest/snapshot@3.2.3': + resolution: {integrity: sha512-9gIVWx2+tysDqUmmM1L0hwadyumqssOL1r8KJipwLx5JVYyxvVRfxvMq7DaWbZZsCqZnu/dZedaZQh4iYTtneA==} + + '@vitest/spy@3.2.3': + resolution: {integrity: sha512-JHu9Wl+7bf6FEejTCREy+DmgWe+rQKbK+y32C/k5f4TBIAlijhJbRBIRIOCEpVevgRsCQR2iHRUH2/qKVM/plw==} + + '@vitest/utils@3.2.3': + resolution: {integrity: sha512-4zFBCU5Pf+4Z6v+rwnZ1HU1yzOKKvDkMXZrymE2PBlbjKJRlrOxbvpfPSvJTGRIwGoahaOGvp+kbCoxifhzJ1Q==} + + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn@8.15.0: + resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} + engines: {node: '>=0.4.0'} + hasBin: true + + ajv@6.12.6: + resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} + + ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.1.0: + resolution: {integrity: sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: 
sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + ansi-styles@6.2.1: + resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} + engines: {node: '>=12'} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + assertion-error@2.0.1: + resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} + engines: {node: '>=12'} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + balanced-match@3.0.1: + resolution: {integrity: sha512-vjtV3hiLqYDNRoiAv0zC4QaGAMPomEoq83PRmYIofPswwZurCeWR5LByXm7SyoL0Zh5+2z0+HC7jG8gSZJUh0w==} + engines: {node: '>= 16'} + + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} + + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + + brace-expansion@4.0.1: + resolution: {integrity: sha512-YClrbvTCXGe70pU2JiEiPLYXO9gQkyxYeKpJIQHVS/gOs6EWMQP2RYBwjFLNT322Ji8TOC3IMPfsYCedNpzKfA==} + engines: {node: '>= 18'} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + cac@6.7.14: + resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} + engines: {node: '>=8'} + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + chai@5.2.0: + resolution: {integrity: 
sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==} + engines: {node: '>=12'} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + check-error@2.1.1: + resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==} + engines: {node: '>= 16'} + + cli-width@4.1.0: + resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} + engines: {node: '>= 12'} + + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + cookie@0.7.2: + resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} + engines: {node: '>= 0.6'} + + core-js@3.43.0: + resolution: {integrity: sha512-N6wEbTTZSYOY2rYAn85CuvWWkCK6QweMn7/4Nr3w+gDBeBhk/x4EJeY6FPo4QzDoJZxVTv8U7CMvgWk6pOHHqA==} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + debug@4.4.1: + resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + 
supports-color: + optional: true + + deep-eql@5.0.2: + resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} + engines: {node: '>=6'} + + deep-is@0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + es-module-lexer@1.7.0: + resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} + + esbuild@0.25.5: + resolution: {integrity: sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ==} + engines: {node: '>=18'} + hasBin: true + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + eslint-scope@8.4.0: + resolution: {integrity: sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint-visitor-keys@3.4.3: + resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + eslint-visitor-keys@4.2.1: + resolution: {integrity: 
sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint@9.28.0: + resolution: {integrity: sha512-ocgh41VhRlf9+fVpe7QKzwLj9c92fDiqOj8Y3Sd4/ZmVA4Btx4PlUYPq4pp9JDyupkf1upbEXecxL2mwNV7jPQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + hasBin: true + peerDependencies: + jiti: '*' + peerDependenciesMeta: + jiti: + optional: true + + espree@10.4.0: + resolution: {integrity: sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + esquery@1.6.0: + resolution: {integrity: sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==} + engines: {node: '>=0.10'} + + esrecurse@4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + estree-walker@3.0.3: + resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + + esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + + expect-type@1.2.1: + resolution: {integrity: sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==} + engines: {node: '>=12.0.0'} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-glob@3.3.3: + resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} + engines: {node: '>=8.6.0'} + + fast-json-stable-stringify@2.1.0: 
+ resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + + fast-levenshtein@2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + + fastq@1.19.1: + resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} + + fdir@6.4.6: + resolution: {integrity: sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + + file-entry-cache@8.0.0: + resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} + engines: {node: '>=16.0.0'} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + find-up-cli@6.0.0: + resolution: {integrity: sha512-3vDn6BBgpPwVcX6lkuDiZK5tEMY+eK8z/WYgbpkFZM41T0YDEdq2YsxVjxywr8CjfpXG1Pyn4/MfxQmlccgXow==} + engines: {node: '>=18'} + hasBin: true + + find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + + find-up@7.0.0: + resolution: {integrity: sha512-YyZM99iHrqLKjmt4LJDj58KI+fYyufRLBSYcqycxf//KpBk9FoewoGX0450m9nB44qrZnovzC2oeP5hUibxc/g==} + engines: {node: '>=18'} + + flat-cache@4.0.1: + resolution: {integrity: sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==} + engines: {node: '>=16'} + + flatted@3.3.3: + resolution: {integrity: sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==} + + foreground-child@3.3.1: + resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} + engines: 
{node: '>=14'} + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + + glob@11.0.2: + resolution: {integrity: sha512-YT7U7Vye+t5fZ/QMkBFrTJ7ZQxInIUjwyAjVj84CYXqgBdv30MFUPGnBR6sQaVq6Is15wYJUsnzTuWaGRBhBAQ==} + engines: {node: 20 || >=22} + hasBin: true + + globals@14.0.0: + resolution: {integrity: sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==} + engines: {node: '>=18'} + + graphemer@1.4.0: + resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} + + graphql@16.11.0: + resolution: {integrity: sha512-mS1lbMsxgQj6hge1XZ6p7GPhbrtFwUFYi3wRzXAC/FmYnyXMTvvI3td3rjmQ2u8ewXueaSvRPWaEcgVVOT9Jnw==} + engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + headers-polyfill@4.0.3: + resolution: {integrity: sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ==} + + ignore@5.3.2: + resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + + ignore@7.0.5: + resolution: {integrity: 
sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==} + engines: {node: '>= 4'} + + import-fresh@3.3.1: + resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} + engines: {node: '>=6'} + + imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-node-process@1.2.0: + resolution: {integrity: sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw==} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + jackspeak@4.1.1: + resolution: {integrity: sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==} + engines: {node: 20 || >=22} + + js-tokens@9.0.1: + resolution: {integrity: sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==} + + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + + json-buffer@3.0.1: + resolution: {integrity: 
sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + + json-schema-traverse@0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + + json-stable-stringify-without-jsonify@1.0.1: + resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + + keyv@4.5.4: + resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + + levn@0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} + + locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + + locate-path@7.2.0: + resolution: {integrity: sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + + loupe@3.1.3: + resolution: {integrity: sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==} + + lru-cache@11.1.0: + resolution: {integrity: sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==} + engines: {node: 20 || >=22} + + magic-string@0.30.17: + resolution: {integrity: sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==} + + meow@13.2.0: + resolution: {integrity: sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA==} + engines: {node: '>=18'} + + merge2@1.4.1: + resolution: {integrity: 
sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + minimatch@10.0.2: + resolution: {integrity: sha512-+9TJCIYXgZ2Dm5LxVCFsa8jOm+evMwXHFI0JM1XROmkfkpz8/iLLDh+TwSmyIBrs6C6Xu9294/fq8cBA+P6AqA==} + engines: {node: 20 || >=22} + + minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + + minimatch@9.0.5: + resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + engines: {node: '>=16 || 14 >=14.17'} + + minipass@7.1.2: + resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} + engines: {node: '>=16 || 14 >=14.17'} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + msw@2.10.2: + resolution: {integrity: sha512-RCKM6IZseZQCWcSWlutdf590M8nVfRHG1ImwzOtwz8IYxgT4zhUO0rfTcTvDGiaFE0Rhcc+h43lcF3Jc9gFtwQ==} + engines: {node: '>=18'} + hasBin: true + peerDependencies: + typescript: '>= 4.8.x' + peerDependenciesMeta: + typescript: + optional: true + + mute-stream@2.0.0: + resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==} + engines: {node: ^18.17.0 || >=20.5.0} + + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + + optionator@0.9.4: + resolution: {integrity: 
sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} + engines: {node: '>= 0.8.0'} + + outvariant@1.4.3: + resolution: {integrity: sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA==} + + p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + + p-limit@4.0.0: + resolution: {integrity: sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + + p-locate@6.0.0: + resolution: {integrity: sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + package-json-from-dist@1.0.1: + resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + + parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-exists@5.0.0: + resolution: {integrity: sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-scurry@2.0.0: + resolution: {integrity: 
sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==} + engines: {node: 20 || >=22} + + path-to-regexp@6.3.0: + resolution: {integrity: sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==} + + pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} + + pathval@2.0.0: + resolution: {integrity: sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==} + engines: {node: '>= 14.16'} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + picomatch@4.0.2: + resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} + engines: {node: '>=12'} + + postcss@8.5.5: + resolution: {integrity: sha512-d/jtm+rdNT8tpXuHY5MMtcbJFBkhXE6593XVR9UoGCH8jSFGci7jGvMGH5RYd5PBJW+00NZQt6gf7CbagJCrhg==} + engines: {node: ^10 || ^12 || >=14} + + prelude-ls@1.2.1: + resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} + + prettier@3.5.3: + resolution: {integrity: sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==} + engines: {node: '>=14'} + hasBin: true + + psl@1.15.0: + resolution: {integrity: sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w==} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + + querystringify@2.2.0: + resolution: {integrity: 
sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + requires-port@1.0.0: + resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==} + + resolve-from@4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + + reusify@1.1.0: + resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + rimraf@6.0.1: + resolution: {integrity: sha512-9dkvaxAsk/xNXSJzMgFqqMCuFgt2+KsOFek3TMLfo8NCPfWpBmqwyNn5Y+NX56QUYfCtsyhF3ayiboEoUmJk/A==} + engines: {node: 20 || >=22} + hasBin: true + + rollup@4.43.0: + resolution: {integrity: sha512-wdN2Kd3Twh8MAEOEJZsuxuLKCsBEo4PVNLK6tQWAn10VhsVewQLzcucMgLolRlhFybGxfclbPeEYBaP6RvUFGg==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + semver@7.7.2: + resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==} + engines: {node: '>=10'} + hasBin: true + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: 
'>=8'} + + siginfo@2.0.0: + resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + stackback@0.0.2: + resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + + statuses@2.0.2: + resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==} + engines: {node: '>= 0.8'} + + std-env@3.9.0: + resolution: {integrity: sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==} + + strict-event-emitter@0.5.1: + resolution: {integrity: sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ==} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.1.0: + resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} + engines: {node: '>=12'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + 
strip-literal@3.0.0: + resolution: {integrity: sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + tinybench@2.9.0: + resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} + + tinyexec@0.3.2: + resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} + + tinyglobby@0.2.14: + resolution: {integrity: sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==} + engines: {node: '>=12.0.0'} + + tinypool@1.1.0: + resolution: {integrity: sha512-7CotroY9a8DKsKprEy/a14aCCm8jYVmR7aFy4fpkZM8sdpNJbKkixuNjgM50yCmip2ezc8z4N7k3oe2+rfRJCQ==} + engines: {node: ^18.0.0 || >=20.0.0} + + tinyrainbow@2.0.0: + resolution: {integrity: sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==} + engines: {node: '>=14.0.0'} + + tinyspy@4.0.3: + resolution: {integrity: sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==} + engines: {node: '>=14.0.0'} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + tough-cookie@4.1.4: + resolution: {integrity: sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==} + engines: {node: '>=6'} + + ts-api-utils@2.1.0: + resolution: {integrity: sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==} + engines: {node: '>=18.12'} + peerDependencies: + typescript: '>=4.8.4' + + type-check@0.4.0: + resolution: {integrity: 
sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} + + type-fest@0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + + type-fest@4.41.0: + resolution: {integrity: sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==} + engines: {node: '>=16'} + + typescript-eslint@8.34.0: + resolution: {integrity: sha512-MRpfN7uYjTrTGigFCt8sRyNqJFhjN0WwZecldaqhWm+wy0gaRt8Edb/3cuUy0zdq2opJWT6iXINKAtewnDOltQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <5.9.0' + + typescript@5.8.3: + resolution: {integrity: sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==} + engines: {node: '>=14.17'} + hasBin: true + + unicorn-magic@0.1.0: + resolution: {integrity: sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==} + engines: {node: '>=18'} + + universalify@0.2.0: + resolution: {integrity: sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==} + engines: {node: '>= 4.0.0'} + + uri-js@4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + + url-parse@1.5.10: + resolution: {integrity: sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==} + + vite-node@3.2.3: + resolution: {integrity: sha512-gc8aAifGuDIpZHrPjuHyP4dpQmYXqWw7D1GmDnWeNWP654UEXzVfQ5IHPSK5HaHkwB/+p1atpYpSdw/2kOv8iQ==} + engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + hasBin: true + + vite@6.3.5: + resolution: {integrity: sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==} + engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + hasBin: true + 
peerDependencies: + '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 + jiti: '>=1.21.0' + less: '*' + lightningcss: ^1.21.0 + sass: '*' + sass-embedded: '*' + stylus: '*' + sugarss: '*' + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + '@types/node': + optional: true + jiti: + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + tsx: + optional: true + yaml: + optional: true + + vitest@3.2.3: + resolution: {integrity: sha512-E6U2ZFXe3N/t4f5BwUaVCKRLHqUpk1CBWeMh78UT4VaTPH/2dyvH6ALl29JTovEPu9dVKr/K/J4PkXgrMbw4Ww==} + engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@types/debug': ^4.1.12 + '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 + '@vitest/browser': 3.2.3 + '@vitest/ui': 3.2.3 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@types/debug': + optional: true + '@types/node': + optional: true + '@vitest/browser': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + why-is-node-running@2.3.0: + resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==} + engines: {node: '>=8'} + hasBin: true + + word-wrap@1.2.5: + resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} + engines: {node: '>=0.10.0'} + + wrap-ansi@6.2.0: + resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} + engines: {node: '>=8'} + + wrap-ansi@7.0.0: + resolution: {integrity: 
sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + + yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + + yocto-queue@1.2.1: + resolution: {integrity: sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg==} + engines: {node: '>=12.20'} + + yoctocolors-cjs@2.1.2: + resolution: {integrity: sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==} + engines: {node: '>=18'} + +snapshots: + + '@bundled-es-modules/cookie@2.0.1': + dependencies: + cookie: 0.7.2 + + '@bundled-es-modules/statuses@1.0.1': + dependencies: + statuses: 2.0.2 + + '@bundled-es-modules/tough-cookie@0.1.6': + dependencies: + '@types/tough-cookie': 4.0.5 + tough-cookie: 4.1.4 + + '@esbuild/aix-ppc64@0.25.5': + optional: true + + '@esbuild/android-arm64@0.25.5': + optional: true + + '@esbuild/android-arm@0.25.5': + optional: true + + '@esbuild/android-x64@0.25.5': + optional: true + + '@esbuild/darwin-arm64@0.25.5': + optional: true + + '@esbuild/darwin-x64@0.25.5': + optional: true + + '@esbuild/freebsd-arm64@0.25.5': + optional: true + + '@esbuild/freebsd-x64@0.25.5': 
+ optional: true + + '@esbuild/linux-arm64@0.25.5': + optional: true + + '@esbuild/linux-arm@0.25.5': + optional: true + + '@esbuild/linux-ia32@0.25.5': + optional: true + + '@esbuild/linux-loong64@0.25.5': + optional: true + + '@esbuild/linux-mips64el@0.25.5': + optional: true + + '@esbuild/linux-ppc64@0.25.5': + optional: true + + '@esbuild/linux-riscv64@0.25.5': + optional: true + + '@esbuild/linux-s390x@0.25.5': + optional: true + + '@esbuild/linux-x64@0.25.5': + optional: true + + '@esbuild/netbsd-arm64@0.25.5': + optional: true + + '@esbuild/netbsd-x64@0.25.5': + optional: true + + '@esbuild/openbsd-arm64@0.25.5': + optional: true + + '@esbuild/openbsd-x64@0.25.5': + optional: true + + '@esbuild/sunos-x64@0.25.5': + optional: true + + '@esbuild/win32-arm64@0.25.5': + optional: true + + '@esbuild/win32-ia32@0.25.5': + optional: true + + '@esbuild/win32-x64@0.25.5': + optional: true + + '@eslint-community/eslint-utils@4.7.0(eslint@9.28.0)': + dependencies: + eslint: 9.28.0 + eslint-visitor-keys: 3.4.3 + + '@eslint-community/regexpp@4.12.1': {} + + '@eslint/config-array@0.20.1': + dependencies: + '@eslint/object-schema': 2.1.6 + debug: 4.4.1 + minimatch: 3.1.2 + transitivePeerDependencies: + - supports-color + + '@eslint/config-helpers@0.2.3': {} + + '@eslint/core@0.14.0': + dependencies: + '@types/json-schema': 7.0.15 + + '@eslint/core@0.15.0': + dependencies: + '@types/json-schema': 7.0.15 + + '@eslint/eslintrc@3.3.1': + dependencies: + ajv: 6.12.6 + debug: 4.4.1 + espree: 10.4.0 + globals: 14.0.0 + ignore: 5.3.2 + import-fresh: 3.3.1 + js-yaml: 4.1.0 + minimatch: 3.1.2 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + '@eslint/js@9.28.0': {} + + '@eslint/object-schema@2.1.6': {} + + '@eslint/plugin-kit@0.3.2': + dependencies: + '@eslint/core': 0.15.0 + levn: 0.4.1 + + '@humanfs/core@0.19.1': {} + + '@humanfs/node@0.16.6': + dependencies: + '@humanfs/core': 0.19.1 + '@humanwhocodes/retry': 0.3.1 + + 
'@humanwhocodes/module-importer@1.0.1': {} + + '@humanwhocodes/retry@0.3.1': {} + + '@humanwhocodes/retry@0.4.3': {} + + '@inquirer/confirm@5.1.12': + dependencies: + '@inquirer/core': 10.1.13 + '@inquirer/type': 3.0.7 + + '@inquirer/core@10.1.13': + dependencies: + '@inquirer/figures': 1.0.12 + '@inquirer/type': 3.0.7 + ansi-escapes: 4.3.2 + cli-width: 4.1.0 + mute-stream: 2.0.0 + signal-exit: 4.1.0 + wrap-ansi: 6.2.0 + yoctocolors-cjs: 2.1.2 + + '@inquirer/figures@1.0.12': {} + + '@inquirer/type@3.0.7': {} + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@jridgewell/sourcemap-codec@1.5.0': {} + + '@mswjs/interceptors@0.39.2': + dependencies: + '@open-draft/deferred-promise': 2.2.0 + '@open-draft/logger': 0.3.0 + '@open-draft/until': 2.1.0 + is-node-process: 1.2.0 + outvariant: 1.4.3 + strict-event-emitter: 0.5.1 + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.19.1 + + '@open-draft/deferred-promise@2.2.0': {} + + '@open-draft/logger@0.3.0': + dependencies: + is-node-process: 1.2.0 + outvariant: 1.4.3 + + '@open-draft/until@2.1.0': {} + + '@rollup/rollup-android-arm-eabi@4.43.0': + optional: true + + '@rollup/rollup-android-arm64@4.43.0': + optional: true + + '@rollup/rollup-darwin-arm64@4.43.0': + optional: true + + '@rollup/rollup-darwin-x64@4.43.0': + optional: true + + '@rollup/rollup-freebsd-arm64@4.43.0': + optional: true + + '@rollup/rollup-freebsd-x64@4.43.0': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.43.0': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.43.0': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.43.0': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.43.0': + optional: 
true + + '@rollup/rollup-linux-loongarch64-gnu@4.43.0': + optional: true + + '@rollup/rollup-linux-powerpc64le-gnu@4.43.0': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.43.0': + optional: true + + '@rollup/rollup-linux-riscv64-musl@4.43.0': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.43.0': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.43.0': + optional: true + + '@rollup/rollup-linux-x64-musl@4.43.0': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.43.0': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.43.0': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.43.0': + optional: true + + '@types/chai@5.2.2': + dependencies: + '@types/deep-eql': 4.0.2 + + '@types/cookie@0.6.0': {} + + '@types/deep-eql@4.0.2': {} + + '@types/estree@1.0.7': {} + + '@types/estree@1.0.8': {} + + '@types/json-schema@7.0.15': {} + + '@types/statuses@2.0.6': {} + + '@types/tough-cookie@4.0.5': {} + + '@typescript-eslint/eslint-plugin@8.34.0(@typescript-eslint/parser@8.34.0(eslint@9.28.0)(typescript@5.8.3))(eslint@9.28.0)(typescript@5.8.3)': + dependencies: + '@eslint-community/regexpp': 4.12.1 + '@typescript-eslint/parser': 8.34.0(eslint@9.28.0)(typescript@5.8.3) + '@typescript-eslint/scope-manager': 8.34.0 + '@typescript-eslint/type-utils': 8.34.0(eslint@9.28.0)(typescript@5.8.3) + '@typescript-eslint/utils': 8.34.0(eslint@9.28.0)(typescript@5.8.3) + '@typescript-eslint/visitor-keys': 8.34.0 + eslint: 9.28.0 + graphemer: 1.4.0 + ignore: 7.0.5 + natural-compare: 1.4.0 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/parser@8.34.0(eslint@9.28.0)(typescript@5.8.3)': + dependencies: + '@typescript-eslint/scope-manager': 8.34.0 + '@typescript-eslint/types': 8.34.0 + '@typescript-eslint/typescript-estree': 8.34.0(typescript@5.8.3) + '@typescript-eslint/visitor-keys': 8.34.0 + debug: 4.4.1 + eslint: 9.28.0 + typescript: 5.8.3 + transitivePeerDependencies: + - 
supports-color + + '@typescript-eslint/project-service@8.34.0(typescript@5.8.3)': + dependencies: + '@typescript-eslint/tsconfig-utils': 8.34.0(typescript@5.8.3) + '@typescript-eslint/types': 8.34.0 + debug: 4.4.1 + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/scope-manager@8.34.0': + dependencies: + '@typescript-eslint/types': 8.34.0 + '@typescript-eslint/visitor-keys': 8.34.0 + + '@typescript-eslint/tsconfig-utils@8.34.0(typescript@5.8.3)': + dependencies: + typescript: 5.8.3 + + '@typescript-eslint/type-utils@8.34.0(eslint@9.28.0)(typescript@5.8.3)': + dependencies: + '@typescript-eslint/typescript-estree': 8.34.0(typescript@5.8.3) + '@typescript-eslint/utils': 8.34.0(eslint@9.28.0)(typescript@5.8.3) + debug: 4.4.1 + eslint: 9.28.0 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/types@8.34.0': {} + + '@typescript-eslint/typescript-estree@8.34.0(typescript@5.8.3)': + dependencies: + '@typescript-eslint/project-service': 8.34.0(typescript@5.8.3) + '@typescript-eslint/tsconfig-utils': 8.34.0(typescript@5.8.3) + '@typescript-eslint/types': 8.34.0 + '@typescript-eslint/visitor-keys': 8.34.0 + debug: 4.4.1 + fast-glob: 3.3.3 + is-glob: 4.0.3 + minimatch: 9.0.5 + semver: 7.7.2 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/utils@8.34.0(eslint@9.28.0)(typescript@5.8.3)': + dependencies: + '@eslint-community/eslint-utils': 4.7.0(eslint@9.28.0) + '@typescript-eslint/scope-manager': 8.34.0 + '@typescript-eslint/types': 8.34.0 + '@typescript-eslint/typescript-estree': 8.34.0(typescript@5.8.3) + eslint: 9.28.0 + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/visitor-keys@8.34.0': + dependencies: + '@typescript-eslint/types': 8.34.0 + eslint-visitor-keys: 4.2.1 + + '@vitest/expect@3.2.3': + dependencies: + 
'@types/chai': 5.2.2 + '@vitest/spy': 3.2.3 + '@vitest/utils': 3.2.3 + chai: 5.2.0 + tinyrainbow: 2.0.0 + + '@vitest/mocker@3.2.3(msw@2.10.2(typescript@5.8.3))(vite@6.3.5)': + dependencies: + '@vitest/spy': 3.2.3 + estree-walker: 3.0.3 + magic-string: 0.30.17 + optionalDependencies: + msw: 2.10.2(typescript@5.8.3) + vite: 6.3.5 + + '@vitest/pretty-format@3.2.3': + dependencies: + tinyrainbow: 2.0.0 + + '@vitest/runner@3.2.3': + dependencies: + '@vitest/utils': 3.2.3 + pathe: 2.0.3 + strip-literal: 3.0.0 + + '@vitest/snapshot@3.2.3': + dependencies: + '@vitest/pretty-format': 3.2.3 + magic-string: 0.30.17 + pathe: 2.0.3 + + '@vitest/spy@3.2.3': + dependencies: + tinyspy: 4.0.3 + + '@vitest/utils@3.2.3': + dependencies: + '@vitest/pretty-format': 3.2.3 + loupe: 3.1.3 + tinyrainbow: 2.0.0 + + acorn-jsx@5.3.2(acorn@8.15.0): + dependencies: + acorn: 8.15.0 + + acorn@8.15.0: {} + + ajv@6.12.6: + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + ansi-escapes@4.3.2: + dependencies: + type-fest: 0.21.3 + + ansi-regex@5.0.1: {} + + ansi-regex@6.1.0: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@6.2.1: {} + + argparse@2.0.1: {} + + assertion-error@2.0.1: {} + + balanced-match@1.0.2: {} + + balanced-match@3.0.1: {} + + brace-expansion@1.1.12: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@2.0.2: + dependencies: + balanced-match: 1.0.2 + + brace-expansion@4.0.1: + dependencies: + balanced-match: 3.0.1 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + cac@6.7.14: {} + + callsites@3.1.0: {} + + chai@5.2.0: + dependencies: + assertion-error: 2.0.1 + check-error: 2.1.1 + deep-eql: 5.0.2 + loupe: 3.1.3 + pathval: 2.0.0 + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + check-error@2.1.1: {} + + cli-width@4.1.0: {} + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 
7.0.0 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + concat-map@0.0.1: {} + + cookie@0.7.2: {} + + core-js@3.43.0: {} + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + debug@4.4.1: + dependencies: + ms: 2.1.3 + + deep-eql@5.0.2: {} + + deep-is@0.1.4: {} + + eastasianwidth@0.2.0: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + es-module-lexer@1.7.0: {} + + esbuild@0.25.5: + optionalDependencies: + '@esbuild/aix-ppc64': 0.25.5 + '@esbuild/android-arm': 0.25.5 + '@esbuild/android-arm64': 0.25.5 + '@esbuild/android-x64': 0.25.5 + '@esbuild/darwin-arm64': 0.25.5 + '@esbuild/darwin-x64': 0.25.5 + '@esbuild/freebsd-arm64': 0.25.5 + '@esbuild/freebsd-x64': 0.25.5 + '@esbuild/linux-arm': 0.25.5 + '@esbuild/linux-arm64': 0.25.5 + '@esbuild/linux-ia32': 0.25.5 + '@esbuild/linux-loong64': 0.25.5 + '@esbuild/linux-mips64el': 0.25.5 + '@esbuild/linux-ppc64': 0.25.5 + '@esbuild/linux-riscv64': 0.25.5 + '@esbuild/linux-s390x': 0.25.5 + '@esbuild/linux-x64': 0.25.5 + '@esbuild/netbsd-arm64': 0.25.5 + '@esbuild/netbsd-x64': 0.25.5 + '@esbuild/openbsd-arm64': 0.25.5 + '@esbuild/openbsd-x64': 0.25.5 + '@esbuild/sunos-x64': 0.25.5 + '@esbuild/win32-arm64': 0.25.5 + '@esbuild/win32-ia32': 0.25.5 + '@esbuild/win32-x64': 0.25.5 + + escalade@3.2.0: {} + + escape-string-regexp@4.0.0: {} + + eslint-scope@8.4.0: + dependencies: + esrecurse: 4.3.0 + estraverse: 5.3.0 + + eslint-visitor-keys@3.4.3: {} + + eslint-visitor-keys@4.2.1: {} + + eslint@9.28.0: + dependencies: + '@eslint-community/eslint-utils': 4.7.0(eslint@9.28.0) + '@eslint-community/regexpp': 4.12.1 + '@eslint/config-array': 0.20.1 + '@eslint/config-helpers': 0.2.3 + '@eslint/core': 0.14.0 + '@eslint/eslintrc': 3.3.1 + '@eslint/js': 9.28.0 + '@eslint/plugin-kit': 0.3.2 + '@humanfs/node': 0.16.6 + '@humanwhocodes/module-importer': 1.0.1 + '@humanwhocodes/retry': 0.4.3 + '@types/estree': 1.0.8 + '@types/json-schema': 7.0.15 + ajv: 
6.12.6 + chalk: 4.1.2 + cross-spawn: 7.0.6 + debug: 4.4.1 + escape-string-regexp: 4.0.0 + eslint-scope: 8.4.0 + eslint-visitor-keys: 4.2.1 + espree: 10.4.0 + esquery: 1.6.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 8.0.0 + find-up: 5.0.0 + glob-parent: 6.0.2 + ignore: 5.3.2 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + json-stable-stringify-without-jsonify: 1.0.1 + lodash.merge: 4.6.2 + minimatch: 3.1.2 + natural-compare: 1.4.0 + optionator: 0.9.4 + transitivePeerDependencies: + - supports-color + + espree@10.4.0: + dependencies: + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) + eslint-visitor-keys: 4.2.1 + + esquery@1.6.0: + dependencies: + estraverse: 5.3.0 + + esrecurse@4.3.0: + dependencies: + estraverse: 5.3.0 + + estraverse@5.3.0: {} + + estree-walker@3.0.3: + dependencies: + '@types/estree': 1.0.8 + + esutils@2.0.3: {} + + expect-type@1.2.1: {} + + fast-deep-equal@3.1.3: {} + + fast-glob@3.3.3: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + + fast-json-stable-stringify@2.1.0: {} + + fast-levenshtein@2.0.6: {} + + fastq@1.19.1: + dependencies: + reusify: 1.1.0 + + fdir@6.4.6(picomatch@4.0.2): + optionalDependencies: + picomatch: 4.0.2 + + file-entry-cache@8.0.0: + dependencies: + flat-cache: 4.0.1 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + find-up-cli@6.0.0: + dependencies: + find-up: 7.0.0 + meow: 13.2.0 + + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + + find-up@7.0.0: + dependencies: + locate-path: 7.2.0 + path-exists: 5.0.0 + unicorn-magic: 0.1.0 + + flat-cache@4.0.1: + dependencies: + flatted: 3.3.3 + keyv: 4.5.4 + + flatted@3.3.3: {} + + foreground-child@3.3.1: + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + + fsevents@2.3.3: + optional: true + + get-caller-file@2.0.5: {} + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob-parent@6.0.2: + dependencies: + is-glob: 4.0.3 + + 
glob@11.0.2: + dependencies: + foreground-child: 3.3.1 + jackspeak: 4.1.1 + minimatch: 10.0.2 + minipass: 7.1.2 + package-json-from-dist: 1.0.1 + path-scurry: 2.0.0 + + globals@14.0.0: {} + + graphemer@1.4.0: {} + + graphql@16.11.0: {} + + has-flag@4.0.0: {} + + headers-polyfill@4.0.3: {} + + ignore@5.3.2: {} + + ignore@7.0.5: {} + + import-fresh@3.3.1: + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + imurmurhash@0.1.4: {} + + is-extglob@2.1.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-node-process@1.2.0: {} + + is-number@7.0.0: {} + + isexe@2.0.0: {} + + jackspeak@4.1.1: + dependencies: + '@isaacs/cliui': 8.0.2 + + js-tokens@9.0.1: {} + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + json-buffer@3.0.1: {} + + json-schema-traverse@0.4.1: {} + + json-stable-stringify-without-jsonify@1.0.1: {} + + keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + + levn@0.4.1: + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + locate-path@7.2.0: + dependencies: + p-locate: 6.0.0 + + lodash.merge@4.6.2: {} + + loupe@3.1.3: {} + + lru-cache@11.1.0: {} + + magic-string@0.30.17: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.0 + + meow@13.2.0: {} + + merge2@1.4.1: {} + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + + minimatch@10.0.2: + dependencies: + brace-expansion: 4.0.1 + + minimatch@3.1.2: + dependencies: + brace-expansion: 1.1.12 + + minimatch@9.0.5: + dependencies: + brace-expansion: 2.0.2 + + minipass@7.1.2: {} + + ms@2.1.3: {} + + msw@2.10.2(typescript@5.8.3): + dependencies: + '@bundled-es-modules/cookie': 2.0.1 + '@bundled-es-modules/statuses': 1.0.1 + '@bundled-es-modules/tough-cookie': 0.1.6 + '@inquirer/confirm': 5.1.12 + '@mswjs/interceptors': 0.39.2 + '@open-draft/deferred-promise': 2.2.0 + '@open-draft/until': 2.1.0 + '@types/cookie': 0.6.0 + '@types/statuses': 2.0.6 + graphql: 16.11.0 + 
headers-polyfill: 4.0.3 + is-node-process: 1.2.0 + outvariant: 1.4.3 + path-to-regexp: 6.3.0 + picocolors: 1.1.1 + strict-event-emitter: 0.5.1 + type-fest: 4.41.0 + yargs: 17.7.2 + optionalDependencies: + typescript: 5.8.3 + transitivePeerDependencies: + - '@types/node' + + mute-stream@2.0.0: {} + + nanoid@3.3.11: {} + + natural-compare@1.4.0: {} + + optionator@0.9.4: + dependencies: + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.5 + + outvariant@1.4.3: {} + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-limit@4.0.0: + dependencies: + yocto-queue: 1.2.1 + + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + p-locate@6.0.0: + dependencies: + p-limit: 4.0.0 + + package-json-from-dist@1.0.1: {} + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 + + path-exists@4.0.0: {} + + path-exists@5.0.0: {} + + path-key@3.1.1: {} + + path-scurry@2.0.0: + dependencies: + lru-cache: 11.1.0 + minipass: 7.1.2 + + path-to-regexp@6.3.0: {} + + pathe@2.0.3: {} + + pathval@2.0.0: {} + + picocolors@1.1.1: {} + + picomatch@2.3.1: {} + + picomatch@4.0.2: {} + + postcss@8.5.5: + dependencies: + nanoid: 3.3.11 + picocolors: 1.1.1 + source-map-js: 1.2.1 + + prelude-ls@1.2.1: {} + + prettier@3.5.3: {} + + psl@1.15.0: + dependencies: + punycode: 2.3.1 + + punycode@2.3.1: {} + + querystringify@2.2.0: {} + + queue-microtask@1.2.3: {} + + require-directory@2.1.1: {} + + requires-port@1.0.0: {} + + resolve-from@4.0.0: {} + + reusify@1.1.0: {} + + rimraf@6.0.1: + dependencies: + glob: 11.0.2 + package-json-from-dist: 1.0.1 + + rollup@4.43.0: + dependencies: + '@types/estree': 1.0.7 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.43.0 + '@rollup/rollup-android-arm64': 4.43.0 + '@rollup/rollup-darwin-arm64': 4.43.0 + '@rollup/rollup-darwin-x64': 4.43.0 + '@rollup/rollup-freebsd-arm64': 4.43.0 + '@rollup/rollup-freebsd-x64': 4.43.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.43.0 + 
'@rollup/rollup-linux-arm-musleabihf': 4.43.0 + '@rollup/rollup-linux-arm64-gnu': 4.43.0 + '@rollup/rollup-linux-arm64-musl': 4.43.0 + '@rollup/rollup-linux-loongarch64-gnu': 4.43.0 + '@rollup/rollup-linux-powerpc64le-gnu': 4.43.0 + '@rollup/rollup-linux-riscv64-gnu': 4.43.0 + '@rollup/rollup-linux-riscv64-musl': 4.43.0 + '@rollup/rollup-linux-s390x-gnu': 4.43.0 + '@rollup/rollup-linux-x64-gnu': 4.43.0 + '@rollup/rollup-linux-x64-musl': 4.43.0 + '@rollup/rollup-win32-arm64-msvc': 4.43.0 + '@rollup/rollup-win32-ia32-msvc': 4.43.0 + '@rollup/rollup-win32-x64-msvc': 4.43.0 + fsevents: 2.3.3 + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + semver@7.7.2: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + siginfo@2.0.0: {} + + signal-exit@4.1.0: {} + + source-map-js@1.2.1: {} + + stackback@0.0.2: {} + + statuses@2.0.2: {} + + std-env@3.9.0: {} + + strict-event-emitter@0.5.1: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.1.0 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.1.0: + dependencies: + ansi-regex: 6.1.0 + + strip-json-comments@3.1.1: {} + + strip-literal@3.0.0: + dependencies: + js-tokens: 9.0.1 + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + tinybench@2.9.0: {} + + tinyexec@0.3.2: {} + + tinyglobby@0.2.14: + dependencies: + fdir: 6.4.6(picomatch@4.0.2) + picomatch: 4.0.2 + + tinypool@1.1.0: {} + + tinyrainbow@2.0.0: {} + + tinyspy@4.0.3: {} + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + tough-cookie@4.1.4: + dependencies: + psl: 1.15.0 + punycode: 2.3.1 + universalify: 0.2.0 + url-parse: 1.5.10 + + ts-api-utils@2.1.0(typescript@5.8.3): + dependencies: + typescript: 5.8.3 + + type-check@0.4.0: + dependencies: + prelude-ls: 1.2.1 + + type-fest@0.21.3: {} + + 
type-fest@4.41.0: {} + + typescript-eslint@8.34.0(eslint@9.28.0)(typescript@5.8.3): + dependencies: + '@typescript-eslint/eslint-plugin': 8.34.0(@typescript-eslint/parser@8.34.0(eslint@9.28.0)(typescript@5.8.3))(eslint@9.28.0)(typescript@5.8.3) + '@typescript-eslint/parser': 8.34.0(eslint@9.28.0)(typescript@5.8.3) + '@typescript-eslint/utils': 8.34.0(eslint@9.28.0)(typescript@5.8.3) + eslint: 9.28.0 + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + + typescript@5.8.3: {} + + unicorn-magic@0.1.0: {} + + universalify@0.2.0: {} + + uri-js@4.4.1: + dependencies: + punycode: 2.3.1 + + url-parse@1.5.10: + dependencies: + querystringify: 2.2.0 + requires-port: 1.0.0 + + vite-node@3.2.3: + dependencies: + cac: 6.7.14 + debug: 4.4.1 + es-module-lexer: 1.7.0 + pathe: 2.0.3 + vite: 6.3.5 + transitivePeerDependencies: + - '@types/node' + - jiti + - less + - lightningcss + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + - tsx + - yaml + + vite@6.3.5: + dependencies: + esbuild: 0.25.5 + fdir: 6.4.6(picomatch@4.0.2) + picomatch: 4.0.2 + postcss: 8.5.5 + rollup: 4.43.0 + tinyglobby: 0.2.14 + optionalDependencies: + fsevents: 2.3.3 + + vitest@3.2.3(msw@2.10.2(typescript@5.8.3)): + dependencies: + '@types/chai': 5.2.2 + '@vitest/expect': 3.2.3 + '@vitest/mocker': 3.2.3(msw@2.10.2(typescript@5.8.3))(vite@6.3.5) + '@vitest/pretty-format': 3.2.3 + '@vitest/runner': 3.2.3 + '@vitest/snapshot': 3.2.3 + '@vitest/spy': 3.2.3 + '@vitest/utils': 3.2.3 + chai: 5.2.0 + debug: 4.4.1 + expect-type: 1.2.1 + magic-string: 0.30.17 + pathe: 2.0.3 + picomatch: 4.0.2 + std-env: 3.9.0 + tinybench: 2.9.0 + tinyexec: 0.3.2 + tinyglobby: 0.2.14 + tinypool: 1.1.0 + tinyrainbow: 2.0.0 + vite: 6.3.5 + vite-node: 3.2.3 + why-is-node-running: 2.3.0 + transitivePeerDependencies: + - jiti + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + - tsx + - yaml + + which@2.0.2: + dependencies: + isexe: 
2.0.0 + + why-is-node-running@2.3.0: + dependencies: + siginfo: 2.0.0 + stackback: 0.0.2 + + word-wrap@1.2.5: {} + + wrap-ansi@6.2.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + + y18n@5.0.8: {} + + yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + + yocto-queue@0.1.0: {} + + yocto-queue@1.2.1: {} + + yoctocolors-cjs@2.1.2: {} diff --git a/ts/pnpm-workspace.yaml b/ts/pnpm-workspace.yaml new file mode 100644 index 0000000..d525acd --- /dev/null +++ b/ts/pnpm-workspace.yaml @@ -0,0 +1,2 @@ +packages: + - 'pkgs/*' diff --git a/ts/tsconfig.base.json b/ts/tsconfig.base.json new file mode 100644 index 0000000..e84a69d --- /dev/null +++ b/ts/tsconfig.base.json @@ -0,0 +1,9 @@ +{ + "compilerOptions": { + "forceConsistentCasingInFileNames": true, + "noFallthroughCasesInSwitch": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "strict": true + } +} diff --git a/ts/tsconfig.json b/ts/tsconfig.json new file mode 100644 index 0000000..c926efe --- /dev/null +++ b/ts/tsconfig.json @@ -0,0 +1,13 @@ +{ + "files": [], + "references": [ + { "path": "pkgs/duckdb-data-reader/src" }, + { "path": "pkgs/duckdb-data-reader/test" }, + { "path": "pkgs/duckdb-data-types/src" }, + { "path": "pkgs/duckdb-data-types/test" }, + { "path": "pkgs/duckdb-data-values/src" }, + { "path": "pkgs/duckdb-data-values/test" }, + { "path": "pkgs/duckdb-ui-client/src" }, + { "path": "pkgs/duckdb-ui-client/test" }, + ] +} diff --git a/ts/tsconfig.library.json b/ts/tsconfig.library.json new file mode 100644 index 0000000..96ee0cc --- /dev/null +++ b/ts/tsconfig.library.json @@ -0,0 +1,11 @@ +{ + "extends": 
"./tsconfig.base.json", + "compilerOptions": { + "composite": true, + "declaration": true, + "esModuleInterop": true, + "module": "NodeNext", + "moduleResolution": "NodeNext", + "target": "ESNext", + } +} diff --git a/ts/tsconfig.test.json b/ts/tsconfig.test.json new file mode 100644 index 0000000..94ee1d1 --- /dev/null +++ b/ts/tsconfig.test.json @@ -0,0 +1,15 @@ +{ + "extends": "./tsconfig.base.json", + "compilerOptions": { + "allowImportingTsExtensions": true, + "esModuleInterop": true, + "isolatedModules": true, + "module": "ESNext", + "moduleResolution": "Bundler", + "noEmit": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "target": "ESNext", + "useDefineForClassFields": true, + } +}