add duckdb-ui-client & other ts pkgs (#10)
* add duckdb-ui-client & other ts pkgs
* workflow fixes
* fix working dir
* no sparse checkout; specify package.json path
* path to pnpm-lock.yaml
* add check & build test
* workflow step descriptions
* use comments & names
* one more naming tweak
.github/workflows/TypeScriptWorkspace.yml (vendored, 37 lines changed)

@@ -13,9 +13,38 @@ on:
   workflow_dispatch:
 
 jobs:
-  hello:
-    name: "Hello"
+  build_and_test:
+    name: Build & Test
     runs-on: ubuntu-latest
     steps:
-      - name: Say Hello
-        run: echo Hello from TypeScript Workspace job!
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Setup pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          package_json_file: ts/package.json
+
+      - name: Setup Node with pnpm cache
+        uses: actions/setup-node@v4
+        with:
+          cache: 'pnpm'
+          cache-dependency-path: ts/pnpm-lock.yaml
+
+      # Src files are built using preinstall
+      - name: Install dependencies & build src
+        working-directory: ts
+        run: pnpm install
+
+      # This step is needed to type-check test files. (Src files are built during install.)
+      - name: Build src & test (to type-check test)
+        working-directory: ts
+        run: pnpm build
+
+      - name: Check formatting & linting rules
+        working-directory: ts
+        run: pnpm check
+
+      - name: Test
+        working-directory: ts
+        run: pnpm test
ts/.gitignore (vendored, new file, 3 lines)

node_modules
**/out/*
**/test/tsconfig.tsbuildinfo
ts/.prettierignore (new file, 3 lines)

out
**/*.json
README.md
ts/.prettierrc (new file, 3 lines)

{
  "singleQuote": true
}
ts/README.md (54 lines changed)

# TypeScript Workspace

## Structure

This directory is a [pnpm workspace](https://pnpm.io/workspaces). Use the [pnpm](https://pnpm.io/) package manager, not npm or yarn.

One (recommended) way to install pnpm is using [corepack](https://pnpm.io/installation#using-corepack).

## Build

Run `pnpm install` (or just `pnpm i`) in a package directory to install dependencies and build. Note that this will also build dependent packages in this workspace. This builds src files, but not test files.

Run `pnpm build` to just run the build. This will not build dependencies. It will build both src and test files. To build just src or just test, use `pnpm build:src` or `pnpm build:test`.

Run `pnpm build:watch` in a package to rebuild (both src and test files) when source files are changed.

Run `pnpm check` in a package to check formatting and linting rules. To just check formatting, run `pnpm format:check`. To correct formatting, run `pnpm format:write`. To just check linting rules, run `pnpm lint`.

Run `pnpm clean` in a package to remove its built output files.

Run `pnpm build` at the root of the workspace to build all packages (both src and test files).

Run `pnpm build:watch` at the root to rebuild (only) the relevant packages when source files are changed.

Run `pnpm check` at the root of the workspace to check formatting and linting rules for all packages.

## Test

Run `pnpm test` in a package directory to run its tests.

Run `pnpm test:watch` in a package directory to run its tests and rerun them when source files change.

Tests use [vitest](https://vitest.dev/), either in Node or in [Browser Mode](https://vitest.dev/guide/browser.html) (using Chrome), depending on the package.

Run `pnpm test` at the root of the workspace to test all packages.

## Create

To create a new package, add a directory under `packages`.

Add a `package.json` file following the conventions of other packages.

The `package.json` should have `preinstall`, `build`, `clean`, and `test` scripts, as well as `check`, `format`, and `lint` scripts. See existing packages for details.
It should have a `name`, `version`, and `description`, set `"type": "module"`, and set `main`, `module`, and `types` appropriately.

Production source code should go in a `src` subdirectory.
Put a `tsconfig.json` in this directory that extends `tsconfig.library.json` and sets the `outDir` to `../out`.

Test source code should go in a `test` subdirectory.
Put a `tsconfig.json` in this directory that extends `tsconfig.test.json` and references `../src`.

For browser-based tests, create a `vite.config.js` file that enables `browser` mode, sets the `headless` option to `true`, and sets the `type` to `chrome`.
Note that `crossOriginIsolated` can be enabled by setting server headers. See the example in `wasm-extension`.

Add references to both the `src` and `test` directories of your new package to the root `tsconfig.json` of the workspace.
ts/eslint.config.mjs (new file, 21 lines)

// @ts-check

import eslint from '@eslint/js';
import tseslint from 'typescript-eslint';

export default tseslint.config(
  eslint.configs.recommended,
  ...tseslint.configs.recommended,
  {
    rules: {
      '@typescript-eslint/no-unused-vars': [
        'error',
        {
          argsIgnorePattern: '^_',
          varsIgnorePattern: '^_',
          caughtErrorsIgnorePattern: '^_',
        },
      ],
    },
  },
);
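The `^_` patterns exempt underscore-prefixed identifiers from `@typescript-eslint/no-unused-vars`. A small sketch of what this config permits (the function and names here are hypothetical, not part of this commit):

```ts
// Allowed: unused parameters, variables, and caught errors prefixed with `_`.
export function handle(_event: unknown, payload: string): string {
  const _debug = payload.length; // unused, but ignored via varsIgnorePattern
  try {
    return payload.trim();
  } catch (_err) {
    // _err is unused, but ignored via caughtErrorsIgnorePattern
    return '';
  }
}
```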
ts/package.json (new file, 19 lines)

{
  "private": true,
  "scripts": {
    "build": "tsc -b",
    "build:watch": "tsc -b --watch",
    "check": "pnpm -r check",
    "test": "pnpm -r test"
  },
  "devDependencies": {
    "typescript": "^5.8.3"
  },
  "pnpm": {
    "overrides": {
      "tar-fs": "^3.0.8",
      "ws": "^8.18.1"
    }
  },
  "packageManager": "pnpm@9.15.2+sha512.93e57b0126f0df74ce6bff29680394c0ba54ec47246b9cf321f0121d8d9bb03f750a705f24edc3c1180853afd7c2c3b94196d0a3d53d3e069d9e2793ef11f321"
}
ts/pkgs/duckdb-data-reader/package.json (new file, 38 lines)

{
  "name": "@duckdb/data-reader",
  "version": "0.0.1",
  "description": "Utilities for representing and reading tabular data returned by DuckDB",
  "type": "module",
  "main": "./out/index.js",
  "module": "./out/index.js",
  "types": "./out/index.d.ts",
  "scripts": {
    "preinstall": "pnpm build:src",
    "build": "tsc -b src test",
    "build:src": "tsc -b src",
    "build:test": "tsc -b test",
    "build:watch": "tsc -b src test --watch",
    "check": "pnpm format:check && pnpm lint",
    "clean": "rimraf out",
    "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check",
    "format:write": "prettier . --ignore-path $(find-up .prettierignore) --write",
    "lint": "pnpm eslint src test",
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "dependencies": {
    "@duckdb/data-types": "workspace:*",
    "@duckdb/data-values": "workspace:*"
  },
  "devDependencies": {
    "@eslint/js": "^9.24.0",
    "eslint": "^9.24.0",
    "find-up-cli": "^6.0.0",
    "prettier": "^3.5.3",
    "rimraf": "^6.0.1",
    "typescript": "^5.8.3",
    "typescript-eslint": "^8.30.1",
    "vite": "^6.2.6",
    "vitest": "^3.1.1"
  }
}
ts/pkgs/duckdb-data-reader/src/AsyncDuckDBDataBatchIterator.ts (new file, 11 lines)

import { DuckDBData } from './DuckDBData.js';

export type DuckDBDataBatchIteratorResult = IteratorResult<
  DuckDBData,
  DuckDBData | undefined
>;

export type AsyncDuckDBDataBatchIterator = AsyncIterator<
  DuckDBData,
  DuckDBData | undefined
>;
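A minimal sketch of an implementation of this iterator contract, wrapping a pre-built array of batches (not part of this commit; the test file later in this diff defines a fuller `TestAsyncDuckDBDataBatchIterator` with `return` and `throw`):

```ts
import { DuckDBData } from './DuckDBData.js';
import { AsyncDuckDBDataBatchIterator } from './AsyncDuckDBDataBatchIterator.js';

// Hypothetical helper: yields each batch in order, then reports done.
function batchIteratorFromArray(
  batches: readonly DuckDBData[],
): AsyncDuckDBDataBatchIterator {
  let index = 0;
  return {
    async next() {
      if (index >= batches.length) {
        return { done: true, value: undefined };
      }
      return { done: false, value: batches[index++] };
    },
  };
}
```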
ts/pkgs/duckdb-data-reader/src/ColumnFilteredDuckDBData.ts (new file, 55 lines)

import { DuckDBType } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { DuckDBData } from './DuckDBData.js';

export class ColumnFilteredDuckDBData extends DuckDBData {
  private readonly inputColumnIndexForOutputColumnIndex: readonly number[];

  constructor(
    private data: DuckDBData,
    columnVisibility: readonly boolean[],
  ) {
    super();

    const inputColumnIndexForOutputColumnIndex: number[] = [];
    const inputColumnCount = data.columnCount;
    let inputIndex = 0;
    while (inputIndex < inputColumnCount) {
      while (inputIndex < inputColumnCount && !columnVisibility[inputIndex]) {
        inputIndex++;
      }
      if (inputIndex < inputColumnCount) {
        inputColumnIndexForOutputColumnIndex.push(inputIndex++);
      }
    }
    this.inputColumnIndexForOutputColumnIndex =
      inputColumnIndexForOutputColumnIndex;
  }

  get columnCount() {
    return this.inputColumnIndexForOutputColumnIndex.length;
  }

  get rowCount() {
    return this.data.rowCount;
  }

  columnName(columnIndex: number): string {
    return this.data.columnName(
      this.inputColumnIndexForOutputColumnIndex[columnIndex],
    );
  }

  columnType(columnIndex: number): DuckDBType {
    return this.data.columnType(
      this.inputColumnIndexForOutputColumnIndex[columnIndex],
    );
  }

  value(columnIndex: number, rowIndex: number): DuckDBValue {
    return this.data.value(
      this.inputColumnIndexForOutputColumnIndex[columnIndex],
      rowIndex,
    );
  }
}
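A brief usage sketch, assuming the `MemoryDuckDBData` class defined later in this commit: hiding the middle column of a three-column table yields a two-column view over the same rows.

```ts
import { INTEGER, VARCHAR } from '@duckdb/data-types';
import { ColumnFilteredDuckDBData } from './ColumnFilteredDuckDBData.js';
import { MemoryDuckDBData } from './MemoryDuckDBData.js';

// Three columns; values are stored column-major (one array per column).
const data = new MemoryDuckDBData(
  [
    { name: 'id', type: INTEGER },
    { name: 'hidden', type: INTEGER },
    { name: 'label', type: VARCHAR },
  ],
  [
    [1, 2],
    [10, 20],
    ['a', 'b'],
  ],
);

// Visibility mask: keep 'id' and 'label', drop 'hidden'.
const filtered = new ColumnFilteredDuckDBData(data, [true, false, true]);
console.log(filtered.columnCount); // 2
console.log(filtered.columnName(1)); // 'label'
console.log(filtered.value(1, 0)); // 'a'
```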
ts/pkgs/duckdb-data-reader/src/DuckDBData.ts (new file, 114 lines)

import { DuckDBType } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { DuckDBRow } from './DuckDBRow.js';

/**
 * A two-dimensional table of data along with column metadata.
 *
 * May represent either a partial or full result set, or a batch of rows read from a result stream.
 */
export abstract class DuckDBData {
  /**
   * Number of columns.
   *
   * May be zero until the first part of the result is read. Will not change after the initial read.
   */
  abstract get columnCount(): number;

  /**
   * Current number of rows.
   *
   * For a partial result set, this may change as more rows are read.
   * For a full result, or a batch, this will not change.
   */
  abstract get rowCount(): number;

  /**
   * Returns the name of the column at the given index (starting at zero).
   *
   * Note that duplicate column names are possible.
   */
  abstract columnName(columnIndex: number): string;

  /**
   * Returns the type of the column at the given index (starting at zero).
   */
  abstract columnType(columnIndex: number): DuckDBType;

  /**
   * Returns the value for the given column and row. Both are zero-indexed.
   */
  abstract value(columnIndex: number, rowIndex: number): DuckDBValue;

  /**
   * Returns the single value, assuming exactly one column and row. Throws otherwise.
   */
  singleValue(): DuckDBValue {
    const { columnCount, rowCount } = this;
    if (columnCount === 0) {
      throw Error('no column data');
    }
    if (rowCount === 0) {
      throw Error('no rows');
    }
    if (columnCount > 1) {
      throw Error('more than one column');
    }
    if (rowCount > 1) {
      throw Error('more than one row');
    }
    return this.value(0, 0);
  }

  /**
   * Returns the column names as an array.
   */
  columnNames(): readonly string[] {
    const { columnCount } = this;
    const outputColumnNames: string[] = [];
    for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) {
      outputColumnNames.push(this.columnName(columnIndex));
    }
    return outputColumnNames;
  }

  /**
   * Returns the column names as an array, deduplicated following DuckDB's "Auto-Increment Duplicate Column Names"
   * behavior.
   */
  deduplicatedColumnNames(): readonly string[] {
    const { columnCount } = this;
    const outputColumnNames: string[] = [];
    const columnNameCount: { [columnName: string]: number } = {};
    for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) {
      const inputColumnName = this.columnName(columnIndex);
      const nameCount = (columnNameCount[inputColumnName] || 0) + 1;
      columnNameCount[inputColumnName] = nameCount;
      if (nameCount > 1) {
        outputColumnNames.push(`${inputColumnName}:${nameCount - 1}`);
      } else {
        outputColumnNames.push(inputColumnName);
      }
    }
    return outputColumnNames;
  }

  /**
   * Returns the data as an array of row objects, keyed by column names.
   *
   * The column names are deduplicated following DuckDB's "Auto-Increment Duplicate Column Names" behavior.
   */
  toRows(): readonly DuckDBRow[] {
    const { rowCount, columnCount } = this;
    const outputColumnNames = this.deduplicatedColumnNames();
    const outputRows: DuckDBRow[] = [];
    for (let rowIndex = 0; rowIndex < rowCount; rowIndex++) {
      const row: { [columnName: string]: DuckDBValue } = {};
      for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) {
        row[outputColumnNames[columnIndex]] = this.value(columnIndex, rowIndex);
      }
      outputRows.push(row);
    }
    return outputRows;
  }
}
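To make the deduplication concrete: repeated names get a `:N` suffix starting at 1, so columns named `x`, `x`, `y` come back as `x`, `x:1`, `y`. A small sketch using `MemoryDuckDBData` (defined later in this commit):

```ts
import { INTEGER } from '@duckdb/data-types';
import { MemoryDuckDBData } from './MemoryDuckDBData.js';

const data = new MemoryDuckDBData(
  [
    { name: 'x', type: INTEGER },
    { name: 'x', type: INTEGER },
    { name: 'y', type: INTEGER },
  ],
  [[1], [2], [3]], // one single-row array per column
);

console.log(data.deduplicatedColumnNames()); // ['x', 'x:1', 'y']
console.log(data.toRows()); // [{ x: 1, 'x:1': 2, y: 3 }]
```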
ts/pkgs/duckdb-data-reader/src/DuckDBDataReader.ts (new file, 179 lines)

import { DuckDBType } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { AsyncDuckDBDataBatchIterator } from './AsyncDuckDBDataBatchIterator.js';
import { DuckDBData } from './DuckDBData.js';

// Stores information about a run of similarly-sized batches.
interface BatchSizeRun {
  batchCount: number;
  batchSize: number;
  rowCount: number; // Always equal to batchCount * batchSize. Precalculated for efficiency.
}

/**
 * A result set that can be read incrementally.
 *
 * Represents either a partial or full result.
 * For full results, the `done` property will be true.
 * To read more rows into a partial result, use the `readUntil` or `readAll` methods.
 */
export class DuckDBDataReader extends DuckDBData {
  private readonly iterator: AsyncDuckDBDataBatchIterator;

  private iteratorDone: boolean = false;

  private totalRowsRead: number = 0;

  private readonly batches: DuckDBData[] = [];

  // Stores the sizes of the batches using run-length encoding to make lookup efficient.
  // Since batches before the last should be a consistent size, this array is not expected to grow beyond length 2.
  // (One run for the N-1 batches of consistent size, plus one run for the differently-sized last batch, if any.)
  private readonly batchSizeRuns: BatchSizeRun[] = [];

  constructor(iterator: AsyncDuckDBDataBatchIterator) {
    super();
    this.iterator = iterator;
  }

  /**
   * Number of columns.
   *
   * Will be zero until the first part of the result is read. Will not change after the initial read.
   */
  public get columnCount(): number {
    if (this.batches.length === 0) {
      return 0;
    }
    return this.batches[0].columnCount;
  }

  /**
   * Current number of rows.
   *
   * For a partial result set, with `done` false, this may change as more rows are read.
   * For a full result, with `done` true, this will not change.
   */
  public get rowCount(): number {
    return this.totalRowsRead;
  }

  /**
   * Returns the name of the column at the given index (starting at zero).
   *
   * Note that duplicate column names are possible.
   *
   * Will throw an error if no part of the result has been read yet.
   */
  public columnName(columnIndex: number): string {
    if (this.batches.length === 0) {
      throw Error('no column data');
    }
    return this.batches[0].columnName(columnIndex);
  }

  /**
   * Returns the type of the column at the given index (starting at zero).
   *
   * Will throw an error if no part of the result has been read yet.
   */
  public columnType(columnIndex: number): DuckDBType {
    if (this.batches.length === 0) {
      throw Error('no column data');
    }
    return this.batches[0].columnType(columnIndex);
  }

  /**
   * Returns the value for the given column and row. Both are zero-indexed.
   *
   * Will throw an error if `rowIndex` is not less than the current `rowCount`.
   */
  public value(columnIndex: number, rowIndex: number): DuckDBValue {
    if (this.totalRowsRead === 0) {
      throw Error('no data');
    }
    let batchIndex = 0;
    let currentRowIndex = rowIndex;
    // Find which run of batches our row is in.
    // Since batchSizeRuns shouldn't ever be longer than 2, this should be O(1).
    for (const run of this.batchSizeRuns) {
      if (currentRowIndex < run.rowCount) {
        // The row we're looking for is in this run.
        // Calculate the batch index and the row index in that batch.
        batchIndex += Math.floor(currentRowIndex / run.batchSize);
        const rowIndexInBatch = currentRowIndex % run.batchSize;
        const batch = this.batches[batchIndex];
        return batch.value(columnIndex, rowIndexInBatch);
      }
      // The row we're looking for is not in this run.
      // Update our counts for this run and move to the next one.
      batchIndex += run.batchCount;
      currentRowIndex -= run.rowCount;
    }
    // We didn't find our row. It must have been out of range.
    throw Error(
      `Row index ${rowIndex} requested, but only ${this.totalRowsRead} rows have been read so far.`,
    );
  }

  /**
   * Returns true if all rows have been read.
   */
  public get done(): boolean {
    return this.iteratorDone;
  }

  /**
   * Read all rows.
   */
  public async readAll(): Promise<void> {
    return this.read();
  }

  /**
   * Read rows until at least the given target row count has been met.
   *
   * Note that the resulting row count could be greater than the target, since rows are read in batches, typically of 2048 rows each.
   */
  public async readUntil(targetRowCount: number): Promise<void> {
    return this.read(targetRowCount);
  }

  private async read(targetRowCount?: number): Promise<void> {
    while (
      !(
        this.iteratorDone ||
        (targetRowCount !== undefined && this.totalRowsRead >= targetRowCount)
      )
    ) {
      const { value, done } = await this.iterator.next();
      if (value) {
        this.updateBatchSizeRuns(value);
        this.batches.push(value);
        this.totalRowsRead += value.rowCount;
      }
      if (done) {
        this.iteratorDone = done;
      }
    }
  }

  private updateBatchSizeRuns(batch: DuckDBData) {
    if (this.batchSizeRuns.length > 0) {
      const lastRun = this.batchSizeRuns[this.batchSizeRuns.length - 1];
      if (lastRun.batchSize === batch.rowCount) {
        // If the new batch is the same size as the last one, just update our last run.
        lastRun.batchCount += 1;
        lastRun.rowCount += lastRun.batchSize;
        return;
      }
    }
    // If this is our first batch, or it's a different size, create a new run.
    this.batchSizeRuns.push({
      batchCount: 1,
      batchSize: batch.rowCount,
      rowCount: batch.rowCount,
    });
  }
}
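To illustrate the run-length-encoded lookup: after reading four batches of sizes 2048, 2048, 2048, and 100, `batchSizeRuns` holds two runs, and locating a row costs at most two comparisons plus a division. A worked sketch of the arithmetic `value()` performs (a standalone copy of its loop, not part of this commit):

```ts
// Runs after four batches of sizes 2048, 2048, 2048, 100:
const batchSizeRuns = [
  { batchCount: 3, batchSize: 2048, rowCount: 6144 },
  { batchCount: 1, batchSize: 100, rowCount: 100 },
];

// Locating global row 6200:
let batchIndex = 0;
let currentRowIndex = 6200;
for (const run of batchSizeRuns) {
  if (currentRowIndex < run.rowCount) {
    batchIndex += Math.floor(currentRowIndex / run.batchSize); // 3 + 0 = 3
    const rowIndexInBatch = currentRowIndex % run.batchSize; // 56
    console.log({ batchIndex, rowIndexInBatch }); // row 6200 is row 56 of batch 3
    break;
  }
  batchIndex += run.batchCount; // 3 batches skipped
  currentRowIndex -= run.rowCount; // 6200 - 6144 = 56
}
```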
ts/pkgs/duckdb-data-reader/src/DuckDBRow.ts (new file, 5 lines)

import { DuckDBValue } from '@duckdb/data-values';

export interface DuckDBRow {
  readonly [columnName: string]: DuckDBValue;
}
ts/pkgs/duckdb-data-reader/src/MemoryDuckDBData.ts (new file, 32 lines)

import { DuckDBType } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { DuckDBData } from './DuckDBData.js';

export class MemoryDuckDBData extends DuckDBData {
  constructor(
    private columns: { name: string; type: DuckDBType }[],
    private values: DuckDBValue[][],
  ) {
    super();
  }

  get columnCount() {
    return this.columns.length;
  }

  get rowCount() {
    return this.values.length > 0 ? this.values[0].length : 0;
  }

  columnName(columnIndex: number): string {
    return this.columns[columnIndex].name;
  }

  columnType(columnIndex: number): DuckDBType {
    return this.columns[columnIndex].type;
  }

  value(columnIndex: number, rowIndex: number): DuckDBValue {
    return this.values[columnIndex][rowIndex];
  }
}
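Note that `values` is column-major: it is indexed as `values[columnIndex][rowIndex]`, and `rowCount` comes from the first column's length. A quick construction sketch:

```ts
import { INTEGER, VARCHAR } from '@duckdb/data-types';
import { MemoryDuckDBData } from './MemoryDuckDBData.js';

// Two columns, three rows; one inner array per column, not per row.
const data = new MemoryDuckDBData(
  [
    { name: 'num', type: INTEGER },
    { name: 'str', type: VARCHAR },
  ],
  [
    [2, 3, 5],
    ['z', 'y', 'x'],
  ],
);
console.log(data.rowCount); // 3
console.log(data.value(1, 2)); // 'x'
```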
ts/pkgs/duckdb-data-reader/src/index.ts (new file, 6 lines)

export * from './AsyncDuckDBDataBatchIterator.js';
export * from './ColumnFilteredDuckDBData.js';
export * from './DuckDBData.js';
export * from './DuckDBDataReader.js';
export * from './DuckDBRow.js';
export * from './MemoryDuckDBData.js';
ts/pkgs/duckdb-data-reader/src/tsconfig.json (new file, 6 lines)

{
  "extends": "../../../tsconfig.library.json",
  "compilerOptions": {
    "outDir": "../out"
  }
}
ts/pkgs/duckdb-data-reader/test/DuckDBDataReader.test.ts (new file, 177 lines)

import { DuckDBType, INTEGER, VARCHAR } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { expect, suite, test } from 'vitest';
import {
  AsyncDuckDBDataBatchIterator,
  DuckDBData,
  DuckDBDataReader,
  MemoryDuckDBData,
} from '../src';

const ITERATOR_DONE = Object.freeze({ done: true, value: undefined });

class TestAsyncDuckDBDataBatchIterator implements AsyncDuckDBDataBatchIterator {
  private batches: readonly DuckDBData[];

  private nextBatchIndex: number | null;

  constructor(batches: readonly DuckDBData[]) {
    this.batches = batches;
    this.nextBatchIndex = this.batches.length > 0 ? 0 : null;
  }

  async next(): Promise<IteratorResult<DuckDBData, undefined>> {
    if (this.nextBatchIndex == null) {
      return ITERATOR_DONE;
    }
    const nextBatch = this.batches[this.nextBatchIndex++];
    if (this.nextBatchIndex >= this.batches.length) {
      this.nextBatchIndex = null;
    }
    return {
      done: this.nextBatchIndex == null,
      value: nextBatch,
    } as IteratorResult<DuckDBData, undefined>;
  }

  async return(): Promise<IteratorResult<DuckDBData, undefined>> {
    return ITERATOR_DONE;
  }

  async throw(_err: Error): Promise<IteratorResult<DuckDBData, undefined>> {
    return ITERATOR_DONE;
  }

  [Symbol.asyncIterator](): AsyncDuckDBDataBatchIterator {
    return this;
  }
}

function expectColumns(
  data: DuckDBData,
  columns: { name: string; type: DuckDBType }[],
) {
  expect(data.columnCount).toBe(columns.length);
  for (let columnIndex = 0; columnIndex < columns.length; columnIndex++) {
    const column = columns[columnIndex];
    expect(data.columnName(columnIndex)).toBe(column.name);
    expect(data.columnType(columnIndex)).toStrictEqual(column.type);
  }
}

function expectValues(data: DuckDBData, values: DuckDBValue[][]) {
  for (let columnIndex = 0; columnIndex < values.length; columnIndex++) {
    const column = values[columnIndex];
    for (let rowIndex = 0; rowIndex < column.length; rowIndex++) {
      expect(data.value(columnIndex, rowIndex)).toBe(column[rowIndex]);
    }
  }
}

suite('DuckDBDataReader', () => {
  test('should work for an empty batch list', async () => {
    const batches: DuckDBData[] = [];
    const iterator = new TestAsyncDuckDBDataBatchIterator(batches);
    const reader = new DuckDBDataReader(iterator);
    expect(reader.done).toBe(false);
    expect(reader.columnCount).toBe(0);
    expect(reader.rowCount).toBe(0);
    await reader.readAll();
    expect(reader.done).toBe(true);
    expect(reader.columnCount).toBe(0);
    expect(reader.rowCount).toBe(0);
  });
  test('should work for a single batch', async () => {
    const columns = [
      { name: 'num', type: INTEGER },
      { name: 'str', type: VARCHAR },
    ];
    const values = [
      [2, 3, 5],
      ['z', 'y', 'x'],
    ];
    const batches: DuckDBData[] = [new MemoryDuckDBData(columns, values)];
    const iterator = new TestAsyncDuckDBDataBatchIterator(batches);
    const reader = new DuckDBDataReader(iterator);
    expect(reader.done).toBe(false);
    expect(reader.columnCount).toBe(0);
    expect(reader.rowCount).toBe(0);
    await reader.readAll();
    expect(reader.done).toBe(true);
    expectColumns(reader, columns);
    expect(reader.rowCount).toBe(3);
    expectValues(reader, values);
  });
  test('should work for multiple batches', async () => {
    const columns = [
      { name: 'num', type: INTEGER },
      { name: 'str', type: VARCHAR },
    ];
    const values = [
      [12, 13, 15, 22, 23, 25, 32, 33, 35],
      ['z1', 'y1', 'x1', 'z2', 'y2', 'x2', 'z3', 'y3', 'x3'],
    ];
    const batches: DuckDBData[] = [
      new MemoryDuckDBData(columns, [
        values[0].slice(0, 3),
        values[1].slice(0, 3),
      ]),
      new MemoryDuckDBData(columns, [
        values[0].slice(3, 6),
        values[1].slice(3, 6),
      ]),
      new MemoryDuckDBData(columns, [
        values[0].slice(6, 9),
        values[1].slice(6, 9),
      ]),
    ];
    const iterator = new TestAsyncDuckDBDataBatchIterator(batches);
    const reader = new DuckDBDataReader(iterator);
    expect(reader.done).toBe(false);
    expect(reader.columnCount).toBe(0);
    expect(reader.rowCount).toBe(0);
    await reader.readAll();
    expect(reader.done).toBe(true);
    expectColumns(reader, columns);
    expect(reader.rowCount).toBe(9);
    expectValues(reader, values);
  });
  test('should work for partial reads of multiple batches', async () => {
    const columns = [
      { name: 'num', type: INTEGER },
      { name: 'str', type: VARCHAR },
    ];
    const values = [
      [12, 13, 15, 22, 23, 25, 32, 33],
      ['z1', 'y1', 'x1', 'z2', 'y2', 'x2', 'z3', 'y3'],
    ];
    const batches: DuckDBData[] = [
      new MemoryDuckDBData(columns, [
        values[0].slice(0, 3),
        values[1].slice(0, 3),
      ]),
      new MemoryDuckDBData(columns, [
        values[0].slice(3, 6),
        values[1].slice(3, 6),
      ]),
      new MemoryDuckDBData(columns, [
        values[0].slice(6, 8),
        values[1].slice(6, 8),
      ]),
    ];
    const iterator = new TestAsyncDuckDBDataBatchIterator(batches);
    const reader = new DuckDBDataReader(iterator);
    expect(reader.done).toBe(false);
    expect(reader.columnCount).toBe(0);
    expect(reader.rowCount).toBe(0);
    await reader.readUntil(5);
    expect(reader.done).toBe(false);
    expectColumns(reader, columns);
    expect(reader.rowCount).toBe(6);
    expectValues(reader, [values[0].slice(0, 6), values[1].slice(0, 6)]);
    await reader.readUntil(10);
    expect(reader.done).toBe(true);
    expect(reader.rowCount).toBe(8);
    expectValues(reader, values);
  });
});
ts/pkgs/duckdb-data-reader/test/tsconfig.json (new file, 6 lines)

{
  "extends": "../../../tsconfig.test.json",
  "references": [
    { "path": "../src" }
  ]
}
ts/pkgs/duckdb-data-types/package.json (new file, 37 lines)

{
  "name": "@duckdb/data-types",
  "version": "0.0.1",
  "description": "Utilities for representing DuckDB types",
  "type": "module",
  "main": "./out/index.js",
  "module": "./out/index.js",
  "types": "./out/index.d.ts",
  "scripts": {
    "preinstall": "pnpm build:src",
    "build": "tsc -b src test",
    "build:src": "tsc -b src",
    "build:test": "tsc -b test",
    "build:watch": "tsc -b src test --watch",
    "check": "pnpm format:check && pnpm lint",
    "clean": "rimraf out",
    "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check",
    "format:write": "prettier . --ignore-path $(find-up .prettierignore) --write",
    "lint": "pnpm eslint src test",
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "dependencies": {
    "@duckdb/data-values": "workspace:*"
  },
  "devDependencies": {
    "@eslint/js": "^9.24.0",
    "eslint": "^9.24.0",
    "find-up-cli": "^6.0.0",
    "prettier": "^3.5.3",
    "rimraf": "^6.0.1",
    "typescript": "^5.8.3",
    "typescript-eslint": "^8.30.1",
    "vite": "^6.2.6",
    "vitest": "^3.1.1"
  }
}
989
ts/pkgs/duckdb-data-types/src/DuckDBType.ts
Normal file
989
ts/pkgs/duckdb-data-types/src/DuckDBType.ts
Normal file
@@ -0,0 +1,989 @@
|
||||
import { Json } from '@duckdb/data-values';
|
||||
import { DuckDBTypeId } from './DuckDBTypeId.js';
|
||||
import { quotedIdentifier, quotedString } from './sql.js';
|
||||
|
||||
export interface DuckDBTypeToStringOptions {
|
||||
short?: boolean;
|
||||
}
|
||||
export abstract class BaseDuckDBType<T extends DuckDBTypeId> {
|
||||
public readonly typeId: T;
|
||||
public readonly alias?: string;
|
||||
protected constructor(typeId: T, alias?: string) {
|
||||
this.typeId = typeId;
|
||||
this.alias = alias;
|
||||
}
|
||||
public toString(_options?: DuckDBTypeToStringOptions): string {
|
||||
return this.alias ?? DuckDBTypeId[this.typeId];
|
||||
}
|
||||
public toJson(): Json {
|
||||
return {
|
||||
typeId: this.typeId,
|
||||
...(this.alias ? { alias: this.alias } : {}),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class DuckDBBooleanType extends BaseDuckDBType<DuckDBTypeId.BOOLEAN> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.BOOLEAN, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBBooleanType();
|
||||
public static create(alias?: string): DuckDBBooleanType {
|
||||
return alias ? new DuckDBBooleanType(alias) : DuckDBBooleanType.instance;
|
||||
}
|
||||
}
|
||||
export const BOOLEAN = DuckDBBooleanType.instance;
|
||||
|
||||
export class DuckDBTinyIntType extends BaseDuckDBType<DuckDBTypeId.TINYINT> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.TINYINT, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBTinyIntType();
|
||||
public static create(alias?: string): DuckDBTinyIntType {
|
||||
return alias ? new DuckDBTinyIntType(alias) : DuckDBTinyIntType.instance;
|
||||
}
|
||||
public static readonly Max = 2 ** 7 - 1;
|
||||
public static readonly Min = -(2 ** 7);
|
||||
public get max() {
|
||||
return DuckDBTinyIntType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBTinyIntType.Min;
|
||||
}
|
||||
}
|
||||
export const TINYINT = DuckDBTinyIntType.instance;
|
||||
|
||||
export class DuckDBSmallIntType extends BaseDuckDBType<DuckDBTypeId.SMALLINT> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.SMALLINT, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBSmallIntType();
|
||||
public static create(alias?: string): DuckDBSmallIntType {
|
||||
return alias ? new DuckDBSmallIntType(alias) : DuckDBSmallIntType.instance;
|
||||
}
|
||||
public static readonly Max = 2 ** 15 - 1;
|
||||
public static readonly Min = -(2 ** 15);
|
||||
public get max() {
|
||||
return DuckDBSmallIntType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBSmallIntType.Min;
|
||||
}
|
||||
}
|
||||
export const SMALLINT = DuckDBSmallIntType.instance;
|
||||
|
||||
export class DuckDBIntegerType extends BaseDuckDBType<DuckDBTypeId.INTEGER> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.INTEGER, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBIntegerType();
|
||||
public static create(alias?: string): DuckDBIntegerType {
|
||||
return alias ? new DuckDBIntegerType(alias) : DuckDBIntegerType.instance;
|
||||
}
|
||||
public static readonly Max = 2 ** 31 - 1;
|
||||
public static readonly Min = -(2 ** 31);
|
||||
public get max() {
|
||||
return DuckDBIntegerType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBIntegerType.Min;
|
||||
}
|
||||
}
|
||||
export const INTEGER = DuckDBIntegerType.instance;
|
||||
|
||||
export class DuckDBBigIntType extends BaseDuckDBType<DuckDBTypeId.BIGINT> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.BIGINT, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBBigIntType();
|
||||
public static create(alias?: string): DuckDBBigIntType {
|
||||
return alias ? new DuckDBBigIntType(alias) : DuckDBBigIntType.instance;
|
||||
}
|
||||
public static readonly Max: bigint = 2n ** 63n - 1n;
|
||||
public static readonly Min: bigint = -(2n ** 63n);
|
||||
public get max() {
|
||||
return DuckDBBigIntType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBBigIntType.Min;
|
||||
}
|
||||
}
|
||||
export const BIGINT = DuckDBBigIntType.instance;
|
||||
|
||||
export class DuckDBUTinyIntType extends BaseDuckDBType<DuckDBTypeId.UTINYINT> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.UTINYINT, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBUTinyIntType();
|
||||
public static create(alias?: string): DuckDBUTinyIntType {
|
||||
return alias ? new DuckDBUTinyIntType(alias) : DuckDBUTinyIntType.instance;
|
||||
}
|
||||
public static readonly Max = 2 ** 8 - 1;
|
||||
public static readonly Min = 0;
|
||||
public get max() {
|
||||
return DuckDBUTinyIntType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBUTinyIntType.Min;
|
||||
}
|
||||
}
|
||||
export const UTINYINT = DuckDBUTinyIntType.instance;
|
||||
|
||||
export class DuckDBUSmallIntType extends BaseDuckDBType<DuckDBTypeId.USMALLINT> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.USMALLINT, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBUSmallIntType();
|
||||
public static create(alias?: string): DuckDBUSmallIntType {
|
||||
return alias
|
||||
? new DuckDBUSmallIntType(alias)
|
||||
: DuckDBUSmallIntType.instance;
|
||||
}
|
||||
public static readonly Max = 2 ** 16 - 1;
|
||||
public static readonly Min = 0;
|
||||
public get max() {
|
||||
return DuckDBUSmallIntType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBUSmallIntType.Min;
|
||||
}
|
||||
}
|
||||
export const USMALLINT = DuckDBUSmallIntType.instance;
|
||||
|
||||
export class DuckDBUIntegerType extends BaseDuckDBType<DuckDBTypeId.UINTEGER> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.UINTEGER, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBUIntegerType();
|
||||
public static create(alias?: string): DuckDBUIntegerType {
|
||||
return alias ? new DuckDBUIntegerType(alias) : DuckDBUIntegerType.instance;
|
||||
}
|
||||
public static readonly Max = 2 ** 32 - 1;
|
||||
public static readonly Min = 0;
|
||||
public get max() {
|
||||
return DuckDBUIntegerType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBUIntegerType.Min;
|
||||
}
|
||||
}
|
||||
export const UINTEGER = DuckDBUIntegerType.instance;
|
||||
|
||||
export class DuckDBUBigIntType extends BaseDuckDBType<DuckDBTypeId.UBIGINT> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.UBIGINT, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBUBigIntType();
|
||||
public static create(alias?: string): DuckDBUBigIntType {
|
||||
return alias ? new DuckDBUBigIntType(alias) : DuckDBUBigIntType.instance;
|
||||
}
|
||||
public static readonly Max: bigint = 2n ** 64n - 1n;
|
||||
public static readonly Min: bigint = 0n;
|
||||
public get max() {
|
||||
return DuckDBUBigIntType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBUBigIntType.Min;
|
||||
}
|
||||
}
|
||||
export const UBIGINT = DuckDBUBigIntType.instance;
|
||||
|
||||
export class DuckDBFloatType extends BaseDuckDBType<DuckDBTypeId.FLOAT> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.FLOAT, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBFloatType();
|
||||
public static create(alias?: string): DuckDBFloatType {
|
||||
return alias ? new DuckDBFloatType(alias) : DuckDBFloatType.instance;
|
||||
}
|
||||
public static readonly Max = Math.fround(3.4028235e38);
|
||||
public static readonly Min = Math.fround(-3.4028235e38);
|
||||
public get max() {
|
||||
return DuckDBFloatType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBFloatType.Min;
|
||||
}
|
||||
}
|
||||
export const FLOAT = DuckDBFloatType.instance;
|
||||
|
||||
export class DuckDBDoubleType extends BaseDuckDBType<DuckDBTypeId.DOUBLE> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.DOUBLE, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBDoubleType();
|
||||
public static create(alias?: string): DuckDBDoubleType {
|
||||
return alias ? new DuckDBDoubleType(alias) : DuckDBDoubleType.instance;
|
||||
}
|
||||
public static readonly Max = Number.MAX_VALUE;
|
||||
public static readonly Min = -Number.MAX_VALUE;
|
||||
public get max() {
|
||||
return DuckDBDoubleType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBDoubleType.Min;
|
||||
}
|
||||
}
|
||||
export const DOUBLE = DuckDBDoubleType.instance;
|
||||
|
||||
export class DuckDBTimestampType extends BaseDuckDBType<DuckDBTypeId.TIMESTAMP> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.TIMESTAMP, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBTimestampType();
|
||||
public static create(alias?: string): DuckDBTimestampType {
|
||||
return alias
|
||||
? new DuckDBTimestampType(alias)
|
||||
: DuckDBTimestampType.instance;
|
||||
}
|
||||
// TODO: common DuckDBValues on type objects
|
||||
// public get epoch() {
|
||||
// return DuckDBTimestampValue.Epoch;
|
||||
// }
|
||||
// public get max() {
|
||||
// return DuckDBTimestampValue.Max;
|
||||
// }
|
||||
// public get min() {
|
||||
// return DuckDBTimestampValue.Min;
|
||||
// }
|
||||
// public get posInf() {
|
||||
// return DuckDBTimestampValue.PosInf;
|
||||
// }
|
||||
// public get negInf() {
|
||||
// return DuckDBTimestampValue.NegInf;
|
||||
// }
|
||||
}
|
||||
export const TIMESTAMP = DuckDBTimestampType.instance;
|
||||
|
||||
export type DuckDBTimestampMicrosecondsType = DuckDBTimestampType;
|
||||
export const DuckDBTimestampMicrosecondsType = DuckDBTimestampType;
|
||||
|
||||
export class DuckDBDateType extends BaseDuckDBType<DuckDBTypeId.DATE> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.DATE, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBDateType();
|
||||
public static create(alias?: string): DuckDBDateType {
|
||||
return alias ? new DuckDBDateType(alias) : DuckDBDateType.instance;
|
||||
}
|
||||
// TODO: common DuckDBValues on type objects
|
||||
// public get epoch() {
|
||||
// return DuckDBDateValue.Epoch;
|
||||
// }
|
||||
// public get max() {
|
||||
// return DuckDBDateValue.Max;
|
||||
// }
|
||||
// public get min() {
|
||||
// return DuckDBDateValue.Min;
|
||||
// }
|
||||
// public get posInf() {
|
||||
// return DuckDBDateValue.PosInf;
|
||||
// }
|
||||
// public get negInf() {
|
||||
// return DuckDBDateValue.NegInf;
|
||||
// }
|
||||
}
|
||||
export const DATE = DuckDBDateType.instance;
|
||||
|
||||
export class DuckDBTimeType extends BaseDuckDBType<DuckDBTypeId.TIME> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.TIME, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBTimeType();
|
||||
public static create(alias?: string): DuckDBTimeType {
|
||||
return alias ? new DuckDBTimeType(alias) : DuckDBTimeType.instance;
|
||||
}
|
||||
// TODO: common DuckDBValues on type objects
|
||||
// public get max() {
|
||||
// return DuckDBTimeValue.Max;
|
||||
// }
|
||||
// public get min() {
|
||||
// return DuckDBTimeValue.Min;
|
||||
// }
|
||||
}
|
||||
export const TIME = DuckDBTimeType.instance;
|
||||
|
||||
export class DuckDBIntervalType extends BaseDuckDBType<DuckDBTypeId.INTERVAL> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.INTERVAL, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBIntervalType();
|
||||
public static create(alias?: string): DuckDBIntervalType {
|
||||
return alias ? new DuckDBIntervalType(alias) : DuckDBIntervalType.instance;
|
||||
}
|
||||
}
|
||||
export const INTERVAL = DuckDBIntervalType.instance;
|
||||
|
||||
export class DuckDBHugeIntType extends BaseDuckDBType<DuckDBTypeId.HUGEINT> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.HUGEINT, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBHugeIntType();
|
||||
public static create(alias?: string): DuckDBHugeIntType {
|
||||
return alias ? new DuckDBHugeIntType(alias) : DuckDBHugeIntType.instance;
|
||||
}
|
||||
public static readonly Max: bigint = 2n ** 127n - 1n;
|
||||
public static readonly Min: bigint = -(2n ** 127n);
|
||||
public get max() {
|
||||
return DuckDBHugeIntType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBHugeIntType.Min;
|
||||
}
|
||||
}
|
||||
export const HUGEINT = DuckDBHugeIntType.instance;
|
||||
|
||||
export class DuckDBUHugeIntType extends BaseDuckDBType<DuckDBTypeId.UHUGEINT> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.UHUGEINT, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBUHugeIntType();
|
||||
public static create(alias?: string): DuckDBUHugeIntType {
|
||||
return alias ? new DuckDBUHugeIntType(alias) : DuckDBUHugeIntType.instance;
|
||||
}
|
||||
public static readonly Max: bigint = 2n ** 128n - 1n;
|
||||
public static readonly Min: bigint = 0n;
|
||||
public get max() {
|
||||
return DuckDBUHugeIntType.Max;
|
||||
}
|
||||
public get min() {
|
||||
return DuckDBUHugeIntType.Min;
|
||||
}
|
||||
}
|
||||
export const UHUGEINT = DuckDBUHugeIntType.instance;
|
||||
|
||||
export class DuckDBVarCharType extends BaseDuckDBType<DuckDBTypeId.VARCHAR> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.VARCHAR, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBVarCharType();
|
||||
public static create(alias?: string): DuckDBVarCharType {
|
||||
return alias ? new DuckDBVarCharType(alias) : DuckDBVarCharType.instance;
|
||||
}
|
||||
}
|
||||
export const VARCHAR = DuckDBVarCharType.instance;
|
||||
|
||||
export class DuckDBBlobType extends BaseDuckDBType<DuckDBTypeId.BLOB> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.BLOB, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBBlobType();
|
||||
public static create(alias?: string): DuckDBBlobType {
|
||||
return alias ? new DuckDBBlobType(alias) : DuckDBBlobType.instance;
|
||||
}
|
||||
}
|
||||
export const BLOB = DuckDBBlobType.instance;
|
||||
|
||||
export class DuckDBDecimalType extends BaseDuckDBType<DuckDBTypeId.DECIMAL> {
|
||||
public readonly width: number;
|
||||
public readonly scale: number;
|
||||
public constructor(width: number, scale: number, alias?: string) {
|
||||
super(DuckDBTypeId.DECIMAL, alias);
|
||||
this.width = width;
|
||||
this.scale = scale;
|
||||
}
|
||||
public toString(_options?: DuckDBTypeToStringOptions): string {
|
||||
return this.alias ?? `DECIMAL(${this.width},${this.scale})`;
|
||||
}
|
||||
public override toJson(): Json {
|
||||
return {
|
||||
typeId: this.typeId,
|
||||
width: this.width,
|
||||
scale: this.scale,
|
||||
...(this.alias ? { alias: this.alias } : {}),
|
||||
};
|
||||
}
|
||||
public static readonly default = new DuckDBDecimalType(18, 3);
|
||||
}
|
||||
export function DECIMAL(
|
||||
width?: number,
|
||||
scale?: number,
|
||||
alias?: string,
|
||||
): DuckDBDecimalType {
|
||||
if (width === undefined) {
|
||||
return DuckDBDecimalType.default;
|
||||
}
|
||||
if (scale === undefined) {
|
||||
return new DuckDBDecimalType(width, 0);
|
||||
}
|
||||
return new DuckDBDecimalType(width, scale, alias);
|
||||
}
|
||||
|
||||
export class DuckDBTimestampSecondsType extends BaseDuckDBType<DuckDBTypeId.TIMESTAMP_S> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.TIMESTAMP_S, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBTimestampSecondsType();
|
||||
public static create(alias?: string): DuckDBTimestampSecondsType {
|
||||
return alias
|
||||
? new DuckDBTimestampSecondsType(alias)
|
||||
: DuckDBTimestampSecondsType.instance;
|
||||
}
|
||||
// TODO: common DuckDBValues on type objects
|
||||
// public get epoch() {
|
||||
// return DuckDBTimestampSecondsValue.Epoch;
|
||||
// }
|
||||
// public get max() {
|
||||
// return DuckDBTimestampSecondsValue.Max;
|
||||
// }
|
||||
// public get min() {
|
||||
// return DuckDBTimestampSecondsValue.Min;
|
||||
// }
|
||||
// public get posInf() {
|
||||
// return DuckDBTimestampSecondsValue.PosInf;
|
||||
// }
|
||||
// public get negInf() {
|
||||
// return DuckDBTimestampSecondsValue.NegInf;
|
||||
// }
|
||||
}
|
||||
export const TIMESTAMP_S = DuckDBTimestampSecondsType.instance;
|
||||
|
||||
export class DuckDBTimestampMillisecondsType extends BaseDuckDBType<DuckDBTypeId.TIMESTAMP_MS> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.TIMESTAMP_MS, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBTimestampMillisecondsType();
|
||||
public static create(alias?: string): DuckDBTimestampMillisecondsType {
|
||||
return alias
|
||||
? new DuckDBTimestampMillisecondsType(alias)
|
||||
: DuckDBTimestampMillisecondsType.instance;
|
||||
}
|
||||
// TODO: common DuckDBValues on type objects
|
||||
// public get epoch() {
|
||||
// return DuckDBTimestampMillisecondsValue.Epoch;
|
||||
// }
|
||||
// public get max() {
|
||||
// return DuckDBTimestampMillisecondsValue.Max;
|
||||
// }
|
||||
// public get min() {
|
||||
// return DuckDBTimestampMillisecondsValue.Min;
|
||||
// }
|
||||
// public get posInf() {
|
||||
// return DuckDBTimestampMillisecondsValue.PosInf;
|
||||
// }
|
||||
// public get negInf() {
|
||||
// return DuckDBTimestampMillisecondsValue.NegInf;
|
||||
// }
|
||||
}
|
||||
export const TIMESTAMP_MS = DuckDBTimestampMillisecondsType.instance;
|
||||
|
||||
export class DuckDBTimestampNanosecondsType extends BaseDuckDBType<DuckDBTypeId.TIMESTAMP_NS> {
|
||||
public constructor(alias?: string) {
|
||||
super(DuckDBTypeId.TIMESTAMP_NS, alias);
|
||||
}
|
||||
public static readonly instance = new DuckDBTimestampNanosecondsType();
|
||||
public static create(alias?: string): DuckDBTimestampNanosecondsType {
|
||||
return alias
|
||||
? new DuckDBTimestampNanosecondsType(alias)
|
||||
: DuckDBTimestampNanosecondsType.instance;
|
||||
}
|
||||
// TODO: common DuckDBValues on type objects
|
||||
// public get epoch() {
|
||||
// return DuckDBTimestampNanosecondsValue.Epoch;
|
||||
// }
|
||||
// public get max() {
|
||||
// return DuckDBTimestampNanosecondsValue.Max;
|
||||
// }
|
||||
// public get min() {
|
||||
// return DuckDBTimestampNanosecondsValue.Min;
|
||||
// }
|
||||
// public get posInf() {
|
||||
// return DuckDBTimestampNanosecondsValue.PosInf;
|
||||
// }
|
||||
// public get negInf() {
|
||||
// return DuckDBTimestampNanosecondsValue.NegInf;
|
||||
// }
|
||||
}
|
||||
export const TIMESTAMP_NS = DuckDBTimestampNanosecondsType.instance;
|
||||
|
||||
export class DuckDBEnumType extends BaseDuckDBType<DuckDBTypeId.ENUM> {
|
||||
public readonly values: readonly string[];
|
||||
public readonly valueIndexes: Readonly<Record<string, number>>;
|
||||
public readonly internalTypeId: DuckDBTypeId;
|
||||
public constructor(
|
||||
values: readonly string[],
|
||||
internalTypeId: DuckDBTypeId,
|
||||
alias?: string,
|
||||
) {
|
||||
super(DuckDBTypeId.ENUM, alias);
|
||||
this.values = values;
|
||||
const valueIndexes: Record<string, number> = {};
|
||||
for (let i = 0; i < values.length; i++) {
|
||||
valueIndexes[values[i]] = i;
|
||||
}
|
||||
this.valueIndexes = valueIndexes;
|
||||
this.internalTypeId = internalTypeId;
|
||||
}
|
||||
public indexForValue(value: string): number {
|
||||
return this.valueIndexes[value];
|
||||
}
|
||||
public toString(options?: DuckDBTypeToStringOptions): string {
|
||||
if (this.alias) {
|
||||
return this.alias;
|
||||
}
|
||||
if (options?.short) {
|
||||
return `ENUM(…)`;
|
||||
}
|
||||
return `ENUM(${this.values.map(quotedString).join(', ')})`;
|
||||
}
|
||||
public override toJson(): Json {
|
||||
return {
|
||||
typeId: this.typeId,
|
||||
values: [...this.values],
|
||||
internalTypeId: this.internalTypeId,
|
||||
...(this.alias ? { alias: this.alias } : {}),
|
||||
};
|
||||
}
|
||||
}
|
||||
export function ENUM8(
|
||||
values: readonly string[],
|
||||
alias?: string,
|
||||
): DuckDBEnumType {
|
||||
return new DuckDBEnumType(values, DuckDBTypeId.UTINYINT, alias);
|
||||
}
|
||||
export function ENUM16(
|
||||
values: readonly string[],
|
||||
alias?: string,
|
||||
): DuckDBEnumType {
|
||||
return new DuckDBEnumType(values, DuckDBTypeId.USMALLINT, alias);
|
||||
}
|
||||
export function ENUM32(
|
||||
values: readonly string[],
|
||||
alias?: string,
|
||||
): DuckDBEnumType {
|
||||
return new DuckDBEnumType(values, DuckDBTypeId.UINTEGER, alias);
|
||||
}
|
||||
export function ENUM(
|
||||
values: readonly string[],
|
||||
alias?: string,
|
||||
): DuckDBEnumType {
|
||||
if (values.length < 256) {
|
||||
return ENUM8(values, alias);
|
||||
} else if (values.length < 65536) {
|
||||
return ENUM16(values, alias);
|
||||
} else if (values.length < 4294967296) {
|
||||
return ENUM32(values, alias);
|
||||
} else {
|
||||
throw new Error(
|
||||
`ENUM types cannot have more than 4294967295 values; received ${values.length}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class DuckDBListType extends BaseDuckDBType<DuckDBTypeId.LIST> {
|
||||
public readonly valueType: DuckDBType;
|
||||
public constructor(valueType: DuckDBType, alias?: string) {
|
||||
super(DuckDBTypeId.LIST, alias);
|
||||
this.valueType = valueType;
|
||||
}
|
||||
public toString(options?: DuckDBTypeToStringOptions): string {
|
||||
return this.alias ?? `${this.valueType.toString(options)}[]`;
|
||||
}
|
||||
public override toJson(): Json {
|
||||
return {
|
||||
typeId: this.typeId,
|
||||
valueType: this.valueType.toJson(),
|
||||
...(this.alias ? { alias: this.alias } : {}),
|
||||
};
|
||||
}
|
||||
}
|
||||
export function LIST(valueType: DuckDBType, alias?: string): DuckDBListType {
|
||||
return new DuckDBListType(valueType, alias);
|
||||
}
|
||||
|
||||
export class DuckDBStructType extends BaseDuckDBType<DuckDBTypeId.STRUCT> {
|
||||
public readonly entryNames: readonly string[];
|
||||
public readonly entryTypes: readonly DuckDBType[];
|
||||
public readonly entryIndexes: Readonly<Record<string, number>>;
|
||||
public constructor(
|
||||
entryNames: readonly string[],
|
||||
entryTypes: readonly DuckDBType[],
|
||||
alias?: string,
|
||||
) {
|
||||
super(DuckDBTypeId.STRUCT, alias);
|
||||
if (entryNames.length !== entryTypes.length) {
|
||||
throw new Error(`Could not create DuckDBStructType: \
|
||||
entryNames length (${entryNames.length}) does not match entryTypes length (${entryTypes.length})`);
|
||||
}
|
||||
this.entryNames = entryNames;
|
||||
this.entryTypes = entryTypes;
|
||||
const entryIndexes: Record<string, number> = {};
|
||||
for (let i = 0; i < entryNames.length; i++) {
|
||||
entryIndexes[entryNames[i]] = i;
|
||||
}
|
||||
this.entryIndexes = entryIndexes;
|
||||
}
|
||||
public get entryCount() {
|
||||
return this.entryNames.length;
|
||||
}
|
||||
public indexForEntry(entryName: string): number {
|
||||
return this.entryIndexes[entryName];
|
||||
}
|
||||
public typeForEntry(entryName: string): DuckDBType {
|
||||
return this.entryTypes[this.entryIndexes[entryName]];
|
||||
}
|
||||
public toString(options?: DuckDBTypeToStringOptions): string {
|
||||
if (this.alias) {
|
||||
return this.alias;
|
||||
}
|
||||
if (options?.short) {
|
||||
return `STRUCT(…)`;
|
||||
}
|
||||
const parts: string[] = [];
|
||||
for (let i = 0; i < this.entryNames.length; i++) {
|
||||
parts.push(
|
||||
`${quotedIdentifier(this.entryNames[i])} ${this.entryTypes[i]}`,
|
||||
);
|
||||
}
|
||||
return `STRUCT(${parts.join(', ')})`;
|
||||
}
|
||||
public override toJson(): Json {
|
||||
return {
|
||||
typeId: this.typeId,
|
||||
entryNames: [...this.entryNames],
|
||||
entryTypes: this.entryTypes.map((t) => t.toJson()),
|
||||
...(this.alias ? { alias: this.alias } : {}),
|
||||
};
|
||||
}
|
||||
}
|
||||
export function STRUCT(
|
||||
entries: Record<string, DuckDBType>,
|
||||
alias?: string,
|
||||
): DuckDBStructType {
|
||||
const entryNames = Object.keys(entries);
|
||||
const entryTypes = Object.values(entries);
|
||||
return new DuckDBStructType(entryNames, entryTypes, alias);
|
||||
}
|
||||
|
||||
export class DuckDBMapType extends BaseDuckDBType<DuckDBTypeId.MAP> {
  public readonly keyType: DuckDBType;
  public readonly valueType: DuckDBType;
  public constructor(
    keyType: DuckDBType,
    valueType: DuckDBType,
    alias?: string,
  ) {
    super(DuckDBTypeId.MAP, alias);
    this.keyType = keyType;
    this.valueType = valueType;
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    if (this.alias) {
      return this.alias;
    }
    if (options?.short) {
      return `MAP(…)`;
    }
    return `MAP(${this.keyType}, ${this.valueType})`;
  }
  public override toJson(): Json {
    return {
      typeId: this.typeId,
      keyType: this.keyType.toJson(),
      valueType: this.valueType.toJson(),
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
}
export function MAP(
  keyType: DuckDBType,
  valueType: DuckDBType,
  alias?: string,
): DuckDBMapType {
  return new DuckDBMapType(keyType, valueType, alias);
}

export class DuckDBArrayType extends BaseDuckDBType<DuckDBTypeId.ARRAY> {
  public readonly valueType: DuckDBType;
  public readonly length: number;
  public constructor(valueType: DuckDBType, length: number, alias?: string) {
    super(DuckDBTypeId.ARRAY, alias);
    this.valueType = valueType;
    this.length = length;
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    return this.alias ?? `${this.valueType.toString(options)}[${this.length}]`;
  }
  public override toJson(): Json {
    return {
      typeId: this.typeId,
      valueType: this.valueType.toJson(),
      length: this.length,
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
}
export function ARRAY(
  valueType: DuckDBType,
  length: number,
  alias?: string,
): DuckDBArrayType {
  return new DuckDBArrayType(valueType, length, alias);
}

export class DuckDBUUIDType extends BaseDuckDBType<DuckDBTypeId.UUID> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.UUID, alias);
  }
  public static readonly instance = new DuckDBUUIDType();
  public static create(alias?: string): DuckDBUUIDType {
    return alias ? new DuckDBUUIDType(alias) : DuckDBUUIDType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get max() {
  //   return DuckDBUUIDValue.Max;
  // }
  // public get min() {
  //   return DuckDBUUIDValue.Min;
  // }
}
export const UUID = DuckDBUUIDType.instance;

export class DuckDBUnionType extends BaseDuckDBType<DuckDBTypeId.UNION> {
  public readonly memberTags: readonly string[];
  public readonly tagMemberIndexes: Readonly<Record<string, number>>;
  public readonly memberTypes: readonly DuckDBType[];
  public constructor(
    memberTags: readonly string[],
    memberTypes: readonly DuckDBType[],
    alias?: string,
  ) {
    super(DuckDBTypeId.UNION, alias);
    if (memberTags.length !== memberTypes.length) {
      throw new Error(`Could not create DuckDBUnionType: \
memberTags length (${memberTags.length}) does not match memberTypes length (${memberTypes.length})`);
    }
    this.memberTags = memberTags;
    const tagMemberIndexes: Record<string, number> = {};
    for (let i = 0; i < memberTags.length; i++) {
      tagMemberIndexes[memberTags[i]] = i;
    }
    this.tagMemberIndexes = tagMemberIndexes;
    this.memberTypes = memberTypes;
  }
  public memberIndexForTag(tag: string): number {
    return this.tagMemberIndexes[tag];
  }
  public memberTypeForTag(tag: string): DuckDBType {
    return this.memberTypes[this.tagMemberIndexes[tag]];
  }
  public get memberCount() {
    return this.memberTags.length;
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    if (this.alias) {
      return this.alias;
    }
    if (options?.short) {
      return `UNION(…)`;
    }
    const parts: string[] = [];
    for (let i = 0; i < this.memberTags.length; i++) {
      parts.push(
        `${quotedIdentifier(this.memberTags[i])} ${this.memberTypes[i]}`,
      );
    }
    return `UNION(${parts.join(', ')})`;
  }
  public override toJson(): Json {
    return {
      typeId: this.typeId,
      memberTags: [...this.memberTags],
      memberTypes: this.memberTypes.map((t) => t.toJson()),
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
}
export function UNION(
  members: Record<string, DuckDBType>,
  alias?: string,
): DuckDBUnionType {
  const memberTags = Object.keys(members);
  const memberTypes = Object.values(members);
  return new DuckDBUnionType(memberTags, memberTypes, alias);
}

export class DuckDBBitType extends BaseDuckDBType<DuckDBTypeId.BIT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.BIT, alias);
  }
  public static readonly instance = new DuckDBBitType();
  public static create(alias?: string): DuckDBBitType {
    return alias ? new DuckDBBitType(alias) : DuckDBBitType.instance;
  }
}
export const BIT = DuckDBBitType.instance;

export class DuckDBTimeTZType extends BaseDuckDBType<DuckDBTypeId.TIME_TZ> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.TIME_TZ, alias);
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    if (this.alias) {
      return this.alias;
    }
    if (options?.short) {
      return 'TIMETZ';
    }
    return 'TIME WITH TIME ZONE';
  }
  public static readonly instance = new DuckDBTimeTZType();
  public static create(alias?: string): DuckDBTimeTZType {
    return alias ? new DuckDBTimeTZType(alias) : DuckDBTimeTZType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get max() {
  //   return DuckDBTimeTZValue.Max;
  // }
  // public get min() {
  //   return DuckDBTimeTZValue.Min;
  // }
}
export const TIMETZ = DuckDBTimeTZType.instance;

export class DuckDBTimestampTZType extends BaseDuckDBType<DuckDBTypeId.TIMESTAMP_TZ> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.TIMESTAMP_TZ, alias);
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    if (this.alias) {
      return this.alias;
    }
    if (options?.short) {
      return 'TIMESTAMPTZ';
    }
    return 'TIMESTAMP WITH TIME ZONE';
  }
  public static readonly instance = new DuckDBTimestampTZType();
  public static create(alias?: string): DuckDBTimestampTZType {
    return alias
      ? new DuckDBTimestampTZType(alias)
      : DuckDBTimestampTZType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get epoch() {
  //   return DuckDBTimestampTZValue.Epoch;
  // }
  // public get max() {
  //   return DuckDBTimestampTZValue.Max;
  // }
  // public get min() {
  //   return DuckDBTimestampTZValue.Min;
  // }
  // public get posInf() {
  //   return DuckDBTimestampTZValue.PosInf;
  // }
  // public get negInf() {
  //   return DuckDBTimestampTZValue.NegInf;
  // }
}
export const TIMESTAMPTZ = DuckDBTimestampTZType.instance;

export class DuckDBAnyType extends BaseDuckDBType<DuckDBTypeId.ANY> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.ANY, alias);
  }
  public static readonly instance = new DuckDBAnyType();
  public static create(alias?: string): DuckDBAnyType {
    return alias ? new DuckDBAnyType(alias) : DuckDBAnyType.instance;
  }
}
export const ANY = DuckDBAnyType.instance;

export class DuckDBVarIntType extends BaseDuckDBType<DuckDBTypeId.VARINT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.VARINT, alias);
  }
  public static readonly instance = new DuckDBVarIntType();
  public static create(alias?: string): DuckDBVarIntType {
    return alias ? new DuckDBVarIntType(alias) : DuckDBVarIntType.instance;
  }
  public static readonly Max: bigint =
    179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n;
  public static readonly Min: bigint =
    -179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n;
  public get max() {
    return DuckDBVarIntType.Max;
  }
  public get min() {
    return DuckDBVarIntType.Min;
  }
}
export const VARINT = DuckDBVarIntType.instance;

export class DuckDBSQLNullType extends BaseDuckDBType<DuckDBTypeId.SQLNULL> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.SQLNULL, alias);
  }
  public static readonly instance = new DuckDBSQLNullType();
  public static create(alias?: string): DuckDBSQLNullType {
    return alias ? new DuckDBSQLNullType(alias) : DuckDBSQLNullType.instance;
  }
}
export const SQLNULL = DuckDBSQLNullType.instance;

export class DuckDBStringLiteralType extends BaseDuckDBType<DuckDBTypeId.STRING_LITERAL> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.STRING_LITERAL, alias);
  }
  public static readonly instance = new DuckDBStringLiteralType();
  public static create(alias?: string): DuckDBStringLiteralType {
    return alias
      ? new DuckDBStringLiteralType(alias)
      : DuckDBStringLiteralType.instance;
  }
}
export const STRING_LITERAL = DuckDBStringLiteralType.instance;

export class DuckDBIntegerLiteralType extends BaseDuckDBType<DuckDBTypeId.INTEGER_LITERAL> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.INTEGER_LITERAL, alias);
  }
  public static readonly instance = new DuckDBIntegerLiteralType();
  public static create(alias?: string): DuckDBIntegerLiteralType {
    return alias
      ? new DuckDBIntegerLiteralType(alias)
      : DuckDBIntegerLiteralType.instance;
  }
}
export const INTEGER_LITERAL = DuckDBIntegerLiteralType.instance;

export type DuckDBType =
  | DuckDBBooleanType
  | DuckDBTinyIntType
  | DuckDBSmallIntType
  | DuckDBIntegerType
  | DuckDBBigIntType
  | DuckDBUTinyIntType
  | DuckDBUSmallIntType
  | DuckDBUIntegerType
  | DuckDBUBigIntType
  | DuckDBFloatType
  | DuckDBDoubleType
  | DuckDBTimestampType
  | DuckDBDateType
  | DuckDBTimeType
  | DuckDBIntervalType
  | DuckDBHugeIntType
  | DuckDBUHugeIntType
  | DuckDBVarCharType
  | DuckDBBlobType
  | DuckDBDecimalType
  | DuckDBTimestampSecondsType
  | DuckDBTimestampMillisecondsType
  | DuckDBTimestampNanosecondsType
  | DuckDBEnumType
  | DuckDBListType
  | DuckDBStructType
  | DuckDBMapType
  | DuckDBArrayType
  | DuckDBUUIDType
  | DuckDBUnionType
  | DuckDBBitType
  | DuckDBTimeTZType
  | DuckDBTimestampTZType
  | DuckDBAnyType
  | DuckDBVarIntType
  | DuckDBSQLNullType
  | DuckDBStringLiteralType
  | DuckDBIntegerLiteralType;
42
ts/pkgs/duckdb-data-types/src/DuckDBTypeId.ts
Normal file
@@ -0,0 +1,42 @@
// copy of DUCKDB_TYPE from the C API, with names shortened
export enum DuckDBTypeId {
  INVALID = 0,
  BOOLEAN = 1,
  TINYINT = 2,
  SMALLINT = 3,
  INTEGER = 4,
  BIGINT = 5,
  UTINYINT = 6,
  USMALLINT = 7,
  UINTEGER = 8,
  UBIGINT = 9,
  FLOAT = 10,
  DOUBLE = 11,
  TIMESTAMP = 12,
  DATE = 13,
  TIME = 14,
  INTERVAL = 15,
  HUGEINT = 16,
  UHUGEINT = 32,
  VARCHAR = 17,
  BLOB = 18,
  DECIMAL = 19,
  TIMESTAMP_S = 20,
  TIMESTAMP_MS = 21,
  TIMESTAMP_NS = 22,
  ENUM = 23,
  LIST = 24,
  STRUCT = 25,
  MAP = 26,
  ARRAY = 33,
  UUID = 27,
  UNION = 28,
  BIT = 29,
  TIME_TZ = 30,
  TIMESTAMP_TZ = 31,
  ANY = 34,
  VARINT = 35,
  SQLNULL = 36,
  STRING_LITERAL = 37,
  INTEGER_LITERAL = 38,
}
46
ts/pkgs/duckdb-data-types/src/extensionTypes.ts
Normal file
@@ -0,0 +1,46 @@
import {
  DOUBLE,
  DuckDBBlobType,
  DuckDBVarCharType,
  FLOAT,
  HUGEINT,
  LIST,
  STRUCT,
  USMALLINT,
  UTINYINT,
} from './DuckDBType.js';

// see https://github.com/duckdb/duckdb-inet/blob/main/src/inet_extension.cpp
export const INET = STRUCT(
  { ip_type: UTINYINT, address: HUGEINT, mask: USMALLINT },
  'INET',
);

// see LogicalType::JSON() in https://github.com/duckdb/duckdb/blob/main/src/common/types.cpp
export const JSONType = DuckDBVarCharType.create('JSON');

// see https://github.com/duckdb/duckdb-spatial/blob/main/src/spatial/spatial_types.cpp
export const BOX_2D = STRUCT(
  { min_x: DOUBLE, min_y: DOUBLE, max_x: DOUBLE, max_y: DOUBLE },
  'BOX_2D',
);
export const BOX_2DF = STRUCT(
  { min_x: FLOAT, min_y: FLOAT, max_x: FLOAT, max_y: FLOAT },
  'BOX_2DF',
);
export const GEOMETRY = DuckDBBlobType.create('GEOMETRY');
export const LINESTRING_2D = LIST(
  STRUCT({ x: DOUBLE, y: DOUBLE }),
  'LINESTRING_2D',
);
export const POINT_2D = STRUCT({ x: DOUBLE, y: DOUBLE }, 'POINT_2D');
export const POINT_3D = STRUCT({ x: DOUBLE, y: DOUBLE, z: DOUBLE }, 'POINT_3D');
export const POINT_4D = STRUCT(
  { x: DOUBLE, y: DOUBLE, z: DOUBLE, m: DOUBLE },
  'POINT_4D',
);
export const POLYGON_2D = LIST(
  LIST(STRUCT({ x: DOUBLE, y: DOUBLE })),
  'POLYGON_2D',
);
export const WKB_BLOB = DuckDBBlobType.create('WKB_BLOB');
4
ts/pkgs/duckdb-data-types/src/index.ts
Normal file
@@ -0,0 +1,4 @@
export * from './DuckDBType.js';
export * from './DuckDBTypeId.js';
export * from './extensionTypes.js';
export * from './parseLogicalTypeString.js';
286
ts/pkgs/duckdb-data-types/src/parseLogicalTypeString.ts
Normal file
@@ -0,0 +1,286 @@
import {
  ARRAY,
  BIGINT,
  BIT,
  BLOB,
  BOOLEAN,
  DATE,
  DECIMAL,
  DOUBLE,
  DuckDBMapType,
  DuckDBStructType,
  DuckDBType,
  DuckDBUnionType,
  ENUM,
  FLOAT,
  HUGEINT,
  INTEGER,
  INTERVAL,
  LIST,
  MAP,
  SMALLINT,
  SQLNULL,
  STRUCT,
  TIME,
  TIMESTAMP,
  TIMESTAMP_MS,
  TIMESTAMP_NS,
  TIMESTAMP_S,
  TIMESTAMPTZ,
  TIMETZ,
  TINYINT,
  UBIGINT,
  UHUGEINT,
  UINTEGER,
  UNION,
  USMALLINT,
  UTINYINT,
  UUID,
  VARCHAR,
  VARINT,
} from './DuckDBType.js';
import {
  BOX_2D,
  BOX_2DF,
  GEOMETRY,
  INET,
  JSONType,
  LINESTRING_2D,
  POINT_2D,
  POINT_3D,
  POINT_4D,
  POLYGON_2D,
  WKB_BLOB,
} from './extensionTypes.js';

const simpleTypeMap: Record<string, DuckDBType> = {
  BIGINT: BIGINT,
  BIT: BIT,
  BOOLEAN: BOOLEAN,
  BLOB: BLOB,
  BOX_2D: BOX_2D,
  BOX_2DF: BOX_2DF,
  DATE: DATE,
  DOUBLE: DOUBLE,
  FLOAT: FLOAT,
  GEOMETRY: GEOMETRY,
  HUGEINT: HUGEINT,
  INET: INET,
  INTEGER: INTEGER,
  INTERVAL: INTERVAL,
  JSON: JSONType,
  LINESTRING_2D: LINESTRING_2D,
  POINT_2D: POINT_2D,
  POINT_3D: POINT_3D,
  POINT_4D: POINT_4D,
  POLYGON_2D: POLYGON_2D,
  SMALLINT: SMALLINT,
  SQLNULL: SQLNULL,
  TIME: TIME,
  'TIME WITH TIME ZONE': TIMETZ,
  TIMESTAMP: TIMESTAMP,
  'TIMESTAMP WITH TIME ZONE': TIMESTAMPTZ,
  TIMESTAMP_S: TIMESTAMP_S,
  TIMESTAMP_MS: TIMESTAMP_MS,
  TIMESTAMP_NS: TIMESTAMP_NS,
  TINYINT: TINYINT,
  UBIGINT: UBIGINT,
  UHUGEINT: UHUGEINT,
  UINTEGER: UINTEGER,
  USMALLINT: USMALLINT,
  UTINYINT: UTINYINT,
  UUID: UUID,
  VARCHAR: VARCHAR,
  VARINT: VARINT,
  WKB_BLOB: WKB_BLOB,
};

function matchStructMapOrUnion(
  typeString: string,
): DuckDBStructType | DuckDBMapType | DuckDBUnionType | undefined {
  typeString = typeString.trim();

  const fields = parseStructLike(typeString);
  if (!fields) {
    return undefined;
  }

  if (typeString.startsWith('STRUCT')) {
    const entries: Record<string, DuckDBType> = {};
    for (const field of fields) {
      if (field.key && field.type) {
        entries[field.key] = field.type;
      }
    }
    return STRUCT(entries);
  }
  if (typeString.startsWith('MAP')) {
    const keyType = fields[0]?.type;
    const valueType = fields[1]?.type;
    if (keyType && valueType) {
      return MAP(keyType, valueType);
    }
  }
  if (typeString.startsWith('UNION')) {
    const members: Record<string, DuckDBType> = {};
    for (const field of fields) {
      if (field.key && field.type) {
        members[field.key] = field.type;
      }
    }
    return UNION(members);
  }
  return undefined;
}

function parseStructLike(typeString: string): ParsedField[] | undefined {
  const structPattern = /^(STRUCT|MAP|UNION)\s*\((.*)\)$/;
  const match = structPattern.exec(typeString);
  if (!match) {
    return undefined;
  }

  const fieldsString = match[2];
  return parseFields(fieldsString);
}

/**
 * Parse the fields substring by counting parentheses and double quotes.
 * Parentheses and field-separating commas only count when they appear
 * outside a double-quoted identifier, i.e. when the number of double
 * quotes seen so far is even. Escaped double quotes inside a quoted
 * identifier appear as adjacent pairs (""), so they always advance the
 * quote count by two before another parenthesis or comma can occur,
 * which keeps the even/odd test valid.
 */
function parseFields(fieldsString: string): ParsedField[] {
  const fields: ParsedField[] = [];
  let currentFieldStartIndex: number | null = null;
  let parenCount = 0;
  let quoteCount = 0;

  for (let i = 0; i < fieldsString.length; i++) {
    const char = fieldsString[i];

    if (
      currentFieldStartIndex === null &&
      char !== '(' &&
      char !== ')' &&
      char !== ','
    ) {
      currentFieldStartIndex = i;
    }

    if (char === '"') {
      quoteCount++;
    }

    if (
      char === ',' &&
      parenCount === 0 &&
      quoteCount % 2 === 0 &&
      currentFieldStartIndex !== null
    ) {
      const field = fieldsString.slice(currentFieldStartIndex, i);
      fields.push(parseField(field.trim()));
      currentFieldStartIndex = null;
    } else {
      if (char === '(' && quoteCount % 2 === 0) parenCount++;
      if (char === ')' && quoteCount % 2 === 0) parenCount--;
    }
  }

  if (currentFieldStartIndex !== null) {
    const lastField = fieldsString.slice(currentFieldStartIndex);
    fields.push(parseField(lastField.trim()));
  }
  return fields;
}

interface ParsedField {
  key?: string;
  type?: DuckDBType;
}

function parseField(fieldString: string): ParsedField {
  const fieldPattern = /^(".*?"|\w+)\s+(.+)$/;
  const match = fieldPattern.exec(fieldString);
  if (match) {
    const key = match[1];
    const type = parseLogicalTypeString(match[2].trim());
    return { key, type };
  } else {
    const type = parseLogicalTypeString(fieldString);
    return { type };
  }
}

function matchDecimal(typeString: string) {
  const match = typeString.match(/^DECIMAL\((\d+),(\d+)\)$/);
  if (match) {
    return DECIMAL(Number(match[1]), Number(match[2]));
  }
  return undefined;
}

function matchEnum(typeString: string) {
  const match = /ENUM\(([^)]*)\)/i.exec(typeString);
  if (match) {
    const valueMatches = match[1].matchAll(/'((?:[^']|'')*)'/g);
    const values: string[] = [];
    for (const valueMatch of valueMatches) {
      // Unescape every doubled quote, not just the first occurrence.
      values.push(valueMatch[1].replace(/''/g, `'`));
    }
    return ENUM(values);
  }
  return undefined;
}

function matchList(typeString: string) {
  if (typeString.endsWith('[]')) {
    const innerType = typeString.slice(0, -2);
    return LIST(parseLogicalTypeString(innerType));
  }
  return undefined;
}

function matchArray(typeString: string) {
  const match = typeString.match(/\[(\d+)\]$/);
  if (match) {
    const innerType = typeString.slice(0, -match[0].length);
    const length = match[1];
    return ARRAY(parseLogicalTypeString(innerType), Number(length));
  }
  return undefined;
}

export function parseLogicalTypeString(typeString: string): DuckDBType {
  if (typeString in simpleTypeMap) {
    return simpleTypeMap[typeString];
  }

  const listType = matchList(typeString);
  if (listType) {
    return listType;
  }

  const arrayType = matchArray(typeString);
  if (arrayType) {
    return arrayType;
  }

  const decimalType = matchDecimal(typeString);
  if (decimalType) {
    return decimalType;
  }

  const enumType = matchEnum(typeString);
  if (enumType) {
    return enumType;
  }

  const structMapOrUnionType = matchStructMapOrUnion(typeString);
  if (structMapOrUnionType) {
    return structMapOrUnionType;
  }

  throw Error(`unimplemented type match: ${typeString}`);
}
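
A quick sketch (not part of the diff) of the parser round-tripping a nested type string; the relative import assumes a sibling file in src, and the expected output follows the toString implementations in DuckDBType.ts.

import { parseLogicalTypeString } from './parseLogicalTypeString.js';

const t = parseLogicalTypeString('MAP(VARCHAR, STRUCT(b INTEGER)[3])');
console.log(t.toString()); // expected: MAP(VARCHAR, STRUCT("b" INTEGER)[3])
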
7
ts/pkgs/duckdb-data-types/src/sql.ts
Normal file
@@ -0,0 +1,7 @@
export function quotedString(input: string): string {
  // Escape every single quote, not just the first occurrence.
  return `'${input.replace(/'/g, `''`)}'`;
}

export function quotedIdentifier(input: string): string {
  // Escape every double quote, not just the first occurrence.
  return `"${input.replace(/"/g, `""`)}"`;
}
6
ts/pkgs/duckdb-data-types/src/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.library.json",
  "compilerOptions": {
    "outDir": "../out"
  }
}
1025
ts/pkgs/duckdb-data-types/test/DuckDBType.test.ts
Normal file
File diff suppressed because it is too large
326
ts/pkgs/duckdb-data-types/test/parseLogicalTypeString.test.ts
Normal file
@@ -0,0 +1,326 @@
import { expect, suite, test } from 'vitest';
import {
  ARRAY,
  BIGINT,
  BIT,
  BLOB,
  BOOLEAN,
  DATE,
  DECIMAL,
  DOUBLE,
  ENUM,
  FLOAT,
  HUGEINT,
  INTEGER,
  INTERVAL,
  LIST,
  MAP,
  SMALLINT,
  STRUCT,
  TIME,
  TIMESTAMP,
  TIMESTAMP_MS,
  TIMESTAMP_NS,
  TIMESTAMP_S,
  TIMESTAMPTZ,
  TIMETZ,
  TINYINT,
  UBIGINT,
  UHUGEINT,
  UINTEGER,
  UNION,
  USMALLINT,
  UTINYINT,
  UUID,
  VARCHAR,
  VARINT,
} from '../src/DuckDBType';
import {
  BOX_2D,
  BOX_2DF,
  GEOMETRY,
  INET,
  JSONType,
  LINESTRING_2D,
  POINT_2D,
  POINT_3D,
  POINT_4D,
  POLYGON_2D,
  WKB_BLOB,
} from '../src/extensionTypes';
import { parseLogicalTypeString } from '../src/parseLogicalTypeString';

suite('parseLogicalTypeString', () => {
  test('BOOLEAN', () => {
    expect(parseLogicalTypeString('BOOLEAN')).toStrictEqual(BOOLEAN);
  });
  test('TINYINT', () => {
    expect(parseLogicalTypeString('TINYINT')).toStrictEqual(TINYINT);
  });
  test('GEOMETRY', () => {
    expect(parseLogicalTypeString('GEOMETRY')).toStrictEqual(GEOMETRY);
  });
  test('LINESTRING_2D', () => {
    expect(parseLogicalTypeString('LINESTRING_2D')).toStrictEqual(
      LINESTRING_2D,
    );
  });
  test('BOX_2D', () => {
    expect(parseLogicalTypeString('BOX_2D')).toStrictEqual(BOX_2D);
  });
  test('BOX_2DF', () => {
    expect(parseLogicalTypeString('BOX_2DF')).toStrictEqual(BOX_2DF);
  });
  test('POINT_2D', () => {
    expect(parseLogicalTypeString('POINT_2D')).toStrictEqual(POINT_2D);
  });
  test('POINT_3D', () => {
    expect(parseLogicalTypeString('POINT_3D')).toStrictEqual(POINT_3D);
  });
  test('POINT_4D', () => {
    expect(parseLogicalTypeString('POINT_4D')).toStrictEqual(POINT_4D);
  });
  test('POLYGON_2D', () => {
    expect(parseLogicalTypeString('POLYGON_2D')).toStrictEqual(POLYGON_2D);
  });
  test('INET', () => {
    expect(parseLogicalTypeString('INET')).toStrictEqual(INET);
  });
  test('JSON', () => {
    expect(parseLogicalTypeString('JSON')).toStrictEqual(JSONType);
  });
  test('WKB_BLOB', () => {
    expect(parseLogicalTypeString('WKB_BLOB')).toStrictEqual(WKB_BLOB);
  });
  test('SMALLINT', () => {
    expect(parseLogicalTypeString('SMALLINT')).toStrictEqual(SMALLINT);
  });
  test('INTEGER', () => {
    expect(parseLogicalTypeString('INTEGER')).toStrictEqual(INTEGER);
  });
  test('BIGINT', () => {
    expect(parseLogicalTypeString('BIGINT')).toStrictEqual(BIGINT);
  });
  test('HUGEINT', () => {
    expect(parseLogicalTypeString('HUGEINT')).toStrictEqual(HUGEINT);
  });
  test('UTINYINT', () => {
    expect(parseLogicalTypeString('UTINYINT')).toStrictEqual(UTINYINT);
  });
  test('UHUGEINT', () => {
    expect(parseLogicalTypeString('UHUGEINT')).toStrictEqual(UHUGEINT);
  });
  test('USMALLINT', () => {
    expect(parseLogicalTypeString('USMALLINT')).toStrictEqual(USMALLINT);
  });
  test('UINTEGER', () => {
    expect(parseLogicalTypeString('UINTEGER')).toStrictEqual(UINTEGER);
  });
  test('UBIGINT', () => {
    expect(parseLogicalTypeString('UBIGINT')).toStrictEqual(UBIGINT);
  });
  test('DATE', () => {
    expect(parseLogicalTypeString('DATE')).toStrictEqual(DATE);
  });
  test('TIME', () => {
    expect(parseLogicalTypeString('TIME')).toStrictEqual(TIME);
  });
  test('TIMESTAMP', () => {
    expect(parseLogicalTypeString('TIMESTAMP')).toStrictEqual(TIMESTAMP);
  });
  test('TIMESTAMP_S', () => {
    expect(parseLogicalTypeString('TIMESTAMP_S')).toStrictEqual(TIMESTAMP_S);
  });
  test('TIMESTAMP_MS', () => {
    expect(parseLogicalTypeString('TIMESTAMP_MS')).toStrictEqual(TIMESTAMP_MS);
  });
  test('TIMESTAMP_NS', () => {
    expect(parseLogicalTypeString('TIMESTAMP_NS')).toStrictEqual(TIMESTAMP_NS);
  });
  test('TIME WITH TIME ZONE', () => {
    expect(parseLogicalTypeString('TIME WITH TIME ZONE')).toStrictEqual(TIMETZ);
  });
  test('TIMESTAMP WITH TIME ZONE', () => {
    expect(parseLogicalTypeString('TIMESTAMP WITH TIME ZONE')).toStrictEqual(
      TIMESTAMPTZ,
    );
  });
  test('FLOAT', () => {
    expect(parseLogicalTypeString('FLOAT')).toStrictEqual(FLOAT);
  });
  test('DOUBLE', () => {
    expect(parseLogicalTypeString('DOUBLE')).toStrictEqual(DOUBLE);
  });

  test('DECIMAL(18,6)', () => {
    expect(parseLogicalTypeString('DECIMAL(18,6)')).toStrictEqual(
      DECIMAL(18, 6),
    );
  });

  test(`ENUM('DUCK_DUCK_ENUM', 'GOOSE')`, () => {
    expect(
      parseLogicalTypeString(`ENUM('DUCK_DUCK_ENUM', 'GOOSE')`),
    ).toStrictEqual(ENUM(['DUCK_DUCK_ENUM', 'GOOSE']));
  });

  test('DOUBLE[]', () => {
    expect(parseLogicalTypeString('DOUBLE[]')).toStrictEqual(LIST(DOUBLE));
  });

  test('STRUCT(a INTEGER, b VARCHAR)', () => {
    expect(
      parseLogicalTypeString('STRUCT(a INTEGER, b VARCHAR)'),
    ).toStrictEqual(
      STRUCT({
        a: INTEGER,
        b: VARCHAR,
      }),
    );
  });

  test('STRUCT(a INTEGER[], b VARCHAR[])', () => {
    expect(
      parseLogicalTypeString('STRUCT(a INTEGER[], b VARCHAR[])'),
    ).toStrictEqual(
      STRUCT({
        a: LIST(INTEGER),
        b: LIST(VARCHAR),
      }),
    );
  });

  test('STRUCT(a INTEGER, b VARCHAR)[]', () => {
    expect(
      parseLogicalTypeString('STRUCT(a INTEGER, b VARCHAR)[]'),
    ).toStrictEqual(
      LIST(
        STRUCT({
          a: INTEGER,
          b: VARCHAR,
        }),
      ),
    );
  });

  // addition: nested struct
  test('STRUCT(a STRUCT(b INTEGER), b VARCHAR)', () => {
    expect(
      parseLogicalTypeString('STRUCT(a STRUCT(b INTEGER), b VARCHAR)'),
    ).toStrictEqual(
      STRUCT({
        a: STRUCT({ b: INTEGER }),
        b: VARCHAR,
      }),
    );
  });
  test('STRUCT("my weird ""key" INTEGER, b VARCHAR)', () => {
    expect(
      parseLogicalTypeString('STRUCT("my weird ""key" INTEGER, b VARCHAR)'),
    ).toStrictEqual(
      STRUCT({
        '"my weird ""key"': INTEGER,
        b: VARCHAR,
      }),
    );
  });
  test('STRUCT("my weird ""key" STRUCT("my other ""weird key" INTEGER), b VARCHAR)', () => {
    expect(
      parseLogicalTypeString(
        'STRUCT("my weird ""key" STRUCT("my other ""weird key" INTEGER), b VARCHAR)',
      ),
    ).toStrictEqual(
      STRUCT({
        '"my weird ""key"': STRUCT({
          '"my other ""weird key"': INTEGER,
        }),
        b: VARCHAR,
      }),
    );
  });

  test('MAP(INTEGER, VARCHAR)', () => {
    expect(parseLogicalTypeString('MAP(INTEGER, VARCHAR)')).toStrictEqual(
      MAP(INTEGER, VARCHAR),
    );
  });

  test('MAP(VARCHAR, STRUCT(b INTEGER))', () => {
    expect(
      parseLogicalTypeString('MAP(VARCHAR, STRUCT(b INTEGER))'),
    ).toStrictEqual(MAP(VARCHAR, STRUCT({ b: INTEGER })));
  });

  test('UNION("name" VARCHAR, age SMALLINT)', () => {
    expect(
      parseLogicalTypeString('UNION("name" VARCHAR, age SMALLINT)'),
    ).toStrictEqual(
      UNION({
        '"name"': VARCHAR,
        age: SMALLINT,
      }),
    );
  });

  test('INTEGER[3]', () => {
    expect(parseLogicalTypeString('INTEGER[3]')).toStrictEqual(
      ARRAY(INTEGER, 3),
    );
  });

  test('STRUCT(a INTEGER, b VARCHAR)[3]', () => {
    expect(
      parseLogicalTypeString('STRUCT(a INTEGER, b VARCHAR)[3]'),
    ).toStrictEqual(
      ARRAY(
        STRUCT({
          a: INTEGER,
          b: VARCHAR,
        }),
        3,
      ),
    );
  });

  test('STRUCT(a INTEGER[3], b VARCHAR[3])', () => {
    expect(
      parseLogicalTypeString('STRUCT(a INTEGER[3], b VARCHAR[3])'),
    ).toStrictEqual(
      STRUCT({
        a: ARRAY(INTEGER, 3),
        b: ARRAY(VARCHAR, 3),
      }),
    );
  });

  test('INTEGER[][3]', () => {
    expect(parseLogicalTypeString('INTEGER[][3]')).toStrictEqual(
      ARRAY(LIST(INTEGER), 3),
    );
  });

  test('INTEGER[3][]', () => {
    expect(parseLogicalTypeString('INTEGER[3][]')).toStrictEqual(
      LIST(ARRAY(INTEGER, 3)),
    );
  });

  test('UUID', () => {
    expect(parseLogicalTypeString('UUID')).toStrictEqual(UUID);
  });
  test('INTERVAL', () => {
    expect(parseLogicalTypeString('INTERVAL')).toStrictEqual(INTERVAL);
  });
  test('VARCHAR', () => {
    expect(parseLogicalTypeString('VARCHAR')).toStrictEqual(VARCHAR);
  });
  test('VARINT', () => {
    expect(parseLogicalTypeString('VARINT')).toStrictEqual(VARINT);
  });
  test('BLOB', () => {
    expect(parseLogicalTypeString('BLOB')).toStrictEqual(BLOB);
  });
  test('BIT', () => {
    expect(parseLogicalTypeString('BIT')).toStrictEqual(BIT);
  });
});
6
ts/pkgs/duckdb-data-types/test/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.test.json",
  "references": [
    { "path": "../src" }
  ]
}
34
ts/pkgs/duckdb-data-values/package.json
Normal file
@@ -0,0 +1,34 @@
{
  "name": "@duckdb/data-values",
  "version": "0.0.1",
  "description": "Utilities for representing DuckDB values",
  "type": "module",
  "main": "./out/index.js",
  "module": "./out/index.js",
  "types": "./out/index.d.ts",
  "scripts": {
    "preinstall": "pnpm build:src",
    "build": "tsc -b src test",
    "build:src": "tsc -b src",
    "build:test": "tsc -b test",
    "build:watch": "tsc -b src test --watch",
    "check": "pnpm format:check && pnpm lint",
    "clean": "rimraf out",
    "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check",
    "format:write": "prettier . --ignore-path $(find-up .prettierignore) --write",
    "lint": "pnpm eslint src test",
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "devDependencies": {
    "@eslint/js": "^9.24.0",
    "eslint": "^9.24.0",
    "find-up-cli": "^6.0.0",
    "prettier": "^3.5.3",
    "rimraf": "^6.0.1",
    "typescript": "^5.8.3",
    "typescript-eslint": "^8.30.1",
    "vite": "^6.2.6",
    "vitest": "^3.1.1"
  }
}
23
ts/pkgs/duckdb-data-values/src/DuckDBArrayValue.ts
Normal file
@@ -0,0 +1,23 @@
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
import { DuckDBValue } from './DuckDBValue.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBArrayValue extends SpecialDuckDBValue {
  public readonly values: readonly DuckDBValue[];

  constructor(values: readonly DuckDBValue[]) {
    super();
    this.values = values;
  }

  public toDuckDBString(): string {
    const valueStrings = this.values.map(displayStringForDuckDBValue);
    return `[${valueStrings.join(', ')}]`;
  }

  public toJson(): Json {
    return this.values.map(jsonFromDuckDBValue);
  }
}
123
ts/pkgs/duckdb-data-values/src/DuckDBBitValue.ts
Normal file
@@ -0,0 +1,123 @@
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBBitValue extends SpecialDuckDBValue {
  public readonly data: Uint8Array;

  constructor(data: Uint8Array) {
    super();
    this.data = data;
  }

  public padding(): number {
    return this.data[0];
  }

  public get length(): number {
    return (this.data.length - 1) * 8 - this.padding();
  }

  public getBool(index: number): boolean {
    const offset = index + this.padding();
    const dataIndex = Math.floor(offset / 8) + 1;
    const byte = this.data[dataIndex] >> (7 - (offset % 8));
    return (byte & 1) !== 0;
  }

  public toBools(): boolean[] {
    const bools: boolean[] = [];
    const length = this.length;
    for (let i = 0; i < length; i++) {
      bools.push(this.getBool(i));
    }
    return bools;
  }

  public getBit(index: number): 0 | 1 {
    return this.getBool(index) ? 1 : 0;
  }

  public toBits(): number[] {
    const bits: number[] = [];
    const length = this.length;
    for (let i = 0; i < length; i++) {
      bits.push(this.getBit(i));
    }
    return bits;
  }

  public toDuckDBString(): string {
    const length = this.length;
    const chars = Array.from<string>({ length });
    for (let i = 0; i < length; i++) {
      chars[i] = this.getBool(i) ? '1' : '0';
    }
    return chars.join('');
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }

  public static fromString(str: string, on: string = '1'): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(
      str.length,
      (i) => str[i] === on,
    );
  }

  public static fromBits(
    bits: readonly number[],
    on: number = 1,
  ): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(
      bits.length,
      (i) => bits[i] === on,
    );
  }

  public static fromBools(bools: readonly boolean[]): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(bools.length, (i) => bools[i]);
  }

  public static fromLengthAndPredicate(
    length: number,
    predicate: (index: number) => boolean,
  ): DuckDBBitValue {
    const byteCount = Math.ceil(length / 8) + 1;
    const paddingBitCount = (8 - (length % 8)) % 8;

    const data = new Uint8Array(byteCount);
    let byteIndex = 0;

    // first byte contains count of padding bits
    data[byteIndex++] = paddingBitCount;

    let byte = 0;
    let byteBit = 0;

    // padding consists of 1s in MSB of second byte
    while (byteBit < paddingBitCount) {
      byte <<= 1;
      byte |= 1;
      byteBit++;
    }

    let bitIndex = 0;

    while (byteIndex < byteCount) {
      while (byteBit < 8) {
        byte <<= 1;
        if (predicate(bitIndex++)) {
          byte |= 1;
        }
        byteBit++;
      }
      data[byteIndex++] = byte;
      byte = 0;
      byteBit = 0;
    }

    return new DuckDBBitValue(data);
  }
}
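
A sketch (not part of the diff) of the padding scheme above: a 3-bit value occupies one data byte plus the leading padding-count byte, with five unused high bits recorded as padding.

import { DuckDBBitValue } from './DuckDBBitValue.js';

const bits = DuckDBBitValue.fromString('101');
console.log(bits.length); // 3
console.log(bits.padding()); // 5
console.log(bits.toDuckDBString()); // '101'
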
20
ts/pkgs/duckdb-data-values/src/DuckDBBlobValue.ts
Normal file
@@ -0,0 +1,20 @@
import { stringFromBlob } from './conversion/stringFromBlob.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBBlobValue extends SpecialDuckDBValue {
  public readonly bytes: Uint8Array;

  constructor(bytes: Uint8Array) {
    super();
    this.bytes = bytes;
  }

  public toDuckDBString(): string {
    return stringFromBlob(this.bytes);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
20
ts/pkgs/duckdb-data-values/src/DuckDBDateValue.ts
Normal file
@@ -0,0 +1,20 @@
import { getDuckDBDateStringFromDays } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBDateValue extends SpecialDuckDBValue {
  public readonly days: number;

  constructor(days: number) {
    super();
    this.days = days;
  }

  public toDuckDBString(): string {
    return getDuckDBDateStringFromDays(this.days);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
38
ts/pkgs/duckdb-data-values/src/DuckDBDecimalValue.ts
Normal file
@@ -0,0 +1,38 @@
import {
  DuckDBDecimalFormatOptions,
  stringFromDecimal,
} from './conversion/stringFromDecimal.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBDecimalValue extends SpecialDuckDBValue {
  public readonly scaledValue: bigint;

  public readonly scale: number;

  constructor(scaledValue: bigint, scale: number) {
    super();
    this.scaledValue = scaledValue;
    this.scale = scale;
  }

  public toDuckDBString(): string {
    return stringFromDecimal(this.scaledValue, this.scale);
  }

  /** Returns a string representation appropriate to the host environment's current locale. */
  public toLocaleString(
    locales?: string | string[],
    options?: DuckDBDecimalFormatOptions,
  ): string {
    return stringFromDecimal(this.scaledValue, this.scale, {
      locales,
      options,
    });
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
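
A sketch (not part of the diff): a DECIMAL is held as an unscaled bigint plus a scale, so 1234.56 at scale 2 is the scaled value 123456n. The exact output depends on stringFromDecimal, which is not shown in this diff.

import { DuckDBDecimalValue } from './DuckDBDecimalValue.js';

const price = new DuckDBDecimalValue(123456n, 2);
console.log(price.toDuckDBString()); // expected: 1234.56
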
26
ts/pkgs/duckdb-data-values/src/DuckDBIntervalValue.ts
Normal file
@@ -0,0 +1,26 @@
import { getDuckDBIntervalString } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBIntervalValue extends SpecialDuckDBValue {
  public readonly months: number;

  public readonly days: number;

  public readonly microseconds: bigint;

  constructor(months: number, days: number, microseconds: bigint) {
    super();
    this.months = months;
    this.days = days;
    this.microseconds = microseconds;
  }

  public toDuckDBString(): string {
    return getDuckDBIntervalString(this.months, this.days, this.microseconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
23
ts/pkgs/duckdb-data-values/src/DuckDBListValue.ts
Normal file
@@ -0,0 +1,23 @@
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
import { DuckDBValue } from './DuckDBValue.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBListValue extends SpecialDuckDBValue {
  public readonly values: readonly DuckDBValue[];

  constructor(values: readonly DuckDBValue[]) {
    super();
    this.values = values;
  }

  public toDuckDBString(): string {
    const valueStrings = this.values.map(displayStringForDuckDBValue);
    return `[${valueStrings.join(', ')}]`;
  }

  public toJson(): Json {
    return this.values.map(jsonFromDuckDBValue);
  }
}
6
ts/pkgs/duckdb-data-values/src/DuckDBMapEntry.ts
Normal file
@@ -0,0 +1,6 @@
import { DuckDBValue } from './DuckDBValue.js';

export interface DuckDBMapEntry {
  readonly key: DuckDBValue;
  readonly value: DuckDBValue;
}
33
ts/pkgs/duckdb-data-values/src/DuckDBMapValue.ts
Normal file
@@ -0,0 +1,33 @@
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
import { DuckDBMapEntry } from './DuckDBMapEntry.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBMapValue extends SpecialDuckDBValue {
  public readonly entries: readonly DuckDBMapEntry[];

  constructor(entries: readonly DuckDBMapEntry[]) {
    super();
    this.entries = entries;
  }

  public toDuckDBString(): string {
    const entryStrings = this.entries.map(
      ({ key, value }) =>
        `${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue(
          value,
        )}`,
    );
    return `{${entryStrings.join(', ')}}`;
  }

  public toJson(): Json {
    // Use the object arm of the Json union so string keys can be assigned.
    const result: { [key: string]: Json } = {};
    for (const { key, value } of this.entries) {
      const keyString = displayStringForDuckDBValue(key);
      result[keyString] = jsonFromDuckDBValue(value);
    }
    return result;
  }
}
6
ts/pkgs/duckdb-data-values/src/DuckDBStructEntry.ts
Normal file
@@ -0,0 +1,6 @@
import { DuckDBValue } from './DuckDBValue.js';

export interface DuckDBStructEntry {
  readonly key: string;
  readonly value: DuckDBValue;
}
33
ts/pkgs/duckdb-data-values/src/DuckDBStructValue.ts
Normal file
@@ -0,0 +1,33 @@
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
import { DuckDBStructEntry } from './DuckDBStructEntry.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBStructValue extends SpecialDuckDBValue {
  public readonly entries: readonly DuckDBStructEntry[];

  constructor(entries: readonly DuckDBStructEntry[]) {
    super();
    this.entries = entries;
  }

  public toDuckDBString(): string {
    const entryStrings = this.entries.map(
      ({ key, value }) =>
        `${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue(
          value,
        )}`,
    );
    return `{${entryStrings.join(', ')}}`;
  }

  public toJson(): Json {
    // Use the object arm of the Json union so string keys can be assigned.
    const result: { [key: string]: Json } = {};
    for (const { key, value } of this.entries) {
      const keyString = displayStringForDuckDBValue(key);
      result[keyString] = jsonFromDuckDBValue(value);
    }
    return result;
  }
}
42
ts/pkgs/duckdb-data-values/src/DuckDBTimeTZValue.ts
Normal file
@@ -0,0 +1,42 @@
import {
  getDuckDBTimeStringFromMicrosecondsInDay,
  getOffsetStringFromSeconds,
} from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimeTZValue extends SpecialDuckDBValue {
  public readonly micros: bigint;
  public readonly offset: number;

  constructor(micros: bigint, offset: number) {
    super();
    this.micros = micros;
    this.offset = offset;
  }

  public toDuckDBString(): string {
    return `${getDuckDBTimeStringFromMicrosecondsInDay(
      this.micros,
    )}${getOffsetStringFromSeconds(this.offset)}`;
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }

  private static TimeBits = 40;
  private static OffsetBits = 24;
  private static MaxOffset = 16 * 60 * 60 - 1; // ±15:59:59 = 57599 seconds

  public static fromBits(bits: bigint): DuckDBTimeTZValue {
    const micros = BigInt.asUintN(
      DuckDBTimeTZValue.TimeBits,
      bits >> BigInt(DuckDBTimeTZValue.OffsetBits),
    );
    const offset =
      DuckDBTimeTZValue.MaxOffset -
      Number(BigInt.asUintN(DuckDBTimeTZValue.OffsetBits, bits));
    return new DuckDBTimeTZValue(micros, offset);
  }
}
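
A sketch (not part of the diff) of the packed TIMETZ layout decoded by fromBits: 40 bits of microseconds above 24 bits of offset, where the stored offset is MaxOffset minus the signed offset in seconds. The local maxOffset mirrors the private constant above.

import { DuckDBTimeTZValue } from './DuckDBTimeTZValue.js';

const micros = 3723000000n; // 01:02:03
const offsetSeconds = 3600; // UTC+1
const maxOffset = 16 * 60 * 60 - 1;
const bits = (micros << 24n) | BigInt(maxOffset - offsetSeconds);
const t = DuckDBTimeTZValue.fromBits(bits);
console.log(t.micros, t.offset); // 3723000000n 3600
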
20
ts/pkgs/duckdb-data-values/src/DuckDBTimeValue.ts
Normal file
@@ -0,0 +1,20 @@
import { getDuckDBTimeStringFromMicrosecondsInDay } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimeValue extends SpecialDuckDBValue {
  public readonly microseconds: bigint;

  constructor(microseconds: bigint) {
    super();
    this.microseconds = microseconds;
  }

  public toDuckDBString(): string {
    return getDuckDBTimeStringFromMicrosecondsInDay(this.microseconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
@@ -0,0 +1,22 @@
import { getDuckDBTimestampStringFromMicroseconds } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimestampMicrosecondsValue extends SpecialDuckDBValue {
  public readonly microseconds: bigint;

  constructor(microseconds: bigint) {
    super();
    this.microseconds = microseconds;
  }

  public toDuckDBString(): string {
    return getDuckDBTimestampStringFromMicroseconds(this.microseconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}

export type DuckDBTimestamp = DuckDBTimestampMicrosecondsValue;
@@ -0,0 +1,20 @@
import { getDuckDBTimestampStringFromMilliseconds } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimestampMillisecondsValue extends SpecialDuckDBValue {
  public readonly milliseconds: bigint;

  constructor(milliseconds: bigint) {
    super();
    this.milliseconds = milliseconds;
  }

  public toDuckDBString(): string {
    return getDuckDBTimestampStringFromMilliseconds(this.milliseconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
@@ -0,0 +1,20 @@
import { getDuckDBTimestampStringFromNanoseconds } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimestampNanosecondsValue extends SpecialDuckDBValue {
  public readonly nanoseconds: bigint;

  constructor(nanoseconds: bigint) {
    super();
    this.nanoseconds = nanoseconds;
  }

  public toDuckDBString(): string {
    return getDuckDBTimestampStringFromNanoseconds(this.nanoseconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
@@ -0,0 +1,20 @@
import { getDuckDBTimestampStringFromSeconds } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimestampSecondsValue extends SpecialDuckDBValue {
  public readonly seconds: bigint;

  constructor(seconds: bigint) {
    super();
    this.seconds = seconds;
  }

  public toDuckDBString(): string {
    return getDuckDBTimestampStringFromSeconds(this.seconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
24
ts/pkgs/duckdb-data-values/src/DuckDBTimestampTZValue.ts
Normal file
@@ -0,0 +1,24 @@
import { getDuckDBTimestampStringFromMicroseconds } from './conversion/dateTimeStringConversion.js';
import { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimestampTZValue extends SpecialDuckDBValue {
  public readonly microseconds: bigint;

  constructor(microseconds: bigint) {
    super();
    this.microseconds = microseconds;
  }

  public toDuckDBString(toStringOptions?: DuckDBToStringOptions): string {
    return getDuckDBTimestampStringFromMicroseconds(
      this.microseconds,
      toStringOptions?.timezoneOffsetInMinutes || 0,
    );
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
3
ts/pkgs/duckdb-data-values/src/DuckDBToStringOptions.ts
Normal file
@@ -0,0 +1,3 @@
export interface DuckDBToStringOptions {
  timezoneOffsetInMinutes?: number;
}
48
ts/pkgs/duckdb-data-values/src/DuckDBUUIDValue.ts
Normal file
@@ -0,0 +1,48 @@
import { hexFromBlob } from './conversion/hexFromBlob.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBUUIDValue extends SpecialDuckDBValue {
  public readonly bytes: Uint8Array;

  constructor(bytes: Uint8Array) {
    super();
    this.bytes = bytes;
  }

  public toDuckDBString(): string {
    if (this.bytes.length !== 16) {
      throw new Error('Invalid UUID bytes length');
    }

    // Insert dashes to format the UUID
    return `${hexFromBlob(this.bytes, 0, 4)}-${hexFromBlob(this.bytes, 4, 6)}-${hexFromBlob(this.bytes, 6, 8)}-${hexFromBlob(this.bytes, 8, 10)}-${hexFromBlob(this.bytes, 10, 16)}`;
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }

  /**
   * Create a DuckDBUUIDValue value from a HUGEINT as stored by DuckDB.
   *
   * UUID values are stored with their MSB flipped so their numeric ordering matches their string ordering.
   */
  public static fromStoredHugeint(hugeint: bigint): DuckDBUUIDValue {
    // Flip the MSB and truncate to 128 bits to extract the represented unsigned 128-bit value.
    const uint128 =
      (hugeint ^ 0x80000000000000000000000000000000n) &
      0xffffffffffffffffffffffffffffffffn;
    return DuckDBUUIDValue.fromUint128(uint128);
  }

  /** Create a DuckDBUUIDValue value from an unsigned 128-bit integer in a JS BigInt. */
  public static fromUint128(uint128: bigint): DuckDBUUIDValue {
    const bytes = new Uint8Array(16);
    const dv = new DataView(bytes.buffer);
    // Write the unsigned 128-bit integer to the buffer in big endian format.
    dv.setBigUint64(0, BigInt.asUintN(64, uint128 >> BigInt(64)), false);
    dv.setBigUint64(8, BigInt.asUintN(64, uint128), false);
    return new DuckDBUUIDValue(bytes);
  }
}
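
A sketch (not part of the diff) of the MSB flip described above: the stored HUGEINT 0n corresponds to a UUID whose top bit is set. The dashed output assumes hexFromBlob renders the given byte range as hex, which is not shown in this diff.

import { DuckDBUUIDValue } from './DuckDBUUIDValue.js';

const uuid = DuckDBUUIDValue.fromStoredHugeint(0n);
console.log(uuid.toDuckDBString());
// expected: 80000000-0000-0000-0000-000000000000
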
9
ts/pkgs/duckdb-data-values/src/DuckDBValue.ts
Normal file
@@ -0,0 +1,9 @@
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export type DuckDBValue =
  | null
  | boolean
  | number
  | string
  | bigint // TODO: Should types requiring bigint be SpecialDuckDBValues?
  | SpecialDuckDBValue;
7
ts/pkgs/duckdb-data-values/src/Json.ts
Normal file
@@ -0,0 +1,7 @@
export type Json =
  | null
  | boolean
  | number
  | string
  | Json[]
  | { [key: string]: Json };
15
ts/pkgs/duckdb-data-values/src/SpecialDuckDBValue.ts
Normal file
@@ -0,0 +1,15 @@
import { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
import { Json } from './Json.js';

export abstract class SpecialDuckDBValue {
  // The presence of this function can be used to identify SpecialDuckDBValue objects.
  public abstract toDuckDBString(
    toStringOptions?: DuckDBToStringOptions,
  ): string;

  public toString(): string {
    return this.toDuckDBString();
  }

  public abstract toJson(): Json;
}
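
A hypothetical narrowing helper (not part of the diff) built on the comment above, which notes that the presence of toDuckDBString identifies SpecialDuckDBValue objects:

import { DuckDBValue } from './DuckDBValue.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

function isSpecialDuckDBValue(value: DuckDBValue): value is SpecialDuckDBValue {
  return (
    typeof value === 'object' && value !== null && 'toDuckDBString' in value
  );
}
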
@@ -0,0 +1,264 @@
const DAYS_IN_400_YEARS = 146097; // (((365 * 4 + 1) * 25) - 1) * 4 + 1
const MILLISECONDS_PER_DAY_NUM = 86400000; // 1000 * 60 * 60 * 24

const MICROSECONDS_PER_SECOND = BigInt(1000000);
const MICROSECONDS_PER_MILLISECOND = BigInt(1000);
const NANOSECONDS_PER_MICROSECOND = BigInt(1000);
const SECONDS_PER_MINUTE = BigInt(60);
const MINUTES_PER_HOUR = BigInt(60);
const MICROSECONDS_PER_DAY = BigInt(86400000000); // 24 * 60 * 60 * 1000000

const NEGATIVE_INFINITY_TIMESTAMP = BigInt('-9223372036854775807'); // -(2^63-1)
const POSITIVE_INFINITY_TIMESTAMP = BigInt('9223372036854775807'); // 2^63-1

export function getDuckDBDateStringFromYearMonthDay(
  year: number,
  month: number,
  dayOfMonth: number,
): string {
  const yearStr = String(Math.abs(year)).padStart(4, '0');
  const monthStr = String(month).padStart(2, '0');
  const dayOfMonthStr = String(dayOfMonth).padStart(2, '0');
  return `${yearStr}-${monthStr}-${dayOfMonthStr}${year < 0 ? ' (BC)' : ''}`;
}

export function getDuckDBDateStringFromDays(days: number): string {
  const absDays = Math.abs(days);
  const sign = days < 0 ? -1 : 1;
  // 400 years is the shortest interval with a fixed number of days. (Leap years and different-length months can result
  // in shorter intervals having different numbers of days.) By separating the number of 400-year intervals from the
  // interval covered by the remaining days, we can guarantee that the date resulting from shifting the epoch by the
  // remaining interval is within the valid range of the JS Date object. This allows us to use JS Date to calculate the
  // year, month, and day of month for the date represented by the remaining interval, thus accounting for leap years
  // and different-length months. We can then safely add back the years from the 400-year intervals, because the month
  // and day of month won't change when a date is shifted by a whole number of such intervals.
  const num400YearIntervals = Math.floor(absDays / DAYS_IN_400_YEARS);
  const yearsFrom400YearIntervals = sign * num400YearIntervals * 400;
  const absDaysFromRemainingInterval = absDays % DAYS_IN_400_YEARS;
  const millisecondsFromRemainingInterval =
    sign * absDaysFromRemainingInterval * MILLISECONDS_PER_DAY_NUM;
  const date = new Date(millisecondsFromRemainingInterval);
  let year = yearsFrom400YearIntervals + date.getUTCFullYear();
  if (year < 0) {
    year--; // correct for non-existence of year zero
  }
  const month = date.getUTCMonth() + 1; // getUTCMonth returns a zero-indexed month, but we want a one-indexed month for display
  const dayOfMonth = date.getUTCDate(); // getUTCDate returns a one-indexed day-of-month
  return getDuckDBDateStringFromYearMonthDay(year, month, dayOfMonth);
}
|
||||
export function getTimezoneOffsetString(
|
||||
timezoneOffsetInMinutes?: number,
|
||||
): string | undefined {
|
||||
if (timezoneOffsetInMinutes === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
const negative = timezoneOffsetInMinutes < 0;
|
||||
const positiveMinutes = Math.abs(timezoneOffsetInMinutes);
|
||||
const minutesPart = positiveMinutes % 60;
|
||||
const hoursPart = Math.floor(positiveMinutes / 60);
|
||||
const minutesStr =
|
||||
minutesPart !== 0 ? String(minutesPart).padStart(2, '0') : '';
|
||||
const hoursStr = String(hoursPart).padStart(2, '0');
|
||||
return `${negative ? '-' : '+'}${hoursStr}${minutesStr ? `:${minutesStr}` : ''}`;
|
||||
}
|
||||
|
||||
export function getAbsoluteOffsetStringFromParts(
|
||||
hoursPart: number,
|
||||
minutesPart: number,
|
||||
secondsPart: number,
|
||||
): string {
|
||||
const hoursStr = String(hoursPart).padStart(2, '0');
|
||||
const minutesStr =
|
||||
minutesPart !== 0 || secondsPart !== 0
|
||||
? String(minutesPart).padStart(2, '0')
|
||||
: '';
|
||||
const secondsStr =
|
||||
secondsPart !== 0 ? String(secondsPart).padStart(2, '0') : '';
|
||||
let result = hoursStr;
|
||||
if (minutesStr) {
|
||||
result += `:${minutesStr}`;
|
||||
if (secondsStr) {
|
||||
result += `:${secondsStr}`;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function getOffsetStringFromAbsoluteSeconds(
|
||||
absoluteOffsetInSeconds: number,
|
||||
): string {
|
||||
const secondsPart = absoluteOffsetInSeconds % 60;
|
||||
const minutes = Math.floor(absoluteOffsetInSeconds / 60);
|
||||
const minutesPart = minutes % 60;
|
||||
const hoursPart = Math.floor(minutes / 60);
|
||||
return getAbsoluteOffsetStringFromParts(hoursPart, minutesPart, secondsPart);
|
||||
}
|
||||
|
||||
export function getOffsetStringFromSeconds(offsetInSeconds: number): string {
|
||||
const negative = offsetInSeconds < 0;
|
||||
const absoluteOffsetInSeconds = negative ? -offsetInSeconds : offsetInSeconds;
|
||||
const absoluteString = getOffsetStringFromAbsoluteSeconds(
|
||||
absoluteOffsetInSeconds,
|
||||
);
|
||||
return `${negative ? '-' : '+'}${absoluteString}`;
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromParts(
|
||||
hoursPart: bigint,
|
||||
minutesPart: bigint,
|
||||
secondsPart: bigint,
|
||||
microsecondsPart: bigint,
|
||||
): string {
|
||||
const hoursStr = String(hoursPart).padStart(2, '0');
|
||||
const minutesStr = String(minutesPart).padStart(2, '0');
|
||||
const secondsStr = String(secondsPart).padStart(2, '0');
|
||||
const microsecondsStr = String(microsecondsPart)
|
||||
.padStart(6, '0')
|
||||
.replace(/0+$/, '');
|
||||
return `${hoursStr}:${minutesStr}:${secondsStr}${
|
||||
microsecondsStr.length > 0 ? `.${microsecondsStr}` : ''
|
||||
}`;
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromPositiveMicroseconds(
|
||||
positiveMicroseconds: bigint,
|
||||
): string {
|
||||
const microsecondsPart = positiveMicroseconds % MICROSECONDS_PER_SECOND;
|
||||
const seconds = positiveMicroseconds / MICROSECONDS_PER_SECOND;
|
||||
const secondsPart = seconds % SECONDS_PER_MINUTE;
|
||||
const minutes = seconds / SECONDS_PER_MINUTE;
|
||||
const minutesPart = minutes % MINUTES_PER_HOUR;
|
||||
const hoursPart = minutes / MINUTES_PER_HOUR;
|
||||
return getDuckDBTimeStringFromParts(
|
||||
hoursPart,
|
||||
minutesPart,
|
||||
secondsPart,
|
||||
microsecondsPart,
|
||||
);
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromMicrosecondsInDay(
|
||||
microsecondsInDay: bigint,
|
||||
): string {
|
||||
const positiveMicroseconds =
|
||||
microsecondsInDay < 0
|
||||
? microsecondsInDay + MICROSECONDS_PER_DAY
|
||||
: microsecondsInDay;
|
||||
return getDuckDBTimeStringFromPositiveMicroseconds(positiveMicroseconds);
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromMicroseconds(
|
||||
microseconds: bigint,
|
||||
): string {
|
||||
const negative = microseconds < 0;
|
||||
const positiveMicroseconds = negative ? -microseconds : microseconds;
|
||||
const positiveString =
|
||||
getDuckDBTimeStringFromPositiveMicroseconds(positiveMicroseconds);
|
||||
return negative ? `-${positiveString}` : positiveString;
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromDaysAndMicroseconds(
|
||||
days: bigint,
|
||||
microsecondsInDay: bigint,
|
||||
timezonePart?: string,
|
||||
): string {
|
||||
// This conversion of BigInt to Number is safe, because the largest absolute value that `days` can has is 106751991,
|
||||
// which fits without loss of precision in a JS Number. (106751991 = (2^63-1) / MICROSECONDS_PER_DAY)
|
||||
const dateStr = getDuckDBDateStringFromDays(Number(days));
|
||||
const timeStr = getDuckDBTimeStringFromMicrosecondsInDay(microsecondsInDay);
|
||||
return `${dateStr} ${timeStr}${timezonePart ?? ''}`;
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromMicroseconds(
|
||||
microseconds: bigint,
|
||||
timezoneOffsetInMinutes?: number,
|
||||
): string {
|
||||
// Note that -infinity and infinity are only representable in TIMESTAMP (and TIMESTAMPTZ), not the other timestamp
|
||||
// variants. This is by-design and matches DuckDB.
|
||||
if (microseconds === NEGATIVE_INFINITY_TIMESTAMP) {
|
||||
return '-infinity';
|
||||
}
|
||||
if (microseconds === POSITIVE_INFINITY_TIMESTAMP) {
|
||||
return 'infinity';
|
||||
}
|
||||
const offsetMicroseconds =
|
||||
timezoneOffsetInMinutes !== undefined
|
||||
? microseconds +
|
||||
BigInt(timezoneOffsetInMinutes) *
|
||||
MICROSECONDS_PER_SECOND *
|
||||
SECONDS_PER_MINUTE
|
||||
: microseconds;
|
||||
let days = offsetMicroseconds / MICROSECONDS_PER_DAY;
|
||||
let microsecondsPart = offsetMicroseconds % MICROSECONDS_PER_DAY;
|
||||
if (microsecondsPart < 0) {
|
||||
days--;
|
||||
microsecondsPart += MICROSECONDS_PER_DAY;
|
||||
}
|
||||
return getDuckDBTimestampStringFromDaysAndMicroseconds(
|
||||
days,
|
||||
microsecondsPart,
|
||||
getTimezoneOffsetString(timezoneOffsetInMinutes),
|
||||
);
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromSeconds(seconds: bigint): string {
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
seconds * MICROSECONDS_PER_SECOND,
|
||||
);
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromMilliseconds(
|
||||
milliseconds: bigint,
|
||||
): string {
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
milliseconds * MICROSECONDS_PER_MILLISECOND,
|
||||
);
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromNanoseconds(
|
||||
nanoseconds: bigint,
|
||||
): string {
|
||||
// Note that this division causes loss of precision. This matches the behavior of the DuckDB. It's important that this
|
||||
// precision loss happen before the negative correction in getTimestampStringFromMicroseconds, otherwise off-by-one
|
||||
// errors can occur.
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
nanoseconds / NANOSECONDS_PER_MICROSECOND,
|
||||
);
|
||||
}
|
||||
|
||||
// Assumes baseUnit can be pluralized by adding an 's'.
|
||||
function numberAndUnit(value: number, baseUnit: string): string {
|
||||
return `${value} ${baseUnit}${value !== 1 ? 's' : ''}`;
|
||||
}
|
||||
|
||||
export function getDuckDBIntervalString(
|
||||
months: number,
|
||||
days: number,
|
||||
microseconds: bigint,
|
||||
): string {
|
||||
const parts: string[] = [];
|
||||
if (months !== 0) {
|
||||
const sign = months < 0 ? -1 : 1;
|
||||
const absMonths = Math.abs(months);
|
||||
const absYears = Math.floor(absMonths / 12);
|
||||
const years = sign * absYears;
|
||||
const extraMonths = sign * (absMonths - absYears * 12);
|
||||
if (years !== 0) {
|
||||
parts.push(numberAndUnit(years, 'year'));
|
||||
}
|
||||
if (extraMonths !== 0) {
|
||||
parts.push(numberAndUnit(extraMonths, 'month'));
|
||||
}
|
||||
}
|
||||
if (days !== 0) {
|
||||
parts.push(numberAndUnit(days, 'day'));
|
||||
}
|
||||
if (microseconds !== BigInt(0)) {
|
||||
parts.push(getDuckDBTimeStringFromMicroseconds(microseconds));
|
||||
}
|
||||
if (parts.length > 0) {
|
||||
return parts.join(' ');
|
||||
}
|
||||
return '00:00:00';
|
||||
}
|
||||
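A quick illustration of the two main entry points above (the expected strings below match tests included later in this commit):

getDuckDBTimestampStringFromMicroseconds(-7n);
// => '1969-12-31 23:59:59.999993' (the negative correction borrows one day)

getDuckDBIntervalString(24 + 3, 5, 25873000017n); // 07:11:13.000017 expressed in microseconds
// => '2 years 3 months 5 days 07:11:13.000017'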
@@ -0,0 +1,11 @@
import { DuckDBValue } from '../DuckDBValue.js';

export function displayStringForDuckDBValue(value: DuckDBValue): string {
  if (value == null) {
    return 'NULL';
  }
  if (typeof value === 'string') {
    // Double every embedded single quote. (A string pattern passed to `replace` would only
    // replace the first occurrence, so a global regex is needed here.)
    return `'${value.replace(/'/g, `''`)}'`;
  }
  return String(value);
}
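For example, with the global replacement above, embedded single quotes are doubled SQL-style:

displayStringForDuckDBValue(`it's an 'example'`);
// => `'it''s an ''example'''`
displayStringForDuckDBValue(null);
// => 'NULL'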
@@ -0,0 +1,34 @@
/**
 * Returns the JS bigint value represented by the byte array of a VARINT in DuckDB's internal format.
 *
 * DuckDB stores VARINTs as an array of bytes consisting of a three-byte header followed by a variable number of bytes
 * (at least one). The header specifies the number of bytes after the header, and whether the number is positive or
 * negative. The bytes after the header specify the absolute value of the number, in big endian format.
 *
 * The sign of the number is determined by the MSB of the header, which is 1 for positive and 0 for negative. Negative
 * numbers also have all bytes of both the header and value inverted. (For negative numbers, the MSB is 0 after this
 * inversion. Put another way: the MSB of the header is always 1, but it's inverted for negative numbers.)
 */
export function getVarIntFromBytes(bytes: Uint8Array): bigint {
  const firstByte = bytes[0];
  const positive = (firstByte & 0x80) > 0;
  const uint64Mask = positive ? 0n : 0xffffffffffffffffn;
  const uint8Mask = positive ? 0 : 0xff;
  const dv = new DataView(
    bytes.buffer,
    bytes.byteOffset + 3,
    bytes.byteLength - 3,
  );
  const lastUint64Offset = dv.byteLength - 8;
  let offset = 0;
  let result = 0n;
  // Consume the value bytes eight at a time for as long as possible, then one at a time,
  // un-inverting negative encodings via the masks.
  while (offset <= lastUint64Offset) {
    result = (result << 64n) | (dv.getBigUint64(offset) ^ uint64Mask);
    offset += 8;
  }
  while (offset < dv.byteLength) {
    result = (result << 8n) | BigInt(dv.getUint8(offset) ^ uint8Mask);
    offset += 1;
  }
  return positive ? result : -result;
}
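A sketch of the encoding described in the doc comment (byte values constructed by hand from that description, not captured from DuckDB output):

// 1n: a three-byte header with the MSB set and a byte count of 1, then one value byte.
getVarIntFromBytes(new Uint8Array([0x80, 0x00, 0x01, 0x01])); // => 1n

// -1n: the same encoding with every header and value byte inverted.
getVarIntFromBytes(new Uint8Array([0x7f, 0xff, 0xfe, 0xfe])); // => -1n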
20
ts/pkgs/duckdb-data-values/src/conversion/hexFromBlob.ts
Normal file
@@ -0,0 +1,20 @@
export function hexFromBlob(
  blob: Uint8Array,
  start: number | undefined,
  end: number | undefined,
): string {
  if (start === undefined) {
    start = 0;
  }
  if (end === undefined) {
    end = blob.length;
  }
  let hex = '';

  for (let i = start; i < end; i++) {
    const byte = blob[i];
    // Ensure each byte is 2 hex characters
    hex += (byte < 16 ? '0' : '') + byte.toString(16);
  }
  return hex;
}
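For example (note the output is lowercase and unprefixed, unlike the \xNN escapes produced by stringFromBlob):

hexFromBlob(new Uint8Array([0x0f, 0xa0, 0x01]), undefined, undefined); // => '0fa001'
hexFromBlob(new Uint8Array([0x0f, 0xa0, 0x01]), 1, 3); // => 'a001'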
@@ -0,0 +1,16 @@
import { DuckDBValue } from '../DuckDBValue.js';
import { Json } from '../Json.js';
import { SpecialDuckDBValue } from '../SpecialDuckDBValue.js';

export function jsonFromDuckDBValue(value: DuckDBValue): Json {
  if (value === null) {
    return null;
  }
  if (typeof value === 'bigint') {
    return String(value);
  }
  if (value instanceof SpecialDuckDBValue) {
    return value.toJson();
  }
  return value;
}
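Only bigints and SpecialDuckDBValue instances need translation; every other DuckDBValue is already valid JSON. For example:

jsonFromDuckDBValue(123n); // => '123' (JSON has no bigint representation)
jsonFromDuckDBValue(true); // => true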
17
ts/pkgs/duckdb-data-values/src/conversion/stringFromBlob.ts
Normal file
@@ -0,0 +1,17 @@
/** Matches BLOB-to-VARCHAR conversion behavior of DuckDB. */
export function stringFromBlob(bytes: Uint8Array): string {
  let result = '';
  for (const byte of bytes) {
    if (
      byte <= 0x1f ||
      byte === 0x22 /* double quote */ ||
      byte === 0x27 /* single quote */ ||
      byte >= 0x7f
    ) {
      result += `\\x${byte.toString(16).toUpperCase().padStart(2, '0')}`;
    } else {
      result += String.fromCharCode(byte);
    }
  }
  return result;
}
129
ts/pkgs/duckdb-data-values/src/conversion/stringFromDecimal.ts
Normal file
@@ -0,0 +1,129 @@
/**
 * Decimal string formatting.
 *
 * Supports a subset of the functionality of `BigInt.prototype.toLocaleString` for locale-specific formatting.
 */

/*
 * Locale formatting options for DuckDBDecimalValue.
 *
 * This is a subset of the options available for `BigInt.prototype.toLocaleString`.
 */
export interface DuckDBDecimalFormatOptions {
  useGrouping?: boolean;
  minimumFractionDigits?: number;
  maximumFractionDigits?: number;
}

export interface LocaleOptions {
  locales?: string | string[];
  options?: DuckDBDecimalFormatOptions;
}

/*
 * Get the decimal separator for a given locale.
 * Somewhat expensive, so use getCachedDecimalSeparator if you need to call this multiple times.
 */
function getDecimalSeparator(locales?: string | string[]): string {
  const decimalSeparator =
    new Intl.NumberFormat(locales, { useGrouping: false })
      .formatToParts(0.1)
      .find((part) => part.type === 'decimal')?.value ?? '.';
  return decimalSeparator;
}

// Cache of decimal separators, keyed by the serialized locale(s).
const cachedDecimalSeparators: { [localeKey: string]: string } = {};

/*
 * Get the decimal separator for a given locale, and cache the result.
 */
function getCachedDecimalSeparator(locales?: string | string[]): string {
  const cacheKey = JSON.stringify(locales);
  if (cacheKey in cachedDecimalSeparators) {
    return cachedDecimalSeparators[cacheKey];
  }
  const decimalSeparator = getDecimalSeparator(locales);
  cachedDecimalSeparators[cacheKey] = decimalSeparator;
  return decimalSeparator;
}

// Helper function to format the whole part of a decimal value.
// Note that we explicitly omit 'minimumFractionDigits' and 'maximumFractionDigits' from the options
// passed to toLocaleString, because they are only relevant for the fractional part of the number, and
// would result in formatting the whole part as a real number, which we don't want.
function formatWholePart(
  localeOptions: LocaleOptions | undefined,
  val: bigint,
): string {
  if (localeOptions) {
    const {
      minimumFractionDigits: _minFD,
      maximumFractionDigits: _maxFD,
      ...restOptions
    } = localeOptions.options ?? {};
    return val.toLocaleString(localeOptions?.locales, restOptions);
  }
  return String(val);
}

// Format the fractional part of a decimal value.
// Note that we must handle minimumFractionDigits and maximumFractionDigits ourselves, and that
// we don't apply `useGrouping` because that only applies to the whole part of the number.
function formatFractionalPart(
  localeOptions: LocaleOptions | undefined,
  val: bigint,
  scale: number,
): string {
  const fractionalPartStr = String(val).padStart(scale, '0');
  if (!localeOptions) {
    return fractionalPartStr;
  }
  const minFracDigits = localeOptions?.options?.minimumFractionDigits ?? 0;
  const maxFracDigits = localeOptions?.options?.maximumFractionDigits ?? 20;

  return fractionalPartStr.padEnd(minFracDigits, '0').slice(0, maxFracDigits);
}

/**
 * Convert a scaled decimal value to a string, possibly using locale-specific formatting.
 */
export function stringFromDecimal(
  scaledValue: bigint,
  scale: number,
  localeOptions?: LocaleOptions,
): string {
  // Decimal values are represented as integers that have been scaled up by a power of ten. The `scale` property of
  // the type is the exponent of the scale factor. For a scale greater than zero, we need to separate out the
  // fractional part by reversing this scaling.
  if (scale > 0) {
    const scaleFactor = BigInt(10) ** BigInt(scale);
    const absScaledValue = scaledValue < 0 ? -scaledValue : scaledValue;

    const prefix = scaledValue < 0 ? '-' : '';

    const wholePartNum = absScaledValue / scaleFactor;
    const wholePartStr = formatWholePart(localeOptions, wholePartNum);

    const fractionalPartNum = absScaledValue % scaleFactor;
    const fractionalPartStr = formatFractionalPart(
      localeOptions,
      fractionalPartNum,
      scale,
    );

    const decimalSeparatorStr = localeOptions
      ? getCachedDecimalSeparator(localeOptions.locales)
      : '.';

    return `${prefix}${wholePartStr}${decimalSeparatorStr}${fractionalPartStr}`;
  }
  // For a scale of zero, there is no fractional part, so a direct string conversion works.
  if (localeOptions) {
    return scaledValue.toLocaleString(
      localeOptions?.locales,
      localeOptions?.options as BigIntToLocaleStringOptions | undefined,
    );
  }
  return String(scaledValue);
}
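For example (the first two expected strings match the DuckDBDecimalValue tests later in this commit, which presumably delegate to this function):

stringFromDecimal(12345n, 3); // => '12.345'
stringFromDecimal(-34500n, 7); // => '-0.0034500'
stringFromDecimal(9876543210n, 0, { locales: 'de-DE' }); // => '9.876.543.210'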
25
ts/pkgs/duckdb-data-values/src/index.ts
Normal file
@@ -0,0 +1,25 @@
export { getVarIntFromBytes } from './conversion/getVarIntFromBytes.js';
export { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
export { DuckDBArrayValue } from './DuckDBArrayValue.js';
export { DuckDBBitValue } from './DuckDBBitValue.js';
export { DuckDBBlobValue } from './DuckDBBlobValue.js';
export { DuckDBDateValue } from './DuckDBDateValue.js';
export { DuckDBDecimalValue } from './DuckDBDecimalValue.js';
export { DuckDBIntervalValue } from './DuckDBIntervalValue.js';
export { DuckDBListValue } from './DuckDBListValue.js';
export { DuckDBMapEntry } from './DuckDBMapEntry.js';
export { DuckDBMapValue } from './DuckDBMapValue.js';
export { DuckDBStructEntry } from './DuckDBStructEntry.js';
export { DuckDBStructValue } from './DuckDBStructValue.js';
export { DuckDBTimestampMicrosecondsValue } from './DuckDBTimestampMicrosecondsValue.js';
export { DuckDBTimestampMillisecondsValue } from './DuckDBTimestampMillisecondsValue.js';
export { DuckDBTimestampNanosecondsValue } from './DuckDBTimestampNanosecondsValue.js';
export { DuckDBTimestampSecondsValue } from './DuckDBTimestampSecondsValue.js';
export { DuckDBTimestampTZValue } from './DuckDBTimestampTZValue.js';
export { DuckDBTimeTZValue } from './DuckDBTimeTZValue.js';
export { DuckDBTimeValue } from './DuckDBTimeValue.js';
export { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
export { DuckDBUUIDValue } from './DuckDBUUIDValue.js';
export { DuckDBValue } from './DuckDBValue.js';
export { Json } from './Json.js';
export { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
6
ts/pkgs/duckdb-data-values/src/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.library.json",
  "compilerOptions": {
    "outDir": "../out"
  }
}
49
ts/pkgs/duckdb-data-values/test/DuckDBArrayValue.test.ts
Normal file
@@ -0,0 +1,49 @@
import { expect, suite, test } from 'vitest';
import { DuckDBArrayValue } from '../src/DuckDBArrayValue';
import { DuckDBMapValue } from '../src/DuckDBMapValue';

suite('DuckDBArrayValue', () => {
  test('should render an empty array to the correct string', () => {
    expect(new DuckDBArrayValue([]).toString()).toStrictEqual('[]');
  });
  test('should render a single element array to the correct string', () => {
    expect(new DuckDBArrayValue([123]).toString()).toStrictEqual('[123]');
  });
  test('should render a multi-element array to the correct string', () => {
    expect(
      new DuckDBArrayValue(['abc', null, true, '']).toString(),
    ).toStrictEqual(`['abc', NULL, true, '']`);
  });
  test('should render an array with nested arrays to the correct string', () => {
    expect(
      new DuckDBArrayValue([
        new DuckDBArrayValue([]),
        null,
        new DuckDBArrayValue([123, null, 'xyz']),
      ]).toString(),
    ).toStrictEqual(`[[], NULL, [123, NULL, 'xyz']]`);
  });
  test('toJson array with basic values', () => {
    expect(new DuckDBArrayValue([123, 'abc', null]).toJson()).toStrictEqual([
      123,
      'abc',
      null,
    ]);
  });
  test('toJson array with complex values', () => {
    expect(
      new DuckDBArrayValue([
        new DuckDBMapValue([
          { key: 'foo', value: 123 },
          { key: 'bar', value: 'abc' },
        ]),
        new DuckDBArrayValue([123, null, 'xyz']),
        null,
      ]).toJson(),
    ).toStrictEqual([
      { "'foo'": 123, "'bar'": 'abc' },
      [123, null, 'xyz'],
      null,
    ]);
  });
});
33
ts/pkgs/duckdb-data-values/test/DuckDBBitValue.test.ts
Normal file
@@ -0,0 +1,33 @@
import { expect, suite, test } from 'vitest';
import { DuckDBBitValue } from '../src/DuckDBBitValue';

suite('DuckDBBitValue', () => {
  test('should render an empty byte array to the correct string', () => {
    expect(new DuckDBBitValue(new Uint8Array([])).toString()).toStrictEqual('');
  });
  test('should render bit string with no padding to the correct string', () => {
    expect(
      new DuckDBBitValue(new Uint8Array([0x00, 0xf1, 0xe2, 0xd3])).toString(),
    ).toStrictEqual('111100011110001011010011');
  });
  test('should render bit string with padding to the correct string', () => {
    expect(
      new DuckDBBitValue(new Uint8Array([0x03, 0xf1, 0xe2, 0xd3])).toString(),
    ).toStrictEqual('100011110001011010011');
  });
  test('should round-trip bit string with no padding', () => {
    expect(
      DuckDBBitValue.fromString('111100011110001011010011').toString(),
    ).toStrictEqual('111100011110001011010011');
  });
  test('should round-trip bit string with padding', () => {
    expect(
      DuckDBBitValue.fromString('100011110001011010011').toString(),
    ).toStrictEqual('100011110001011010011');
  });
  test('toJson', () => {
    expect(
      DuckDBBitValue.fromString('100011110001011010011').toJson(),
    ).toStrictEqual('100011110001011010011');
  });
});
92
ts/pkgs/duckdb-data-values/test/DuckDBBlobValue.test.ts
Normal file
@@ -0,0 +1,92 @@
import { expect, suite, test } from 'vitest';
import { DuckDBBlobValue } from '../src/DuckDBBlobValue';

suite('DuckDBBlobValue', () => {
  test('should render an empty byte array to the correct string', () => {
    expect(new DuckDBBlobValue(new Uint8Array([])).toString()).toStrictEqual(
      '',
    );
  });
  test('should render a byte array to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([0x41, 0x42, 0x43, 0x31, 0x32, 0x33]),
      ).toString(),
    ).toStrictEqual('ABC123');
  });
  test('should render a byte array containing single-digit non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
          0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F',
    );
  });
  test('should render a byte array containing double-digit non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a,
          0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F',
    );
  });
  test('should render a byte array containing min printables (including single and double quotes) to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a,
          0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
        ]),
      ).toString(),
    ).toStrictEqual(' !\\x22#$%&\\x27()*+,-./');
  });
  test('should render a byte array containing max printables (including delete) to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a,
          0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
        ]),
      ).toString(),
    ).toStrictEqual('pqrstuvwxyz{|}~\\x7F');
  });
  test('should render a byte array containing high non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a,
          0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F',
    );
  });
  test('should render a byte array containing max non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa,
          0xfb, 0xfc, 0xfd, 0xfe, 0xff,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\xF0\\xF1\\xF2\\xF3\\xF4\\xF5\\xF6\\xF7\\xF8\\xF9\\xFA\\xFB\\xFC\\xFD\\xFE\\xFF',
    );
  });
  test('toJson', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([0x41, 0x42, 0x43, 0x31, 0x32, 0x33]),
      ).toJson(),
    ).toStrictEqual('ABC123');
  });
});
18
ts/pkgs/duckdb-data-values/test/DuckDBDateValue.test.ts
Normal file
@@ -0,0 +1,18 @@
import { expect, suite, test } from 'vitest';
import { DuckDBDateValue } from '../src/DuckDBDateValue';

suite('DuckDBDateValue', () => {
  test('should render a normal date value to the correct string', () => {
    expect(new DuckDBDateValue(19643).toString()).toStrictEqual('2023-10-13');
  });
  test('should render the max date value to the correct string', () => {
    expect(new DuckDBDateValue(2 ** 31 - 2).toString()).toStrictEqual(
      '5881580-07-10',
    );
  });
  test('should render the min date value to the correct string', () => {
    expect(new DuckDBDateValue(-(2 ** 31) + 2).toString()).toStrictEqual(
      '5877642-06-25 (BC)',
    );
  });
});
150
ts/pkgs/duckdb-data-values/test/DuckDBDecimalValue.test.ts
Normal file
@@ -0,0 +1,150 @@
import { expect, suite, test } from 'vitest';
import { DuckDBDecimalValue } from '../src/DuckDBDecimalValue';

suite('DuckDBDecimalValue', () => {
  test('should render a scaled value of zero with a scale of zero to the correct string', () => {
    expect(new DuckDBDecimalValue(0n, 0).toString()).toStrictEqual('0');
  });
  test('should render a small positive scaled value with a scale of zero to the correct string', () => {
    expect(new DuckDBDecimalValue(7n, 0).toString()).toStrictEqual('7');
  });
  test('should render a small negative scaled value with a scale of zero to the correct string', () => {
    expect(new DuckDBDecimalValue(-7n, 0).toString()).toStrictEqual('-7');
  });
  test('should render a large positive scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(987654321098765432109876543210n, 0).toString(),
    ).toStrictEqual('987654321098765432109876543210');
  });
  test('should render a large negative scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(-987654321098765432109876543210n, 0).toString(),
    ).toStrictEqual('-987654321098765432109876543210');
  });
  test('should render the maximum positive scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        99999999999999999999999999999999999999n,
        0,
      ).toString(),
    ).toStrictEqual('99999999999999999999999999999999999999');
  });
  test('should render the maximum negative scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        -99999999999999999999999999999999999999n,
        0,
      ).toString(),
    ).toStrictEqual('-99999999999999999999999999999999999999');
  });

  test('should render a scaled value of zero with a non-zero scale to the correct string', () => {
    expect(new DuckDBDecimalValue(0n, 3).toString()).toStrictEqual('0.000');
  });
  test('should render a small positive scaled value with a non-zero scale to the correct string', () => {
    expect(new DuckDBDecimalValue(12345n, 3).toString()).toStrictEqual(
      '12.345',
    );
  });
  test('should render a small negative scaled value with a non-zero scale to the correct string', () => {
    expect(new DuckDBDecimalValue(-12345n, 3).toString()).toStrictEqual(
      '-12.345',
    );
  });
  test('should render a large positive scaled value with a non-zero scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(987654321098765432109876543210n, 10).toString(),
    ).toStrictEqual('98765432109876543210.9876543210');
  });
  test('should render a large negative scaled value with a non-zero scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(-987654321098765432109876543210n, 10).toString(),
    ).toStrictEqual('-98765432109876543210.9876543210');
  });
  test('should render leading and trailing zeros in the fractional part of a value greater than one correctly', () => {
    expect(new DuckDBDecimalValue(120034500n, 7).toString()).toStrictEqual(
      '12.0034500',
    );
  });
  test('should render leading and trailing zeros in the fractional part of a value less than negative one correctly', () => {
    expect(new DuckDBDecimalValue(-120034500n, 7).toString()).toStrictEqual(
      '-12.0034500',
    );
  });
  test('should render leading and trailing zeros in the fractional part of a value between zero and one correctly', () => {
    expect(new DuckDBDecimalValue(34500n, 7).toString()).toStrictEqual(
      '0.0034500',
    );
  });
  test('should render leading and trailing zeros in the fractional part of a value between zero and negative one correctly', () => {
    expect(new DuckDBDecimalValue(-34500n, 7).toString()).toStrictEqual(
      '-0.0034500',
    );
  });
  test('should render a small positive scaled value with the maximum scale to the correct string', () => {
    expect(new DuckDBDecimalValue(1n, 38).toString()).toStrictEqual(
      '0.00000000000000000000000000000000000001',
    );
  });
  test('should render a small negative scaled value with the maximum scale to the correct string', () => {
    expect(new DuckDBDecimalValue(-1n, 38).toString()).toStrictEqual(
      '-0.00000000000000000000000000000000000001',
    );
  });
  test('should render the maximum positive scaled value with the maximum scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        99999999999999999999999999999999999999n,
        38,
      ).toString(),
    ).toStrictEqual('0.99999999999999999999999999999999999999');
  });
  test('should render the maximum negative scaled value with the maximum scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        -99999999999999999999999999999999999999n,
        38,
      ).toString(),
    ).toStrictEqual('-0.99999999999999999999999999999999999999');
  });

  test('should render a locale string with grouping by default', () => {
    expect(
      new DuckDBDecimalValue(9876543210n, 0).toLocaleString(),
    ).toStrictEqual('9,876,543,210');
  });

  test('should render a European locale with . for grouping', () => {
    expect(
      new DuckDBDecimalValue(9876543210n, 0).toLocaleString('de-DE'),
    ).toStrictEqual('9.876.543.210');
  });

  test('should render a locale string with a specified minimum fraction digits', () => {
    expect(
      new DuckDBDecimalValue(12345n, 3).toLocaleString(undefined, {
        minimumFractionDigits: 5,
      }),
    ).toStrictEqual('12.34500');
  });

  test('should render a locale string with a specified maximum fraction digits', () => {
    expect(
      new DuckDBDecimalValue(12345n, 3).toLocaleString(undefined, {
        maximumFractionDigits: 1,
      }),
    ).toStrictEqual('12.3');
  });

  test('should render a decimal with a large whole part and fractional part in a European locale with the correct grouping and decimal', () => {
    expect(
      new DuckDBDecimalValue(98765432109876543210n, 10).toLocaleString(
        'de-DE',
        {
          useGrouping: true,
          maximumFractionDigits: 5,
        },
      ),
    ).toStrictEqual('9.876.543.210,98765');
  });
});
219
ts/pkgs/duckdb-data-values/test/DuckDBIntervalValue.test.ts
Normal file
@@ -0,0 +1,219 @@
import { expect, suite, test } from 'vitest';
import { DuckDBIntervalValue } from '../src/DuckDBIntervalValue';

const MICROS_IN_SEC = 1000000n;
const MICROS_IN_MIN = 60n * MICROS_IN_SEC;
const MICROS_IN_HR = 60n * MICROS_IN_MIN;
const MAX_INT32 = 2n ** 31n - 1n;

suite('DuckDBIntervalValue', () => {
  test('should render an empty interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 0, 0n).toString()).toStrictEqual(
      '00:00:00',
    );
  });

  test('should render a one month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(1, 0, 0n).toString()).toStrictEqual(
      '1 month',
    );
  });
  test('should render a negative one month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(-1, 0, 0n).toString()).toStrictEqual(
      '-1 months',
    );
  });
  test('should render a two month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(2, 0, 0n).toString()).toStrictEqual(
      '2 months',
    );
  });
  test('should render a negative two month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(-2, 0, 0n).toString()).toStrictEqual(
      '-2 months',
    );
  });
  test('should render a one year interval to the correct string', () => {
    expect(new DuckDBIntervalValue(12, 0, 0n).toString()).toStrictEqual(
      '1 year',
    );
  });
  test('should render a negative one year interval to the correct string', () => {
    expect(new DuckDBIntervalValue(-12, 0, 0n).toString()).toStrictEqual(
      '-1 years',
    );
  });
  test('should render a two year interval to the correct string', () => {
    expect(new DuckDBIntervalValue(24, 0, 0n).toString()).toStrictEqual(
      '2 years',
    );
  });
  test('should render a negative two year interval to the correct string', () => {
    expect(new DuckDBIntervalValue(-24, 0, 0n).toString()).toStrictEqual(
      '-2 years',
    );
  });
  test('should render a two year, three month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(24 + 3, 0, 0n).toString()).toStrictEqual(
      '2 years 3 months',
    );
  });
  test('should render a negative two year, three month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(-(24 + 3), 0, 0n).toString()).toStrictEqual(
      '-2 years -3 months',
    );
  });

  test('should render a one day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 1, 0n).toString()).toStrictEqual('1 day');
  });
  test('should render a negative one day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, -1, 0n).toString()).toStrictEqual(
      '-1 days',
    );
  });
  test('should render a two day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 2, 0n).toString()).toStrictEqual(
      '2 days',
    );
  });
  test('should render a negative two day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, -2, 0n).toString()).toStrictEqual(
      '-2 days',
    );
  });
  test('should render a 30 day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 30, 0n).toString()).toStrictEqual(
      '30 days',
    );
  });
  test('should render a 365 day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 365, 0n).toString()).toStrictEqual(
      '365 days',
    );
  });

  test('should render a one microsecond interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 0, 1n).toString()).toStrictEqual(
      '00:00:00.000001',
    );
  });
  test('should render a negative one microsecond interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 0, -1n).toString()).toStrictEqual(
      '-00:00:00.000001',
    );
  });
  test('should render a large microsecond interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 0, 987654n).toString()).toStrictEqual(
      '00:00:00.987654',
    );
  });
  test('should render a large negative microsecond interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 0, -987654n).toString()).toStrictEqual(
      '-00:00:00.987654',
    );
  });
  test('should render a one second interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, MICROS_IN_SEC).toString(),
    ).toStrictEqual('00:00:01');
  });
  test('should render a negative one second interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -MICROS_IN_SEC).toString(),
    ).toStrictEqual('-00:00:01');
  });
  test('should render a 59 second interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, 59n * MICROS_IN_SEC).toString(),
    ).toStrictEqual('00:00:59');
  });
  test('should render a -59 second interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -59n * MICROS_IN_SEC).toString(),
    ).toStrictEqual('-00:00:59');
  });
  test('should render a one minute interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, MICROS_IN_MIN).toString(),
    ).toStrictEqual('00:01:00');
  });
  test('should render a negative one minute interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -MICROS_IN_MIN).toString(),
    ).toStrictEqual('-00:01:00');
  });
  test('should render a 59 minute interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, 59n * MICROS_IN_MIN).toString(),
    ).toStrictEqual('00:59:00');
  });
  test('should render a -59 minute interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -59n * MICROS_IN_MIN).toString(),
    ).toStrictEqual('-00:59:00');
  });
  test('should render a one hour interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, MICROS_IN_HR).toString(),
    ).toStrictEqual('01:00:00');
  });
  test('should render a negative one hour interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -MICROS_IN_HR).toString(),
    ).toStrictEqual('-01:00:00');
  });
  test('should render a 24 hour interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, 24n * MICROS_IN_HR).toString(),
    ).toStrictEqual('24:00:00');
  });
  test('should render a -24 hour interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -24n * MICROS_IN_HR).toString(),
    ).toStrictEqual('-24:00:00');
  });
  test('should render a very large interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, MAX_INT32 * MICROS_IN_HR).toString(),
    ).toStrictEqual('2147483647:00:00');
  });
  test('should render a very large negative interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -MAX_INT32 * MICROS_IN_HR).toString(),
    ).toStrictEqual('-2147483647:00:00');
  });
  test('should render a very large interval with microseconds to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, MAX_INT32 * MICROS_IN_HR + 1n).toString(),
    ).toStrictEqual('2147483647:00:00.000001');
  });
  test('should render a very large negative interval with microseconds to the correct string', () => {
    expect(
      new DuckDBIntervalValue(
        0,
        0,
        -(MAX_INT32 * MICROS_IN_HR + 1n),
      ).toString(),
    ).toStrictEqual('-2147483647:00:00.000001');
  });

  test('should render an interval with multiple parts to the correct string', () => {
    expect(
      new DuckDBIntervalValue(
        24 + 3,
        5,
        7n * MICROS_IN_HR + 11n * MICROS_IN_MIN + 13n * MICROS_IN_SEC + 17n,
      ).toString(),
    ).toStrictEqual('2 years 3 months 5 days 07:11:13.000017');
  });
  test('should render a negative interval with multiple parts to the correct string', () => {
    expect(
      new DuckDBIntervalValue(
        -(24 + 3),
        -5,
        -(7n * MICROS_IN_HR + 11n * MICROS_IN_MIN + 13n * MICROS_IN_SEC + 17n),
      ).toString(),
    ).toStrictEqual('-2 years -3 months -5 days -07:11:13.000017');
  });
});
45
ts/pkgs/duckdb-data-values/test/DuckDBListValue.test.ts
Normal file
@@ -0,0 +1,45 @@
import { expect, suite, test } from 'vitest';
import { DuckDBMapValue } from '../src';
import { DuckDBListValue } from '../src/DuckDBListValue';

suite('DuckDBListValue', () => {
  test('should render an empty list to the correct string', () => {
    expect(new DuckDBListValue([]).toString()).toStrictEqual('[]');
  });
  test('should render a single element list to the correct string', () => {
    expect(new DuckDBListValue([123]).toString()).toStrictEqual('[123]');
  });
  test('should render a multi-element list to the correct string', () => {
    expect(
      new DuckDBListValue(['abc', null, true, '']).toString(),
    ).toStrictEqual(`['abc', NULL, true, '']`);
  });
  test('should render a list with nested lists to the correct string', () => {
    expect(
      new DuckDBListValue([
        new DuckDBListValue([]),
        null,
        new DuckDBListValue([123, null, 'xyz']),
      ]).toString(),
    ).toStrictEqual(`[[], NULL, [123, NULL, 'xyz']]`);
  });
  test('toJson with complex values', () => {
    expect(
      new DuckDBListValue([
        new DuckDBMapValue([
          { key: 'foo', value: 123 },
          { key: 'bar', value: 'abc' },
        ]),
        null,
        new DuckDBMapValue([
          { key: 'foo', value: null },
          { key: 'bar', value: 'xyz' },
        ]),
      ]).toJson(),
    ).toStrictEqual([
      { "'foo'": 123, "'bar'": 'abc' },
      null,
      { "'foo'": null, "'bar'": 'xyz' },
    ]);
  });
});
77
ts/pkgs/duckdb-data-values/test/DuckDBMapValue.test.ts
Normal file
@@ -0,0 +1,77 @@
import { expect, suite, test } from 'vitest';
import { DuckDBListValue } from '../src/DuckDBListValue';
import { DuckDBMapValue } from '../src/DuckDBMapValue';

suite('DuckDBMapValue', () => {
  test('should render an empty map to the correct string', () => {
    expect(new DuckDBMapValue([]).toString()).toStrictEqual('{}');
  });
  test('should render a single-entry map to the correct string', () => {
    expect(
      new DuckDBMapValue([{ key: 'x', value: 1 }]).toString(),
    ).toStrictEqual(`{'x': 1}`);
  });
  test('should render a multi-entry map to the correct string', () => {
    expect(
      new DuckDBMapValue([
        { key: 1, value: 42.001 },
        { key: 5, value: -32.1 },
        { key: 3, value: null },
      ]).toString(),
    ).toStrictEqual(`{1: 42.001, 5: -32.1, 3: NULL}`);
  });
  test('should render a multi-entry map with complex key types to the correct string', () => {
    expect(
      new DuckDBMapValue([
        {
          key: new DuckDBListValue(['a', 'b']),
          value: new DuckDBListValue([1.1, 2.2]),
        },
        {
          key: new DuckDBListValue(['c', 'd']),
          value: new DuckDBListValue([3.3, 4.4]),
        },
      ]).toString(),
    ).toStrictEqual(`{['a', 'b']: [1.1, 2.2], ['c', 'd']: [3.3, 4.4]}`);
  });
  test('should render a map with nested maps to the correct string', () => {
    expect(
      new DuckDBMapValue([
        { key: new DuckDBMapValue([]), value: new DuckDBMapValue([]) },
        {
          key: new DuckDBMapValue([{ key: 'key1', value: 'value1' }]),
          value: new DuckDBMapValue([
            { key: 1, value: 42.001 },
            { key: 5, value: -32.1 },
            { key: 3, value: null },
          ]),
        },
      ]).toString(),
    ).toStrictEqual(
      `{{}: {}, {'key1': 'value1'}: {1: 42.001, 5: -32.1, 3: NULL}}`,
    );
  });
  test('toJson basics', () => {
    expect(
      new DuckDBMapValue([
        { key: 'a', value: 1 },
        { key: 'b', value: 2 },
        { key: 'c', value: 3 },
      ]).toJson(),
    ).toStrictEqual({ "'a'": 1, "'b'": 2, "'c'": 3 });
  });
  test('toJson with complex keys and values', () => {
    expect(
      new DuckDBMapValue([
        {
          key: new DuckDBListValue(['a', 'b']),
          value: new DuckDBListValue([1.1, 2.2]),
        },
        {
          key: new DuckDBListValue(['c', 'd']),
          value: new DuckDBListValue([3.3, 4.4]),
        },
      ]).toJson(),
    ).toStrictEqual({ "['a', 'b']": [1.1, 2.2], "['c', 'd']": [3.3, 4.4] });
  });
});
110
ts/pkgs/duckdb-data-values/test/DuckDBStructValue.test.ts
Normal file
@@ -0,0 +1,110 @@
import { expect, suite, test } from 'vitest';
import { DuckDBMapValue } from '../src/DuckDBMapValue';
import { DuckDBStructValue } from '../src/DuckDBStructValue';

suite('DuckDBStructValue', () => {
  test('should render an empty struct to the correct string', () => {
    expect(new DuckDBStructValue([]).toString()).toStrictEqual('{}');
  });
  test('should render a single-entry struct to the correct string', () => {
    expect(
      new DuckDBStructValue([{ key: 'x', value: 1 }]).toString(),
    ).toStrictEqual(`{'x': 1}`);
  });
  test('should render a multi-entry struct to the correct string', () => {
    expect(
      new DuckDBStructValue([
        { key: 'x', value: 1 },
        { key: 'y', value: 2 },
        { key: 'z', value: 3 },
      ]).toString(),
    ).toStrictEqual(`{'x': 1, 'y': 2, 'z': 3}`);
  });
  test('should render a multi-entry struct with different value types to the correct string', () => {
    expect(
      new DuckDBStructValue([
        { key: 'key1', value: 'string' },
        { key: 'key2', value: 1 },
        { key: 'key3', value: 12.345 },
        { key: 'key0', value: null },
      ]).toString(),
    ).toStrictEqual(
      `{'key1': 'string', 'key2': 1, 'key3': 12.345, 'key0': NULL}`,
    );
  });
  test('should render a multi-entry struct with empty keys to the correct string', () => {
    expect(
      new DuckDBStructValue([
        { key: '', value: 2 },
        { key: '', value: 1 },
        { key: '', value: 3 },
      ]).toString(),
    ).toStrictEqual(`{'': 2, '': 1, '': 3}`);
  });
  test('should render a struct with nested structs to the correct string', () => {
    expect(
      new DuckDBStructValue([
        { key: 'empty_struct', value: new DuckDBStructValue([]) },
        {
          key: 'struct',
          value: new DuckDBStructValue([
            { key: 'key1', value: 'string' },
            { key: 'key2', value: 1 },
            { key: 'key3', value: 12.345 },
          ]),
        },
      ]).toString(),
    ).toStrictEqual(
      `{'empty_struct': {}, 'struct': {'key1': 'string', 'key2': 1, 'key3': 12.345}}`,
    );
  });
  test('toJson with simple keys and values', () => {
    expect(
      new DuckDBStructValue([
        { key: 'x', value: 1 },
        { key: 'y', value: 2 },
        { key: 'z', value: 3 },
      ]).toJson(),
    ).toStrictEqual({ "'x'": 1, "'y'": 2, "'z'": 3 });
  });
  test('toJson with nested struct values', () => {
    expect(
      new DuckDBStructValue([
        { key: 'empty_struct', value: new DuckDBStructValue([]) },
        {
          key: 'struct',
          value: new DuckDBStructValue([
            { key: 'key1', value: 'string' },
            { key: 'key2', value: 1 },
            { key: 'key3', value: 12.345 },
          ]),
        },
      ]).toJson(),
    ).toStrictEqual({
      "'empty_struct'": {},
      "'struct'": { "'key1'": 'string', "'key2'": 1, "'key3'": 12.345 },
    });
  });
  test('toJson with nested complex values', () => {
    expect(
      new DuckDBStructValue([
        { key: 'empty_struct', value: new DuckDBStructValue([]) },
        {
          key: 'struct',
          value: new DuckDBStructValue([
            {
              key: 'key1',
              value: new DuckDBMapValue([
                { key: 'foo', value: null },
                { key: 'bar', value: 'xyz' },
              ]),
            },
          ]),
        },
      ]).toJson(),
    ).toStrictEqual({
      "'empty_struct'": {},
      "'struct'": { "'key1'": { "'foo'": null, "'bar'": 'xyz' } },
    });
  });
});
60
ts/pkgs/duckdb-data-values/test/DuckDBTimeTZValue.test.ts
Normal file
@@ -0,0 +1,60 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimeTZValue } from '../src/DuckDBTimeTZValue';

suite('DuckDBTimeTZValue', () => {
  test('should render a normal time value with a positive offset to the correct string', () => {
    expect(
      new DuckDBTimeTZValue(
        ((12n * 60n + 34n) * 60n + 56n) * 1000000n + 789012n,
        (13 * 60 + 24) * 60 + 57,
      ).toString(),
    ).toStrictEqual('12:34:56.789012+13:24:57');
  });
  test('should render a normal time value with millisecond precision with an offset in minutes to the correct string', () => {
    expect(
      new DuckDBTimeTZValue(
        ((12n * 60n + 34n) * 60n + 56n) * 1000000n + 789000n,
        (13 * 60 + 24) * 60,
      ).toString(),
    ).toStrictEqual('12:34:56.789+13:24');
  });
  test('should render a normal time value with second precision with an offset in hours to the correct string', () => {
    expect(
      new DuckDBTimeTZValue(
        ((12n * 60n + 34n) * 60n + 56n) * 1000000n,
        (13 * 60 + 0) * 60,
      ).toString(),
    ).toStrictEqual('12:34:56+13');
  });
  test('should render a zero time value with a zero offset to the correct string', () => {
    expect(new DuckDBTimeTZValue(0n, 0).toString()).toStrictEqual(
      '00:00:00+00',
    );
  });
  test('should render the max value to the correct string', () => {
    expect(
      new DuckDBTimeTZValue(
        ((24n * 60n + 0n) * 60n + 0n) * 1000000n,
        -((15 * 60 + 59) * 60 + 59),
      ).toString(),
    ).toStrictEqual('24:00:00-15:59:59');
  });
  test('should render the min value to the correct string', () => {
    expect(
      new DuckDBTimeTZValue(0n, (15 * 60 + 59) * 60 + 59).toString(),
    ).toStrictEqual('00:00:00+15:59:59');
  });
  test('should construct the min value from bits', () => {
    expect(DuckDBTimeTZValue.fromBits(0n).toString()).toStrictEqual(
      '00:00:00+15:59:59',
    );
  });
  test('should construct the max value from bits', () => {
    expect(
      DuckDBTimeTZValue.fromBits(
        (BigInt.asUintN(40, ((24n * 60n + 0n) * 60n + 0n) * 1000000n) << 24n) |
          BigInt.asUintN(24, (31n * 60n + 59n) * 60n + 58n),
      ).toString(),
    ).toStrictEqual('24:00:00-15:59:59');
  });
});
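The two `fromBits` tests above pin down the TIMETZ bit packing. A minimal decoding sketch, assuming (by inference from these test vectors, not from a normative spec) that the upper 40 bits hold microseconds since midnight and the lower 24 bits hold MAX_OFFSET minus the offset in seconds, so that bit ordering matches chronological ordering:

```ts
const MAX_OFFSET_SECONDS = (15 * 60 + 59) * 60 + 59; // 15:59:59

function decodeTimeTZBits(bits: bigint): {
  micros: bigint;
  offsetSeconds: number;
} {
  const micros = BigInt.asUintN(40, bits >> 24n); // upper 40 bits
  const storedOffset = Number(BigInt.asUintN(24, bits)); // lower 24 bits
  return { micros, offsetSeconds: MAX_OFFSET_SECONDS - storedOffset };
}

decodeTimeTZBits(0n); // { micros: 0n, offsetSeconds: 57599 }, i.e. 00:00:00+15:59:59
```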
18
ts/pkgs/duckdb-data-values/test/DuckDBTimeValue.test.ts
Normal file
@@ -0,0 +1,18 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimeValue } from '../src/DuckDBTimeValue';

suite('DuckDBTimeValue', () => {
  test('should render a normal time value to the correct string', () => {
    expect(new DuckDBTimeValue(45296000000n).toString()).toStrictEqual(
      '12:34:56',
    );
  });
  test('should render the max time value to the correct string', () => {
    expect(new DuckDBTimeValue(86399999999n).toString()).toStrictEqual(
      '23:59:59.999999',
    );
  });
  test('should render the min time value to the correct string', () => {
    expect(new DuckDBTimeValue(0n).toString()).toStrictEqual('00:00:00');
  });
});
@@ -0,0 +1,55 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimestampMicrosecondsValue } from '../src/DuckDBTimestampMicrosecondsValue';

suite('DuckDBTimestampMicrosecondsValue', () => {
  test('should render a normal timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(1612325106007800n).toString(),
    ).toStrictEqual('2021-02-03 04:05:06.0078');
  });
  test('should render a zero timestamp value to the correct string', () => {
    expect(new DuckDBTimestampMicrosecondsValue(0n).toString()).toStrictEqual(
      '1970-01-01 00:00:00',
    );
  });
  test('should render a negative timestamp value to the correct string', () => {
    expect(new DuckDBTimestampMicrosecondsValue(-7n).toString()).toStrictEqual(
      '1969-12-31 23:59:59.999993',
    );
  });
  test('should render a large positive timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(2353318271999999000n).toString(),
    ).toStrictEqual('76543-09-08 23:59:59.999');
  });
  test('should render a large negative (AD) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(-58261244276543211n).toString(),
    ).toStrictEqual('0123-10-11 01:02:03.456789');
  });
  test('should render a large negative (BC) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(-65992661876543211n).toString(),
    ).toStrictEqual('0123-10-11 (BC) 01:02:03.456789');
  });
  test('should render the max timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(9223372036854775806n).toString(),
    ).toStrictEqual('294247-01-10 04:00:54.775806');
  });
  test('should render the min timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(-9223372022400000000n).toString(),
    ).toStrictEqual('290309-12-22 (BC) 00:00:00');
  });
  test('should render the positive infinity timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(9223372036854775807n).toString(),
    ).toStrictEqual('infinity');
  });
  test('should render the negative infinity timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(-9223372036854775807n).toString(),
    ).toStrictEqual('-infinity');
  });
});
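The last two tests above imply the sentinel scheme for infinite timestamps: int64 max renders as positive infinity and its negation as negative infinity, leaving everything in between as a finite microsecond count. A sketch of that inference (drawn from the test vectors, not a normative definition):

```ts
const POSITIVE_INFINITY_MICROS = 2n ** 63n - 1n; // 9223372036854775807n
const NEGATIVE_INFINITY_MICROS = -(2n ** 63n - 1n);

function isFiniteTimestampMicros(micros: bigint): boolean {
  return (
    micros !== POSITIVE_INFINITY_MICROS && micros !== NEGATIVE_INFINITY_MICROS
  );
}
```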
@@ -0,0 +1,45 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimestampMillisecondsValue } from '../src/DuckDBTimestampMillisecondsValue';

suite('DuckDBTimestampMillisecondsValue', () => {
  test('should render a normal timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(1612325106007n).toString(),
    ).toStrictEqual('2021-02-03 04:05:06.007');
  });
  test('should render a zero timestamp value to the correct string', () => {
    expect(new DuckDBTimestampMillisecondsValue(0n).toString()).toStrictEqual(
      '1970-01-01 00:00:00',
    );
  });
  test('should render a negative timestamp value to the correct string', () => {
    expect(new DuckDBTimestampMillisecondsValue(-7n).toString()).toStrictEqual(
      '1969-12-31 23:59:59.993',
    );
  });
  test('should render a large positive timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(2353318271999999n).toString(),
    ).toStrictEqual('76543-09-08 23:59:59.999');
  });
  test('should render a large negative (AD) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(-58261244276544n).toString(),
    ).toStrictEqual('0123-10-11 01:02:03.456');
  });
  test('should render a large negative (BC) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(-65992661876544n).toString(),
    ).toStrictEqual('0123-10-11 (BC) 01:02:03.456');
  });
  test('should render the max timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(9223372036854775n).toString(),
    ).toStrictEqual('294247-01-10 04:00:54.775');
  });
  test('should render the min timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(-9223372022400000n).toString(),
    ).toStrictEqual('290309-12-22 (BC) 00:00:00');
  });
});
@@ -0,0 +1,40 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimestampNanosecondsValue } from '../src/DuckDBTimestampNanosecondsValue';

suite('DuckDBTimestampNanosecondsValue', () => {
  test('should render a normal timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(1612325106007891000n).toString(),
    ).toStrictEqual('2021-02-03 04:05:06.007891');
  });
  test('should render a zero timestamp value to the correct string', () => {
    expect(new DuckDBTimestampNanosecondsValue(0n).toString()).toStrictEqual(
      '1970-01-01 00:00:00',
    );
  });
  test('should render a negative timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(-7000n).toString(),
    ).toStrictEqual('1969-12-31 23:59:59.999993');
  });
  test('should render a large positive timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(8857641599999123000n).toString(),
    ).toStrictEqual('2250-09-08 23:59:59.999123');
  });
  test('should render a large negative timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(-8495881076543211000n).toString(),
    ).toStrictEqual('1700-10-11 01:02:03.456789');
  });
  test('should render the max timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(9223372036854775806n).toString(),
    ).toStrictEqual('2262-04-11 23:47:16.854775');
  });
  test('should render the min timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(-9223372036854775806n).toString(),
    ).toStrictEqual('1677-09-21 00:12:43.145225');
  });
});
@@ -0,0 +1,45 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimestampSecondsValue } from '../src/DuckDBTimestampSecondsValue';

suite('DuckDBTimestampSecondsValue', () => {
  test('should render a normal timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(1612325106n).toString(),
    ).toStrictEqual('2021-02-03 04:05:06');
  });
  test('should render a zero timestamp value to the correct string', () => {
    expect(new DuckDBTimestampSecondsValue(0n).toString()).toStrictEqual(
      '1970-01-01 00:00:00',
    );
  });
  test('should render a negative timestamp value to the correct string', () => {
    expect(new DuckDBTimestampSecondsValue(-7n).toString()).toStrictEqual(
      '1969-12-31 23:59:53',
    );
  });
  test('should render a large positive timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(2353318271999n).toString(),
    ).toStrictEqual('76543-09-08 23:59:59');
  });
  test('should render a large negative (AD) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(-58261244277n).toString(),
    ).toStrictEqual('0123-10-11 01:02:03');
  });
  test('should render a large negative (BC) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(-65992661877n).toString(),
    ).toStrictEqual('0123-10-11 (BC) 01:02:03');
  });
  test('should render the max timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(9223372036854n).toString(),
    ).toStrictEqual('294247-01-10 04:00:54');
  });
  test('should render the min timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(-9223372022400n).toString(),
    ).toStrictEqual('290309-12-22 (BC) 00:00:00');
  });
});
@@ -0,0 +1,38 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimestampTZValue } from '../src/DuckDBTimestampTZValue';

suite('DuckDBTimestampTZValue', () => {
  test('should render a timestamp tz value with no timezone offset to the correct string', () => {
    expect(
      new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString(),
    ).toStrictEqual('2021-02-03 04:05:06.0078+00'); // defaults to UTC
  });
  test('should render a timestamp tz value with a zero timezone offset to the correct string', () => {
    expect(
      new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
        timezoneOffsetInMinutes: 0,
      }),
    ).toStrictEqual('2021-02-03 04:05:06.0078+00');
  });
  test('should render a timestamp tz value with a positive timezone offset to the correct string', () => {
    expect(
      new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
        timezoneOffsetInMinutes: 300,
      }),
    ).toStrictEqual('2021-02-03 09:05:06.0078+05');
  });
  test('should render a timestamp tz value with a negative timezone offset to the correct string', () => {
    expect(
      new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
        timezoneOffsetInMinutes: -300,
      }),
    ).toStrictEqual('2021-02-02 23:05:06.0078-05');
  });
  test('should render a timestamp tz value with a timezone offset containing minutes to the correct string', () => {
    expect(
      new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
        timezoneOffsetInMinutes: 330,
      }),
    ).toStrictEqual('2021-02-03 09:35:06.0078+05:30');
  });
});
49
ts/pkgs/duckdb-data-values/test/DuckDBUUIDValue.test.ts
Normal file
@@ -0,0 +1,49 @@
import { expect, suite, test } from 'vitest';
import { DuckDBUUIDValue } from '../src/DuckDBUUIDValue';

suite('DuckDBUUIDValue', () => {
  test('should render all zero bytes to the correct string', () => {
    expect(
      new DuckDBUUIDValue(
        new Uint8Array([
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00,
        ]),
      ).toString(),
    ).toStrictEqual('00000000-0000-0000-0000-000000000000');
  });
  test('should render all max bytes to the correct string', () => {
    expect(
      new DuckDBUUIDValue(
        new Uint8Array([
          0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
          0xff, 0xff, 0xff, 0xff, 0xff,
        ]),
      ).toString(),
    ).toStrictEqual('ffffffff-ffff-ffff-ffff-ffffffffffff');
  });
  test('should render arbitrary bytes to the correct string', () => {
    expect(
      new DuckDBUUIDValue(
        new Uint8Array([
          0xf0, 0xe1, 0xd2, 0xc3, 0xb4, 0xa5, 0x96, 0x87, 0xfe, 0xdc, 0xba,
          0x98, 0x76, 0x54, 0x32, 0x10,
        ]),
      ).toString(),
    ).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210');
  });
  test('should render a uint128 to the correct string', () => {
    expect(
      DuckDBUUIDValue.fromUint128(
        0xf0e1d2c3b4a59687fedcba9876543210n,
      ).toString(),
    ).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210');
  });
  test('should render a stored hugeint to the correct string', () => {
    expect(
      DuckDBUUIDValue.fromStoredHugeint(
        0x70e1d2c3b4a59687fedcba9876543210n, // note the flipped MSB
      ).toString(),
    ).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210');
  });
});
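The final test above hints at how DuckDB stores UUIDs: as a signed hugeint with the most significant bit flipped, so that signed hugeint ordering matches unsigned UUID ordering. A minimal sketch of the conversion that the flipped-MSB comment implies:

```ts
function uint128FromStoredHugeint(storedHugeint: bigint): bigint {
  // Reinterpret the signed hugeint as unsigned, then flip the top bit back.
  return BigInt.asUintN(128, storedHugeint) ^ (1n << 127n);
}

uint128FromStoredHugeint(0x70e1d2c3b4a59687fedcba9876543210n).toString(16);
// 'f0e1d2c3b4a59687fedcba9876543210'
```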
@@ -0,0 +1,61 @@
import { expect, suite, test } from 'vitest';
import { getVarIntFromBytes } from '../../src/conversion/getVarIntFromBytes';

suite('getVarIntFromBytes', () => {
  test('should return correct value for varint representation of 0', () => {
    expect(
      getVarIntFromBytes(new Uint8Array([0x80, 0x00, 0x01, 0x00])),
    ).toEqual(0n);
  });
  test('should return correct value for varint representation of 1', () => {
    expect(
      getVarIntFromBytes(new Uint8Array([0x80, 0x00, 0x01, 0x01])),
    ).toEqual(1n);
  });
  test('should return correct value for varint representation of -1', () => {
    expect(
      getVarIntFromBytes(new Uint8Array([0x7f, 0xff, 0xfe, 0xfe])),
    ).toEqual(-1n);
  });
  test('should return correct value for max varint', () => {
    // max VARINT = max IEEE double = 2^1023 * (1 + (1 − 2^−52)) ~= 1.7976931348623157 * 10^308
    // Note that the storage format supports much larger than this, but DuckDB specifies this max to support conversion to/from DOUBLE.
    expect(
      getVarIntFromBytes(
        // prettier-ignore
        new Uint8Array([0x80, 0x00, 0x80,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        ]),
      ),
    ).toEqual(
      179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n,
    );
  });
  test('should return correct value for min varint', () => {
    // min VARINT = -max VARINT
    expect(
      getVarIntFromBytes(
        // prettier-ignore
        new Uint8Array([0x7F, 0xFF, 0x7F,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
        ]),
      ),
    ).toEqual(
      -179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n,
    );
  });
});
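The test vectors above suggest the VARINT wire layout: a 3-byte big-endian header whose top bit is the sign (1 = non-negative), followed by big-endian magnitude bytes, with negative values stored as the bitwise complement of the corresponding positive encoding. A decoding sketch based on that inference (not a normative description of DuckDB's format):

```ts
function decodeVarInt(bytes: Uint8Array): bigint {
  // Top bit clear in the header means the value is negative and all bytes
  // are complemented.
  const negative = (bytes[0] & 0x80) === 0;
  let magnitude = 0n;
  for (let i = 3; i < bytes.length; i++) {
    const b = negative ? ~bytes[i] & 0xff : bytes[i];
    magnitude = (magnitude << 8n) | BigInt(b);
  }
  return negative ? -magnitude : magnitude;
}

decodeVarInt(new Uint8Array([0x7f, 0xff, 0xfe, 0xfe])); // -1n
```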
@@ -0,0 +1,26 @@
import { expect, suite, test } from 'vitest';
import { DuckDBListValue } from '../../src';
import { jsonFromDuckDBValue } from '../../src/conversion/jsonFromDuckDBValue';

suite('jsonFromDuckDBValue', () => {
  test('null', () => {
    expect(jsonFromDuckDBValue(null)).toBe(null);
  });
  test('boolean', () => {
    expect(jsonFromDuckDBValue(true)).toBe(true);
  });
  test('number', () => {
    expect(jsonFromDuckDBValue(42)).toBe(42);
  });
  test('bigint', () => {
    expect(jsonFromDuckDBValue(12345n)).toBe('12345');
  });
  test('string', () => {
    expect(jsonFromDuckDBValue('foo')).toBe('foo');
  });
  test('special', () => {
    expect(jsonFromDuckDBValue(new DuckDBListValue([1, 2, 3]))).toStrictEqual([
      1, 2, 3,
    ]);
  });
});
6
ts/pkgs/duckdb-data-values/test/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.test.json",
  "references": [
    { "path": "../src" }
  ]
}
41
ts/pkgs/duckdb-ui-client/package.json
Normal file
@@ -0,0 +1,41 @@
{
  "name": "@duckdb/ui-client",
  "version": "0.0.1",
  "description": "Client for communicating with the DuckDB UI server",
  "type": "module",
  "main": "./out/index.js",
  "module": "./out/index.js",
  "types": "./out/index.d.ts",
  "scripts": {
    "preinstall": "pnpm build:src",
    "build": "tsc -b src test",
    "build:src": "tsc -b src",
    "build:test": "tsc -b test",
    "build:watch": "tsc -b src test --watch",
    "check": "pnpm format:check && pnpm lint",
    "clean": "rimraf out",
    "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check",
    "format:write": "prettier . --ignore-path $(find-up .prettierignore) --write",
    "lint": "pnpm eslint src test",
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "dependencies": {
    "@duckdb/data-reader": "workspace:*",
    "@duckdb/data-types": "workspace:*",
    "@duckdb/data-values": "workspace:*",
    "core-js": "^3.41.0"
  },
  "devDependencies": {
    "@eslint/js": "^9.24.0",
    "eslint": "^9.24.0",
    "find-up-cli": "^6.0.0",
    "msw": "^2.10.2",
    "prettier": "^3.5.3",
    "rimraf": "^6.0.1",
    "typescript": "^5.8.3",
    "typescript-eslint": "^8.30.1",
    "vite": "^6.2.6",
    "vitest": "^3.1.1"
  }
}
@@ -0,0 +1,72 @@
import { sendDuckDBUIHttpRequest } from '../../http/functions/sendDuckDBUIHttpRequest.js';
import { tokenizeResultFromBuffer } from '../../serialization/functions/tokenizeResultFromBuffer.js';
import type { TokenizeResult } from '../../serialization/types/TokenizeResult.js';
import { DuckDBUIClientConnection } from './DuckDBUIClientConnection.js';

export { DuckDBUIClientConnection };
export type { TokenizeResult };

export class DuckDBUIClient {
  private readonly eventSource: EventSource;

  private defaultConnection: DuckDBUIClientConnection | undefined;

  private constructor() {
    this.eventSource = new EventSource('/localEvents');
  }

  public addOpenEventListener(listener: (event: Event) => void) {
    this.eventSource.addEventListener('open', listener);
  }

  public removeOpenEventListener(listener: (event: Event) => void) {
    this.eventSource.removeEventListener('open', listener);
  }

  public addErrorEventListener(listener: (event: Event) => void) {
    this.eventSource.addEventListener('error', listener);
  }

  public removeErrorEventListener(listener: (event: Event) => void) {
    this.eventSource.removeEventListener('error', listener);
  }

  public addMessageEventListener(
    type: string,
    listener: (event: MessageEvent) => void,
  ) {
    this.eventSource.addEventListener(type, listener);
  }

  public removeMessageEventListener(
    type: string,
    listener: (event: MessageEvent) => void,
  ) {
    this.eventSource.removeEventListener(type, listener);
  }

  public connect() {
    return new DuckDBUIClientConnection();
  }

  public get connection(): DuckDBUIClientConnection {
    if (!this.defaultConnection) {
      this.defaultConnection = this.connect();
    }
    return this.defaultConnection;
  }

  public async tokenize(text: string): Promise<TokenizeResult> {
    const buffer = await sendDuckDBUIHttpRequest('/ddb/tokenize', text);
    return tokenizeResultFromBuffer(buffer);
  }

  private static singletonInstance: DuckDBUIClient;

  public static get singleton(): DuckDBUIClient {
    if (!DuckDBUIClient.singletonInstance) {
      DuckDBUIClient.singletonInstance = new DuckDBUIClient();
    }
    return DuckDBUIClient.singletonInstance;
  }
}
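For orientation, here is a minimal usage sketch of the client API above. The query text is illustrative, and it assumes the code runs in a page served by the DuckDB UI server (so relative URLs like `/ddb/run` resolve):

```ts
import { DuckDBUIClient } from '@duckdb/ui-client';

// The singleton lazily creates one EventSource and one default connection.
const client = DuckDBUIClient.singleton;
const result = await client.connection.run('SELECT 42 AS answer'); // hypothetical query
console.log(`query took ${result.endTimeMs - result.startTimeMs} ms`);
```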
@@ -0,0 +1,70 @@
import { DuckDBUIHttpRequestQueue } from '../../http/classes/DuckDBUIHttpRequestQueue.js';
import {
  DuckDBUIHttpRequestHeaderOptions,
  makeDuckDBUIHttpRequestHeaders,
} from '../../http/functions/makeDuckDBUIHttpRequestHeaders.js';
import { sendDuckDBUIHttpRequest } from '../../http/functions/sendDuckDBUIHttpRequest.js';
import { randomString } from '../../util/functions/randomString.js';
import { materializedRunResultFromQueueResult } from '../functions/materializedRunResultFromQueueResult.js';
import { MaterializedRunResult } from '../types/MaterializedRunResult.js';

export class DuckDBUIClientConnection {
  private readonly connectionName = `connection_${randomString()}`;

  private readonly requestQueue: DuckDBUIHttpRequestQueue =
    new DuckDBUIHttpRequestQueue();

  public async run(
    sql: string,
    args?: unknown[],
  ): Promise<MaterializedRunResult> {
    const queueResult = await this.requestQueue.enqueueAndWait(
      '/ddb/run',
      sql,
      this.makeHeaders({ parameters: args }),
    );
    return materializedRunResultFromQueueResult(queueResult);
  }

  public enqueue(sql: string, args?: unknown[]): string {
    return this.requestQueue.enqueue(
      '/ddb/run',
      sql,
      this.makeHeaders({ parameters: args }),
    );
  }

  public cancel(
    id: string,
    errorMessage?: string,
    failure?: (reason: unknown) => void,
  ) {
    this.requestQueue.cancel(id, errorMessage);
    // If currently running, then interrupt it.
    if (this.requestQueue.isCurrent(id)) {
      // Don't await (but report any unexpected errors). Canceling should return synchronously.
      sendDuckDBUIHttpRequest('/ddb/interrupt', '', this.makeHeaders()).catch(
        failure,
      );
    }
    return true;
  }

  public async runQueued(id: string): Promise<MaterializedRunResult> {
    const queueResult = await this.requestQueue.enqueuedResult(id);
    return materializedRunResultFromQueueResult(queueResult);
  }

  public get queuedCount(): number {
    return this.requestQueue.length;
  }

  private makeHeaders(
    options: Omit<DuckDBUIHttpRequestHeaderOptions, 'connectionName'> = {},
  ): Headers {
    return makeDuckDBUIHttpRequestHeaders({
      ...options,
      connectionName: this.connectionName,
    });
  }
}
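A sketch of the enqueue/cancel flow (SQL statements illustrative): `enqueue` returns an id immediately, `runQueued` awaits that entry's result, and `cancel` rejects the entry (and interrupts it if it is already running):

```ts
const conn = DuckDBUIClient.singleton.connect();
const createId = conn.enqueue('CREATE TABLE t AS SELECT 1 AS x');
const selectId = conn.enqueue('SELECT * FROM t');

conn.cancel(selectId, 'no longer needed'); // rejects the queued SELECT
await conn.runQueued(createId); // the CREATE still completes
```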
@@ -0,0 +1,20 @@
import { DuckDBDataReader } from '@duckdb/data-reader';
import { DuckDBDataChunkIterator } from '../../data-chunk/classes/DuckDBDataChunkIterator.js';
import { DuckDBUIHttpRequestQueueResult } from '../../http/classes/DuckDBUIHttpRequestQueue.js';
import { deserializerFromBuffer } from '../../serialization/functions/deserializeFromBuffer.js';
import { readQueryResult } from '../../serialization/functions/resultReaders.js';
import { MaterializedRunResult } from '../types/MaterializedRunResult.js';

export async function materializedRunResultFromQueueResult(
  queueResult: DuckDBUIHttpRequestQueueResult,
): Promise<MaterializedRunResult> {
  const { buffer, startTimeMs, endTimeMs } = queueResult;
  const deserializer = deserializerFromBuffer(buffer);
  const result = readQueryResult(deserializer);
  if (!result.success) {
    throw new Error(result.error);
  }
  const dataReader = new DuckDBDataReader(new DuckDBDataChunkIterator(result));
  await dataReader.readAll();
  return { data: dataReader, startTimeMs, endTimeMs };
}
@@ -0,0 +1,14 @@
import { DuckDBData } from '@duckdb/data-reader';

export interface MaterializedRunResult {
  /**
   * Full result set.
   *
   * Includes column metadata, such as types. Supports duplicate column names without renaming.
   *
   * See the `DuckDBData` interface for details.
   */
  data: DuckDBData;
  startTimeMs: number;
  endTimeMs: number;
}
@@ -0,0 +1,58 @@
// DuckDB's physical storage and binary serialization format is little endian.
const littleEndian = true;

export function getInt8(dataView: DataView, offset: number): number {
  return dataView.getInt8(offset);
}

export function getUInt8(dataView: DataView, offset: number): number {
  return dataView.getUint8(offset);
}

export function getInt16(dataView: DataView, offset: number): number {
  return dataView.getInt16(offset, littleEndian);
}

export function getUInt16(dataView: DataView, offset: number): number {
  return dataView.getUint16(offset, littleEndian);
}

export function getInt32(dataView: DataView, offset: number): number {
  return dataView.getInt32(offset, littleEndian);
}

export function getUInt32(dataView: DataView, offset: number): number {
  return dataView.getUint32(offset, littleEndian);
}

export function getInt64(dataView: DataView, offset: number): bigint {
  return dataView.getBigInt64(offset, littleEndian);
}

export function getUInt64(dataView: DataView, offset: number): bigint {
  return dataView.getBigUint64(offset, littleEndian);
}

export function getFloat32(dataView: DataView, offset: number): number {
  return dataView.getFloat32(offset, littleEndian);
}

export function getFloat64(dataView: DataView, offset: number): number {
  return dataView.getFloat64(offset, littleEndian);
}

export function getInt128(dataView: DataView, offset: number): bigint {
  const lower = getUInt64(dataView, offset);
  const upper = getInt64(dataView, offset + 8);
  return (upper << BigInt(64)) + lower;
}

export function getUInt128(dataView: DataView, offset: number): bigint {
  const lower = getUInt64(dataView, offset);
  const upper = getUInt64(dataView, offset + 8);
  return (BigInt.asUintN(64, upper) << BigInt(64)) | BigInt.asUintN(64, lower);
}

export function getBoolean(dataView: DataView, offset: number): boolean {
  return getUInt8(dataView, offset) !== 0;
}
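The 128-bit readers compose two little-endian 64-bit halves; the signed variant relies on the upper half carrying the sign, which a quick worked example illustrates:

```ts
// -1 as an int128 is 128 one-bits: the lower half reads as the unsigned
// value 0xffff_ffff_ffff_ffff and the upper half as the signed value -1.
const lower = 0xffffffffffffffffn;
const upper = -1n;
console.log((upper << 64n) + lower); // -1n, as in getInt128
```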
@@ -0,0 +1,178 @@
import {
  ARRAY,
  DECIMAL,
  DuckDBBigIntType,
  DuckDBBitType,
  DuckDBBlobType,
  DuckDBBooleanType,
  DuckDBDateType,
  DuckDBDoubleType,
  DuckDBFloatType,
  DuckDBHugeIntType,
  DuckDBIntegerType,
  DuckDBIntervalType,
  DuckDBSmallIntType,
  DuckDBTimestampMillisecondsType,
  DuckDBTimestampNanosecondsType,
  DuckDBTimestampSecondsType,
  DuckDBTimestampType,
  DuckDBTimestampTZType,
  DuckDBTimeType,
  DuckDBTimeTZType,
  DuckDBTinyIntType,
  DuckDBType,
  DuckDBUBigIntType,
  DuckDBUHugeIntType,
  DuckDBUIntegerType,
  DuckDBUSmallIntType,
  DuckDBUTinyIntType,
  DuckDBUUIDType,
  DuckDBVarCharType,
  DuckDBVarIntType,
  ENUM,
  JSONType,
  LIST,
  MAP,
  STRUCT,
  UNION,
} from '@duckdb/data-types';
import { LogicalTypeId } from '../../serialization/constants/LogicalTypeId.js';
import { TypeIdAndInfo } from '../../serialization/types/TypeInfo.js';
import {
  getArrayTypeInfo,
  getDecimalTypeInfo,
  getEnumTypeInfo,
  getListTypeInfo,
  getMapTypeInfos,
  getStructTypeInfo,
} from './typeInfoGetters.js';

/** Return the DuckDBType corresponding to the given TypeIdAndInfo. */
export function duckDBTypeFromTypeIdAndInfo(
  typeIdAndInfo: TypeIdAndInfo,
): DuckDBType {
  const { id, typeInfo } = typeIdAndInfo;
  const alias = typeInfo?.alias;
  switch (id) {
    case LogicalTypeId.BOOLEAN:
      return DuckDBBooleanType.create(alias);

    case LogicalTypeId.TINYINT:
      return DuckDBTinyIntType.create(alias);
    case LogicalTypeId.SMALLINT:
      return DuckDBSmallIntType.create(alias);
    case LogicalTypeId.INTEGER:
      return DuckDBIntegerType.create(alias);
    case LogicalTypeId.BIGINT:
      return DuckDBBigIntType.create(alias);

    case LogicalTypeId.DATE:
      return DuckDBDateType.create(alias);
    case LogicalTypeId.TIME:
      return DuckDBTimeType.create(alias);
    case LogicalTypeId.TIMESTAMP_SEC:
      return DuckDBTimestampSecondsType.create(alias);
    case LogicalTypeId.TIMESTAMP_MS:
      return DuckDBTimestampMillisecondsType.create(alias);
    case LogicalTypeId.TIMESTAMP:
      return DuckDBTimestampType.create(alias);
    case LogicalTypeId.TIMESTAMP_NS:
      return DuckDBTimestampNanosecondsType.create(alias);

    case LogicalTypeId.DECIMAL: {
      const { width, scale } = getDecimalTypeInfo(typeInfo);
      return DECIMAL(width, scale, alias);
    }

    case LogicalTypeId.FLOAT:
      return DuckDBFloatType.create(alias);
    case LogicalTypeId.DOUBLE:
      return DuckDBDoubleType.create(alias);

    case LogicalTypeId.CHAR:
    case LogicalTypeId.VARCHAR:
      // Minor optimization for the JSON type to avoid creating a new type object.
      if (alias === JSONType.alias) {
        return JSONType;
      }
      return DuckDBVarCharType.create(alias);
    case LogicalTypeId.BLOB:
      return DuckDBBlobType.create(alias);

    case LogicalTypeId.INTERVAL:
      return DuckDBIntervalType.create(alias);

    case LogicalTypeId.UTINYINT:
      return DuckDBUTinyIntType.create(alias);
    case LogicalTypeId.USMALLINT:
      return DuckDBUSmallIntType.create(alias);
    case LogicalTypeId.UINTEGER:
      return DuckDBUIntegerType.create(alias);
    case LogicalTypeId.UBIGINT:
      return DuckDBUBigIntType.create(alias);

    case LogicalTypeId.TIMESTAMP_TZ:
      return DuckDBTimestampTZType.create(alias);
    case LogicalTypeId.TIME_TZ:
      return DuckDBTimeTZType.create(alias);

    case LogicalTypeId.BIT:
      return DuckDBBitType.create(alias);

    case LogicalTypeId.VARINT:
      return DuckDBVarIntType.create(alias);

    case LogicalTypeId.UHUGEINT:
      return DuckDBUHugeIntType.create(alias);
    case LogicalTypeId.HUGEINT:
      return DuckDBHugeIntType.create(alias);

    case LogicalTypeId.UUID:
      return DuckDBUUIDType.create(alias);

    case LogicalTypeId.STRUCT: {
      const { childTypes } = getStructTypeInfo(typeInfo);
      const entries: Record<string, DuckDBType> = {};
      for (const [key, valueTypeIdAndInfo] of childTypes) {
        entries[key] = duckDBTypeFromTypeIdAndInfo(valueTypeIdAndInfo);
      }
      return STRUCT(entries, alias);
    }

    case LogicalTypeId.LIST: {
      const { childType } = getListTypeInfo(typeInfo);
      return LIST(duckDBTypeFromTypeIdAndInfo(childType), alias);
    }

    case LogicalTypeId.MAP: {
      const { keyType, valueType } = getMapTypeInfos(typeInfo);
      return MAP(
        duckDBTypeFromTypeIdAndInfo(keyType),
        duckDBTypeFromTypeIdAndInfo(valueType),
        alias,
      );
    }

    case LogicalTypeId.ENUM: {
      const { values } = getEnumTypeInfo(typeInfo);
      return ENUM(values, alias);
    }

    case LogicalTypeId.UNION: {
      const { childTypes } = getStructTypeInfo(typeInfo);
      const members: Record<string, DuckDBType> = {};
      for (const [key, valueTypeIdAndInfo] of childTypes) {
        members[key] = duckDBTypeFromTypeIdAndInfo(valueTypeIdAndInfo);
      }
      return UNION(members, alias);
    }

    case LogicalTypeId.ARRAY: {
      const { childType, size } = getArrayTypeInfo(typeInfo);
      return ARRAY(duckDBTypeFromTypeIdAndInfo(childType), size, alias);
    }

    default:
      throw new Error(`type id not implemented: ${typeIdAndInfo.id}`);
  }
}
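As a concrete illustration of the recursion, here is roughly what mapping a `STRUCT(a INTEGER)` looks like. The literal's field shapes (`id`, `typeInfo.kind`, and `childTypes` as name/type pairs) are assumed from how this file consumes them, not from a published schema:

```ts
const structType = duckDBTypeFromTypeIdAndInfo({
  id: LogicalTypeId.STRUCT,
  typeInfo: {
    kind: 'struct',
    childTypes: [['a', { id: LogicalTypeId.INTEGER }]], // assumed shape
  },
});
// structType should be equivalent to STRUCT({ a: DuckDBIntegerType.create() })
```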
@@ -0,0 +1,271 @@
import {
  DuckDBArrayValue,
  DuckDBBitValue,
  DuckDBBlobValue,
  DuckDBDateValue,
  DuckDBDecimalValue,
  DuckDBIntervalValue,
  DuckDBListValue,
  DuckDBMapValue,
  DuckDBStructValue,
  DuckDBTimeTZValue,
  DuckDBTimeValue,
  DuckDBTimestampMicrosecondsValue,
  DuckDBTimestampMillisecondsValue,
  DuckDBTimestampNanosecondsValue,
  DuckDBTimestampSecondsValue,
  DuckDBTimestampTZValue,
  DuckDBUUIDValue,
  DuckDBValue,
  getVarIntFromBytes,
} from '@duckdb/data-values';
import { LogicalTypeId } from '../../serialization/constants/LogicalTypeId.js';
import { TypeIdAndInfo } from '../../serialization/types/TypeInfo.js';
import { Vector } from '../../serialization/types/Vector.js';
import {
  getBoolean,
  getFloat32,
  getFloat64,
  getInt128,
  getInt16,
  getInt32,
  getInt64,
  getInt8,
  getUInt128,
  getUInt16,
  getUInt32,
  getUInt64,
  getUInt8,
} from './dataViewReaders.js';
import { isRowValid } from './isRowValid.js';
import {
  getArrayTypeInfo,
  getDecimalTypeInfo,
  getEnumTypeInfo,
  getListTypeInfo,
  getMapTypeInfos,
  getStructTypeInfo,
} from './typeInfoGetters.js';
import {
  getArrayVector,
  getDataListVector,
  getDataVector,
  getListVector,
  getStringVector,
  getVectorListVector,
} from './vectorGetters.js';

/** Return the DuckDBValue at the given index in the given Vector with the type described by the given TypeIdAndInfo. */
export function duckDBValueFromVector(
  typeIdAndInfo: TypeIdAndInfo,
  vector: Vector,
  rowIndex: number,
): DuckDBValue {
  if (!isRowValid(vector.validity, rowIndex)) return null;

  const { id, typeInfo } = typeIdAndInfo;
  switch (id) {
    case LogicalTypeId.BOOLEAN:
      return getBoolean(getDataVector(vector).data, rowIndex);

    case LogicalTypeId.TINYINT:
      return getInt8(getDataVector(vector).data, rowIndex);
    case LogicalTypeId.SMALLINT:
      return getInt16(getDataVector(vector).data, rowIndex * 2);
    case LogicalTypeId.INTEGER:
      return getInt32(getDataVector(vector).data, rowIndex * 4);
    case LogicalTypeId.BIGINT:
      return getInt64(getDataVector(vector).data, rowIndex * 8);

    case LogicalTypeId.DATE:
      return new DuckDBDateValue(
        getInt32(getDataVector(vector).data, rowIndex * 4),
      );
    case LogicalTypeId.TIME:
      return new DuckDBTimeValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );
    case LogicalTypeId.TIMESTAMP_SEC:
      return new DuckDBTimestampSecondsValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );
    case LogicalTypeId.TIMESTAMP_MS:
      return new DuckDBTimestampMillisecondsValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );
    case LogicalTypeId.TIMESTAMP:
      return new DuckDBTimestampMicrosecondsValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );
    case LogicalTypeId.TIMESTAMP_NS:
      return new DuckDBTimestampNanosecondsValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );

    case LogicalTypeId.DECIMAL: {
      const { width, scale } = getDecimalTypeInfo(typeInfo);
      if (width <= 4) {
        return new DuckDBDecimalValue(
          BigInt(getInt16(getDataVector(vector).data, rowIndex * 2)),
          scale,
        );
      } else if (width <= 9) {
        return new DuckDBDecimalValue(
          BigInt(getInt32(getDataVector(vector).data, rowIndex * 4)),
          scale,
        );
      } else if (width <= 18) {
        return new DuckDBDecimalValue(
          getInt64(getDataVector(vector).data, rowIndex * 8),
          scale,
        );
      } else if (width <= 38) {
        return new DuckDBDecimalValue(
          getInt128(getDataVector(vector).data, rowIndex * 16),
          scale,
        );
      }
      throw new Error(`unsupported decimal width: ${width}`);
    }

    case LogicalTypeId.FLOAT:
      return getFloat32(getDataVector(vector).data, rowIndex * 4);
    case LogicalTypeId.DOUBLE:
      return getFloat64(getDataVector(vector).data, rowIndex * 8);

    case LogicalTypeId.CHAR:
    case LogicalTypeId.VARCHAR:
      return getStringVector(vector).data[rowIndex];

    case LogicalTypeId.BLOB: {
      const dv = getDataListVector(vector).data[rowIndex];
      return new DuckDBBlobValue(
        new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength),
      );
    }

    case LogicalTypeId.INTERVAL: {
      const { data } = getDataVector(vector);
      const months = getInt32(data, rowIndex * 16 + 0);
      const days = getInt32(data, rowIndex * 16 + 4);
      const micros = getInt64(data, rowIndex * 16 + 8);
      return new DuckDBIntervalValue(months, days, micros);
    }

    case LogicalTypeId.UTINYINT:
      return getUInt8(getDataVector(vector).data, rowIndex);
    case LogicalTypeId.USMALLINT:
      return getUInt16(getDataVector(vector).data, rowIndex * 2);
    case LogicalTypeId.UINTEGER:
      return getUInt32(getDataVector(vector).data, rowIndex * 4);
    case LogicalTypeId.UBIGINT:
      return getUInt64(getDataVector(vector).data, rowIndex * 8);

    case LogicalTypeId.TIMESTAMP_TZ:
      return new DuckDBTimestampTZValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );
    case LogicalTypeId.TIME_TZ:
      return DuckDBTimeTZValue.fromBits(
        getUInt64(getDataVector(vector).data, rowIndex * 8),
      );

    case LogicalTypeId.BIT: {
      const dv = getDataListVector(vector).data[rowIndex];
      return new DuckDBBitValue(
        new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength),
      );
    }

    case LogicalTypeId.VARINT: {
      const dv = getDataListVector(vector).data[rowIndex];
      return getVarIntFromBytes(
        new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength),
      );
    }

    case LogicalTypeId.UHUGEINT:
      return getUInt128(getDataVector(vector).data, rowIndex * 16);
    case LogicalTypeId.HUGEINT:
      return getInt128(getDataVector(vector).data, rowIndex * 16);

    case LogicalTypeId.UUID:
      return DuckDBUUIDValue.fromStoredHugeint(
        getInt128(getDataVector(vector).data, rowIndex * 16),
      );

    case LogicalTypeId.STRUCT: {
      const { childTypes } = getStructTypeInfo(typeInfo);
      const { data } = getVectorListVector(vector);
      return new DuckDBStructValue(
        Array.from({ length: childTypes.length }).map((_, i) => ({
          key: childTypes[i][0],
          value: duckDBValueFromVector(childTypes[i][1], data[i], rowIndex),
        })),
      );
    }

    case LogicalTypeId.LIST: {
      const { childType } = getListTypeInfo(typeInfo);
      const { child, entries } = getListVector(vector);
      const { offset, length } = entries[rowIndex];
      return new DuckDBListValue(
        Array.from({ length }).map((_, i) =>
          duckDBValueFromVector(childType, child, offset + i),
        ),
      );
    }

    case LogicalTypeId.MAP: {
      const { keyType, valueType } = getMapTypeInfos(typeInfo);
      const { child, entries } = getListVector(vector);
      const { offset, length } = entries[rowIndex];
      const { data } = getVectorListVector(child);
      return new DuckDBMapValue(
        Array.from({ length }).map((_, i) => ({
          key: duckDBValueFromVector(keyType, data[0], offset + i),
          value: duckDBValueFromVector(valueType, data[1], offset + i),
        })),
      );
    }

    case LogicalTypeId.ENUM: {
      const { values } = getEnumTypeInfo(typeInfo);
      if (values.length < 256) {
        return values[getUInt8(getDataVector(vector).data, rowIndex)];
      } else if (values.length < 65536) {
        return values[getUInt16(getDataVector(vector).data, rowIndex * 2)];
      } else if (values.length < 4294967296) {
        return values[getUInt32(getDataVector(vector).data, rowIndex * 4)];
      }
      throw new Error(`unsupported enum size: values.length=${values.length}`);
    }

    case LogicalTypeId.UNION: {
      const { childTypes } = getStructTypeInfo(typeInfo);
      const { data } = getVectorListVector(vector);
      const tag = Number(
        duckDBValueFromVector(childTypes[0][1], data[0], rowIndex),
      );
      const altIndex = tag + 1;
      return duckDBValueFromVector(
        childTypes[altIndex][1],
        data[altIndex],
        rowIndex,
      );
    }

    case LogicalTypeId.ARRAY: {
      const { childType, size } = getArrayTypeInfo(typeInfo);
      const { child } = getArrayVector(vector);
      return new DuckDBArrayValue(
        Array.from({ length: size }).map((_, i) =>
          duckDBValueFromVector(childType, child, rowIndex * size + i),
        ),
      );
    }

    default:
      throw new Error(`type not implemented: ${id}`);
  }
}
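A note on the DECIMAL branches above: the physical integer width follows the decimal width (int16 through width 4, int32 through 9, int64 through 18, int128 through 38), and the scale rides along with the raw integer. For example, assuming `DuckDBDecimalValue(value, scale)` represents `value / 10^scale`:

```ts
// A DECIMAL(9, 2) column stores 123.45 as the int32 12345:
const price = new DuckDBDecimalValue(12345n, 2); // represents 123.45
```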
@@ -0,0 +1,7 @@
import { getUInt64 } from './dataViewReaders.js';

export function isRowValid(validity: DataView | null, row: number): boolean {
  if (!validity) return true;
  const bigint = getUInt64(validity, Math.floor(row / 64) * 8);
  return (bigint & (1n << BigInt(row % 64))) !== 0n;
}
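A worked example of the bitmask logic: each 64-bit word of the validity mask covers 64 rows, least-significant bit first. (This sketch constructs the mask via a `BigUint64Array`, which assumes a little-endian host so the bytes match the little-endian reads above.)

```ts
// The word 0b101 marks rows 0 and 2 valid and row 1 null.
const words = new BigUint64Array([0b101n]);
const validity = new DataView(words.buffer);
isRowValid(validity, 0); // true
isRowValid(validity, 1); // false
isRowValid(validity, 2); // true
```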
@@ -0,0 +1,93 @@
import {
  ArrayTypeInfo,
  DecimalTypeInfo,
  EnumTypeInfo,
  ListTypeInfo,
  StructTypeInfo,
  TypeIdAndInfo,
  TypeInfo,
} from '../../serialization/types/TypeInfo.js';

export function getArrayTypeInfo(
  typeInfo: TypeInfo | undefined,
): ArrayTypeInfo {
  if (!typeInfo) {
    throw new Error(`ARRAY has no typeInfo!`);
  }
  if (typeInfo.kind !== 'array') {
    throw new Error(`ARRAY has unexpected typeInfo.kind: ${typeInfo.kind}`);
  }
  return typeInfo;
}

export function getDecimalTypeInfo(
  typeInfo: TypeInfo | undefined,
): DecimalTypeInfo {
  if (!typeInfo) {
    throw new Error(`DECIMAL has no typeInfo!`);
  }
  if (typeInfo.kind !== 'decimal') {
    throw new Error(`DECIMAL has unexpected typeInfo.kind: ${typeInfo.kind}`);
  }
  return typeInfo;
}

export function getEnumTypeInfo(typeInfo: TypeInfo | undefined): EnumTypeInfo {
  if (!typeInfo) {
    throw new Error(`ENUM has no typeInfo!`);
  }
  if (typeInfo.kind !== 'enum') {
    throw new Error(`ENUM has unexpected typeInfo.kind: ${typeInfo.kind}`);
  }
  return typeInfo;
}

export function getListTypeInfo(typeInfo: TypeInfo | undefined): ListTypeInfo {
  if (!typeInfo) {
    throw new Error(`LIST has no typeInfo!`);
  }
  if (typeInfo.kind !== 'list') {
    throw new Error(`LIST has unexpected typeInfo.kind: ${typeInfo.kind}`);
  }
  return typeInfo;
}

export function getStructTypeInfo(
  typeInfo: TypeInfo | undefined,
): StructTypeInfo {
  if (!typeInfo) {
    throw new Error(`STRUCT has no typeInfo!`);
  }
  if (typeInfo.kind !== 'struct') {
    throw new Error(`STRUCT has unexpected typeInfo.kind: ${typeInfo.kind}`);
  }
  return typeInfo;
}

export function getMapTypeInfos(typeInfo: TypeInfo | undefined): {
  keyType: TypeIdAndInfo;
  valueType: TypeIdAndInfo;
} {
  // MAP = LIST(STRUCT(key KEY_TYPE, value VALUE_TYPE))
  const { childType } = getListTypeInfo(typeInfo);
  const { childTypes } = getStructTypeInfo(childType.typeInfo);
  if (childTypes.length !== 2) {
    throw new Error(
      `MAP childType has unexpected childTypes length: ${childTypes.length}`,
    );
  }
  if (childTypes[0].length !== 2) {
    throw new Error(
      `MAP childType has unexpected childTypes[0] length: ${childTypes[0].length}`,
    );
  }
  if (childTypes[1].length !== 2) {
    throw new Error(
      `MAP childType has unexpected childTypes[1] length: ${childTypes[1].length}`,
    );
  }
  return {
    keyType: childTypes[0][1],
    valueType: childTypes[1][1],
  };
}
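To make the `MAP = LIST(STRUCT(key KEY_TYPE, value VALUE_TYPE))` comment concrete, here is roughly the TypeInfo shape `getMapTypeInfos` expects for a `MAP(VARCHAR, INTEGER)`. The literal is illustrative, with field shapes taken from this file's own accessors rather than a published schema:

```ts
const mapTypeInfo: TypeInfo = {
  kind: 'list',
  childType: {
    id: LogicalTypeId.STRUCT, // assumed id constant
    typeInfo: {
      kind: 'struct',
      childTypes: [
        ['key', { id: LogicalTypeId.VARCHAR }],
        ['value', { id: LogicalTypeId.INTEGER }],
      ],
    },
  },
};
// getMapTypeInfos(mapTypeInfo) returns the VARCHAR key type and INTEGER value type.
```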
@@ -0,0 +1,51 @@
import {
  ArrayVector,
  DataListVector,
  DataVector,
  ListVector,
  StringVector,
  Vector,
  VectorListVector,
} from '../../serialization/types/Vector.js';

export function getDataVector(vector: Vector): DataVector {
  if (vector.kind !== 'data') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}

export function getStringVector(vector: Vector): StringVector {
  if (vector.kind !== 'string') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}

export function getDataListVector(vector: Vector): DataListVector {
  if (vector.kind !== 'datalist') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}

export function getVectorListVector(vector: Vector): VectorListVector {
  if (vector.kind !== 'vectorlist') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}

export function getListVector(vector: Vector): ListVector {
  if (vector.kind !== 'list') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}

export function getArrayVector(vector: Vector): ArrayVector {
  if (vector.kind !== 'array') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}
@@ -0,0 +1,42 @@
import { DuckDBData } from '@duckdb/data-reader';
import { DuckDBType } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { duckDBTypeFromTypeIdAndInfo } from '../../conversion/functions/duckDBTypeFromTypeIdAndInfo.js';
import { duckDBValueFromVector } from '../../conversion/functions/duckDBValueFromVector.js';
import { ColumnNamesAndTypes } from '../../serialization/types/ColumnNamesAndTypes.js';
import { DataChunk } from '../../serialization/types/DataChunk.js';

export class DuckDBDataChunk extends DuckDBData {
  constructor(
    private columnNamesAndTypes: ColumnNamesAndTypes,
    private chunk: DataChunk,
  ) {
    super();
  }

  get columnCount() {
    return this.columnNamesAndTypes.names.length;
  }

  get rowCount() {
    return this.chunk.rowCount;
  }

  columnName(columnIndex: number): string {
    return this.columnNamesAndTypes.names[columnIndex];
  }

  columnType(columnIndex: number): DuckDBType {
    return duckDBTypeFromTypeIdAndInfo(
      this.columnNamesAndTypes.types[columnIndex],
    );
  }

  value(columnIndex: number, rowIndex: number): DuckDBValue {
    return duckDBValueFromVector(
      this.columnNamesAndTypes.types[columnIndex],
      this.chunk.vectors[columnIndex],
      rowIndex,
    );
  }
}
@@ -0,0 +1,51 @@
import {
  AsyncDuckDBDataBatchIterator,
  DuckDBData,
  DuckDBDataBatchIteratorResult,
} from '@duckdb/data-reader';
import { SuccessQueryResult } from '../../serialization/types/QueryResult.js';
import { DuckDBDataChunk } from './DuckDBDataChunk.js';

const ITERATOR_DONE: DuckDBDataBatchIteratorResult = Object.freeze({
  done: true,
  value: undefined,
});

export class DuckDBDataChunkIterator implements AsyncDuckDBDataBatchIterator {
  private result: SuccessQueryResult;

  private index: number;

  constructor(result: SuccessQueryResult) {
    this.result = result;
    this.index = 0;
  }

  async next(): Promise<DuckDBDataBatchIteratorResult> {
    if (this.index < this.result.chunks.length) {
      return {
        done: false,
        value: new DuckDBDataChunk(
          this.result.columnNamesAndTypes,
          this.result.chunks[this.index++],
        ),
      };
    }
    return ITERATOR_DONE;
  }

  async return(value?: DuckDBData): Promise<DuckDBDataBatchIteratorResult> {
    if (value) {
      return { done: true, value };
    }
    return ITERATOR_DONE;
  }

  async throw(_e?: unknown): Promise<DuckDBDataBatchIteratorResult> {
    return ITERATOR_DONE;
  }

  [Symbol.asyncIterator](): AsyncDuckDBDataBatchIterator {
    return this;
  }
}
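Because the iterator implements `Symbol.asyncIterator`, it can drive a `for await` loop directly; in this sketch, `result` stands for an already-deserialized `SuccessQueryResult`:

```ts
for await (const chunk of new DuckDBDataChunkIterator(result)) {
  console.log(`${chunk.rowCount} rows x ${chunk.columnCount} columns`);
}
```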
@@ -0,0 +1,155 @@
|
||||
import 'core-js/actual/promise/with-resolvers.js';
|
||||
import { randomString } from '../../util/functions/randomString.js';
|
||||
import { sendDuckDBUIHttpRequest } from '../functions/sendDuckDBUIHttpRequest.js';
|
||||
|
||||
export interface DuckDBUIHttpRequestQueueResult {
|
||||
buffer: ArrayBuffer;
|
||||
startTimeMs: number;
|
||||
endTimeMs: number;
|
||||
}
|
||||
|
||||
export interface DuckDBUIHttpRequestQueueEntry {
|
||||
id: string;
|
||||
url: string;
|
||||
body: string;
|
||||
headers?: Headers;
|
||||
deferredResult: PromiseWithResolvers<DuckDBUIHttpRequestQueueResult>;
|
||||
canceled?: boolean;
|
||||
}
|
||||
|
||||
export class DuckDBUIHttpRequestQueue {
|
||||
/**
|
||||
* Invariants: The first entry in the queue has been sent and we're waiting for its response. If the first entry is
|
||||
   * canceled, it remains in the queue until its response is received. If an entry other than the first is canceled, it
   * remains in the queue until it comes to the front, at which point it is removed without being sent.
   */
  private entries: DuckDBUIHttpRequestQueueEntry[] = [];

  public get length() {
    return this.entries.length;
  }

  public enqueueAndWait(
    url: string,
    body: string,
    headers?: Headers,
  ): Promise<DuckDBUIHttpRequestQueueResult> {
    return this.internalEnqueue(url, body, headers).deferredResult.promise;
  }

  public enqueue(url: string, body: string, headers?: Headers): string {
    return this.internalEnqueue(url, body, headers).id;
  }

  public enqueuedResult(id: string): Promise<DuckDBUIHttpRequestQueueResult> {
    const index = this.entries.findIndex((entry) => entry.id === id);
    if (index < 0) {
      throw new Error(`Invalid id: ${id}`);
    }
    return this.entries[index].deferredResult.promise;
  }

  public cancel(id: string, errorMessage?: string) {
    const index = this.entries.findIndex((entry) => entry.id === id);
    if (index >= 0) {
      // Mark the entry as canceled and reject its promise. If it was already sent, then we'll remove it from the queue
      // when we get its response. If not, then we'll remove it when the (non-canceled) request ahead of it completes.
      // The caller may or may not arrange for the response to return early with an error, for example, by interrupting
      // it; whether that happens doesn't change how the queue operates.
      this.entries[index].canceled = true;
      this.entries[index].deferredResult.reject(
        new Error(errorMessage ?? 'query was canceled'),
      );
    } else {
      console.warn(`Couldn't cancel; no entry found for id: ${id}`);
    }
  }

  /**
   * Returns true if the given entry id is at the front of the queue.
   * Note that it may be canceled.
   */
  public isCurrent(id: string): boolean {
    return this.entries.length > 0 && this.entries[0].id === id;
  }

  private internalEnqueue(
    url: string,
    body: string,
    headers?: Headers,
  ): DuckDBUIHttpRequestQueueEntry {
    const id = randomString();
    const deferredResult =
      Promise.withResolvers<DuckDBUIHttpRequestQueueResult>();
    const entry: DuckDBUIHttpRequestQueueEntry = {
      id,
      url,
      body,
      headers,
      deferredResult,
    };
    this.entries.push(entry);
    // If the new entry is the first in our queue, then send it.
    if (this.entries.length === 1) {
      this.sendRequest(this.entries[0]);
    }
    return entry;
  }

  private handleResponse(
    entryId: string,
    startTimeMs: number,
    buffer: ArrayBuffer | undefined,
    reason?: unknown,
  ) {
    if (this.entries.length === 0) {
      console.warn(
        `DuckDBUIHttpRequestQueue.handleResponse(entryId=${entryId}): queue unexpectedly empty`,
      );
      return;
    }
    if (this.entries[0].id !== entryId) {
      console.warn(
        `DuckDBUIHttpRequestQueue.handleResponse(entryId=${entryId}): front of queue doesn't match response`,
      );
      return;
    }
    // Remove the entry corresponding to this response.
    const entry = this.entries.shift();
    // There should always be an entry because of the length check above, but we need to appease the compiler.
    // If the entry was canceled, we've already rejected the promise, so there's nothing more to do.
    if (entry && !entry.canceled) {
      if (buffer) {
        const endTimeMs = performance.now();
        // If the entry has a valid buffer, then resolve its promise to it.
        entry.deferredResult.resolve({ buffer, startTimeMs, endTimeMs });
      } else {
        // Otherwise, reject it with the provided reason.
        entry.deferredResult.reject(reason);
      }
    }
    // Send the next request, if any.
    this.sendNextInQueue();
  }

  /** If there are any entries in our queue that aren't canceled, send the first one. */
  private sendNextInQueue() {
    // Remove any unsent canceled entries from the front of the queue.
    while (this.entries.length > 0 && this.entries[0].canceled) {
      this.entries.shift();
    }
    // If there's an uncanceled entry left, send it.
    if (this.entries.length > 0) {
      this.sendRequest(this.entries[0]);
    }
  }

  private sendRequest(entry: DuckDBUIHttpRequestQueueEntry) {
    const startTimeMs = performance.now();
    sendDuckDBUIHttpRequest(entry.url, entry.body, entry.headers)
      .then((buffer) => this.handleResponse(entry.id, startTimeMs, buffer))
      .catch((reason) =>
        this.handleResponse(entry.id, startTimeMs, undefined, reason),
      );
  }
}
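For orientation, a minimal usage sketch of the queue (not part of the diff): it assumes a no-argument constructor, which isn't shown here, and a hypothetical endpoint path.

// Usage sketch. The '/ddb/run-sql' path is hypothetical.
const queue = new DuckDBUIHttpRequestQueue();

// enqueue() returns an id that can be used to await or cancel the request later.
const id = queue.enqueue('/ddb/run-sql', 'SELECT 42');

// enqueuedResult() resolves with the response buffer and timing, or rejects if canceled.
queue
  .enqueuedResult(id)
  .then(({ buffer, startTimeMs, endTimeMs }) => {
    console.log(`${buffer.byteLength} bytes in ${endTimeMs - startTimeMs} ms`);
  })
  .catch((reason) => console.warn(reason));

// cancel() rejects the promise immediately; the entry itself leaves the queue
// either when its response arrives (if already sent) or when it reaches the front.
queue.cancel(id);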
@@ -0,0 +1,39 @@
import { toBase64 } from '../../util/functions/toBase64.js';

export interface DuckDBUIHttpRequestHeaderOptions {
  description?: string;
  connectionName?: string;
  databaseName?: string;
  parameters?: unknown[];
}

export function makeDuckDBUIHttpRequestHeaders({
  description,
  connectionName,
  databaseName,
  parameters,
}: DuckDBUIHttpRequestHeaderOptions): Headers {
  const headers = new Headers();
  if (description) {
    headers.append('X-DuckDB-UI-Request-Description', description);
  }
  if (connectionName) {
    headers.append('X-DuckDB-UI-Connection-Name', connectionName);
  }
  if (databaseName) {
    // Base64-encode the value because it can contain characters that are invalid in an HTTP header.
    headers.append('X-DuckDB-UI-Database-Name', toBase64(databaseName));
  }
  if (parameters) {
    headers.append('X-DuckDB-UI-Parameter-Count', String(parameters.length));
    for (let i = 0; i < parameters.length; i++) {
      // Base64-encode the value because it can contain characters that are invalid in an HTTP header.
      // TODO: support non-string parameters?
      headers.append(
        `X-DuckDB-UI-Parameter-Value-${i}`,
        toBase64(String(parameters[i])),
      );
    }
  }
  return headers;
}
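To make the header scheme concrete, here is what a call produces; the values are shown schematically, since the exact encoding comes from toBase64.

// Illustrative call; header values shown schematically.
const headers = makeDuckDBUIHttpRequestHeaders({
  description: 'run query',
  databaseName: 'my db',
  parameters: ['duck', 42],
});
// Resulting headers:
//   X-DuckDB-UI-Request-Description: run query
//   X-DuckDB-UI-Database-Name: <base64('my db')>
//   X-DuckDB-UI-Parameter-Count: 2
//   X-DuckDB-UI-Parameter-Value-0: <base64('duck')>
//   X-DuckDB-UI-Parameter-Value-1: <base64('42')>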
@@ -0,0 +1,13 @@
export async function sendDuckDBUIHttpRequest(
  url: string,
  body: string,
  headers?: Headers,
): Promise<ArrayBuffer> {
  const response = await fetch(url, {
    method: 'POST',
    headers,
    body,
  });
  // Note: non-2xx responses are not treated as errors here; the body is buffered and returned either way.
  const buffer = await response.arrayBuffer();
  return buffer;
}
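The two helpers compose naturally. A hedged sketch, again using a hypothetical endpoint path:

// Example composition (hypothetical endpoint path), inside an async context.
const buffer = await sendDuckDBUIHttpRequest(
  '/ddb/run-sql',
  'SELECT 42',
  makeDuckDBUIHttpRequestHeaders({ description: 'example query' }),
);
console.log(`received ${buffer.byteLength} bytes`);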
1
ts/pkgs/duckdb-ui-client/src/index.ts
Normal file
@@ -0,0 +1 @@
export * from './client/classes/DuckDBUIClient.js';
@@ -0,0 +1,130 @@
import { BinaryStreamReader } from './BinaryStreamReader.js';

export type Reader<T> = (deserializer: BinaryDeserializer) => T;
export type ListReader<T> = (
  deserializer: BinaryDeserializer,
  index: number,
) => T;

const decoder = new TextDecoder();

/**
 * An implementation of a subset of DuckDB's BinaryDeserializer.
 *
 * See:
 * - https://github.com/duckdb/duckdb/blob/main/src/include/duckdb/common/serializer/binary_deserializer.hpp
 * - https://github.com/duckdb/duckdb/blob/main/src/common/serializer/binary_deserializer.cpp
 */
export class BinaryDeserializer {
  private reader: BinaryStreamReader;

  public constructor(reader: BinaryStreamReader) {
    this.reader = reader;
  }

  private peekFieldId() {
    return this.reader.peekUint16(true);
  }

  private consumeFieldId() {
    this.reader.consume(2);
  }

  /** If the next field id matches, consume it and return true; otherwise leave it unconsumed and return false. */
  private checkFieldId(possibleFieldId: number) {
    const fieldId = this.peekFieldId();
    if (fieldId === possibleFieldId) {
      this.consumeFieldId();
      return true;
    }
    return false;
  }

  /** Consume the next field id, throwing if it doesn't match the expected one. */
  private expectFieldId(expectedFieldId: number) {
    const fieldId = this.peekFieldId();
    if (fieldId === expectedFieldId) {
      this.consumeFieldId();
    } else {
      throw new Error(
        `Expected field id ${expectedFieldId} but got ${fieldId} (offset=${this.reader.getOffset()})`,
      );
    }
  }

  public expectObjectEnd() {
    this.expectFieldId(0xffff);
  }

  public throwUnsupported() {
    throw new Error(`unsupported type, offset=${this.reader.getOffset()}`);
  }

  public readUint8() {
    return this.reader.readUint8();
  }

  /**
   * Read a variable-length integer: seven payload bits per byte, with the high
   * bit indicating that more bytes follow. Note that the bitwise operations
   * limit this implementation to values that fit in 32 bits.
   */
  public readVarInt() {
    let result = 0;
    let byte = 0;
    let shift = 0;
    do {
      byte = this.reader.readUint8();
      result |= (byte & 0x7f) << shift;
      shift += 7;
    } while (byte & 0x80);
    return result;
  }

  /** Read a presence byte, then the value if present; returns null otherwise. */
  public readNullable<T>(reader: Reader<T>) {
    const present = this.readUint8();
    if (present) {
      return reader(this);
    }
    return null;
  }

  /** Read a varint length followed by that many bytes of data. */
  public readData() {
    const length = this.readVarInt();
    return this.reader.readData(length);
  }

  /** Read a varint length followed by that many bytes of UTF-8 text. */
  public readString() {
    const length = this.readVarInt();
    const dv = this.reader.readData(length);
    return decoder.decode(dv);
  }

  /** Read a varint count followed by that many items. */
  public readList<T>(reader: ListReader<T>) {
    const count = this.readVarInt();
    const items: T[] = [];
    for (let i = 0; i < count; i++) {
      items.push(reader(this, i));
    }
    return items;
  }

  public readPair<T, U>(
    firstReader: Reader<T>,
    secondReader: Reader<U>,
  ): [T, U] {
    const first = this.readProperty(0, firstReader);
    const second = this.readProperty(1, secondReader);
    this.expectObjectEnd();
    return [first, second];
  }

  public readProperty<T>(expectedFieldId: number, reader: Reader<T>) {
    this.expectFieldId(expectedFieldId);
    return reader(this);
  }

  /** Read an optional property: if the next field id matches, read the value; otherwise return the default. */
  public readPropertyWithDefault<T>(
    possibleFieldId: number,
    reader: Reader<T>,
    defaultValue: T,
  ): T {
    if (this.checkFieldId(possibleFieldId)) {
      return reader(this);
    }
    return defaultValue;
  }
}
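A decoding sketch (not part of the diff): the BinaryStreamReader constructor signature is an assumption, since that file isn't shown here, and the field ids are arbitrary for illustration.

// Decoding sketch; assumes BinaryStreamReader wraps a response buffer
// such as one returned by sendDuckDBUIHttpRequest.
const deserializer = new BinaryDeserializer(new BinaryStreamReader(buffer));

// An object serialized as { field 100: string, field 101: optional string list }:
const name = deserializer.readProperty(100, (d) => d.readString());
const tags = deserializer.readPropertyWithDefault(
  101,
  (d) => d.readList((dd) => dd.readString()),
  [] as string[],
);
deserializer.expectObjectEnd();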
Some files were not shown because too many files have changed in this diff.