add duckdb-ui-client & other ts pkgs (#10)
* add duckdb-ui-client & other ts pkgs * workflow fixes * fix working dir * no sparse checkout; specify package.json path * path to pnpm-lock.yaml * add check & build test * workflow step descriptions * use comments & names * one more naming tweak
This commit is contained in:
34
ts/pkgs/duckdb-data-values/package.json
Normal file
34
ts/pkgs/duckdb-data-values/package.json
Normal file
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"name": "@duckdb/data-values",
|
||||
"version": "0.0.1",
|
||||
"description": "Utilities for representing DuckDB values",
|
||||
"type": "module",
|
||||
"main": "./out/index.js",
|
||||
"module": "./out/index.js",
|
||||
"types": "./out/index.d.ts",
|
||||
"scripts": {
|
||||
"preinstall": "pnpm build:src",
|
||||
"build": "tsc -b src test",
|
||||
"build:src": "tsc -b src",
|
||||
"build:test": "tsc -b test",
|
||||
"build:watch": "tsc -b src test --watch",
|
||||
"check": "pnpm format:check && pnpm lint",
|
||||
"clean": "rimraf out",
|
||||
"format:check": "prettier . --ignore-path $(find-up .prettierignore) --check",
|
||||
"format:write": "prettier . --ignore-path $(find-up .prettierignore) --write",
|
||||
"lint": "pnpm eslint src test",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.24.0",
|
||||
"eslint": "^9.24.0",
|
||||
"find-up-cli": "^6.0.0",
|
||||
"prettier": "^3.5.3",
|
||||
"rimraf": "^6.0.1",
|
||||
"typescript": "^5.8.3",
|
||||
"typescript-eslint": "^8.30.1",
|
||||
"vite": "^6.2.6",
|
||||
"vitest": "^3.1.1"
|
||||
}
|
||||
}
|
||||
23
ts/pkgs/duckdb-data-values/src/DuckDBArrayValue.ts
Normal file
23
ts/pkgs/duckdb-data-values/src/DuckDBArrayValue.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
|
||||
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
|
||||
import { DuckDBValue } from './DuckDBValue.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBArrayValue extends SpecialDuckDBValue {
|
||||
public readonly values: readonly DuckDBValue[];
|
||||
|
||||
constructor(values: readonly DuckDBValue[]) {
|
||||
super();
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
const valueStrings = this.values.map(displayStringForDuckDBValue);
|
||||
return `[${valueStrings.join(', ')}]`;
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.values.map(jsonFromDuckDBValue);
|
||||
}
|
||||
}
|
||||
123
ts/pkgs/duckdb-data-values/src/DuckDBBitValue.ts
Normal file
123
ts/pkgs/duckdb-data-values/src/DuckDBBitValue.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
/**
 * A DuckDB BIT value: a packed bit string.
 *
 * Storage layout (see fromLengthAndPredicate): byte 0 holds the number of
 * padding bits; the remaining bytes hold the bits MSB-first, with the padding
 * bits (all 1s) occupying the most significant bits of byte 1.
 */
export class DuckDBBitValue extends SpecialDuckDBValue {
  // Raw packed representation, including the leading padding-count byte.
  public readonly data: Uint8Array;

  constructor(data: Uint8Array) {
    super();
    this.data = data;
  }

  /** Number of unused (padding) bits at the start of the bit data. */
  public padding(): number {
    return this.data[0];
  }

  /** Number of logical bits in this value (excludes the count byte and padding). */
  public get length(): number {
    return (this.data.length - 1) * 8 - this.padding();
  }

  /** Read the logical bit at `index` as a boolean. */
  public getBool(index: number): boolean {
    // Shift past the padding bits, then skip the count byte (+1).
    const offset = index + this.padding();
    const dataIndex = Math.floor(offset / 8) + 1;
    // Bits are stored MSB-first within each byte.
    const byte = this.data[dataIndex] >> (7 - (offset % 8));
    return (byte & 1) !== 0;
  }

  /** Expand all logical bits into a boolean array. */
  public toBools(): boolean[] {
    const bools: boolean[] = [];
    const length = this.length;
    for (let i = 0; i < length; i++) {
      bools.push(this.getBool(i));
    }
    return bools;
  }

  /** Read the logical bit at `index` as 0 or 1. */
  public getBit(index: number): 0 | 1 {
    return this.getBool(index) ? 1 : 0;
  }

  /** Expand all logical bits into an array of 0s and 1s. */
  public toBits(): number[] {
    const bits: number[] = [];
    const length = this.length;
    for (let i = 0; i < length; i++) {
      bits.push(this.getBit(i));
    }
    return bits;
  }

  /** Render as a string of '0' and '1' characters, one per logical bit. */
  public toDuckDBString(): string {
    const length = this.length;
    const chars = Array.from<string>({ length });
    for (let i = 0; i < length; i++) {
      chars[i] = this.getBool(i) ? '1' : '0';
    }
    return chars.join('');
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }

  /** Build a bit value from a string; characters equal to `on` become 1 bits. */
  public static fromString(str: string, on: string = '1'): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(
      str.length,
      (i) => str[i] === on,
    );
  }

  /** Build a bit value from numbers; entries equal to `on` become 1 bits. */
  public static fromBits(
    bits: readonly number[],
    on: number = 1,
  ): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(
      bits.length,
      (i) => bits[i] === on,
    );
  }

  /** Build a bit value from booleans. */
  public static fromBools(bools: readonly boolean[]): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(bools.length, (i) => bools[i]);
  }

  /**
   * Build a bit value of `length` bits, where bit i is set iff `predicate(i)`.
   *
   * Packs bits MSB-first into bytes, preceded by a count byte recording how
   * many padding bits were needed to round up to a whole byte.
   */
  public static fromLengthAndPredicate(
    length: number,
    predicate: (index: number) => boolean,
  ): DuckDBBitValue {
    const byteCount = Math.ceil(length / 8) + 1;
    const paddingBitCount = (8 - (length % 8)) % 8;

    const data = new Uint8Array(byteCount);
    let byteIndex = 0;

    // first byte contains count of padding bits
    data[byteIndex++] = paddingBitCount;

    let byte = 0;
    let byteBit = 0;

    // padding consists of 1s in MSB of second byte
    while (byteBit < paddingBitCount) {
      byte <<= 1;
      byte |= 1;
      byteBit++;
    }

    let bitIndex = 0;

    // Fill each remaining byte 8 bits at a time, consuming predicate results.
    while (byteIndex < byteCount) {
      while (byteBit < 8) {
        byte <<= 1;
        if (predicate(bitIndex++)) {
          byte |= 1;
        }
        byteBit++;
      }
      data[byteIndex++] = byte;
      byte = 0;
      byteBit = 0;
    }

    return new DuckDBBitValue(data);
  }
}
|
||||
20
ts/pkgs/duckdb-data-values/src/DuckDBBlobValue.ts
Normal file
20
ts/pkgs/duckdb-data-values/src/DuckDBBlobValue.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { stringFromBlob } from './conversion/stringFromBlob.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBBlobValue extends SpecialDuckDBValue {
|
||||
public readonly bytes: Uint8Array;
|
||||
|
||||
constructor(bytes: Uint8Array) {
|
||||
super();
|
||||
this.bytes = bytes;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return stringFromBlob(this.bytes);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
20
ts/pkgs/duckdb-data-values/src/DuckDBDateValue.ts
Normal file
20
ts/pkgs/duckdb-data-values/src/DuckDBDateValue.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { getDuckDBDateStringFromDays } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBDateValue extends SpecialDuckDBValue {
|
||||
public readonly days: number;
|
||||
|
||||
constructor(days: number) {
|
||||
super();
|
||||
this.days = days;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBDateStringFromDays(this.days);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
38
ts/pkgs/duckdb-data-values/src/DuckDBDecimalValue.ts
Normal file
38
ts/pkgs/duckdb-data-values/src/DuckDBDecimalValue.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import {
|
||||
DuckDBDecimalFormatOptions,
|
||||
stringFromDecimal,
|
||||
} from './conversion/stringFromDecimal.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBDecimalValue extends SpecialDuckDBValue {
|
||||
public readonly scaledValue: bigint;
|
||||
|
||||
public readonly scale: number;
|
||||
|
||||
constructor(scaledValue: bigint, scale: number) {
|
||||
super();
|
||||
this.scaledValue = scaledValue;
|
||||
this.scale = scale;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return stringFromDecimal(this.scaledValue, this.scale);
|
||||
}
|
||||
|
||||
/** Returns a string representation appropriate to the host environment's current locale. */
|
||||
|
||||
public toLocaleString(
|
||||
locales?: string | string[],
|
||||
options?: DuckDBDecimalFormatOptions,
|
||||
): string {
|
||||
return stringFromDecimal(this.scaledValue, this.scale, {
|
||||
locales,
|
||||
options,
|
||||
});
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
26
ts/pkgs/duckdb-data-values/src/DuckDBIntervalValue.ts
Normal file
26
ts/pkgs/duckdb-data-values/src/DuckDBIntervalValue.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import { getDuckDBIntervalString } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBIntervalValue extends SpecialDuckDBValue {
|
||||
public readonly months: number;
|
||||
|
||||
public readonly days: number;
|
||||
|
||||
public readonly microseconds: bigint;
|
||||
|
||||
constructor(months: number, days: number, microseconds: bigint) {
|
||||
super();
|
||||
this.months = months;
|
||||
this.days = days;
|
||||
this.microseconds = microseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBIntervalString(this.months, this.days, this.microseconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
23
ts/pkgs/duckdb-data-values/src/DuckDBListValue.ts
Normal file
23
ts/pkgs/duckdb-data-values/src/DuckDBListValue.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
|
||||
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
|
||||
import { DuckDBValue } from './DuckDBValue.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBListValue extends SpecialDuckDBValue {
|
||||
public readonly values: readonly DuckDBValue[];
|
||||
|
||||
constructor(values: readonly DuckDBValue[]) {
|
||||
super();
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
const valueStrings = this.values.map(displayStringForDuckDBValue);
|
||||
return `[${valueStrings.join(', ')}]`;
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.values.map(jsonFromDuckDBValue);
|
||||
}
|
||||
}
|
||||
6
ts/pkgs/duckdb-data-values/src/DuckDBMapEntry.ts
Normal file
6
ts/pkgs/duckdb-data-values/src/DuckDBMapEntry.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import { DuckDBValue } from './DuckDBValue.js';
|
||||
|
||||
/** One key/value pair within a DuckDB MAP value. */
export interface DuckDBMapEntry {
  // Map keys may themselves be any DuckDB value.
  readonly key: DuckDBValue;
  readonly value: DuckDBValue;
}
|
||||
33
ts/pkgs/duckdb-data-values/src/DuckDBMapValue.ts
Normal file
33
ts/pkgs/duckdb-data-values/src/DuckDBMapValue.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
|
||||
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
|
||||
import { DuckDBMapEntry } from './DuckDBMapEntry.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBMapValue extends SpecialDuckDBValue {
|
||||
public readonly entries: readonly DuckDBMapEntry[];
|
||||
|
||||
constructor(entries: readonly DuckDBMapEntry[]) {
|
||||
super();
|
||||
this.entries = entries;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
const entryStrings = this.entries.map(
|
||||
({ key, value }) =>
|
||||
`${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue(
|
||||
value,
|
||||
)}`,
|
||||
);
|
||||
return `{${entryStrings.join(', ')}}`;
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
const result: Json = {};
|
||||
for (const { key, value } of this.entries) {
|
||||
const keyString = displayStringForDuckDBValue(key);
|
||||
result[keyString] = jsonFromDuckDBValue(value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
6
ts/pkgs/duckdb-data-values/src/DuckDBStructEntry.ts
Normal file
6
ts/pkgs/duckdb-data-values/src/DuckDBStructEntry.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import { DuckDBValue } from './DuckDBValue.js';
|
||||
|
||||
/** One named field within a DuckDB STRUCT value. */
export interface DuckDBStructEntry {
  // Struct keys are always strings, unlike MAP keys.
  readonly key: string;
  readonly value: DuckDBValue;
}
|
||||
33
ts/pkgs/duckdb-data-values/src/DuckDBStructValue.ts
Normal file
33
ts/pkgs/duckdb-data-values/src/DuckDBStructValue.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
|
||||
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
|
||||
import { DuckDBStructEntry } from './DuckDBStructEntry.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBStructValue extends SpecialDuckDBValue {
|
||||
public readonly entries: readonly DuckDBStructEntry[];
|
||||
|
||||
constructor(entries: readonly DuckDBStructEntry[]) {
|
||||
super();
|
||||
this.entries = entries;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
const entryStrings = this.entries.map(
|
||||
({ key, value }) =>
|
||||
`${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue(
|
||||
value,
|
||||
)}`,
|
||||
);
|
||||
return `{${entryStrings.join(', ')}}`;
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
const result: Json = {};
|
||||
for (const { key, value } of this.entries) {
|
||||
const keyString = displayStringForDuckDBValue(key);
|
||||
result[keyString] = jsonFromDuckDBValue(value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
42
ts/pkgs/duckdb-data-values/src/DuckDBTimeTZValue.ts
Normal file
42
ts/pkgs/duckdb-data-values/src/DuckDBTimeTZValue.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import {
|
||||
getDuckDBTimeStringFromMicrosecondsInDay,
|
||||
getOffsetStringFromSeconds,
|
||||
} from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
/**
 * A DuckDB TIMETZ value: a time of day (`micros`, microseconds in day) plus a
 * UTC offset in seconds (`offset`).
 */
export class DuckDBTimeTZValue extends SpecialDuckDBValue {
  public readonly micros: bigint;
  public readonly offset: number;

  constructor(micros: bigint, offset: number) {
    super();
    this.micros = micros;
    this.offset = offset;
  }

  /** Render as the time string followed by the signed offset, e.g. `HH:MM:SS+HH`. */
  public toDuckDBString(): string {
    return `${getDuckDBTimeStringFromMicrosecondsInDay(
      this.micros,
    )}${getOffsetStringFromSeconds(this.offset)}`;
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }

  // Packed-bits layout: high 40 bits = time in micros, low 24 bits = offset.
  private static TimeBits = 40;
  private static OffsetBits = 24;
  private static MaxOffset = 16 * 60 * 60 - 1; // ±15:59:59 = 57599 seconds

  /**
   * Decode a TIMETZ from DuckDB's packed 64-bit representation.
   *
   * The offset is stored inverted (MaxOffset - offset) so that larger packed
   * values sort later — NOTE(review): inferred from the subtraction below;
   * confirm against DuckDB's TIMETZ encoding docs.
   */
  public static fromBits(bits: bigint): DuckDBTimeTZValue {
    const micros = BigInt.asUintN(
      DuckDBTimeTZValue.TimeBits,
      bits >> BigInt(DuckDBTimeTZValue.OffsetBits),
    );
    const offset =
      DuckDBTimeTZValue.MaxOffset -
      Number(BigInt.asUintN(DuckDBTimeTZValue.OffsetBits, bits));
    return new DuckDBTimeTZValue(micros, offset);
  }
}
|
||||
20
ts/pkgs/duckdb-data-values/src/DuckDBTimeValue.ts
Normal file
20
ts/pkgs/duckdb-data-values/src/DuckDBTimeValue.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { getDuckDBTimeStringFromMicrosecondsInDay } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimeValue extends SpecialDuckDBValue {
|
||||
public readonly microseconds: bigint;
|
||||
|
||||
constructor(microseconds: bigint) {
|
||||
super();
|
||||
this.microseconds = microseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBTimeStringFromMicrosecondsInDay(this.microseconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
import { getDuckDBTimestampStringFromMicroseconds } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimestampMicrosecondsValue extends SpecialDuckDBValue {
|
||||
public readonly microseconds: bigint;
|
||||
|
||||
constructor(microseconds: bigint) {
|
||||
super();
|
||||
this.microseconds = microseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBTimestampStringFromMicroseconds(this.microseconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
|
||||
export type DuckDBTimestamp = DuckDBTimestampMicrosecondsValue;
|
||||
@@ -0,0 +1,20 @@
|
||||
import { getDuckDBTimestampStringFromMilliseconds } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimestampMillisecondsValue extends SpecialDuckDBValue {
|
||||
public readonly milliseconds: bigint;
|
||||
|
||||
constructor(milliseconds: bigint) {
|
||||
super();
|
||||
this.milliseconds = milliseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBTimestampStringFromMilliseconds(this.milliseconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
import { getDuckDBTimestampStringFromNanoseconds } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimestampNanosecondsValue extends SpecialDuckDBValue {
|
||||
public readonly nanoseconds: bigint;
|
||||
|
||||
constructor(nanoseconds: bigint) {
|
||||
super();
|
||||
this.nanoseconds = nanoseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBTimestampStringFromNanoseconds(this.nanoseconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
import { getDuckDBTimestampStringFromSeconds } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimestampSecondsValue extends SpecialDuckDBValue {
|
||||
public readonly seconds: bigint;
|
||||
|
||||
constructor(seconds: bigint) {
|
||||
super();
|
||||
this.seconds = seconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBTimestampStringFromSeconds(this.seconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
24
ts/pkgs/duckdb-data-values/src/DuckDBTimestampTZValue.ts
Normal file
24
ts/pkgs/duckdb-data-values/src/DuckDBTimestampTZValue.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { getDuckDBTimestampStringFromMicroseconds } from './conversion/dateTimeStringConversion.js';
|
||||
import { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimestampTZValue extends SpecialDuckDBValue {
|
||||
public readonly microseconds: bigint;
|
||||
|
||||
constructor(microseconds: bigint) {
|
||||
super();
|
||||
this.microseconds = microseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(toStringOptions?: DuckDBToStringOptions): string {
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
this.microseconds,
|
||||
toStringOptions?.timezoneOffsetInMinutes || 0,
|
||||
);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
3
ts/pkgs/duckdb-data-values/src/DuckDBToStringOptions.ts
Normal file
3
ts/pkgs/duckdb-data-values/src/DuckDBToStringOptions.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
/** Options controlling how DuckDB values are rendered to strings. */
export interface DuckDBToStringOptions {
  // Minutes east of UTC to apply when rendering timezone-aware values.
  timezoneOffsetInMinutes?: number;
}
|
||||
48
ts/pkgs/duckdb-data-values/src/DuckDBUUIDValue.ts
Normal file
48
ts/pkgs/duckdb-data-values/src/DuckDBUUIDValue.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { hexFromBlob } from './conversion/hexFromBlob.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
/** A DuckDB UUID value, stored as 16 big-endian bytes. */
export class DuckDBUUIDValue extends SpecialDuckDBValue {
  public readonly bytes: Uint8Array;

  constructor(bytes: Uint8Array) {
    super();
    this.bytes = bytes;
  }

  /**
   * Render in canonical 8-4-4-4-12 hex form.
   * @throws Error if the byte array is not exactly 16 bytes long.
   */
  public toDuckDBString(): string {
    if (this.bytes.length !== 16) {
      throw new Error('Invalid UUID bytes length');
    }

    // Insert dashes to format the UUID
    return `${hexFromBlob(this.bytes, 0, 4)}-${hexFromBlob(this.bytes, 4, 6)}-${hexFromBlob(this.bytes, 6, 8)}-${hexFromBlob(this.bytes, 8, 10)}-${hexFromBlob(this.bytes, 10, 16)}`;
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }

  /**
   * Create a DuckDBUUIDValue value from a HUGEINT as stored by DuckDB.
   *
   * UUID values are stored with their MSB flipped so their numeric ordering matches their string ordering.
   */
  public static fromStoredHugeint(hugeint: bigint): DuckDBUUIDValue {
    // Flip the MSB and truncate to 128 bits to extract the represented unsigned 128-bit value.
    const uint128 =
      (hugeint ^ 0x80000000000000000000000000000000n) &
      0xffffffffffffffffffffffffffffffffn;
    return DuckDBUUIDValue.fromUint128(uint128);
  }

  /** Create a DuckDBUUIDValue value from an unsigned 128-bit integer in a JS BigInt. */
  public static fromUint128(uint128: bigint): DuckDBUUIDValue {
    const bytes = new Uint8Array(16);
    const dv = new DataView(bytes.buffer);
    // Write the unsigned 128-bit integer to the buffer in big endian format.
    dv.setBigUint64(0, BigInt.asUintN(64, uint128 >> BigInt(64)), false);
    dv.setBigUint64(8, BigInt.asUintN(64, uint128), false);
    return new DuckDBUUIDValue(bytes);
  }
}
|
||||
9
ts/pkgs/duckdb-data-values/src/DuckDBValue.ts
Normal file
9
ts/pkgs/duckdb-data-values/src/DuckDBValue.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
/**
 * Any value DuckDB can produce: a JS primitive for the simple types, or a
 * SpecialDuckDBValue subclass for types without a direct JS analogue.
 */
export type DuckDBValue =
  | null
  | boolean
  | number
  | string
  | bigint // TODO: Should types requiring bigint be SpecialDBValues?
  | SpecialDuckDBValue;
||||
7
ts/pkgs/duckdb-data-values/src/Json.ts
Normal file
7
ts/pkgs/duckdb-data-values/src/Json.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
/** A value representable in JSON: primitives, arrays, and string-keyed objects. */
export type Json =
  | null
  | boolean
  | number
  | string
  | Json[]
  | { [key: string]: Json };
|
||||
15
ts/pkgs/duckdb-data-values/src/SpecialDuckDBValue.ts
Normal file
15
ts/pkgs/duckdb-data-values/src/SpecialDuckDBValue.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
|
||||
import { Json } from './Json.js';
|
||||
|
||||
/**
 * Base class for DuckDB value types that have no direct JS primitive
 * representation. Subclasses supply DuckDB-style string and JSON conversions.
 */
export abstract class SpecialDuckDBValue {
  // The presence of this function can be used to identify SpecialDuckDBValue objects.
  public abstract toDuckDBString(
    toStringOptions?: DuckDBToStringOptions,
  ): string;

  /** Default string conversion delegates to the DuckDB-style rendering. */
  public toString(): string {
    return this.toDuckDBString();
  }

  /** Convert to a plain JSON-representable value. */
  public abstract toJson(): Json;
}
|
||||
@@ -0,0 +1,264 @@
|
||||
// Time-unit constants shared by the date/time string conversions below.
// Number-typed constants are used where values fit safely in a JS Number;
// BigInt constants are used for microsecond arithmetic on 64-bit counts.
const DAYS_IN_400_YEARS = 146097; // (((365 * 4 + 1) * 25) - 1) * 4 + 1
const MILLISECONDS_PER_DAY_NUM = 86400000; // 1000 * 60 * 60 * 24

const MICROSECONDS_PER_SECOND = BigInt(1000000);
const MICROSECONDS_PER_MILLISECOND = BigInt(1000);
const NANOSECONDS_PER_MICROSECOND = BigInt(1000);
const SECONDS_PER_MINUTE = BigInt(60);
const MINUTES_PER_HOUR = BigInt(60);
const MICROSECONDS_PER_DAY = BigInt(86400000000); // 24 * 60 * 60 * 1000000

// Sentinel microsecond counts DuckDB uses for -infinity/infinity timestamps.
const NEGATIVE_INFINITY_TIMESTAMP = BigInt('-9223372036854775807'); // -(2^63-1)
const POSITIVE_INFINITY_TIMESTAMP = BigInt('9223372036854775807'); // 2^63-1
|
||||
|
||||
export function getDuckDBDateStringFromYearMonthDay(
|
||||
year: number,
|
||||
month: number,
|
||||
dayOfMonth: number,
|
||||
): string {
|
||||
const yearStr = String(Math.abs(year)).padStart(4, '0');
|
||||
const monthStr = String(month).padStart(2, '0');
|
||||
const dayOfMonthStr = String(dayOfMonth).padStart(2, '0');
|
||||
return `${yearStr}-${monthStr}-${dayOfMonthStr}${year < 0 ? ' (BC)' : ''}`;
|
||||
}
|
||||
|
||||
/**
 * Convert a day count (as stored by DuckDB DATE) to a `YYYY-MM-DD` string,
 * handling dates outside the range JS Date can represent directly.
 */
export function getDuckDBDateStringFromDays(days: number): string {
  const absDays = Math.abs(days);
  const sign = days < 0 ? -1 : 1;
  // 400 years is the shortest interval with a fixed number of days. (Leap years and different length months can result
  // in shorter intervals having different number of days.) By separating the number of 400 year intervals from the
  // interval covered by the remaining days, we can guarantee that the date resulting from shifting the epoch by the
  // remaining interval is within the valid range of the JS Date object. This allows us to use JS Date to calculate the
  // year, month, and day of month for the date represented by the remaining interval, thus accounting for leap years
  // and different length months. We can then safely add back the years from the 400 year intervals, because the month
  // and day of month won't change when a date is shifted by a whole number of such intervals.
  const num400YearIntervals = Math.floor(absDays / DAYS_IN_400_YEARS);
  const yearsFrom400YearIntervals = sign * num400YearIntervals * 400;
  const absDaysFromRemainingInterval = absDays % DAYS_IN_400_YEARS;
  const millisecondsFromRemainingInterval =
    sign * absDaysFromRemainingInterval * MILLISECONDS_PER_DAY_NUM;
  const date = new Date(millisecondsFromRemainingInterval);
  let year = yearsFrom400YearIntervals + date.getUTCFullYear();
  if (year < 0) {
    year--; // correct for non-existence of year zero
  }
  const month = date.getUTCMonth() + 1; // getUTCMonth returns zero-indexed month, but we want a one-index month for display
  const dayOfMonth = date.getUTCDate(); // getUTCDate returns one-indexed day-of-month
  return getDuckDBDateStringFromYearMonthDay(year, month, dayOfMonth);
}
|
||||
|
||||
export function getTimezoneOffsetString(
|
||||
timezoneOffsetInMinutes?: number,
|
||||
): string | undefined {
|
||||
if (timezoneOffsetInMinutes === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
const negative = timezoneOffsetInMinutes < 0;
|
||||
const positiveMinutes = Math.abs(timezoneOffsetInMinutes);
|
||||
const minutesPart = positiveMinutes % 60;
|
||||
const hoursPart = Math.floor(positiveMinutes / 60);
|
||||
const minutesStr =
|
||||
minutesPart !== 0 ? String(minutesPart).padStart(2, '0') : '';
|
||||
const hoursStr = String(hoursPart).padStart(2, '0');
|
||||
return `${negative ? '-' : '+'}${hoursStr}${minutesStr ? `:${minutesStr}` : ''}`;
|
||||
}
|
||||
|
||||
export function getAbsoluteOffsetStringFromParts(
|
||||
hoursPart: number,
|
||||
minutesPart: number,
|
||||
secondsPart: number,
|
||||
): string {
|
||||
const hoursStr = String(hoursPart).padStart(2, '0');
|
||||
const minutesStr =
|
||||
minutesPart !== 0 || secondsPart !== 0
|
||||
? String(minutesPart).padStart(2, '0')
|
||||
: '';
|
||||
const secondsStr =
|
||||
secondsPart !== 0 ? String(secondsPart).padStart(2, '0') : '';
|
||||
let result = hoursStr;
|
||||
if (minutesStr) {
|
||||
result += `:${minutesStr}`;
|
||||
if (secondsStr) {
|
||||
result += `:${secondsStr}`;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function getOffsetStringFromAbsoluteSeconds(
|
||||
absoluteOffsetInSeconds: number,
|
||||
): string {
|
||||
const secondsPart = absoluteOffsetInSeconds % 60;
|
||||
const minutes = Math.floor(absoluteOffsetInSeconds / 60);
|
||||
const minutesPart = minutes % 60;
|
||||
const hoursPart = Math.floor(minutes / 60);
|
||||
return getAbsoluteOffsetStringFromParts(hoursPart, minutesPart, secondsPart);
|
||||
}
|
||||
|
||||
export function getOffsetStringFromSeconds(offsetInSeconds: number): string {
|
||||
const negative = offsetInSeconds < 0;
|
||||
const absoluteOffsetInSeconds = negative ? -offsetInSeconds : offsetInSeconds;
|
||||
const absoluteString = getOffsetStringFromAbsoluteSeconds(
|
||||
absoluteOffsetInSeconds,
|
||||
);
|
||||
return `${negative ? '-' : '+'}${absoluteString}`;
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromParts(
|
||||
hoursPart: bigint,
|
||||
minutesPart: bigint,
|
||||
secondsPart: bigint,
|
||||
microsecondsPart: bigint,
|
||||
): string {
|
||||
const hoursStr = String(hoursPart).padStart(2, '0');
|
||||
const minutesStr = String(minutesPart).padStart(2, '0');
|
||||
const secondsStr = String(secondsPart).padStart(2, '0');
|
||||
const microsecondsStr = String(microsecondsPart)
|
||||
.padStart(6, '0')
|
||||
.replace(/0+$/, '');
|
||||
return `${hoursStr}:${minutesStr}:${secondsStr}${
|
||||
microsecondsStr.length > 0 ? `.${microsecondsStr}` : ''
|
||||
}`;
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromPositiveMicroseconds(
|
||||
positiveMicroseconds: bigint,
|
||||
): string {
|
||||
const microsecondsPart = positiveMicroseconds % MICROSECONDS_PER_SECOND;
|
||||
const seconds = positiveMicroseconds / MICROSECONDS_PER_SECOND;
|
||||
const secondsPart = seconds % SECONDS_PER_MINUTE;
|
||||
const minutes = seconds / SECONDS_PER_MINUTE;
|
||||
const minutesPart = minutes % MINUTES_PER_HOUR;
|
||||
const hoursPart = minutes / MINUTES_PER_HOUR;
|
||||
return getDuckDBTimeStringFromParts(
|
||||
hoursPart,
|
||||
minutesPart,
|
||||
secondsPart,
|
||||
microsecondsPart,
|
||||
);
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromMicrosecondsInDay(
|
||||
microsecondsInDay: bigint,
|
||||
): string {
|
||||
const positiveMicroseconds =
|
||||
microsecondsInDay < 0
|
||||
? microsecondsInDay + MICROSECONDS_PER_DAY
|
||||
: microsecondsInDay;
|
||||
return getDuckDBTimeStringFromPositiveMicroseconds(positiveMicroseconds);
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromMicroseconds(
|
||||
microseconds: bigint,
|
||||
): string {
|
||||
const negative = microseconds < 0;
|
||||
const positiveMicroseconds = negative ? -microseconds : microseconds;
|
||||
const positiveString =
|
||||
getDuckDBTimeStringFromPositiveMicroseconds(positiveMicroseconds);
|
||||
return negative ? `-${positiveString}` : positiveString;
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromDaysAndMicroseconds(
|
||||
days: bigint,
|
||||
microsecondsInDay: bigint,
|
||||
timezonePart?: string,
|
||||
): string {
|
||||
// This conversion of BigInt to Number is safe, because the largest absolute value that `days` can has is 106751991,
|
||||
// which fits without loss of precision in a JS Number. (106751991 = (2^63-1) / MICROSECONDS_PER_DAY)
|
||||
const dateStr = getDuckDBDateStringFromDays(Number(days));
|
||||
const timeStr = getDuckDBTimeStringFromMicrosecondsInDay(microsecondsInDay);
|
||||
return `${dateStr} ${timeStr}${timezonePart ?? ''}`;
|
||||
}
|
||||
|
||||
/**
 * Formats a TIMESTAMP value (microseconds since epoch) as a DuckDB timestamp
 * string, optionally shifted by a timezone offset (in minutes) and suffixed
 * with that offset's rendering.
 */
export function getDuckDBTimestampStringFromMicroseconds(
  microseconds: bigint,
  timezoneOffsetInMinutes?: number,
): string {
  // Note that -infinity and infinity are only representable in TIMESTAMP (and TIMESTAMPTZ), not the other timestamp
  // variants. This is by-design and matches DuckDB.
  if (microseconds === NEGATIVE_INFINITY_TIMESTAMP) {
    return '-infinity';
  }
  if (microseconds === POSITIVE_INFINITY_TIMESTAMP) {
    return 'infinity';
  }
  // Apply the timezone offset (minutes -> microseconds) before splitting into days.
  const offsetMicroseconds =
    timezoneOffsetInMinutes !== undefined
      ? microseconds +
        BigInt(timezoneOffsetInMinutes) *
          MICROSECONDS_PER_SECOND *
          SECONDS_PER_MINUTE
      : microseconds;
  // BigInt '/' and '%' truncate toward zero; the correction below turns the
  // pair into floor division so the time-of-day remainder is non-negative.
  let days = offsetMicroseconds / MICROSECONDS_PER_DAY;
  let microsecondsPart = offsetMicroseconds % MICROSECONDS_PER_DAY;
  if (microsecondsPart < 0) {
    days--;
    microsecondsPart += MICROSECONDS_PER_DAY;
  }
  return getDuckDBTimestampStringFromDaysAndMicroseconds(
    days,
    microsecondsPart,
    getTimezoneOffsetString(timezoneOffsetInMinutes),
  );
}
|
||||
|
||||
export function getDuckDBTimestampStringFromSeconds(seconds: bigint): string {
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
seconds * MICROSECONDS_PER_SECOND,
|
||||
);
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromMilliseconds(
|
||||
milliseconds: bigint,
|
||||
): string {
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
milliseconds * MICROSECONDS_PER_MILLISECOND,
|
||||
);
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromNanoseconds(
|
||||
nanoseconds: bigint,
|
||||
): string {
|
||||
// Note that this division causes loss of precision. This matches the behavior of the DuckDB. It's important that this
|
||||
// precision loss happen before the negative correction in getTimestampStringFromMicroseconds, otherwise off-by-one
|
||||
// errors can occur.
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
nanoseconds / NANOSECONDS_PER_MICROSECOND,
|
||||
);
|
||||
}
|
||||
|
||||
// Assumes baseUnit can be pluralized by adding an 's'.
|
||||
function numberAndUnit(value: number, baseUnit: string): string {
|
||||
return `${value} ${baseUnit}${value !== 1 ? 's' : ''}`;
|
||||
}
|
||||
|
||||
export function getDuckDBIntervalString(
|
||||
months: number,
|
||||
days: number,
|
||||
microseconds: bigint,
|
||||
): string {
|
||||
const parts: string[] = [];
|
||||
if (months !== 0) {
|
||||
const sign = months < 0 ? -1 : 1;
|
||||
const absMonths = Math.abs(months);
|
||||
const absYears = Math.floor(absMonths / 12);
|
||||
const years = sign * absYears;
|
||||
const extraMonths = sign * (absMonths - absYears * 12);
|
||||
if (years !== 0) {
|
||||
parts.push(numberAndUnit(years, 'year'));
|
||||
}
|
||||
if (extraMonths !== 0) {
|
||||
parts.push(numberAndUnit(extraMonths, 'month'));
|
||||
}
|
||||
}
|
||||
if (days !== 0) {
|
||||
parts.push(numberAndUnit(days, 'day'));
|
||||
}
|
||||
if (microseconds !== BigInt(0)) {
|
||||
parts.push(getDuckDBTimeStringFromMicroseconds(microseconds));
|
||||
}
|
||||
if (parts.length > 0) {
|
||||
return parts.join(' ');
|
||||
}
|
||||
return '00:00:00';
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
import { DuckDBValue } from '../DuckDBValue.js';
|
||||
|
||||
export function displayStringForDuckDBValue(value: DuckDBValue): string {
|
||||
if (value == null) {
|
||||
return 'NULL';
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
return `'${value.replace(`'`, `''`)}'`;
|
||||
}
|
||||
return String(value);
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
/**
|
||||
* Returns the JS bigint value represented by the byte array a VARINT in DuckDB's internal format.
|
||||
*
|
||||
* DuckDB stores VARINTs as an array of bytes consisting of a three-byte header followed by a variable number of bytes
|
||||
* (at least one). The header specifies the number of bytes after the header, and whether the number is positive or
|
||||
* negative. The bytes after the header specify the absolute value of the number, in big endian format.
|
||||
*
|
||||
* The sign of the number is determined by the MSB of the header, which is 1 for positive and 0 for negative. Negative
|
||||
* numbers also have all bytes of both the header and value inverted. (For negative numbers, the MSB is 0 after this
|
||||
* inversion. Put another way: the MSB of the header is always 1, but it's inverted for negative numbers.)
|
||||
*/
|
||||
export function getVarIntFromBytes(bytes: Uint8Array): bigint {
|
||||
const firstByte = bytes[0];
|
||||
const positive = (firstByte & 0x80) > 0;
|
||||
const uint64Mask = positive ? 0n : 0xffffffffffffffffn;
|
||||
const uint8Mask = positive ? 0 : 0xff;
|
||||
const dv = new DataView(
|
||||
bytes.buffer,
|
||||
bytes.byteOffset + 3,
|
||||
bytes.byteLength - 3,
|
||||
);
|
||||
const lastUint64Offset = dv.byteLength - 8;
|
||||
let offset = 0;
|
||||
let result = 0n;
|
||||
while (offset <= lastUint64Offset) {
|
||||
result = (result << 64n) | (dv.getBigUint64(offset) ^ uint64Mask);
|
||||
offset += 8;
|
||||
}
|
||||
while (offset < dv.byteLength) {
|
||||
result = (result << 8n) | BigInt(dv.getUint8(offset) ^ uint8Mask);
|
||||
offset += 1;
|
||||
}
|
||||
return positive ? result : -result;
|
||||
}
|
||||
20
ts/pkgs/duckdb-data-values/src/conversion/hexFromBlob.ts
Normal file
20
ts/pkgs/duckdb-data-values/src/conversion/hexFromBlob.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
export function hexFromBlob(
|
||||
blob: Uint8Array,
|
||||
start: number | undefined,
|
||||
end: number | undefined,
|
||||
): string {
|
||||
if (start === undefined) {
|
||||
start = 0;
|
||||
}
|
||||
if (end === undefined) {
|
||||
end = blob.length;
|
||||
}
|
||||
let hex = '';
|
||||
|
||||
for (let i = start; i < end; i++) {
|
||||
const byte = blob[i];
|
||||
// Ensure each byte is 2 hex characters
|
||||
hex += (byte < 16 ? '0' : '') + byte.toString(16);
|
||||
}
|
||||
return hex;
|
||||
}
|
||||
@@ -0,0 +1,16 @@
|
||||
import { DuckDBValue } from '../DuckDBValue.js';
|
||||
import { Json } from '../Json.js';
|
||||
import { SpecialDuckDBValue } from '../SpecialDuckDBValue.js';
|
||||
|
||||
export function jsonFromDuckDBValue(value: DuckDBValue): Json {
|
||||
if (value === null) {
|
||||
return null;
|
||||
}
|
||||
if (typeof value === 'bigint') {
|
||||
return String(value);
|
||||
}
|
||||
if (value instanceof SpecialDuckDBValue) {
|
||||
return value.toJson();
|
||||
}
|
||||
return value;
|
||||
}
|
||||
17
ts/pkgs/duckdb-data-values/src/conversion/stringFromBlob.ts
Normal file
17
ts/pkgs/duckdb-data-values/src/conversion/stringFromBlob.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
/** Matches BLOB-to-VARCHAR conversion behavior of DuckDB. */
|
||||
export function stringFromBlob(bytes: Uint8Array): string {
|
||||
let result = '';
|
||||
for (const byte of bytes) {
|
||||
if (
|
||||
byte <= 0x1f ||
|
||||
byte === 0x22 /* single quote */ ||
|
||||
byte === 0x27 /* double quote */ ||
|
||||
byte >= 0x7f
|
||||
) {
|
||||
result += `\\x${byte.toString(16).toUpperCase().padStart(2, '0')}`;
|
||||
} else {
|
||||
result += String.fromCharCode(byte);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
129
ts/pkgs/duckdb-data-values/src/conversion/stringFromDecimal.ts
Normal file
129
ts/pkgs/duckdb-data-values/src/conversion/stringFromDecimal.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
/**
|
||||
* Decimal string formatting.
|
||||
*
|
||||
* Supports a subset of the functionality of `BigInt.prototype.toLocaleString` for locale-specific formatting.
|
||||
*/
|
||||
|
||||
/**
 * Locale formatting options for DuckDBDecimalValue.
 *
 * This is a subset of the options available for `BigInt.prototype.toLocaleString`.
 */
export interface DuckDBDecimalFormatOptions {
  // Whether to use grouping separators (e.g. thousands separators).
  useGrouping?: boolean;
  // Minimum number of fractional digits; shorter fractions are zero-padded.
  minimumFractionDigits?: number;
  // Maximum number of fractional digits; longer fractions are truncated.
  maximumFractionDigits?: number;
}
|
||||
|
||||
/** A locale (or ordered list of locale fallbacks) plus formatting options. */
export interface LocaleOptions {
  // BCP 47 language tag(s), as accepted by Intl.NumberFormat.
  locales?: string | string[];
  options?: DuckDBDecimalFormatOptions;
}
|
||||
|
||||
/*
|
||||
* Get the decimal separator for a given locale.
|
||||
* Somewhat expensive, so use getCachedDecimalSeparator if you need to call this multiple times.
|
||||
*/
|
||||
|
||||
function getDecimalSeparator(locales?: string | string[]): string {
|
||||
const decimalSeparator =
|
||||
new Intl.NumberFormat(locales, { useGrouping: false })
|
||||
.formatToParts(0.1)
|
||||
.find((part) => part.type === 'decimal')?.value ?? '.';
|
||||
return decimalSeparator;
|
||||
}
|
||||
|
||||
/*
|
||||
* Get the decimal separator for a given locale, and cache the result.
|
||||
*/
|
||||
const cachedDecimalSeparators: { [localeKey: string]: string } = {};
|
||||
|
||||
function getCachedDecimalSeparator(locales?: string | string[]): string {
|
||||
const cacheKey = JSON.stringify(locales);
|
||||
if (cacheKey in cachedDecimalSeparators) {
|
||||
return cachedDecimalSeparators[cacheKey];
|
||||
}
|
||||
const decimalSeparator = getDecimalSeparator(locales);
|
||||
cachedDecimalSeparators[cacheKey] = decimalSeparator;
|
||||
return decimalSeparator;
|
||||
}
|
||||
|
||||
// Helper function to format whole part of a decimal value.
|
||||
// Note that we explicitly omit 'minimumFractionDigits' and 'maximumFractionDigits' from the options
|
||||
// passed to toLocaleString, because they are only relevant for the fractional part of the number, and
|
||||
// would result in formatting the whole part as a real number, which we don't want.
|
||||
function formatWholePart(
|
||||
localeOptions: LocaleOptions | undefined,
|
||||
val: bigint,
|
||||
): string {
|
||||
if (localeOptions) {
|
||||
const {
|
||||
minimumFractionDigits: _minFD,
|
||||
maximumFractionDigits: _maxFD,
|
||||
...restOptions
|
||||
} = localeOptions.options ?? {};
|
||||
return val.toLocaleString(localeOptions?.locales, restOptions);
|
||||
}
|
||||
return String(val);
|
||||
}
|
||||
|
||||
// Format the fractional part of a decimal value
|
||||
// Note that we must handle minimumFractionDigits and maximumFractionDigits ourselves, and that
|
||||
// we don't apply `useGrouping` because that only applies to the whole part of the number.
|
||||
function formatFractionalPart(
|
||||
localeOptions: LocaleOptions | undefined,
|
||||
val: bigint,
|
||||
scale: number,
|
||||
): string {
|
||||
const fractionalPartStr = String(val).padStart(scale, '0');
|
||||
if (!localeOptions) {
|
||||
return fractionalPartStr;
|
||||
}
|
||||
const minFracDigits = localeOptions?.options?.minimumFractionDigits ?? 0;
|
||||
const maxFracDigits = localeOptions?.options?.maximumFractionDigits ?? 20;
|
||||
|
||||
return fractionalPartStr.padEnd(minFracDigits, '0').slice(0, maxFracDigits);
|
||||
}
|
||||
|
||||
/**
 * Convert a scaled decimal value to a string, possibly using locale-specific formatting.
 *
 * @param scaledValue the decimal's digits as an integer, scaled up by 10^scale
 * @param scale the number of fractional digits encoded in scaledValue
 * @param localeOptions optional locale and formatting options; when omitted,
 *   plain '.'-separated output is produced
 */
export function stringFromDecimal(
  scaledValue: bigint,
  scale: number,
  localeOptions?: LocaleOptions,
): string {
  // Decimal values are represented as integers that have been scaled up by a power of ten. The `scale` property of
  // the type is the exponent of the scale factor. For a scale greater than zero, we need to separate out the
  // fractional part by reversing this scaling.
  if (scale > 0) {
    const scaleFactor = BigInt(10) ** BigInt(scale);
    // Work with the absolute value and re-attach the sign as a prefix, so a
    // value like -0.5 still renders its '-' even though its whole part is 0.
    const absScaledValue = scaledValue < 0 ? -scaledValue : scaledValue;

    const prefix = scaledValue < 0 ? '-' : '';

    const wholePartNum = absScaledValue / scaleFactor;
    const wholePartStr = formatWholePart(localeOptions, wholePartNum);

    const fractionalPartNum = absScaledValue % scaleFactor;
    const fractionalPartStr = formatFractionalPart(
      localeOptions,
      fractionalPartNum,
      scale,
    );

    // Locale-aware decimal separator (cached per locale); plain '.' otherwise.
    const decimalSeparatorStr = localeOptions
      ? getCachedDecimalSeparator(localeOptions.locales)
      : '.';

    return `${prefix}${wholePartStr}${decimalSeparatorStr}${fractionalPartStr}`;
  }
  // For a scale of zero, there is no fractional part, so a direct string conversion works.
  if (localeOptions) {
    return scaledValue.toLocaleString(
      localeOptions?.locales,
      localeOptions?.options as BigIntToLocaleStringOptions | undefined,
    );
  }
  return String(scaledValue);
}
|
||||
25
ts/pkgs/duckdb-data-values/src/index.ts
Normal file
25
ts/pkgs/duckdb-data-values/src/index.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
export { getVarIntFromBytes } from './conversion/getVarIntFromBytes.js';
|
||||
export { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
|
||||
export { DuckDBArrayValue } from './DuckDBArrayValue.js';
|
||||
export { DuckDBBitValue } from './DuckDBBitValue.js';
|
||||
export { DuckDBBlobValue } from './DuckDBBlobValue.js';
|
||||
export { DuckDBDateValue } from './DuckDBDateValue.js';
|
||||
export { DuckDBDecimalValue } from './DuckDBDecimalValue.js';
|
||||
export { DuckDBIntervalValue } from './DuckDBIntervalValue.js';
|
||||
export { DuckDBListValue } from './DuckDBListValue.js';
|
||||
export { DuckDBMapEntry } from './DuckDBMapEntry.js';
|
||||
export { DuckDBMapValue } from './DuckDBMapValue.js';
|
||||
export { DuckDBStructEntry } from './DuckDBStructEntry.js';
|
||||
export { DuckDBStructValue } from './DuckDBStructValue.js';
|
||||
export { DuckDBTimestampMicrosecondsValue } from './DuckDBTimestampMicrosecondsValue.js';
|
||||
export { DuckDBTimestampMillisecondsValue } from './DuckDBTimestampMillisecondsValue.js';
|
||||
export { DuckDBTimestampNanosecondsValue } from './DuckDBTimestampNanosecondsValue.js';
|
||||
export { DuckDBTimestampSecondsValue } from './DuckDBTimestampSecondsValue.js';
|
||||
export { DuckDBTimestampTZValue } from './DuckDBTimestampTZValue.js';
|
||||
export { DuckDBTimeTZValue } from './DuckDBTimeTZValue.js';
|
||||
export { DuckDBTimeValue } from './DuckDBTimeValue.js';
|
||||
export { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
|
||||
export { DuckDBUUIDValue } from './DuckDBUUIDValue.js';
|
||||
export { DuckDBValue } from './DuckDBValue.js';
|
||||
export { Json } from './Json.js';
|
||||
export { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
6
ts/pkgs/duckdb-data-values/src/tsconfig.json
Normal file
6
ts/pkgs/duckdb-data-values/src/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"extends": "../../../tsconfig.library.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "../out"
|
||||
}
|
||||
}
|
||||
49
ts/pkgs/duckdb-data-values/test/DuckDBArrayValue.test.ts
Normal file
49
ts/pkgs/duckdb-data-values/test/DuckDBArrayValue.test.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBArrayValue } from '../src/DuckDBArrayValue';
|
||||
import { DuckDBMapValue } from '../src/DuckDBMapValue';
|
||||
|
||||
// Tests for DuckDBArrayValue: string rendering (SQL-literal style, with NULL
// and quoted strings) and JSON conversion, including nested values.
suite('DuckDBArrayValue', () => {
  test('should render an empty array to the correct string', () => {
    expect(new DuckDBArrayValue([]).toString()).toStrictEqual('[]');
  });
  test('should render a single element array to the correct string', () => {
    expect(new DuckDBArrayValue([123]).toString()).toStrictEqual('[123]');
  });
  test('should render a multi-element array to the correct string', () => {
    expect(
      new DuckDBArrayValue(['abc', null, true, '']).toString(),
    ).toStrictEqual(`['abc', NULL, true, '']`);
  });
  test('should render an array with nested arrays to the correct string', () => {
    expect(
      new DuckDBArrayValue([
        new DuckDBArrayValue([]),
        null,
        new DuckDBArrayValue([123, null, 'xyz']),
      ]).toString(),
    ).toStrictEqual(`[[], NULL, [123, NULL, 'xyz']]`);
  });
  test('toJson array with basic values', () => {
    expect(new DuckDBArrayValue([123, 'abc', null]).toJson()).toStrictEqual([
      123,
      'abc',
      null,
    ]);
  });
  test('toJson array with complex values', () => {
    // Map keys are rendered in quoted form ("'foo'") in the JSON object.
    expect(
      new DuckDBArrayValue([
        new DuckDBMapValue([
          { key: 'foo', value: 123 },
          { key: 'bar', value: 'abc' },
        ]),
        new DuckDBArrayValue([123, null, 'xyz']),
        null,
      ]).toJson(),
    ).toStrictEqual([
      { "'foo'": 123, "'bar'": 'abc' },
      [123, null, 'xyz'],
      null,
    ]);
  });
});
|
||||
33
ts/pkgs/duckdb-data-values/test/DuckDBBitValue.test.ts
Normal file
33
ts/pkgs/duckdb-data-values/test/DuckDBBitValue.test.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBBitValue } from '../src/DuckDBBitValue';
|
||||
|
||||
// Tests for DuckDBBitValue: rendering a packed bit string and round-tripping
// via fromString. From the cases below, the first byte appears to encode how
// many leading padding bits to skip (0x03 -> drop 3 bits) — confirm against
// the DuckDBBitValue implementation.
suite('DuckDBBitValue', () => {
  test('should render an empty byte array to the correct string', () => {
    expect(new DuckDBBitValue(new Uint8Array([])).toString()).toStrictEqual('');
  });
  test('should render bit string with no padding to the correct string', () => {
    expect(
      new DuckDBBitValue(new Uint8Array([0x00, 0xf1, 0xe2, 0xd3])).toString(),
    ).toStrictEqual('111100011110001011010011');
  });
  test('should render bit string with padding to the correct string', () => {
    expect(
      new DuckDBBitValue(new Uint8Array([0x03, 0xf1, 0xe2, 0xd3])).toString(),
    ).toStrictEqual('100011110001011010011');
  });
  test('should round-trip bit string with no padding', () => {
    expect(
      DuckDBBitValue.fromString('111100011110001011010011').toString(),
    ).toStrictEqual('111100011110001011010011');
  });
  test('should round-trip bit string with padding', () => {
    expect(
      DuckDBBitValue.fromString('100011110001011010011').toString(),
    ).toStrictEqual('100011110001011010011');
  });
  test('toJson', () => {
    expect(
      DuckDBBitValue.fromString('100011110001011010011').toJson(),
    ).toStrictEqual('100011110001011010011');
  });
});
|
||||
92
ts/pkgs/duckdb-data-values/test/DuckDBBlobValue.test.ts
Normal file
92
ts/pkgs/duckdb-data-values/test/DuckDBBlobValue.test.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBBlobValue } from '../src/DuckDBBlobValue';
|
||||
|
||||
// Tests for DuckDBBlobValue: BLOB-to-string rendering (printable ASCII passes
// through; control chars, both quotes, DEL, and high bytes render as \xHH
// escapes) and JSON conversion.
suite('DuckDBBlobValue', () => {
  test('should render an empty byte array to the correct string', () => {
    expect(new DuckDBBlobValue(new Uint8Array([])).toString()).toStrictEqual(
      '',
    );
  });
  test('should render a byte array to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([0x41, 0x42, 0x43, 0x31, 0x32, 0x33]),
      ).toString(),
    ).toStrictEqual('ABC123');
  });
  test('should render a byte array containing single-digit non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
          0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F',
    );
  });
  test('should render a byte array containing double-digit non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a,
          0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F',
    );
  });
  test('should render a byte array containing min printables (including single and double quotes) to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a,
          0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
        ]),
      ).toString(),
    ).toStrictEqual(' !\\x22#$%&\\x27()*+,-./');
  });
  // NOTE(review): 0x7f is DEL, not backspace (0x08) — the test name below
  // looks mislabeled; the assertion itself is consistent with DEL escaping.
  test('should render a byte array containing max printables (including backspace) to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a,
          0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
        ]),
      ).toString(),
    ).toStrictEqual('pqrstuvwxyz{|}~\\x7F');
  });
  test('should render a byte array containing high non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a,
          0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F',
    );
  });
  test('should render a byte array containing max non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa,
          0xfb, 0xfc, 0xfd, 0xfe, 0xff,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\xF0\\xF1\\xF2\\xF3\\xF4\\xF5\\xF6\\xF7\\xF8\\xF9\\xFA\\xFB\\xFC\\xFD\\xFE\\xFF',
    );
  });
  test('toJson', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([0x41, 0x42, 0x43, 0x31, 0x32, 0x33]),
      ).toJson(),
    ).toStrictEqual('ABC123');
  });
});
|
||||
18
ts/pkgs/duckdb-data-values/test/DuckDBDateValue.test.ts
Normal file
18
ts/pkgs/duckdb-data-values/test/DuckDBDateValue.test.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBDateValue } from '../src/DuckDBDateValue';
|
||||
|
||||
// Tests for DuckDBDateValue: days-since-epoch rendering, including the
// extreme representable dates (±(2^31 - 2) days) and BC-era formatting.
suite('DuckDBDateValue', () => {
  test('should render a normal date value to the correct string', () => {
    expect(new DuckDBDateValue(19643).toString()).toStrictEqual('2023-10-13');
  });
  test('should render the max date value to the correct string', () => {
    expect(new DuckDBDateValue(2 ** 31 - 2).toString()).toStrictEqual(
      '5881580-07-10',
    );
  });
  test('should render the min date value to the correct string', () => {
    expect(new DuckDBDateValue(-(2 ** 31) + 2).toString()).toStrictEqual(
      '5877642-06-25 (BC)',
    );
  });
});
|
||||
150
ts/pkgs/duckdb-data-values/test/DuckDBDecimalValue.test.ts
Normal file
150
ts/pkgs/duckdb-data-values/test/DuckDBDecimalValue.test.ts
Normal file
@@ -0,0 +1,150 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBDecimalValue } from '../src/DuckDBDecimalValue';
|
||||
|
||||
// Tests for DuckDBDecimalValue: plain toString rendering across scales and
// sign/magnitude extremes, plus locale-aware toLocaleString formatting.
// The locale cases assume a Node runtime with full ICU data (default in
// modern Node) — TODO confirm in CI.
// (Grammar slips in some test names, e.g. "a the maximum scale", are left
// untouched here since test names are string literals.)
suite('DuckDBDecimalValue', () => {
  test('should render a scaled value of zero with a scale of zero to the correct string', () => {
    expect(new DuckDBDecimalValue(0n, 0).toString()).toStrictEqual('0');
  });
  test('should render a small positive scaled value with a scale of zero to the correct string', () => {
    expect(new DuckDBDecimalValue(7n, 0).toString()).toStrictEqual('7');
  });
  test('should render a small negative scaled value with a scale of zero to the correct string', () => {
    expect(new DuckDBDecimalValue(-7n, 0).toString()).toStrictEqual('-7');
  });
  test('should render a large positive scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(987654321098765432109876543210n, 0).toString(),
    ).toStrictEqual('987654321098765432109876543210');
  });
  test('should render a large negative scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(-987654321098765432109876543210n, 0).toString(),
    ).toStrictEqual('-987654321098765432109876543210');
  });
  test('should render the maximum positive scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        99999999999999999999999999999999999999n,
        0,
      ).toString(),
    ).toStrictEqual('99999999999999999999999999999999999999');
  });
  test('should render the maximum negative scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        -99999999999999999999999999999999999999n,
        0,
      ).toString(),
    ).toStrictEqual('-99999999999999999999999999999999999999');
  });

  test('should render a scaled value of zero with a non-zero scale to the correct string', () => {
    expect(new DuckDBDecimalValue(0n, 3).toString()).toStrictEqual('0.000');
  });
  test('should render a small positive scaled value with a non-zero scale to the correct string', () => {
    expect(new DuckDBDecimalValue(12345n, 3).toString()).toStrictEqual(
      '12.345',
    );
  });
  test('should render a small negative scaled value with a non-zero scale to the correct string', () => {
    expect(new DuckDBDecimalValue(-12345n, 3).toString()).toStrictEqual(
      '-12.345',
    );
  });
  test('should render a large positive scaled value with a non-zero scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(987654321098765432109876543210n, 10).toString(),
    ).toStrictEqual('98765432109876543210.9876543210');
  });
  test('should render a large negative scaled value with a non-zero scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(-987654321098765432109876543210n, 10).toString(),
    ).toStrictEqual('-98765432109876543210.9876543210');
  });
  test('should render leading and trailing zeros in the fractional part of value greater than one correctly', () => {
    expect(new DuckDBDecimalValue(120034500n, 7).toString()).toStrictEqual(
      '12.0034500',
    );
  });
  test('should render leading and trailing zeros in the fractional part of value less than negative one correctly', () => {
    expect(new DuckDBDecimalValue(-120034500n, 7).toString()).toStrictEqual(
      '-12.0034500',
    );
  });
  test('should render leading and trailing zeros in the fractional part of value between zero and one correctly', () => {
    expect(new DuckDBDecimalValue(34500n, 7).toString()).toStrictEqual(
      '0.0034500',
    );
  });
  test('should render leading and trailing zeros in the fractional part of value between zero and negative one correctly', () => {
    expect(new DuckDBDecimalValue(-34500n, 7).toString()).toStrictEqual(
      '-0.0034500',
    );
  });
  test('should render a small positive scaled value with a the maximum scale to the correct string', () => {
    expect(new DuckDBDecimalValue(1n, 38).toString()).toStrictEqual(
      '0.00000000000000000000000000000000000001',
    );
  });
  test('should render a small negative scaled value with a the maximum scale to the correct string', () => {
    expect(new DuckDBDecimalValue(-1n, 38).toString()).toStrictEqual(
      '-0.00000000000000000000000000000000000001',
    );
  });
  test('should render the maximum positive scaled value with a the maximum scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        99999999999999999999999999999999999999n,
        38,
      ).toString(),
    ).toStrictEqual('0.99999999999999999999999999999999999999');
  });
  test('should render the maximum negative scaled value with a the maximum scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        -99999999999999999999999999999999999999n,
        38,
      ).toString(),
    ).toStrictEqual('-0.99999999999999999999999999999999999999');
  });

  test('should render a locale string with grouping by default', () => {
    expect(
      new DuckDBDecimalValue(9876543210n, 0).toLocaleString(),
    ).toStrictEqual('9,876,543,210');
  });

  test('should render a European locale with . for grouping', () => {
    expect(
      new DuckDBDecimalValue(9876543210n, 0).toLocaleString('de-DE'),
    ).toStrictEqual('9.876.543.210');
  });

  test('should render a locale string with a specified minimum fraction digits', () => {
    expect(
      new DuckDBDecimalValue(12345n, 3).toLocaleString(undefined, {
        minimumFractionDigits: 5,
      }),
    ).toStrictEqual('12.34500');
  });

  test('should render a locale string with a specified maximum fraction digits', () => {
    expect(
      new DuckDBDecimalValue(12345n, 3).toLocaleString(undefined, {
        maximumFractionDigits: 1,
      }),
    ).toStrictEqual('12.3');
  });

  test('should render a decimal with a large whole part and fractional part in a European locale with the correct grouping and decimal', () => {
    expect(
      new DuckDBDecimalValue(98765432109876543210n, 10).toLocaleString(
        'de-DE',
        {
          useGrouping: true,
          maximumFractionDigits: 5,
        },
      ),
    ).toStrictEqual('9.876.543.210,98765');
  });
});
||||
219
ts/pkgs/duckdb-data-values/test/DuckDBIntervalValue.test.ts
Normal file
219
ts/pkgs/duckdb-data-values/test/DuckDBIntervalValue.test.ts
Normal file
@@ -0,0 +1,219 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBIntervalValue } from '../src/DuckDBIntervalValue';
|
||||
|
||||
// Microsecond conversion factors for building interval test inputs.
const MICROS_IN_SEC = 1000000n;
const MICROS_IN_MIN = 60n * MICROS_IN_SEC;
const MICROS_IN_HR = 60n * MICROS_IN_MIN;
// 2^31 - 1; presumably the bound for the interval's 32-bit fields — confirm
// against the DuckDBIntervalValue implementation.
const MAX_INT32 = 2n ** 31n - 1n;
|
||||
suite('DuckDBIntervalValue', () => {
|
||||
test('should render an empty interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, 0, 0n).toString()).toStrictEqual(
|
||||
'00:00:00',
|
||||
);
|
||||
});
|
||||
|
||||
test('should render a one month interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(1, 0, 0n).toString()).toStrictEqual(
|
||||
'1 month',
|
||||
);
|
||||
});
|
||||
test('should render a negative one month interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(-1, 0, 0n).toString()).toStrictEqual(
|
||||
'-1 months',
|
||||
);
|
||||
});
|
||||
test('should render a two month interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(2, 0, 0n).toString()).toStrictEqual(
|
||||
'2 months',
|
||||
);
|
||||
});
|
||||
test('should render a negative two month interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(-2, 0, 0n).toString()).toStrictEqual(
|
||||
'-2 months',
|
||||
);
|
||||
});
|
||||
test('should render a one year interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(12, 0, 0n).toString()).toStrictEqual(
|
||||
'1 year',
|
||||
);
|
||||
});
|
||||
test('should render a negative one year interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(-12, 0, 0n).toString()).toStrictEqual(
|
||||
'-1 years',
|
||||
);
|
||||
});
|
||||
test('should render a two year interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(24, 0, 0n).toString()).toStrictEqual(
|
||||
'2 years',
|
||||
);
|
||||
});
|
||||
test('should render a negative two year interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(-24, 0, 0n).toString()).toStrictEqual(
|
||||
'-2 years',
|
||||
);
|
||||
});
|
||||
test('should render a two year, three month interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(24 + 3, 0, 0n).toString()).toStrictEqual(
|
||||
'2 years 3 months',
|
||||
);
|
||||
});
|
||||
test('should render a negative two year, three month interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(-(24 + 3), 0, 0n).toString()).toStrictEqual(
|
||||
'-2 years -3 months',
|
||||
);
|
||||
});
|
||||
|
||||
test('should render a one day interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, 1, 0n).toString()).toStrictEqual('1 day');
|
||||
});
|
||||
test('should render a negative one day interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, -1, 0n).toString()).toStrictEqual(
|
||||
'-1 days',
|
||||
);
|
||||
});
|
||||
test('should render a two day interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, 2, 0n).toString()).toStrictEqual(
|
||||
'2 days',
|
||||
);
|
||||
});
|
||||
test('should render a negative two day interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, -2, 0n).toString()).toStrictEqual(
|
||||
'-2 days',
|
||||
);
|
||||
});
|
||||
test('should render a 30 day interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, 30, 0n).toString()).toStrictEqual(
|
||||
'30 days',
|
||||
);
|
||||
});
|
||||
test('should render a 365 day interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, 365, 0n).toString()).toStrictEqual(
|
||||
'365 days',
|
||||
);
|
||||
});
|
||||
|
||||
test('should render a one microsecond interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, 0, 1n).toString()).toStrictEqual(
|
||||
'00:00:00.000001',
|
||||
);
|
||||
});
|
||||
test('should render a negative one microsecond interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, 0, -1n).toString()).toStrictEqual(
|
||||
'-00:00:00.000001',
|
||||
);
|
||||
});
|
||||
test('should render a large microsecond interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, 0, 987654n).toString()).toStrictEqual(
|
||||
'00:00:00.987654',
|
||||
);
|
||||
});
|
||||
test('should render a large negative microsecond interval to the correct string', () => {
|
||||
expect(new DuckDBIntervalValue(0, 0, -987654n).toString()).toStrictEqual(
|
||||
'-00:00:00.987654',
|
||||
);
|
||||
});
|
||||
test('should render a one second interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, MICROS_IN_SEC).toString(),
|
||||
).toStrictEqual('00:00:01');
|
||||
});
|
||||
test('should render a negative one second interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, -MICROS_IN_SEC).toString(),
|
||||
).toStrictEqual('-00:00:01');
|
||||
});
|
||||
test('should render a 59 second interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, 59n * MICROS_IN_SEC).toString(),
|
||||
).toStrictEqual('00:00:59');
|
||||
});
|
||||
test('should render a -59 second interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, -59n * MICROS_IN_SEC).toString(),
|
||||
).toStrictEqual('-00:00:59');
|
||||
});
|
||||
test('should render a one minute interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, MICROS_IN_MIN).toString(),
|
||||
).toStrictEqual('00:01:00');
|
||||
});
|
||||
test('should render a negative one minute interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, -MICROS_IN_MIN).toString(),
|
||||
).toStrictEqual('-00:01:00');
|
||||
});
|
||||
test('should render a 59 minute interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, 59n * MICROS_IN_MIN).toString(),
|
||||
).toStrictEqual('00:59:00');
|
||||
});
|
||||
test('should render a -59 minute interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, -59n * MICROS_IN_MIN).toString(),
|
||||
).toStrictEqual('-00:59:00');
|
||||
});
|
||||
test('should render a one hour interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, MICROS_IN_HR).toString(),
|
||||
).toStrictEqual('01:00:00');
|
||||
});
|
||||
test('should render a negative one hour interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, -MICROS_IN_HR).toString(),
|
||||
).toStrictEqual('-01:00:00');
|
||||
});
|
||||
test('should render a 24 hour interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, 24n * MICROS_IN_HR).toString(),
|
||||
).toStrictEqual('24:00:00');
|
||||
});
|
||||
test('should render a -24 hour interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, -24n * MICROS_IN_HR).toString(),
|
||||
).toStrictEqual('-24:00:00');
|
||||
});
|
||||
test('should render a very large interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, MAX_INT32 * MICROS_IN_HR).toString(),
|
||||
).toStrictEqual('2147483647:00:00');
|
||||
});
|
||||
test('should render a very large negative interval to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, -MAX_INT32 * MICROS_IN_HR).toString(),
|
||||
).toStrictEqual('-2147483647:00:00');
|
||||
});
|
||||
test('should render a very large interval with microseconds to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(0, 0, MAX_INT32 * MICROS_IN_HR + 1n).toString(),
|
||||
).toStrictEqual('2147483647:00:00.000001');
|
||||
});
|
||||
test('should render a very large negative interval with microseconds to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(
|
||||
0,
|
||||
0,
|
||||
-(MAX_INT32 * MICROS_IN_HR + 1n),
|
||||
).toString(),
|
||||
).toStrictEqual('-2147483647:00:00.000001');
|
||||
});
|
||||
|
||||
test('should render a interval with multiple parts to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(
|
||||
24 + 3,
|
||||
5,
|
||||
7n * MICROS_IN_HR + 11n * MICROS_IN_MIN + 13n * MICROS_IN_SEC + 17n,
|
||||
).toString(),
|
||||
).toStrictEqual('2 years 3 months 5 days 07:11:13.000017');
|
||||
});
|
||||
test('should render a negative interval with multiple parts to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBIntervalValue(
|
||||
-(24 + 3),
|
||||
-5,
|
||||
-(7n * MICROS_IN_HR + 11n * MICROS_IN_MIN + 13n * MICROS_IN_SEC + 17n),
|
||||
).toString(),
|
||||
).toStrictEqual('-2 years -3 months -5 days -07:11:13.000017');
|
||||
});
|
||||
});
|
||||
45
ts/pkgs/duckdb-data-values/test/DuckDBListValue.test.ts
Normal file
45
ts/pkgs/duckdb-data-values/test/DuckDBListValue.test.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBMapValue } from '../src';
|
||||
import { DuckDBListValue } from '../src/DuckDBListValue';
|
||||
|
||||
suite('DuckDBListValue', () => {
|
||||
test('should render an empty list to the correct string', () => {
|
||||
expect(new DuckDBListValue([]).toString()).toStrictEqual('[]');
|
||||
});
|
||||
test('should render a single element list to the correct string', () => {
|
||||
expect(new DuckDBListValue([123]).toString()).toStrictEqual('[123]');
|
||||
});
|
||||
test('should render a multi-element list to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBListValue(['abc', null, true, '']).toString(),
|
||||
).toStrictEqual(`['abc', NULL, true, '']`);
|
||||
});
|
||||
test('should render a list with nested lists to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBListValue([
|
||||
new DuckDBListValue([]),
|
||||
null,
|
||||
new DuckDBListValue([123, null, 'xyz']),
|
||||
]).toString(),
|
||||
).toStrictEqual(`[[], NULL, [123, NULL, 'xyz']]`);
|
||||
});
|
||||
test('toJson with complex values', () => {
|
||||
expect(
|
||||
new DuckDBListValue([
|
||||
new DuckDBMapValue([
|
||||
{ key: 'foo', value: 123 },
|
||||
{ key: 'bar', value: 'abc' },
|
||||
]),
|
||||
null,
|
||||
new DuckDBMapValue([
|
||||
{ key: 'foo', value: null },
|
||||
{ key: 'bar', value: 'xyz' },
|
||||
]),
|
||||
]).toJson(),
|
||||
).toStrictEqual([
|
||||
{ "'foo'": 123, "'bar'": 'abc' },
|
||||
null,
|
||||
{ "'foo'": null, "'bar'": 'xyz' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
77
ts/pkgs/duckdb-data-values/test/DuckDBMapValue.test.ts
Normal file
77
ts/pkgs/duckdb-data-values/test/DuckDBMapValue.test.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBListValue } from '../src/DuckDBListValue';
|
||||
import { DuckDBMapValue } from '../src/DuckDBMapValue';
|
||||
|
||||
suite('DuckDBMapValue', () => {
|
||||
test('should render an empty map to the correct string', () => {
|
||||
expect(new DuckDBMapValue([]).toString()).toStrictEqual('{}');
|
||||
});
|
||||
test('should render a single-entry map to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBMapValue([{ key: 'x', value: 1 }]).toString(),
|
||||
).toStrictEqual(`{'x': 1}`);
|
||||
});
|
||||
test('should render a multi-entry map to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBMapValue([
|
||||
{ key: 1, value: 42.001 },
|
||||
{ key: 5, value: -32.1 },
|
||||
{ key: 3, value: null },
|
||||
]).toString(),
|
||||
).toStrictEqual(`{1: 42.001, 5: -32.1, 3: NULL}`);
|
||||
});
|
||||
test('should render a multi-entry map with complex key types to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBMapValue([
|
||||
{
|
||||
key: new DuckDBListValue(['a', 'b']),
|
||||
value: new DuckDBListValue([1.1, 2.2]),
|
||||
},
|
||||
{
|
||||
key: new DuckDBListValue(['c', 'd']),
|
||||
value: new DuckDBListValue([3.3, 4.4]),
|
||||
},
|
||||
]).toString(),
|
||||
).toStrictEqual(`{['a', 'b']: [1.1, 2.2], ['c', 'd']: [3.3, 4.4]}`);
|
||||
});
|
||||
test('should render a map with nested maps to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBMapValue([
|
||||
{ key: new DuckDBMapValue([]), value: new DuckDBMapValue([]) },
|
||||
{
|
||||
key: new DuckDBMapValue([{ key: 'key1', value: 'value1' }]),
|
||||
value: new DuckDBMapValue([
|
||||
{ key: 1, value: 42.001 },
|
||||
{ key: 5, value: -32.1 },
|
||||
{ key: 3, value: null },
|
||||
]),
|
||||
},
|
||||
]).toString(),
|
||||
).toStrictEqual(
|
||||
`{{}: {}, {'key1': 'value1'}: {1: 42.001, 5: -32.1, 3: NULL}}`,
|
||||
);
|
||||
});
|
||||
test('toJson basics', () => {
|
||||
expect(
|
||||
new DuckDBMapValue([
|
||||
{ key: 'a', value: 1 },
|
||||
{ key: 'b', value: 2 },
|
||||
{ key: 'c', value: 3 },
|
||||
]).toJson(),
|
||||
).toStrictEqual({ "'a'": 1, "'b'": 2, "'c'": 3 });
|
||||
});
|
||||
test('toJson with complex keys and values', () => {
|
||||
expect(
|
||||
new DuckDBMapValue([
|
||||
{
|
||||
key: new DuckDBListValue(['a', 'b']),
|
||||
value: new DuckDBListValue([1.1, 2.2]),
|
||||
},
|
||||
{
|
||||
key: new DuckDBListValue(['c', 'd']),
|
||||
value: new DuckDBListValue([3.3, 4.4]),
|
||||
},
|
||||
]).toJson(),
|
||||
).toStrictEqual({ "['a', 'b']": [1.1, 2.2], "['c', 'd']": [3.3, 4.4] });
|
||||
});
|
||||
});
|
||||
110
ts/pkgs/duckdb-data-values/test/DuckDBStructValue.test.ts
Normal file
110
ts/pkgs/duckdb-data-values/test/DuckDBStructValue.test.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBMapValue } from '../src/DuckDBMapValue';
|
||||
import { DuckDBStructValue } from '../src/DuckDBStructValue';
|
||||
|
||||
suite('DuckDBStructValue', () => {
|
||||
test('should render an empty struct to the correct string', () => {
|
||||
expect(new DuckDBStructValue([]).toString()).toStrictEqual('{}');
|
||||
});
|
||||
test('should render a single-entry struct to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBStructValue([{ key: 'x', value: 1 }]).toString(),
|
||||
).toStrictEqual(`{'x': 1}`);
|
||||
});
|
||||
test('should render a multi-entry struct to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBStructValue([
|
||||
{ key: 'x', value: 1 },
|
||||
{ key: 'y', value: 2 },
|
||||
{ key: 'z', value: 3 },
|
||||
]).toString(),
|
||||
).toStrictEqual(`{'x': 1, 'y': 2, 'z': 3}`);
|
||||
});
|
||||
test('should render a multi-entry struct with different value types to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBStructValue([
|
||||
{ key: 'key1', value: 'string' },
|
||||
{ key: 'key2', value: 1 },
|
||||
{ key: 'key3', value: 12.345 },
|
||||
{ key: 'key0', value: null },
|
||||
]).toString(),
|
||||
).toStrictEqual(
|
||||
`{'key1': 'string', 'key2': 1, 'key3': 12.345, 'key0': NULL}`,
|
||||
);
|
||||
});
|
||||
test('should render a multi-entry struct with empty keys to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBStructValue([
|
||||
{ key: '', value: 2 },
|
||||
{ key: '', value: 1 },
|
||||
{ key: '', value: 3 },
|
||||
]).toString(),
|
||||
).toStrictEqual(`{'': 2, '': 1, '': 3}`);
|
||||
});
|
||||
test('should render a struct with nested structs to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBStructValue([
|
||||
{ key: 'empty_struct', value: new DuckDBStructValue([]) },
|
||||
{
|
||||
key: 'struct',
|
||||
value: new DuckDBStructValue([
|
||||
{ key: 'key1', value: 'string' },
|
||||
{ key: 'key2', value: 1 },
|
||||
{ key: 'key3', value: 12.345 },
|
||||
]),
|
||||
},
|
||||
]).toString(),
|
||||
).toStrictEqual(
|
||||
`{'empty_struct': {}, 'struct': {'key1': 'string', 'key2': 1, 'key3': 12.345}}`,
|
||||
);
|
||||
});
|
||||
test('toJson with simple keys and values', () => {
|
||||
expect(
|
||||
new DuckDBStructValue([
|
||||
{ key: 'x', value: 1 },
|
||||
{ key: 'y', value: 2 },
|
||||
{ key: 'z', value: 3 },
|
||||
]).toJson(),
|
||||
).toStrictEqual({ "'x'": 1, "'y'": 2, "'z'": 3 });
|
||||
});
|
||||
test('toJson with nested struct values', () => {
|
||||
expect(
|
||||
new DuckDBStructValue([
|
||||
{ key: 'empty_struct', value: new DuckDBStructValue([]) },
|
||||
{
|
||||
key: 'struct',
|
||||
value: new DuckDBStructValue([
|
||||
{ key: 'key1', value: 'string' },
|
||||
{ key: 'key2', value: 1 },
|
||||
{ key: 'key3', value: 12.345 },
|
||||
]),
|
||||
},
|
||||
]).toJson(),
|
||||
).toStrictEqual({
|
||||
"'empty_struct'": {},
|
||||
"'struct'": { "'key1'": 'string', "'key2'": 1, "'key3'": 12.345 },
|
||||
});
|
||||
});
|
||||
test('toJson with nested complex values', () => {
|
||||
expect(
|
||||
new DuckDBStructValue([
|
||||
{ key: 'empty_struct', value: new DuckDBStructValue([]) },
|
||||
{
|
||||
key: 'struct',
|
||||
value: new DuckDBStructValue([
|
||||
{
|
||||
key: 'key1',
|
||||
value: new DuckDBMapValue([
|
||||
{ key: 'foo', value: null },
|
||||
{ key: 'bar', value: 'xyz' },
|
||||
]),
|
||||
},
|
||||
]),
|
||||
},
|
||||
]).toJson(),
|
||||
).toStrictEqual({
|
||||
"'empty_struct'": {},
|
||||
"'struct'": { "'key1'": { "'foo'": null, "'bar'": 'xyz' } },
|
||||
});
|
||||
});
|
||||
});
|
||||
60
ts/pkgs/duckdb-data-values/test/DuckDBTimeTZValue.test.ts
Normal file
60
ts/pkgs/duckdb-data-values/test/DuckDBTimeTZValue.test.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBTimeTZValue } from '../src/DuckDBTimeTZValue';
|
||||
|
||||
suite('DuckDBTimeTZValue', () => {
|
||||
test('should render a normal time value with a positive offset to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimeTZValue(
|
||||
((12n * 60n + 34n) * 60n + 56n) * 1000000n + 789012n,
|
||||
(13 * 60 + 24) * 60 + 57,
|
||||
).toString(),
|
||||
).toStrictEqual('12:34:56.789012+13:24:57');
|
||||
});
|
||||
test('should render a normal time value with millisecond precision with an offset in minutes to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimeTZValue(
|
||||
((12n * 60n + 34n) * 60n + 56n) * 1000000n + 789000n,
|
||||
(13 * 60 + 24) * 60,
|
||||
).toString(),
|
||||
).toStrictEqual('12:34:56.789+13:24');
|
||||
});
|
||||
test('should render a normal time value with second precision with an offset in hours to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimeTZValue(
|
||||
((12n * 60n + 34n) * 60n + 56n) * 1000000n,
|
||||
(13 * 60 + 0) * 60,
|
||||
).toString(),
|
||||
).toStrictEqual('12:34:56+13');
|
||||
});
|
||||
test('should render a zero time value with a zero offset to the correct string', () => {
|
||||
expect(new DuckDBTimeTZValue(0n, 0).toString()).toStrictEqual(
|
||||
'00:00:00+00',
|
||||
);
|
||||
});
|
||||
test('should render the max value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimeTZValue(
|
||||
((24n * 60n + 0n) * 60n + 0n) * 1000000n,
|
||||
-((15 * 60 + 59) * 60 + 59),
|
||||
).toString(),
|
||||
).toStrictEqual('24:00:00-15:59:59');
|
||||
});
|
||||
test('should render the min value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimeTZValue(0n, (15 * 60 + 59) * 60 + 59).toString(),
|
||||
).toStrictEqual('00:00:00+15:59:59');
|
||||
});
|
||||
test('should construct the correct value from bits', () => {
|
||||
expect(DuckDBTimeTZValue.fromBits(0n).toString()).toStrictEqual(
|
||||
'00:00:00+15:59:59',
|
||||
);
|
||||
});
|
||||
test('should construct the correct value from bits', () => {
|
||||
expect(
|
||||
DuckDBTimeTZValue.fromBits(
|
||||
(BigInt.asUintN(40, ((24n * 60n + 0n) * 60n + 0n) * 1000000n) << 24n) |
|
||||
BigInt.asUintN(24, (31n * 60n + 59n) * 60n + 58n),
|
||||
).toString(),
|
||||
).toStrictEqual('24:00:00-15:59:59');
|
||||
});
|
||||
});
|
||||
18
ts/pkgs/duckdb-data-values/test/DuckDBTimeValue.test.ts
Normal file
18
ts/pkgs/duckdb-data-values/test/DuckDBTimeValue.test.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBTimeValue } from '../src/DuckDBTimeValue';
|
||||
|
||||
suite('DuckDBTimeValue', () => {
|
||||
test('should render a normal time value to the correct string', () => {
|
||||
expect(new DuckDBTimeValue(45296000000n).toString()).toStrictEqual(
|
||||
'12:34:56',
|
||||
);
|
||||
});
|
||||
test('should render the max time value to the correct string', () => {
|
||||
expect(new DuckDBTimeValue(86399999999n).toString()).toStrictEqual(
|
||||
'23:59:59.999999',
|
||||
);
|
||||
});
|
||||
test('should render the min time value to the correct string', () => {
|
||||
expect(new DuckDBTimeValue(0n).toString()).toStrictEqual('00:00:00');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,55 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBTimestampMicrosecondsValue } from '../src/DuckDBTimestampMicrosecondsValue';
|
||||
|
||||
suite('DuckDBTimestampMicrosecondsValue', () => {
|
||||
test('should render a normal timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMicrosecondsValue(1612325106007800n).toString(),
|
||||
).toStrictEqual('2021-02-03 04:05:06.0078');
|
||||
});
|
||||
test('should render a zero timestamp value to the correct string', () => {
|
||||
expect(new DuckDBTimestampMicrosecondsValue(0n).toString()).toStrictEqual(
|
||||
'1970-01-01 00:00:00',
|
||||
);
|
||||
});
|
||||
test('should render a negative timestamp value to the correct string', () => {
|
||||
expect(new DuckDBTimestampMicrosecondsValue(-7n).toString()).toStrictEqual(
|
||||
'1969-12-31 23:59:59.999993',
|
||||
);
|
||||
});
|
||||
test('should render a large positive timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMicrosecondsValue(2353318271999999000n).toString(),
|
||||
).toStrictEqual('76543-09-08 23:59:59.999');
|
||||
});
|
||||
test('should render a large negative (AD) timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMicrosecondsValue(-58261244276543211n).toString(),
|
||||
).toStrictEqual('0123-10-11 01:02:03.456789');
|
||||
});
|
||||
test('should render a large negative (BC) timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMicrosecondsValue(-65992661876543211n).toString(),
|
||||
).toStrictEqual('0123-10-11 (BC) 01:02:03.456789');
|
||||
});
|
||||
test('should render the max timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMicrosecondsValue(9223372036854775806n).toString(),
|
||||
).toStrictEqual('294247-01-10 04:00:54.775806');
|
||||
});
|
||||
test('should render the min timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMicrosecondsValue(-9223372022400000000n).toString(),
|
||||
).toStrictEqual('290309-12-22 (BC) 00:00:00');
|
||||
});
|
||||
test('should render the positive infinity timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMicrosecondsValue(9223372036854775807n).toString(),
|
||||
).toStrictEqual('infinity');
|
||||
});
|
||||
test('should render the negative infinity timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMicrosecondsValue(-9223372036854775807n).toString(),
|
||||
).toStrictEqual('-infinity');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,45 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBTimestampMillisecondsValue } from '../src/DuckDBTimestampMillisecondsValue';
|
||||
|
||||
suite('DuckDBTimestampMillisecondsValue', () => {
|
||||
test('should render a normal timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMillisecondsValue(1612325106007n).toString(),
|
||||
).toStrictEqual('2021-02-03 04:05:06.007');
|
||||
});
|
||||
test('should render a zero timestamp value to the correct string', () => {
|
||||
expect(new DuckDBTimestampMillisecondsValue(0n).toString()).toStrictEqual(
|
||||
'1970-01-01 00:00:00',
|
||||
);
|
||||
});
|
||||
test('should render a negative timestamp value to the correct string', () => {
|
||||
expect(new DuckDBTimestampMillisecondsValue(-7n).toString()).toStrictEqual(
|
||||
'1969-12-31 23:59:59.993',
|
||||
);
|
||||
});
|
||||
test('should render a large positive timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMillisecondsValue(2353318271999999n).toString(),
|
||||
).toStrictEqual('76543-09-08 23:59:59.999');
|
||||
});
|
||||
test('should render a large negative (AD) timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMillisecondsValue(-58261244276544n).toString(),
|
||||
).toStrictEqual('0123-10-11 01:02:03.456');
|
||||
});
|
||||
test('should render a large negative (BC) timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMillisecondsValue(-65992661876544n).toString(),
|
||||
).toStrictEqual('0123-10-11 (BC) 01:02:03.456');
|
||||
});
|
||||
test('should render the max timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMillisecondsValue(9223372036854775n).toString(),
|
||||
).toStrictEqual('294247-01-10 04:00:54.775');
|
||||
});
|
||||
test('should render the min timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampMillisecondsValue(-9223372022400000n).toString(),
|
||||
).toStrictEqual('290309-12-22 (BC) 00:00:00');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,40 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBTimestampNanosecondsValue } from '../src/DuckDBTimestampNanosecondsValue';
|
||||
|
||||
suite('DuckDBTimestampNanosecondsValue', () => {
|
||||
test('should render a normal timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampNanosecondsValue(1612325106007891000n).toString(),
|
||||
).toStrictEqual('2021-02-03 04:05:06.007891');
|
||||
});
|
||||
test('should render a zero timestamp value to the correct string', () => {
|
||||
expect(new DuckDBTimestampNanosecondsValue(0n).toString()).toStrictEqual(
|
||||
'1970-01-01 00:00:00',
|
||||
);
|
||||
});
|
||||
test('should render a negative timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampNanosecondsValue(-7000n).toString(),
|
||||
).toStrictEqual('1969-12-31 23:59:59.999993');
|
||||
});
|
||||
test('should render a large positive timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampNanosecondsValue(8857641599999123000n).toString(),
|
||||
).toStrictEqual('2250-09-08 23:59:59.999123');
|
||||
});
|
||||
test('should render a large negative timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampNanosecondsValue(-8495881076543211000n).toString(),
|
||||
).toStrictEqual('1700-10-11 01:02:03.456789');
|
||||
});
|
||||
test('should render the max timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampNanosecondsValue(9223372036854775806n).toString(),
|
||||
).toStrictEqual('2262-04-11 23:47:16.854775');
|
||||
});
|
||||
test('should render the min timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampNanosecondsValue(-9223372036854775806n).toString(),
|
||||
).toStrictEqual('1677-09-21 00:12:43.145225');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,45 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBTimestampSecondsValue } from '../src/DuckDBTimestampSecondsValue';
|
||||
|
||||
suite('DuckDBTimestampSecondsValue', () => {
|
||||
test('should render a normal timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampSecondsValue(1612325106n).toString(),
|
||||
).toStrictEqual('2021-02-03 04:05:06');
|
||||
});
|
||||
test('should render a zero timestamp value to the correct string', () => {
|
||||
expect(new DuckDBTimestampSecondsValue(0n).toString()).toStrictEqual(
|
||||
'1970-01-01 00:00:00',
|
||||
);
|
||||
});
|
||||
test('should render a negative timestamp value to the correct string', () => {
|
||||
expect(new DuckDBTimestampSecondsValue(-7n).toString()).toStrictEqual(
|
||||
'1969-12-31 23:59:53',
|
||||
);
|
||||
});
|
||||
test('should render a large positive timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampSecondsValue(2353318271999n).toString(),
|
||||
).toStrictEqual('76543-09-08 23:59:59');
|
||||
});
|
||||
test('should render a large negative (AD) timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampSecondsValue(-58261244277n).toString(),
|
||||
).toStrictEqual('0123-10-11 01:02:03');
|
||||
});
|
||||
test('should render a large negative (BC) timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampSecondsValue(-65992661877n).toString(),
|
||||
).toStrictEqual('0123-10-11 (BC) 01:02:03');
|
||||
});
|
||||
test('should render the max timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampSecondsValue(9223372036854n).toString(),
|
||||
).toStrictEqual('294247-01-10 04:00:54');
|
||||
});
|
||||
test('should render the min timestamp value to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampSecondsValue(-9223372022400n).toString(),
|
||||
).toStrictEqual('290309-12-22 (BC) 00:00:00');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,38 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBTimestampTZValue } from '../src/DuckDBTimestampTZValue';
|
||||
|
||||
suite('DuckDBTimestampTZValue', () => {
|
||||
test('should render a timestamp tz value with no timezone offset to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString(),
|
||||
).toStrictEqual('2021-02-03 04:05:06.0078+00'); // defaults to UTC
|
||||
});
|
||||
test('should render a timestamp tz value with a zero timezone offset to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
|
||||
timezoneOffsetInMinutes: 0,
|
||||
}),
|
||||
).toStrictEqual('2021-02-03 04:05:06.0078+00');
|
||||
});
|
||||
test('should render a timestamp tz value with a positive timezone offset to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
|
||||
timezoneOffsetInMinutes: 300,
|
||||
}),
|
||||
).toStrictEqual('2021-02-03 09:05:06.0078+05');
|
||||
});
|
||||
test('should render a timestamp tz value with a negative timezone offset to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
|
||||
timezoneOffsetInMinutes: -300,
|
||||
}),
|
||||
).toStrictEqual('2021-02-02 23:05:06.0078-05');
|
||||
});
|
||||
test('should render a timestamp tz value with a timezone offset containing minutes to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
|
||||
timezoneOffsetInMinutes: 330,
|
||||
}),
|
||||
).toStrictEqual('2021-02-03 09:35:06.0078+05:30');
|
||||
});
|
||||
});
|
||||
49
ts/pkgs/duckdb-data-values/test/DuckDBUUIDValue.test.ts
Normal file
49
ts/pkgs/duckdb-data-values/test/DuckDBUUIDValue.test.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBUUIDValue } from '../src/DuckDBUUIDValue';
|
||||
|
||||
suite('DuckDBUUIDValue', () => {
|
||||
test('should render all zero bytes to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBUUIDValue(
|
||||
new Uint8Array([
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
]),
|
||||
).toString(),
|
||||
).toStrictEqual('00000000-0000-0000-0000-000000000000');
|
||||
});
|
||||
test('should render all max bytes to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBUUIDValue(
|
||||
new Uint8Array([
|
||||
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
|
||||
0xff, 0xff, 0xff, 0xff, 0xff,
|
||||
]),
|
||||
).toString(),
|
||||
).toStrictEqual('ffffffff-ffff-ffff-ffff-ffffffffffff');
|
||||
});
|
||||
test('should render arbitrary bytes to the correct string', () => {
|
||||
expect(
|
||||
new DuckDBUUIDValue(
|
||||
new Uint8Array([
|
||||
0xf0, 0xe1, 0xd2, 0xc3, 0xb4, 0xa5, 0x96, 0x87, 0xfe, 0xdc, 0xba,
|
||||
0x98, 0x76, 0x54, 0x32, 0x10,
|
||||
]),
|
||||
).toString(),
|
||||
).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210');
|
||||
});
|
||||
test('should render a uint128 to the correct string', () => {
|
||||
expect(
|
||||
DuckDBUUIDValue.fromUint128(
|
||||
0xf0e1d2c3b4a59687fedcba9876543210n,
|
||||
).toString(),
|
||||
).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210');
|
||||
});
|
||||
test('should render a stored hugeint to the correct string', () => {
|
||||
expect(
|
||||
DuckDBUUIDValue.fromStoredHugeint(
|
||||
0x70e1d2c3b4a59687fedcba9876543210n, // note the flipped MSB
|
||||
).toString(),
|
||||
).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,61 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { getVarIntFromBytes } from '../../src/conversion/getVarIntFromBytes';
|
||||
|
||||
suite('getVarIntFromBytes', () => {
|
||||
test('should return correct value for varint representation of 0', () => {
|
||||
expect(
|
||||
getVarIntFromBytes(new Uint8Array([0x80, 0x00, 0x01, 0x00])),
|
||||
).toEqual(0n);
|
||||
});
|
||||
test('should return correct value for varint representation of 1', () => {
|
||||
expect(
|
||||
getVarIntFromBytes(new Uint8Array([0x80, 0x00, 0x01, 0x01])),
|
||||
).toEqual(1n);
|
||||
});
|
||||
test('should return correct value for varint representation of -1', () => {
|
||||
expect(
|
||||
getVarIntFromBytes(new Uint8Array([0x7f, 0xff, 0xfe, 0xfe])),
|
||||
).toEqual(-1n);
|
||||
});
|
||||
test('should return correct value for max varint', () => {
|
||||
// max VARINT = max IEEE double = 2^1023 * (1 + (1 − 2^−52)) ~= 1.7976931348623157 * 10^308
|
||||
// Note that the storage format supports much larger than this, but DuckDB specifies this max to support conversion to/from DOUBLE.
|
||||
expect(
|
||||
getVarIntFromBytes(
|
||||
// prettier-ignore
|
||||
new Uint8Array([0x80, 0x00, 0x80,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
]),
|
||||
),
|
||||
).toEqual(
|
||||
179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n,
|
||||
);
|
||||
});
|
||||
test('should return correct value for min varint', () => {
|
||||
// min VARINT = -max VARINT
|
||||
expect(
|
||||
getVarIntFromBytes(
|
||||
// prettier-ignore
|
||||
new Uint8Array([0x7F, 0xFF, 0x7F,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
]),
|
||||
),
|
||||
).toEqual(
|
||||
-179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n,
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,26 @@
|
||||
import { expect, suite, test } from 'vitest';
|
||||
import { DuckDBListValue } from '../../src';
|
||||
import { jsonFromDuckDBValue } from '../../src/conversion/jsonFromDuckDBValue';
|
||||
|
||||
suite('jsonFromDuckDBValue', () => {
|
||||
test('null', () => {
|
||||
expect(jsonFromDuckDBValue(null)).toBe(null);
|
||||
});
|
||||
test('boolean', () => {
|
||||
expect(jsonFromDuckDBValue(true)).toBe(true);
|
||||
});
|
||||
test('number', () => {
|
||||
expect(jsonFromDuckDBValue(42)).toBe(42);
|
||||
});
|
||||
test('bigint', () => {
|
||||
expect(jsonFromDuckDBValue(12345n)).toBe('12345');
|
||||
});
|
||||
test('string', () => {
|
||||
expect(jsonFromDuckDBValue('foo')).toBe('foo');
|
||||
});
|
||||
test('special', () => {
|
||||
expect(jsonFromDuckDBValue(new DuckDBListValue([1, 2, 3]))).toStrictEqual([
|
||||
1, 2, 3,
|
||||
]);
|
||||
});
|
||||
});
|
||||
6
ts/pkgs/duckdb-data-values/test/tsconfig.json
Normal file
6
ts/pkgs/duckdb-data-values/test/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
  "extends": "../../../tsconfig.test.json",
  "references": [
    { "path": "../src" }
  ]
}
|
||||
Reference in New Issue
Block a user