add duckdb-ui-client & other ts pkgs (#10)
* add duckdb-ui-client & other ts pkgs * workflow fixes * fix working dir * no sparse checkout; specify package.json path * path to pnpm-lock.yaml * add check & build test * workflow step descriptions * use comments & names * one more naming tweak
This commit is contained in:
23
ts/pkgs/duckdb-data-values/src/DuckDBArrayValue.ts
Normal file
23
ts/pkgs/duckdb-data-values/src/DuckDBArrayValue.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
|
||||
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
|
||||
import { DuckDBValue } from './DuckDBValue.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBArrayValue extends SpecialDuckDBValue {
|
||||
public readonly values: readonly DuckDBValue[];
|
||||
|
||||
constructor(values: readonly DuckDBValue[]) {
|
||||
super();
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
const valueStrings = this.values.map(displayStringForDuckDBValue);
|
||||
return `[${valueStrings.join(', ')}]`;
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.values.map(jsonFromDuckDBValue);
|
||||
}
|
||||
}
|
||||
123
ts/pkgs/duckdb-data-values/src/DuckDBBitValue.ts
Normal file
123
ts/pkgs/duckdb-data-values/src/DuckDBBitValue.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
/**
 * A DuckDB BIT value: a packed sequence of bits.
 *
 * Storage layout (see fromLengthAndPredicate): the first byte holds the count
 * of padding bits; the remaining bytes hold the bits MSB-first, with the
 * padding stored as 1-bits in the most significant bits of the second byte.
 */
export class DuckDBBitValue extends SpecialDuckDBValue {
  // Raw packed representation, including the leading padding-count byte.
  public readonly data: Uint8Array;

  constructor(data: Uint8Array) {
    super();
    this.data = data;
  }

  /** Number of unused (padding) bits preceding the logical bits. */
  public padding(): number {
    return this.data[0];
  }

  /** Number of logical bits in this value. */
  public get length(): number {
    return (this.data.length - 1) * 8 - this.padding();
  }

  /** Read the logical bit at `index` (0-based) as a boolean. */
  public getBool(index: number): boolean {
    const offset = index + this.padding();
    // +1 skips the padding-count byte.
    const dataIndex = Math.floor(offset / 8) + 1;
    // Bits are stored MSB-first within each byte.
    const byte = this.data[dataIndex] >> (7 - (offset % 8));
    return (byte & 1) !== 0;
  }

  /** Unpack all logical bits into an array of booleans. */
  public toBools(): boolean[] {
    const bools: boolean[] = [];
    const length = this.length;
    for (let i = 0; i < length; i++) {
      bools.push(this.getBool(i));
    }
    return bools;
  }

  /** Read the logical bit at `index` as 0 or 1. */
  public getBit(index: number): 0 | 1 {
    return this.getBool(index) ? 1 : 0;
  }

  /** Unpack all logical bits into an array of 0/1 numbers. */
  public toBits(): number[] {
    const bits: number[] = [];
    const length = this.length;
    for (let i = 0; i < length; i++) {
      bits.push(this.getBit(i));
    }
    return bits;
  }

  /** Render as DuckDB's textual form: a string of '0'/'1' characters. */
  public toDuckDBString(): string {
    const length = this.length;
    const chars = Array.from<string>({ length });
    for (let i = 0; i < length; i++) {
      chars[i] = this.getBool(i) ? '1' : '0';
    }
    return chars.join('');
  }

  /** JSON representation: the '0'/'1' display string. */
  public toJson(): Json {
    return this.toDuckDBString();
  }

  /** Build a BIT value from a string; characters equal to `on` become 1-bits. */
  public static fromString(str: string, on: string = '1'): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(
      str.length,
      (i) => str[i] === on,
    );
  }

  /** Build a BIT value from an array of numbers; entries equal to `on` become 1-bits. */
  public static fromBits(
    bits: readonly number[],
    on: number = 1,
  ): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(
      bits.length,
      (i) => bits[i] === on,
    );
  }

  /** Build a BIT value from an array of booleans. */
  public static fromBools(bools: readonly boolean[]): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(bools.length, (i) => bools[i]);
  }

  /**
   * Build a BIT value of `length` bits, where bit `i` is set iff
   * `predicate(i)` returns true. Packs bits MSB-first after the padding.
   */
  public static fromLengthAndPredicate(
    length: number,
    predicate: (index: number) => boolean,
  ): DuckDBBitValue {
    // +1 for the leading padding-count byte.
    const byteCount = Math.ceil(length / 8) + 1;
    const paddingBitCount = (8 - (length % 8)) % 8;

    const data = new Uint8Array(byteCount);
    let byteIndex = 0;

    // first byte contains count of padding bits
    data[byteIndex++] = paddingBitCount;

    let byte = 0;
    let byteBit = 0;

    // padding consists of 1s in MSB of second byte
    while (byteBit < paddingBitCount) {
      byte <<= 1;
      byte |= 1;
      byteBit++;
    }

    let bitIndex = 0;

    // Fill the remaining bytes, 8 bits at a time, MSB-first.
    while (byteIndex < byteCount) {
      while (byteBit < 8) {
        byte <<= 1;
        if (predicate(bitIndex++)) {
          byte |= 1;
        }
        byteBit++;
      }
      data[byteIndex++] = byte;
      byte = 0;
      byteBit = 0;
    }

    return new DuckDBBitValue(data);
  }
}
|
||||
20
ts/pkgs/duckdb-data-values/src/DuckDBBlobValue.ts
Normal file
20
ts/pkgs/duckdb-data-values/src/DuckDBBlobValue.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { stringFromBlob } from './conversion/stringFromBlob.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBBlobValue extends SpecialDuckDBValue {
|
||||
public readonly bytes: Uint8Array;
|
||||
|
||||
constructor(bytes: Uint8Array) {
|
||||
super();
|
||||
this.bytes = bytes;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return stringFromBlob(this.bytes);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
20
ts/pkgs/duckdb-data-values/src/DuckDBDateValue.ts
Normal file
20
ts/pkgs/duckdb-data-values/src/DuckDBDateValue.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { getDuckDBDateStringFromDays } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBDateValue extends SpecialDuckDBValue {
|
||||
public readonly days: number;
|
||||
|
||||
constructor(days: number) {
|
||||
super();
|
||||
this.days = days;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBDateStringFromDays(this.days);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
38
ts/pkgs/duckdb-data-values/src/DuckDBDecimalValue.ts
Normal file
38
ts/pkgs/duckdb-data-values/src/DuckDBDecimalValue.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import {
|
||||
DuckDBDecimalFormatOptions,
|
||||
stringFromDecimal,
|
||||
} from './conversion/stringFromDecimal.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBDecimalValue extends SpecialDuckDBValue {
|
||||
public readonly scaledValue: bigint;
|
||||
|
||||
public readonly scale: number;
|
||||
|
||||
constructor(scaledValue: bigint, scale: number) {
|
||||
super();
|
||||
this.scaledValue = scaledValue;
|
||||
this.scale = scale;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return stringFromDecimal(this.scaledValue, this.scale);
|
||||
}
|
||||
|
||||
/** Returns a string representation appropriate to the host environment's current locale. */
|
||||
|
||||
public toLocaleString(
|
||||
locales?: string | string[],
|
||||
options?: DuckDBDecimalFormatOptions,
|
||||
): string {
|
||||
return stringFromDecimal(this.scaledValue, this.scale, {
|
||||
locales,
|
||||
options,
|
||||
});
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
26
ts/pkgs/duckdb-data-values/src/DuckDBIntervalValue.ts
Normal file
26
ts/pkgs/duckdb-data-values/src/DuckDBIntervalValue.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import { getDuckDBIntervalString } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBIntervalValue extends SpecialDuckDBValue {
|
||||
public readonly months: number;
|
||||
|
||||
public readonly days: number;
|
||||
|
||||
public readonly microseconds: bigint;
|
||||
|
||||
constructor(months: number, days: number, microseconds: bigint) {
|
||||
super();
|
||||
this.months = months;
|
||||
this.days = days;
|
||||
this.microseconds = microseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBIntervalString(this.months, this.days, this.microseconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
23
ts/pkgs/duckdb-data-values/src/DuckDBListValue.ts
Normal file
23
ts/pkgs/duckdb-data-values/src/DuckDBListValue.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
|
||||
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
|
||||
import { DuckDBValue } from './DuckDBValue.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBListValue extends SpecialDuckDBValue {
|
||||
public readonly values: readonly DuckDBValue[];
|
||||
|
||||
constructor(values: readonly DuckDBValue[]) {
|
||||
super();
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
const valueStrings = this.values.map(displayStringForDuckDBValue);
|
||||
return `[${valueStrings.join(', ')}]`;
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.values.map(jsonFromDuckDBValue);
|
||||
}
|
||||
}
|
||||
6
ts/pkgs/duckdb-data-values/src/DuckDBMapEntry.ts
Normal file
6
ts/pkgs/duckdb-data-values/src/DuckDBMapEntry.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import { DuckDBValue } from './DuckDBValue.js';
|
||||
|
||||
/** One key/value pair within a DuckDB MAP value. */
export interface DuckDBMapEntry {
  /** The entry's key; MAP keys can be any DuckDB value. */
  readonly key: DuckDBValue;
  /** The value associated with the key. */
  readonly value: DuckDBValue;
}
|
||||
33
ts/pkgs/duckdb-data-values/src/DuckDBMapValue.ts
Normal file
33
ts/pkgs/duckdb-data-values/src/DuckDBMapValue.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
|
||||
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
|
||||
import { DuckDBMapEntry } from './DuckDBMapEntry.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBMapValue extends SpecialDuckDBValue {
|
||||
public readonly entries: readonly DuckDBMapEntry[];
|
||||
|
||||
constructor(entries: readonly DuckDBMapEntry[]) {
|
||||
super();
|
||||
this.entries = entries;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
const entryStrings = this.entries.map(
|
||||
({ key, value }) =>
|
||||
`${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue(
|
||||
value,
|
||||
)}`,
|
||||
);
|
||||
return `{${entryStrings.join(', ')}}`;
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
const result: Json = {};
|
||||
for (const { key, value } of this.entries) {
|
||||
const keyString = displayStringForDuckDBValue(key);
|
||||
result[keyString] = jsonFromDuckDBValue(value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
6
ts/pkgs/duckdb-data-values/src/DuckDBStructEntry.ts
Normal file
6
ts/pkgs/duckdb-data-values/src/DuckDBStructEntry.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import { DuckDBValue } from './DuckDBValue.js';
|
||||
|
||||
/** One named field within a DuckDB STRUCT value. */
export interface DuckDBStructEntry {
  /** The field's name; STRUCT keys are always strings. */
  readonly key: string;
  /** The field's value. */
  readonly value: DuckDBValue;
}
|
||||
33
ts/pkgs/duckdb-data-values/src/DuckDBStructValue.ts
Normal file
33
ts/pkgs/duckdb-data-values/src/DuckDBStructValue.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
|
||||
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
|
||||
import { DuckDBStructEntry } from './DuckDBStructEntry.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBStructValue extends SpecialDuckDBValue {
|
||||
public readonly entries: readonly DuckDBStructEntry[];
|
||||
|
||||
constructor(entries: readonly DuckDBStructEntry[]) {
|
||||
super();
|
||||
this.entries = entries;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
const entryStrings = this.entries.map(
|
||||
({ key, value }) =>
|
||||
`${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue(
|
||||
value,
|
||||
)}`,
|
||||
);
|
||||
return `{${entryStrings.join(', ')}}`;
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
const result: Json = {};
|
||||
for (const { key, value } of this.entries) {
|
||||
const keyString = displayStringForDuckDBValue(key);
|
||||
result[keyString] = jsonFromDuckDBValue(value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
42
ts/pkgs/duckdb-data-values/src/DuckDBTimeTZValue.ts
Normal file
42
ts/pkgs/duckdb-data-values/src/DuckDBTimeTZValue.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import {
|
||||
getDuckDBTimeStringFromMicrosecondsInDay,
|
||||
getOffsetStringFromSeconds,
|
||||
} from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
/** A DuckDB TIMETZ value: a time of day plus an offset from UTC. */
export class DuckDBTimeTZValue extends SpecialDuckDBValue {
  // Microseconds within the day (rendered via the time-of-day formatter).
  public readonly micros: bigint;
  // Offset in seconds (rendered via getOffsetStringFromSeconds).
  public readonly offset: number;

  constructor(micros: bigint, offset: number) {
    super();
    this.micros = micros;
    this.offset = offset;
  }

  /** Render as time-of-day followed by the UTC offset, e.g. `12:34:56+05:30`. */
  public toDuckDBString(): string {
    return `${getDuckDBTimeStringFromMicrosecondsInDay(
      this.micros,
    )}${getOffsetStringFromSeconds(this.offset)}`;
  }

  /** JSON representation: the DuckDB display string. */
  public toJson(): Json {
    return this.toDuckDBString();
  }

  // Bit widths of the packed 64-bit representation: time in the upper 40 bits,
  // offset in the lower 24 bits.
  private static TimeBits = 40;
  private static OffsetBits = 24;
  private static MaxOffset = 16 * 60 * 60 - 1; // ±15:59:59 = 57599 seconds

  /**
   * Unpack a TIMETZ from its packed 64-bit form: micros occupy the upper
   * 40 bits; the lower 24 bits store the offset inverted (MaxOffset - offset),
   * so the stored value is MaxOffset minus the real offset in seconds.
   */
  public static fromBits(bits: bigint): DuckDBTimeTZValue {
    const micros = BigInt.asUintN(
      DuckDBTimeTZValue.TimeBits,
      bits >> BigInt(DuckDBTimeTZValue.OffsetBits),
    );
    const offset =
      DuckDBTimeTZValue.MaxOffset -
      Number(BigInt.asUintN(DuckDBTimeTZValue.OffsetBits, bits));
    return new DuckDBTimeTZValue(micros, offset);
  }
}
|
||||
20
ts/pkgs/duckdb-data-values/src/DuckDBTimeValue.ts
Normal file
20
ts/pkgs/duckdb-data-values/src/DuckDBTimeValue.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { getDuckDBTimeStringFromMicrosecondsInDay } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimeValue extends SpecialDuckDBValue {
|
||||
public readonly microseconds: bigint;
|
||||
|
||||
constructor(microseconds: bigint) {
|
||||
super();
|
||||
this.microseconds = microseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBTimeStringFromMicrosecondsInDay(this.microseconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
import { getDuckDBTimestampStringFromMicroseconds } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimestampMicrosecondsValue extends SpecialDuckDBValue {
|
||||
public readonly microseconds: bigint;
|
||||
|
||||
constructor(microseconds: bigint) {
|
||||
super();
|
||||
this.microseconds = microseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBTimestampStringFromMicroseconds(this.microseconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
|
||||
export type DuckDBTimestamp = DuckDBTimestampMicrosecondsValue;
|
||||
@@ -0,0 +1,20 @@
|
||||
import { getDuckDBTimestampStringFromMilliseconds } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimestampMillisecondsValue extends SpecialDuckDBValue {
|
||||
public readonly milliseconds: bigint;
|
||||
|
||||
constructor(milliseconds: bigint) {
|
||||
super();
|
||||
this.milliseconds = milliseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBTimestampStringFromMilliseconds(this.milliseconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
import { getDuckDBTimestampStringFromNanoseconds } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimestampNanosecondsValue extends SpecialDuckDBValue {
|
||||
public readonly nanoseconds: bigint;
|
||||
|
||||
constructor(nanoseconds: bigint) {
|
||||
super();
|
||||
this.nanoseconds = nanoseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBTimestampStringFromNanoseconds(this.nanoseconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
import { getDuckDBTimestampStringFromSeconds } from './conversion/dateTimeStringConversion.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimestampSecondsValue extends SpecialDuckDBValue {
|
||||
public readonly seconds: bigint;
|
||||
|
||||
constructor(seconds: bigint) {
|
||||
super();
|
||||
this.seconds = seconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
return getDuckDBTimestampStringFromSeconds(this.seconds);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
24
ts/pkgs/duckdb-data-values/src/DuckDBTimestampTZValue.ts
Normal file
24
ts/pkgs/duckdb-data-values/src/DuckDBTimestampTZValue.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { getDuckDBTimestampStringFromMicroseconds } from './conversion/dateTimeStringConversion.js';
|
||||
import { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBTimestampTZValue extends SpecialDuckDBValue {
|
||||
public readonly microseconds: bigint;
|
||||
|
||||
constructor(microseconds: bigint) {
|
||||
super();
|
||||
this.microseconds = microseconds;
|
||||
}
|
||||
|
||||
public toDuckDBString(toStringOptions?: DuckDBToStringOptions): string {
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
this.microseconds,
|
||||
toStringOptions?.timezoneOffsetInMinutes || 0,
|
||||
);
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
}
|
||||
3
ts/pkgs/duckdb-data-values/src/DuckDBToStringOptions.ts
Normal file
3
ts/pkgs/duckdb-data-values/src/DuckDBToStringOptions.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
/** Options controlling how values render as DuckDB display strings. */
export interface DuckDBToStringOptions {
  /** Offset from UTC, in minutes, applied when rendering TIMESTAMPTZ values. */
  timezoneOffsetInMinutes?: number;
}
|
||||
48
ts/pkgs/duckdb-data-values/src/DuckDBUUIDValue.ts
Normal file
48
ts/pkgs/duckdb-data-values/src/DuckDBUUIDValue.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { hexFromBlob } from './conversion/hexFromBlob.js';
|
||||
import { Json } from './Json.js';
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
export class DuckDBUUIDValue extends SpecialDuckDBValue {
|
||||
public readonly bytes: Uint8Array;
|
||||
|
||||
constructor(bytes: Uint8Array) {
|
||||
super();
|
||||
this.bytes = bytes;
|
||||
}
|
||||
|
||||
public toDuckDBString(): string {
|
||||
if (this.bytes.length !== 16) {
|
||||
throw new Error('Invalid UUID bytes length');
|
||||
}
|
||||
|
||||
// Insert dashes to format the UUID
|
||||
return `${hexFromBlob(this.bytes, 0, 4)}-${hexFromBlob(this.bytes, 4, 6)}-${hexFromBlob(this.bytes, 6, 8)}-${hexFromBlob(this.bytes, 8, 10)}-${hexFromBlob(this.bytes, 10, 16)}`;
|
||||
}
|
||||
|
||||
public toJson(): Json {
|
||||
return this.toDuckDBString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a DuckDBUUIDValue value from a HUGEINT as stored by DuckDB.
|
||||
*
|
||||
* UUID values are stored with their MSB flipped so their numeric ordering matches their string ordering.
|
||||
*/
|
||||
public static fromStoredHugeint(hugeint: bigint): DuckDBUUIDValue {
|
||||
// Flip the MSB and truncate to 128 bits to extract the represented unsigned 128-bit value.
|
||||
const uint128 =
|
||||
(hugeint ^ 0x80000000000000000000000000000000n) &
|
||||
0xffffffffffffffffffffffffffffffffn;
|
||||
return DuckDBUUIDValue.fromUint128(uint128);
|
||||
}
|
||||
|
||||
/** Create a DuckDBUUIDValue value from an unsigned 128-bit integer in a JS BigInt. */
|
||||
public static fromUint128(uint128: bigint): DuckDBUUIDValue {
|
||||
const bytes = new Uint8Array(16);
|
||||
const dv = new DataView(bytes.buffer);
|
||||
// Write the unsigned 128-bit integer to the buffer in big endian format.
|
||||
dv.setBigUint64(0, BigInt.asUintN(64, uint128 >> BigInt(64)), false);
|
||||
dv.setBigUint64(8, BigInt.asUintN(64, uint128), false);
|
||||
return new DuckDBUUIDValue(bytes);
|
||||
}
|
||||
}
|
||||
9
ts/pkgs/duckdb-data-values/src/DuckDBValue.ts
Normal file
9
ts/pkgs/duckdb-data-values/src/DuckDBValue.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
|
||||
/**
 * Any value that can appear in a DuckDB result: simple values map to JS
 * primitives; all other value kinds are SpecialDuckDBValue subclasses.
 */
export type DuckDBValue =
  | null
  | boolean
  | number
  | string
  | bigint // TODO: Should types requiring bigint be SpecialDBValues?
  | SpecialDuckDBValue;
|
||||
7
ts/pkgs/duckdb-data-values/src/Json.ts
Normal file
7
ts/pkgs/duckdb-data-values/src/Json.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
/** Any value representable in plain JSON (what JSON.parse can produce). */
export type Json =
  | null
  | boolean
  | number
  | string
  | Json[]
  | { [key: string]: Json };
|
||||
15
ts/pkgs/duckdb-data-values/src/SpecialDuckDBValue.ts
Normal file
15
ts/pkgs/duckdb-data-values/src/SpecialDuckDBValue.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
|
||||
import { Json } from './Json.js';
|
||||
|
||||
/**
 * Base class for DuckDB values not represented by a JS primitive.
 * Subclasses supply a DuckDB display string and a JSON conversion.
 */
export abstract class SpecialDuckDBValue {
  // The presence of this function can be used to identify SpecialDuckDBValue objects.
  /** Render this value in DuckDB's textual display format. */
  public abstract toDuckDBString(
    toStringOptions?: DuckDBToStringOptions,
  ): string;

  /** Default string conversion delegates to the DuckDB display format. */
  public toString(): string {
    return this.toDuckDBString();
  }

  /** Convert this value to a JSON-compatible representation. */
  public abstract toJson(): Json;
}
|
||||
@@ -0,0 +1,264 @@
|
||||
// Calendar and time-unit constants shared by the conversion functions below.
const DAYS_IN_400_YEARS = 146097; // (((365 * 4 + 1) * 25) - 1) * 4 + 1
const MILLISECONDS_PER_DAY_NUM = 86400000; // 1000 * 60 * 60 * 24

const MICROSECONDS_PER_SECOND = BigInt(1000000);
const MICROSECONDS_PER_MILLISECOND = BigInt(1000);
const NANOSECONDS_PER_MICROSECOND = BigInt(1000);
const SECONDS_PER_MINUTE = BigInt(60);
const MINUTES_PER_HOUR = BigInt(60);
const MICROSECONDS_PER_DAY = BigInt(86400000000); // 24 * 60 * 60 * 1000000

// Sentinel microsecond values DuckDB uses for the -infinity/infinity timestamps.
const NEGATIVE_INFINITY_TIMESTAMP = BigInt('-9223372036854775807'); // -(2^63-1)
const POSITIVE_INFINITY_TIMESTAMP = BigInt('9223372036854775807'); // 2^63-1
|
||||
|
||||
export function getDuckDBDateStringFromYearMonthDay(
|
||||
year: number,
|
||||
month: number,
|
||||
dayOfMonth: number,
|
||||
): string {
|
||||
const yearStr = String(Math.abs(year)).padStart(4, '0');
|
||||
const monthStr = String(month).padStart(2, '0');
|
||||
const dayOfMonthStr = String(dayOfMonth).padStart(2, '0');
|
||||
return `${yearStr}-${monthStr}-${dayOfMonthStr}${year < 0 ? ' (BC)' : ''}`;
|
||||
}
|
||||
|
||||
/**
 * Convert a day count relative to the epoch into DuckDB's date display string.
 * Supports dates beyond the valid range of the JS Date object by splitting off
 * whole 400-year intervals first.
 */
export function getDuckDBDateStringFromDays(days: number): string {
  const absDays = Math.abs(days);
  const sign = days < 0 ? -1 : 1;
  // 400 years is the shortest interval with a fixed number of days. (Leap years and different length months can result
  // in shorter intervals having different number of days.) By separating the number of 400 year intervals from the
  // interval covered by the remaining days, we can guarantee that the date resulting from shifting the epoch by the
  // remaining interval is within the valid range of the JS Date object. This allows us to use JS Date to calculate the
  // year, month, and day of month for the date represented by the remaining interval, thus accounting for leap years
  // and different length months. We can then safely add back the years from the 400 year intervals, because the month
  // and day of month won't change when a date is shifted by a whole number of such intervals.
  const num400YearIntervals = Math.floor(absDays / DAYS_IN_400_YEARS);
  const yearsFrom400YearIntervals = sign * num400YearIntervals * 400;
  const absDaysFromRemainingInterval = absDays % DAYS_IN_400_YEARS;
  const millisecondsFromRemainingInterval =
    sign * absDaysFromRemainingInterval * MILLISECONDS_PER_DAY_NUM;
  const date = new Date(millisecondsFromRemainingInterval);
  let year = yearsFrom400YearIntervals + date.getUTCFullYear();
  if (year < 0) {
    year--; // correct for non-existence of year zero
  }
  const month = date.getUTCMonth() + 1; // getUTCMonth returns zero-indexed month, but we want a one-index month for display
  const dayOfMonth = date.getUTCDate(); // getUTCDate returns one-indexed day-of-month
  return getDuckDBDateStringFromYearMonthDay(year, month, dayOfMonth);
}
|
||||
|
||||
export function getTimezoneOffsetString(
|
||||
timezoneOffsetInMinutes?: number,
|
||||
): string | undefined {
|
||||
if (timezoneOffsetInMinutes === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
const negative = timezoneOffsetInMinutes < 0;
|
||||
const positiveMinutes = Math.abs(timezoneOffsetInMinutes);
|
||||
const minutesPart = positiveMinutes % 60;
|
||||
const hoursPart = Math.floor(positiveMinutes / 60);
|
||||
const minutesStr =
|
||||
minutesPart !== 0 ? String(minutesPart).padStart(2, '0') : '';
|
||||
const hoursStr = String(hoursPart).padStart(2, '0');
|
||||
return `${negative ? '-' : '+'}${hoursStr}${minutesStr ? `:${minutesStr}` : ''}`;
|
||||
}
|
||||
|
||||
export function getAbsoluteOffsetStringFromParts(
|
||||
hoursPart: number,
|
||||
minutesPart: number,
|
||||
secondsPart: number,
|
||||
): string {
|
||||
const hoursStr = String(hoursPart).padStart(2, '0');
|
||||
const minutesStr =
|
||||
minutesPart !== 0 || secondsPart !== 0
|
||||
? String(minutesPart).padStart(2, '0')
|
||||
: '';
|
||||
const secondsStr =
|
||||
secondsPart !== 0 ? String(secondsPart).padStart(2, '0') : '';
|
||||
let result = hoursStr;
|
||||
if (minutesStr) {
|
||||
result += `:${minutesStr}`;
|
||||
if (secondsStr) {
|
||||
result += `:${secondsStr}`;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function getOffsetStringFromAbsoluteSeconds(
|
||||
absoluteOffsetInSeconds: number,
|
||||
): string {
|
||||
const secondsPart = absoluteOffsetInSeconds % 60;
|
||||
const minutes = Math.floor(absoluteOffsetInSeconds / 60);
|
||||
const minutesPart = minutes % 60;
|
||||
const hoursPart = Math.floor(minutes / 60);
|
||||
return getAbsoluteOffsetStringFromParts(hoursPart, minutesPart, secondsPart);
|
||||
}
|
||||
|
||||
export function getOffsetStringFromSeconds(offsetInSeconds: number): string {
|
||||
const negative = offsetInSeconds < 0;
|
||||
const absoluteOffsetInSeconds = negative ? -offsetInSeconds : offsetInSeconds;
|
||||
const absoluteString = getOffsetStringFromAbsoluteSeconds(
|
||||
absoluteOffsetInSeconds,
|
||||
);
|
||||
return `${negative ? '-' : '+'}${absoluteString}`;
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromParts(
|
||||
hoursPart: bigint,
|
||||
minutesPart: bigint,
|
||||
secondsPart: bigint,
|
||||
microsecondsPart: bigint,
|
||||
): string {
|
||||
const hoursStr = String(hoursPart).padStart(2, '0');
|
||||
const minutesStr = String(minutesPart).padStart(2, '0');
|
||||
const secondsStr = String(secondsPart).padStart(2, '0');
|
||||
const microsecondsStr = String(microsecondsPart)
|
||||
.padStart(6, '0')
|
||||
.replace(/0+$/, '');
|
||||
return `${hoursStr}:${minutesStr}:${secondsStr}${
|
||||
microsecondsStr.length > 0 ? `.${microsecondsStr}` : ''
|
||||
}`;
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromPositiveMicroseconds(
|
||||
positiveMicroseconds: bigint,
|
||||
): string {
|
||||
const microsecondsPart = positiveMicroseconds % MICROSECONDS_PER_SECOND;
|
||||
const seconds = positiveMicroseconds / MICROSECONDS_PER_SECOND;
|
||||
const secondsPart = seconds % SECONDS_PER_MINUTE;
|
||||
const minutes = seconds / SECONDS_PER_MINUTE;
|
||||
const minutesPart = minutes % MINUTES_PER_HOUR;
|
||||
const hoursPart = minutes / MINUTES_PER_HOUR;
|
||||
return getDuckDBTimeStringFromParts(
|
||||
hoursPart,
|
||||
minutesPart,
|
||||
secondsPart,
|
||||
microsecondsPart,
|
||||
);
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromMicrosecondsInDay(
|
||||
microsecondsInDay: bigint,
|
||||
): string {
|
||||
const positiveMicroseconds =
|
||||
microsecondsInDay < 0
|
||||
? microsecondsInDay + MICROSECONDS_PER_DAY
|
||||
: microsecondsInDay;
|
||||
return getDuckDBTimeStringFromPositiveMicroseconds(positiveMicroseconds);
|
||||
}
|
||||
|
||||
export function getDuckDBTimeStringFromMicroseconds(
|
||||
microseconds: bigint,
|
||||
): string {
|
||||
const negative = microseconds < 0;
|
||||
const positiveMicroseconds = negative ? -microseconds : microseconds;
|
||||
const positiveString =
|
||||
getDuckDBTimeStringFromPositiveMicroseconds(positiveMicroseconds);
|
||||
return negative ? `-${positiveString}` : positiveString;
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromDaysAndMicroseconds(
|
||||
days: bigint,
|
||||
microsecondsInDay: bigint,
|
||||
timezonePart?: string,
|
||||
): string {
|
||||
// This conversion of BigInt to Number is safe, because the largest absolute value that `days` can has is 106751991,
|
||||
// which fits without loss of precision in a JS Number. (106751991 = (2^63-1) / MICROSECONDS_PER_DAY)
|
||||
const dateStr = getDuckDBDateStringFromDays(Number(days));
|
||||
const timeStr = getDuckDBTimeStringFromMicrosecondsInDay(microsecondsInDay);
|
||||
return `${dateStr} ${timeStr}${timezonePart ?? ''}`;
|
||||
}
|
||||
|
||||
/**
 * Convert microseconds since the epoch into DuckDB's timestamp display string.
 * When a timezone offset is supplied, the instant is shifted by it and the
 * offset suffix (e.g. `+05:30`) is appended.
 */
export function getDuckDBTimestampStringFromMicroseconds(
  microseconds: bigint,
  timezoneOffsetInMinutes?: number,
): string {
  // Note that -infinity and infinity are only representable in TIMESTAMP (and TIMESTAMPTZ), not the other timestamp
  // variants. This is by-design and matches DuckDB.
  if (microseconds === NEGATIVE_INFINITY_TIMESTAMP) {
    return '-infinity';
  }
  if (microseconds === POSITIVE_INFINITY_TIMESTAMP) {
    return 'infinity';
  }
  // Shift the instant into the target timezone before splitting into days.
  const offsetMicroseconds =
    timezoneOffsetInMinutes !== undefined
      ? microseconds +
        BigInt(timezoneOffsetInMinutes) *
          MICROSECONDS_PER_SECOND *
          SECONDS_PER_MINUTE
      : microseconds;
  let days = offsetMicroseconds / MICROSECONDS_PER_DAY;
  let microsecondsPart = offsetMicroseconds % MICROSECONDS_PER_DAY;
  // BigInt division truncates toward zero; correct so that days is the floor
  // and microsecondsPart is a nonnegative time-of-day.
  if (microsecondsPart < 0) {
    days--;
    microsecondsPart += MICROSECONDS_PER_DAY;
  }
  return getDuckDBTimestampStringFromDaysAndMicroseconds(
    days,
    microsecondsPart,
    getTimezoneOffsetString(timezoneOffsetInMinutes),
  );
}
|
||||
|
||||
export function getDuckDBTimestampStringFromSeconds(seconds: bigint): string {
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
seconds * MICROSECONDS_PER_SECOND,
|
||||
);
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromMilliseconds(
|
||||
milliseconds: bigint,
|
||||
): string {
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
milliseconds * MICROSECONDS_PER_MILLISECOND,
|
||||
);
|
||||
}
|
||||
|
||||
export function getDuckDBTimestampStringFromNanoseconds(
|
||||
nanoseconds: bigint,
|
||||
): string {
|
||||
// Note that this division causes loss of precision. This matches the behavior of the DuckDB. It's important that this
|
||||
// precision loss happen before the negative correction in getTimestampStringFromMicroseconds, otherwise off-by-one
|
||||
// errors can occur.
|
||||
return getDuckDBTimestampStringFromMicroseconds(
|
||||
nanoseconds / NANOSECONDS_PER_MICROSECOND,
|
||||
);
|
||||
}
|
||||
|
||||
// Assumes baseUnit can be pluralized by adding an 's'.
|
||||
function numberAndUnit(value: number, baseUnit: string): string {
|
||||
return `${value} ${baseUnit}${value !== 1 ? 's' : ''}`;
|
||||
}
|
||||
|
||||
export function getDuckDBIntervalString(
|
||||
months: number,
|
||||
days: number,
|
||||
microseconds: bigint,
|
||||
): string {
|
||||
const parts: string[] = [];
|
||||
if (months !== 0) {
|
||||
const sign = months < 0 ? -1 : 1;
|
||||
const absMonths = Math.abs(months);
|
||||
const absYears = Math.floor(absMonths / 12);
|
||||
const years = sign * absYears;
|
||||
const extraMonths = sign * (absMonths - absYears * 12);
|
||||
if (years !== 0) {
|
||||
parts.push(numberAndUnit(years, 'year'));
|
||||
}
|
||||
if (extraMonths !== 0) {
|
||||
parts.push(numberAndUnit(extraMonths, 'month'));
|
||||
}
|
||||
}
|
||||
if (days !== 0) {
|
||||
parts.push(numberAndUnit(days, 'day'));
|
||||
}
|
||||
if (microseconds !== BigInt(0)) {
|
||||
parts.push(getDuckDBTimeStringFromMicroseconds(microseconds));
|
||||
}
|
||||
if (parts.length > 0) {
|
||||
return parts.join(' ');
|
||||
}
|
||||
return '00:00:00';
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
import { DuckDBValue } from '../DuckDBValue.js';
|
||||
|
||||
export function displayStringForDuckDBValue(value: DuckDBValue): string {
|
||||
if (value == null) {
|
||||
return 'NULL';
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
return `'${value.replace(`'`, `''`)}'`;
|
||||
}
|
||||
return String(value);
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
/**
|
||||
* Returns the JS bigint value represented by the byte array a VARINT in DuckDB's internal format.
|
||||
*
|
||||
* DuckDB stores VARINTs as an array of bytes consisting of a three-byte header followed by a variable number of bytes
|
||||
* (at least one). The header specifies the number of bytes after the header, and whether the number is positive or
|
||||
* negative. The bytes after the header specify the absolute value of the number, in big endian format.
|
||||
*
|
||||
* The sign of the number is determined by the MSB of the header, which is 1 for positive and 0 for negative. Negative
|
||||
* numbers also have all bytes of both the header and value inverted. (For negative numbers, the MSB is 0 after this
|
||||
* inversion. Put another way: the MSB of the header is always 1, but it's inverted for negative numbers.)
|
||||
*/
|
||||
export function getVarIntFromBytes(bytes: Uint8Array): bigint {
|
||||
const firstByte = bytes[0];
|
||||
const positive = (firstByte & 0x80) > 0;
|
||||
const uint64Mask = positive ? 0n : 0xffffffffffffffffn;
|
||||
const uint8Mask = positive ? 0 : 0xff;
|
||||
const dv = new DataView(
|
||||
bytes.buffer,
|
||||
bytes.byteOffset + 3,
|
||||
bytes.byteLength - 3,
|
||||
);
|
||||
const lastUint64Offset = dv.byteLength - 8;
|
||||
let offset = 0;
|
||||
let result = 0n;
|
||||
while (offset <= lastUint64Offset) {
|
||||
result = (result << 64n) | (dv.getBigUint64(offset) ^ uint64Mask);
|
||||
offset += 8;
|
||||
}
|
||||
while (offset < dv.byteLength) {
|
||||
result = (result << 8n) | BigInt(dv.getUint8(offset) ^ uint8Mask);
|
||||
offset += 1;
|
||||
}
|
||||
return positive ? result : -result;
|
||||
}
|
||||
20
ts/pkgs/duckdb-data-values/src/conversion/hexFromBlob.ts
Normal file
20
ts/pkgs/duckdb-data-values/src/conversion/hexFromBlob.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
export function hexFromBlob(
|
||||
blob: Uint8Array,
|
||||
start: number | undefined,
|
||||
end: number | undefined,
|
||||
): string {
|
||||
if (start === undefined) {
|
||||
start = 0;
|
||||
}
|
||||
if (end === undefined) {
|
||||
end = blob.length;
|
||||
}
|
||||
let hex = '';
|
||||
|
||||
for (let i = start; i < end; i++) {
|
||||
const byte = blob[i];
|
||||
// Ensure each byte is 2 hex characters
|
||||
hex += (byte < 16 ? '0' : '') + byte.toString(16);
|
||||
}
|
||||
return hex;
|
||||
}
|
||||
@@ -0,0 +1,16 @@
|
||||
import { DuckDBValue } from '../DuckDBValue.js';
|
||||
import { Json } from '../Json.js';
|
||||
import { SpecialDuckDBValue } from '../SpecialDuckDBValue.js';
|
||||
|
||||
export function jsonFromDuckDBValue(value: DuckDBValue): Json {
|
||||
if (value === null) {
|
||||
return null;
|
||||
}
|
||||
if (typeof value === 'bigint') {
|
||||
return String(value);
|
||||
}
|
||||
if (value instanceof SpecialDuckDBValue) {
|
||||
return value.toJson();
|
||||
}
|
||||
return value;
|
||||
}
|
||||
17
ts/pkgs/duckdb-data-values/src/conversion/stringFromBlob.ts
Normal file
17
ts/pkgs/duckdb-data-values/src/conversion/stringFromBlob.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
/** Matches BLOB-to-VARCHAR conversion behavior of DuckDB. */
|
||||
export function stringFromBlob(bytes: Uint8Array): string {
|
||||
let result = '';
|
||||
for (const byte of bytes) {
|
||||
if (
|
||||
byte <= 0x1f ||
|
||||
byte === 0x22 /* single quote */ ||
|
||||
byte === 0x27 /* double quote */ ||
|
||||
byte >= 0x7f
|
||||
) {
|
||||
result += `\\x${byte.toString(16).toUpperCase().padStart(2, '0')}`;
|
||||
} else {
|
||||
result += String.fromCharCode(byte);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
129
ts/pkgs/duckdb-data-values/src/conversion/stringFromDecimal.ts
Normal file
129
ts/pkgs/duckdb-data-values/src/conversion/stringFromDecimal.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
/**
|
||||
* Decimal string formatting.
|
||||
*
|
||||
* Supports a subset of the functionality of `BigInt.prototype.toLocaleString` for locale-specific formatting.
|
||||
*/
|
||||
|
||||
/**
 * Locale formatting options for DuckDBDecimalValue.
 *
 * This is a subset of the options available for `BigInt.prototype.toLocaleString`.
 */
export interface DuckDBDecimalFormatOptions {
  /** Whether to use grouping separators (e.g. thousands separators) in the whole part. */
  useGrouping?: boolean;
  /** Minimum digits after the decimal separator; shorter fractions are zero-padded. */
  minimumFractionDigits?: number;
  /** Maximum digits after the decimal separator; longer fractions are truncated. */
  maximumFractionDigits?: number;
}
|
||||
|
||||
/** Optional locale(s) plus formatting options for locale-aware decimal rendering. */
export interface LocaleOptions {
  /** Locale identifier(s), as accepted by `Intl.NumberFormat` / `toLocaleString`. */
  locales?: string | string[];
  /** Formatting options (a subset of `BigInt.prototype.toLocaleString` options). */
  options?: DuckDBDecimalFormatOptions;
}
|
||||
|
||||
/*
|
||||
* Get the decimal separator for a given locale.
|
||||
* Somewhat expensive, so use getCachedDecimalSeparator if you need to call this multiple times.
|
||||
*/
|
||||
|
||||
function getDecimalSeparator(locales?: string | string[]): string {
|
||||
const decimalSeparator =
|
||||
new Intl.NumberFormat(locales, { useGrouping: false })
|
||||
.formatToParts(0.1)
|
||||
.find((part) => part.type === 'decimal')?.value ?? '.';
|
||||
return decimalSeparator;
|
||||
}
|
||||
|
||||
/*
|
||||
* Get the decimal separator for a given locale, and cache the result.
|
||||
*/
|
||||
const cachedDecimalSeparators: { [localeKey: string]: string } = {};
|
||||
|
||||
function getCachedDecimalSeparator(locales?: string | string[]): string {
|
||||
const cacheKey = JSON.stringify(locales);
|
||||
if (cacheKey in cachedDecimalSeparators) {
|
||||
return cachedDecimalSeparators[cacheKey];
|
||||
}
|
||||
const decimalSeparator = getDecimalSeparator(locales);
|
||||
cachedDecimalSeparators[cacheKey] = decimalSeparator;
|
||||
return decimalSeparator;
|
||||
}
|
||||
|
||||
// Helper function to format whole part of a decimal value.
|
||||
// Note that we explicitly omit 'minimumFractionDigits' and 'maximumFractionDigits' from the options
|
||||
// passed to toLocaleString, because they are only relevant for the fractional part of the number, and
|
||||
// would result in formatting the whole part as a real number, which we don't want.
|
||||
function formatWholePart(
|
||||
localeOptions: LocaleOptions | undefined,
|
||||
val: bigint,
|
||||
): string {
|
||||
if (localeOptions) {
|
||||
const {
|
||||
minimumFractionDigits: _minFD,
|
||||
maximumFractionDigits: _maxFD,
|
||||
...restOptions
|
||||
} = localeOptions.options ?? {};
|
||||
return val.toLocaleString(localeOptions?.locales, restOptions);
|
||||
}
|
||||
return String(val);
|
||||
}
|
||||
|
||||
// Format the fractional part of a decimal value
|
||||
// Note that we must handle minimumFractionDigits and maximumFractionDigits ourselves, and that
|
||||
// we don't apply `useGrouping` because that only applies to the whole part of the number.
|
||||
function formatFractionalPart(
|
||||
localeOptions: LocaleOptions | undefined,
|
||||
val: bigint,
|
||||
scale: number,
|
||||
): string {
|
||||
const fractionalPartStr = String(val).padStart(scale, '0');
|
||||
if (!localeOptions) {
|
||||
return fractionalPartStr;
|
||||
}
|
||||
const minFracDigits = localeOptions?.options?.minimumFractionDigits ?? 0;
|
||||
const maxFracDigits = localeOptions?.options?.maximumFractionDigits ?? 20;
|
||||
|
||||
return fractionalPartStr.padEnd(minFracDigits, '0').slice(0, maxFracDigits);
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a scaled decimal value to a string, possibly using locale-specific formatting.
|
||||
*/
|
||||
export function stringFromDecimal(
|
||||
scaledValue: bigint,
|
||||
scale: number,
|
||||
localeOptions?: LocaleOptions,
|
||||
): string {
|
||||
// Decimal values are represented as integers that have been scaled up by a power of ten. The `scale` property of
|
||||
// the type is the exponent of the scale factor. For a scale greater than zero, we need to separate out the
|
||||
// fractional part by reversing this scaling.
|
||||
if (scale > 0) {
|
||||
const scaleFactor = BigInt(10) ** BigInt(scale);
|
||||
const absScaledValue = scaledValue < 0 ? -scaledValue : scaledValue;
|
||||
|
||||
const prefix = scaledValue < 0 ? '-' : '';
|
||||
|
||||
const wholePartNum = absScaledValue / scaleFactor;
|
||||
const wholePartStr = formatWholePart(localeOptions, wholePartNum);
|
||||
|
||||
const fractionalPartNum = absScaledValue % scaleFactor;
|
||||
const fractionalPartStr = formatFractionalPart(
|
||||
localeOptions,
|
||||
fractionalPartNum,
|
||||
scale,
|
||||
);
|
||||
|
||||
const decimalSeparatorStr = localeOptions
|
||||
? getCachedDecimalSeparator(localeOptions.locales)
|
||||
: '.';
|
||||
|
||||
return `${prefix}${wholePartStr}${decimalSeparatorStr}${fractionalPartStr}`;
|
||||
}
|
||||
// For a scale of zero, there is no fractional part, so a direct string conversion works.
|
||||
if (localeOptions) {
|
||||
return scaledValue.toLocaleString(
|
||||
localeOptions?.locales,
|
||||
localeOptions?.options as BigIntToLocaleStringOptions | undefined,
|
||||
);
|
||||
}
|
||||
return String(scaledValue);
|
||||
}
|
||||
25
ts/pkgs/duckdb-data-values/src/index.ts
Normal file
25
ts/pkgs/duckdb-data-values/src/index.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
// Public entry point for the duckdb-data-values package.

// Conversion helpers.
export { getVarIntFromBytes } from './conversion/getVarIntFromBytes.js';
export { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
// Wrapper classes for DuckDB values without a natural JS representation.
export { DuckDBArrayValue } from './DuckDBArrayValue.js';
export { DuckDBBitValue } from './DuckDBBitValue.js';
export { DuckDBBlobValue } from './DuckDBBlobValue.js';
export { DuckDBDateValue } from './DuckDBDateValue.js';
export { DuckDBDecimalValue } from './DuckDBDecimalValue.js';
export { DuckDBIntervalValue } from './DuckDBIntervalValue.js';
export { DuckDBListValue } from './DuckDBListValue.js';
export { DuckDBMapEntry } from './DuckDBMapEntry.js';
export { DuckDBMapValue } from './DuckDBMapValue.js';
export { DuckDBStructEntry } from './DuckDBStructEntry.js';
export { DuckDBStructValue } from './DuckDBStructValue.js';
export { DuckDBTimestampMicrosecondsValue } from './DuckDBTimestampMicrosecondsValue.js';
export { DuckDBTimestampMillisecondsValue } from './DuckDBTimestampMillisecondsValue.js';
export { DuckDBTimestampNanosecondsValue } from './DuckDBTimestampNanosecondsValue.js';
export { DuckDBTimestampSecondsValue } from './DuckDBTimestampSecondsValue.js';
export { DuckDBTimestampTZValue } from './DuckDBTimestampTZValue.js';
export { DuckDBTimeTZValue } from './DuckDBTimeTZValue.js';
export { DuckDBTimeValue } from './DuckDBTimeValue.js';
export { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
export { DuckDBUUIDValue } from './DuckDBUUIDValue.js';
// Core value and JSON types.
export { DuckDBValue } from './DuckDBValue.js';
export { Json } from './Json.js';
export { SpecialDuckDBValue } from './SpecialDuckDBValue.js';
|
||||
6
ts/pkgs/duckdb-data-values/src/tsconfig.json
Normal file
6
ts/pkgs/duckdb-data-values/src/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"extends": "../../../tsconfig.library.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "../out"
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user