add duckdb-ui-client & other ts pkgs (#10)
* add duckdb-ui-client & other ts pkgs
* workflow fixes
* fix working dir
* no sparse checkout; specify package.json path
* path to pnpm-lock.yaml
* add check & build test
* workflow step descriptions
* use comments & names
* one more naming tweak
41
ts/pkgs/duckdb-ui-client/package.json
Normal file
@@ -0,0 +1,41 @@
{
  "name": "@duckdb/ui-client",
  "version": "0.0.1",
  "description": "Client for communicating with the DuckDB UI server",
  "type": "module",
  "main": "./out/index.js",
  "module": "./out/index.js",
  "types": "./out/index.d.ts",
  "scripts": {
    "preinstall": "pnpm build:src",
    "build": "tsc -b src test",
    "build:src": "tsc -b src",
    "build:test": "tsc -b test",
    "build:watch": "tsc -b src test --watch",
    "check": "pnpm format:check && pnpm lint",
    "clean": "rimraf out",
    "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check",
    "format:write": "prettier . --ignore-path $(find-up .prettierignore) --write",
    "lint": "pnpm eslint src test",
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "dependencies": {
    "@duckdb/data-reader": "workspace:*",
    "@duckdb/data-types": "workspace:*",
    "@duckdb/data-values": "workspace:*",
    "core-js": "^3.41.0"
  },
  "devDependencies": {
    "@eslint/js": "^9.24.0",
    "eslint": "^9.24.0",
    "find-up-cli": "^6.0.0",
    "msw": "^2.10.2",
    "prettier": "^3.5.3",
    "rimraf": "^6.0.1",
    "typescript": "^5.8.3",
    "typescript-eslint": "^8.30.1",
    "vite": "^6.2.6",
    "vitest": "^3.1.1"
  }
}
@@ -0,0 +1,72 @@
import { sendDuckDBUIHttpRequest } from '../../http/functions/sendDuckDBUIHttpRequest.js';
import { tokenizeResultFromBuffer } from '../../serialization/functions/tokenizeResultFromBuffer.js';
import type { TokenizeResult } from '../../serialization/types/TokenizeResult.js';
import { DuckDBUIClientConnection } from './DuckDBUIClientConnection.js';

export { DuckDBUIClientConnection };
export type { TokenizeResult };

export class DuckDBUIClient {
  private readonly eventSource: EventSource;

  private defaultConnection: DuckDBUIClientConnection | undefined;

  private constructor() {
    this.eventSource = new EventSource('/localEvents');
  }

  public addOpenEventListener(listener: (event: Event) => void) {
    this.eventSource.addEventListener('open', listener);
  }

  public removeOpenEventListener(listener: (event: Event) => void) {
    this.eventSource.removeEventListener('open', listener);
  }

  public addErrorEventListener(listener: (event: Event) => void) {
    this.eventSource.addEventListener('error', listener);
  }

  public removeErrorEventListener(listener: (event: Event) => void) {
    this.eventSource.removeEventListener('error', listener);
  }

  public addMessageEventListener(
    type: string,
    listener: (event: MessageEvent) => void,
  ) {
    this.eventSource.addEventListener(type, listener);
  }

  public removeMessageEventListener(
    type: string,
    listener: (event: MessageEvent) => void,
  ) {
    this.eventSource.removeEventListener(type, listener);
  }

  public connect() {
    return new DuckDBUIClientConnection();
  }

  public get connection(): DuckDBUIClientConnection {
    if (!this.defaultConnection) {
      this.defaultConnection = this.connect();
    }
    return this.defaultConnection;
  }

  public async tokenize(text: string): Promise<TokenizeResult> {
    const buffer = await sendDuckDBUIHttpRequest('/ddb/tokenize', text);
    return tokenizeResultFromBuffer(buffer);
  }

  private static singletonInstance: DuckDBUIClient;

  public static get singleton(): DuckDBUIClient {
    if (!DuckDBUIClient.singletonInstance) {
      DuckDBUIClient.singletonInstance = new DuckDBUIClient();
    }
    return DuckDBUIClient.singletonInstance;
  }
}
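A minimal usage sketch of this client (the SQL and the timing arithmetic are illustrative; `run` resolves to the MaterializedRunResult defined further below):

// Sketch: obtain the shared client and run a query on its default connection.
import { DuckDBUIClient } from '@duckdb/ui-client';

const client = DuckDBUIClient.singleton; // constructing it opens the '/localEvents' EventSource
const result = await client.connection.run('SELECT 42 AS answer'); // illustrative SQL
console.log(result.endTimeMs - result.startTimeMs); // elapsed request time in ms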
@@ -0,0 +1,70 @@
import { DuckDBUIHttpRequestQueue } from '../../http/classes/DuckDBUIHttpRequestQueue.js';
import {
  DuckDBUIHttpRequestHeaderOptions,
  makeDuckDBUIHttpRequestHeaders,
} from '../../http/functions/makeDuckDBUIHttpRequestHeaders.js';
import { sendDuckDBUIHttpRequest } from '../../http/functions/sendDuckDBUIHttpRequest.js';
import { randomString } from '../../util/functions/randomString.js';
import { materializedRunResultFromQueueResult } from '../functions/materializedRunResultFromQueueResult.js';
import { MaterializedRunResult } from '../types/MaterializedRunResult.js';

export class DuckDBUIClientConnection {
  private readonly connectionName = `connection_${randomString()}`;

  private readonly requestQueue: DuckDBUIHttpRequestQueue =
    new DuckDBUIHttpRequestQueue();

  public async run(
    sql: string,
    args?: unknown[],
  ): Promise<MaterializedRunResult> {
    const queueResult = await this.requestQueue.enqueueAndWait(
      '/ddb/run',
      sql,
      this.makeHeaders({ parameters: args }),
    );
    return materializedRunResultFromQueueResult(queueResult);
  }

  public enqueue(sql: string, args?: unknown[]): string {
    return this.requestQueue.enqueue(
      '/ddb/run',
      sql,
      this.makeHeaders({ parameters: args }),
    );
  }

  public cancel(
    id: string,
    errorMessage?: string,
    failure?: (reason: unknown) => void,
  ) {
    this.requestQueue.cancel(id, errorMessage);
    // If currently running, then interrupt it.
    if (this.requestQueue.isCurrent(id)) {
      // Don't await (but report any unexpected errors). Canceling should return synchronously.
      sendDuckDBUIHttpRequest('/ddb/interrupt', '', this.makeHeaders()).catch(
        failure,
      );
    }
    return true;
  }

  public async runQueued(id: string): Promise<MaterializedRunResult> {
    const queueResult = await this.requestQueue.enqueuedResult(id);
    return materializedRunResultFromQueueResult(queueResult);
  }

  public get queuedCount(): number {
    return this.requestQueue.length;
  }

  private makeHeaders(
    options: Omit<DuckDBUIHttpRequestHeaderOptions, 'connectionName'> = {},
  ): Headers {
    return makeDuckDBUIHttpRequestHeaders({
      ...options,
      connectionName: this.connectionName,
    });
  }
}
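A sketch of the queued-execution flow this connection supports (the SQL strings are illustrative):

// Queue two statements; the underlying queue sends them one at a time, in order.
const conn = DuckDBUIClient.singleton.connect();
const id1 = conn.enqueue('CREATE TABLE t AS SELECT * FROM range(1000)');
const id2 = conn.enqueue('SELECT count(*) FROM t');

// Cancel the second before it runs; its promise rejects and it is never sent.
conn.cancel(id2, 'no longer needed');

// Await the first normally.
const result = await conn.runQueued(id1);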
@@ -0,0 +1,20 @@
import { DuckDBDataReader } from '@duckdb/data-reader';
import { DuckDBDataChunkIterator } from '../../data-chunk/classes/DuckDBDataChunkIterator.js';
import { DuckDBUIHttpRequestQueueResult } from '../../http/classes/DuckDBUIHttpRequestQueue.js';
import { deserializerFromBuffer } from '../../serialization/functions/deserializeFromBuffer.js';
import { readQueryResult } from '../../serialization/functions/resultReaders.js';
import { MaterializedRunResult } from '../types/MaterializedRunResult.js';

export async function materializedRunResultFromQueueResult(
  queueResult: DuckDBUIHttpRequestQueueResult,
): Promise<MaterializedRunResult> {
  const { buffer, startTimeMs, endTimeMs } = queueResult;
  const deserializer = deserializerFromBuffer(buffer);
  const result = readQueryResult(deserializer);
  if (!result.success) {
    throw new Error(result.error);
  }
  const dataReader = new DuckDBDataReader(new DuckDBDataChunkIterator(result));
  await dataReader.readAll();
  return { data: dataReader, startTimeMs, endTimeMs };
}
@@ -0,0 +1,14 @@
import { DuckDBData } from '@duckdb/data-reader';

export interface MaterializedRunResult {
  /**
   * Full result set.
   *
   * Includes column metadata, such as types. Supports duplicate column names without renaming.
   *
   * See the `DuckDBData` interface for details.
   */
  data: DuckDBData;
  startTimeMs: number;
  endTimeMs: number;
}
@@ -0,0 +1,58 @@
// DuckDB's physical storage and binary serialization format is little endian.
const littleEndian = true;

export function getInt8(dataView: DataView, offset: number): number {
  return dataView.getInt8(offset);
}

export function getUInt8(dataView: DataView, offset: number): number {
  return dataView.getUint8(offset);
}

export function getInt16(dataView: DataView, offset: number): number {
  return dataView.getInt16(offset, littleEndian);
}

export function getUInt16(dataView: DataView, offset: number): number {
  return dataView.getUint16(offset, littleEndian);
}

export function getInt32(dataView: DataView, offset: number): number {
  return dataView.getInt32(offset, littleEndian);
}

export function getUInt32(dataView: DataView, offset: number): number {
  return dataView.getUint32(offset, littleEndian);
}

export function getInt64(dataView: DataView, offset: number): bigint {
  return dataView.getBigInt64(offset, littleEndian);
}

export function getUInt64(dataView: DataView, offset: number): bigint {
  return dataView.getBigUint64(offset, littleEndian);
}

export function getFloat32(dataView: DataView, offset: number): number {
  return dataView.getFloat32(offset, littleEndian);
}

export function getFloat64(dataView: DataView, offset: number): number {
  return dataView.getFloat64(offset, littleEndian);
}

export function getInt128(dataView: DataView, offset: number): bigint {
  const lower = getUInt64(dataView, offset);
  const upper = getInt64(dataView, offset + 8);
  return (upper << BigInt(64)) + lower;
}

export function getUInt128(dataView: DataView, offset: number): bigint {
  const lower = getUInt64(dataView, offset);
  const upper = getUInt64(dataView, offset + 8);
  return (BigInt.asUintN(64, upper) << BigInt(64)) | BigInt.asUintN(64, lower);
}

export function getBoolean(dataView: DataView, offset: number): boolean {
  return getUInt8(dataView, offset) !== 0;
}
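To make the 128-bit composition above concrete, here is a small illustrative check (the bytes are invented for the example):

// 2^64 stored little endian: the lower 8 bytes are 0; byte 8 holds the low bit of the upper word.
const bytes = new Uint8Array(16);
bytes[8] = 1;
const dv = new DataView(bytes.buffer);
console.log(getInt128(dv, 0) === 2n ** 64n); // true: (1n << 64n) + 0n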
@@ -0,0 +1,178 @@
import {
  ARRAY,
  DECIMAL,
  DuckDBBigIntType,
  DuckDBBitType,
  DuckDBBlobType,
  DuckDBBooleanType,
  DuckDBDateType,
  DuckDBDoubleType,
  DuckDBFloatType,
  DuckDBHugeIntType,
  DuckDBIntegerType,
  DuckDBIntervalType,
  DuckDBSmallIntType,
  DuckDBTimestampMillisecondsType,
  DuckDBTimestampNanosecondsType,
  DuckDBTimestampSecondsType,
  DuckDBTimestampType,
  DuckDBTimestampTZType,
  DuckDBTimeType,
  DuckDBTimeTZType,
  DuckDBTinyIntType,
  DuckDBType,
  DuckDBUBigIntType,
  DuckDBUHugeIntType,
  DuckDBUIntegerType,
  DuckDBUSmallIntType,
  DuckDBUTinyIntType,
  DuckDBUUIDType,
  DuckDBVarCharType,
  DuckDBVarIntType,
  ENUM,
  JSONType,
  LIST,
  MAP,
  STRUCT,
  UNION,
} from '@duckdb/data-types';
import { LogicalTypeId } from '../../serialization/constants/LogicalTypeId.js';
import { TypeIdAndInfo } from '../../serialization/types/TypeInfo.js';
import {
  getArrayTypeInfo,
  getDecimalTypeInfo,
  getEnumTypeInfo,
  getListTypeInfo,
  getMapTypeInfos,
  getStructTypeInfo,
} from './typeInfoGetters.js';

/** Return the DuckDBType corresponding to the given TypeIdAndInfo. */
export function duckDBTypeFromTypeIdAndInfo(
  typeIdAndInfo: TypeIdAndInfo,
): DuckDBType {
  const { id, typeInfo } = typeIdAndInfo;
  const alias = typeInfo?.alias;
  switch (id) {
    case LogicalTypeId.BOOLEAN:
      return DuckDBBooleanType.create(alias);

    case LogicalTypeId.TINYINT:
      return DuckDBTinyIntType.create(alias);
    case LogicalTypeId.SMALLINT:
      return DuckDBSmallIntType.create(alias);
    case LogicalTypeId.INTEGER:
      return DuckDBIntegerType.create(alias);
    case LogicalTypeId.BIGINT:
      return DuckDBBigIntType.create(alias);

    case LogicalTypeId.DATE:
      return DuckDBDateType.create(alias);
    case LogicalTypeId.TIME:
      return DuckDBTimeType.create(alias);
    case LogicalTypeId.TIMESTAMP_SEC:
      return DuckDBTimestampSecondsType.create(alias);
    case LogicalTypeId.TIMESTAMP_MS:
      return DuckDBTimestampMillisecondsType.create(alias);
    case LogicalTypeId.TIMESTAMP:
      return DuckDBTimestampType.create(alias);
    case LogicalTypeId.TIMESTAMP_NS:
      return DuckDBTimestampNanosecondsType.create(alias);

    case LogicalTypeId.DECIMAL: {
      const { width, scale } = getDecimalTypeInfo(typeInfo);
      return DECIMAL(width, scale, alias);
    }

    case LogicalTypeId.FLOAT:
      return DuckDBFloatType.create(alias);
    case LogicalTypeId.DOUBLE:
      return DuckDBDoubleType.create(alias);

    case LogicalTypeId.CHAR:
    case LogicalTypeId.VARCHAR:
      // Minor optimization for JSON type to avoid creating new type object.
      if (alias === JSONType.alias) {
        return JSONType;
      }
      return DuckDBVarCharType.create(alias);
    case LogicalTypeId.BLOB:
      return DuckDBBlobType.create(alias);

    case LogicalTypeId.INTERVAL:
      return DuckDBIntervalType.create(alias);

    case LogicalTypeId.UTINYINT:
      return DuckDBUTinyIntType.create(alias);
    case LogicalTypeId.USMALLINT:
      return DuckDBUSmallIntType.create(alias);
    case LogicalTypeId.UINTEGER:
      return DuckDBUIntegerType.create(alias);
    case LogicalTypeId.UBIGINT:
      return DuckDBUBigIntType.create(alias);

    case LogicalTypeId.TIMESTAMP_TZ:
      return DuckDBTimestampTZType.create(alias);
    case LogicalTypeId.TIME_TZ:
      return DuckDBTimeTZType.create(alias);

    case LogicalTypeId.BIT:
      return DuckDBBitType.create(alias);

    case LogicalTypeId.VARINT:
      return DuckDBVarIntType.create(alias);

    case LogicalTypeId.UHUGEINT:
      return DuckDBUHugeIntType.create(alias);
    case LogicalTypeId.HUGEINT:
      return DuckDBHugeIntType.create(alias);

    case LogicalTypeId.UUID:
      return DuckDBUUIDType.create(alias);

    case LogicalTypeId.STRUCT: {
      const { childTypes } = getStructTypeInfo(typeInfo);
      const entries: Record<string, DuckDBType> = {};
      for (const [key, valueTypeIdAndInfo] of childTypes) {
        entries[key] = duckDBTypeFromTypeIdAndInfo(valueTypeIdAndInfo);
      }
      return STRUCT(entries, alias);
    }

    case LogicalTypeId.LIST: {
      const { childType } = getListTypeInfo(typeInfo);
      return LIST(duckDBTypeFromTypeIdAndInfo(childType), alias);
    }

    case LogicalTypeId.MAP: {
      const { keyType, valueType } = getMapTypeInfos(typeInfo);
      return MAP(
        duckDBTypeFromTypeIdAndInfo(keyType),
        duckDBTypeFromTypeIdAndInfo(valueType),
        alias,
      );
    }

    case LogicalTypeId.ENUM: {
      const { values } = getEnumTypeInfo(typeInfo);
      return ENUM(values, alias);
    }

    case LogicalTypeId.UNION: {
      const { childTypes } = getStructTypeInfo(typeInfo);
      const members: Record<string, DuckDBType> = {};
      for (const [key, valueTypeIdAndInfo] of childTypes) {
        members[key] = duckDBTypeFromTypeIdAndInfo(valueTypeIdAndInfo);
      }
      return UNION(members, alias);
    }

    case LogicalTypeId.ARRAY: {
      const { childType, size } = getArrayTypeInfo(typeInfo);
      return ARRAY(duckDBTypeFromTypeIdAndInfo(childType), size, alias);
    }

    default:
      throw new Error(`type id not implemented: ${typeIdAndInfo.id}`);
  }
}
@@ -0,0 +1,271 @@
import {
  DuckDBArrayValue,
  DuckDBBitValue,
  DuckDBBlobValue,
  DuckDBDateValue,
  DuckDBDecimalValue,
  DuckDBIntervalValue,
  DuckDBListValue,
  DuckDBMapValue,
  DuckDBStructValue,
  DuckDBTimeTZValue,
  DuckDBTimeValue,
  DuckDBTimestampMicrosecondsValue,
  DuckDBTimestampMillisecondsValue,
  DuckDBTimestampNanosecondsValue,
  DuckDBTimestampSecondsValue,
  DuckDBTimestampTZValue,
  DuckDBUUIDValue,
  DuckDBValue,
  getVarIntFromBytes,
} from '@duckdb/data-values';
import { LogicalTypeId } from '../../serialization/constants/LogicalTypeId.js';
import { TypeIdAndInfo } from '../../serialization/types/TypeInfo.js';
import { Vector } from '../../serialization/types/Vector.js';
import {
  getBoolean,
  getFloat32,
  getFloat64,
  getInt128,
  getInt16,
  getInt32,
  getInt64,
  getInt8,
  getUInt128,
  getUInt16,
  getUInt32,
  getUInt64,
  getUInt8,
} from './dataViewReaders.js';
import { isRowValid } from './isRowValid.js';
import {
  getArrayTypeInfo,
  getDecimalTypeInfo,
  getEnumTypeInfo,
  getListTypeInfo,
  getMapTypeInfos,
  getStructTypeInfo,
} from './typeInfoGetters.js';
import {
  getArrayVector,
  getDataListVector,
  getDataVector,
  getListVector,
  getStringVector,
  getVectorListVector,
} from './vectorGetters.js';

/** Return the DuckDBValue at the given index in the given Vector with the type described by the given TypeIdAndInfo. */
export function duckDBValueFromVector(
  typeIdAndInfo: TypeIdAndInfo,
  vector: Vector,
  rowIndex: number,
): DuckDBValue {
  if (!isRowValid(vector.validity, rowIndex)) return null;

  const { id, typeInfo } = typeIdAndInfo;
  switch (id) {
    case LogicalTypeId.BOOLEAN:
      return getBoolean(getDataVector(vector).data, rowIndex);

    case LogicalTypeId.TINYINT:
      return getInt8(getDataVector(vector).data, rowIndex);
    case LogicalTypeId.SMALLINT:
      return getInt16(getDataVector(vector).data, rowIndex * 2);
    case LogicalTypeId.INTEGER:
      return getInt32(getDataVector(vector).data, rowIndex * 4);
    case LogicalTypeId.BIGINT:
      return getInt64(getDataVector(vector).data, rowIndex * 8);

    case LogicalTypeId.DATE:
      return new DuckDBDateValue(
        getInt32(getDataVector(vector).data, rowIndex * 4),
      );
    case LogicalTypeId.TIME:
      return new DuckDBTimeValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );
    case LogicalTypeId.TIMESTAMP_SEC:
      return new DuckDBTimestampSecondsValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );
    case LogicalTypeId.TIMESTAMP_MS:
      return new DuckDBTimestampMillisecondsValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );
    case LogicalTypeId.TIMESTAMP:
      return new DuckDBTimestampMicrosecondsValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );
    case LogicalTypeId.TIMESTAMP_NS:
      return new DuckDBTimestampNanosecondsValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );

    case LogicalTypeId.DECIMAL: {
      const { width, scale } = getDecimalTypeInfo(typeInfo);
      if (width <= 4) {
        return new DuckDBDecimalValue(
          BigInt(getInt16(getDataVector(vector).data, rowIndex * 2)),
          scale,
        );
      } else if (width <= 9) {
        return new DuckDBDecimalValue(
          BigInt(getInt32(getDataVector(vector).data, rowIndex * 4)),
          scale,
        );
      } else if (width <= 18) {
        return new DuckDBDecimalValue(
          getInt64(getDataVector(vector).data, rowIndex * 8),
          scale,
        );
      } else if (width <= 38) {
        return new DuckDBDecimalValue(
          getInt128(getDataVector(vector).data, rowIndex * 16),
          scale,
        );
      }
      throw new Error(`unsupported decimal width: ${width}`);
    }

    case LogicalTypeId.FLOAT:
      return getFloat32(getDataVector(vector).data, rowIndex * 4);
    case LogicalTypeId.DOUBLE:
      return getFloat64(getDataVector(vector).data, rowIndex * 8);

    case LogicalTypeId.CHAR:
    case LogicalTypeId.VARCHAR:
      return getStringVector(vector).data[rowIndex];

    case LogicalTypeId.BLOB: {
      const dv = getDataListVector(vector).data[rowIndex];
      return new DuckDBBlobValue(
        new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength),
      );
    }

    case LogicalTypeId.INTERVAL: {
      const { data } = getDataVector(vector);
      const months = getInt32(data, rowIndex * 16 + 0);
      const days = getInt32(data, rowIndex * 16 + 4);
      const micros = getInt64(data, rowIndex * 16 + 8);
      return new DuckDBIntervalValue(months, days, micros);
    }

    case LogicalTypeId.UTINYINT:
      return getUInt8(getDataVector(vector).data, rowIndex);
    case LogicalTypeId.USMALLINT:
      return getUInt16(getDataVector(vector).data, rowIndex * 2);
    case LogicalTypeId.UINTEGER:
      return getUInt32(getDataVector(vector).data, rowIndex * 4);
    case LogicalTypeId.UBIGINT:
      return getUInt64(getDataVector(vector).data, rowIndex * 8);

    case LogicalTypeId.TIMESTAMP_TZ:
      return new DuckDBTimestampTZValue(
        getInt64(getDataVector(vector).data, rowIndex * 8),
      );
    case LogicalTypeId.TIME_TZ:
      return DuckDBTimeTZValue.fromBits(
        getUInt64(getDataVector(vector).data, rowIndex * 8),
      );

    case LogicalTypeId.BIT: {
      const dv = getDataListVector(vector).data[rowIndex];
      return new DuckDBBitValue(
        new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength),
      );
    }

    case LogicalTypeId.VARINT: {
      const dv = getDataListVector(vector).data[rowIndex];
      return getVarIntFromBytes(
        new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength),
      );
    }

    case LogicalTypeId.UHUGEINT:
      return getUInt128(getDataVector(vector).data, rowIndex * 16);
    case LogicalTypeId.HUGEINT:
      return getInt128(getDataVector(vector).data, rowIndex * 16);

    case LogicalTypeId.UUID:
      return DuckDBUUIDValue.fromStoredHugeint(
        getInt128(getDataVector(vector).data, rowIndex * 16),
      );

    case LogicalTypeId.STRUCT: {
      const { childTypes } = getStructTypeInfo(typeInfo);
      const { data } = getVectorListVector(vector);
      return new DuckDBStructValue(
        Array.from({ length: childTypes.length }).map((_, i) => ({
          key: childTypes[i][0],
          value: duckDBValueFromVector(childTypes[i][1], data[i], rowIndex),
        })),
      );
    }

    case LogicalTypeId.LIST: {
      const { childType } = getListTypeInfo(typeInfo);
      const { child, entries } = getListVector(vector);
      const { offset, length } = entries[rowIndex];
      return new DuckDBListValue(
        Array.from({ length }).map((_, i) =>
          duckDBValueFromVector(childType, child, offset + i),
        ),
      );
    }

    case LogicalTypeId.MAP: {
      const { keyType, valueType } = getMapTypeInfos(typeInfo);
      const { child, entries } = getListVector(vector);
      const { offset, length } = entries[rowIndex];
      const { data } = getVectorListVector(child);
      return new DuckDBMapValue(
        Array.from({ length }).map((_, i) => ({
          key: duckDBValueFromVector(keyType, data[0], offset + i),
          value: duckDBValueFromVector(valueType, data[1], offset + i),
        })),
      );
    }

    case LogicalTypeId.ENUM: {
      const { values } = getEnumTypeInfo(typeInfo);
      if (values.length < 256) {
        return values[getUInt8(getDataVector(vector).data, rowIndex)];
      } else if (values.length < 65536) {
        return values[getUInt16(getDataVector(vector).data, rowIndex * 2)];
      } else if (values.length < 4294967296) {
        return values[getUInt32(getDataVector(vector).data, rowIndex * 4)];
      }
      throw new Error(`unsupported enum size: values.length=${values.length}`);
    }

    case LogicalTypeId.UNION: {
      const { childTypes } = getStructTypeInfo(typeInfo);
      const { data } = getVectorListVector(vector);
      const tag = Number(
        duckDBValueFromVector(childTypes[0][1], data[0], rowIndex),
      );
      const altIndex = tag + 1;
      return duckDBValueFromVector(
        childTypes[altIndex][1],
        data[altIndex],
        rowIndex,
      );
    }

    case LogicalTypeId.ARRAY: {
      const { childType, size } = getArrayTypeInfo(typeInfo);
      const { child } = getArrayVector(vector);
      return new DuckDBArrayValue(
        Array.from({ length: size }).map((_, i) =>
          duckDBValueFromVector(childType, child, rowIndex * size + i),
        ),
      );
    }

    default:
      throw new Error(`type not implemented: ${id}`);
  }
}
@@ -0,0 +1,7 @@
import { getUInt64 } from './dataViewReaders.js';

export function isRowValid(validity: DataView | null, row: number): boolean {
  if (!validity) return true;
  const bigint = getUInt64(validity, Math.floor(row / 64) * 8);
  return (bigint & (1n << BigInt(row % 64))) !== 0n;
}
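The validity mask packs one bit per row into little-endian 64-bit words. An illustrative check (assuming a little-endian host for the BigUint64Array initialization, since getUInt64 reads little-endian):

const words = new BigUint64Array([0b01n, 1n]); // rows 0 and 64 valid, row 1 null
const validity = new DataView(words.buffer);
console.log(isRowValid(validity, 0)); // true: bit 0 of word 0 is set
console.log(isRowValid(validity, 1)); // false: bit 1 of word 0 is clear
console.log(isRowValid(validity, 64)); // true: bit 0 of word 1 is set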
@@ -0,0 +1,93 @@
import {
  ArrayTypeInfo,
  DecimalTypeInfo,
  EnumTypeInfo,
  ListTypeInfo,
  StructTypeInfo,
  TypeIdAndInfo,
  TypeInfo,
} from '../../serialization/types/TypeInfo.js';

export function getArrayTypeInfo(
  typeInfo: TypeInfo | undefined,
): ArrayTypeInfo {
  if (!typeInfo) {
    throw new Error(`ARRAY has no typeInfo!`);
  }
  if (typeInfo.kind !== 'array') {
    throw new Error(`ARRAY has unexpected typeInfo.kind: ${typeInfo.kind}`);
  }
  return typeInfo;
}

export function getDecimalTypeInfo(
  typeInfo: TypeInfo | undefined,
): DecimalTypeInfo {
  if (!typeInfo) {
    throw new Error(`DECIMAL has no typeInfo!`);
  }
  if (typeInfo.kind !== 'decimal') {
    throw new Error(`DECIMAL has unexpected typeInfo.kind: ${typeInfo.kind}`);
  }
  return typeInfo;
}

export function getEnumTypeInfo(typeInfo: TypeInfo | undefined): EnumTypeInfo {
  if (!typeInfo) {
    throw new Error(`ENUM has no typeInfo!`);
  }
  if (typeInfo.kind !== 'enum') {
    throw new Error(`ENUM has unexpected typeInfo.kind: ${typeInfo.kind}`);
  }
  return typeInfo;
}

export function getListTypeInfo(typeInfo: TypeInfo | undefined): ListTypeInfo {
  if (!typeInfo) {
    throw new Error(`LIST has no typeInfo!`);
  }
  if (typeInfo.kind !== 'list') {
    throw new Error(`LIST has unexpected typeInfo.kind: ${typeInfo.kind}`);
  }
  return typeInfo;
}

export function getStructTypeInfo(
  typeInfo: TypeInfo | undefined,
): StructTypeInfo {
  if (!typeInfo) {
    throw new Error(`STRUCT has no typeInfo!`);
  }
  if (typeInfo.kind !== 'struct') {
    throw new Error(`STRUCT has unexpected typeInfo.kind: ${typeInfo.kind}`);
  }
  return typeInfo;
}

export function getMapTypeInfos(typeInfo: TypeInfo | undefined): {
  keyType: TypeIdAndInfo;
  valueType: TypeIdAndInfo;
} {
  // MAP = LIST(STRUCT(key KEY_TYPE, value VALUE_TYPE))
  const { childType } = getListTypeInfo(typeInfo);
  const { childTypes } = getStructTypeInfo(childType.typeInfo);
  if (childTypes.length !== 2) {
    throw new Error(
      `MAP childType has unexpected childTypes length: ${childTypes.length}`,
    );
  }
  if (childTypes[0].length !== 2) {
    throw new Error(
      `MAP childType has unexpected childTypes[0] length: ${childTypes[0].length}`,
    );
  }
  if (childTypes[1].length !== 2) {
    throw new Error(
      `MAP childType has unexpected childTypes[1] length: ${childTypes[1].length}`,
    );
  }
  return {
    keyType: childTypes[0][1],
    valueType: childTypes[1][1],
  };
}
@@ -0,0 +1,51 @@
import {
  ArrayVector,
  DataListVector,
  DataVector,
  ListVector,
  StringVector,
  Vector,
  VectorListVector,
} from '../../serialization/types/Vector.js';

export function getDataVector(vector: Vector): DataVector {
  if (vector.kind !== 'data') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}

export function getStringVector(vector: Vector): StringVector {
  if (vector.kind !== 'string') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}

export function getDataListVector(vector: Vector): DataListVector {
  if (vector.kind !== 'datalist') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}

export function getVectorListVector(vector: Vector): VectorListVector {
  if (vector.kind !== 'vectorlist') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}

export function getListVector(vector: Vector): ListVector {
  if (vector.kind !== 'list') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}

export function getArrayVector(vector: Vector): ArrayVector {
  if (vector.kind !== 'array') {
    throw new Error(`Unexpected vector.kind: ${vector.kind}`);
  }
  return vector;
}
@@ -0,0 +1,42 @@
import { DuckDBData } from '@duckdb/data-reader';
import { DuckDBType } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { duckDBTypeFromTypeIdAndInfo } from '../../conversion/functions/duckDBTypeFromTypeIdAndInfo.js';
import { duckDBValueFromVector } from '../../conversion/functions/duckDBValueFromVector.js';
import { ColumnNamesAndTypes } from '../../serialization/types/ColumnNamesAndTypes.js';
import { DataChunk } from '../../serialization/types/DataChunk.js';

export class DuckDBDataChunk extends DuckDBData {
  constructor(
    private columnNamesAndTypes: ColumnNamesAndTypes,
    private chunk: DataChunk,
  ) {
    super();
  }

  get columnCount() {
    return this.columnNamesAndTypes.names.length;
  }

  get rowCount() {
    return this.chunk.rowCount;
  }

  columnName(columnIndex: number): string {
    return this.columnNamesAndTypes.names[columnIndex];
  }

  columnType(columnIndex: number): DuckDBType {
    return duckDBTypeFromTypeIdAndInfo(
      this.columnNamesAndTypes.types[columnIndex],
    );
  }

  value(columnIndex: number, rowIndex: number): DuckDBValue {
    return duckDBValueFromVector(
      this.columnNamesAndTypes.types[columnIndex],
      this.chunk.vectors[columnIndex],
      rowIndex,
    );
  }
}
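A sketch of reading a deserialized chunk through this adapter (here `result` is assumed to be a SuccessQueryResult from readQueryResult, with at least one chunk):

const data = new DuckDBDataChunk(result.columnNamesAndTypes, result.chunks[0]);
for (let row = 0; row < data.rowCount; row++) {
  for (let col = 0; col < data.columnCount; col++) {
    console.log(data.columnName(col), data.value(col, row));
  }
}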
@@ -0,0 +1,51 @@
import {
  AsyncDuckDBDataBatchIterator,
  DuckDBData,
  DuckDBDataBatchIteratorResult,
} from '@duckdb/data-reader';
import { SuccessQueryResult } from '../../serialization/types/QueryResult.js';
import { DuckDBDataChunk } from './DuckDBDataChunk.js';

const ITERATOR_DONE: DuckDBDataBatchIteratorResult = Object.freeze({
  done: true,
  value: undefined,
});

export class DuckDBDataChunkIterator implements AsyncDuckDBDataBatchIterator {
  private result: SuccessQueryResult;

  private index: number;

  constructor(result: SuccessQueryResult) {
    this.result = result;
    this.index = 0;
  }

  async next(): Promise<DuckDBDataBatchIteratorResult> {
    if (this.index < this.result.chunks.length) {
      return {
        done: false,
        value: new DuckDBDataChunk(
          this.result.columnNamesAndTypes,
          this.result.chunks[this.index++],
        ),
      };
    }
    return ITERATOR_DONE;
  }

  async return(value?: DuckDBData): Promise<DuckDBDataBatchIteratorResult> {
    if (value) {
      return { done: true, value };
    }
    return ITERATOR_DONE;
  }

  async throw(_e?: unknown): Promise<DuckDBDataBatchIteratorResult> {
    return ITERATOR_DONE;
  }

  [Symbol.asyncIterator](): AsyncDuckDBDataBatchIterator {
    return this;
  }
}
@@ -0,0 +1,155 @@
import 'core-js/actual/promise/with-resolvers.js';
import { randomString } from '../../util/functions/randomString.js';
import { sendDuckDBUIHttpRequest } from '../functions/sendDuckDBUIHttpRequest.js';

export interface DuckDBUIHttpRequestQueueResult {
  buffer: ArrayBuffer;
  startTimeMs: number;
  endTimeMs: number;
}

export interface DuckDBUIHttpRequestQueueEntry {
  id: string;
  url: string;
  body: string;
  headers?: Headers;
  deferredResult: PromiseWithResolvers<DuckDBUIHttpRequestQueueResult>;
  canceled?: boolean;
}

export class DuckDBUIHttpRequestQueue {
  /**
   * Invariants: The first entry in the queue has been sent and we're waiting for its response. If the first entry is
   * canceled, it remains in the queue until its response is received. If an entry other than the first is canceled, it
   * remains in the queue until it comes to the front, at which point it is removed without being sent.
   */
  private entries: DuckDBUIHttpRequestQueueEntry[] = [];

  public get length() {
    return this.entries.length;
  }

  public enqueueAndWait(
    url: string,
    body: string,
    headers?: Headers,
  ): Promise<DuckDBUIHttpRequestQueueResult> {
    return this.internalEnqueue(url, body, headers).deferredResult.promise;
  }

  public enqueue(url: string, body: string, headers?: Headers): string {
    return this.internalEnqueue(url, body, headers).id;
  }

  public enqueuedResult(id: string): Promise<DuckDBUIHttpRequestQueueResult> {
    const index = this.entries.findIndex((entry) => entry.id === id);
    if (index < 0) {
      throw new Error(`Invalid id: ${id}`);
    }
    return this.entries[index].deferredResult.promise;
  }

  public cancel(id: string, errorMessage?: string) {
    const index = this.entries.findIndex((entry) => entry.id === id);
    if (index >= 0) {
      // Mark the entry as canceled and reject its promise. If it was already sent, then we'll remove it from the queue
      // when we get its response. If not, then we'll remove it when the (non-canceled) request before it completes. The
      // caller may or may not arrange for the response to return early with an error, for example, by interrupting it;
      // whether that happens doesn't change how the queue operates.
      this.entries[index].canceled = true;
      this.entries[index].deferredResult.reject(
        new Error(errorMessage ?? 'query was canceled'),
      );
    } else {
      console.warn(`Couldn't cancel; no entry found for id: ${id}`);
    }
  }

  /**
   * Returns true if the given entry id is the front of the queue.
   * Note that it may be canceled.
   */
  public isCurrent(id: string): boolean {
    return this.entries.length > 0 && this.entries[0].id === id;
  }

  private internalEnqueue(
    url: string,
    body: string,
    headers?: Headers,
  ): DuckDBUIHttpRequestQueueEntry {
    const id = randomString();
    const deferredResult =
      Promise.withResolvers<DuckDBUIHttpRequestQueueResult>();
    const entry: DuckDBUIHttpRequestQueueEntry = {
      id,
      url,
      body,
      headers,
      deferredResult,
    };
    this.entries.push(entry);
    // If the new entry is the first in our queue, then send it.
    if (this.entries.length === 1) {
      this.sendRequest(this.entries[0]);
    }
    return entry;
  }

  private handleResponse(
    entryId: string,
    startTimeMs: number,
    buffer: ArrayBuffer | undefined,
    reason?: unknown,
  ) {
    if (this.entries.length === 0) {
      console.warn(
        `DuckDBUIHttpRequestQueue.handleResponse(entryId=${entryId}): queue unexpectedly empty`,
      );
      return;
    }
    if (this.entries[0].id !== entryId) {
      console.warn(
        `DuckDBUIHttpRequestQueue.handleResponse(entryId=${entryId}): front of queue doesn't match response`,
      );
      return;
    }
    // Remove the entry corresponding to this response.
    const entry = this.entries.shift();
    // There should always be an entry because of the length check above, but we need to appease the compiler.
    // If the entry was canceled, we've already rejected the promise, so there's nothing more to do.
    if (entry && !entry.canceled) {
      if (buffer) {
        const endTimeMs = performance.now();
        // If the entry has a valid buffer, then resolve its promise to it.
        entry.deferredResult.resolve({ buffer, startTimeMs, endTimeMs });
      } else {
        // Otherwise, reject it with the provided reason.
        entry.deferredResult.reject(reason);
      }
    }
    // Send the next request (if there are any).
    this.sendNextInQueue();
  }

  /** If there are any entries in our queue that aren't canceled, send the first one. */
  private sendNextInQueue() {
    // Remove any unsent canceled entries from the front of the queue.
    while (this.entries.length > 0 && this.entries[0].canceled) {
      this.entries.shift();
    }
    // If there's an uncanceled entry left, send it.
    if (this.entries.length > 0) {
      this.sendRequest(this.entries[0]);
    }
  }

  private sendRequest(entry: DuckDBUIHttpRequestQueueEntry) {
    const startTimeMs = performance.now();
    sendDuckDBUIHttpRequest(entry.url, entry.body, entry.headers)
      .then((buffer) => this.handleResponse(entry.id, startTimeMs, buffer))
      .catch((reason) =>
        this.handleResponse(entry.id, startTimeMs, undefined, reason),
      );
  }
}
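A brief sketch of the serialization behavior the invariants above describe (URL and bodies are illustrative):

const queue = new DuckDBUIHttpRequestQueue();

// Both entries are accepted immediately, but only the first request is sent;
// the second goes out when the first response arrives, so responses can be
// matched to entries purely by queue position.
const a = queue.enqueueAndWait('/ddb/run', 'SELECT 1');
const b = queue.enqueueAndWait('/ddb/run', 'SELECT 2');
console.log(await a, await b);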
@@ -0,0 +1,39 @@
import { toBase64 } from '../../util/functions/toBase64.js';

export interface DuckDBUIHttpRequestHeaderOptions {
  description?: string;
  connectionName?: string;
  databaseName?: string;
  parameters?: unknown[];
}

export function makeDuckDBUIHttpRequestHeaders({
  description,
  connectionName,
  databaseName,
  parameters,
}: DuckDBUIHttpRequestHeaderOptions): Headers {
  const headers = new Headers();
  if (description) {
    headers.append('X-DuckDB-UI-Request-Description', description);
  }
  if (connectionName) {
    headers.append('X-DuckDB-UI-Connection-Name', connectionName);
  }
  if (databaseName) {
    // base64 encode the value because it can contain characters invalid in an HTTP header
    headers.append('X-DuckDB-UI-Database-Name', toBase64(databaseName));
  }
  if (parameters) {
    headers.append('X-DuckDB-UI-Parameter-Count', String(parameters.length));
    for (let i = 0; i < parameters.length; i++) {
      // base64 encode the value because it can contain characters invalid in an HTTP header
      // TODO: support non-string parameters?
      headers.append(
        `X-DuckDB-UI-Parameter-Value-${i}`,
        toBase64(String(parameters[i])),
      );
    }
  }
  return headers;
}
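Assuming `toBase64` is standard base64 of the UTF-8 bytes, a call like the following would produce the headers shown in the comments (the connection name is illustrative):

const headers = makeDuckDBUIHttpRequestHeaders({
  connectionName: 'connection_abc123',
  parameters: ['duck', 42],
});
// X-DuckDB-UI-Connection-Name: connection_abc123
// X-DuckDB-UI-Parameter-Count: 2
// X-DuckDB-UI-Parameter-Value-0: ZHVjaw==  (base64 of 'duck')
// X-DuckDB-UI-Parameter-Value-1: NDI=      (base64 of '42')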
@@ -0,0 +1,13 @@
export async function sendDuckDBUIHttpRequest(
  url: string,
  body: string,
  headers?: Headers,
): Promise<ArrayBuffer> {
  const response = await fetch(url, {
    method: 'POST',
    headers,
    body,
  });
  const buffer = await response.arrayBuffer();
  return buffer;
}
1
ts/pkgs/duckdb-ui-client/src/index.ts
Normal file
@@ -0,0 +1 @@
export * from './client/classes/DuckDBUIClient.js';
@@ -0,0 +1,130 @@
import { BinaryStreamReader } from './BinaryStreamReader.js';

export type Reader<T> = (deserializer: BinaryDeserializer) => T;
export type ListReader<T> = (
  deserializer: BinaryDeserializer,
  index: number,
) => T;

const decoder = new TextDecoder();

/**
 * An implementation of a subset of DuckDB's BinaryDeserializer.
 *
 * See:
 * - https://github.com/duckdb/duckdb/blob/main/src/include/duckdb/common/serializer/binary_deserializer.hpp
 * - https://github.com/duckdb/duckdb/blob/main/src/common/serializer/binary_deserializer.cpp
 */
export class BinaryDeserializer {
  private reader: BinaryStreamReader;

  public constructor(reader: BinaryStreamReader) {
    this.reader = reader;
  }

  private peekFieldId() {
    return this.reader.peekUint16(true);
  }

  private consumeFieldId() {
    this.reader.consume(2);
  }

  private checkFieldId(possibleFieldId: number) {
    const fieldId = this.peekFieldId();
    if (fieldId === possibleFieldId) {
      this.consumeFieldId();
      return true;
    }
    return false;
  }

  private expectFieldId(expectedFieldId: number) {
    const fieldId = this.peekFieldId();
    if (fieldId === expectedFieldId) {
      this.consumeFieldId();
    } else {
      throw new Error(
        `Expected field id ${expectedFieldId} but got ${fieldId} (offset=${this.reader.getOffset()})`,
      );
    }
  }

  public expectObjectEnd() {
    this.expectFieldId(0xffff);
  }

  public throwUnsupported() {
    throw new Error(`unsupported type, offset=${this.reader.getOffset()}`);
  }

  public readUint8() {
    return this.reader.readUint8();
  }

  public readVarInt() {
    let result = 0;
    let byte = 0;
    let shift = 0;
    do {
      byte = this.reader.readUint8();
      result |= (byte & 0x7f) << shift;
      shift += 7;
    } while (byte & 0x80);
    return result;
  }

  public readNullable<T>(reader: Reader<T>) {
    const present = this.readUint8();
    if (present) {
      return reader(this);
    }
    return null;
  }

  public readData() {
    const length = this.readVarInt();
    return this.reader.readData(length);
  }

  public readString() {
    const length = this.readVarInt();
    const dv = this.reader.readData(length);
    return decoder.decode(dv);
  }

  public readList<T>(reader: ListReader<T>) {
    const count = this.readVarInt();
    const items: T[] = [];
    for (let i = 0; i < count; i++) {
      items.push(reader(this, i));
    }
    return items;
  }

  public readPair<T, U>(
    firstReader: Reader<T>,
    secondReader: Reader<U>,
  ): [T, U] {
    const first = this.readProperty(0, firstReader);
    const second = this.readProperty(1, secondReader);
    this.expectObjectEnd();
    return [first, second];
  }

  public readProperty<T>(expectedFieldId: number, reader: Reader<T>) {
    this.expectFieldId(expectedFieldId);
    return reader(this);
  }

  public readPropertyWithDefault<T>(
    possibleFieldId: number,
    reader: Reader<T>,
    defaultValue: T,
  ): T {
    if (this.checkFieldId(possibleFieldId)) {
      return reader(this);
    }
    return defaultValue;
  }
}
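readVarInt decodes a LEB128-style varint: seven payload bits per byte, least-significant group first, with the high bit as a continuation flag. A small illustrative decode (BinaryStreamReader is defined just below):

// 300 = 0b1_0010_1100 → bytes [0xac, 0x02]: 0x2c | 0x80, then 0x02.
const reader = new BinaryStreamReader(new Uint8Array([0xac, 0x02]).buffer);
console.log(new BinaryDeserializer(reader).readVarInt()); // 300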
@@ -0,0 +1,48 @@
/**
 * Enables reading or peeking at values of a binary buffer.
 * Subsequent reads start from the end of the previous one.
 */
export class BinaryStreamReader {
  private dv: DataView;

  private offset: number;

  public constructor(buffer: ArrayBuffer) {
    this.dv = new DataView(buffer);
    this.offset = 0;
  }

  public getOffset() {
    return this.offset;
  }

  public peekUint8() {
    return this.dv.getUint8(this.offset);
  }

  public peekUint16(le: boolean) {
    return this.dv.getUint16(this.offset, le);
  }

  public consume(byteCount: number) {
    this.offset += byteCount;
  }

  private offsetBeforeConsume(byteCount: number) {
    const offsetBefore = this.offset;
    this.consume(byteCount);
    return offsetBefore;
  }

  public readUint8() {
    return this.dv.getUint8(this.offsetBeforeConsume(1));
  }

  public readData(length: number) {
    return new DataView(
      this.dv.buffer,
      this.offsetBeforeConsume(length),
      length,
    );
  }
}
@@ -0,0 +1,42 @@
/**
 * Copy of DuckDB's LogicalTypeId.
 *
 * See LogicalTypeId in https://github.com/duckdb/duckdb/blob/main/src/include/duckdb/common/types.hpp
 */
export const LogicalTypeId = {
  BOOLEAN: 10,
  TINYINT: 11,
  SMALLINT: 12,
  INTEGER: 13,
  BIGINT: 14,
  DATE: 15,
  TIME: 16,
  TIMESTAMP_SEC: 17,
  TIMESTAMP_MS: 18,
  TIMESTAMP: 19,
  TIMESTAMP_NS: 20,
  DECIMAL: 21,
  FLOAT: 22,
  DOUBLE: 23,
  CHAR: 24,
  VARCHAR: 25,
  BLOB: 26,
  INTERVAL: 27,
  UTINYINT: 28,
  USMALLINT: 29,
  UINTEGER: 30,
  UBIGINT: 31,
  TIMESTAMP_TZ: 32,
  TIME_TZ: 34,
  BIT: 36,
  VARINT: 39,
  UHUGEINT: 49,
  HUGEINT: 50,
  UUID: 54,
  STRUCT: 100,
  LIST: 101,
  MAP: 102,
  ENUM: 104,
  UNION: 107,
  ARRAY: 108,
};
@@ -0,0 +1,63 @@
import {
  BinaryDeserializer,
  ListReader,
  Reader,
} from '../classes/BinaryDeserializer.js';

export function readUnsupported(deserializer: BinaryDeserializer): void {
  deserializer.throwUnsupported();
}

export function readNullable<T>(
  deserializer: BinaryDeserializer,
  reader: Reader<T>,
): T | null {
  return deserializer.readNullable(reader);
}

export function readUint8(deserializer: BinaryDeserializer): number {
  return deserializer.readUint8();
}

export function readBoolean(deserializer: BinaryDeserializer): boolean {
  return deserializer.readUint8() !== 0;
}

export function readVarInt(deserializer: BinaryDeserializer): number {
  return deserializer.readVarInt();
}

export function readVarIntList(deserializer: BinaryDeserializer): number[] {
  return readList(deserializer, readVarInt);
}

export function readData(deserializer: BinaryDeserializer): DataView {
  return deserializer.readData();
}

export function readDataList(deserializer: BinaryDeserializer): DataView[] {
  return readList(deserializer, readData);
}

export function readString(deserializer: BinaryDeserializer): string {
  return deserializer.readString();
}

export function readList<T>(
  deserializer: BinaryDeserializer,
  reader: ListReader<T>,
): T[] {
  return deserializer.readList(reader);
}

export function readStringList(deserializer: BinaryDeserializer): string[] {
  return readList(deserializer, readString);
}

export function readPair<T, U>(
  deserializer: BinaryDeserializer,
  firstReader: Reader<T>,
  secondReader: Reader<U>,
): [T, U] {
  return deserializer.readPair(firstReader, secondReader);
}
@@ -0,0 +1,10 @@
import { BinaryDeserializer } from '../classes/BinaryDeserializer.js';
import { BinaryStreamReader } from '../classes/BinaryStreamReader.js';

export function deserializerFromBuffer(
  buffer: ArrayBuffer,
): BinaryDeserializer {
  const streamReader = new BinaryStreamReader(buffer);
  const deserializer = new BinaryDeserializer(streamReader);
  return deserializer;
}
@@ -0,0 +1,85 @@
import { BinaryDeserializer } from '../classes/BinaryDeserializer.js';
import { ColumnNamesAndTypes } from '../types/ColumnNamesAndTypes.js';
import { DataChunk } from '../types/DataChunk.js';
import {
  ErrorQueryResult,
  QueryResult,
  SuccessQueryResult,
} from '../types/QueryResult.js';
import { TokenizeResult } from '../types/TokenizeResult.js';
import { TypeIdAndInfo } from '../types/TypeInfo.js';
import {
  readBoolean,
  readList,
  readString,
  readStringList,
  readVarInt,
  readVarIntList,
} from './basicReaders.js';
import { readTypeList } from './typeReaders.js';
import { readVectorList } from './vectorReaders.js';

export function readTokenizeResult(
  deserializer: BinaryDeserializer,
): TokenizeResult {
  const offsets = deserializer.readProperty(100, readVarIntList);
  const types = deserializer.readProperty(101, readVarIntList);
  deserializer.expectObjectEnd();
  return { offsets, types };
}

export function readColumnNamesAndTypes(
  deserializer: BinaryDeserializer,
): ColumnNamesAndTypes {
  const names = deserializer.readProperty(100, readStringList);
  const types = deserializer.readProperty(101, readTypeList);
  deserializer.expectObjectEnd();
  return { names, types };
}

export function readChunk(
  deserializer: BinaryDeserializer,
  types: TypeIdAndInfo[],
): DataChunk {
  const rowCount = deserializer.readProperty(100, readVarInt);
  const vectors = deserializer.readProperty(101, (d) =>
    readVectorList(d, types),
  );
  deserializer.expectObjectEnd();
  return { rowCount, vectors };
}

export function readDataChunkList(
  deserializer: BinaryDeserializer,
  types: TypeIdAndInfo[],
): DataChunk[] {
  return readList(deserializer, (d) => readChunk(d, types));
}

export function readSuccessQueryResult(
  deserializer: BinaryDeserializer,
): SuccessQueryResult {
  const columnNamesAndTypes = deserializer.readProperty(
    101,
    readColumnNamesAndTypes,
  );
  const chunks = deserializer.readProperty(102, (d) =>
    readDataChunkList(d, columnNamesAndTypes.types),
  );
  return { success: true, columnNamesAndTypes, chunks };
}

export function readErrorQueryResult(
  deserializer: BinaryDeserializer,
): ErrorQueryResult {
  const error = deserializer.readProperty(101, readString);
  return { success: false, error };
}

export function readQueryResult(deserializer: BinaryDeserializer): QueryResult {
  const success = deserializer.readProperty(100, readBoolean);
  if (success) {
    return readSuccessQueryResult(deserializer);
  }
  return readErrorQueryResult(deserializer);
}
@@ -0,0 +1,8 @@
import { TokenizeResult } from '../types/TokenizeResult.js';
import { deserializerFromBuffer } from './deserializeFromBuffer.js';
import { readTokenizeResult } from './resultReaders.js';

export function tokenizeResultFromBuffer(buffer: ArrayBuffer): TokenizeResult {
  const deserializer = deserializerFromBuffer(buffer);
  return readTokenizeResult(deserializer);
}
@@ -0,0 +1,137 @@
import { BinaryDeserializer } from '../classes/BinaryDeserializer.js';
import { BaseTypeInfo, TypeIdAndInfo, TypeInfo } from '../types/TypeInfo.js';
import {
  readList,
  readNullable,
  readPair,
  readString,
  readStringList,
  readUint8,
  readUnsupported,
  readVarInt,
} from './basicReaders.js';

export function readStructEntry(
  deserializer: BinaryDeserializer,
): [string, TypeIdAndInfo] {
  return readPair(deserializer, readString, readType);
}

export function readStructEntryList(
  deserializer: BinaryDeserializer,
): [string, TypeIdAndInfo][] {
  return readList(deserializer, readStructEntry);
}

/** See ExtraTypeInfo::Deserialize in https://github.com/duckdb/duckdb/blob/main/src/storage/serialization/serialize_types.cpp */
export function readTypeInfo(deserializer: BinaryDeserializer): TypeInfo {
  const typeInfoType = deserializer.readProperty(100, readUint8);
  const alias = deserializer.readPropertyWithDefault(101, readString, null);
  const modifiers = deserializer.readPropertyWithDefault(
    102,
    readUnsupported,
    null,
  );
  const baseInfo: BaseTypeInfo = {
    ...(alias ? { alias } : {}),
    ...(modifiers ? { modifiers } : {}),
  };
  let typeInfo: TypeInfo | undefined;
  switch (typeInfoType) {
    case 1: // GENERIC_TYPE_INFO
      typeInfo = {
        ...baseInfo,
        kind: 'generic',
      };
      break;
    case 2: // DECIMAL_TYPE_INFO
      {
        const width = deserializer.readPropertyWithDefault(200, readUint8, 0);
        const scale = deserializer.readPropertyWithDefault(201, readUint8, 0);
        typeInfo = {
          ...baseInfo,
          kind: 'decimal',
          width,
          scale,
        };
      }
      break;
    case 4: // LIST_TYPE_INFO
      {
        const childType = deserializer.readProperty(200, readType);
        typeInfo = {
          ...baseInfo,
          kind: 'list',
          childType,
        };
      }
      break;
    case 5: // STRUCT_TYPE_INFO
      {
        const childTypes = deserializer.readProperty(200, readStructEntryList);
        typeInfo = {
          ...baseInfo,
          kind: 'struct',
          childTypes,
        };
      }
      break;
    case 6: // ENUM_TYPE_INFO
      {
        const valuesCount = deserializer.readProperty(200, readVarInt);
        const values = deserializer.readProperty(201, readStringList);
        typeInfo = {
          ...baseInfo,
          kind: 'enum',
          valuesCount,
          values,
        };
      }
      break;
    case 9: // ARRAY_TYPE_INFO
      {
        const childType = deserializer.readProperty(200, readType);
        const size = deserializer.readPropertyWithDefault(201, readVarInt, 0);
        typeInfo = {
          ...baseInfo,
          kind: 'array',
          childType,
          size,
        };
      }
      break;
    default:
      throw new Error(`unsupported type info: ${typeInfoType}`);
  }
  deserializer.expectObjectEnd();
  if (!typeInfo) {
    typeInfo = {
      ...baseInfo,
      kind: 'generic',
    };
  }
  return typeInfo;
}

export function readNullableTypeInfo(
  deserializer: BinaryDeserializer,
): TypeInfo | null {
  return readNullable(deserializer, readTypeInfo);
}

export function readType(deserializer: BinaryDeserializer): TypeIdAndInfo {
  const id = deserializer.readProperty(100, readUint8);
  const typeInfo = deserializer.readPropertyWithDefault(
    101,
    readNullableTypeInfo,
    null,
  );
  deserializer.expectObjectEnd();
  return { id, ...(typeInfo ? { typeInfo } : {}) };
}

export function readTypeList(
  deserializer: BinaryDeserializer,
): TypeIdAndInfo[] {
  return readList(deserializer, readType);
}
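A worked byte example may help fix the wire format in mind. Judging from the BinaryDeserializer tests later in this commit, field ids are little-endian uint16 and objects end with 0xFF 0xFF; under that assumption, a bare type with no extra info decodes like this (the module name in the import is hypothetical):

import { BinaryDeserializer } from '../classes/BinaryDeserializer.js';
import { BinaryStreamReader } from '../classes/BinaryStreamReader.js';
import { LogicalTypeId } from '../constants/LogicalTypeId.js';
import { readType } from './typeReaders.js'; // hypothetical module name

// Field 100 (uint16 LE) = the type id byte; field 101 is absent, so
// typeInfo falls back to the default; 0xFF 0xFF closes the object.
const bytes = new Uint8Array([100, 0, LogicalTypeId.INTEGER, 0xff, 0xff]);
const deserializer = new BinaryDeserializer(
  new BinaryStreamReader(bytes.buffer),
);
const type = readType(deserializer); // { id: LogicalTypeId.INTEGER }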
@@ -0,0 +1,181 @@
import { BinaryDeserializer } from '../classes/BinaryDeserializer.js';
import { LogicalTypeId } from '../constants/LogicalTypeId.js';
import { TypeIdAndInfo } from '../types/TypeInfo.js';
import { BaseVector, ListEntry, Vector } from '../types/Vector.js';
import {
  readData,
  readDataList,
  readList,
  readStringList,
  readUint8,
  readVarInt,
} from './basicReaders.js';

export function readListEntry(deserializer: BinaryDeserializer): ListEntry {
  const offset = deserializer.readProperty(100, readVarInt);
  const length = deserializer.readProperty(101, readVarInt);
  deserializer.expectObjectEnd();
  return { offset, length };
}

export function readListEntryList(
  deserializer: BinaryDeserializer,
): ListEntry[] {
  return readList(deserializer, readListEntry);
}

/** See Vector::Deserialize in https://github.com/duckdb/duckdb/blob/main/src/common/types/vector.cpp */
export function readVector(
  deserializer: BinaryDeserializer,
  type: TypeIdAndInfo,
): Vector {
  const allValid = deserializer.readProperty(100, readUint8);
  const validity = allValid ? deserializer.readProperty(101, readData) : null;
  const baseVector: BaseVector = { allValid, validity };
  let vector: Vector | undefined;
  switch (type.id) {
    case LogicalTypeId.BOOLEAN:
    case LogicalTypeId.TINYINT:
    case LogicalTypeId.SMALLINT:
    case LogicalTypeId.INTEGER:
    case LogicalTypeId.BIGINT:
    case LogicalTypeId.DATE:
    case LogicalTypeId.TIME:
    case LogicalTypeId.TIMESTAMP_SEC:
    case LogicalTypeId.TIMESTAMP_MS:
    case LogicalTypeId.TIMESTAMP:
    case LogicalTypeId.TIMESTAMP_NS:
    case LogicalTypeId.DECIMAL:
    case LogicalTypeId.FLOAT:
    case LogicalTypeId.DOUBLE:
    case LogicalTypeId.INTERVAL:
    case LogicalTypeId.UTINYINT:
    case LogicalTypeId.USMALLINT:
    case LogicalTypeId.UINTEGER:
    case LogicalTypeId.UBIGINT:
    case LogicalTypeId.TIMESTAMP_TZ:
    case LogicalTypeId.TIME_TZ:
    case LogicalTypeId.UHUGEINT:
    case LogicalTypeId.HUGEINT:
    case LogicalTypeId.UUID:
    case LogicalTypeId.ENUM:
      {
        const data = deserializer.readProperty(102, readData);
        vector = {
          ...baseVector,
          kind: 'data',
          data,
        };
      }
      break;
    case LogicalTypeId.CHAR:
    case LogicalTypeId.VARCHAR:
      {
        const data = deserializer.readProperty(102, readStringList);
        vector = {
          ...baseVector,
          kind: 'string',
          data,
        };
      }
      break;
    case LogicalTypeId.BLOB:
    case LogicalTypeId.BIT:
    case LogicalTypeId.VARINT:
      {
        const data = deserializer.readProperty(102, readDataList);
        vector = {
          ...baseVector,
          kind: 'datalist',
          data,
        };
      }
      break;
    case LogicalTypeId.STRUCT:
    case LogicalTypeId.UNION:
      {
        const { typeInfo } = type;
        if (!typeInfo) {
          throw new Error(`STRUCT or UNION without typeInfo`);
        }
        if (typeInfo.kind !== 'struct') {
          throw new Error(
            `STRUCT or UNION with wrong typeInfo kind: ${typeInfo.kind}`,
          );
        }
        const types = typeInfo.childTypes.map((e) => e[1]);
        const data = deserializer.readProperty(103, (d) =>
          readVectorList(d, types),
        );
        vector = {
          ...baseVector,
          kind: 'vectorlist',
          data,
        };
      }
      break;
    case LogicalTypeId.LIST:
    case LogicalTypeId.MAP:
      {
        const { typeInfo } = type;
        if (!typeInfo) {
          throw new Error(`LIST or MAP without typeInfo`);
        }
        if (typeInfo.kind !== 'list') {
          throw new Error(
            `LIST or MAP with wrong typeInfo kind: ${typeInfo.kind}`,
          );
        }
        const listSize = deserializer.readProperty(104, readVarInt);
        const entries = deserializer.readProperty(105, readListEntryList);
        const child = deserializer.readProperty(106, (d) =>
          readVector(d, typeInfo.childType),
        );
        vector = {
          ...baseVector,
          kind: 'list',
          listSize,
          entries,
          child,
        };
      }
      break;
    case LogicalTypeId.ARRAY:
      {
        const { typeInfo } = type;
        if (!typeInfo) {
          throw new Error(`ARRAY without typeInfo`);
        }
        if (typeInfo.kind !== 'array') {
          throw new Error(`ARRAY with wrong typeInfo kind: ${typeInfo.kind}`);
        }
        const arraySize = deserializer.readProperty(103, readVarInt);
        const child = deserializer.readProperty(104, (d) =>
          readVector(d, typeInfo.childType),
        );
        vector = {
          ...baseVector,
          kind: 'array',
          arraySize,
          child,
        };
      }
      break;
    default:
      throw new Error(`unrecognized type id: ${type.id}`);
  }
  deserializer.expectObjectEnd();
  if (!vector) {
    throw new Error('unknown vector type');
  }
  return vector;
}

export function readVectorList(
  deserializer: BinaryDeserializer,
  types: TypeIdAndInfo[],
): Vector[] {
  return readList(deserializer, (d: BinaryDeserializer, i: number) =>
    readVector(d, types[i]),
  );
}
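One subtlety worth a sketch: field 100 (allValid) is truthy exactly when a validity mask follows, i.e. when some rows may be NULL. Assuming DuckDB's usual one-bit-per-row mask with the least significant bit first (an assumption, not confirmed by this commit), a row-validity check could look like:

import type { Vector } from '../types/Vector.js';

// A hedged sketch; the bit layout of the validity mask is assumed here.
function isRowValid(vector: Vector, row: number): boolean {
  if (!vector.allValid || !vector.validity) {
    return true; // no mask was serialized, so every row is valid
  }
  const byte = vector.validity.getUint8(row >> 3);
  return (byte & (1 << (row & 7))) !== 0;
}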
@@ -0,0 +1,6 @@
import { TypeIdAndInfo } from './TypeInfo.js';

export interface ColumnNamesAndTypes {
  names: string[];
  types: TypeIdAndInfo[];
}
@@ -0,0 +1,6 @@
import { Vector } from './Vector.js';

export interface DataChunk {
  rowCount: number;
  vectors: Vector[];
}
@@ -0,0 +1,15 @@
import { ColumnNamesAndTypes } from './ColumnNamesAndTypes.js';
import { DataChunk } from './DataChunk.js';

export interface SuccessQueryResult {
  success: true;
  columnNamesAndTypes: ColumnNamesAndTypes;
  chunks: DataChunk[];
}

export interface ErrorQueryResult {
  success: false;
  error: string;
}

export type QueryResult = SuccessQueryResult | ErrorQueryResult;
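Because `success` is a literal-typed discriminant, callers can narrow without casts; a small illustrative helper (not part of this commit):

function unwrapQueryResult(result: QueryResult): SuccessQueryResult {
  if (!result.success) {
    // Narrowed to ErrorQueryResult on this branch.
    throw new Error(result.error);
  }
  return result; // narrowed to SuccessQueryResult
}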
@@ -0,0 +1,4 @@
export interface TokenizeResult {
  offsets: number[];
  types: number[];
}
53
ts/pkgs/duckdb-ui-client/src/serialization/types/TypeInfo.ts
Normal file
@@ -0,0 +1,53 @@
export interface BaseTypeInfo {
  alias?: string;
  modifiers?: unknown[]; // TODO
}

export interface GenericTypeInfo extends BaseTypeInfo {
  kind: 'generic';
}

export interface DecimalTypeInfo extends BaseTypeInfo {
  kind: 'decimal';
  width: number;
  scale: number;
}

export interface ListTypeInfo extends BaseTypeInfo {
  kind: 'list';
  childType: TypeIdAndInfo;
}

export interface StructTypeInfo extends BaseTypeInfo {
  kind: 'struct';
  childTypes: [string, TypeIdAndInfo][];
}

export interface EnumTypeInfo extends BaseTypeInfo {
  kind: 'enum';
  valuesCount: number;
  values: string[];
}

export interface ArrayTypeInfo extends BaseTypeInfo {
  kind: 'array';
  childType: TypeIdAndInfo;
  size: number;
}

/** See https://github.com/duckdb/duckdb/blob/main/src/include/duckdb/common/extra_type_info.hpp */
export type TypeInfo =
  | GenericTypeInfo
  | DecimalTypeInfo
  | ListTypeInfo
  | StructTypeInfo
  | EnumTypeInfo
  | ArrayTypeInfo;

export interface TypeIdAndInfo {
  /** LogicalTypeId */
  id: number;

  /** Extra info for some types. */
  typeInfo?: TypeInfo;
}
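For a concrete picture, a LIST(VARCHAR) column type would be represented as nested TypeIdAndInfo values along these lines (illustrative only; the ids come from the LogicalTypeId constants):

import { LogicalTypeId } from '../constants/LogicalTypeId.js';

const listOfVarchar: TypeIdAndInfo = {
  id: LogicalTypeId.LIST,
  typeInfo: {
    kind: 'list',
    childType: { id: LogicalTypeId.VARCHAR },
  },
};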
51
ts/pkgs/duckdb-ui-client/src/serialization/types/Vector.ts
Normal file
@@ -0,0 +1,51 @@
export interface ListEntry {
  offset: number;
  length: number;
}

export interface BaseVector {
  allValid: number;
  validity: DataView | null;
}

export interface DataVector extends BaseVector {
  kind: 'data';
  data: DataView;
}

export interface StringVector extends BaseVector {
  kind: 'string';
  data: string[];
}

export interface DataListVector extends BaseVector {
  kind: 'datalist';
  data: DataView[];
}

export interface VectorListVector extends BaseVector {
  kind: 'vectorlist';
  data: Vector[];
}

export interface ListVector extends BaseVector {
  kind: 'list';
  listSize: number;
  entries: ListEntry[];
  child: Vector;
}

export interface ArrayVector extends BaseVector {
  kind: 'array';
  arraySize: number;
  child: Vector;
}

/** See https://github.com/duckdb/duckdb/blob/main/src/include/duckdb/common/types/vector.hpp */
export type Vector =
  | DataVector
  | StringVector
  | DataListVector
  | VectorListVector
  | ListVector
  | ArrayVector;
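Since `kind` discriminates the union, a switch narrows each arm and the compiler flags any unhandled variant; a small sketch:

function describeVector(vector: Vector): string {
  switch (vector.kind) {
    case 'data':
      return `data (${vector.data.byteLength} bytes)`;
    case 'string':
      return `string (${vector.data.length} values)`;
    case 'datalist':
      return `datalist (${vector.data.length} blobs)`;
    case 'vectorlist':
      return `vectorlist (${vector.data.length} child vectors)`;
    case 'list':
      return `list (${vector.entries.length} entries)`;
    case 'array':
      return `array (size ${vector.arraySize})`;
  }
}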
6
ts/pkgs/duckdb-ui-client/src/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.library.json",
  "compilerOptions": {
    "outDir": "../out"
  }
}
@@ -0,0 +1,8 @@
export function randomString(
  length: number = 12,
  chars: string = '$0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz',
): string {
  return Array.from({ length })
    .map((_) => chars[Math.floor(Math.random() * chars.length)])
    .join('');
}
10
ts/pkgs/duckdb-ui-client/src/util/functions/toBase64.ts
Normal file
@@ -0,0 +1,10 @@
const encoder = new TextEncoder();

export function toBase64(input: string): string {
  const encoded = encoder.encode(input);
  // For the reason behind this step, see https://developer.mozilla.org/en-US/docs/Web/API/Window/btoa#unicode_strings
  const binaryString = Array.from(encoded, (codePoint) =>
    String.fromCodePoint(codePoint),
  ).join('');
  return btoa(binaryString);
}
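Worked through by hand: 'duck' UTF-8-encodes to the bytes 0x64 0x75 0x63 0x6b, which base64 to 'ZHVjaw=='. Multi-byte code points survive because the string is expanded to one char per byte before btoa:

toBase64('duck'); // 'ZHVjaw=='
toBase64('🦆'); // base64 of the UTF-8 bytes 0xF0 0x9F 0xA6 0x86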
8
ts/pkgs/duckdb-ui-client/test/helpers/makeBuffer.ts
Normal file
@@ -0,0 +1,8 @@
export function makeBuffer(bytes: number[]): ArrayBuffer {
  const buffer = new ArrayBuffer(bytes.length);
  const dv = new DataView(buffer);
  for (let offset = 0; offset < bytes.length; offset++) {
    dv.setUint8(offset, bytes[offset]);
  }
  return buffer;
}
15
ts/pkgs/duckdb-ui-client/test/helpers/mockRequests.ts
Normal file
@@ -0,0 +1,15 @@
import { RequestHandler } from 'msw';
import { setupServer } from 'msw/node';

export async function mockRequests(
  handlers: RequestHandler[],
  func: () => Promise<void>,
) {
  const server = setupServer(...handlers);
  try {
    server.listen();
    await func();
  } finally {
    server.close();
  }
}
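Typical usage, as the test files below show: pass msw handlers plus an async body, and the mock server is torn down even if the body throws. A minimal sketch:

import { http, HttpResponse } from 'msw';

await mockRequests(
  [http.post('http://localhost/ping', () => HttpResponse.text('pong'))],
  async () => {
    const response = await fetch('http://localhost/ping', { method: 'POST' });
    console.log(await response.text()); // 'pong'
  },
);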
@@ -0,0 +1,138 @@
import { http, HttpResponse } from 'msw';
import { expect, suite, test } from 'vitest';
import { DuckDBUIHttpRequestQueue } from '../../../src/http/classes/DuckDBUIHttpRequestQueue';
import { makeBuffer } from '../../helpers/makeBuffer';
import { mockRequests } from '../../helpers/mockRequests';

suite('DuckDBUIHttpRequestQueue', () => {
  test('single request', () => {
    return mockRequests(
      [
        http.post('http://localhost/example/path', () => {
          return HttpResponse.arrayBuffer(makeBuffer([17, 42]));
        }),
      ],
      async () => {
        const queue = new DuckDBUIHttpRequestQueue();
        const id = queue.enqueue(
          'http://localhost/example/path',
          'example body',
        );
        expect(queue.length).toBe(1);
        expect(queue.isCurrent(id)).toBe(true);

        const result = await queue.enqueuedResult(id);
        expect(result.buffer).toEqual(makeBuffer([17, 42]));
      },
    );
  });
  test('multiple requests', () => {
    return mockRequests(
      [
        http.post('http://localhost/example/path', async ({ request }) => {
          const body = await request.text();
          const value = parseInt(body.split(' ')[0], 10);
          return HttpResponse.arrayBuffer(makeBuffer([value]));
        }),
      ],
      async () => {
        const queue = new DuckDBUIHttpRequestQueue();
        const id1 = queue.enqueue(
          'http://localhost/example/path',
          '11 example body',
        );
        const id2 = queue.enqueue(
          'http://localhost/example/path',
          '22 example body',
        );
        expect(queue.length).toBe(2);
        expect(queue.isCurrent(id1)).toBe(true);

        const result1 = await queue.enqueuedResult(id1);
        expect(result1.buffer).toEqual(makeBuffer([11]));

        expect(queue.length).toBe(1);
        expect(queue.isCurrent(id2)).toBe(true);

        const result2 = await queue.enqueuedResult(id2);
        expect(result2.buffer).toEqual(makeBuffer([22]));
      },
    );
  });
  test('cancel (first request)', () => {
    return mockRequests(
      [
        http.post('http://localhost/example/path', async ({ request }) => {
          const body = await request.text();
          const value = parseInt(body.split(' ')[0], 10);
          return HttpResponse.arrayBuffer(makeBuffer([value]));
        }),
      ],
      async () => {
        const queue = new DuckDBUIHttpRequestQueue();
        const id1 = queue.enqueue(
          'http://localhost/example/path',
          '11 example body',
        );
        const id2 = queue.enqueue(
          'http://localhost/example/path',
          '22 example body',
        );
        expect(queue.length).toBe(2);
        expect(queue.isCurrent(id1)).toBe(true);

        queue.cancel(id1);
        await expect(queue.enqueuedResult(id1)).rejects.toEqual(
          new Error('query was canceled'),
        );

        const result2 = await queue.enqueuedResult(id2);
        expect(result2.buffer).toEqual(makeBuffer([22]));
      },
    );
  });
  test('cancel (second request)', () => {
    return mockRequests(
      [
        http.post('http://localhost/example/path', async ({ request }) => {
          const body = await request.text();
          const value = parseInt(body.split(' ')[0], 10);
          return HttpResponse.arrayBuffer(makeBuffer([value]));
        }),
      ],
      async () => {
        const queue = new DuckDBUIHttpRequestQueue();
        const id1 = queue.enqueue(
          'http://localhost/example/path',
          '11 example body',
        );
        const id2 = queue.enqueue(
          'http://localhost/example/path',
          '22 example body',
        );
        const id3 = queue.enqueue(
          'http://localhost/example/path',
          '33 example body',
        );
        expect(queue.length).toBe(3);
        expect(queue.isCurrent(id1)).toBe(true);

        const promise2 = queue.enqueuedResult(id2);
        queue.cancel(id2, 'example error message');

        const result1 = await queue.enqueuedResult(id1);
        expect(result1.buffer).toEqual(makeBuffer([11]));

        expect(queue.length).toBe(1);
        expect(queue.isCurrent(id3)).toBe(true);

        await expect(promise2).rejects.toEqual(
          new Error('example error message'),
        );

        const result3 = await queue.enqueuedResult(id3);
        expect(result3.buffer).toEqual(makeBuffer([33]));
      },
    );
  });
});
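Taken together, these tests pin down the queue's public surface; summarized as a type sketch (the id type is an assumption here, since the tests treat ids as opaque):

interface DuckDBUIHttpRequestQueueContract {
  readonly length: number;
  /** Adds a request and returns an opaque id for it. */
  enqueue(url: string, body: string): unknown;
  /** True when the given request is at the front of the queue. */
  isCurrent(id: unknown): boolean;
  /** Resolves once the request reaches the front and completes. */
  enqueuedResult(id: unknown): Promise<{ buffer: ArrayBuffer }>;
  /** Rejects the pending result; the message defaults to 'query was canceled'. */
  cancel(id: unknown, message?: string): void;
}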
@@ -0,0 +1,39 @@
import { expect, suite, test } from 'vitest';
import { makeDuckDBUIHttpRequestHeaders } from '../../../src/http/functions/makeDuckDBUIHttpRequestHeaders';

suite('makeDuckDBUIHttpRequestHeaders', () => {
  test('description', () => {
    expect([
      ...makeDuckDBUIHttpRequestHeaders({
        description: 'example description',
      }).entries(),
    ]).toEqual([['x-duckdb-ui-request-description', 'example description']]);
  });
  test('connection name', () => {
    expect([
      ...makeDuckDBUIHttpRequestHeaders({
        connectionName: 'example connection name',
      }).entries(),
    ]).toEqual([['x-duckdb-ui-connection-name', 'example connection name']]);
  });
  test('database name', () => {
    // should be base64 encoded
    expect([
      ...makeDuckDBUIHttpRequestHeaders({
        databaseName: 'example database name',
      }).entries(),
    ]).toEqual([['x-duckdb-ui-database-name', 'ZXhhbXBsZSBkYXRhYmFzZSBuYW1l']]);
  });
  test('parameters', () => {
    // values should be base64 encoded
    expect([
      ...makeDuckDBUIHttpRequestHeaders({
        parameters: ['first', 'second'],
      }).entries(),
    ]).toEqual([
      ['x-duckdb-ui-parameter-count', '2'],
      ['x-duckdb-ui-parameter-value-0', 'Zmlyc3Q='],
      ['x-duckdb-ui-parameter-value-1', 'c2Vjb25k'],
    ]);
  });
});
@@ -0,0 +1,54 @@
import { http, HttpResponse } from 'msw';
import { expect, suite, test } from 'vitest';
import { sendDuckDBUIHttpRequest } from '../../../src/http/functions/sendDuckDBUIHttpRequest';
import { makeBuffer } from '../../helpers/makeBuffer';
import { mockRequests } from '../../helpers/mockRequests';

suite('sendDuckDBUIHttpRequest', () => {
  test('basic', async () => {
    return mockRequests(
      [
        http.post('http://localhost/example/path', () => {
          return HttpResponse.arrayBuffer(makeBuffer([17, 42]));
        }),
      ],
      async () => {
        await expect(
          sendDuckDBUIHttpRequest(
            'http://localhost/example/path',
            'example body',
          ),
        ).resolves.toEqual(makeBuffer([17, 42]));
      },
    );
  });
  test('headers', async () => {
    return mockRequests(
      [
        http.post('http://localhost/example/path', ({ request }) => {
          if (
            request.headers.get('X-Example-Header-1') !==
              'example-header-1-value' ||
            request.headers.get('X-Example-Header-2') !==
              'example-header-2-value'
          ) {
            return HttpResponse.error();
          }
          return HttpResponse.arrayBuffer(makeBuffer([17, 42]));
        }),
      ],
      async () => {
        const headers = new Headers();
        headers.append('X-Example-Header-1', 'example-header-1-value');
        headers.append('X-Example-Header-2', 'example-header-2-value');
        await expect(
          sendDuckDBUIHttpRequest(
            'http://localhost/example/path',
            'example body',
            headers,
          ),
        ).resolves.toEqual(makeBuffer([17, 42]));
      },
    );
  });
});
@@ -0,0 +1,87 @@
import { expect, suite, test } from 'vitest';
import { BinaryDeserializer } from '../../../src/serialization/classes/BinaryDeserializer';
import { BinaryStreamReader } from '../../../src/serialization/classes/BinaryStreamReader';
import {
  readString,
  readUint8,
} from '../../../src/serialization/functions/basicReaders';
import { makeBuffer } from '../../helpers/makeBuffer';

suite('BinaryDeserializer', () => {
  test('read uint8', () => {
    const deserializer = new BinaryDeserializer(
      new BinaryStreamReader(makeBuffer([17, 42])),
    );
    expect(deserializer.readUint8()).toBe(17);
    expect(deserializer.readUint8()).toBe(42);
  });
  test('read varint', () => {
    const deserializer = new BinaryDeserializer(
      new BinaryStreamReader(makeBuffer([0x81, 0x82, 0x03])),
    );
    expect(deserializer.readVarInt()).toBe((3 << 14) | (2 << 7) | 1);
  });
  test('read nullable', () => {
    const deserializer = new BinaryDeserializer(
      new BinaryStreamReader(makeBuffer([0, 1, 17])),
    );
    expect(deserializer.readNullable(readUint8)).toBe(null);
    expect(deserializer.readNullable(readUint8)).toBe(17);
  });
  test('read data', () => {
    const deserializer = new BinaryDeserializer(
      new BinaryStreamReader(makeBuffer([3, 0xa, 0xb, 0xc])),
    );
    const dv = deserializer.readData();
    expect(dv.byteLength).toBe(3);
    expect(dv.getUint8(0)).toBe(0xa);
    expect(dv.getUint8(1)).toBe(0xb);
    expect(dv.getUint8(2)).toBe(0xc);
  });
  test('read string', () => {
    const deserializer = new BinaryDeserializer(
      new BinaryStreamReader(makeBuffer([4, 0x64, 0x75, 0x63, 0x6b])),
    );
    expect(deserializer.readString()).toBe('duck');
  });
  test('read list (of string)', () => {
    const deserializer = new BinaryDeserializer(
      new BinaryStreamReader(
        makeBuffer([
          3, 4, 0x77, 0x61, 0x6c, 0x6b, 4, 0x73, 0x77, 0x69, 0x6d, 3, 0x66,
          0x6c, 0x79,
        ]),
      ),
    );
    expect(deserializer.readList(readString)).toEqual(['walk', 'swim', 'fly']);
  });
  test('read pair', () => {
    const deserializer = new BinaryDeserializer(
      new BinaryStreamReader(
        makeBuffer([0, 0, 4, 0x64, 0x75, 0x63, 0x6b, 1, 0, 42, 0xff, 0xff]),
      ),
    );
    expect(deserializer.readPair(readString, readUint8)).toEqual(['duck', 42]);
  });
  test('read property', () => {
    const deserializer = new BinaryDeserializer(
      new BinaryStreamReader(makeBuffer([100, 0, 4, 0x64, 0x75, 0x63, 0x6b])),
    );
    expect(deserializer.readProperty(100, readString)).toEqual('duck');
  });
  test('read property (not present)', () => {
    const deserializer = new BinaryDeserializer(
      new BinaryStreamReader(makeBuffer([100, 0, 4, 0x64, 0x75, 0x63, 0x6b])),
    );
    expect(() => deserializer.readProperty(101, readString)).toThrowError(
      'Expected field id 101 but got 100 (offset=0)',
    );
  });
  test('read property with default', () => {
    const deserializer = new BinaryDeserializer(
      new BinaryStreamReader(makeBuffer([101, 0, 42])),
    );
    expect(deserializer.readPropertyWithDefault(100, readUint8, 17)).toBe(17);
    expect(deserializer.readPropertyWithDefault(101, readUint8, 17)).toBe(42);
  });
});
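The 'read varint' test implies an LEB128-style encoding: seven payload bits per byte, least significant group first, with the high bit set on every byte except the last. A reference decoder matching the test bytes (a sketch, not the class's actual implementation):

function decodeVarInt(bytes: number[]): number {
  let result = 0;
  let shift = 0;
  for (const byte of bytes) {
    result |= (byte & 0x7f) << shift; // low 7 bits carry the payload
    if ((byte & 0x80) === 0) break; // a clear high bit ends the number
    shift += 7;
  }
  return result;
}

decodeVarInt([0x81, 0x82, 0x03]); // (3 << 14) | (2 << 7) | 1 === 49409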
@@ -0,0 +1,30 @@
import { expect, suite, test } from 'vitest';
import { BinaryStreamReader } from '../../../src/serialization/classes/BinaryStreamReader';
import { makeBuffer } from '../../helpers/makeBuffer';

suite('BinaryStreamReader', () => {
  test('basic', () => {
    const reader = new BinaryStreamReader(
      makeBuffer([11, 22, 33, 44, 0x12, 0x34]),
    );

    expect(reader.getOffset()).toBe(0);
    expect(reader.peekUint8()).toBe(11);
    expect(reader.readUint8()).toBe(11);

    expect(reader.getOffset()).toBe(1);
    expect(reader.peekUint8()).toBe(22);
    expect(reader.readUint8()).toBe(22);

    expect(reader.getOffset()).toBe(2);
    reader.consume(2);
    expect(reader.getOffset()).toBe(4);
    expect(reader.peekUint16(false)).toBe(0x1234);
    expect(reader.peekUint16(true)).toBe(0x3412);

    const dv = reader.readData(2);
    expect(dv.byteLength).toBe(2);
    expect(dv.getUint8(0)).toBe(0x12);
    expect(dv.getUint8(1)).toBe(0x34);
  });
});
6
ts/pkgs/duckdb-ui-client/test/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.test.json",
  "references": [
    { "path": "../src" }
  ]
}
@@ -0,0 +1,14 @@
import { expect, suite, test } from 'vitest';
import { randomString } from '../../../src/util/functions/randomString';

suite('randomString', () => {
  test('default length', () => {
    expect(randomString().length).toBe(12);
  });
  test('custom length', () => {
    expect(randomString(5).length).toBe(5);
  });
  test('custom chars', () => {
    expect(randomString(3, 'xy')).toMatch(/[xy][xy][xy]/);
  });
});
@@ -0,0 +1,11 @@
import { expect, suite, test } from 'vitest';
import { toBase64 } from '../../../src/util/functions/toBase64';

suite('toBase64', () => {
  test('basic', () => {
    expect(atob(toBase64('duck'))).toBe('duck');
  });
  test('unicode', () => {
    expect(atob(toBase64('🦆'))).toBe('\xF0\x9F\xA6\x86');
  });
});