Compare commits: yl/debug-c ... v1.3.2
49 commits
| SHA1 |
|---|
| e11686f4a0 |
| 5821ee7fc8 |
| c52ad95c35 |
| 277279184b |
| f13123987a |
| bd809a759e |
| bba41c82c5 |
| 3fff3b2d1f |
| 96dfa0d32b |
| 8a724108f1 |
| aab29c57f4 |
| 291c1c4af5 |
| fc78019f25 |
| 8ec47ccec9 |
| 2a8db10c6a |
| 0b240cf7d6 |
| 0a5add6547 |
| 13cb9c5ab9 |
| 0edb52054a |
| d6cc9eeea4 |
| 696fc1ea02 |
| 0841d1ccac |
| 32fcf7e620 |
| ccaf92e99d |
| 22721d7edd |
| 9d4f0ebb0e |
| e06cd347fb |
| fb016137ee |
| ab5f005bce |
| 9c05c83412 |
| c63b39fb62 |
| 0d72926508 |
| bffad99acc |
| c7d73dcb8d |
| 6145a1b4a9 |
| 17be068d61 |
| c9d93eee2e |
| 763e521a99 |
| 875fa2161c |
| fda5caf296 |
| 183422172c |
| c6f57c2f9c |
| 6cbef80577 |
| 277b48c4c9 |
| ec5334099f |
| 37c738af28 |
| 07ed1ea806 |
| 8f35729d11 |
| 2715fb15c1 |

.github/workflows/MainDistributionPipeline.yml (vendored, 53 lines changed)
```diff
@@ -3,8 +3,20 @@
 #
 name: Main Extension Distribution Pipeline
 on:
-  push:
   pull_request:
+    paths-ignore:
+      - ".github/workflows/TypeScriptWorkspace.yml"
+      - "docs/**"
+      - "ts/**"
+      - "README.md"
+  push:
+    branches:
+      - "main"
+    paths-ignore:
+      - ".github/workflows/TypeScriptWorkspace.yml"
+      - "docs/**"
+      - "ts/**"
+      - "README.md"
   workflow_dispatch:
 
 concurrency:
@@ -12,11 +24,42 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
+  duckdb-main-build:
+    name: Build main extension binaries
+    uses: duckdb/extension-ci-tools/.github/workflows/_extension_distribution.yml@main
+    with:
+      ci_tools_version: main
+      duckdb_version: main
+      exclude_archs: ${{ github.repository == 'duckdb/duckdb-ui' && 'wasm_mvp;wasm_eh;wasm_threads' || 'linux_arm64;linux_amd64_musl;osx_amd64;windows_amd64_mingw;wasm_mvp;wasm_eh;wasm_threads' }}
+      extension_name: ui
+
+  duckdb-next-patch-build:
+    name: Build next patch extension binaries
+    uses: duckdb/extension-ci-tools/.github/workflows/_extension_distribution.yml@v1.3.2
+    with:
+      ci_tools_version: v1.3.2
+      duckdb_version: v1.3-ossivalis
+      exclude_archs: ${{ github.repository == 'duckdb/duckdb-ui' && 'wasm_mvp;wasm_eh;wasm_threads' || 'linux_arm64;linux_amd64_musl;osx_amd64;windows_amd64_mingw;wasm_mvp;wasm_eh;wasm_threads' }}
+      extension_name: ui
+
   duckdb-stable-build:
     name: Build stable extension binaries
-    uses: ./.github/workflows/_extension_distribution.yml
+    uses: duckdb/extension-ci-tools/.github/workflows/_extension_distribution.yml@v1.3.2
     with:
-      ci_tools_version: v1.2.1
-      duckdb_version: v1.2.1
-      exclude_archs: 'wasm_mvp;wasm_eh;wasm_threads'
+      ci_tools_version: v1.3.2
+      duckdb_version: v1.3.2
+      exclude_archs: ${{ github.repository == 'duckdb/duckdb-ui' && 'wasm_mvp;wasm_eh;wasm_threads' || 'linux_arm64;linux_amd64_musl;osx_amd64;windows_amd64_mingw;wasm_mvp;wasm_eh;wasm_threads' }}
       extension_name: ui
+
+  duckdb-stable-deploy:
+    if: ${{ github.repository == 'duckdb/duckdb-ui' && ( startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main' ) }}
+    name: Deploy stable extension binaries
+    needs: duckdb-stable-build
+    uses: duckdb/extension-ci-tools/.github/workflows/_extension_deploy.yml@v1.3.2
+    secrets: inherit
+    with:
+      extension_name: ui
+      ci_tools_version: v1.3.2
+      duckdb_version: v1.3.2
+      exclude_archs: 'wasm_mvp;wasm_eh;wasm_threads'
+      deploy_latest: ${{ startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main' }}
```

.github/workflows/TypeScriptWorkspace.yml (vendored, new file, 50 lines)
```yaml
name: TypeScript Workspace
on:
  pull_request:
    paths:
      - "ts/**"
      - ".github/workflows/TypeScriptWorkspace.yml"
  push:
    branches:
      - "main"
    paths:
      - "ts/**"
      - ".github/workflows/TypeScriptWorkspace.yml"
  workflow_dispatch:

jobs:
  build_and_test:
    name: Build & Test
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          package_json_file: ts/package.json

      - name: Setup Node with pnpm cache
        uses: actions/setup-node@v4
        with:
          cache: 'pnpm'
          cache-dependency-path: ts/pnpm-lock.yaml

      # Src files are built using preinstall
      - name: Install dependencies & build src
        working-directory: ts
        run: pnpm install

      # This step is needed to type-check test files. (Src files are built during install.)
      - name: Build src & test (to type-check test)
        working-directory: ts
        run: pnpm build

      - name: Check formatting & linting rules
        working-directory: ts
        run: pnpm check

      - name: Test
        working-directory: ts
        run: pnpm test
```
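
These CI steps map one-to-one onto local commands; a minimal sketch of reproducing them, assuming pnpm is installed and the `ts` workspace is checked out:

```shell
# Mirror the CI steps from TypeScriptWorkspace.yml locally.
cd ts
pnpm install   # also builds src via the preinstall hook
pnpm build     # builds src & test, type-checking the test files
pnpm check     # formatting & linting rules
pnpm test
```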

.github/workflows/_extension_distribution.yml (vendored, deleted, 738 lines)
```yaml
# Reusable workflow for building DuckDB extensions using a standardized environment
#
# The workflow:
#   - builds the extension using the CI workflow from the corresponding DuckDB version
#   - uploads the extensions as gh actions artifacts in the following format:
#     <ext_name>-<duckdb_version>-extension-<arch><optional_postfix>
#
# note: extensions are simply uploaded to GitHub actions, deploying the extensions is done a separate step. More info on
# this can be found in https://github.com/duckdb/extension-template

name: Extension distribution
on:
  workflow_call:
    inputs:
      # The name with which the extension will be built
      extension_name:
        required: true
        type: string
      # DuckDB version to build against, should in most cases be identical to
      duckdb_version:
        required: true
        type: string
      # The version of the https://github.com/duckdb/extension-ci-tools submodule of the extension. In most cases will be identical to `duckdb_version`.
      # Passing this explicitly is required because of https://github.com/actions/toolkit/issues/1264
      ci_tools_version:
        required: true
        type: string
      # ';' separated list of architectures to exclude, for example: 'linux_amd64;osx_arm64'
      exclude_archs:
        required: false
        type: string
        default: ""
      # Postfix added to artifact names. Can be used to guarantee unique names when this workflow is called multiple times
      artifact_postfix:
        required: false
        type: string
        default: ""
      # Override the default vcpkg repository
      vcpkg_url:
        required: false
        type: string
        default: "https://github.com/microsoft/vcpkg.git"
      # Override the default vcpkg commit used by this version of DuckDB
      vcpkg_commit:
        required: false
        type: string
        default: "5e5d0e1cd7785623065e77eff011afdeec1a3574"
      # Override the default script producing the matrices. Allows specifying custom matrices.
      matrix_parse_script:
        required: false
        type: string
        default: "./extension-ci-tools/scripts/modify_distribution_matrix.py"
      # Enable building the DuckDB Shell
      build_duckdb_shell:
        required: false
        type: boolean
        default: true
      # Supply an override repository to build, instead of using the current one
      override_repository:
        required: false
        type: string
        default: ""
      # The git ref used for the override_repository
      override_ref:
        required: false
        type: string
        default: ""
      # Override the repo for the CI tools (for testing CI tools itself)
      override_ci_tools_repository:
        required: false
        type: string
        default: "duckdb/extension-ci-tools"
      # Pass extra toolchains
      # available: (parser_tools, rust, fortran, omp, python3)
      extra_toolchains:
        required: false
        type: string
        default: ""
      rust_logs:
        required: false
        type: boolean
        default: false
      # Optional tag the build extension should have -- this is easy to misuse, and subject to change, for internal use only
      extension_tag:
        required: false
        type: string
        default: ""
      # Optional tag the referenced duckdb should have -- this is a easy to misuse, and subject to change, for internal use only
      duckdb_tag:
        required: false
        type: string
        default: ""
      # If set tot true, skip tests
      skip_tests:
        required: false
        type: boolean
        default: false
      # DEPRECATED: use extra_toolchains instead
      enable_rust:
        required: false
        type: boolean
        default: false

jobs:
  generate_matrix:
    name: Generate matrix
    runs-on: ubuntu-latest
    outputs:
      linux_matrix: ${{ steps.set-matrix-linux.outputs.linux_matrix }}
      windows_matrix: ${{ steps.set-matrix-windows.outputs.windows_matrix }}
      osx_matrix: ${{ steps.set-matrix-osx.outputs.osx_matrix }}
      wasm_matrix: ${{ steps.set-matrix-wasm.outputs.wasm_matrix }}
    steps:
      - uses: actions/checkout@v4
        name: Checkout Extension CI tools
        with:
          path: 'extension-ci-tools'
          ref: ${{ inputs.ci_tools_version }}
          repository: ${{ inputs.override_ci_tools_repository }}

      - id: parse-matrices
        run: |
          mkdir build
          python3 ${{ inputs.matrix_parse_script }} --input extension-ci-tools/config/distribution_matrix.json --select_os linux --output build/linux_matrix.json --exclude "${{ inputs.exclude_archs }}" --pretty
          python3 ${{ inputs.matrix_parse_script }} --input extension-ci-tools/config/distribution_matrix.json --select_os osx --output build/osx_matrix.json --exclude "${{ inputs.exclude_archs }}" --pretty
          python3 ${{ inputs.matrix_parse_script }} --input extension-ci-tools/config/distribution_matrix.json --select_os windows --output build/windows_matrix.json --exclude "${{ inputs.exclude_archs }}" --pretty
          python3 ${{ inputs.matrix_parse_script }} --input extension-ci-tools/config/distribution_matrix.json --select_os wasm --output build/wasm_matrix.json --exclude "${{ inputs.exclude_archs }}" --pretty

      - id: set-matrix-linux
        run: |
          linux_matrix="`cat build/linux_matrix.json`"
          echo linux_matrix=$linux_matrix >> $GITHUB_OUTPUT
          echo `cat $GITHUB_OUTPUT`

      - id: set-matrix-osx
        run: |
          osx_matrix="`cat build/osx_matrix.json`"
          echo osx_matrix=$osx_matrix >> $GITHUB_OUTPUT
          echo `cat $GITHUB_OUTPUT`

      - id: set-matrix-windows
        run: |
          windows_matrix="`cat build/windows_matrix.json`"
          echo windows_matrix=$windows_matrix >> $GITHUB_OUTPUT
          echo `cat $GITHUB_OUTPUT`

      - id: set-matrix-wasm
        run: |
          wasm_matrix="`cat build/wasm_matrix.json`"
          echo wasm_matrix=$wasm_matrix >> $GITHUB_OUTPUT
          echo `cat $GITHUB_OUTPUT`

  linux:
    name: Linux
    runs-on: ubuntu-latest
    needs: generate_matrix
    if: ${{ needs.generate_matrix.outputs.linux_matrix != '{}' && needs.generate_matrix.outputs.linux_matrix != '' }}
    strategy:
      matrix: ${{fromJson(needs.generate_matrix.outputs.linux_matrix)}}
    env:
      VCPKG_TARGET_TRIPLET: ${{ matrix.vcpkg_triplet }}
      VCPKG_TOOLCHAIN_PATH: ${{ github.workspace }}/vcpkg/scripts/buildsystems/vcpkg.cmake
      GEN: ninja
      BUILD_SHELL: ${{ inputs.build_duckdb_shell && '1' || '0' }}
      DUCKDB_PLATFORM: ${{ matrix.duckdb_arch }}

    steps:
      - name: Free up some unused space
        continue-on-error: true
        run: |
          docker images -a -q > package.list
          if [ -s package.list ]; then
            echo "To be deleted"
            cat package.list
            echo "---"
            docker rmi $(cat package.list)
          fi
          rm package.list

      - uses: actions/checkout@v4
        name: Checkout override repository
        if: ${{inputs.override_repository != ''}}
        with:
          repository: ${{ inputs.override_repository }}
          ref: ${{ inputs.override_ref }}
          fetch-depth: 0
          submodules: 'true'

      - uses: actions/checkout@v4
        name: Checkout current repository
        if: ${{inputs.override_repository == ''}}
        with:
          fetch-depth: 0
          submodules: 'true'

      - uses: actions/checkout@v4
        name: Checkout Extension CI tools
        with:
          path: 'extension-ci-tools'
          ref: ${{ inputs.ci_tools_version }}
          repository: ${{ inputs.override_ci_tools_repository }}

      - name: Checkout DuckDB to version
        if: ${{inputs.duckdb_version != ''}}
        run: |
          DUCKDB_GIT_VERSION=${{ inputs.duckdb_version }} make set_duckdb_version

      - name: Tag extension
        if: ${{inputs.extension_tag != ''}}
        run: |
          git tag ${{ inputs.extension_tag }}

      - name: Tag DuckDB extension
        if: ${{inputs.duckdb_tag != ''}}
        run: |
          DUCKDB_TAG=${{ inputs.duckdb_tag }} make set_duckdb_tag

      - uses: actions/checkout@v4
        name: Checkout Extension CI tools
        with:
          path: 'extension-ci-tools'
          ref: ${{ inputs.ci_tools_version }}
          repository: ${{ inputs.override_ci_tools_repository }}
          fetch-depth: 0

      - name: Build Docker image
        shell: bash
        run: |
          docker build \
            --build-arg 'vcpkg_url=${{ inputs.vcpkg_url }}' \
            --build-arg 'vcpkg_commit=${{ inputs.vcpkg_commit }}' \
            --build-arg 'extra_toolchains=${{ inputs.enable_rust && format(';{0};rust;', inputs.extra_toolchains) || format(';{0};', inputs.extra_toolchains) }}' \
            -t duckdb/${{ matrix.duckdb_arch }} \
            ./extension-ci-tools/docker/${{ matrix.duckdb_arch }}

      - name: Create env file for docker
        run: |
          touch docker_env.txt
          echo "VCPKG_TARGET_TRIPLET=${{ matrix.vcpkg_triplet }}" >> docker_env.txt
          echo "BUILD_SHELL=${{ inputs.build_duckdb_shell && '1' || '0' }}" >> docker_env.txt
          echo "OPENSSL_ROOT_DIR=/duckdb_build_dir/build/release/vcpkg_installed/${{ matrix.vcpkg_triplet }}" >> docker_env.txt
          echo "OPENSSL_DIR=/duckdb_build_dir/build/release/vcpkg_installed/${{ matrix.vcpkg_triplet }}" >> docker_env.txt
          echo "OPENSSL_USE_STATIC_LIBS=true" >> docker_env.txt
          echo "DUCKDB_PLATFORM=${{ matrix.duckdb_arch }}" >> docker_env.txt
          echo "DUCKDB_GIT_VERSION=${{ inputs.duckdb_version }}" >> docker_env.txt
          echo "LINUX_CI_IN_DOCKER=1" >> docker_env.txt
          echo "TOOLCHAIN_FLAGS=${{ matrix.duckdb_arch == 'linux_arm64' && '-DCMAKE_C_COMPILER=aarch64-linux-gnu-gcc -DCMAKE_CXX_COMPILER=aarch64-linux-gnu-g++ -DCMAKE_Fortran_COMPILER=aarch64-linux-gnu-gfortran' || '' }}" >> docker_env.txt

      - name: Generate timestamp for Ccache entry
        shell: cmake -P {0}
        id: ccache_timestamp
        run: |
          string(TIMESTAMP current_date "%Y-%m-%d-%H;%M;%S" UTC)
          message("::set-output name=timestamp::${current_date}")

      - name: Create Ccache directory
        run: |
          mkdir ccache_dir

      - name: Load Ccache
        uses: actions/cache@v4
        with:
          path: ./ccache_dir
          key: ccache-extension-distribution-${{ matrix.duckdb_arch }}-${{ steps.ccache_timestamp.outputs.timestamp }}
          restore-keys: |
            ccache-extension-distribution-${{ matrix.duckdb_arch }}-

      - name: Run configure (outside Docker)
        shell: bash
        env:
          DUCKDB_GIT_VERSION: ${{ inputs.duckdb_version }}
          LINUX_CI_IN_DOCKER: 0
        run: |
          make configure_ci

      - name: Run configure (inside Docker)
        shell: bash
        run: |
          docker run --env-file=docker_env.txt -v `pwd`:/duckdb_build_dir -v `pwd`/ccache_dir:/ccache_dir duckdb/${{ matrix.duckdb_arch }} make configure_ci

      - name: Build extension (inside Docker)
        run: |
          docker run --env-file=docker_env.txt -v `pwd`:/duckdb_build_dir -v `pwd`/ccache_dir:/ccache_dir duckdb/${{ matrix.duckdb_arch }} make release

      - name: Test extension (inside docker)
        if: ${{ matrix.duckdb_arch != 'linux_arm64' && inputs.skip_tests == false }}
        run: |
          docker run --env-file=docker_env.txt -v `pwd`:/duckdb_build_dir -v `pwd`/ccache_dir:/ccache_dir duckdb/${{ matrix.duckdb_arch }} make test_release

      - name: Test extension (outside docker)
        if: ${{ matrix.duckdb_arch != 'linux_arm64' && inputs.skip_tests == false }}
        env:
          DUCKDB_GIT_VERSION: ${{ inputs.duckdb_version }}
          LINUX_CI_IN_DOCKER: 0
        run: |
          make test_release

      - uses: actions/upload-artifact@v4
        with:
          name: ${{ inputs.extension_name }}-${{ inputs.duckdb_version }}-extension-${{matrix.duckdb_arch}}${{inputs.artifact_postfix}}
          path: |
            build/release/extension/${{ inputs.extension_name }}/${{ inputs.extension_name }}.duckdb_extension

      - uses: actions/upload-artifact@v4
        with:
          if-no-files-found: error
          name: test-ssl-${{ inputs.duckdb_version }}-${{matrix.duckdb_arch}}${{inputs.artifact_postfix}}
          path: build/release/extension/ui/test-ssl/test_ssl

      - name: Print Rust logs
        if: ${{ inputs.rust_logs && (inputs.enable_rust || contains(format(';{0};', inputs.extra_toolchains), ';rust;')) }}
        run: |
          for filename in build/release/rust/src/*/*build-*.log; do
            echo Printing logs for file $filename
            cat $filename;
            echo Done printing logs for $filename
          done

  macos:
    name: MacOS
    runs-on: macos-latest
    needs: generate_matrix
    if: ${{ needs.generate_matrix.outputs.osx_matrix != '{}' && needs.generate_matrix.outputs.osx_matrix != '' }}
    strategy:
      matrix: ${{fromJson(needs.generate_matrix.outputs.osx_matrix)}}
    env:
      VCPKG_TOOLCHAIN_PATH: ${{ github.workspace }}/vcpkg/scripts/buildsystems/vcpkg.cmake
      VCPKG_TARGET_TRIPLET: ${{ matrix.vcpkg_triplet }}
      OSX_BUILD_ARCH: ${{ matrix.osx_build_arch }}
      GEN: ninja
      BUILD_SHELL: ${{ inputs.build_duckdb_shell && '1' || '0' }}
      DUCKDB_PLATFORM: ${{ matrix.duckdb_arch }}

    steps:
      - uses: actions/checkout@v4
        name: Checkout override repository
        if: ${{inputs.override_repository != ''}}
        with:
          repository: ${{ inputs.override_repository }}
          ref: ${{ inputs.override_ref }}
          fetch-depth: 0
          submodules: 'true'

      - uses: actions/checkout@v4
        name: Checkout current repository
        if: ${{inputs.override_repository == ''}}
        with:
          fetch-depth: 0
          submodules: 'true'

      - name: Install Ninja
        run: |
          brew install ninja autoconf make libtool automake autoconf-archive

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        continue-on-error: true
        with:
          key: extension-distribution-${{ matrix.duckdb_arch }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - uses: actions/checkout@v4
        name: Checkout Extension CI tools
        with:
          path: 'extension-ci-tools'
          ref: ${{ inputs.ci_tools_version }}
          repository: ${{ inputs.override_ci_tools_repository }}

      - name: Checkout DuckDB to version
        if: ${{inputs.duckdb_version != ''}}
        run: |
          DUCKDB_GIT_VERSION=${{ inputs.duckdb_version }} make set_duckdb_version

      - name: Tag extension
        if: ${{inputs.extension_tag != ''}}
        run: |
          git tag ${{ inputs.extension_tag }}

      - name: Tag DuckDB extension
        if: ${{inputs.duckdb_tag != ''}}
        run: |
          DUCKDB_TAG=${{ inputs.duckdb_tag }} make set_duckdb_tag

      - name: Setup vcpkg
        uses: lukka/run-vcpkg@v11.1
        with:
          vcpkgGitCommitId: ${{ inputs.vcpkg_commit }}
          vcpkgGitURL: ${{ inputs.vcpkg_url }}

      - name: Install Rust cross compile dependency
        if: ${{ (inputs.enable_rust || contains(format(';{0};', inputs.extra_toolchains), ';rust;')) && matrix.osx_build_arch == 'x86_64'}}
        run: |
          rustup target add x86_64-apple-darwin

      - name: 'Setup go'
        if: ${{ (inputs.enable_go || contains(format(';{0};', inputs.extra_toolchains), ';go;'))}}
        uses: actions/setup-go@v4
        with:
          go-version: '1.23'

      - name: Install parser tools
        if: ${{ contains(format(';{0};', inputs.extra_toolchains), ';parser_tools;')}}
        run: |
          brew install bison flex

      - name: install omp (x86)
        if: ${{ contains(format(';{0};', inputs.extra_toolchains), ';omp;') && matrix.duckdb_arch == 'osx_amd64' }}
        run: |
          arch -x86_64 /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
          (echo; echo 'eval "$(/usr/local/bin/brew shellenv)"') >> /Users/runner/.bash_profile
          eval "$(/usr/local/bin/brew shellenv)"
          arch -x86_64 brew install libomp
          echo "LDFLAGS=-L/usr/local/opt/libomp/lib" >> $GITHUB_ENV
          echo "CFLAGS=-I/usr/local/opt/libomp/include" >> $GITHUB_ENV
          echo "CPPFLAGS=-I/usr/local/opt/libomp/include" >> $GITHUB_ENV
          echo "CXXFLAGS=-I/usr/local/opt/libomp/include" >> $GITHUB_ENV

      - name: install omp (arm)
        if: ${{ contains(format(';{0};', inputs.extra_toolchains), ';omp;') && matrix.duckdb_arch == 'osx_arm64' }}
        run: |
          brew install libomp
          echo "LDFLAGS=-L/opt/homebrew/opt/libomp/lib" >> $GITHUB_ENV
          echo "CFLAGS=-I/opt/homebrew/opt/libomp/include" >> $GITHUB_ENV
          echo "CPPFLAGS=-I/opt/homebrew/opt/libomp/include" >> $GITHUB_ENV
          echo "CXXFLAGS=-I/opt/homebrew/opt/libomp/include" >> $GITHUB_ENV

      - name: Run configure
        shell: bash
        env:
          DUCKDB_GIT_VERSION: ${{ inputs.duckdb_version }}
        run: |
          make configure_ci

      - name: Build extension
        shell: bash
        run: |
          make release

      - name: Test Extension
        if: ${{ matrix.osx_build_arch == 'arm64' && inputs.skip_tests == false }}
        shell: bash
        run: |
          make test_release

      - uses: actions/upload-artifact@v4
        with:
          if-no-files-found: error
          name: ${{ inputs.extension_name }}-${{ inputs.duckdb_version }}-extension-${{matrix.duckdb_arch}}${{inputs.artifact_postfix}}
          path: |
            build/release/extension/${{ inputs.extension_name }}/${{ inputs.extension_name }}.duckdb_extension

      - uses: actions/upload-artifact@v4
        with:
          if-no-files-found: error
          name: test-ssl-${{ inputs.duckdb_version }}-${{matrix.duckdb_arch}}${{inputs.artifact_postfix}}
          path: build/release/extension/ui/test-ssl/test_ssl

      - name: Print Rust logs
        if: ${{ inputs.rust_logs && (inputs.enable_rust || contains(format(';{0};', inputs.extra_toolchains), ';rust;')) }}
        run: |
          for filename in build/release/rust/src/*/*build-*.log; do
            echo Printing logs for file $filename
            cat $filename;
            echo Done printing logs for $filename
          done

  windows:
    name: Windows
    runs-on: windows-2019
    needs: generate_matrix
    if: ${{ needs.generate_matrix.outputs.windows_matrix != '{}' && needs.generate_matrix.outputs.windows_matrix != '' }}
    strategy:
      matrix: ${{fromJson(needs.generate_matrix.outputs.windows_matrix)}}
    env:
      VCPKG_TOOLCHAIN_PATH: ${{ github.workspace }}/vcpkg/scripts/buildsystems/vcpkg.cmake
      VCPKG_TARGET_TRIPLET: ${{ matrix.vcpkg_triplet }}
      BUILD_SHELL: ${{ inputs.build_duckdb_shell && '1' || '0' }}
      DUCKDB_PLATFORM: ${{ matrix.duckdb_arch }}
      CC: ${{ (matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw') && 'gcc' || '' }}
      CXX: ${{ (matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw') && 'g++' || '' }}

    steps:
      - name: Keep \n line endings
        shell: bash
        run: |
          git config --global core.autocrlf false
          git config --global core.eol lf

      - uses: actions/checkout@v4
        name: Checkout override repository
        if: ${{inputs.override_repository != ''}}
        with:
          repository: ${{ inputs.override_repository }}
          ref: ${{ inputs.override_ref }}
          fetch-depth: 0
          submodules: 'true'

      - uses: actions/checkout@v4
        name: Checkout current repository
        if: ${{inputs.override_repository == ''}}
        with:
          fetch-depth: 0
          submodules: 'true'

      - uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Setup Rust
        if: (inputs.enable_rust || contains(format(';{0};', inputs.extra_toolchains), ';rust;'))
        uses: dtolnay/rust-toolchain@stable

      - name: 'Setup go'
        if: ${{ (inputs.enable_go || contains(format(';{0};', inputs.extra_toolchains), ';go;'))}}
        uses: actions/setup-go@v4
        with:
          go-version: '1.23'

      - name: Install parser tools
        if: ${{ contains(format(';{0};', inputs.extra_toolchains), ';parser_tools;')}}
        run: |
          choco install winflexbison3

      - uses: r-lib/actions/setup-r@v2
        if: matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw'
        with:
          r-version: 'devel'
          update-rtools: true
          rtools-version: '42' # linker bug in 43

      - name: setup rtools gcc for vcpkg
        if: matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw'
        run: |
          cp C:/rtools42/x86_64-w64-mingw32.static.posix/bin/gcc.exe C:/rtools42/x86_64-w64-mingw32.static.posix/bin/x86_64-w64-mingw32-gcc.exe
          cp C:/rtools42/x86_64-w64-mingw32.static.posix/bin/g++.exe C:/rtools42/x86_64-w64-mingw32.static.posix/bin/x86_64-w64-mingw32-g++.exe
          cp C:/rtools42/x86_64-w64-mingw32.static.posix/bin/gfortran.exe C:/rtools42/x86_64-w64-mingw32.static.posix/bin/x86_64-w64-mingw32-gfortran.exe

      - uses: actions/checkout@v4
        name: Checkout Extension CI tools
        with:
          path: 'extension-ci-tools'
          ref: ${{ inputs.ci_tools_version }}
          repository: ${{ inputs.override_ci_tools_repository }}

      - name: Checkout DuckDB to version
        if: ${{inputs.duckdb_version != ''}}
        env:
          DUCKDB_GIT_VERSION: ${{ inputs.duckdb_version }}
        run: |
          make set_duckdb_version

      - name: Tag extension
        if: ${{inputs.extension_tag != ''}}
        run: |
          git tag ${{ inputs.extension_tag }}

      - name: Tag DuckDB extension
        if: ${{inputs.duckdb_tag != ''}}
        env:
          DUCKDB_TAG: ${{ inputs.duckdb_tag }}
        run: |
          make set_duckdb_tag

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        continue-on-error: true
        with:
          key: ${{ github.job }}-${{ matrix.duckdb_arch }}

      - name: Setup vcpkg
        uses: lukka/run-vcpkg@v11.1
        with:
          vcpkgGitCommitId: ${{ inputs.vcpkg_commit }}
          vcpkgGitURL: ${{ inputs.vcpkg_url }}

      - name: Run configure
        shell: bash
        env:
          DUCKDB_PLATFORM: ${{ matrix.duckdb_arch }}
          DUCKDB_PLATFORM_RTOOLS: ${{ (matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw') && 1 || 0 }}
          DUCKDB_GIT_VERSION: ${{ inputs.duckdb_version }}
        run: |
          make configure_ci

      - name: Build extension
        env:
          DUCKDB_PLATFORM: ${{ matrix.duckdb_arch }}
          DUCKDB_PLATFORM_RTOOLS: ${{ (matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw') && 1 || 0 }}
        run: |
          make release

      - name: Test extension
        if: ${{ inputs.skip_tests == false }}
        env:
          DUCKDB_PLATFORM: ${{ matrix.duckdb_arch }}
          DUCKDB_PLATFORM_RTOOLS: ${{ (matrix.duckdb_arch == 'windows_amd64_rtools' || matrix.duckdb_arch == 'windows_amd64_mingw') && 1 || 0 }}
        run: |
          make test_release

      - uses: actions/upload-artifact@v4
        with:
          if-no-files-found: error
          name: ${{ inputs.extension_name }}-${{ inputs.duckdb_version }}-extension-${{matrix.duckdb_arch}}${{inputs.artifact_postfix}}
          path: |
            build/release/extension/${{ inputs.extension_name }}/${{ inputs.extension_name }}.duckdb_extension

      - name: Find test_ssl
        shell: bash
        run: find . -name 'test_ssl.exe'

      - uses: actions/upload-artifact@v4
        with:
          if-no-files-found: error
          name: test-ssl-${{ inputs.duckdb_version }}-${{matrix.duckdb_arch}}${{inputs.artifact_postfix}}
          # MinGW: build/release/extension/ui/test-ssl/test_ssl.exe
          # Windows: build/release/extension/ui/test-ssl/Release/test_ssl.exe
          path: build/release/extension/ui/test-ssl/**/test_ssl.exe

      - name: Print Rust logs
        if: ${{ inputs.rust_logs && (inputs.enable_rust || contains(format(';{0};', inputs.extra_toolchains), ';rust;')) }}
        shell: bash
        run: |
          for filename in build/release/rust/src/*/*build-*.log; do
            echo Printing logs for file $filename
            cat $filename;
            echo Done printing logs for $filename
          done

  wasm:
    name: DuckDB-Wasm
    runs-on: ubuntu-latest
    needs: generate_matrix
    if: ${{ needs.generate_matrix.outputs.wasm_matrix != '{}' && needs.generate_matrix.outputs.wasm_matrix != '' }}
    strategy:
      matrix: ${{fromJson(needs.generate_matrix.outputs.wasm_matrix)}}
    env:
      VCPKG_TARGET_TRIPLET: ${{ matrix.vcpkg_triplet }}
      VCPKG_TOOLCHAIN_PATH: ${{ github.workspace }}/vcpkg/scripts/buildsystems/vcpkg.cmake
      DUCKDB_PLATFORM: ${{ matrix.duckdb_arch }}

    steps:
      - uses: actions/checkout@v4
        name: Checkout override repository
        if: ${{inputs.override_repository != ''}}
        with:
          repository: ${{ inputs.override_repository }}
          ref: ${{ inputs.override_ref }}
          fetch-depth: 0
          submodules: 'true'

      - uses: actions/checkout@v4
        name: Checkout current repository
        if: ${{inputs.override_repository == ''}}
        with:
          fetch-depth: 0
          submodules: 'true'

      - uses: actions/checkout@v4
        name: Checkout Extension CI tools
        with:
          path: 'extension-ci-tools'
          ref: ${{ inputs.ci_tools_version }}
          repository: ${{ inputs.override_ci_tools_repository }}

      - name: Checkout DuckDB to version
        if: ${{inputs.duckdb_version != ''}}
        run: |
          DUCKDB_GIT_VERSION=${{ inputs.duckdb_version }} make set_duckdb_version

      - name: Tag extension
        if: ${{inputs.extension_tag != ''}}
        run: |
          git tag ${{ inputs.extension_tag }}

      - name: Tag DuckDB extension
        if: ${{inputs.duckdb_tag != ''}}
        run: |
          DUCKDB_TAG=${{ inputs.duckdb_tag }} make set_duckdb_tag

      - uses: mymindstorm/setup-emsdk@v13
        with:
          version: 3.1.71

      - name: Setup Rust for cross compilation
        if: ${{ (inputs.enable_rust || contains(format(';{0};', inputs.extra_toolchains), ';rust;'))}}
        uses: dtolnay/rust-toolchain@stable
        with:
          targets: wasm32-unknown-emscripten

      - name: 'Setup go'
        if: ${{ (inputs.enable_go || contains(format(';{0};', inputs.extra_toolchains), ';go;'))}}
        uses: actions/setup-go@v4
        with:
          go-version: '1.23'

      - name: Setup vcpkg
        uses: lukka/run-vcpkg@v11.1
        with:
          vcpkgGitCommitId: ${{ inputs.vcpkg_commit }}
          vcpkgGitURL: ${{ inputs.vcpkg_url }}

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        continue-on-error: true
        with:
          key: ${{ github.job }}-${{ matrix.duckdb_arch }}

      - name: Run configure
        shell: bash
        env:
          DUCKDB_GIT_VERSION: ${{ inputs.duckdb_version }}
        run: |
          make configure_ci

      - name: Build Wasm module
        run: |
          make ${{ matrix.duckdb_arch }}

      - uses: actions/upload-artifact@v4
        with:
          if-no-files-found: error
          name: ${{ inputs.extension_name }}-${{ inputs.duckdb_version }}-extension-${{matrix.duckdb_arch}}${{inputs.artifact_postfix}}
          path: |
            build/${{ matrix.duckdb_arch }}/extension/${{ inputs.extension_name }}/${{ inputs.extension_name }}.duckdb_extension.wasm

      - name: Print Rust logs
        if: ${{ inputs.rust_logs && (inputs.enable_rust || contains(format(';{0};', inputs.extra_toolchains), ';rust;')) }}
        shell: bash
        run: |
          for filename in build/release/rust/src/*/*build-*.log; do
            echo Printing logs for file $filename
            cat $filename;
            echo Done printing logs for $filename
          done
```

.github/workflows/build-linux-amd64.yml (vendored, new file, 206 lines)
```yaml
name: Build linux_amd64 extension and upload to Packages
on:
  workflow_dispatch:
    inputs:
      version:
        description: Package version (defaults to tag name or short SHA)
        required: false
  push:
    branches:
      - main
    tags:
      - 'v*'

jobs:
  build-linux-amd64:
    runs-on: ubuntu-latest
    env:
      EXTENSION_NAME: ui
      DUCKDB_PLATFORM: linux_amd64
    steps:
      - name: Checkout repository (manual)
        env:
          TOKEN: ${{ secrets.RELEASE_TOKEN }}
        run: |
          set -euo pipefail
          if [ -d .git ]; then
            echo "Repository already present"
          else
            server="${GITHUB_SERVER_URL:-${{ github.server_url }}}"
            repo_full="${GITHUB_REPOSITORY:-${{ github.repository }}}"
            sha="${GITHUB_SHA:-${{ github.sha }}}"
            host="$(echo "$server" | sed -E 's#^https?://([^/]+).*#\1#')"
            if [ -n "${TOKEN:-}" ]; then
              umask 077
              printf "machine %s\n  login token\n  password %s\n" "$host" "$TOKEN" > "$HOME/.netrc"
            fi
            git init .
            git config --global --add safe.directory "$(pwd)"
            git remote add origin "$server/$repo_full.git"
            git -c http.https://$host/.extraheader="" fetch --depth=1 origin "$sha"
            git checkout -q FETCH_HEAD
          fi

      - name: Show workspace status
        run: |
          set -e
          git --no-pager status | cat

      - name: Install build dependencies
        run: |
          set -e
          export DEBIAN_FRONTEND=noninteractive
          apt-get update
          apt-get install -y --no-install-recommends \
            build-essential cmake ninja-build python3 python3-venv pkg-config \
            libssl-dev curl git ca-certificates

      - name: Preflight Gitea upload (fast-fail)
        env:
          GITEA_TOKEN: ${{ secrets.RELEASE_TOKEN }}
          ACTOR: ${{ github.actor }}
        run: |
          set -euo pipefail
          : "${GITEA_TOKEN:?GITEA_TOKEN secret is required}"
          server="${GITHUB_SERVER_URL:-${{ github.server_url }}}"
          owner="${GITHUB_REPOSITORY_OWNER:-${{ github.repository_owner }}}"
          repo_full="${GITHUB_REPOSITORY:-${{ github.repository }}}"
          pkg="${repo_full##*/}-preflight"
          version="preflight-${GITHUB_RUN_ID:-0}-${GITHUB_RUN_ATTEMPT:-0}-$(date +%s)"
          name="check.bin"
          tmpfile="$(mktemp)"
          printf "auth check %s\n" "$(date -u +%FT%TZ)" > "$tmpfile"
          # Normalize server to effective scheme+host (handles http->https redirects)
          base_no_trail="$(echo "$server" | sed 's#/*$##')"
          # Use GET (not HEAD) to avoid servers that don't support HEAD on this endpoint
          effective_version_url=$(curl -sS -L -o /dev/null -w '%{url_effective}' "$base_no_trail/api/v1/version" || echo "")
          normalized_server=$(echo "$effective_version_url" | sed -E 's#^(https?://[^/]+).*$#\1#')
          if [ -n "$normalized_server" ]; then
            server="$normalized_server"
          fi
          url="$server/api/packages/$owner/generic/$pkg/$version/$name?replace=1"
          auth_user="${ACTOR:-$owner}"
          echo "Preflight: server=$server owner=$owner package=$pkg version=$version"
          # Perform preflight upload using Basic auth directly
          if curl -fS -L -X PUT \
            -u "$auth_user:${GITEA_TOKEN}" \
            -H "Content-Type: application/octet-stream" \
            --upload-file "$tmpfile" "$url" >/dev/null; then
            echo "Preflight upload succeeded, cleaning up"
          else
            echo "Preflight upload failed" >&2
            exit 1
          fi
          # Cleanup the uploaded dummy package version (best effort)
          curl -sS -L -o /dev/null -w "  delete -> HTTP %{http_code}\n" \
            -u "$auth_user:${GITEA_TOKEN}" -X DELETE \
            "$server/api/packages/$owner/generic/$pkg/$version" || true

      - name: Initialize submodules
        run: |
          set -e
          git submodule update --init --recursive

      - name: Build release (linux_amd64)
        env:
          GEN: ""
        run: |
          set -e
          make -j"$(nproc)" release

      - name: Find extension artifact
        id: artifact
        run: |
          set -euo pipefail
          path="$(ls -1 build/release/extension/${EXTENSION_NAME}/${EXTENSION_NAME}.duckdb_extension 2>/dev/null || true)"
          if [ -z "$path" ]; then
            path="$(find build/release -type f -name '*.duckdb_extension' | head -n 1 || true)"
          fi
          if [ -z "$path" ]; then
            echo "Extension artifact not found" >&2
            exit 1
          fi
          echo "file=$path" >> "$GITHUB_OUTPUT"
          echo "Found: $path"

      - name: Compute package version
        id: ver
        run: |
          set -euo pipefail
          version='${{ inputs.version }}'
          if [ -z "$version" ]; then
            if [ "${{ github.ref_type }}" = "tag" ]; then
              version='${{ github.ref_name }}'
            else
              version="dev-${GITHUB_SHA::8}"
            fi
          fi
          echo "version=$version" >> "$GITHUB_OUTPUT"
          echo "Using version: $version"

      - name: Upload to Gitea Packages (generic)
        env:
          GITEA_TOKEN: ${{ secrets.RELEASE_TOKEN }}
          ACTOR: ${{ github.actor }}
        run: |
          set -euo pipefail
          : "${GITEA_TOKEN:?GITEA_TOKEN secret is required}"
          # Derive server/owner/pkg from env if not provided
          server="${GITHUB_SERVER_URL:-${{ github.server_url }}}"
          owner="${GITHUB_REPOSITORY_OWNER:-${{ github.repository_owner }}}"
          repo_full="${GITHUB_REPOSITORY:-${{ github.repository }}}"
          pkg="${repo_full##*/}"
          # Use previously computed version & artifact if available
          version='${{ steps.ver.outputs.version }}'
          file='${{ steps.artifact.outputs.file }}'
          # Fallbacks if steps were skipped
          if [ -z "${version}" ]; then
            if [ -n "${GITHUB_REF_TYPE:-}" ] && [ "${GITHUB_REF_TYPE}" = "tag" ]; then
              version="${GITHUB_REF_NAME:-dev-${GITHUB_SHA::8}}"
            else
              version="dev-${GITHUB_SHA::8}"
            fi
          fi
          if [ -z "${file}" ]; then
            file="$(ls -1 build/release/extension/${EXTENSION_NAME}/${EXTENSION_NAME}.duckdb_extension 2>/dev/null || true)"
            if [ -z "$file" ]; then
              file="$(find build/release -type f -name '*.duckdb_extension' | head -n 1 || true)"
            fi
          fi
          [ -n "$server" ] || { echo "server not set" >&2; exit 1; }
          [ -n "$owner" ] || { echo "owner not set" >&2; exit 1; }
          [ -n "$pkg" ] || { echo "pkg not set" >&2; exit 1; }
          [ -n "$version" ] || { echo "version not set" >&2; exit 1; }
          [ -n "$file" ] || { echo "file not set" >&2; exit 1; }
          # Normalize server using effective URL of /api/v1/version (handles http->https)
          base_no_trail="$(echo "$server" | sed 's#/*$##')"
          effective_version_url=$(curl -sS -L -o /dev/null -w '%{url_effective}' "$base_no_trail/api/v1/version" || echo "")
          normalized_server=$(echo "$effective_version_url" | sed -E 's#^(https?://[^/]+).*$#\1#')
          if [ -n "$normalized_server" ]; then
            server="$normalized_server"
          fi
          # Use the GitHub actor as basic auth username by default
          auth_user="${ACTOR:-$owner}"
          name="$(basename "$file")"
          url="$server/api/packages/$owner/generic/$pkg/$version/$name?replace=1"
          echo "Uploading $file to $url"
          echo "  auth user=$auth_user"

          # Use Basic auth directly (works with package registry)
          curl -fS -L -X PUT \
            -u "$auth_user:${GITEA_TOKEN}" \
            -H "Content-Type: application/octet-stream" \
            --retry 2 --retry-delay 2 --max-time 300 \
            --upload-file "$file" "$url"
          echo "Upload complete."
          # Also upload the DuckDB shell binary
          bin_path="./build/release/duckdb"
          if [ ! -f "$bin_path" ]; then
            echo "duckdb binary not found at $bin_path" >&2
            exit 1
          fi
          bin_name="$(basename "$bin_path")"
          bin_url="$server/api/packages/$owner/generic/$pkg/$version/$bin_name?replace=1"
          echo "Uploading $bin_path to $bin_url"
          curl -fS -L -X PUT \
            -u "$auth_user:${GITEA_TOKEN}" \
            -H "Content-Type: application/octet-stream" \
            --retry 2 --retry-delay 2 --max-time 300 \
            --upload-file "$bin_path" "$bin_url"
          echo "DuckDB binary upload complete."
```
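
Consumers can fetch the uploaded files back from the same generic registry path the workflow PUTs to; a minimal sketch, assuming Gitea's generic package registry (where downloads use GET on the same path as uploads) and hypothetical `GITEA`, `OWNER`, `PKG`, and `VERSION` values matching an actual upload:

```shell
# Download the extension binary and shell uploaded by the workflow above.
GITEA=https://gitea.example.com   # assumption: your Gitea server
OWNER=my-org                      # assumption: repository owner
PKG=duckdb-ui                     # package name (repo name in the workflow)
VERSION=dev-12345678              # a version produced by the workflow
curl -fSL -O "$GITEA/api/packages/$OWNER/generic/$PKG/$VERSION/ui.duckdb_extension"
curl -fSL -O "$GITEA/api/packages/$OWNER/generic/$PKG/$VERSION/duckdb"
chmod +x duckdb
```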

.gitmodules (vendored, 4 lines changed)
```diff
@@ -1,8 +1,8 @@
 [submodule "duckdb"]
 	path = duckdb
 	url = https://github.com/duckdb/duckdb
-	branch = v1.2.1
+	branch = v1.3.2
 [submodule "extension-ci-tools"]
 	path = extension-ci-tools
 	url = https://github.com/duckdb/extension-ci-tools
-	branch = v1.2.1
+	branch = v1.3.2
```

CMakeLists.txt

```diff
@@ -27,6 +27,10 @@ set(EXTENSION_SOURCES
   src/utils/serialization.cpp
   src/watcher.cpp)
 
+add_definitions(-DDUCKDB_MAJOR_VERSION=${DUCKDB_MAJOR_VERSION})
+add_definitions(-DDUCKDB_MINOR_VERSION=${DUCKDB_MINOR_VERSION})
+add_definitions(-DDUCKDB_PATCH_VERSION=${DUCKDB_PATCH_VERSION})
+
 find_package(Git)
 if(NOT Git_FOUND)
   message(FATAL_ERROR "Git not found, unable to determine git sha")
@@ -60,5 +64,3 @@ install(
   EXPORT "${DUCKDB_EXPORT_SET}"
   LIBRARY DESTINATION "${INSTALL_LIB_DIR}"
   ARCHIVE DESTINATION "${INSTALL_LIB_DIR}")
-
-add_subdirectory(test-ssl)
```

README.md (88 lines changed)
````diff
@@ -1,24 +1,14 @@
-# duckdb-ui
+# DuckDB UI Extension
 
-This extension provides a user interface for DuckDB, allowing you to interact with DuckDB through a web-based interface. This repository is based on https://github.com/duckdb/extension-template, check it out if you want to build and ship your own DuckDB extension.
+A [DuckDB extension](https://duckdb.org/docs/stable/core_extensions/ui.html) providing a browser-based user interface.
 
----
+This repository contains both the extension, implemented in C++, and some packages used by the user interface, implemented in TypeScript.
 
-## Building
+While most of the user interface code is not yet publicly available, more of it will be added here over time.
 
-### Managing dependencies
+## Extension
 
-DuckDB extensions uses VCPKG for dependency management. Enabling VCPKG is very simple: follow the [installation instructions](https://vcpkg.io/en/getting-started) or just run the following:
-
-```shell
-git clone https://github.com/Microsoft/vcpkg.git
-./vcpkg/bootstrap-vcpkg.sh
-export VCPKG_TOOLCHAIN_PATH=`pwd`/vcpkg/scripts/buildsystems/vcpkg.cmake
-```
-
-Note: VCPKG is only required for extensions that want to rely on it for dependency management. If you want to develop an extension without dependencies, or want to do your own dependency management, just skip this step. Note that the example extension uses VCPKG to build with a dependency for instructive purposes, so when skipping this step the build may not work without removing the dependency.
-
-### Build steps
+The primary structure of this repository is based on the [DuckDB extension template](https://github.com/duckdb/extension-template).
 
 To build the extension:
 
@@ -38,66 +28,38 @@ This will create the following binaries:
 - `unittest` is the test runner of duckdb. Again, the extension is already linked into the binary.
 - `ui.duckdb_extension` is the loadable binary as it would be distributed.
 
-## Running the extension
-
 To run the extension code, simply start the shell with `./build/release/duckdb`.
 
-Now we can use the features from the extension directly in DuckDB. The template contains a single scalar function `ui()` that takes a string arguments and returns a string:
+To start the UI from the command line:
 
 ```
-D select ui('Jane') as result;
-┌───────────────┐
-│    result     │
-│    varchar    │
-├───────────────┤
-│  Ui Jane 🐥  │
-└───────────────┘
+./build/release/duckdb -ui
 ```
 
-## Running the tests
-
-Different tests can be created for DuckDB extensions. The primary way of testing DuckDB extensions should be the SQL tests in `./test/sql`. These SQL tests can be run using:
-
-```sh
-make test
+To start the UI from SQL:
+```
+call start_ui();
 ```
 
-### Installing the deployed binaries
-
-To install your extension binaries from S3, you will need to do two things. Firstly, DuckDB should be launched with the
-`allow_unsigned_extensions` option set to true. How to set this will depend on the client you're using. Some examples:
+For more usage details, see the [documentation](https://duckdb.org/docs/stable/core_extensions/ui.html).
 
-CLI:
+## User Interface Packages
 
-```shell
-duckdb -unsigned
-```
+Some packages used by the browser-based user interface can be found in the `ts` directory.
 
-Python:
+See the [README](ts/README.md) in that directory for details.
 
-```python
-con = duckdb.connect(':memory:', config={'allow_unsigned_extensions' : 'true'})
-```
+## Architectural Overview
 
-NodeJS:
+The extension starts an HTTP server that both serves the UI assets (HTML, JavaScript, etc.)
+and handles requests to run SQL and perform other DuckDB operations.
 
-```js
-db = new duckdb.Database(':memory:', {"allow_unsigned_extensions": "true"});
-```
+The server proxies requests for UI assets and fetches them from a remote server.
+By default, this is `https://ui.duckdb.org`, but it can be [overridden](https://duckdb.org/docs/stable/core_extensions/ui.html#remote-url).
 
-Secondly, you will need to set the repository endpoint in DuckDB to the HTTP url of your bucket + version of the extension
-you want to install. To do this run the following SQL query in DuckDB:
+The server also exposes a number of HTTP endpoints for performing DuckDB operations.
+These include running SQL, interrupting runs, tokenizing SQL text, and receiving events (such as catalog updates).
+For details, see the `HttpServer::Run` method in [http_server.cpp](src/http_server.cpp).
 
-```sql
-SET custom_extension_repository='bucket.s3.eu-west-1.amazonaws.com/<your_extension_name>/latest';
-```
-
-Note that the `/latest` path will allow you to install the latest extension version available for your current version of
-DuckDB. To specify a specific version, you can pass the version instead.
-
-After running these steps, you can install and load your extension using the regular INSTALL/LOAD commands in DuckDB:
-
-```sql
-INSTALL ui
-LOAD ui
-```
+The UI uses the TypeScript package [duckdb-ui-client](ts/pkgs/duckdb-ui-client/package.json) for communicating with the server.
+See the [DuckDBUIClient](ts/pkgs/duckdb-ui-client/src/client/classes/DuckDBUIClient.ts) and [DuckDBUIClientConnection](ts/pkgs/duckdb-ui-client/src/client/classes/DuckDBUIClientConnection.ts) classes exposed by this package for details.
````
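
The architectural notes in the new README can be exercised directly; a minimal sketch, assuming a built shell and the extension's default local port (4213 at the time of writing; the port and host are configurable settings, so both are assumptions here):

```shell
# Start the shell with the UI, which launches the embedded HTTP server.
./build/release/duckdb -ui &
sleep 1
# Probe the health endpoint; the extension itself uses /info to detect an
# already-running server (see the http_server.cpp changes below).
curl -s http://localhost:4213/info
```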

duckdb (submodule updated: 8e52ec4395...0b83e5d2f6)
extension-ci-tools (submodule updated: 00e6af0684...90757de3f0)
src/http_server.cpp

@@ -9,9 +9,12 @@
 #include "utils/serialization.hpp"
 #include "version.hpp"
 #include "watcher.hpp"
+#include <duckdb/common/http_util.hpp>
 #include <duckdb/common/serializer/binary_serializer.hpp>
 #include <duckdb/common/serializer/memory_stream.hpp>
 #include <duckdb/main/attached_database.hpp>
+#include <duckdb/main/client_data.hpp>
+#include <duckdb/parser/parsed_data/create_table_info.hpp>
 #include <duckdb/parser/parser.hpp>

 namespace duckdb {
@@ -19,6 +22,76 @@ namespace ui {

 unique_ptr<HttpServer> HttpServer::server_instance;

+// Helpers for validating request origin/referer in deployments where the UI is
+// exposed on a non-localhost host (e.g., Docker, k8s, reverse proxies). These
+// checks allow either the configured local_url, or the runtime host derived
+// from the request headers. They also allow an escape hatch via the
+// environment variable `ui_allow_any_origin=1|true`.
+namespace {
+
+// Returns true if the given referer begins with any of the expected base URLs.
+static bool RefererStartsWithAny(const std::string &referer,
+                                 const std::vector<std::string> &bases) {
+  for (const auto &base : bases) {
+    if (!base.empty() && referer.compare(0, base.size(), base) == 0) {
+      return true;
+    }
+  }
+  return false;
+}
+
+static std::vector<std::string>
+ExpectedBaseUrls(const httplib::Request &req, const std::string &local_url) {
+  // Prefer forwarded host if present, otherwise fall back to Host.
+  auto forwarded_host = req.get_header_value("X-Forwarded-Host");
+  auto host = forwarded_host.empty() ? req.get_header_value("Host")
+                                     : forwarded_host;
+
+  std::vector<std::string> bases;
+  bases.push_back(local_url);
+  if (!host.empty()) {
+    bases.push_back(StringUtil::Format("http://%s", host));
+    bases.push_back(StringUtil::Format("https://%s", host));
+  }
+  return bases;
+}
+
+static bool IsOriginAllowed(const httplib::Request &req,
+                            const std::string &local_url) {
+  if (IsEnvEnabled("ui_allow_any_origin")) {
+    return true;
+  }
+
+  auto origin = req.get_header_value("Origin");
+  if (origin.empty()) {
+    return false;
+  }
+
+  auto bases = ExpectedBaseUrls(req, local_url);
+  for (const auto &base : bases) {
+    if (origin == base) {
+      return true;
+    }
+  }
+  return false;
+}
+
+static bool IsRefererAllowed(const httplib::Request &req,
+                             const std::string &local_url) {
+  if (IsEnvEnabled("ui_allow_any_origin")) {
+    return true;
+  }
+
+  auto referer = req.get_header_value("Referer");
+  if (referer.empty()) {
+    return false;
+  }
+
+  return RefererStartsWithAny(referer, ExpectedBaseUrls(req, local_url));
+}
+
+} // namespace
+
 HttpServer *HttpServer::GetInstance(ClientContext &context) {
   if (server_instance) {
     // We already have an instance, make sure we're running on the right DB
@@ -64,7 +137,8 @@ bool HttpServer::IsRunningOnMachine(ClientContext &context) {
   }

   const auto local_port = GetLocalPort(context);
-  auto local_url = StringUtil::Format("http://localhost:%d", local_port);
+  const auto local_host = GetLocalHost(context);
+  auto local_url = StringUtil::Format("http://%s:%d", local_host, local_port);

   httplib::Client client(local_url);
   return client.Get("/info");
@@ -94,20 +168,28 @@ const HttpServer &HttpServer::Start(ClientContext &context, bool *was_started) {

   const auto remote_url = GetRemoteUrl(context);
   const auto port = GetLocalPort(context);
+  const auto host = GetLocalHost(context);
   auto server = GetInstance(context);
-  server->DoStart(port, remote_url);
+  auto &http_util = HTTPUtil::Get(*context.db);
+  // FIXME - https://github.com/duckdb/duckdb/pull/17655 will remove `unused`
+  auto http_params = http_util.InitializeParameters(context, "unused");
+  server->DoStart(port, host, remote_url, std::move(http_params));
   return *server;
 }

 void HttpServer::DoStart(const uint16_t _local_port,
-                         const std::string &_remote_url) {
+                         const std::string &_local_host,
+                         const std::string &_remote_url,
+                         unique_ptr<HTTPParams> _http_params) {
   if (Started()) {
     throw std::runtime_error("HttpServer already started");
   }

   local_port = _local_port;
-  local_url = StringUtil::Format("http://localhost:%d", local_port);
+  local_host = _local_host;
+  local_url = StringUtil::Format("http://%s:%d", local_host, local_port);
   remote_url = _remote_url;
+  http_params = std::move(_http_params);
   user_agent =
       StringUtil::Format("duckdb-ui/%s-%s(%s)", DuckDB::LibraryVersion(),
                          UI_EXTENSION_VERSION, DuckDB::Platform());
@@ -144,12 +226,14 @@ void HttpServer::DoStop() {
   }

   ddb_instance.reset();
+  http_params = nullptr;
   remote_url = "";
   local_port = 0;
+  local_host = "";
 }

 std::string HttpServer::LocalUrl() const {
-  return StringUtil::Format("http://localhost:%d/", local_port);
+  return StringUtil::Format("http://%s:%d/", local_host, local_port);
 }

 shared_ptr<DatabaseInstance> HttpServer::LockDatabaseInstance() {
@@ -185,7 +269,7 @@ void HttpServer::Run() {
                      const httplib::ContentReader &content_reader) {
     HandleTokenize(req, res, content_reader);
   });
-  server.listen("localhost", local_port);
+  server.listen(local_host, local_port);
 }

 void HttpServer::HandleGetInfo(const httplib::Request &req,
@@ -214,8 +298,7 @@ void HttpServer::HandleGetLocalToken(const httplib::Request &req,
                                      httplib::Response &res) {
   // GET requests don't include Origin, so use Referer instead.
   // Referer includes the path, so only compare the start.
-  auto referer = req.get_header_value("Referer");
-  if (referer.compare(0, local_url.size(), local_url) != 0) {
+  if (!IsRefererAllowed(req, local_url)) {
     res.status = 401;
     return;
   }
@@ -240,19 +323,47 @@ void HttpServer::HandleGetLocalToken(const httplib::Request &req,
   }
 }

+// Adapted from
+// https://github.com/duckdb/duckdb/blob/1f8b6839ea7864c3e3fb020574f67384cb58124c/src/main/http/http_util.cpp#L129-L147
+// Which is not currently exposed.
+void HttpServer::InitClientFromParams(httplib::Client &client) {
+  auto sec = static_cast<time_t>(http_params->timeout);
+  auto usec = static_cast<time_t>(http_params->timeout_usec);
+  client.set_keep_alive(true);
+  client.set_write_timeout(sec, usec);
+  client.set_read_timeout(sec, usec);
+  client.set_connection_timeout(sec, usec);
+
+  if (!http_params->http_proxy.empty()) {
+    client.set_proxy(http_params->http_proxy,
+                     static_cast<int>(http_params->http_proxy_port));
+
+    if (!http_params->http_proxy_username.empty()) {
+      client.set_proxy_basic_auth(http_params->http_proxy_username,
+                                  http_params->http_proxy_password);
+    }
+  }
+}
+
 void HttpServer::HandleGet(const httplib::Request &req,
                            httplib::Response &res) {
   // Create HTTP client to remote URL
   // TODO: Can this be created once and shared?
   httplib::Client client(remote_url);
-  client.set_keep_alive(true);
+  InitClientFromParams(client);

   if (IsEnvEnabled("ui_disable_server_certificate_verification")) {
     client.enable_server_certificate_verification(false);
   }

+  httplib::Headers headers = {{"User-Agent", user_agent}};
+  auto cookie = req.get_header_value("Cookie");
+  if (!cookie.empty()) {
+    headers.emplace("Cookie", cookie);
+  }
+
   // forward GET to remote URL
-  auto result = client.Get(req.path, req.params, {{"User-Agent", user_agent}});
+  auto result = client.Get(req.path, req.params, headers);
   if (!result) {
     res.status = 500;
     res.set_content("Could not fetch: '" + req.path + "' from '" + remote_url +
@@ -279,8 +390,7 @@ void HttpServer::HandleGet(const httplib::Request &req,

 void HttpServer::HandleInterrupt(const httplib::Request &req,
                                  httplib::Response &res) {
-  auto origin = req.get_header_value("Origin");
-  if (origin != local_url) {
+  if (!IsOriginAllowed(req, local_url)) {
     res.status = 401;
     return;
   }
@@ -319,8 +429,7 @@ void HttpServer::HandleRun(const httplib::Request &req, httplib::Response &res,
 void HttpServer::DoHandleRun(const httplib::Request &req,
                              httplib::Response &res,
                              const httplib::ContentReader &content_reader) {
-  auto origin = req.get_header_value("Origin");
-  if (origin != local_url) {
+  if (!IsOriginAllowed(req, local_url)) {
     res.status = 401;
     return;
   }
@@ -329,8 +438,10 @@ void HttpServer::DoHandleRun(const httplib::Request &req,

   auto connection_name = req.get_header_value("X-DuckDB-UI-Connection-Name");

-  auto database_name =
+  auto database_name_option =
       DecodeBase64(req.get_header_value("X-DuckDB-UI-Database-Name"));
+  auto schema_name_option =
+      DecodeBase64(req.get_header_value("X-DuckDB-UI-Schema-Name"));

   std::vector<std::string> parameter_values;
   auto parameter_count_string =
@@ -344,6 +455,34 @@ void HttpServer::DoHandleRun(const httplib::Request &req,
     }
   }

+  // default to effectively no limit
+  auto result_chunk_limit = INT_MAX;
+  auto result_chunk_limit_string =
+      req.get_header_value("X-DuckDB-UI-Result-Chunk-Limit");
+  if (!result_chunk_limit_string.empty()) {
+    result_chunk_limit = std::stoi(result_chunk_limit_string);
+  }
+
+  auto result_database_name_option =
+      DecodeBase64(req.get_header_value("X-DuckDB-UI-Result-Database-Name"));
+  auto result_schema_name_option =
+      DecodeBase64(req.get_header_value("X-DuckDB-UI-Result-Schema-Name"));
+  auto result_table_name =
+      DecodeBase64(req.get_header_value("X-DuckDB-UI-Result-Table-Name"));
+
+  // If no result table is specified, then the result table chunk limit is zero.
+  // Otherwise, default to effectively no limit.
+  auto result_table_chunk_limit = result_table_name.empty() ? 0 : INT_MAX;
+  auto result_table_chunk_limit_string =
+      req.get_header_value("X-DuckDB-UI-Result-Table-Chunk-Limit");
+  // Only set the result table chunk limit if a result table name is specified.
+  if (!result_table_name.empty() && !result_table_chunk_limit_string.empty()) {
+    result_table_chunk_limit = std::stoi(result_table_chunk_limit_string);
+  }
+
+  auto errors_as_json_string =
+      req.get_header_value("X-DuckDB-UI-Errors-As-JSON");
+
   std::string content = ReadContent(content_reader);

   auto db = ddb_instance.lock();
@@ -356,23 +495,83 @@ void HttpServer::DoHandleRun(const httplib::Request &req,
   auto connection =
       UIStorageExtensionInfo::GetState(*db).FindOrCreateConnection(
           *db, connection_name);

-  // Set current database if optional header is provided.
-  if (!database_name.empty()) {
   auto &context = *connection->context;
+  auto &config = ClientConfig::GetConfig(context);
+
+  // Set errors_as_json
+  if (!errors_as_json_string.empty()) {
+    config.errors_as_json = errors_as_json_string == "true";
+  }
+
+  // Set current database & schema
+  if (!database_name_option.empty() || !schema_name_option.empty()) {
+    // It's fine if the database name is empty, but we need a valid schema name.
+    auto schema_name =
+        schema_name_option.empty() ? DEFAULT_SCHEMA : schema_name_option;
     context.RunFunctionInTransaction([&] {
-      auto &manager = context.db->GetDatabaseManager();
-      manager.SetDefaultDatabase(context, database_name);
+      duckdb::ClientData::Get(context).catalog_search_path->Set(
+          {database_name_option, schema_name},
+          duckdb::CatalogSetPathType::SET_SCHEMA);
     });
   }

+  unique_ptr<SQLStatement> last_statement;
+
+  auto statements = connection->ExtractStatements(content);
+  auto statement_count = statements.size();
+
+  if (statement_count == 0) {
+    SetResponseErrorResult(res, "No statements");
+    return;
+  }
+
+  // If there's more than one statement, run all but the last.
+  if (statement_count > 1) {
+    for (auto i = 0; i < statement_count - 1; ++i) {
+      auto pending = connection->PendingQuery(std::move(statements[i]), true);
+      // Return any error found before execution.
+      if (pending->HasError()) {
+        SetResponseErrorResult(res, pending->GetError());
+        return;
+      }
+      // Execute tasks until result is ready (or there's an error).
+      auto exec_result = PendingExecutionResult::RESULT_NOT_READY;
+      while (!PendingQueryResult::IsResultReady(exec_result)) {
+        exec_result = pending->ExecuteTask();
+        if (exec_result == PendingExecutionResult::BLOCKED ||
+            exec_result == PendingExecutionResult::NO_TASKS_AVAILABLE) {
+          std::this_thread::sleep_for(std::chrono::milliseconds(1));
+        }
+      }
+      // Return any error found during execution.
+      switch (exec_result) {
+      case PendingExecutionResult::EXECUTION_ERROR:
+        SetResponseErrorResult(res, pending->GetError());
+        return;
+      case PendingExecutionResult::EXECUTION_FINISHED:
+      case PendingExecutionResult::RESULT_READY:
+        // ignore the result
+        pending->Execute();
+        break;
+      default:
+        SetResponseErrorResult(
+            res, StringUtil::Format("Unexpected PendingExecutionResult: %s",
                                     exec_result));
+        return;
+      }
+    }
+  }
+
+  // Get the last statement.
+  auto &statement_to_run = statements[statement_count - 1];
+
   // We use a pending query so we can execute tasks and fetch chunks
   // incrementally. This enables cancellation.
   unique_ptr<PendingQueryResult> pending;

   // Create pending query, with request content as SQL.
   if (parameter_values.size() > 0) {
-    auto prepared = connection->Prepare(content);
+    auto prepared = connection->Prepare(std::move(statement_to_run));
     if (prepared->HasError()) {
       SetResponseErrorResult(res, prepared->GetError());
       return;
@@ -385,7 +584,7 @@ void HttpServer::DoHandleRun(const httplib::Request &req,
     }
     pending = prepared->PendingQuery(values, true);
   } else {
-    pending = connection->PendingQuery(content, true);
+    pending = connection->PendingQuery(std::move(statement_to_run), true);
   }

   if (pending->HasError()) {
@@ -414,17 +613,67 @@ void HttpServer::DoHandleRun(const httplib::Request &req,
   // Get the result. This should be quick because it's ready.
   auto result = pending->Execute();

+  // We use a separate connection for the appender, including creating the
+  // result table, because we still need to fetch chunks from the pending
+  // query on the user's connection.
+  unique_ptr<duckdb::Connection> appender_connection;
+  unique_ptr<duckdb::Appender> appender;
+
+  if (!result_table_name.empty()) {
+    auto result_database_name = result_database_name_option.empty()
+                                    ? "memory"
+                                    : result_database_name_option;
+    auto result_schema_name = result_schema_name_option.empty()
+                                  ? "main"
+                                  : result_schema_name_option;
+
+    auto result_table_info = make_uniq<duckdb::CreateTableInfo>(
+        result_database_name, result_schema_name, result_table_name);
+    for (idx_t i = 0; i < result->names.size(); i++) {
+      result_table_info->columns.AddColumn(
+          ColumnDefinition(result->names[i], result->types[i]));
+    }
+
+    appender_connection = make_uniq<duckdb::Connection>(*db);
+    auto appender_context = appender_connection->context;
+    appender_context->RunFunctionInTransaction([&] {
+      auto &catalog = duckdb::Catalog::GetCatalog(*appender_context,
+                                                  result_database_name);
+      MetaTransaction::Get(*appender_context)
+          .ModifyDatabase(catalog.GetAttached());
+      catalog.CreateTable(*appender_context, std::move(result_table_info));
+    });
+
+    appender = make_uniq<duckdb::Appender>(
+        *appender_connection, result_database_name, result_schema_name,
+        result_table_name);
+  }
+
   // Fetch the chunks and serialize the result.
   SuccessResult success_result;
   success_result.column_names_and_types = {std::move(result->names),
                                            std::move(result->types)};

-  // TODO: support limiting the number of chunks fetched
-  auto chunk = result->Fetch();
-  while (chunk) {
+  auto chunk_limit = std::max(result_chunk_limit, result_table_chunk_limit);
+  auto chunks_fetched = 0;
+  unique_ptr<duckdb::DataChunk> chunk;
+  while (chunks_fetched < chunk_limit) {
+    chunk = result->Fetch();
+    if (!chunk) {
+      break;
+    }
+    ++chunks_fetched;
+    if (appender && chunks_fetched <= result_table_chunk_limit) {
+      appender->AppendDataChunk(*chunk);
+    }
+    if (chunks_fetched <= result_chunk_limit) {
       success_result.chunks.push_back(
           {static_cast<uint16_t>(chunk->size()), std::move(chunk->data)});
-    chunk = result->Fetch();
+    }
+  }
+
+  if (appender) {
+    appender->Close();
   }

   MemoryStream success_response_content;
@@ -433,7 +682,9 @@ void HttpServer::DoHandleRun(const httplib::Request &req,
       break;
     }
     default:
-      SetResponseErrorResult(res, "Unexpected PendingExecutionResult");
+      SetResponseErrorResult(
+          res, StringUtil::Format("Unexpected PendingExecutionResult: %s",
+                                  exec_result));
       break;
     }
   }
@@ -441,8 +692,7 @@ void HttpServer::DoHandleRun(const httplib::Request &req,
 void HttpServer::HandleTokenize(const httplib::Request &req,
                                 httplib::Response &res,
                                 const httplib::ContentReader &content_reader) {
-  auto origin = req.get_header_value("Origin");
-  if (origin != local_url) {
+  if (!IsOriginAllowed(req, local_url)) {
     res.status = 401;
     return;
   }
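To make the new checks concrete, here is a small TypeScript transliteration of the allow-list logic added above (a sketch for illustration, not code from the extension): the allowed origins are the configured local URL plus http(s) URLs built from `X-Forwarded-Host` (or `Host`), so a UI served through a reverse proxy is accepted without disabling the check entirely.

```ts
// Sketch of ExpectedBaseUrls/IsOriginAllowed above; header keys are
// lowercased here for convenience.
function expectedBaseUrls(
  headers: Record<string, string | undefined>,
  localUrl: string,
): string[] {
  // Prefer the forwarded host (reverse proxy), else the Host header.
  const host = headers['x-forwarded-host'] ?? headers['host'];
  const bases = [localUrl];
  if (host) {
    bases.push(`http://${host}`, `https://${host}`);
  }
  return bases;
}

function isOriginAllowed(
  headers: Record<string, string | undefined>,
  localUrl: string,
): boolean {
  const origin = headers['origin'];
  return !!origin && expectedBaseUrls(headers, localUrl).includes(origin);
}

// Example: a proxy at duckdb.example.com forwarding to localhost:4213.
isOriginAllowed(
  {
    origin: 'https://duckdb.example.com',
    'x-forwarded-host': 'duckdb.example.com',
  },
  'http://localhost:4213',
); // true
```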
http_server.hpp

@@ -1,6 +1,7 @@
 #pragma once

 #include <duckdb.hpp>
+#include <duckdb/common/http_util.hpp>

 #define CPPHTTPLIB_OPENSSL_SUPPORT
 #include "httplib.hpp"
@@ -15,6 +16,7 @@
 namespace httplib = duckdb_httplib_openssl;

 namespace duckdb {
+class HTTPParams;
 class MemoryStream;

 namespace ui {
@@ -40,7 +42,8 @@ private:
   friend class Watcher;

   // Lifecycle
-  void DoStart(const uint16_t local_port, const std::string &remote_url);
+  void DoStart(const uint16_t local_port, const std::string &local_host,
+               const std::string &remote_url, unique_ptr<HTTPParams>);
   void DoStop();
   void Run();
   void UpdateDatabaseInstance(shared_ptr<DatabaseInstance> context_db);
@@ -67,8 +70,10 @@ private:

   // Misc
   shared_ptr<DatabaseInstance> LockDatabaseInstance();
+  void InitClientFromParams(httplib::Client &);

   uint16_t local_port;
+  std::string local_host;
   std::string local_url;
   std::string remote_url;
   weak_ptr<DatabaseInstance> ddb_instance;
@@ -77,6 +82,7 @@ private:
   unique_ptr<std::thread> main_thread;
   unique_ptr<EventDispatcher> event_dispatcher;
   unique_ptr<Watcher> watcher;
+  unique_ptr<HTTPParams> http_params;

   static unique_ptr<HttpServer> server_instance;
 };
settings.hpp

@@ -5,6 +5,8 @@

 #define UI_LOCAL_PORT_SETTING_NAME "ui_local_port"
 #define UI_LOCAL_PORT_SETTING_DEFAULT 4213
+#define UI_LOCAL_HOST_SETTING_NAME "ui_local_host"
+#define UI_LOCAL_HOST_SETTING_DEFAULT "localhost"
 #define UI_REMOTE_URL_SETTING_NAME "ui_remote_url"
 #define UI_REMOTE_URL_SETTING_DEFAULT "https://ui.duckdb.org"
 #define UI_POLLING_INTERVAL_SETTING_NAME "ui_polling_interval"
@@ -27,6 +29,7 @@ T GetSetting(const ClientContext &context, const char *setting_name) {

 std::string GetRemoteUrl(const ClientContext &);
 uint16_t GetLocalPort(const ClientContext &);
+std::string GetLocalHost(const ClientContext &);
 uint32_t GetPollingInterval(const ClientContext &);

 } // namespace duckdb
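With the new `ui_local_host` setting (default `localhost`), binding the server to all interfaces, e.g. inside a container, is a matter of setting it before the server starts. A sketch using the `@duckdb/node-api` client follows; the package choice and its exact call signatures are assumptions for the example, and any DuckDB client that can run SQL works the same way:

```ts
import { DuckDBInstance } from '@duckdb/node-api';

// Assumptions: @duckdb/node-api's create/connect/run API, and the
// start_ui() function shipped by the extension. The setting names and
// defaults come from settings.hpp above.
const instance = await DuckDBInstance.create(':memory:');
const connection = await instance.connect();
await connection.run(`SET ui_local_host = '0.0.0.0'`); // default: 'localhost'
await connection.run(`SET ui_local_port = 4213`); // default: 4213
await connection.run(`CALL start_ui()`); // now listening on 0.0.0.0:4213
```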
ui_extension.hpp

@@ -6,7 +6,12 @@ namespace duckdb {

 class UiExtension : public Extension {
 public:
+#ifdef DUCKDB_CPP_EXTENSION_ENTRY
+  void Load(ExtensionLoader &loader) override;
+#else
   void Load(DuckDB &db) override;
+#endif

   std::string Name() override;
   std::string Version() const override;
 };
utils/helpers.hpp

@@ -1,9 +1,19 @@
 #pragma once

 #include <duckdb.hpp>
+#ifndef DUCKDB_CPP_EXTENSION_ENTRY
 #include <duckdb/main/extension_util.hpp>
+#endif
 #include <type_traits>

+// TODO we cannot run these checks because they are not defined for DuckDB < 1.4.x
+// #ifndef DUCKDB_MAJOR_VERSION
+// #error "DUCKDB_MAJOR_VERSION is not defined"
+// ...
+#define DUCKDB_VERSION_AT_MOST(major, minor, patch) \
+  (DUCKDB_MAJOR_VERSION < (major) || (DUCKDB_MAJOR_VERSION == (major) && DUCKDB_MINOR_VERSION < (minor)) || \
+   (DUCKDB_MAJOR_VERSION == (major) && DUCKDB_MINOR_VERSION == (minor) && DUCKDB_PATCH_VERSION <= (patch)))
+
 namespace duckdb {

 typedef std::string (*simple_tf_t)(ClientContext &);
@@ -64,6 +74,15 @@ void TableFunc(ClientContext &context, TableFunctionInput &input,
   output.SetValue(0, 0, result);
 }

+#ifdef DUCKDB_CPP_EXTENSION_ENTRY
+template <typename Func, Func func>
+void RegisterTF(ExtensionLoader &loader, const char *name) {
+  TableFunction tf(name, {}, internal::TableFunc<Func, func>,
+                   internal::SingleStringResultBind,
+                   RunOnceTableFunctionState::Init);
+  loader.RegisterFunction(tf);
+}
+#else
 template <typename Func, Func func>
 void RegisterTF(DatabaseInstance &instance, const char *name) {
   TableFunction tf(name, {}, internal::TableFunc<Func, func>,
@@ -71,10 +90,16 @@ void RegisterTF(DatabaseInstance &instance, const char *name) {
                    RunOnceTableFunctionState::Init);
   ExtensionUtil::RegisterFunction(instance, tf);
 }
+#endif

 } // namespace internal

+#ifdef DUCKDB_CPP_EXTENSION_ENTRY
+#define REGISTER_TF(name, func) \
+  internal::RegisterTF<decltype(&func), &func>(loader, name)
+#else
 #define REGISTER_TF(name, func) \
   internal::RegisterTF<decltype(&func), &func>(instance, name)
+#endif

 } // namespace duckdb
watcher.hpp

@@ -26,6 +26,7 @@ private:
   std::condition_variable cv;
   std::atomic<bool> should_run;
   HttpServer &server;
+  DatabaseInstance *watched_database;
 };
 } // namespace ui
 } // namespace duckdb
settings.cpp

@@ -15,6 +15,10 @@ uint16_t GetLocalPort(const ClientContext &context) {
   return internal::GetSetting<uint16_t>(context, UI_LOCAL_PORT_SETTING_NAME);
 }

+std::string GetLocalHost(const ClientContext &context) {
+  return internal::GetSetting<std::string>(context, UI_LOCAL_HOST_SETTING_NAME);
+}
+
 uint32_t GetPollingInterval(const ClientContext &context) {
   return internal::GetSetting<uint32_t>(context,
                                         UI_POLLING_INTERVAL_SETTING_NAME);
ui_extension.cpp

@@ -13,6 +13,7 @@

 #ifdef _WIN32
 #define OPEN_COMMAND "start"
+#undef CreateDirectory // avoid being transformed to `CreateDirectoryA`
 #elif __linux__
 #define OPEN_COMMAND "xdg-open"
 #else
@@ -81,7 +82,12 @@ void InitStorageExtension(duckdb::DatabaseInstance &db) {
   config.storage_extensions[STORAGE_EXTENSION_KEY] = std::move(ext);
 }

+#ifdef DUCKDB_CPP_EXTENSION_ENTRY
+static void LoadInternal(ExtensionLoader &loader) {
+  auto &instance = loader.GetDatabaseInstance();
+#else
 static void LoadInternal(DatabaseInstance &instance) {
+#endif
   InitStorageExtension(instance);

   // If the server is already running we need to update the database instance
@@ -102,6 +108,14 @@ static void LoadInternal(DatabaseInstance &instance) {
                               LogicalType::USMALLINT, Value::USMALLINT(default_port));
   }

+  {
+    auto default_host = GetEnvOrDefault(UI_LOCAL_HOST_SETTING_NAME,
+                                        UI_LOCAL_HOST_SETTING_DEFAULT);
+    config.AddExtensionOption(UI_LOCAL_HOST_SETTING_NAME,
+                              "Local host on which the UI server listens",
+                              LogicalType::VARCHAR, Value(default_host));
+  }
+
   {
     auto def = GetEnvOrDefault(UI_REMOTE_URL_SETTING_NAME,
                                UI_REMOTE_URL_SETTING_DEFAULT);
@@ -128,11 +142,20 @@ static void LoadInternal(DatabaseInstance &instance) {
     TableFunction tf("ui_is_started", {}, IsUIStartedTableFunc,
                      internal::SingleBoolResultBind,
                      RunOnceTableFunctionState::Init);
+#ifdef DUCKDB_CPP_EXTENSION_ENTRY
+    loader.RegisterFunction(tf);
+#else
     ExtensionUtil::RegisterFunction(instance, tf);
+#endif
   }
 }

+#ifdef DUCKDB_CPP_EXTENSION_ENTRY
+void UiExtension::Load(ExtensionLoader &loader) { LoadInternal(loader); }
+#else
 void UiExtension::Load(DuckDB &db) { LoadInternal(*db.instance); }
+#endif

 std::string UiExtension::Name() { return "ui"; }

 std::string UiExtension::Version() const { return UI_EXTENSION_VERSION; }
@@ -141,10 +164,14 @@ std::string UiExtension::Version() const { return UI_EXTENSION_VERSION; }

 extern "C" {

+#ifdef DUCKDB_CPP_EXTENSION_ENTRY
+DUCKDB_CPP_EXTENSION_ENTRY(ui, loader) { duckdb::LoadInternal(loader); }
+#else
 DUCKDB_EXTENSION_API void ui_init(duckdb::DatabaseInstance &db) {
   duckdb::DuckDB db_wrapper(db);
   db_wrapper.LoadExtension<duckdb::UiExtension>();
 }
+#endif

 DUCKDB_EXTENSION_API const char *ui_version() {
   return duckdb::DuckDB::LibraryVersion();
watcher.cpp

@@ -2,6 +2,7 @@

 #include <duckdb/main/attached_database.hpp>

+#include "utils/helpers.hpp"
 #include "utils/md_helpers.hpp"
 #include "http_server.hpp"
 #include "settings.hpp"
@@ -9,7 +10,8 @@
 namespace duckdb {
 namespace ui {

-Watcher::Watcher(HttpServer &_server) : should_run(false), server(_server) {}
+Watcher::Watcher(HttpServer &_server)
+    : should_run(false), server(_server), watched_database(nullptr) {}

 bool WasCatalogUpdated(DatabaseInstance &db, Connection &connection,
                        CatalogState &last_state) {
@@ -22,19 +24,23 @@ bool WasCatalogUpdated(DatabaseInstance &db, Connection &connection,

   // Check currently attached databases
   for (const auto &db_ref : databases) {
-    auto &db = db_ref.get();
-    if (db.IsTemporary()) {
+#if DUCKDB_VERSION_AT_MOST(1, 3, 2)
+    auto &db_instance = db_ref.get();
+#else
+    auto &db_instance = *db_ref;
+#endif
+    if (db_instance.IsTemporary()) {
       continue; // ignore temp databases
     }

-    db_oids.insert(db.oid);
-    auto &catalog = db.GetCatalog();
+    db_oids.insert(db_instance.oid);
+    auto &catalog = db_instance.GetCatalog();
     auto current_version = catalog.GetCatalogVersion(context);
-    auto last_version_it = last_state.db_to_catalog_version.find(db.oid);
+    auto last_version_it = last_state.db_to_catalog_version.find(db_instance.oid);
     if (last_version_it == last_state.db_to_catalog_version.end() // first time
         || !(last_version_it->second == current_version)) { // updated
       has_change = true;
-      last_state.db_to_catalog_version[db.oid] = current_version;
+      last_state.db_to_catalog_version[db_instance.oid] = current_version;
     }
   }

@@ -62,6 +68,12 @@ void Watcher::Watch() {
       break; // DB went away, nothing to watch
     }

+    if (watched_database == nullptr) {
+      watched_database = db.get();
+    } else if (watched_database != db.get()) {
+      break; // DB changed, stop watching, will be restarted
+    }
+
    duckdb::Connection con{*db};
     auto polling_interval = GetPollingInterval(*con.context);
     if (polling_interval == 0) {
@@ -1,22 +0,0 @@
-cmake_minimum_required(VERSION 3.5...3.31.5)
-
-find_package(OpenSSL REQUIRED)
-
-project("Test ssl")
-set(CMAKE_CXX_STANDARD 11)
-
-add_definitions(-DNO_DUCKDB_RE2 -DCMAKE_BUILD_TYPE=Debug)
-
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0")
-
-include_directories(
-  ${PROJECT_SOURCE_DIR}/../third_party/httplib
-  ${PROJECT_SOURCE_DIR}/../duckdb/src/include)
-
-add_executable("test_ssl" "test_ssl.cc")
-
-target_link_libraries("test_ssl" OpenSSL::SSL OpenSSL::Crypto)
-
-install(TARGETS "test_ssl")
-
-# cmake -S . -G Ninja -B build && cmake --build build
@@ -1,16 +0,0 @@
-#define CPPHTTPLIB_OPENSSL_SUPPORT
-#include "httplib.hpp"
-
-using namespace duckdb_httplib_openssl;
-
-int main() {
-  Client client("https://ui.duckdb.org");
-  auto res = client.Get("/");
-  if (res) {
-    std::cout << "Status: " << res->status << std::endl;
-    std::cout << "Body: " << res->body.substr(0, 42) << "... (" << res->body.size() << ")" << std::endl;
-  } else {
-    std::cout << "Error: " << res.error() << std::endl;
-  }
-  return 0;
-}
third_party/httplib/httplib.hpp (vendored, 85 changed lines)

@@ -248,78 +248,8 @@ using socket_t = int;
 #include <thread>
 #include <unordered_map>
 #include <unordered_set>

-#ifdef NO_DUCKDB_RE2
-namespace duckdb_re2 {
-enum class RegexOptions : uint8_t { NONE, CASE_INSENSITIVE };
-class Regex {
-public:
-  explicit Regex(const std::string &pattern, RegexOptions options = RegexOptions::NONE): re(pattern) {}
-  explicit Regex(const char *pattern, RegexOptions options = RegexOptions::NONE) : Regex(std::string(pattern)) {
-  }
-  // const duckdb_re2::RE2 &GetRegex() const {
-  //   return *regex;
-  // }
-  std::regex re;
-};
-struct GroupMatch {
-  std::string text;
-  uint32_t position;
-
-  const std::string &str() const { // NOLINT
-    return text;
-  }
-  operator std::string() const { // NOLINT: allow implicit cast
-    return text;
-  }
-};
-
-struct Match {
-
-  GroupMatch GetGroup(uint64_t index) {
-    return {str(index), static_cast<uint32_t>(position(index))};
-  }
-
-  std::string str(uint64_t index) { // NOLINT
-    return m.str(index);
-  }
-
-  uint64_t position(uint64_t index) { // NOLINT
-    return m.position(index);
-  }
-
-  uint64_t length(uint64_t index) { // NOLINT
-    throw std::runtime_error("uint64_t length(uint64_t index) - NA");
-  }
-
-  GroupMatch operator[](uint64_t i) {
-    return GetGroup(i);
-  }
-  std::cmatch m;
-};
-
-bool RegexSearch(const std::string &input, Match &match, const Regex &regex) {
-  throw std::runtime_error("bool RegexSearch(const std::string &input, Match &match, const Regex &regex) - NA");
-}
-
-bool RegexMatch(const std::string &input, Match &match, const Regex &regex) {
-  return std::regex_match(input.c_str(), match.m, regex.re);
-}
-
-bool RegexMatch(const char *start, const char *end, Match &match, const Regex &regex) {
-  throw std::runtime_error("bool RegexMatch(const char *start, const char *end, Match &match, const Regex &regex) - NA");
-}
-
-bool RegexMatch(const std::string &input, const Regex &regex) {
-  std::cmatch m;
-  return std::regex_match(input.c_str(), m, regex.re);
-}
-}
-
-#else
 #include "duckdb/common/re2_regex.hpp"
 #include "duckdb/common/random_engine.hpp"
-#endif

 #ifdef CPPHTTPLIB_OPENSSL_SUPPORT
 #ifdef _WIN32
@@ -4713,9 +4643,6 @@ inline std::string to_lower(const char *beg, const char *end) {
 }

 inline std::string make_multipart_data_boundary() {
-#ifdef NO_DUCKDB_RE2
-  throw std::runtime_error("NA");
-#else
   static const char data[] =
       "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";

@@ -4726,7 +4653,6 @@ inline std::string make_multipart_data_boundary() {
   }

   return result;
-#endif
 }

 inline bool is_multipart_boundary_chars_valid(const std::string &boundary) {
@@ -5183,7 +5109,6 @@ inline bool parse_www_authenticate(const Response &res,
   if (type == "Basic") {
     return false;
   } else if (type == "Digest") {
-#ifndef NO_DUCKDB_RE2
     s = s.substr(pos + 1);
     auto matches = duckdb_re2::RegexFindAll(s, re);
     for (auto &m : matches) {
@@ -5197,9 +5122,6 @@ inline bool parse_www_authenticate(const Response &res,
       auth[key] = val;
     }
     return true;
-#else
-    throw std::runtime_error("parse_www_authenticate- NA");
-#endif
   }
 }
 }
@@ -8244,10 +8166,6 @@ inline SSL *ssl_new(socket_t sock, SSL_CTX *ctx, std::mutex &ctx_mutex,
 }

 if (ssl) {
-#ifdef NO_DUCKDB_RE2
-  SSL_set_msg_callback(ssl, SSL_trace);
-  SSL_set_msg_callback_arg(ssl, BIO_new_fp(stdout, 0));
-#endif
   set_nonblocking(sock, true);
   auto bio = BIO_new_socket(static_cast<int>(sock), BIO_NOCLOSE);
   BIO_set_nbio(bio, 1);
@@ -8760,7 +8678,6 @@ inline bool SSLClient::initialize_ssl(Socket &socket, Error &error) {
   verify_result_ = SSL_get_verify_result(ssl2);

   if (verify_result_ != X509_V_OK) {
-    std::cerr << "SSL_get_verify_result failed: " << verify_result_ << std::endl;
     error = Error::SSLServerVerification;
     return false;
   }
@@ -8769,12 +8686,10 @@ inline bool SSLClient::initialize_ssl(Socket &socket, Error &error) {

   if (server_cert == nullptr) {
     error = Error::SSLServerVerification;
-    std::cerr << "SSL_get1_peer_certificate failed" << std::endl;
     return false;
   }

   if (!verify_host(server_cert)) {
-    std::cerr << "verify_host failed" << std::endl;
     X509_free(server_cert);
     error = Error::SSLServerVerification;
     return false;
ts/.gitignore (vendored, new file, 3 lines)

node_modules
**/out/*
**/test/tsconfig.tsbuildinfo
ts/.prettierignore (new file, 3 lines)

out
**/*.json
README.md
ts/.prettierrc (new file, 3 lines)

{
  "singleQuote": true
}
ts/README.md (new file, 55 lines)

# TypeScript Workspace

## Structure

This directory is a [pnpm workspace](https://pnpm.io/workspaces). Use the [pnpm](https://pnpm.io/) package manager, not npm or yarn.

One (recommended) way to install pnpm is using [corepack](https://pnpm.io/installation#using-corepack).

## Build

Run `pnpm install` (or just `pnpm i`) in a package directory to install dependencies and build. Note that this will also build dependent packages in this workspace. This builds src files, but not test files.

Run `pnpm build` to just run the build. This will not build dependencies. It will build both src and test files. To build just src or just test, use `pnpm build:src` or `pnpm build:test`.

Run `pnpm build:watch` in a package to rebuild (both src and test files) when source files are changed.

Run `pnpm check` in a package to check formatting and linting rules. To just check formatting, run `pnpm format:check`. To correct formatting, run `pnpm format:write`. To just check linting rules, run `pnpm lint`.

Run `pnpm clean` in a package to remove its built output files.

Run `pnpm build` at the root of the workspace to build all packages (both src and test files).

Run `pnpm build:watch` at the root to rebuild (only) relevant packages when source files are changed.

Run `pnpm check` at the root of the workspace to check formatting and linting rules in all packages.

## Test

Run `pnpm test` in a package directory to run its tests.

Run `pnpm test:watch` in a package directory to run its tests and rerun them when source files change.

Tests use [vitest](https://vitest.dev/), either in Node or in [Browser Mode](https://vitest.dev/guide/browser.html) (using Chrome), depending on the package.

Run `pnpm test` at the root of the workspace to test all packages.

## Create

To create a new package, add a directory under `packages`.

Add a `package.json` file following the conventions of other packages.

The `package.json` should have `preinstall`, `build`, `clean`, and `test` scripts, as well as `check`, `format`, and `lint` scripts. See existing packages for details.
It should have a `name`, `version`, and `description`, set `"type": "module"`, and set `main`, `module`, and `types` appropriately.

Production source code should go in a `src` subdirectory.
Put a `tsconfig.json` in this directory that extends `tsconfig.library.json` and sets the `outDir` to `../out`.

Test source code should go in a `test` subdirectory.
Put a `tsconfig.json` in this directory that extends `tsconfig.test.json` and references `../src`.

For browser-based tests, create a `vite.config.js` file (see the sketch below), enable `browser` mode, set the `headless` option to `true`, and set the `type` to `chrome`.
Note that `crossOriginIsolated` can be enabled by setting server headers. See the example in `wasm-extension`.

Add references to both the `src` and `test` directories of your new package to the root `tsconfig.json` of the workspace.
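For a browser-tested package, that `vite.config.js` might look like the following sketch. The option names follow vitest's browser mode and may differ between vitest versions; the COOP/COEP headers are the usual way to get `crossOriginIsolated`, per the `wasm-extension` note above.

```ts
// vite.config.js sketch (TypeScript syntax shown); keys are assumptions
// based on vitest's browser-mode docs, not copied from this repo.
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    browser: {
      enabled: true, // browser mode, per the conventions above
      headless: true,
      name: 'chrome', // the "type" the README refers to (assumption)
    },
  },
  server: {
    headers: {
      // These headers enable crossOriginIsolated.
      'Cross-Origin-Opener-Policy': 'same-origin',
      'Cross-Origin-Embedder-Policy': 'require-corp',
    },
  },
});
```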
ts/eslint.config.mjs (new file, 21 lines)

// @ts-check

import eslint from '@eslint/js';
import tseslint from 'typescript-eslint';

export default tseslint.config(
  eslint.configs.recommended,
  ...tseslint.configs.recommended,
  {
    rules: {
      '@typescript-eslint/no-unused-vars': [
        'error',
        {
          argsIgnorePattern: '^_',
          varsIgnorePattern: '^_',
          caughtErrorsIgnorePattern: '^_',
        },
      ],
    },
  },
);
ts/package.json (new file, 19 lines)

{
  "private": true,
  "scripts": {
    "build": "tsc -b",
    "build:watch": "tsc -b --watch",
    "check": "pnpm -r check",
    "test": "pnpm -r test"
  },
  "devDependencies": {
    "typescript": "^5.8.3"
  },
  "pnpm": {
    "overrides": {
      "tar-fs": "^3.0.8",
      "ws": "^8.18.1"
    }
  },
  "packageManager": "pnpm@9.15.2+sha512.93e57b0126f0df74ce6bff29680394c0ba54ec47246b9cf321f0121d8d9bb03f750a705f24edc3c1180853afd7c2c3b94196d0a3d53d3e069d9e2793ef11f321"
}
ts/pkgs/duckdb-data-reader/package.json (new file, 38 lines)

{
  "name": "@duckdb/data-reader",
  "version": "0.0.1",
  "description": "Utilities for representing and reading tabular data returned by DuckDB",
  "type": "module",
  "main": "./out/index.js",
  "module": "./out/index.js",
  "types": "./out/index.d.ts",
  "scripts": {
    "preinstall": "pnpm build:src",
    "build": "tsc -b src test",
    "build:src": "tsc -b src",
    "build:test": "tsc -b test",
    "build:watch": "tsc -b src test --watch",
    "check": "pnpm format:check && pnpm lint",
    "clean": "rimraf out",
    "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check",
    "format:write": "prettier . --ignore-path $(find-up .prettierignore) --write",
    "lint": "pnpm eslint src test",
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "dependencies": {
    "@duckdb/data-types": "workspace:*",
    "@duckdb/data-values": "workspace:*"
  },
  "devDependencies": {
    "@eslint/js": "^9.32.0",
    "eslint": "^9.32.0",
    "find-up-cli": "^6.0.0",
    "prettier": "^3.6.2",
    "rimraf": "^6.0.1",
    "typescript": "^5.8.3",
    "typescript-eslint": "^8.38.0",
    "vite": "^6.3.6",
    "vitest": "^3.2.4"
  }
}
(new file, 11 lines)

import { DuckDBData } from './DuckDBData.js';

export type DuckDBDataBatchIteratorResult = IteratorResult<
  DuckDBData,
  DuckDBData | undefined
>;

export type AsyncDuckDBDataBatchIterator = AsyncIterator<
  DuckDBData,
  DuckDBData | undefined
>;
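Per the types above, each `next()` yields a `DuckDBData` batch, and the iterator's final `return` value (if present) is also a batch. A hypothetical consumer, just for illustration, looks like a plain async iterator loop:

```ts
import { DuckDBData } from './DuckDBData.js';

// Hypothetical consumer of an AsyncDuckDBDataBatchIterator: drains the
// iterator, counting rows across batches.
async function countRows(
  it: AsyncIterator<DuckDBData, DuckDBData | undefined>,
): Promise<number> {
  let total = 0;
  let result = await it.next();
  while (!result.done) {
    total += result.value.rowCount;
    result = await it.next();
  }
  if (result.value) {
    total += result.value.rowCount; // trailing batch returned at completion
  }
  return total;
}
```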
ts/pkgs/duckdb-data-reader/src/ColumnFilteredDuckDBData.ts (new file, 55 lines)

import { DuckDBType } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { DuckDBData } from './DuckDBData.js';

export class ColumnFilteredDuckDBData extends DuckDBData {
  private readonly inputColumnIndexForOutputColumnIndex: readonly number[];

  constructor(
    private data: DuckDBData,
    columnVisibility: readonly boolean[],
  ) {
    super();

    const inputColumnIndexForOutputColumnIndex: number[] = [];
    const inputColumnCount = data.columnCount;
    let inputIndex = 0;
    while (inputIndex < inputColumnCount) {
      while (inputIndex < inputColumnCount && !columnVisibility[inputIndex]) {
        inputIndex++;
      }
      if (inputIndex < inputColumnCount) {
        inputColumnIndexForOutputColumnIndex.push(inputIndex++);
      }
    }
    this.inputColumnIndexForOutputColumnIndex =
      inputColumnIndexForOutputColumnIndex;
  }

  get columnCount() {
    return this.inputColumnIndexForOutputColumnIndex.length;
  }

  get rowCount() {
    return this.data.rowCount;
  }

  columnName(columnIndex: number): string {
    return this.data.columnName(
      this.inputColumnIndexForOutputColumnIndex[columnIndex],
    );
  }

  columnType(columnIndex: number): DuckDBType {
    return this.data.columnType(
      this.inputColumnIndexForOutputColumnIndex[columnIndex],
    );
  }

  value(columnIndex: number, rowIndex: number): DuckDBValue {
    return this.data.value(
      this.inputColumnIndexForOutputColumnIndex[columnIndex],
      rowIndex,
    );
  }
}
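A usage sketch for `ColumnFilteredDuckDBData`: hide columns by passing a visibility mask aligned with the input's column indexes. The `data` value here stands in for any concrete `DuckDBData`.

```ts
import { ColumnFilteredDuckDBData } from './ColumnFilteredDuckDBData.js';
import { DuckDBData } from './DuckDBData.js';

// Stand-in for any concrete DuckDBData, e.g. with columns [id, name, secret].
declare const data: DuckDBData;

// Keep columns 0 and 1, hide column 2; rows pass through unchanged.
const filtered = new ColumnFilteredDuckDBData(data, [true, true, false]);
filtered.columnCount; // 2
filtered.columnName(1); // the input's column 1 name
filtered.value(1, 0); // same as data.value(1, 0)
```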
114
ts/pkgs/duckdb-data-reader/src/DuckDBData.ts
Normal file
@@ -0,0 +1,114 @@
import { DuckDBType } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { DuckDBRow } from './DuckDBRow.js';

/**
 * A two-dimensional table of data along with column metadata.
 *
 * May represent either a partial or full result set, or a batch of rows read from a result stream.
 */
export abstract class DuckDBData {
  /**
   * Number of columns.
   *
   * May be zero until the first part of the result is read. Will not change after the initial read.
   */
  abstract get columnCount(): number;

  /**
   * Current number of rows.
   *
   * For a partial result set, this may change as more rows are read.
   * For a full result, or a batch, this will not change.
   */
  abstract get rowCount(): number;

  /**
   * Returns the name of the column at the given index (starting at zero).
   *
   * Note that duplicate column names are possible.
   */
  abstract columnName(columnIndex: number): string;

  /**
   * Returns the type of the column at the given index (starting at zero).
   */
  abstract columnType(columnIndex: number): DuckDBType;

  /**
   * Returns the value for the given column and row. Both are zero-indexed.
   */
  abstract value(columnIndex: number, rowIndex: number): DuckDBValue;

  /**
   * Returns the single value, assuming exactly one column and row. Throws otherwise.
   */
  singleValue(): DuckDBValue {
    const { columnCount, rowCount } = this;
    if (columnCount === 0) {
      throw Error('no column data');
    }
    if (rowCount === 0) {
      throw Error('no rows');
    }
    if (columnCount > 1) {
      throw Error('more than one column');
    }
    if (rowCount > 1) {
      throw Error('more than one row');
    }
    return this.value(0, 0);
  }

  /**
   * Returns the column names as an array.
   */
  columnNames(): readonly string[] {
    const { columnCount } = this;
    const outputColumnNames: string[] = [];
    for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) {
      outputColumnNames.push(this.columnName(columnIndex));
    }
    return outputColumnNames;
  }

  /**
   * Returns the column names as an array, deduplicated following DuckDB's "Auto-Increment Duplicate Column Names"
   * behavior.
   */
  deduplicatedColumnNames(): readonly string[] {
    const { columnCount } = this;
    const outputColumnNames: string[] = [];
    const columnNameCount: { [columnName: string]: number } = {};
    for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) {
      const inputColumnName = this.columnName(columnIndex);
      const nameCount = (columnNameCount[inputColumnName] || 0) + 1;
      columnNameCount[inputColumnName] = nameCount;
      if (nameCount > 1) {
        outputColumnNames.push(`${inputColumnName}:${nameCount - 1}`);
      } else {
        outputColumnNames.push(inputColumnName);
      }
    }
    return outputColumnNames;
  }

  /**
   * Returns the data as an array of row objects, keyed by column names.
   *
   * The column names are deduplicated following DuckDB's "Auto-Increment Duplicate Column Names" behavior.
   */
  toRows(): readonly DuckDBRow[] {
    const { rowCount, columnCount } = this;
    const outputColumnNames = this.deduplicatedColumnNames();
    const outputRows: DuckDBRow[] = [];
    for (let rowIndex = 0; rowIndex < rowCount; rowIndex++) {
      const row: { [columnName: string]: DuckDBValue } = {};
      for (let columnIndex = 0; columnIndex < columnCount; columnIndex++) {
        row[outputColumnNames[columnIndex]] = this.value(columnIndex, rowIndex);
      }
      outputRows.push(row);
    }
    return outputRows;
  }
}
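A small illustration of the deduplication behavior (a sketch using the `MemoryDuckDBData` class from this change): two columns both named `x` come back as `x` and `x:1`.

```ts
import { INTEGER } from '@duckdb/data-types';
import { MemoryDuckDBData } from './MemoryDuckDBData.js';

const data = new MemoryDuckDBData(
  [
    { name: 'x', type: INTEGER },
    { name: 'x', type: INTEGER },
  ],
  [[1], [2]],
);
// data.deduplicatedColumnNames() returns ['x', 'x:1']
// data.toRows() returns [{ x: 1, 'x:1': 2 }]
```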
184
ts/pkgs/duckdb-data-reader/src/DuckDBDataReader.ts
Normal file
@@ -0,0 +1,184 @@
import { DuckDBType } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { AsyncDuckDBDataBatchIterator } from './AsyncDuckDBDataBatchIterator.js';
import { DuckDBData } from './DuckDBData.js';

// Stores information about a run of similarly-sized batches.
interface BatchSizeRun {
  batchCount: number;
  batchSize: number;
  rowCount: number; // Always equal to batchCount * batchSize. Precalculated for efficiency.
}

/**
 * A result set that can be read incrementally.
 *
 * Represents either a partial or full result.
 * For full results, the `done` property will be true.
 * To read more rows into a partial result, use the `readUntil` or `readAll` methods.
 */
export class DuckDBDataReader extends DuckDBData {
  private readonly iterator: AsyncDuckDBDataBatchIterator;

  private iteratorDone: boolean = false;

  private totalRowsRead: number = 0;

  private readonly batches: DuckDBData[] = [];

  // Stores the sizes of the batches using run-length encoding to make lookup efficient.
  // Since batches before the last should be a consistent size, this array is not expected to grow beyond length 2.
  // (One run for the N-1 batches of consistent size, plus one run for the differently-sized last batch, if any.)
  private readonly batchSizeRuns: BatchSizeRun[] = [];

  constructor(iterator: AsyncDuckDBDataBatchIterator) {
    super();
    this.iterator = iterator;
  }

  /**
   * Number of columns.
   *
   * Will be zero until the first part of the result is read. Will not change after the initial read.
   */
  public get columnCount(): number {
    if (this.batches.length === 0) {
      return 0;
    }
    return this.batches[0].columnCount;
  }

  /**
   * Current number of rows.
   *
   * For a partial result set, with `done` false, this may change as more rows are read.
   * For a full result, with `done` true, this will not change.
   */
  public get rowCount(): number {
    return this.totalRowsRead;
  }

  /**
   * Returns the name of the column at the given index (starting at zero).
   *
   * Note that duplicate column names are possible.
   *
   * Will throw an error if no part of the result has been read yet.
   */
  public columnName(columnIndex: number): string {
    if (this.batches.length === 0) {
      throw Error('no column data');
    }
    return this.batches[0].columnName(columnIndex);
  }

  /**
   * Returns the type of the column at the given index (starting at zero).
   *
   * Will throw an error if no part of the result has been read yet.
   */
  public columnType(columnIndex: number): DuckDBType {
    if (this.batches.length === 0) {
      throw Error('no column data');
    }
    return this.batches[0].columnType(columnIndex);
  }

  /**
   * Returns the value for the given column and row. Both are zero-indexed.
   *
   * Will throw an error if `rowIndex` is not less than the current `rowCount`.
   */
  public value(columnIndex: number, rowIndex: number): DuckDBValue {
    if (this.totalRowsRead === 0) {
      throw Error('no data');
    }
    let batchIndex = 0;
    let currentRowIndex = rowIndex;
    // Find which run of batches our row is in.
    // Since batchSizeRuns shouldn't ever be longer than 2, this should be O(1).
    for (const run of this.batchSizeRuns) {
      if (currentRowIndex < run.rowCount) {
        // The row we're looking for is in this run.
        // Calculate the batch index and the row index in that batch.
        batchIndex += Math.floor(currentRowIndex / run.batchSize);
        if (batchIndex < 0 || batchIndex >= this.batches.length) {
          throw new Error(
            `DuckDBDataReader with ${this.batches.length} batches calculated out-of-range batch index: ${batchIndex} (columnIndex=${columnIndex}, rowIndex=${rowIndex})`,
          );
        }
        const rowIndexInBatch = currentRowIndex % run.batchSize;
        const batch = this.batches[batchIndex];
        return batch.value(columnIndex, rowIndexInBatch);
      }
      // The row we're looking for is not in this run.
      // Update our counts for this run and move to the next one.
      batchIndex += run.batchCount;
      currentRowIndex -= run.rowCount;
    }
    // We didn't find our row. It must have been out of range.
    throw Error(
      `Row index ${rowIndex} requested, but only ${this.totalRowsRead} rows have been read so far.`,
    );
  }

  /**
   * Returns true if all rows have been read.
   */
  public get done(): boolean {
    return this.iteratorDone;
  }

  /**
   * Read all rows.
   */
  public async readAll(): Promise<void> {
    return this.read();
  }

  /**
   * Read rows until at least the given target row count has been met.
   *
   * Note that the resulting row count could be greater than the target, since rows are read in batches, typically of 2048 rows each.
   */
  public async readUntil(targetRowCount: number): Promise<void> {
    return this.read(targetRowCount);
  }

  private async read(targetRowCount?: number): Promise<void> {
    while (
      !(
        this.iteratorDone ||
        (targetRowCount !== undefined && this.totalRowsRead >= targetRowCount)
      )
    ) {
      const { value, done } = await this.iterator.next();
      if (value) {
        this.updateBatchSizeRuns(value);
        this.batches.push(value);
        this.totalRowsRead += value.rowCount;
      }
      if (done) {
        this.iteratorDone = done;
      }
    }
  }

  private updateBatchSizeRuns(batch: DuckDBData) {
    if (this.batchSizeRuns.length > 0) {
      const lastRun = this.batchSizeRuns[this.batchSizeRuns.length - 1];
      if (lastRun.batchSize === batch.rowCount) {
        // If the new batch is the same size as the last one, just update our last run.
        lastRun.batchCount += 1;
        lastRun.rowCount += lastRun.batchSize;
        return;
      }
    }
    // If this is our first batch, or it's a different size, create a new run.
    this.batchSizeRuns.push({
      batchCount: 1,
      batchSize: batch.rowCount,
      rowCount: batch.rowCount,
    });
  }
}
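A brief sketch of the incremental-read flow, assuming a `source` iterator obtained elsewhere (for example, from a streaming query result):

```ts
import { DuckDBValue } from '@duckdb/data-values';
import { AsyncDuckDBDataBatchIterator } from './AsyncDuckDBDataBatchIterator.js';
import { DuckDBDataReader } from './DuckDBDataReader.js';

async function firstRows(source: AsyncDuckDBDataBatchIterator) {
  const reader = new DuckDBDataReader(source);
  // Buffer batches until at least 100 rows are available (or the stream ends).
  await reader.readUntil(100);
  // rowCount may overshoot the target, since whole batches are buffered.
  const limit = Math.min(reader.rowCount, 100);
  const rows: DuckDBValue[][] = [];
  for (let rowIndex = 0; rowIndex < limit; rowIndex++) {
    const row: DuckDBValue[] = [];
    for (let columnIndex = 0; columnIndex < reader.columnCount; columnIndex++) {
      row.push(reader.value(columnIndex, rowIndex));
    }
    rows.push(row);
  }
  return rows;
}
```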
5
ts/pkgs/duckdb-data-reader/src/DuckDBRow.ts
Normal file
@@ -0,0 +1,5 @@
import { DuckDBValue } from '@duckdb/data-values';

export interface DuckDBRow {
  readonly [columnName: string]: DuckDBValue;
}
32
ts/pkgs/duckdb-data-reader/src/MemoryDuckDBData.ts
Normal file
@@ -0,0 +1,32 @@
import { DuckDBType } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { DuckDBData } from './DuckDBData.js';

export class MemoryDuckDBData extends DuckDBData {
  constructor(
    private columns: { name: string; type: DuckDBType }[],
    private values: DuckDBValue[][],
  ) {
    super();
  }

  get columnCount() {
    return this.columns.length;
  }

  get rowCount() {
    return this.values.length > 0 ? this.values[0].length : 0;
  }

  columnName(columnIndex: number): string {
    return this.columns[columnIndex].name;
  }

  columnType(columnIndex: number): DuckDBType {
    return this.columns[columnIndex].type;
  }

  value(columnIndex: number, rowIndex: number): DuckDBValue {
    return this.values[columnIndex][rowIndex];
  }
}
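Note that `values` is column-major, one inner array per column; both `rowCount` (which measures `values[0]`) and `value` (which indexes `[columnIndex][rowIndex]`) assume this layout:

```ts
import { INTEGER, VARCHAR } from '@duckdb/data-types';
import { MemoryDuckDBData } from './MemoryDuckDBData.js';

// Two columns, three rows: each inner array is one column, not one row.
const data = new MemoryDuckDBData(
  [
    { name: 'num', type: INTEGER },
    { name: 'str', type: VARCHAR },
  ],
  [
    [2, 3, 5],
    ['z', 'y', 'x'],
  ],
);
// data.rowCount === 3
// data.value(1, 0) === 'z'
```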
6
ts/pkgs/duckdb-data-reader/src/index.ts
Normal file
@@ -0,0 +1,6 @@
export * from './AsyncDuckDBDataBatchIterator.js';
export * from './ColumnFilteredDuckDBData.js';
export * from './DuckDBData.js';
export * from './DuckDBDataReader.js';
export * from './DuckDBRow.js';
export * from './MemoryDuckDBData.js';
6
ts/pkgs/duckdb-data-reader/src/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.library.json",
  "compilerOptions": {
    "outDir": "../out"
  }
}
177
ts/pkgs/duckdb-data-reader/test/DuckDBDataReader.test.ts
Normal file
@@ -0,0 +1,177 @@
import { DuckDBType, INTEGER, VARCHAR } from '@duckdb/data-types';
import { DuckDBValue } from '@duckdb/data-values';
import { expect, suite, test } from 'vitest';
import {
  AsyncDuckDBDataBatchIterator,
  DuckDBData,
  DuckDBDataReader,
  MemoryDuckDBData,
} from '../src';

const ITERATOR_DONE = Object.freeze({ done: true, value: undefined });

class TestAsyncDuckDBDataBatchIterator implements AsyncDuckDBDataBatchIterator {
  private batches: readonly DuckDBData[];

  private nextBatchIndex: number | null;

  constructor(batches: readonly DuckDBData[]) {
    this.batches = batches;
    this.nextBatchIndex = this.batches.length > 0 ? 0 : null;
  }

  async next(): Promise<IteratorResult<DuckDBData, undefined>> {
    if (this.nextBatchIndex == null) {
      return ITERATOR_DONE;
    }
    const nextBatch = this.batches[this.nextBatchIndex++];
    if (this.nextBatchIndex >= this.batches.length) {
      this.nextBatchIndex = null;
    }
    return {
      done: this.nextBatchIndex == null,
      value: nextBatch,
    } as IteratorResult<DuckDBData, undefined>;
  }

  async return(): Promise<IteratorResult<DuckDBData, undefined>> {
    return ITERATOR_DONE;
  }

  async throw(_err: Error): Promise<IteratorResult<DuckDBData, undefined>> {
    return ITERATOR_DONE;
  }

  [Symbol.asyncIterator](): AsyncDuckDBDataBatchIterator {
    return this;
  }
}

function expectColumns(
  data: DuckDBData,
  columns: { name: string; type: DuckDBType }[],
) {
  expect(data.columnCount).toBe(columns.length);
  for (let columnIndex = 0; columnIndex < columns.length; columnIndex++) {
    const column = columns[columnIndex];
    expect(data.columnName(columnIndex)).toBe(column.name);
    expect(data.columnType(columnIndex)).toStrictEqual(column.type);
  }
}

function expectValues(data: DuckDBData, values: DuckDBValue[][]) {
  for (let columnIndex = 0; columnIndex < values.length; columnIndex++) {
    const column = values[columnIndex];
    for (let rowIndex = 0; rowIndex < column.length; rowIndex++) {
      expect(data.value(columnIndex, rowIndex)).toBe(column[rowIndex]);
    }
  }
}

suite('DuckDBDataReader', () => {
  test('should work for an empty batch list', async () => {
    const batches: DuckDBData[] = [];
    const iterator = new TestAsyncDuckDBDataBatchIterator(batches);
    const reader = new DuckDBDataReader(iterator);
    expect(reader.done).toBe(false);
    expect(reader.columnCount).toBe(0);
    expect(reader.rowCount).toBe(0);
    await reader.readAll();
    expect(reader.done).toBe(true);
    expect(reader.columnCount).toBe(0);
    expect(reader.rowCount).toBe(0);
  });
  test('should work for a single batch', async () => {
    const columns = [
      { name: 'num', type: INTEGER },
      { name: 'str', type: VARCHAR },
    ];
    const values = [
      [2, 3, 5],
      ['z', 'y', 'x'],
    ];
    const batches: DuckDBData[] = [new MemoryDuckDBData(columns, values)];
    const iterator = new TestAsyncDuckDBDataBatchIterator(batches);
    const reader = new DuckDBDataReader(iterator);
    expect(reader.done).toBe(false);
    expect(reader.columnCount).toBe(0);
    expect(reader.rowCount).toBe(0);
    await reader.readAll();
    expect(reader.done).toBe(true);
    expectColumns(reader, columns);
    expect(reader.rowCount).toBe(3);
    expectValues(reader, values);
  });
  test('should work for multiple batches', async () => {
    const columns = [
      { name: 'num', type: INTEGER },
      { name: 'str', type: VARCHAR },
    ];
    const values = [
      [12, 13, 15, 22, 23, 25, 32, 33, 35],
      ['z1', 'y1', 'x1', 'z2', 'y2', 'x2', 'z3', 'y3', 'x3'],
    ];
    const batches: DuckDBData[] = [
      new MemoryDuckDBData(columns, [
        values[0].slice(0, 3),
        values[1].slice(0, 3),
      ]),
      new MemoryDuckDBData(columns, [
        values[0].slice(3, 6),
        values[1].slice(3, 6),
      ]),
      new MemoryDuckDBData(columns, [
        values[0].slice(6, 9),
        values[1].slice(6, 9),
      ]),
    ];
    const iterator = new TestAsyncDuckDBDataBatchIterator(batches);
    const reader = new DuckDBDataReader(iterator);
    expect(reader.done).toBe(false);
    expect(reader.columnCount).toBe(0);
    expect(reader.rowCount).toBe(0);
    await reader.readAll();
    expect(reader.done).toBe(true);
    expectColumns(reader, columns);
    expect(reader.rowCount).toBe(9);
    expectValues(reader, values);
  });
  test('should work for partial reads of multiple batches', async () => {
    const columns = [
      { name: 'num', type: INTEGER },
      { name: 'str', type: VARCHAR },
    ];
    const values = [
      [12, 13, 15, 22, 23, 25, 32, 33],
      ['z1', 'y1', 'x1', 'z2', 'y2', 'x2', 'z3', 'y3'],
    ];
    const batches: DuckDBData[] = [
      new MemoryDuckDBData(columns, [
        values[0].slice(0, 3),
        values[1].slice(0, 3),
      ]),
      new MemoryDuckDBData(columns, [
        values[0].slice(3, 6),
        values[1].slice(3, 6),
      ]),
      new MemoryDuckDBData(columns, [
        values[0].slice(6, 8),
        values[1].slice(6, 8),
      ]),
    ];
    const iterator = new TestAsyncDuckDBDataBatchIterator(batches);
    const reader = new DuckDBDataReader(iterator);
    expect(reader.done).toBe(false);
    expect(reader.columnCount).toBe(0);
    expect(reader.rowCount).toBe(0);
    await reader.readUntil(5);
    expect(reader.done).toBe(false);
    expectColumns(reader, columns);
    expect(reader.rowCount).toBe(6);
    expectValues(reader, [values[0].slice(0, 6), values[1].slice(0, 6)]);
    await reader.readUntil(10);
    expect(reader.done).toBe(true);
    expect(reader.rowCount).toBe(8);
    expectValues(reader, values);
  });
});
6
ts/pkgs/duckdb-data-reader/test/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.test.json",
  "references": [
    { "path": "../src" }
  ]
}
37
ts/pkgs/duckdb-data-types/package.json
Normal file
@@ -0,0 +1,37 @@
{
  "name": "@duckdb/data-types",
  "version": "0.0.1",
  "description": "Utilities for representing DuckDB types",
  "type": "module",
  "main": "./out/index.js",
  "module": "./out/index.js",
  "types": "./out/index.d.ts",
  "scripts": {
    "preinstall": "pnpm build:src",
    "build": "tsc -b src test",
    "build:src": "tsc -b src",
    "build:test": "tsc -b test",
    "build:watch": "tsc -b src test --watch",
    "check": "pnpm format:check && pnpm lint",
    "clean": "rimraf out",
    "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check",
    "format:write": "prettier . --ignore-path $(find-up .prettierignore) --write",
    "lint": "pnpm eslint src test",
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "dependencies": {
    "@duckdb/data-values": "workspace:*"
  },
  "devDependencies": {
    "@eslint/js": "^9.32.0",
    "eslint": "^9.32.0",
    "find-up-cli": "^6.0.0",
    "prettier": "^3.6.2",
    "rimraf": "^6.0.1",
    "typescript": "^5.8.3",
    "typescript-eslint": "^8.38.0",
    "vite": "^6.3.6",
    "vitest": "^3.2.4"
  }
}
989
ts/pkgs/duckdb-data-types/src/DuckDBType.ts
Normal file
@@ -0,0 +1,989 @@
import { Json } from '@duckdb/data-values';
import { DuckDBTypeId } from './DuckDBTypeId.js';
import { quotedIdentifier, quotedString } from './sql.js';

export interface DuckDBTypeToStringOptions {
  short?: boolean;
}

export abstract class BaseDuckDBType<T extends DuckDBTypeId> {
  public readonly typeId: T;
  public readonly alias?: string;
  protected constructor(typeId: T, alias?: string) {
    this.typeId = typeId;
    this.alias = alias;
  }
  public toString(_options?: DuckDBTypeToStringOptions): string {
    return this.alias ?? DuckDBTypeId[this.typeId];
  }
  public toJson(): Json {
    return {
      typeId: this.typeId,
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
}
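For parameter-less scalar types, `toString` falls back to the `DuckDBTypeId` enum key, and an alias, when set, wins in both `toString` and `toJson`. A sketch using the `BOOLEAN` singleton defined just below:

```ts
import { BOOLEAN, DuckDBBooleanType } from './DuckDBType.js';

console.log(BOOLEAN.toString()); // 'BOOLEAN' (the DuckDBTypeId enum key)
console.log(BOOLEAN.toJson()); // { typeId: DuckDBTypeId.BOOLEAN } (a numeric enum value)

// An alias overrides the rendered name and is carried through toJson.
const aliased = DuckDBBooleanType.create('MY_BOOL');
console.log(aliased.toString()); // 'MY_BOOL'
console.log(aliased.toJson()); // includes alias: 'MY_BOOL'
```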

export class DuckDBBooleanType extends BaseDuckDBType<DuckDBTypeId.BOOLEAN> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.BOOLEAN, alias);
  }
  public static readonly instance = new DuckDBBooleanType();
  public static create(alias?: string): DuckDBBooleanType {
    return alias ? new DuckDBBooleanType(alias) : DuckDBBooleanType.instance;
  }
}
export const BOOLEAN = DuckDBBooleanType.instance;

export class DuckDBTinyIntType extends BaseDuckDBType<DuckDBTypeId.TINYINT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.TINYINT, alias);
  }
  public static readonly instance = new DuckDBTinyIntType();
  public static create(alias?: string): DuckDBTinyIntType {
    return alias ? new DuckDBTinyIntType(alias) : DuckDBTinyIntType.instance;
  }
  public static readonly Max = 2 ** 7 - 1;
  public static readonly Min = -(2 ** 7);
  public get max() {
    return DuckDBTinyIntType.Max;
  }
  public get min() {
    return DuckDBTinyIntType.Min;
  }
}
export const TINYINT = DuckDBTinyIntType.instance;

export class DuckDBSmallIntType extends BaseDuckDBType<DuckDBTypeId.SMALLINT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.SMALLINT, alias);
  }
  public static readonly instance = new DuckDBSmallIntType();
  public static create(alias?: string): DuckDBSmallIntType {
    return alias ? new DuckDBSmallIntType(alias) : DuckDBSmallIntType.instance;
  }
  public static readonly Max = 2 ** 15 - 1;
  public static readonly Min = -(2 ** 15);
  public get max() {
    return DuckDBSmallIntType.Max;
  }
  public get min() {
    return DuckDBSmallIntType.Min;
  }
}
export const SMALLINT = DuckDBSmallIntType.instance;

export class DuckDBIntegerType extends BaseDuckDBType<DuckDBTypeId.INTEGER> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.INTEGER, alias);
  }
  public static readonly instance = new DuckDBIntegerType();
  public static create(alias?: string): DuckDBIntegerType {
    return alias ? new DuckDBIntegerType(alias) : DuckDBIntegerType.instance;
  }
  public static readonly Max = 2 ** 31 - 1;
  public static readonly Min = -(2 ** 31);
  public get max() {
    return DuckDBIntegerType.Max;
  }
  public get min() {
    return DuckDBIntegerType.Min;
  }
}
export const INTEGER = DuckDBIntegerType.instance;

export class DuckDBBigIntType extends BaseDuckDBType<DuckDBTypeId.BIGINT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.BIGINT, alias);
  }
  public static readonly instance = new DuckDBBigIntType();
  public static create(alias?: string): DuckDBBigIntType {
    return alias ? new DuckDBBigIntType(alias) : DuckDBBigIntType.instance;
  }
  public static readonly Max: bigint = 2n ** 63n - 1n;
  public static readonly Min: bigint = -(2n ** 63n);
  public get max() {
    return DuckDBBigIntType.Max;
  }
  public get min() {
    return DuckDBBigIntType.Min;
  }
}
export const BIGINT = DuckDBBigIntType.instance;

export class DuckDBUTinyIntType extends BaseDuckDBType<DuckDBTypeId.UTINYINT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.UTINYINT, alias);
  }
  public static readonly instance = new DuckDBUTinyIntType();
  public static create(alias?: string): DuckDBUTinyIntType {
    return alias ? new DuckDBUTinyIntType(alias) : DuckDBUTinyIntType.instance;
  }
  public static readonly Max = 2 ** 8 - 1;
  public static readonly Min = 0;
  public get max() {
    return DuckDBUTinyIntType.Max;
  }
  public get min() {
    return DuckDBUTinyIntType.Min;
  }
}
export const UTINYINT = DuckDBUTinyIntType.instance;

export class DuckDBUSmallIntType extends BaseDuckDBType<DuckDBTypeId.USMALLINT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.USMALLINT, alias);
  }
  public static readonly instance = new DuckDBUSmallIntType();
  public static create(alias?: string): DuckDBUSmallIntType {
    return alias
      ? new DuckDBUSmallIntType(alias)
      : DuckDBUSmallIntType.instance;
  }
  public static readonly Max = 2 ** 16 - 1;
  public static readonly Min = 0;
  public get max() {
    return DuckDBUSmallIntType.Max;
  }
  public get min() {
    return DuckDBUSmallIntType.Min;
  }
}
export const USMALLINT = DuckDBUSmallIntType.instance;

export class DuckDBUIntegerType extends BaseDuckDBType<DuckDBTypeId.UINTEGER> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.UINTEGER, alias);
  }
  public static readonly instance = new DuckDBUIntegerType();
  public static create(alias?: string): DuckDBUIntegerType {
    return alias ? new DuckDBUIntegerType(alias) : DuckDBUIntegerType.instance;
  }
  public static readonly Max = 2 ** 32 - 1;
  public static readonly Min = 0;
  public get max() {
    return DuckDBUIntegerType.Max;
  }
  public get min() {
    return DuckDBUIntegerType.Min;
  }
}
export const UINTEGER = DuckDBUIntegerType.instance;

export class DuckDBUBigIntType extends BaseDuckDBType<DuckDBTypeId.UBIGINT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.UBIGINT, alias);
  }
  public static readonly instance = new DuckDBUBigIntType();
  public static create(alias?: string): DuckDBUBigIntType {
    return alias ? new DuckDBUBigIntType(alias) : DuckDBUBigIntType.instance;
  }
  public static readonly Max: bigint = 2n ** 64n - 1n;
  public static readonly Min: bigint = 0n;
  public get max() {
    return DuckDBUBigIntType.Max;
  }
  public get min() {
    return DuckDBUBigIntType.Min;
  }
}
export const UBIGINT = DuckDBUBigIntType.instance;

export class DuckDBFloatType extends BaseDuckDBType<DuckDBTypeId.FLOAT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.FLOAT, alias);
  }
  public static readonly instance = new DuckDBFloatType();
  public static create(alias?: string): DuckDBFloatType {
    return alias ? new DuckDBFloatType(alias) : DuckDBFloatType.instance;
  }
  public static readonly Max = Math.fround(3.4028235e38);
  public static readonly Min = Math.fround(-3.4028235e38);
  public get max() {
    return DuckDBFloatType.Max;
  }
  public get min() {
    return DuckDBFloatType.Min;
  }
}
export const FLOAT = DuckDBFloatType.instance;

export class DuckDBDoubleType extends BaseDuckDBType<DuckDBTypeId.DOUBLE> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.DOUBLE, alias);
  }
  public static readonly instance = new DuckDBDoubleType();
  public static create(alias?: string): DuckDBDoubleType {
    return alias ? new DuckDBDoubleType(alias) : DuckDBDoubleType.instance;
  }
  public static readonly Max = Number.MAX_VALUE;
  public static readonly Min = -Number.MAX_VALUE;
  public get max() {
    return DuckDBDoubleType.Max;
  }
  public get min() {
    return DuckDBDoubleType.Min;
  }
}
export const DOUBLE = DuckDBDoubleType.instance;

export class DuckDBTimestampType extends BaseDuckDBType<DuckDBTypeId.TIMESTAMP> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.TIMESTAMP, alias);
  }
  public static readonly instance = new DuckDBTimestampType();
  public static create(alias?: string): DuckDBTimestampType {
    return alias
      ? new DuckDBTimestampType(alias)
      : DuckDBTimestampType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get epoch() {
  //   return DuckDBTimestampValue.Epoch;
  // }
  // public get max() {
  //   return DuckDBTimestampValue.Max;
  // }
  // public get min() {
  //   return DuckDBTimestampValue.Min;
  // }
  // public get posInf() {
  //   return DuckDBTimestampValue.PosInf;
  // }
  // public get negInf() {
  //   return DuckDBTimestampValue.NegInf;
  // }
}
export const TIMESTAMP = DuckDBTimestampType.instance;

export type DuckDBTimestampMicrosecondsType = DuckDBTimestampType;
export const DuckDBTimestampMicrosecondsType = DuckDBTimestampType;

export class DuckDBDateType extends BaseDuckDBType<DuckDBTypeId.DATE> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.DATE, alias);
  }
  public static readonly instance = new DuckDBDateType();
  public static create(alias?: string): DuckDBDateType {
    return alias ? new DuckDBDateType(alias) : DuckDBDateType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get epoch() {
  //   return DuckDBDateValue.Epoch;
  // }
  // public get max() {
  //   return DuckDBDateValue.Max;
  // }
  // public get min() {
  //   return DuckDBDateValue.Min;
  // }
  // public get posInf() {
  //   return DuckDBDateValue.PosInf;
  // }
  // public get negInf() {
  //   return DuckDBDateValue.NegInf;
  // }
}
export const DATE = DuckDBDateType.instance;

export class DuckDBTimeType extends BaseDuckDBType<DuckDBTypeId.TIME> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.TIME, alias);
  }
  public static readonly instance = new DuckDBTimeType();
  public static create(alias?: string): DuckDBTimeType {
    return alias ? new DuckDBTimeType(alias) : DuckDBTimeType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get max() {
  //   return DuckDBTimeValue.Max;
  // }
  // public get min() {
  //   return DuckDBTimeValue.Min;
  // }
}
export const TIME = DuckDBTimeType.instance;

export class DuckDBIntervalType extends BaseDuckDBType<DuckDBTypeId.INTERVAL> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.INTERVAL, alias);
  }
  public static readonly instance = new DuckDBIntervalType();
  public static create(alias?: string): DuckDBIntervalType {
    return alias ? new DuckDBIntervalType(alias) : DuckDBIntervalType.instance;
  }
}
export const INTERVAL = DuckDBIntervalType.instance;

export class DuckDBHugeIntType extends BaseDuckDBType<DuckDBTypeId.HUGEINT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.HUGEINT, alias);
  }
  public static readonly instance = new DuckDBHugeIntType();
  public static create(alias?: string): DuckDBHugeIntType {
    return alias ? new DuckDBHugeIntType(alias) : DuckDBHugeIntType.instance;
  }
  public static readonly Max: bigint = 2n ** 127n - 1n;
  public static readonly Min: bigint = -(2n ** 127n);
  public get max() {
    return DuckDBHugeIntType.Max;
  }
  public get min() {
    return DuckDBHugeIntType.Min;
  }
}
export const HUGEINT = DuckDBHugeIntType.instance;

export class DuckDBUHugeIntType extends BaseDuckDBType<DuckDBTypeId.UHUGEINT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.UHUGEINT, alias);
  }
  public static readonly instance = new DuckDBUHugeIntType();
  public static create(alias?: string): DuckDBUHugeIntType {
    return alias ? new DuckDBUHugeIntType(alias) : DuckDBUHugeIntType.instance;
  }
  public static readonly Max: bigint = 2n ** 128n - 1n;
  public static readonly Min: bigint = 0n;
  public get max() {
    return DuckDBUHugeIntType.Max;
  }
  public get min() {
    return DuckDBUHugeIntType.Min;
  }
}
export const UHUGEINT = DuckDBUHugeIntType.instance;

export class DuckDBVarCharType extends BaseDuckDBType<DuckDBTypeId.VARCHAR> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.VARCHAR, alias);
  }
  public static readonly instance = new DuckDBVarCharType();
  public static create(alias?: string): DuckDBVarCharType {
    return alias ? new DuckDBVarCharType(alias) : DuckDBVarCharType.instance;
  }
}
export const VARCHAR = DuckDBVarCharType.instance;

export class DuckDBBlobType extends BaseDuckDBType<DuckDBTypeId.BLOB> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.BLOB, alias);
  }
  public static readonly instance = new DuckDBBlobType();
  public static create(alias?: string): DuckDBBlobType {
    return alias ? new DuckDBBlobType(alias) : DuckDBBlobType.instance;
  }
}
export const BLOB = DuckDBBlobType.instance;

export class DuckDBDecimalType extends BaseDuckDBType<DuckDBTypeId.DECIMAL> {
  public readonly width: number;
  public readonly scale: number;
  public constructor(width: number, scale: number, alias?: string) {
    super(DuckDBTypeId.DECIMAL, alias);
    this.width = width;
    this.scale = scale;
  }
  public toString(_options?: DuckDBTypeToStringOptions): string {
    return this.alias ?? `DECIMAL(${this.width},${this.scale})`;
  }
  public override toJson(): Json {
    return {
      typeId: this.typeId,
      width: this.width,
      scale: this.scale,
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
  public static readonly default = new DuckDBDecimalType(18, 3);
}
export function DECIMAL(
  width?: number,
  scale?: number,
  alias?: string,
): DuckDBDecimalType {
  if (width === undefined) {
    return DuckDBDecimalType.default;
  }
  if (scale === undefined) {
    return new DuckDBDecimalType(width, 0);
  }
  return new DuckDBDecimalType(width, scale, alias);
}
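The `DECIMAL` helper's defaults match DuckDB's: no arguments returns the shared DECIMAL(18,3) instance, and a width without a scale defaults the scale to 0. A sketch:

```ts
import { DECIMAL } from './DuckDBType.js';

console.log(DECIMAL().toString()); // 'DECIMAL(18,3)' (the shared default instance)
console.log(DECIMAL(10).toString()); // 'DECIMAL(10,0)'
console.log(DECIMAL(10, 2).toString()); // 'DECIMAL(10,2)'
```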

export class DuckDBTimestampSecondsType extends BaseDuckDBType<DuckDBTypeId.TIMESTAMP_S> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.TIMESTAMP_S, alias);
  }
  public static readonly instance = new DuckDBTimestampSecondsType();
  public static create(alias?: string): DuckDBTimestampSecondsType {
    return alias
      ? new DuckDBTimestampSecondsType(alias)
      : DuckDBTimestampSecondsType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get epoch() {
  //   return DuckDBTimestampSecondsValue.Epoch;
  // }
  // public get max() {
  //   return DuckDBTimestampSecondsValue.Max;
  // }
  // public get min() {
  //   return DuckDBTimestampSecondsValue.Min;
  // }
  // public get posInf() {
  //   return DuckDBTimestampSecondsValue.PosInf;
  // }
  // public get negInf() {
  //   return DuckDBTimestampSecondsValue.NegInf;
  // }
}
export const TIMESTAMP_S = DuckDBTimestampSecondsType.instance;

export class DuckDBTimestampMillisecondsType extends BaseDuckDBType<DuckDBTypeId.TIMESTAMP_MS> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.TIMESTAMP_MS, alias);
  }
  public static readonly instance = new DuckDBTimestampMillisecondsType();
  public static create(alias?: string): DuckDBTimestampMillisecondsType {
    return alias
      ? new DuckDBTimestampMillisecondsType(alias)
      : DuckDBTimestampMillisecondsType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get epoch() {
  //   return DuckDBTimestampMillisecondsValue.Epoch;
  // }
  // public get max() {
  //   return DuckDBTimestampMillisecondsValue.Max;
  // }
  // public get min() {
  //   return DuckDBTimestampMillisecondsValue.Min;
  // }
  // public get posInf() {
  //   return DuckDBTimestampMillisecondsValue.PosInf;
  // }
  // public get negInf() {
  //   return DuckDBTimestampMillisecondsValue.NegInf;
  // }
}
export const TIMESTAMP_MS = DuckDBTimestampMillisecondsType.instance;

export class DuckDBTimestampNanosecondsType extends BaseDuckDBType<DuckDBTypeId.TIMESTAMP_NS> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.TIMESTAMP_NS, alias);
  }
  public static readonly instance = new DuckDBTimestampNanosecondsType();
  public static create(alias?: string): DuckDBTimestampNanosecondsType {
    return alias
      ? new DuckDBTimestampNanosecondsType(alias)
      : DuckDBTimestampNanosecondsType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get epoch() {
  //   return DuckDBTimestampNanosecondsValue.Epoch;
  // }
  // public get max() {
  //   return DuckDBTimestampNanosecondsValue.Max;
  // }
  // public get min() {
  //   return DuckDBTimestampNanosecondsValue.Min;
  // }
  // public get posInf() {
  //   return DuckDBTimestampNanosecondsValue.PosInf;
  // }
  // public get negInf() {
  //   return DuckDBTimestampNanosecondsValue.NegInf;
  // }
}
export const TIMESTAMP_NS = DuckDBTimestampNanosecondsType.instance;

export class DuckDBEnumType extends BaseDuckDBType<DuckDBTypeId.ENUM> {
  public readonly values: readonly string[];
  public readonly valueIndexes: Readonly<Record<string, number>>;
  public readonly internalTypeId: DuckDBTypeId;
  public constructor(
    values: readonly string[],
    internalTypeId: DuckDBTypeId,
    alias?: string,
  ) {
    super(DuckDBTypeId.ENUM, alias);
    this.values = values;
    const valueIndexes: Record<string, number> = {};
    for (let i = 0; i < values.length; i++) {
      valueIndexes[values[i]] = i;
    }
    this.valueIndexes = valueIndexes;
    this.internalTypeId = internalTypeId;
  }
  public indexForValue(value: string): number {
    return this.valueIndexes[value];
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    if (this.alias) {
      return this.alias;
    }
    if (options?.short) {
      return `ENUM(…)`;
    }
    return `ENUM(${this.values.map(quotedString).join(', ')})`;
  }
  public override toJson(): Json {
    return {
      typeId: this.typeId,
      values: [...this.values],
      internalTypeId: this.internalTypeId,
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
}
export function ENUM8(
  values: readonly string[],
  alias?: string,
): DuckDBEnumType {
  return new DuckDBEnumType(values, DuckDBTypeId.UTINYINT, alias);
}
export function ENUM16(
  values: readonly string[],
  alias?: string,
): DuckDBEnumType {
  return new DuckDBEnumType(values, DuckDBTypeId.USMALLINT, alias);
}
export function ENUM32(
  values: readonly string[],
  alias?: string,
): DuckDBEnumType {
  return new DuckDBEnumType(values, DuckDBTypeId.UINTEGER, alias);
}
export function ENUM(
  values: readonly string[],
  alias?: string,
): DuckDBEnumType {
  if (values.length < 256) {
    return ENUM8(values, alias);
  } else if (values.length < 65536) {
    return ENUM16(values, alias);
  } else if (values.length < 4294967296) {
    return ENUM32(values, alias);
  } else {
    throw new Error(
      `ENUM types cannot have more than 4294967295 values; received ${values.length}`,
    );
  }
}
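The `ENUM` helper picks the narrowest unsigned internal type that can index the value list; a sketch of the thresholds:

```ts
import { ENUM } from './DuckDBType.js';
import { DuckDBTypeId } from './DuckDBTypeId.js';

const small = ENUM(['red', 'green', 'blue']);
console.log(small.internalTypeId === DuckDBTypeId.UTINYINT); // true: fewer than 256 values

const wide = ENUM(Array.from({ length: 256 }, (_, i) => `v${i}`));
console.log(wide.internalTypeId === DuckDBTypeId.USMALLINT); // true: 256 to 65535 values
```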

export class DuckDBListType extends BaseDuckDBType<DuckDBTypeId.LIST> {
  public readonly valueType: DuckDBType;
  public constructor(valueType: DuckDBType, alias?: string) {
    super(DuckDBTypeId.LIST, alias);
    this.valueType = valueType;
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    return this.alias ?? `${this.valueType.toString(options)}[]`;
  }
  public override toJson(): Json {
    return {
      typeId: this.typeId,
      valueType: this.valueType.toJson(),
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
}
export function LIST(valueType: DuckDBType, alias?: string): DuckDBListType {
  return new DuckDBListType(valueType, alias);
}

export class DuckDBStructType extends BaseDuckDBType<DuckDBTypeId.STRUCT> {
  public readonly entryNames: readonly string[];
  public readonly entryTypes: readonly DuckDBType[];
  public readonly entryIndexes: Readonly<Record<string, number>>;
  public constructor(
    entryNames: readonly string[],
    entryTypes: readonly DuckDBType[],
    alias?: string,
  ) {
    super(DuckDBTypeId.STRUCT, alias);
    if (entryNames.length !== entryTypes.length) {
      throw new Error(`Could not create DuckDBStructType: \
entryNames length (${entryNames.length}) does not match entryTypes length (${entryTypes.length})`);
    }
    this.entryNames = entryNames;
    this.entryTypes = entryTypes;
    const entryIndexes: Record<string, number> = {};
    for (let i = 0; i < entryNames.length; i++) {
      entryIndexes[entryNames[i]] = i;
    }
    this.entryIndexes = entryIndexes;
  }
  public get entryCount() {
    return this.entryNames.length;
  }
  public indexForEntry(entryName: string): number {
    return this.entryIndexes[entryName];
  }
  public typeForEntry(entryName: string): DuckDBType {
    return this.entryTypes[this.entryIndexes[entryName]];
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    if (this.alias) {
      return this.alias;
    }
    if (options?.short) {
      return `STRUCT(…)`;
    }
    const parts: string[] = [];
    for (let i = 0; i < this.entryNames.length; i++) {
      parts.push(
        `${quotedIdentifier(this.entryNames[i])} ${this.entryTypes[i]}`,
      );
    }
    return `STRUCT(${parts.join(', ')})`;
  }
  public override toJson(): Json {
    return {
      typeId: this.typeId,
      entryNames: [...this.entryNames],
      entryTypes: this.entryTypes.map((t) => t.toJson()),
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
}
export function STRUCT(
  entries: Record<string, DuckDBType>,
  alias?: string,
): DuckDBStructType {
  const entryNames = Object.keys(entries);
  const entryTypes = Object.values(entries);
  return new DuckDBStructType(entryNames, entryTypes, alias);
}

export class DuckDBMapType extends BaseDuckDBType<DuckDBTypeId.MAP> {
  public readonly keyType: DuckDBType;
  public readonly valueType: DuckDBType;
  public constructor(
    keyType: DuckDBType,
    valueType: DuckDBType,
    alias?: string,
  ) {
    super(DuckDBTypeId.MAP, alias);
    this.keyType = keyType;
    this.valueType = valueType;
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    if (this.alias) {
      return this.alias;
    }
    if (options?.short) {
      return `MAP(…)`;
    }
    return `MAP(${this.keyType}, ${this.valueType})`;
  }
  public override toJson(): Json {
    return {
      typeId: this.typeId,
      keyType: this.keyType.toJson(),
      valueType: this.valueType.toJson(),
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
}
export function MAP(
  keyType: DuckDBType,
  valueType: DuckDBType,
  alias?: string,
): DuckDBMapType {
  return new DuckDBMapType(keyType, valueType, alias);
}

export class DuckDBArrayType extends BaseDuckDBType<DuckDBTypeId.ARRAY> {
  public readonly valueType: DuckDBType;
  public readonly length: number;
  public constructor(valueType: DuckDBType, length: number, alias?: string) {
    super(DuckDBTypeId.ARRAY, alias);
    this.valueType = valueType;
    this.length = length;
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    return this.alias ?? `${this.valueType.toString(options)}[${this.length}]`;
  }
  public override toJson(): Json {
    return {
      typeId: this.typeId,
      valueType: this.valueType.toJson(),
      length: this.length,
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
}
export function ARRAY(
  valueType: DuckDBType,
  length: number,
  alias?: string,
): DuckDBArrayType {
  return new DuckDBArrayType(valueType, length, alias);
}
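The nested-type helpers compose with the scalar singletons. A sketch of the rendered forms implied by each `toString` (struct entry names pass through `quotedIdentifier`; double-quoting is assumed here):

```ts
import { ARRAY, INTEGER, LIST, MAP, STRUCT, VARCHAR } from './DuckDBType.js';

console.log(LIST(INTEGER).toString()); // 'INTEGER[]'
console.log(ARRAY(INTEGER, 3).toString()); // 'INTEGER[3]'
console.log(MAP(VARCHAR, INTEGER).toString()); // 'MAP(VARCHAR, INTEGER)'
console.log(STRUCT({ id: INTEGER, name: VARCHAR }).toString());
// 'STRUCT("id" INTEGER, "name" VARCHAR)' if quotedIdentifier double-quotes names
```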

export class DuckDBUUIDType extends BaseDuckDBType<DuckDBTypeId.UUID> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.UUID, alias);
  }
  public static readonly instance = new DuckDBUUIDType();
  public static create(alias?: string): DuckDBUUIDType {
    return alias ? new DuckDBUUIDType(alias) : DuckDBUUIDType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get max() {
  //   return DuckDBUUIDValue.Max;
  // }
  // public get min() {
  //   return DuckDBUUIDValue.Min;
  // }
}
export const UUID = DuckDBUUIDType.instance;

export class DuckDBUnionType extends BaseDuckDBType<DuckDBTypeId.UNION> {
  public readonly memberTags: readonly string[];
  public readonly tagMemberIndexes: Readonly<Record<string, number>>;
  public readonly memberTypes: readonly DuckDBType[];
  public constructor(
    memberTags: readonly string[],
    memberTypes: readonly DuckDBType[],
    alias?: string,
  ) {
    super(DuckDBTypeId.UNION, alias);
    if (memberTags.length !== memberTypes.length) {
      throw new Error(`Could not create DuckDBUnionType: \
tags length (${memberTags.length}) does not match valueTypes length (${memberTypes.length})`);
    }
    this.memberTags = memberTags;
    const tagMemberIndexes: Record<string, number> = {};
    for (let i = 0; i < memberTags.length; i++) {
      tagMemberIndexes[memberTags[i]] = i;
    }
    this.tagMemberIndexes = tagMemberIndexes;
    this.memberTypes = memberTypes;
  }
  public memberIndexForTag(tag: string): number {
    return this.tagMemberIndexes[tag];
  }
  public memberTypeForTag(tag: string): DuckDBType {
    return this.memberTypes[this.tagMemberIndexes[tag]];
  }
  public get memberCount() {
    return this.memberTags.length;
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    if (this.alias) {
      return this.alias;
    }
    if (options?.short) {
      return `UNION(…)`;
    }
    const parts: string[] = [];
    for (let i = 0; i < this.memberTags.length; i++) {
      parts.push(
        `${quotedIdentifier(this.memberTags[i])} ${this.memberTypes[i]}`,
      );
    }
    return `UNION(${parts.join(', ')})`;
  }
  public override toJson(): Json {
    return {
      typeId: this.typeId,
      memberTags: [...this.memberTags],
      memberTypes: this.memberTypes.map((t) => t.toJson()),
      ...(this.alias ? { alias: this.alias } : {}),
    };
  }
}
export function UNION(
  members: Record<string, DuckDBType>,
  alias?: string,
): DuckDBUnionType {
  const memberTags = Object.keys(members);
  const memberTypes = Object.values(members);
  return new DuckDBUnionType(memberTags, memberTypes, alias);
}
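Union members are addressed by tag, with lookups going through the precomputed `tagMemberIndexes` map. A sketch:

```ts
import { INTEGER, UNION, VARCHAR } from './DuckDBType.js';

const u = UNION({ num: INTEGER, str: VARCHAR });
console.log(u.memberCount); // 2
console.log(u.memberIndexForTag('str')); // 1
console.log(u.memberTypeForTag('num') === INTEGER); // true (the shared singleton)
```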
|
export class DuckDBBitType extends BaseDuckDBType<DuckDBTypeId.BIT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.BIT, alias);
  }
  public static readonly instance = new DuckDBBitType();
  public static create(alias?: string): DuckDBBitType {
    return alias ? new DuckDBBitType(alias) : DuckDBBitType.instance;
  }
}
export const BIT = DuckDBBitType.instance;

export class DuckDBTimeTZType extends BaseDuckDBType<DuckDBTypeId.TIME_TZ> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.TIME_TZ, alias);
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    if (this.alias) {
      return this.alias;
    }
    if (options?.short) {
      return 'TIMETZ';
    }
    return 'TIME WITH TIME ZONE';
  }
  public static readonly instance = new DuckDBTimeTZType();
  public static create(alias?: string): DuckDBTimeTZType {
    return alias ? new DuckDBTimeTZType(alias) : DuckDBTimeTZType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get max() {
  //   return DuckDBTimeTZValue.Max;
  // }
  // public get min() {
  //   return DuckDBTimeTZValue.Min;
  // }
}
export const TIMETZ = DuckDBTimeTZType.instance;

export class DuckDBTimestampTZType extends BaseDuckDBType<DuckDBTypeId.TIMESTAMP_TZ> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.TIMESTAMP_TZ, alias);
  }
  public toString(options?: DuckDBTypeToStringOptions): string {
    if (this.alias) {
      return this.alias;
    }
    if (options?.short) {
      return 'TIMESTAMPTZ';
    }
    return 'TIMESTAMP WITH TIME ZONE';
  }
  public static readonly instance = new DuckDBTimestampTZType();
  public static create(alias?: string): DuckDBTimestampTZType {
    return alias
      ? new DuckDBTimestampTZType(alias)
      : DuckDBTimestampTZType.instance;
  }
  // TODO: common DuckDBValues on type objects
  // public get epoch() {
  //   return DuckDBTimestampTZValue.Epoch;
  // }
  // public get max() {
  //   return DuckDBTimestampTZValue.Max;
  // }
  // public get min() {
  //   return DuckDBTimestampTZValue.Min;
  // }
  // public get posInf() {
  //   return DuckDBTimestampTZValue.PosInf;
  // }
  // public get negInf() {
  //   return DuckDBTimestampTZValue.NegInf;
  // }
}
export const TIMESTAMPTZ = DuckDBTimestampTZType.instance;

export class DuckDBAnyType extends BaseDuckDBType<DuckDBTypeId.ANY> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.ANY, alias);
  }
  public static readonly instance = new DuckDBAnyType();
  public static create(alias?: string): DuckDBAnyType {
    return alias ? new DuckDBAnyType(alias) : DuckDBAnyType.instance;
  }
}
export const ANY = DuckDBAnyType.instance;

export class DuckDBVarIntType extends BaseDuckDBType<DuckDBTypeId.VARINT> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.VARINT, alias);
  }
  public static readonly instance = new DuckDBVarIntType();
  public static create(alias?: string): DuckDBVarIntType {
    return alias ? new DuckDBVarIntType(alias) : DuckDBVarIntType.instance;
  }
  public static readonly Max: bigint =
    179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n;
  public static readonly Min: bigint =
    -179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n;
  public get max() {
    return DuckDBVarIntType.Max;
  }
  public get min() {
    return DuckDBVarIntType.Min;
  }
}
export const VARINT = DuckDBVarIntType.instance;

export class DuckDBSQLNullType extends BaseDuckDBType<DuckDBTypeId.SQLNULL> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.SQLNULL, alias);
  }
  public static readonly instance = new DuckDBSQLNullType();
  public static create(alias?: string): DuckDBSQLNullType {
    return alias ? new DuckDBSQLNullType(alias) : DuckDBSQLNullType.instance;
  }
}
export const SQLNULL = DuckDBSQLNullType.instance;

export class DuckDBStringLiteralType extends BaseDuckDBType<DuckDBTypeId.STRING_LITERAL> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.STRING_LITERAL, alias);
  }
  public static readonly instance = new DuckDBStringLiteralType();
  public static create(alias?: string): DuckDBStringLiteralType {
    return alias
      ? new DuckDBStringLiteralType(alias)
      : DuckDBStringLiteralType.instance;
  }
}
export const STRING_LITERAL = DuckDBStringLiteralType.instance;

export class DuckDBIntegerLiteralType extends BaseDuckDBType<DuckDBTypeId.INTEGER_LITERAL> {
  public constructor(alias?: string) {
    super(DuckDBTypeId.INTEGER_LITERAL, alias);
  }
  public static readonly instance = new DuckDBIntegerLiteralType();
  public static create(alias?: string): DuckDBIntegerLiteralType {
    return alias
      ? new DuckDBIntegerLiteralType(alias)
      : DuckDBIntegerLiteralType.instance;
  }
}
export const INTEGER_LITERAL = DuckDBIntegerLiteralType.instance;

export type DuckDBType =
  | DuckDBBooleanType
  | DuckDBTinyIntType
  | DuckDBSmallIntType
  | DuckDBIntegerType
  | DuckDBBigIntType
  | DuckDBUTinyIntType
  | DuckDBUSmallIntType
  | DuckDBUIntegerType
  | DuckDBUBigIntType
  | DuckDBFloatType
  | DuckDBDoubleType
  | DuckDBTimestampType
  | DuckDBDateType
  | DuckDBTimeType
  | DuckDBIntervalType
  | DuckDBHugeIntType
  | DuckDBUHugeIntType
  | DuckDBVarCharType
  | DuckDBBlobType
  | DuckDBDecimalType
  | DuckDBTimestampSecondsType
  | DuckDBTimestampMillisecondsType
  | DuckDBTimestampNanosecondsType
  | DuckDBEnumType
  | DuckDBListType
  | DuckDBStructType
  | DuckDBMapType
  | DuckDBArrayType
  | DuckDBUUIDType
  | DuckDBUnionType
  | DuckDBBitType
  | DuckDBTimeTZType
  | DuckDBTimestampTZType
  | DuckDBAnyType
  | DuckDBVarIntType
  | DuckDBSQLNullType
  | DuckDBStringLiteralType
  | DuckDBIntegerLiteralType;
42  ts/pkgs/duckdb-data-types/src/DuckDBTypeId.ts  Normal file
@@ -0,0 +1,42 @@
// copy of DUCKDB_TYPE from the C API, with names shortened
export enum DuckDBTypeId {
  INVALID = 0,
  BOOLEAN = 1,
  TINYINT = 2,
  SMALLINT = 3,
  INTEGER = 4,
  BIGINT = 5,
  UTINYINT = 6,
  USMALLINT = 7,
  UINTEGER = 8,
  UBIGINT = 9,
  FLOAT = 10,
  DOUBLE = 11,
  TIMESTAMP = 12,
  DATE = 13,
  TIME = 14,
  INTERVAL = 15,
  HUGEINT = 16,
  UHUGEINT = 32,
  VARCHAR = 17,
  BLOB = 18,
  DECIMAL = 19,
  TIMESTAMP_S = 20,
  TIMESTAMP_MS = 21,
  TIMESTAMP_NS = 22,
  ENUM = 23,
  LIST = 24,
  STRUCT = 25,
  MAP = 26,
  ARRAY = 33,
  UUID = 27,
  UNION = 28,
  BIT = 29,
  TIME_TZ = 30,
  TIMESTAMP_TZ = 31,
  ANY = 34,
  VARINT = 35,
  SQLNULL = 36,
  STRING_LITERAL = 37,
  INTEGER_LITERAL = 38,
}
46  ts/pkgs/duckdb-data-types/src/extensionTypes.ts  Normal file
@@ -0,0 +1,46 @@
import {
  DOUBLE,
  DuckDBBlobType,
  DuckDBVarCharType,
  FLOAT,
  HUGEINT,
  LIST,
  STRUCT,
  USMALLINT,
  UTINYINT,
} from './DuckDBType.js';

// see https://github.com/duckdb/duckdb-inet/blob/main/src/inet_extension.cpp
export const INET = STRUCT(
  { ip_type: UTINYINT, address: HUGEINT, mask: USMALLINT },
  'INET',
);

// see LogicalType::JSON() in https://github.com/duckdb/duckdb/blob/main/src/common/types.cpp
export const JSONType = DuckDBVarCharType.create('JSON');

// see https://github.com/duckdb/duckdb-spatial/blob/main/src/spatial/spatial_types.cpp
export const BOX_2D = STRUCT(
  { min_x: DOUBLE, min_y: DOUBLE, max_x: DOUBLE, max_y: DOUBLE },
  'BOX_2D',
);
export const BOX_2DF = STRUCT(
  { min_x: FLOAT, min_y: FLOAT, max_x: FLOAT, max_y: FLOAT },
  'BOX_2DF',
);
export const GEOMETRY = DuckDBBlobType.create('GEOMETRY');
export const LINESTRING_2D = LIST(
  STRUCT({ x: DOUBLE, y: DOUBLE }),
  'LINESTRING_2D',
);
export const POINT_2D = STRUCT({ x: DOUBLE, y: DOUBLE }, 'POINT_2D');
export const POINT_3D = STRUCT({ x: DOUBLE, y: DOUBLE, z: DOUBLE }, 'POINT_3D');
export const POINT_4D = STRUCT(
  { x: DOUBLE, y: DOUBLE, z: DOUBLE, m: DOUBLE },
  'POINT_4D',
);
export const POLYGON_2D = LIST(
  LIST(STRUCT({ x: DOUBLE, y: DOUBLE })),
  'POLYGON_2D',
);
export const WKB_BLOB = DuckDBBlobType.create('WKB_BLOB');
4  ts/pkgs/duckdb-data-types/src/index.ts  Normal file
@@ -0,0 +1,4 @@
export * from './DuckDBType.js';
export * from './DuckDBTypeId.js';
export * from './extensionTypes.js';
export * from './parseLogicalTypeString.js';
286  ts/pkgs/duckdb-data-types/src/parseLogicalTypeString.ts  Normal file
@@ -0,0 +1,286 @@
import {
  ARRAY,
  BIGINT,
  BIT,
  BLOB,
  BOOLEAN,
  DATE,
  DECIMAL,
  DOUBLE,
  DuckDBMapType,
  DuckDBStructType,
  DuckDBType,
  DuckDBUnionType,
  ENUM,
  FLOAT,
  HUGEINT,
  INTEGER,
  INTERVAL,
  LIST,
  MAP,
  SMALLINT,
  SQLNULL,
  STRUCT,
  TIME,
  TIMESTAMP,
  TIMESTAMP_MS,
  TIMESTAMP_NS,
  TIMESTAMP_S,
  TIMESTAMPTZ,
  TIMETZ,
  TINYINT,
  UBIGINT,
  UHUGEINT,
  UINTEGER,
  UNION,
  USMALLINT,
  UTINYINT,
  UUID,
  VARCHAR,
  VARINT,
} from './DuckDBType.js';
import {
  BOX_2D,
  BOX_2DF,
  GEOMETRY,
  INET,
  JSONType,
  LINESTRING_2D,
  POINT_2D,
  POINT_3D,
  POINT_4D,
  POLYGON_2D,
  WKB_BLOB,
} from './extensionTypes.js';

const simpleTypeMap: Record<string, DuckDBType> = {
  BIGINT: BIGINT,
  BIT: BIT,
  BOOLEAN: BOOLEAN,
  BLOB: BLOB,
  BOX_2D: BOX_2D,
  BOX_2DF: BOX_2DF,
  DATE: DATE,
  DOUBLE: DOUBLE,
  FLOAT: FLOAT,
  GEOMETRY: GEOMETRY,
  HUGEINT: HUGEINT,
  INET: INET,
  INTEGER: INTEGER,
  INTERVAL: INTERVAL,
  JSON: JSONType,
  LINESTRING_2D: LINESTRING_2D,
  POINT_2D: POINT_2D,
  POINT_3D: POINT_3D,
  POINT_4D: POINT_4D,
  POLYGON_2D: POLYGON_2D,
  SMALLINT: SMALLINT,
  SQLNULL: SQLNULL,
  TIME: TIME,
  'TIME WITH TIME ZONE': TIMETZ,
  TIMESTAMP: TIMESTAMP,
  'TIMESTAMP WITH TIME ZONE': TIMESTAMPTZ,
  TIMESTAMP_S: TIMESTAMP_S,
  TIMESTAMP_MS: TIMESTAMP_MS,
  TIMESTAMP_NS: TIMESTAMP_NS,
  TINYINT: TINYINT,
  UBIGINT: UBIGINT,
  UHUGEINT: UHUGEINT,
  UINTEGER: UINTEGER,
  USMALLINT: USMALLINT,
  UTINYINT: UTINYINT,
  UUID: UUID,
  VARCHAR: VARCHAR,
  VARINT: VARINT,
  WKB_BLOB: WKB_BLOB,
};

function matchStructMapOrUnion(
  typeString: string,
): DuckDBStructType | DuckDBMapType | DuckDBUnionType | undefined {
  typeString = typeString.trim();

  const fields = parseStructLike(typeString);
  if (!fields) {
    return undefined;
  }

  if (typeString.startsWith('STRUCT')) {
    const entries: Record<string, DuckDBType> = {};
    for (const field of fields) {
      if (field.key && field.type) {
        entries[field.key] = field.type;
      }
    }
    return STRUCT(entries);
  }
  if (typeString.startsWith('MAP')) {
    const keyType = fields[0]?.type;
    const valueType = fields[1]?.type;
    if (keyType && valueType) {
      return MAP(keyType, valueType);
    }
  }
  if (typeString.startsWith('UNION')) {
    const members: Record<string, DuckDBType> = {};
    for (const field of fields) {
      if (field.key && field.type) {
        members[field.key] = field.type;
      }
    }
    return UNION(members);
  }
  return undefined;
}

function parseStructLike(typeString: string): ParsedField[] | undefined {
  const structPattern = /^(STRUCT|MAP|UNION)\s*\((.*)\)$/;
  const match = structPattern.exec(typeString);
  if (!match) {
    return undefined;
  }

  const fieldsString = match[2];
  return parseFields(fieldsString);
}

/**
 * Parse the fields substring by counting parens and double quotes.
 * A paren only changes the nesting depth when the count of double quotes seen
 * so far is even, i.e. when we are outside a quoted identifier. Escaped double
 * quotes inside a quoted identifier appear as adjacent pairs (""), so they
 * always advance the count by 2 before another paren can occur, which keeps
 * the even/odd test reliable.
 */
function parseFields(fieldsString: string): ParsedField[] {
  const fields: ParsedField[] = [];
  let currentFieldStartIndex: number | null = null;
  let parenCount = 0;
  let quoteCount = 0;

  for (let i = 0; i < fieldsString.length; i++) {
    const char = fieldsString[i];

    if (
      currentFieldStartIndex === null &&
      char !== '(' &&
      char !== ')' &&
      char !== ','
    ) {
      currentFieldStartIndex = i;
    }

    if (char === '"') {
      quoteCount++;
    }

    if (
      char === ',' &&
      parenCount === 0 &&
      quoteCount % 2 === 0 &&
      currentFieldStartIndex !== null
    ) {
      const field = fieldsString.slice(currentFieldStartIndex, i);
      fields.push(parseField(field.trim()));
      currentFieldStartIndex = null;
    } else {
      if (char === '(' && quoteCount % 2 === 0) parenCount++;
      if (char === ')' && quoteCount % 2 === 0) parenCount--;
    }
  }

  if (currentFieldStartIndex !== null) {
    const lastField = fieldsString.slice(currentFieldStartIndex);
    fields.push(parseField(lastField.trim()));
  }
  return fields;
}
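Reviewer note (illustrative, not part of the diff): the even/odd quote test is what lets field splitting ignore separators inside quoted keys, which may legally contain commas and parens:

import { parseLogicalTypeString } from './parseLogicalTypeString.js';

// The comma and paren inside "a,(b" neither split the field nor change the
// nesting depth, because the quote count is odd while scanning them.
const t = parseLogicalTypeString('STRUCT("a,(b" INTEGER, c VARCHAR)');
// t equals STRUCT({ '"a,(b"': INTEGER, c: VARCHAR }) — keys keep their quoted
// form, as the tests below also show.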

interface ParsedField {
  key?: string;
  type?: DuckDBType;
}

function parseField(fieldString: string): ParsedField {
  const fieldPattern = /^(".*?"|\w+)\s+(.+)$/;
  const match = fieldPattern.exec(fieldString);
  if (match) {
    const key = match[1];
    const type = parseLogicalTypeString(match[2].trim());
    return { key, type };
  } else {
    const type = parseLogicalTypeString(fieldString);
    return { type };
  }
}

function matchDecimal(typeString: string) {
  const match = typeString.match(/^DECIMAL\((\d+),(\d+)\)$/);
  if (match) {
    return DECIMAL(Number(match[1]), Number(match[2]));
  }
  return undefined;
}

function matchEnum(typeString: string) {
  const match = /ENUM\(([^)]*)\)/i.exec(typeString);
  if (match) {
    const matches = match[1].matchAll(/'((?:[^']|'')*)'/g);
    const values: string[] = [];
    for (const match of matches) {
      // global flag so every doubled quote is unescaped, not just the first
      values.push(match[1].replace(/''/g, `'`));
    }
    return ENUM(values);
  }
  return undefined;
}

function matchList(typeString: string) {
  if (typeString.endsWith('[]')) {
    const innerType = typeString.slice(0, -2);
    return LIST(parseLogicalTypeString(innerType));
  }
  return undefined;
}

function matchArray(typeString: string) {
  const match = typeString.match(/\[(\d+)\]$/);
  if (match) {
    const innerType = typeString.slice(0, -match[0].length);
    const length = match[1];
    return ARRAY(parseLogicalTypeString(innerType), Number(length));
  }
  return undefined;
}

export function parseLogicalTypeString(typeString: string): DuckDBType {
  if (typeString in simpleTypeMap) {
    return simpleTypeMap[typeString];
  }

  const listType = matchList(typeString);
  if (listType) {
    return listType;
  }

  const arrayType = matchArray(typeString);
  if (arrayType) {
    return arrayType;
  }

  const decimalType = matchDecimal(typeString);
  if (decimalType) {
    return decimalType;
  }

  const enumType = matchEnum(typeString);
  if (enumType) {
    return enumType;
  }

  const structMapOrUnionType = matchStructMapOrUnion(typeString);
  if (structMapOrUnionType) {
    return structMapOrUnionType;
  }

  throw Error(`unimplemented type match: ${typeString}`);
}
7  ts/pkgs/duckdb-data-types/src/sql.ts  Normal file
@@ -0,0 +1,7 @@
export function quotedString(input: string): string {
  // escape every embedded single quote, not just the first occurrence
  return `'${input.replaceAll(`'`, `''`)}'`;
}

export function quotedIdentifier(input: string): string {
  // escape every embedded double quote, not just the first occurrence
  return `"${input.replaceAll(`"`, `""`)}"`;
}
6  ts/pkgs/duckdb-data-types/src/tsconfig.json  Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.library.json",
  "compilerOptions": {
    "outDir": "../out"
  }
}
1025  ts/pkgs/duckdb-data-types/test/DuckDBType.test.ts  Normal file
File diff suppressed because it is too large.
326  ts/pkgs/duckdb-data-types/test/parseLogicalTypeString.test.ts  Normal file
@@ -0,0 +1,326 @@
import { expect, suite, test } from 'vitest';
import {
  ARRAY,
  BIGINT,
  BIT,
  BLOB,
  BOOLEAN,
  DATE,
  DECIMAL,
  DOUBLE,
  ENUM,
  FLOAT,
  HUGEINT,
  INTEGER,
  INTERVAL,
  LIST,
  MAP,
  SMALLINT,
  STRUCT,
  TIME,
  TIMESTAMP,
  TIMESTAMP_MS,
  TIMESTAMP_NS,
  TIMESTAMP_S,
  TIMESTAMPTZ,
  TIMETZ,
  TINYINT,
  UBIGINT,
  UHUGEINT,
  UINTEGER,
  UNION,
  USMALLINT,
  UTINYINT,
  UUID,
  VARCHAR,
  VARINT,
} from '../src/DuckDBType';
import {
  BOX_2D,
  BOX_2DF,
  GEOMETRY,
  INET,
  JSONType,
  LINESTRING_2D,
  POINT_2D,
  POINT_3D,
  POINT_4D,
  POLYGON_2D,
  WKB_BLOB,
} from '../src/extensionTypes';
import { parseLogicalTypeString } from '../src/parseLogicalTypeString';

suite('parseLogicalTypeString', () => {
  test('BOOLEAN', () => {
    expect(parseLogicalTypeString('BOOLEAN')).toStrictEqual(BOOLEAN);
  });
  test('TINYINT', () => {
    expect(parseLogicalTypeString('TINYINT')).toStrictEqual(TINYINT);
  });
  test('GEOMETRY', () => {
    expect(parseLogicalTypeString('GEOMETRY')).toStrictEqual(GEOMETRY);
  });
  test('LINESTRING_2D', () => {
    expect(parseLogicalTypeString('LINESTRING_2D')).toStrictEqual(
      LINESTRING_2D,
    );
  });
  test('BOX_2D', () => {
    expect(parseLogicalTypeString('BOX_2D')).toStrictEqual(BOX_2D);
  });
  test('BOX_2DF', () => {
    expect(parseLogicalTypeString('BOX_2DF')).toStrictEqual(BOX_2DF);
  });
  test('POINT_2D', () => {
    expect(parseLogicalTypeString('POINT_2D')).toStrictEqual(POINT_2D);
  });
  test('POINT_3D', () => {
    expect(parseLogicalTypeString('POINT_3D')).toStrictEqual(POINT_3D);
  });
  test('POINT_4D', () => {
    expect(parseLogicalTypeString('POINT_4D')).toStrictEqual(POINT_4D);
  });
  test('POLYGON_2D', () => {
    expect(parseLogicalTypeString('POLYGON_2D')).toStrictEqual(POLYGON_2D);
  });
  test('INET', () => {
    expect(parseLogicalTypeString('INET')).toStrictEqual(INET);
  });
  test('JSON', () => {
    expect(parseLogicalTypeString('JSON')).toStrictEqual(JSONType);
  });
  test('WKB_BLOB', () => {
    expect(parseLogicalTypeString('WKB_BLOB')).toStrictEqual(WKB_BLOB);
  });
  test('SMALLINT', () => {
    expect(parseLogicalTypeString('SMALLINT')).toStrictEqual(SMALLINT);
  });
  test('INTEGER', () => {
    expect(parseLogicalTypeString('INTEGER')).toStrictEqual(INTEGER);
  });
  test('BIGINT', () => {
    expect(parseLogicalTypeString('BIGINT')).toStrictEqual(BIGINT);
  });
  test('HUGEINT', () => {
    expect(parseLogicalTypeString('HUGEINT')).toStrictEqual(HUGEINT);
  });
  test('UTINYINT', () => {
    expect(parseLogicalTypeString('UTINYINT')).toStrictEqual(UTINYINT);
  });
  test('UHUGEINT', () => {
    expect(parseLogicalTypeString('UHUGEINT')).toStrictEqual(UHUGEINT);
  });
  test('USMALLINT', () => {
    expect(parseLogicalTypeString('USMALLINT')).toStrictEqual(USMALLINT);
  });
  test('UINTEGER', () => {
    expect(parseLogicalTypeString('UINTEGER')).toStrictEqual(UINTEGER);
  });
  test('UBIGINT', () => {
    expect(parseLogicalTypeString('UBIGINT')).toStrictEqual(UBIGINT);
  });
  test('DATE', () => {
    expect(parseLogicalTypeString('DATE')).toStrictEqual(DATE);
  });
  test('TIME', () => {
    expect(parseLogicalTypeString('TIME')).toStrictEqual(TIME);
  });
  test('TIMESTAMP', () => {
    expect(parseLogicalTypeString('TIMESTAMP')).toStrictEqual(TIMESTAMP);
  });
  test('TIMESTAMP_S', () => {
    expect(parseLogicalTypeString('TIMESTAMP_S')).toStrictEqual(TIMESTAMP_S);
  });
  test('TIMESTAMP_MS', () => {
    expect(parseLogicalTypeString('TIMESTAMP_MS')).toStrictEqual(TIMESTAMP_MS);
  });
  test('TIMESTAMP_NS', () => {
    expect(parseLogicalTypeString('TIMESTAMP_NS')).toStrictEqual(TIMESTAMP_NS);
  });
  test('TIME WITH TIME ZONE', () => {
    expect(parseLogicalTypeString('TIME WITH TIME ZONE')).toStrictEqual(TIMETZ);
  });
  test('TIMESTAMP WITH TIME ZONE', () => {
    expect(parseLogicalTypeString('TIMESTAMP WITH TIME ZONE')).toStrictEqual(
      TIMESTAMPTZ,
    );
  });
  test('FLOAT', () => {
    expect(parseLogicalTypeString('FLOAT')).toStrictEqual(FLOAT);
  });
  test('DOUBLE', () => {
    expect(parseLogicalTypeString('DOUBLE')).toStrictEqual(DOUBLE);
  });

  test('DECIMAL(18,6)', () => {
    expect(parseLogicalTypeString('DECIMAL(18,6)')).toStrictEqual(
      DECIMAL(18, 6),
    );
  });

  test(`ENUM('DUCK_DUCK_ENUM', 'GOOSE')`, () => {
    expect(
      parseLogicalTypeString(`ENUM('DUCK_DUCK_ENUM', 'GOOSE')`),
    ).toStrictEqual(ENUM(['DUCK_DUCK_ENUM', 'GOOSE']));
  });

  test('DOUBLE[]', () => {
    expect(parseLogicalTypeString('DOUBLE[]')).toStrictEqual(LIST(DOUBLE));
  });

  test('STRUCT(a INTEGER, b VARCHAR)', () => {
    expect(
      parseLogicalTypeString('STRUCT(a INTEGER, b VARCHAR)'),
    ).toStrictEqual(
      STRUCT({
        a: INTEGER,
        b: VARCHAR,
      }),
    );
  });

  test('STRUCT(a INTEGER[], b VARCHAR[])', () => {
    expect(
      parseLogicalTypeString('STRUCT(a INTEGER[], b VARCHAR[])'),
    ).toStrictEqual(
      STRUCT({
        a: LIST(INTEGER),
        b: LIST(VARCHAR),
      }),
    );
  });

  test('STRUCT(a INTEGER, b VARCHAR)[]', () => {
    expect(
      parseLogicalTypeString('STRUCT(a INTEGER, b VARCHAR)[]'),
    ).toStrictEqual(
      LIST(
        STRUCT({
          a: INTEGER,
          b: VARCHAR,
        }),
      ),
    );
  });

  // addition: nested struct
  test('STRUCT(a STRUCT(b INTEGER), b VARCHAR)', () => {
    expect(
      parseLogicalTypeString('STRUCT(a STRUCT(b INTEGER), b VARCHAR)'),
    ).toStrictEqual(
      STRUCT({
        a: STRUCT({ b: INTEGER }),
        b: VARCHAR,
      }),
    );
  });
  test('STRUCT("my weird ""key" INTEGER, b VARCHAR)', () => {
    expect(
      parseLogicalTypeString('STRUCT("my weird ""key" INTEGER, b VARCHAR)'),
    ).toStrictEqual(
      STRUCT({
        '"my weird ""key"': INTEGER,
        b: VARCHAR,
      }),
    );
  });
  test('STRUCT("my weird ""key" STRUCT("my other ""weird key" INTEGER), b VARCHAR)', () => {
    expect(
      parseLogicalTypeString(
        'STRUCT("my weird ""key" STRUCT("my other ""weird key" INTEGER), b VARCHAR)',
      ),
    ).toStrictEqual(
      STRUCT({
        '"my weird ""key"': STRUCT({
          '"my other ""weird key"': INTEGER,
        }),
        b: VARCHAR,
      }),
    );
  });

  test('MAP(INTEGER, VARCHAR)', () => {
    expect(parseLogicalTypeString('MAP(INTEGER, VARCHAR)')).toStrictEqual(
      MAP(INTEGER, VARCHAR),
    );
  });

  test('MAP(VARCHAR, STRUCT(b INTEGER))', () => {
    expect(
      parseLogicalTypeString('MAP(VARCHAR, STRUCT(b INTEGER))'),
    ).toStrictEqual(MAP(VARCHAR, STRUCT({ b: INTEGER })));
  });

  test('UNION("name" VARCHAR, age SMALLINT)', () => {
    expect(
      parseLogicalTypeString('UNION("name" VARCHAR, age SMALLINT)'),
    ).toStrictEqual(
      UNION({
        '"name"': VARCHAR,
        age: SMALLINT,
      }),
    );
  });

  test('INTEGER[3]', () => {
    expect(parseLogicalTypeString('INTEGER[3]')).toStrictEqual(
      ARRAY(INTEGER, 3),
    );
  });

  test('STRUCT(a INTEGER, b VARCHAR)[3]', () => {
    expect(
      parseLogicalTypeString('STRUCT(a INTEGER, b VARCHAR)[3]'),
    ).toStrictEqual(
      ARRAY(
        STRUCT({
          a: INTEGER,
          b: VARCHAR,
        }),
        3,
      ),
    );
  });

  test('STRUCT(a INTEGER[3], b VARCHAR[3])', () => {
    expect(
      parseLogicalTypeString('STRUCT(a INTEGER[3], b VARCHAR[3])'),
    ).toStrictEqual(
      STRUCT({
        a: ARRAY(INTEGER, 3),
        b: ARRAY(VARCHAR, 3),
      }),
    );
  });

  test('INTEGER[][3]', () => {
    expect(parseLogicalTypeString('INTEGER[][3]')).toStrictEqual(
      ARRAY(LIST(INTEGER), 3),
    );
  });

  test('INTEGER[3][]', () => {
    expect(parseLogicalTypeString('INTEGER[3][]')).toStrictEqual(
      LIST(ARRAY(INTEGER, 3)),
    );
  });

  test('UUID', () => {
    expect(parseLogicalTypeString('UUID')).toStrictEqual(UUID);
  });
  test('INTERVAL', () => {
    expect(parseLogicalTypeString('INTERVAL')).toStrictEqual(INTERVAL);
  });
  test('VARCHAR', () => {
    expect(parseLogicalTypeString('VARCHAR')).toStrictEqual(VARCHAR);
  });
  test('VARINT', () => {
    expect(parseLogicalTypeString('VARINT')).toStrictEqual(VARINT);
  });
  test('BLOB', () => {
    expect(parseLogicalTypeString('BLOB')).toStrictEqual(BLOB);
  });
  test('BIT', () => {
    expect(parseLogicalTypeString('BIT')).toStrictEqual(BIT);
  });
});
6  ts/pkgs/duckdb-data-types/test/tsconfig.json  Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.test.json",
  "references": [
    { "path": "../src" }
  ]
}
34  ts/pkgs/duckdb-data-values/package.json  Normal file
@@ -0,0 +1,34 @@
{
  "name": "@duckdb/data-values",
  "version": "0.0.1",
  "description": "Utilities for representing DuckDB values",
  "type": "module",
  "main": "./out/index.js",
  "module": "./out/index.js",
  "types": "./out/index.d.ts",
  "scripts": {
    "preinstall": "pnpm build:src",
    "build": "tsc -b src test",
    "build:src": "tsc -b src",
    "build:test": "tsc -b test",
    "build:watch": "tsc -b src test --watch",
    "check": "pnpm format:check && pnpm lint",
    "clean": "rimraf out",
    "format:check": "prettier . --ignore-path $(find-up .prettierignore) --check",
    "format:write": "prettier . --ignore-path $(find-up .prettierignore) --write",
    "lint": "pnpm eslint src test",
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "devDependencies": {
    "@eslint/js": "^9.32.0",
    "eslint": "^9.32.0",
    "find-up-cli": "^6.0.0",
    "prettier": "^3.6.2",
    "rimraf": "^6.0.1",
    "typescript": "^5.8.3",
    "typescript-eslint": "^8.38.0",
    "vite": "^6.3.6",
    "vitest": "^3.2.4"
  }
}
23  ts/pkgs/duckdb-data-values/src/DuckDBArrayValue.ts  Normal file
@@ -0,0 +1,23 @@
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
import { DuckDBValue } from './DuckDBValue.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBArrayValue extends SpecialDuckDBValue {
  public readonly values: readonly DuckDBValue[];

  constructor(values: readonly DuckDBValue[]) {
    super();
    this.values = values;
  }

  public toDuckDBString(): string {
    const valueStrings = this.values.map(displayStringForDuckDBValue);
    return `[${valueStrings.join(', ')}]`;
  }

  public toJson(): Json {
    return this.values.map(jsonFromDuckDBValue);
  }
}
123  ts/pkgs/duckdb-data-values/src/DuckDBBitValue.ts  Normal file
@@ -0,0 +1,123 @@
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBBitValue extends SpecialDuckDBValue {
  public readonly data: Uint8Array;

  constructor(data: Uint8Array) {
    super();
    this.data = data;
  }

  public padding(): number {
    return this.data[0];
  }

  public get length(): number {
    return (this.data.length - 1) * 8 - this.padding();
  }

  public getBool(index: number): boolean {
    const offset = index + this.padding();
    const dataIndex = Math.floor(offset / 8) + 1;
    const byte = this.data[dataIndex] >> (7 - (offset % 8));
    return (byte & 1) !== 0;
  }

  public toBools(): boolean[] {
    const bools: boolean[] = [];
    const length = this.length;
    for (let i = 0; i < length; i++) {
      bools.push(this.getBool(i));
    }
    return bools;
  }

  public getBit(index: number): 0 | 1 {
    return this.getBool(index) ? 1 : 0;
  }

  public toBits(): number[] {
    const bits: number[] = [];
    const length = this.length;
    for (let i = 0; i < length; i++) {
      bits.push(this.getBit(i));
    }
    return bits;
  }

  public toDuckDBString(): string {
    const length = this.length;
    const chars = Array.from<string>({ length });
    for (let i = 0; i < length; i++) {
      chars[i] = this.getBool(i) ? '1' : '0';
    }
    return chars.join('');
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }

  public static fromString(str: string, on: string = '1'): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(
      str.length,
      (i) => str[i] === on,
    );
  }

  public static fromBits(
    bits: readonly number[],
    on: number = 1,
  ): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(
      bits.length,
      (i) => bits[i] === on,
    );
  }

  public static fromBools(bools: readonly boolean[]): DuckDBBitValue {
    return DuckDBBitValue.fromLengthAndPredicate(bools.length, (i) => bools[i]);
  }

  public static fromLengthAndPredicate(
    length: number,
    predicate: (index: number) => boolean,
  ): DuckDBBitValue {
    const byteCount = Math.ceil(length / 8) + 1;
    const paddingBitCount = (8 - (length % 8)) % 8;

    const data = new Uint8Array(byteCount);
    let byteIndex = 0;

    // first byte contains count of padding bits
    data[byteIndex++] = paddingBitCount;

    let byte = 0;
    let byteBit = 0;

    // padding consists of 1s in MSB of second byte
    while (byteBit < paddingBitCount) {
      byte <<= 1;
      byte |= 1;
      byteBit++;
    }

    let bitIndex = 0;

    while (byteIndex < byteCount) {
      while (byteBit < 8) {
        byte <<= 1;
        if (predicate(bitIndex++)) {
          byte |= 1;
        }
        byteBit++;
      }
      data[byteIndex++] = byte;
      byte = 0;
      byteBit = 0;
    }

    return new DuckDBBitValue(data);
  }
}
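Reviewer note (illustrative, not part of the diff): tracing the layout for a 5-bit value — one padding-count byte, then a data byte whose 3 most significant bits are filler 1s:

import { DuckDBBitValue } from './DuckDBBitValue.js';

const bits = DuckDBBitValue.fromString('10101');
// bits.data is Uint8Array [3, 0b11110101]: byte 0 holds the padding count,
// byte 1 holds three 1-padding bits followed by the payload 10101
bits.toDuckDBString(); // '10101'
bits.getBool(0); // true — reads skip the padding via index + padding()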
20  ts/pkgs/duckdb-data-values/src/DuckDBBlobValue.ts  Normal file
@@ -0,0 +1,20 @@
import { stringFromBlob } from './conversion/stringFromBlob.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBBlobValue extends SpecialDuckDBValue {
  public readonly bytes: Uint8Array;

  constructor(bytes: Uint8Array) {
    super();
    this.bytes = bytes;
  }

  public toDuckDBString(): string {
    return stringFromBlob(this.bytes);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
20  ts/pkgs/duckdb-data-values/src/DuckDBDateValue.ts  Normal file
@@ -0,0 +1,20 @@
import { getDuckDBDateStringFromDays } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBDateValue extends SpecialDuckDBValue {
  public readonly days: number;

  constructor(days: number) {
    super();
    this.days = days;
  }

  public toDuckDBString(): string {
    return getDuckDBDateStringFromDays(this.days);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
38  ts/pkgs/duckdb-data-values/src/DuckDBDecimalValue.ts  Normal file
@@ -0,0 +1,38 @@
import {
  DuckDBDecimalFormatOptions,
  stringFromDecimal,
} from './conversion/stringFromDecimal.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBDecimalValue extends SpecialDuckDBValue {
  public readonly scaledValue: bigint;

  public readonly scale: number;

  constructor(scaledValue: bigint, scale: number) {
    super();
    this.scaledValue = scaledValue;
    this.scale = scale;
  }

  public toDuckDBString(): string {
    return stringFromDecimal(this.scaledValue, this.scale);
  }

  /** Returns a string representation appropriate to the host environment's current locale. */
  public toLocaleString(
    locales?: string | string[],
    options?: DuckDBDecimalFormatOptions,
  ): string {
    return stringFromDecimal(this.scaledValue, this.scale, {
      locales,
      options,
    });
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
26  ts/pkgs/duckdb-data-values/src/DuckDBIntervalValue.ts  Normal file
@@ -0,0 +1,26 @@
import { getDuckDBIntervalString } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBIntervalValue extends SpecialDuckDBValue {
  public readonly months: number;

  public readonly days: number;

  public readonly microseconds: bigint;

  constructor(months: number, days: number, microseconds: bigint) {
    super();
    this.months = months;
    this.days = days;
    this.microseconds = microseconds;
  }

  public toDuckDBString(): string {
    return getDuckDBIntervalString(this.months, this.days, this.microseconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
23  ts/pkgs/duckdb-data-values/src/DuckDBListValue.ts  Normal file
@@ -0,0 +1,23 @@
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
import { DuckDBValue } from './DuckDBValue.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBListValue extends SpecialDuckDBValue {
  public readonly values: readonly DuckDBValue[];

  constructor(values: readonly DuckDBValue[]) {
    super();
    this.values = values;
  }

  public toDuckDBString(): string {
    const valueStrings = this.values.map(displayStringForDuckDBValue);
    return `[${valueStrings.join(', ')}]`;
  }

  public toJson(): Json {
    return this.values.map(jsonFromDuckDBValue);
  }
}
6  ts/pkgs/duckdb-data-values/src/DuckDBMapEntry.ts  Normal file
@@ -0,0 +1,6 @@
import { DuckDBValue } from './DuckDBValue.js';

export interface DuckDBMapEntry {
  readonly key: DuckDBValue;
  readonly value: DuckDBValue;
}
33  ts/pkgs/duckdb-data-values/src/DuckDBMapValue.ts  Normal file
@@ -0,0 +1,33 @@
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
import { DuckDBMapEntry } from './DuckDBMapEntry.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBMapValue extends SpecialDuckDBValue {
  public readonly entries: readonly DuckDBMapEntry[];

  constructor(entries: readonly DuckDBMapEntry[]) {
    super();
    this.entries = entries;
  }

  public toDuckDBString(): string {
    const entryStrings = this.entries.map(
      ({ key, value }) =>
        `${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue(
          value,
        )}`,
    );
    return `{${entryStrings.join(', ')}}`;
  }

  public toJson(): Json {
    const result: Json = {};
    for (const { key, value } of this.entries) {
      const keyString = displayStringForDuckDBValue(key);
      result[keyString] = jsonFromDuckDBValue(value);
    }
    return result;
  }
}
6  ts/pkgs/duckdb-data-values/src/DuckDBStructEntry.ts  Normal file
@@ -0,0 +1,6 @@
import { DuckDBValue } from './DuckDBValue.js';

export interface DuckDBStructEntry {
  readonly key: string;
  readonly value: DuckDBValue;
}
33  ts/pkgs/duckdb-data-values/src/DuckDBStructValue.ts  Normal file
@@ -0,0 +1,33 @@
import { displayStringForDuckDBValue } from './conversion/displayStringForDuckDBValue.js';
import { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
import { DuckDBStructEntry } from './DuckDBStructEntry.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBStructValue extends SpecialDuckDBValue {
  public readonly entries: readonly DuckDBStructEntry[];

  constructor(entries: readonly DuckDBStructEntry[]) {
    super();
    this.entries = entries;
  }

  public toDuckDBString(): string {
    const entryStrings = this.entries.map(
      ({ key, value }) =>
        `${displayStringForDuckDBValue(key)}: ${displayStringForDuckDBValue(
          value,
        )}`,
    );
    return `{${entryStrings.join(', ')}}`;
  }

  public toJson(): Json {
    const result: Json = {};
    for (const { key, value } of this.entries) {
      const keyString = displayStringForDuckDBValue(key);
      result[keyString] = jsonFromDuckDBValue(value);
    }
    return result;
  }
}
42  ts/pkgs/duckdb-data-values/src/DuckDBTimeTZValue.ts  Normal file
@@ -0,0 +1,42 @@
import {
  getDuckDBTimeStringFromMicrosecondsInDay,
  getOffsetStringFromSeconds,
} from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimeTZValue extends SpecialDuckDBValue {
  public readonly micros: bigint;
  public readonly offset: number;

  constructor(micros: bigint, offset: number) {
    super();
    this.micros = micros;
    this.offset = offset;
  }

  public toDuckDBString(): string {
    return `${getDuckDBTimeStringFromMicrosecondsInDay(
      this.micros,
    )}${getOffsetStringFromSeconds(this.offset)}`;
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }

  private static TimeBits = 40;
  private static OffsetBits = 24;
  private static MaxOffset = 16 * 60 * 60 - 1; // ±15:59:59 = 57599 seconds

  public static fromBits(bits: bigint): DuckDBTimeTZValue {
    const micros = BigInt.asUintN(
      DuckDBTimeTZValue.TimeBits,
      bits >> BigInt(DuckDBTimeTZValue.OffsetBits),
    );
    const offset =
      DuckDBTimeTZValue.MaxOffset -
      Number(BigInt.asUintN(DuckDBTimeTZValue.OffsetBits, bits));
    return new DuckDBTimeTZValue(micros, offset);
  }
}
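Reviewer note (illustrative, not part of the diff): fromBits unpacks the encoding this class assumes for TIME_TZ — microseconds of the day in the upper 40 bits, and the offset stored inverted (MaxOffset - offset) in the lower 24 bits. Packing by hand under those assumptions:

const micros = 45296000000n; // 12:34:56 as (12*3600 + 34*60 + 56) * 1_000_000 µs
const offsetSeconds = 2 * 60 * 60; // +02:00
const maxOffset = 16 * 60 * 60 - 1; // mirrors MaxOffset above
const packed = (micros << 24n) | BigInt(maxOffset - offsetSeconds);
// DuckDBTimeTZValue.fromBits(packed) recovers micros === 45296000000n
// and offset === 7200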
20  ts/pkgs/duckdb-data-values/src/DuckDBTimeValue.ts  Normal file
@@ -0,0 +1,20 @@
import { getDuckDBTimeStringFromMicrosecondsInDay } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimeValue extends SpecialDuckDBValue {
  public readonly microseconds: bigint;

  constructor(microseconds: bigint) {
    super();
    this.microseconds = microseconds;
  }

  public toDuckDBString(): string {
    return getDuckDBTimeStringFromMicrosecondsInDay(this.microseconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
@@ -0,0 +1,22 @@
import { getDuckDBTimestampStringFromMicroseconds } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimestampMicrosecondsValue extends SpecialDuckDBValue {
  public readonly microseconds: bigint;

  constructor(microseconds: bigint) {
    super();
    this.microseconds = microseconds;
  }

  public toDuckDBString(): string {
    return getDuckDBTimestampStringFromMicroseconds(this.microseconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}

export type DuckDBTimestamp = DuckDBTimestampMicrosecondsValue;
@@ -0,0 +1,20 @@
import { getDuckDBTimestampStringFromMilliseconds } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimestampMillisecondsValue extends SpecialDuckDBValue {
  public readonly milliseconds: bigint;

  constructor(milliseconds: bigint) {
    super();
    this.milliseconds = milliseconds;
  }

  public toDuckDBString(): string {
    return getDuckDBTimestampStringFromMilliseconds(this.milliseconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
@@ -0,0 +1,20 @@
import { getDuckDBTimestampStringFromNanoseconds } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimestampNanosecondsValue extends SpecialDuckDBValue {
  public readonly nanoseconds: bigint;

  constructor(nanoseconds: bigint) {
    super();
    this.nanoseconds = nanoseconds;
  }

  public toDuckDBString(): string {
    return getDuckDBTimestampStringFromNanoseconds(this.nanoseconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
@@ -0,0 +1,20 @@
import { getDuckDBTimestampStringFromSeconds } from './conversion/dateTimeStringConversion.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimestampSecondsValue extends SpecialDuckDBValue {
  public readonly seconds: bigint;

  constructor(seconds: bigint) {
    super();
    this.seconds = seconds;
  }

  public toDuckDBString(): string {
    return getDuckDBTimestampStringFromSeconds(this.seconds);
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
24  ts/pkgs/duckdb-data-values/src/DuckDBTimestampTZValue.ts  Normal file
@@ -0,0 +1,24 @@
import { getDuckDBTimestampStringFromMicroseconds } from './conversion/dateTimeStringConversion.js';
import { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBTimestampTZValue extends SpecialDuckDBValue {
  public readonly microseconds: bigint;

  constructor(microseconds: bigint) {
    super();
    this.microseconds = microseconds;
  }

  public toDuckDBString(toStringOptions?: DuckDBToStringOptions): string {
    return getDuckDBTimestampStringFromMicroseconds(
      this.microseconds,
      toStringOptions?.timezoneOffsetInMinutes || 0,
    );
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }
}
3  ts/pkgs/duckdb-data-values/src/DuckDBToStringOptions.ts  Normal file
@@ -0,0 +1,3 @@
export interface DuckDBToStringOptions {
  timezoneOffsetInMinutes?: number;
}
48  ts/pkgs/duckdb-data-values/src/DuckDBUUIDValue.ts  Normal file
@@ -0,0 +1,48 @@
import { hexFromBlob } from './conversion/hexFromBlob.js';
import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export class DuckDBUUIDValue extends SpecialDuckDBValue {
  public readonly bytes: Uint8Array;

  constructor(bytes: Uint8Array) {
    super();
    this.bytes = bytes;
  }

  public toDuckDBString(): string {
    if (this.bytes.length !== 16) {
      throw new Error('Invalid UUID bytes length');
    }

    // Insert dashes to format the UUID
    return `${hexFromBlob(this.bytes, 0, 4)}-${hexFromBlob(this.bytes, 4, 6)}-${hexFromBlob(this.bytes, 6, 8)}-${hexFromBlob(this.bytes, 8, 10)}-${hexFromBlob(this.bytes, 10, 16)}`;
  }

  public toJson(): Json {
    return this.toDuckDBString();
  }

  /**
   * Create a DuckDBUUIDValue value from a HUGEINT as stored by DuckDB.
   *
   * UUID values are stored with their MSB flipped so their numeric ordering matches their string ordering.
   */
  public static fromStoredHugeint(hugeint: bigint): DuckDBUUIDValue {
    // Flip the MSB and truncate to 128 bits to extract the represented unsigned 128-bit value.
    const uint128 =
      (hugeint ^ 0x80000000000000000000000000000000n) &
      0xffffffffffffffffffffffffffffffffn;
    return DuckDBUUIDValue.fromUint128(uint128);
  }

  /** Create a DuckDBUUIDValue value from an unsigned 128-bit integer in a JS BigInt. */
  public static fromUint128(uint128: bigint): DuckDBUUIDValue {
    const bytes = new Uint8Array(16);
    const dv = new DataView(bytes.buffer);
    // Write the unsigned 128-bit integer to the buffer in big endian format.
    dv.setBigUint64(0, BigInt.asUintN(64, uint128 >> BigInt(64)), false);
    dv.setBigUint64(8, BigInt.asUintN(64, uint128), false);
    return new DuckDBUUIDValue(bytes);
  }
}
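Reviewer note (illustrative, not part of the diff): flipping the MSB maps the signed HUGEINT range onto the unsigned UUID range, so numeric order matches string order — HUGEINT 0, the signed midpoint, becomes the UUID starting with 8, while the most negative HUGEINT becomes all zeros:

import { DuckDBUUIDValue } from './DuckDBUUIDValue.js';

DuckDBUUIDValue.fromStoredHugeint(0n).toDuckDBString();
// '80000000-0000-0000-0000-000000000000'
DuckDBUUIDValue.fromStoredHugeint(-(2n ** 127n)).toDuckDBString();
// '00000000-0000-0000-0000-000000000000'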
9  ts/pkgs/duckdb-data-values/src/DuckDBValue.ts  Normal file
@@ -0,0 +1,9 @@
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

export type DuckDBValue =
  | null
  | boolean
  | number
  | string
  | bigint // TODO: Should types requiring bigint be SpecialDuckDBValues?
  | SpecialDuckDBValue;
7  ts/pkgs/duckdb-data-values/src/Json.ts  Normal file
@@ -0,0 +1,7 @@
export type Json =
  | null
  | boolean
  | number
  | string
  | Json[]
  | { [key: string]: Json };

15 ts/pkgs/duckdb-data-values/src/SpecialDuckDBValue.ts Normal file
@@ -0,0 +1,15 @@
import { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
import { Json } from './Json.js';

export abstract class SpecialDuckDBValue {
  // The presence of this function can be used to identify SpecialDuckDBValue objects.
  public abstract toDuckDBString(
    toStringOptions?: DuckDBToStringOptions,
  ): string;

  public toString(): string {
    return this.toDuckDBString();
  }

  public abstract toJson(): Json;
}
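
To illustrate the contract, a minimal hypothetical subclass (not part of this diff); the real subclasses in this package, such as DuckDBUUIDValue above, follow the same shape:

import { Json } from './Json.js';
import { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

class ExampleValue extends SpecialDuckDBValue {
  // Render the value as DuckDB would display it; the options are unused here.
  public toDuckDBString(): string {
    return 'example';
  }
  public toJson(): Json {
    return this.toDuckDBString();
  }
}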

@@ -0,0 +1,264 @@
const DAYS_IN_400_YEARS = 146097; // (((365 * 4 + 1) * 25) - 1) * 4 + 1
const MILLISECONDS_PER_DAY_NUM = 86400000; // 1000 * 60 * 60 * 24

const MICROSECONDS_PER_SECOND = BigInt(1000000);
const MICROSECONDS_PER_MILLISECOND = BigInt(1000);
const NANOSECONDS_PER_MICROSECOND = BigInt(1000);
const SECONDS_PER_MINUTE = BigInt(60);
const MINUTES_PER_HOUR = BigInt(60);
const MICROSECONDS_PER_DAY = BigInt(86400000000); // 24 * 60 * 60 * 1000000

const NEGATIVE_INFINITY_TIMESTAMP = BigInt('-9223372036854775807'); // -(2^63-1)
const POSITIVE_INFINITY_TIMESTAMP = BigInt('9223372036854775807'); // 2^63-1

export function getDuckDBDateStringFromYearMonthDay(
  year: number,
  month: number,
  dayOfMonth: number,
): string {
  const yearStr = String(Math.abs(year)).padStart(4, '0');
  const monthStr = String(month).padStart(2, '0');
  const dayOfMonthStr = String(dayOfMonth).padStart(2, '0');
  return `${yearStr}-${monthStr}-${dayOfMonthStr}${year < 0 ? ' (BC)' : ''}`;
}

export function getDuckDBDateStringFromDays(days: number): string {
  const absDays = Math.abs(days);
  const sign = days < 0 ? -1 : 1;
  // 400 years is the shortest interval with a fixed number of days. (Leap years and different-length months can
  // result in shorter intervals having different numbers of days.) By separating the number of 400-year intervals
  // from the interval covered by the remaining days, we can guarantee that the date resulting from shifting the
  // epoch by the remaining interval is within the valid range of the JS Date object. This allows us to use JS Date
  // to calculate the year, month, and day of month for the date represented by the remaining interval, thus
  // accounting for leap years and different-length months. We can then safely add back the years from the 400-year
  // intervals, because the month and day of month won't change when a date is shifted by a whole number of such
  // intervals.
  const num400YearIntervals = Math.floor(absDays / DAYS_IN_400_YEARS);
  const yearsFrom400YearIntervals = sign * num400YearIntervals * 400;
  const absDaysFromRemainingInterval = absDays % DAYS_IN_400_YEARS;
  const millisecondsFromRemainingInterval =
    sign * absDaysFromRemainingInterval * MILLISECONDS_PER_DAY_NUM;
  const date = new Date(millisecondsFromRemainingInterval);
  let year = yearsFrom400YearIntervals + date.getUTCFullYear();
  if (year < 0) {
    year--; // correct for non-existence of year zero
  }
  const month = date.getUTCMonth() + 1; // getUTCMonth returns a zero-indexed month, but we want a one-indexed month for display
  const dayOfMonth = date.getUTCDate(); // getUTCDate returns a one-indexed day-of-month
  return getDuckDBDateStringFromYearMonthDay(year, month, dayOfMonth);
}

export function getTimezoneOffsetString(
  timezoneOffsetInMinutes?: number,
): string | undefined {
  if (timezoneOffsetInMinutes === undefined) {
    return undefined;
  }
  const negative = timezoneOffsetInMinutes < 0;
  const positiveMinutes = Math.abs(timezoneOffsetInMinutes);
  const minutesPart = positiveMinutes % 60;
  const hoursPart = Math.floor(positiveMinutes / 60);
  const minutesStr =
    minutesPart !== 0 ? String(minutesPart).padStart(2, '0') : '';
  const hoursStr = String(hoursPart).padStart(2, '0');
  return `${negative ? '-' : '+'}${hoursStr}${minutesStr ? `:${minutesStr}` : ''}`;
}

export function getAbsoluteOffsetStringFromParts(
  hoursPart: number,
  minutesPart: number,
  secondsPart: number,
): string {
  const hoursStr = String(hoursPart).padStart(2, '0');
  const minutesStr =
    minutesPart !== 0 || secondsPart !== 0
      ? String(minutesPart).padStart(2, '0')
      : '';
  const secondsStr =
    secondsPart !== 0 ? String(secondsPart).padStart(2, '0') : '';
  let result = hoursStr;
  if (minutesStr) {
    result += `:${minutesStr}`;
    if (secondsStr) {
      result += `:${secondsStr}`;
    }
  }
  return result;
}

export function getOffsetStringFromAbsoluteSeconds(
  absoluteOffsetInSeconds: number,
): string {
  const secondsPart = absoluteOffsetInSeconds % 60;
  const minutes = Math.floor(absoluteOffsetInSeconds / 60);
  const minutesPart = minutes % 60;
  const hoursPart = Math.floor(minutes / 60);
  return getAbsoluteOffsetStringFromParts(hoursPart, minutesPart, secondsPart);
}

export function getOffsetStringFromSeconds(offsetInSeconds: number): string {
  const negative = offsetInSeconds < 0;
  const absoluteOffsetInSeconds = negative ? -offsetInSeconds : offsetInSeconds;
  const absoluteString = getOffsetStringFromAbsoluteSeconds(
    absoluteOffsetInSeconds,
  );
  return `${negative ? '-' : '+'}${absoluteString}`;
}

export function getDuckDBTimeStringFromParts(
  hoursPart: bigint,
  minutesPart: bigint,
  secondsPart: bigint,
  microsecondsPart: bigint,
): string {
  const hoursStr = String(hoursPart).padStart(2, '0');
  const minutesStr = String(minutesPart).padStart(2, '0');
  const secondsStr = String(secondsPart).padStart(2, '0');
  const microsecondsStr = String(microsecondsPart)
    .padStart(6, '0')
    .replace(/0+$/, '');
  return `${hoursStr}:${minutesStr}:${secondsStr}${
    microsecondsStr.length > 0 ? `.${microsecondsStr}` : ''
  }`;
}

export function getDuckDBTimeStringFromPositiveMicroseconds(
  positiveMicroseconds: bigint,
): string {
  const microsecondsPart = positiveMicroseconds % MICROSECONDS_PER_SECOND;
  const seconds = positiveMicroseconds / MICROSECONDS_PER_SECOND;
  const secondsPart = seconds % SECONDS_PER_MINUTE;
  const minutes = seconds / SECONDS_PER_MINUTE;
  const minutesPart = minutes % MINUTES_PER_HOUR;
  const hoursPart = minutes / MINUTES_PER_HOUR;
  return getDuckDBTimeStringFromParts(
    hoursPart,
    minutesPart,
    secondsPart,
    microsecondsPart,
  );
}

export function getDuckDBTimeStringFromMicrosecondsInDay(
  microsecondsInDay: bigint,
): string {
  const positiveMicroseconds =
    microsecondsInDay < 0
      ? microsecondsInDay + MICROSECONDS_PER_DAY
      : microsecondsInDay;
  return getDuckDBTimeStringFromPositiveMicroseconds(positiveMicroseconds);
}

export function getDuckDBTimeStringFromMicroseconds(
  microseconds: bigint,
): string {
  const negative = microseconds < 0;
  const positiveMicroseconds = negative ? -microseconds : microseconds;
  const positiveString =
    getDuckDBTimeStringFromPositiveMicroseconds(positiveMicroseconds);
  return negative ? `-${positiveString}` : positiveString;
}

export function getDuckDBTimestampStringFromDaysAndMicroseconds(
  days: bigint,
  microsecondsInDay: bigint,
  timezonePart?: string,
): string {
  // This conversion of BigInt to Number is safe, because the largest absolute value that `days` can have is
  // 106751991, which fits without loss of precision in a JS Number. (106751991 = (2^63-1) / MICROSECONDS_PER_DAY)
  const dateStr = getDuckDBDateStringFromDays(Number(days));
  const timeStr = getDuckDBTimeStringFromMicrosecondsInDay(microsecondsInDay);
  return `${dateStr} ${timeStr}${timezonePart ?? ''}`;
}

export function getDuckDBTimestampStringFromMicroseconds(
  microseconds: bigint,
  timezoneOffsetInMinutes?: number,
): string {
  // Note that -infinity and infinity are only representable in TIMESTAMP (and TIMESTAMPTZ), not the other timestamp
  // variants. This is by design and matches DuckDB.
  if (microseconds === NEGATIVE_INFINITY_TIMESTAMP) {
    return '-infinity';
  }
  if (microseconds === POSITIVE_INFINITY_TIMESTAMP) {
    return 'infinity';
  }
  const offsetMicroseconds =
    timezoneOffsetInMinutes !== undefined
      ? microseconds +
        BigInt(timezoneOffsetInMinutes) *
          MICROSECONDS_PER_SECOND *
          SECONDS_PER_MINUTE
      : microseconds;
  let days = offsetMicroseconds / MICROSECONDS_PER_DAY;
  let microsecondsPart = offsetMicroseconds % MICROSECONDS_PER_DAY;
  if (microsecondsPart < 0) {
    days--;
    microsecondsPart += MICROSECONDS_PER_DAY;
  }
  return getDuckDBTimestampStringFromDaysAndMicroseconds(
    days,
    microsecondsPart,
    getTimezoneOffsetString(timezoneOffsetInMinutes),
  );
}

export function getDuckDBTimestampStringFromSeconds(seconds: bigint): string {
  return getDuckDBTimestampStringFromMicroseconds(
    seconds * MICROSECONDS_PER_SECOND,
  );
}

export function getDuckDBTimestampStringFromMilliseconds(
  milliseconds: bigint,
): string {
  return getDuckDBTimestampStringFromMicroseconds(
    milliseconds * MICROSECONDS_PER_MILLISECOND,
  );
}

export function getDuckDBTimestampStringFromNanoseconds(
  nanoseconds: bigint,
): string {
  // Note that this division causes loss of precision. This matches the behavior of DuckDB. It's important that this
  // precision loss happen before the negative correction in getDuckDBTimestampStringFromMicroseconds, otherwise
  // off-by-one errors can occur.
  return getDuckDBTimestampStringFromMicroseconds(
    nanoseconds / NANOSECONDS_PER_MICROSECOND,
  );
}

// Assumes baseUnit can be pluralized by adding an 's'.
function numberAndUnit(value: number, baseUnit: string): string {
  return `${value} ${baseUnit}${value !== 1 ? 's' : ''}`;
}

export function getDuckDBIntervalString(
  months: number,
  days: number,
  microseconds: bigint,
): string {
  const parts: string[] = [];
  if (months !== 0) {
    const sign = months < 0 ? -1 : 1;
    const absMonths = Math.abs(months);
    const absYears = Math.floor(absMonths / 12);
    const years = sign * absYears;
    const extraMonths = sign * (absMonths - absYears * 12);
    if (years !== 0) {
      parts.push(numberAndUnit(years, 'year'));
    }
    if (extraMonths !== 0) {
      parts.push(numberAndUnit(extraMonths, 'month'));
    }
  }
  if (days !== 0) {
    parts.push(numberAndUnit(days, 'day'));
  }
  if (microseconds !== BigInt(0)) {
    parts.push(getDuckDBTimeStringFromMicroseconds(microseconds));
  }
  if (parts.length > 0) {
    return parts.join(' ');
  }
  return '00:00:00';
}
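
For orientation, a few outputs of these helpers. The date and interval inputs mirror the tests later in this diff; the timezone offset is a made-up illustration:

getDuckDBDateStringFromDays(0); // '1970-01-01' (the epoch)
getDuckDBDateStringFromDays(19643); // '2023-10-13'
getTimezoneOffsetString(-90); // '-01:30'
getDuckDBIntervalString(24 + 3, 5, 25873000017n); // 7h 11m 13.000017s of micros
// -> '2 years 3 months 5 days 07:11:13.000017'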

@@ -0,0 +1,11 @@
import { DuckDBValue } from '../DuckDBValue.js';

export function displayStringForDuckDBValue(value: DuckDBValue): string {
  if (value == null) {
    return 'NULL';
  }
  if (typeof value === 'string') {
    // Double every embedded single quote (replaceAll, not replace, so all occurrences are escaped).
    return `'${value.replaceAll(`'`, `''`)}'`;
  }
  return String(value);
}
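
A brief illustration of the quoting behavior (inputs chosen for illustration):

displayStringForDuckDBValue(null); // 'NULL'
displayStringForDuckDBValue(42); // '42'
displayStringForDuckDBValue(`it's`); // "'it''s'" (the embedded quote is doubled)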

@@ -0,0 +1,34 @@
/**
 * Returns the JS bigint value represented by a byte array containing a VARINT in DuckDB's internal format.
 *
 * DuckDB stores VARINTs as an array of bytes consisting of a three-byte header followed by a variable number of bytes
 * (at least one). The header specifies the number of bytes after the header, and whether the number is positive or
 * negative. The bytes after the header specify the absolute value of the number, in big-endian format.
 *
 * The sign of the number is determined by the MSB of the header, which is 1 for positive and 0 for negative. Negative
 * numbers also have all bytes of both the header and value inverted. (For negative numbers, the MSB is 0 after this
 * inversion. Put another way: the MSB of the header is always 1, but it's inverted for negative numbers.)
 */
export function getVarIntFromBytes(bytes: Uint8Array): bigint {
  const firstByte = bytes[0];
  const positive = (firstByte & 0x80) > 0;
  const uint64Mask = positive ? 0n : 0xffffffffffffffffn;
  const uint8Mask = positive ? 0 : 0xff;
  const dv = new DataView(
    bytes.buffer,
    bytes.byteOffset + 3,
    bytes.byteLength - 3,
  );
  const lastUint64Offset = dv.byteLength - 8;
  let offset = 0;
  let result = 0n;
  while (offset <= lastUint64Offset) {
    result = (result << 64n) | (dv.getBigUint64(offset) ^ uint64Mask);
    offset += 8;
  }
  while (offset < dv.byteLength) {
    result = (result << 8n) | BigInt(dv.getUint8(offset) ^ uint8Mask);
    offset += 1;
  }
  return positive ? result : -result;
}
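
Reading the header layout described above literally, two minimal hand-constructed encodings and their decoded values (illustrative bytes, not captured from DuckDB):

// +1: header 0x80 0x00 0x01 (sign bit set, one payload byte), payload 0x01.
getVarIntFromBytes(new Uint8Array([0x80, 0x00, 0x01, 0x01])); // 1n
// -1: the same encoding with every bit inverted.
getVarIntFromBytes(new Uint8Array([0x7f, 0xff, 0xfe, 0xfe])); // -1n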

20 ts/pkgs/duckdb-data-values/src/conversion/hexFromBlob.ts Normal file
@@ -0,0 +1,20 @@
export function hexFromBlob(
  blob: Uint8Array,
  start: number | undefined,
  end: number | undefined,
): string {
  if (start === undefined) {
    start = 0;
  }
  if (end === undefined) {
    end = blob.length;
  }
  let hex = '';

  for (let i = start; i < end; i++) {
    const byte = blob[i];
    // Ensure each byte is 2 hex characters
    hex += (byte < 16 ? '0' : '') + byte.toString(16);
  }
  return hex;
}
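
Example (illustrative bytes):

hexFromBlob(new Uint8Array([0x00, 0x0f, 0xde, 0xad]), undefined, undefined); // '000fdead'
hexFromBlob(new Uint8Array([0x00, 0x0f, 0xde, 0xad]), 2, 4); // 'dead'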

@@ -0,0 +1,16 @@
import { DuckDBValue } from '../DuckDBValue.js';
import { Json } from '../Json.js';
import { SpecialDuckDBValue } from '../SpecialDuckDBValue.js';

export function jsonFromDuckDBValue(value: DuckDBValue): Json {
  if (value === null) {
    return null;
  }
  if (typeof value === 'bigint') {
    return String(value);
  }
  if (value instanceof SpecialDuckDBValue) {
    return value.toJson();
  }
  return value;
}
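
A sketch of the mapping (illustrative inputs; DuckDBListValue is the package's list wrapper, used here only as an example of a SpecialDuckDBValue):

jsonFromDuckDBValue(null); // null
jsonFromDuckDBValue(123); // 123
jsonFromDuckDBValue(9007199254740993n); // '9007199254740993' (bigints become strings)
jsonFromDuckDBValue(new DuckDBListValue([123, null, 'xyz'])); // [123, null, 'xyz'] via toJson()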

17 ts/pkgs/duckdb-data-values/src/conversion/stringFromBlob.ts Normal file
@@ -0,0 +1,17 @@
/** Matches BLOB-to-VARCHAR conversion behavior of DuckDB. */
export function stringFromBlob(bytes: Uint8Array): string {
  let result = '';
  for (const byte of bytes) {
    if (
      byte <= 0x1f ||
      byte === 0x22 /* double quote */ ||
      byte === 0x27 /* single quote */ ||
      byte >= 0x7f
    ) {
      result += `\\x${byte.toString(16).toUpperCase().padStart(2, '0')}`;
    } else {
      result += String.fromCharCode(byte);
    }
  }
  return result;
}
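
Example outputs, matching the DuckDBBlobValue tests later in this diff (the escaped bytes render with a literal backslash):

stringFromBlob(new Uint8Array([0x41, 0x42, 0x43, 0x31, 0x32, 0x33])); // 'ABC123'
stringFromBlob(new Uint8Array([0x22, 0x27, 0x7f])); // '\x22\x27\x7F'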

129 ts/pkgs/duckdb-data-values/src/conversion/stringFromDecimal.ts Normal file
@@ -0,0 +1,129 @@
/**
 * Decimal string formatting.
 *
 * Supports a subset of the functionality of `BigInt.prototype.toLocaleString` for locale-specific formatting.
 */

/*
 * Locale formatting options for DuckDBDecimalValue.
 *
 * This is a subset of the options available for `BigInt.prototype.toLocaleString`.
 */
export interface DuckDBDecimalFormatOptions {
  useGrouping?: boolean;
  minimumFractionDigits?: number;
  maximumFractionDigits?: number;
}

export interface LocaleOptions {
  locales?: string | string[];
  options?: DuckDBDecimalFormatOptions;
}

/*
 * Get the decimal separator for a given locale.
 * Somewhat expensive, so use getCachedDecimalSeparator if you need to call this multiple times.
 */
function getDecimalSeparator(locales?: string | string[]): string {
  const decimalSeparator =
    new Intl.NumberFormat(locales, { useGrouping: false })
      .formatToParts(0.1)
      .find((part) => part.type === 'decimal')?.value ?? '.';
  return decimalSeparator;
}

/*
 * Get the decimal separator for a given locale, and cache the result.
 */
const cachedDecimalSeparators: { [localeKey: string]: string } = {};

function getCachedDecimalSeparator(locales?: string | string[]): string {
  const cacheKey = JSON.stringify(locales);
  if (cacheKey in cachedDecimalSeparators) {
    return cachedDecimalSeparators[cacheKey];
  }
  const decimalSeparator = getDecimalSeparator(locales);
  cachedDecimalSeparators[cacheKey] = decimalSeparator;
  return decimalSeparator;
}

// Helper function to format the whole part of a decimal value.
// Note that we explicitly omit 'minimumFractionDigits' and 'maximumFractionDigits' from the options
// passed to toLocaleString, because they are only relevant for the fractional part of the number, and
// would result in formatting the whole part as a real number, which we don't want.
function formatWholePart(
  localeOptions: LocaleOptions | undefined,
  val: bigint,
): string {
  if (localeOptions) {
    const {
      minimumFractionDigits: _minFD,
      maximumFractionDigits: _maxFD,
      ...restOptions
    } = localeOptions.options ?? {};
    return val.toLocaleString(localeOptions?.locales, restOptions);
  }
  return String(val);
}

// Format the fractional part of a decimal value.
// Note that we must handle minimumFractionDigits and maximumFractionDigits ourselves, and that
// we don't apply `useGrouping` because that only applies to the whole part of the number.
function formatFractionalPart(
  localeOptions: LocaleOptions | undefined,
  val: bigint,
  scale: number,
): string {
  const fractionalPartStr = String(val).padStart(scale, '0');
  if (!localeOptions) {
    return fractionalPartStr;
  }
  const minFracDigits = localeOptions?.options?.minimumFractionDigits ?? 0;
  const maxFracDigits = localeOptions?.options?.maximumFractionDigits ?? 20;

  return fractionalPartStr.padEnd(minFracDigits, '0').slice(0, maxFracDigits);
}

/**
 * Convert a scaled decimal value to a string, possibly using locale-specific formatting.
 */
export function stringFromDecimal(
  scaledValue: bigint,
  scale: number,
  localeOptions?: LocaleOptions,
): string {
  // Decimal values are represented as integers that have been scaled up by a power of ten. The `scale` property of
  // the type is the exponent of the scale factor. For a scale greater than zero, we need to separate out the
  // fractional part by reversing this scaling.
  if (scale > 0) {
    const scaleFactor = BigInt(10) ** BigInt(scale);
    const absScaledValue = scaledValue < 0 ? -scaledValue : scaledValue;

    const prefix = scaledValue < 0 ? '-' : '';

    const wholePartNum = absScaledValue / scaleFactor;
    const wholePartStr = formatWholePart(localeOptions, wholePartNum);

    const fractionalPartNum = absScaledValue % scaleFactor;
    const fractionalPartStr = formatFractionalPart(
      localeOptions,
      fractionalPartNum,
      scale,
    );

    const decimalSeparatorStr = localeOptions
      ? getCachedDecimalSeparator(localeOptions.locales)
      : '.';

    return `${prefix}${wholePartStr}${decimalSeparatorStr}${fractionalPartStr}`;
  }
  // For a scale of zero, there is no fractional part, so a direct string conversion works.
  if (localeOptions) {
    return scaledValue.toLocaleString(
      localeOptions?.locales,
      localeOptions?.options as BigIntToLocaleStringOptions | undefined,
    );
  }
  return String(scaledValue);
}
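
Example conversions; the expectations mirror the DuckDBDecimalValue tests later in this diff, assuming that class delegates its string formatting to stringFromDecimal:

stringFromDecimal(12345n, 3); // '12.345'
stringFromDecimal(-34500n, 7); // '-0.0034500'
stringFromDecimal(98765432109876543210n, 10, {
  locales: 'de-DE',
  options: { useGrouping: true, maximumFractionDigits: 5 },
}); // '9.876.543.210,98765'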

25 ts/pkgs/duckdb-data-values/src/index.ts Normal file
@@ -0,0 +1,25 @@
export { getVarIntFromBytes } from './conversion/getVarIntFromBytes.js';
export { jsonFromDuckDBValue } from './conversion/jsonFromDuckDBValue.js';
export { DuckDBArrayValue } from './DuckDBArrayValue.js';
export { DuckDBBitValue } from './DuckDBBitValue.js';
export { DuckDBBlobValue } from './DuckDBBlobValue.js';
export { DuckDBDateValue } from './DuckDBDateValue.js';
export { DuckDBDecimalValue } from './DuckDBDecimalValue.js';
export { DuckDBIntervalValue } from './DuckDBIntervalValue.js';
export { DuckDBListValue } from './DuckDBListValue.js';
export { DuckDBMapEntry } from './DuckDBMapEntry.js';
export { DuckDBMapValue } from './DuckDBMapValue.js';
export { DuckDBStructEntry } from './DuckDBStructEntry.js';
export { DuckDBStructValue } from './DuckDBStructValue.js';
export { DuckDBTimestampMicrosecondsValue } from './DuckDBTimestampMicrosecondsValue.js';
export { DuckDBTimestampMillisecondsValue } from './DuckDBTimestampMillisecondsValue.js';
export { DuckDBTimestampNanosecondsValue } from './DuckDBTimestampNanosecondsValue.js';
export { DuckDBTimestampSecondsValue } from './DuckDBTimestampSecondsValue.js';
export { DuckDBTimestampTZValue } from './DuckDBTimestampTZValue.js';
export { DuckDBTimeTZValue } from './DuckDBTimeTZValue.js';
export { DuckDBTimeValue } from './DuckDBTimeValue.js';
export { DuckDBToStringOptions } from './DuckDBToStringOptions.js';
export { DuckDBUUIDValue } from './DuckDBUUIDValue.js';
export { DuckDBValue } from './DuckDBValue.js';
export { Json } from './Json.js';
export { SpecialDuckDBValue } from './SpecialDuckDBValue.js';

6 ts/pkgs/duckdb-data-values/src/tsconfig.json Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../../tsconfig.library.json",
  "compilerOptions": {
    "outDir": "../out"
  }
}

49 ts/pkgs/duckdb-data-values/test/DuckDBArrayValue.test.ts Normal file
@@ -0,0 +1,49 @@
import { expect, suite, test } from 'vitest';
import { DuckDBArrayValue } from '../src/DuckDBArrayValue';
import { DuckDBMapValue } from '../src/DuckDBMapValue';

suite('DuckDBArrayValue', () => {
  test('should render an empty array to the correct string', () => {
    expect(new DuckDBArrayValue([]).toString()).toStrictEqual('[]');
  });
  test('should render a single element array to the correct string', () => {
    expect(new DuckDBArrayValue([123]).toString()).toStrictEqual('[123]');
  });
  test('should render a multi-element array to the correct string', () => {
    expect(
      new DuckDBArrayValue(['abc', null, true, '']).toString(),
    ).toStrictEqual(`['abc', NULL, true, '']`);
  });
  test('should render an array with nested arrays to the correct string', () => {
    expect(
      new DuckDBArrayValue([
        new DuckDBArrayValue([]),
        null,
        new DuckDBArrayValue([123, null, 'xyz']),
      ]).toString(),
    ).toStrictEqual(`[[], NULL, [123, NULL, 'xyz']]`);
  });
  test('toJson array with basic values', () => {
    expect(new DuckDBArrayValue([123, 'abc', null]).toJson()).toStrictEqual([
      123,
      'abc',
      null,
    ]);
  });
  test('toJson array with complex values', () => {
    expect(
      new DuckDBArrayValue([
        new DuckDBMapValue([
          { key: 'foo', value: 123 },
          { key: 'bar', value: 'abc' },
        ]),
        new DuckDBArrayValue([123, null, 'xyz']),
        null,
      ]).toJson(),
    ).toStrictEqual([
      { "'foo'": 123, "'bar'": 'abc' },
      [123, null, 'xyz'],
      null,
    ]);
  });
});

33 ts/pkgs/duckdb-data-values/test/DuckDBBitValue.test.ts Normal file
@@ -0,0 +1,33 @@
import { expect, suite, test } from 'vitest';
import { DuckDBBitValue } from '../src/DuckDBBitValue';

suite('DuckDBBitValue', () => {
  test('should render an empty byte array to the correct string', () => {
    expect(new DuckDBBitValue(new Uint8Array([])).toString()).toStrictEqual('');
  });
  test('should render a bit string with no padding to the correct string', () => {
    expect(
      new DuckDBBitValue(new Uint8Array([0x00, 0xf1, 0xe2, 0xd3])).toString(),
    ).toStrictEqual('111100011110001011010011');
  });
  test('should render a bit string with padding to the correct string', () => {
    expect(
      new DuckDBBitValue(new Uint8Array([0x03, 0xf1, 0xe2, 0xd3])).toString(),
    ).toStrictEqual('100011110001011010011');
  });
  test('should round-trip a bit string with no padding', () => {
    expect(
      DuckDBBitValue.fromString('111100011110001011010011').toString(),
    ).toStrictEqual('111100011110001011010011');
  });
  test('should round-trip a bit string with padding', () => {
    expect(
      DuckDBBitValue.fromString('100011110001011010011').toString(),
    ).toStrictEqual('100011110001011010011');
  });
  test('toJson', () => {
    expect(
      DuckDBBitValue.fromString('100011110001011010011').toJson(),
    ).toStrictEqual('100011110001011010011');
  });
});

92 ts/pkgs/duckdb-data-values/test/DuckDBBlobValue.test.ts Normal file
@@ -0,0 +1,92 @@
import { expect, suite, test } from 'vitest';
import { DuckDBBlobValue } from '../src/DuckDBBlobValue';

suite('DuckDBBlobValue', () => {
  test('should render an empty byte array to the correct string', () => {
    expect(new DuckDBBlobValue(new Uint8Array([])).toString()).toStrictEqual(
      '',
    );
  });
  test('should render a byte array to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([0x41, 0x42, 0x43, 0x31, 0x32, 0x33]),
      ).toString(),
    ).toStrictEqual('ABC123');
  });
  test('should render a byte array containing single-digit non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
          0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F',
    );
  });
  test('should render a byte array containing double-digit non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a,
          0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1A\\x1B\\x1C\\x1D\\x1E\\x1F',
    );
  });
  test('should render a byte array containing min printables (including single and double quotes) to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a,
          0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
        ]),
      ).toString(),
    ).toStrictEqual(' !\\x22#$%&\\x27()*+,-./');
  });
  test('should render a byte array containing max printables (including DEL) to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a,
          0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
        ]),
      ).toString(),
    ).toStrictEqual('pqrstuvwxyz{|}~\\x7F');
  });
  test('should render a byte array containing high non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a,
          0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\x80\\x81\\x82\\x83\\x84\\x85\\x86\\x87\\x88\\x89\\x8A\\x8B\\x8C\\x8D\\x8E\\x8F',
    );
  });
  test('should render a byte array containing max non-printables to the correct string', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([
          0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa,
          0xfb, 0xfc, 0xfd, 0xfe, 0xff,
        ]),
      ).toString(),
    ).toStrictEqual(
      '\\xF0\\xF1\\xF2\\xF3\\xF4\\xF5\\xF6\\xF7\\xF8\\xF9\\xFA\\xFB\\xFC\\xFD\\xFE\\xFF',
    );
  });
  test('toJson', () => {
    expect(
      new DuckDBBlobValue(
        new Uint8Array([0x41, 0x42, 0x43, 0x31, 0x32, 0x33]),
      ).toJson(),
    ).toStrictEqual('ABC123');
  });
});

18 ts/pkgs/duckdb-data-values/test/DuckDBDateValue.test.ts Normal file
@@ -0,0 +1,18 @@
import { expect, suite, test } from 'vitest';
import { DuckDBDateValue } from '../src/DuckDBDateValue';

suite('DuckDBDateValue', () => {
  test('should render a normal date value to the correct string', () => {
    expect(new DuckDBDateValue(19643).toString()).toStrictEqual('2023-10-13');
  });
  test('should render the max date value to the correct string', () => {
    expect(new DuckDBDateValue(2 ** 31 - 2).toString()).toStrictEqual(
      '5881580-07-10',
    );
  });
  test('should render the min date value to the correct string', () => {
    expect(new DuckDBDateValue(-(2 ** 31) + 2).toString()).toStrictEqual(
      '5877642-06-25 (BC)',
    );
  });
});

150 ts/pkgs/duckdb-data-values/test/DuckDBDecimalValue.test.ts Normal file
@@ -0,0 +1,150 @@
import { expect, suite, test } from 'vitest';
import { DuckDBDecimalValue } from '../src/DuckDBDecimalValue';

suite('DuckDBDecimalValue', () => {
  test('should render a scaled value of zero with a scale of zero to the correct string', () => {
    expect(new DuckDBDecimalValue(0n, 0).toString()).toStrictEqual('0');
  });
  test('should render a small positive scaled value with a scale of zero to the correct string', () => {
    expect(new DuckDBDecimalValue(7n, 0).toString()).toStrictEqual('7');
  });
  test('should render a small negative scaled value with a scale of zero to the correct string', () => {
    expect(new DuckDBDecimalValue(-7n, 0).toString()).toStrictEqual('-7');
  });
  test('should render a large positive scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(987654321098765432109876543210n, 0).toString(),
    ).toStrictEqual('987654321098765432109876543210');
  });
  test('should render a large negative scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(-987654321098765432109876543210n, 0).toString(),
    ).toStrictEqual('-987654321098765432109876543210');
  });
  test('should render the maximum positive scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        99999999999999999999999999999999999999n,
        0,
      ).toString(),
    ).toStrictEqual('99999999999999999999999999999999999999');
  });
  test('should render the maximum negative scaled value with a scale of zero to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        -99999999999999999999999999999999999999n,
        0,
      ).toString(),
    ).toStrictEqual('-99999999999999999999999999999999999999');
  });

  test('should render a scaled value of zero with a non-zero scale to the correct string', () => {
    expect(new DuckDBDecimalValue(0n, 3).toString()).toStrictEqual('0.000');
  });
  test('should render a small positive scaled value with a non-zero scale to the correct string', () => {
    expect(new DuckDBDecimalValue(12345n, 3).toString()).toStrictEqual(
      '12.345',
    );
  });
  test('should render a small negative scaled value with a non-zero scale to the correct string', () => {
    expect(new DuckDBDecimalValue(-12345n, 3).toString()).toStrictEqual(
      '-12.345',
    );
  });
  test('should render a large positive scaled value with a non-zero scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(987654321098765432109876543210n, 10).toString(),
    ).toStrictEqual('98765432109876543210.9876543210');
  });
  test('should render a large negative scaled value with a non-zero scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(-987654321098765432109876543210n, 10).toString(),
    ).toStrictEqual('-98765432109876543210.9876543210');
  });
  test('should render leading and trailing zeros in the fractional part of a value greater than one correctly', () => {
    expect(new DuckDBDecimalValue(120034500n, 7).toString()).toStrictEqual(
      '12.0034500',
    );
  });
  test('should render leading and trailing zeros in the fractional part of a value less than negative one correctly', () => {
    expect(new DuckDBDecimalValue(-120034500n, 7).toString()).toStrictEqual(
      '-12.0034500',
    );
  });
  test('should render leading and trailing zeros in the fractional part of a value between zero and one correctly', () => {
    expect(new DuckDBDecimalValue(34500n, 7).toString()).toStrictEqual(
      '0.0034500',
    );
  });
  test('should render leading and trailing zeros in the fractional part of a value between zero and negative one correctly', () => {
    expect(new DuckDBDecimalValue(-34500n, 7).toString()).toStrictEqual(
      '-0.0034500',
    );
  });
  test('should render a small positive scaled value with the maximum scale to the correct string', () => {
    expect(new DuckDBDecimalValue(1n, 38).toString()).toStrictEqual(
      '0.00000000000000000000000000000000000001',
    );
  });
  test('should render a small negative scaled value with the maximum scale to the correct string', () => {
    expect(new DuckDBDecimalValue(-1n, 38).toString()).toStrictEqual(
      '-0.00000000000000000000000000000000000001',
    );
  });
  test('should render the maximum positive scaled value with the maximum scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        99999999999999999999999999999999999999n,
        38,
      ).toString(),
    ).toStrictEqual('0.99999999999999999999999999999999999999');
  });
  test('should render the maximum negative scaled value with the maximum scale to the correct string', () => {
    expect(
      new DuckDBDecimalValue(
        -99999999999999999999999999999999999999n,
        38,
      ).toString(),
    ).toStrictEqual('-0.99999999999999999999999999999999999999');
  });

  test('should render a locale string with grouping by default', () => {
    expect(
      new DuckDBDecimalValue(9876543210n, 0).toLocaleString(),
    ).toStrictEqual('9,876,543,210');
  });

  test('should render a European locale with . for grouping', () => {
    expect(
      new DuckDBDecimalValue(9876543210n, 0).toLocaleString('de-DE'),
    ).toStrictEqual('9.876.543.210');
  });

  test('should render a locale string with a specified minimum fraction digits', () => {
    expect(
      new DuckDBDecimalValue(12345n, 3).toLocaleString(undefined, {
        minimumFractionDigits: 5,
      }),
    ).toStrictEqual('12.34500');
  });

  test('should render a locale string with a specified maximum fraction digits', () => {
    expect(
      new DuckDBDecimalValue(12345n, 3).toLocaleString(undefined, {
        maximumFractionDigits: 1,
      }),
    ).toStrictEqual('12.3');
  });

  test('should render a decimal with a large whole part and fractional part in a European locale with the correct grouping and decimal', () => {
    expect(
      new DuckDBDecimalValue(98765432109876543210n, 10).toLocaleString(
        'de-DE',
        {
          useGrouping: true,
          maximumFractionDigits: 5,
        },
      ),
    ).toStrictEqual('9.876.543.210,98765');
  });
});

219 ts/pkgs/duckdb-data-values/test/DuckDBIntervalValue.test.ts Normal file
@@ -0,0 +1,219 @@
import { expect, suite, test } from 'vitest';
import { DuckDBIntervalValue } from '../src/DuckDBIntervalValue';

const MICROS_IN_SEC = 1000000n;
const MICROS_IN_MIN = 60n * MICROS_IN_SEC;
const MICROS_IN_HR = 60n * MICROS_IN_MIN;
const MAX_INT32 = 2n ** 31n - 1n;

suite('DuckDBIntervalValue', () => {
  test('should render an empty interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 0, 0n).toString()).toStrictEqual(
      '00:00:00',
    );
  });

  test('should render a one month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(1, 0, 0n).toString()).toStrictEqual(
      '1 month',
    );
  });
  test('should render a negative one month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(-1, 0, 0n).toString()).toStrictEqual(
      '-1 months',
    );
  });
  test('should render a two month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(2, 0, 0n).toString()).toStrictEqual(
      '2 months',
    );
  });
  test('should render a negative two month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(-2, 0, 0n).toString()).toStrictEqual(
      '-2 months',
    );
  });
  test('should render a one year interval to the correct string', () => {
    expect(new DuckDBIntervalValue(12, 0, 0n).toString()).toStrictEqual(
      '1 year',
    );
  });
  test('should render a negative one year interval to the correct string', () => {
    expect(new DuckDBIntervalValue(-12, 0, 0n).toString()).toStrictEqual(
      '-1 years',
    );
  });
  test('should render a two year interval to the correct string', () => {
    expect(new DuckDBIntervalValue(24, 0, 0n).toString()).toStrictEqual(
      '2 years',
    );
  });
  test('should render a negative two year interval to the correct string', () => {
    expect(new DuckDBIntervalValue(-24, 0, 0n).toString()).toStrictEqual(
      '-2 years',
    );
  });
  test('should render a two year, three month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(24 + 3, 0, 0n).toString()).toStrictEqual(
      '2 years 3 months',
    );
  });
  test('should render a negative two year, three month interval to the correct string', () => {
    expect(new DuckDBIntervalValue(-(24 + 3), 0, 0n).toString()).toStrictEqual(
      '-2 years -3 months',
    );
  });

  test('should render a one day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 1, 0n).toString()).toStrictEqual('1 day');
  });
  test('should render a negative one day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, -1, 0n).toString()).toStrictEqual(
      '-1 days',
    );
  });
  test('should render a two day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 2, 0n).toString()).toStrictEqual(
      '2 days',
    );
  });
  test('should render a negative two day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, -2, 0n).toString()).toStrictEqual(
      '-2 days',
    );
  });
  test('should render a 30 day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 30, 0n).toString()).toStrictEqual(
      '30 days',
    );
  });
  test('should render a 365 day interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 365, 0n).toString()).toStrictEqual(
      '365 days',
    );
  });

  test('should render a one microsecond interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 0, 1n).toString()).toStrictEqual(
      '00:00:00.000001',
    );
  });
  test('should render a negative one microsecond interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 0, -1n).toString()).toStrictEqual(
      '-00:00:00.000001',
    );
  });
  test('should render a large microsecond interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 0, 987654n).toString()).toStrictEqual(
      '00:00:00.987654',
    );
  });
  test('should render a large negative microsecond interval to the correct string', () => {
    expect(new DuckDBIntervalValue(0, 0, -987654n).toString()).toStrictEqual(
      '-00:00:00.987654',
    );
  });
  test('should render a one second interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, MICROS_IN_SEC).toString(),
    ).toStrictEqual('00:00:01');
  });
  test('should render a negative one second interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -MICROS_IN_SEC).toString(),
    ).toStrictEqual('-00:00:01');
  });
  test('should render a 59 second interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, 59n * MICROS_IN_SEC).toString(),
    ).toStrictEqual('00:00:59');
  });
  test('should render a -59 second interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -59n * MICROS_IN_SEC).toString(),
    ).toStrictEqual('-00:00:59');
  });
  test('should render a one minute interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, MICROS_IN_MIN).toString(),
    ).toStrictEqual('00:01:00');
  });
  test('should render a negative one minute interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -MICROS_IN_MIN).toString(),
    ).toStrictEqual('-00:01:00');
  });
  test('should render a 59 minute interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, 59n * MICROS_IN_MIN).toString(),
    ).toStrictEqual('00:59:00');
  });
  test('should render a -59 minute interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -59n * MICROS_IN_MIN).toString(),
    ).toStrictEqual('-00:59:00');
  });
  test('should render a one hour interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, MICROS_IN_HR).toString(),
    ).toStrictEqual('01:00:00');
  });
  test('should render a negative one hour interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -MICROS_IN_HR).toString(),
    ).toStrictEqual('-01:00:00');
  });
  test('should render a 24 hour interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, 24n * MICROS_IN_HR).toString(),
    ).toStrictEqual('24:00:00');
  });
  test('should render a -24 hour interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -24n * MICROS_IN_HR).toString(),
    ).toStrictEqual('-24:00:00');
  });
  test('should render a very large interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, MAX_INT32 * MICROS_IN_HR).toString(),
    ).toStrictEqual('2147483647:00:00');
  });
  test('should render a very large negative interval to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, -MAX_INT32 * MICROS_IN_HR).toString(),
    ).toStrictEqual('-2147483647:00:00');
  });
  test('should render a very large interval with microseconds to the correct string', () => {
    expect(
      new DuckDBIntervalValue(0, 0, MAX_INT32 * MICROS_IN_HR + 1n).toString(),
    ).toStrictEqual('2147483647:00:00.000001');
  });
  test('should render a very large negative interval with microseconds to the correct string', () => {
    expect(
      new DuckDBIntervalValue(
        0,
        0,
        -(MAX_INT32 * MICROS_IN_HR + 1n),
      ).toString(),
    ).toStrictEqual('-2147483647:00:00.000001');
  });

  test('should render an interval with multiple parts to the correct string', () => {
    expect(
      new DuckDBIntervalValue(
        24 + 3,
        5,
        7n * MICROS_IN_HR + 11n * MICROS_IN_MIN + 13n * MICROS_IN_SEC + 17n,
      ).toString(),
    ).toStrictEqual('2 years 3 months 5 days 07:11:13.000017');
  });
  test('should render a negative interval with multiple parts to the correct string', () => {
    expect(
      new DuckDBIntervalValue(
        -(24 + 3),
        -5,
        -(7n * MICROS_IN_HR + 11n * MICROS_IN_MIN + 13n * MICROS_IN_SEC + 17n),
      ).toString(),
    ).toStrictEqual('-2 years -3 months -5 days -07:11:13.000017');
  });
});

45 ts/pkgs/duckdb-data-values/test/DuckDBListValue.test.ts Normal file
@@ -0,0 +1,45 @@
import { expect, suite, test } from 'vitest';
import { DuckDBMapValue } from '../src';
import { DuckDBListValue } from '../src/DuckDBListValue';

suite('DuckDBListValue', () => {
  test('should render an empty list to the correct string', () => {
    expect(new DuckDBListValue([]).toString()).toStrictEqual('[]');
  });
  test('should render a single element list to the correct string', () => {
    expect(new DuckDBListValue([123]).toString()).toStrictEqual('[123]');
  });
  test('should render a multi-element list to the correct string', () => {
    expect(
      new DuckDBListValue(['abc', null, true, '']).toString(),
    ).toStrictEqual(`['abc', NULL, true, '']`);
  });
  test('should render a list with nested lists to the correct string', () => {
    expect(
      new DuckDBListValue([
        new DuckDBListValue([]),
        null,
        new DuckDBListValue([123, null, 'xyz']),
      ]).toString(),
    ).toStrictEqual(`[[], NULL, [123, NULL, 'xyz']]`);
  });
  test('toJson with complex values', () => {
    expect(
      new DuckDBListValue([
        new DuckDBMapValue([
          { key: 'foo', value: 123 },
          { key: 'bar', value: 'abc' },
        ]),
        null,
        new DuckDBMapValue([
          { key: 'foo', value: null },
          { key: 'bar', value: 'xyz' },
        ]),
      ]).toJson(),
    ).toStrictEqual([
      { "'foo'": 123, "'bar'": 'abc' },
      null,
      { "'foo'": null, "'bar'": 'xyz' },
    ]);
  });
});

77 ts/pkgs/duckdb-data-values/test/DuckDBMapValue.test.ts Normal file
@@ -0,0 +1,77 @@
import { expect, suite, test } from 'vitest';
import { DuckDBListValue } from '../src/DuckDBListValue';
import { DuckDBMapValue } from '../src/DuckDBMapValue';

suite('DuckDBMapValue', () => {
  test('should render an empty map to the correct string', () => {
    expect(new DuckDBMapValue([]).toString()).toStrictEqual('{}');
  });
  test('should render a single-entry map to the correct string', () => {
    expect(
      new DuckDBMapValue([{ key: 'x', value: 1 }]).toString(),
    ).toStrictEqual(`{'x': 1}`);
  });
  test('should render a multi-entry map to the correct string', () => {
    expect(
      new DuckDBMapValue([
        { key: 1, value: 42.001 },
        { key: 5, value: -32.1 },
        { key: 3, value: null },
      ]).toString(),
    ).toStrictEqual(`{1: 42.001, 5: -32.1, 3: NULL}`);
  });
  test('should render a multi-entry map with complex key types to the correct string', () => {
    expect(
      new DuckDBMapValue([
        {
          key: new DuckDBListValue(['a', 'b']),
          value: new DuckDBListValue([1.1, 2.2]),
        },
        {
          key: new DuckDBListValue(['c', 'd']),
          value: new DuckDBListValue([3.3, 4.4]),
        },
      ]).toString(),
    ).toStrictEqual(`{['a', 'b']: [1.1, 2.2], ['c', 'd']: [3.3, 4.4]}`);
  });
  test('should render a map with nested maps to the correct string', () => {
    expect(
      new DuckDBMapValue([
        { key: new DuckDBMapValue([]), value: new DuckDBMapValue([]) },
        {
          key: new DuckDBMapValue([{ key: 'key1', value: 'value1' }]),
          value: new DuckDBMapValue([
            { key: 1, value: 42.001 },
            { key: 5, value: -32.1 },
            { key: 3, value: null },
          ]),
        },
      ]).toString(),
    ).toStrictEqual(
      `{{}: {}, {'key1': 'value1'}: {1: 42.001, 5: -32.1, 3: NULL}}`,
    );
  });
  test('toJson basics', () => {
    expect(
      new DuckDBMapValue([
        { key: 'a', value: 1 },
        { key: 'b', value: 2 },
        { key: 'c', value: 3 },
      ]).toJson(),
    ).toStrictEqual({ "'a'": 1, "'b'": 2, "'c'": 3 });
  });
  test('toJson with complex keys and values', () => {
    expect(
      new DuckDBMapValue([
        {
          key: new DuckDBListValue(['a', 'b']),
          value: new DuckDBListValue([1.1, 2.2]),
        },
        {
          key: new DuckDBListValue(['c', 'd']),
          value: new DuckDBListValue([3.3, 4.4]),
        },
      ]).toJson(),
    ).toStrictEqual({ "['a', 'b']": [1.1, 2.2], "['c', 'd']": [3.3, 4.4] });
  });
});
110  ts/pkgs/duckdb-data-values/test/DuckDBStructValue.test.ts  Normal file
@@ -0,0 +1,110 @@
import { expect, suite, test } from 'vitest';
import { DuckDBMapValue } from '../src/DuckDBMapValue';
import { DuckDBStructValue } from '../src/DuckDBStructValue';

suite('DuckDBStructValue', () => {
  test('should render an empty struct to the correct string', () => {
    expect(new DuckDBStructValue([]).toString()).toStrictEqual('{}');
  });
  test('should render a single-entry struct to the correct string', () => {
    expect(
      new DuckDBStructValue([{ key: 'x', value: 1 }]).toString(),
    ).toStrictEqual(`{'x': 1}`);
  });
  test('should render a multi-entry struct to the correct string', () => {
    expect(
      new DuckDBStructValue([
        { key: 'x', value: 1 },
        { key: 'y', value: 2 },
        { key: 'z', value: 3 },
      ]).toString(),
    ).toStrictEqual(`{'x': 1, 'y': 2, 'z': 3}`);
  });
  test('should render a multi-entry struct with different value types to the correct string', () => {
    expect(
      new DuckDBStructValue([
        { key: 'key1', value: 'string' },
        { key: 'key2', value: 1 },
        { key: 'key3', value: 12.345 },
        { key: 'key0', value: null },
      ]).toString(),
    ).toStrictEqual(
      `{'key1': 'string', 'key2': 1, 'key3': 12.345, 'key0': NULL}`,
    );
  });
  test('should render a multi-entry struct with empty keys to the correct string', () => {
    expect(
      new DuckDBStructValue([
        { key: '', value: 2 },
        { key: '', value: 1 },
        { key: '', value: 3 },
      ]).toString(),
    ).toStrictEqual(`{'': 2, '': 1, '': 3}`);
  });
  test('should render a struct with nested structs to the correct string', () => {
    expect(
      new DuckDBStructValue([
        { key: 'empty_struct', value: new DuckDBStructValue([]) },
        {
          key: 'struct',
          value: new DuckDBStructValue([
            { key: 'key1', value: 'string' },
            { key: 'key2', value: 1 },
            { key: 'key3', value: 12.345 },
          ]),
        },
      ]).toString(),
    ).toStrictEqual(
      `{'empty_struct': {}, 'struct': {'key1': 'string', 'key2': 1, 'key3': 12.345}}`,
    );
  });
  test('toJson with simple keys and values', () => {
    expect(
      new DuckDBStructValue([
        { key: 'x', value: 1 },
        { key: 'y', value: 2 },
        { key: 'z', value: 3 },
      ]).toJson(),
    ).toStrictEqual({ "'x'": 1, "'y'": 2, "'z'": 3 });
  });
  test('toJson with nested struct values', () => {
    expect(
      new DuckDBStructValue([
        { key: 'empty_struct', value: new DuckDBStructValue([]) },
        {
          key: 'struct',
          value: new DuckDBStructValue([
            { key: 'key1', value: 'string' },
            { key: 'key2', value: 1 },
            { key: 'key3', value: 12.345 },
          ]),
        },
      ]).toJson(),
    ).toStrictEqual({
      "'empty_struct'": {},
      "'struct'": { "'key1'": 'string', "'key2'": 1, "'key3'": 12.345 },
    });
  });
  test('toJson with nested complex values', () => {
    expect(
      new DuckDBStructValue([
        { key: 'empty_struct', value: new DuckDBStructValue([]) },
        {
          key: 'struct',
          value: new DuckDBStructValue([
            {
              key: 'key1',
              value: new DuckDBMapValue([
                { key: 'foo', value: null },
                { key: 'bar', value: 'xyz' },
              ]),
            },
          ]),
        },
      ]).toJson(),
    ).toStrictEqual({
      "'empty_struct'": {},
      "'struct'": { "'key1'": { "'foo'": null, "'bar'": 'xyz' } },
    });
  });
});
60  ts/pkgs/duckdb-data-values/test/DuckDBTimeTZValue.test.ts  Normal file
@@ -0,0 +1,60 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimeTZValue } from '../src/DuckDBTimeTZValue';

suite('DuckDBTimeTZValue', () => {
  test('should render a normal time value with a positive offset to the correct string', () => {
    expect(
      new DuckDBTimeTZValue(
        ((12n * 60n + 34n) * 60n + 56n) * 1000000n + 789012n,
        (13 * 60 + 24) * 60 + 57,
      ).toString(),
    ).toStrictEqual('12:34:56.789012+13:24:57');
  });
  test('should render a normal time value with millisecond precision with an offset in minutes to the correct string', () => {
    expect(
      new DuckDBTimeTZValue(
        ((12n * 60n + 34n) * 60n + 56n) * 1000000n + 789000n,
        (13 * 60 + 24) * 60,
      ).toString(),
    ).toStrictEqual('12:34:56.789+13:24');
  });
  test('should render a normal time value with second precision with an offset in hours to the correct string', () => {
    expect(
      new DuckDBTimeTZValue(
        ((12n * 60n + 34n) * 60n + 56n) * 1000000n,
        (13 * 60 + 0) * 60,
      ).toString(),
    ).toStrictEqual('12:34:56+13');
  });
  test('should render a zero time value with a zero offset to the correct string', () => {
    expect(new DuckDBTimeTZValue(0n, 0).toString()).toStrictEqual(
      '00:00:00+00',
    );
  });
  test('should render the max value to the correct string', () => {
    expect(
      new DuckDBTimeTZValue(
        ((24n * 60n + 0n) * 60n + 0n) * 1000000n,
        -((15 * 60 + 59) * 60 + 59),
      ).toString(),
    ).toStrictEqual('24:00:00-15:59:59');
  });
  test('should render the min value to the correct string', () => {
    expect(
      new DuckDBTimeTZValue(0n, (15 * 60 + 59) * 60 + 59).toString(),
    ).toStrictEqual('00:00:00+15:59:59');
  });
  test('should construct the correct value from zero bits', () => {
    expect(DuckDBTimeTZValue.fromBits(0n).toString()).toStrictEqual(
      '00:00:00+15:59:59',
    );
  });
  test('should construct the correct value from max bits', () => {
    expect(
      DuckDBTimeTZValue.fromBits(
        (BigInt.asUintN(40, ((24n * 60n + 0n) * 60n + 0n) * 1000000n) << 24n) |
          BigInt.asUintN(24, (31n * 60n + 59n) * 60n + 58n),
      ).toString(),
    ).toStrictEqual('24:00:00-15:59:59');
  });
});

(The two fromBits tests originally shared the name 'should construct the correct value from bits'; they are renamed above so each test name is unique and descriptive.)
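Taken together, the two fromBits tests pin down the 64-bit TIMETZ layout: the upper 40 bits hold microseconds since midnight, and the lower 24 bits hold the offset, biased so that a stored value of 0 decodes to +15:59:59 (the maximum offset) and the largest stored value decodes to -15:59:59. A decoding sketch consistent with those expectations — the constant and helper names are illustrative, not the package's API:

// Sketch: decode the bit layout implied by the fromBits test fixtures.
const MAX_OFFSET_SECONDS = 15 * 3600 + 59 * 60 + 59; // 57599

function decodeTimeTZBits(bits: bigint): {
  microseconds: bigint;
  offsetSeconds: number;
} {
  const microseconds = BigInt.asUintN(40, bits >> 24n); // top 40 bits
  const storedOffset = Number(BigInt.asUintN(24, bits)); // bottom 24 bits
  return { microseconds, offsetSeconds: MAX_OFFSET_SECONDS - storedOffset };
}

// decodeTimeTZBits(0n) => { microseconds: 0n, offsetSeconds: 57599 },
// i.e. '00:00:00+15:59:59', matching the zero-bits test above.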
18  ts/pkgs/duckdb-data-values/test/DuckDBTimeValue.test.ts  Normal file
@@ -0,0 +1,18 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimeValue } from '../src/DuckDBTimeValue';

suite('DuckDBTimeValue', () => {
  test('should render a normal time value to the correct string', () => {
    expect(new DuckDBTimeValue(45296000000n).toString()).toStrictEqual(
      '12:34:56',
    );
  });
  test('should render the max time value to the correct string', () => {
    expect(new DuckDBTimeValue(86399999999n).toString()).toStrictEqual(
      '23:59:59.999999',
    );
  });
  test('should render the min time value to the correct string', () => {
    expect(new DuckDBTimeValue(0n).toString()).toStrictEqual('00:00:00');
  });
});
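The constructor argument here is microseconds since midnight: 45296000000n decomposes as (12 * 3600 + 34 * 60 + 56) * 1,000,000. A formatting sketch that reproduces the expected strings, including the trimming of trailing fractional zeros — a hypothetical helper, not the package's code:

// Sketch: format microseconds-of-day as HH:MM:SS[.ffffff].
function formatTimeMicros(micros: bigint): string {
  const totalSeconds = micros / 1_000_000n;
  const fraction = micros % 1_000_000n;
  const h = totalSeconds / 3600n;
  const m = (totalSeconds / 60n) % 60n;
  const s = totalSeconds % 60n;
  const pad = (v: bigint) => String(v).padStart(2, '0');
  let result = `${pad(h)}:${pad(m)}:${pad(s)}`;
  if (fraction > 0n) {
    // pad to six digits, then drop trailing zeros
    result += '.' + String(fraction).padStart(6, '0').replace(/0+$/, '');
  }
  return result;
}

// formatTimeMicros(45296000000n) => '12:34:56'
// formatTimeMicros(86399999999n) => '23:59:59.999999'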
55  ts/pkgs/duckdb-data-values/test/DuckDBTimestampMicrosecondsValue.test.ts  Normal file
@@ -0,0 +1,55 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimestampMicrosecondsValue } from '../src/DuckDBTimestampMicrosecondsValue';

suite('DuckDBTimestampMicrosecondsValue', () => {
  test('should render a normal timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(1612325106007800n).toString(),
    ).toStrictEqual('2021-02-03 04:05:06.0078');
  });
  test('should render a zero timestamp value to the correct string', () => {
    expect(new DuckDBTimestampMicrosecondsValue(0n).toString()).toStrictEqual(
      '1970-01-01 00:00:00',
    );
  });
  test('should render a negative timestamp value to the correct string', () => {
    expect(new DuckDBTimestampMicrosecondsValue(-7n).toString()).toStrictEqual(
      '1969-12-31 23:59:59.999993',
    );
  });
  test('should render a large positive timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(2353318271999999000n).toString(),
    ).toStrictEqual('76543-09-08 23:59:59.999');
  });
  test('should render a large negative (AD) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(-58261244276543211n).toString(),
    ).toStrictEqual('0123-10-11 01:02:03.456789');
  });
  test('should render a large negative (BC) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(-65992661876543211n).toString(),
    ).toStrictEqual('0123-10-11 (BC) 01:02:03.456789');
  });
  test('should render the max timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(9223372036854775806n).toString(),
    ).toStrictEqual('294247-01-10 04:00:54.775806');
  });
  test('should render the min timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(-9223372022400000000n).toString(),
    ).toStrictEqual('290309-12-22 (BC) 00:00:00');
  });
  test('should render the positive infinity timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(9223372036854775807n).toString(),
    ).toStrictEqual('infinity');
  });
  test('should render the negative infinity timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMicrosecondsValue(-9223372036854775807n).toString(),
    ).toStrictEqual('-infinity');
  });
});
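Note the boundary choices these tests imply: 2^63 - 2 is the largest finite microsecond timestamp, while the int64 extremes ±(2^63 - 1) act as 'infinity' / '-infinity' sentinels, matching DuckDB's convention. A sketch of that dispatch, with illustrative names only:

// Sketch: classify the sentinel values the tests above exercise.
const POSITIVE_INFINITY_MICROS = 2n ** 63n - 1n; // 9223372036854775807n

function classifyTimestampMicros(
  micros: bigint,
): 'finite' | 'infinity' | '-infinity' {
  if (micros === POSITIVE_INFINITY_MICROS) return 'infinity';
  if (micros === -POSITIVE_INFINITY_MICROS) return '-infinity';
  return 'finite'; // includes the max finite value, 2n ** 63n - 2n
}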
45  ts/pkgs/duckdb-data-values/test/DuckDBTimestampMillisecondsValue.test.ts  Normal file
@@ -0,0 +1,45 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimestampMillisecondsValue } from '../src/DuckDBTimestampMillisecondsValue';

suite('DuckDBTimestampMillisecondsValue', () => {
  test('should render a normal timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(1612325106007n).toString(),
    ).toStrictEqual('2021-02-03 04:05:06.007');
  });
  test('should render a zero timestamp value to the correct string', () => {
    expect(new DuckDBTimestampMillisecondsValue(0n).toString()).toStrictEqual(
      '1970-01-01 00:00:00',
    );
  });
  test('should render a negative timestamp value to the correct string', () => {
    expect(new DuckDBTimestampMillisecondsValue(-7n).toString()).toStrictEqual(
      '1969-12-31 23:59:59.993',
    );
  });
  test('should render a large positive timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(2353318271999999n).toString(),
    ).toStrictEqual('76543-09-08 23:59:59.999');
  });
  test('should render a large negative (AD) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(-58261244276544n).toString(),
    ).toStrictEqual('0123-10-11 01:02:03.456');
  });
  test('should render a large negative (BC) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(-65992661876544n).toString(),
    ).toStrictEqual('0123-10-11 (BC) 01:02:03.456');
  });
  test('should render the max timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(9223372036854775n).toString(),
    ).toStrictEqual('294247-01-10 04:00:54.775');
  });
  test('should render the min timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampMillisecondsValue(-9223372022400000n).toString(),
    ).toStrictEqual('290309-12-22 (BC) 00:00:00');
  });
});
40  ts/pkgs/duckdb-data-values/test/DuckDBTimestampNanosecondsValue.test.ts  Normal file
@@ -0,0 +1,40 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimestampNanosecondsValue } from '../src/DuckDBTimestampNanosecondsValue';

suite('DuckDBTimestampNanosecondsValue', () => {
  test('should render a normal timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(1612325106007891000n).toString(),
    ).toStrictEqual('2021-02-03 04:05:06.007891');
  });
  test('should render a zero timestamp value to the correct string', () => {
    expect(new DuckDBTimestampNanosecondsValue(0n).toString()).toStrictEqual(
      '1970-01-01 00:00:00',
    );
  });
  test('should render a negative timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(-7000n).toString(),
    ).toStrictEqual('1969-12-31 23:59:59.999993');
  });
  test('should render a large positive timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(8857641599999123000n).toString(),
    ).toStrictEqual('2250-09-08 23:59:59.999123');
  });
  test('should render a large negative timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(-8495881076543211000n).toString(),
    ).toStrictEqual('1700-10-11 01:02:03.456789');
  });
  test('should render the max timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(9223372036854775806n).toString(),
    ).toStrictEqual('2262-04-11 23:47:16.854775');
  });
  test('should render the min timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampNanosecondsValue(-9223372036854775806n).toString(),
    ).toStrictEqual('1677-09-21 00:12:43.145225');
  });
});
45  ts/pkgs/duckdb-data-values/test/DuckDBTimestampSecondsValue.test.ts  Normal file
@@ -0,0 +1,45 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimestampSecondsValue } from '../src/DuckDBTimestampSecondsValue';

suite('DuckDBTimestampSecondsValue', () => {
  test('should render a normal timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(1612325106n).toString(),
    ).toStrictEqual('2021-02-03 04:05:06');
  });
  test('should render a zero timestamp value to the correct string', () => {
    expect(new DuckDBTimestampSecondsValue(0n).toString()).toStrictEqual(
      '1970-01-01 00:00:00',
    );
  });
  test('should render a negative timestamp value to the correct string', () => {
    expect(new DuckDBTimestampSecondsValue(-7n).toString()).toStrictEqual(
      '1969-12-31 23:59:53',
    );
  });
  test('should render a large positive timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(2353318271999n).toString(),
    ).toStrictEqual('76543-09-08 23:59:59');
  });
  test('should render a large negative (AD) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(-58261244277n).toString(),
    ).toStrictEqual('0123-10-11 01:02:03');
  });
  test('should render a large negative (BC) timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(-65992661877n).toString(),
    ).toStrictEqual('0123-10-11 (BC) 01:02:03');
  });
  test('should render the max timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(9223372036854n).toString(),
    ).toStrictEqual('294247-01-10 04:00:54');
  });
  test('should render the min timestamp value to the correct string', () => {
    expect(
      new DuckDBTimestampSecondsValue(-9223372022400n).toString(),
    ).toStrictEqual('290309-12-22 (BC) 00:00:00');
  });
});
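Across these timestamp suites, dates before the common era render with a '(BC)' suffix, e.g. '0123-10-11 (BC)'. This is consistent with proleptic-Gregorian (astronomical) year numbering, in which year 0 is 1 BC, year -1 is 2 BC, and so on. A conversion sketch under that assumption — the helper is hypothetical:

// Sketch: map an astronomical year to the displayed year plus a BC flag.
function displayYear(astronomicalYear: number): { year: number; bc: boolean } {
  if (astronomicalYear < 1) {
    return { year: 1 - astronomicalYear, bc: true }; // year 0 => 1 BC
  }
  return { year: astronomicalYear, bc: false };
}

// displayYear(-122) => { year: 123, bc: true }, rendered as '0123-… (BC)'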
38  ts/pkgs/duckdb-data-values/test/DuckDBTimestampTZValue.test.ts  Normal file
@@ -0,0 +1,38 @@
import { expect, suite, test } from 'vitest';
import { DuckDBTimestampTZValue } from '../src/DuckDBTimestampTZValue';

suite('DuckDBTimestampTZValue', () => {
  test('should render a timestamp tz value with no timezone offset to the correct string', () => {
    expect(
      new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString(),
    ).toStrictEqual('2021-02-03 04:05:06.0078+00'); // defaults to UTC
  });
  test('should render a timestamp tz value with a zero timezone offset to the correct string', () => {
    expect(
      new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
        timezoneOffsetInMinutes: 0,
      }),
    ).toStrictEqual('2021-02-03 04:05:06.0078+00');
  });
  test('should render a timestamp tz value with a positive timezone offset to the correct string', () => {
    expect(
      new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
        timezoneOffsetInMinutes: 300,
      }),
    ).toStrictEqual('2021-02-03 09:05:06.0078+05');
  });
  test('should render a timestamp tz value with a negative timezone offset to the correct string', () => {
    expect(
      new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
        timezoneOffsetInMinutes: -300,
      }),
    ).toStrictEqual('2021-02-02 23:05:06.0078-05');
  });
  test('should render a timestamp tz value with a timezone offset containing minutes to the correct string', () => {
    expect(
      new DuckDBTimestampTZValue(1612325106007800n).toDuckDBString({
        timezoneOffsetInMinutes: 330,
      }),
    ).toStrictEqual('2021-02-03 09:35:06.0078+05:30');
  });
});
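The expected strings here imply that toDuckDBString shifts the stored UTC instant by timezoneOffsetInMinutes before formatting the wall-clock part, then appends the offset as +HH, adding :MM only when the offset isn't a whole number of hours. A sketch of both steps, with illustrative helper names:

// Sketch: the two offset-handling steps the expectations above imply.
function applyOffset(micros: bigint, offsetMinutes: number): bigint {
  // shift the UTC instant into local wall-clock time
  return micros + BigInt(offsetMinutes) * 60n * 1_000_000n;
}

function formatOffset(offsetMinutes: number): string {
  const sign = offsetMinutes < 0 ? '-' : '+';
  const abs = Math.abs(offsetMinutes);
  const hours = String(Math.floor(abs / 60)).padStart(2, '0');
  const minutes = abs % 60;
  return minutes === 0
    ? `${sign}${hours}`
    : `${sign}${hours}:${String(minutes).padStart(2, '0')}`;
}

// formatOffset(300) => '+05'; formatOffset(-300) => '-05';
// formatOffset(330) => '+05:30'; formatOffset(0) => '+00'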
49  ts/pkgs/duckdb-data-values/test/DuckDBUUIDValue.test.ts  Normal file
@@ -0,0 +1,49 @@
import { expect, suite, test } from 'vitest';
import { DuckDBUUIDValue } from '../src/DuckDBUUIDValue';

suite('DuckDBUUIDValue', () => {
  test('should render all zero bytes to the correct string', () => {
    expect(
      new DuckDBUUIDValue(
        new Uint8Array([
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00,
        ]),
      ).toString(),
    ).toStrictEqual('00000000-0000-0000-0000-000000000000');
  });
  test('should render all max bytes to the correct string', () => {
    expect(
      new DuckDBUUIDValue(
        new Uint8Array([
          0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
          0xff, 0xff, 0xff, 0xff, 0xff,
        ]),
      ).toString(),
    ).toStrictEqual('ffffffff-ffff-ffff-ffff-ffffffffffff');
  });
  test('should render arbitrary bytes to the correct string', () => {
    expect(
      new DuckDBUUIDValue(
        new Uint8Array([
          0xf0, 0xe1, 0xd2, 0xc3, 0xb4, 0xa5, 0x96, 0x87, 0xfe, 0xdc, 0xba,
          0x98, 0x76, 0x54, 0x32, 0x10,
        ]),
      ).toString(),
    ).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210');
  });
  test('should render a uint128 to the correct string', () => {
    expect(
      DuckDBUUIDValue.fromUint128(
        0xf0e1d2c3b4a59687fedcba9876543210n,
      ).toString(),
    ).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210');
  });
  test('should render a stored hugeint to the correct string', () => {
    expect(
      DuckDBUUIDValue.fromStoredHugeint(
        0x70e1d2c3b4a59687fedcba9876543210n, // note the flipped MSB
      ).toString(),
    ).toStrictEqual('f0e1d2c3-b4a5-9687-fedc-ba9876543210');
  });
});
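The 'flipped MSB' comment reflects how DuckDB stores UUIDs as HUGEINTs: the most significant bit is inverted so that signed 128-bit integer ordering matches unsigned UUID ordering. Recovering the unsigned value is a single XOR; a sketch of the idea, not the package's actual code:

// Sketch: undo the sign-bit flip used for HUGEINT-backed UUID storage.
function storedHugeintToUint128(stored: bigint): bigint {
  // reinterpret as unsigned 128-bit, then flip the top bit back
  return BigInt.asUintN(128, stored) ^ (1n << 127n);
}

// storedHugeintToUint128(0x70e1d2c3b4a59687fedcba9876543210n)
//   => 0xf0e1d2c3b4a59687fedcba9876543210n, matching the test above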
61  ts/pkgs/duckdb-data-values/test/conversion/getVarIntFromBytes.test.ts  Normal file
@@ -0,0 +1,61 @@
import { expect, suite, test } from 'vitest';
import { getVarIntFromBytes } from '../../src/conversion/getVarIntFromBytes';

suite('getVarIntFromBytes', () => {
  test('should return correct value for varint representation of 0', () => {
    expect(
      getVarIntFromBytes(new Uint8Array([0x80, 0x00, 0x01, 0x00])),
    ).toEqual(0n);
  });
  test('should return correct value for varint representation of 1', () => {
    expect(
      getVarIntFromBytes(new Uint8Array([0x80, 0x00, 0x01, 0x01])),
    ).toEqual(1n);
  });
  test('should return correct value for varint representation of -1', () => {
    expect(
      getVarIntFromBytes(new Uint8Array([0x7f, 0xff, 0xfe, 0xfe])),
    ).toEqual(-1n);
  });
  test('should return correct value for max varint', () => {
    // max VARINT = max IEEE double = 2^1023 * (1 + (1 − 2^−52)) ~= 1.7976931348623157 * 10^308
    // Note that the storage format supports much larger than this, but DuckDB specifies this max to support conversion to/from DOUBLE.
    expect(
      getVarIntFromBytes(
        // prettier-ignore
        new Uint8Array([0x80, 0x00, 0x80,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        ]),
      ),
    ).toEqual(
      179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n,
    );
  });
  test('should return correct value for min varint', () => {
    // min VARINT = -max VARINT
    expect(
      getVarIntFromBytes(
        // prettier-ignore
        new Uint8Array([0x7F, 0xFF, 0x7F,
          0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
          0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
        ]),
      ),
    ).toEqual(
      -179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368n,
    );
  });
});
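The byte patterns in these fixtures imply the VARINT wire format: a 3-byte big-endian header whose top bit is the sign (1 = non-negative) and whose remaining bits give the count of big-endian magnitude bytes that follow; negative values store the one's complement of the entire buffer (hence [0x7f, 0xff, 0xfe, 0xfe] for -1). A decoding sketch consistent with those fixtures — an illustrative reimplementation, not the actual getVarIntFromBytes:

// Sketch: decode the VARINT byte layout the test fixtures imply.
function decodeVarInt(bytes: Uint8Array): bigint {
  const negative = (bytes[0] & 0x80) === 0; // sign bit clear => negative
  // For negative values, flip every byte to recover the positive layout.
  const b = negative ? bytes.map((v) => ~v & 0xff) : bytes;
  let magnitude = 0n;
  for (let i = 3; i < b.length; i++) {
    // skip the 3-byte header, accumulate big-endian magnitude bytes
    magnitude = (magnitude << 8n) | BigInt(b[i]);
  }
  return negative ? -magnitude : magnitude;
}

// decodeVarInt(new Uint8Array([0x80, 0x00, 0x01, 0x01])) => 1n
// decodeVarInt(new Uint8Array([0x7f, 0xff, 0xfe, 0xfe])) => -1n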
Some files were not shown because too many files have changed in this diff.