should be it

This commit is contained in:
2025-10-24 19:21:19 -05:00
parent a4b23fc57c
commit f09560c7b1
14047 changed files with 3161551 additions and 1 deletions

View File

@@ -0,0 +1,102 @@
# Builds libduckdb for Android (one job per ABI) and publishes the zipped
# library to the staging bucket and as a workflow artifact.
name: Android
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    # Ignore every path except this workflow file itself.
    paths-ignore:
      - '**'
      - '!.github/workflows/Android.yml'
  pull_request:
    types: [opened, reopened, ready_for_review]
    paths-ignore:
      - '**'
      - '!.github/workflows/Android.yml'
      - '!.github/patches/duckdb-wasm/**'
concurrency:
  # One active run per ref/PR; new pushes cancel in-flight runs.
  group: android-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}
jobs:
  android:
    name: Android
    runs-on: ubuntu-latest
    # Binaries are only produced from the long-lived branches.
    if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/feature' }}
    strategy:
      matrix:
        arch: [ armeabi-v7a, arm64-v8a ]
    env:
      # NOTE(review): checkout below is still actions/checkout@v3 (older Node
      # runtime, hence this opt-in) while sibling workflows use v4 — confirm
      # whether the v3 pin is intentional.
      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      GEN: ninja
      EXTENSION_STATIC_BUILD: 1
      DUCKDB_PLATFORM: android_${{ matrix.arch}}
      DUCKDB_CUSTOM_PLATFORM: android_${{ matrix.arch}}
      CMAKE_VARS_BUILD: -DBUILD_UNITTESTS=0 -DBUILD_SHELL=0 -DANDROID_ABI=${{ matrix.arch}} -DCMAKE_TOOLCHAIN_FILE=./android-ndk/build/cmake/android.toolchain.cmake -DANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES=ON
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - name: Install Ninja
        shell: bash
        run: sudo apt-get -y update && sudo apt-get -y install ninja-build
      - name: Checkout (again)
        shell: bash
        run: git checkout ${{ inputs.git_ref }}
      - name: Install Android NDK
        shell: bash
        run: |
          wget https://dl.google.com/android/repository/android-ndk-r27-linux.zip
          unzip android-ndk-r27-linux.zip
          mv android-ndk-r27 android-ndk
      - name: Build
        shell: bash
        run: make
      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          zip -j libduckdb-android_${{matrix.arch}}.zip build/release/src/libduckdb*.* src/include/duckdb.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-android_${{matrix.arch}}.zip
      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-binaries-android-${{matrix.arch}}
          path: |
            libduckdb-android_${{matrix.arch}}.zip

View File

@@ -0,0 +1,221 @@
# Bundles DuckDB static libraries (amalgamated header + gathered libs) for
# OSX, Windows/MinGW and Linux, uploading them to staging and as artifacts.
name: Bundle Static Libraries
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    # Ignore every path except this workflow file itself.
    paths-ignore:
      - '**'
      - '!.github/workflows/BundleStaticLibs.yml'
  pull_request:
    types: [opened, reopened, ready_for_review]
    paths-ignore:
      - '**'
      - '!.github/workflows/BundleStaticLibs.yml'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}
jobs:
  bundle-osx-static-libs:
    # Builds per-architecture (x86_64 / arm64) static libraries on macOS.
    name: OSX static libs
    strategy:
      matrix:
        include:
          - xcode_target_flag: "x86_64"
            architecture: "amd64"
          - xcode_target_flag: "arm64"
            architecture: "arm64"
    runs-on: macos-latest
    env:
      EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      GEN: ninja
      OSX_BUILD_ARCH: ${{ matrix.xcode_target_flag }}
      DUCKDB_PLATFORM: osx_${{ matrix.architecture }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install Ninja
        run: brew install ninja
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build
        shell: bash
        run: make
      # Collects the built static libraries under build/release/libs/.
      - name: Bundle static library
        shell: bash
        run: |
          make gather-libs
      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python3 scripts/amalgamation.py
          zip -r -j static-libs-osx-${{ matrix.architecture }}.zip src/include/duckdb.h build/release/libs/
          ./scripts/upload-assets-to-staging.sh github_release static-libs-osx-${{ matrix.architecture }}.zip
      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-static-libs-osx-${{ matrix.architecture }}
          path: |
            static-libs-osx-${{ matrix.architecture }}.zip
  bundle-mingw-static-lib:
    # Builds the MinGW (gcc) static library on Windows via the shared
    # build_extensions action in bundle-static-lib mode.
    name: Windows MingW static libs
    runs-on: windows-latest
    env:
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      # Rtools provides the MinGW toolchain used below.
      - uses: r-lib/actions/setup-r@v2
        with:
          r-version: 'devel'
          update-rtools: true
          rtools-version: '42' # linker bug in 43 ^^
      # TODO: this action is deprecated, can we rework this to avoid using it?
      - uses: ./.github/actions/build_extensions
        with:
          duckdb_arch: windows_amd64_mingw
          vcpkg_target_triplet: x64-mingw-static
          treat_warn_as_error: 0
          override_cc: gcc
          override_cxx: g++
          vcpkg_build: 1
          no_static_linking: 0
          run_tests: 0
          run_autoload_tests: 0
          build_in_tree_extensions: 0
          build_out_of_tree_extensions: 0
          bundle_static_lib_mode: 1
      - name: Bundle static library
        shell: bash
        run: |
          make gather-libs
      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          zip -r -j static-libs-windows-mingw.zip src/include/duckdb.h build/release/libs/
          ./scripts/upload-assets-to-staging.sh github_release static-libs-windows-mingw.zip
      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-static-libs-windows-mingw
          path: |
            static-libs-windows-mingw.zip
  bundle-linux-static-libs:
    # Builds Linux static libraries inside the manylinux_2_28 container so the
    # result links against an older glibc; runs on amd64 and arm64 runners.
    strategy:
      fail-fast: false
      matrix:
        config: [ { runner: ubuntu-latest, arch: amd64, image: x86_64 }, { runner: ubuntu-24.04-arm, arch: arm64, image: aarch64 } ]
    name: Linux Static Libraries (${{ matrix.config.arch }})
    runs-on: ${{ matrix.config.runner }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - name: Build
        shell: bash
        run: |
          export PWD=`pwd`
          docker run \
          -v$PWD:$PWD \
          -e CMAKE_BUILD_PARALLEL_LEVEL=2 \
          -e OVERRIDE_GIT_DESCRIBE=$OVERRIDE_GIT_DESCRIBE \
          -e EXTENSION_CONFIGS="$PWD/.github/config/bundled_extensions.cmake" \
          -e ENABLE_EXTENSION_AUTOLOADING=1 \
          -e ENABLE_EXTENSION_AUTOINSTALL=1 \
          -e BUILD_BENCHMARK=1 \
          -e FORCE_WARN_UNUSED=1 \
          -e EXPORT_DYNAMIC_SYMBOLS=1 \
          quay.io/pypa/manylinux_2_28_${{ matrix.config.image }} \
          bash -c "
          set -e
          yum install -y perl-IPC-Cmd gcc-toolset-12 gcc-toolset-12-gcc-c++
          source /opt/rh/gcc-toolset-12/enable
          export CC=gcc
          export CXX=g++
          git config --global --add safe.directory $PWD
          make gather-libs -C $PWD
          "
      # NOTE(review): this step expects build/release/duckdb, but the container
      # step above only runs 'make gather-libs' — confirm the shell binary is
      # actually produced by that target.
      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"
      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python3 scripts/amalgamation.py
          zip -r -j static-libs-linux-${{ matrix.config.arch }}.zip src/include/duckdb.h build/release/libs/
          ./scripts/upload-assets-to-staging.sh github_release static-libs-linux-${{ matrix.config.arch }}.zip
      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-static-libs-linux-${{ matrix.config.arch }}
          path: |
            static-libs-linux-${{ matrix.config.arch }}.zip

View File

@@ -0,0 +1,26 @@
# On newly opened issues, runs a formatting heuristic over the issue body and
# posts a warning comment when code appears to be pasted without formatting.
name: Check Issue for Code Formatting
on:
  issues:
    types:
      - opened
env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  # Issue body is passed via env (not interpolated into the script) so
  # attacker-controlled text cannot inject shell commands.
  ISSUE_BODY: ${{ github.event.issue.body }}
jobs:
  check_code_formatting:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Check issue for code formatting
        run: |
          echo "$ISSUE_BODY" >> issue-text.md
          if ! cat issue-text.md | python3 scripts/check-issue-for-code-formatting.py; then
            gh issue comment ${{ github.event.issue.number }} --body-file .github/workflows/code-formatting-warning.md
          fi

View File

@@ -0,0 +1,158 @@
# Static quality gates: formatting, generated-file freshness, C enum
# integrity, and clang-tidy.
name: CodeQuality
on:
  workflow_dispatch:
    inputs:
      explicit_checks:
        description: 'Pass which checks to run or remain empty for default checks'
        type: string
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/lcov_exclude'
      - '!.github/workflows/CodeQuality.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/lcov_exclude'
      - '!.github/workflows/CodeQuality.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PR
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"
  format-check:
    # Runs clang-format / black / cmake-format checks and verifies that the
    # checked-in generated files are up to date.
    name: Format Check
    runs-on: ubuntu-22.04
    needs: check-draft
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build clang-format-11 && sudo pip3 install cmake-format 'black==24.*' cxxheaderparser pcpp 'clang_format==11.0.1'
      - name: List Installed Packages
        shell: bash
        run: pip3 freeze
      - name: Format Check
        shell: bash
        run: |
          clang-format --version
          clang-format --dump-config
          black --version
          make format-check-silent
      # Regenerate generated sources; fail if the working tree changes.
      - name: Generated Check
        shell: bash
        run: |
          make generate-files
          git diff --exit-code
  enum-check:
    # Verifies the enums exposed through the C API header stay consistent.
    name: C Enum Integrity Check
    needs: format-check
    runs-on: ubuntu-22.04
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      # Skipped for release tags.
      - name: Install python dependencies
        if: ${{ !startsWith(github.ref, 'refs/tags/v') }}
        shell: bash
        run: python -m pip install cxxheaderparser pcpp
      - name: Verify C enum integrity
        if: ${{ !startsWith(github.ref, 'refs/tags/v') }}
        shell: bash
        run: python scripts/verify_enum_integrity.py src/include/duckdb.h
  tidy-check:
    name: Tidy Check
    runs-on: ubuntu-24.04
    needs: format-check
    env:
      CC: gcc
      CXX: g++
      GEN: ninja
      TIDY_THREADS: 4
      # Optional manual override of the clang-tidy check list via dispatch input.
      TIDY_CHECKS: ${{ inputs.explicit_checks && inputs.explicit_checks || '' }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build clang-tidy && sudo pip3 install pybind11[global] --break-system-packages
      - name: Setup Ccache
        if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/feature' }}
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Download clang-tidy-cache
        if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/feature' }}
        shell: bash
        run: |
          set -e
          curl -Lo /tmp/clang-tidy-cache https://github.com/ejfitzgerald/clang-tidy-cache/releases/download/v0.4.0/clang-tidy-cache-linux-amd64
          # Checksum-verify the downloaded binary before making it executable.
          md5sum /tmp/clang-tidy-cache | grep 880b290d7bbe7c1fb2a4f591f9a86cc1
          chmod +x /tmp/clang-tidy-cache
      # Full (cached) tidy run on long-lived branches; diff-only run elsewhere.
      - name: Tidy Check
        shell: bash
        if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/feature' }}
        run: make tidy-check TIDY_BINARY=/tmp/clang-tidy-cache
      - name: Tidy Check Diff
        shell: bash
        if: ${{ github.ref != 'refs/heads/main' && github.ref != 'refs/heads/feature' }}
        run: make tidy-check-diff

View File

@@ -0,0 +1,337 @@
# Builds DuckDB at several released versions on OSX and Linux, generates
# database files with each (step 1 jobs), then cross-checks that every build
# can attach and copy the files produced by every other (version, platform)
# combination (step 2 jobs).
name: Cross Version DB test
on:
  workflow_call:
    inputs:
      git_ref:
        type: string
  workflow_dispatch:
    inputs:
      git_ref:
        type: string
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    # Ignore every path except this workflow file itself.
    paths-ignore:
      - '**'
      - '!.github/workflows/CrossVersion.yml'
concurrency:
  group: crossversion-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
jobs:
  osx-step-1:
    # Builds each DuckDB version on OSX, runs the unit tests with forced
    # storage, and uploads the database files left behind for cross testing.
    name: OSX Release
    runs-on: macos-14
    strategy:
      matrix:
        version: [ 'v1.0.0', 'v1.1.3', 'v1.2.2', 'v1.3-ossivalis', 'main' ]
      fail-fast: false
    env:
      EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      GEN: ninja
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ matrix.version }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install Ninja
        run: brew install ninja file
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build
        shell: bash
        run: make
      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"
      - name: Unit Test
        shell: bash
        run: |
          # Failures are tolerated: only the database files matter here.
          ./build/release/test/unittest --force-storage --test-temp-dir my_local_folder || true
          rm -rf my_local_folder/hive
      - uses: actions/upload-artifact@v4
        with:
          name: files-osx-${{ matrix.version }}
          path: |
            my_local_folder/*
osx-step-2:
# Builds binaries for linux
name: OSX Release test
runs-on: macos-14
needs:
- osx-step-1
- linux-step-1
strategy:
matrix:
version: [ 'v1.0.0', 'v1.1.3', 'v1.2.2', 'v1.3-ossivalis', 'main' ]
fail-fast: false
env:
EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
ENABLE_EXTENSION_AUTOLOADING: 1
ENABLE_EXTENSION_AUTOINSTALL: 1
GEN: ninja
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ matrix.version }}
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install Ninja
run: brew install ninja
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: make
- name: Print platform
shell: bash
run: ./build/release/duckdb -c "PRAGMA platform;"
- uses: actions/download-artifact@v4
with:
name: files-osx-v1.0.0
path: osx_v1_0_0
- uses: actions/download-artifact@v4
with:
name: files-osx-v1.1.3
path: osx_v1_1_3
- uses: actions/download-artifact@v4
with:
name: files-osx-v1.2.2
path: osx_v1_2_2
- uses: actions/download-artifact@v4
with:
name: files-osx-v1.3-ossivalis
path: osx_v1_3-ossivalis
- uses: actions/download-artifact@v4
with:
name: files-osx-main
path: osx_main
- uses: actions/download-artifact@v4
with:
name: files-linux-v1.0.0
path: linux_v1_0_0
- uses: actions/download-artifact@v4
with:
name: files-linux-v1.1.3
path: linux_v1_1_3
- uses: actions/download-artifact@v4
with:
name: files-linux-v1.2.2
path: linux_v1_2_2
- uses: actions/download-artifact@v4
with:
name: files-linux-v1.3-ossivalis
path: linux_v1_3-ossivalis
- uses: actions/download-artifact@v4
with:
name: files-linux-main
path: linux_main
- name: Cross test
shell: bash
run: |
touch report
for folder in osx_v1_0_0 osx_v1_1_3 osx_main linux_main linux_v1_0_0 linux_v1_1_3 linux_v1_2_2 linux_v1_2 osx_v1_2_2 osx_v1_2; do
for filename in $folder/*; do
touch $filename.wal && cp $filename.wal a.db.wal 2>/dev/null && cp $filename a.db 2>/dev/null && (./build/release/duckdb a.db -c "ATTACH 'b.db'; COPY FROM DATABASE a TO b;" 2>out || (grep "but it is not a valid DuckDB database file!" out 2>/dev/null || ( echo "--> " $filename && cat out && echo "" && (grep -i "internal error" out && echo "--> " $filename >> report && cat out >> report && echo "" >> report)))) || true
rm -f b.db a.db b.db.wal a.db.wal
done
done
- name: Internal error report
shell: bash
run: |
cat report
  linux-step-1:
    # Builds each DuckDB version on Linux, runs the unit tests with forced
    # storage, and uploads the database files left behind for cross testing.
    name: Linux Release
    runs-on: ubuntu-latest
    strategy:
      matrix:
        version: [ 'v1.0.0', 'v1.1.3', 'v1.2.2', 'v1.3-ossivalis', 'main' ]
      fail-fast: false
    env:
      EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      GEN: ninja
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ matrix.version }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build
        shell: bash
        run: make
      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"
      - name: Unit Test
        shell: bash
        run: |
          # Failures are tolerated: only the database files matter here.
          ./build/release/test/unittest --force-storage --test-temp-dir my_local_folder || true
          rm -rf my_local_folder/hive
      - uses: actions/upload-artifact@v4
        with:
          name: files-linux-${{ matrix.version }}
          path: |
            my_local_folder/*
linux-step-2:
# Builds binaries for linux
name: Linux Release Test
runs-on: ubuntu-latest
needs:
- osx-step-1
- linux-step-1
strategy:
matrix:
version: [ 'v1.0.0', 'v1.1.3', 'v1.2.2', 'v1.3-ossivalis', 'main' ]
fail-fast: false
env:
EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
ENABLE_EXTENSION_AUTOLOADING: 1
ENABLE_EXTENSION_AUTOINSTALL: 1
GEN: ninja
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ matrix.version }}
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: make
- name: Print platform
shell: bash
run: ./build/release/duckdb -c "PRAGMA platform;"
- uses: actions/download-artifact@v4
with:
name: files-osx-v1.0.0
path: osx_v1_0_0
- uses: actions/download-artifact@v4
with:
name: files-osx-v1.1.3
path: osx_v1_1_3
- uses: actions/download-artifact@v4
with:
name: files-osx-v1.2.2
path: osx_v1_2_2
- uses: actions/download-artifact@v4
with:
name: files-osx-v1.3-ossivalis
path: osx_v1_3-ossivalis
- uses: actions/download-artifact@v4
with:
name: files-osx-main
path: osx_main
- uses: actions/download-artifact@v4
with:
name: files-linux-v1.0.0
path: linux_v1_0_0
- uses: actions/download-artifact@v4
with:
name: files-linux-v1.1.3
path: linux_v1_1_3
- uses: actions/download-artifact@v4
with:
name: files-linux-v1.2.2
path: linux_v1_2_2
- uses: actions/download-artifact@v4
with:
name: files-linux-v1.3-ossivalis
path: linux_v1_3-ossivalis
- uses: actions/download-artifact@v4
with:
name: files-linux-main
path: linux_main
- name: Cross test
shell: bash
run: |
touch report
for folder in osx_v1_0_0 osx_v1_1_3 osx_main osx_v1_3-ossivalis linux_main linux_v1_3-ossivalis linux_v1_0_0 linux_v1_1_3 linux_v1_2_2 linux_v1_2 osx_v1_2_2 osx_v1_2; do
for filename in $folder/*; do
touch $filename.wal && cp $filename.wal a.db.wal 2>/dev/null && cp $filename a.db 2>/dev/null && (./build/release/duckdb a.db -c "ATTACH 'b.db'; COPY FROM DATABASE a TO b;" 2>out || (grep "but it is not a valid DuckDB database file!" out 2>/dev/null || ( echo "--> " $filename && cat out && echo "" && (grep -i "internal error" out && echo "--> " $filename >> report && cat out >> report && echo "" >> report)))) || true
rm -f b.db a.db b.db.wal a.db.wal
done
done
- name: Internal error report
shell: bash
run: |
cat report

View File

@@ -0,0 +1,58 @@
# Runs the docker-image smoke tests on Linux x64.
name: Docker tests
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    # Ignore every path except this workflow and the docker test script.
    paths-ignore:
      - '**'
      - '!.github/workflows/DockerTests.yml'
      - '!scripts/test_docker_images.sh'
  pull_request:
    types: [opened, reopened, ready_for_review]
    paths-ignore:
      - '**'
      - '!.github/workflows/DockerTests.yml'
      - '!scripts/test_docker_images.sh'
concurrency:
  group: docker-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}
jobs:
  linux-x64-docker:
    name: Docker tests on Linux (x64)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - name: Build
        shell: bash
        run: |
          ./scripts/test_docker_images.sh

View File

@@ -0,0 +1,26 @@
# Marks all changed PR as draft.
# This unprivileged half only records the PR's GraphQL node id as an
# artifact; the privileged 'Move PR to Draft' workflow_run workflow
# performs the actual conversion.
name: Draft on Synchronize
on:
  pull_request:
    types: [ synchronize ]
concurrency:
  group: shouldturntodraft-${{ github.event.number }}
  cancel-in-progress: true
jobs:
  mark-as-draft:
    name: Mark as draft
    if: github.event.pull_request.draft == false
    runs-on: ubuntu-latest
    steps:
      - name: Save PR number
        env:
          # GraphQL node id of the PR (not the numeric PR number).
          PR_NUMBER: ${{ github.event.pull_request.node_id }}
        run: |
          mkdir -p ./pr
          echo $PR_NUMBER > ./pr/pr_number
      - uses: actions/upload-artifact@v4
        with:
          name: pr_number
          path: pr/

View File

@@ -0,0 +1,21 @@
# Marks all changed PR as draft
name: Placeholder to cancel auto draft
on:
pull_request:
types: [ ready_for_review ]
concurrency:
group: shouldturntodraft-${{ github.event.number }}
cancel-in-progress: true
jobs:
mark-as-draft:
name: Placeholder
if: github.event.pull_request.draft == true
runs-on: ubuntu-latest
steps:
- name: Print PR number
env:
PR_NUMBER: ${{ github.event.pull_request.node_id }}
run: |
echo $PR_NUMBER

View File

@@ -0,0 +1,60 @@
# Marks all changed PR as draft
name: Move PR to Draft
on:
workflow_run:
workflows: [Draft on Synchronize]
types:
- completed
jobs:
actually-move-to-draft:
if: ${{ github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
steps:
- name: 'Download artifact'
uses: actions/github-script@v7
with:
script: |
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: context.payload.workflow_run.id,
});
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
return artifact.name == "pr_number"
})[0];
let download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: 'zip',
});
let fs = require('fs');
fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/pr_number.zip`, Buffer.from(download.data));
- name: 'Unzip artifact'
run: unzip pr_number.zip
- name: 'Extract PR node id'
shell: bash
run: |
(echo -n "PR_NUMBER=" | cat - pr_number) >> $GITHUB_ENV
- name: 'Actually move to draft'
shell: bash
env:
MOVE_PR_TO_DRAFT_TOKEN_ENV: ${{ secrets.MOVE_PR_TO_DRAFT_TOKEN }}
if: ${{ env.MOVE_PR_TO_DRAFT_TOKEN_ENV != '' }}
run: |
echo ${{ env.MOVE_PR_TO_DRAFT_TOKEN_ENV }} | gh auth login --with-token
gh api graphql -F id=${{ env.PR_NUMBER }} -f query='
mutation($id: ID!) {
convertPullRequestToDraft(input: { pullRequestId: $id }) {
pullRequest {
id
number
isDraft
}
}
}
'

View File

@@ -0,0 +1,349 @@
# Benchmark-regression comparisons of alternative build configurations
# (LTO, compiler choices) against a default build of the base branch.
name: ExtendedTests
on:
  workflow_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    # Ignore every path except this workflow file itself.
    paths-ignore:
      - '**'
      - '!.github/workflows/ExtendedTests.yml'
  pull_request:
    types: [opened, reopened, ready_for_review]
    paths-ignore:
      - '**'
      - '!.github/workflows/ExtendedTests.yml'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true
env:
  CCACHE_SAVE: ${{ github.repository != 'duckdb/duckdb' }}
  # Baseline branch for comparisons: the PR base if set, otherwise 'feature'
  # for *_feature refs, otherwise 'main'.
  BASE_BRANCH: ${{ github.base_ref || (endsWith(github.ref, '_feature') && 'feature' || 'main') }}
jobs:
  regression-lto-benchmark-runner:
    # Compares a full-LTO build of this commit ('new') against a default
    # build of the base branch ('old').
    name: Benchmark runner lto vs non-lto (OSX)
    runs-on: macos-latest
    env:
      CC: clang
      CXX: clang++
      GEN: ninja
      BUILD_BENCHMARK: 1
      CORE_EXTENSIONS: "tpch;tpcds;httpfs"
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install
        shell: bash
        run: brew install ninja llvm && pip install requests
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build
        shell: bash
        run: |
          # 'new' binary: this checkout with LTO; 'old': base branch, default build.
          LTO=full make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..
      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/
      - name: Regression Test Micro
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --threads 2
      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --threads 2
      - name: Regression Test TPCH-PARQUET
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch_parquet.csv --threads 2
      - name: Regression Test TPCDS
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --threads 2
      - name: Regression Test H2OAI
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --threads 2
      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --threads 2
  regression-clang16-vs-clang14-benchmark-runner:
    # Compares a Homebrew-LLVM build of this commit ('new') against a default
    # build of the base branch ('old').
    name: Benchmark runner clang-16 vs clang-14 (OSX)
    runs-on: macos-latest
    env:
      CC: clang
      CXX: clang++
      GEN: ninja
      BUILD_BENCHMARK: 1
      CORE_EXTENSIONS: "tpch;tpcds;httpfs"
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install
        shell: bash
        run: brew install ninja llvm && pip install requests
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build
        shell: bash
        run: |
          # 'new' binary: Homebrew LLVM toolchain; 'old': base branch, default build.
          CMAKE_LLVM_PATH='/opt/homebrew/opt/llvm' make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..
      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/
      - name: Regression Test Micro
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --threads 2
      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --threads 2
      - name: Regression Test TPCH-PARQUET
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch_parquet.csv --threads 2
      - name: Regression Test TPCDS
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --threads 2
      - name: Regression Test H2OAI
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --threads 2
      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --threads 2
  regression-clang-benchmark-runner:
    # Compares an LLVM-toolchain build of this commit ('new') against a
    # default gcc build of the base branch ('old').
    name: Benchmark runner clang vs gcc
    runs-on: ubuntu-latest
    env:
      CC: gcc
      CXX: g++
      GEN: ninja
      BUILD_BENCHMARK: 1
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: "tpch;tpcds;httpfs"
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build llvm && pip install requests
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build
        shell: bash
        run: |
          #### This would also be an alternative way to install llvm at a specific version
          # wget https://apt.llvm.org/llvm.sh
          # chmod +x llvm.sh
          # sudo ./llvm.sh 17
          #####
          CMAKE_LLVM_PATH='/usr/lib/llvm-14' make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..
      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/
      - name: Regression Test Micro
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --threads 2
      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --threads 2
      - name: Regression Test TPCH-PARQUET
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch_parquet.csv --threads 2
      - name: Regression Test TPCDS
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --threads 2
      - name: Regression Test H2OAI
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --threads 2
      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --threads 2
  regression-flto-gcc-benchmark-runner:
    # Compares a full-LTO gcc build of this commit ('new') against a default
    # gcc build of the base branch ('old').
    name: Benchmark runner gcc flto vs gcc
    runs-on: ubuntu-latest
    env:
      CC: gcc
      CXX: g++
      GEN: ninja
      BUILD_BENCHMARK: 1
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: "tpch;tpcds;httpfs"
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build && pip install requests
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build
        shell: bash
        run: |
          # 'new' binary: this checkout with LTO; 'old': base branch, default build.
          LTO='full' make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..
      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/
      - name: Regression Test Micro
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --threads 2
      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --threads 2
      - name: Regression Test TPCH-PARQUET
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch_parquet.csv --threads 2
      - name: Regression Test TPCDS
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --threads 2
      - name: Regression Test H2OAI
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --threads 2
      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --threads 2

View File

@@ -0,0 +1,366 @@
#
# This workflow is responsible for building all DuckDB extensions
#
name: Extensions (all platforms)
on:
  # Invoked from other workflows (e.g. InvokeCI) with explicit inputs.
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
      run_all:
        type: string
  # Manual runs expose the same inputs plus extra_exclude_archs.
  workflow_dispatch:
    inputs:
      override_git_describe:
        description: 'Version tag to override git describe. Use to produce binaries'
        type: string
      git_ref:
        description: 'Set to override the DuckDB version, leave empty for current commit'
        type: string
        required: false
        default: ''
      extra_exclude_archs:
        description: 'Inject more architectures to skip'
        type: string
        required: false
        default: ''
      skip_tests:
        description: 'Set to true to skip all testing'
        type: boolean
        required: false
        default: false
      run_all:
        type: string
        required: false
        default: 'true'
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Extensions.yml'
      - '!.github/workflows/_extension_distribution.yml'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    # NOTE(review): unlike the push trigger above, this list does not re-include
    # _extension_distribution.yml, so PRs touching only that file will not run
    # this workflow — confirm whether that asymmetry is intentional.
    paths-ignore:
      - '**.md'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Extensions.yml'
concurrency:
  # One run per ref; non-main refs additionally keyed by SHA so force-pushes cancel stale runs.
  group: extensions-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PR
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"
# This first step loads the various extension configs from the ~/.github/config directory storing them to drive the build jobs
load-extension-configs:
name: Load Extension Configs
runs-on: ubuntu-latest
needs: check-draft
outputs:
main_extensions_config: ${{ steps.set-main-extensions.outputs.extension_config }}
main_extensions_exclude_archs: ${{ steps.set-main-extensions.outputs.exclude_archs }}
rust_based_extensions_config: ${{ steps.set-rust-based-extensions.outputs.extension_config }}
rust_based_extensions_exclude_archs: ${{ steps.set-rust-based-extensions.outputs.exclude_archs }}
env:
# NOTE: on PRs we exclude some archs to speed things up
BASE_EXCLUDE_ARCHS: ${{ (github.event_name == 'pull_request' || inputs.run_all != 'true') && 'wasm_eh;wasm_threads;windows_amd64_mingw;osx_amd64;linux_arm64;linux_amd64_musl;' || '' }}
EXTRA_EXCLUDE_ARCHS: ${{ inputs.extra_exclude_archs }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ inputs.git_ref }}
- id: set-main-extensions
name: Configure main extensions
env:
IN_TREE_CONFIG_FILE: .github/config/in_tree_extensions.cmake
OUT_OF_TREE_CONFIG_FILE: .github/config/out_of_tree_extensions.cmake
DEFAULT_EXCLUDE_ARCHS: ''
run: |
# Set config
echo exclude_archs="$DEFAULT_EXCLUDE_ARCHS;$BASE_EXCLUDE_ARCHS;$EXTRA_EXCLUDE_ARCHS" >> $GITHUB_OUTPUT
in_tree_extensions="`cat $IN_TREE_CONFIG_FILE`"
out_of_tree_extensions="`cat $OUT_OF_TREE_CONFIG_FILE`"
echo "extension_config<<EOF" >> $GITHUB_OUTPUT
echo "$in_tree_extensions" >> $GITHUB_OUTPUT
echo -e "\n" >> $GITHUB_OUTPUT
echo "$out_of_tree_extensions" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
cat $GITHUB_OUTPUT
- id: set-rust-based-extensions
name: Configure Rust-based extensions
env:
CONFIG_FILE: .github/config/rust_based_extensions.cmake
DEFAULT_EXCLUDE_ARCHS: 'wasm_mvp;wasm_eh;wasm_threads;windows_amd64_rtools;windows_amd64_mingw;linux_amd64_musl'
run: |
echo exclude_archs="$DEFAULT_EXCLUDE_ARCHS;$BASE_EXCLUDE_ARCHS;$EXTRA_EXCLUDE_ARCHS" >> $GITHUB_OUTPUT
rust_based_extensions="`cat .github/config/rust_based_extensions.cmake`"
echo "extension_config<<EOF" >> $GITHUB_OUTPUT
echo "$rust_based_extensions" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
cat $GITHUB_OUTPUT
  # Build the main extensions: both .github/config/in_tree_extensions.cmake
  # and .github/config/out_of_tree_extensions.cmake (concatenated by load-extension-configs).
  main-extensions:
    name: Main Extensions
    needs:
      - load-extension-configs
    uses: ./.github/workflows/_extension_distribution.yml
    with:
      artifact_prefix: main-extensions
      exclude_archs: ${{ needs.load-extension-configs.outputs.main_extensions_exclude_archs }}
      extension_config: ${{ needs.load-extension-configs.outputs.main_extensions_config }}
      override_tag: ${{ inputs.override_git_describe }}
      duckdb_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests && true || false }}
      save_cache: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
  # Build the extensions from .github/config/rust_based_extensions.cmake
  # (same reusable workflow as main-extensions, with the Rust toolchain added).
  rust-based-extensions:
    name: Rust-based Extensions
    needs:
      - load-extension-configs
    uses: ./.github/workflows/_extension_distribution.yml
    with:
      exclude_archs: ${{ needs.load-extension-configs.outputs.rust_based_extensions_exclude_archs }}
      artifact_prefix: rust-based-extensions
      extension_config: ${{ needs.load-extension-configs.outputs.rust_based_extensions_config }}
      extra_toolchains: 'rust'
      override_tag: ${{ inputs.override_git_describe }}
      duckdb_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests && true || false }}
      save_cache: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
  # Merge all extensions into a single, versioned repository
  create-extension-repository:
    name: Create Extension Repository
    runs-on: ubuntu-latest
    needs:
      - main-extensions
      - rust-based-extensions
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/download-artifact@v4
        name: Download main extensions
        with:
          pattern: main-extensions-${{ github.sha }}*
          path: /tmp/repository_generation/main-extensions
      - uses: actions/download-artifact@v4
        name: Download rust-based extensions
        with:
          pattern: rust-based-extensions-${{ github.sha }}*
          path: /tmp/repository_generation/rust-based-extensions
      - name: Print all extensions
        run: |
          tree /tmp/repository_generation
      # Flatten <prefix>/<artifact>/<contents> into one repository layout.
      - name: Merge into single repository
        run: |
          mkdir /tmp/merged_repository
          cp -r /tmp/repository_generation/*/*/* /tmp/merged_repository
          tree /tmp/merged_repository
      - uses: actions/upload-artifact@v4
        with:
          # Fail loudly rather than uploading an empty repository.
          if-no-files-found: error
          name: extension-repository-${{ github.sha }}
          path: |
            /tmp/merged_repository/**/*.duckdb_extension*
  # Deploy the merged extension repository to S3.
  # NOTE(review): no branch/event guard here — presumably
  # scripts/extension-upload-repository.sh gates what gets deployed; verify.
  upload-extensions:
    name: Upload Extensions
    runs-on: ubuntu-latest
    needs:
      - create-extension-repository
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/download-artifact@v4
        with:
          pattern: extension-repository-${{ github.sha }}
          path: /tmp
      - name: List extensions to deploy
        shell: bash
        run: |
          tree /tmp/extension-repository-${{ github.sha }}
      - name: Deploy extensions
        shell: bash
        env:
          AWS_ENDPOINT_URL: ${{ secrets.DUCKDB_CORE_EXTENSION_S3_ENDPOINT }}
          AWS_ACCESS_KEY_ID: ${{secrets.DUCKDB_CORE_EXTENSION_S3_ID}}
          AWS_SECRET_ACCESS_KEY: ${{secrets.DUCKDB_CORE_EXTENSION_S3_SECRET}}
          DUCKDB_DEPLOY_SCRIPT_MODE: for_real
          DUCKDB_EXTENSION_SIGNING_PK: ${{ secrets.DUCKDB_EXTENSION_SIGNING_PK }}
        run: |
          pip install awscli
          ./scripts/extension-upload-repository.sh /tmp/extension-repository-${{ github.sha }}
  # Build DuckDB with autoloading enabled and run the unittest suite against
  # the freshly built extension repository.
  autoload-tests:
    name: Extension Autoloading Tests
    if: ${{ !inputs.skip_tests}}
    runs-on: ubuntu-latest
    needs: create-extension-repository
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - name: Setup Build Environment
        run: |
          sudo apt-get update -y -qq
          sudo apt-get install -y -qq ninja-build ccache
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - uses: actions/download-artifact@v4
        with:
          pattern: extension-repository-${{ github.sha }}
          path: /tmp
      - name: List extensions to test with
        shell: bash
        run: |
          tree /tmp/extension-repository-${{ github.sha }}
      - name: Build DuckDB
        env:
          GEN: ninja
          CC: gcc
          CXX: g++
          # Only the extensions' tests are compiled in, not the extensions themselves.
          EXTENSION_CONFIGS: './.github/config/rust_based_extensions.cmake;./.github/config/out_of_tree_extensions.cmake;./.github/config/in_tree_extensions.cmake'
          EXTENSION_TESTS_ONLY: 1
          ENABLE_EXTENSION_AUTOLOADING: 1
          ENABLE_EXTENSION_AUTOINSTALL: 1
        run: |
          make release
      - name: Run Tests
        env:
          # Point autoinstall at the locally built repository instead of the CDN.
          LOCAL_EXTENSION_REPO: /tmp/extension-repository-${{ github.sha }}
        run: |
          ./build/release/test/unittest --autoloading available --skip-compiled
  # Regenerate extension_entries.hpp from the built extension repository and
  # fail if it differs from the checked-in version.
  check-load-install-extensions:
    name: Checks extension entries
    if: ${{ !inputs.skip_tests}}
    runs-on: ubuntu-22.04
    needs: create-extension-repository
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.9'
      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build
        shell: bash
        env:
          GENERATE_EXTENSION_ENTRIES: 1
          LOCAL_EXTENSION_REPO: build/release/repository_other
        run: |
          make
      - uses: actions/download-artifact@v4
        name: Download extension repository artifact
        with:
          pattern: extension-repository-${{ github.sha }}
          path: /tmp
      # Keep only linux_amd64 binaries; the entry generator runs on this platform.
      - name: Copy over local extension repository
        shell: bash
        run: |
          cp -r /tmp/extension-repository-${{ github.sha }} build/release/repository
          tree build/release/repository
          find build/release/repository -type f ! -path "build/release/repository/*/linux_amd64/*" -delete
          tree build/release/repository
      - name: Check if extension_entries.hpp is up to date
        shell: bash
        env:
          EXTENSION_CONFIGS: '.github/config/in_tree_extensions.cmake;.github/config/out_of_tree_extensions.cmake'
        run: |
          make extension_configuration
          python scripts/generate_extensions_function.py
          pip install "black>=24"
          pip install cmake-format
          pip install "clang_format==11.0.1"
          make format-fix
      # Upload the regenerated header so a failing run can be fixed by hand.
      - uses: actions/upload-artifact@v4
        with:
          name: extension_entries.hpp
          path: |
            src/include/duckdb/main/extension_entries.hpp
      - name: Check for any difference
        run: |
          git diff --exit-code src/include/duckdb/main/extension_entries.hpp && echo "No differences found"
      - name: Explainer
        if: failure()
        run: |
          echo "There are differences in src/include/duckdb/main/extension_entries.hpp"
          echo "Check the uploaded extension_entries.hpp (in the workflow Summary), and check that in instead of src/include/duckdb/main/extension_entries.hpp"

View File

@@ -0,0 +1,63 @@
# Manually-triggered workflow that runs the FULL benchmark suite of the current
# commit against the most recent release tag to detect performance regressions.
name: Extra Tests
on:
  workflow_dispatch:
concurrency:
  # One run per ref; non-main refs additionally keyed by SHA so force-pushes cancel stale runs.
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
jobs:
  regression-test-all:
    name: All Regression Tests
    runs-on: ubuntu-22.04
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
      BUILD_BENCHMARK: 1
      BUILD_JEMALLOC: 1
      # Fixed typo: 'tpcd' is not a valid extension; the TPC-H benchmarks need 'tpch'.
      CORE_EXTENSIONS: "tpch;tpcds;httpfs"
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build && pip install requests
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      # Builds the current checkout first, then the latest release tag in ./duckdb.
      - name: Build Last Release
        shell: bash
        run: |
          make
          git clone https://github.com/duckdb/duckdb.git
          cd duckdb
          git checkout `git tag --list | tail -n 1`
          make
          cd ..
      # The release checkout needs the current benchmark definitions to compare like-for-like.
      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/
      - name: Regression Test
        if: always()
        shell: bash
        run: |
          build/release/benchmark/benchmark_runner --list > alltests.list
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks alltests.list --verbose --threads 2

View File

@@ -0,0 +1,87 @@
# Mirrors labeled public issues into duckdblabs/duckdb-internal and keeps
# triage labels consistent on the public side.
name: Create or Label Mirror Issue
on:
  issues:
    types:
      - labeled
env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  # Mirror issues are matched by this title prefix.
  TITLE_PREFIX: "[duckdb/#${{ github.event.issue.number }}]"
  PUBLIC_ISSUE_TITLE: ${{ github.event.issue.title }}
jobs:
  handle_pr_submitted_or_fix_on_nightly_label:
    if: github.event.label.name == 'PR submitted' || github.event.label.name == 'fixed on nightly'
    runs-on: ubuntu-latest
    steps:
      - name: Remove 'needs triage' label
        run: |
          gh issue edit --repo duckdb/duckdb ${{ github.event.issue.number }} --remove-label "needs triage"
  add_needs_reproducible_example_comment:
    if: github.event.label.name == 'needs reproducible example'
    runs-on: ubuntu-latest
    steps:
      - name: Add comment
        run: |
          cat > needs-reproducible-example-comment.md << EOF
          Thanks for opening this issue in the DuckDB issue tracker! To resolve this issue, our team needs a reproducible example. This includes:
          * A source code snippet which reproduces the issue.
          * The snippet should be self-contained, i.e., it should contain all imports and should use relative paths instead of hard coded paths (please avoid \`/Users/JohnDoe/...\`).
          * A lot of issues can be reproduced with plain SQL code executed in the [DuckDB command line client](https://duckdb.org/docs/api/cli/overview). If you can provide such an example, it greatly simplifies the reproduction process and likely results in a faster fix.
          * If the script needs additional data, please share the data as a CSV, JSON, or Parquet file. Unfortunately, we cannot fix issues that can only be reproduced with a confidential data set. [Support contracts](https://duckdblabs.com/#support) allow sharing confidential data with the core DuckDB team under NDA.
          For more detailed guidelines on how to create reproducible examples, please visit Stack Overflow's [“Minimal, Reproducible Example”](https://stackoverflow.com/help/minimal-reproducible-example) page.
          EOF
          gh issue comment --repo duckdb/duckdb ${{ github.event.issue.number }} --body-file needs-reproducible-example-comment.md
  # Only runs for 'reproduced' / 'under review' labels (see job-level if).
  create_or_label_mirror_issue:
    if: github.event.label.name == 'reproduced' || github.event.label.name == 'under review'
    runs-on: ubuntu-latest
    steps:
      - name: Remove 'needs triage' / 'under review' if 'reproduced'
        if: github.event.label.name == 'reproduced'
        run: |
          gh issue edit --repo duckdb/duckdb ${{ github.event.issue.number }} --remove-label "needs triage" --remove-label "under review" --remove-label "needs reproducible example"
      - name: Remove 'needs triage' / 'reproduced' if 'under review'
        if: github.event.label.name == 'under review'
        run: |
          gh issue edit --repo duckdb/duckdb ${{ github.event.issue.number }} --remove-label "needs triage" --remove-label "reproduced"
      # NOTE(review): this step is unreachable — the job-level `if` only admits
      # 'reproduced' and 'under review', so 'expected behavior' never reaches it.
      # Either move it to its own job or extend the job condition; confirm intent.
      - name: Remove 'needs triage' if 'expected behavior'
        if: github.event.label.name == 'expected behavior'
        run: |
          gh issue edit --repo duckdb/duckdb ${{ github.event.issue.number }} --remove-label "needs triage"
      - name: Get mirror issue number
        run: |
          gh issue list --repo duckdblabs/duckdb-internal --search "${TITLE_PREFIX}" --json title,number --state all --jq ".[] | select(.title | startswith(\"$TITLE_PREFIX\")).number" > mirror_issue_number.txt
          echo "MIRROR_ISSUE_NUMBER=$(cat mirror_issue_number.txt)" >> $GITHUB_ENV
      - name: Print whether mirror issue exists
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" == "" ]; then
            echo "Mirror issue with title prefix '$TITLE_PREFIX' does not exist yet"
          else
            echo "Mirror issue with title prefix '$TITLE_PREFIX' exists with number $MIRROR_ISSUE_NUMBER"
          fi
      - name: Set label environment variable
        run: |
          if ${{ github.event.label.name == 'reproduced' }}; then
            echo "LABEL=reproduced" >> $GITHUB_ENV
            echo "UNLABEL=under review" >> $GITHUB_ENV
          else
            echo "LABEL=under review" >> $GITHUB_ENV
            echo "UNLABEL=reproduced" >> $GITHUB_ENV
          fi
      - name: Create or label issue
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" == "" ]; then
            gh issue create --repo duckdblabs/duckdb-internal --label "$LABEL" --title "$TITLE_PREFIX - $PUBLIC_ISSUE_TITLE" --body "See https://github.com/duckdb/duckdb/issues/${{ github.event.issue.number }}"
          else
            gh issue edit --repo duckdblabs/duckdb-internal $MIRROR_ISSUE_NUMBER --remove-label "$UNLABEL" --add-label "$LABEL"
          fi

View File

@@ -0,0 +1,51 @@
# Propagates public issue close/reopen (and discussion label) events to the
# matching mirror issue in duckdblabs/duckdb-internal.
name: Update Mirror Issue
on:
  discussion:
    types:
      - labeled
  issues:
    types:
      - closed
      - reopened
env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  # Issue number for issue events, discussion number for discussion events.
  TITLE_PREFIX: "[duckdb/#${{ github.event.issue.number || github.event.discussion.number }}]"
jobs:
  update_mirror_issue:
    runs-on: ubuntu-latest
    steps:
      - name: Get mirror issue number
        run: |
          gh issue list --repo duckdblabs/duckdb-internal --search "${TITLE_PREFIX}" --json title,number --state all --jq ".[] | select(.title | startswith(\"$TITLE_PREFIX\")).number" > mirror_issue_number.txt
          echo "MIRROR_ISSUE_NUMBER=$(cat mirror_issue_number.txt)" >> $GITHUB_ENV
      - name: Print whether mirror issue exists
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" == "" ]; then
            echo "Mirror issue with title prefix '$TITLE_PREFIX' does not exist yet"
          else
            echo "Mirror issue with title prefix '$TITLE_PREFIX' exists with number $MIRROR_ISSUE_NUMBER"
          fi
      # The comment interpolates github.event.action ('closed', 'reopened', or 'labeled').
      - name: Add comment with status to mirror issue
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" != "" ]; then
            gh issue comment --repo duckdblabs/duckdb-internal $MIRROR_ISSUE_NUMBER --body "The issue has been ${{ github.event.action }} (https://github.com/duckdb/duckdb/issues/${{ github.event.issue.number || github.event.discussion.number }})."
          fi
      - name: Add closed label to mirror issue
        if: github.event.action == 'closed'
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" != "" ]; then
            gh issue edit --repo duckdblabs/duckdb-internal $MIRROR_ISSUE_NUMBER --add-label "public closed" --remove-label "public reopened"
          fi
      - name: Reopen mirror issue and add reopened label
        if: github.event.action == 'reopened'
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" != "" ]; then
            gh issue reopen --repo duckdblabs/duckdb-internal $MIRROR_ISSUE_NUMBER
            gh issue edit --repo duckdblabs/duckdb-internal $MIRROR_ISSUE_NUMBER --add-label "public reopened" --remove-label "public closed"
          fi

View File

@@ -0,0 +1,101 @@
# Orchestrator: fans out to the platform workflows and aggregates their status.
name: InvokeCI
on:
  repository_dispatch:
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
      run_all:
        type: string
      # NOTE(review): twine_upload is declared but not referenced anywhere in
      # this file — presumably consumed by a downstream workflow; verify or remove.
      twine_upload:
        type: string
concurrency:
  group: invokeci-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}-${{ inputs.git_ref }}-${{ inputs.skip_tests }}
  cancel-in-progress: true
jobs:
  extensions:
    uses: ./.github/workflows/Extensions.yml
    secrets: inherit
    with:
      override_git_describe: ${{ inputs.override_git_describe }}
      git_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests }}
      run_all: ${{ inputs.run_all }}
  osx:
    uses: ./.github/workflows/OSX.yml
    secrets: inherit
    with:
      override_git_describe: ${{ inputs.override_git_describe }}
      git_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests }}
      run_all: ${{ inputs.run_all }}
  # NOTE(review): linux-release and static-libraries do not receive run_all,
  # unlike the other callees — confirm those workflows do not define it.
  linux-release:
    uses: ./.github/workflows/LinuxRelease.yml
    secrets: inherit
    with:
      override_git_describe: ${{ inputs.override_git_describe }}
      git_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests }}
  windows:
    uses: ./.github/workflows/Windows.yml
    secrets: inherit
    with:
      override_git_describe: ${{ inputs.override_git_describe }}
      git_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests }}
      run_all: ${{ inputs.run_all }}
  static-libraries:
    uses: ./.github/workflows/BundleStaticLibs.yml
    secrets: inherit
    with:
      override_git_describe: ${{ inputs.override_git_describe }}
      git_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests }}
  # Collapses the five platform results into a single success flag.
  prepare-status:
    runs-on: ubuntu-latest
    if: always()
    needs:
      - extensions
      - osx
      - linux-release
      - windows
      - static-libraries
    outputs:
      is-success: ${{ steps.set-output.outputs.success }}
    steps:
      - id: set-output
        shell: bash
        run: |
          if [[ "${{ needs.extensions.result }}" == "success" && \
                "${{ needs.osx.result }}" == "success" && \
                "${{ needs.linux-release.result }}" == "success" && \
                "${{ needs.windows.result }}" == "success" && \
                "${{ needs.static-libraries.result }}" == "success" ]]; then
            echo "success=true" >> $GITHUB_OUTPUT
          else
            echo "success=false" >> $GITHUB_OUTPUT
          fi
  notify-external-repos:
    uses: ./.github/workflows/NotifyExternalRepositories.yml
    secrets: inherit
    needs: prepare-status
    if: ${{ always() }}
    with:
      is-success: ${{ needs.prepare-status.outputs.is-success }}
      target-branch: ${{ inputs.git_ref == '' && github.ref || inputs.git_ref }}
      duckdb-sha: ${{ github.sha }}
      triggering-event: ${{ github.event_name }}
      should-publish: 'true'
      override-git-describe: ${{ inputs.override_git_describe }}

View File

@@ -0,0 +1,30 @@
# Marks issues/PRs stale after a year of inactivity and closes them 30 days later.
name: Close Stale Issues
on:
  repository_dispatch:
  workflow_dispatch:
jobs:
  close_stale_issues:
    permissions:
      actions: write
      contents: write # only for delete-branch option
      issues: write
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - name: Close stale issues
        uses: actions/stale@v9
        env:
          GH_TOKEN: ${{ secrets.GH_TOKEN }}
        with:
          # Messages updated to match the configured thresholds below
          # (previously claimed "90 days" while days-before-stale is 365).
          stale-issue-message: 'This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 30 days.'
          stale-pr-message: 'This pull request is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 30 days.'
          close-issue-message: 'This issue was closed because it has been stale for 30 days with no activity.'
          close-pr-message: 'This pull request was closed because it has been stale for 30 days with no activity.'
          exempt-issue-labels: 'no stale'
          exempt-pr-labels: 'no stale'
          days-before-stale: 365
          days-before-close: 30
          operations-per-run: 500
          stale-issue-label: stale
          stale-pr-label: stale

View File

@@ -0,0 +1,115 @@
# CI for the Julia client package (tools/juliapkg): format check plus tests
# against multiple Julia versions using a locally built libduckdb.
name: Julia
on:
  workflow_dispatch:
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'examples/**'
      - 'test/**'
      - 'tools/**'
      - '!tools/juliapkg/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Julia.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'examples/**'
      - 'test/**'
      - 'tools/**'
      - '!tools/juliapkg/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Julia.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true
jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PR
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"
  format_check:
    name: Julia Format Check
    needs: check-draft
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      # NOTE(review): pinned to setup-julia@v1 / Julia 1.7 while main_julia uses
      # @v2 — presumably to keep JuliaFormatter output stable; confirm.
      - uses: julia-actions/setup-julia@v1
        with:
          version: 1.7
          arch: x64
      - name: Format Check
        shell: bash
        run: |
          cd tools/juliapkg
          julia -e "import Pkg; Pkg.add(\"JuliaFormatter\")"
          ./format_check.sh
  main_julia:
    name: Julia ${{ matrix.version }}
    needs: check-draft
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        version:
          - '1.10'
          - '1'
        os:
          - ubuntu-latest
        arch:
          - x64
        # NOTE(review): isRelease is not referenced elsewhere in this job's
        # visible config — verify it is still needed as a matrix dimension.
        isRelease:
          - ${{ github.ref == 'refs/heads/main' }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: julia-actions/setup-julia@v2
        with:
          version: ${{ matrix.version }}
          arch: ${{ matrix.arch }}
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.version }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build DuckDB
        shell: bash
        run: |
          CORE_EXTENSIONS="tpch;icu" BUILD_JEMALLOC=1 make
      - name: Run Tests
        shell: bash
        run: |
          export JULIA_DUCKDB_LIBRARY="`pwd`/build/release/src/libduckdb.so"
          export JULIA_NUM_THREADS=2
          export LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libstdc++.so.6"
          ls $JULIA_DUCKDB_LIBRARY
          cd tools/juliapkg
          julia --project -e "import Pkg; Pkg.test()"

View File

@@ -0,0 +1,211 @@
# Builds and deploys the Linux release binaries (CLI + libduckdb) for amd64/arm64.
name: LinuxRelease
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        description: 'Version tag to override git describe'
        type: string
      git_ref:
        description: 'Git ref'
        type: string
      skip_tests:
        description: 'Pass "true" to skip tests'
        type: string
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/LinuxRelease.yml'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/LinuxRelease.yml'
concurrency:
  # One run per ref; non-main refs additionally keyed by SHA so force-pushes cancel stale runs.
  group: linuxrelease-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}
jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PR
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"
  # Builds the CLI + shared library inside a manylinux_2_28 container (for glibc
  # compatibility), uploads the artifacts to staging, then runs the test suites.
  linux-release-cli:
    needs:
      - check-draft
    strategy:
      fail-fast: false
      matrix:
        config: [ { runner: ubuntu-latest, arch: amd64, image: x86_64}, {runner: ubuntu-24.04-arm, arch: arm64, image: aarch64}]
    name: Linux CLI (${{ matrix.config.arch }})
    runs-on: ${{ matrix.config.runner }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - name: Install pytest
        run: |
          python3 -m pip install pytest
      - name: Build
        shell: bash
        run: |
          export PWD=`pwd`
          docker run \
          -v$PWD:$PWD \
          -e CMAKE_BUILD_PARALLEL_LEVEL=2 \
          -e OVERRIDE_GIT_DESCRIBE=$OVERRIDE_GIT_DESCRIBE \
          -e EXTENSION_CONFIGS="$PWD/.github/config/bundled_extensions.cmake" \
          -e ENABLE_EXTENSION_AUTOLOADING=1 \
          -e ENABLE_EXTENSION_AUTOINSTALL=1 \
          -e BUILD_BENCHMARK=1 \
          -e FORCE_WARN_UNUSED=1 \
          quay.io/pypa/manylinux_2_28_${{ matrix.config.image }} \
          bash -c "
          set -e
          yum install -y perl-IPC-Cmd gcc-toolset-12 gcc-toolset-12-gcc-c++
          source /opt/rh/gcc-toolset-12/enable
          export CC=gcc
          export CXX=g++
          git config --global --add safe.directory $PWD
          make -C $PWD
          "
      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"
      # Staging upload runs before tests; the staging script decides what is published.
      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python3 scripts/amalgamation.py
          zip -j duckdb_cli-linux-${{ matrix.config.arch }}.zip build/release/duckdb
          gzip -9 -k -n -c build/release/duckdb > duckdb_cli-linux-${{ matrix.config.arch }}.gz
          zip -j libduckdb-linux-${{ matrix.config.arch }}.zip build/release/src/libduckdb*.* src/amalgamation/duckdb.hpp src/include/duckdb.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-linux-${{ matrix.config.arch }}.zip duckdb_cli-linux-${{ matrix.config.arch }}.zip duckdb_cli-linux-${{ matrix.config.arch }}.gz
      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-binaries-linux-${{ matrix.config.arch }}
          path: |
            libduckdb-linux-${{ matrix.config.arch }}.zip
            duckdb_cli-linux-${{ matrix.config.arch }}.zip
            duckdb_cli-linux-${{ matrix.config.arch }}.gz
      - name: Test
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          python3 scripts/run_tests_one_by_one.py build/release/test/unittest "*" --time_execution
      - name: Tools Tests
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          python3 -m pytest tools/shell/tests --shell-binary build/release/duckdb
      - name: Examples
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          build/release/benchmark/benchmark_runner benchmark/micro/update/update_with_join.benchmark
          build/release/duckdb -c "COPY (SELECT 42) TO '/dev/stdout' (FORMAT PARQUET)" | cat
  # Builds the amalgamated source bundle and pushes it to the staging bucket.
  upload-libduckdb-src:
    name: Upload libduckdb-src.zip
    needs: linux-release-cli
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python3 scripts/amalgamation.py
          zip -j libduckdb-src.zip src/amalgamation/duckdb.hpp src/amalgamation/duckdb.cpp src/include/duckdb.h src/include/duckdb_extension.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-src.zip
  # Verifies that the shared library does not export unintended symbols.
  symbol-leakage:
    name: Symbol Leakage
    runs-on: ubuntu-22.04
    needs: linux-release-cli
    if: ${{ inputs.skip_tests != 'true' }}
    env:
      GEN: ninja
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install Ninja
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build
        shell: bash
        run: make
      - name: Symbol Leakage Test
        shell: bash
        run: python3 scripts/exported_symbols_check.py build/release/src/libduckdb*.so

View File

@@ -0,0 +1,517 @@
name: Main
on:
workflow_dispatch:
repository_dispatch:
push:
branches-ignore:
- 'main'
- 'feature'
- 'v*.*-*'
paths-ignore:
- '**.md'
- 'tools/**'
- '!tools/shell/**'
- '.github/patches/duckdb-wasm/**'
- '.github/workflows/**'
- '!.github/workflows/Main.yml'
- '.github/config/extensions/*.cmake'
- '.github/patches/extensions/**/*.patch'
- '!.github/patches/extensions/fts/*.patch' # fts used in some jobs
- '!.github/config/extensions/fts.cmake'
merge_group:
pull_request:
types: [opened, reopened, ready_for_review, converted_to_draft]
paths-ignore:
- '**.md'
- 'tools/**'
- '!tools/shell/**'
- '.github/patches/duckdb-wasm/**'
- '.github/workflows/**'
- '!.github/workflows/Main.yml'
- '.github/config/extensions/*.cmake'
- '.github/patches/extensions/**/*.patch'
- '!.github/patches/extensions/fts/*.patch' # fts used in some jobs
- '!.github/config/extensions/fts.cmake'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
cancel-in-progress: true
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }}
jobs:
check-draft:
# We run all other jobs on PRs only if they are not draft PR
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-24.04
steps:
- name: Preliminary checks on CI
run: echo "Event name is ${{ github.event_name }}"
linux-debug:
name: Linux Debug
# This tests release build while enabling slow verifiers (masked by #ifdef DEBUG) and sanitizers
needs: check-draft
runs-on: ubuntu-22.04
env:
CC: gcc-10
CXX: g++-10
TREAT_WARNINGS_AS_ERRORS: 1
GEN: ninja
CRASH_ON_ASSERT: 1
CMAKE_CXX_FLAGS: '-DDEBUG'
FORCE_ASSERT: 1
steps:
- uses: actions/checkout@v4
- id: describe_step
run: echo "git_describe=$(git describe --tags --long)" >> "$GITHUB_OUTPUT"
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: make release
- name: Output version info
shell: bash
run: ./build/release/duckdb -c "PRAGMA version;"
- name: Set DUCKDB_INSTALL_LIB for ADBC tests
shell: bash
run: echo "DUCKDB_INSTALL_LIB=$(find `pwd` -name "libduck*.so" | head -n 1)" >> $GITHUB_ENV
- name: Test DUCKDB_INSTALL_LIB variable
run: echo $DUCKDB_INSTALL_LIB
- name: Test
shell: bash
run: |
python3 scripts/run_tests_one_by_one.py build/release/test/unittest --tests-per-invocation 100
linux-release:
name: Linux Release (full suite)
needs: check-draft
runs-on: ubuntu-24.04
env:
GEN: ninja
BUILD_JEMALLOC: 1
CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet"
DISABLE_SANITIZER: 1
steps:
- uses: actions/checkout@v3
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: make release
- name: Test
shell: bash
run: make allunit
no-string-inline:
name: No String Inline / Destroy Unpinned Blocks
runs-on: ubuntu-24.04
needs: linux-configs
env:
GEN: ninja
CORE_EXTENSIONS: "icu;parquet;tpch;tpcds;fts;json;inet"
DISABLE_STRING_INLINE: 1
DESTROY_UNPINNED_BLOCKS: 1
ALTERNATIVE_VERIFY: 1
DISABLE_POINTER_SALT: 1
LSAN_OPTIONS: suppressions=${{ github.workspace }}/.sanitizer-leak-suppressions.txt
DUCKDB_TEST_DESCRIPTION: 'Compiled with ALTERNATIVE_VERIFY=1 DISABLE_STRING_INLINE=1 DESTROY_UNPINNED_BLOCKS=1 DISABLE_POINTER_SALT=1. Use require no_alternative_verify to skip.'
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: make relassert
- name: Test
shell: bash
run: build/relassert/test/unittest
vector-sizes:
name: Vector Sizes
runs-on: ubuntu-22.04
needs: linux-configs
env:
CC: gcc-10
CXX: g++-10
GEN: ninja
DUCKDB_TEST_DESCRIPTION: 'Compiled with STANDARD_VECTOR_SIZE=2. Use require vector_size 2048 to skip tests.'
steps:
- name: Clean up the disc space
shell: bash
run: |
echo "Disk usage before clean up:"
df -h
rm -rf /opt/hostedtoolcache/CodeQL Java* Pypy Ruby go node
echo "Disk usage after clean up:"
df -h
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: STANDARD_VECTOR_SIZE=2 make reldebug
- name: Test
shell: bash
run: |
python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest --no-exit --time_execution
valgrind:
name: Valgrind
if: ${{ !startsWith(github.ref, 'refs/tags/v') }}
runs-on: ubuntu-24.04
needs: linux-configs
env:
CC: clang
CXX: clang++
DISABLE_SANITIZER: 1
BUILD_JEMALLOC: 1
CORE_EXTENSIONS: 'icu;json;parquet;tpch'
GEN: ninja
steps:
- uses: actions/checkout@v4
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build valgrind clang
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: make relassert
- name: Output version info
shell: bash
run: ./build/relassert/duckdb -c "PRAGMA version;"
- name: Test
shell: bash
run: valgrind ./build/relassert/test/unittest test/sql/tpch/tpch_sf001.test_slow
threadsan:
name: Thread Sanitizer
needs: linux-configs
runs-on: ubuntu-24.04
env:
CC: clang
CXX: clang++
GEN: ninja
BUILD_JEMALLOC: 1
CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet"
TSAN_OPTIONS: suppressions=${{ github.workspace }}/.sanitizer-thread-suppressions.txt
DUCKDB_TEST_DESCRIPTION: 'Tests run with thread sanitizer.'
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build clang
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: THREADSAN=1 make reldebug
- name: Test
shell: bash
run: |
python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest --no-exit --timeout 600
python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest "[intraquery]" --no-exit --timeout 600
python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest "[interquery]" --no-exit --timeout 1800
python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest "[interquery]" --no-exit --timeout 1800 --force-storage
python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest "[interquery]" --no-exit --timeout 1800 --force-storage --force-reload
python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest "[detailed_profiler]" --no-exit --timeout 600
python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest test/sql/tpch/tpch_sf01.test_slow --no-exit --timeout 600
amalgamation-tests:
name: Amalgamation Tests
runs-on: ubuntu-22.04
needs: check-draft
env:
CC: clang
CXX: clang++
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
ref: ${{ github.ref }}
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install LLVM and Clang
uses: KyleMayes/install-llvm-action@v1
with:
version: "14.0"
- name: Generate Amalgamation
shell: bash
run: |
python scripts/amalgamation.py --extended
clang++ -std=c++17 -Isrc/amalgamation src/amalgamation/duckdb.cpp -emit-llvm -S -O0
force-blocking-sink-source:
name: Forcing async Sinks/Sources
runs-on: ubuntu-24.04
needs: check-draft
env:
GEN: ninja
CORE_EXTENSIONS: "icu;parquet;tpch;tpcds;fts;json;inet"
FORCE_ASYNC_SINK_SOURCE: 1
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: make relassert
- name: Test
shell: bash
run: python3 scripts/run_tests_one_by_one.py build/relassert/test/unittest --no-exit --timeout 600
# TODO: Bring back BLOCK_VERIFICATION: 1, and consider bringing back fts
# TODO: DEBUG_STACKTRACE: 1 + reldebug ?
linux-configs:
name: Tests a release build with different configurations
runs-on: ubuntu-24.04
needs: check-draft
env:
BASE_BRANCH: ${{ github.base_ref || 'main' }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install Ninja
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
id: build
shell: bash
env:
CORE_EXTENSIONS: "json;parquet;icu;tpch;tpcds"
GEN: ninja
run: make
- name: test/configs/encryption.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/encryption.json
- name: test/configs/force_storage.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/force_storage.json
- name: test/configs/force_storage_restart.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/force_storage_restart.json
- name: test/configs/latest_storage.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/latest_storage.json
- name: test/configs/verify_fetch_row.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/verify_fetch_row.json
- name: test/configs/wal_verification.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/wal_verification.json
- name: test/configs/prefetch_all_parquet_files.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/prefetch_all_parquet_files.json
- name: test/configs/no_local_filesystem.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/no_local_filesystem.json
- name: test/configs/block_size_16kB.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/block_size_16kB.json
- name: test/configs/latest_storage_block_size_16kB.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/latest_storage_block_size_16kB.json
- name: test/configs/enable_verification.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/enable_verification.json
- name: Test dictionary_expression
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --verify-vector dictionary_expression --skip-compiled
- name: Test dictionary_operator
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --verify-vector dictionary_operator --skip-compiled
- name: Test constant_operator
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --verify-vector constant_operator --skip-compiled
- name: Test sequence_operator
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --verify-vector sequence_operator --skip-compiled
- name: Test nested_shuffle
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --verify-vector nested_shuffle --skip-compiled
- name: Test variant_vector
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/variant_vector.json
- name: Test variant_vector
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/compressed_in_memory.json
- name: Test block prefetching
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/prefetch_all_storage.json
- name: Test peg_parser
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/release/test/unittest --test-config test/configs/peg_parser.json
- name: Forwards compatibility tests
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
python3 scripts/test_storage_compatibility.py --versions "1.2.1|1.3.2" --new-unittest build/release/test/unittest

View File

@@ -0,0 +1,19 @@
# Mirrors a public GitHub Discussion into the internal duckdblabs tracker
# once a maintainer labels it "under review".
name: Create Mirror for Discussions
on:
  discussion:
    types:
      - labeled
env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  # Prefix ties the mirror issue back to the originating discussion number,
  # so later lookups can match on the title.
  TITLE_PREFIX: "[duckdb/#${{ github.event.discussion.number }}]"
  PUBLIC_DISCUSSION_TITLE: ${{ github.event.discussion.title }}
jobs:
  create_mirror_issue:
    # Only discussions explicitly marked for review are mirrored.
    if: github.event.label.name == 'under review'
    runs-on: ubuntu-latest
    steps:
      - name: Create mirror issue for discussion
        run: |
          gh issue create --repo duckdblabs/duckdb-internal --label "discussion" --title "$TITLE_PREFIX - $PUBLIC_DISCUSSION_TITLE" --body "See https://github.com/duckdb/duckdb/discussions/${{ github.event.discussion.number }}"

View File

@@ -0,0 +1,42 @@
# Creates (at most one) mirror issue in duckdb/duckdb-web when an issue,
# discussion, or PR is labeled "Needs Documentation".
# NOTE(review): pull_request_target runs with repository secrets; this is
# gated on a maintainer-applied label, and the workflow never checks out or
# executes PR code — keep it that way.
name: Create Documentation issue for the Needs Documentation label
on:
  discussion:
    types:
      - labeled
  issues:
    types:
      - labeled
  pull_request_target:
    types:
      - labeled
env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  # an event triggering this workflow is either an issue or a pull request,
  # hence only one of the numbers will be filled in the TITLE_PREFIX
  TITLE_PREFIX: "[duckdb/#${{ github.event.issue.number || github.event.pull_request.number }}]"
  PUBLIC_ISSUE_TITLE: ${{ github.event.issue.title || github.event.pull_request.title }}
jobs:
  create_documentation_issue:
    if: github.event.label.name == 'Needs Documentation'
    runs-on: ubuntu-latest
    steps:
      # Look for an existing mirror issue (any state) whose title starts with
      # the prefix; an empty result means none exists yet.
      - name: Get mirror issue number
        run: |
          gh issue list --repo duckdb/duckdb-web --json title,number --state all --jq ".[] | select(.title | startswith(\"${TITLE_PREFIX}\")).number" > mirror_issue_number.txt
          echo "MIRROR_ISSUE_NUMBER=$(cat mirror_issue_number.txt)" >> ${GITHUB_ENV}
      - name: Print whether mirror issue exists
        run: |
          if [ "${MIRROR_ISSUE_NUMBER}" == "" ]; then
            echo "Mirror issue with title prefix '${TITLE_PREFIX}' does not exist yet"
          else
            echo "Mirror issue with title prefix '${TITLE_PREFIX}' exists with number ${MIRROR_ISSUE_NUMBER}"
          fi
      # Idempotent: creation is skipped when a mirror issue already exists.
      - name: Create mirror issue if it does not yet exist
        run: |
          if [ "${MIRROR_ISSUE_NUMBER}" == "" ]; then
            gh issue create --repo duckdb/duckdb-web --title "${TITLE_PREFIX} - ${PUBLIC_ISSUE_TITLE} needs documentation" --body "See https://github.com/duckdb/duckdb/issues/${{ github.event.issue.number || github.event.pull_request.number }}"
          fi

View File

@@ -0,0 +1,687 @@
name: NightlyTests
on:
workflow_dispatch:
repository_dispatch:
push:
branches-ignore:
- 'main'
- 'feature'
- 'v*.*-*'
paths-ignore:
- '**'
- '!.github/workflows/NightlyTests.yml'
- '!.github/patches/duckdb-wasm/**'
pull_request:
types: [opened, reopened, ready_for_review, converted_to_draft]
paths-ignore:
- '**'
- '!.github/workflows/NightlyTests.yml'
- '!.github/patches/duckdb-wasm/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
cancel-in-progress: true
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }}
DUCKDB_WASM_VERSION: "cf2048bd6d669ffa05c56d7d453e09e99de8b87e"
CCACHE_SAVE: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
jobs:
check-draft:
# We run all other jobs on PRs only if they are not draft PR
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-24.04
steps:
- name: Preliminary checks on CI
run: echo "Event name is ${{ github.event_name }}"
linux-memory-leaks:
name: Linux Memory Leaks
needs: check-draft
runs-on: ubuntu-24.04
env:
GEN: ninja
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install Ninja
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
shell: bash
run: make
- name: Test
shell: bash
run: |
python3 test/memoryleak/test_memory_leaks.py
release-assert:
name: Release Assertions
runs-on: ubuntu-latest
needs: linux-memory-leaks
env:
GEN: ninja
BUILD_JEMALLOC: 1
CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet;httpfs"
DISABLE_SANITIZER: 1
CRASH_ON_ASSERT: 1
RUN_SLOW_VERIFIERS: 1
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build libcurl4-openssl-dev
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
shell: bash
run: UNSAFE_NUMERIC_CAST=1 make relassert
- name: Test
shell: bash
run: |
python3 scripts/run_tests_one_by_one.py build/relassert/test/unittest "*" --no-exit --timeout 1200
release-assert-osx:
name: Release Assertions OSX
runs-on: macos-latest
needs: linux-memory-leaks
env:
GEN: ninja
CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet;httpfs"
DISABLE_SANITIZER: 1
CRASH_ON_ASSERT: 1
RUN_SLOW_VERIFIERS: 1
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install Ninja
run: brew install ninja llvm
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
shell: bash
run: CMAKE_LLVM_PATH='/opt/homebrew/opt/llvm' UNSAFE_NUMERIC_CAST=1 make relassert
- name: Test
shell: bash
run: |
python3 scripts/run_tests_one_by_one.py build/relassert/test/unittest "*" --no-exit --timeout 1200
release-assert-osx-storage:
name: Release Assertions OSX Storage
runs-on: macos-latest
needs: linux-memory-leaks
env:
GEN: ninja
CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet"
DISABLE_SANITIZER: 1
CRASH_ON_ASSERT: 1
RUN_SLOW_VERIFIERS: 1
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install Ninja
run: brew install ninja
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
shell: bash
run: UNSAFE_NUMERIC_CAST=1 make relassert
- name: Test
shell: bash
run: |
python3 scripts/run_tests_one_by_one.py build/relassert/test/unittest "*" --no-exit --timeout 1200 --force-storage
smaller-binary:
name: Smaller Binary
runs-on: ubuntu-24.04
needs: linux-memory-leaks
env:
GEN: ninja
BUILD_JEMALLOC: 1
CORE_EXTENSIONS: "icu;tpch;tpcds;json"
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
shell: bash
run: SMALLER_BINARY=1 make
- name: Measure Size
shell: bash
run: ls -trlah build/release/src/libduckdb*
- name: Test
shell: bash
run: |
build/release/test/unittest "*"
release-assert-clang:
name: Release Assertions with Clang
runs-on: ubuntu-latest
needs: linux-memory-leaks
env:
CC: clang
CXX: clang++
GEN: ninja
BUILD_JEMALLOC: 1
CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet;httpfs"
DISABLE_SANITIZER: 1
CRASH_ON_ASSERT: 1
RUN_SLOW_VERIFIERS: 1
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build llvm libcurl4-openssl-dev
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
shell: bash
run: UNSAFE_NUMERIC_CAST=1 make relassert
- name: Test
shell: bash
run: |
python3 scripts/run_tests_one_by_one.py build/relassert/test/unittest "*" --no-exit --timeout 1200
sqllogic:
name: Sqllogic tests
runs-on: ubuntu-latest # Secondary task of this CI job is to test building duckdb on latest ubuntu
needs: linux-memory-leaks
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@v1.2.11 # Note: pinned due to GLIBC incompatibility in later releases
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
# Build is implied by 'make sqlite' that will invoke implicitly 'make release' (we make it explicit)
- name: Build
shell: bash
run: make release
- name: Test
shell: bash
run: make sqlite
storage-initialization:
name: Storage Initialization Verification
runs-on: ubuntu-22.04
needs: linux-memory-leaks
env:
CC: gcc-10
CXX: g++-10
GEN: ninja
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
shell: bash
run: make debug
- name: Test
shell: bash
run: python3 scripts/test_zero_initialize.py
extension-updating:
name: Extension updating test
runs-on: ubuntu-22.04
needs: linux-memory-leaks
env:
CC: gcc-10
CXX: g++-10
GEN: ninja
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Cleanup disk before build
run: |
echo "Disk usage before clean up:"
df -h
sudo apt-get clean
sudo rm -rf /var/lib/apt/lists/*
docker system prune -af || true
rm -rf ~/.cache
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
echo "Disk usage after clean up:"
df -h
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- name: Install
shell: bash
run: pip install awscli
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
shell: bash
run: CORE_EXTENSIONS="tpch" make
- name: Start Minio
shell: bash
run: |
sudo ./scripts/install_s3_test_server.sh
./scripts/generate_presigned_url.sh
source ./scripts/run_s3_test_server.sh
source ./scripts/set_s3_test_server_variables.sh
sleep 60
- name: Build
shell: bash
run: |
./scripts/run_extension_medata_tests.sh
regression-test-memory-safety:
name: Regression Tests between safe and unsafe builds
runs-on: ubuntu-22.04
needs: linux-memory-leaks
env:
CC: gcc-10
CXX: g++-10
GEN: ninja
BUILD_BENCHMARK: 1
BUILD_JEMALLOC: 1
CORE_EXTENSIONS: "tpch;tpcds;httpfs"
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Checkout tools repo
uses: actions/checkout@v3
with:
fetch-depth: 0
path: unsafe
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install
shell: bash
run: |
sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build libcurl4-openssl-dev && pip install requests
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
shell: bash
run: |
make
cd unsafe
UNSAFE_NUMERIC_CAST=1 DISABLE_MEMORY_SAFETY=1 make
- name: Set up benchmarks
shell: bash
run: |
cp -r benchmark unsafe/
- name: Regression Test Micro
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old unsafe/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --verbose --threads 2
- name: Regression Test TPCH
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old unsafe/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --verbose --threads 2
- name: Regression Test TPCDS
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old unsafe/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --verbose --threads 2
- name: Regression Test H2OAI
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old unsafe/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --verbose --threads 2
- name: Regression Test IMDB
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old unsafe/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --verbose --threads 2
vector-and-block-sizes:
name: Tests different vector and block sizes
runs-on: ubuntu-24.04
env:
LSAN_OPTIONS: suppressions=${{ github.workspace }}/.sanitizer-leak-suppressions.txt
needs: linux-memory-leaks
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
id: build
shell: bash
env:
CORE_EXTENSIONS: "json;parquet;icu;tpch;tpcds"
GEN: ninja
STANDARD_VECTOR_SIZE: 512
run: make relassert
- name: Fast and storage tests with default and small block size
shell: bash
run: |
./build/relassert/test/unittest
./build/relassert/test/unittest "test/sql/storage/*"
./build/relassert/test/unittest --test-config test/configs/block_size_16kB.json
./build/relassert/test/unittest "test/sql/storage/*" --test-config test/configs/block_size_16kB.json
linux-debug-configs:
name: Tests different configurations with a debug build
runs-on: ubuntu-24.04
env:
LSAN_OPTIONS: suppressions=${{ github.workspace }}/.sanitizer-leak-suppressions.txt
needs: linux-memory-leaks
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
id: build
shell: bash
env:
CORE_EXTENSIONS: "json;parquet;icu;tpch;tpcds"
GEN: ninja
run: make debug
- name: test/configs/enable_verification_for_debug.json
if: (success() || failure()) && steps.build.conclusion == 'success'
shell: bash
run: |
./build/debug/test/unittest --test-config test/configs/enable_verification_for_debug.json
linux-wasm-experimental:
name: WebAssembly duckdb-wasm builds
# disable in NightlyTests
if: false
needs: check-draft
runs-on: ubuntu-22.04
steps:
- uses: mymindstorm/setup-emsdk@v12
with:
version: 'latest'
- name: Setup
shell: bash
run: |
git clone https://github.com/duckdb/duckdb-wasm
cd duckdb-wasm
git checkout ${{ env.DUCKDB_WASM_VERSION }}
shopt -s nullglob
for filename in ../.github/patches/duckdb-wasm/*.patch; do
git apply $filename
done
git submodule init
git submodule update
git rm -r submodules/duckdb
- uses: actions/checkout@v3
with:
fetch-depth: 0
path: duckdb-wasm/submodules/duckdb
- name: Setup
shell: bash
run: |
cd duckdb-wasm
make patch_duckdb || echo "done"
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Print version
shell: bash
run: |
emcc --version
- name: Build WebAssembly MVP
shell: bash
run: |
cd duckdb-wasm
bash scripts/wasm_build_lib.sh relsize mvp $(pwd)/submodules/duckdb
- name: Build WebAssembly EH
shell: bash
run: |
cd duckdb-wasm
bash scripts/wasm_build_lib.sh relsize eh $(pwd)/submodules/duckdb
- name: Build WebAssembly COI
shell: bash
run: |
cd duckdb-wasm
bash scripts/wasm_build_lib.sh relsize coi $(pwd)/submodules/duckdb
- name: Package
shell: bash
run: |
zip -r duckdb-wasm32.zip duckdb-wasm/packages/duckdb-wasm/src/bindings
- uses: actions/upload-artifact@v4
with:
name: duckdb-wasm32
path: |
duckdb-wasm32.zip
hash-zero:
name: Hash Zero
runs-on: ubuntu-24.04
needs: linux-memory-leaks
env:
GEN: ninja
CORE_EXTENSIONS: "icu;parquet;tpch;tpcds;fts;json;inet"
HASH_ZERO: 1
LSAN_OPTIONS: suppressions=${{ github.workspace }}/.sanitizer-leak-suppressions.txt
DUCKDB_TEST_DESCRIPTION: 'Compiled with HASH_ZERO=1. Use require no_hash_zero to skip.'
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ env.CCACHE_SAVE }}
- name: Build
shell: bash
run: make relassert
- name: Test
shell: bash
run: build/relassert/test/unittest --test-config test/configs/hash_zero.json
codecov:
name: Code Coverage
runs-on: ubuntu-22.04
needs: linux-memory-leaks
env:
GEN: ninja
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build lcov curl g++ zip
- name: Set up Python 3.9
uses: actions/setup-python@v5
with:
python-version: '3.9'
- name: Install pytest
run: |
python3 -m pip install pytest
- name: Check Coverage
shell: bash
continue-on-error: true
run: |
make coverage-check
- name: Create Archive
if: ${{ success() || failure() }}
shell: bash
run: |
zip -r coverage.zip coverage_html
- uses: actions/upload-artifact@v4
if: ${{ success() || failure() }}
with:
name: coverage
path: coverage.zip
if-no-files-found: error

View File

@@ -0,0 +1,115 @@
# Dispatches follow-up workflow runs in sibling DuckDB repositories
# (duckdb-odbc, duckdb-java, duckdb-build-status, duckdb-python) once CI on
# this repository completes.
name: Notify External Repositories
on:
  workflow_call:
    inputs:
      duckdb-sha:
        description: 'Vendor Specific DuckDB SHA'
        required: false
        default: ''
        type: 'string'
      target-branch:
        description: 'Which Branch to Target'
        required: true
        default: ''
        type: 'string'
      triggering-event:
        description: 'Which event triggered the run'
        default: ''
        type: 'string'
      should-publish:
        description: 'Should the called workflow push updates or not'
        default: 'false'
        type: 'string'
      is-success:
        description: 'True, if all the builds in InvokeCI had succeeded'
        default: 'false'
        type: 'string'
      override-git-describe:
        description: 'The name of the release tag, used for release builds'
        required: false
        default: ''
        type: string
  # Manual trigger mirrors the workflow_call inputs so maintainers can
  # re-dispatch notifications by hand.
  workflow_dispatch:
    inputs:
      duckdb-sha:
        description: 'Vendor Specific DuckDB SHA'
        required: false
        default: ''
        type: 'string'
      target-branch:
        description: 'Which Branch to Target'
        required: true
        default: ''
        type: 'string'
      triggering-event:
        description: 'Which event triggered the run'
        default: ''
        type: 'string'
      should-publish:
        description: 'Should the called workflow push updates'
        default: 'false'
        type: 'string'
      is-success:
        description: 'True, if all the builds in InvokeCI had succeeded'
        default: 'false'
        type: 'string'
      override-git-describe:
        description: 'The name of the release tag, used for release builds'
        required: false
        default: ''
        type: string
concurrency:
  # Serialize all runs of this workflow; never cancel an in-flight dispatch.
  group: ${{ github.workflow }}
  cancel-in-progress: false
env:
  PAT_USER: ${{ secrets.PAT_USERNAME }}
  PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
jobs:
  notify-odbc-run:
    name: Run ODBC Vendor
    runs-on: ubuntu-latest
    # Vendor repos are only notified on successful, non-release runs
    # (override-git-describe is set for tagged release builds).
    if: ${{ inputs.is-success == 'true' && inputs.override-git-describe == '' }}
    steps:
      - name: Run ODBC Vendor
        if: ${{ github.repository == 'duckdb/duckdb' }}
        run: |
          export URL=https://api.github.com/repos/duckdb/duckdb-odbc/actions/workflows/Vendor.yml/dispatches
          export DATA='{"ref": "${{ inputs.target-branch }}", "inputs": {"duckdb-sha": "${{ inputs.duckdb-sha }}"}}'
          curl -v -XPOST -u "${PAT_USER}:${PAT_TOKEN}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" $URL --data "$DATA"
  notify-jdbc-run:
    name: Run JDBC Vendor
    runs-on: ubuntu-latest
    if: ${{ inputs.is-success == 'true' && inputs.override-git-describe == '' }}
    steps:
      - name: Run JDBC Vendor
        if: ${{ github.repository == 'duckdb/duckdb' }}
        run: |
          export URL=https://api.github.com/repos/duckdb/duckdb-java/actions/workflows/Vendor.yml/dispatches
          export DATA='{"ref": "${{ inputs.target-branch }}", "inputs": {"duckdb-sha": "${{ inputs.duckdb-sha }}"}}'
          curl -v -XPOST -u "${PAT_USER}:${PAT_TOKEN}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" $URL --data "$DATA"
  # Build-status reporting has no is-success gate: it runs for failed builds too.
  notify-nightly-build-status:
    name: Run Nightly build status
    runs-on: ubuntu-latest
    steps:
      - name: Run Nightly build status
        if: ${{ github.repository == 'duckdb/duckdb' }}
        run: |
          export URL=https://api.github.com/repos/duckdb/duckdb-build-status/actions/workflows/NightlyBuildsCheck.yml/dispatches
          export DATA='{"ref": "${{ inputs.target-branch }}", "inputs": {"event": "${{ inputs.triggering-event }}", "should_publish": "${{ inputs.should-publish }}"}}'
          curl -v -XPOST -u "${PAT_USER}:${PAT_TOKEN}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" $URL --data "$DATA"
  notify-python-nightly:
    name: Dispatch Python nightly build
    runs-on: ubuntu-latest
    steps:
      - name: Call /dispatch
        if: ${{ github.repository == 'duckdb/duckdb' && inputs.override-git-describe == '' }}
        run: |
          export URL=https://api.github.com/repos/duckdb/duckdb-python/actions/workflows/release.yml/dispatches
          export DATA='{"ref": "${{ inputs.target-branch }}", "inputs": {"duckdb-sha": "${{ inputs.duckdb-sha }}", "pypi-index": "prod" }}'
          curl -v -XPOST -u "${PAT_USER}:${PAT_TOKEN}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" $URL --data "$DATA"

View File

@@ -0,0 +1,208 @@
# macOS CI: a warnings-as-errors sanity build plus the universal
# (arm64 + amd64) release build, which is signed, notarized, and uploaded
# to the staging bucket.
name: OSX
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
      run_all:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
      run_all:
        type: string
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/OSX.yml'
concurrency:
  group: osx-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}
jobs:
  xcode-debug:
    # NOTE(review): despite the job name, this runs `make release` — with
    # -DDEBUG, FORCE_ASSERT, and warnings-as-errors enabled. Confirm the
    # release target is intentional here.
    name: OSX Debug
    runs-on: macos-14
    env:
      TREAT_WARNINGS_AS_ERRORS: 1
      CMAKE_CXX_FLAGS: '-DDEBUG'
      FORCE_ASSERT: 1
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Install ninja
        shell: bash
        run: brew install ninja
      - name: Build
        shell: bash
        run: GEN=ninja make release
      - name: Set DUCKDB_INSTALL_LIB for ADBC tests
        shell: bash
        run: echo "DUCKDB_INSTALL_LIB=$(find `pwd` -name "libduck*.dylib" | head -n 1)" >> $GITHUB_ENV
      - name: Test DUCKDB_INSTALL_LIB variable
        run: echo $DUCKDB_INSTALL_LIB
      - name: Test
        if: ${{ inputs.skip_tests != 'true' }}
        shell: bash
        run: make unittest_release
      # Verify the amalgamated single-file build compiles warning-free under
      # clang in all three assert configurations.
      - name: Amalgamation
        if: ${{ inputs.skip_tests != 'true' }}
        shell: bash
        run: |
          python scripts/amalgamation.py --extended
          cd src/amalgamation
          clang++ -std=c++11 -O0 -Wall -Werror -emit-llvm -S duckdb.cpp
          clang++ -DNDEBUG -O0 -std=c++11 -Wall -Werror -emit-llvm -S duckdb.cpp
          clang++ -DDEBUG -O0 -std=c++11 -Wall -Werror -emit-llvm -S duckdb.cpp
  xcode-release:
    # Builds binaries for osx_arm64 and osx_amd64
    name: OSX Release
    runs-on: macos-14
    needs: xcode-debug
    env:
      EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      OSX_BUILD_UNIVERSAL: 1
      GEN: ninja
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install Ninja
        run: brew install ninja
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Install pytest
        run: |
          python -m pip install pytest
      - name: Build
        shell: bash
        run: make
      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"
      # from https://docs.github.com/en/actions/deployment/deploying-xcode-applications/installing-an-apple-certificate-on-macos-runners-for-xcode-development
      # Signing/notarization only runs in the upstream repo (forks lack the secrets).
      - name: Sign Binaries
        shell: bash
        env:
          BUILD_CERTIFICATE_BASE64: ${{ secrets.OSX_CODESIGN_BUILD_CERTIFICATE_BASE64 }}
          P12_PASSWORD: ${{ secrets.OSX_CODESIGN_P12_PASSWORD }}
          KEYCHAIN_PASSWORD: ${{ secrets.OSX_CODESIGN_KEYCHAIN_PASSWORD }}
          TEAM_ID : ${{ secrets.OSX_NOTARIZE_TEAM_ID }}
          APPLE_ID: ${{ secrets.OSX_NOTARIZE_APPLE_ID }}
          PASSWORD: ${{ secrets.OSX_NOTARIZE_PASSWORD }}
        run: |
          if [[ "$GITHUB_REPOSITORY" = "duckdb/duckdb" ]] ; then
            . scripts/osx_import_codesign_certificate.sh
            echo -e '<?xml version="1.0" encoding="UTF-8"?>\n<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n<plist version="1.0">\n<dict>\n <key>com.apple.security.cs.disable-library-validation</key>\n <true/>\n</dict>\n</plist>' > entitlements.plist
            codesign --options runtime --entitlements entitlements.plist --all-architectures --force --sign "Developer ID Application: Stichting DuckDB Foundation" build/release/duckdb
            codesign --options runtime --entitlements entitlements.plist --all-architectures --force --sign "Developer ID Application: Stichting DuckDB Foundation" build/release/src/libduckdb*.dylib
            zip -j notarize.zip build/release/duckdb build/release/src/libduckdb.dylib
            export XCRUN_RESPONSE=$(xcrun notarytool submit --progress --apple-id "$APPLE_ID" --password "$PASSWORD" --team-id "$TEAM_ID" --wait -f json notarize.zip)
            rm notarize.zip
            if [[ $(./build/release/duckdb -csv -noheader -c "SELECT (getenv('XCRUN_RESPONSE')::JSON)->>'status'") != "Accepted" ]] ; then
              echo "Notarization failed!"
              echo $XCRUN_RESPONSE
              exit 1
            fi
          fi
      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python scripts/amalgamation.py
          zip -j duckdb_cli-osx-universal.zip build/release/duckdb
          gzip -9 -k -n -c build/release/duckdb > duckdb_cli-osx-universal.gz
          zip -j libduckdb-osx-universal.zip build/release/src/libduckdb*.dylib src/amalgamation/duckdb.hpp src/include/duckdb.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-osx-universal.zip duckdb_cli-osx-universal.zip duckdb_cli-osx-universal.gz
      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-binaries-osx
          path: |
            libduckdb-osx-universal.zip
            duckdb_cli-osx-universal.zip
      - name: Unit Test
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: make allunit
      - name: Tools Tests
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          python -m pytest tools/shell/tests --shell-binary build/release/duckdb
      - name: Examples
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          (cd examples/embedded-c; make)
          (cd examples/embedded-c++; make)

View File

@@ -0,0 +1,16 @@
# Runs on version tags (or on manual dispatch) and delegates to
# StagedUpload.yml to promote staged binaries to the release.
name: On Tag
on:
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
  push:
    tags:
      - 'v[0-9]+.[0-9]+.[0-9]+'
jobs:
  staged_upload:
    uses: ./.github/workflows/StagedUpload.yml
    secrets: inherit
    with:
      # For tag pushes inputs.override_git_describe is empty, so the pushed
      # tag name (github.ref_name) is used.
      target_git_describe: ${{ inputs.override_git_describe || github.ref_name }}

View File

@@ -0,0 +1,34 @@
# When a PR is labeled 'needs maintainer approval', mirror it as an issue in
# the private duckdblabs/duckdb-internal tracker so maintainers are notified.
name: Pull Request Requires Maintainer Approval
on:
  pull_request_target:
    types:
      - labeled
env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  # Stable title prefix used to locate an existing mirror issue for this PR.
  TITLE_PREFIX: "[duckdb/#${{ github.event.pull_request.number }}]"
  PUBLIC_PR_TITLE: ${{ github.event.pull_request.title }}
jobs:
  create_or_label_issue:
    if: github.event.label.name == 'needs maintainer approval'
    runs-on: ubuntu-latest
    steps:
      - name: Get mirror issue number
        run: |
          gh issue list --repo duckdblabs/duckdb-internal --search "${TITLE_PREFIX}" --json title,number --state all --jq ".[] | select(.title | startswith(\"${TITLE_PREFIX}\")).number" > mirror_issue_number.txt
          echo "MIRROR_ISSUE_NUMBER=$(cat mirror_issue_number.txt)" >> ${GITHUB_ENV}
      - name: Print whether mirror issue exists
        run: |
          if [ "${MIRROR_ISSUE_NUMBER}" == "" ]; then
            echo "Mirror issue with title prefix '${TITLE_PREFIX}' does not exist yet"
          else
            echo "Mirror issue with title prefix '${TITLE_PREFIX}' exists with number ${MIRROR_ISSUE_NUMBER}"
          fi
      # NOTE(review): despite the job/step name, this only creates a new issue
      # when none exists; an existing mirror issue is not re-labeled — confirm
      # whether that branch is intentionally a no-op.
      - name: Create or label issue
        run: |
          if [ "${MIRROR_ISSUE_NUMBER}" == "" ]; then
            gh issue create --repo duckdblabs/duckdb-internal --label "external action required" --label "Pull request" --title "${TITLE_PREFIX} - ${PUBLIC_PR_TITLE}" --body "Pull request ${TITLE_PREFIX} needs input from maintainers. See https://github.com/duckdb/duckdb/pull/${{ github.event.pull_request.number }}"
          fi

View File

@@ -0,0 +1,380 @@
# Performance/size regression checks: the current commit is built alongside a
# baseline build (base branch for PRs; previous successful main run for
# nightlies) and the two are compared by the jobs below.
name: Regression
on:
  workflow_call:
    inputs:
      base_hash:
        type: string
  workflow_dispatch:
    inputs:
      base_hash:
        description: 'Base hash'
        type: string
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Regression.yml'
      - '.github/config/out_of_tree_extensions.cmake'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
      - '!.github/patches/extensions/httpfs/*.patch' # httpfs used in some jobs
      - '!.github/config/extensions/httpfs.cmake'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    # NOTE(review): unlike the push filter above, this list omits
    # '.github/config/out_of_tree_extensions.cmake' — confirm the asymmetry
    # is intentional.
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Regression.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
      - '!.github/patches/extensions/httpfs/*.patch' # httpfs used in some jobs
      - '!.github/config/extensions/httpfs.cmake'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  # PRs compare against their base branch; '*_feature' branches against
  # 'feature'; everything else against 'main'.
  BASE_BRANCH: ${{ github.base_ref || (endsWith(github.ref, '_feature') && 'feature' || 'main') }}
  BASE_HASH: ${{ inputs.base_hash }}
jobs:
  # Gate job: all other jobs depend on this one via `needs: check-draft`.
  check-draft:
    # We run all other jobs on PRs only if they are not draft PR
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"
regression-test-benchmark-runner:
name: Regression Tests
needs: check-draft
runs-on: ubuntu-22.04
env:
CC: gcc-10
CXX: g++-10
GEN: ninja
BUILD_BENCHMARK: 1
BUILD_JEMALLOC: 1
CORE_EXTENSIONS: "json;tpch;tpcds;httpfs;inet;icu"
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build libcurl4-openssl-dev && pip install requests
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Checkout Private Regression
if: ${{ github.repository == 'duckdb/duckdb' && github.ref == 'refs/heads/main' }}
uses: actions/checkout@v4
with:
repository: duckdblabs/fivetran_regression
ref: main
token: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
path: benchmark/fivetran
# For PRs we compare against the base branch
- name: Build Current and Base Branch
if: ${{ !(github.repository == 'duckdb/duckdb' && github.ref == 'refs/heads/main') }}
shell: bash
run: |
make
git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
cd duckdb
make
cd ..
# For NightlyTest we fetch the last commit hash that ran Regression on main
- name: Build Main and Previous Successful Regression Hash
if: ${{ github.repository == 'duckdb/duckdb' && github.ref == 'refs/heads/main' }}
shell: bash
run: |
make
git clone https://github.com/duckdb/duckdb.git
cd duckdb
if [[ -z "${BASE_HASH}" ]]; then
export CHECKOUT_HASH=$(gh run list --repo duckdb/duckdb --branch=main --workflow=Regression --event=repository_dispatch --status=completed --json=headSha --limit=1 --jq '.[0].headSha')
else
export CHECKOUT_HASH="$BASE_HASH"
fi
git checkout $CHECKOUT_HASH
make
cd ..
- name: Set up benchmarks
shell: bash
run: |
cp -r benchmark duckdb/
- name: Regression Test Fivetran
if: ${{ github.repository == 'duckdb/duckdb' && github.ref == 'refs/heads/main' }}
shell: bash
run: |
python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks benchmark/fivetran/benchmark_list.csv --verbose --threads 2
- name: Regression Test Micro
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --verbose --threads 2
- name: Regression Test Ingestion Perf
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/ingestion.csv --verbose --threads 2
- name: Regression Test TPCH
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --verbose --threads 2
- name: Regression Test TPCH-PARQUET
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch_parquet.csv --verbose --threads 2
- name: Regression Test TPCDS
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --verbose --threads 2
- name: Regression Test H2OAI
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --verbose --threads 2
- name: Regression Test IMDB
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --verbose --threads 2
- name: Regression Test CSV
if: always()
shell: bash
run: |
python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/csv.csv --verbose --threads 2
- name: Regression Test RealNest
if: always()
shell: bash
run: |
mkdir -p duckdb_benchmark_data
rm -R duckdb/duckdb_benchmark_data
mkdir -p duckdb/duckdb_benchmark_data
wget -q https://blobs.duckdb.org/data/realnest/realnest.duckdb --output-document=duckdb_benchmark_data/real_nest.duckdb
cp duckdb_benchmark_data/real_nest.duckdb duckdb/duckdb_benchmark_data/real_nest.duckdb
python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/realnest.csv --verbose --threads 2
regression-test-storage:
name: Storage Size Regression Test
needs: check-draft
runs-on: ubuntu-22.04
env:
CC: gcc-10
CXX: g++-10
GEN: ninja
CORE_EXTENSIONS: "tpch;tpcds"
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install
shell: bash
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build && pip install requests
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: |
make
git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
cd duckdb
make
cd ..
- name: Regression Test
shell: bash
run: |
python scripts/regression_test_storage_size.py --old duckdb/build/release/duckdb --new build/release/duckdb
- name: Test for incompatibility
shell: bash
run: |
if (cmp test/sql/storage_version/storage_version.db duckdb/test/sql/storage_version/storage_version.db); then
echo "storage_changed=false" >> $GITHUB_ENV
else
echo "storage_changed=true" >> $GITHUB_ENV
fi
- name: Regression Compatibility Test (testing bidirectional compatibility)
shell: bash
if: env.storage_changed == 'false'
run: |
# Regenerate test/sql/storage_version.db with newer version -> read with older version
python3 scripts/generate_storage_version.py
./duckdb/build/release/duckdb test/sql/storage_version/storage_version.db
# Regenerate test/sql/storage_version.db with older version -> read with newer version (already performed as part of test.slow)
cd duckdb
python3 ../scripts/generate_storage_version.py
../build/release/duckdb duckdb/test/sql/storage_version/storage_version.db
cd ..
- name: Regression Compatibility Test (testing storage version has been bumped)
shell: bash
if: env.storage_changed == 'true'
run: |
python3 scripts/generate_storage_version.py
cd duckdb
python3 scripts/generate_storage_version.py
cd ..
if (cmp -i 8 -n 12 test/sql/storage_version.db duckdb/test/sql/storage_version.db); then
echo "Expected storage format to be bumped, but this is not the case"
echo "This might fail spuriously if changes to content of test database / generation script happened"
exit 1
else
echo "Storage bump detected, all good!"
fi
  # Compares built extension binary sizes against the baseline branch.
  regression-test-binary-size:
    name: Regression test binary size
    needs: check-draft
    runs-on: ubuntu-22.04
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
      CORE_EXTENSIONS: "tpch;tpcds;json;parquet"
      EXTENSION_STATIC_BUILD: 1
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build && pip install requests
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      # Build the current commit plus a baseline clone (in ./duckdb).
      - name: Build
        shell: bash
        run: |
          make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..
      - name: Regression Test Extension binary size
        shell: bash
        run: |
          python scripts/regression_test_extension_size.py --old 'duckdb/build/release/extension' --new build/release/extension --expect json,parquet,tpch,tpcds
  # Compares optimizer join-order plan costs on IMDB and TPCH against the
  # baseline branch.
  regression-test-plan-cost:
    name: Regression Test Join Order Plan Cost
    needs: check-draft
    runs-on: ubuntu-22.04
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
      CORE_EXTENSIONS: "tpch;httpfs"
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build libcurl4-openssl-dev && pip install tqdm
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      # Build the current commit plus a baseline clone (in ./duckdb).
      - name: Build
        shell: bash
        run: |
          make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..
      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/
      # `if: always()` so both suites report even if the first one regresses.
      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/plan_cost_runner.py --old duckdb/build/release/duckdb --new build/release/duckdb --dir=benchmark/imdb_plan_cost
      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/plan_cost_runner.py --old duckdb/build/release/duckdb --new build/release/duckdb --dir=benchmark/tpch_plan_cost

View File

@@ -0,0 +1,45 @@
# Downloads previously staged release assets from the S3 staging bucket and
# uploads them to the GitHub release identified by target_git_describe.
name: Staged Upload
on:
  workflow_call:
    inputs:
      target_git_describe:
        type: string
  workflow_dispatch:
    inputs:
      target_git_describe:
        type: string
env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
jobs:
  staged-upload:
    runs-on: ubuntu-latest
    # Without a target there is nothing to promote.
    if: ${{ inputs.target_git_describe != '' }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      # v5 for consistency with the other workflows in this repo (was v4,
      # which runs on a deprecated Node runtime).
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install
        shell: bash
        run: pip install awscli
      - name: Download from staging bucket
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          # Staged assets are keyed by the short hash of the checked-out commit.
          TARGET=$(git log -1 --format=%h)
          mkdir to_be_uploaded
          aws s3 cp --recursive "s3://duckdb-staging/$TARGET/${{ inputs.target_git_describe }}/$GITHUB_REPOSITORY/github_release" to_be_uploaded --region us-east-2
      - name: Deploy
        shell: bash
        run: |
          python3 scripts/asset-upload-gha.py to_be_uploaded/*

View File

@@ -0,0 +1,87 @@
# Builds the Swift package and runs its tests on macOS and (on main) on iOS
# and tvOS simulators.
name: Swift
on:
  workflow_dispatch:
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'examples/**'
      - 'test/**'
      - 'tools/**'
      - '!tools/swift/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Swift.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'examples/**'
      - 'test/**'
      - 'tools/**'
      - '!tools/swift/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Swift.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true
jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PR
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"
  test-apple-platforms:
    name: Test Apple Platforms
    needs: check-draft
    strategy:
      matrix:
        # destinations need to match selected version of Xcode
        # https://github.com/actions/runner-images/blob/main/images/macos/macos-14-Readme.md#installed-simulators
        destination:
          - 'macOS'
          - 'iOS Simulator,name=iPhone 16'
          # "3rd generation" fixed from "3nd generation" (not a valid device
          # name). NOTE(review): verify the exact device string against the
          # installed-simulators list for the selected Xcode version.
          - 'tvOS Simulator,name=Apple TV 4K (at 1080p) (3rd generation)'
        isRelease:
          - ${{ github.ref == 'refs/heads/main' }}
        # Simulator destinations only run on main; PR/branch builds test macOS only.
        exclude:
          - isRelease: false
            destination: 'iOS Simulator,name=iPhone 16'
          - isRelease: false
            destination: 'tvOS Simulator,name=Apple TV 4K (at 1080p) (3rd generation)'
    runs-on: macos-14
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          # we need tags for the ubiquity build script to run without errors
          fetch-depth: '0'
      - name: Prepare Package
        run: python3 tools/swift/create_package.py tools/swift
      - name: Select Xcode
        run: sudo xcode-select -switch /Applications/Xcode_15.4.app && /usr/bin/xcodebuild -version
      - name: Run Tests
        run: |
          xcrun xcodebuild test \
            -workspace tools/swift/duckdb-swift/DuckDB.xcworkspace \
            -scheme DuckDB \
            -destination platform='${{ matrix.destination }}'

View File

@@ -0,0 +1,70 @@
# Mirrors the generated Swift package into duckdb/duckdb-swift and tags a
# release there.
name: SwiftRelease
on:
  workflow_dispatch:
  repository_dispatch:
  push:
    tags:
      - '**'
env:
  # NOTE(review): none of the triggers above produce event_name == 'release'
  # (a tag push has event_name 'push'), so SOURCE_REF always resolves to
  # 'main' — confirm whether tag pushes should check out the tag instead.
  SOURCE_REF: ${{ github.event_name == 'release' && github.ref_name || 'main' }}
  TARGET_REPO: 'duckdb/duckdb-swift'
  TARGET_REF: 'main'
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
jobs:
  update:
    name: Update Swift Repo
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Source Repo
        uses: actions/checkout@v4
        with:
          # we need tags for the ubiquity build script to run without errors
          fetch-depth: '0'
          ref: ${{ env.SOURCE_REF }}
          path: 'source-repo'
      - name: Checkout Target Repo
        uses: actions/checkout@v4
        with:
          repository: ${{ env.TARGET_REPO }}
          ref: ${{ env.TARGET_REF }}
          token: ${{ env.GH_TOKEN }}
          path: 'target-repo'
      - name: Generate Swift Package
        run: python3 source-repo/tools/swift/create_package.py source-repo/tools/swift
      # Graft the target repo's git history onto the freshly generated package
      # contents.
      - name: Package Update
        run: |
          mkdir updated-repo
          mv -v target-repo/.git updated-repo/.git
          mv -v source-repo/tools/swift/duckdb-swift/* updated-repo/
      # Commit only when the working tree actually changed.
      - name: Commit Updated Repo
        run: |
          git -C updated-repo config user.name github-actions
          git -C updated-repo config user.email github-actions@github.com
          git -C updated-repo add -A
          if [[ $(git -C updated-repo status --porcelain) ]]; then
            git -C updated-repo commit -m "automated update"
          fi
      - name: Push Update
        run: |
          git -C updated-repo push
      # Tag the mirror with the version computed by scripts/package_build.py,
      # skipping if that tag already exists.
      - name: Tag Release
        run: |
          cd source-repo
          export TAG_NAME=`python3 -c "import sys, os; sys.path.append(os.path.join('scripts')); import package_build; print(package_build.git_dev_version())"`
          cd ..
          git -C updated-repo fetch --tags
          if [[ $(git -C updated-repo tag -l $TAG_NAME) ]]; then
            echo 'Tag '$TAG_NAME' already exists - skipping'
          else
            git -C updated-repo tag -a $TAG_NAME -m "Release $TAG_NAME"
            git -C updated-repo push origin $TAG_NAME
          fi

View File

@@ -0,0 +1,350 @@
name: Windows
on:
workflow_call:
inputs:
override_git_describe:
type: string
git_ref:
type: string
skip_tests:
type: string
run_all:
type: string
workflow_dispatch:
inputs:
override_git_describe:
type: string
git_ref:
type: string
skip_tests:
type: string
run_all:
type: string
push:
branches-ignore:
- 'main'
- 'feature'
- 'v*.*-*'
paths-ignore:
- '**.md'
- 'test/configs/**'
- 'tools/**'
- '!tools/shell/**'
- '.github/patches/duckdb-wasm/**'
- '.github/workflows/**'
- '!.github/workflows/Windows.yml'
- '.github/config/extensions/*.cmake'
- '.github/patches/extensions/**/*.patch'
merge_group:
pull_request:
types: [opened, reopened, ready_for_review, converted_to_draft]
paths-ignore:
- '**.md'
- 'test/configs/**'
- 'tools/**'
- '!tools/shell/**'
- '.github/patches/duckdb-wasm/**'
- '.github/workflows/**'
- '!.github/workflows/Windows.yml'
- '.github/config/extensions/*.cmake'
- '.github/patches/extensions/**/*.patch'
concurrency:
group: windows-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
cancel-in-progress: true
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }}
OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}
AZURE_CODESIGN_ENDPOINT: https://eus.codesigning.azure.net/
AZURE_CODESIGN_ACCOUNT: duckdb-signing-2
AZURE_CODESIGN_PROFILE: duckdb-certificate-profile
jobs:
check-draft:
# We run all other jobs on PRs only if they are not draft PR
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-24.04
steps:
- name: Preliminary checks on CI
run: echo "Event name is ${{ github.event_name }}"
win-release-64:
# Builds binaries for windows_amd64
name: Windows (64 Bit)
needs: check-draft
runs-on: windows-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ inputs.git_ref }}
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Setup Ccache
uses: hendrikmuhs/ccache-action@main
with:
key: ${{ github.job }}
save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
- name: Build
shell: bash
run: |
python scripts/windows_ci.py
cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_GENERATOR_PLATFORM=x64 -DENABLE_EXTENSION_AUTOLOADING=1 -DENABLE_EXTENSION_AUTOINSTALL=1 -DDUCKDB_EXTENSION_CONFIGS="${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake" -DDISABLE_UNITY=1 -DOVERRIDE_GIT_DESCRIBE="$OVERRIDE_GIT_DESCRIBE"
cmake --build . --config Release --parallel
- name: Set DUCKDB_INSTALL_LIB for ADBC tests
shell: pwsh
run: echo "DUCKDB_INSTALL_LIB=$((Get-ChildItem -Recurse -Filter "duckdb.dll" | Select-Object -First 1).FullName)" >> $GITHUB_ENV
- name: Test DUCKDB_INSTALL_LIB variable
shell: bash
run: echo $DUCKDB_INSTALL_LIB
- name: Test
shell: bash
if: ${{ inputs.skip_tests != 'true' }}
run: |
test/Release/unittest.exe
- name: Test with VS2019 C++ stdlib
shell: bash
if: ${{ inputs.skip_tests != 'true' }}
run: |
choco install wget -y --no-progress
wget -P ./test/Release https://blobs.duckdb.org/ci/msvcp140.dll
ls ./test/Release
./test/Release/unittest.exe
rm ./test/Release/msvcp140.dll
- name: Tools Test
shell: bash
if: ${{ inputs.skip_tests != 'true' }}
run: |
python -m pip install pytest
python -m pytest tools/shell/tests --shell-binary Release/duckdb.exe
tools/sqlite3_api_wrapper/Release/test_sqlite3_api_wrapper.exe
- name: Sign files with Azure Trusted Signing (TM)
if: github.repository == 'duckdb/duckdb' && github.event_name != 'pull_request'
uses: azure/trusted-signing-action@v0
with:
azure-tenant-id: ${{ secrets.AZURE_CODESIGN_TENANT_ID }}
azure-client-id: ${{ secrets.AZURE_CODESIGN_CLIENT_ID }}
azure-client-secret: ${{ secrets.AZURE_CODESIGN_CLIENT_SECRET }}
endpoint: ${{ env.AZURE_CODESIGN_ENDPOINT }}
trusted-signing-account-name: ${{ env.AZURE_CODESIGN_ACCOUNT }}
certificate-profile-name: ${{ env.AZURE_CODESIGN_PROFILE }}
files-folder: ${{ github.workspace }}
files-folder-filter: exe,dll
files-folder-recurse: true
file-digest: SHA256
timestamp-rfc3161: http://timestamp.acs.microsoft.com
timestamp-digest: SHA256
- name: Deploy
shell: bash
env:
AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
run: |
python scripts/amalgamation.py
/c/msys64/usr/bin/bash.exe -lc "pacman -Sy --noconfirm zip"
/c/msys64/usr/bin/zip.exe -j duckdb_cli-windows-amd64.zip Release/duckdb.exe
/c/msys64/usr/bin/zip.exe -j libduckdb-windows-amd64.zip src/Release/duckdb.dll src/Release/duckdb.lib src/amalgamation/duckdb.hpp src/include/duckdb.h
./scripts/upload-assets-to-staging.sh github_release libduckdb-windows-amd64.zip duckdb_cli-windows-amd64.zip
- uses: actions/upload-artifact@v4
with:
name: duckdb-binaries-windows-amd64
path: |
libduckdb-windows-amd64.zip
duckdb_cli-windows-amd64.zip
- uses: ilammy/msvc-dev-cmd@v1
- name: Duckdb.dll export symbols with C++ on Windows
shell: bash
run: cl -I src/include examples/embedded-c++-windows/cppintegration.cpp -link src/Release/duckdb.lib
  # 32-bit (x86) Windows release build and test. Gated on the 64-bit build and
  # the draft check; only runs on main, on forks, or when run_all is requested.
  win-release-32:
    name: Windows (32 Bit)
    needs:
      - win-release-64
      - check-draft
    if: ${{ github.ref == 'refs/heads/main' || github.repository != 'duckdb/duckdb' || inputs.run_all == 'true' }}
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history so git describe / OVERRIDE_GIT_DESCRIBE resolve version tags.
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          # Only persist the cache for branches listed in BRANCHES_TO_BE_CACHED
          # (or for all branches when that variable is unset).
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      # Configure a Win32 Visual Studio build with the bundled extensions.
      # NOTE(review): the configure invocation passes no explicit source-directory
      # argument -- confirm CMake resolves the workspace root here as intended.
      - name: Build
        shell: bash
        run: |
          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_GENERATOR_PLATFORM=Win32 -DDUCKDB_EXTENSION_CONFIGS="${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake" -DOVERRIDE_GIT_DESCRIBE="$OVERRIDE_GIT_DESCRIBE"
          cmake --build . --config Release --parallel
      - name: Test
        shell: bash
        run: test/Release/unittest.exe
      # Shell-level tests (pytest against the CLI) plus the sqlite3 API wrapper tests.
      - name: Tools Test
        shell: bash
        run: |
          python -m pip install pytest
          python -m pytest tools/shell/tests --shell-binary Release/duckdb.exe
          tools/sqlite3_api_wrapper/Release/test_sqlite3_api_wrapper.exe
  # Windows ARM64 cross-build. Unit tests are disabled (BUILD_UNITTESTS=FALSE);
  # the produced binaries are signed, zipped, pushed to the staging bucket, and
  # uploaded as a workflow artifact.
  win-release-arm64:
    name: Windows (ARM64)
    needs:
      - win-release-64
      - check-draft
    if: ${{ github.ref == 'refs/heads/main' || github.repository != 'duckdb/duckdb' || inputs.run_all == 'true' }}
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history so git describe / OVERRIDE_GIT_DESCRIBE resolve version tags.
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      # Cross-compile for ARM64; DUCKDB_EXPLICIT_PLATFORM/DUCKDB_CUSTOM_PLATFORM
      # pin the platform string used for extension resolution.
      - name: Build
        shell: bash
        run: |
          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_GENERATOR_PLATFORM=ARM64 -DDUCKDB_EXTENSION_CONFIGS="${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake" -DOVERRIDE_GIT_DESCRIBE="$OVERRIDE_GIT_DESCRIBE" -DDUCKDB_EXPLICIT_PLATFORM=windows_arm64 -DDUCKDB_CUSTOM_PLATFORM=windows_arm64 -DBUILD_UNITTESTS=FALSE
          cmake --build . --config Release --parallel
      # Code-sign every exe/dll under the workspace; skipped on forks and PRs
      # where the signing secrets are not available.
      - name: Sign files with Azure Trusted Signing (TM)
        if: github.repository == 'duckdb/duckdb' && github.event_name != 'pull_request'
        uses: azure/trusted-signing-action@v0
        with:
          azure-tenant-id: ${{ secrets.AZURE_CODESIGN_TENANT_ID }}
          azure-client-id: ${{ secrets.AZURE_CODESIGN_CLIENT_ID }}
          azure-client-secret: ${{ secrets.AZURE_CODESIGN_CLIENT_SECRET }}
          endpoint: ${{ env.AZURE_CODESIGN_ENDPOINT }}
          trusted-signing-account-name: ${{ env.AZURE_CODESIGN_ACCOUNT }}
          certificate-profile-name: ${{ env.AZURE_CODESIGN_PROFILE }}
          files-folder: ${{ github.workspace }}
          files-folder-filter: exe,dll
          files-folder-recurse: true
          file-digest: SHA256
          timestamp-rfc3161: http://timestamp.acs.microsoft.com
          timestamp-digest: SHA256
      # Zip the CLI and library (using msys2's zip, installed ad hoc) and push
      # both archives to the staging bucket for the release pipeline.
      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python scripts/amalgamation.py
          /c/msys64/usr/bin/bash.exe -lc "pacman -Sy --noconfirm zip"
          /c/msys64/usr/bin/zip.exe -j duckdb_cli-windows-arm64.zip Release/duckdb.exe
          /c/msys64/usr/bin/zip.exe -j libduckdb-windows-arm64.zip src/Release/duckdb.dll src/Release/duckdb.lib src/amalgamation/duckdb.hpp src/include/duckdb.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-windows-arm64.zip duckdb_cli-windows-arm64.zip
      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-binaries-windows-arm64
          path: |
            libduckdb-windows-arm64.zip
            duckdb_cli-windows-arm64.zip
  # MinGW-w64 smoke build: verifies DuckDB builds and its tests pass with the
  # GCC toolchain under MSYS2, with only the parquet extension enabled.
  mingw:
    name: MinGW (64 Bit)
    needs:
      - win-release-64
      - check-draft
    if: ${{ inputs.skip_tests != 'true' }}
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.git_ref }}
      - uses: msys2/setup-msys2@v2
        with:
          msystem: MINGW64
          update: true
          install: git mingw-w64-x86_64-toolchain mingw-w64-x86_64-cmake mingw-w64-x86_64-ninja
          # Cache MSYS2 packages only on main or on forks.
          cache: ${{ github.ref == 'refs/heads/main' || github.repository != 'duckdb/duckdb' }}
      # see here: https://gist.github.com/scivision/1de4fd6abea9ba6b2d87dc1e86b5d2ce
      - name: Put MSYS2_MinGW64 on PATH
        # there is not yet an environment variable for this path from msys2/setup-msys2
        # NOTE(review): `export PATH=...` only affects this step's shell and does
        # not persist to later steps (that would require writing to $GITHUB_PATH).
        # Later steps appear to rely on the msys2 shell's own PATH -- confirm
        # this step is still needed.
        shell: msys2 {0}
        run: export PATH=D:/a/_temp/msys/msys64/mingw64/bin:$PATH
      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
      - name: Build
        shell: msys2 {0}
        run: |
          cmake -G "Ninja" -DCMAKE_BUILD_TYPE=Release -DBUILD_EXTENSIONS='parquet' -DOVERRIDE_GIT_DESCRIBE="$OVERRIDE_GIT_DESCRIBE"
          cmake --build . --config Release
      # The DLL is copied next to the test binary so Windows can load it.
      - name: Test
        shell: msys2 {0}
        run: |
          cp src/libduckdb.dll .
          test/unittest.exe
      - name: Tools Test
        shell: msys2 {0}
        run: |
          tools/sqlite3_api_wrapper/test_sqlite3_api_wrapper.exe
  # Collects the per-architecture Windows artifacts into a single combined
  # "duckdb-binaries-windows" artifact for downstream consumers.
  win-packaged-upload:
    runs-on: windows-latest
    needs:
      - win-release-64
      - win-release-arm64
    steps:
      # Download both per-arch artifacts into the working directory, then
      # re-upload the four zips as one artifact.
      - uses: actions/download-artifact@v4
        with:
          name: duckdb-binaries-windows-arm64
      - uses: actions/download-artifact@v4
        with:
          name: duckdb-binaries-windows-amd64
      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-binaries-windows
          path: |
            libduckdb-windows-amd64.zip
            duckdb_cli-windows-amd64.zip
            libduckdb-windows-arm64.zip
            duckdb_cli-windows-arm64.zip

View File

@@ -0,0 +1,46 @@
# This is a reusable workflow to be used by extensions based on the extension template
name: Client Tests
on:
  workflow_call:
    inputs:
      # DuckDB ref (tag, branch, or commit) to check the vendored submodule out to.
      duckdb_version:
        required: true
        type: string
jobs:
  # Builds the DuckDB Python client (debug) against the pinned DuckDB version
  # and runs the client's pytest suite.
  python:
    name: Python
    runs-on: ubuntu-latest
    env:
      # GEN=ninja makes the DuckDB build use the Ninja generator.
      GEN: ninja
    steps:
      - name: Install Ninja
        run: |
          sudo apt-get update -y -qq
          sudo apt-get install -y -qq ninja-build
      - uses: actions/checkout@v4
        with:
          # Full history plus submodules: the duckdb submodule is re-pinned below.
          fetch-depth: 0
          submodules: 'true'
      - name: Checkout DuckDB to version
        run: |
          cd duckdb
          git checkout ${{ inputs.duckdb_version }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Build DuckDB Python client
        run: make debug_python
      - name: Install Python test dependencies
        run: python -m pip install --upgrade pytest
      - name: Run Python client tests
        run: |
          make test_debug_python

View File

@@ -0,0 +1,68 @@
# Reusable wrapper around extension-ci-tools' distribution workflow: builds the
# extensions selected by `extension_config` for all target platforms and
# uploads the resulting binaries as artifacts.
name: Extension Build and Deploy
on:
  workflow_call:
    inputs:
      # Prefix for the artifact that holds all built extensions.
      artifact_prefix:
        required: true
        type: string
      # extension_config.cmake contents selecting which extensions get built.
      extension_config:
        required: true
        type: string
      # Architectures (duckdb_platform names) to skip building for.
      exclude_archs:
        required: false
        type: string
        default: ''
      # Extra toolchains to install in the build containers.
      extra_toolchains:
        required: false
        type: string
        default: ""
      # NOTE(review): declared but not forwarded to the called workflow below --
      # confirm whether duckdb_ref is still needed.
      duckdb_ref:
        required: false
        type: string
        default: ""
      # Tag passed through as duckdb_tag to override version detection.
      override_tag:
        required: false
        type: string
        default: ""
      skip_tests:
        required: false
        type: boolean
        default: false
      save_cache:
        required: false
        type: boolean
        default: false
jobs:
  build:
    name: Build
    uses: duckdb/extension-ci-tools/.github/workflows/_extension_distribution.yml@main
    with:
      # We piggy-back extension-template to build the extensions in extension_config, it's hacky, but it works ¯\_(ツ)_/¯
      override_repository: duckdb/extension-template
      override_ref: v1.4-andium
      # Note when `upload_all_extensions` is true, the extension name is used as prefix to the artifact holding all built extensions
      upload_all_extensions: true
      extension_name: ${{ inputs.artifact_prefix }}
      # DuckDB version is overridden to the current commit of the current repository
      set_caller_as_duckdb: true
      duckdb_version: ${{ github.sha }}
      # CI tools is pinned to main
      override_ci_tools_repository: duckdb/extension-ci-tools
      ci_tools_version: main
      exclude_archs: ${{ inputs.exclude_archs }}
      extra_toolchains: ${{ inputs.extra_toolchains }}
      use_merged_vcpkg_manifest: '1'
      duckdb_tag: ${{ inputs.override_tag }}
      skip_tests: ${{ inputs.skip_tests }}
      save_cache: ${{ inputs.save_cache }}
      # The extension_config.cmake configuration that gets built
      extra_extension_config: ${{ inputs.extension_config }}

View File

@@ -0,0 +1,51 @@
# Runs Google's OSS-Fuzz CIFuzz against the DuckDB project for each sanitizer,
# uploading any crashing inputs as artifacts.
name: CIFuzz
on:
  workflow_dispatch:
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'tools/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/cifuzz.yml'
# Cancel superseded runs for the same ref (the github.sha term keeps main runs distinct).
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true
jobs:
  Fuzzing:
    name: OSSFuzz
    # Upstream repo only: forks lack the OSS-Fuzz project registration.
    if: github.repository == 'duckdb/duckdb'
    strategy:
      # Let the remaining sanitizers finish even if one of them fails.
      fail-fast: false
      matrix:
        sanitizer: [address, undefined, memory]
    runs-on: ubuntu-latest
    steps:
      - name: Build Fuzzers ${{ matrix.sanitizer }}
        id: build
        uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master
        with:
          oss-fuzz-project-name: 'duckdb'
          dry-run: false
          sanitizer: ${{ matrix.sanitizer }}
      # Fuzz for one hour per sanitizer.
      - name: Run Fuzzers ${{ matrix.sanitizer }}
        uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master
        with:
          oss-fuzz-project-name: 'duckdb'
          fuzz-seconds: 3600
          dry-run: false
          sanitizer: ${{ matrix.sanitizer }}
      # Preserve crashing inputs, but only when the build itself succeeded
      # (a build failure produces no artifacts worth keeping).
      - name: Upload Crash
        uses: actions/upload-artifact@v4
        if: failure() && steps.build.outcome == 'success'
        with:
          name: artifacts-${{ matrix.sanitizer }}
          path: ./out/artifacts

View File

@@ -0,0 +1,9 @@
Thanks for opening this issue! Based on our automated check, it seems that your post contains some code but it does not use [code blocks](https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/creating-and-highlighting-code-blocks) to format it.
Please double-check your post and revise it if necessary. To employ syntax highlighting, it's recommended to use code blocks with triple backticks, e.g.:
````
```sql
SELECT ...
```
````
If this is a false positive, feel free to disregard this comment.

View File

@@ -0,0 +1,53 @@
# Creates and uploads a Coverity build on a schedule
# Requires that two secrets be created:
# COVERITY_SCAN_EMAIL, with the email address that should be notified with scan results
# COVERITY_SCAN_TOKEN, with the token from the Coverity project page (e.g., https://scan.coverity.com/projects/moshekaplan-duckdb?tab=project_settings )
# Also, ensure that the 'github.repository' comparison and 'COVERITY_PROJECT_NAME' values below are accurate
name: Coverity Scan
on:
  repository_dispatch:
    # Run once daily (via repository_dispatch), duckdb is at ~900k LOC
    # Scan frequency limits from https://scan.coverity.com/faq#frequency :
    # Up to 28 builds per week, with a maximum of 4 builds per day, for projects with fewer than 100K lines of code
    # Up to 21 builds per week, with a maximum of 3 builds per day, for projects with 100K to 500K lines of code
    # Up to 14 builds per week, with a maximum of 2 build per day, for projects with 500K to 1 million lines of code
    # Up to 7 builds per week, with a maximum of 1 build per day, for projects with more than 1 million lines of code
  # Support manual execution
  workflow_dispatch:
jobs:
  coverity:
    # So it doesn't try to run on forks
    if: github.repository == 'duckdb/duckdb'
    runs-on: ubuntu-latest
    env:
      COVERITY_PROJECT_NAME: DuckDB
    steps:
      - uses: actions/checkout@v4
      # The Coverity build tool is fetched fresh each run; the download is
      # authenticated with the project token.
      - name: Download and extract the Coverity Build Tool
        run: |
          wget https://scan.coverity.com/download/cxx/linux64 --post-data "token=${{ secrets.COVERITY_SCAN_TOKEN }}&project=${{ env.COVERITY_PROJECT_NAME }}" -O cov-analysis-linux64.tar.gz
          mkdir cov-analysis-linux64
          tar xzf cov-analysis-linux64.tar.gz --strip 1 -C cov-analysis-linux64
      - name: Install dependencies
        run: sudo apt update -y -qq && sudo apt install -y git g++ cmake ninja-build libssl-dev default-jdk
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      # cov-build wraps the normal `make` invocation and records the compile
      # commands into cov-int for analysis.
      - name: Build with cov-build
        run: cov-analysis-linux64/bin/cov-build --dir cov-int make
        env:
          BUILD_TPCE: 1
          CORE_EXTENSIONS: "autocomplete;icu;tpcds;tpch;fts;httpfs;json;inet"
      # Package the intermediate directory and submit it to Coverity Scan.
      - name: Upload the result
        run: |
          tar czvf cov-int.tgz cov-int
          curl \
            --form project=${{ env.COVERITY_PROJECT_NAME }} \
            --form email=${{ secrets.COVERITY_SCAN_EMAIL }} \
            --form token=${{ secrets.COVERITY_SCAN_TOKEN }} \
            --form file=@cov-int.tgz \
            https://scan.coverity.com/builds

View File

@@ -0,0 +1,22 @@
/usr*
*/cl.hpp
*/tools/shell/*
*/tools/sqlite3_api_wrapper/*
*/benchmark/*
*/examples/*
*/third_party/*
*/test/*
*/extension/autocomplete/*
*/extension/fts/*
*/extension/icu/*
*/extension/jemalloc/*
*/extension/tpcds/*
*/extension/tpch/*
*/extension/json/yyjson/*
*/extension_helper.cpp
*/generated_extension_loader.hpp
*/adbc/*
*/enum_util.cpp
*/enums/expression_type.cpp
*/serialization/*
*/json_enums.cpp

View File

@@ -0,0 +1,2 @@
# lcov/genhtml exclusion regexes: lines (and branches) whose source matches any
# alternative below are omitted from coverage accounting -- exception-raising
# paths, explicit LCOV_EXCL markers, and Print helpers.
lcov_excl_line = default:|InternalException|NotImplementedException|IOException|SerializationException|LCOV_EXCL_LINE|Print
lcov_excl_br_line = InternalException|NotImplementedException|IOException|SerializationException|LCOV_EXCL_BR_LINE|Print