removed duckdb
5  .gitmodules  vendored
@@ -7,10 +7,7 @@
 [submodule "external/tomlplusplus"]
 	path = external/tomlplusplus
 	url = https://github.com/marzer/tomlplusplus
 [submodule "external/bitsery"]
 	path = external/bitsery
 	url = https://github.com/fraillt/bitsery
-[submodule "external/duckdb"]
-	path = external/duckdb
-	url = https://github.com/duckdb/duckdb.git

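Note: the hunk above drops the three-line duckdb entry while leaving the tomlplusplus and bitsery submodules untouched. A removal like this is typically produced with the standard submodule-removal sequence; the following is a minimal sketch, not the commands actually run for this commit:

```shell
# Hedged sketch: the usual way to drop a vendored submodule such as external/duckdb.
# Paths follow the .gitmodules entries above; run from the repository root.
git submodule deinit -f external/duckdb    # unregister the submodule from .git/config
git rm -f external/duckdb                  # remove the gitlink and the .gitmodules entry
rm -rf .git/modules/external/duckdb        # drop the cached submodule clone (optional cleanup)
git commit -m "removed duckdb"
```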
33  external/duckdb/.clang-format  vendored
@@ -1,33 +0,0 @@
---
BasedOnStyle: LLVM
SortIncludes: false
TabWidth: 4
IndentWidth: 4
ColumnLimit: 120
AllowShortFunctionsOnASingleLine: false
---
UseTab: ForIndentation
DerivePointerAlignment: false
PointerAlignment: Right
AlignConsecutiveMacros: true
AlignTrailingComments: true
AllowAllArgumentsOnNextLine: true
AllowAllConstructorInitializersOnNextLine: true
AllowAllParametersOfDeclarationOnNextLine: true
AlignAfterOpenBracket: Align
SpaceBeforeCpp11BracedList: true
SpaceBeforeCtorInitializerColon: true
SpaceBeforeInheritanceColon: true
SpacesInAngles: false
SpacesInCStyleCastParentheses: false
SpacesInConditionalStatement: false
AllowShortLambdasOnASingleLine: Inline
AllowShortLoopsOnASingleLine: false
AlwaysBreakTemplateDeclarations: Yes
IncludeBlocks: Regroup
Language: Cpp
AccessModifierOffset: -4
---
Language: Java
SpaceAfterCStyleCast: true
---
55  external/duckdb/.clang-tidy  vendored
@@ -1,55 +0,0 @@
Checks: '-*,clang-diagnostic-*,bugprone-*,performance-*,google-explicit-constructor,google-build-using-namespace,google-runtime-int,misc-definitions-in-headers,modernize-use-nullptr,modernize-use-override,-bugprone-macro-parentheses,readability-braces-around-statements,-bugprone-branch-clone,readability-identifier-naming,hicpp-exception-baseclass,misc-throw-by-value-catch-by-reference,-bugprone-signed-char-misuse,-bugprone-misplaced-widening-cast,-bugprone-sizeof-expression,-bugprone-easily-swappable-parameters,google-global-names-in-headers,llvm-header-guard,misc-definitions-in-headers,modernize-use-emplace,modernize-use-bool-literals,-performance-inefficient-string-concatenation,-performance-no-int-to-ptr,readability-container-size-empty,cppcoreguidelines-pro-type-cstyle-cast,-llvm-header-guard,-performance-enum-size,cppcoreguidelines-pro-type-const-cast,cppcoreguidelines-avoid-non-const-global-variables,cppcoreguidelines-interfaces-global-init,cppcoreguidelines-slicing,cppcoreguidelines-rvalue-reference-param-not-moved,cppcoreguidelines-virtual-class-destructor,-readability-identifier-naming,-bugprone-exception-escape,-bugprone-unused-local-non-trivial-variable,-bugprone-empty-catch,-misc-use-internal-linkage,-readability-static-definition-in-anonymous-namespace'
WarningsAsErrors: '*'
HeaderFilterRegex: 'src/include/duckdb/.*'
FormatStyle: none
CheckOptions:
  - key: readability-identifier-naming.ClassCase
    value: CamelCase
  - key: readability-identifier-naming.EnumCase
    value: CamelCase
  - key: readability-identifier-naming.TypedefCase
    value: lower_case
  - key: readability-identifier-naming.TypedefSuffix
    value: _t
  - key: readability-identifier-naming.FunctionCase
    value: CamelCase
  - key: readability-identifier-naming.MemberCase
    value: lower_case
  - key: readability-identifier-naming.ParameterCase
    value: lower_case
  - key: readability-identifier-naming.ConstantCase
    value: aNy_CasE
  - key: readability-identifier-naming.ConstantParameterCase
    value: lower_case
  - key: readability-identifier-naming.NamespaceCase
    value: lower_case
  - key: readability-identifier-naming.MacroDefinitionCase
    value: UPPER_CASE
  - key: readability-identifier-naming.StaticConstantCase
    value: UPPER_CASE
  - key: readability-identifier-naming.ConstantMemberCase
    value: aNy_CasE
  - key: readability-identifier-naming.StaticVariableCase
    value: UPPER_CASE
  - key: readability-identifier-naming.ClassConstantCase
    value: UPPER_CASE
  - key: readability-identifier-naming.EnumConstantCase
    value: UPPER_CASE
  - key: readability-identifier-naming.ConstexprVariableCase
    value: aNy_CasE
  - key: readability-identifier-naming.StaticConstantCase
    value: UPPER_CASE
  - key: readability-identifier-naming.TemplateTemplateParameterCase
    value: UPPER_CASE
  - key: readability-identifier-naming.TypeTemplateParameterCase
    value: UPPER_CASE
  - key: readability-identifier-naming.VariableCase
    value: lower_case
  - key: modernize-use-emplace.SmartPointers
    value: '::duckdb::shared_ptr;::duckdb::unique_ptr;::std::auto_ptr;::duckdb::weak_ptr'
  - key: cppcoreguidelines-rvalue-reference-param-not-moved.IgnoreUnnamedParams
    value: true
  - key: misc-use-internal-linkage
    value: true
  - key: readability-static-definition-in-anonymous-namespace
    value: true
3  external/duckdb/.clangd  vendored
@@ -1,3 +0,0 @@
CompileFlags:
  CompilationDatabase: build/clangd
  Add: -Wno-unqualified-std-cast-call
23  external/duckdb/.codecov.yml  vendored
@@ -1,23 +0,0 @@
comment: false
coverage:
  precision: 2
  round: down
  range: "0...100"
  status:
    project:
      default:
        # advanced settings
        if_not_found: success
        if_ci_failed: failure
        informational: true
        only_pulls: false
    patch:
      default:
        branches:
          - main
        if_not_found: success
        if_ci_failed: error
        informational: true
        only_pulls: true
        paths:
          - "src"
53  external/duckdb/.editorconfig  vendored
@@ -1,53 +0,0 @@
# Unix-style newlines with a newline ending every file
[*.{c,cpp,h,hpp}]
end_of_line = lf
insert_final_newline = true
indent_style = tab
tab_width = 4
indent_size = tab
trim_trailing_whitespace = true
charset = utf-8
max_line_length = 120
x-soft-wrap-text = true
x-soft-wrap-mode = CharacterWidth
x-soft-wrap-limit = 120
x-show-invisibles = false
x-show-spaces = false

[*.{java}]
end_of_line = lf
insert_final_newline = true
indent_style = tab
tab_width = 4
indent_size = tab
trim_trailing_whitespace = false
charset = utf-8
max_line_length = 120
x-soft-wrap-text = true
x-soft-wrap-mode = CharacterWidth
x-soft-wrap-limit = 120
x-show-invisibles = false
x-show-spaces = false

[*.{test,test_slow,test_coverage,benchmark}]
end_of_line = lf
insert_final_newline = true
indent_style = tab
tab_width = 4
indent_size = tab
trim_trailing_whitespace = false
charset = utf-8
x-soft-wrap-text = false

[Makefile]
end_of_line = lf
insert_final_newline = true
indent_style = tab
tab_width = 4
indent_size = tab
trim_trailing_whitespace = true
charset = utf-8
x-soft-wrap-text = false

[*keywords.list]
insert_final_newline = false
1  external/duckdb/.gitattributes  vendored
@@ -1 +0,0 @@
*.test linguist-language=sql
@@ -1,120 +0,0 @@
name: Bug report
description: Create a report to help us improve
labels:
  - needs triage
body:
  - type: markdown
    attributes:
      value: >
        DuckDB has several repositories for different components; please make sure you're raising your issue in the correct one:

        * [Documentation](https://github.com/duckdb/duckdb-web/issues/new)
        * Clients: [Go](https://github.com/duckdb/duckdb-go/issues/new), [Java (JDBC)](https://github.com/duckdb/duckdb-java/issues/new), [Node.js](https://github.com/duckdb/duckdb-node-neo/issues/new), [ODBC](https://github.com/duckdb/duckdb-odbc/issues/new), [Python](https://github.com/duckdb/duckdb-python/issues/new), [R](https://github.com/duckdb/duckdb-r/issues/new), [Rust](https://github.com/duckdb/duckdb-rs/issues/new), [WebAssembly (Wasm)](https://github.com/duckdb/duckdb-wasm/issues/new)
        * Extensions: [`aws`](https://github.com/duckdb/duckdb-aws/issues/new), [`azure`](https://github.com/duckdb/duckdb-azure/issues/new), [`delta`](https://github.com/duckdb/duckdb-delta/issues/new), [`ducklake`](https://github.com/duckdb/duckdb-ducklake/issues/new), [`encodings`](https://github.com/duckdb/duckdb-encodings/issues/new), [`excel`](https://github.com/duckdb/duckdb-excel/issues/new), [`fts`](https://github.com/duckdb/duckdb-fts/issues/new), [`httpfs`](https://github.com/duckdb/duckdb-httpfs/issues/new), [`iceberg`](https://github.com/duckdb/duckdb-iceberg/issues/new), [`inet`](https://github.com/duckdb/duckdb-inet/issues/new), [`mysql`](https://github.com/duckdb/duckdb-mysql/issues/new), [`postgres`](https://github.com/duckdb/duckdb-postgres/issues/new), [`spatial`](https://github.com/duckdb/duckdb-spatial/issues/new), [`sqlite`](https://github.com/duckdb/duckdb-sqlite/issues/new), [`ui`](https://github.com/duckdb/duckdb-ui/issues/new), [`vss`](https://github.com/duckdb/duckdb-vss/issues/new)

        If the issue occurs in core DuckDB (e.g., a SQL query crashes or returns incorrect results) or if the issue is in the DuckDB command line client, feel free to raise it in this repository.

        Please report security vulnerabilities using GitHub's [report vulnerability form](https://github.com/duckdb/duckdb/security/advisories/new).

  - type: textarea
    attributes:
      label: What happens?
      description: A short, clear and concise description of what the bug is.
    validations:
      required: true

  - type: textarea
    attributes:
      label: To Reproduce
      description: |
        Please provide steps to reproduce the behavior, preferably a [minimal reproducible example](https://en.wikipedia.org/wiki/Minimal_reproducible_example). Please adhere to the following guidelines:

        * Format the code and the output as [code blocks](https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/creating-and-highlighting-code-blocks) using triple backticks:

          ````
          ```
          CODE HERE
          ```
          ````
        * Add all required imports for scripts, e.g., `import duckdb`, `import pandas as pd`.
        * Remove all prompts from the scripts. This includes DuckDB's 'D' prompt and Python's `>>>` prompt. Removing these prompts makes reproduction attempts quicker.
        * Make sure that the script and its outputs are provided in separate code blocks.
        * If applicable, please check whether the issue is reproducible via running plain SQL queries from the DuckDB CLI client.
    validations:
      required: true

  - type: markdown
    attributes:
      value: "# Environment (please complete the following information):"
  - type: input
    attributes:
      label: "OS:"
      placeholder: e.g., iOS
      description: Please include operating system version and architecture (e.g., aarch64, x86_64, etc.).
    validations:
      required: true
  - type: input
    attributes:
      label: "DuckDB Version:"
      placeholder: e.g., 22
    validations:
      required: true
  - type: input
    attributes:
      label: "DuckDB Client:"
      placeholder: e.g., Python
    validations:
      required: true
  - type: input
    attributes:
      label: "Hardware:"
      placeholder: If your issue is performance-related, please include information on your CPU and memory.
    validations:
      required: false
  - type: markdown
    attributes:
      value: "# Identity Disclosure:"
  - type: input
    attributes:
      label: "Full Name:"
      placeholder: e.g., John Doe
    validations:
      required: true
  - type: input
    attributes:
      label: "Affiliation:"
      placeholder: e.g., Acme Corporation
    validations:
      required: true

  - type: markdown
    attributes:
      value: |
        If the above is not given and is not obvious from your GitHub profile page, we might close your issue without further review. Please refer to the [reasoning behind this rule](https://berthub.eu/articles/posts/anonymous-help/) if you have questions.

        # Before Submitting:

  - type: checkboxes
    attributes:
      label: Did you include all relevant configuration (e.g., CPU architecture, Linux distribution) to reproduce the issue?
      options:
        - label: Yes, I have

  - type: checkboxes
    attributes:
      label: Did you include all code required to reproduce the issue?
      options:
        - label: Yes, I have

  - type: dropdown
    attributes:
      label: Did you include all relevant data sets for reproducing the issue?
      options:
        - "No - Other reason (please specify in the issue body)"
        - "No - I cannot share the data sets because they are confidential"
        - "No - I cannot easily share my data sets due to their large size"
        - "Not applicable - the reproduction does not require a data set"
        - "Yes"
      default: 0
    validations:
      required: true
@@ -1,9 +0,0 @@
blank_issues_enabled: false
contact_links:
  - name: Feature Request
    # manual template until discussion templates are GA
    url: https://github.com/duckdb/duckdb/discussions/new?category=ideas&title=Feature%20Request:%20...&labels=feature&body=Why%20do%20you%20want%20this%20feature%3F
    about: Submit feature requests here
  - name: Discussions
    url: https://github.com/duckdb/duckdb/discussions
    about: Please ask and answer general questions here.
@@ -1,254 +0,0 @@
# # # # # # # # # # # # # # # # # #
#
# WARNING: DEPRECATED!
#
# # # # # # # # # # # # # # # # # #

name: "Build Extensions"
description: "Build, test and deploy the DuckDB extensions"
inputs:
  # Test config
  run_tests:
    description: 'Run extension tests after build'
    default: '1'
  run_autoload_tests:
    description: 'Runs the autoloading tests'
    default: '1'

  # Deploy config
  deploy_as:
    description: 'Binary architecture name for deploy step - DEPRECATED'
    default: ''
  deploy_version:
    description: 'Version tag or commit short hash for deploy step'
    default: ''
  s3_id:
    description: 'S3 key ID'
    default: ''
  s3_key:
    description: 'S3 key secret'
    default: ''
  signing_pk:
    description: 'Extension signing RSA private key'
    default: ''

  # Build config
  duckdb_arch:
    description: 'Provide DUCKDB_PLATFORM to build system for cross compilation'
    default: ''
  static_link_build:
    description: 'Links DuckDB statically to the loadable extensions'
    default: '1'
  no_static_linking:
    description: 'Disables linking extensions into DuckDB for testing'
    default: '0'
  vcpkg_build:
    description: 'Installs vcpkg and passes its toolchain to CMake'
    default: '1'
  build_dir:
    description: 'DuckDB source directory to run the build in'
    default: '.'
  ninja:
    description: 'Use ninja for building'
    default: '0'
  openssl_path:
    description: 'Directory of OpenSSL installation'
    default: ''
  post_install:
    description: 'Post-install scripts to run'
    default: ''
  treat_warn_as_error:
    description: 'Treat compilation warnings as errors'
    default: '1'
  build_in_tree_extensions:
    description: 'Build in-tree extensions'
    default: '1'
  build_out_of_tree_extensions:
    description: 'Build out-of-tree extensions'
    default: '1'
  build_complete_extensions_set:
    description: 'Whether all extensions need to be built'
    default: '1'
  bundle_static_lib_mode:
    description: 'Build the default bundled extensions to publish the static libs'
    default: '0'
  osx_universal:
    description: 'Build Universal Binary for OSX'
    default: '0'
  osx_arch:
    description: 'Build specific architecture for OSX'
    default: ''
  aarch64_cross_compile:
    description: 'Enable Linux aarch64 cross-compiling'
    default: '0'
  vcpkg_target_triplet:
    description: 'Target triplet for installing vcpkg dependencies'
    default: ''
  override_cc:
    description: 'Override C Compiler'
    default: ''
  override_cxx:
    description: 'Override CXX Compiler'
    default: ''
  unittest_script:
    description: 'Script/program to execute the unittests'
    default: 'python3 scripts/run_tests_one_by_one.py ./build/release/test/unittest'
  cmake_flags:
    description: 'Flags to be passed to cmake'
    default: ''

runs:
  using: "composite"
  steps:
    - name: Setup DuckDB extension build config
      shell: bash
      run: |
        export EXTENSION_CONFIGS="$EXTENSION_CONFIGS;${{ inputs.bundle_static_lib_mode == 1 && '.github/config/bundled_extensions.cmake' || ''}}"
        export EXTENSION_CONFIGS="$EXTENSION_CONFIGS;${{ inputs.build_in_tree_extensions == 1 && '.github/config/in_tree_extensions.cmake' || ''}}"
        export EXTENSION_CONFIGS="$EXTENSION_CONFIGS;${{ inputs.build_out_of_tree_extensions == 1 && '.github/config/out_of_tree_extensions.cmake' || '' }}"
        echo "EXTENSION_CONFIGS=$EXTENSION_CONFIGS" >> $GITHUB_ENV

    - name: Setup vcpkg
      if: ${{ inputs.vcpkg_build == 1 }}
      uses: lukka/run-vcpkg@v11.1
      with:
        vcpkgGitCommitId: ce613c41372b23b1f51333815feb3edd87ef8a8b

    - name: Set vcpkg env variables
      if: ${{ inputs.vcpkg_build == 1 }}
      shell: bash
      run: |
        echo "VCPKG_TOOLCHAIN_PATH=$VCPKG_ROOT/scripts/buildsystems/vcpkg.cmake" >> $GITHUB_ENV
        echo "VCPKG_TARGET_TRIPLET=${{ inputs.vcpkg_target_triplet }}" >> $GITHUB_ENV
        echo "BUILD_COMPLETE_EXTENSION_SET=${{ inputs.build_complete_extensions_set }}" >> $GITHUB_ENV

    - name: workaround for https://github.com/duckdb/duckdb/issues/8360
      if: inputs.vcpkg_target_triplet == 'x64-windows-static-md'
      shell: bash
      run: |
        cd $VCPKG_ROOT
        mkdir -p downloads
        cd downloads
        curl -O -L https://github.com/duckdb/duckdb-data/releases/download/v1.0/nasm-2.16.01-win64.zip
        ls -al
        pwd

    - name: Set Openssl dir
      if: inputs.openssl_path != ''
      shell: bash
      run: |
        echo "OPENSSL_ROOT_DIR=${{ inputs.openssl_path }}" >> $GITHUB_ENV

    - name: Create combined vcpkg manifest
      if: ${{ inputs.vcpkg_build == '1' && inputs.build_out_of_tree_extensions == '1' }}
      shell: bash
      run: |
        make extension_configuration

    - name: Build
      shell: bash
      env:
        TREAT_WARNINGS_AS_ERRORS: ${{ inputs.treat_warn_as_error}}
        FORCE_WARN_UNUSED: 1
        STATIC_OPENSSL: 1
        EXTENSION_STATIC_BUILD: ${{ inputs.static_link_build }}
        OSX_BUILD_UNIVERSAL: ${{ inputs.osx_universal }}
        OSX_BUILD_ARCH: ${{ inputs.osx_arch }}
        DISABLE_BUILTIN_EXTENSIONS: ${{ inputs.no_static_linking}}
        CC: ${{ inputs.aarch64_cross_compile == 1 && 'aarch64-linux-gnu-gcc' || inputs.override_cc }}
        CXX: ${{ inputs.aarch64_cross_compile == 1 && 'aarch64-linux-gnu-g++' || inputs.override_cxx}}
        LOCAL_EXTENSION_REPO: ${{ inputs.run_autoload_tests == 1 && github.workspace || ''}}
        GEN: ${{ inputs.ninja == 1 && 'ninja' || '' }}
        USE_MERGED_VCPKG_MANIFEST: 1
        # TODO we should no longer override this but we should probably check that it is what we expect
        DUCKDB_PLATFORM: ${{ inputs.duckdb_arch }}
        CMAKE_VARS_BUILD: ${{ inputs.cmake_flags }}
      run: |
        ls
        cd ${{ inputs.build_dir}}
        ls -al
        pwd
        echo "$USER"
        git config --global --add safe.directory '*'
        make
        ls

    # Tests extensions using the regular require mechanism:
    # - statically linked extensions are disabled on startup but a sqlogictest require will call their load function
    # - loadable-only extensions have their loadable extension loaded with the LOAD statement on a sqlogictest require
    - name: Test statically linked extensions
      if: ${{ inputs.run_tests == '1' && inputs.no_static_linking == '0' }}
      shell: bash
      run: |
        ${{ inputs.unittest_script }}

    - name: Run post-install scripts
      if: ${{ inputs.post_install != '' }}
      shell: bash
      run: |
        ls
        cd ${{ inputs.build_dir }}
        ${{ inputs.post_install }}

    # The reason we need to rebuild is we need to test auto-loading extensions: this is only possible without the other
    # extensions linked
    - name: Rebuild DuckDB without any extensions, but with all extension tests
      if: ${{ inputs.run_autoload_tests == '1' }}
      shell: bash
      env:
        EXTENSION_TESTS_ONLY: 1
        ENABLE_EXTENSION_AUTOLOADING: 1
        ENABLE_EXTENSION_AUTOINSTALL: 1
        GEN: ${{ inputs.ninja == '1' && 'ninja' || '' }}
        USE_MERGED_VCPKG_MANIFEST: 1
      run: |
        cd ${{ inputs.build_dir}}
        rm -rf duckdb_unittest_tempdir/*
        mv build/release/extension build/extension_tmp
        rm -rf build/release
        VCPKG_TOOLCHAIN_PATH="" make

    # Run all unittests (including the out-of-tree tests) without any extensions linked, relying on the autoloader
    - name: Run tests with auto loading
      if: ${{ inputs.run_autoload_tests == '1' }}
      shell: bash
      env:
        LOCAL_EXTENSION_REPO: ${{ inputs.run_autoload_tests == '1' && github.workspace || ''}}
        DUCKDB_TEST_DESCRIPTION: 'Extension autoloading tests. All `require` calls are ignored and auto-loading is tested. Use require no_extension_autoloading in the test to skip tests.'
      run: |
        cd ${{ inputs.build_dir}}
        python3 scripts/get_test_list.py --file-contains 'require ' --list '"*.test"' > test.list
        python3 scripts/get_test_list.py --file-contains 'require-env LOCAL_EXTENSION_REPO' --list '"*.test"' >> test.list
        ${{ inputs.unittest_script }} '-f test.list'
        rm -rf build/release/extension
        mv build/extension_tmp build/release/extension

    - name: Run tests with auto loading with VS2019 C++ stdlib
      if: ${{ inputs.run_autoload_tests == '1' && inputs.vcpkg_target_triplet == 'x64-windows-static-md' }}
      shell: bash
      env:
        LOCAL_EXTENSION_REPO: ${{ inputs.run_autoload_tests == '1' && github.workspace || ''}}
      run: |
        rm -rf build/extension_tmp
        mv build/release/extension build/extension_tmp
        choco install wget -y --no-progress
        cd ${{ inputs.build_dir }}
        TEST_RUNNER_DIR=./build/release/test/Release
        if [ ! -f ${TEST_RUNNER_DIR}/unittest.exe ]; then
          echo "Invalid unit tests runner dir: ${TEST_RUNNER_DIR}, check 'inputs.unittest_script' argument"
          exit 1
        fi
        wget -P ${TEST_RUNNER_DIR} https://blobs.duckdb.org/ci/msvcp140.dll
        ls ${TEST_RUNNER_DIR}
        # test.list is generated on the previous step
        ${{ inputs.unittest_script }} '-f test.list'
        rm ${TEST_RUNNER_DIR}/msvcp140.dll
        rm -rf ./build/release/extension
        mv ./build/extension_tmp ./build/release/extension

    - name: Deploy
      if: ${{ inputs.deploy_as != '' }}
      shell: bash
      run: |
        exit 1
@@ -1,37 +0,0 @@
# https://github.com/pierotofy/set-swap-space
name: 'Set Swap Space'
description: 'Add moar swap'
inputs:
  swap-size-gb:
    description: 'Swap space to create, in Gigabytes.'
    required: false
    default: '10'
runs:
  using: "composite"
  steps:
    - name: Swap space report before modification
      shell: bash
      run: |
        echo "Memory and swap:"
        free -h
        echo
        swapon --show
        echo
    - name: Set Swap
      shell: bash
      run: |
        export SWAP_FILE=$(swapon --show=NAME | tail -n 1)
        sudo swapoff $SWAP_FILE
        sudo rm $SWAP_FILE
        sudo fallocate -l ${{ inputs.swap-size-gb }}G $SWAP_FILE
        sudo chmod 600 $SWAP_FILE
        sudo mkswap $SWAP_FILE
        sudo swapon $SWAP_FILE
    - name: Swap space report after modification
      shell: bash
      run: |
        echo "Memory and swap:"
        free -h
        echo
        swapon --show
        echo
@@ -1,16 +0,0 @@
PROJ_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))

# Configuration to build all extensions
# This is used in combination with extension-ci-tools to fake an extension that has as extension_config.cmake the content of
# the in-tree and out-of-tree configuration files.
EXT_NAME=all_extensions
EXT_CONFIG=${PROJ_DIR}extension_config.cmake

# Include the Makefile from extension-ci-tools
include extension-ci-tools/makefiles/duckdb_extension.Makefile

configure_ci:
	cd duckdb && BUILD_ALL_EXT=1 make extension_configuration && cp build/extension_configuration/vcpkg.json ../.

test_release:
	python3 duckdb/scripts/run_tests_one_by_one.py ./build/release/test/unittest
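The two targets above cover configuration and testing only; the build step itself comes from the included extension-ci-tools Makefile. A minimal sketch of a local invocation, where the `release` target is an assumption about what duckdb_extension.Makefile provides:

```shell
# Hedged sketch of driving this Makefile locally. Only configure_ci and
# test_release are defined above; `make release` is an assumed target from
# extension-ci-tools' duckdb_extension.Makefile.
make configure_ci    # writes the merged vcpkg.json next to this Makefile
make release         # assumed build target producing build/release
make test_release    # runs the unittests one by one
```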
@@ -1,42 +0,0 @@
# Extension patches
Patches in this directory are used to smooth the process of introducing changes to DuckDB that break compatibility with an
out-of-tree extension. Extensions installed from Git URLs can automatically apply patches found in this directory. The APPLY_PATCHES flag
should be used to explicitly enable this feature. For example,
let's say our extension config looks like this:

```cmake
duckdb_extension_load(spatial
    DONT_LINK
    GIT_URL https://github.com/duckdb/duckdb_spatial
    GIT_TAG f577b9441793f9170403e489f5d3587e023a945f
    APPLY_PATCHES
)
```

In this example, upon downloading the spatial extension, all patches in `.github/patches/extensions/spatial/*.patch`
will be applied automatically.

Note that the reason for requiring the APPLY_PATCHES flag to be enabled explicitly is to make it easier for developers reading
the extension config to detect that a patch is present. For this reason, the patching mechanism will actually fail if `APPLY_PATCHES`
is set with no patches in `.github/patches/extensions/<ext>/*.patch`.

# Workflow
Imagine a change to DuckDB is introduced that breaks compatibility with extension X. The
workflow for this is as follows (a sketch of the patch-application step follows this list):

### PR #1: breaking change to DuckDB
- Commit the breaking change to DuckDB
- Fix the breakage in extension X, producing a patch with the fix (be wary of already existing patches)
- Commit the patch in `.github/patches/extensions/x/*.patch` using a descriptive name
- Enable APPLY_PATCHES for extension X in `.github/config/out_of_tree_extensions.cmake` (if not already enabled)

### PR #2: patch to extension X
- Apply (all) the patch(es) in `.github/patches/extensions/x/*.patch` to extension X.

### PR #3: update extension X in DuckDB
- Remove the patches in `.github/patches/extensions/x/*.patch`
- Remove the `APPLY_PATCHES` flag from the config
- Update the hash of the extension in the config
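The patching mechanism described above boils down to applying every `*.patch` file for the extension after its sources are fetched. This is a minimal illustrative sketch, not the actual build-system implementation; the checkout path `ext_src` and the use of `git apply` are assumptions:

```shell
# Hedged sketch of the APPLY_PATCHES behavior for extension "spatial".
# The patch directory layout follows the README; ext_src is an assumed
# checkout location for the downloaded extension sources.
ext=spatial
ext_src=build/extension_src/${ext}
shopt -s nullglob
patches=(.github/patches/extensions/${ext}/*.patch)
if [ ${#patches[@]} -eq 0 ]; then
    # Mirrors the documented failure mode: APPLY_PATCHES set but no patches found.
    echo "APPLY_PATCHES is set but no patches exist for ${ext}" >&2
    exit 1
fi
for p in "${patches[@]}"; do
    git -C "${ext_src}" apply --verbose "$(pwd)/${p}"
done
```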
@@ -1,13 +0,0 @@
diff --git a/src/field_ids.cpp b/src/field_ids.cpp
index d197f8d..52fb48c 100644
--- a/src/field_ids.cpp
+++ b/src/field_ids.cpp
@@ -5,6 +5,8 @@ namespace duckdb {
 
 namespace avro {
 
+constexpr const char *FieldID::DUCKDB_FIELD_ID;
+
 FieldID::FieldID() : set(false) {
 }
 
@@ -1,12 +0,0 @@
diff --git a/src/storage/delta_insert.cpp b/src/storage/delta_insert.cpp
index 93ebf9f..8eea9f5 100644
--- a/src/storage/delta_insert.cpp
+++ b/src/storage/delta_insert.cpp
@@ -1,7 +1,5 @@
 #include "storage/delta_insert.hpp"
 
-#include <duckdb/common/sort/partition_state.hpp>
-
 #include "duckdb/catalog/catalog_entry/copy_function_catalog_entry.hpp"
 #include "duckdb/main/client_data.hpp"
 #include "duckdb/planner/operator/logical_copy_to_file.hpp"
@@ -1,38 +0,0 @@
diff --git a/src/s3fs.cpp b/src/s3fs.cpp
index 72eddc3..601ecba 100644
--- a/src/s3fs.cpp
+++ b/src/s3fs.cpp
@@ -895,7 +895,7 @@ void S3FileHandle::Initialize(optional_ptr<FileOpener> opener) {
 				correct_region = new_region->second;
 			}
 			auto extra_text = S3FileSystem::GetS3BadRequestError(auth_params, correct_region);
-			throw Exception(error.Type(), error.RawMessage() + extra_text, extra_info);
+			throw Exception(extra_info, error.Type(), error.RawMessage() + extra_text);
 		}
 		if (entry->second == "403") {
 			// 403: FORBIDDEN
@@ -905,7 +905,7 @@ void S3FileHandle::Initialize(optional_ptr<FileOpener> opener) {
 			} else {
 				extra_text = S3FileSystem::GetS3AuthError(auth_params);
 			}
-			throw Exception(error.Type(), error.RawMessage() + extra_text, extra_info);
+			throw Exception(extra_info, error.Type(), error.RawMessage() + extra_text);
 		}
 	}
 	throw;
@@ -941,13 +941,13 @@ bool S3FileSystem::CanHandleFile(const string &fpath) {
 void S3FileSystem::RemoveFile(const string &path, optional_ptr<FileOpener> opener) {
 	auto handle = OpenFile(path, FileFlags::FILE_FLAGS_NULL_IF_NOT_EXISTS, opener);
 	if (!handle) {
-		throw IOException("Could not remove file \"%s\": %s", {{"errno", "404"}}, path, "No such file or directory");
+		throw IOException({{"errno", "404"}}, "Could not remove file \"%s\": %s", path, "No such file or directory");
 	}
 
 	auto &s3fh = handle->Cast<S3FileHandle>();
 	auto res = DeleteRequest(*handle, s3fh.path, {});
 	if (res->status != HTTPStatusCode::OK_200 && res->status != HTTPStatusCode::NoContent_204) {
-		throw IOException("Could not remove file \"%s\": %s", {{"errno", to_string(static_cast<int>(res->status))}},
+		throw IOException({{"errno", to_string(static_cast<int>(res->status))}}, "Could not remove file \"%s\": %s",
 		                  path, res->GetError());
 	}
 }
@@ -1,12 +0,0 @@
diff --git a/src/storage/iceberg_insert.cpp b/src/storage/iceberg_insert.cpp
index aa2371e8..cccc82d6 100644
--- a/src/storage/iceberg_insert.cpp
+++ b/src/storage/iceberg_insert.cpp
@@ -7,7 +7,6 @@
 
 #include "iceberg_multi_file_list.hpp"
 
-#include "duckdb/common/sort/partition_state.hpp"
 #include "duckdb/catalog/catalog_entry/copy_function_catalog_entry.hpp"
 #include "duckdb/main/client_data.hpp"
 #include "duckdb/planner/operator/logical_copy_to_file.hpp"
@@ -1,19 +0,0 @@
diff --git a/src/inet_functions.cpp b/src/inet_functions.cpp
index da92a4c..afa7446 100644
--- a/src/inet_functions.cpp
+++ b/src/inet_functions.cpp
@@ -185,11 +185,12 @@ static INET_TYPE AddImplementation(INET_TYPE ip, hugeint_t val) {
 	if (val > 0) {
 		address_out =
 		    AddOperatorOverflowCheck::Operation<uhugeint_t, uhugeint_t, uhugeint_t>(
-		        address_in, val);
+		        address_in, (uhugeint_t)val);
 	} else {
+		// TODO: this is off for when val is the minimal uhugeint_t value
 		address_out =
 		    SubtractOperatorOverflowCheck::Operation<uhugeint_t, uhugeint_t,
-		                                             uhugeint_t>(address_in, -val);
+		                                             uhugeint_t>(address_in, (uhugeint_t)(-val));
 	}
 
 	if (addr_type == IPAddressType::IP_ADDRESS_V4 &&
@@ -1,16 +0,0 @@
diff --git a/src/spatial/modules/main/spatial_functions_scalar.cpp b/src/spatial/modules/main/spatial_functions_scalar.cpp
index 60ca7373ce..a44cfc7a82 100644
--- a/src/spatial/modules/main/spatial_functions_scalar.cpp
+++ b/src/spatial/modules/main/spatial_functions_scalar.cpp
@@ -9243,6 +9243,11 @@ struct ST_MMin : VertexAggFunctionBase<ST_MMin, VertexMinAggOp> {
 	static constexpr auto ORDINATE = VertexOrdinate::M;
 };
 
+constexpr const char * ST_M::NAME;
+constexpr const char * ST_X::NAME;
+constexpr const char * ST_Y::NAME;
+constexpr const char * ST_Z::NAME;
+
 } // namespace
 
 // Helper to access the constant distance from the bind data
@@ -1,44 +0,0 @@
diff --git a/src/statement_generator.cpp b/src/statement_generator.cpp
index fc34c7c..5defc4e 100644
--- a/src/statement_generator.cpp
+++ b/src/statement_generator.cpp
@@ -373,8 +373,9 @@ unique_ptr<QueryNode> StatementGenerator::GenerateQueryNode() {
 	GenerateCTEs(*setop);
 	setop->setop_type = Choose<SetOperationType>({SetOperationType::EXCEPT, SetOperationType::INTERSECT,
 	                                              SetOperationType::UNION, SetOperationType::UNION_BY_NAME});
-	setop->left = GenerateQueryNode();
-	setop->right = GenerateQueryNode();
+	for(idx_t i = 0; i < 2; i++) {
+		setop->children.push_back(GenerateQueryNode());
+	}
 	switch (setop->setop_type) {
 	case SetOperationType::EXCEPT:
 	case SetOperationType::INTERSECT:
diff --git a/src/statement_simplifier.cpp b/src/statement_simplifier.cpp
index 2cd7f06..4602928 100644
--- a/src/statement_simplifier.cpp
+++ b/src/statement_simplifier.cpp
@@ -196,8 +196,9 @@ void StatementSimplifier::Simplify(SelectNode &node) {
 }
 
 void StatementSimplifier::Simplify(SetOperationNode &node) {
-	Simplify(node.left);
-	Simplify(node.right);
+	for(auto &child : node.children) {
+		Simplify(child);
+	}
 }
 
 void StatementSimplifier::Simplify(CommonTableExpressionMap &cte) {
@@ -218,8 +219,9 @@ void StatementSimplifier::Simplify(unique_ptr<QueryNode> &node) {
 		break;
 	case QueryNodeType::SET_OPERATION_NODE: {
 		auto &setop = node->Cast<SetOperationNode>();
-		SimplifyReplace(node, setop.left);
-		SimplifyReplace(node, setop.right);
+		for(auto &child : setop.children) {
+			SimplifyReplace(node, child);
+		}
 		Simplify(setop);
 		break;
 	}
@@ -1,8 +0,0 @@
benchmark/appian_benchmarks/q01.benchmark
benchmark/appian_benchmarks/q02.benchmark
benchmark/appian_benchmarks/q03.benchmark
benchmark/appian_benchmarks/q04.benchmark
benchmark/appian_benchmarks/q05.benchmark
benchmark/appian_benchmarks/q06.benchmark
benchmark/appian_benchmarks/q07.benchmark
benchmark/appian_benchmarks/q08.benchmark
9  external/duckdb/.github/regression/csv.csv  vendored
@@ -1,9 +0,0 @@
benchmark/csv/sniffer.benchmark
benchmark/csv/read.benchmark
benchmark/csv/small_csv.benchmark
benchmark/csv/null_padding.benchmark
benchmark/csv/projection_pushdown.benchmark
benchmark/csv/1_byte_values.benchmark
benchmark/csv/16_byte_values.benchmark
benchmark/csv/multiple_small_files.benchmark
benchmark/csv/time_type.benchmark
15  external/duckdb/.github/regression/h2oai.csv  vendored
@@ -1,15 +0,0 @@
benchmark/h2oai/group/q01.benchmark
benchmark/h2oai/group/q02.benchmark
benchmark/h2oai/group/q03.benchmark
benchmark/h2oai/group/q04.benchmark
benchmark/h2oai/group/q05.benchmark
benchmark/h2oai/group/q06.benchmark
benchmark/h2oai/group/q07.benchmark
benchmark/h2oai/group/q08.benchmark
benchmark/h2oai/group/q09.benchmark
benchmark/h2oai/group/q10.benchmark
benchmark/h2oai/join/q01.benchmark
benchmark/h2oai/join/q02.benchmark
benchmark/h2oai/join/q03.benchmark
benchmark/h2oai/join/q04.benchmark
benchmark/h2oai/join/q05.benchmark
113  external/duckdb/.github/regression/imdb.csv  vendored
@@ -1,113 +0,0 @@
benchmark/imdb/01a.benchmark
benchmark/imdb/01b.benchmark
benchmark/imdb/01c.benchmark
benchmark/imdb/01d.benchmark
benchmark/imdb/02a.benchmark
benchmark/imdb/02b.benchmark
benchmark/imdb/02c.benchmark
benchmark/imdb/02d.benchmark
benchmark/imdb/03a.benchmark
benchmark/imdb/03b.benchmark
benchmark/imdb/03c.benchmark
benchmark/imdb/04a.benchmark
benchmark/imdb/04b.benchmark
benchmark/imdb/04c.benchmark
benchmark/imdb/05a.benchmark
benchmark/imdb/05b.benchmark
benchmark/imdb/05c.benchmark
benchmark/imdb/06a.benchmark
benchmark/imdb/06b.benchmark
benchmark/imdb/06c.benchmark
benchmark/imdb/06d.benchmark
benchmark/imdb/06e.benchmark
benchmark/imdb/06f.benchmark
benchmark/imdb/07a.benchmark
benchmark/imdb/07b.benchmark
benchmark/imdb/07c.benchmark
benchmark/imdb/08a.benchmark
benchmark/imdb/08b.benchmark
benchmark/imdb/08c.benchmark
benchmark/imdb/08d.benchmark
benchmark/imdb/09a.benchmark
benchmark/imdb/09b.benchmark
benchmark/imdb/09c.benchmark
benchmark/imdb/09d.benchmark
benchmark/imdb/10a.benchmark
benchmark/imdb/10b.benchmark
benchmark/imdb/10c.benchmark
benchmark/imdb/11a.benchmark
benchmark/imdb/11b.benchmark
benchmark/imdb/11c.benchmark
benchmark/imdb/11d.benchmark
benchmark/imdb/12a.benchmark
benchmark/imdb/12b.benchmark
benchmark/imdb/12c.benchmark
benchmark/imdb/13a.benchmark
benchmark/imdb/13b.benchmark
benchmark/imdb/13c.benchmark
benchmark/imdb/13d.benchmark
benchmark/imdb/14a.benchmark
benchmark/imdb/14b.benchmark
benchmark/imdb/14c.benchmark
benchmark/imdb/15a.benchmark
benchmark/imdb/15b.benchmark
benchmark/imdb/15c.benchmark
benchmark/imdb/15d.benchmark
benchmark/imdb/16a.benchmark
benchmark/imdb/16b.benchmark
benchmark/imdb/16c.benchmark
benchmark/imdb/16d.benchmark
benchmark/imdb/17a.benchmark
benchmark/imdb/17b.benchmark
benchmark/imdb/17c.benchmark
benchmark/imdb/17d.benchmark
benchmark/imdb/17e.benchmark
benchmark/imdb/17f.benchmark
benchmark/imdb/18a.benchmark
benchmark/imdb/18b.benchmark
benchmark/imdb/18c.benchmark
benchmark/imdb/19a.benchmark
benchmark/imdb/19b.benchmark
benchmark/imdb/19c.benchmark
benchmark/imdb/19d.benchmark
benchmark/imdb/20a.benchmark
benchmark/imdb/20b.benchmark
benchmark/imdb/20c.benchmark
benchmark/imdb/21a.benchmark
benchmark/imdb/21b.benchmark
benchmark/imdb/21c.benchmark
benchmark/imdb/22a.benchmark
benchmark/imdb/22b.benchmark
benchmark/imdb/22c.benchmark
benchmark/imdb/22d.benchmark
benchmark/imdb/23a.benchmark
benchmark/imdb/23b.benchmark
benchmark/imdb/23c.benchmark
benchmark/imdb/24a.benchmark
benchmark/imdb/24b.benchmark
benchmark/imdb/25a.benchmark
benchmark/imdb/25b.benchmark
benchmark/imdb/25c.benchmark
benchmark/imdb/26a.benchmark
benchmark/imdb/26b.benchmark
benchmark/imdb/26c.benchmark
benchmark/imdb/27a.benchmark
benchmark/imdb/27b.benchmark
benchmark/imdb/27c.benchmark
benchmark/imdb/28a.benchmark
benchmark/imdb/28b.benchmark
benchmark/imdb/28c.benchmark
benchmark/imdb/29a.benchmark
benchmark/imdb/29b.benchmark
benchmark/imdb/29c.benchmark
benchmark/imdb/30a.benchmark
benchmark/imdb/30b.benchmark
benchmark/imdb/30c.benchmark
benchmark/imdb/31a.benchmark
benchmark/imdb/31b.benchmark
benchmark/imdb/31c.benchmark
benchmark/imdb/32a.benchmark
benchmark/imdb/32b.benchmark
benchmark/imdb/33a.benchmark
benchmark/imdb/33b.benchmark
benchmark/imdb/33c.benchmark
31  external/duckdb/.github/regression/ingestion.csv  vendored
@@ -1,31 +0,0 @@
benchmark/ingestion/in_memory_db/tpcds/csv/ingest_inventory.benchmark
benchmark/ingestion/in_memory_db/tpcds/parquet/ingest_inventory.benchmark
benchmark/ingestion/in_memory_db/tpcds/native/ingest_inventory.benchmark

benchmark/ingestion/persistent_storage/tpcds/csv/ingest_inventory.benchmark
benchmark/ingestion/persistent_storage/tpcds/parquet/ingest_inventory.benchmark
benchmark/ingestion/persistent_storage/tpcds/native/ingest_inventory.benchmark

benchmark/ingestion/in_memory_db/tpcds/csv/ingest_store_sales.benchmark
benchmark/ingestion/in_memory_db/tpcds/parquet/ingest_store_sales.benchmark
benchmark/ingestion/in_memory_db/tpcds/native/ingest_store_sales.benchmark

benchmark/ingestion/persistent_storage/tpcds/csv/ingest_store_sales.benchmark
benchmark/ingestion/persistent_storage/tpcds/parquet/ingest_store_sales.benchmark
benchmark/ingestion/persistent_storage/tpcds/native/ingest_store_sales.benchmark

benchmark/ingestion/in_memory_db/tpch/csv/ingest_lineitem.benchmark
benchmark/ingestion/in_memory_db/tpch/parquet/ingest_lineitem.benchmark
benchmark/ingestion/in_memory_db/tpch/native/ingest_lineitem.benchmark

benchmark/ingestion/persistent_storage/tpch/csv/ingest_lineitem.benchmark
benchmark/ingestion/persistent_storage/tpch/parquet/ingest_lineitem.benchmark
benchmark/ingestion/persistent_storage/tpch/native/ingest_lineitem.benchmark

benchmark/ingestion/in_memory_db/tpch/csv/ingest_orders.benchmark
benchmark/ingestion/in_memory_db/tpch/parquet/ingest_orders.benchmark
benchmark/ingestion/in_memory_db/tpch/native/ingest_orders.benchmark

benchmark/ingestion/persistent_storage/tpch/csv/ingest_orders.benchmark
benchmark/ingestion/persistent_storage/tpch/parquet/ingest_orders.benchmark
benchmark/ingestion/persistent_storage/tpch/native/ingest_orders.benchmark
@@ -1,15 +0,0 @@
benchmark/large/ingestion/tpcds/csv/ingest_inventory.benchmark
benchmark/large/ingestion/tpcds/parquet/ingest_inventory.benchmark
benchmark/large/ingestion/tpcds/native/ingest_inventory.benchmark

benchmark/large/ingestion/tpcds/csv/ingest_store_sales.benchmark
benchmark/large/ingestion/tpcds/parquet/ingest_store_sales.benchmark
benchmark/large/ingestion/tpcds/native/ingest_store_sales.benchmark

benchmark/large/ingestion/tpch/csv/ingest_lineitem.benchmark
benchmark/large/ingestion/tpch/parquet/ingest_lineitem.benchmark
benchmark/large/ingestion/tpch/native/ingest_lineitem.benchmark

benchmark/large/ingestion/tpch/csv/ingest_orders.benchmark
benchmark/large/ingestion/tpch/parquet/ingest_orders.benchmark
benchmark/large/ingestion/tpch/native/ingest_orders.benchmark
@@ -1,99 +0,0 @@
benchmark/large/tpcds-sf100/q01.benchmark
benchmark/large/tpcds-sf100/q02.benchmark
benchmark/large/tpcds-sf100/q03.benchmark
benchmark/large/tpcds-sf100/q04.benchmark
benchmark/large/tpcds-sf100/q05.benchmark
benchmark/large/tpcds-sf100/q06.benchmark
benchmark/large/tpcds-sf100/q07.benchmark
benchmark/large/tpcds-sf100/q08.benchmark
benchmark/large/tpcds-sf100/q09.benchmark
benchmark/large/tpcds-sf100/q10.benchmark
benchmark/large/tpcds-sf100/q11.benchmark
benchmark/large/tpcds-sf100/q12.benchmark
benchmark/large/tpcds-sf100/q13.benchmark
benchmark/large/tpcds-sf100/q14.benchmark
benchmark/large/tpcds-sf100/q15.benchmark
benchmark/large/tpcds-sf100/q16.benchmark
benchmark/large/tpcds-sf100/q17.benchmark
benchmark/large/tpcds-sf100/q18.benchmark
benchmark/large/tpcds-sf100/q19.benchmark
benchmark/large/tpcds-sf100/q20.benchmark
benchmark/large/tpcds-sf100/q21.benchmark
benchmark/large/tpcds-sf100/q22.benchmark
benchmark/large/tpcds-sf100/q23.benchmark
benchmark/large/tpcds-sf100/q24.benchmark
benchmark/large/tpcds-sf100/q25.benchmark
benchmark/large/tpcds-sf100/q26.benchmark
benchmark/large/tpcds-sf100/q27.benchmark
benchmark/large/tpcds-sf100/q28.benchmark
benchmark/large/tpcds-sf100/q29.benchmark
benchmark/large/tpcds-sf100/q30.benchmark
benchmark/large/tpcds-sf100/q31.benchmark
benchmark/large/tpcds-sf100/q32.benchmark
benchmark/large/tpcds-sf100/q33.benchmark
benchmark/large/tpcds-sf100/q34.benchmark
benchmark/large/tpcds-sf100/q35.benchmark
benchmark/large/tpcds-sf100/q36.benchmark
benchmark/large/tpcds-sf100/q37.benchmark
benchmark/large/tpcds-sf100/q38.benchmark
benchmark/large/tpcds-sf100/q39.benchmark
benchmark/large/tpcds-sf100/q40.benchmark
benchmark/large/tpcds-sf100/q41.benchmark
benchmark/large/tpcds-sf100/q42.benchmark
benchmark/large/tpcds-sf100/q43.benchmark
benchmark/large/tpcds-sf100/q44.benchmark
benchmark/large/tpcds-sf100/q45.benchmark
benchmark/large/tpcds-sf100/q46.benchmark
benchmark/large/tpcds-sf100/q47.benchmark
benchmark/large/tpcds-sf100/q48.benchmark
benchmark/large/tpcds-sf100/q49.benchmark
benchmark/large/tpcds-sf100/q50.benchmark
benchmark/large/tpcds-sf100/q51.benchmark
benchmark/large/tpcds-sf100/q52.benchmark
benchmark/large/tpcds-sf100/q53.benchmark
benchmark/large/tpcds-sf100/q54.benchmark
benchmark/large/tpcds-sf100/q55.benchmark
benchmark/large/tpcds-sf100/q56.benchmark
benchmark/large/tpcds-sf100/q57.benchmark
benchmark/large/tpcds-sf100/q58.benchmark
benchmark/large/tpcds-sf100/q59.benchmark
benchmark/large/tpcds-sf100/q60.benchmark
benchmark/large/tpcds-sf100/q61.benchmark
benchmark/large/tpcds-sf100/q62.benchmark
benchmark/large/tpcds-sf100/q63.benchmark
benchmark/large/tpcds-sf100/q64.benchmark
benchmark/large/tpcds-sf100/q65.benchmark
benchmark/large/tpcds-sf100/q66.benchmark
benchmark/large/tpcds-sf100/q67.benchmark
benchmark/large/tpcds-sf100/q68.benchmark
benchmark/large/tpcds-sf100/q69.benchmark
benchmark/large/tpcds-sf100/q70.benchmark
benchmark/large/tpcds-sf100/q71.benchmark
benchmark/large/tpcds-sf100/q72.benchmark
benchmark/large/tpcds-sf100/q73.benchmark
benchmark/large/tpcds-sf100/q74.benchmark
benchmark/large/tpcds-sf100/q75.benchmark
benchmark/large/tpcds-sf100/q76.benchmark
benchmark/large/tpcds-sf100/q77.benchmark
benchmark/large/tpcds-sf100/q78.benchmark
benchmark/large/tpcds-sf100/q79.benchmark
benchmark/large/tpcds-sf100/q80.benchmark
benchmark/large/tpcds-sf100/q81.benchmark
benchmark/large/tpcds-sf100/q82.benchmark
benchmark/large/tpcds-sf100/q83.benchmark
benchmark/large/tpcds-sf100/q84.benchmark
benchmark/large/tpcds-sf100/q85.benchmark
benchmark/large/tpcds-sf100/q86.benchmark
benchmark/large/tpcds-sf100/q87.benchmark
benchmark/large/tpcds-sf100/q88.benchmark
benchmark/large/tpcds-sf100/q89.benchmark
benchmark/large/tpcds-sf100/q90.benchmark
benchmark/large/tpcds-sf100/q91.benchmark
benchmark/large/tpcds-sf100/q92.benchmark
benchmark/large/tpcds-sf100/q93.benchmark
benchmark/large/tpcds-sf100/q94.benchmark
benchmark/large/tpcds-sf100/q95.benchmark
benchmark/large/tpcds-sf100/q96.benchmark
benchmark/large/tpcds-sf100/q97.benchmark
benchmark/large/tpcds-sf100/q98.benchmark
benchmark/large/tpcds-sf100/q99.benchmark
@@ -1,23 +0,0 @@
benchmark/large/tpch-sf100/q01.benchmark
benchmark/large/tpch-sf100/q02.benchmark
benchmark/large/tpch-sf100/q03.benchmark
benchmark/large/tpch-sf100/q04.benchmark
benchmark/large/tpch-sf100/q05.benchmark
benchmark/large/tpch-sf100/q06.benchmark
benchmark/large/tpch-sf100/q07.benchmark
benchmark/large/tpch-sf100/q08.benchmark
benchmark/large/tpch-sf100/q09.benchmark
benchmark/large/tpch-sf100/q10.benchmark
benchmark/large/tpch-sf100/q11.benchmark
benchmark/large/tpch-sf100/q12.benchmark
benchmark/large/tpch-sf100/q13.benchmark
benchmark/large/tpch-sf100/q14.benchmark
benchmark/large/tpch-sf100/q15.benchmark
benchmark/large/tpch-sf100/q16.benchmark
benchmark/large/tpch-sf100/q17.benchmark
benchmark/large/tpch-sf100/q18.benchmark
benchmark/large/tpch-sf100/q19.benchmark
benchmark/large/tpch-sf100/q20.benchmark
benchmark/large/tpch-sf100/q21.benchmark
benchmark/large/tpch-sf100/q22.benchmark
37  external/duckdb/.github/regression/micro.csv  vendored
@@ -1,37 +0,0 @@
benchmark/micro/cast/cast_date_string.benchmark
benchmark/micro/cast/cast_int_string.benchmark
benchmark/micro/cast/cast_double_string.benchmark
benchmark/micro/cast/cast_string_double.benchmark
benchmark/micro/cast/cast_string_int.benchmark
benchmark/micro/cast/cast_timestamp_string.benchmark
benchmark/micro/copy/to_parquet_partition_by_few.benchmark
benchmark/micro/copy/to_parquet_partition_by_many.benchmark
benchmark/micro/limit/parallel_limit.benchmark
benchmark/micro/filter/parallel_complex_filter.benchmark
benchmark/micro/catalog/add_column_empty.benchmark
benchmark/micro/groupby-parallel/many_groups_large_values_small.benchmark
benchmark/tpch/csv/lineitem_csv_auto_detect.benchmark
benchmark/tpch/parquet/write_lineitem_parquet.benchmark
benchmark/tpch/pivot/lineitem_pivot_returnflag.benchmark
benchmark/tpch/pivot/lineitem_pivot_shipdate.benchmark
benchmark/micro/logger/disabled/logging_disabled_global.benchmark
benchmark/micro/logger/disabled/logging_disabled_client_context.benchmark
benchmark/micro/logger/disabled/logging_disabled_reference.benchmark
benchmark/micro/logger/disabled/logging_disabled_file_opener.benchmark
benchmark/micro/logger/enabled/logging_enabled_client_context.benchmark
benchmark/micro/logger/enabled/logging_enabled_global.benchmark
benchmark/micro/logger/enabled/logging_enabled_file_opener.benchmark
benchmark/micro/logger/enabled/logging_enabled_global.benchmark
benchmark/micro/logger/filtered_out_by_log_type/client_context.benchmark
benchmark/micro/logger/filtered_out_by_log_type/file_opener.benchmark
benchmark/micro/logger/filtered_out_by_log_type/global.benchmark
benchmark/micro/logger/file_handle_log/csv/file_handle_logging_csv.benchmark
benchmark/micro/logger/file_handle_log/parquet/file_handle_logging_parquet.benchmark
benchmark/micro/logger/logging_overhead/parquet_q1_with_filesystem_logging.benchmark
benchmark/micro/logger/logging_overhead/parquet_q1_with_default_logging.benchmark
benchmark/micro/logger/logging_overhead/duckdb_persistent_q1_with_default_logging.benchmark
benchmark/micro/logger/storage/file/log_message_size/huge_string.benchmark
benchmark/micro/logger/storage/file/log_message_size/small_string.benchmark
benchmark/micro/filter/choose_correct_filter_function.benchmark
benchmark/micro/optimizer/topn_window_elimination.benchmark
benchmark/micro/aggregate/group_two_string_dictionaries.benchmark
22  external/duckdb/.github/regression/realnest.csv  vendored
@@ -1,22 +0,0 @@
benchmark/realnest/micro/01_aggregate-first-level-struct-members.benchmark
benchmark/realnest/micro/02_list_sort.benchmark
benchmark/realnest/micro/03_create_table_from_unnested_structs.benchmark
benchmark/realnest/micro/04_list_transform_and_list_aggregate.benchmark
benchmark/realnest/micro/05_list_filter.benchmark
benchmark/realnest/micro/06_list_filter_on_unnested_structure.benchmark
benchmark/realnest/micro/07_list_unique_on_transformed_and_aggregated_list.benchmark
benchmark/realnest/micro/08_count_map_keys.benchmark
benchmark/realnest/micro/09_array_agg.benchmark
benchmark/realnest/micro/11_list_sort_reduce_transform.benchmark
benchmark/realnest/micro/12_map_list_values.benchmark
benchmark/realnest/micro/13_multi_join_nested_data_with_filtering.benchmark
benchmark/realnest/micro/14_list_slice.benchmark
benchmark/realnest/micro/15_list_sort.benchmark
benchmark/realnest/micro/16_most_common_list_aggregates.benchmark
benchmark/realnest/hep/q01.benchmark
benchmark/realnest/hep/q02.benchmark
benchmark/realnest/hep/q03.benchmark
benchmark/realnest/hep/q04.benchmark
benchmark/realnest/hep/q06_1.benchmark
benchmark/realnest/hep/q06_2.benchmark
benchmark/realnest/hep/q07.benchmark
@@ -1,9 +0,0 @@
benchmark/realnest/hep/q01.benchmark
benchmark/realnest/hep/q02.benchmark
benchmark/realnest/hep/q03.benchmark
benchmark/realnest/hep/q04.benchmark
benchmark/realnest/hep/q05.benchmark
benchmark/realnest/hep/q06_1.benchmark
benchmark/realnest/hep/q06_2.benchmark
benchmark/realnest/hep/q07.benchmark
benchmark/realnest/hep/q08.benchmark
4  external/duckdb/.github/regression/taxi.csv  vendored
@@ -1,4 +0,0 @@
benchmark/taxi/q01.benchmark
benchmark/taxi/q02.benchmark
benchmark/taxi/q03.benchmark
benchmark/taxi/q04.benchmark
99  external/duckdb/.github/regression/tpcds.csv  vendored
@@ -1,99 +0,0 @@
benchmark/tpcds/sf1/q01.benchmark
benchmark/tpcds/sf1/q02.benchmark
benchmark/tpcds/sf1/q03.benchmark
benchmark/tpcds/sf1/q04.benchmark
benchmark/tpcds/sf1/q05.benchmark
benchmark/tpcds/sf1/q06.benchmark
benchmark/tpcds/sf1/q07.benchmark
benchmark/tpcds/sf1/q08.benchmark
benchmark/tpcds/sf1/q09.benchmark
benchmark/tpcds/sf1/q10.benchmark
benchmark/tpcds/sf1/q11.benchmark
benchmark/tpcds/sf1/q12.benchmark
benchmark/tpcds/sf1/q13.benchmark
benchmark/tpcds/sf1/q14.benchmark
benchmark/tpcds/sf1/q15.benchmark
benchmark/tpcds/sf1/q16.benchmark
benchmark/tpcds/sf1/q17.benchmark
benchmark/tpcds/sf1/q18.benchmark
benchmark/tpcds/sf1/q19.benchmark
benchmark/tpcds/sf1/q20.benchmark
benchmark/tpcds/sf1/q21.benchmark
benchmark/tpcds/sf1/q22.benchmark
benchmark/tpcds/sf1/q23.benchmark
benchmark/tpcds/sf1/q24.benchmark
benchmark/tpcds/sf1/q25.benchmark
benchmark/tpcds/sf1/q26.benchmark
benchmark/tpcds/sf1/q27.benchmark
benchmark/tpcds/sf1/q28.benchmark
benchmark/tpcds/sf1/q29.benchmark
benchmark/tpcds/sf1/q30.benchmark
benchmark/tpcds/sf1/q31.benchmark
benchmark/tpcds/sf1/q32.benchmark
benchmark/tpcds/sf1/q33.benchmark
benchmark/tpcds/sf1/q34.benchmark
benchmark/tpcds/sf1/q35.benchmark
benchmark/tpcds/sf1/q36.benchmark
benchmark/tpcds/sf1/q37.benchmark
benchmark/tpcds/sf1/q38.benchmark
benchmark/tpcds/sf1/q39.benchmark
benchmark/tpcds/sf1/q40.benchmark
benchmark/tpcds/sf1/q41.benchmark
benchmark/tpcds/sf1/q42.benchmark
benchmark/tpcds/sf1/q43.benchmark
benchmark/tpcds/sf1/q44.benchmark
benchmark/tpcds/sf1/q45.benchmark
benchmark/tpcds/sf1/q46.benchmark
benchmark/tpcds/sf1/q47.benchmark
benchmark/tpcds/sf1/q48.benchmark
benchmark/tpcds/sf1/q49.benchmark
benchmark/tpcds/sf1/q50.benchmark
benchmark/tpcds/sf1/q51.benchmark
benchmark/tpcds/sf1/q52.benchmark
benchmark/tpcds/sf1/q53.benchmark
benchmark/tpcds/sf1/q54.benchmark
benchmark/tpcds/sf1/q55.benchmark
benchmark/tpcds/sf1/q56.benchmark
benchmark/tpcds/sf1/q57.benchmark
benchmark/tpcds/sf1/q58.benchmark
benchmark/tpcds/sf1/q59.benchmark
benchmark/tpcds/sf1/q60.benchmark
benchmark/tpcds/sf1/q61.benchmark
benchmark/tpcds/sf1/q62.benchmark
benchmark/tpcds/sf1/q63.benchmark
benchmark/tpcds/sf1/q64.benchmark
benchmark/tpcds/sf1/q65.benchmark
benchmark/tpcds/sf1/q66.benchmark
benchmark/tpcds/sf1/q67.benchmark
benchmark/tpcds/sf1/q68.benchmark
benchmark/tpcds/sf1/q69.benchmark
benchmark/tpcds/sf1/q70.benchmark
benchmark/tpcds/sf1/q71.benchmark
benchmark/tpcds/sf1/q72.benchmark
benchmark/tpcds/sf1/q73.benchmark
benchmark/tpcds/sf1/q74.benchmark
benchmark/tpcds/sf1/q75.benchmark
benchmark/tpcds/sf1/q76.benchmark
benchmark/tpcds/sf1/q77.benchmark
benchmark/tpcds/sf1/q78.benchmark
benchmark/tpcds/sf1/q79.benchmark
benchmark/tpcds/sf1/q80.benchmark
benchmark/tpcds/sf1/q81.benchmark
benchmark/tpcds/sf1/q82.benchmark
benchmark/tpcds/sf1/q83.benchmark
benchmark/tpcds/sf1/q84.benchmark
benchmark/tpcds/sf1/q85.benchmark
benchmark/tpcds/sf1/q86.benchmark
benchmark/tpcds/sf1/q87.benchmark
benchmark/tpcds/sf1/q88.benchmark
benchmark/tpcds/sf1/q89.benchmark
benchmark/tpcds/sf1/q90.benchmark
benchmark/tpcds/sf1/q91.benchmark
benchmark/tpcds/sf1/q92.benchmark
benchmark/tpcds/sf1/q93.benchmark
benchmark/tpcds/sf1/q94.benchmark
benchmark/tpcds/sf1/q95.benchmark
benchmark/tpcds/sf1/q96.benchmark
benchmark/tpcds/sf1/q97.benchmark
benchmark/tpcds/sf1/q98.benchmark
benchmark/tpcds/sf1/q99.benchmark
22
external/duckdb/.github/regression/tpch.csv
vendored
22
external/duckdb/.github/regression/tpch.csv
vendored
@@ -1,22 +0,0 @@
benchmark/tpch/sf1/q01.benchmark
benchmark/tpch/sf1/q02.benchmark
benchmark/tpch/sf1/q03.benchmark
benchmark/tpch/sf1/q04.benchmark
benchmark/tpch/sf1/q05.benchmark
benchmark/tpch/sf1/q06.benchmark
benchmark/tpch/sf1/q07.benchmark
benchmark/tpch/sf1/q08.benchmark
benchmark/tpch/sf1/q09.benchmark
benchmark/tpch/sf1/q10.benchmark
benchmark/tpch/sf1/q11.benchmark
benchmark/tpch/sf1/q12.benchmark
benchmark/tpch/sf1/q13.benchmark
benchmark/tpch/sf1/q14.benchmark
benchmark/tpch/sf1/q15.benchmark
benchmark/tpch/sf1/q16.benchmark
benchmark/tpch/sf1/q17.benchmark
benchmark/tpch/sf1/q18.benchmark
benchmark/tpch/sf1/q19.benchmark
benchmark/tpch/sf1/q20.benchmark
benchmark/tpch/sf1/q21.benchmark
benchmark/tpch/sf1/q22.benchmark
@@ -1,22 +0,0 @@
benchmark/tpch/sf1-parquet/q01.benchmark
benchmark/tpch/sf1-parquet/q02.benchmark
benchmark/tpch/sf1-parquet/q03.benchmark
benchmark/tpch/sf1-parquet/q04.benchmark
benchmark/tpch/sf1-parquet/q05.benchmark
benchmark/tpch/sf1-parquet/q06.benchmark
benchmark/tpch/sf1-parquet/q07.benchmark
benchmark/tpch/sf1-parquet/q08.benchmark
benchmark/tpch/sf1-parquet/q09.benchmark
benchmark/tpch/sf1-parquet/q10.benchmark
benchmark/tpch/sf1-parquet/q11.benchmark
benchmark/tpch/sf1-parquet/q12.benchmark
benchmark/tpch/sf1-parquet/q13.benchmark
benchmark/tpch/sf1-parquet/q14.benchmark
benchmark/tpch/sf1-parquet/q15.benchmark
benchmark/tpch/sf1-parquet/q16.benchmark
benchmark/tpch/sf1-parquet/q17.benchmark
benchmark/tpch/sf1-parquet/q18.benchmark
benchmark/tpch/sf1-parquet/q19.benchmark
benchmark/tpch/sf1-parquet/q20.benchmark
benchmark/tpch/sf1-parquet/q21.benchmark
benchmark/tpch/sf1-parquet/q22.benchmark
102
external/duckdb/.github/workflows/Android.yml
vendored
102
external/duckdb/.github/workflows/Android.yml
vendored
@@ -1,102 +0,0 @@
name: Android
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**'
      - '!.github/workflows/Android.yml'
  pull_request:
    types: [opened, reopened, ready_for_review]
    paths-ignore:
      - '**'
      - '!.github/workflows/Android.yml'
      - '!.github/patches/duckdb-wasm/**'


concurrency:
  group: android-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}

jobs:
  android:
    name: Android
    runs-on: ubuntu-latest
    if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/feature' }}

    strategy:
      matrix:
        arch: [ armeabi-v7a, arm64-v8a ]

    env:
      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      GEN: ninja
      EXTENSION_STATIC_BUILD: 1
      DUCKDB_PLATFORM: android_${{ matrix.arch}}
      DUCKDB_CUSTOM_PLATFORM: android_${{ matrix.arch}}
      CMAKE_VARS_BUILD: -DBUILD_UNITTESTS=0 -DBUILD_SHELL=0 -DANDROID_ABI=${{ matrix.arch}} -DCMAKE_TOOLCHAIN_FILE=./android-ndk/build/cmake/android.toolchain.cmake -DANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES=ON

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - name: Install Ninja
        shell: bash
        run: sudo apt-get -y update && sudo apt-get -y install ninja-build

      - name: Checkout (again)
        shell: bash
        run: git checkout ${{ inputs.git_ref }}

      - name: Install Android NDK
        shell: bash
        run: |
          wget https://dl.google.com/android/repository/android-ndk-r27-linux.zip
          unzip android-ndk-r27-linux.zip
          mv android-ndk-r27 android-ndk

      - name: Build
        shell: bash
        run: make

      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          zip -j libduckdb-android_${{matrix.arch}}.zip build/release/src/libduckdb*.* src/include/duckdb.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-android_${{matrix.arch}}.zip

      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-binaries-android-${{matrix.arch}}
          path: |
            libduckdb-android_${{matrix.arch}}.zip
@@ -1,221 +0,0 @@
name: Bundle Static Libraries
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**'
      - '!.github/workflows/BundleStaticLibs.yml'
  pull_request:
    types: [opened, reopened, ready_for_review]
    paths-ignore:
      - '**'
      - '!.github/workflows/BundleStaticLibs.yml'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}

jobs:
  bundle-osx-static-libs:
    name: OSX static libs
    strategy:
      matrix:
        include:
          - xcode_target_flag: "x86_64"
            architecture: "amd64"
          - xcode_target_flag: "arm64"
            architecture: "arm64"
    runs-on: macos-latest
    env:
      EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      GEN: ninja
      OSX_BUILD_ARCH: ${{ matrix.xcode_target_flag }}
      DUCKDB_PLATFORM: osx_${{ matrix.architecture }}

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install Ninja
        run: brew install ninja

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make

      - name: Bundle static library
        shell: bash
        run: |
          make gather-libs

      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python3 scripts/amalgamation.py
          zip -r -j static-libs-osx-${{ matrix.architecture }}.zip src/include/duckdb.h build/release/libs/
          ./scripts/upload-assets-to-staging.sh github_release static-libs-osx-${{ matrix.architecture }}.zip

      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-static-libs-osx-${{ matrix.architecture }}
          path: |
            static-libs-osx-${{ matrix.architecture }}.zip


  bundle-mingw-static-lib:
    name: Windows MingW static libs
    runs-on: windows-latest
    env:
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - uses: r-lib/actions/setup-r@v2
        with:
          r-version: 'devel'
          update-rtools: true
          rtools-version: '42' # linker bug in 43 ^^

      # TODO: this action is deprecated, can we rework this to avoid using it?
      - uses: ./.github/actions/build_extensions
        with:
          duckdb_arch: windows_amd64_mingw
          vcpkg_target_triplet: x64-mingw-static
          treat_warn_as_error: 0
          override_cc: gcc
          override_cxx: g++
          vcpkg_build: 1
          no_static_linking: 0
          run_tests: 0
          run_autoload_tests: 0
          build_in_tree_extensions: 0
          build_out_of_tree_extensions: 0
          bundle_static_lib_mode: 1

      - name: Bundle static library
        shell: bash
        run: |
          make gather-libs

      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          zip -r -j static-libs-windows-mingw.zip src/include/duckdb.h build/release/libs/
          ./scripts/upload-assets-to-staging.sh github_release static-libs-windows-mingw.zip

      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-static-libs-windows-mingw
          path: |
            static-libs-windows-mingw.zip

  bundle-linux-static-libs:
    strategy:
      fail-fast: false
      matrix:
        config: [ { runner: ubuntu-latest, arch: amd64, image: x86_64 }, { runner: ubuntu-24.04-arm, arch: arm64, image: aarch64 } ]

    name: Linux Static Libraries (${{ matrix.config.arch }})
    runs-on: ${{ matrix.config.runner }}

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - name: Build
        shell: bash
        run: |
          export PWD=`pwd`
          docker run \
            -v$PWD:$PWD \
            -e CMAKE_BUILD_PARALLEL_LEVEL=2 \
            -e OVERRIDE_GIT_DESCRIBE=$OVERRIDE_GIT_DESCRIBE \
            -e EXTENSION_CONFIGS="$PWD/.github/config/bundled_extensions.cmake" \
            -e ENABLE_EXTENSION_AUTOLOADING=1 \
            -e ENABLE_EXTENSION_AUTOINSTALL=1 \
            -e BUILD_BENCHMARK=1 \
            -e FORCE_WARN_UNUSED=1 \
            -e EXPORT_DYNAMIC_SYMBOLS=1 \
            quay.io/pypa/manylinux_2_28_${{ matrix.config.image }} \
            bash -c "
            set -e
            yum install -y perl-IPC-Cmd gcc-toolset-12 gcc-toolset-12-gcc-c++

            source /opt/rh/gcc-toolset-12/enable
            export CC=gcc
            export CXX=g++

            git config --global --add safe.directory $PWD
            make gather-libs -C $PWD
            "
      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"

      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python3 scripts/amalgamation.py
          zip -r -j static-libs-linux-${{ matrix.config.arch }}.zip src/include/duckdb.h build/release/libs/
          ./scripts/upload-assets-to-staging.sh github_release static-libs-linux-${{ matrix.config.arch }}.zip
      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-static-libs-linux-${{ matrix.config.arch }}
          path: |
            static-libs-linux-${{ matrix.config.arch }}.zip
@@ -1,26 +0,0 @@
name: Check Issue for Code Formatting
on:
  issues:
    types:
      - opened

env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  ISSUE_BODY: ${{ github.event.issue.body }}
jobs:
  check_code_formatting:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Check issue for code formatting
        run: |
          echo "$ISSUE_BODY" >> issue-text.md
          if ! cat issue-text.md | python3 scripts/check-issue-for-code-formatting.py; then
            gh issue comment ${{ github.event.issue.number }} --body-file .github/workflows/code-formatting-warning.md
          fi
158
external/duckdb/.github/workflows/CodeQuality.yml
vendored
158
external/duckdb/.github/workflows/CodeQuality.yml
vendored
@@ -1,158 +0,0 @@
name: CodeQuality
on:
  workflow_dispatch:
    inputs:
      explicit_checks:
        description: 'Pass which checks to run or remain empty for default checks'
        type: string
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/lcov_exclude'
      - '!.github/workflows/CodeQuality.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/lcov_exclude'
      - '!.github/workflows/CodeQuality.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}

jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PR
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"

  format-check:
    name: Format Check
    runs-on: ubuntu-22.04
    needs: check-draft
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build clang-format-11 && sudo pip3 install cmake-format 'black==24.*' cxxheaderparser pcpp 'clang_format==11.0.1'

      - name: List Installed Packages
        shell: bash
        run: pip3 freeze

      - name: Format Check
        shell: bash
        run: |
          clang-format --version
          clang-format --dump-config
          black --version
          make format-check-silent

      - name: Generated Check
        shell: bash
        run: |
          make generate-files
          git diff --exit-code

  enum-check:
    name: C Enum Integrity Check
    needs: format-check
    runs-on: ubuntu-22.04

    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install python dependencies
        if: ${{ !startsWith(github.ref, 'refs/tags/v') }}
        shell: bash
        run: python -m pip install cxxheaderparser pcpp

      - name: Verify C enum integrity
        if: ${{ !startsWith(github.ref, 'refs/tags/v') }}
        shell: bash
        run: python scripts/verify_enum_integrity.py src/include/duckdb.h

  tidy-check:
    name: Tidy Check
    runs-on: ubuntu-24.04
    needs: format-check

    env:
      CC: gcc
      CXX: g++
      GEN: ninja
      TIDY_THREADS: 4
      TIDY_CHECKS: ${{ inputs.explicit_checks && inputs.explicit_checks || '' }}

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build clang-tidy && sudo pip3 install pybind11[global] --break-system-packages

      - name: Setup Ccache
        if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/feature' }}
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Download clang-tidy-cache
        if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/feature' }}
        shell: bash
        run: |
          set -e
          curl -Lo /tmp/clang-tidy-cache https://github.com/ejfitzgerald/clang-tidy-cache/releases/download/v0.4.0/clang-tidy-cache-linux-amd64
          md5sum /tmp/clang-tidy-cache | grep 880b290d7bbe7c1fb2a4f591f9a86cc1
          chmod +x /tmp/clang-tidy-cache

      - name: Tidy Check
        shell: bash
        if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/feature' }}
        run: make tidy-check TIDY_BINARY=/tmp/clang-tidy-cache

      - name: Tidy Check Diff
        shell: bash
        if: ${{ github.ref != 'refs/heads/main' && github.ref != 'refs/heads/feature' }}
        run: make tidy-check-diff
337
external/duckdb/.github/workflows/CrossVersion.yml
vendored
337
external/duckdb/.github/workflows/CrossVersion.yml
vendored
@@ -1,337 +0,0 @@
name: Cross Version DB test
on:
  workflow_call:
    inputs:
      git_ref:
        type: string
  workflow_dispatch:
    inputs:
      git_ref:
        type: string
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**'
      - '!.github/workflows/CrossVersion.yml'

concurrency:
  group: crossversion-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}

jobs:
  osx-step-1:
    # Builds binaries for osx
    name: OSX Release
    runs-on: macos-14
    strategy:
      matrix:
        version: [ 'v1.0.0', 'v1.1.3', 'v1.2.2', 'v1.3-ossivalis', 'main' ]
      fail-fast: false
    env:
      EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      GEN: ninja

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ matrix.version }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install Ninja
        run: brew install ninja file

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make

      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"

      - name: Unit Test
        shell: bash
        run: |
          ./build/release/test/unittest --force-storage --test-temp-dir my_local_folder || true
          rm -rf my_local_folder/hive

      - uses: actions/upload-artifact@v4
        with:
          name: files-osx-${{ matrix.version }}
          path: |
            my_local_folder/*

  osx-step-2:
    # Builds binaries for linux
    name: OSX Release test
    runs-on: macos-14
    needs:
      - osx-step-1
      - linux-step-1
    strategy:
      matrix:
        version: [ 'v1.0.0', 'v1.1.3', 'v1.2.2', 'v1.3-ossivalis', 'main' ]
      fail-fast: false
    env:
      EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      GEN: ninja

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ matrix.version }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install Ninja
        run: brew install ninja

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make

      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"

      - uses: actions/download-artifact@v4
        with:
          name: files-osx-v1.0.0
          path: osx_v1_0_0
      - uses: actions/download-artifact@v4
        with:
          name: files-osx-v1.1.3
          path: osx_v1_1_3
      - uses: actions/download-artifact@v4
        with:
          name: files-osx-v1.2.2
          path: osx_v1_2_2
      - uses: actions/download-artifact@v4
        with:
          name: files-osx-v1.3-ossivalis
          path: osx_v1_3-ossivalis
      - uses: actions/download-artifact@v4
        with:
          name: files-osx-main
          path: osx_main
      - uses: actions/download-artifact@v4
        with:
          name: files-linux-v1.0.0
          path: linux_v1_0_0
      - uses: actions/download-artifact@v4
        with:
          name: files-linux-v1.1.3
          path: linux_v1_1_3
      - uses: actions/download-artifact@v4
        with:
          name: files-linux-v1.2.2
          path: linux_v1_2_2
      - uses: actions/download-artifact@v4
        with:
          name: files-linux-v1.3-ossivalis
          path: linux_v1_3-ossivalis
      - uses: actions/download-artifact@v4
        with:
          name: files-linux-main
          path: linux_main

      - name: Cross test
        shell: bash
        run: |
          touch report
          for folder in osx_v1_0_0 osx_v1_1_3 osx_main linux_main linux_v1_0_0 linux_v1_1_3 linux_v1_2_2 linux_v1_2 osx_v1_2_2 osx_v1_2; do
            for filename in $folder/*; do
              touch $filename.wal && cp $filename.wal a.db.wal 2>/dev/null && cp $filename a.db 2>/dev/null && (./build/release/duckdb a.db -c "ATTACH 'b.db'; COPY FROM DATABASE a TO b;" 2>out || (grep "but it is not a valid DuckDB database file!" out 2>/dev/null || ( echo "--> " $filename && cat out && echo "" && (grep -i "internal error" out && echo "--> " $filename >> report && cat out >> report && echo "" >> report)))) || true
              rm -f b.db a.db b.db.wal a.db.wal
            done
          done

      - name: Internal error report
        shell: bash
        run: |
          cat report

  linux-step-1:
    # Builds binaries for linux
    name: Linux Release
    runs-on: ubuntu-latest
    strategy:
      matrix:
        version: [ 'v1.0.0', 'v1.1.3', 'v1.2.2', 'v1.3-ossivalis', 'main' ]
      fail-fast: false
    env:
      EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      GEN: ninja

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ matrix.version }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make

      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"

      - name: Unit Test
        shell: bash
        run: |
          ./build/release/test/unittest --force-storage --test-temp-dir my_local_folder || true
          rm -rf my_local_folder/hive

      - uses: actions/upload-artifact@v4
        with:
          name: files-linux-${{ matrix.version }}
          path: |
            my_local_folder/*

  linux-step-2:
    # Builds binaries for linux
    name: Linux Release Test
    runs-on: ubuntu-latest
    needs:
      - osx-step-1
      - linux-step-1
    strategy:
      matrix:
        version: [ 'v1.0.0', 'v1.1.3', 'v1.2.2', 'v1.3-ossivalis', 'main' ]
      fail-fast: false
    env:
      EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      GEN: ninja

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ matrix.version }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make

      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"

      - uses: actions/download-artifact@v4
        with:
          name: files-osx-v1.0.0
          path: osx_v1_0_0
      - uses: actions/download-artifact@v4
        with:
          name: files-osx-v1.1.3
          path: osx_v1_1_3
      - uses: actions/download-artifact@v4
        with:
          name: files-osx-v1.2.2
          path: osx_v1_2_2
      - uses: actions/download-artifact@v4
        with:
          name: files-osx-v1.3-ossivalis
          path: osx_v1_3-ossivalis
      - uses: actions/download-artifact@v4
        with:
          name: files-osx-main
          path: osx_main
      - uses: actions/download-artifact@v4
        with:
          name: files-linux-v1.0.0
          path: linux_v1_0_0
      - uses: actions/download-artifact@v4
        with:
          name: files-linux-v1.1.3
          path: linux_v1_1_3
      - uses: actions/download-artifact@v4
        with:
          name: files-linux-v1.2.2
          path: linux_v1_2_2
      - uses: actions/download-artifact@v4
        with:
          name: files-linux-v1.3-ossivalis
          path: linux_v1_3-ossivalis
      - uses: actions/download-artifact@v4
        with:
          name: files-linux-main
          path: linux_main

      - name: Cross test
        shell: bash
        run: |
          touch report
          for folder in osx_v1_0_0 osx_v1_1_3 osx_main osx_v1_3-ossivalis linux_main linux_v1_3-ossivalis linux_v1_0_0 linux_v1_1_3 linux_v1_2_2 linux_v1_2 osx_v1_2_2 osx_v1_2; do
            for filename in $folder/*; do
              touch $filename.wal && cp $filename.wal a.db.wal 2>/dev/null && cp $filename a.db 2>/dev/null && (./build/release/duckdb a.db -c "ATTACH 'b.db'; COPY FROM DATABASE a TO b;" 2>out || (grep "but it is not a valid DuckDB database file!" out 2>/dev/null || ( echo "--> " $filename && cat out && echo "" && (grep -i "internal error" out && echo "--> " $filename >> report && cat out >> report && echo "" >> report)))) || true
              rm -f b.db a.db b.db.wal a.db.wal
            done
          done

      - name: Internal error report
        shell: bash
        run: |
          cat report
@@ -1,58 +0,0 @@
name: Docker tests
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**'
      - '!.github/workflows/DockerTests.yml'
      - '!scripts/test_docker_images.sh'
  pull_request:
    types: [opened, reopened, ready_for_review]
    paths-ignore:
      - '**'
      - '!.github/workflows/DockerTests.yml'
      - '!scripts/test_docker_images.sh'

concurrency:
  group: docker-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}

jobs:
  linux-x64-docker:
    name: Docker tests on Linux (x64)
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - name: Build
        shell: bash
        run: |
          ./scripts/test_docker_images.sh
26
external/duckdb/.github/workflows/DraftMe.yml
vendored
26
external/duckdb/.github/workflows/DraftMe.yml
vendored
@@ -1,26 +0,0 @@
# Marks all changed PR as draft
name: Draft on Synchronize
on:
  pull_request:
    types: [ synchronize ]

concurrency:
  group: shouldturntodraft-${{ github.event.number }}
  cancel-in-progress: true

jobs:
  mark-as-draft:
    name: Mark as draft
    if: github.event.pull_request.draft == false
    runs-on: ubuntu-latest
    steps:
      - name: Save PR number
        env:
          PR_NUMBER: ${{ github.event.pull_request.node_id }}
        run: |
          mkdir -p ./pr
          echo $PR_NUMBER > ./pr/pr_number
      - uses: actions/upload-artifact@v4
        with:
          name: pr_number
          path: pr/
21
external/duckdb/.github/workflows/DraftMeNot.yml
vendored
21
external/duckdb/.github/workflows/DraftMeNot.yml
vendored
@@ -1,21 +0,0 @@
# Marks all changed PR as draft
name: Placeholder to cancel auto draft
on:
  pull_request:
    types: [ ready_for_review ]

concurrency:
  group: shouldturntodraft-${{ github.event.number }}
  cancel-in-progress: true

jobs:
  mark-as-draft:
    name: Placeholder
    if: github.event.pull_request.draft == true
    runs-on: ubuntu-latest
    steps:
      - name: Print PR number
        env:
          PR_NUMBER: ${{ github.event.pull_request.node_id }}
        run: |
          echo $PR_NUMBER
60
external/duckdb/.github/workflows/DraftPR.yml
vendored
60
external/duckdb/.github/workflows/DraftPR.yml
vendored
@@ -1,60 +0,0 @@
# Marks all changed PR as draft
name: Move PR to Draft
on:
  workflow_run:
    workflows: [Draft on Synchronize]
    types:
      - completed

jobs:
  actually-move-to-draft:
    if: ${{ github.event.workflow_run.conclusion == 'success' }}
    runs-on: ubuntu-latest
    steps:
      - name: 'Download artifact'
        uses: actions/github-script@v7
        with:
          script: |
            let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
              owner: context.repo.owner,
              repo: context.repo.repo,
              run_id: context.payload.workflow_run.id,
            });
            let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
              return artifact.name == "pr_number"
            })[0];
            let download = await github.rest.actions.downloadArtifact({
              owner: context.repo.owner,
              repo: context.repo.repo,
              artifact_id: matchArtifact.id,
              archive_format: 'zip',
            });
            let fs = require('fs');
            fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/pr_number.zip`, Buffer.from(download.data));

      - name: 'Unzip artifact'
        run: unzip pr_number.zip

      - name: 'Extract PR node id'
        shell: bash
        run: |
          (echo -n "PR_NUMBER=" | cat - pr_number) >> $GITHUB_ENV

      - name: 'Actually move to draft'
        shell: bash
        env:
          MOVE_PR_TO_DRAFT_TOKEN_ENV: ${{ secrets.MOVE_PR_TO_DRAFT_TOKEN }}
        if: ${{ env.MOVE_PR_TO_DRAFT_TOKEN_ENV != '' }}
        run: |
          echo ${{ env.MOVE_PR_TO_DRAFT_TOKEN_ENV }} | gh auth login --with-token
          gh api graphql -F id=${{ env.PR_NUMBER }} -f query='
            mutation($id: ID!) {
              convertPullRequestToDraft(input: { pullRequestId: $id }) {
                pullRequest {
                  id
                  number
                  isDraft
                }
              }
            }
          '
349
external/duckdb/.github/workflows/ExtendedTests.yml
vendored
349
external/duckdb/.github/workflows/ExtendedTests.yml
vendored
@@ -1,349 +0,0 @@
name: ExtendedTests
on:
  workflow_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**'
      - '!.github/workflows/ExtendedTests.yml'
  pull_request:
    types: [opened, reopened, ready_for_review]
    paths-ignore:
      - '**'
      - '!.github/workflows/ExtendedTests.yml'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true

env:
  CCACHE_SAVE: ${{ github.repository != 'duckdb/duckdb' }}
  BASE_BRANCH: ${{ github.base_ref || (endsWith(github.ref, '_feature') && 'feature' || 'main') }}

jobs:
  regression-lto-benchmark-runner:
    name: Benchmark runner lto vs non-lto (OSX)
    runs-on: macos-latest
    env:
      CC: clang
      CXX: clang++
      GEN: ninja
      BUILD_BENCHMARK: 1
      CORE_EXTENSIONS: "tpch;tpcds;httpfs"

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: brew install ninja llvm && pip install requests

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: |
          LTO=full make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..

      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/

      - name: Regression Test Micro
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --threads 2

      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --threads 2
      - name: Regression Test TPCH-PARQUET
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch_parquet.csv --threads 2


      - name: Regression Test TPCDS
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --threads 2


      - name: Regression Test H2OAI
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --threads 2

      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --threads 2

  regression-clang16-vs-clang14-benchmark-runner:
    name: Benchmark runner clang-16 vs clang-14 (OSX)
    runs-on: macos-latest
    env:
      CC: clang
      CXX: clang++
      GEN: ninja
      BUILD_BENCHMARK: 1
      CORE_EXTENSIONS: "tpch;tpcds;httpfs"

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: brew install ninja llvm && pip install requests

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: |
          CMAKE_LLVM_PATH='/opt/homebrew/opt/llvm' make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..

      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/

      - name: Regression Test Micro
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --threads 2

      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --threads 2

      - name: Regression Test TPCH-PARQUET
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch_parquet.csv --threads 2

      - name: Regression Test TPCDS
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --threads 2

      - name: Regression Test H2OAI
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --threads 2

      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --threads 2

  regression-clang-benchmark-runner:
    name: Benchmark runner clang vs gcc
    runs-on: ubuntu-latest
    env:
      CC: gcc
      CXX: g++
      GEN: ninja
      BUILD_BENCHMARK: 1
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: "tpch;tpcds;httpfs"

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build llvm && pip install requests

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: |
          #### This should also be an alternative way to install llvm at a specific version
          # wget https://apt.llvm.org/llvm.sh
          # chmod +x llvm.sh
          # sudo ./llvm.sh 17
          #####
          CMAKE_LLVM_PATH='/usr/lib/llvm-14' make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..

      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/

      - name: Regression Test Micro
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --threads 2

      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --threads 2

      - name: Regression Test TPCH-PARQUET
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch_parquet.csv --threads 2

      - name: Regression Test TPCDS
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --threads 2

      - name: Regression Test H2OAI
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --threads 2

      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --threads 2

  regression-flto-gcc-benchmark-runner:
    name: Benchmark runner gcc flto vs gcc
    runs-on: ubuntu-latest
    env:
      CC: gcc
      CXX: g++
      GEN: ninja
      BUILD_BENCHMARK: 1
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: "tpch;tpcds;httpfs"

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build && pip install requests

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: |
          LTO='full' make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..

      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/

      - name: Regression Test Micro
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --threads 2

      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --threads 2

      - name: Regression Test TPCH-PARQUET
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch_parquet.csv --threads 2

      - name: Regression Test TPCDS
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --threads 2

      - name: Regression Test H2OAI
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --threads 2

      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --nofail --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --threads 2
366
external/duckdb/.github/workflows/Extensions.yml
vendored
366
external/duckdb/.github/workflows/Extensions.yml
vendored
@@ -1,366 +0,0 @@
|
||||
#
|
||||
# This workflow is responsible for building all DuckDB extensions
|
||||
#
|
||||
|
||||
name: Extensions (all platforms)
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
override_git_describe:
|
||||
type: string
|
||||
git_ref:
|
||||
type: string
|
||||
skip_tests:
|
||||
type: string
|
||||
run_all:
|
||||
type: string
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
override_git_describe:
|
||||
description: 'Version tag to override git describe. Use to produce binaries'
|
||||
type: string
|
||||
git_ref:
|
||||
description: 'Set to override the DuckDB version, leave empty for current commit'
|
||||
type: string
|
||||
required: false
|
||||
default: ''
|
||||
extra_exclude_archs:
|
||||
description: 'Inject more architectures to skip'
|
||||
type: string
|
||||
required: false
|
||||
default: ''
|
||||
skip_tests:
|
||||
description: 'Set to true to skip all testing'
|
||||
type: boolean
|
||||
required: false
|
||||
default: false
|
||||
run_all:
|
||||
type: string
|
||||
required: false
|
||||
default: 'true'
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'main'
|
||||
- 'feature'
|
||||
- 'v*.*-*'
|
||||
paths-ignore:
|
||||
- '**.md'
|
||||
- 'tools/**'
|
||||
- '!tools/shell/**'
|
||||
- '.github/patches/duckdb-wasm/**'
|
||||
- '.github/workflows/**'
|
||||
- '!.github/workflows/Extensions.yml'
|
||||
- '!.github/workflows/_extension_distribution.yml'
|
||||
merge_group:
|
||||
pull_request:
|
||||
types: [opened, reopened, ready_for_review, converted_to_draft]
|
||||
paths-ignore:
|
||||
- '**.md'
|
||||
- 'tools/**'
|
||||
- '!tools/shell/**'
|
||||
- '.github/patches/duckdb-wasm/**'
|
||||
- '.github/workflows/**'
|
||||
- '!.github/workflows/Extensions.yml'
|
||||
|
||||
concurrency:
|
||||
group: extensions-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GH_TOKEN }}
|
||||
|
||||
jobs:
|
||||
check-draft:
|
||||
# We run all other jobs on PRs only if they are not draft PR
|
||||
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Preliminary checks on CI
|
||||
run: echo "Event name is ${{ github.event_name }}"
|
||||
|
||||
# This first step loads the various extension configs from the ~/.github/config directory storing them to drive the build jobs
|
||||
load-extension-configs:
|
||||
name: Load Extension Configs
|
||||
runs-on: ubuntu-latest
|
||||
needs: check-draft
|
||||
outputs:
|
||||
main_extensions_config: ${{ steps.set-main-extensions.outputs.extension_config }}
|
||||
main_extensions_exclude_archs: ${{ steps.set-main-extensions.outputs.exclude_archs }}
|
||||
rust_based_extensions_config: ${{ steps.set-rust-based-extensions.outputs.extension_config }}
|
||||
rust_based_extensions_exclude_archs: ${{ steps.set-rust-based-extensions.outputs.exclude_archs }}
|
||||
env:
|
||||
# NOTE: on PRs we exclude some archs to speed things up
|
||||
BASE_EXCLUDE_ARCHS: ${{ (github.event_name == 'pull_request' || inputs.run_all != 'true') && 'wasm_eh;wasm_threads;windows_amd64_mingw;osx_amd64;linux_arm64;linux_amd64_musl;' || '' }}
|
||||
EXTRA_EXCLUDE_ARCHS: ${{ inputs.extra_exclude_archs }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ inputs.git_ref }}
|
||||
|
||||
- id: set-main-extensions
|
||||
name: Configure main extensions
|
||||
env:
|
||||
IN_TREE_CONFIG_FILE: .github/config/in_tree_extensions.cmake
|
||||
OUT_OF_TREE_CONFIG_FILE: .github/config/out_of_tree_extensions.cmake
|
||||
DEFAULT_EXCLUDE_ARCHS: ''
|
||||
run: |
|
||||
# Set config
|
||||
echo exclude_archs="$DEFAULT_EXCLUDE_ARCHS;$BASE_EXCLUDE_ARCHS;$EXTRA_EXCLUDE_ARCHS" >> $GITHUB_OUTPUT
|
||||
in_tree_extensions="`cat $IN_TREE_CONFIG_FILE`"
|
||||
out_of_tree_extensions="`cat $OUT_OF_TREE_CONFIG_FILE`"
|
||||
echo "extension_config<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$in_tree_extensions" >> $GITHUB_OUTPUT
|
||||
echo -e "\n" >> $GITHUB_OUTPUT
|
||||
echo "$out_of_tree_extensions" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
cat $GITHUB_OUTPUT
|
||||
|
||||
      - id: set-rust-based-extensions
        name: Configure Rust-based extensions
        env:
          CONFIG_FILE: .github/config/rust_based_extensions.cmake
          DEFAULT_EXCLUDE_ARCHS: 'wasm_mvp;wasm_eh;wasm_threads;windows_amd64_rtools;windows_amd64_mingw;linux_amd64_musl'
        run: |
          echo exclude_archs="$DEFAULT_EXCLUDE_ARCHS;$BASE_EXCLUDE_ARCHS;$EXTRA_EXCLUDE_ARCHS" >> $GITHUB_OUTPUT
          rust_based_extensions="`cat .github/config/rust_based_extensions.cmake`"
          echo "extension_config<<EOF" >> $GITHUB_OUTPUT
          echo "$rust_based_extensions" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
          cat $GITHUB_OUTPUT

  # Build the extensions from .github/config/in_tree_extensions.cmake
  main-extensions:
    name: Main Extensions
    needs:
      - load-extension-configs
    uses: ./.github/workflows/_extension_distribution.yml
    with:
      artifact_prefix: main-extensions
      exclude_archs: ${{ needs.load-extension-configs.outputs.main_extensions_exclude_archs }}
      extension_config: ${{ needs.load-extension-configs.outputs.main_extensions_config }}
      override_tag: ${{ inputs.override_git_describe }}
      duckdb_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests && true || false }}
      save_cache: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

  # Build the extensions from .github/config/rust_based_extensions.cmake
  rust-based-extensions:
    name: Rust-based Extensions
    needs:
      - load-extension-configs
    uses: ./.github/workflows/_extension_distribution.yml
    with:
      exclude_archs: ${{ needs.load-extension-configs.outputs.rust_based_extensions_exclude_archs }}
      artifact_prefix: rust-based-extensions
      extension_config: ${{ needs.load-extension-configs.outputs.rust_based_extensions_config }}
      extra_toolchains: 'rust'
      override_tag: ${{ inputs.override_git_describe }}
      duckdb_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests && true || false }}
      save_cache: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

  # Merge all extensions into a single, versioned repository
  create-extension-repository:
    name: Create Extension Repository
    runs-on: ubuntu-latest
    needs:
      - main-extensions
      - rust-based-extensions
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/download-artifact@v4
        name: Download main extensions
        with:
          pattern: main-extensions-${{ github.sha }}*
          path: /tmp/repository_generation/main-extensions

      - uses: actions/download-artifact@v4
        name: Download rust-based extensions
        with:
          pattern: rust-based-extensions-${{ github.sha }}*
          path: /tmp/repository_generation/rust-based-extensions

      - name: Print all extensions
        run: |
          tree /tmp/repository_generation

      - name: Merge into single repository
        run: |
          mkdir /tmp/merged_repository
          cp -r /tmp/repository_generation/*/*/* /tmp/merged_repository
          tree /tmp/merged_repository

      - uses: actions/upload-artifact@v4
        with:
          if-no-files-found: error
          name: extension-repository-${{ github.sha }}
          path: |
            /tmp/merged_repository/**/*.duckdb_extension*

  upload-extensions:
    name: Upload Extensions
    runs-on: ubuntu-latest
    needs:
      - create-extension-repository

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/download-artifact@v4
        with:
          pattern: extension-repository-${{ github.sha }}
          path: /tmp

      - name: List extensions to deploy
        shell: bash
        run: |
          tree /tmp/extension-repository-${{ github.sha }}

      - name: Deploy extensions
        shell: bash
        env:
          AWS_ENDPOINT_URL: ${{ secrets.DUCKDB_CORE_EXTENSION_S3_ENDPOINT }}
          AWS_ACCESS_KEY_ID: ${{secrets.DUCKDB_CORE_EXTENSION_S3_ID}}
          AWS_SECRET_ACCESS_KEY: ${{secrets.DUCKDB_CORE_EXTENSION_S3_SECRET}}
          DUCKDB_DEPLOY_SCRIPT_MODE: for_real
          DUCKDB_EXTENSION_SIGNING_PK: ${{ secrets.DUCKDB_EXTENSION_SIGNING_PK }}
        run: |
          pip install awscli
          ./scripts/extension-upload-repository.sh /tmp/extension-repository-${{ github.sha }}

  autoload-tests:
    name: Extension Autoloading Tests
    if: ${{ !inputs.skip_tests }}
    runs-on: ubuntu-latest
    needs: create-extension-repository

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - name: Setup Build Environment
        run: |
          sudo apt-get update -y -qq
          sudo apt-get install -y -qq ninja-build ccache

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - uses: actions/download-artifact@v4
        with:
          pattern: extension-repository-${{ github.sha }}
          path: /tmp

      - name: List extensions to test with
        shell: bash
        run: |
          tree /tmp/extension-repository-${{ github.sha }}

      - name: Build DuckDB
        env:
          GEN: ninja
          CC: gcc
          CXX: g++
          EXTENSION_CONFIGS: './.github/config/rust_based_extensions.cmake;./.github/config/out_of_tree_extensions.cmake;./.github/config/in_tree_extensions.cmake'
          EXTENSION_TESTS_ONLY: 1
          ENABLE_EXTENSION_AUTOLOADING: 1
          ENABLE_EXTENSION_AUTOINSTALL: 1
        run: |
          make release

      - name: Run Tests
        env:
          LOCAL_EXTENSION_REPO: /tmp/extension-repository-${{ github.sha }}
        run: |
          ./build/release/test/unittest --autoloading available --skip-compiled

  check-load-install-extensions:
    name: Checks extension entries
    if: ${{ !inputs.skip_tests }}
    runs-on: ubuntu-22.04
    needs: create-extension-repository
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.9'

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        env:
          GENERATE_EXTENSION_ENTRIES: 1
          LOCAL_EXTENSION_REPO: build/release/repository_other
        run: |
          make

      - uses: actions/download-artifact@v4
        name: Download extension repository artifact
        with:
          pattern: extension-repository-${{ github.sha }}
          path: /tmp

      - name: Copy over local extension repository
        shell: bash
        run: |
          cp -r /tmp/extension-repository-${{ github.sha }} build/release/repository
          tree build/release/repository
          find build/release/repository -type f ! -path "build/release/repository/*/linux_amd64/*" -delete
          tree build/release/repository

      - name: Check if extension_entries.hpp is up to date
        shell: bash
        env:
          EXTENSION_CONFIGS: '.github/config/in_tree_extensions.cmake;.github/config/out_of_tree_extensions.cmake'
        run: |
          make extension_configuration
          python scripts/generate_extensions_function.py
          pip install "black>=24"
          pip install cmake-format
          pip install "clang_format==11.0.1"
          make format-fix

      - uses: actions/upload-artifact@v4
        with:
          name: extension_entries.hpp
          path: |
            src/include/duckdb/main/extension_entries.hpp

      - name: Check for any difference
        run: |
          git diff --exit-code src/include/duckdb/main/extension_entries.hpp && echo "No differences found"

      - name: Explainer
        if: failure()
        run: |
          echo "There are differences in src/include/duckdb/main/extension_entries.hpp"
          echo "Check the uploaded extension_entries.hpp (in the workflow Summary), and check that in instead of src/include/duckdb/main/extension_entries.hpp"
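The "Check for any difference" step works because `git diff --exit-code` exits non-zero when the regenerated header no longer matches the committed one, which fails the job and triggers the `if: failure()` explainer step. The same generate-then-diff gate in isolation, sketched with a hypothetical generator script and output file:

    # Sketch only: fail CI when a committed generated file is stale.
    # regenerate.py and generated.hpp are hypothetical names.
    python regenerate.py                      # rewrites generated.hpp in place
    if git diff --exit-code generated.hpp; then
      echo "generated.hpp is up to date"
    else
      echo "generated.hpp is stale; commit the regenerated file" >&2
      exit 1
    fi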
63
external/duckdb/.github/workflows/ExtraTests.yml
vendored
@@ -1,63 +0,0 @@
name: Extra Tests
on:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}

jobs:
  regression-test-all:
    name: All Regression Tests
    runs-on: ubuntu-22.04
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
      BUILD_BENCHMARK: 1
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: "tpcd;tpcds;httpfs"

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build && pip install requests

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build Last Release
        shell: bash
        run: |
          make
          git clone https://github.com/duckdb/duckdb.git
          cd duckdb
          git checkout `git tag --list | tail -n 1`
          make
          cd ..

      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/

      - name: Regression Test
        if: always()
        shell: bash
        run: |
          build/release/benchmark/benchmark_runner --list > alltests.list
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks alltests.list --verbose --threads 2
@@ -1,87 +0,0 @@
name: Create or Label Mirror Issue
on:
  issues:
    types:
      - labeled

env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  TITLE_PREFIX: "[duckdb/#${{ github.event.issue.number }}]"
  PUBLIC_ISSUE_TITLE: ${{ github.event.issue.title }}

jobs:
  handle_pr_submitted_or_fix_on_nightly_label:
    if: github.event.label.name == 'PR submitted' || github.event.label.name == 'fixed on nightly'
    runs-on: ubuntu-latest
    steps:
      - name: Remove 'needs triage' label
        run: |
          gh issue edit --repo duckdb/duckdb ${{ github.event.issue.number }} --remove-label "needs triage"

  add_needs_reproducible_example_comment:
    if: github.event.label.name == 'needs reproducible example'
    runs-on: ubuntu-latest
    steps:
      - name: Add comment
        run: |
          cat > needs-reproducible-example-comment.md << EOF
          Thanks for opening this issue in the DuckDB issue tracker! To resolve this issue, our team needs a reproducible example. This includes:

          * A source code snippet which reproduces the issue.
          * The snippet should be self-contained, i.e., it should contain all imports and should use relative paths instead of hard coded paths (please avoid \`/Users/JohnDoe/...\`).
          * A lot of issues can be reproduced with plain SQL code executed in the [DuckDB command line client](https://duckdb.org/docs/api/cli/overview). If you can provide such an example, it greatly simplifies the reproduction process and likely results in a faster fix.
          * If the script needs additional data, please share the data as a CSV, JSON, or Parquet file. Unfortunately, we cannot fix issues that can only be reproduced with a confidential data set. [Support contracts](https://duckdblabs.com/#support) allow sharing confidential data with the core DuckDB team under NDA.

          For more detailed guidelines on how to create reproducible examples, please visit Stack Overflow's [“Minimal, Reproducible Example”](https://stackoverflow.com/help/minimal-reproducible-example) page.
          EOF
          gh issue comment --repo duckdb/duckdb ${{ github.event.issue.number }} --body-file needs-reproducible-example-comment.md

  create_or_label_mirror_issue:
    if: github.event.label.name == 'reproduced' || github.event.label.name == 'under review'
    runs-on: ubuntu-latest
    steps:
      - name: Remove 'needs triage' / 'under review' if 'reproduced'
        if: github.event.label.name == 'reproduced'
        run: |
          gh issue edit --repo duckdb/duckdb ${{ github.event.issue.number }} --remove-label "needs triage" --remove-label "under review" --remove-label "needs reproducible example"

      - name: Remove 'needs triage' / 'reproduced' if 'under review'
        if: github.event.label.name == 'under review'
        run: |
          gh issue edit --repo duckdb/duckdb ${{ github.event.issue.number }} --remove-label "needs triage" --remove-label "reproduced"

      - name: Remove 'needs triage' if 'expected behavior'
        if: github.event.label.name == 'expected behavior'
        run: |
          gh issue edit --repo duckdb/duckdb ${{ github.event.issue.number }} --remove-label "needs triage"

      - name: Get mirror issue number
        run: |
          gh issue list --repo duckdblabs/duckdb-internal --search "${TITLE_PREFIX}" --json title,number --state all --jq ".[] | select(.title | startswith(\"$TITLE_PREFIX\")).number" > mirror_issue_number.txt
          echo "MIRROR_ISSUE_NUMBER=$(cat mirror_issue_number.txt)" >> $GITHUB_ENV

      - name: Print whether mirror issue exists
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" == "" ]; then
            echo "Mirror issue with title prefix '$TITLE_PREFIX' does not exist yet"
          else
            echo "Mirror issue with title prefix '$TITLE_PREFIX' exists with number $MIRROR_ISSUE_NUMBER"
          fi

      - name: Set label environment variable
        run: |
          if ${{ github.event.label.name == 'reproduced' }}; then
            echo "LABEL=reproduced" >> $GITHUB_ENV
            echo "UNLABEL=under review" >> $GITHUB_ENV
          else
            echo "LABEL=under review" >> $GITHUB_ENV
            echo "UNLABEL=reproduced" >> $GITHUB_ENV
          fi

      - name: Create or label issue
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" == "" ]; then
            gh issue create --repo duckdblabs/duckdb-internal --label "$LABEL" --title "$TITLE_PREFIX - $PUBLIC_ISSUE_TITLE" --body "See https://github.com/duckdb/duckdb/issues/${{ github.event.issue.number }}"
          else
            gh issue edit --repo duckdblabs/duckdb-internal $MIRROR_ISSUE_NUMBER --remove-label "$UNLABEL" --add-label "$LABEL"
          fi
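The mirror-issue jobs locate the internal counterpart of a public issue by searching for a `[duckdb/#N]` title prefix and filtering with `--jq`. A standalone sketch of that lookup, assuming an authenticated `gh` CLI and an illustrative issue number:

    # Sketch only: resolve a mirror issue by its title prefix.
    # The issue number 1234 is illustrative.
    TITLE_PREFIX="[duckdb/#1234]"
    gh issue list --repo duckdblabs/duckdb-internal \
      --search "$TITLE_PREFIX" --state all --json title,number \
      --jq ".[] | select(.title | startswith(\"$TITLE_PREFIX\")).number"
    # Prints the mirror issue number, or nothing if no mirror exists yet.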
@@ -1,51 +0,0 @@
name: Update Mirror Issue
on:
  discussion:
    types:
      - labeled
  issues:
    types:
      - closed
      - reopened

env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  TITLE_PREFIX: "[duckdb/#${{ github.event.issue.number || github.event.discussion.number }}]"

jobs:
  update_mirror_issue:
    runs-on: ubuntu-latest
    steps:
      - name: Get mirror issue number
        run: |
          gh issue list --repo duckdblabs/duckdb-internal --search "${TITLE_PREFIX}" --json title,number --state all --jq ".[] | select(.title | startswith(\"$TITLE_PREFIX\")).number" > mirror_issue_number.txt
          echo "MIRROR_ISSUE_NUMBER=$(cat mirror_issue_number.txt)" >> $GITHUB_ENV

      - name: Print whether mirror issue exists
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" == "" ]; then
            echo "Mirror issue with title prefix '$TITLE_PREFIX' does not exist yet"
          else
            echo "Mirror issue with title prefix '$TITLE_PREFIX' exists with number $MIRROR_ISSUE_NUMBER"
          fi

      - name: Add comment with status to mirror issue
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" != "" ]; then
            gh issue comment --repo duckdblabs/duckdb-internal $MIRROR_ISSUE_NUMBER --body "The issue has been ${{ github.event.action }} (https://github.com/duckdb/duckdb/issues/${{ github.event.issue.number || github.event.discussion.number }})."
          fi

      - name: Add closed label to mirror issue
        if: github.event.action == 'closed'
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" != "" ]; then
            gh issue edit --repo duckdblabs/duckdb-internal $MIRROR_ISSUE_NUMBER --add-label "public closed" --remove-label "public reopened"
          fi

      - name: Reopen mirror issue and add reopened label
        if: github.event.action == 'reopened'
        run: |
          if [ "$MIRROR_ISSUE_NUMBER" != "" ]; then
            gh issue reopen --repo duckdblabs/duckdb-internal $MIRROR_ISSUE_NUMBER
            gh issue edit --repo duckdblabs/duckdb-internal $MIRROR_ISSUE_NUMBER --add-label "public reopened" --remove-label "public closed"
          fi
101
external/duckdb/.github/workflows/InvokeCI.yml
vendored
@@ -1,101 +0,0 @@
name: InvokeCI
on:
  repository_dispatch:
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
      run_all:
        type: string
      twine_upload:
        type: string

concurrency:
  group: invokeci-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}-${{ inputs.git_ref }}-${{ inputs.skip_tests }}
  cancel-in-progress: true

jobs:
  extensions:
    uses: ./.github/workflows/Extensions.yml
    secrets: inherit
    with:
      override_git_describe: ${{ inputs.override_git_describe }}
      git_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests }}
      run_all: ${{ inputs.run_all }}

  osx:
    uses: ./.github/workflows/OSX.yml
    secrets: inherit
    with:
      override_git_describe: ${{ inputs.override_git_describe }}
      git_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests }}
      run_all: ${{ inputs.run_all }}

  linux-release:
    uses: ./.github/workflows/LinuxRelease.yml
    secrets: inherit
    with:
      override_git_describe: ${{ inputs.override_git_describe }}
      git_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests }}

  windows:
    uses: ./.github/workflows/Windows.yml
    secrets: inherit
    with:
      override_git_describe: ${{ inputs.override_git_describe }}
      git_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests }}
      run_all: ${{ inputs.run_all }}

  static-libraries:
    uses: ./.github/workflows/BundleStaticLibs.yml
    secrets: inherit
    with:
      override_git_describe: ${{ inputs.override_git_describe }}
      git_ref: ${{ inputs.git_ref }}
      skip_tests: ${{ inputs.skip_tests }}

  prepare-status:
    runs-on: ubuntu-latest
    if: always()
    needs:
      - extensions
      - osx
      - linux-release
      - windows
      - static-libraries
    outputs:
      is-success: ${{ steps.set-output.outputs.success }}
    steps:
      - id: set-output
        shell: bash
        run: |
          if [[ "${{ needs.extensions.result }}" == "success" && \
                "${{ needs.osx.result }}" == "success" && \
                "${{ needs.linux-release.result }}" == "success" && \
                "${{ needs.windows.result }}" == "success" && \
                "${{ needs.static-libraries.result }}" == "success" ]]; then
            echo "success=true" >> $GITHUB_OUTPUT
          else
            echo "success=false" >> $GITHUB_OUTPUT
          fi

  notify-external-repos:
    uses: ./.github/workflows/NotifyExternalRepositories.yml
    secrets: inherit
    needs: prepare-status
    if: ${{ always() }}
    with:
      is-success: ${{ needs.prepare-status.outputs.is-success }}
      target-branch: ${{ inputs.git_ref == '' && github.ref || inputs.git_ref }}
      duckdb-sha: ${{ github.sha }}
      triggering-event: ${{ github.event_name }}
      should-publish: 'true'
      override-git-describe: ${{ inputs.override_git_describe }}
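The `prepare-status` job runs with `if: always()` so it can still aggregate upstream results when some jobs fail, then folds every `needs.<job>.result` into a single `is-success` output that `notify-external-repos` consumes. The core of that pattern, sketched with hypothetical job ids:

    # Sketch only: collapse several job results into a single output.
    # "build" and "test" are hypothetical job ids.
    if [[ "${{ needs.build.result }}" == "success" && \
          "${{ needs.test.result }}" == "success" ]]; then
      echo "success=true" >> "$GITHUB_OUTPUT"
    else
      echo "success=false" >> "$GITHUB_OUTPUT"
    fi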
@@ -1,30 +0,0 @@
name: Close Stale Issues
on:
  repository_dispatch:
  workflow_dispatch:

jobs:
  close_stale_issues:
    permissions:
      actions: write
      contents: write # only for delete-branch option
      issues: write
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - name: Close stale issues
        uses: actions/stale@v9
        env:
          GH_TOKEN: ${{ secrets.GH_TOKEN }}
        with:
          stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove stale label or comment or this will be closed in 30 days.'
          stale-pr-message: 'This pull request is stale because it has been open 90 days with no activity. Remove stale label or comment or this will be closed in 30 days.'
          close-issue-message: 'This issue was closed because it has been stale for 30 days with no activity.'
          close-pr-message: 'This pull request was closed because it has been stale for 30 days with no activity.'
          exempt-issue-labels: 'no stale'
          exempt-pr-labels: 'no stale'
          days-before-stale: 365
          days-before-close: 30
          operations-per-run: 500
          stale-issue-label: stale
          stale-pr-label: stale
115
external/duckdb/.github/workflows/Julia.yml
vendored
@@ -1,115 +0,0 @@
name: Julia
on:
  workflow_dispatch:
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'examples/**'
      - 'test/**'
      - 'tools/**'
      - '!tools/juliapkg/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Julia.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'examples/**'
      - 'test/**'
      - 'tools/**'
      - '!tools/juliapkg/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Julia.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true

jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PR
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"

  format_check:
    name: Julia Format Check
    needs: check-draft
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: julia-actions/setup-julia@v1
        with:
          version: 1.7
          arch: x64

      - name: Format Check
        shell: bash
        run: |
          cd tools/juliapkg
          julia -e "import Pkg; Pkg.add(\"JuliaFormatter\")"
          ./format_check.sh

  main_julia:
    name: Julia ${{ matrix.version }}
    needs: check-draft
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        version:
          - '1.10'
          - '1'
        os:
          - ubuntu-latest
        arch:
          - x64
        isRelease:
          - ${{ github.ref == 'refs/heads/main' }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: julia-actions/setup-julia@v2
        with:
          version: ${{ matrix.version }}
          arch: ${{ matrix.arch }}

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.version }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build DuckDB
        shell: bash
        run: |
          CORE_EXTENSIONS="tpch;icu" BUILD_JEMALLOC=1 make

      - name: Run Tests
        shell: bash
        run: |
          export JULIA_DUCKDB_LIBRARY="`pwd`/build/release/src/libduckdb.so"
          export JULIA_NUM_THREADS=2
          export LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libstdc++.so.6"
          ls $JULIA_DUCKDB_LIBRARY
          cd tools/juliapkg
          julia --project -e "import Pkg; Pkg.test()"
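The Julia job tests the in-tree package against the library it just built, rather than a released binary, by exporting `JULIA_DUCKDB_LIBRARY`. Roughly the same flow reproduced locally, assuming a Linux release build already exists (on macOS the library would be `libduckdb.dylib`):

    # Sketch only: run the Julia package tests against a local libduckdb.
    export JULIA_DUCKDB_LIBRARY="$(pwd)/build/release/src/libduckdb.so"
    export JULIA_NUM_THREADS=2
    cd tools/juliapkg
    julia --project -e 'import Pkg; Pkg.test()'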
211
external/duckdb/.github/workflows/LinuxRelease.yml
vendored
@@ -1,211 +0,0 @@
name: LinuxRelease
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        description: 'Version tag to override git describe'
        type: string
      git_ref:
        description: 'Git ref'
        type: string
      skip_tests:
        description: 'Pass "true" to skip tests'
        type: string
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/LinuxRelease.yml'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/LinuxRelease.yml'

concurrency:
  group: linuxrelease-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}

jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PR
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"

  linux-release-cli:
    needs:
      - check-draft

    strategy:
      fail-fast: false
      matrix:
        config: [ { runner: ubuntu-latest, arch: amd64, image: x86_64}, {runner: ubuntu-24.04-arm, arch: arm64, image: aarch64}]

    name: Linux CLI (${{ matrix.config.arch }})
    runs-on: ${{ matrix.config.runner }}

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - name: Install pytest
        run: |
          python3 -m pip install pytest

      - name: Build
        shell: bash
        run: |
          export PWD=`pwd`
          docker run \
            -v$PWD:$PWD \
            -e CMAKE_BUILD_PARALLEL_LEVEL=2 \
            -e OVERRIDE_GIT_DESCRIBE=$OVERRIDE_GIT_DESCRIBE \
            -e EXTENSION_CONFIGS="$PWD/.github/config/bundled_extensions.cmake" \
            -e ENABLE_EXTENSION_AUTOLOADING=1 \
            -e ENABLE_EXTENSION_AUTOINSTALL=1 \
            -e BUILD_BENCHMARK=1 \
            -e FORCE_WARN_UNUSED=1 \
            quay.io/pypa/manylinux_2_28_${{ matrix.config.image }} \
            bash -c "
            set -e
            yum install -y perl-IPC-Cmd gcc-toolset-12 gcc-toolset-12-gcc-c++

            source /opt/rh/gcc-toolset-12/enable
            export CC=gcc
            export CXX=g++

            git config --global --add safe.directory $PWD
            make -C $PWD
            "

      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"

      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python3 scripts/amalgamation.py
          zip -j duckdb_cli-linux-${{ matrix.config.arch }}.zip build/release/duckdb
          gzip -9 -k -n -c build/release/duckdb > duckdb_cli-linux-${{ matrix.config.arch }}.gz
          zip -j libduckdb-linux-${{ matrix.config.arch }}.zip build/release/src/libduckdb*.* src/amalgamation/duckdb.hpp src/include/duckdb.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-linux-${{ matrix.config.arch }}.zip duckdb_cli-linux-${{ matrix.config.arch }}.zip duckdb_cli-linux-${{ matrix.config.arch }}.gz

      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-binaries-linux-${{ matrix.config.arch }}
          path: |
            libduckdb-linux-${{ matrix.config.arch }}.zip
            duckdb_cli-linux-${{ matrix.config.arch }}.zip
            duckdb_cli-linux-${{ matrix.config.arch }}.gz

      - name: Test
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          python3 scripts/run_tests_one_by_one.py build/release/test/unittest "*" --time_execution

      - name: Tools Tests
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          python3 -m pytest tools/shell/tests --shell-binary build/release/duckdb

      - name: Examples
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          build/release/benchmark/benchmark_runner benchmark/micro/update/update_with_join.benchmark
          build/release/duckdb -c "COPY (SELECT 42) TO '/dev/stdout' (FORMAT PARQUET)" | cat

  upload-libduckdb-src:
    name: Upload libduckdb-src.zip
    needs: linux-release-cli
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python3 scripts/amalgamation.py
          zip -j libduckdb-src.zip src/amalgamation/duckdb.hpp src/amalgamation/duckdb.cpp src/include/duckdb.h src/include/duckdb_extension.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-src.zip

  symbol-leakage:
    name: Symbol Leakage
    runs-on: ubuntu-22.04
    needs: linux-release-cli
    if: ${{ inputs.skip_tests != 'true' }}
    env:
      GEN: ninja

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install Ninja
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make

      - name: Symbol Leakage Test
        shell: bash
        run: python3 scripts/exported_symbols_check.py build/release/src/libduckdb*.so
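The `linux-release-cli` job compiles inside a `manylinux_2_28` container so the shipped binaries link against an older glibc and run on most distributions. The essence of that technique, sketched with the build command reduced to a bare `make`:

    # Sketch only: build inside a manylinux container for glibc portability.
    docker run -v "$PWD:$PWD" quay.io/pypa/manylinux_2_28_x86_64 bash -c "
      set -e
      yum install -y gcc-toolset-12 gcc-toolset-12-gcc-c++
      source /opt/rh/gcc-toolset-12/enable
      git config --global --add safe.directory $PWD
      make -C $PWD
    "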
517
external/duckdb/.github/workflows/Main.yml
vendored
@@ -1,517 +0,0 @@
name: Main
on:
  workflow_dispatch:
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Main.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
      - '!.github/patches/extensions/fts/*.patch' # fts used in some jobs
      - '!.github/config/extensions/fts.cmake'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Main.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
      - '!.github/patches/extensions/fts/*.patch' # fts used in some jobs
      - '!.github/config/extensions/fts.cmake'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}

jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PR
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"

  linux-debug:
    name: Linux Debug
    # This tests release build while enabling slow verifiers (masked by #ifdef DEBUG) and sanitizers
    needs: check-draft
    runs-on: ubuntu-22.04
    env:
      CC: gcc-10
      CXX: g++-10
      TREAT_WARNINGS_AS_ERRORS: 1
      GEN: ninja
      CRASH_ON_ASSERT: 1
      CMAKE_CXX_FLAGS: '-DDEBUG'
      FORCE_ASSERT: 1

    steps:
      - uses: actions/checkout@v4

      - id: describe_step
        run: echo "git_describe=$(git describe --tags --long)" >> "$GITHUB_OUTPUT"

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make release

      - name: Output version info
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA version;"

      - name: Set DUCKDB_INSTALL_LIB for ADBC tests
        shell: bash
        run: echo "DUCKDB_INSTALL_LIB=$(find `pwd` -name "libduck*.so" | head -n 1)" >> $GITHUB_ENV

      - name: Test DUCKDB_INSTALL_LIB variable
        run: echo $DUCKDB_INSTALL_LIB

      - name: Test
        shell: bash
        run: |
          python3 scripts/run_tests_one_by_one.py build/release/test/unittest --tests-per-invocation 100

  linux-release:
    name: Linux Release (full suite)
    needs: check-draft
    runs-on: ubuntu-24.04
    env:
      GEN: ninja
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet"
      DISABLE_SANITIZER: 1

    steps:
      - uses: actions/checkout@v3

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make release

      - name: Test
        shell: bash
        run: make allunit

  no-string-inline:
    name: No String Inline / Destroy Unpinned Blocks
    runs-on: ubuntu-24.04
    needs: linux-configs
    env:
      GEN: ninja
      CORE_EXTENSIONS: "icu;parquet;tpch;tpcds;fts;json;inet"
      DISABLE_STRING_INLINE: 1
      DESTROY_UNPINNED_BLOCKS: 1
      ALTERNATIVE_VERIFY: 1
      DISABLE_POINTER_SALT: 1
      LSAN_OPTIONS: suppressions=${{ github.workspace }}/.sanitizer-leak-suppressions.txt
      DUCKDB_TEST_DESCRIPTION: 'Compiled with ALTERNATIVE_VERIFY=1 DISABLE_STRING_INLINE=1 DESTROY_UNPINNED_BLOCKS=1 DISABLE_POINTER_SALT=1. Use require no_alternative_verify to skip.'

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make relassert

      - name: Test
        shell: bash
        run: build/relassert/test/unittest

  vector-sizes:
    name: Vector Sizes
    runs-on: ubuntu-22.04
    needs: linux-configs
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
      DUCKDB_TEST_DESCRIPTION: 'Compiled with STANDARD_VECTOR_SIZE=2. Use require vector_size 2048 to skip tests.'

    steps:
      - name: Clean up the disc space
        shell: bash
        run: |
          echo "Disk usage before clean up:"
          df -h
          rm -rf /opt/hostedtoolcache/CodeQL Java* Pypy Ruby go node
          echo "Disk usage after clean up:"
          df -h

      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: STANDARD_VECTOR_SIZE=2 make reldebug

      - name: Test
        shell: bash
        run: |
          python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest --no-exit --time_execution

  valgrind:
    name: Valgrind
    if: ${{ !startsWith(github.ref, 'refs/tags/v') }}
    runs-on: ubuntu-24.04
    needs: linux-configs
    env:
      CC: clang
      CXX: clang++
      DISABLE_SANITIZER: 1
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: 'icu;json;parquet;tpch'
      GEN: ninja

    steps:
      - uses: actions/checkout@v4

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build valgrind clang

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make relassert

      - name: Output version info
        shell: bash
        run: ./build/relassert/duckdb -c "PRAGMA version;"

      - name: Test
        shell: bash
        run: valgrind ./build/relassert/test/unittest test/sql/tpch/tpch_sf001.test_slow

  threadsan:
    name: Thread Sanitizer
    needs: linux-configs
    runs-on: ubuntu-24.04
    env:
      CC: clang
      CXX: clang++
      GEN: ninja
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet"
      TSAN_OPTIONS: suppressions=${{ github.workspace }}/.sanitizer-thread-suppressions.txt
      DUCKDB_TEST_DESCRIPTION: 'Tests run with thread sanitizer.'

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build clang

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: THREADSAN=1 make reldebug

      - name: Test
        shell: bash
        run: |
          python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest --no-exit --timeout 600
          python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest "[intraquery]" --no-exit --timeout 600
          python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest "[interquery]" --no-exit --timeout 1800
          python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest "[interquery]" --no-exit --timeout 1800 --force-storage
          python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest "[interquery]" --no-exit --timeout 1800 --force-storage --force-reload
          python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest "[detailed_profiler]" --no-exit --timeout 600
          python3 scripts/run_tests_one_by_one.py build/reldebug/test/unittest test/sql/tpch/tpch_sf01.test_slow --no-exit --timeout 600

  amalgamation-tests:
    name: Amalgamation Tests
    runs-on: ubuntu-22.04
    needs: check-draft
    env:
      CC: clang
      CXX: clang++

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install LLVM and Clang
        uses: KyleMayes/install-llvm-action@v1
        with:
          version: "14.0"

      - name: Generate Amalgamation
        shell: bash
        run: |
          python scripts/amalgamation.py --extended
          clang++ -std=c++17 -Isrc/amalgamation src/amalgamation/duckdb.cpp -emit-llvm -S -O0

  force-blocking-sink-source:
    name: Forcing async Sinks/Sources
    runs-on: ubuntu-24.04
    needs: check-draft
    env:
      GEN: ninja
      CORE_EXTENSIONS: "icu;parquet;tpch;tpcds;fts;json;inet"
      FORCE_ASYNC_SINK_SOURCE: 1

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: make relassert

      - name: Test
        shell: bash
        run: python3 scripts/run_tests_one_by_one.py build/relassert/test/unittest --no-exit --timeout 600

  # TODO: Bring back BLOCK_VERIFICATION: 1, and consider bringing back fts
  # TODO: DEBUG_STACKTRACE: 1 + reldebug ?
  linux-configs:
    name: Tests a release build with different configurations
    runs-on: ubuntu-24.04
    needs: check-draft
    env:
      BASE_BRANCH: ${{ github.base_ref || 'main' }}

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install Ninja
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        id: build
        shell: bash
        env:
          CORE_EXTENSIONS: "json;parquet;icu;tpch;tpcds"
          GEN: ninja
        run: make

      - name: test/configs/encryption.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/encryption.json

      - name: test/configs/force_storage.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/force_storage.json

      - name: test/configs/force_storage_restart.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/force_storage_restart.json

      - name: test/configs/latest_storage.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/latest_storage.json

      - name: test/configs/verify_fetch_row.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/verify_fetch_row.json

      - name: test/configs/wal_verification.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/wal_verification.json

      - name: test/configs/prefetch_all_parquet_files.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/prefetch_all_parquet_files.json

      - name: test/configs/no_local_filesystem.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/no_local_filesystem.json

      - name: test/configs/block_size_16kB.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/block_size_16kB.json

      - name: test/configs/latest_storage_block_size_16kB.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/latest_storage_block_size_16kB.json

      - name: test/configs/enable_verification.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/enable_verification.json

      - name: Test dictionary_expression
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --verify-vector dictionary_expression --skip-compiled

      - name: Test dictionary_operator
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --verify-vector dictionary_operator --skip-compiled

      - name: Test constant_operator
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --verify-vector constant_operator --skip-compiled

      - name: Test sequence_operator
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --verify-vector sequence_operator --skip-compiled

      - name: Test nested_shuffle
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --verify-vector nested_shuffle --skip-compiled

      - name: Test variant_vector
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/variant_vector.json

      - name: Test compressed_in_memory
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/compressed_in_memory.json

      - name: Test block prefetching
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/prefetch_all_storage.json

      - name: Test peg_parser
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/release/test/unittest --test-config test/configs/peg_parser.json
      - name: Forwards compatibility tests
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          python3 scripts/test_storage_compatibility.py --versions "1.2.1|1.3.2" --new-unittest build/release/test/unittest
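Each config step in `linux-configs` is guarded by `(success() || failure()) && steps.build.conclusion == 'success'`, so every configuration runs even after an earlier one fails, provided the build itself succeeded. The plain-shell equivalent of that run-everything-then-fail pattern, with illustrative config names:

    # Sketch only: run every test config, report failure at the end.
    status=0
    for cfg in encryption force_storage wal_verification; do
      ./build/release/test/unittest --test-config "test/configs/$cfg.json" || status=1
    done
    exit $status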
@@ -1,19 +0,0 @@
name: Create Mirror for Discussions
on:
  discussion:
    types:
      - labeled

env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  TITLE_PREFIX: "[duckdb/#${{ github.event.discussion.number }}]"
  PUBLIC_DISCUSSION_TITLE: ${{ github.event.discussion.title }}

jobs:
  create_mirror_issue:
    if: github.event.label.name == 'under review'
    runs-on: ubuntu-latest
    steps:
      - name: Create mirror issue for discussion
        run: |
          gh issue create --repo duckdblabs/duckdb-internal --label "discussion" --title "$TITLE_PREFIX - $PUBLIC_DISCUSSION_TITLE" --body "See https://github.com/duckdb/duckdb/discussions/${{ github.event.discussion.number }}"
@@ -1,42 +0,0 @@
name: Create Documentation issue for the Needs Documentation label
on:
  discussion:
    types:
      - labeled
  issues:
    types:
      - labeled
  pull_request_target:
    types:
      - labeled

env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  # an event triggering this workflow is either an issue or a pull request,
  # hence only one of the numbers will be filled in the TITLE_PREFIX
  TITLE_PREFIX: "[duckdb/#${{ github.event.issue.number || github.event.pull_request.number }}]"
  PUBLIC_ISSUE_TITLE: ${{ github.event.issue.title || github.event.pull_request.title }}

jobs:
  create_documentation_issue:
    if: github.event.label.name == 'Needs Documentation'
    runs-on: ubuntu-latest
    steps:
      - name: Get mirror issue number
        run: |
          gh issue list --repo duckdb/duckdb-web --json title,number --state all --jq ".[] | select(.title | startswith(\"${TITLE_PREFIX}\")).number" > mirror_issue_number.txt
          echo "MIRROR_ISSUE_NUMBER=$(cat mirror_issue_number.txt)" >> ${GITHUB_ENV}

      - name: Print whether mirror issue exists
        run: |
          if [ "${MIRROR_ISSUE_NUMBER}" == "" ]; then
            echo "Mirror issue with title prefix '${TITLE_PREFIX}' does not exist yet"
          else
            echo "Mirror issue with title prefix '${TITLE_PREFIX}' exists with number ${MIRROR_ISSUE_NUMBER}"
          fi

      - name: Create mirror issue if it does not yet exist
        run: |
          if [ "${MIRROR_ISSUE_NUMBER}" == "" ]; then
            gh issue create --repo duckdb/duckdb-web --title "${TITLE_PREFIX} - ${PUBLIC_ISSUE_TITLE} needs documentation" --body "See https://github.com/duckdb/duckdb/issues/${{ github.event.issue.number || github.event.pull_request.number }}"
          fi
687
external/duckdb/.github/workflows/NightlyTests.yml
vendored
@@ -1,687 +0,0 @@
|
||||
name: NightlyTests
|
||||
on:
|
||||
workflow_dispatch:
|
||||
repository_dispatch:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'main'
|
||||
- 'feature'
|
||||
- 'v*.*-*'
|
||||
paths-ignore:
|
||||
- '**'
|
||||
- '!.github/workflows/NightlyTests.yml'
|
||||
- '!.github/patches/duckdb-wasm/**'
|
||||
pull_request:
|
||||
types: [opened, reopened, ready_for_review, converted_to_draft]
|
||||
paths-ignore:
|
||||
- '**'
|
||||
- '!.github/workflows/NightlyTests.yml'
|
||||
- '!.github/patches/duckdb-wasm/**'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GH_TOKEN }}
|
||||
DUCKDB_WASM_VERSION: "cf2048bd6d669ffa05c56d7d453e09e99de8b87e"
|
||||
CCACHE_SAVE: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}
|
||||
|
||||
jobs:
|
||||
check-draft:
|
||||
# We run all other jobs on PRs only if they are not draft PR
|
||||
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Preliminary checks on CI
|
||||
run: echo "Event name is ${{ github.event_name }}"
|
||||
|
||||
linux-memory-leaks:
|
||||
name: Linux Memory Leaks
|
||||
needs: check-draft
|
||||
runs-on: ubuntu-24.04
|
||||
env:
|
||||
GEN: ninja
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Install Ninja
|
||||
shell: bash
|
||||
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
|
||||
|
||||
- name: Setup Ccache
|
||||
uses: hendrikmuhs/ccache-action@main
|
||||
with:
|
||||
key: ${{ github.job }}
|
||||
save: ${{ env.CCACHE_SAVE }}
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: make
|
||||
|
||||
- name: Test
|
||||
shell: bash
|
||||
run: |
|
||||
python3 test/memoryleak/test_memory_leaks.py
|
||||
|
||||
release-assert:
|
||||
name: Release Assertions
|
||||
runs-on: ubuntu-latest
|
||||
needs: linux-memory-leaks
|
||||
env:
|
||||
GEN: ninja
|
||||
BUILD_JEMALLOC: 1
|
||||
CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet;httpfs"
|
||||
DISABLE_SANITIZER: 1
|
||||
CRASH_ON_ASSERT: 1
|
||||
RUN_SLOW_VERIFIERS: 1
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install
|
||||
shell: bash
|
||||
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build libcurl4-openssl-dev
|
||||
|
||||
- name: Setup Ccache
|
||||
uses: hendrikmuhs/ccache-action@main
|
||||
with:
|
||||
key: ${{ github.job }}
|
||||
save: ${{ env.CCACHE_SAVE }}
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: UNSAFE_NUMERIC_CAST=1 make relassert
|
||||
|
||||
- name: Test
|
||||
shell: bash
|
||||
run: |
|
||||
python3 scripts/run_tests_one_by_one.py build/relassert/test/unittest "*" --no-exit --timeout 1200
|
||||
|
||||
release-assert-osx:
|
||||
name: Release Assertions OSX
|
||||
runs-on: macos-latest
|
||||
needs: linux-memory-leaks
|
||||
env:
|
||||
GEN: ninja
|
||||
CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet;httpfs"
|
||||
DISABLE_SANITIZER: 1
|
||||
CRASH_ON_ASSERT: 1
|
||||
RUN_SLOW_VERIFIERS: 1
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Install Ninja
|
||||
run: brew install ninja llvm
|
||||
|
||||
- name: Setup Ccache
|
||||
uses: hendrikmuhs/ccache-action@main
|
||||
with:
|
||||
key: ${{ github.job }}
|
||||
save: ${{ env.CCACHE_SAVE }}
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: CMAKE_LLVM_PATH='/opt/homebrew/opt/llvm' UNSAFE_NUMERIC_CAST=1 make relassert
|
||||
|
||||
- name: Test
|
||||
shell: bash
|
||||
run: |
|
||||
python3 scripts/run_tests_one_by_one.py build/relassert/test/unittest "*" --no-exit --timeout 1200
|
||||
|
||||
release-assert-osx-storage:
|
||||
name: Release Assertions OSX Storage
|
||||
runs-on: macos-latest
|
||||
needs: linux-memory-leaks
|
||||
env:
|
||||
GEN: ninja
|
||||
CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet"
|
||||
DISABLE_SANITIZER: 1
|
||||
CRASH_ON_ASSERT: 1
|
||||
RUN_SLOW_VERIFIERS: 1
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Install Ninja
|
||||
run: brew install ninja
|
||||
|
||||
- name: Setup Ccache
|
||||
uses: hendrikmuhs/ccache-action@main
|
||||
with:
|
||||
key: ${{ github.job }}
|
||||
save: ${{ env.CCACHE_SAVE }}
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: UNSAFE_NUMERIC_CAST=1 make relassert
|
||||
|
||||
- name: Test
|
||||
shell: bash
|
||||
run: |
|
||||
python3 scripts/run_tests_one_by_one.py build/relassert/test/unittest "*" --no-exit --timeout 1200 --force-storage
|
||||
|
||||
smaller-binary:
|
||||
name: Smaller Binary
|
||||
runs-on: ubuntu-24.04
|
||||
needs: linux-memory-leaks
|
||||
env:
|
||||
GEN: ninja
|
||||
BUILD_JEMALLOC: 1
|
||||
CORE_EXTENSIONS: "icu;tpch;tpcds;json"
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install
|
||||
shell: bash
|
||||
run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build
|
||||
|
||||
- name: Setup Ccache
|
||||
uses: hendrikmuhs/ccache-action@main
|
||||
with:
|
||||
key: ${{ github.job }}
|
||||
save: ${{ env.CCACHE_SAVE }}
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: SMALLER_BINARY=1 make
|
||||
|
||||
- name: Measure Size
|
||||
shell: bash
|
||||
run: ls -trlah build/release/src/libduckdb*
|
||||
|
||||
- name: Test
|
||||
shell: bash
|
||||
run: |
|
||||
build/release/test/unittest "*"
|
||||
|
||||
  release-assert-clang:
    name: Release Assertions with Clang
    runs-on: ubuntu-latest
    needs: linux-memory-leaks
    env:
      CC: clang
      CXX: clang++
      GEN: ninja
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: "icu;tpch;tpcds;fts;json;inet;httpfs"
      DISABLE_SANITIZER: 1
      CRASH_ON_ASSERT: 1
      RUN_SLOW_VERIFIERS: 1

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build llvm libcurl4-openssl-dev

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ env.CCACHE_SAVE }}

      - name: Build
        shell: bash
        run: UNSAFE_NUMERIC_CAST=1 make relassert

      - name: Test
        shell: bash
        run: |
          python3 scripts/run_tests_one_by_one.py build/relassert/test/unittest "*" --no-exit --timeout 1200

  sqllogic:
    name: Sqllogic tests
    runs-on: ubuntu-latest # Secondary task of this CI job is to test building duckdb on latest ubuntu
    needs: linux-memory-leaks

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@v1.2.11 # Note: pinned due to GLIBC incompatibility in later releases
        with:
          key: ${{ github.job }}
          save: ${{ env.CCACHE_SAVE }}

      # Build is implied by 'make sqlite', which implicitly invokes 'make release' (we make it explicit here)
      - name: Build
        shell: bash
        run: make release

      - name: Test
        shell: bash
        run: make sqlite

  storage-initialization:
    name: Storage Initialization Verification
    runs-on: ubuntu-22.04
    needs: linux-memory-leaks
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ env.CCACHE_SAVE }}

      - name: Build
        shell: bash
        run: make debug

      - name: Test
        shell: bash
        run: python3 scripts/test_zero_initialize.py

  extension-updating:
    name: Extension updating test
    runs-on: ubuntu-22.04
    needs: linux-memory-leaks
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Cleanup disk before build
        run: |
          echo "Disk usage before clean up:"
          df -h
          sudo apt-get clean
          sudo rm -rf /var/lib/apt/lists/*
          docker system prune -af || true
          rm -rf ~/.cache
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /opt/ghc
          sudo rm -rf "/usr/local/share/boost"
          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
          echo "Disk usage after clean up:"
          df -h

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'

      - name: Install
        shell: bash
        run: pip install awscli

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ env.CCACHE_SAVE }}

      - name: Build
        shell: bash
        run: CORE_EXTENSIONS="tpch" make

      - name: Start Minio
        shell: bash
        run: |
          sudo ./scripts/install_s3_test_server.sh
          ./scripts/generate_presigned_url.sh
          source ./scripts/run_s3_test_server.sh
          source ./scripts/set_s3_test_server_variables.sh
          sleep 60

      - name: Build
        shell: bash
        run: |
          ./scripts/run_extension_medata_tests.sh

  regression-test-memory-safety:
    name: Regression Tests between safe and unsafe builds
    runs-on: ubuntu-22.04
    needs: linux-memory-leaks
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
      BUILD_BENCHMARK: 1
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: "tpch;tpcds;httpfs"

    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

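      # Despite the step name, this checks out a second copy of this repo into
      # ./unsafe; it is built below with memory safety disabled so the safe and
      # unsafe binaries can be benchmarked against each other.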
      - name: Checkout tools repo
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
          path: unsafe

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: |
          sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build libcurl4-openssl-dev && pip install requests

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ env.CCACHE_SAVE }}

      - name: Build
        shell: bash
        run: |
          make

          cd unsafe
          UNSAFE_NUMERIC_CAST=1 DISABLE_MEMORY_SAFETY=1 make

      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark unsafe/

      - name: Regression Test Micro
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old unsafe/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --verbose --threads 2

      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old unsafe/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --verbose --threads 2

      - name: Regression Test TPCDS
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old unsafe/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --verbose --threads 2

      - name: Regression Test H2OAI
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old unsafe/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --verbose --threads 2

      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old unsafe/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --verbose --threads 2

  vector-and-block-sizes:
    name: Tests different vector and block sizes
    runs-on: ubuntu-24.04
    env:
      LSAN_OPTIONS: suppressions=${{ github.workspace }}/.sanitizer-leak-suppressions.txt
    needs: linux-memory-leaks

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ env.CCACHE_SAVE }}

      - name: Build
        id: build
        shell: bash
        env:
          CORE_EXTENSIONS: "json;parquet;icu;tpch;tpcds"
          GEN: ninja
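          # Non-default vector size (the stock build uses STANDARD_VECTOR_SIZE 2048)
          # to flush out hard-coded size assumptions.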
          STANDARD_VECTOR_SIZE: 512
        run: make relassert

      - name: Fast and storage tests with default and small block size
        shell: bash
        run: |
          ./build/relassert/test/unittest
          ./build/relassert/test/unittest "test/sql/storage/*"
          ./build/relassert/test/unittest --test-config test/configs/block_size_16kB.json
          ./build/relassert/test/unittest "test/sql/storage/*" --test-config test/configs/block_size_16kB.json

  linux-debug-configs:
    name: Tests different configurations with a debug build
    runs-on: ubuntu-24.04
    env:
      LSAN_OPTIONS: suppressions=${{ github.workspace }}/.sanitizer-leak-suppressions.txt
    needs: linux-memory-leaks

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ env.CCACHE_SAVE }}

      - name: Build
        id: build
        shell: bash
        env:
          CORE_EXTENSIONS: "json;parquet;icu;tpch;tpcds"
          GEN: ninja
        run: make debug

      - name: test/configs/enable_verification_for_debug.json
        if: (success() || failure()) && steps.build.conclusion == 'success'
        shell: bash
        run: |
          ./build/debug/test/unittest --test-config test/configs/enable_verification_for_debug.json

  linux-wasm-experimental:
    name: WebAssembly duckdb-wasm builds
    # disabled in NightlyTests
    if: false
    needs: check-draft
    runs-on: ubuntu-22.04
    steps:
      - uses: mymindstorm/setup-emsdk@v12
        with:
          version: 'latest'

      - name: Setup
        shell: bash
        run: |
          git clone https://github.com/duckdb/duckdb-wasm
          cd duckdb-wasm
          git checkout ${{ env.DUCKDB_WASM_VERSION }}
          shopt -s nullglob
          for filename in ../.github/patches/duckdb-wasm/*.patch; do
            git apply $filename
          done
          git submodule init
          git submodule update
          git rm -r submodules/duckdb

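      # Replace the duckdb-wasm repo's duckdb submodule (removed above) with the
      # current checkout, so the wasm libraries are built against this revision.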
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          path: duckdb-wasm/submodules/duckdb

      - name: Setup
        shell: bash
        run: |
          cd duckdb-wasm
          make patch_duckdb || echo "done"

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ env.CCACHE_SAVE }}

      - name: Print version
        shell: bash
        run: |
          emcc --version

      - name: Build WebAssembly MVP
        shell: bash
        run: |
          cd duckdb-wasm
          bash scripts/wasm_build_lib.sh relsize mvp $(pwd)/submodules/duckdb

      - name: Build WebAssembly EH
        shell: bash
        run: |
          cd duckdb-wasm
          bash scripts/wasm_build_lib.sh relsize eh $(pwd)/submodules/duckdb

      - name: Build WebAssembly COI
        shell: bash
        run: |
          cd duckdb-wasm
          bash scripts/wasm_build_lib.sh relsize coi $(pwd)/submodules/duckdb

      - name: Package
        shell: bash
        run: |
          zip -r duckdb-wasm32.zip duckdb-wasm/packages/duckdb-wasm/src/bindings

      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-wasm32
          path: |
            duckdb-wasm32.zip

  hash-zero:
    name: Hash Zero
    runs-on: ubuntu-24.04
    needs: linux-memory-leaks
    env:
      GEN: ninja
      CORE_EXTENSIONS: "icu;parquet;tpch;tpcds;fts;json;inet"
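      # HASH_ZERO presumably makes hash functions return zero for every input,
      # forcing all rows onto collision paths (assumption; see DUCKDB_TEST_DESCRIPTION below).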
      HASH_ZERO: 1
      LSAN_OPTIONS: suppressions=${{ github.workspace }}/.sanitizer-leak-suppressions.txt
      DUCKDB_TEST_DESCRIPTION: 'Compiled with HASH_ZERO=1. Use require no_hash_zero to skip.'

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ env.CCACHE_SAVE }}

      - name: Build
        shell: bash
        run: make relassert

      - name: Test
        shell: bash
        run: build/relassert/test/unittest --test-config test/configs/hash_zero.json

  codecov:
    name: Code Coverage
    runs-on: ubuntu-22.04
    needs: linux-memory-leaks
    env:
      GEN: ninja
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build lcov curl g++ zip

      - name: Set up Python 3.9
        uses: actions/setup-python@v5
        with:
          python-version: '3.9'

      - name: Install pytest
        run: |
          python3 -m pip install pytest

      - name: Check Coverage
        shell: bash
        continue-on-error: true
        run: |
          make coverage-check

      - name: Create Archive
        if: ${{ success() || failure() }}
        shell: bash
        run: |
          zip -r coverage.zip coverage_html

      - uses: actions/upload-artifact@v4
        if: ${{ success() || failure() }}
        with:
          name: coverage
          path: coverage.zip
          if-no-files-found: error
@@ -1,115 +0,0 @@
name: Notify External Repositories
on:
  workflow_call:
    inputs:
      duckdb-sha:
        description: 'Vendor Specific DuckDB SHA'
        required: false
        default: ''
        type: 'string'
      target-branch:
        description: 'Which Branch to Target'
        required: true
        default: ''
        type: 'string'
      triggering-event:
        description: 'Which event triggered the run'
        default: ''
        type: 'string'
      should-publish:
        description: 'Should the called workflow push updates or not'
        default: 'false'
        type: 'string'
      is-success:
        description: 'True if all the builds in InvokeCI have succeeded'
        default: 'false'
        type: 'string'
      override-git-describe:
        description: 'The name of the release tag, used for release builds'
        required: false
        default: ''
        type: string
  workflow_dispatch:
    inputs:
      duckdb-sha:
        description: 'Vendor Specific DuckDB SHA'
        required: false
        default: ''
        type: 'string'
      target-branch:
        description: 'Which Branch to Target'
        required: true
        default: ''
        type: 'string'
      triggering-event:
        description: 'Which event triggered the run'
        default: ''
        type: 'string'
      should-publish:
        description: 'Should the called workflow push updates'
        default: 'false'
        type: 'string'
      is-success:
        description: 'True if all the builds in InvokeCI have succeeded'
        default: 'false'
        type: 'string'
      override-git-describe:
        description: 'The name of the release tag, used for release builds'
        required: false
        default: ''
        type: string

concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: false

env:
  PAT_USER: ${{ secrets.PAT_USERNAME }}
  PAT_TOKEN: ${{ secrets.PAT_TOKEN }}

jobs:
  notify-odbc-run:
    name: Run ODBC Vendor
    runs-on: ubuntu-latest
    if: ${{ inputs.is-success == 'true' && inputs.override-git-describe == '' }}
    steps:
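      # Each notify job fires a workflow_dispatch event on the downstream repo via
      # GitHub's REST API, authenticated with the PAT from the env block above.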
      - name: Run ODBC Vendor
        if: ${{ github.repository == 'duckdb/duckdb' }}
        run: |
          export URL=https://api.github.com/repos/duckdb/duckdb-odbc/actions/workflows/Vendor.yml/dispatches
          export DATA='{"ref": "${{ inputs.target-branch }}", "inputs": {"duckdb-sha": "${{ inputs.duckdb-sha }}"}}'
          curl -v -XPOST -u "${PAT_USER}:${PAT_TOKEN}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" $URL --data "$DATA"

  notify-jdbc-run:
    name: Run JDBC Vendor
    runs-on: ubuntu-latest
    if: ${{ inputs.is-success == 'true' && inputs.override-git-describe == '' }}
    steps:
      - name: Run JDBC Vendor
        if: ${{ github.repository == 'duckdb/duckdb' }}
        run: |
          export URL=https://api.github.com/repos/duckdb/duckdb-java/actions/workflows/Vendor.yml/dispatches
          export DATA='{"ref": "${{ inputs.target-branch }}", "inputs": {"duckdb-sha": "${{ inputs.duckdb-sha }}"}}'
          curl -v -XPOST -u "${PAT_USER}:${PAT_TOKEN}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" $URL --data "$DATA"

  notify-nightly-build-status:
    name: Run Nightly build status
    runs-on: ubuntu-latest
    steps:
      - name: Run Nightly build status
        if: ${{ github.repository == 'duckdb/duckdb' }}
        run: |
          export URL=https://api.github.com/repos/duckdb/duckdb-build-status/actions/workflows/NightlyBuildsCheck.yml/dispatches
          export DATA='{"ref": "${{ inputs.target-branch }}", "inputs": {"event": "${{ inputs.triggering-event }}", "should_publish": "${{ inputs.should-publish }}"}}'
          curl -v -XPOST -u "${PAT_USER}:${PAT_TOKEN}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" $URL --data "$DATA"

  notify-python-nightly:
    name: Dispatch Python nightly build
    runs-on: ubuntu-latest
    steps:
      - name: Call /dispatch
        if: ${{ github.repository == 'duckdb/duckdb' && inputs.override-git-describe == '' }}
        run: |
          export URL=https://api.github.com/repos/duckdb/duckdb-python/actions/workflows/release.yml/dispatches
          export DATA='{"ref": "${{ inputs.target-branch }}", "inputs": {"duckdb-sha": "${{ inputs.duckdb-sha }}", "pypi-index": "prod" }}'
          curl -v -XPOST -u "${PAT_USER}:${PAT_TOKEN}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" $URL --data "$DATA"
208
external/duckdb/.github/workflows/OSX.yml
vendored
@@ -1,208 +0,0 @@
name: OSX
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
      run_all:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
      run_all:
        type: string
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/OSX.yml'

concurrency:
  group: osx-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}

jobs:
  xcode-debug:
    name: OSX Debug
    runs-on: macos-14

    env:
      TREAT_WARNINGS_AS_ERRORS: 1
      CMAKE_CXX_FLAGS: '-DDEBUG'
      FORCE_ASSERT: 1

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Install ninja
        shell: bash
        run: brew install ninja

      - name: Build
        shell: bash
        run: GEN=ninja make release

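      # ADBC tests load DuckDB as a shared library; point them at the dylib that was just built.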
      - name: Set DUCKDB_INSTALL_LIB for ADBC tests
        shell: bash
        run: echo "DUCKDB_INSTALL_LIB=$(find `pwd` -name "libduck*.dylib" | head -n 1)" >> $GITHUB_ENV

      - name: Test DUCKDB_INSTALL_LIB variable
        run: echo $DUCKDB_INSTALL_LIB

      - name: Test
        if: ${{ inputs.skip_tests != 'true' }}
        shell: bash
        run: make unittest_release

      - name: Amalgamation
        if: ${{ inputs.skip_tests != 'true' }}
        shell: bash
        run: |
          python scripts/amalgamation.py --extended
          cd src/amalgamation
          clang++ -std=c++11 -O0 -Wall -Werror -emit-llvm -S duckdb.cpp
          clang++ -DNDEBUG -O0 -std=c++11 -Wall -Werror -emit-llvm -S duckdb.cpp
          clang++ -DDEBUG -O0 -std=c++11 -Wall -Werror -emit-llvm -S duckdb.cpp

  xcode-release:
    # Builds binaries for osx_arm64 and osx_amd64
    name: OSX Release
    runs-on: macos-14
    needs: xcode-debug
    env:
      EXTENSION_CONFIGS: '${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake'
      ENABLE_EXTENSION_AUTOLOADING: 1
      ENABLE_EXTENSION_AUTOINSTALL: 1
      OSX_BUILD_UNIVERSAL: 1
      GEN: ninja

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install Ninja
        run: brew install ninja

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Install pytest
        run: |
          python -m pip install pytest

      - name: Build
        shell: bash
        run: make

      - name: Print platform
        shell: bash
        run: ./build/release/duckdb -c "PRAGMA platform;"

      # from https://docs.github.com/en/actions/deployment/deploying-xcode-applications/installing-an-apple-certificate-on-macos-runners-for-xcode-development
      - name: Sign Binaries
        shell: bash
        env:
          BUILD_CERTIFICATE_BASE64: ${{ secrets.OSX_CODESIGN_BUILD_CERTIFICATE_BASE64 }}
          P12_PASSWORD: ${{ secrets.OSX_CODESIGN_P12_PASSWORD }}
          KEYCHAIN_PASSWORD: ${{ secrets.OSX_CODESIGN_KEYCHAIN_PASSWORD }}
          TEAM_ID: ${{ secrets.OSX_NOTARIZE_TEAM_ID }}
          APPLE_ID: ${{ secrets.OSX_NOTARIZE_APPLE_ID }}
          PASSWORD: ${{ secrets.OSX_NOTARIZE_PASSWORD }}
        run: |
          if [[ "$GITHUB_REPOSITORY" = "duckdb/duckdb" ]] ; then
            . scripts/osx_import_codesign_certificate.sh

            echo -e '<?xml version="1.0" encoding="UTF-8"?>\n<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n<plist version="1.0">\n<dict>\n <key>com.apple.security.cs.disable-library-validation</key>\n <true/>\n</dict>\n</plist>' > entitlements.plist
            codesign --options runtime --entitlements entitlements.plist --all-architectures --force --sign "Developer ID Application: Stichting DuckDB Foundation" build/release/duckdb
            codesign --options runtime --entitlements entitlements.plist --all-architectures --force --sign "Developer ID Application: Stichting DuckDB Foundation" build/release/src/libduckdb*.dylib

            zip -j notarize.zip build/release/duckdb build/release/src/libduckdb.dylib
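            # Submit the signed zip to Apple's notary service and wait for the verdict;
            # the JSON response is parsed below with the freshly built duckdb binary.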
            export XCRUN_RESPONSE=$(xcrun notarytool submit --progress --apple-id "$APPLE_ID" --password "$PASSWORD" --team-id "$TEAM_ID" --wait -f json notarize.zip)
            rm notarize.zip
            if [[ $(./build/release/duckdb -csv -noheader -c "SELECT (getenv('XCRUN_RESPONSE')::JSON)->>'status'") != "Accepted" ]] ; then
              echo "Notarization failed!"
              echo $XCRUN_RESPONSE
              exit 1
            fi
          fi

      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python scripts/amalgamation.py
          zip -j duckdb_cli-osx-universal.zip build/release/duckdb
          gzip -9 -k -n -c build/release/duckdb > duckdb_cli-osx-universal.gz
          zip -j libduckdb-osx-universal.zip build/release/src/libduckdb*.dylib src/amalgamation/duckdb.hpp src/include/duckdb.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-osx-universal.zip duckdb_cli-osx-universal.zip duckdb_cli-osx-universal.gz

      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-binaries-osx
          path: |
            libduckdb-osx-universal.zip
            duckdb_cli-osx-universal.zip

      - name: Unit Test
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: make allunit

      - name: Tools Tests
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          python -m pytest tools/shell/tests --shell-binary build/release/duckdb

      - name: Examples
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          (cd examples/embedded-c; make)
          (cd examples/embedded-c++; make)
16
external/duckdb/.github/workflows/OnTag.yml
vendored
@@ -1,16 +0,0 @@
name: On Tag
on:
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
  push:
    tags:
      - 'v[0-9]+.[0-9]+.[0-9]+'

jobs:
  staged_upload:
    uses: ./.github/workflows/StagedUpload.yml
    secrets: inherit
    with:
      target_git_describe: ${{ inputs.override_git_describe || github.ref_name }}
@@ -1,34 +0,0 @@
name: Pull Request Requires Maintainer Approval
on:
  pull_request_target:
    types:
      - labeled

env:
  GH_TOKEN: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
  TITLE_PREFIX: "[duckdb/#${{ github.event.pull_request.number }}]"
  PUBLIC_PR_TITLE: ${{ github.event.pull_request.title }}

jobs:
  create_or_label_issue:
    if: github.event.label.name == 'needs maintainer approval'
    runs-on: ubuntu-latest
    steps:
      - name: Get mirror issue number
        run: |
          gh issue list --repo duckdblabs/duckdb-internal --search "${TITLE_PREFIX}" --json title,number --state all --jq ".[] | select(.title | startswith(\"${TITLE_PREFIX}\")).number" > mirror_issue_number.txt
          echo "MIRROR_ISSUE_NUMBER=$(cat mirror_issue_number.txt)" >> ${GITHUB_ENV}

      - name: Print whether mirror issue exists
        run: |
          if [ "${MIRROR_ISSUE_NUMBER}" == "" ]; then
            echo "Mirror issue with title prefix '${TITLE_PREFIX}' does not exist yet"
          else
            echo "Mirror issue with title prefix '${TITLE_PREFIX}' exists with number ${MIRROR_ISSUE_NUMBER}"
          fi

      - name: Create or label issue
        run: |
          if [ "${MIRROR_ISSUE_NUMBER}" == "" ]; then
            gh issue create --repo duckdblabs/duckdb-internal --label "external action required" --label "Pull request" --title "${TITLE_PREFIX} - ${PUBLIC_PR_TITLE}" --body "Pull request ${TITLE_PREFIX} needs input from maintainers. See https://github.com/duckdb/duckdb/pull/${{ github.event.pull_request.number }}"
          fi
380
external/duckdb/.github/workflows/Regression.yml
vendored
@@ -1,380 +0,0 @@
name: Regression
on:
  workflow_call:
    inputs:
      base_hash:
        type: string
  workflow_dispatch:
    inputs:
      base_hash:
        description: 'Base hash'
        type: string
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Regression.yml'
      - '.github/config/out_of_tree_extensions.cmake'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
      - '!.github/patches/extensions/httpfs/*.patch' # httpfs used in some jobs
      - '!.github/config/extensions/httpfs.cmake'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Regression.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
      - '!.github/patches/extensions/httpfs/*.patch' # httpfs used in some jobs
      - '!.github/config/extensions/httpfs.cmake'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
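  # PRs compare against their base branch; pushes to *_feature branches compare
  # against 'feature', everything else against 'main'.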
  BASE_BRANCH: ${{ github.base_ref || (endsWith(github.ref, '_feature') && 'feature' || 'main') }}
  BASE_HASH: ${{ inputs.base_hash }}

jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PRs
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"

  regression-test-benchmark-runner:
    name: Regression Tests
    needs: check-draft
    runs-on: ubuntu-22.04
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
      BUILD_BENCHMARK: 1
      BUILD_JEMALLOC: 1
      CORE_EXTENSIONS: "json;tpch;tpcds;httpfs;inet;icu"

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build libcurl4-openssl-dev && pip install requests

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Checkout Private Regression
        if: ${{ github.repository == 'duckdb/duckdb' && github.ref == 'refs/heads/main' }}
        uses: actions/checkout@v4
        with:
          repository: duckdblabs/fivetran_regression
          ref: main
          token: ${{ secrets.DUCKDBLABS_BOT_TOKEN }}
          path: benchmark/fivetran

      # For PRs we compare against the base branch
      - name: Build Current and Base Branch
        if: ${{ !(github.repository == 'duckdb/duckdb' && github.ref == 'refs/heads/main') }}
        shell: bash
        run: |
          make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..

      # For NightlyTests we fetch the last commit hash that ran Regression on main
      - name: Build Main and Previous Successful Regression Hash
        if: ${{ github.repository == 'duckdb/duckdb' && github.ref == 'refs/heads/main' }}
        shell: bash
        run: |
          make
          git clone https://github.com/duckdb/duckdb.git
          cd duckdb
          if [[ -z "${BASE_HASH}" ]]; then
            export CHECKOUT_HASH=$(gh run list --repo duckdb/duckdb --branch=main --workflow=Regression --event=repository_dispatch --status=completed --json=headSha --limit=1 --jq '.[0].headSha')
          else
            export CHECKOUT_HASH="$BASE_HASH"
          fi
          git checkout $CHECKOUT_HASH
          make
          cd ..

      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/

      - name: Regression Test Fivetran
        if: ${{ github.repository == 'duckdb/duckdb' && github.ref == 'refs/heads/main' }}
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks benchmark/fivetran/benchmark_list.csv --verbose --threads 2

      - name: Regression Test Micro
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/micro.csv --verbose --threads 2

      - name: Regression Test Ingestion Perf
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/ingestion.csv --verbose --threads 2

      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch.csv --verbose --threads 2

      - name: Regression Test TPCH-PARQUET
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpch_parquet.csv --verbose --threads 2

      - name: Regression Test TPCDS
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/tpcds.csv --verbose --threads 2

      - name: Regression Test H2OAI
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/h2oai.csv --verbose --threads 2

      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/imdb.csv --verbose --threads 2

      - name: Regression Test CSV
        if: always()
        shell: bash
        run: |
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/csv.csv --verbose --threads 2

      - name: Regression Test RealNest
        if: always()
        shell: bash
        run: |
          mkdir -p duckdb_benchmark_data
          rm -R duckdb/duckdb_benchmark_data
          mkdir -p duckdb/duckdb_benchmark_data
          wget -q https://blobs.duckdb.org/data/realnest/realnest.duckdb --output-document=duckdb_benchmark_data/real_nest.duckdb
          cp duckdb_benchmark_data/real_nest.duckdb duckdb/duckdb_benchmark_data/real_nest.duckdb
          python scripts/regression/test_runner.py --old duckdb/build/release/benchmark/benchmark_runner --new build/release/benchmark/benchmark_runner --benchmarks .github/regression/realnest.csv --verbose --threads 2

  regression-test-storage:
    name: Storage Size Regression Test
    needs: check-draft
    runs-on: ubuntu-22.04
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
      CORE_EXTENSIONS: "tpch;tpcds"

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build && pip install requests

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: |
          make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..

      - name: Regression Test
        shell: bash
        run: |
          python scripts/regression_test_storage_size.py --old duckdb/build/release/duckdb --new build/release/duckdb

      - name: Test for incompatibility
        shell: bash
        run: |
          if (cmp test/sql/storage_version/storage_version.db duckdb/test/sql/storage_version/storage_version.db); then
            echo "storage_changed=false" >> $GITHUB_ENV
          else
            echo "storage_changed=true" >> $GITHUB_ENV
          fi

      - name: Regression Compatibility Test (testing bidirectional compatibility)
        shell: bash
        if: env.storage_changed == 'false'
        run: |
          # Regenerate test/sql/storage_version.db with the newer version -> read it with the older version
          python3 scripts/generate_storage_version.py
          ./duckdb/build/release/duckdb test/sql/storage_version/storage_version.db
          # Regenerate test/sql/storage_version.db with the older version -> read it with the newer version (already performed as part of test.slow)
          cd duckdb
          python3 ../scripts/generate_storage_version.py
          ../build/release/duckdb duckdb/test/sql/storage_version/storage_version.db
          cd ..

      - name: Regression Compatibility Test (testing storage version has been bumped)
        shell: bash
        if: env.storage_changed == 'true'
        run: |
          python3 scripts/generate_storage_version.py
          cd duckdb
          python3 scripts/generate_storage_version.py
          cd ..
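          # cmp skips the first 8 bytes and compares the next 12; these offsets
          # presumably cover the storage-version field in the DuckDB file header.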
          if (cmp -i 8 -n 12 test/sql/storage_version.db duckdb/test/sql/storage_version.db); then
            echo "Expected storage format to be bumped, but this is not the case"
            echo "This might fail spuriously if the content of the test database or the generation script has changed"
            exit 1
          else
            echo "Storage bump detected, all good!"
          fi

  regression-test-binary-size:
    name: Regression test binary size
    needs: check-draft
    runs-on: ubuntu-22.04
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
      CORE_EXTENSIONS: "tpch;tpcds;json;parquet"
      EXTENSION_STATIC_BUILD: 1
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build && pip install requests

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: |
          make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..

      - name: Regression Test Extension binary size
        shell: bash
        run: |
          python scripts/regression_test_extension_size.py --old 'duckdb/build/release/extension' --new build/release/extension --expect json,parquet,tpch,tpcds

  regression-test-plan-cost:
    name: Regression Test Join Order Plan Cost
    needs: check-draft
    runs-on: ubuntu-22.04
    env:
      CC: gcc-10
      CXX: g++-10
      GEN: ninja
      CORE_EXTENSIONS: "tpch;httpfs"

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: sudo apt-get update -y -qq && sudo apt-get install -y -qq ninja-build libcurl4-openssl-dev && pip install tqdm

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: |
          make
          git clone --branch ${{ env.BASE_BRANCH }} https://github.com/duckdb/duckdb.git --depth=1
          cd duckdb
          make
          cd ..

      - name: Set up benchmarks
        shell: bash
        run: |
          cp -r benchmark duckdb/

      - name: Regression Test IMDB
        if: always()
        shell: bash
        run: |
          python scripts/plan_cost_runner.py --old duckdb/build/release/duckdb --new build/release/duckdb --dir=benchmark/imdb_plan_cost

      - name: Regression Test TPCH
        if: always()
        shell: bash
        run: |
          python scripts/plan_cost_runner.py --old duckdb/build/release/duckdb --new build/release/duckdb --dir=benchmark/tpch_plan_cost

@@ -1,45 +0,0 @@
name: Staged Upload
on:
  workflow_call:
    inputs:
      target_git_describe:
        type: string
  workflow_dispatch:
    inputs:
      target_git_describe:
        type: string

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}

jobs:
  staged-upload:
    runs-on: ubuntu-latest
    if: ${{ inputs.target_git_describe != '' }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v4
        with:
          python-version: '3.12'

      - name: Install
        shell: bash
        run: pip install awscli

      - name: Download from staging bucket
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
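          # Staging assets are keyed by the short commit hash of the current HEAD.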
          TARGET=$(git log -1 --format=%h)
          mkdir to_be_uploaded
          aws s3 cp --recursive "s3://duckdb-staging/$TARGET/${{ inputs.target_git_describe }}/$GITHUB_REPOSITORY/github_release" to_be_uploaded --region us-east-2

      - name: Deploy
        shell: bash
        run: |
          python3 scripts/asset-upload-gha.py to_be_uploaded/*
87
external/duckdb/.github/workflows/Swift.yml
vendored
@@ -1,87 +0,0 @@
name: Swift
on:
  workflow_dispatch:
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'examples/**'
      - 'test/**'
      - 'tools/**'
      - '!tools/swift/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Swift.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'
  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'examples/**'
      - 'test/**'
      - 'tools/**'
      - '!tools/swift/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Swift.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true

jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PRs
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"

  test-apple-platforms:
    name: Test Apple Platforms
    needs: check-draft
    strategy:
      matrix:
        # destinations need to match the selected version of Xcode
        # https://github.com/actions/runner-images/blob/main/images/macos/macos-14-Readme.md#installed-simulators
        destination:
          - 'macOS'
          - 'iOS Simulator,name=iPhone 16'
          - 'tvOS Simulator,name=Apple TV 4K (at 1080p) (3nd generation)'
        isRelease:
          - ${{ github.ref == 'refs/heads/main' }}
        exclude:
          - isRelease: false
            destination: 'iOS Simulator,name=iPhone 16'
          - isRelease: false
            destination: 'tvOS Simulator,name=Apple TV 4K (at 1080p) (3nd generation)'
    runs-on: macos-14
    steps:

      - name: Checkout
        uses: actions/checkout@v4
        with:
          # we need tags for the ubiquity build script to run without errors
          fetch-depth: '0'

      - name: Prepare Package
        run: python3 tools/swift/create_package.py tools/swift

      - name: Select Xcode
        run: sudo xcode-select -switch /Applications/Xcode_15.4.app && /usr/bin/xcodebuild -version

      - name: Run Tests
        run: |
          xcrun xcodebuild test \
            -workspace tools/swift/duckdb-swift/DuckDB.xcworkspace \
            -scheme DuckDB \
            -destination platform='${{ matrix.destination }}'
@@ -1,70 +0,0 @@
name: SwiftRelease
on:
  workflow_dispatch:
  repository_dispatch:
  push:
    tags:
      - '**'

env:
  SOURCE_REF: ${{ github.event_name == 'release' && github.ref_name || 'main' }}
  TARGET_REPO: 'duckdb/duckdb-swift'
  TARGET_REF: 'main'
  GH_TOKEN: ${{ secrets.GH_TOKEN }}

jobs:
  update:
    name: Update Swift Repo
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Source Repo
        uses: actions/checkout@v4
        with:
          # we need tags for the ubiquity build script to run without errors
          fetch-depth: '0'
          ref: ${{ env.SOURCE_REF }}
          path: 'source-repo'

      - name: Checkout Target Repo
        uses: actions/checkout@v4
        with:
          repository: ${{ env.TARGET_REPO }}
          ref: ${{ env.TARGET_REF }}
          token: ${{ env.GH_TOKEN }}
          path: 'target-repo'

      - name: Generate Swift Package
        run: python3 source-repo/tools/swift/create_package.py source-repo/tools/swift

      - name: Package Update
        run: |
          mkdir updated-repo
          mv -v target-repo/.git updated-repo/.git
          mv -v source-repo/tools/swift/duckdb-swift/* updated-repo/

      - name: Commit Updated Repo
        run: |
          git -C updated-repo config user.name github-actions
          git -C updated-repo config user.email github-actions@github.com
          git -C updated-repo add -A
          if [[ $(git -C updated-repo status --porcelain) ]]; then
            git -C updated-repo commit -m "automated update"
          fi

      - name: Push Update
        run: |
          git -C updated-repo push

      - name: Tag Release
        run: |
          cd source-repo
          export TAG_NAME=`python3 -c "import sys, os; sys.path.append(os.path.join('scripts')); import package_build; print(package_build.git_dev_version())"`
          cd ..
          git -C updated-repo fetch --tags
          if [[ $(git -C updated-repo tag -l $TAG_NAME) ]]; then
            echo 'Tag '$TAG_NAME' already exists - skipping'
          else
            git -C updated-repo tag -a $TAG_NAME -m "Release $TAG_NAME"
            git -C updated-repo push origin $TAG_NAME
          fi
350
external/duckdb/.github/workflows/Windows.yml
vendored
@@ -1,350 +0,0 @@
name: Windows
on:
  workflow_call:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
      run_all:
        type: string
  workflow_dispatch:
    inputs:
      override_git_describe:
        type: string
      git_ref:
        type: string
      skip_tests:
        type: string
      run_all:
        type: string
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Windows.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'

  merge_group:
  pull_request:
    types: [opened, reopened, ready_for_review, converted_to_draft]
    paths-ignore:
      - '**.md'
      - 'test/configs/**'
      - 'tools/**'
      - '!tools/shell/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/Windows.yml'
      - '.github/config/extensions/*.cmake'
      - '.github/patches/extensions/**/*.patch'

concurrency:
  group: windows-${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}-${{ inputs.override_git_describe }}
  cancel-in-progress: true

env:
  GH_TOKEN: ${{ secrets.GH_TOKEN }}
  OVERRIDE_GIT_DESCRIBE: ${{ inputs.override_git_describe }}
  AZURE_CODESIGN_ENDPOINT: https://eus.codesigning.azure.net/
  AZURE_CODESIGN_ACCOUNT: duckdb-signing-2
  AZURE_CODESIGN_PROFILE: duckdb-certificate-profile

jobs:
  check-draft:
    # We run all other jobs on PRs only if they are not draft PRs
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: ubuntu-24.04
    steps:
      - name: Preliminary checks on CI
        run: echo "Event name is ${{ github.event_name }}"

  win-release-64:
    # Builds binaries for windows_amd64
    name: Windows (64 Bit)
    needs: check-draft
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: |
          python scripts/windows_ci.py
          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_GENERATOR_PLATFORM=x64 -DENABLE_EXTENSION_AUTOLOADING=1 -DENABLE_EXTENSION_AUTOINSTALL=1 -DDUCKDB_EXTENSION_CONFIGS="${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake" -DDISABLE_UNITY=1 -DOVERRIDE_GIT_DESCRIBE="$OVERRIDE_GIT_DESCRIBE"
          cmake --build . --config Release --parallel

      - name: Set DUCKDB_INSTALL_LIB for ADBC tests
        shell: pwsh
        run: echo "DUCKDB_INSTALL_LIB=$((Get-ChildItem -Recurse -Filter "duckdb.dll" | Select-Object -First 1).FullName)" >> $GITHUB_ENV

      - name: Test DUCKDB_INSTALL_LIB variable
        shell: bash
        run: echo $DUCKDB_INSTALL_LIB

      - name: Test
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          test/Release/unittest.exe

      - name: Test with VS2019 C++ stdlib
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          choco install wget -y --no-progress
          wget -P ./test/Release https://blobs.duckdb.org/ci/msvcp140.dll
          ls ./test/Release
          ./test/Release/unittest.exe
          rm ./test/Release/msvcp140.dll

      - name: Tools Test
        shell: bash
        if: ${{ inputs.skip_tests != 'true' }}
        run: |
          python -m pip install pytest
          python -m pytest tools/shell/tests --shell-binary Release/duckdb.exe
          tools/sqlite3_api_wrapper/Release/test_sqlite3_api_wrapper.exe

      - name: Sign files with Azure Trusted Signing (TM)
        if: github.repository == 'duckdb/duckdb' && github.event_name != 'pull_request'
        uses: azure/trusted-signing-action@v0
        with:
          azure-tenant-id: ${{ secrets.AZURE_CODESIGN_TENANT_ID }}
          azure-client-id: ${{ secrets.AZURE_CODESIGN_CLIENT_ID }}
          azure-client-secret: ${{ secrets.AZURE_CODESIGN_CLIENT_SECRET }}
          endpoint: ${{ env.AZURE_CODESIGN_ENDPOINT }}
          trusted-signing-account-name: ${{ env.AZURE_CODESIGN_ACCOUNT }}
          certificate-profile-name: ${{ env.AZURE_CODESIGN_PROFILE }}
          files-folder: ${{ github.workspace }}
          files-folder-filter: exe,dll
          files-folder-recurse: true
          file-digest: SHA256
          timestamp-rfc3161: http://timestamp.acs.microsoft.com
          timestamp-digest: SHA256

      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python scripts/amalgamation.py
          /c/msys64/usr/bin/bash.exe -lc "pacman -Sy --noconfirm zip"
          /c/msys64/usr/bin/zip.exe -j duckdb_cli-windows-amd64.zip Release/duckdb.exe
          /c/msys64/usr/bin/zip.exe -j libduckdb-windows-amd64.zip src/Release/duckdb.dll src/Release/duckdb.lib src/amalgamation/duckdb.hpp src/include/duckdb.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-windows-amd64.zip duckdb_cli-windows-amd64.zip

      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-binaries-windows-amd64
          path: |
            libduckdb-windows-amd64.zip
            duckdb_cli-windows-amd64.zip

      - uses: ilammy/msvc-dev-cmd@v1
      - name: Duckdb.dll export symbols with C++ on Windows
        shell: bash
        run: cl -I src/include examples/embedded-c++-windows/cppintegration.cpp -link src/Release/duckdb.lib

  win-release-32:
    name: Windows (32 Bit)
    needs:
      - win-release-64
      - check-draft
    if: ${{ github.ref == 'refs/heads/main' || github.repository != 'duckdb/duckdb' || inputs.run_all == 'true' }}
    runs-on: windows-latest

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: |
          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_GENERATOR_PLATFORM=Win32 -DDUCKDB_EXTENSION_CONFIGS="${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake" -DOVERRIDE_GIT_DESCRIBE="$OVERRIDE_GIT_DESCRIBE"
          cmake --build . --config Release --parallel

      - name: Test
        shell: bash
        run: test/Release/unittest.exe

      - name: Tools Test
        shell: bash
        run: |
          python -m pip install pytest
          python -m pytest tools/shell/tests --shell-binary Release/duckdb.exe
          tools/sqlite3_api_wrapper/Release/test_sqlite3_api_wrapper.exe

  win-release-arm64:
    name: Windows (ARM64)
    needs:
      - win-release-64
      - check-draft
    if: ${{ github.ref == 'refs/heads/main' || github.repository != 'duckdb/duckdb' || inputs.run_all == 'true' }}
    runs-on: windows-latest

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.git_ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: bash
        run: |
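          # Cross-compiled for windows_arm64 on an x64 runner; unit tests are not
          # built (BUILD_UNITTESTS=FALSE), presumably because the ARM64 binaries
          # cannot run on this host.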
          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_GENERATOR_PLATFORM=ARM64 -DDUCKDB_EXTENSION_CONFIGS="${GITHUB_WORKSPACE}/.github/config/bundled_extensions.cmake" -DOVERRIDE_GIT_DESCRIBE="$OVERRIDE_GIT_DESCRIBE" -DDUCKDB_EXPLICIT_PLATFORM=windows_arm64 -DDUCKDB_CUSTOM_PLATFORM=windows_arm64 -DBUILD_UNITTESTS=FALSE
          cmake --build . --config Release --parallel

      - name: Sign files with Azure Trusted Signing (TM)
        if: github.repository == 'duckdb/duckdb' && github.event_name != 'pull_request'
        uses: azure/trusted-signing-action@v0
        with:
          azure-tenant-id: ${{ secrets.AZURE_CODESIGN_TENANT_ID }}
          azure-client-id: ${{ secrets.AZURE_CODESIGN_CLIENT_ID }}
          azure-client-secret: ${{ secrets.AZURE_CODESIGN_CLIENT_SECRET }}
          endpoint: ${{ env.AZURE_CODESIGN_ENDPOINT }}
          trusted-signing-account-name: ${{ env.AZURE_CODESIGN_ACCOUNT }}
          certificate-profile-name: ${{ env.AZURE_CODESIGN_PROFILE }}
          files-folder: ${{ github.workspace }}
          files-folder-filter: exe,dll
          files-folder-recurse: true
          file-digest: SHA256
          timestamp-rfc3161: http://timestamp.acs.microsoft.com
          timestamp-digest: SHA256

      - name: Deploy
        shell: bash
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DUCKDB_STAGING_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DUCKDB_STAGING_KEY }}
        run: |
          python scripts/amalgamation.py
          /c/msys64/usr/bin/bash.exe -lc "pacman -Sy --noconfirm zip"
          /c/msys64/usr/bin/zip.exe -j duckdb_cli-windows-arm64.zip Release/duckdb.exe
          /c/msys64/usr/bin/zip.exe -j libduckdb-windows-arm64.zip src/Release/duckdb.dll src/Release/duckdb.lib src/amalgamation/duckdb.hpp src/include/duckdb.h
          ./scripts/upload-assets-to-staging.sh github_release libduckdb-windows-arm64.zip duckdb_cli-windows-arm64.zip

      - uses: actions/upload-artifact@v4
        with:
          name: duckdb-binaries-windows-arm64
          path: |
            libduckdb-windows-arm64.zip
            duckdb_cli-windows-arm64.zip

  mingw:
    name: MinGW (64 Bit)
    needs:
      - win-release-64
      - check-draft
    if: ${{ inputs.skip_tests != 'true' }}
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.git_ref }}
      - uses: msys2/setup-msys2@v2
        with:
          msystem: MINGW64
          update: true
          install: git mingw-w64-x86_64-toolchain mingw-w64-x86_64-cmake mingw-w64-x86_64-ninja
          cache: ${{ github.ref == 'refs/heads/main' || github.repository != 'duckdb/duckdb' }}

      # see here: https://gist.github.com/scivision/1de4fd6abea9ba6b2d87dc1e86b5d2ce
      - name: Put MSYS2_MinGW64 on PATH
        # there is not yet an environment variable for this path from msys2/setup-msys2
        shell: msys2 {0}
        run: export PATH=D:/a/_temp/msys/msys64/mingw64/bin:$PATH

      - name: Setup Ccache
        uses: hendrikmuhs/ccache-action@main
        with:
          key: ${{ github.job }}
          save: ${{ vars.BRANCHES_TO_BE_CACHED == '' || contains(vars.BRANCHES_TO_BE_CACHED, github.ref) }}

      - name: Build
        shell: msys2 {0}
        run: |
          cmake -G "Ninja" -DCMAKE_BUILD_TYPE=Release -DBUILD_EXTENSIONS='parquet' -DOVERRIDE_GIT_DESCRIBE="$OVERRIDE_GIT_DESCRIBE"
          cmake --build . --config Release

      - name: Test
        shell: msys2 {0}
        run: |
          cp src/libduckdb.dll .
          test/unittest.exe

      - name: Tools Test
        shell: msys2 {0}
        run: |
          tools/sqlite3_api_wrapper/test_sqlite3_api_wrapper.exe

  win-packaged-upload:
    runs-on: windows-latest
    needs:
      - win-release-64
      - win-release-arm64
    steps:
      - uses: actions/download-artifact@v4
        with:
          name: duckdb-binaries-windows-arm64

      - uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: duckdb-binaries-windows-amd64
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: duckdb-binaries-windows
|
||||
path: |
|
||||
libduckdb-windows-amd64.zip
|
||||
duckdb_cli-windows-amd64.zip
|
||||
libduckdb-windows-arm64.zip
|
||||
duckdb_cli-windows-arm64.zip
|
||||
@@ -1,46 +0,0 @@
# This is a reusable workflow to be used by extensions based on the extension template

name: Client Tests
on:
  workflow_call:
    inputs:
      duckdb_version:
        required: true
        type: string

jobs:
  python:
    name: Python
    runs-on: ubuntu-latest
    env:
      GEN: ninja

    steps:
      - name: Install Ninja
        run: |
          sudo apt-get update -y -qq
          sudo apt-get install -y -qq ninja-build

      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: 'true'

      - name: Checkout DuckDB to version
        run: |
          cd duckdb
          git checkout ${{ inputs.duckdb_version }}

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Build DuckDB Python client
        run: make debug_python

      - name: Install Python test dependencies
        run: python -m pip install --upgrade pytest

      - name: Run Python client tests
        run: |
          make test_debug_python
@@ -1,68 +0,0 @@
name: Extension Build and Deploy
on:
  workflow_call:
    inputs:
      artifact_prefix:
        required: true
        type: string
      extension_config:
        required: true
        type: string
      exclude_archs:
        required: false
        type: string
        default: ''
      extra_toolchains:
        required: false
        type: string
        default: ""
      duckdb_ref:
        required: false
        type: string
        default: ""
      override_tag:
        required: false
        type: string
        default: ""
      skip_tests:
        required: false
        type: boolean
        default: false
      save_cache:
        required: false
        type: boolean
        default: false

jobs:
  build:
    name: Build
    uses: duckdb/extension-ci-tools/.github/workflows/_extension_distribution.yml@main
    with:
      # We piggy-back on the extension template to build the extensions in extension_config; it's hacky, but it works ¯\_(ツ)_/¯
      override_repository: duckdb/extension-template
      override_ref: v1.4-andium

      # Note: when `upload_all_extensions` is true, the extension name is used as a prefix for the artifact holding all built extensions
      upload_all_extensions: true
      extension_name: ${{ inputs.artifact_prefix }}

      # The DuckDB version is overridden to the current commit of the current repository
      set_caller_as_duckdb: true
      duckdb_version: ${{ github.sha }}

      # CI tools is pinned to main
      override_ci_tools_repository: duckdb/extension-ci-tools
      ci_tools_version: main

      exclude_archs: ${{ inputs.exclude_archs }}

      extra_toolchains: ${{ inputs.extra_toolchains }}
      use_merged_vcpkg_manifest: '1'

      duckdb_tag: ${{ inputs.override_tag }}

      skip_tests: ${{ inputs.skip_tests }}
      save_cache: ${{ inputs.save_cache }}

      # The extension_config.cmake configuration that gets built
      extra_extension_config: ${{ inputs.extension_config }}
51  external/duckdb/.github/workflows/cifuzz.yml  vendored
@@ -1,51 +0,0 @@
name: CIFuzz
on:
  workflow_dispatch:
  repository_dispatch:
  push:
    branches-ignore:
      - 'main'
      - 'feature'
      - 'v*.*-*'
    paths-ignore:
      - '**.md'
      - 'tools/**'
      - '.github/patches/duckdb-wasm/**'
      - '.github/workflows/**'
      - '!.github/workflows/cifuzz.yml'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true

jobs:
  Fuzzing:
    name: OSSFuzz
    if: github.repository == 'duckdb/duckdb'
    strategy:
      fail-fast: false
      matrix:
        sanitizer: [address, undefined, memory]
    runs-on: ubuntu-latest
    steps:
      - name: Build Fuzzers ${{ matrix.sanitizer }}
        id: build
        uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master
        with:
          oss-fuzz-project-name: 'duckdb'
          dry-run: false
          sanitizer: ${{ matrix.sanitizer }}
      - name: Run Fuzzers ${{ matrix.sanitizer }}
        uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master
        with:
          oss-fuzz-project-name: 'duckdb'
          fuzz-seconds: 3600
          dry-run: false
          sanitizer: ${{ matrix.sanitizer }}
      - name: Upload Crash
        uses: actions/upload-artifact@v4
        if: failure() && steps.build.outcome == 'success'
        with:
          name: artifacts-${{ matrix.sanitizer }}
          path: ./out/artifacts
@@ -1,9 +0,0 @@
Thanks for opening this issue! Based on our automated check, it seems that your post contains some code but it does not use [code blocks](https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/creating-and-highlighting-code-blocks) to format it.

Please double-check your post and revise it if necessary. To employ syntax highlighting, it's recommended to use code blocks with triple backticks, e.g.:

````
```sql
SELECT ...
```
````

If this is a false positive, feel free to disregard this comment.
53  external/duckdb/.github/workflows/coverity.yml  vendored
@@ -1,53 +0,0 @@
# Creates and uploads a Coverity build on a schedule
# Requires that two secrets be created:
# COVERITY_SCAN_EMAIL, with the email address that should be notified with scan results
# COVERITY_SCAN_TOKEN, with the token from the Coverity project page (e.g., https://scan.coverity.com/projects/moshekaplan-duckdb?tab=project_settings )
# Also, ensure that the 'github.repository' comparison and 'COVERITY_PROJECT_NAME' values below are accurate
name: Coverity Scan
on:
  repository_dispatch:
  # Run once daily (via repository_dispatch); duckdb is at ~900k LOC
  # Scan frequency limits from https://scan.coverity.com/faq#frequency :
  # Up to 28 builds per week, with a maximum of 4 builds per day, for projects with fewer than 100K lines of code
  # Up to 21 builds per week, with a maximum of 3 builds per day, for projects with 100K to 500K lines of code
  # Up to 14 builds per week, with a maximum of 2 builds per day, for projects with 500K to 1 million lines of code
  # Up to 7 builds per week, with a maximum of 1 build per day, for projects with more than 1 million lines of code
  # Support manual execution
  workflow_dispatch:

jobs:
  coverity:
    # So it doesn't try to run on forks
    if: github.repository == 'duckdb/duckdb'
    runs-on: ubuntu-latest
    env:
      COVERITY_PROJECT_NAME: DuckDB
    steps:
      - uses: actions/checkout@v4
      - name: Download and extract the Coverity Build Tool
        run: |
          wget https://scan.coverity.com/download/cxx/linux64 --post-data "token=${{ secrets.COVERITY_SCAN_TOKEN }}&project=${{ env.COVERITY_PROJECT_NAME }}" -O cov-analysis-linux64.tar.gz
          mkdir cov-analysis-linux64
          tar xzf cov-analysis-linux64.tar.gz --strip 1 -C cov-analysis-linux64
      - name: Install dependencies
        run: sudo apt update -y -qq && sudo apt install -y git g++ cmake ninja-build libssl-dev default-jdk

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Build with cov-build
        run: cov-analysis-linux64/bin/cov-build --dir cov-int make
        env:
          BUILD_TPCE: 1
          CORE_EXTENSIONS: "autocomplete;icu;tpcds;tpch;fts;httpfs;json;inet"

      - name: Upload the result
        run: |
          tar czvf cov-int.tgz cov-int
          curl \
            --form project=${{ env.COVERITY_PROJECT_NAME }} \
            --form email=${{ secrets.COVERITY_SCAN_EMAIL }} \
            --form token=${{ secrets.COVERITY_SCAN_TOKEN }} \
            --form file=@cov-int.tgz \
            https://scan.coverity.com/builds
22  external/duckdb/.github/workflows/lcov_exclude  vendored
@@ -1,22 +0,0 @@
/usr*
*/cl.hpp
*/tools/shell/*
*/tools/sqlite3_api_wrapper/*
*/benchmark/*
*/examples/*
*/third_party/*
*/test/*
*/extension/autocomplete/*
*/extension/fts/*
*/extension/icu/*
*/extension/jemalloc/*
*/extension/tpcds/*
*/extension/tpch/*
*/extension/json/yyjson/*
*/extension_helper.cpp
*/generated_extension_loader.hpp
*/adbc/*
*/enum_util.cpp
*/enums/expression_type.cpp
*/serialization/*
*/json_enums.cpp
2  external/duckdb/.github/workflows/lcovrc  vendored
@@ -1,2 +0,0 @@
lcov_excl_line = default:|InternalException|NotImplementedException|IOException|SerializationException|LCOV_EXCL_LINE|Print
lcov_excl_br_line = InternalException|NotImplementedException|IOException|SerializationException|LCOV_EXCL_BR_LINE|Print
360  external/duckdb/.gitignore  vendored
@@ -1,360 +0,0 @@
#==============================================================================#
# This file specifies intentionally untracked files that git should ignore.
#==============================================================================#

#==============================================================================#
# File extensions to be ignored anywhere in the tree.
#==============================================================================#
# Temp files created by most text editors.
*~
# Merge files created by git.
*.orig
# Java bytecode
*.class
# Byte compiled python modules.
*.pyc
# vim swap files
.*.sw?
.sw?
#OS X specific files.
.DS_store
# Core files
#core

#==============================================================================#
# Explicit files to ignore (only matches one).
#==============================================================================#
# Various tag programs
/tags
/TAGS
/GPATH
/GRTAGS
/GSYMS
/GTAGS
.gitusers
autom4te.cache
cscope.files
cscope.out
autoconf/aclocal.m4
autoconf/autom4te.cache
/compile_commands.json
/.wasm

#==============================================================================#
# Directories to ignore (do not add trailing '/'s, they skip symlinks).
#==============================================================================#
# External projects that are tracked independently.
projects/*
!projects/*.*
!projects/Makefile


#==============================================================================#
# Autotools artifacts
#==============================================================================#
config/
configure
config-h.in
autom4te.cache
*Makefile.in
third_party/*/Makefile
libtool
aclocal.m4
config.log
config.status
stamp-h1
m4/libtool.m4
m4/ltoptions.m4
m4/ltsugar.m4
m4/ltversion.m4
m4/lt~obsolete.m4

#==============================================================================#
# Build artifacts
#==============================================================================#
#m4/
build/
duckdb_benchmark_data/
#*.m4
*.o
*.lo
*.la
*~
*.pdf
*.swp
a.out

#==============================================================================#
# Kate Swap Files
#==============================================================================#
*.kate-swp
.#kate-*

#==============================================================================#
# Backup artifacts
#==============================================================================#
~*
*~
tmp/

#==============================================================================#
# KDevelop files
#==============================================================================#
.kdev4
*.kdev4
.dirstamp
.deps
.libs

#==============================================================================#
# Eclipse files
#==============================================================================#
.wtpmodules
.classpath
.project
.cproject
.pydevproject
.settings
.autotools
.csettings

/Debug/
/misc/

#==============================================================================#
# Intellij files
#==============================================================================#
.idea
*.iml

#==============================================================================#
# Code Coverage files
#==============================================================================#
*.gcno
*.gcda

#==============================================================================#
# Scripts
#==============================================================================#
*.jar
scripts/PelotonTest/out
scripts/PelotonTest/lib

#==============================================================================#
# Protobuf
#==============================================================================#
*.pb-c.c
*.pb-c.h
*.pb.go

#==============================================================================#
# Third party
#==============================================================================#
third_party/nanomsg/
third_party/nvml/
third_party/logcabin/

#==============================================================================#
# Eclipse
#==============================================================================#

.metadata
bin/
tmp/
*.tmp
*.bak
*.swp
*~.nib
local.properties
.settings/
.loadpath
.recommenders

# Clang language server (C/C++ tooling)
/.cache/clangd
.ccls-cache

# Eclipse Core
.project

# External tool builders
.externalToolBuilders/

# Locally stored "Eclipse launch configurations"
*.launch

# PyDev specific (Python IDE for Eclipse)
*.pydevproject

# CDT-specific (C/C++ Development Tooling)
.cproject

# JDT-specific (Eclipse Java Development Tools)
.classpath

# RStudio specific
.Rproj.user

# Java annotation processor (APT)
.factorypath

# PDT-specific (PHP Development Tools)
.buildpath

# sbteclipse plugin
.target

# Tern plugin
.tern-project

# TeXlipse plugin
.texlipse

# STS (Spring Tool Suite)
.springBeans

# Code Recommenders
.recommenders/
io_file

## General

# Compiled Object files
*.slo
*.lo
*.o
*.cuo

# Compiled Dynamic libraries
*.so
*.dylib
*.dll

# Compiled Static libraries
*.lai
*.la
*.a
*.lib

# Compiled protocol buffers
*_pb2.py

# Compiled python
*.pyc

# Compiled MATLAB
*.mex*

# IPython notebook checkpoints
.ipynb_checkpoints

# Python virtual env
venv/

# Editor temporaries
*.swp
*~

# Sublime Text settings
*.sublime-workspace
*.sublime-project

# Eclipse Project settings
*.*project
.settings
.csettings

# Visual Studio
.vs
settings.json
.vscode

# QtCreator files
*.user

# PyCharm files
.idea

# OSX dir files
.DS_Store

# User's build configuration
Makefile.config

# build, distribute, and bins (+ python proto bindings)
build
.build_debug/*
.build_release/*
distribute/*
*.testbin
*.bin
cmake_build
.cmake_build
cmake-build-debug
cmake-build-release
cmake-build-relwithdebinfo
CMakeUserPresets.json

# Generated documentation
docs

# tests
test/test.sql
duckdb_benchmark_data/

# SQLite logic tests
test/evidence/
third_party/sqllogictest

#imdb dataset
third_party/imdb/data
#taxi dataset
benchmark/taxi/data/

# Format timer
.last_format
# Benchmarks
.last_benchmarked_commit
benchmark_results/
duckdb_unittest_tempdir/
grammar.y.tmp
src/amalgamation/
# single file compile
amalgamation.cache
dependencies.d
deps.s
duckdb.cpp.tmp
duckdb.hpp.tmp
# .Rcheck
tools/duckdb.Rcheck/
tools/*.tar.gz
.Rhistory
ub_*.cpp

# node tests
__nvm

*.vcxproj*
*.sln

# Zig files
zig-out/*gs
zig-cache/*
*.zig

# .db files
*.db
*.db.gz

# local cmake extension config
extension/extension_config_local.cmake

# extension_external dir
extension_external

# pyodide (emscripten python) build and test environment
.pyodide-xbuildenv
.venv-pyodide

test/sql/pragma/output.json
tools/pythonpkg/duckdb_build/
@@ -1,7 +0,0 @@
# dsdgen extension global statics
leak:load_dist
leak:find_dist
leak:read_dist
leak:init_text_pool
leak:makePermutation
leak:init_params
@@ -1,8 +0,0 @@
deadlock:InitializeIndexes
race:InsertMatchesAndIncrementMisses
race:NextInnerJoin
race:NextRightSemiOrAntiJoin
race:duckdb_moodycamel
race:*duckdb/extension/jemalloc/jemalloc/*
race:AddToEvictionQueue
race:ValidityAppend
11  external/duckdb/CITATION.cff  vendored
@@ -1,11 +0,0 @@
cff-version: 1.2.0
message: "If you use this software, please cite it as below."
authors:
  - family-names: "Raasveldt"
    given-names: "Mark"
    orcid: "https://orcid.org/0000-0001-5005-6844"
  - family-names: "Muehleisen"
    given-names: "Hannes"
    orcid: "https://orcid.org/0000-0001-8552-0029"
title: "DuckDB"
url: "https://github.com/duckdb/duckdb"
1512  external/duckdb/CMakeLists.txt  vendored
File diff suppressed because it is too large
15  external/duckdb/CODE_OF_CONDUCT.md  vendored
@@ -1,15 +0,0 @@
# DuckDB Code of Conduct

**All creatures are welcome**: We aim to create a safe space for all community members, regardless of their age, race, gender, sexual orientation, physical appearance or disability, choice of text editor, or any other qualities by which living beings can be discriminated.

**Be excellent to each other**: We do not tolerate verbal or physical harassment, violence or intimidation.

We do not tolerate life forms who refuse to share this openness and respect towards others: Creatures that are not excellent to others are not welcome.

We continuously strive to make our community a better place for everyone – in the best tradition of hackers we "build, test, improve, reiterate". In this ongoing adventure, we rely on the support, courage, and creativity of all members of the DuckDB community.

If you are made uncomfortable in your role as DuckDB community member, please let us know: You can reach us at quack@duckdb.org. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident.

#### Attribution

This Code of Conduct is adapted from the [CCC event CoC](https://www.ccc.de/en/updates/2016/a-reminder-to-be-excellent-to-each-other)
132  external/duckdb/CONTRIBUTING.md  vendored
@@ -1,132 +0,0 @@
# Contributing

## Code of Conduct

This project and everyone participating in it is governed by a [Code of Conduct](CODE_OF_CONDUCT.md). By participating, you are expected to uphold this code. Please report unacceptable behavior to [quack@duckdb.org](mailto:quack@duckdb.org).

## **Did you find a bug?**

* **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/duckdb/duckdb/issues).
* If you're unable to find an open issue addressing the problem, [open a new one](https://github.com/duckdb/duckdb/issues/new/choose). Be sure to include a **title and clear description**, as much relevant information as possible, and a **code sample** or an **executable test case** demonstrating the expected behavior that is not occurring.

## **Did you write a patch that fixes a bug?**

* Great!
* If possible, add a unit test case to make sure the issue does not occur again.
* Make sure you run the code formatter (`make format-fix`).
* Open a new GitHub pull request with the patch.
* Ensure the PR description clearly describes the problem and solution. Include the relevant issue number if applicable.

## Outside Contributors

* Discuss your intended changes with the core team on GitHub.
* Announce that you are working, or want to work, on a specific issue.
* Avoid large pull requests - they are much less likely to be merged, as they are incredibly hard to review.

## Pull Requests

* Do not commit/push directly to the main branch. Instead, create a fork and file a pull request.
* When maintaining a branch, merge frequently with the main branch.
* When maintaining a branch, submit pull requests to the main branch frequently.
* If you are working on a bigger issue, try to split it up into several smaller issues.
* Please do not open "Draft" pull requests. Rather, use issues or discussion topics to discuss whatever needs discussing.
* We reserve full and final discretion over whether or not we will merge a pull request. Adhering to these guidelines is not a complete guarantee that your pull request will be merged.

## CI for pull requests

* Pull requests will need to pass all continuous integration checks before merging.
* For faster iteration and more control, consider running CI on your own fork or, when possible, directly locally.
* Submitting changes to an open pull request will move it to 'draft' state.
* Pull requests will get a complete run on the main repo CI only when marked as 'ready for review' (via the Web UI, button on the bottom right).

## Nightly CI

* Package creation and long-running tests are performed during a nightly run.
* On your fork you can trigger the long-running tests (NightlyTests.yml) for any branch, following the instructions from https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow

## Building

* To build the project, run `make`.
* To build the project for debugging, run `make debug`.
* For parallel builds, you can use the [Ninja](https://ninja-build.org/) build system: `GEN=ninja make`.
* The default number of parallel processes can lock up the system depending on the CPU-to-memory ratio. If this happens, restrict the maximum number of build processes: `CMAKE_BUILD_PARALLEL_LEVEL=4 GEN=ninja make`.
* Without using Ninja, build times can still be reduced by setting `CMAKE_BUILD_PARALLEL_LEVEL=$(nproc)`.

## Testing

* Unit tests can be written either using the sqllogictest framework (`.test` files) or in C++ directly. We **strongly** prefer tests to be written using the sqllogictest framework. Only write tests in C++ if you absolutely need to (e.g., when testing concurrent connections or other exotic behavior); see the sketch after this list for what such a test can look like.
* Documentation for the testing framework can be found [here](https://duckdb.org/dev/testing).
* Write many tests.
* Test with different types, especially numerics, strings, and complex nested types.
* Try to test unexpected/incorrect usage as well, instead of only the happy path.
* `make unit` runs the **fast** unit tests (~one minute); `make allunit` runs **all** unit tests (~one hour).
* Make sure **all** unit tests pass before sending a PR.
* Slower tests should be added to the **all** unit tests. You can do this by naming the test file `.test_slow` in the sqllogictests, or by adding `[.]` after the test group in the C++ tests.
* Look at the code coverage report of your branch and attempt to cover all code paths in the fast unit tests. Attempt to trigger exceptions as well. It is acceptable to have some exceptions not triggered (e.g., out-of-memory exceptions or type switch exceptions), but large branches of code should always be either covered or removed.
* DuckDB uses GitHub Actions as its continuous integration (CI) tool. You also have the option to run GitHub Actions on your forked repository; for detailed instructions, refer to the [GitHub documentation](https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository). Before running GitHub Actions, please ensure that you have all the Git tags from the duckdb/duckdb repository. To accomplish this, execute `git fetch <your-duckdb/duckdb-repo-remote-name> --tags` followed by `git push --tags`. These commands fetch all the Git tags from the duckdb/duckdb repository and push them to your forked repository, ensuring that all the necessary tags are available for your GitHub Actions workflow.
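For orientation, here is a minimal sketch of a C++ test. It is illustrative rather than copied from the repository: the Catch `TEST_CASE` macro is real, but the exact headers and the `REQUIRE_NO_FAIL`/`CHECK_COLUMN` helper macros are assumptions modeled on the existing tests under `test/`:

```cpp
// Minimal sketch of a Catch-based C++ unit test. The headers and the
// REQUIRE_NO_FAIL / CHECK_COLUMN helpers are assumptions modeled on the
// existing tests, not a verbatim excerpt from the repository.
#include "catch.hpp"
#include "test_helpers.hpp"

#include "duckdb.hpp"

TEST_CASE("Sum over a small integer table", "[api]") {
	duckdb::DuckDB db(nullptr); // a nullptr path opens an in-memory database
	duckdb::Connection con(db);

	REQUIRE_NO_FAIL(con.Query("CREATE TABLE integers (i INTEGER)"));
	REQUIRE_NO_FAIL(con.Query("INSERT INTO integers VALUES (1), (2), (NULL)"));

	// the NULL row is ignored by SUM, so the expected value is 3
	auto result = con.Query("SELECT SUM(i) FROM integers");
	REQUIRE(CHECK_COLUMN(result, 0, {3}));
}
```

The equivalent sqllogictest is only a handful of lines in a `.test` file, which is why that framework is preferred for anything that does not need C++-level control.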
## Formatting

* Use tabs for indentation, spaces for alignment.
* Lines should not exceed 120 columns.
* To make sure the formatting is consistent, please use version 11.0.1, installable through `python3 -m pip install clang-format==11.0.1` or `pipx install clang-format==11.0.1`.
* `clang_format` and `black` enforce these rules automatically; use `make format-fix` to run the formatter.
* The project also comes with an [`.editorconfig` file](https://editorconfig.org/) that corresponds to these rules.

## C++ Guidelines

* Do not use `malloc`; prefer the use of smart pointers. The keywords `new` and `delete` are a code smell.
* Strongly prefer the use of `unique_ptr` over `shared_ptr`; only use `shared_ptr` if you **absolutely** have to.
* Use `const` whenever possible.
* Do **not** import namespaces (e.g. `using std`).
* All functions in source files in the core (`src` directory) should be part of the `duckdb` namespace.
* When overriding a virtual method, avoid repeating `virtual` and always use `override` or `final`.
* Use `[u]int(8|16|32|64)_t` instead of `int`, `long`, `uint` etc. Use `idx_t` instead of `size_t` for offsets/indices/counts of any kind.
* Prefer using references over pointers as arguments.
* Use `const` references for arguments of non-trivial objects (e.g. `std::vector`, ...).
* Use C++11 range-based for loops when possible: `for (const auto &item : items) {...}`
* Use braces for indenting `if` statements and loops. Avoid single-line `if` statements and loops, especially nested ones.
* **Class Layout:** Start out with a `public` block containing the constructor and public variables, followed by a `public` block containing public methods of the class. After that follow any private functions and private variables. For example:
```cpp
class MyClass {
public:
	MyClass();

	int my_public_variable;

public:
	void MyFunction();

private:
	void MyPrivateFunction();

private:
	int my_private_variable;
};
```
* Avoid [unnamed magic numbers](https://en.wikipedia.org/wiki/Magic_number_(programming)). Instead, use named variables that are stored in a `constexpr`.
* [Return early](https://medium.com/swlh/return-early-pattern-3d18a41bba8). Avoid deeply nested branches.
* Do not include commented-out code blocks in pull requests.
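To see several of these rules working together, here is a short illustrative sketch (not taken from the DuckDB sources; the header providing `idx_t` is an assumption):

```cpp
// Illustrative sketch combining several of the guidelines above: a named
// constexpr instead of a magic number, idx_t for counts, const references
// for non-trivial arguments, range-based loops, braces, and early returns.
#include <string>
#include <vector>

#include "duckdb/common/types.hpp" // assumed header providing idx_t

namespace duckdb {

static constexpr idx_t MAX_PREVIEW_ROWS = 10;

bool ContainsValue(const std::vector<std::string> &values, const std::string &needle) {
	if (values.empty()) {
		return false; // return early instead of wrapping the loop in an else-branch
	}
	for (const auto &value : values) {
		if (value == needle) {
			return true;
		}
	}
	return false;
}

idx_t PreviewCount(const std::vector<std::string> &values) {
	// clamp to a named constant rather than an unnamed magic number
	auto count = static_cast<idx_t>(values.size());
	return count < MAX_PREVIEW_ROWS ? count : MAX_PREVIEW_ROWS;
}

} // namespace duckdb
```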
## Error Handling

* Use exceptions **only** when an error is encountered that terminates a query (e.g. parser error, table not found). Exceptions should only be used for **exceptional** situations. For regular errors that do not break the execution flow (e.g. errors you **expect** might occur), use a return value instead.
* Try to add test cases that trigger exceptions. If an exception cannot be easily triggered using a test case, then it should probably be an assertion. This is not always true (e.g. out-of-memory errors are exceptions, but are very hard to trigger).
* Use `D_ASSERT` to assert. Use asserts only when failing the assert means a programmer error. Asserts should never be triggered by user input. Avoid code like `D_ASSERT(a > b + 3);` without comments or context.
* Assert liberally, but make it clear with comments next to the assert what went wrong when the assert is triggered.
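The split between the three mechanisms can be sketched as follows (illustrative only; `BinderException`, `D_ASSERT`, and the header paths are assumptions based on the exception names that appear elsewhere in this diff):

```cpp
// Sketch of the split described above (illustrative, not from the sources):
// a Try-style function returns false for an error the caller expects, the
// throwing variant raises an exception that terminates the query, and
// D_ASSERT guards an invariant that only a programmer error could violate.
#include <string>
#include <vector>

#include "duckdb/common/assert.hpp"    // assumed header providing D_ASSERT
#include "duckdb/common/exception.hpp" // assumed header providing BinderException
#include "duckdb/common/types.hpp"     // assumed header providing idx_t

namespace duckdb {

// Expected failure: the column may simply not exist, so absence is signalled
// through the return value instead of an exception.
bool TryGetColumnIndex(const std::vector<std::string> &names, const std::string &name, idx_t &result) {
	for (idx_t i = 0; i < names.size(); i++) {
		if (names[i] == name) {
			result = i;
			return true;
		}
	}
	return false;
}

// Exceptional situation: a query referencing a column that does not exist
// terminates that query, so we throw with a descriptive message.
idx_t GetColumnIndex(const std::vector<std::string> &names, const std::string &name) {
	idx_t index;
	if (!TryGetColumnIndex(names, name, index)) {
		throw BinderException("Referenced column \"%s\" was not found", name);
	}
	// a successful lookup must yield an in-range index; user input can never
	// violate this, so it is an assertion rather than an exception
	D_ASSERT(index < names.size());
	return index;
}

} // namespace duckdb
```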
## Naming Conventions

* Choose descriptive names. Avoid single-letter variable names.
* Files: lowercase, separated by underscores, e.g., abstract_operator.cpp
* Types (classes, structs, enums, typedefs, using): CamelCase starting with an uppercase letter, e.g., BaseColumn
* Variables: lowercase, separated by underscores, e.g., chunk_size
* Functions: CamelCase starting with an uppercase letter, e.g., GetChunk
* Avoid `i`, `j`, etc. in **nested** loops. Prefer to use e.g. **column_idx**, **check_idx**. In a **non-nested** loop it is permissible to use **i** as the iterator index.
* These rules are partially enforced by `clang-tidy`.

## Generative AI Policy

Please do not submit pull requests generated by AI (LLMs).
Reviewing such PRs puts a considerable burden on the maintainers.
2384  external/duckdb/Doxyfile  vendored
File diff suppressed because it is too large
25  external/duckdb/DuckDBConfig.cmake.in  vendored
@@ -1,25 +0,0 @@
# Config file for DuckDB package
# It defines the following variables
#
#  DuckDB_INCLUDE_DIRS - include directories for DuckDB
#  DuckDB_LIBRARIES    - libraries to link against

include(CMakeFindDependencyMacro)
find_dependency(Threads)
if(NOT @WITH_INTERNAL_ICU@)
  find_dependency(ICU COMPONENTS i18n uc data)
endif()

# Compute paths
get_filename_component(DuckDB_CMAKE_DIR "${CMAKE_CURRENT_LIST_FILE}" PATH)
set(DuckDB_INCLUDE_DIRS "@CONF_INCLUDE_DIRS@")

if(NOT TARGET duckdb AND NOT DuckDB_BINARY_DIR)
  include("${DuckDB_CMAKE_DIR}/DuckDBExports.cmake")
endif()

if(DuckDB_USE_STATIC_LIBS)
  set(DuckDB_LIBRARIES duckdb_static)
else()
  set(DuckDB_LIBRARIES duckdb)
endif()
11  external/duckdb/DuckDBConfigVersion.cmake.in  vendored
@@ -1,11 +0,0 @@
set(PACKAGE_VERSION "@DUCKDB_VERSION@")

if("${PACKAGE_VERSION}" VERSION_LESS "${PACKAGE_FIND_VERSION}")
  set(PACKAGE_VERSION_COMPATIBLE FALSE)
else()
  set(PACKAGE_VERSION_COMPATIBLE TRUE)
  if("${PACKAGE_VERSION}" VERSION_EQUAL "${PACKAGE_FIND_VERSION}")
    set(PACKAGE_VERSION_EXACT TRUE)
  endif()
endif()
7  external/duckdb/LICENSE  vendored
@@ -1,7 +0,0 @@
Copyright 2018-2025 Stichting DuckDB Foundation

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
51  external/duckdb/README.md  vendored
@@ -1,51 +0,0 @@
<div align="center">
  <picture>
    <source media="(prefers-color-scheme: light)" srcset="logo/DuckDB_Logo-horizontal.svg">
    <source media="(prefers-color-scheme: dark)" srcset="logo/DuckDB_Logo-horizontal-dark-mode.svg">
    <img alt="DuckDB logo" src="logo/DuckDB_Logo-horizontal.svg" height="100">
  </picture>
</div>
<br>

<p align="center">
  <a href="https://github.com/duckdb/duckdb/actions"><img src="https://github.com/duckdb/duckdb/actions/workflows/Main.yml/badge.svg?branch=main" alt="Github Actions Badge"></a>
  <a href="https://discord.gg/tcvwpjfnZx"><img src="https://shields.io/discord/909674491309850675" alt="discord" /></a>
  <a href="https://github.com/duckdb/duckdb/releases/"><img src="https://img.shields.io/github/v/release/duckdb/duckdb?color=brightgreen&display_name=tag&logo=duckdb&logoColor=white" alt="Latest Release"></a>
</p>

## DuckDB

DuckDB is a high-performance analytical database system. It is designed to be fast, reliable, portable, and easy to use. DuckDB provides a rich SQL dialect with support far beyond basic SQL. DuckDB supports arbitrary and nested correlated subqueries, window functions, collations, complex types (arrays, structs, maps), and [several extensions designed to make SQL easier to use](https://duckdb.org/docs/stable/sql/dialect/friendly_sql.html).

DuckDB is available as a [standalone CLI application](https://duckdb.org/docs/stable/clients/cli/overview) and has clients for [Python](https://duckdb.org/docs/stable/clients/python/overview), [R](https://duckdb.org/docs/stable/clients/r), [Java](https://duckdb.org/docs/stable/clients/java), [Wasm](https://duckdb.org/docs/stable/clients/wasm/overview), etc., with deep integrations with packages such as [pandas](https://duckdb.org/docs/guides/python/sql_on_pandas) and [dplyr](https://duckdb.org/docs/stable/clients/r#duckplyr-dplyr-api).

For more information on using DuckDB, please refer to the [DuckDB documentation](https://duckdb.org/docs/stable/).

## Installation

If you want to install DuckDB, please see [our installation page](https://duckdb.org/docs/installation/) for instructions.

## Data Import

For CSV and Parquet files, data import is as simple as referencing the file in the `FROM` clause:

```sql
SELECT * FROM 'myfile.csv';
SELECT * FROM 'myfile.parquet';
```

Refer to our [Data Import](https://duckdb.org/docs/stable/data/overview) section for more information.

## SQL Reference

The documentation contains a [SQL introduction and reference](https://duckdb.org/docs/stable/sql/introduction).

## Development

For development, DuckDB requires [CMake](https://cmake.org), Python 3, and a C++11-compliant compiler. In the root directory, run `make` to compile the sources. For development, use `make debug` to build a non-optimized debug version. You should run `make unit` and `make allunit` to verify that your version works properly after making changes. To test performance, you can run `BUILD_BENCHMARK=1 BUILD_TPCH=1 make` and then perform several standard benchmarks from the root directory by executing `./build/release/benchmark/benchmark_runner`. The details of the benchmarks are in our [Benchmark Guide](benchmark/README.md).

Please also refer to our [Build Guide](https://duckdb.org/docs/stable/dev/building/overview) and [Contribution Guide](CONTRIBUTING.md).

## Support

See the [Support Options](https://duckdblabs.com/support/) page and the dedicated [`endoflife.date`](https://endoflife.date/duckdb) page.
34  external/duckdb/benchmark/CMakeLists.txt  vendored
@@ -1,34 +0,0 @@
include_directories(../third_party/catch)
include_directories(../third_party/tpce-tool/include)
include_directories(../third_party/sqlite/include)
include_directories(../test/include)
include_directories(include)

add_subdirectory(micro)
list(FIND DUCKDB_EXTENSION_NAMES tpch _index)
if(${_index} GREATER -1)
  add_subdirectory(tpch)
endif()

add_extension_definitions()
set(BENCHMARK_ROOT_DIRECTORY
    ""
    CACHE
      STRING
      "The benchmark runner root directory (defaults to the project source directory)"
)
if(DEFINED BENCHMARK_ROOT_DIRECTORY AND NOT BENCHMARK_ROOT_DIRECTORY STREQUAL
                                        "")
  add_definitions(-DDUCKDB_ROOT_DIRECTORY="${BENCHMARK_ROOT_DIRECTORY}")
else()
  add_definitions(-DDUCKDB_ROOT_DIRECTORY="${PROJECT_SOURCE_DIR}")
endif()

add_executable(benchmark_runner benchmark_runner.cpp interpreted_benchmark.cpp
               ${BENCHMARK_OBJECT_FILES})

target_link_libraries(benchmark_runner duckdb imdb test_helpers)

if(${BUILD_TPCE})
  target_link_libraries(benchmark_runner tpce)
endif()
108  external/duckdb/benchmark/README.md  vendored
@@ -1,108 +0,0 @@
#### Clone the repo and compile the benchmark runner

```
git clone https://github.com/duckdb/duckdb
cd duckdb
BUILD_BENCHMARK=1 BUILD_TPCH=1 make
```

#### List all available benchmarks

`build/release/benchmark/benchmark_runner --list`

#### Run a single benchmark

`build/release/benchmark/benchmark_runner benchmark/micro/nulls/no_nulls_addition.benchmark`

The output will be printed to `stdout` in CSV format:

```
name run timing
benchmark/micro/nulls/no_nulls_addition.benchmark 1 0.121234
benchmark/micro/nulls/no_nulls_addition.benchmark 2 0.121702
benchmark/micro/nulls/no_nulls_addition.benchmark 3 0.122948
benchmark/micro/nulls/no_nulls_addition.benchmark 4 0.122534
benchmark/micro/nulls/no_nulls_addition.benchmark 5 0.124102
```

You can also specify an output file using the `--out` flag. This will write only the timings (delimited by newlines) to that file.

```
build/release/benchmark/benchmark_runner benchmark/micro/nulls/no_nulls_addition.benchmark --out=timings.out
cat timings.out
0.182472
0.185027
0.184163
0.185281
0.182948
```

#### Regex

You can also use a regex to specify which benchmarks to run. Be careful of shell expansion of certain regex characters (e.g. `*` will likely be expanded by your shell, hence this requires proper quoting or escaping).

`build/release/benchmark/benchmark_runner "benchmark/micro/nulls/.*"`

#### Run all benchmarks

Not specifying any argument will run all benchmarks.

`build/release/benchmark/benchmark_runner`

#### Other options

`--info` gives you some other information about the benchmark.

```
build/release/benchmark/benchmark_runner benchmark/micro/nulls/no_nulls_addition.benchmark --info
display_name:NULL Addition (no nulls)
group:micro
subgroup:nulls
```

`--query` will print the query that is run by the benchmark.

```
SELECT MIN(i + 1) FROM integers
```

`--profile` will output a pretty-printed query tree, primarily intended for interactive use.

```
┌─────────────────────────────────────┐
│┌───────────────────────────────────┐│
││    Query Profiling Information    ││
│└───────────────────────────────────┘│
└─────────────────────────────────────┘
SELECT MIN(i + 1) FROM integers
┌─────────────────────────────────────┐
│┌───────────────────────────────────┐│
││        Total Time: 0.176s         ││
│└───────────────────────────────────┘│
└─────────────────────────────────────┘
┌───────────────────────────┐
│    UNGROUPED_AGGREGATE    │
│   ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─   │
│          min(#0)          │
│   ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─   │
│             1             │
│          (0.03s)          │
└─────────────┬─────────────┘
┌─────────────┴─────────────┐
│         PROJECTION        │
│   ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─   │
│          +(i, 1)          │
│   ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─   │
│         100000000         │
│          (0.05s)          │
└─────────────┬─────────────┘
┌─────────────┴─────────────┐
│          SEQ_SCAN         │
│   ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─   │
│          integers         │
│   ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─   │
│             i             │
│   ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─   │
│         100000000         │
│          (0.08s)          │
└───────────────────────────┘
```
@@ -1,25 +0,0 @@
# name: ${FILE_PATH}
# description: ${DESCRIPTION}
# group: [clickbench]

name Q${QUERY_NUMBER_PADDED}
group appian

require httpfs

cache ads.5M.duck

load
LOAD httpfs;
attach 'https://blobs.duckdb.org/data/appian_benchmark_data.duckdb' as appian_db (READ_ONLY);
create table AddressView as select * from appian_db.AddressView;
create table CustomerView as select * from appian_db.CustomerView;
create table OrderView as select * from appian_db.OrderView;
create table CategoryView as select * from appian_db.CategoryView;
create table OrderItemNovelty_Update as select * from appian_db.OrderItemNovelty_Update;
create table ProductView as select * from appian_db.ProductView;
create table CreditCardView as select * from appian_db.CreditCardView;
create table OrderItemView as select * from appian_db.OrderItemView;
create table TaxRecordView as select * from appian_db.TaxRecordView;

run benchmark/appian_benchmarks/queries/q${QUERY_NUMBER_PADDED}.sql
@@ -1,7 +0,0 @@
# name: benchmark/appian_benchmarks/q01.benchmark
# description: Run query 01 from the appian benchmarks
# group: [appian_benchmarks]

template benchmark/appian_benchmarks/appian.benchmark.in
QUERY_NUMBER=1
QUERY_NUMBER_PADDED=01
@@ -1,7 +0,0 @@
# name: benchmark/appian_benchmarks/q02.benchmark
# description: Run query 02 from the appian benchmarks
# group: [appian_benchmarks]

template benchmark/appian_benchmarks/appian.benchmark.in
QUERY_NUMBER=2
QUERY_NUMBER_PADDED=02
@@ -1,7 +0,0 @@
# name: benchmark/appian_benchmarks/q03.benchmark
# description: Run query 03 from the appian benchmarks
# group: [appian_benchmarks]

template benchmark/appian_benchmarks/appian.benchmark.in
QUERY_NUMBER=3
QUERY_NUMBER_PADDED=03
@@ -1,7 +0,0 @@
# name: benchmark/appian_benchmarks/q04.benchmark
# description: Run query 04 from the appian benchmarks
# group: [appian_benchmarks]

template benchmark/appian_benchmarks/appian.benchmark.in
QUERY_NUMBER=4
QUERY_NUMBER_PADDED=04
@@ -1,7 +0,0 @@
# name: benchmark/appian_benchmarks/q05.benchmark
# description: Run query 05 from the appian benchmarks
# group: [appian_benchmarks]

template benchmark/appian_benchmarks/appian.benchmark.in
QUERY_NUMBER=5
QUERY_NUMBER_PADDED=05
@@ -1,7 +0,0 @@
# name: benchmark/appian_benchmarks/q06.benchmark
# description: Run query 06 from the appian benchmarks
# group: [appian_benchmarks]

template benchmark/appian_benchmarks/appian.benchmark.in
QUERY_NUMBER=6
QUERY_NUMBER_PADDED=06
@@ -1,7 +0,0 @@
# name: benchmark/appian_benchmarks/q07.benchmark
# description: Run query 07 from the appian benchmarks
# group: [appian_benchmarks]

template benchmark/appian_benchmarks/appian.benchmark.in
QUERY_NUMBER=7
QUERY_NUMBER_PADDED=07
@@ -1,7 +0,0 @@
# name: benchmark/appian_benchmarks/q08.benchmark
# description: Run query 08 from the appian benchmarks
# group: [appian_benchmarks]

template benchmark/appian_benchmarks/appian.benchmark.in
QUERY_NUMBER=8
QUERY_NUMBER_PADDED=08
@@ -1 +0,0 @@
select address_state as g0, sum(orderItem_quantity) as p0 from CustomerView c left outer join AddressView a on c.customer_id = a.address_customerId left outer join OrderView o on c.customer_id = o.order_customerId left outer join OrderItemView oi on o.order_id = oi.orderItem_orderId group by address_state order by address_state limit 500;
@@ -1 +0,0 @@
select a.address_state as g0, t1rp1 as g1, t2rp1 as g2, max(t5rp1) as p0, avg(t8rp1 * t8rp2) as p1, max(t6rp1) as p2, count(c.customer_priority) as p3, coalesce(avg(t7rp1), 0.0) as p4 from CustomerView c left outer join AddressView a on c.customer_id = a.address_customerId left outer join TaxRecordView t on a.address_id = t.taxRecord_addressId left outer join ( select sum(creditCard_cvv) as t1rp1, c.customer_id as t1pk from CustomerView c left outer join CreditCardView cc on c.customer_id = cc.creditCard_customerId group by c.customer_id ) t1 on c.customer_id = t1.t1pk left outer join ( select min(p.product_likes) as t2rp1, c.customer_id as t2pk from CustomerView c left outer join OrderView o on c.customer_id = o.order_customerId left outer join OrderItemView oi on o.order_id = oi.orderItem_orderId left outer join ProductView p on oi.orderItem_productId = p.product_id left outer join CategoryView ca on p.product_categoryName = ca.category_name where ca.category_seasonal = true group by c.customer_id ) t2 on c.customer_id = t2.t2pk left outer join ( select max(o.order_subShipments) as t5rp1, c.customer_id as t5pk from CustomerView c left outer join OrderView o on c.customer_id = o.order_customerId group by c.customer_id ) t5 on c.customer_id = t5pk left outer join ( select max(coalesce(oi.orderItem_weight, 1)) as t6rp1, c.customer_id as t6pk from CustomerView c left outer join OrderView o on c.customer_id = o.order_customerId left outer join OrderItemView oi on o.order_id = oi.orderItem_orderId where o.order_serverId in (1, 3, 5) group by c.customer_id ) t6 on c.customer_id = t6pk left outer join ( select count(ca.category_seasonal) as t7rp1, c.customer_id as t7pk from CustomerView c left outer join OrderView o on c.customer_id = o.order_customerId left outer join OrderItemView oi on o.order_id = oi.orderItem_orderId left outer join ProductView p on oi.orderItem_productId = p.product_id left outer join CategoryView ca on p.product_categoryName = ca.category_name where ca.category_perishable = true group by c.customer_id ) t7 on c.customer_id = t7pk left outer join ( select sum(creditCard_zip) as t8rp1, sum(creditCard_lastChargeAmount) as t8rp2, c.customer_id as t8pk from CustomerView c left outer join OrderView o on c.customer_id = o.order_customerId left outer join CreditCardView cc on o.order_creditCardNumber = cc.creditCard_number group by c.customer_id ) t8 on c.customer_id = t8pk where t.taxRecord_value > 149670.0 group by a.address_state, t1rp1, t2rp1 order by g0, p0, p1 limit 500;
@@ -1 +0,0 @@
select c.customer_priority as g0, t1rp1 as g1, t.taxRecord_bracket as g2, sum(oi.orderItem_weight) as p0, max(ca.category_demandScore) as p1, max(ca.category_auditDate) as p2, cast(avg(ca.category_valuation) as int) as p3, sum(t1rp2) as p4, sum( case when p.product_inventoryLastOrderedOn - ca.category_auditDate > 300 then 1 when p.product_inventoryLastOrderedOn - ca.category_auditDate > 150 then 10 when p.product_inventoryLastOrderedOn - ca.category_auditDate > 0 then 100 else 1000 end + (c.customer_priority * a.address_zone) ) as p5 from OrderItemView oi left outer join OrderView o on oi.orderItem_orderId = o.order_id left outer join ProductView p on oi.orderItem_productId = p.product_id left outer join CreditCardView cc on o.order_creditCardNumber = cc.creditCard_number left outer join CustomerView c on o.order_customerId = c.customer_id left outer join AddressView a on c.customer_id = a.address_customerId left outer join TaxRecordView t on a.address_id = t.taxRecord_addressId left outer join CategoryView ca on p.product_categoryName = ca.category_name left outer join ( select min(cc.creditCard_expirationDate) as t1rp1, sum(cc.creditCard_lastChargeAmount) as t1rp2, c.customer_id as t1pk from CustomerView c left outer join CreditCardView cc on c.customer_id = cc.creditCard_customerId group by c.customer_id ) t1 on c.customer_id = t1pk where cc.creditCard_lastChargeAmount > 90.0 and p.product_price > 34.0 group by c.customer_priority, t1rp1, t.taxRecord_bracket order by p1, p3, g2 limit 500;
Some files were not shown because too many files have changed in this diff