should be it

2025-10-24 19:21:19 -05:00
parent a4b23fc57c
commit f09560c7b1
14047 changed files with 3161551 additions and 1 deletion


test/extension/CMakeLists.txt
@@ -0,0 +1,32 @@
set(PARAMETERS "-warnings")
build_loadable_extension_directory(
loadable_extension_demo
"CPP"
test/extension
"default-version"
""
${PARAMETERS}
loadable_extension_demo.cpp)
if(NOT WIN32 AND NOT SUN)
add_definitions(-DDUCKDB_BUILD_DIRECTORY="${PROJECT_BINARY_DIR}")
build_loadable_extension_directory(
loadable_extension_optimizer_demo
"CPP"
test/extension
"default-version"
""
${PARAMETERS}
../extension/loadable_extension_optimizer_demo.cpp)
if(${ENABLE_UNITTEST_CPP_TESTS})
set(TEST_EXT_OBJECTS test_remote_optimizer.cpp)
add_library_unity(test_extensions OBJECT ${TEST_EXT_OBJECTS})
set(ALL_OBJECT_FILES
${ALL_OBJECT_FILES} $<TARGET_OBJECTS:test_extensions>
PARENT_SCOPE)
endif()
endif()
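
The helper above packages a single C++ source file with an extension entry point into a loadable .duckdb_extension binary. A minimal sketch of the entry point that build_loadable_extension_directory expects, modeled on loadable_extension_demo.cpp further down in this commit; the name my_minimal_demo, the function my_hello, and MyFunc are illustrative placeholders, not part of this commit:

#include "duckdb.hpp"
#include "duckdb/main/extension/extension_loader.hpp"

using namespace duckdb;

// smallest useful scalar function: takes no arguments and returns a constant string
static void MyFunc(DataChunk &args, ExpressionState &state, Vector &result) {
	result.Reference(Value("hello from my_minimal_demo"));
}

extern "C" {
DUCKDB_CPP_EXTENSION_ENTRY(my_minimal_demo, loader) {
	// register a zero-argument scalar function returning VARCHAR
	loader.RegisterFunction(ScalarFunction("my_hello", {}, LogicalType::VARCHAR, MyFunc));
}
}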


@@ -0,0 +1,43 @@
# name: test/extension/autoloading_copy_function.test
# description: Tests for autoloading with copy functions
# group: [extension]
# This test assumes parquet to be available in the LOCAL_EXTENSION_REPO and NOT linked into duckdb statically
# -> this should be the case for our autoloading tests where we have the local_extension_repo variable set
require-env LOCAL_EXTENSION_REPO
require parquet
# Ensure we have a clean extension directory without any preinstalled extensions
statement ok
set extension_directory='__TEST_DIR__/autoloading_types'
### No autoloading nor installing: throw error with installation hint
statement ok
set autoload_known_extensions=false
statement ok
set autoinstall_known_extensions=false
statement maybe
copy (select 1337 as edgy_hacker_number) TO '__TEST_DIR__/test1337.parquet'
----
Catalog Error: Copy Function with name "parquet" is not in the catalog, but it exists in the parquet extension.
### With autoloading, install and correct repo
statement ok
set autoload_known_extensions=true
statement ok
set autoinstall_known_extensions=true
statement ok
set autoinstall_extension_repository='${LOCAL_EXTENSION_REPO}';
statement ok
copy (select 1337 as edgy_hacker_number) TO '__TEST_DIR__/test1337.parquet'
query I
select * from '__TEST_DIR__/test1337.parquet';
----
1337
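
For embedders, the switches this test flips are ordinary settings. A hedged C++ sketch of the same flow, assuming an embedded DuckDB build; the repository path and output file below are placeholders:

#include "duckdb.hpp"

using namespace duckdb;

int main() {
	DuckDB db(nullptr);
	Connection con(db);
	// mirror the test: allow known extensions to be installed and loaded on demand
	con.Query("SET autoload_known_extensions=true");
	con.Query("SET autoinstall_known_extensions=true");
	// placeholder: point at whatever repository holds the parquet extension
	con.Query("SET autoinstall_extension_repository='/path/to/local_repo'");
	// with autoloading enabled, this COPY pulls in parquet instead of failing with a catalog error
	auto result = con.Query("COPY (SELECT 1337 AS edgy_hacker_number) TO '/tmp/test1337.parquet'");
	return result->HasError() ? 1 : 0;
}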


@@ -0,0 +1,19 @@
# name: test/extension/autoloading_copy_function_target.test
# description: Tests for autoloading copy functions targets
# group: [extension]
statement ok
set autoload_known_extensions=false
statement ok
set autoinstall_known_extensions=false
statement error
copy (select 1337 as edgy_hacker_number) TO 's3://non-existent-bucket/test1337.csv'
----
<REGEX>:.*Missing Extension Error: File .*test1337.csv requires the extension httpfs to be loaded.*
statement error
copy (select 1337 as edgy_hacker_number) TO 'azure://non-existent-bucket/test1337.csv'
----
<REGEX>:.*Missing Extension Error: File .*test1337.csv requires the extension azure to be loaded.*


@@ -0,0 +1,30 @@
# name: test/extension/autoloading_encodings.test
# description: Test autoloading of encodings.
# group: [extension]
mode skip
require-env LOCAL_EXTENSION_REPO
statement ok
set autoload_known_extensions=false
statement ok
set autoinstall_known_extensions=false
statement error
FROM read_csv('data/csv/test/test.csv', encoding = 'shift_jis')
----
<REGEX>:.*Invalid Input Error.*You can try "INSTALL encodings; LOAD encodings".*
statement ok
set autoload_known_extensions=true
statement ok
set autoinstall_known_extensions=true
statement ok
set autoinstall_extension_repository='${LOCAL_EXTENSION_REPO}';
statement ok
FROM read_csv('data/csv/test/test.csv', encoding = 'shift_jis')


@@ -0,0 +1,42 @@
# name: test/extension/autoloading_types.test
# description: Tests for autoloading with types
# group: [extension]
# This test assumes icu and json to be available in the LOCAL_EXTENSION_REPO and NOT linked into duckdb statically
# -> this should be the case for our autoloading tests where we have the local_extension_repo variable set
require-env LOCAL_EXTENSION_REPO
require json
require icu
# Ensure we have a clean extension directory without any preinstalled extensions
statement ok
set extension_directory='__TEST_DIR__/autoloading_types'
### No autoloading nor installing: throw error with installation hint
statement ok
set autoload_known_extensions=false
statement ok
set autoinstall_known_extensions=false
statement maybe
SELECT '{}'::JSON;
----
Catalog Error: Type with name "JSON" is not in the catalog, but it exists in the json extension.
### With autoloading, install and correct repo
statement ok
set autoload_known_extensions=true
statement ok
set autoinstall_known_extensions=true
statement ok
set autoinstall_extension_repository='${LOCAL_EXTENSION_REPO}';
query I
SELECT '{}'::JSON;
----
{}


@@ -0,0 +1,22 @@
# name: test/extension/concurrent_load_extension.test
# description: Test concurrent load extension
# group: [extension]
# FIXME: this is mostly working, but there are still some concurrency issues in the DBConfig around ParserExtension/ExtensionCallback registration
mode skip
require notmingw
require skip_reload
require allow_unsigned_extensions
concurrentloop i 0 100
statement ok
LOAD '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
statement ok
FROM duckdb_extensions();
endloop
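
The concurrentloop above runs the LOAD from 100 scripts in parallel. An equivalent embedder-side sketch, assuming an unsigned local build of the demo extension (the path is a placeholder):

#include "duckdb.hpp"

#include <thread>
#include <vector>

using namespace duckdb;

int main() {
	DBConfig config;
	config.options.allow_unsigned_extensions = true; // the test requires this as well
	DuckDB db(nullptr, &config);
	std::vector<std::thread> threads;
	for (int i = 0; i < 100; i++) {
		threads.emplace_back([&db]() {
			Connection con(db); // one connection per thread
			// placeholder path to the locally built demo extension
			con.Query("LOAD '/path/to/loadable_extension_demo.duckdb_extension'");
			con.Query("FROM duckdb_extensions()");
		});
	}
	for (auto &t : threads) {
		t.join();
	}
	return 0;
}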


@@ -0,0 +1,26 @@
# name: test/extension/consistent_semicolon_extension_parse.test
# description: Original issue: https://github.com/duckdb/duckdb/issues/18485
# group: [extension]
statement ok
LOAD '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
statement error
quack (foo);
----
Parser Error: This is not a quack: (foo);
statement error
quack 's';
----
Parser Error: This is not a quack: 's';
statement error
unrecognizedkeyword ';';anotherunrecognizedkeyword';';;;;;
----
Parser Error: syntax error at or near "unrecognizedkeyword"
statement error
QUACK string_split('hello-world', ';');QUACK string_split('hello-world', ';')
----
string_split('hello-world', ';');


@@ -0,0 +1,47 @@
# name: test/extension/duckdb_extensions.test
# description: Tests for the duckdb_extensions() table function
# group: [extension]
# This test assumes icu and json to be available in the LOCAL_EXTENSION_REPO and NOT linked into duckdb statically
# -> this should be the case for our autoloading tests where we have the local_extension_repo variable set
require-env LOCAL_EXTENSION_REPO
require no_extension_autoloading "EXPECTED: Test relies on explicit INSTALL and LOAD"
statement ok
PRAGMA enable_verification
# Set the repository to the correct one
statement ok
set custom_extension_repository='${LOCAL_EXTENSION_REPO}'
# Ensure we have a clean extension directory without any preinstalled extensions
statement ok
set extension_directory='__TEST_DIR__/duckdb_extensions'
require json
# json is statically linked
query II
SELECT extension_name, install_mode from duckdb_extensions() where extension_name='json'
----
json STATICALLY_LINKED
# now we install json (happens when users install extensions that are also statically loaded)
statement ok
install json
# json still shown as statically linked
query II
SELECT extension_name, install_mode from duckdb_extensions() where extension_name='json'
----
json STATICALLY_LINKED
statement ok
load json
# json still shown as statically linked
query II
SELECT extension_name, install_mode from duckdb_extensions() where extension_name='json'
----
json STATICALLY_LINKED


@@ -0,0 +1,45 @@
# name: test/extension/install_extension.test
# description: Test various ways of installing extensions
# group: [extension]
statement ok
PRAGMA enable_verification
statement ok
set extension_directory='__TEST_DIR__/install_extension'
# Check defaults are correct
statement error
INSTALL will_never_exist;
----
Failed to download extension "will_never_exist" at URL "http://extensions.duckdb.org
# Explicitly install from core
statement error
INSTALL will_never_exist FROM core;
----
Failed to download extension "will_never_exist" at URL "http://extensions.duckdb.org
# Explicitly install from nightly
statement error
INSTALL will_never_exist FROM core_nightly;
----
Failed to download extension "will_never_exist" at URL "http://nightly-extensions.duckdb.org
# Explicitly install from community
statement error
INSTALL will_never_exist FROM community;
----
Failed to download extension "will_never_exist" at URL "http://community-extensions.duckdb.org
# Aliases cannot be quoted: string literals are interpreted as paths
statement error
INSTALL will_never_exist FROM 'core';
----
IO Error: Failed to install local extension "will_never_exist", no access to the file at PATH
# Error message should point to extensions troubleshooting page
statement error
INSTALL will_never_exist FROM core;
----
For more info, visit https://duckdb.org/docs/stable/extensions/troubleshooting


@@ -0,0 +1,111 @@
# name: test/extension/install_extension.test_slow
# description: Test installing and loading extensions from local files
# group: [extension]
require notmingw
require skip_reload
statement ok
PRAGMA enable_verification
# we need a persistent database because otherwise we are not allowed to restart
load __TEST_DIR__/dummy_database_for_extension_installation.db
statement error
INSTALL 'asdf';
----
statement error
INSTALL 'asdf/asdf.duckdb_extension';
----
statement ok
INSTALL '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
statement ok
FORCE INSTALL '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
statement ok
LOAD 'loadable_extension_demo';
statement ok
LOAD 'loadable_extension_demo';
# need to restart to unload extensions
restart
# this will succeed on macOS due to its case-insensitive filesystem
statement maybe
FORCE INSTALL '__BUILD_DIRECTORY__/test/extension/loadable_extension_DEMO.duckdb_extension';
----
IO Error: Failed to install local extension
statement ok
FORCE INSTALL '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
statement ok
LOAD 'Loadable_Extension_DEMO';
# need to restart to unload extensions
restart
# can't find extension in non-default extension location
statement ok
SET extension_directory='__TEST_DIR__/extension_directory'
statement error
LOAD 'loadable_extension_demo';
----
# can install after setting directory, even if the extension directory does not yet exist
statement ok
INSTALL '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
# can load now
statement ok
LOAD 'loadable_extension_demo';
# ensure file is there
query I
select count(*) from glob('__TEST_DIR__/extension_directory/**/loadable_extension_demo.duckdb_extension')
----
1
# unload again
restart
statement ok
SET extension_directory=''
# we will use a nonstandard home directory, but we won't create it if it does not exist
# can't find extension in non-default home location
statement ok
SET home_directory='__TEST_DIR__/home_directory'
statement error
LOAD 'loadable_extension_demo';
----
# we can't install because we will not create a non-existing home directory
statement error
INSTALL '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
----
# but this one should exist
statement ok
SET home_directory='__TEST_DIR__'
statement ok
INSTALL '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
query I
select COUNT(*) from glob('__TEST_DIR__/.duckdb/**/loadable_extension_demo.duckdb_extension')
----
1
# can load now
statement ok
LOAD 'loadable_extension_demo';


@@ -0,0 +1,31 @@
# name: test/extension/install_version.test
# description: test installing extensions of specific versions
# group: [extension]
# This test assumes icu and json to be available in the LOCAL_EXTENSION_REPO and NOT linked into duckdb statically
# -> this should be the case for our autoloading tests where we have the local_extension_repo variable set
require-env LOCAL_EXTENSION_REPO
require notwindows
# Ensure we have a clean extension directory without any preinstalled extensions
statement ok
set extension_directory='__TEST_DIR__/install_version'
statement ok
set custom_extension_repository='${LOCAL_EXTENSION_REPO}'
# Confirm the version ends up in the url we search.
# TODO: the script to create a local extension repo should be able to produce versioned extension paths; then we can
# properly test this
statement error
INSTALL quack FROM '${LOCAL_EXTENSION_REPO}' VERSION 'myversion';
----
/quack/myversion/
# Using the default extension repository
statement error
INSTALL quack VERSION 'myversion';
----
/quack/myversion/


@@ -0,0 +1,12 @@
# name: test/extension/load_error.test
# description: Check the loading error for typos in extensions
# group: [extension]
require notmingw
require skip_reload
statement error
LOAD motherduk
----
motherduck


@@ -0,0 +1,68 @@
# name: test/extension/load_extension.test
# description: Try loading a shared object with an extension
# group: [extension]
require notmingw
require skip_reload
require allow_unsigned_extensions
statement ok
PRAGMA enable_verification
statement error
LOAD 'asdf';
----
<REGEX>:IO Error: Extension.*not found.*
statement error
LOAD 'Makefile';
----
<REGEX>:IO Error: Extension.*not found.*
statement error
LOAD NULL;
----
<REGEX>:Parser Error: syntax error.*
statement ok
LOAD '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
query I
SELECT extension_version FROM duckdb_extensions() WHERE extension_name = 'loadable_extension_demo';
----
default-version
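# hello(s) is defined in loadable_extension_demo.cpp as length(s) + 5, so 'World' yields 10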
query I
SELECT hello('World');
----
10
query I
QUACK QUACK QUACK
----
QUACK
QUACK
QUACK
query I
quACk QUaCk
----
QUACK
QUACK
statement error
QUAC
----
<REGEX>:Parser Error: Did you mean.*
statement error
QUACK NOT QUACK
----
<REGEX>:Parser Error: This is not a quack.*
query I
SELECT contains(loaded_extensions(), 'loadable_extension_demo')
----
true


@@ -0,0 +1,20 @@
# name: test/extension/load_test_alias.test
# description: Enable TestAliasExtension.
# group: [extension]
require skip_reload
require notmingw
require allow_unsigned_extensions
statement ok
PRAGMA enable_verification
statement ok
LOAD '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
query I
SELECT test_alias_hello();
----
Hello Alias!


test/extension/loadable_extension_demo.cpp
@@ -0,0 +1,645 @@
#include "duckdb.hpp"
#include "duckdb/parser/parser_extension.hpp"
#include "duckdb/parser/parsed_data/create_table_function_info.hpp"
#include "duckdb/common/string_util.hpp"
#include "duckdb/parser/parsed_data/create_scalar_function_info.hpp"
#include "duckdb/parser/parsed_data/create_type_info.hpp"
#include "duckdb/catalog/catalog_entry/type_catalog_entry.hpp"
#include "duckdb/planner/extension_callback.hpp"
#include "duckdb/function/cast/cast_function_set.hpp"
#include "duckdb/main/extension/extension_loader.hpp"
#include "duckdb/common/vector_operations/generic_executor.hpp"
#include "duckdb/common/exception/conversion_exception.hpp"
#include "duckdb/planner/expression/bound_constant_expression.hpp"
#include "duckdb/common/extension_type_info.hpp"
#include "duckdb/parser/sql_statement.hpp"
#include "duckdb/parser/query_node/select_node.hpp"
#include "duckdb/parser/expression/constant_expression.hpp"
#include "duckdb/parser/tableref/emptytableref.hpp"
using namespace duckdb;
//===--------------------------------------------------------------------===//
// Scalar function
//===--------------------------------------------------------------------===//
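// hello(s): returns the length of the input string plus 5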
static inline int32_t hello_fun(string_t what) {
return what.GetSize() + 5;
}
static inline void TestAliasHello(DataChunk &args, ExpressionState &state, Vector &result) {
result.Reference(Value("Hello Alias!"));
}
static inline void AddPointFunction(DataChunk &args, ExpressionState &state, Vector &result) {
auto &left_vector = args.data[0];
auto &right_vector = args.data[1];
const int count = args.size();
auto left_vector_type = left_vector.GetVectorType();
auto right_vector_type = right_vector.GetVectorType();
args.Flatten();
UnifiedVectorFormat lhs_data;
UnifiedVectorFormat rhs_data;
left_vector.ToUnifiedFormat(count, lhs_data);
right_vector.ToUnifiedFormat(count, rhs_data);
result.SetVectorType(VectorType::FLAT_VECTOR);
auto &child_entries = StructVector::GetEntries(result);
auto &left_child_entries = StructVector::GetEntries(left_vector);
auto &right_child_entries = StructVector::GetEntries(right_vector);
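// element-wise point addition: for each row, add the corresponding struct children (x and y) of both inputs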
for (int base_idx = 0; base_idx < count; base_idx++) {
auto lhs_list_index = lhs_data.sel->get_index(base_idx);
auto rhs_list_index = rhs_data.sel->get_index(base_idx);
if (!lhs_data.validity.RowIsValid(lhs_list_index) || !rhs_data.validity.RowIsValid(rhs_list_index)) {
FlatVector::SetNull(result, base_idx, true);
continue;
}
for (size_t col = 0; col < child_entries.size(); ++col) {
auto &child_entry = child_entries[col];
auto &left_child_entry = left_child_entries[col];
auto &right_child_entry = right_child_entries[col];
auto pdata = ConstantVector::GetData<int32_t>(*child_entry);
auto left_pdata = ConstantVector::GetData<int32_t>(*left_child_entry);
auto right_pdata = ConstantVector::GetData<int32_t>(*right_child_entry);
pdata[base_idx] = left_pdata[lhs_list_index] + right_pdata[rhs_list_index];
}
}
if (left_vector_type == VectorType::CONSTANT_VECTOR && right_vector_type == VectorType::CONSTANT_VECTOR) {
result.SetVectorType(VectorType::CONSTANT_VECTOR);
}
result.Verify(count);
}
static inline void SubPointFunction(DataChunk &args, ExpressionState &state, Vector &result) {
auto &left_vector = args.data[0];
auto &right_vector = args.data[1];
const int count = args.size();
auto left_vector_type = left_vector.GetVectorType();
auto right_vector_type = right_vector.GetVectorType();
args.Flatten();
UnifiedVectorFormat lhs_data;
UnifiedVectorFormat rhs_data;
left_vector.ToUnifiedFormat(count, lhs_data);
right_vector.ToUnifiedFormat(count, rhs_data);
result.SetVectorType(VectorType::FLAT_VECTOR);
auto &child_entries = StructVector::GetEntries(result);
auto &left_child_entries = StructVector::GetEntries(left_vector);
auto &right_child_entries = StructVector::GetEntries(right_vector);
for (int base_idx = 0; base_idx < count; base_idx++) {
auto lhs_list_index = lhs_data.sel->get_index(base_idx);
auto rhs_list_index = rhs_data.sel->get_index(base_idx);
if (!lhs_data.validity.RowIsValid(lhs_list_index) || !rhs_data.validity.RowIsValid(rhs_list_index)) {
FlatVector::SetNull(result, base_idx, true);
continue;
}
for (size_t col = 0; col < child_entries.size(); ++col) {
auto &child_entry = child_entries[col];
auto &left_child_entry = left_child_entries[col];
auto &right_child_entry = right_child_entries[col];
auto pdata = ConstantVector::GetData<int32_t>(*child_entry);
auto left_pdata = ConstantVector::GetData<int32_t>(*left_child_entry);
auto right_pdata = ConstantVector::GetData<int32_t>(*right_child_entry);
pdata[base_idx] = left_pdata[lhs_list_index] - right_pdata[rhs_list_index];
}
}
if (left_vector_type == VectorType::CONSTANT_VECTOR && right_vector_type == VectorType::CONSTANT_VECTOR) {
result.SetVectorType(VectorType::CONSTANT_VECTOR);
}
result.Verify(count);
}
//===--------------------------------------------------------------------===//
// Quack Table Function
//===--------------------------------------------------------------------===//
class QuackFunction : public TableFunction {
public:
QuackFunction() {
name = "quack";
arguments.push_back(LogicalType::BIGINT);
bind = QuackBind;
init_global = QuackInit;
function = QuackFunc;
}
struct QuackBindData : public TableFunctionData {
QuackBindData(idx_t number_of_quacks) : number_of_quacks(number_of_quacks) {
}
idx_t number_of_quacks;
};
struct QuackGlobalData : public GlobalTableFunctionState {
QuackGlobalData() : offset(0) {
}
idx_t offset;
};
static duckdb::unique_ptr<FunctionData> QuackBind(ClientContext &context, TableFunctionBindInput &input,
vector<LogicalType> &return_types, vector<string> &names) {
names.emplace_back("quack");
return_types.emplace_back(LogicalType::VARCHAR);
return make_uniq<QuackBindData>(BigIntValue::Get(input.inputs[0]));
}
static duckdb::unique_ptr<GlobalTableFunctionState> QuackInit(ClientContext &context,
TableFunctionInitInput &input) {
return make_uniq<QuackGlobalData>();
}
static void QuackFunc(ClientContext &context, TableFunctionInput &data_p, DataChunk &output) {
auto &bind_data = data_p.bind_data->Cast<QuackBindData>();
auto &data = (QuackGlobalData &)*data_p.global_state;
if (data.offset >= bind_data.number_of_quacks) {
// finished returning values
return;
}
// start returning values
// either fill up the chunk or return all the remaining rows
idx_t count = 0;
while (data.offset < bind_data.number_of_quacks && count < STANDARD_VECTOR_SIZE) {
output.SetValue(0, count, Value("QUACK"));
data.offset++;
count++;
}
output.SetCardinality(count);
}
};
//===--------------------------------------------------------------------===//
// Parser extension
//===--------------------------------------------------------------------===//
struct QuackExtensionData : public ParserExtensionParseData {
QuackExtensionData(idx_t number_of_quacks) : number_of_quacks(number_of_quacks) {
}
idx_t number_of_quacks;
duckdb::unique_ptr<ParserExtensionParseData> Copy() const override {
return make_uniq<QuackExtensionData>(number_of_quacks);
}
string ToString() const override {
vector<string> quacks;
for (idx_t i = 0; i < number_of_quacks; i++) {
quacks.push_back("QUACK");
}
return StringUtil::Join(quacks, " ");
}
};
class QuackExtension : public ParserExtension {
public:
QuackExtension() {
parse_function = QuackParseFunction;
plan_function = QuackPlanFunction;
parser_override = QuackParser;
}
static ParserExtensionParseResult QuackParseFunction(ParserExtensionInfo *info, const string &query) {
auto lcase = StringUtil::Lower(query);
if (!StringUtil::Contains(lcase, "quack")) {
// quack not found!?
if (StringUtil::Contains(lcase, "quac")) {
// use our error
return ParserExtensionParseResult("Did you mean... QUACK!?");
}
// use original error
return ParserExtensionParseResult();
}
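// split the query on the keyword "quack"; after trimming, the only residue allowed between quacks is a bare semicolon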
auto splits = StringUtil::Split(lcase, "quack");
for (auto &split : splits) {
StringUtil::Trim(split);
if (!split.empty()) {
// we only accept quacks here
if (StringUtil::CIEquals(split, ";")) {
continue;
}
return ParserExtensionParseResult("This is not a quack: " + split);
}
}
// QUACK
return ParserExtensionParseResult(make_uniq<QuackExtensionData>(splits.size()));
}
static ParserExtensionPlanResult QuackPlanFunction(ParserExtensionInfo *info, ClientContext &context,
duckdb::unique_ptr<ParserExtensionParseData> parse_data) {
auto &quack_data = (QuackExtensionData &)*parse_data;
ParserExtensionPlanResult result;
result.function = QuackFunction();
result.parameters.push_back(Value::BIGINT(quack_data.number_of_quacks));
result.requires_valid_transaction = false;
result.return_type = StatementReturnType::QUERY_RESULT;
return result;
}
static ParserOverrideResult QuackParser(ParserExtensionInfo *info, const string &query) {
vector<string> queries = StringUtil::Split(query, ";");
vector<unique_ptr<SQLStatement>> statements;
for (const auto &query_input : queries) {
if (StringUtil::CIEquals(query_input, "override")) {
auto select_node = make_uniq<SelectNode>();
select_node->select_list.push_back(
make_uniq<ConstantExpression>(Value("The DuckDB parser has been overridden")));
select_node->from_table = make_uniq<EmptyTableRef>();
auto select_statement = make_uniq<SelectStatement>();
select_statement->node = std::move(select_node);
statements.push_back(std::move(select_statement));
}
if (StringUtil::CIEquals(query_input, "over")) {
auto exception = ParserException("Parser overridden, query equaled \"over\" but not \"override\"");
return ParserOverrideResult(exception);
}
}
if (statements.empty()) {
return ParserOverrideResult();
}
return ParserOverrideResult(std::move(statements));
}
};
static set<string> test_loaded_extension_list;
class QuackLoadExtension : public ExtensionCallback {
void OnExtensionLoaded(DatabaseInstance &db, const string &name) override {
test_loaded_extension_list.insert(name);
}
};
static inline void LoadedExtensionsFunction(DataChunk &args, ExpressionState &state, Vector &result) {
string result_str;
for (auto &ext : test_loaded_extension_list) {
if (!result_str.empty()) {
result_str += ", ";
}
result_str += ext;
}
result.Reference(Value(result_str));
}
//===--------------------------------------------------------------------===//
// Bounded type
//===--------------------------------------------------------------------===//
struct BoundedType {
static LogicalType Bind(const BindLogicalTypeInput &input) {
auto &modifiers = input.modifiers;
if (modifiers.size() != 1) {
throw BinderException("BOUNDED type must have one modifier");
}
if (modifiers[0].type() != LogicalType::INTEGER) {
throw BinderException("BOUNDED type modifier must be integer");
}
if (modifiers[0].IsNull()) {
throw BinderException("BOUNDED type modifier cannot be NULL");
}
auto bound_val = modifiers[0].GetValue<int32_t>();
return Get(bound_val);
}
static LogicalType Get(int32_t max_val) {
auto type = LogicalType(LogicalTypeId::INTEGER);
type.SetAlias("BOUNDED");
auto info = make_uniq<ExtensionTypeInfo>();
info->modifiers.emplace_back(Value::INTEGER(max_val));
type.SetExtensionInfo(std::move(info));
return type;
}
static LogicalType GetDefault() {
auto type = LogicalType(LogicalTypeId::INTEGER);
type.SetAlias("BOUNDED");
return type;
}
static int32_t GetMaxValue(const LogicalType &type) {
if (!type.HasExtensionInfo()) {
throw InvalidInputException("BOUNDED type must have a max value");
}
auto &mods = type.GetExtensionInfo()->modifiers;
if (mods[0].value.IsNull()) {
throw InvalidInputException("BOUNDED type must have a max value");
}
return mods[0].value.GetValue<int32_t>();
}
};
static void BoundedMaxFunc(DataChunk &args, ExpressionState &state, Vector &result) {
result.Reference(BoundedType::GetMaxValue(args.data[0].GetType()));
}
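// bind: pin the argument to the caller's concrete BOUNDED(n) instance so BoundedMaxFunc can read the modifier from the argument type at execution time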
static unique_ptr<FunctionData> BoundedMaxBind(ClientContext &context, ScalarFunction &bound_function,
vector<unique_ptr<Expression>> &arguments) {
if (arguments[0]->return_type == BoundedType::GetDefault()) {
bound_function.arguments[0] = arguments[0]->return_type;
} else {
throw BinderException("bounded_max expects a BOUNDED type");
}
return nullptr;
}
static void BoundedAddFunc(DataChunk &args, ExpressionState &state, Vector &result) {
auto &left_vector = args.data[0];
auto &right_vector = args.data[1];
const auto count = args.size();
BinaryExecutor::Execute<int32_t, int32_t, int32_t>(left_vector, right_vector, result, count,
[&](int32_t left, int32_t right) { return left + right; });
}
static unique_ptr<FunctionData> BoundedAddBind(ClientContext &context, ScalarFunction &bound_function,
vector<unique_ptr<Expression>> &arguments) {
if (BoundedType::GetDefault() == arguments[0]->return_type &&
BoundedType::GetDefault() == arguments[1]->return_type) {
auto left_max_val = BoundedType::GetMaxValue(arguments[0]->return_type);
auto right_max_val = BoundedType::GetMaxValue(arguments[1]->return_type);
auto new_max_val = left_max_val + right_max_val;
bound_function.arguments[0] = arguments[0]->return_type;
bound_function.arguments[1] = arguments[1]->return_type;
bound_function.return_type = BoundedType::Get(new_max_val);
} else {
throw BinderException("bounded_add expects two BOUNDED types");
}
return nullptr;
}
struct BoundedFunctionData : public FunctionData {
int32_t max_val;
unique_ptr<FunctionData> Copy() const override {
auto copy = make_uniq<BoundedFunctionData>();
copy->max_val = max_val;
return std::move(copy);
}
bool Equals(const FunctionData &other_p) const override {
auto &other = other_p.Cast<BoundedFunctionData>();
return max_val == other.max_val;
}
};
static unique_ptr<FunctionData> BoundedInvertBind(ClientContext &context, ScalarFunction &bound_function,
vector<unique_ptr<Expression>> &arguments) {
if (arguments[0]->return_type == BoundedType::GetDefault()) {
bound_function.arguments[0] = arguments[0]->return_type;
bound_function.return_type = arguments[0]->return_type;
} else {
throw BinderException("bounded_invert expects a BOUNDED type");
}
auto result = make_uniq<BoundedFunctionData>();
result->max_val = BoundedType::GetMaxValue(bound_function.return_type);
return std::move(result);
}
static void BoundedInvertFunc(DataChunk &args, ExpressionState &state, Vector &result) {
auto &source_vector = args.data[0];
const auto count = args.size();
auto result_type = result.GetType();
auto output_max_val = BoundedType::GetMaxValue(result_type);
UnaryExecutor::Execute<int32_t, int32_t>(source_vector, result, count,
[&](int32_t input) { return std::min(-input, output_max_val); });
}
static void BoundedEvenFunc(DataChunk &args, ExpressionState &state, Vector &result) {
auto &source_vector = args.data[0];
const auto count = args.size();
UnaryExecutor::Execute<int32_t, bool>(source_vector, result, count, [&](int32_t input) { return input % 2 == 0; });
}
static void BoundedToAsciiFunc(DataChunk &args, ExpressionState &state, Vector &result) {
auto &source_vector = args.data[0];
const auto count = args.size();
UnaryExecutor::Execute<int32_t, string_t>(source_vector, result, count, [&](int32_t input) {
if (input < 0) {
throw NotImplementedException("Negative values not supported");
}
string s;
s.push_back(static_cast<char>(input));
return StringVector::AddString(result, s);
});
}
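// casting BOUNDED(n) to BOUNDED(m) is a free reinterpret as long as the bound does not shrink; narrowing casts throw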
static bool BoundedToBoundedCast(Vector &source, Vector &result, idx_t count, CastParameters &parameters) {
auto input_max_val = BoundedType::GetMaxValue(source.GetType());
auto output_max_val = BoundedType::GetMaxValue(result.GetType());
if (input_max_val <= output_max_val) {
result.Reinterpret(source);
return true;
} else {
throw ConversionException(source.GetType(), result.GetType());
}
}
static bool IntToBoundedCast(Vector &source, Vector &result, idx_t count, CastParameters &parameters) {
auto &ty = result.GetType();
auto output_max_val = BoundedType::GetMaxValue(ty);
UnaryExecutor::Execute<int32_t, int32_t>(source, result, count, [&](int32_t input) {
if (input > output_max_val) {
throw ConversionException(StringUtil::Format("Value %s exceeds max value of bounded type (%s)",
to_string(input), to_string(output_max_val)));
}
return input;
});
return true;
}
//===--------------------------------------------------------------------===//
// MINMAX type
//===--------------------------------------------------------------------===//
// This is like the BOUNDED type, except it has a custom bind_modifiers function
// to verify that the range is valid
struct MinMaxType {
static LogicalType Bind(const BindLogicalTypeInput &input) {
auto &modifiers = input.modifiers;
if (modifiers.size() != 2) {
throw BinderException("MINMAX type must have two modifiers");
}
if (modifiers[0].type() != LogicalType::INTEGER || modifiers[1].type() != LogicalType::INTEGER) {
throw BinderException("MINMAX type modifiers must be integers");
}
if (modifiers[0].IsNull() || modifiers[1].IsNull()) {
throw BinderException("MINMAX type modifiers cannot be NULL");
}
const auto min_val = modifiers[0].GetValue<int32_t>();
const auto max_val = modifiers[1].GetValue<int32_t>();
if (min_val >= max_val) {
throw BinderException("MINMAX type min value must be less than max value");
}
auto type = LogicalType(LogicalTypeId::INTEGER);
type.SetAlias("MINMAX");
auto info = make_uniq<ExtensionTypeInfo>();
info->modifiers.emplace_back(Value::INTEGER(min_val));
info->modifiers.emplace_back(Value::INTEGER(max_val));
type.SetExtensionInfo(std::move(info));
return type;
}
static int32_t GetMinValue(const LogicalType &type) {
D_ASSERT(type.HasExtensionInfo());
auto &mods = type.GetExtensionInfo()->modifiers;
return mods[0].value.GetValue<int32_t>();
}
static int32_t GetMaxValue(const LogicalType &type) {
D_ASSERT(type.HasExtensionInfo());
auto &mods = type.GetExtensionInfo()->modifiers;
return mods[1].value.GetValue<int32_t>();
}
static LogicalType Get(int32_t min_val, int32_t max_val) {
auto type = LogicalType(LogicalTypeId::INTEGER);
type.SetAlias("MINMAX");
auto info = make_uniq<ExtensionTypeInfo>();
info->modifiers.emplace_back(Value::INTEGER(min_val));
info->modifiers.emplace_back(Value::INTEGER(max_val));
type.SetExtensionInfo(std::move(info));
return type;
}
static LogicalType GetDefault() {
auto type = LogicalType(LogicalTypeId::INTEGER);
type.SetAlias("MINMAX");
return type;
}
};
static bool IntToMinMaxCast(Vector &source, Vector &result, idx_t count, CastParameters &parameters) {
auto &ty = result.GetType();
auto min_val = MinMaxType::GetMinValue(ty);
auto max_val = MinMaxType::GetMaxValue(ty);
UnaryExecutor::Execute<int32_t, int32_t>(source, result, count, [&](int32_t input) {
if (input < min_val || input > max_val) {
throw ConversionException(StringUtil::Format("Value %s is outside of range [%s,%s]", to_string(input),
to_string(min_val), to_string(max_val)));
}
return input;
});
return true;
}
static void MinMaxRangeFunc(DataChunk &args, ExpressionState &state, Vector &result) {
auto &ty = args.data[0].GetType();
auto min_val = MinMaxType::GetMinValue(ty);
auto max_val = MinMaxType::GetMaxValue(ty);
result.Reference(Value::INTEGER(max_val - min_val));
}
//===--------------------------------------------------------------------===//
// Extension load + setup
//===--------------------------------------------------------------------===//
extern "C" {
DUCKDB_CPP_EXTENSION_ENTRY(loadable_extension_demo, loader) {
CreateScalarFunctionInfo hello_alias_info(
ScalarFunction("test_alias_hello", {}, LogicalType::VARCHAR, TestAliasHello));
auto &db = loader.GetDatabaseInstance();
// create a scalar function
Connection con(db);
auto &client_context = *con.context;
auto &catalog = Catalog::GetSystemCatalog(client_context);
con.BeginTransaction();
con.CreateScalarFunction<int32_t, string_t>("hello", {LogicalType(LogicalTypeId::VARCHAR)},
LogicalType(LogicalTypeId::INTEGER), &hello_fun);
catalog.CreateFunction(client_context, hello_alias_info);
// Add alias POINT type
string alias_name = "POINT";
child_list_t<LogicalType> child_types;
child_types.push_back(make_pair("x", LogicalType::INTEGER));
child_types.push_back(make_pair("y", LogicalType::INTEGER));
auto alias_info = make_uniq<CreateTypeInfo>();
alias_info->internal = true;
alias_info->name = alias_name;
LogicalType target_type = LogicalType::STRUCT(child_types);
target_type.SetAlias(alias_name);
alias_info->type = target_type;
auto type_entry = catalog.CreateType(client_context, *alias_info);
type_entry->tags["ext:name"] = "loadable_extension_demo";
type_entry->tags["ext:author"] = "DuckDB Labs";
// Function add point
ScalarFunction add_point_func("add_point", {target_type, target_type}, target_type, AddPointFunction);
CreateScalarFunctionInfo add_point_info(add_point_func);
auto add_point_entry = catalog.CreateFunction(client_context, add_point_info);
add_point_entry->tags["ext:name"] = "loadable_extension_demo";
add_point_entry->tags["ext:author"] = "DuckDB Labs";
// Function sub point
ScalarFunction sub_point_func("sub_point", {target_type, target_type}, target_type, SubPointFunction);
CreateScalarFunctionInfo sub_point_info(sub_point_func);
auto sub_point_entry = catalog.CreateFunction(client_context, sub_point_info);
sub_point_entry->tags["ext:name"] = "loadable_extension_demo";
sub_point_entry->tags["ext:author"] = "DuckDB Labs";
// Function loaded_extensions
ScalarFunction loaded_extensions("loaded_extensions", {}, LogicalType::VARCHAR, LoadedExtensionsFunction);
CreateScalarFunctionInfo loaded_extensions_info(loaded_extensions);
catalog.CreateFunction(client_context, loaded_extensions_info);
// Quack function
QuackFunction quack_function;
CreateTableFunctionInfo quack_info(quack_function);
catalog.CreateTableFunction(client_context, quack_info);
con.Commit();
// add a parser extension
auto &config = DBConfig::GetConfig(db);
config.parser_extensions.push_back(QuackExtension());
config.extension_callbacks.push_back(make_uniq<QuackLoadExtension>());
// Bounded type
auto bounded_type = BoundedType::GetDefault();
loader.RegisterType("BOUNDED", bounded_type, BoundedType::Bind);
// Example of function inspecting the type property
ScalarFunction bounded_max("bounded_max", {bounded_type}, LogicalType::INTEGER, BoundedMaxFunc, BoundedMaxBind);
loader.RegisterFunction(bounded_max);
// Example of function inspecting the type property and returning the same type
ScalarFunction bounded_invert("bounded_invert", {bounded_type}, bounded_type, BoundedInvertFunc, BoundedInvertBind);
// bounded_invert.serialize = BoundedReturnSerialize;
// bounded_invert.deserialize = BoundedReturnDeserialize;
loader.RegisterFunction(bounded_invert);
// Example of function inspecting the type property of both arguments and returning a new type
ScalarFunction bounded_add("bounded_add", {bounded_type, bounded_type}, bounded_type, BoundedAddFunc,
BoundedAddBind);
loader.RegisterFunction(bounded_add);
// Example of function that is generic over the type property (the bound is not important)
ScalarFunction bounded_even("bounded_even", {bounded_type}, LogicalType::BOOLEAN, BoundedEvenFunc);
loader.RegisterFunction(bounded_even);
// Example of function that is specialized over type property
auto bounded_specialized_type = BoundedType::Get(0xFF);
ScalarFunction bounded_to_ascii("bounded_ascii", {bounded_specialized_type}, LogicalType::VARCHAR,
BoundedToAsciiFunc);
loader.RegisterFunction(bounded_to_ascii);
// Enable explicit casting to our specialized type
loader.RegisterCastFunction(bounded_type, bounded_specialized_type, BoundCastInfo(BoundedToBoundedCast), 0);
// Casts
loader.RegisterCastFunction(LogicalType::INTEGER, bounded_type, BoundCastInfo(IntToBoundedCast), 0);
// MinMax Type
auto minmax_type = MinMaxType::GetDefault();
loader.RegisterType("MINMAX", minmax_type, MinMaxType::Bind);
loader.RegisterCastFunction(LogicalType::INTEGER, minmax_type, BoundCastInfo(IntToMinMaxCast), 0);
loader.RegisterFunction(ScalarFunction("minmax_range", {minmax_type}, LogicalType::INTEGER, MinMaxRangeFunc));
}
}


test/extension/loadable_extension_optimizer_demo.cpp
@@ -0,0 +1,164 @@
#include "duckdb.hpp"
#include "duckdb/common/types/column/column_data_collection.hpp"
#include "duckdb/optimizer/optimizer_extension.hpp"
#include "duckdb/planner/operator/logical_column_data_get.hpp"
#include "duckdb/planner/operator/logical_get.hpp"
#include "duckdb/common/serializer/binary_serializer.hpp"
#include "duckdb/common/serializer/binary_deserializer.hpp"
#include "duckdb/common/serializer/memory_stream.hpp"
using namespace duckdb;
// whatever
#include <signal.h>
#include <sys/mman.h>
#include <unistd.h>
#include <stdio.h>
#include <netdb.h>
#include <netinet/in.h>
#include <stdlib.h>
#include <string.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <arpa/inet.h>
#ifdef __MVS__
#define _XOPEN_SOURCE_EXTENDED 1
#include <strings.h>
#endif
class WaggleExtension : public OptimizerExtension {
public:
WaggleExtension() {
optimize_function = WaggleOptimizeFunction;
}
static bool HasParquetScan(LogicalOperator &op) {
if (op.type == LogicalOperatorType::LOGICAL_GET) {
auto &get = op.Cast<LogicalGet>();
return get.function.name == "parquet_scan";
}
for (auto &child : op.children) {
if (HasParquetScan(*child)) {
return true;
}
}
return false;
}
static void WriteChecked(int sockfd, void *data, idx_t write_size) {
auto bytes_written = write(sockfd, data, write_size);
if (bytes_written < 0) {
throw InternalException("Failed to write \"%lld\" bytes to socket: %s", write_size, strerror(errno));
}
if (idx_t(bytes_written) != write_size) {
throw InternalException("Failed to write \"%llu\" bytes from socket - wrote %llu instead", write_size,
bytes_written);
}
}
static void ReadChecked(int sockfd, void *data, idx_t read_size) {
auto bytes_read = read(sockfd, data, read_size);
if (bytes_read < 0) {
throw InternalException("Failed to read \"%lld\" bytes from socket: %s", read_size, strerror(errno));
}
if (idx_t(bytes_read) != read_size) {
throw InternalException("Failed to read \"%llu\" bytes from socket - read %llu instead", read_size,
bytes_read);
}
}
static void WaggleOptimizeFunction(OptimizerExtensionInput &input, duckdb::unique_ptr<LogicalOperator> &plan) {
if (!HasParquetScan(*plan)) {
return;
}
// rpc
auto &context = input.context;
Value host, port;
if (!context.TryGetCurrentSetting("waggle_location_host", host) ||
!context.TryGetCurrentSetting("waggle_location_port", port)) {
throw InvalidInputException("Need the parameters damnit");
}
// socket create and verification
auto sockfd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (sockfd == -1) {
throw InternalException("Failed to create socket");
}
struct sockaddr_in servaddr;
bzero(&servaddr, sizeof(servaddr));
// assign IP, PORT
servaddr.sin_family = AF_INET;
auto host_string = host.ToString();
servaddr.sin_addr.s_addr = inet_addr(host_string.c_str());
servaddr.sin_port = htons(port.GetValue<int32_t>());
// connect the client socket to server socket
if (connect(sockfd, (struct sockaddr *)&servaddr, sizeof(servaddr)) != 0) {
throw IOException("Failed to connect socket %s", string(strerror(errno)));
}
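// wire protocol (mirrored by test_remote_optimizer.cpp): send [plan_len][serialized_plan], then receive
// [n_chunks] followed by [chunk_len][chunk_bytes] per result chunk; a final plan_len of 0 ends the session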
Allocator allocator;
MemoryStream stream(allocator);
BinarySerializer serializer(stream);
serializer.Begin();
plan->Serialize(serializer);
serializer.End();
auto data = stream.GetData();
idx_t len = stream.GetPosition();
WriteChecked(sockfd, &len, sizeof(idx_t));
WriteChecked(sockfd, data, len);
auto chunk_collection = make_uniq<ColumnDataCollection>(Allocator::DefaultAllocator());
idx_t n_chunks;
ReadChecked(sockfd, &n_chunks, sizeof(idx_t));
for (idx_t i = 0; i < n_chunks; i++) {
idx_t chunk_len;
ReadChecked(sockfd, &chunk_len, sizeof(idx_t));
auto buffer = malloc(chunk_len);
D_ASSERT(buffer);
ReadChecked(sockfd, buffer, chunk_len);
MemoryStream source(data_ptr_cast(buffer), chunk_len);
DataChunk chunk;
BinaryDeserializer deserializer(source);
deserializer.Begin();
chunk.Deserialize(deserializer);
deserializer.End();
chunk_collection->Initialize(chunk.GetTypes());
chunk_collection->Append(chunk);
free(buffer);
}
auto types = chunk_collection->Types();
plan = make_uniq<LogicalColumnDataGet>(0, types, std::move(chunk_collection));
len = 0;
(void)len;
WriteChecked(sockfd, &len, sizeof(idx_t));
// close the socket
close(sockfd);
}
};
//===--------------------------------------------------------------------===//
// Extension load + setup
//===--------------------------------------------------------------------===//
extern "C" {
DUCKDB_CPP_EXTENSION_ENTRY(loadable_extension_optimizer_demo, loader) {
auto &db = loader.GetDatabaseInstance();
Connection con(db);
// add a parser extension
auto &config = DBConfig::GetConfig(db);
config.optimizer_extensions.push_back(WaggleExtension());
config.AddExtensionOption("waggle_location_host", "host for remote callback", LogicalType::VARCHAR);
config.AddExtensionOption("waggle_location_port", "port for remote callback", LogicalType::INTEGER);
}
}


@@ -0,0 +1,64 @@
# name: test/extension/loadable_parser_override.test
# description: Try loading a parser override with an extension
# group: [extension]
require skip_reload
statement error
override
----
Parser Error: syntax error at or near "override"
statement ok
LOAD '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
statement error
set allow_parser_override_extension=doesnotexist;
----
Invalid Input Error: Unrecognized value for parser override setting. Valid options are: "default", "fallback", "strict".
# Default behavior is not using the parser override
statement error
override
----
Parser Error: syntax error at or near "override"
# Fallback behavior tries the parser override and, if it errors, falls back to the default parser
statement ok
set allow_parser_override_extension=fallback;
# The QuackParser can return a valid SQLStatement for this query
query I
override
----
The DuckDB parser has been overridden
# The parser override cannot return a valid SQLStatement for this query; the default parser also errors
statement error
over
----
Parser Error: syntax error at or near "over"
query I
SELECT 1;
----
1
statement ok
set allow_parser_override_extension=strict;
query I
override
----
The DuckDB parser has been overridden
statement error
over
----
Parser Error: Parser override could not parse this query. (Original error: Parser overridden, query equaled "over" but not "override")
statement error
SELECT 1;
----
<REGEX>:.*Parser Error: Parser override failed.*


@@ -0,0 +1,112 @@
# name: test/extension/test_alias_point.test
# description: Enable Test alias for point.
# group: [extension]
require skip_reload
require notmingw
require allow_unsigned_extensions
statement ok
PRAGMA enable_verification
statement ok
LOAD '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
statement ok
CREATE TABLE points(i INTEGER, point POINT, pt STRUCT(i INTEGER, j INTEGER));
statement ok
INSERT INTO points VALUES (2, ({'x': 1, 'y': 2}), ({'i': 3, 'j': 1}));
statement ok
INSERT INTO points VALUES (3, ({'x': 2, 'y': 3}), ({'i': 5, 'j': 4}));
query III
SELECT * FROM points;
----
2 {'x': 1, 'y': 2} {'i': 3, 'j': 1}
3 {'x': 2, 'y': 3} {'i': 5, 'j': 4}
query I
SELECT add_point(({'x': 2, 'y': 3})::POINT, ({'x': 3, 'y': 4})::POINT)
----
{'x': 5, 'y': 7}
query I
SELECT sub_point(({'x': 2, 'y': 3})::POINT, ({'x': 3, 'y': 4})::POINT)
----
{'x': -1, 'y': -1}
statement error
SELECT add_point(pt, pt) from points;
----
<REGEX>:Binder Error:.*No function matches.*
statement error
SELECT sub_point(pt, pt) from points;
----
<REGEX>:Binder Error:.*No function matches.*
query I
SELECT add_point(point, point) from points;
----
{'x': 2, 'y': 4}
{'x': 4, 'y': 6}
query I
SELECT sub_point(point, point) from points;
----
{'x': 0, 'y': 0}
{'x': 0, 'y': 0}
query I
SELECT add_point(point, ({'x': 3, 'y': 4})::POINT) from points;
----
{'x': 4, 'y': 6}
{'x': 5, 'y': 7}
query I
SELECT sub_point(point, ({'x': 3, 'y': 4})::POINT) from points;
----
{'x': -2, 'y': -2}
{'x': -1, 'y': -1}
statement ok
INSERT INTO points VALUES (4, NULL, NULL);
statement ok
INSERT INTO points VALUES (5, ({'x': 54, 'y': 23}), ({'i': 10, 'j': 100}));
query I
SELECT add_point(point, point) from points;
----
{'x': 2, 'y': 4}
{'x': 4, 'y': 6}
NULL
{'x': 108, 'y': 46}
query I
SELECT sub_point(point, point) from points;
----
{'x': 0, 'y': 0}
{'x': 0, 'y': 0}
NULL
{'x': 0, 'y': 0}
query I
SELECT add_point(point, NULL::POINT) from points;
----
NULL
NULL
NULL
NULL
query I
SELECT sub_point(point, NULL::POINT) from points;
----
NULL
NULL
NULL
NULL


@@ -0,0 +1,195 @@
# name: test/extension/test_custom_type_modifier.test_slow
# description: Test custom type-level metadata.
# group: [extension]
require skip_reload
require notmingw
statement ok
PRAGMA enable_verification
statement ok
LOAD '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
statement ok
CREATE TABLE t1 (i BOUNDED(200));
statement ok
INSERT INTO t1 VALUES (97), (98), (99);
# Example of function ignoring the type property (no cast needed)
query I
SELECT bounded_even(i) FROM t1 ORDER BY 1;
----
false
false
true
query II
EXPLAIN SELECT bounded_even(i) FROM t1 ORDER BY 1;
----
physical_plan <!REGEX>:.*CAST.*
# Example of function inspecting the type property
query I
SELECT bounded_max(i) FROM t1;
----
200
200
200
# Example of function inspecting the type property to return value of the same type
query II
SELECT bounded_invert(i) as b, typeof(b) FROM t1 ORDER BY 1;
----
-99 BOUNDED(200)
-98 BOUNDED(200)
-97 BOUNDED(200)
statement ok
CREATE TABLE t2 (i BOUNDED(500));
statement ok
INSERT INTO t2 VALUES (100), (500);
# Example of function inspecting both arguments type property to return a new type
query II
SELECT bounded_add(t1.i, t2.i) as s, typeof(s) FROM t1, t2 ORDER BY 1;
----
197 BOUNDED(700)
198 BOUNDED(700)
199 BOUNDED(700)
597 BOUNDED(700)
598 BOUNDED(700)
599 BOUNDED(700)
# Example of function that is specialized by the type property
query II
EXPLAIN SELECT bounded_ascii(i) FROM t1 ORDER BY 1;
----
physical_plan <REGEX>:.*CAST.*
query I
SELECT bounded_ascii(i) FROM t1 ORDER BY 1;
----
a
b
c
statement error
SELECT bounded_ascii(i) FROM t2 ORDER BY 1;
----
Conversion Error: Type BOUNDED(500) can't be cast as BOUNDED(255)
query I
SELECT bounded_ascii(i::INTEGER::BOUNDED(255)) FROM t2 WHERE i < 255;
----
d
# Test that we can't apply too many modifiers
statement error
CREATE TABLE t3 (i BOUNDED(200, 300));
----
Binder Error: BOUNDED type must have one modifier
statement ok
CREATE TYPE user_type AS INTEGER
# We can't apply a modifier to a type without modifiers
statement error
CREATE TABLE t4 (i user_type(NULL));
----
Binder Error: Type 'user_type' does not take any type modifiers
statement error
SELECT 1::BOUNDED(NULL)
----
BOUNDED type modifier must be integer
statement error
SELECT 1::BOUNDED(900000000000000000)
----
BOUNDED type modifier must be integer
# MinMax Type
# This is similar to bounded, except it uses a custom bind function to enforce the min/max values at construction time
statement ok
CREATE TABLE t5 (i MINMAX(200, 300));
statement ok
INSERT INTO t5 VALUES (200), (300);
statement error
INSERT INTO t5 VALUES (199);
----
Conversion Error: Value 199 is outside of range [200,300]
statement error
INSERT INTO t5 VALUES (301);
----
Conversion Error: Value 301 is outside of range [200,300]
statement error
SELECT 10::MINMAX(0, 1);
----
Conversion Error: Value 10 is outside of range [0,1]
statement error
SELECT 10::MINMAX(1337);
----
Binder Error: MINMAX type must have two modifiers
statement error
SELECT 10::MINMAX('foob', 10);
----
Binder Error: MINMAX type modifiers must be integers
statement error
SELECT 10::MINMAX(10, NULL::INTEGER);
----
Parser Error: Expected a constant as type modifier
statement error
SELECT 10::MINMAX(15, 10);
----
Binder Error: MINMAX type min value must be less than max value
# Test with creating an index
statement ok
CREATE TABLE minmax_table (i MINMAX(0, 100));
statement ok
INSERT INTO minmax_table VALUES (0), (10), (20), (30), (40), (50), (60), (70), (80), (90), (100);
statement ok
CREATE INDEX minmax_index ON minmax_table(i);
query I
SELECT * FROM minmax_table WHERE i = 50;
----
50
query II
EXPLAIN ANALYZE SELECT * FROM minmax_table WHERE i = 50;
----
analyzed_plan <REGEX>:.*Type: Index Scan.*
# Check the typename in the catalog
query I
SELECT parameter_types from duckdb_functions() where function_name = 'minmax_range';
----
[MINMAX]
query I
SELECT parameter_types from duckdb_functions() where function_name = 'bounded_ascii';
----
['BOUNDED(255)']
query I
SELECT parameter_types from duckdb_functions() where function_name = 'bounded_add';
----
[BOUNDED, BOUNDED]


@@ -0,0 +1,20 @@
# name: test/extension/test_custom_type_modifier_cast.test
# description: Test custom type-level metadata.
# group: [extension]
require skip_reload
require notmingw
require allow_unsigned_extensions
statement ok
PRAGMA enable_verification
statement ok
LOAD '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
statement error
SELECT 1::MINMAX(500, 1000);
----
Conversion Error: Value 1 is outside of range [500,1000]


@@ -0,0 +1,19 @@
# name: test/extension/test_loadable_optimizer.test
# description: Test loading the loadable optimizer demo extension.
# group: [extension]
require skip_reload
require notmingw
mode skip
# FIXME: not going to do this now
statement ok
PRAGMA enable_verification
statement ok
LOAD '__BUILD_DIRECTORY__/test/extension/loadable_extension_optimizer_demo.duckdb_extension';
statement ok
select 42


test/extension/test_remote_optimizer.cpp
@@ -0,0 +1,158 @@
#include "catch.hpp"
#include "test_helpers.hpp"
#include "duckdb/main/appender.hpp"
#include "duckdb/common/serializer/memory_stream.hpp"
#include "duckdb/parser/statement/logical_plan_statement.hpp"
#include "duckdb/common/serializer/binary_serializer.hpp"
#include "duckdb/common/serializer/binary_deserializer.hpp"
// whatever
#include <signal.h>
#include <sys/mman.h>
#include <unistd.h>
#include <stdio.h>
#include <netdb.h>
#include <netinet/in.h>
#include <stdlib.h>
#include <string.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <arpa/inet.h>
#ifdef __MVS__
#define _XOPEN_SOURCE_EXTENDED 1
#include <strings.h>
#endif
using namespace duckdb;
using namespace std;
TEST_CASE("Test using a remote optimizer pass in case thats important to someone", "[extension]") {
pid_t pid = fork();
int port = 4242;
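// the child acts as a one-shot plan server for WaggleExtension: it deserializes each plan it receives,
// executes it, and streams the result chunks back; the parent runs a query through the optimizer extension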
if (pid == 0) { // child process
// sockets, man, how do they work?!
struct sockaddr_in servaddr, cli;
auto sockfd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (sockfd == -1) {
printf("Failed to set up socket in child process: %s", strerror(errno));
exit(1);
}
bzero(&servaddr, sizeof(servaddr));
servaddr.sin_family = AF_INET;
servaddr.sin_addr.s_addr = inet_addr("127.0.0.1");
servaddr.sin_port = htons(port);
auto res = ::bind(sockfd, (struct sockaddr *)&servaddr, sizeof(servaddr));
if (res != 0) {
printf("Failed to bind socket in child process: %s", strerror(errno));
exit(1);
}
res = listen(sockfd, 5);
if (res != 0) {
printf("Failed to listen to socked in child process: %s", strerror(errno));
exit(1);
}
socklen_t len = sizeof(cli);
auto connfd = accept(sockfd, (struct sockaddr *)&cli, &len);
if (connfd < 0) {
printf("Failed to set up socket in child process: %s", strerror(errno));
exit(1);
}
DBConfig config;
config.options.allow_unsigned_extensions = true;
DuckDB db2(nullptr, &config);
Connection con2(db2);
auto load_parquet = con2.Query("LOAD parquet");
if (load_parquet->HasError()) {
printf("Failed to load Parquet in child process: %s", load_parquet->GetError().c_str());
exit(1);
}
while (true) {
idx_t bytes;
REQUIRE(read(connfd, &bytes, sizeof(idx_t)) == sizeof(idx_t));
if (bytes == 0) {
break;
}
auto buffer = malloc(bytes);
REQUIRE(buffer);
REQUIRE(read(connfd, buffer, bytes) == ssize_t(bytes));
// Non-owning stream
MemoryStream stream(data_ptr_cast(buffer), bytes);
con2.BeginTransaction();
BinaryDeserializer deserializer(stream);
deserializer.Set<ClientContext &>(*con2.context);
deserializer.Begin();
auto plan = LogicalOperator::Deserialize(deserializer);
deserializer.End();
plan->ResolveOperatorTypes();
con2.Commit();
auto statement = make_uniq<LogicalPlanStatement>(std::move(plan));
auto result = con2.Query(std::move(statement));
auto &collection = result->Collection();
idx_t num_chunks = collection.ChunkCount();
REQUIRE(write(connfd, &num_chunks, sizeof(idx_t)) == sizeof(idx_t));
for (auto &chunk : collection.Chunks()) {
Allocator allocator;
MemoryStream target(allocator);
BinarySerializer serializer(target);
serializer.Begin();
chunk.Serialize(serializer);
serializer.End();
auto data = target.GetData();
idx_t len = target.GetPosition();
REQUIRE(write(connfd, &len, sizeof(idx_t)) == sizeof(idx_t));
REQUIRE(write(connfd, data, len) == ssize_t(len));
}
}
exit(0);
} else if (pid > 0) { // parent process
DBConfig config;
config.options.allow_unsigned_extensions = true;
DuckDB db1(nullptr, &config);
Connection con1(db1);
auto load_parquet = con1.Query("LOAD 'parquet'");
if (load_parquet->HasError()) {
// Do not execute the test.
if (kill(pid, SIGKILL) != 0) {
FAIL();
}
return;
}
REQUIRE_NO_FAIL(con1.Query("LOAD '" DUCKDB_BUILD_DIRECTORY
"/test/extension/loadable_extension_optimizer_demo.duckdb_extension'"));
REQUIRE_NO_FAIL(con1.Query("SET waggle_location_host='127.0.0.1'"));
REQUIRE_NO_FAIL(con1.Query("SET waggle_location_port=4242"));
usleep(10000); // need to wait a bit till socket is up
// check if the child PID is still there
if (kill(pid, 0) != 0) {
// child is gone!
printf("Failed to execute remote optimizer test - child exited unexpectedly");
FAIL();
}
REQUIRE_NO_FAIL(con1.Query(
"SELECT first_name FROM PARQUET_SCAN('data/parquet-testing/userdata1.parquet') GROUP BY first_name"));
if (kill(pid, SIGKILL) != 0) {
FAIL();
}
} else {
FAIL();
}
}


@@ -0,0 +1,26 @@
# name: test/extension/test_tags.test
# description: Test querying tagged extension items.
# group: [extension]
require skip_reload
require notmingw
require allow_unsigned_extensions
statement ok
PRAGMA enable_verification
statement ok
LOAD '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';
query II
SELECT function_name, tags['ext:author'] FROM duckdb_functions() WHERE tags['ext:name'] = 'loadable_extension_demo' ORDER BY function_name;
----
add_point DuckDB Labs
sub_point DuckDB Labs
query II
SELECT type_name, tags['ext:author'] FROM duckdb_types() WHERE tags['ext:name'] = 'loadable_extension_demo' ORDER BY type_name;
----
POINT DuckDB Labs


@@ -0,0 +1,67 @@
# name: test/extension/update_extensions.test
# description: Tests for the update extensions statement
# group: [extension]

# This test assumes icu and json to be available in the LOCAL_EXTENSION_REPO and NOT linked into duckdb statically
# -> this should be the case for our autoloading tests where we have the local_extension_repo variable set
require-env LOCAL_EXTENSION_REPO

require no_extension_autoloading "EXPECTED: Test relies on explicit INSTALL and LOAD"

statement ok
PRAGMA enable_verification

# Set the repository to the correct one
statement ok
set custom_extension_repository='${LOCAL_EXTENSION_REPO}'

# Ensure we have a clean extension directory without any preinstalled extensions
statement ok
set extension_directory='__TEST_DIR__/update_extensions'

statement error
with cte as (select 42 AS a) UPDATE EXTENSIONS
----
Providing a with clause with an UPDATE EXTENSIONS statement is not allowed

# No extensions installed -> update returns an empty list
query IIIII
UPDATE EXTENSIONS;
----

statement ok
INSTALL json

query IIIII
UPDATE EXTENSIONS;
----
json <REGEX>:.* NO_UPDATE_AVAILABLE <REGEX>:.* <REGEX>:.*

query IIIII
UPDATE EXTENSIONS (json);
----
json <REGEX>:.* NO_UPDATE_AVAILABLE <REGEX>:.* <REGEX>:.*

statement error
UPDATE EXTENSIONS (foobar);
----
Failed to update the extension 'foobar', the extension is not installed!

statement ok
INSTALL '__BUILD_DIRECTORY__/test/extension/loadable_extension_demo.duckdb_extension';

# The loadable_extension_demo is installed from a direct path; these are not considered for updating
query IIIII rowsort
UPDATE EXTENSIONS;
----
json <REGEX>:.* NO_UPDATE_AVAILABLE <REGEX>:.* <REGEX>:.*
loadable_extension_demo (empty) NOT_A_REPOSITORY default-version default-version

# Double-check duckdb_extensions()
query IIII rowsort
SELECT extension_name, extension_version, install_mode, installed_from from duckdb_extensions() where installed
----
json <REGEX>:.* REPOSITORY <REGEX>:.*
loadable_extension_demo default-version CUSTOM_PATH <REGEX>:.*loadable\_extension\_demo\.duckdb_extension
parquet <REGEX>:.* STATICALLY_LINKED <REGEX>:.*

View File

@@ -0,0 +1,391 @@
# name: test/extension/update_extensions_ci.test
# description: Tests for the update extensions statement
# group: [extension]

# NOTE: this test requires specific setup and should probably only be run through `scripts/run_extension_medata_tests.sh`
load __TEST_DIR__/update_extensions_ci.db

# This test expects a specific state, which is marked to be present through setting this env variable.
require-env RUN_EXTENSION_UPDATE_TEST

# This repo is expected to contain json and tpch, where tpch was updated from v0.0.1 to v0.0.2
require-env LOCAL_EXTENSION_REPO_UPDATED

# This repo is expected to contain the json extension built with a binary with an incorrect platform
require-env LOCAL_EXTENSION_REPO_INCORRECT_PLATFORM

# This repo is expected to contain the json extension built with a binary with an incorrect duckdb_version
require-env LOCAL_EXTENSION_REPO_INCORRECT_DUCKDB_VERSION

# This repo is expected to contain the json extension built with a binary with an incorrect version and platform
require-env LOCAL_EXTENSION_REPO_VERSION_AND_PLATFORM_INCORRECT

# This extension dir is expected to contain json, tpch, and tpcds, all at version v0.0.1, with tpcds installed
# directly rather than through a repo
require-env LOCAL_EXTENSION_DIR

# This dir holds some directly installable extensions with mismatched metadata, but also a correctly installable one
require-env DIRECT_INSTALL_DIR

# This extension dir is expected to contain tpch and tpcds, but with a corrupted tpcds metadata file
require-env LOCAL_EXTENSION_DIR_MALFORMED_INFO

# This extension dir is expected to contain tpch, but with an incorrect version recorded in its metadata file
require-env LOCAL_EXTENSION_DIR_INFO_INCORRECT_VERSION

# Address of a minio server that has the LOCAL_EXTENSION_REPO_UPDATED copied to it
require-env REMOTE_EXTENSION_REPO_UPDATED

# Direct path with version and platform, for testing http direct install
require-env REMOTE_EXTENSION_REPO_DIRECT_PATH

# Parquet is statically loaded for this test
require parquet
# We start by testing some malformed dirs
statement ok
set extension_directory='${LOCAL_EXTENSION_DIR_MALFORMED_INFO}'

# this will now throw an IOError
statement error
FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
IO Error: Failed to read info file for 'tpcds' extension

# this will now throw an IOError
statement error
UPDATE EXTENSIONS
----
IO Error: Failed to read info file for 'tpcds' extension

# let's restore by reinstalling tpcds
statement ok
FORCE INSTALL '${DIRECT_INSTALL_DIR}/tpcds.duckdb_extension';

# Things are back to normal
query IIII
SELECT extension_name, install_mode, installed_from, extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
tpcds CUSTOM_PATH ./build/extension_metadata_test_data/direct_install/tpcds.duckdb_extension v0.0.1

statement ok
load tpcds

# Same here
query IIIII
UPDATE EXTENSIONS
----
tpcds (empty) NOT_A_REPOSITORY v0.0.1 v0.0.1
restart

# Here the metadata mismatches the actually installed extension
statement ok
set extension_directory='${LOCAL_EXTENSION_DIR_INFO_INCORRECT_VERSION}'

# duckdb_extensions() only reads the metadata. No extension files are opened
query IIII
SELECT extension_name, install_mode, installed_from, extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
tpch REPOSITORY ./build/extension_metadata_test_data/repository v0.0.1

# However, when trying to load, we detect the mismatch
statement error
load tpch
----
Metadata mismatch detected when loading extension

# Recovery is done by force installing
statement ok
FORCE INSTALL tpch FROM '${LOCAL_EXTENSION_REPO_UPDATED}'

statement ok
load tpch;

restart
statement ok
set custom_extension_repository='${LOCAL_EXTENSION_REPO_UPDATED}'

statement ok
set extension_directory='${LOCAL_EXTENSION_DIR}'

query IIII rowsort
SELECT extension_name, install_mode, installed_from, extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
icu UNKNOWN (empty) (empty)
json REPOSITORY ./build/extension_metadata_test_data/repository v0.0.1
tpcds CUSTOM_PATH ./build/extension_metadata_test_data/direct_install/tpcds.duckdb_extension v0.0.1
tpch REPOSITORY ./build/extension_metadata_test_data/repository v0.0.1

query III rowsort
SELECT extension_name, install_mode, installed_from FROM duckdb_extensions() where extension_name = 'parquet'
----
parquet STATICALLY_LINKED (empty)

# Get the parquet version
query I rowsort parquet_version
SELECT extension_version FROM duckdb_extensions() where extension_name = 'parquet'
----

# ensure the parquet version matches DuckDB's source id
query I rowsort parquet_version
select source_id from pragma_version();
----

query IIIII rowsort
UPDATE EXTENSIONS;
----
icu (empty) MISSING_INSTALL_INFO (empty) (empty)
json <REGEX>:.* NO_UPDATE_AVAILABLE v0.0.1 v0.0.1
tpcds <REGEX>:.* NOT_A_REPOSITORY v0.0.1 v0.0.1
tpch <REGEX>:.* UPDATED v0.0.1 v0.0.2

# duckdb_extensions() now also shows the updated version
query IIII rowsort
SELECT extension_name, install_mode, installed_from, extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
icu UNKNOWN (empty) (empty)
json REPOSITORY ./build/extension_metadata_test_data/repository v0.0.1
tpcds CUSTOM_PATH ./build/extension_metadata_test_data/direct_install/tpcds.duckdb_extension v0.0.1
tpch REPOSITORY ./build/extension_metadata_test_data/repository v0.0.2
# Now let's restore the corrupt icu extension (it has a missing info file)
statement ok
FORCE INSTALL icu;

# Rerunning update will now show everything being up-to-date (icu was force-installed and is now v0.0.2)
query IIIII rowsort
UPDATE EXTENSIONS;
----
icu <REGEX>:.* NO_UPDATE_AVAILABLE v0.0.2 v0.0.2
json <REGEX>:.* NO_UPDATE_AVAILABLE v0.0.1 v0.0.1
tpcds <REGEX>:.* NOT_A_REPOSITORY v0.0.1 v0.0.1
tpch <REGEX>:.* NO_UPDATE_AVAILABLE v0.0.2 v0.0.2

statement ok
load json;

statement ok
load tpch;

statement ok
load tpcds;

statement ok
load icu;

# Ensure the result is still fine after loading; this will ensure the Version() call matches the encoded footer value
query IIII rowsort
SELECT extension_name, install_mode, installed_from, extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
icu REPOSITORY ./build/extension_metadata_test_data/repository v0.0.2
json REPOSITORY ./build/extension_metadata_test_data/repository v0.0.1
tpcds CUSTOM_PATH ./build/extension_metadata_test_data/direct_install/tpcds.duckdb_extension v0.0.1
tpch REPOSITORY ./build/extension_metadata_test_data/repository v0.0.2
## Try various failing installations and match their errors

statement error
FORCE INSTALL '${DIRECT_INSTALL_DIR}/json_incorrect_platform.duckdb_extension';
----
Failed to install './build/extension_metadata_test_data/direct_install/json_incorrect_platform.duckdb_extension'
The file was built for the platform 'test_platform', but we can only load extensions built for platform

statement error
FORCE INSTALL json_incorrect_platform FROM '${LOCAL_EXTENSION_REPO_INCORRECT_PLATFORM}'
----
Failed to install 'json_incorrect_platform'
The file was built for the platform 'test_platform', but we can only load extensions built for platform

statement error
FORCE INSTALL '${DIRECT_INSTALL_DIR}/json_incorrect_version.duckdb_extension';
----
Failed to install './build/extension_metadata_test_data/direct_install/json_incorrect_version.duckdb_extension'
The file was built specifically for DuckDB version 'v1337' and can only be loaded with that version of DuckDB. (this version of DuckDB is

statement error
FORCE INSTALL json_incorrect_version FROM '${LOCAL_EXTENSION_REPO_INCORRECT_DUCKDB_VERSION}';
----
Failed to install 'json_incorrect_version'
The file was built specifically for DuckDB version 'v1337' and can only be loaded with that version of DuckDB. (this version of DuckDB is

# These should print both errors
statement error
FORCE INSTALL '${DIRECT_INSTALL_DIR}/json_incorrect_version_and_platform.duckdb_extension';
----
Also, the file was built for the platform 'test_platform', but we can only load extensions built for platform

statement error
FORCE INSTALL json_incorrect_version_and_platform FROM '${LOCAL_EXTENSION_REPO_VERSION_AND_PLATFORM_INCORRECT}'
----
Also, the file was built for the platform 'test_platform', but we can only load extensions built for platform

## Now try the same for loading, this time only with the direct load syntax

statement error
LOAD '${DIRECT_INSTALL_DIR}/json_incorrect_version_and_platform.duckdb_extension';
----
Also, the file was built for the platform 'test_platform', but we can only load extensions built for platform

statement error
LOAD '${DIRECT_INSTALL_DIR}/json_incorrect_platform.duckdb_extension';
----
The file was built for the platform 'test_platform', but we can only load extensions built for platform

statement error
LOAD '${DIRECT_INSTALL_DIR}/json_incorrect_version.duckdb_extension';
----
The file was built specifically for DuckDB version 'v1337' and can only be loaded with that version of DuckDB. (this version of DuckDB is

# Note that this is the json extension with incorrect platform and version
statement error
FORCE INSTALL '${DIRECT_INSTALL_DIR}/json.duckdb_extension';
----
Also, the file was built for the platform 'test_platform', but we can only load extensions built for platform
restart

# override the default behaviour of skipping HTTP errors and connection failures: this test fails on connection issues
set ignore_error_messages

# Set extension dir to a fresh one
statement ok
set extension_directory='__TEST_DIR__/update_extensions_ci_fresh'

# Nothing installed beforehand
query IIII
SELECT extension_name, install_mode, installed_from, extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----

# Install from the remote repo
statement ok
force install icu from '${REMOTE_EXTENSION_REPO_UPDATED}'

# Installed from the minio repo now
query IIII
SELECT extension_name, install_mode, installed_from, extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
icu REPOSITORY http://duckdb-minio.com:9000/test-bucket-public/ci-test-repo v0.0.2

# UPDATE EXTENSIONS also reports the minio repo
query IIIII
UPDATE EXTENSIONS
----
icu http://duckdb-minio.com:9000/test-bucket-public/ci-test-repo NO_UPDATE_AVAILABLE v0.0.2 v0.0.2

# Rerunning install with a matching origin is a NOP and totally fine
statement ok
install icu from '${REMOTE_EXTENSION_REPO_UPDATED}'

# Direct installing the same extension is now not allowed
statement error
install '${REMOTE_EXTENSION_REPO_DIRECT_PATH}/icu.duckdb_extension.gz'
----
Invalid Input Error: Installing extension 'icu' failed. The extension is already installed but the origin is different.
Currently installed extension is from repository 'http://duckdb-minio.com:9000/test-bucket-public/ci-test-repo', while the extension to be installed is from custom_path

# Installing the same extension from a different repository is also not allowed
statement error
install '${REMOTE_EXTENSION_REPO_DIRECT_PATH}/icu.duckdb_extension.gz' FROM './dummy_repo'
----
Invalid Input Error: Installing extension 'icu' failed. The extension is already installed but the origin is different.
Currently installed extension is from repository 'http://duckdb-minio.com:9000/test-bucket-public/ci-test-repo', while the extension to be installed is from repository './dummy_repo'.
To solve this rerun this command with `FORCE INSTALL`

# We can circumvent this by disabling metadata checks
statement ok
set allow_extensions_metadata_mismatch=true;

# Note that this is a NOP
statement ok
install '${REMOTE_EXTENSION_REPO_DIRECT_PATH}/icu.duckdb_extension.gz'

# icu still the same
query IIII
SELECT extension_name, install_mode, installed_from, extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
icu REPOSITORY http://duckdb-minio.com:9000/test-bucket-public/ci-test-repo v0.0.2

# now we force install to override
statement ok
force install '${REMOTE_EXTENSION_REPO_DIRECT_PATH}/icu.duckdb_extension.gz'

# icu is now from a custom path
query IIII
SELECT extension_name, install_mode, parse_filename(installed_from), extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
icu CUSTOM_PATH icu.duckdb_extension.gz v0.0.2

# The other way around is fine and still a NOP for now
statement ok
install icu from '${REMOTE_EXTENSION_REPO_UPDATED}'

query IIII
SELECT extension_name, install_mode, parse_filename(installed_from), extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
icu CUSTOM_PATH icu.duckdb_extension.gz v0.0.2

statement ok
set allow_extensions_metadata_mismatch=false;
### Now we test autoloading: it should be unaffected by error messages

statement ok
set autoload_known_extensions=true

statement ok
set autoinstall_known_extensions=true

# Set a non-existent autoinstall repo
statement ok
set autoinstall_extension_repository='hocus_pocus_this_is_bogus'

statement ok
set custom_extension_repository='hocus_pocus_this_is_bogus'

statement ok
FORCE INSTALL tpcds FROM '${LOCAL_EXTENSION_REPO_UPDATED}';

# Note: this would trigger the origin check normally, but autoloading simply loads the already-installed extension
statement ok
from tpcds_queries();

# The file should be from the custom path, NOT the autoinstall repo
query IIII
SELECT extension_name, install_mode, parse_filename(installed_from), extension_version FROM duckdb_extensions() where installed and extension_name not in ('jemalloc', 'parquet', 'core_functions')
----
icu CUSTOM_PATH icu.duckdb_extension.gz v0.0.2
tpcds REPOSITORY repository v0.0.1
### Tests with allow_unsigned_extensions = false

restart

statement ok
set extension_directory='${LOCAL_EXTENSION_DIR}'

# Now we allow mismatching metadata
statement ok
set allow_extensions_metadata_mismatch=true;

# Meaning that now it works
statement ok
FORCE INSTALL '${DIRECT_INSTALL_DIR}/json.duckdb_extension';

# We can even load it
statement ok
LOAD json;

restart

# However, when unsigned extensions are not allowed, things are different
statement ok
set allow_unsigned_extensions=false

# Installing is still fine
statement ok
FORCE INSTALL '${DIRECT_INSTALL_DIR}/json.duckdb_extension';

# But loading is not
statement error
LOAD json;
----
Also, the file was built for the platform 'test_platform', but we can only load extensions built for platform

View File

@@ -0,0 +1,21 @@
# name: test/extension/wrong_function_type.test
# group: [extension]

statement error
FROM json("['item':'phasers','year':2155','count':1035]");
----
Catalog Error: Table Function with name "json" is not in the catalog, a function by this name exists in the json extension, but it's of a different type, namely Macro Function

require no_extension_autoloading "EXPECTED: Test relies on autoloading being disabled"

# Multiple options exist, none are scalar
statement error
select json_execute_serialized_sql(42) from range(5);
----
Catalog Error: Scalar Function with name "json_execute_serialized_sql" is not in the catalog, functions with this name exist in the json extension, but they are of different types, namely Pragma Function, Table Function

# One of the two options is Table Function
statement error
CALL json_execute_serialized_sql('test');
----
Catalog Error: Table Function with name "json_execute_serialized_sql" is not in the catalog, but it exists in the json extension.