diff --git a/.github/workflows/MainDistributionPipeline.yml b/.github/workflows/MainDistributionPipeline.yml
index 60ed59a..a5a0664 100644
--- a/.github/workflows/MainDistributionPipeline.yml
+++ b/.github/workflows/MainDistributionPipeline.yml
@@ -19,6 +19,7 @@ jobs:
       extension_name: aws
       duckdb_version: 'v0.10.0'
       exclude_archs: 'wasm_mvp;wasm_eh;wasm_threads;windows_amd64_rtools' # Doesn't work anyway: env local file or env access possible
+      vcpkg_commit: 'a1a1cbc975abf909a6c8985a6a2b8fe20bbd9bd6'
 
   duckdb-stable-deploy:
     name: Deploy extension binaries
diff --git a/.github/workflows/MinioTests.yml b/.github/workflows/MinioTests.yml
index ef33cba..0d89c27 100644
--- a/.github/workflows/MinioTests.yml
+++ b/.github/workflows/MinioTests.yml
@@ -52,7 +52,7 @@ jobs:
     - name: Setup vcpkg
       uses: lukka/run-vcpkg@v11.1
       with:
-        vcpkgGitCommitId: a42af01b72c28a8e1d7b48107b33e4f286a55ef6
+        vcpkgGitCommitId: a1a1cbc975abf909a6c8985a6a2b8fe20bbd9bd6
 
     - name: Build
       shell: bash
diff --git a/.gitmodules b/.gitmodules
index bb8e599..29cf095 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -2,3 +2,6 @@
 	path = duckdb
 	url = https://github.com/duckdb/duckdb
 	branch = main
+[submodule "extension-ci-tools"]
+	path = extension-ci-tools
+	url = git@github.com:duckdb/extension-ci-tools.git
diff --git a/Makefile b/Makefile
index 61bbd5a..e91db43 100644
--- a/Makefile
+++ b/Makefile
@@ -1,121 +1,8 @@
-.PHONY: all clean format debug release duckdb_debug duckdb_release pull update
+PROJ_DIR := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
+
-all: release
+# Configuration of extension
+EXT_NAME=quack
+EXT_CONFIG=${PROJ_DIR}extension_config.cmake
+
-MKFILE_PATH := $(abspath $(lastword $(MAKEFILE_LIST)))
-PROJ_DIR := $(dir $(MKFILE_PATH))
-
-ifeq ($(OS),Windows_NT)
-	TEST_PATH="/test/Release/unittest.exe"
-else
-	TEST_PATH="/test/unittest"
-endif
-
-OSX_BUILD_UNIVERSAL_FLAG=
-ifneq (${OSX_BUILD_ARCH}, "")
-	OSX_BUILD_UNIVERSAL_FLAG=-DOSX_BUILD_ARCH=${OSX_BUILD_ARCH}
-endif
-ifeq (${STATIC_LIBCPP}, 1)
-	STATIC_LIBCPP=-DSTATIC_LIBCPP=TRUE
-endif
-ifeq (${DONT_STATIC_LINK_DUCKDB}, 1)
-	STATIC_LINK_DUCKDB_FLAG=-DEXTENSION_STATIC_BUILD=0
-else
-	STATIC_LINK_DUCKDB_FLAG=-DEXTENSION_STATIC_BUILD=1
-endif
-
-VCPKG_TOOLCHAIN_PATH?=
-ifneq ("${VCPKG_TOOLCHAIN_PATH}", "")
-	TOOLCHAIN_FLAGS:=${TOOLCHAIN_FLAGS} -DVCPKG_MANIFEST_DIR='${PROJ_DIR}' -DVCPKG_BUILD=1 -DCMAKE_TOOLCHAIN_FILE='${VCPKG_TOOLCHAIN_PATH}'
-endif
-ifneq ("${VCPKG_TARGET_TRIPLET}", "")
-	TOOLCHAIN_FLAGS:=${TOOLCHAIN_FLAGS} -DVCPKG_TARGET_TRIPLET='${VCPKG_TARGET_TRIPLET}'
-endif
-
-ifeq ($(GEN),ninja)
-	GENERATOR=-G "Ninja"
-	FORCE_COLOR=-DFORCE_COLORED_OUTPUT=1
-endif
-
-BUILD_FLAGS=-DBUILD_EXTENSIONS="tpch;httpfs" ${STATIC_LINK_DUCKDB_FLAG} ${OSX_BUILD_UNIVERSAL_FLAG} ${STATIC_LIBCPP} ${DISABLE_UNITY_FLAG} ${TOOLCHAIN_FLAGS} -DDUCKDB_EXPLICIT_PLATFORM='${DUCKDB_PLATFORM}'
-
-CLIENT_FLAGS :=
-
-# These flags will make DuckDB build the extension
-EXTENSION_FLAGS=\
--DDUCKDB_EXTENSION_NAMES="aws" \
--DDUCKDB_EXTENSION_AWS_PATH="$(PROJ_DIR)" \
--DDUCKDB_EXTENSION_AWS_SHOULD_LINK=1 \
--DDUCKDB_EXTENSION_AWS_LOAD_TESTS=1 \
--DDUCKDB_EXTENSION_AWS_TEST_PATH="$(PROJ_DIR)test" \
--DDUCKDB_EXTENSION_AWS_INCLUDE_PATH="$(PROJ_DIR)src/include" \
--DDUCKDB_EXTENSION_AWS_VERSION="$(PROJ_DIR)src/include" \
-
-
-pull:
-	git submodule init
-	git submodule update --recursive --remote
-
-clean:
-	rm -rf build
-	rm -rf testext
-	cd duckdb && make clean
-
-# Main build
-debug:
-	mkdir -p build/debug && \
-	cmake $(GENERATOR) $(FORCE_COLOR) $(EXTENSION_FLAGS) ${CLIENT_FLAGS} -DEXTENSION_STATIC_BUILD=1 -DCMAKE_BUILD_TYPE=Debug ${BUILD_FLAGS} -S ./duckdb/ -B build/debug && \
-	cmake --build build/debug --config Debug
-
-release:
-	mkdir -p build/release && \
-	cmake $(GENERATOR) $(FORCE_COLOR) $(EXTENSION_FLAGS) ${CLIENT_FLAGS} -DEXTENSION_STATIC_BUILD=1 -DCMAKE_BUILD_TYPE=Release ${BUILD_FLAGS} -S ./duckdb/ -B build/release && \
-	cmake --build build/release --config Release
-
-# Client build
-debug_js: CLIENT_FLAGS=-DBUILD_NODE=1 -DBUILD_EXTENSIONS=json
-debug_js: debug
-
-debug_r: CLIENT_FLAGS=-DBUILD_R=1
-debug_r: debug
-
-debug_python: CLIENT_FLAGS=-DBUILD_PYTHON=1 -DBUILD_EXTENSIONS=json;fts;tpch;visualizer;tpcds
-debug_python: debug
-
-release_js: CLIENT_FLAGS=-DBUILD_NODE=1 -DBUILD_EXTENSIONS=json
-release_js: release
-
-release_r: CLIENT_FLAGS=-DBUILD_R=1
-release_r: release
-
-release_python: CLIENT_FLAGS=-DBUILD_PYTHON=1 -DBUILD_EXTENSIONS=json;fts;tpch;visualizer;tpcds
-release_python: release
-
-# Main tests
-test: test_release
-test_release: release
-	./build/release/$(TEST_PATH) "$(PROJ_DIR)test/*"
-test_debug: debug
-	./build/debug/$(TEST_PATH) "$(PROJ_DIR)test/*"
-
-# Client tests
-test_js: test_debug_js
-test_debug_js: debug_js
-	cd duckdb/tools/nodejs && npm run test-path -- "../../../test/nodejs/**/*.js"
-
-test_release_js: release_js
-	cd duckdb/tools/nodejs && npm run test-path -- "../../../test/nodejs/**/*.js"
-
-test_python: test_debug_python
-test_debug_python: debug_python
-	cd test/python && python3 -m pytest
-
-test_release_python: release_python
-	cd test/python && python3 -m pytest
-
-format:
-	find src/ -iname *.hpp -o -iname *.cpp | xargs clang-format --sort-includes=0 -style=file -i
-	cmake-format -i CMakeLists.txt
-
-update:
-	git submodule update --remote --merge
+# Include the Makefile from extension-ci-tools
+include extension-ci-tools/makefiles/duckdb_extension.Makefile
\ No newline at end of file
diff --git a/duckdb b/duckdb
index 20b1486..d320c0d 160000
--- a/duckdb
+++ b/duckdb
@@ -1 +1 @@
-Subproject commit 20b1486d1192f9fbd2328d1122b5afe5f1747fce
+Subproject commit d320c0de2f5c14478ded72b956eff9fd82815af4
diff --git a/extension-ci-tools b/extension-ci-tools
new file mode 160000
index 0000000..f0c2c59
--- /dev/null
+++ b/extension-ci-tools
@@ -0,0 +1 @@
+Subproject commit f0c2c59b4963a7684effba724168e03b4251f04b
diff --git a/extension_config.cmake b/extension_config.cmake
new file mode 100644
index 0000000..6af18e0
--- /dev/null
+++ b/extension_config.cmake
@@ -0,0 +1,9 @@
+# This file is included by DuckDB's build system. It specifies which extension to load
+
+# Extension from this repo
+duckdb_extension_load(aws
+    SOURCE_DIR ${CMAKE_CURRENT_LIST_DIR}
+    LOAD_TESTS
+)
+
+duckdb_extension_load(httpfs)
\ No newline at end of file
diff --git a/src/aws_secret.cpp b/src/aws_secret.cpp
index cd6685a..75062b9 100644
--- a/src/aws_secret.cpp
+++ b/src/aws_secret.cpp
@@ -35,10 +35,7 @@ static unique_ptr<KeyValueSecret> ConstructBaseS3Secret(vector<string> &prefix_p
 //! Generate a custom credential provider chain for authentication
 class DuckDBCustomAWSCredentialsProviderChain : public Aws::Auth::AWSCredentialsProviderChain {
 public:
-	explicit DuckDBCustomAWSCredentialsProviderChain(const string &credential_chain, const string &profile = "",
-	                                                 const string &task_role_resource_path = "",
-	                                                 const string &task_role_endpoint = "",
-	                                                 const string &task_role_token = "") {
+	explicit DuckDBCustomAWSCredentialsProviderChain(const string &credential_chain, const string &profile = "") {
 		auto chain_list = StringUtil::Split(credential_chain, ';');
 
 		for (const auto &item : chain_list) {
@@ -56,16 +53,6 @@ class DuckDBCustomAWSCredentialsProviderChain : public Aws::Auth::AWSCredentials
 			AddProvider(make_shared<Aws::Auth::InstanceProfileCredentialsProvider>());
 		} else if (item == "process") {
 			AddProvider(make_shared<Aws::Auth::ProcessCredentialsProvider>());
-		} else if (item == "task_role") {
-			if (!task_role_resource_path.empty()) {
-				AddProvider(make_shared<Aws::Auth::TaskRoleCredentialsProvider>(task_role_resource_path.c_str()));
-			} else if (!task_role_endpoint.empty()) {
-				AddProvider(make_shared<Aws::Auth::TaskRoleCredentialsProvider>(task_role_endpoint.c_str(),
-				                                                                task_role_token.c_str()));
-			} else {
-				throw InvalidInputException(
-				    "task_role provider selected without a resource path or endpoint specified!");
-			}
 		} else if (item == "config") {
 			if (profile.empty()) {
 				AddProvider(make_shared<Aws::Auth::ProfileConfigFileAWSCredentialsProvider>());
@@ -99,12 +86,8 @@ static unique_ptr<BaseSecret> CreateAWSSecretFromCredentialChain(ClientContext &
 
 	if (input.options.find("chain") != input.options.end()) {
 		string chain = TryGetStringParam(input, "chain");
 
-		string task_role_resource_path = TryGetStringParam(input, "task_role_resource_path");
-		string task_role_endpoint = TryGetStringParam(input, "task_role_endpoint");
-		string task_role_token = TryGetStringParam(input, "task_role_token");
-		DuckDBCustomAWSCredentialsProviderChain provider(chain, profile, task_role_resource_path, task_role_endpoint,
-		                                                 task_role_token);
+		DuckDBCustomAWSCredentialsProviderChain provider(chain, profile);
 		credentials = provider.GetAWSCredentials();
 	} else {
 		if (input.options.find("profile") != input.options.end()) {
@@ -210,9 +193,6 @@ void CreateAwsSecretFunctions::Register(DatabaseInstance &instance) {
 
 	// Params for configuring the credential loading
 	cred_chain_function.named_parameters["profile"] = LogicalType::VARCHAR;
-	cred_chain_function.named_parameters["task_role_resource_path"] = LogicalType::VARCHAR;
-	cred_chain_function.named_parameters["task_role_endpoint"] = LogicalType::VARCHAR;
-	cred_chain_function.named_parameters["task_role_token"] = LogicalType::VARCHAR;
 
 	ExtensionUtil::RegisterFunction(instance, cred_chain_function);
 }
diff --git a/test/sql/aws_secret_chains.test b/test/sql/aws_secret_chains.test
index 2e6fbe2..67e2b23 100644
--- a/test/sql/aws_secret_chains.test
+++ b/test/sql/aws_secret_chains.test
@@ -59,30 +59,3 @@ CREATE SECRET env_secret (
     PROVIDER credential_chain,
     CHAIN 'env'
 );
-
-statement error
-CREATE SECRET task_role_secret (
-    TYPE S3,
-    PROVIDER credential_chain,
-    CHAIN 'task_role'
-);
-----
-task_role provider selected without a resource path or endpoint specified!
-
-statement ok
-CREATE SECRET task_role_secret_resource_path (
-    TYPE S3,
-    PROVIDER credential_chain,
-    CHAIN 'task_role',
-    TASK_ROLE_RESOURCE_PATH 'blablabla'
-);
-
-statement ok
-CREATE SECRET task_role_secret_endpoint (
-    TYPE S3,
-    PROVIDER credential_chain,
-    CHAIN 'task_role',
-    TASK_ROLE_ENDPOINT 'blablabla',
-    TASK_ROLE_TOKEN 'lalala'
-);
-
diff --git a/vcpkg.json b/vcpkg.json
index 62a6756..2da224f 100644
--- a/vcpkg.json
+++ b/vcpkg.json
@@ -3,5 +3,7 @@
         "zlib",
         "aws-sdk-cpp",
         "openssl"
-    ]
+    ],
+    "builtin-baseline": "a1a1cbc975abf909a6c8985a6a2b8fe20bbd9bd6",
+    "overrides": [{"name": "openssl", "version": "3.0.8"}]
 }
\ No newline at end of file
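
Annotation (not part of the patch): with the task_role chain entry and the TASK_ROLE_* named parameters removed, PROFILE is the only credential-loading parameter left in the hunks above, and the chain string is still split on ';' and tried in the listed order. A minimal usage sketch of the remaining interface, restricted to chain values that appear in this diff ('env', 'config') and using a hypothetical secret and profile name:

    CREATE SECRET my_aws_secret (
        TYPE S3,
        PROVIDER credential_chain,
        CHAIN 'env;config',
        PROFILE 'my_profile'
    );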