diff --git a/.github/workflows/Linux.yml b/.github/workflows/Linux.yml index 3e35aad..d1908b2 100644 --- a/.github/workflows/Linux.yml +++ b/.github/workflows/Linux.yml @@ -16,18 +16,11 @@ jobs: matrix: # Add commits/tags to build against other DuckDB versions duckdb_version: [ '' ] - arch: ['linux_amd64', 'linux_arm64', 'linux_amd64_gcc4'] + arch: ['linux_amd64_gcc4'] vcpkg_version: [ '2023.04.15' ] - include: - - arch: 'linux_amd64_gcc4' - container: 'quay.io/pypa/manylinux2014_x86_64' - vcpkg_triplet: 'x64-linux' - - arch: 'linux_amd64' - container: 'ubuntu:18.04' - vcpkg_triplet: 'x64-linux' - - arch: 'linux_arm64' - container: 'ubuntu:18.04' - vcpkg_triplet: 'arm64-linux' + container: ['quay.io/pypa/manylinux2014_x86_64'] + vcpkg_triplet: ['x64-linux'] + env: VCPKG_TARGET_TRIPLET: ${{ matrix.vcpkg_triplet }} GEN: Ninja @@ -105,7 +98,7 @@ jobs: - name: Setup vcpkg uses: lukka/run-vcpkg@v11.1 with: - vcpkgGitCommitId: 9edb1b8e590cc086563301d735cae4b6e732d2d2 + vcpkgGitCommitId: a42af01b72c28a8e1d7b48107b33e4f286a55ef6 # Build extension - name: Build extension diff --git a/.github/workflows/MacOS.yml b/.github/workflows/MacOS.yml index 44040d9..ba91c61 100644 --- a/.github/workflows/MacOS.yml +++ b/.github/workflows/MacOS.yml @@ -13,17 +13,11 @@ jobs: runs-on: macos-latest strategy: matrix: - # Add commits/tags to build against other DuckDB versions duckdb_version: [ '' ] vcpkg_version: [ '2023.04.15' ] - vcpkg_triplet: [ 'x64-osx', 'arm64-osx' ] - include: - - vcpkg_triplet: 'x64-osx' - osx_build_arch: 'x86_64' - duckdb_arch: 'osx_amd64' - - vcpkg_triplet: 'arm64-osx' - osx_build_arch: 'arm64' - duckdb_arch: 'osx_arm64' + vcpkg_triplet: [ 'x64-osx'] + osx_build_arch: ['x86_64'] + duckdb_arch: ['osx_amd64'] env: VCPKG_TARGET_TRIPLET: ${{ matrix.vcpkg_triplet }} @@ -54,7 +48,7 @@ jobs: - name: Setup vcpkg uses: lukka/run-vcpkg@v11 with: - vcpkgGitCommitId: 9edb1b8e590cc086563301d735cae4b6e732d2d2 + vcpkgGitCommitId: a42af01b72c28a8e1d7b48107b33e4f286a55ef6 - name: Build extension shell: bash diff --git a/.github/workflows/MainDistributionPipeline.yml b/.github/workflows/MainDistributionPipeline.yml index 0cd9c95..1cd5f75 100644 --- a/.github/workflows/MainDistributionPipeline.yml +++ b/.github/workflows/MainDistributionPipeline.yml @@ -14,11 +14,11 @@ concurrency: jobs: duckdb-stable-build: name: Build extension binaries - uses: duckdb/duckdb/.github/workflows/_extension_distribution.yml@v0.9.2 + uses: duckdb/duckdb/.github/workflows/_extension_distribution.yml@a491470b039c54fe2f0adfe1d161b14c7fe64642 with: - duckdb_version: v0.9.2 extension_name: azure - vcpkg_commit: 9edb1b8e590cc086563301d735cae4b6e732d2d2 # TODO: remove pinned vcpkg commit when updating duckdb version + exclude_archs: 'wasm_mvp;wasm_eh;wasm_threads' # Can't really work in wasm sandbox + duckdb_version: a491470b03 duckdb-stable-deploy: name: Deploy extension binaries @@ -26,6 +26,7 @@ jobs: uses: ./.github/workflows/_extension_deploy.yml secrets: inherit with: - duckdb_version: v0.9.2 extension_name: azure + exclude_archs: 'wasm_mvp;wasm_eh;wasm_threads' + duckdb_version: a491470b03 deploy_latest: ${{ startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main' }} \ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt index a3dc47f..a8486c0 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -10,7 +10,7 @@ include_directories(src/include) set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED True) -set(EXTENSION_SOURCES src/azure_extension.cpp) +set(EXTENSION_SOURCES 
src/azure_extension.cpp src/azure_secret.cpp) add_library(${EXTENSION_NAME} STATIC ${EXTENSION_SOURCES}) set(PARAMETERS "-warnings") diff --git a/duckdb b/duckdb index 3c695d7..a491470 160000 --- a/duckdb +++ b/duckdb @@ -1 +1 @@ -Subproject commit 3c695d7ba94d95d9facee48d395f46ed0bd72b46 +Subproject commit a491470b039c54fe2f0adfe1d161b14c7fe64642 diff --git a/src/azure_extension.cpp b/src/azure_extension.cpp index 3a36dfc..df2cda1 100644 --- a/src/azure_extension.cpp +++ b/src/azure_extension.cpp @@ -2,11 +2,14 @@ #include "azure_extension.hpp" +#include "azure_secret.hpp" #include "duckdb.hpp" #include "duckdb/common/exception.hpp" #include "duckdb/common/http_state.hpp" #include "duckdb/common/file_opener.hpp" #include "duckdb/common/string_util.hpp" +#include "duckdb/main/secret/secret.hpp" +#include "duckdb/main/secret/secret_manager.hpp" #include "duckdb/function/scalar/string_functions.hpp" #include "duckdb/function/scalar_function.hpp" #include "duckdb/main/extension_util.hpp" @@ -14,10 +17,12 @@ #include #include #include +#include #include +#include #include #include -#include +#include #include #include @@ -25,6 +30,7 @@ namespace duckdb { using namespace Azure::Core::Diagnostics; +// globals for collection Azure SDK logging information mutex AzureStorageFileSystem::azure_log_lock = {}; weak_ptr AzureStorageFileSystem::http_state = std::weak_ptr(); bool AzureStorageFileSystem::listener_set = false; @@ -33,7 +39,7 @@ bool AzureStorageFileSystem::listener_set = false; static void Log(Logger::Level level, std::string const &message) { auto http_state_ptr = AzureStorageFileSystem::http_state; auto http_state = http_state_ptr.lock(); - if (!http_state) { + if (!http_state && AzureStorageFileSystem::listener_set) { throw std::runtime_error("HTTP state weak pointer failed to lock"); } if (message.find("Request") != std::string::npos) { @@ -71,9 +77,20 @@ CreateCredentialChainFromSetting(const string &credential_chain) { return result; } -static AzureAuthentication ParseAzureAuthSettings(FileOpener *opener) { +static AzureAuthentication ParseAzureAuthSettings(FileOpener *opener, const string &path) { AzureAuthentication auth; + // Lookup Secret + auto context = opener->TryGetClientContext(); + if (context) { + auto transaction = CatalogTransaction::GetSystemCatalogTransaction(*context); + auto secret_lookup = context->db->config.secret_manager->LookupSecret(transaction, path, "azure"); + if (secret_lookup.HasMatch()) { + const auto &secret = secret_lookup.GetSecret(); + auth.secret = &dynamic_cast(secret); + } + } + Value connection_string_val; if (FileOpener::TryGetCurrentSetting(opener, "azure_storage_connection_string", connection_string_val)) { auth.connection_string = connection_string_val.ToString(); @@ -122,28 +139,72 @@ static AzureReadOptions ParseAzureReadOptions(FileOpener *opener) { } static Azure::Storage::Blobs::BlobContainerClient GetContainerClient(AzureAuthentication &auth, AzureParsedUrl &url) { - if (!auth.connection_string.empty()) { - return Azure::Storage::Blobs::BlobContainerClient::CreateFromConnectionString(auth.connection_string, - url.container); + string connection_string; + bool use_secret = false; + string chain; + string account_name; + string endpoint; + + // Firstly, try to use the auth from the secret + if (auth.secret) { + // If connection string, we're done heres + auto connection_string_value = auth.secret->TryGetValue("connection_string"); + if (!connection_string_value.IsNull()) { + return 
Azure::Storage::Blobs::BlobContainerClient::CreateFromConnectionString( + connection_string_value.ToString(), url.container); + } + + // Account_name can be used both for unauthenticated + if (!auth.secret->TryGetValue("account_name").IsNull()) { + use_secret = true; + account_name = auth.secret->TryGetValue("account_name").ToString(); + } + + if (auth.secret->GetProvider() == "credential_chain") { + use_secret = true; + if (!auth.secret->TryGetValue("chain").IsNull()) { + chain = auth.secret->TryGetValue("chain").ToString(); + } + if (chain.empty()) { + chain = "default"; + } + if (!auth.secret->TryGetValue("endpoint").IsNull()) { + endpoint = auth.secret->TryGetValue("endpoint").ToString(); + } + } + } + + if (!use_secret) { + chain = auth.credential_chain; + account_name = auth.account_name; + endpoint = auth.endpoint; + + if (!auth.connection_string.empty()) { + return Azure::Storage::Blobs::BlobContainerClient::CreateFromConnectionString(auth.connection_string, + url.container); + } + } + + if (endpoint.empty()) { + endpoint = "blob.core.windows.net"; } // Build credential chain, from last to first Azure::Identity::ChainedTokenCredential::Sources credential_chain; - if (!auth.credential_chain.empty()) { - credential_chain = CreateCredentialChainFromSetting(auth.credential_chain); + if (!chain.empty()) { + credential_chain = CreateCredentialChainFromSetting(chain); } - auto accountURL = "https://" + auth.account_name + "." + auth.endpoint; + auto accountURL = "https://" + account_name + "." + endpoint; if (!credential_chain.empty()) { // A set of credentials providers was passed auto chainedTokenCredential = std::make_shared(credential_chain); Azure::Storage::Blobs::BlobServiceClient blob_service_client(accountURL, chainedTokenCredential); return blob_service_client.GetBlobContainerClient(url.container); - } else if (!auth.account_name.empty()) { + } else if (!account_name.empty()) { return Azure::Storage::Blobs::BlobContainerClient(accountURL + "/" + url.container); } else { - throw InvalidInputException( - "No valid Azure credentials found, use either the azure_connection_string or azure_account_name"); + throw InvalidInputException("No valid Azure credentials found!"); } } @@ -155,7 +216,12 @@ BlobClientWrapper::BlobClientWrapper(AzureAuthentication &auth, AzureParsedUrl & BlobClientWrapper::~BlobClientWrapper() = default; Azure::Storage::Blobs::BlobClient *BlobClientWrapper::GetClient() { return blob_client.get(); -}; +} + +AzureStorageFileSystem::~AzureStorageFileSystem() { + Logger::SetListener(nullptr); + AzureStorageFileSystem::listener_set = false; +} AzureStorageFileHandle::AzureStorageFileHandle(FileSystem &fs, string path_p, uint8_t flags, AzureAuthentication &auth, const AzureReadOptions &read_options, AzureParsedUrl parsed_url) @@ -169,6 +235,10 @@ AzureStorageFileHandle::AzureStorageFileHandle(FileSystem &fs, string path_p, ui } catch (Azure::Storage::StorageException &e) { throw IOException("AzureStorageFileSystem open file '" + path + "' failed with code'" + e.ErrorCode + "',Reason Phrase: '" + e.ReasonPhrase + "', Message: '" + e.Message + "'"); + } catch (std::exception &e) { + throw IOException("AzureStorageFileSystem could not open file: '%s', unknown error occured, this could mean " + "the credentials used were wrong. 
Original error message: '%s' ", + path, e.what()); } if (flags & FileFlags::FILE_FLAGS_READ) { @@ -183,7 +253,7 @@ unique_ptr AzureStorageFileSystem::CreateHandle(const st D_ASSERT(compression == FileCompressionType::UNCOMPRESSED); auto parsed_url = ParseUrl(path); - auto azure_auth = ParseAzureAuthSettings(opener); + auto azure_auth = ParseAzureAuthSettings(opener, path); auto read_options = ParseAzureReadOptions(opener); return make_uniq(*this, path, flags, azure_auth, read_options, parsed_url); @@ -250,6 +320,9 @@ static void LoadInternal(DatabaseInstance &instance) { auto &fs = instance.GetFileSystem(); fs.RegisterSubSystem(make_uniq()); + // Load Secret functions + CreateAzureSecretFunctions::Register(instance); + // Load extension config auto &config = DBConfig::GetConfig(instance); config.AddExtensionOption("azure_storage_connection_string", @@ -329,7 +402,7 @@ vector AzureStorageFileSystem::Glob(const string &path, FileOpener *open throw InternalException("Cannot do Azure storage Glob without FileOpener"); } auto azure_url = AzureStorageFileSystem::ParseUrl(path); - auto azure_auth = ParseAzureAuthSettings(opener); + auto azure_auth = ParseAzureAuthSettings(opener, path); // Azure matches on prefix, not glob pattern, so we take a substring until the first wildcard auto first_wildcard_pos = azure_url.path.find_first_of("*[\\"); diff --git a/src/azure_secret.cpp b/src/azure_secret.cpp new file mode 100644 index 0000000..46dc108 --- /dev/null +++ b/src/azure_secret.cpp @@ -0,0 +1,100 @@ +#include "azure_secret.hpp" +#include "duckdb/main/extension_util.hpp" +#include +#include +#include +#include +#include +#include + +namespace duckdb { + +static string TryGetStringParam(CreateSecretInput &input, const string ¶m_name) { + auto param_lookup = input.options.find(param_name); + if (param_lookup != input.options.end()) { + return param_lookup->second.ToString(); + } else { + return ""; + } +} + +static unique_ptr CreateAzureSecretFromConfig(ClientContext &context, CreateSecretInput &input) { + string connection_string = TryGetStringParam(input, "connection_string"); + string account_name = TryGetStringParam(input, "account_name"); + + auto scope = input.scope; + if (scope.empty()) { + scope.push_back("azure://"); + scope.push_back("az://"); + } + + auto result = make_uniq(scope, input.type, input.provider, input.name); + + //! Add connection string + if (!connection_string.empty()) { + result->secret_map["connection_string"] = connection_string; + } + + // Add account_id + if (!account_name.empty()) { + result->secret_map["account_name"] = account_name; + } + + //! 
Connection string may hold sensitive data: it should be redacted + result->redact_keys.insert("connection_string"); + + return std::move(result); +} + +static unique_ptr CreateAzureSecretFromCredentialChain(ClientContext &context, CreateSecretInput &input) { + string chain = TryGetStringParam(input, "chain"); + string account_name = TryGetStringParam(input, "account_name"); + string azure_endpoint = TryGetStringParam(input, "azure_endpoint"); + + auto scope = input.scope; + if (scope.empty()) { + scope.push_back("azure://"); + scope.push_back("az://"); + } + + auto result = make_uniq(scope, input.type, input.provider, input.name); + + // Add config to kv secret + if (input.options.find("chain") != input.options.end()) { + result->secret_map["chain"] = TryGetStringParam(input, "chain"); + } + if (input.options.find("account_name") != input.options.end()) { + result->secret_map["account_name"] = TryGetStringParam(input, "account_name"); + } + if (input.options.find("azure_endpoint") != input.options.end()) { + result->secret_map["azure_endpoint"] = TryGetStringParam(input, "azure_endpoint"); + } + + return std::move(result); +} + +void CreateAzureSecretFunctions::Register(DatabaseInstance &instance) { + string type = "azure"; + + // Register the new type + SecretType secret_type; + secret_type.name = type; + secret_type.deserializer = KeyValueSecret::Deserialize; + secret_type.default_provider = "config"; + ExtensionUtil::RegisterSecretType(instance, secret_type); + + // Register the connection string secret provider + CreateSecretFunction connection_string_function = {type, "config", CreateAzureSecretFromConfig}; + connection_string_function.named_parameters["connection_string"] = LogicalType::VARCHAR; + connection_string_function.named_parameters["account_name"] = LogicalType::VARCHAR; + ExtensionUtil::RegisterFunction(instance, connection_string_function); + + // Register the credential_chain secret provider + CreateSecretFunction cred_chain_function = {type, "credential_chain", CreateAzureSecretFromCredentialChain}; + cred_chain_function.named_parameters["chain"] = LogicalType::VARCHAR; + cred_chain_function.named_parameters["account_name"] = LogicalType::VARCHAR; + cred_chain_function.named_parameters["azure_endpoint"] = LogicalType::VARCHAR; + ExtensionUtil::RegisterFunction(instance, cred_chain_function); +} + +} // namespace duckdb diff --git a/src/include/azure_extension.hpp b/src/include/azure_extension.hpp index dc9e5dc..9db01a9 100644 --- a/src/include/azure_extension.hpp +++ b/src/include/azure_extension.hpp @@ -12,6 +12,8 @@ class BlobClient; namespace duckdb { class HTTPState; +class AzureSecret; +class KeyValueSecret; class AzureExtension : public Extension { public: @@ -20,6 +22,9 @@ class AzureExtension : public Extension { }; struct AzureAuthentication { + //! Main Auth method: through secret + optional_ptr secret; + //! 
Auth method #1: setting the connection string string connection_string; @@ -83,6 +88,8 @@ class AzureStorageFileHandle : public FileHandle { class AzureStorageFileSystem : public FileSystem { public: + ~AzureStorageFileSystem(); + duckdb::unique_ptr OpenFile(const string &path, uint8_t flags, FileLockType lock = DEFAULT_LOCK, FileCompressionType compression = DEFAULT_COMPRESSION, FileOpener *opener = nullptr) final; diff --git a/src/include/azure_secret.hpp b/src/include/azure_secret.hpp new file mode 100644 index 0000000..3dc39e9 --- /dev/null +++ b/src/include/azure_secret.hpp @@ -0,0 +1,24 @@ +#pragma once + +#include "azure_extension.hpp" +#include "duckdb.hpp" +#include + +#include +#include +#include +#include +#include +#include + +namespace duckdb { +struct CreateSecretInput; +class CreateSecretFunction; + +struct CreateAzureSecretFunctions { +public: + //! Register all CreateSecretFunctions + static void Register(DatabaseInstance &instance); +}; + +} // namespace duckdb diff --git a/test/sql/azure_auth_local.test b/test/sql/azure_auth_local.test index 3bc5d53..fe811c9 100644 --- a/test/sql/azure_auth_local.test +++ b/test/sql/azure_auth_local.test @@ -30,9 +30,9 @@ SELECT count(*) FROM 'azure://testing-public/l1.parquet'; 60175 query I -SELECT count(*) FROM 'azure://testing-public/l*.parquet'; +SELECT count(*) FROM 'azure://testing-public/l[12].parquet'; ---- -180525 +120350 # With the CLI credentials, private authentication should now work query I @@ -41,9 +41,9 @@ SELECT count(*) FROM 'azure://testing-private/l1.parquet'; 60175 query I -SELECT count(*) FROM 'azure://testing-private/l*.parquet'; +SELECT count(*) FROM 'azure://testing-private/l[12].parquet'; ---- -180525 +120350 # No credential providers, public buckets should still work! statement ok @@ -55,9 +55,9 @@ SELECT count(*) FROM 'azure://testing-public/l1.parquet'; 60175 query I -SELECT count(*) FROM 'azure://testing-public/l*.parquet'; +SELECT count(*) FROM 'azure://testing-public/l[12].parquet'; ---- -180525 +120350 # private without credentials don't work statement error @@ -82,12 +82,14 @@ SELECT count(*) FROM 'azure://testing-private/l1.parquet'; 60175 query I -SELECT count(*) FROM 'azure://testing-private/l*.parquet'; +SELECT count(*) FROM 'azure://testing-private/l[12].parquet'; ---- -180525 +120350 statement ok set azure_endpoint='nop.nop'; statement error -SELECT count(*) FROM 'azure://testing-private/l*.parquet'; \ No newline at end of file +SELECT count(*) FROM 'azure://testing-private/l[12].parquet'; +---- +Fail to get a new connection for: https://duckdbtesting.nop.nop. Couldn't resolve host name \ No newline at end of file diff --git a/test/sql/azure_auth_local_secrets.test b/test/sql/azure_auth_local_secrets.test new file mode 100644 index 0000000..75bf0c6 --- /dev/null +++ b/test/sql/azure_auth_local_secrets.test @@ -0,0 +1,107 @@ +# name: test/sql/azure_auth_local_secrets.test +# description: test azure extension authentication +# group: [azure] + +require azure + +require parquet + +require-env DUCKDB_CLI_TEST_ENV_AVAILABLE + +load __TEST_DIR__/azure_auth_local_secrets.db + +statement ok +set secret_directory='__TEST_DIR__/azure_auth_local_secrets' + +# Note: this test is currently not run in CI as it requires setting up quite a bit of setup. +# for now, to run this test locally, ensure you have access to the duckdbtesting storage +# account, then login through the cli. Then running the test with DUCKDB_CLI_TEST_ENV_AVAILABLE=1 +# should give all green! 
+#
+# TODO: We should set up a key in CI to automatically test this. Ideally that would also involve managed identities and
+# service principals
+
+statement ok
+CREATE PERSISTENT SECRET az1 (
+    TYPE AZURE,
+    PROVIDER CREDENTIAL_CHAIN,
+    ACCOUNT_NAME 'duckdbtesting'
+)
+
+# With the CLI credentials, private authentication should now work
+query I
+SELECT count(*) FROM 'azure://testing-private/l1.parquet';
+----
+60175
+
+restart
+
+statement ok
+set secret_directory='__TEST_DIR__/azure_auth_local_secrets'
+
+# The credentials are persistent!
+query I
+SELECT count(*) FROM 'azure://testing-private/l[12].parquet';
+----
+120350
+
+statement ok
+DROP SECRET az1;
+
+# This is the default config provider: it will allow access to public Azure URLs
+statement ok
+CREATE SECRET az1 (
+    TYPE AZURE,
+    ACCOUNT_NAME 'duckdbtesting'
+)
+
+query I
+SELECT count(*) FROM 'azure://testing-public/l1.parquet';
+----
+60175
+
+statement error
+SELECT count(*) FROM 'azure://testing-private/l1.parquet';
+----
+failed to authenticate the request
+
+statement ok
+DROP SECRET az1;
+
+# Now use the credential_chain provider with a chain that won't work
+statement ok
+CREATE PERSISTENT SECRET az1 (
+    TYPE AZURE,
+    PROVIDER CREDENTIAL_CHAIN,
+    CHAIN 'env',
+    ACCOUNT_NAME 'duckdbtesting'
+)
+
+statement error
+SELECT count(*) FROM 'azure://testing-private/l1.parquet';
+----
+IO Error: AzureStorageFileSystem could not open file: 'azure://testing-private/l1.parquet', unknown error occured, this could mean the credentials used were wrong. Original error message: 'Failed to get token from ChainedTokenCredential.'
+
+# Currently, when a credential_chain secret does not yield a token, even public requests fail
+statement error
+SELECT count(*) FROM 'azure://testing-public/l1.parquet';
+----
+IO Error: AzureStorageFileSystem could not open file: 'azure://testing-public/l1.parquet', unknown error occured, this could mean the credentials used were wrong. Original error message: 'Failed to get token from ChainedTokenCredential.'
+
+statement ok
+DROP SECRET az1
+
+# Now use the credential_chain provider with a chain that does work
+statement ok
+CREATE PERSISTENT SECRET az1 (
+    TYPE AZURE,
+    PROVIDER CREDENTIAL_CHAIN,
+    CHAIN 'cli',
+    ACCOUNT_NAME 'duckdbtesting'
+)
+
+# Auth works again!
+query I
+SELECT count(*) FROM 'azure://testing-private/l1.parquet';
+----
+60175
\ No newline at end of file
diff --git a/test/sql/azure_secret.test b/test/sql/azure_secret.test
new file mode 100644
index 0000000..2d355d3
--- /dev/null
+++ b/test/sql/azure_secret.test
@@ -0,0 +1,62 @@
+# name: test/sql/azure_secret.test
+# description: test azure extension secrets
+# group: [azure]
+
+# Require statement will ensure this test is run with this extension loaded
+require azure
+
+require parquet
+
+require-env AZURE_STORAGE_CONNECTION_STRING
+
+# We need a connection string to do requests
+foreach prefix azure:// az://
+
+statement error
+SELECT sum(l_orderkey) FROM '${prefix}testing-private/l.parquet';
+----
+Invalid Input Error: No valid Azure credentials found
+
+# Start with default provider
+statement ok
+CREATE SECRET s1 (
+    TYPE AZURE,
+    CONNECTION_STRING '${AZURE_STORAGE_CONNECTION_STRING}'
+)
+
+# Read a column from a parquet file
+query I
+SELECT sum(l_orderkey) FROM '${prefix}testing-private/l.parquet';
+----
+1802759573
+
+# Remove secret
+statement ok
+DROP SECRET s1
+
+endloop
+
+statement error
+SELECT sum(l_orderkey) FROM 'az://testing-private/l.parquet';
+----
+Invalid Input Error: No valid Azure credentials found
+
+# Explicit provider, and a scope
+statement ok
+CREATE SECRET s1 (
+    TYPE AZURE,
+    PROVIDER CONFIG,
+    SCOPE 'az://testing-private',
+    CONNECTION_STRING '${AZURE_STORAGE_CONNECTION_STRING}'
+)
+
+# Only matching scope works
+statement error
+SELECT sum(l_orderkey) FROM 'azure://testing-private/l.parquet';
+----
+Invalid Input Error: No valid Azure credentials found
+
+query I
+SELECT sum(l_orderkey) FROM 'az://testing-private/l.parquet';
+----
+1802759573
\ No newline at end of file
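
For quick reference, here is a minimal usage sketch of the new secret support, distilled from the two test files above. It is a sketch only: the secret names, account name, container, and file path are placeholders rather than values used in this repository, and the parameters shown (CONNECTION_STRING, ACCOUNT_NAME, CHAIN, SCOPE) are the ones azure_secret.cpp registers for the config and credential_chain providers.

    -- Config provider (the default): authenticate with a connection string.
    -- CONNECTION_STRING is marked for redaction, so its value is hidden when the secret is displayed.
    CREATE SECRET azure_conn (
        TYPE AZURE,
        PROVIDER CONFIG,
        SCOPE 'az://my-container',
        CONNECTION_STRING 'my_connection_string'
    );

    -- Credential chain provider: authenticate through the Azure SDK credential chain.
    -- The tests exercise CHAIN 'cli' and 'env'; when CHAIN is omitted the code falls back to 'default'.
    CREATE PERSISTENT SECRET azure_chain (
        TYPE AZURE,
        PROVIDER CREDENTIAL_CHAIN,
        CHAIN 'cli',
        ACCOUNT_NAME 'my_storage_account'
    );

    SELECT count(*) FROM 'az://my-container/my_file.parquet';

    DROP SECRET azure_conn;
    DROP SECRET azure_chain;

When no SCOPE is given, both providers default to the azure:// and az:// prefixes; the last block of azure_secret.test relies on this prefix matching, which is why the scoped secret authenticates the az:// path but not the azure:// one.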