From da2a38027ad0b565ea0357200c346ae74f7671bb Mon Sep 17 00:00:00 2001
From: Connor McArthur
Date: Tue, 5 Nov 2024 13:43:47 -0500
Subject: [PATCH 01/22] add query_id as optional to SQLQueryStatus (#342)
---
.../Under the Hood-20241105-121440.yaml | 6 +
dbt/adapters/contracts/connection.py | 1 +
dbt/adapters/events/adapter_types.proto | 1 +
dbt/adapters/events/adapter_types_pb2.py | 205 +++++++++---------
dbt/adapters/sql/connections.py | 5 +-
5 files changed, 115 insertions(+), 103 deletions(-)
create mode 100644 .changes/unreleased/Under the Hood-20241105-121440.yaml
diff --git a/.changes/unreleased/Under the Hood-20241105-121440.yaml b/.changes/unreleased/Under the Hood-20241105-121440.yaml
new file mode 100644
index 00000000..11946fc4
--- /dev/null
+++ b/.changes/unreleased/Under the Hood-20241105-121440.yaml
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Add `query_id` to SQLQueryStatus
+time: 2024-11-05T12:14:40.181931-05:00
+custom:
+  Author: cmcarthur
+  Issue: "342"
diff --git a/dbt/adapters/contracts/connection.py b/dbt/adapters/contracts/connection.py
index e3baf284..2d10c9a3 100644
--- a/dbt/adapters/contracts/connection.py
+++ b/dbt/adapters/contracts/connection.py
@@ -41,6 +41,7 @@ class AdapterResponse(dbtClassMixin):
     _message: str
     code: Optional[str] = None
     rows_affected: Optional[int] = None
+    query_id: Optional[str] = None
 
     def __str__(self):
         return self._message
diff --git a/dbt/adapters/events/adapter_types.proto b/dbt/adapters/events/adapter_types.proto
index 69d64325..70b4e1e3 100644
--- a/dbt/adapters/events/adapter_types.proto
+++ b/dbt/adapters/events/adapter_types.proto
@@ -266,6 +266,7 @@ message SQLQueryStatus {
     AdapterNodeInfo node_info = 1;
     string status = 2;
     float elapsed = 3;
+    string query_id = 4;
 }
message SQLQueryStatusMsg {
diff --git a/dbt/adapters/events/adapter_types_pb2.py b/dbt/adapters/events/adapter_types_pb2.py
index bfd44080..4d6ae9b8 100644
--- a/dbt/adapters/events/adapter_types_pb2.py
+++ b/dbt/adapters/events/adapter_types_pb2.py
@@ -15,17 +15,18 @@
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 
.proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"b\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupported\"%\n\x10TypeCodeNotFound\x12\x11\n\ttype_code\x18\x01 \x01(\x05\"u\n\x13TypeCodeNotFoundMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.TypeCodeNotFoundb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 
.proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"t\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\x12\x10\n\x08query_id\x18\x04 \x01(\t\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 
\x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 
\x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupported\"%\n\x10TypeCodeNotFound\x12\x11\n\ttype_code\x18\x01 \x01(\x05\"u\n\x13TypeCodeNotFoundMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.TypeCodeNotFoundb\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'adapter_types_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
+
DESCRIPTOR._options = None
- _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._options = None
- _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._serialized_options = b'8\001'
- _globals['_CACHEDUMPGRAPH_DUMPENTRY']._options = None
- _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_options = b'8\001'
+ _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._options = None
+ _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._serialized_options = b'8\001'
+ _CACHEDUMPGRAPH_DUMPENTRY._options = None
+ _CACHEDUMPGRAPH_DUMPENTRY._serialized_options = b'8\001'
_globals['_ADAPTERCOMMONEVENTINFO']._serialized_start=100
_globals['_ADAPTERCOMMONEVENTINFO']._serialized_end=399
_globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._serialized_start=355
@@ -109,101 +110,101 @@
_globals['_SQLQUERYMSG']._serialized_start=4628
_globals['_SQLQUERYMSG']._serialized_end=4729
_globals['_SQLQUERYSTATUS']._serialized_start=4731
- _globals['_SQLQUERYSTATUS']._serialized_end=4829
- _globals['_SQLQUERYSTATUSMSG']._serialized_start=4831
- _globals['_SQLQUERYSTATUSMSG']._serialized_end=4944
- _globals['_SQLCOMMIT']._serialized_start=4946
- _globals['_SQLCOMMIT']._serialized_end=5025
- _globals['_SQLCOMMITMSG']._serialized_start=5027
- _globals['_SQLCOMMITMSG']._serialized_end=5130
- _globals['_COLTYPECHANGE']._serialized_start=5132
- _globals['_COLTYPECHANGE']._serialized_end=5229
- _globals['_COLTYPECHANGEMSG']._serialized_start=5231
- _globals['_COLTYPECHANGEMSG']._serialized_end=5342
- _globals['_SCHEMACREATION']._serialized_start=5344
- _globals['_SCHEMACREATION']._serialized_end=5408
- _globals['_SCHEMACREATIONMSG']._serialized_start=5410
- _globals['_SCHEMACREATIONMSG']._serialized_end=5523
- _globals['_SCHEMADROP']._serialized_start=5525
- _globals['_SCHEMADROP']._serialized_end=5585
- _globals['_SCHEMADROPMSG']._serialized_start=5587
- _globals['_SCHEMADROPMSG']._serialized_end=5692
- _globals['_CACHEACTION']._serialized_start=5695
- _globals['_CACHEACTION']._serialized_end=5917
- _globals['_CACHEACTIONMSG']._serialized_start=5919
- _globals['_CACHEACTIONMSG']._serialized_end=6026
- _globals['_CACHEDUMPGRAPH']._serialized_start=6029
- _globals['_CACHEDUMPGRAPH']._serialized_end=6181
- _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_start=6138
- _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_end=6181
- _globals['_CACHEDUMPGRAPHMSG']._serialized_start=6183
- _globals['_CACHEDUMPGRAPHMSG']._serialized_end=6296
- _globals['_ADAPTERREGISTERED']._serialized_start=6298
- _globals['_ADAPTERREGISTERED']._serialized_end=6364
- _globals['_ADAPTERREGISTEREDMSG']._serialized_start=6366
- _globals['_ADAPTERREGISTEREDMSG']._serialized_end=6485
- _globals['_ADAPTERIMPORTERROR']._serialized_start=6487
- _globals['_ADAPTERIMPORTERROR']._serialized_end=6520
- _globals['_ADAPTERIMPORTERRORMSG']._serialized_start=6522
- _globals['_ADAPTERIMPORTERRORMSG']._serialized_end=6643
- _globals['_PLUGINLOADERROR']._serialized_start=6645
- _globals['_PLUGINLOADERROR']._serialized_end=6680
- _globals['_PLUGINLOADERRORMSG']._serialized_start=6682
- _globals['_PLUGINLOADERRORMSG']._serialized_end=6797
- _globals['_NEWCONNECTIONOPENING']._serialized_start=6799
- _globals['_NEWCONNECTIONOPENING']._serialized_end=6896
- _globals['_NEWCONNECTIONOPENINGMSG']._serialized_start=6898
- _globals['_NEWCONNECTIONOPENINGMSG']._serialized_end=7023
- _globals['_CODEEXECUTION']._serialized_start=7025
- _globals['_CODEEXECUTION']._serialized_end=7081
- _globals['_CODEEXECUTIONMSG']._serialized_start=7083
- _globals['_CODEEXECUTIONMSG']._serialized_end=7194
- _globals['_CODEEXECUTIONSTATUS']._serialized_start=7196
- _globals['_CODEEXECUTIONSTATUS']._serialized_end=7250
- _globals['_CODEEXECUTIONSTATUSMSG']._serialized_start=7252
- _globals['_CODEEXECUTIONSTATUSMSG']._serialized_end=7375
- _globals['_CATALOGGENERATIONERROR']._serialized_start=7377
- _globals['_CATALOGGENERATIONERROR']._serialized_end=7414
- _globals['_CATALOGGENERATIONERRORMSG']._serialized_start=7417
- _globals['_CATALOGGENERATIONERRORMSG']._serialized_end=7546
- _globals['_WRITECATALOGFAILURE']._serialized_start=7548
- _globals['_WRITECATALOGFAILURE']._serialized_end=7593
- _globals['_WRITECATALOGFAILUREMSG']._serialized_start=7595
- _globals['_WRITECATALOGFAILUREMSG']._serialized_end=7718
- _globals['_CATALOGWRITTEN']._serialized_start=7720
- _globals['_CATALOGWRITTEN']._serialized_end=7750
- _globals['_CATALOGWRITTENMSG']._serialized_start=7752
- _globals['_CATALOGWRITTENMSG']._serialized_end=7865
- _globals['_CANNOTGENERATEDOCS']._serialized_start=7867
- _globals['_CANNOTGENERATEDOCS']._serialized_end=7887
- _globals['_CANNOTGENERATEDOCSMSG']._serialized_start=7889
- _globals['_CANNOTGENERATEDOCSMSG']._serialized_end=8010
- _globals['_BUILDINGCATALOG']._serialized_start=8012
- _globals['_BUILDINGCATALOG']._serialized_end=8029
- _globals['_BUILDINGCATALOGMSG']._serialized_start=8031
- _globals['_BUILDINGCATALOGMSG']._serialized_end=8146
- _globals['_DATABASEERRORRUNNINGHOOK']._serialized_start=8148
- _globals['_DATABASEERRORRUNNINGHOOK']._serialized_end=8193
- _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_start=8196
- _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_end=8329
- _globals['_HOOKSRUNNING']._serialized_start=8331
- _globals['_HOOKSRUNNING']._serialized_end=8383
- _globals['_HOOKSRUNNINGMSG']._serialized_start=8385
- _globals['_HOOKSRUNNINGMSG']._serialized_end=8494
- _globals['_FINISHEDRUNNINGSTATS']._serialized_start=8496
- _globals['_FINISHEDRUNNINGSTATS']._serialized_end=8580
- _globals['_FINISHEDRUNNINGSTATSMSG']._serialized_start=8582
- _globals['_FINISHEDRUNNINGSTATSMSG']._serialized_end=8707
- _globals['_CONSTRAINTNOTENFORCED']._serialized_start=8709
- _globals['_CONSTRAINTNOTENFORCED']._serialized_end=8769
- _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_start=8771
- _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_end=8898
- _globals['_CONSTRAINTNOTSUPPORTED']._serialized_start=8900
- _globals['_CONSTRAINTNOTSUPPORTED']._serialized_end=8961
- _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_start=8964
- _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_end=9093
- _globals['_TYPECODENOTFOUND']._serialized_start=9095
- _globals['_TYPECODENOTFOUND']._serialized_end=9132
- _globals['_TYPECODENOTFOUNDMSG']._serialized_start=9134
- _globals['_TYPECODENOTFOUNDMSG']._serialized_end=9251
+ _globals['_SQLQUERYSTATUS']._serialized_end=4847
+ _globals['_SQLQUERYSTATUSMSG']._serialized_start=4849
+ _globals['_SQLQUERYSTATUSMSG']._serialized_end=4962
+ _globals['_SQLCOMMIT']._serialized_start=4964
+ _globals['_SQLCOMMIT']._serialized_end=5043
+ _globals['_SQLCOMMITMSG']._serialized_start=5045
+ _globals['_SQLCOMMITMSG']._serialized_end=5148
+ _globals['_COLTYPECHANGE']._serialized_start=5150
+ _globals['_COLTYPECHANGE']._serialized_end=5247
+ _globals['_COLTYPECHANGEMSG']._serialized_start=5249
+ _globals['_COLTYPECHANGEMSG']._serialized_end=5360
+ _globals['_SCHEMACREATION']._serialized_start=5362
+ _globals['_SCHEMACREATION']._serialized_end=5426
+ _globals['_SCHEMACREATIONMSG']._serialized_start=5428
+ _globals['_SCHEMACREATIONMSG']._serialized_end=5541
+ _globals['_SCHEMADROP']._serialized_start=5543
+ _globals['_SCHEMADROP']._serialized_end=5603
+ _globals['_SCHEMADROPMSG']._serialized_start=5605
+ _globals['_SCHEMADROPMSG']._serialized_end=5710
+ _globals['_CACHEACTION']._serialized_start=5713
+ _globals['_CACHEACTION']._serialized_end=5935
+ _globals['_CACHEACTIONMSG']._serialized_start=5937
+ _globals['_CACHEACTIONMSG']._serialized_end=6044
+ _globals['_CACHEDUMPGRAPH']._serialized_start=6047
+ _globals['_CACHEDUMPGRAPH']._serialized_end=6199
+ _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_start=6156
+ _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_end=6199
+ _globals['_CACHEDUMPGRAPHMSG']._serialized_start=6201
+ _globals['_CACHEDUMPGRAPHMSG']._serialized_end=6314
+ _globals['_ADAPTERREGISTERED']._serialized_start=6316
+ _globals['_ADAPTERREGISTERED']._serialized_end=6382
+ _globals['_ADAPTERREGISTEREDMSG']._serialized_start=6384
+ _globals['_ADAPTERREGISTEREDMSG']._serialized_end=6503
+ _globals['_ADAPTERIMPORTERROR']._serialized_start=6505
+ _globals['_ADAPTERIMPORTERROR']._serialized_end=6538
+ _globals['_ADAPTERIMPORTERRORMSG']._serialized_start=6540
+ _globals['_ADAPTERIMPORTERRORMSG']._serialized_end=6661
+ _globals['_PLUGINLOADERROR']._serialized_start=6663
+ _globals['_PLUGINLOADERROR']._serialized_end=6698
+ _globals['_PLUGINLOADERRORMSG']._serialized_start=6700
+ _globals['_PLUGINLOADERRORMSG']._serialized_end=6815
+ _globals['_NEWCONNECTIONOPENING']._serialized_start=6817
+ _globals['_NEWCONNECTIONOPENING']._serialized_end=6914
+ _globals['_NEWCONNECTIONOPENINGMSG']._serialized_start=6916
+ _globals['_NEWCONNECTIONOPENINGMSG']._serialized_end=7041
+ _globals['_CODEEXECUTION']._serialized_start=7043
+ _globals['_CODEEXECUTION']._serialized_end=7099
+ _globals['_CODEEXECUTIONMSG']._serialized_start=7101
+ _globals['_CODEEXECUTIONMSG']._serialized_end=7212
+ _globals['_CODEEXECUTIONSTATUS']._serialized_start=7214
+ _globals['_CODEEXECUTIONSTATUS']._serialized_end=7268
+ _globals['_CODEEXECUTIONSTATUSMSG']._serialized_start=7270
+ _globals['_CODEEXECUTIONSTATUSMSG']._serialized_end=7393
+ _globals['_CATALOGGENERATIONERROR']._serialized_start=7395
+ _globals['_CATALOGGENERATIONERROR']._serialized_end=7432
+ _globals['_CATALOGGENERATIONERRORMSG']._serialized_start=7435
+ _globals['_CATALOGGENERATIONERRORMSG']._serialized_end=7564
+ _globals['_WRITECATALOGFAILURE']._serialized_start=7566
+ _globals['_WRITECATALOGFAILURE']._serialized_end=7611
+ _globals['_WRITECATALOGFAILUREMSG']._serialized_start=7613
+ _globals['_WRITECATALOGFAILUREMSG']._serialized_end=7736
+ _globals['_CATALOGWRITTEN']._serialized_start=7738
+ _globals['_CATALOGWRITTEN']._serialized_end=7768
+ _globals['_CATALOGWRITTENMSG']._serialized_start=7770
+ _globals['_CATALOGWRITTENMSG']._serialized_end=7883
+ _globals['_CANNOTGENERATEDOCS']._serialized_start=7885
+ _globals['_CANNOTGENERATEDOCS']._serialized_end=7905
+ _globals['_CANNOTGENERATEDOCSMSG']._serialized_start=7907
+ _globals['_CANNOTGENERATEDOCSMSG']._serialized_end=8028
+ _globals['_BUILDINGCATALOG']._serialized_start=8030
+ _globals['_BUILDINGCATALOG']._serialized_end=8047
+ _globals['_BUILDINGCATALOGMSG']._serialized_start=8049
+ _globals['_BUILDINGCATALOGMSG']._serialized_end=8164
+ _globals['_DATABASEERRORRUNNINGHOOK']._serialized_start=8166
+ _globals['_DATABASEERRORRUNNINGHOOK']._serialized_end=8211
+ _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_start=8214
+ _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_end=8347
+ _globals['_HOOKSRUNNING']._serialized_start=8349
+ _globals['_HOOKSRUNNING']._serialized_end=8401
+ _globals['_HOOKSRUNNINGMSG']._serialized_start=8403
+ _globals['_HOOKSRUNNINGMSG']._serialized_end=8512
+ _globals['_FINISHEDRUNNINGSTATS']._serialized_start=8514
+ _globals['_FINISHEDRUNNINGSTATS']._serialized_end=8598
+ _globals['_FINISHEDRUNNINGSTATSMSG']._serialized_start=8600
+ _globals['_FINISHEDRUNNINGSTATSMSG']._serialized_end=8725
+ _globals['_CONSTRAINTNOTENFORCED']._serialized_start=8727
+ _globals['_CONSTRAINTNOTENFORCED']._serialized_end=8787
+ _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_start=8789
+ _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_end=8916
+ _globals['_CONSTRAINTNOTSUPPORTED']._serialized_start=8918
+ _globals['_CONSTRAINTNOTSUPPORTED']._serialized_end=8979
+ _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_start=8982
+ _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_end=9111
+ _globals['_TYPECODENOTFOUND']._serialized_start=9113
+ _globals['_TYPECODENOTFOUND']._serialized_end=9150
+ _globals['_TYPECODENOTFOUNDMSG']._serialized_start=9152
+ _globals['_TYPECODENOTFOUNDMSG']._serialized_end=9269
# @@protoc_insertion_point(module_scope)
diff --git a/dbt/adapters/sql/connections.py b/dbt/adapters/sql/connections.py
index 4d450c88..baccddc9 100644
--- a/dbt/adapters/sql/connections.py
+++ b/dbt/adapters/sql/connections.py
@@ -92,11 +92,14 @@ def add_query(
             cursor = connection.handle.cursor()
             cursor.execute(sql, bindings)
 
+            result = self.get_response(cursor)
+
             fire_event(
                 SQLQueryStatus(
-                    status=str(self.get_response(cursor)),
+                    status=str(result),
                     elapsed=time.perf_counter() - pre,
                     node_info=get_node_info(),
+                    query_id=result.query_id,
                 )
             )
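
With query_id on AdapterResponse, get_response() becomes the natural place for an
adapter to surface the backend's query identifier, which add_query() then forwards
on the SQLQueryStatus event. A minimal sketch of such a get_response(), assuming a
DB-API cursor whose driver exposes a per-query ID via an sfqid-style attribute
(that attribute name is illustrative, not part of the DB-API spec):

    from dbt.adapters.contracts.connection import AdapterResponse

    class ExampleConnectionManager:  # hypothetical adapter connection manager
        @classmethod
        def get_response(cls, cursor) -> AdapterResponse:
            # rowcount comes from the DB-API cursor; sfqid is an assumed,
            # driver-specific attribute used purely for illustration
            return AdapterResponse(
                _message="SUCCESS",
                rows_affected=getattr(cursor, "rowcount", None),
                query_id=getattr(cursor, "sfqid", None),
            )

Because query_id defaults to None on AdapterResponse, adapters that never populate
it continue to work unchanged.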
From 9c08dea4d29d9af33d181bc1cd7a826cc564bf44 Mon Sep 17 00:00:00 2001
From: Quigley Malcolm
Date: Fri, 8 Nov 2024 16:25:07 -0600
Subject: [PATCH 02/22] Switch from environment variable to behavior flag for
gating microbatch functionality (#323)
---
.../unreleased/Features-20241001-165406.yaml | 7 ++++++
.../test_incremental_microbatch.py | 3 ---
dbt/adapters/base/impl.py | 22 ++++++++++++++++---
3 files changed, 26 insertions(+), 6 deletions(-)
create mode 100644 .changes/unreleased/Features-20241001-165406.yaml
diff --git a/.changes/unreleased/Features-20241001-165406.yaml b/.changes/unreleased/Features-20241001-165406.yaml
new file mode 100644
index 00000000..609684d4
--- /dev/null
+++ b/.changes/unreleased/Features-20241001-165406.yaml
@@ -0,0 +1,7 @@
+kind: Features
+body: Use a behavior flag to gate microbatch functionality (instead of an environment
+  variable)
+time: 2024-10-01T16:54:06.121016-05:00
+custom:
+  Author: QMalcolm
+  Issue: "327"
diff --git a/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py b/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py
index 5bbabbe1..34078ac3 100644
--- a/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py
+++ b/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py
@@ -1,6 +1,4 @@
-import os
 from pprint import pformat
-from unittest import mock
 
 import pytest
@@ -63,7 +61,6 @@ def assert_row_count(self, project, relation_name: str, expected_row_count: int)
         assert len(result) == expected_row_count, f"{relation_name}:{pformat(result)}"
 
-    @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"})
     def test_run_with_event_time(self, project, insert_two_rows_sql):
         # initial run -- backfills all data
         with patch_microbatch_end_time("2020-01-03 13:57:00"):
diff --git a/dbt/adapters/base/impl.py b/dbt/adapters/base/impl.py
index 41481535..c8457e2f 100644
--- a/dbt/adapters/base/impl.py
+++ b/dbt/adapters/base/impl.py
@@ -23,7 +23,6 @@
     Union,
     TYPE_CHECKING,
 )
-import os
 import pytz
 from dbt_common.behavior_flags import Behavior, BehaviorFlag
 from dbt_common.clients.jinja import CallableMacroGenerator
@@ -316,7 +315,13 @@ def _behavior_flags(self) -> List[BehaviorFlag]:
"""
This method should be overwritten by adapter maintainers to provide platform-specific flags
"""
- return []
+ return [
+ {
+ "name": "require_batched_execution_for_custom_microbatch_strategy",
+ "default": False,
+ "docs_url": "https://docs.getdbt.com/docs/build/incremental-microbatch",
+ }
+ ]
###
# Methods that pass through to the connection manager
@@ -1574,13 +1579,24 @@ def valid_incremental_strategies(self):
     def builtin_incremental_strategies(self):
         builtin_strategies = ["append", "delete+insert", "merge", "insert_overwrite"]
-        if os.environ.get("DBT_EXPERIMENTAL_MICROBATCH"):
+        if self.behavior.require_batched_execution_for_custom_microbatch_strategy.no_warn:
             builtin_strategies.append("microbatch")
 
         return builtin_strategies
 
     @available.parse_none
     def get_incremental_strategy_macro(self, model_context, strategy: str):
+        """Gets the macro for the given incremental strategy.
+
+        Additionally some validations are done:
+        1. Assert that if the given strategy is a "builtin" strategy, then it must
+           also be defined as a "valid" strategy for the associated adapter
+        2. Assert that the incremental strategy exists in the model context
+
+        Notably, something can be defined by the adapter as "valid" without it being
+        a "builtin", and nothing will break (and that is desirable).
+        """
+
         # Construct macro_name from strategy name
         if strategy is None:
             strategy = "default"
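
At runtime the gate is now read through dbt-common's behavior-flag interface
instead of the environment. A short sketch of consuming the flag the way
builtin_incremental_strategies does above; `adapter` stands for any BaseAdapter
subclass instance and the helper name is invented for illustration:

    def microbatch_is_builtin(adapter) -> bool:
        # .no_warn returns the flag's current value without firing the
        # behavior-change warning that a plain boolean check would emit
        flag = adapter.behavior.require_batched_execution_for_custom_microbatch_strategy
        return bool(flag.no_warn)

With the default of False, "microbatch" stays out of the builtin strategy list, so
projects shipping a custom microbatch macro keep their existing behavior until
they opt in via the flag.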
From 5b99d745c4e2133418f5faffb3fcc7c75138fc28 Mon Sep 17 00:00:00 2001
From: Github Build Bot
Date: Mon, 11 Nov 2024 19:05:30 +0000
Subject: [PATCH 03/22] Bumping version to 1.11.0 and generate changelog
---
.changes/1.11.0.md | 12 ++++++++++++
.changes/unreleased/Features-20241001-165406.yaml | 7 -------
.../unreleased/Under the Hood-20241105-121440.yaml | 6 ------
CHANGELOG.md | 14 +++++++++++++-
dbt/adapters/__about__.py | 2 +-
5 files changed, 26 insertions(+), 15 deletions(-)
create mode 100644 .changes/1.11.0.md
delete mode 100644 .changes/unreleased/Features-20241001-165406.yaml
delete mode 100644 .changes/unreleased/Under the Hood-20241105-121440.yaml
diff --git a/.changes/1.11.0.md b/.changes/1.11.0.md
new file mode 100644
index 00000000..fbe85222
--- /dev/null
+++ b/.changes/1.11.0.md
@@ -0,0 +1,12 @@
+## dbt-adapters 1.11.0 - November 11, 2024
+
+### Features
+
+- Use a behavior flag to gate microbatch functionality (instead of an environment variable) ([#327](https://github.com/dbt-labs/dbt-adapters/issues/327))
+
+### Under the Hood
+
+- Add `query_id` to SQLQueryStatus ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342))
+
+### Contributors
+- [@cmcarthur](https://github.com/cmcarthur) ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342))
diff --git a/.changes/unreleased/Features-20241001-165406.yaml b/.changes/unreleased/Features-20241001-165406.yaml
deleted file mode 100644
index 609684d4..00000000
--- a/.changes/unreleased/Features-20241001-165406.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-kind: Features
-body: Use a behavior flag to gate microbatch functionality (instead of an environment
-  variable)
-time: 2024-10-01T16:54:06.121016-05:00
-custom:
-  Author: QMalcolm
-  Issue: "327"
diff --git a/.changes/unreleased/Under the Hood-20241105-121440.yaml b/.changes/unreleased/Under the Hood-20241105-121440.yaml
deleted file mode 100644
index 11946fc4..00000000
--- a/.changes/unreleased/Under the Hood-20241105-121440.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Add `query_id` to SQLQueryStatus
-time: 2024-11-05T12:14:40.181931-05:00
-custom:
-  Author: cmcarthur
-  Issue: "342"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9971e5ff..b6617204 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,10 +5,22 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html),
and is generated by [Changie](https://github.com/miniscruff/changie).
-## dbt-adapters 1.10.3 - October 29, 2024
+## dbt-adapters 1.11.0 - November 11, 2024
+
+### Features
+
+- Use a behavior flag to gate microbatch functionality (instead of an environment variable) ([#327](https://github.com/dbt-labs/dbt-adapters/issues/327))
+
+### Under the Hood
+
+- Add `query_id` to SQLQueryStatus ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342))
+### Contributors
+- [@cmcarthur](https://github.com/cmcarthur) ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342))
+## dbt-adapters 1.10.3 - October 29, 2024
+
## dbt-adapters 1.10.2 - October 01, 2024
### Under the Hood
diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py
index 6aaa73b8..b6c30336 100644
--- a/dbt/adapters/__about__.py
+++ b/dbt/adapters/__about__.py
@@ -1 +1 @@
-version = "1.8.0"
+version = "1.11.0"
From a7cc32351c05d0874e709db05d9c4349b743f53d Mon Sep 17 00:00:00 2001
From: Github Build Bot
Date: Mon, 11 Nov 2024 21:56:22 +0000
Subject: [PATCH 04/22] Bumping version to 1.10.4 and generate changelog
---
.changes/1.10.4.md | 1 +
CHANGELOG.md | 3 +++
dbt-tests-adapter/dbt/tests/__about__.py | 2 +-
3 files changed, 5 insertions(+), 1 deletion(-)
create mode 100644 .changes/1.10.4.md
diff --git a/.changes/1.10.4.md b/.changes/1.10.4.md
new file mode 100644
index 00000000..f8bbd420
--- /dev/null
+++ b/.changes/1.10.4.md
@@ -0,0 +1 @@
+## dbt-adapters 1.10.4 - November 11, 2024
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b6617204..ed927965 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -18,6 +18,9 @@ and is generated by [Changie](https://github.com/miniscruff/changie).
### Contributors
- [@cmcarthur](https://github.com/cmcarthur) ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342))
+## dbt-adapters 1.10.4 - November 11, 2024
+
+
## dbt-adapters 1.10.3 - October 29, 2024
diff --git a/dbt-tests-adapter/dbt/tests/__about__.py b/dbt-tests-adapter/dbt/tests/__about__.py
index 977620c3..08e0d06b 100644
--- a/dbt-tests-adapter/dbt/tests/__about__.py
+++ b/dbt-tests-adapter/dbt/tests/__about__.py
@@ -1 +1 @@
-version = "1.10.3"
+version = "1.10.4"
From 7caf83f0dde166ef77b1d29fdb592e51e17dd243 Mon Sep 17 00:00:00 2001
From: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com>
Date: Mon, 11 Nov 2024 15:10:00 -0800
Subject: [PATCH 05/22] Pin gh-action-pypi-publish to `v1.11` (#344)
---
.github/actions/publish-pypi/action.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/actions/publish-pypi/action.yml b/.github/actions/publish-pypi/action.yml
index 25bc3a8d..ebfdb89e 100644
--- a/.github/actions/publish-pypi/action.yml
+++ b/.github/actions/publish-pypi/action.yml
@@ -24,6 +24,6 @@ runs:
       shell: bash
 
     - name: Publish artifacts to PyPI
-      uses: pypa/gh-action-pypi-publish@release/v1
+      uses: pypa/gh-action-pypi-publish@release/v1.11.0
       with:
         repository-url: ${{ inputs.repository-url }}
From a72379e2e81d27d1a2fd13895e429f3ac3544e61 Mon Sep 17 00:00:00 2001
From: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com>
Date: Mon, 11 Nov 2024 15:57:24 -0800
Subject: [PATCH 06/22] Update action.yml (#345)
---
.github/actions/publish-pypi/action.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/actions/publish-pypi/action.yml b/.github/actions/publish-pypi/action.yml
index ebfdb89e..22e6773b 100644
--- a/.github/actions/publish-pypi/action.yml
+++ b/.github/actions/publish-pypi/action.yml
@@ -24,6 +24,6 @@ runs:
       shell: bash
 
     - name: Publish artifacts to PyPI
-      uses: pypa/gh-action-pypi-publish@release/v1.11.0
+      uses: pypa/gh-action-pypi-publish@release/v1.11
       with:
         repository-url: ${{ inputs.repository-url }}
From 85122e591ffa6a0378abc736b8ea0a2162f11e09 Mon Sep 17 00:00:00 2001
From: Michelle Ark
Date: Wed, 13 Nov 2024 12:48:07 -0500
Subject: [PATCH 07/22] Fix setting of global behavior flags (#348)
Co-authored-by: Quigley Malcolm
---
.../unreleased/Fixes-20241112-143740.yaml | 6 +
dbt/adapters/base/impl.py | 20 +-
.../test_behavior_flags.py | 10 +
tests/unit/conftest.py | 8 +-
tests/unit/fixtures/__init__.py | 8 +-
tests/unit/fixtures/adapter.py | 187 +++++++++---------
6 files changed, 137 insertions(+), 102 deletions(-)
create mode 100644 .changes/unreleased/Fixes-20241112-143740.yaml
diff --git a/.changes/unreleased/Fixes-20241112-143740.yaml b/.changes/unreleased/Fixes-20241112-143740.yaml
new file mode 100644
index 00000000..ca899cbc
--- /dev/null
+++ b/.changes/unreleased/Fixes-20241112-143740.yaml
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Move require_batched_execution_for_custom_microbatch_strategy flag to global
+time: 2024-11-12T14:37:40.681284-06:00
+custom:
+  Author: QMalcolm MichelleArk
+  Issue: 351
diff --git a/dbt/adapters/base/impl.py b/dbt/adapters/base/impl.py
index c8457e2f..15093600 100644
--- a/dbt/adapters/base/impl.py
+++ b/dbt/adapters/base/impl.py
@@ -98,6 +98,13 @@
 GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations"
 FRESHNESS_MACRO_NAME = "collect_freshness"
 GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified"
+DEFAULT_BASE_BEHAVIOR_FLAGS = [
+    {
+        "name": "require_batched_execution_for_custom_microbatch_strategy",
+        "default": False,
+        "docs_url": "https://docs.getdbt.com/docs/build/incremental-microbatch",
+    }
+]
 
 
 class ConstraintSupport(str, Enum):
@@ -273,8 +280,7 @@ def __init__(self, config, mp_context: SpawnContext) -> None:
         self.connections = self.ConnectionManager(config, mp_context)
         self._macro_resolver: Optional[MacroResolverProtocol] = None
         self._macro_context_generator: Optional[MacroContextGeneratorCallable] = None
-        # this will be updated to include global behavior flags once they exist
-        self.behavior = []  # type: ignore
+        self.behavior = DEFAULT_BASE_BEHAVIOR_FLAGS  # type: ignore
 
     ###
     # Methods to set / access a macro resolver
@@ -314,14 +320,10 @@ def behavior(self, flags: List[BehaviorFlag]) -> None:
def _behavior_flags(self) -> List[BehaviorFlag]:
"""
This method should be overwritten by adapter maintainers to provide platform-specific flags
+
+ The BaseAdapter should NOT include any global flags here, as those should be defined via DEFAULT_BASE_BEHAVIOR_FLAGS.
"""
- return [
- {
- "name": "require_batched_execution_for_custom_microbatch_strategy",
- "default": False,
- "docs_url": "https://docs.getdbt.com/docs/build/incremental-microbatch",
- }
- ]
+ return []
###
# Methods that pass through to the connection manager
diff --git a/tests/unit/behavior_flag_tests/test_behavior_flags.py b/tests/unit/behavior_flag_tests/test_behavior_flags.py
index 7f3abb89..378d07bb 100644
--- a/tests/unit/behavior_flag_tests/test_behavior_flags.py
+++ b/tests/unit/behavior_flag_tests/test_behavior_flags.py
@@ -1,5 +1,6 @@
from typing import Any, Dict, List
+from dbt.adapters.base.impl import DEFAULT_BASE_BEHAVIOR_FLAGS
from dbt_common.behavior_flags import BehaviorFlag
from dbt_common.exceptions import DbtBaseException
import pytest
@@ -64,3 +65,12 @@ def test_register_behavior_flags(adapter):
assert not adapter.behavior.default_true_user_false_flag
assert adapter.behavior.default_true_user_true_flag
assert adapter.behavior.default_true_user_skip_flag
+
+
+def test_behaviour_flags_property_empty(adapter_default_behaviour_flags):
+ assert adapter_default_behaviour_flags._behavior_flags == []
+
+
+def test_behavior_property_has_defaults(adapter_default_behaviour_flags):
+ for flag in DEFAULT_BASE_BEHAVIOR_FLAGS:
+ assert hasattr(adapter_default_behaviour_flags.behavior, flag["name"])
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
index 346634df..225bdf57 100644
--- a/tests/unit/conftest.py
+++ b/tests/unit/conftest.py
@@ -1 +1,7 @@
-from tests.unit.fixtures import adapter, behavior_flags, config, flags
+from tests.unit.fixtures import (
+ adapter,
+ adapter_default_behaviour_flags,
+ behavior_flags,
+ config,
+ flags,
+)
diff --git a/tests/unit/fixtures/__init__.py b/tests/unit/fixtures/__init__.py
index 78135a2c..caa1448f 100644
--- a/tests/unit/fixtures/__init__.py
+++ b/tests/unit/fixtures/__init__.py
@@ -1 +1,7 @@
-from tests.unit.fixtures.adapter import adapter, behavior_flags, config, flags
+from tests.unit.fixtures.adapter import (
+ adapter,
+ adapter_default_behaviour_flags,
+ behavior_flags,
+ config,
+ flags,
+)
diff --git a/tests/unit/fixtures/adapter.py b/tests/unit/fixtures/adapter.py
index b59b0423..3730a083 100644
--- a/tests/unit/fixtures/adapter.py
+++ b/tests/unit/fixtures/adapter.py
@@ -15,105 +15,110 @@
from tests.unit.fixtures.credentials import CredentialsStub
-@pytest.fixture
-def adapter(config, behavior_flags) -> BaseAdapter:
+class BaseAdapterStub(BaseAdapter):
+ """
+ A stub for an adapter that uses the cache as the database
+ """
+
+ ConnectionManager = ConnectionManagerStub
+
+ ###
+ # Abstract methods for database-specific values, attributes, and types
+ ###
+ @classmethod
+ def date_function(cls) -> str:
+ return "date_function"
+
+ @classmethod
+ def is_cancelable(cls) -> bool:
+ return False
+
+ def list_schemas(self, database: str) -> List[str]:
+ return list(self.cache.schemas)
+
+ ###
+ # Abstract methods about relations
+ ###
+ def drop_relation(self, relation: BaseRelation) -> None:
+ self.cache_dropped(relation)
+
+ def truncate_relation(self, relation: BaseRelation) -> None:
+ self.cache_dropped(relation)
+
+ def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None:
+ self.cache_renamed(from_relation, to_relation)
+
+ def get_columns_in_relation(self, relation: BaseRelation) -> List[Column]:
+ # there's no database, so these need to be added as kwargs in the existing_relations fixture
+ return relation.columns
+
+ def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None:
+ # there's no database, so these need to be added as kwargs in the existing_relations fixture
+ object.__setattr__(current, "columns", goal.columns)
+
+ def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[BaseRelation]:
+ # there's no database, so use the cache as the database
+ return self.cache.get_relations(schema_relation.database, schema_relation.schema)
+
+ ###
+ # ODBC FUNCTIONS -- these should not need to change for every adapter,
+ # although some adapters may override them
+ ###
+ def create_schema(self, relation: BaseRelation):
+ # there's no database, this happens implicitly by adding a relation to the cache
+ pass
+
+ def drop_schema(self, relation: BaseRelation):
+ for each_relation in self.cache.get_relations(relation.database, relation.schema):
+ self.cache_dropped(each_relation)
+
+ @classmethod
+ def quote(cls, identifier: str) -> str:
+ quote_char = ""
+ return f"{quote_char}{identifier}{quote_char}"
+
+ ###
+ # Conversions: These must be implemented by concrete implementations, for
+ # converting agate types into their sql equivalents.
+ ###
+ @classmethod
+ def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ return "str"
+
+ @classmethod
+ def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ return "float"
+
+ @classmethod
+ def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ return "bool"
+
+ @classmethod
+ def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ return "datetime"
+
+ @classmethod
+ def convert_date_type(cls, *args, **kwargs):
+ return "date"
+
+ @classmethod
+ def convert_time_type(cls, *args, **kwargs):
+ return "time"
- class BaseAdapterStub(BaseAdapter):
- """
- A stub for an adapter that uses the cache as the database
- """
- ConnectionManager = ConnectionManagerStub
+@pytest.fixture
+def adapter(config, behavior_flags) -> BaseAdapter:
+ class BaseAdapterBehaviourFlagStub(BaseAdapterStub):
@property
def _behavior_flags(self) -> List[BehaviorFlag]:
return behavior_flags
- ###
- # Abstract methods for database-specific values, attributes, and types
- ###
- @classmethod
- def date_function(cls) -> str:
- return "date_function"
-
- @classmethod
- def is_cancelable(cls) -> bool:
- return False
-
- def list_schemas(self, database: str) -> List[str]:
- return list(self.cache.schemas)
-
- ###
- # Abstract methods about relations
- ###
- def drop_relation(self, relation: BaseRelation) -> None:
- self.cache_dropped(relation)
-
- def truncate_relation(self, relation: BaseRelation) -> None:
- self.cache_dropped(relation)
-
- def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None:
- self.cache_renamed(from_relation, to_relation)
-
- def get_columns_in_relation(self, relation: BaseRelation) -> List[Column]:
- # there's no database, so these need to be added as kwargs in the existing_relations fixture
- return relation.columns
-
- def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None:
- # there's no database, so these need to be added as kwargs in the existing_relations fixture
- object.__setattr__(current, "columns", goal.columns)
-
- def list_relations_without_caching(
- self, schema_relation: BaseRelation
- ) -> List[BaseRelation]:
- # there's no database, so use the cache as the database
- return self.cache.get_relations(schema_relation.database, schema_relation.schema)
-
- ###
- # ODBC FUNCTIONS -- these should not need to change for every adapter,
- # although some adapters may override them
- ###
- def create_schema(self, relation: BaseRelation):
- # there's no database, this happens implicitly by adding a relation to the cache
- pass
-
- def drop_schema(self, relation: BaseRelation):
- for each_relation in self.cache.get_relations(relation.database, relation.schema):
- self.cache_dropped(each_relation)
-
- @classmethod
- def quote(cls, identifier: str) -> str:
- quote_char = ""
- return f"{quote_char}{identifier}{quote_char}"
-
- ###
- # Conversions: These must be implemented by concrete implementations, for
- # converting agate types into their sql equivalents.
- ###
- @classmethod
- def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
- return "str"
-
- @classmethod
- def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
- return "float"
-
- @classmethod
- def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
- return "bool"
-
- @classmethod
- def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
- return "datetime"
-
- @classmethod
- def convert_date_type(cls, *args, **kwargs):
- return "date"
-
- @classmethod
- def convert_time_type(cls, *args, **kwargs):
- return "time"
+ return BaseAdapterBehaviourFlagStub(config, get_context("spawn"))
+
+@pytest.fixture
+def adapter_default_behaviour_flags(config) -> BaseAdapter:
return BaseAdapterStub(config, get_context("spawn"))
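
To illustrate the division of labor this patch establishes: adapters override
`_behavior_flags` with platform-specific flags only, while global flags arrive
automatically via DEFAULT_BASE_BEHAVIOR_FLAGS on BaseAdapter. A minimal sketch,
assuming dbt-common and dbt-adapters are installed (the adapter class and flag
name below are hypothetical):

    from typing import List

    from dbt.adapters.base.impl import BaseAdapter
    from dbt_common.behavior_flags import BehaviorFlag

    class MyAdapter(BaseAdapter):  # hypothetical concrete adapter
        @property
        def _behavior_flags(self) -> List[BehaviorFlag]:
            # platform-specific flags only; global flags are NOT re-declared here
            return [
                {
                    "name": "enable_fancy_merge",  # hypothetical flag
                    "default": False,
                    "docs_url": "https://docs.getdbt.com/",
                }
            ]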
From f70bae80bd68b5880aa0cfec9054cae7bb1c2e6f Mon Sep 17 00:00:00 2001
From: Quigley Malcolm
Date: Wed, 13 Nov 2024 12:02:34 -0600
Subject: [PATCH 08/22] Fix microbatch behavior flag check (#350)
---
.changes/unreleased/Fixes-20241112-141109.yaml | 6 ++++++
dbt/adapters/base/impl.py | 8 +++++++-
2 files changed, 13 insertions(+), 1 deletion(-)
create mode 100644 .changes/unreleased/Fixes-20241112-141109.yaml
diff --git a/.changes/unreleased/Fixes-20241112-141109.yaml b/.changes/unreleased/Fixes-20241112-141109.yaml
new file mode 100644
index 00000000..46df4e48
--- /dev/null
+++ b/.changes/unreleased/Fixes-20241112-141109.yaml
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Negate the check for microbatch behavior flag in determining builtins
+time: 2024-11-12T14:11:09.341634-06:00
+custom:
+ Author: QMalcolm
+ Issue: 349
diff --git a/dbt/adapters/base/impl.py b/dbt/adapters/base/impl.py
index 15093600..44817a18 100644
--- a/dbt/adapters/base/impl.py
+++ b/dbt/adapters/base/impl.py
@@ -1580,8 +1580,14 @@ def valid_incremental_strategies(self):
return ["append"]
def builtin_incremental_strategies(self):
+ """
+ List of possible builtin strategies for adapters
+
+ Microbatch is included by _default_; it is excluded only when the behavior flag
+ `require_batched_execution_for_custom_microbatch_strategy` is True.
+ """
builtin_strategies = ["append", "delete+insert", "merge", "insert_overwrite"]
- if self.behavior.require_batched_execution_for_custom_microbatch_strategy.no_warn:
+ if not self.behavior.require_batched_execution_for_custom_microbatch_strategy.no_warn:
builtin_strategies.append("microbatch")
return builtin_strategies
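
The corrected gating reads like a truth table: flag off (the default) keeps
microbatch in the builtin list, flag on removes it. A self-contained sketch of
that logic, with a stub standing in for the resolved behavior flag object (in
dbt the real object comes from dbt_common's behavior flag machinery):

    class FlagStub:
        """Stands in for a resolved behavior flag; .no_warn reads it quietly."""
        def __init__(self, setting: bool):
            self.no_warn = setting

    def builtin_incremental_strategies(flag: FlagStub):
        strategies = ["append", "delete+insert", "merge", "insert_overwrite"]
        if not flag.no_warn:  # flag False (default) -> microbatch stays builtin
            strategies.append("microbatch")
        return strategies

    assert "microbatch" in builtin_incremental_strategies(FlagStub(False))
    assert "microbatch" not in builtin_incremental_strategies(FlagStub(True))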
From ff804528cffb7b2103b59d683fca8988f01f2339 Mon Sep 17 00:00:00 2001
From: Mike Alfare <13974384+mikealfare@users.noreply.github.com>
Date: Wed, 13 Nov 2024 13:21:23 -0500
Subject: [PATCH 09/22] Revert the version bump on dbt-adapters (#353)
---
dbt/adapters/__about__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py
index b6c30336..946fa56e 100644
--- a/dbt/adapters/__about__.py
+++ b/dbt/adapters/__about__.py
@@ -1 +1 @@
-version = "1.11.0"
+version = "1.9.0b"
From 3b900590e0b4ae11aeef6ce9927c83c112f39ed8 Mon Sep 17 00:00:00 2001
From: Github Build Bot
Date: Wed, 13 Nov 2024 18:27:25 +0000
Subject: [PATCH 10/22] Bumping version to 1.9.0 and generate changelog
---
.changes/1.9.0.md | 6 ++++++
.changes/unreleased/Fixes-20241112-141109.yaml | 6 ------
.changes/unreleased/Fixes-20241112-143740.yaml | 6 ------
CHANGELOG.md | 11 +++++++++--
dbt/adapters/__about__.py | 2 +-
5 files changed, 16 insertions(+), 15 deletions(-)
create mode 100644 .changes/1.9.0.md
delete mode 100644 .changes/unreleased/Fixes-20241112-141109.yaml
delete mode 100644 .changes/unreleased/Fixes-20241112-143740.yaml
diff --git a/.changes/1.9.0.md b/.changes/1.9.0.md
new file mode 100644
index 00000000..cde85d7c
--- /dev/null
+++ b/.changes/1.9.0.md
@@ -0,0 +1,6 @@
+## dbt-adapters 1.9.0 - November 13, 2024
+
+### Fixes
+
+- Negate the check for microbatch behavior flag in determining builtins ([#349](https://github.com/dbt-labs/dbt-adapters/issues/349))
+- Move require_batched_execution_for_custom_microbatch_strategy flag to global ([#351](https://github.com/dbt-labs/dbt-adapters/issues/351))
diff --git a/.changes/unreleased/Fixes-20241112-141109.yaml b/.changes/unreleased/Fixes-20241112-141109.yaml
deleted file mode 100644
index 46df4e48..00000000
--- a/.changes/unreleased/Fixes-20241112-141109.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Negate the check for microbatch behavior flag in determining builtins
-time: 2024-11-12T14:11:09.341634-06:00
-custom:
- Author: QMalcolm
- Issue: 349
diff --git a/.changes/unreleased/Fixes-20241112-143740.yaml b/.changes/unreleased/Fixes-20241112-143740.yaml
deleted file mode 100644
index ca899cbc..00000000
--- a/.changes/unreleased/Fixes-20241112-143740.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Move require_batched_execution_for_custom_microbatch_strategy flag to global
-time: 2024-11-12T14:37:40.681284-06:00
-custom:
- Author: QMalcolm MichelleArk
- Issue: 351
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ed927965..87cca898 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -20,8 +20,6 @@ and is generated by [Changie](https://github.com/miniscruff/changie).
## dbt-adapters 1.10.4 - November 11, 2024
-
-
## dbt-adapters 1.10.3 - October 29, 2024
## dbt-adapters 1.10.2 - October 01, 2024
@@ -34,6 +32,15 @@ and is generated by [Changie](https://github.com/miniscruff/changie).
## dbt-adapters 1.10.0 - September 12, 2024
+## dbt-adapters 1.9.0 - November 13, 2024
+
+### Fixes
+
+- Negate the check for microbatch behavior flag in determining builtins ([#349](https://github.com/dbt-labs/dbt-adapters/issues/349))
+- Move require_batched_execution_for_custom_microbatch_strategy flag to global ([#351](https://github.com/dbt-labs/dbt-adapters/issues/351))
+
+
+
## dbt-adapters 1.8.0 - October 29, 2024
### Fixes
diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py
index 946fa56e..7aba6409 100644
--- a/dbt/adapters/__about__.py
+++ b/dbt/adapters/__about__.py
@@ -1 +1 @@
-version = "1.9.0b"
+version = "1.9.0"
From a8a89f4230549dcc995d9a029d9569e83001e0eb Mon Sep 17 00:00:00 2001
From: Gerda Shank
Date: Tue, 19 Nov 2024 11:31:44 -0500
Subject: [PATCH 11/22] Upgrade protobuf to >=5.0,<6.0 (#346)
---
dbt/adapters/events/adapter_types_pb2.py | 24 +++++++++++++++++-------
pyproject.toml | 4 ++--
2 files changed, 19 insertions(+), 9 deletions(-)
diff --git a/dbt/adapters/events/adapter_types_pb2.py b/dbt/adapters/events/adapter_types_pb2.py
index 4d6ae9b8..6a411842 100644
--- a/dbt/adapters/events/adapter_types_pb2.py
+++ b/dbt/adapters/events/adapter_types_pb2.py
@@ -1,11 +1,22 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
+# NO CHECKED-IN PROTOBUF GENCODE
# source: adapter_types.proto
+# Protobuf Python Version: 5.28.3
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
+_runtime_version.ValidateProtobufRuntimeVersion(
+ _runtime_version.Domain.PUBLIC,
+ 5,
+ 28,
+ 3,
+ '',
+ 'adapter_types.proto'
+)
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
@@ -20,13 +31,12 @@
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'adapter_types_pb2', _globals)
-if _descriptor._USE_C_DESCRIPTORS == False:
-
- DESCRIPTOR._options = None
- _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._options = None
- _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._serialized_options = b'8\001'
- _CACHEDUMPGRAPH_DUMPENTRY._options = None
- _CACHEDUMPGRAPH_DUMPENTRY._serialized_options = b'8\001'
+if not _descriptor._USE_C_DESCRIPTORS:
+ DESCRIPTOR._loaded_options = None
+ _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._loaded_options = None
+ _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._serialized_options = b'8\001'
+ _globals['_CACHEDUMPGRAPH_DUMPENTRY']._loaded_options = None
+ _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_options = b'8\001'
_globals['_ADAPTERCOMMONEVENTINFO']._serialized_start=100
_globals['_ADAPTERCOMMONEVENTINFO']._serialized_end=399
_globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._serialized_start=355
diff --git a/pyproject.toml b/pyproject.toml
index 7a8d1a50..3c62898d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,12 +23,12 @@ classifiers = [
"Programming Language :: Python :: 3.12",
]
dependencies = [
- "dbt-common>=1.11,<2.0",
+ "dbt-common>=1.13,<2.0",
"pytz>=2015.7",
# installed via dbt-common but used directly
"agate>=1.0,<2.0",
"mashumaro[msgpack]>=3.0,<4.0",
- "protobuf>=3.0,<5.0",
+ "protobuf>=5.0,<6.0",
"typing-extensions>=4.0,<5.0",
]
[project.urls]
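
For anyone upgrading in place, a quick sanity check (not part of the patch)
that the installed protobuf runtime satisfies the new pin:

    from importlib.metadata import version

    installed = version("protobuf")
    assert int(installed.split(".")[0]) == 5, (
        f"dbt-adapters now requires protobuf>=5.0,<6.0; found {installed}"
    )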
From 79925b09288c848aba45e988f57104acf935fb2b Mon Sep 17 00:00:00 2001
From: Github Build Bot
Date: Tue, 19 Nov 2024 22:24:15 +0000
Subject: [PATCH 12/22] Bumping version to 1.10.0 and generate changelog
---
dbt/adapters/__about__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py
index 7aba6409..bd1378f4 100644
--- a/dbt/adapters/__about__.py
+++ b/dbt/adapters/__about__.py
@@ -1 +1 @@
-version = "1.9.0"
+version = "1.10.0"
From fb9d0850c00bdad617a81aaaeef456a370253b5f Mon Sep 17 00:00:00 2001
From: Anders
Date: Wed, 20 Nov 2024 08:27:50 -0500
Subject: [PATCH 13/22] update readme (#232)
Co-authored-by: Amy Chen <46451573+amychen1776@users.noreply.github.com>
---
README.md | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/README.md b/README.md
index 7bafae02..f4cf0c5a 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
-# dbt-tests-adapter
+# dbt-adapters
This package is responsible for:
@@ -10,9 +10,11 @@ This package is responsible for:
- caching information from databases
- determining how relations are defined
-There are two major adapter types: base and sql
+In this repo there is also our testing suite, used for testing adapter functionality
+
+# Adapters
-# Directories
+There are two major adapter types: base and sql
## `base`
From 9e19bd82e8dbdbe3ec1ca282343efa7eae2b2235 Mon Sep 17 00:00:00 2001
From: Michelle Ark
Date: Wed, 20 Nov 2024 15:40:15 -0500
Subject: [PATCH 14/22] add base MicrobatchConcurrency capability (#358)
---
.changes/unreleased/Features-20241120-112806.yaml | 6 ++++++
dbt/adapters/capability.py | 3 +++
2 files changed, 9 insertions(+)
create mode 100644 .changes/unreleased/Features-20241120-112806.yaml
diff --git a/.changes/unreleased/Features-20241120-112806.yaml b/.changes/unreleased/Features-20241120-112806.yaml
new file mode 100644
index 00000000..a135f946
--- /dev/null
+++ b/.changes/unreleased/Features-20241120-112806.yaml
@@ -0,0 +1,6 @@
+kind: Features
+body: Introduce new Capability for MicrobatchConcurrency support
+time: 2024-11-20T11:28:06.258507-05:00
+custom:
+ Author: michelleark
+ Issue: "359"
diff --git a/dbt/adapters/capability.py b/dbt/adapters/capability.py
index 2bd49112..f0243053 100644
--- a/dbt/adapters/capability.py
+++ b/dbt/adapters/capability.py
@@ -21,6 +21,9 @@ class Capability(str, Enum):
"""Indicates support for getting catalog information including table-level and column-level metadata for a single
relation."""
+ MicrobatchConcurrency = "MicrobatchConcurrency"
+ """Indicates support running the microbatch incremental materialization strategy concurrently across threads."""
+
class Support(str, Enum):
Unknown = "Unknown"
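
A minimal sketch (not part of the patch; the constant name is illustrative) of
how an adapter would advertise the new capability using the Capability,
CapabilitySupport, and Support types already defined in dbt.adapters.capability:

    from dbt.adapters.capability import (
        Capability,
        CapabilityDict,
        CapabilitySupport,
        Support,
    )

    # typically assigned to the adapter class attribute `_capabilities`
    ADAPTER_CAPABILITIES = CapabilityDict(
        {Capability.MicrobatchConcurrency: CapabilitySupport(support=Support.Full)}
    )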
From 1c3479bbccf10e7348b04ffde2840a4a5526563e Mon Sep 17 00:00:00 2001
From: Github Build Bot
Date: Wed, 20 Nov 2024 23:42:34 +0000
Subject: [PATCH 15/22] Bumping version to 1.10.1 and generate changelog
---
dbt/adapters/__about__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py
index bd1378f4..8c5bf7a0 100644
--- a/dbt/adapters/__about__.py
+++ b/dbt/adapters/__about__.py
@@ -1 +1 @@
-version = "1.10.0"
+version = "1.10.1"
From 159b2340017eebbbd7d2a0112fc4171d1222b838 Mon Sep 17 00:00:00 2001
From: Peter Webb
Date: Fri, 22 Nov 2024 14:58:42 -0500
Subject: [PATCH 16/22] Add hard_deletes config and new_record Option for
Snapshots (#317)
---
.../unreleased/Features-20241104-120653.yaml | 6 +
.../simple_snapshot/new_record_mode.py | 225 ++++++++++++++++++
dbt/adapters/base/impl.py | 57 ++++-
.../materializations/snapshots/helpers.sql | 54 ++++-
.../materializations/snapshots/snapshot.sql | 2 +-
.../materializations/snapshots/strategies.sql | 12 +-
6 files changed, 344 insertions(+), 12 deletions(-)
create mode 100644 .changes/unreleased/Features-20241104-120653.yaml
create mode 100644 dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/new_record_mode.py
diff --git a/.changes/unreleased/Features-20241104-120653.yaml b/.changes/unreleased/Features-20241104-120653.yaml
new file mode 100644
index 00000000..a85e1f7f
--- /dev/null
+++ b/.changes/unreleased/Features-20241104-120653.yaml
@@ -0,0 +1,6 @@
+kind: Features
+body: Add new hard_deletes="new_record" mode for snapshots.
+time: 2024-11-04T12:06:53.225939-05:00
+custom:
+ Author: peterallenwebb
+ Issue: "317"
diff --git a/dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/new_record_mode.py b/dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/new_record_mode.py
new file mode 100644
index 00000000..c50f0ff9
--- /dev/null
+++ b/dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/new_record_mode.py
@@ -0,0 +1,225 @@
+import pytest
+
+from dbt.tests.util import check_relations_equal, run_dbt
+
+_seed_new_record_mode = """
+create table {database}.{schema}.seed (
+ id INTEGER,
+ first_name VARCHAR(50),
+ last_name VARCHAR(50),
+ email VARCHAR(50),
+ gender VARCHAR(50),
+ ip_address VARCHAR(20),
+ updated_at TIMESTAMP WITHOUT TIME ZONE
+);
+
+create table {database}.{schema}.snapshot_expected (
+ id INTEGER,
+ first_name VARCHAR(50),
+ last_name VARCHAR(50),
+ email VARCHAR(50),
+ gender VARCHAR(50),
+ ip_address VARCHAR(20),
+
+ -- snapshotting fields
+ updated_at TIMESTAMP WITHOUT TIME ZONE,
+ dbt_valid_from TIMESTAMP WITHOUT TIME ZONE,
+ dbt_valid_to TIMESTAMP WITHOUT TIME ZONE,
+ dbt_scd_id TEXT,
+ dbt_updated_at TIMESTAMP WITHOUT TIME ZONE,
+ dbt_is_deleted TEXT
+);
+
+
+-- seed inserts
+-- use the same email for two users to verify that duplicated check_cols values
+-- are handled appropriately
+insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values
+(1, 'Judith', 'Kennedy', '(not provided)', 'Female', '54.60.24.128', '2015-12-24 12:19:28'),
+(2, 'Arthur', 'Kelly', '(not provided)', 'Male', '62.56.24.215', '2015-10-28 16:22:15'),
+(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'),
+(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'),
+(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'),
+(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'),
+(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'),
+(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'),
+(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'),
+(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'),
+(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'),
+(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'),
+(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'),
+(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'),
+(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'),
+(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'),
+(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'),
+(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'),
+(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'),
+(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19');
+
+
+-- populate snapshot table
+insert into {database}.{schema}.snapshot_expected (
+ id,
+ first_name,
+ last_name,
+ email,
+ gender,
+ ip_address,
+ updated_at,
+ dbt_valid_from,
+ dbt_valid_to,
+ dbt_updated_at,
+ dbt_scd_id,
+ dbt_is_deleted
+)
+
+select
+ id,
+ first_name,
+ last_name,
+ email,
+ gender,
+ ip_address,
+ updated_at,
+ -- fields added by snapshotting
+ updated_at as dbt_valid_from,
+ null::timestamp as dbt_valid_to,
+ updated_at as dbt_updated_at,
+ md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id,
+ 'False' as dbt_is_deleted
+from {database}.{schema}.seed;
+"""
+
+_snapshot_actual_sql = """
+{% snapshot snapshot_actual %}
+
+ {{
+ config(
+ unique_key='id || ' ~ "'-'" ~ ' || first_name',
+ )
+ }}
+
+ select * from {{target.database}}.{{target.schema}}.seed
+
+{% endsnapshot %}
+"""
+
+_snapshots_yml = """
+snapshots:
+ - name: snapshot_actual
+ config:
+ strategy: timestamp
+ updated_at: updated_at
+ hard_deletes: new_record
+"""
+
+_ref_snapshot_sql = """
+select * from {{ ref('snapshot_actual') }}
+"""
+
+
+_invalidate_sql = """
+-- update records 10 - 20: change email and updated_at fields
+update {schema}.seed set
+ updated_at = updated_at + interval '1 hour',
+ email = case when id = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end
+where id >= 10 and id <= 20;
+
+
+-- invalidate records 10 - 20
+update {schema}.snapshot_expected set
+ dbt_valid_to = updated_at + interval '1 hour'
+where id >= 10 and id <= 20;
+
+"""
+
+_update_sql = """
+-- insert v2 of the 10 - 20 records
+
+insert into {database}.{schema}.snapshot_expected (
+ id,
+ first_name,
+ last_name,
+ email,
+ gender,
+ ip_address,
+ updated_at,
+ dbt_valid_from,
+ dbt_valid_to,
+ dbt_updated_at,
+ dbt_scd_id,
+ dbt_is_deleted
+)
+
+select
+ id,
+ first_name,
+ last_name,
+ email,
+ gender,
+ ip_address,
+ updated_at,
+ -- fields added by snapshotting
+ updated_at as dbt_valid_from,
+ null::timestamp as dbt_valid_to,
+ updated_at as dbt_updated_at,
+ md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id,
+ 'False' as dbt_is_deleted
+from {database}.{schema}.seed
+where id >= 10 and id <= 20;
+"""
+
+_delete_sql = """
+delete from {schema}.seed where id = 1
+"""
+
+
+class SnapshotNewRecordMode:
+ @pytest.fixture(scope="class")
+ def snapshots(self):
+ return {"snapshot.sql": _snapshot_actual_sql}
+
+ @pytest.fixture(scope="class")
+ def models(self):
+ return {
+ "snapshots.yml": _snapshots_yml,
+ "ref_snapshot.sql": _ref_snapshot_sql,
+ }
+
+ @pytest.fixture(scope="class")
+ def seed_new_record_mode(self):
+ return _seed_new_record_mode
+
+ @pytest.fixture(scope="class")
+ def invalidate_sql(self):
+ return _invalidate_sql
+
+ @pytest.fixture(scope="class")
+ def update_sql(self):
+ return _update_sql
+
+ @pytest.fixture(scope="class")
+ def delete_sql(self):
+ return _delete_sql
+
+ def test_snapshot_new_record_mode(
+ self, project, seed_new_record_mode, invalidate_sql, update_sql
+ ):
+ project.run_sql(seed_new_record_mode)
+ results = run_dbt(["snapshot"])
+ assert len(results) == 1
+
+ project.run_sql(invalidate_sql)
+ project.run_sql(update_sql)
+
+ results = run_dbt(["snapshot"])
+ assert len(results) == 1
+
+ check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"])
+
+ project.run_sql(_delete_sql)
+
+ results = run_dbt(["snapshot"])
+ assert len(results) == 1
+
+ # TODO: Further validate results.
diff --git a/dbt/adapters/base/impl.py b/dbt/adapters/base/impl.py
index 44817a18..ae172635 100644
--- a/dbt/adapters/base/impl.py
+++ b/dbt/adapters/base/impl.py
@@ -206,6 +206,14 @@ class FreshnessResponse(TypedDict):
age: float # age in seconds
+class SnapshotStrategy(TypedDict):
+ unique_key: Optional[str]
+ updated_at: Optional[str]
+ row_changed: Optional[str]
+ scd_id: Optional[str]
+ hard_deletes: Optional[str]
+
+
class BaseAdapter(metaclass=AdapterMeta):
"""The BaseAdapter provides an abstract base class for adapters.
@@ -795,8 +803,8 @@ def valid_snapshot_target(
columns = self.get_columns_in_relation(relation)
names = set(c.name.lower() for c in columns)
missing = []
- # Note: we're not checking dbt_updated_at here because it's not
- # always present.
+ # Note: we're not checking dbt_updated_at or dbt_is_deleted here because they
+ # aren't always present.
for column in ("dbt_scd_id", "dbt_valid_from", "dbt_valid_to"):
desired = column_names[column] if column_names else column
if desired not in names:
@@ -805,6 +813,28 @@ def valid_snapshot_target(
if missing:
raise SnapshotTargetNotSnapshotTableError(missing)
+ @available.parse_none
+ def assert_valid_snapshot_target_given_strategy(
+ self, relation: BaseRelation, column_names: Dict[str, str], strategy: SnapshotStrategy
+ ) -> None:
+ # Assert everything we can with the legacy function.
+ self.valid_snapshot_target(relation, column_names)
+
+ # Now do strategy-specific checks.
+ # TODO: Make these checks more comprehensive.
+ if strategy.get("hard_deletes", None) == "new_record":
+ columns = self.get_columns_in_relation(relation)
+ names = set(c.name.lower() for c in columns)
+ missing = []
+
+ for column in ("dbt_is_deleted",):
+ desired = column_names[column] if column_names else column
+ if desired not in names:
+ missing.append(desired)
+
+ if missing:
+ raise SnapshotTargetNotSnapshotTableError(missing)
+
@available.parse_none
def expand_target_column_types(
self, from_relation: BaseRelation, to_relation: BaseRelation
@@ -1795,6 +1825,29 @@ def _get_adapter_specific_run_info(cls, config) -> Dict[str, Any]:
"""
return {}
+ @available.parse_none
+ @classmethod
+ def get_hard_deletes_behavior(cls, config):
+ """Check the hard_deletes config enum, and the legacy invalidate_hard_deletes
+ config flag in order to determine which behavior should be used for deleted
+ records in a snapshot. The default is to ignore them."""
+ invalidate_hard_deletes = config.get("invalidate_hard_deletes", None)
+ hard_deletes = config.get("hard_deletes", None)
+
+ if invalidate_hard_deletes is not None and hard_deletes is not None:
+ raise DbtValidationError(
+ "You cannot set both the invalidate_hard_deletes and hard_deletes config properties on the same snapshot."
+ )
+
+ if invalidate_hard_deletes or hard_deletes == "invalidate":
+ return "invalidate"
+ elif hard_deletes == "new_record":
+ return "new_record"
+ elif hard_deletes is None or hard_deletes == "ignore":
+ return "ignore"
+
+ raise DbtValidationError("Invalid setting for property hard_deletes.")
+
COLUMNS_EQUAL_SQL = """
with diff_count as (
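
The precedence that get_hard_deletes_behavior implements is easy to miss in
the diff, so here is an equivalent standalone sketch using a plain dict for
the config (ValueError stands in for DbtValidationError):

    def resolve_hard_deletes(config: dict) -> str:
        invalidate_hard_deletes = config.get("invalidate_hard_deletes")
        hard_deletes = config.get("hard_deletes")
        if invalidate_hard_deletes is not None and hard_deletes is not None:
            raise ValueError("set only one of invalidate_hard_deletes / hard_deletes")
        if invalidate_hard_deletes or hard_deletes == "invalidate":
            return "invalidate"
        if hard_deletes == "new_record":
            return "new_record"
        if hard_deletes in (None, "ignore"):
            return "ignore"
        raise ValueError("Invalid setting for property hard_deletes.")

    assert resolve_hard_deletes({}) == "ignore"
    assert resolve_hard_deletes({"invalidate_hard_deletes": True}) == "invalidate"
    assert resolve_hard_deletes({"hard_deletes": "new_record"}) == "new_record"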
diff --git a/dbt/include/global_project/macros/materializations/snapshots/helpers.sql b/dbt/include/global_project/macros/materializations/snapshots/helpers.sql
index b4cd7c14..33492cc9 100644
--- a/dbt/include/global_project/macros/materializations/snapshots/helpers.sql
+++ b/dbt/include/global_project/macros/materializations/snapshots/helpers.sql
@@ -35,7 +35,7 @@
{% endmacro %}
{% macro get_snapshot_table_column_names() %}
- {{ return({'dbt_valid_to': 'dbt_valid_to', 'dbt_valid_from': 'dbt_valid_from', 'dbt_scd_id': 'dbt_scd_id', 'dbt_updated_at': 'dbt_updated_at'}) }}
+ {{ return({'dbt_valid_to': 'dbt_valid_to', 'dbt_valid_from': 'dbt_valid_from', 'dbt_scd_id': 'dbt_scd_id', 'dbt_updated_at': 'dbt_updated_at', 'dbt_is_deleted': 'dbt_is_deleted'}) }}
{% endmacro %}
{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}
@@ -82,7 +82,7 @@
from snapshot_query
),
- {%- if strategy.invalidate_hard_deletes %}
+ {%- if strategy.hard_deletes == 'invalidate' or strategy.hard_deletes == 'new_record' %}
deletes_source_data as (
@@ -96,6 +96,9 @@
select
'insert' as dbt_change_type,
source_data.*
+ {%- if strategy.hard_deletes == 'new_record' -%}
+ ,'False' as {{ columns.dbt_is_deleted }}
+ {%- endif %}
from insertions_source_data as source_data
left outer join snapshotted_data
@@ -113,6 +116,9 @@
'update' as dbt_change_type,
source_data.*,
snapshotted_data.{{ columns.dbt_scd_id }}
+ {%- if strategy.hard_deletes == 'new_record' -%}
+ , snapshotted_data.{{ columns.dbt_is_deleted }}
+ {%- endif %}
from updates_source_data as source_data
join snapshotted_data
@@ -122,9 +128,8 @@
)
)
- {%- if strategy.invalidate_hard_deletes -%}
+ {%- if strategy.hard_deletes == 'invalidate' or strategy.hard_deletes == 'new_record' %}
,
-
deletes as (
select
@@ -134,7 +139,38 @@
{{ snapshot_get_time() }} as {{ columns.dbt_updated_at }},
{{ snapshot_get_time() }} as {{ columns.dbt_valid_to }},
snapshotted_data.{{ columns.dbt_scd_id }}
+ {%- if strategy.hard_deletes == 'new_record' -%}
+ , snapshotted_data.{{ columns.dbt_is_deleted }}
+ {%- endif %}
+ from snapshotted_data
+ left join deletes_source_data as source_data
+ on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }}
+ where {{ unique_key_is_null(strategy.unique_key, "source_data") }}
+ )
+ {%- endif %}
+
+ {%- if strategy.hard_deletes == 'new_record' %}
+ {% set source_sql_cols = get_column_schema_from_query(source_sql) %}
+ ,
+ deletion_records as (
+ select
+ 'insert' as dbt_change_type,
+ {%- for col in source_sql_cols -%}
+ snapshotted_data.{{ adapter.quote(col.column) }},
+ {% endfor -%}
+ {%- if strategy.unique_key | is_list -%}
+ {%- for key in strategy.unique_key -%}
+ snapshotted_data.{{ key }} as dbt_unique_key_{{ loop.index }},
+ {% endfor -%}
+ {%- else -%}
+ snapshotted_data.dbt_unique_key as dbt_unique_key,
+ {% endif -%}
+ {{ snapshot_get_time() }} as {{ columns.dbt_valid_from }},
+ {{ snapshot_get_time() }} as {{ columns.dbt_updated_at }},
+ snapshotted_data.{{ columns.dbt_valid_to }} as {{ columns.dbt_valid_to }},
+ snapshotted_data.{{ columns.dbt_scd_id }},
+ 'True' as {{ columns.dbt_is_deleted }}
from snapshotted_data
left join deletes_source_data as source_data
on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }}
@@ -145,10 +181,15 @@
select * from insertions
union all
select * from updates
- {%- if strategy.invalidate_hard_deletes %}
+ {%- if strategy.hard_deletes == 'invalidate' or strategy.hard_deletes == 'new_record' %}
union all
select * from deletes
{%- endif %}
+ {%- if strategy.hard_deletes == 'new_record' %}
+ union all
+ select * from deletion_records
+ {%- endif %}
+
{%- endmacro %}
@@ -165,6 +206,9 @@
{{ strategy.updated_at }} as {{ columns.dbt_updated_at }},
{{ strategy.updated_at }} as {{ columns.dbt_valid_from }},
{{ get_dbt_valid_to_current(strategy, columns) }}
+ {%- if strategy.hard_deletes == 'new_record' -%}
+ , 'False' as {{ columns.dbt_is_deleted }}
+ {% endif -%}
from (
{{ sql }}
) sbq
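
Concretely, with hard_deletes='new_record': when seed row id=1 is deleted and
the snapshot runs again, the existing snapshot record for id=1 has its
dbt_valid_to closed out (as in 'invalidate' mode), and the new deletion_records
CTE additionally emits one more 'insert' row for id=1 whose dbt_valid_from is
the snapshot time and whose dbt_is_deleted column is 'True', so the history
captures the deletion itself.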
diff --git a/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql b/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql
index 0c9590b6..683a0c58 100644
--- a/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql
+++ b/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql
@@ -37,7 +37,7 @@
{% set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() %}
- {{ adapter.valid_snapshot_target(target_relation, columns) }}
+ {{ adapter.assert_valid_snapshot_target_given_strategy(target_relation, columns, strategy) }}
{% set build_or_select_sql = snapshot_staging_table(strategy, sql, target_relation) %}
{% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}
diff --git a/dbt/include/global_project/macros/materializations/snapshots/strategies.sql b/dbt/include/global_project/macros/materializations/snapshots/strategies.sql
index f9f5afbd..49a381e8 100644
--- a/dbt/include/global_project/macros/materializations/snapshots/strategies.sql
+++ b/dbt/include/global_project/macros/materializations/snapshots/strategies.sql
@@ -54,7 +54,8 @@
{# The model_config parameter is no longer used, but is passed in anyway for compatibility. #}
{% set primary_key = config.get('unique_key') %}
{% set updated_at = config.get('updated_at') %}
- {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes') or false %}
+ {% set hard_deletes = adapter.get_hard_deletes_behavior(config) %}
+ {% set invalidate_hard_deletes = hard_deletes == 'invalidate' %}
{% set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() %}
{#/*
@@ -78,7 +79,8 @@
"updated_at": updated_at,
"row_changed": row_changed_expr,
"scd_id": scd_id_expr,
- "invalidate_hard_deletes": invalidate_hard_deletes
+ "invalidate_hard_deletes": invalidate_hard_deletes,
+ "hard_deletes": hard_deletes
}) %}
{% endmacro %}
@@ -141,7 +143,8 @@
{# The model_config parameter is no longer used, but is passed in anyway for compatibility. #}
{% set check_cols_config = config.get('check_cols') %}
{% set primary_key = config.get('unique_key') %}
- {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes') or false %}
+ {% set hard_deletes = adapter.get_hard_deletes_behavior(config) %}
+ {% set invalidate_hard_deletes = hard_deletes == 'invalidate' %}
{% set updated_at = config.get('updated_at') or snapshot_get_time() %}
{% set column_added = false %}
@@ -175,6 +178,7 @@
"updated_at": updated_at,
"row_changed": row_changed_expr,
"scd_id": scd_id_expr,
- "invalidate_hard_deletes": invalidate_hard_deletes
+ "invalidate_hard_deletes": invalidate_hard_deletes,
+ "hard_deletes": hard_deletes
}) %}
{% endmacro %}
From 2b29f2eb55004ace36420b1a45e090ee5c5e4823 Mon Sep 17 00:00:00 2001
From: Github Build Bot
Date: Fri, 22 Nov 2024 22:34:15 +0000
Subject: [PATCH 17/22] Bumping version to 1.10.2 and generate changelog
---
dbt/adapters/__about__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py
index 8c5bf7a0..8c657eec 100644
--- a/dbt/adapters/__about__.py
+++ b/dbt/adapters/__about__.py
@@ -1 +1 @@
-version = "1.10.1"
+version = "1.10.2"
From 2793f9fb88252bd365375772df36a3e3b7273c2e Mon Sep 17 00:00:00 2001
From: Mila Page <67295367+VersusFacit@users.noreply.github.com>
Date: Mon, 25 Nov 2024 09:24:52 -0800
Subject: [PATCH 18/22] Pin Mashumaro to keep aligned with dbt core dependencies
(#363)
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 3c62898d..47cd3ece 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,7 +27,7 @@ dependencies = [
"pytz>=2015.7",
# installed via dbt-common but used directly
"agate>=1.0,<2.0",
- "mashumaro[msgpack]>=3.0,<4.0",
+ "mashumaro[msgpack]>=3.9,<3.15",
"protobuf>=5.0,<6.0",
"typing-extensions>=4.0,<5.0",
]
From bb68737b1e9be045f892a32e0797f28a0f3586ac Mon Sep 17 00:00:00 2001
From: Github Build Bot
Date: Mon, 25 Nov 2024 17:59:54 +0000
Subject: [PATCH 19/22] Bumping version to 1.10.3 and generate changelog
---
dbt/adapters/__about__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py
index 8c657eec..977620c3 100644
--- a/dbt/adapters/__about__.py
+++ b/dbt/adapters/__about__.py
@@ -1 +1 @@
-version = "1.10.2"
+version = "1.10.3"
From 86efa882075fbba7cc9ecd91d3ebdebca22e5026 Mon Sep 17 00:00:00 2001
From: Quigley Malcolm
Date: Wed, 4 Dec 2024 13:18:33 -0600
Subject: [PATCH 20/22] Append `model.batch.id` to suffix in
`make_temp_relation` when present (#361)
---
dbt/include/global_project/macros/adapters/relation.sql | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/dbt/include/global_project/macros/adapters/relation.sql b/dbt/include/global_project/macros/adapters/relation.sql
index b9af4969..ae1f041d 100644
--- a/dbt/include/global_project/macros/adapters/relation.sql
+++ b/dbt/include/global_project/macros/adapters/relation.sql
@@ -7,6 +7,11 @@
{% endmacro %}
{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}
+ {#-- This ensures microbatch batches get unique temp relations to avoid clobbering --#}
+ {% if suffix == '__dbt_tmp' and model.batch %}
+ {% set suffix = suffix ~ '_' ~ model.batch.id %}
+ {% endif %}
+
{{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}
{% endmacro %}
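
The effect of the new guard, restated as a standalone sketch (not part of the
patch): only the default suffix picks up the batch id, so callers passing an
explicit suffix are unaffected.

    from typing import Optional

    def temp_suffix(suffix: str = "__dbt_tmp", batch_id: Optional[str] = None) -> str:
        # mirror the macro: append the batch id only for the default suffix
        if suffix == "__dbt_tmp" and batch_id:
            suffix = f"{suffix}_{batch_id}"
        return suffix

    assert temp_suffix() == "__dbt_tmp"
    assert temp_suffix(batch_id="20241204T000000") == "__dbt_tmp_20241204T000000"
    assert temp_suffix("__custom", "20241204T000000") == "__custom"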
From a64351152f35bd474f740eba3900838502bf6f65 Mon Sep 17 00:00:00 2001
From: Github Build Bot
Date: Thu, 5 Dec 2024 01:46:11 +0000
Subject: [PATCH 21/22] Bumping version to 1.10.4 and generate changelog
---
dbt/adapters/__about__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py
index 977620c3..08e0d06b 100644
--- a/dbt/adapters/__about__.py
+++ b/dbt/adapters/__about__.py
@@ -1 +1 @@
-version = "1.10.3"
+version = "1.10.4"
From fb11e9fa169c21abc1de3888c4c52cb42a308448 Mon Sep 17 00:00:00 2001
From: Mike Alfare <13974384+mikealfare@users.noreply.github.com>
Date: Tue, 10 Dec 2024 13:45:24 -0500
Subject: [PATCH 22/22] ADAP-1124: Establish shared workflows for the monorepo
migration (#362)
---
.github/workflows/_changelog-entry-check.yml | 65 +++
.github/workflows/_code-quality.yml | 39 ++
.github/workflows/_generate-changelog.yml | 229 ++++++++
.github/workflows/_package-directory.yml | 33 ++
.github/workflows/_publish-internal.yml | 106 ++++
.github/workflows/_publish-pypi.yml | 91 ++++
.github/workflows/_unit-tests.yml | 72 +++
.github/workflows/_verify-build.yml | 73 +++
.github/workflows/build.yml | 54 --
.github/workflows/changelog-existence.yml | 37 --
.github/workflows/code-quality.yml | 34 --
.github/workflows/docs-issue.yml | 41 --
.github/workflows/github-release.yml | 259 ---------
.github/workflows/issue-triage.yml | 20 +
.github/workflows/precommit-autoupdate.yml | 22 -
.github/workflows/publish.yml | 68 +++
.github/workflows/pull-request-checks.yml | 58 ++
.github/workflows/release.yml | 180 ------
.github/workflows/release_prep_hatch.yml | 542 -------------------
.github/workflows/resubmit-for-triage.yml | 31 --
.github/workflows/scheduled-maintenance.yml | 41 ++
.github/workflows/stale.yml | 30 -
.github/workflows/unit-tests.yml | 49 --
.github/workflows/user-docs.yml | 31 ++
pyproject.toml | 6 +
25 files changed, 932 insertions(+), 1279 deletions(-)
create mode 100644 .github/workflows/_changelog-entry-check.yml
create mode 100644 .github/workflows/_code-quality.yml
create mode 100644 .github/workflows/_generate-changelog.yml
create mode 100644 .github/workflows/_package-directory.yml
create mode 100644 .github/workflows/_publish-internal.yml
create mode 100644 .github/workflows/_publish-pypi.yml
create mode 100644 .github/workflows/_unit-tests.yml
create mode 100644 .github/workflows/_verify-build.yml
delete mode 100644 .github/workflows/build.yml
delete mode 100644 .github/workflows/changelog-existence.yml
delete mode 100644 .github/workflows/code-quality.yml
delete mode 100644 .github/workflows/docs-issue.yml
delete mode 100644 .github/workflows/github-release.yml
create mode 100644 .github/workflows/issue-triage.yml
delete mode 100644 .github/workflows/precommit-autoupdate.yml
create mode 100644 .github/workflows/publish.yml
create mode 100644 .github/workflows/pull-request-checks.yml
delete mode 100644 .github/workflows/release.yml
delete mode 100644 .github/workflows/release_prep_hatch.yml
delete mode 100644 .github/workflows/resubmit-for-triage.yml
create mode 100644 .github/workflows/scheduled-maintenance.yml
delete mode 100644 .github/workflows/stale.yml
delete mode 100644 .github/workflows/unit-tests.yml
create mode 100644 .github/workflows/user-docs.yml
diff --git a/.github/workflows/_changelog-entry-check.yml b/.github/workflows/_changelog-entry-check.yml
new file mode 100644
index 00000000..ddc97290
--- /dev/null
+++ b/.github/workflows/_changelog-entry-check.yml
@@ -0,0 +1,65 @@
+name: "Changelog entry check"
+
+# this cannot be tested via workflow_dispatch
+# dorny/paths-filter inspects the current trigger to determine how to compare branches
+on:
+ workflow_call:
+ inputs:
+ package:
+ description: "Choose the package to test"
+ type: string
+ default: "dbt-adapters"
+ pull-request:
+ description: "The PR number"
+ type: string
+ required: true
+
+permissions:
+ contents: read
+ pull-requests: write
+
+jobs:
+ package:
+ uses: ./.github/workflows/_package-directory.yml
+ with:
+ package: ${{ inputs.package }}
+
+ changelog-check:
+ needs: package
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'Skip Changelog') }}
+ outputs:
+ exists: ${{ steps.changelog.outputs.exists }}
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ steps:
+ - id: changelog
+ uses: dorny/paths-filter@v3
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ filters: |
+ exists:
+ - added|modified: '${{ needs.package.outputs.directory }}.changes/unreleased/**.yaml'
+
+ comment:
+ needs: changelog-check
+ if: needs.changelog-check.outputs.exists == 'false'
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ env:
+ AUTHOR: "github-actions[bot]"
+ COMMENT: >-
+ Thank you for your pull request! We could not find a changelog entry for this change in the ${{ inputs.package }} package.
+ For details on how to document a change, see the [Contributing Guide](https://github.com/dbt-labs/dbt-adapters/blob/main/CONTRIBUTING.md).
+ steps:
+ - id: comment
+ uses: peter-evans/find-comment@v3
+ with:
+ issue-number: ${{ inputs.pull-request }}
+ comment-author: ${{ env.AUTHOR }}
+ body-includes: ${{ env.COMMENT }}
+ - if: steps.comment.outputs.comment-body == ''
+ run: gh issue comment ${{ inputs.pull-request }} --repo ${{ github.repository }} --body "${{ env.COMMENT }}"
+ shell: bash
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - uses: actions/github-script@v7
+ with:
+ script: core.setFailed('Changelog entry required to merge.')
diff --git a/.github/workflows/_code-quality.yml b/.github/workflows/_code-quality.yml
new file mode 100644
index 00000000..05d2e6da
--- /dev/null
+++ b/.github/workflows/_code-quality.yml
@@ -0,0 +1,39 @@
+name: "Code quality"
+
+on:
+ workflow_call:
+ inputs:
+ branch:
+ description: "Choose the branch to check"
+ type: string
+ default: "main"
+ repository:
+ description: "Choose the repository to check, when using a fork"
+ type: string
+ default: "dbt-labs/dbt-adapters"
+ workflow_dispatch:
+ inputs:
+ branch:
+ description: "Choose the branch to check"
+ type: string
+ default: "main"
+ repository:
+ description: "Choose the repository to check, when using a fork"
+ type: string
+ default: "dbt-labs/dbt-adapters"
+
+permissions:
+ contents: read
+
+jobs:
+ code-quality:
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.branch }}
+ repository: ${{ inputs.repository }}
+ - uses: actions/setup-python@v5
+ with:
+ python-version: ${{ vars.DEFAULT_PYTHON_VERSION }}
+ - uses: pre-commit/action@v3.0.1
diff --git a/.github/workflows/_generate-changelog.yml b/.github/workflows/_generate-changelog.yml
new file mode 100644
index 00000000..37b67bbb
--- /dev/null
+++ b/.github/workflows/_generate-changelog.yml
@@ -0,0 +1,229 @@
+name: "Changelog generation"
+
+on:
+ workflow_call:
+ inputs:
+ package:
+ description: "Choose the package getting published"
+ type: string
+ default: "dbt-adapters"
+ merge:
+ description: "Choose whether to merge the changelog branch"
+ type: boolean
+ default: true
+ branch:
+ description: "Choose the branch to use"
+ type: string
+ default: "main"
+ outputs:
+ branch-name:
+ description: "The SHA to release"
+ value: ${{ jobs.branch.outputs.name }}
+ secrets:
+ FISHTOWN_BOT_PAT:
+ description: "Token to commit/merge changes into branches"
+ required: true
+ IT_TEAM_MEMBERSHIP:
+ description: "Token that can view org level teams"
+ required: true
+ workflow_dispatch:
+ inputs:
+ package:
+ description: "Choose the package getting published"
+ type: string
+ default: "dbt-adapters"
+ merge:
+ description: "Choose whether to merge the changelog branch"
+ type: boolean
+ default: false
+ branch:
+ description: "Choose the branch to use"
+ type: string
+ default: "main"
+ secrets:
+ FISHTOWN_BOT_PAT:
+ description: "Token to commit/merge changes into branches"
+ required: true
+ IT_TEAM_MEMBERSHIP:
+ description: "Token that can view org level teams"
+ required: true
+
+permissions:
+ contents: write
+
+defaults:
+ run:
+ shell: bash
+
+jobs:
+ package:
+ uses: ./.github/workflows/_package-directory.yml
+ with:
+ package: ${{ inputs.package }}
+
+ version:
+ needs: package
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ outputs:
+ raw: ${{ steps.version.outputs.raw }}
+ base: ${{ steps.semver.outputs.base-version }}
+ prerelease: ${{ steps.semver.outputs.pre-release }}
+ is-prerelease: ${{ steps.semver.outputs.is-pre-release }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.branch }}
+ - uses: actions/setup-python@v5
+ with:
+ python-version: ${{ vars.DEFAULT_PYTHON_VERSION }}
+ - uses: pypa/hatch@install
+ - id: version
+ run: echo "raw=$(hatch version)" >> $GITHUB_OUTPUT
+ working-directory: ./${{ needs.package.outputs.directory }}
+ - id: semver
+ uses: dbt-labs/actions/parse-semver@v1.1.1
+ with:
+ version: ${{ steps.version.outputs.raw }}
+
+ changelog:
+ needs: [package, version]
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ outputs:
+ path: ${{ steps.changelog.outputs.path }}
+ exists: ${{ steps.changelog.outputs.exists }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.branch }}
+ - id: changelog
+ run: |
+ path=".changes/${{ needs.version.outputs.base }}"
+ if [[ ${{ needs.version.outputs.is-prerelease }} -eq 1 ]]
+ then
+ path+="-${{ needs.version.outputs.prerelease }}"
+ fi
+ path+=".md"
+
+ echo "path=$path" >> $GITHUB_OUTPUT
+
+ exists=false
+ if test -f $path
+ then
+ exists=true
+ fi
+ echo "exists=$exists">> $GITHUB_OUTPUT
+ working-directory: ./${{ needs.package.outputs.directory }}
+
+ temp-branch:
+ needs: [version, changelog]
+ if: needs.changelog.outputs.exists == 'false'
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ outputs:
+ name: ${{ steps.branch.outputs.name }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.branch }}
+ - id: branch
+ run: |
+ name="prep-release/${{ inputs.package }}/$GITHUB_RUN_ID"
+ echo "name=$name" >> $GITHUB_OUTPUT
+ - run: |
+ git checkout -b ${{ steps.branch.outputs.name }}
+ git push -u origin ${{ steps.branch.outputs.name }}
+
+ dbt-membership:
+ needs: [version, changelog]
+ if: needs.changelog.outputs.exists == 'false'
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ outputs:
+ team: ${{ steps.team.outputs.team }}
+ steps:
+ - id: temp-file
+ run: echo "name=output_$GITHUB_RUN_ID.json" >> $GITHUB_OUTPUT
+ - run: |
+ gh api -H "Accept: application/vnd.github+json" orgs/dbt-labs/teams/core-group/members > ${{ steps.temp-file.outputs.name }}
+ env:
+ GH_TOKEN: ${{ secrets.IT_TEAM_MEMBERSHIP }}
+ - id: team
+ run: |
+ team_list=$(jq -r '.[].login' ${{ steps.temp-file.outputs.name }})
+ team_list_single=$(echo $team_list | tr '\n' ' ')
+ echo "team=$team_list_single" >> $GITHUB_OUTPUT
+ - run: rm ${{ steps.temp-file.outputs.name }}
+
+ generate-changelog:
+ needs: [package, version, changelog, temp-branch, dbt-membership]
+ if: needs.changelog.outputs.exists == 'false'
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.temp-branch.outputs.name }}
+ - run: echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
+ - run: |
+ brew install pre-commit
+ brew tap miniscruff/changie https://github.com/miniscruff/changie
+ brew install changie
+ - run: |
+ if [[ ${{ needs.version.outputs.is-prerelease }} -eq 1 ]]
+ then
+ changie batch ${{ needs.version.outputs.base }} --move-dir '${{ needs.version.outputs.base }}' --prerelease ${{ needs.version.outputs.prerelease }}
+ elif [[ -d ".changes/${{ needs.version.outputs.base }}" ]]
+ then
+ changie batch ${{ needs.version.outputs.base }} --include '${{ needs.version.outputs.base }}' --remove-prereleases
+ else # releasing a final patch with no prereleases
+ changie batch ${{ needs.version.outputs.base }}
+ fi
+ changie merge
+ working-directory: ./${{ needs.package.outputs.directory }}
+ env:
+ CHANGIE_CORE_TEAM: ${{ needs.dbt-membership.outputs.team }}
+ - run: |
+ pre-commit run trailing-whitespace --files __version__.py CHANGELOG.md .changes/*
+ pre-commit run end-of-file-fixer --files __version__.py CHANGELOG.md .changes/*
+ working-directory: ./${{ needs.package.outputs.directory }}
+ continue-on-error: true
+ - run: |
+ git config user.name "Github Build Bot"
+ git config user.email "buildbot@fishtownanalytics.com"
+ git pull
+ git add .
+ git commit -m "generate changelog"
+ git push
+ working-directory: ./${{ needs.package.outputs.directory }}
+
+ merge-changes:
+ needs: [temp-branch, generate-changelog]
+ if: ${{ needs.temp-branch.outputs.name != '' && inputs.merge }}
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ steps:
+ - uses: everlytic/branch-merge@1.1.5
+ with:
+ source_ref: ${{ needs.temp-branch.outputs.name }}
+ target_branch: ${{ inputs.branch }}
+ github_token: ${{ secrets.FISHTOWN_BOT_PAT }}
+ commit_message_template: "[Automated] Merged {source_ref} into target {target_branch} during release process"
+ - run: git push origin -d ${{ needs.temp-branch.outputs.name }}
+
+ branch:
+ needs: [temp-branch, merge-changes]
+ if: ${{ !failure() && !cancelled() }}
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ # always run this job, regardless of whether changelog generation was skipped
+ # Get the sha that will be released. If the changelog already exists on the input sha and the version has already been bumped,
+ # then it is what we will release. Otherwise, we generated a changelog and did the version bump in this workflow and there is a
+ # new sha to use from the merge we just did. Grab that here instead.
+ outputs:
+ name: ${{ steps.branch.outputs.name }}
+ steps:
+ - id: branch
+ run: |
+ branch=""
+ if [[ ${{ needs.temp-branch.outputs.name == '' || inputs.merge }} == true ]]
+ then
+ branch="${{ inputs.branch }}"
+ else
+ branch=${{ needs.temp-branch.outputs.name }}
+ fi
+ echo "name=$branch" >> $GITHUB_OUTPUT
diff --git a/.github/workflows/_package-directory.yml b/.github/workflows/_package-directory.yml
new file mode 100644
index 00000000..d10e9758
--- /dev/null
+++ b/.github/workflows/_package-directory.yml
@@ -0,0 +1,33 @@
+name: "Package directory"
+
+on:
+ workflow_call:
+ inputs:
+ package:
+ description: "Choose the package whose directory you need"
+ type: string
+ default: "dbt-adapters"
+ outputs:
+ directory:
+ description: "The root directory of the package"
+ value: ${{ jobs.package.outputs.directory }}
+
+defaults:
+ run:
+ shell: bash
+
+jobs:
+ package:
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ outputs:
+ directory: ${{ steps.package.outputs.directory }}
+ steps:
+ - id: package
+ run: |
+ if [[ ${{ inputs.package }} == "dbt-adapters" ]]
+ then
+ directory=""
+ else
+ directory="${{ inputs.package }}/"
+ fi
+ echo "directory=$directory" >> $GITHUB_OUTPUT
diff --git a/.github/workflows/_publish-internal.yml b/.github/workflows/_publish-internal.yml
new file mode 100644
index 00000000..42fb6290
--- /dev/null
+++ b/.github/workflows/_publish-internal.yml
@@ -0,0 +1,106 @@
+name: "Publish internally"
+
+on:
+ workflow_call:
+ inputs:
+ package:
+ description: "Choose the package to publish"
+ type: string
+ default: "dbt-adapters"
+ deploy-to:
+ description: "Choose whether to publish to test or prod"
+ type: string
+ default: "prod"
+ branch:
+ description: "Choose the branch to publish"
+ type: string
+ default: "main"
+ workflow_dispatch:
+ inputs:
+ package:
+ description: "Choose the package to publish"
+ type: choice
+ options: ["dbt-adapters"]
+ deploy-to:
+ description: "Choose whether to publish to test or prod"
+ type: environment
+ default: "test"
+ branch:
+ description: "Choose the branch to publish"
+ type: string
+ default: "main"
+
+defaults:
+ run:
+ shell: bash
+
+jobs:
+ package:
+ uses: ./.github/workflows/_package-directory.yml
+ with:
+ package: ${{ inputs.package }}
+
+ publish:
+ needs: package
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ environment:
+ name: ${{ inputs.deploy-to }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.branch }}
+ - uses: actions/setup-python@v5
+ with:
+ python-version: ${{ vars.DEFAULT_PYTHON_VERSION }}
+ - uses: pypa/hatch@install
+ - uses: aws-actions/configure-aws-credentials@v4
+ with:
+ aws-region: ${{ vars.AWS_REGION }}
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ - id: package
+ run: |
+ # strip the pre-release off to find all iterations of this patch
+ hatch version release
+ echo "version=$(hatch version)" >> $GITHUB_OUTPUT
+ working-directory: ./${{ needs.package.outputs.directory }}
+ - id: published
+ run: |
+ versions_published="$(aws codeartifact list-package-versions \
+ --domain ${{ vars.AWS_DOMAIN }} \
+ --repository ${{ vars.AWS_REPOSITORY }} \
+ --format pypi \
+ --package ${{ inputs.package }} \
+ --output json \
+ --query 'versions[*].version' | jq -r '.[]' | grep "^${{ steps.package.outputs.version }}" || true )" # suppress pipefail only here
+ echo "versions=$(echo "${versions_published[*]}"| tr '\n' ',')" >> $GITHUB_OUTPUT
+ - id: next
+ uses: dbt-labs/dbt-release/.github/actions/next-cloud-release-version@main
+ with:
+ version_number: ${{ steps.package.outputs.version }}
+ versions_published: ${{ steps.published.outputs.versions }}
+ - run: |
+ VERSION=${{ steps.next.outputs.internal_release_version }}+$(git rev-parse HEAD)
+ tee <<< "version = \"$VERSION\"" ./src/dbt/adapters/$(cut -c 5- ${{ inputs.package }})/__version__.py
+ working-directory: ./${{ needs.package.outputs.directory }}
+ - run: sed -i "/dbt-core[<>~=]/d" ./pyproject.toml
+ working-directory: ./${{ needs.package.outputs.directory }}
+ - run: |
+ export HATCH_INDEX_USER=${{ secrets.AWS_USER }}
+
+ export HATCH_INDEX_AUTH=$(aws codeartifact get-authorization-token \
+ --domain ${{ vars.AWS_DOMAIN }} \
+ --output text \
+ --query authorizationToken)
+
+ export HATCH_INDEX_REPO=$(aws codeartifact get-repository-endpoint \
+ --domain ${{ vars.AWS_DOMAIN }} \
+ --repository ${{ vars.AWS_REPOSITORY }} \
+ --format pypi \
+ --output text \
+ --query repositoryEndpoint)
+
+ hatch build --clean
+ hatch run build:check-all
+ hatch publish
+ working-directory: ./${{ needs.package.outputs.directory }}
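The `__version__.py` step above derives the module name by dropping the `dbt-` prefix character-wise; a quick sketch of that derivation (version and sha values hypothetical):

    pkg="dbt-adapters"
    module=$(cut -c 5- <<< "$pkg")      # drops the first four characters ("dbt-") -> "adapters"

    sha=$(git rev-parse HEAD)
    VERSION="1.10.0.post1+$sha"         # hypothetical internal release version
    echo "version = \"$VERSION\""       # the workflow tees this into ./src/dbt/adapters/$module/__version__.py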
diff --git a/.github/workflows/_publish-pypi.yml b/.github/workflows/_publish-pypi.yml
new file mode 100644
index 00000000..f05b1fd6
--- /dev/null
+++ b/.github/workflows/_publish-pypi.yml
@@ -0,0 +1,91 @@
+name: "Publish to PyPI"
+
+on:
+ workflow_call:
+ inputs:
+ package:
+ description: "Choose the package to publish"
+ type: string
+ default: "dbt-adapters"
+ deploy-to:
+ description: "Choose whether to publish to test or prod"
+ type: string
+ default: "prod"
+ branch:
+ description: "Choose the branch to publish"
+ type: string
+ default: "main"
+ workflow_dispatch:
+ inputs:
+ package:
+ description: "Choose the package to publish"
+ type: choice
+ options: ["dbt-adapters"]
+ deploy-to:
+ description: "Choose whether to publish to test or prod"
+ type: environment
+ default: "test"
+ branch:
+ description: "Choose the branch to publish"
+ type: string
+ default: "main"
+
+permissions:
+ contents: read
+
+defaults:
+ run:
+ shell: bash
+
+jobs:
+ package:
+ uses: ./.github/workflows/_package-directory.yml
+ with:
+ package: ${{ inputs.package }}
+
+ publish:
+ needs: package
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ environment:
+ name: ${{ inputs.deploy-to }}
+ url: ${{ vars.PYPI_PROJECT_URL }}/${{ inputs.package }}
+ permissions:
+ # this permission is required for trusted publishing
+ # see https://github.com/marketplace/actions/pypi-publish
+ id-token: write
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.branch }}
+ - uses: actions/setup-python@v5
+ with:
+ python-version: ${{ vars.DEFAULT_PYTHON_VERSION }}
+ - uses: pypa/hatch@install
+ # when deploying to test, hatch resolves dependencies from test PyPI first and falls back to prod PyPI
+ # this is configured via environment variables on the test environment in GitHub
+ - run: hatch build && hatch run build:check-all
+ working-directory: ./${{ needs.package.outputs.directory }}
+ - uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ repository-url: ${{ vars.PYPI_REPOSITORY_URL }}
+ packages-dir: ./${{ needs.package.outputs.directory }}dist/
+
+ verify:
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ needs: [package, publish]
+ # use the deploy environment so the check hits the correct index (test vs. prod PyPI)
+ environment:
+ name: ${{ inputs.deploy-to }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.branch }}
+ - id: version
+ run: echo "version=$(hatch version)" >> $GITHUB_OUTPUT
+ working-directory: ./${{ needs.package.outputs.directory }}
+ - uses: nick-fields/retry@v3
+ with:
+ timeout_seconds: 10
+ retry_wait_seconds: 10
+ max_attempts: 15 # 5 minutes: (10s timeout + 10s delay) * 15 attempts
+ command: wget ${{ vars.PYPI_PROJECT_URL }}/${{ inputs.package }}/${{ steps.version.outputs.version }}
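The retry step amounts to polling the PyPI project page until the new version's page resolves; roughly equivalent shell, with a hypothetical URL:

    url="https://pypi.org/project/dbt-adapters/1.10.0"   # vars.PYPI_PROJECT_URL + package + version (hypothetical)
    for attempt in $(seq 1 15)                           # (10s timeout + 10s delay) * 15 attempts ~ 5 minutes
    do
        wget --timeout=10 -q -O /dev/null "$url" && { echo "release page is live"; break; }
        sleep 10
    done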
diff --git a/.github/workflows/_unit-tests.yml b/.github/workflows/_unit-tests.yml
new file mode 100644
index 00000000..0c0a8215
--- /dev/null
+++ b/.github/workflows/_unit-tests.yml
@@ -0,0 +1,72 @@
+name: "Unit tests"
+
+on:
+ workflow_call:
+ inputs:
+ package:
+ description: "Choose the package to test"
+ type: string
+ default: "dbt-adapters"
+ branch:
+ description: "Choose the branch to test"
+ type: string
+ default: "main"
+ repository:
+ description: "Choose the repository to test, when using a fork"
+ type: string
+ default: "dbt-labs/dbt-adapters"
+ os:
+ description: "Choose the OS to test against"
+ type: string
+ default: "ubuntu-22.04"
+ python-version:
+ description: "Choose the Python version to test against"
+ type: string
+ default: "3.9"
+ workflow_dispatch:
+ inputs:
+ package:
+ description: "Choose the package to test"
+ type: choice
+ options: ["dbt-adapters"]
+ branch:
+ description: "Choose the branch to test"
+ type: string
+ default: "main"
+ repository:
+ description: "Choose the repository to test, when using a fork"
+ type: string
+ default: "dbt-labs/dbt-adapters"
+ os:
+ description: "Choose the OS to test against"
+ type: string
+ default: "ubuntu-22.04"
+ python-version:
+ description: "Choose the Python version to test against"
+ type: choice
+ options: ["3.9", "3.10", "3.11", "3.12"]
+
+permissions:
+ contents: read
+
+jobs:
+ package:
+ uses: ./.github/workflows/_package-directory.yml
+ with:
+ package: ${{ inputs.package }}
+
+ unit-tests:
+ needs: package
+ runs-on: ${{ inputs.os }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.branch }}
+ repository: ${{ inputs.repository }}
+ - uses: actions/setup-python@v5
+ with:
+ python-version: ${{ inputs.python-version }}
+ - uses: pypa/hatch@install
+ - run: hatch run unit-tests
+ shell: bash
+ working-directory: ./${{ needs.package.outputs.directory }}
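Because the workflow also exposes `workflow_dispatch`, it can be run ad hoc with the GitHub CLI; a sketch (input values illustrative):

    gh workflow run _unit-tests.yml \
        --repo dbt-labs/dbt-adapters \
        -f package="dbt-adapters" \
        -f branch="main" \
        -f python-version="3.12"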
diff --git a/.github/workflows/_verify-build.yml b/.github/workflows/_verify-build.yml
new file mode 100644
index 00000000..f667b600
--- /dev/null
+++ b/.github/workflows/_verify-build.yml
@@ -0,0 +1,73 @@
+name: "Verify build"
+
+on:
+ workflow_call:
+ inputs:
+ package:
+ description: "Choose the package to build"
+ type: string
+ default: "dbt-adapters"
+ branch:
+ description: "Choose the branch to build"
+ type: string
+ default: "main"
+ repository:
+ description: "Choose the repository to build, (used primarily when testing a fork)"
+ type: string
+ default: "dbt-labs/dbt-adapters"
+ os:
+ description: "Choose the OS to test against"
+ type: string
+ default: "ubuntu-22.04"
+ python-version:
+ description: "Choose the Python version to test against"
+ type: string
+ default: "3.9"
+ workflow_dispatch:
+ inputs:
+ package:
+ description: "Choose the package to build"
+ type: choice
+ options:
+ - "dbt-adapters"
+ - "dbt-tests-adapter"
+ branch:
+ description: "Choose the branch to build"
+ type: string
+ default: "main"
+ repository:
+ description: "Choose the repository to build, (used primarily when testing a fork)"
+ type: string
+ default: "dbt-labs/dbt-adapters"
+ os:
+ description: "Choose the OS to test against"
+ type: string
+ default: "ubuntu-22.04"
+ python-version:
+ description: "Choose the Python version to test against"
+ type: choice
+ options: ["3.9", "3.10", "3.11", "3.12"]
+
+permissions: read-all
+
+jobs:
+ package:
+ uses: ./.github/workflows/_package-directory.yml
+ with:
+ package: ${{ inputs.package }}
+
+ build:
+ needs: package
+ runs-on: ${{ inputs.os }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.branch }}
+ repository: ${{ inputs.repository }}
+ - uses: actions/setup-python@v5
+ with:
+ python-version: ${{ inputs.python-version }}
+ - uses: pypa/hatch@install
+ - run: hatch build && hatch run build:check-all
+ shell: bash
+ working-directory: ./${{ needs.package.outputs.directory }}
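The same check can be run locally before pushing, assuming hatch is installed; from the package directory:

    cd dbt-tests-adapter/    # or stay at the repo root for dbt-adapters
    hatch build && hatch run build:check-all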
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
deleted file mode 100644
index 00afd704..00000000
--- a/.github/workflows/build.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-# **what?**
-# Verifies the python build on all code committed to the repository. This workflow
-# should not require any secrets since it runs for PRs from forked repos. By
-# default, secrets are not passed to workflows running from forked repos.
-
-# **why?**
-# Ensure code for dbt meets a certain quality standard.
-
-# **when?**
-# This will run for all PRs, when code is pushed to main, and when manually triggered.
-
-name: "Build"
-
-on:
- push:
- branches:
- - "main"
- pull_request:
- merge_group:
- types: [checks_requested]
- workflow_dispatch:
- workflow_call:
-
-permissions: read-all
-
-# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
-concurrency:
- group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}
- cancel-in-progress: true
-
-defaults:
- run:
- shell: bash
-
-jobs:
- build:
- name: Build, Test and publish to PyPi
- runs-on: ubuntu-latest
- permissions:
- id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
- steps:
- - name: "Check out repository"
- uses: actions/checkout@v4
-
- - name: Setup `hatch`
- uses: ./.github/actions/setup-hatch
-
- - name: Build `dbt-adapters`
- uses: ./.github/actions/build-hatch
-
- - name: Build `dbt-tests-adapter`
- uses: ./.github/actions/build-hatch
- with:
- working-dir: "./dbt-tests-adapter/"
diff --git a/.github/workflows/changelog-existence.yml b/.github/workflows/changelog-existence.yml
deleted file mode 100644
index 8732177f..00000000
--- a/.github/workflows/changelog-existence.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-# **what?**
-# Checks that a file has been committed under the /.changes directory
-# as a new CHANGELOG entry. Cannot check for a specific filename as
-# it is dynamically generated by change type and timestamp.
-# This workflow runs on pull_request_target because it requires
-# secrets to post comments.
-
-# **why?**
-# Ensure code change gets reflected in the CHANGELOG.
-
-# **when?**
-# This will run for all PRs going into main. It will
-# run when they are opened, reopened, when any label is added or removed
-# and when new code is pushed to the branch. The action will get
-# skipped if the 'Skip Changelog' label is present on the PR.
-
-name: Check Changelog Entry
-
-on:
- pull_request_target:
- types: [opened, reopened, labeled, unlabeled, synchronize]
-
-defaults:
- run:
- shell: bash
-
-permissions:
- contents: read
- pull-requests: write
-
-jobs:
- changelog:
- uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main
- with:
- changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-adapters/blob/main/CONTRIBUTING.md#adding-changelog-entry).'
- skip_label: 'Skip Changelog'
- secrets: inherit
diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml
deleted file mode 100644
index 9c203847..00000000
--- a/.github/workflows/code-quality.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-name: Code Quality
-
-on:
- push:
- branches:
- - "main"
- - "*.latest"
- pull_request:
- workflow_dispatch:
-
-permissions: read-all
-
-# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
-concurrency:
- group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}
- cancel-in-progress: true
-
-jobs:
- code-quality:
- name: Code Quality
- runs-on: ubuntu-latest
-
- steps:
- - name: Check out repository
- uses: actions/checkout@v4
- with:
- persist-credentials: false
-
- - name: Setup `hatch`
- uses: ./.github/actions/setup-hatch
-
- - name: Run code quality
- shell: bash
- run: hatch run code-quality
diff --git a/.github/workflows/docs-issue.yml b/.github/workflows/docs-issue.yml
deleted file mode 100644
index f49cf517..00000000
--- a/.github/workflows/docs-issue.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-# **what?**
-# Open an issue in docs.getdbt.com when an issue is labeled `user docs` and closed as completed
-
-# **why?**
-# To reduce barriers for keeping docs up to date
-
-# **when?**
-# When an issue is labeled `user docs` and is closed as completed. Can be labeled before or after the issue is closed.
-
-
-name: Open issues in docs.getdbt.com repo when an issue is labeled
-run-name: "Open an issue in docs.getdbt.com for issue #${{ github.event.issue.number }}"
-
-on:
- issues:
- types: [labeled, closed]
-
-defaults:
- run:
- shell: bash
-
-permissions:
- issues: write # comments on issues
-
-jobs:
- open_issues:
- # we only want to run this when the issue is closed as completed and the label `user docs` has been assigned.
- # If this logic does not exist in this workflow, it runs the
- # risk of duplicate issues being created, since the merge and label events both trigger this workflow
- # and neither run may post its comment before the other finishes. This lives here instead of the shared workflow because this is where we
- # decide if it should run or not.
- if: |
- (github.event.issue.state == 'closed' && github.event.issue.state_reason == 'completed') && (
- (github.event.action == 'closed' && contains(github.event.issue.labels.*.name, 'user docs')) ||
- (github.event.action == 'labeled' && github.event.label.name == 'user docs'))
- uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
- with:
- issue_repository: "dbt-labs/docs.getdbt.com"
- issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
- issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
- secrets: inherit
diff --git a/.github/workflows/github-release.yml b/.github/workflows/github-release.yml
deleted file mode 100644
index ad0cc2d8..00000000
--- a/.github/workflows/github-release.yml
+++ /dev/null
@@ -1,259 +0,0 @@
-# **what?**
-# Create a new release on GitHub and include any artifacts in the `/dist` directory of the GitHub artifacts store.
-#
-# Inputs:
-# sha: The commit to attach to this release
-# version_number: The release version number (i.e. 1.0.0b1, 1.2.3rc2, 1.0.0)
-# changelog_path: Path to the changelog file for release notes
-# test_run: Test run (Publish release as draft)
-#
-# **why?**
-# Reusable and consistent GitHub release process.
-#
-# **when?**
-# Call after a successful build. Build artifacts should be ready to release and live in a dist/ directory.
-#
-# This workflow expects the artifacts to already be built and living in the artifact store of the workflow.
-#
-# Validation Checks
-#
-# 1. If no release already exists for this commit and version, create the tag and release it to GitHub.
-# 2. If a release already exists for this commit, skip creating the release but finish with a success.
-# 3. If a release exists for this commit under a different tag, fail.
-# 4. If the commit is already associated with a different release, fail.
-
-name: GitHub Release
-
-on:
- workflow_call:
- inputs:
- sha:
- description: The commit to attach to this release
- required: true
- type: string
- version_number:
- description: The release version number (i.e. 1.0.0b1)
- required: true
- type: string
- changelog_path:
- description: Path to the changelog file for release notes
- required: true
- type: string
- test_run:
- description: Test run (Publish release as draft)
- required: true
- type: boolean
- archive_name:
- description: artifact name to download
- required: true
- type: string
- outputs:
- tag:
- description: The tag created or used for this release
- value: ${{ jobs.check-release-exists.outputs.tag }}
-
-permissions:
- contents: write
-
-env:
- REPO_LINK: ${{ github.server_url }}/${{ github.repository }}
- NOTIFICATION_PREFIX: "[GitHub Release]"
-
-jobs:
- log-inputs:
- runs-on: ubuntu-latest
- steps:
- - name: "[DEBUG] Print Variables"
- run: |
- echo The last commit sha in the release: ${{ inputs.sha }}
- echo The release version number: ${{ inputs.version_number }}
- echo Expected Changelog path: ${{ inputs.changelog_path }}
- echo Test run: ${{ inputs.test_run }}
- echo Repo link: ${{ env.REPO_LINK }}
- echo Notification prefix: ${{ env.NOTIFICATION_PREFIX }}
-
- check-release-exists:
- runs-on: ubuntu-latest
- outputs:
- exists: ${{ steps.release_check.outputs.exists }}
- draft_exists: ${{ steps.release_check.outputs.draft_exists }}
- tag: ${{ steps.set_tag.outputs.tag }}
-
- steps:
- - name: "Generate Release Tag"
- id: set_tag
- run: echo "tag=v${{ inputs.version_number }}" >> $GITHUB_OUTPUT
-
- # When the GitHub CLI doesn't find a release for the given tag, it will exit 1 with a
- # message of "release not found". In our case, it's not an actual error, just a
- # confirmation that the release does not already exist, so we can go ahead and create it.
- # The `|| true` makes it so the step does not exit with a non-zero exit code
- # Also check whether the release already exists in draft state. If it does, and we are not
- # testing, then we can publish that draft as is. If it's in draft and we are testing, skip the
- # release.
- - name: "Check If Release Exists For Tag ${{ steps.set_tag.outputs.tag }}"
- id: release_check
- run: |
- output=$((gh release view ${{ steps.set_tag.outputs.tag }} --json isDraft,targetCommitish --repo ${{ env.REPO_LINK }}) 2>&1) || true
- if [[ "$output" == "release not found" ]]
- then
- title="Release for tag ${{ steps.set_tag.outputs.tag }} does not exist."
- message="Check passed."
- echo "exists=false" >> $GITHUB_OUTPUT
- echo "draft_exists=false" >> $GITHUB_OUTPUT
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- exit 0
- fi
- commit=$(jq -r '.targetCommitish' <<< "$output")
- if [[ $commit != ${{ inputs.sha }} ]]
- then
- title="Release for tag ${{ steps.set_tag.outputs.tag }} already exists for commit $commit!"
- message="Cannot create a new release for commit ${{ inputs.sha }}. Exiting."
- echo "::error title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- exit 1
- fi
- isDraft=$(jq -r '.isDraft' <<< "$output")
- if [[ $isDraft == true ]] && [[ ${{ inputs.test_run }} == false ]]
- then
- title="Release tag ${{ steps.set_tag.outputs.tag }} already associated with the draft release."
- message="Release workflow will publish the associated release."
- echo "exists=false" >> $GITHUB_OUTPUT
- echo "draft_exists=true" >> $GITHUB_OUTPUT
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- exit 0
- fi
- title="Release for tag ${{ steps.set_tag.outputs.tag }} already exists."
- message="Skip GitHub Release Publishing."
- echo "exists=true" >> $GITHUB_OUTPUT
- echo "draft_exists=false" >> $GITHUB_OUTPUT
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- REPO: ${{ env.REPO_LINK }}
-
- - name: "[DEBUG] Log Job Outputs"
- run: |
- echo exists: ${{ steps.release_check.outputs.exists }}
- echo draft_exists: ${{ steps.release_check.outputs.draft_exists }}
- echo tag: ${{ steps.set_tag.outputs.tag }}
-
- skip-github-release:
- runs-on: ubuntu-latest
- needs: [check-release-exists]
- if: needs.check-release-exists.outputs.exists == 'true'
-
- steps:
- - name: "Tag Exists, Skip GitHub Release Job"
- run: |
- echo title="A tag already exists for ${{ needs.check-release-exists.outputs.tag }} and commit."
- echo message="Skipping GitHub release."
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
-
- audit-release-different-commit:
- runs-on: ubuntu-latest
- needs: [check-release-exists]
- if: needs.check-release-exists.outputs.exists == 'false'
-
- steps:
- - name: "Check If Release Already Exists For Commit"
- uses: cardinalby/git-get-release-action@1.2.4
- id: check_release_commit
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- commitSha: ${{ inputs.sha }}
- doNotFailIfNotFound: true # returns blank outputs when not found instead of error
- searchLimit: 15 # Since we only care about recent releases, speed up the process
-
- - name: "[DEBUG] Print Release Details"
- run: |
- echo steps.check_release_commit.outputs.id: ${{ steps.check_release_commit.outputs.id }}
- echo steps.check_release_commit.outputs.tag_name: ${{ steps.check_release_commit.outputs.tag_name }}
- echo steps.check_release_commit.outputs.target_commitish: ${{ steps.check_release_commit.outputs.target_commitish }}
- echo steps.check_release_commit.outputs.prerelease: ${{ steps.check_release_commit.outputs.prerelease }}
-
- # Since we already know a release for this tag does not exist, if we find anything it's for the wrong tag, exit
- - name: "Check If The Tag Matches The Version Number"
- if: steps.check_release_commit.outputs.id != ''
- run: |
- title="Tag ${{ steps.check_release_commit.outputs.tag_name }} already exists for this commit!"
- message="Cannot create a new tag for ${{ needs.check-release-exists.outputs.tag }} for the same commit"
- echo "::error title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- exit 1
-
- publish-draft-release:
- runs-on: ubuntu-latest
- needs: [check-release-exists, audit-release-different-commit]
- if: >-
- needs.check-release-exists.outputs.draft_exists == 'true' &&
- inputs.test_run == false
-
- steps:
- - name: "Publish Draft Release - ${{ needs.check-release-exists.outputs.tag }}"
- run: |
- gh release edit $TAG --draft=false --repo ${{ env.REPO_LINK }}
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- TAG: ${{ needs.check-release-exists.outputs.tag }}
-
- create-github-release:
- runs-on: ubuntu-latest
- needs: [check-release-exists, audit-release-different-commit]
- if: needs.check-release-exists.outputs.draft_exists == 'false'
-
- steps:
- - name: "Check out repository"
- uses: actions/checkout@v4
- with:
- ref: ${{ inputs.sha }}
-
- - name: "Download Artifact ${{ inputs.archive_name }}"
- uses: actions/download-artifact@v4
- with:
- name: ${{ inputs.archive_name }}
- path: dist/
-
- - name: "[DEBUG] Display Structure Of Expected Files"
- run: |
- ls -R .changes
- ls -l dist
-
- - name: "Set Release Type"
- id: release_type
- run: |
- if ${{ contains(inputs.version_number, 'rc') || contains(inputs.version_number, 'b') }}
- then
- echo Release will be set as pre-release
- echo "prerelease=--prerelease" >> $GITHUB_OUTPUT
- else
- echo This is not a prerelease
- fi
-
- - name: "Set As Draft Release"
- id: draft
- run: |
- if [[ ${{ inputs.test_run }} == true ]]
- then
- echo Release will be published as draft
- echo "draft=--draft" >> $GITHUB_OUTPUT
- else
- echo This is not a draft release
- fi
-
- - name: "GitHub Release Workflow Annotation"
- run: |
- title="Release ${{ needs.check-release-exists.outputs.tag }}"
- message="Configuration: ${{ steps.release_type.outputs.prerelease }} ${{ steps.draft.outputs.draft }}"
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
-
- - name: "Create New GitHub Release - ${{ needs.check-release-exists.outputs.tag }}"
- run: |
- gh release create $TAG ./dist/* --title "$TITLE" --notes-file $RELEASE_NOTES --target $COMMIT $PRERELEASE $DRAFT --repo ${{ env.REPO_LINK }}
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- TAG: ${{ needs.check-release-exists.outputs.tag }}
- TITLE: ${{ github.event.repository.name }} ${{ needs.check-release-exists.outputs.tag }}
- RELEASE_NOTES: ${{ inputs.changelog_path }}
- COMMIT: ${{ inputs.sha }}
- PRERELEASE: ${{ steps.release_type.outputs.prerelease }}
- DRAFT: ${{ steps.draft.outputs.draft }}
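For context on the deleted check above: `gh release view` exits non-zero and prints `release not found` when the tag is absent, which the step captured and branched on. The core of that pattern, with a hypothetical tag:

    tag="v1.2.3"
    output=$( (gh release view "$tag" --json isDraft,targetCommitish) 2>&1 ) || true
    if [[ "$output" == "release not found" ]]
    then
        echo "no release yet; safe to create one"
    else
        jq -r '.isDraft, .targetCommitish' <<< "$output"   # inspect the existing release
    fi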
diff --git a/.github/workflows/issue-triage.yml b/.github/workflows/issue-triage.yml
new file mode 100644
index 00000000..7903a732
--- /dev/null
+++ b/.github/workflows/issue-triage.yml
@@ -0,0 +1,20 @@
+name: "Issue triage"
+run-name: "Issue triage - #${{ github.event.issue.number }}: ${{ github.event.issue.title }} - ${{ github.actor }}"
+
+on: issue_comment
+
+defaults:
+ run:
+ shell: bash
+
+permissions:
+ issues: write
+
+jobs:
+ triage:
+ if: contains(github.event.issue.labels.*.name, 'triage:awaiting-response')
+ uses: dbt-labs/actions/.github/workflows/swap-labels.yml@main
+ with:
+ add_label: "triage:dbt"
+ remove_label: "triage:awaiting-response"
+ secrets: inherit
diff --git a/.github/workflows/precommit-autoupdate.yml b/.github/workflows/precommit-autoupdate.yml
deleted file mode 100644
index 74976c48..00000000
--- a/.github/workflows/precommit-autoupdate.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-name: "Run pre-commit autoupdate"
-
-on:
- schedule:
- - cron: "30 1 * * SAT"
- workflow_dispatch:
-
-permissions:
- contents: write
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.sha }}
- cancel-in-progress: true
-
-jobs:
- precommit-autoupdate:
- name: "Run pre-commit autoupdate"
- uses: dbt-labs/actions/.github/workflows/pre-commit-autoupdate.yml
- secrets:
- TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
- SLACK_WEBHOOK_PR_URL: ${{ secrets.SLACK_DEV_ADAPTER_PULL_REQUESTS }}
- SLACK_WEBHOOK_ALERTS_URL: ${{ secrets.SLACK_DEV_ADAPTER_ALERTS }}
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 00000000..04a14545
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,68 @@
+name: "Publish"
+run-name: "Publish - ${{ inputs.package }} - ${{ inputs.deploy-to }} - ${{ github.actor }}"
+
+on:
+ workflow_dispatch:
+ inputs:
+ package:
+ description: "Choose the package to publish"
+ type: choice
+ options:
+ - "dbt-adapters"
+ - "dbt-tests-adapter"
+ deploy-to:
+ description: "Choose whether to publish to test or prod"
+ type: environment
+ default: "prod"
+ branch:
+ description: "Choose the branch to publish from"
+ type: string
+ default: "main"
+ pypi-internal:
+ description: "Publish Internally"
+ type: boolean
+ default: true
+ pypi-public:
+ description: "Publish to PyPI"
+ type: boolean
+ default: false
+
+# don't publish to the same target in parallel
+concurrency:
+ group: ${{ github.workflow }}-${{ inputs.package }}-${{ inputs.deploy-to }}
+ cancel-in-progress: true
+
+jobs:
+ unit-tests:
+ uses: ./.github/workflows/_unit-tests.yml
+ with:
+ package: ${{ inputs.package }}
+ branch: ${{ inputs.branch }}
+
+ generate-changelog:
+ needs: unit-tests
+ uses: ./.github/workflows/_generate-changelog.yml
+ with:
+ package: ${{ inputs.package }}
+ merge: ${{ inputs.deploy-to == 'prod' }}
+ branch: ${{ inputs.branch }}
+ secrets: inherit
+
+ publish-internal:
+ if: ${{ inputs.pypi-internal == true }}
+ needs: generate-changelog
+ uses: ./.github/workflows/_publish-internal.yml
+ with:
+ package: ${{ inputs.package }}
+ deploy-to: ${{ inputs.deploy-to }}
+ branch: ${{ needs.generate-changelog.outputs.branch-name }}
+ secrets: inherit
+
+ publish-pypi:
+ if: ${{ inputs.pypi-public == true }}
+ needs: generate-changelog
+ uses: ./.github/workflows/_publish-pypi.yml
+ with:
+ package: ${{ inputs.package }}
+ deploy-to: ${{ inputs.deploy-to }}
+ branch: ${{ needs.generate-changelog.outputs.branch-name }}
diff --git a/.github/workflows/pull-request-checks.yml b/.github/workflows/pull-request-checks.yml
new file mode 100644
index 00000000..0fd958ee
--- /dev/null
+++ b/.github/workflows/pull-request-checks.yml
@@ -0,0 +1,58 @@
+name: "Pull request checks"
+run-name: "Publish - #${{ github.event.number }} - ${{ github.actor }}"
+
+on:
+ pull_request_target:
+ types: [opened, reopened, synchronize, labeled, unlabeled]
+
+# only run this once per PR at a time
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.number }}
+ cancel-in-progress: true
+
+jobs:
+ changelog-entry:
+ uses: ./.github/workflows/_changelog-entry-check.yml
+ with:
+ pull-request: ${{ github.event.pull_request.number }}
+
+ code-quality:
+ uses: ./.github/workflows/_code-quality.yml
+ with:
+ branch: ${{ github.event.pull_request.head.ref }}
+ repository: ${{ github.event.pull_request.head.repo.full_name }}
+
+ verify-builds:
+ uses: ./.github/workflows/_verify-build.yml
+ strategy:
+ matrix:
+ package: ["dbt-adapters", "dbt-tests-adapter"]
+ python-version: ["3.9", "3.10", "3.11", "3.12"]
+ with:
+ package: ${{ matrix.package }}
+ branch: ${{ github.event.pull_request.head.ref }}
+ repository: ${{ github.event.pull_request.head.repo.full_name }}
+ python-version: ${{ matrix.python-version }}
+
+ unit-tests:
+ uses: ./.github/workflows/_unit-tests.yml
+ strategy:
+ matrix:
+ package: ["dbt-adapters"]
+ python-version: ["3.9", "3.10", "3.11", "3.12"]
+ with:
+ package: ${{ matrix.package }}
+ branch: ${{ github.event.pull_request.head.ref }}
+ repository: ${{ github.event.pull_request.head.repo.full_name }}
+
+ # This job does nothing and is only used for branch protection
+ results:
+ name: "Pull request checks" # keep this name, branch protection references it
+ if: always()
+ needs: [changelog-entry, code-quality, verify-builds, unit-tests]
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ steps:
+ - uses: re-actors/alls-green@release/v1
+ with:
+ jobs: ${{ toJSON(needs) }}
+ allowed-skips: 'changelog-entry'
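Since the `results` job is the single check that branch protection watches, protecting `main` only needs that one context; one possible way to wire it up via the REST API (a sketch, not the repo's actual configuration):

    gh api \
        repos/dbt-labs/dbt-adapters/branches/main/protection/required_status_checks/contexts \
        --method POST \
        --input - <<< '["Pull request checks"]'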
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
deleted file mode 100644
index 828350dd..00000000
--- a/.github/workflows/release.yml
+++ /dev/null
@@ -1,180 +0,0 @@
-name: Release
-run-name: Release ${{ inputs.package }}==${{ inputs.version_number }} to ${{ inputs.deploy-to }}
-
-on:
- workflow_dispatch:
- inputs:
- package:
- type: choice
- description: Choose what to publish
- options:
- - dbt-adapters
- - dbt-tests-adapter
- version_number:
- description: "The release version number (i.e. 1.0.0b1)"
- type: string
- required: true
- deploy-to:
- type: choice
- description: Choose where to publish
- options:
- - prod
- - test
- default: prod
- nightly_release:
- description: "Nightly release to dev environment"
- type: boolean
- default: false
- required: false
- target_branch:
- description: "The branch to release from"
- type: string
- required: false
- default: main
-
- workflow_call:
- inputs:
- package:
- type: string
- description: Choose what to publish
- required: true
- version_number:
- description: "The release version number (i.e. 1.0.0b1)"
- type: string
- required: true
- deploy-to:
- type: string
- default: prod
- required: false
- nightly_release:
- description: "Nightly release to dev environment"
- type: boolean
- default: false
- required: false
- target_branch:
- description: "The branch to release from"
- type: string
- required: false
- default: main
-
-# this is the permission that allows creating a new release
-permissions:
- contents: write
- id-token: write
-
-# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
-concurrency:
- group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}-${{ inputs.package }}-${{ inputs.deploy-to }}
- cancel-in-progress: true
-
-defaults:
- run:
- shell: bash
-
-jobs:
- release-inputs:
- name: "Release inputs"
- runs-on: ubuntu-latest
- outputs:
- working-dir: ${{ steps.release-inputs.outputs.working-dir }}
- run-unit-tests: ${{ steps.release-inputs.outputs.run-unit-tests }}
- archive-name: ${{ steps.release-inputs.outputs.archive-name }}
- steps:
- - name: "Inputs"
- id: release-inputs
- run: |
- working_dir="./"
- run_unit_tests=true
- archive_name=${{ inputs.package }}-${{ inputs.version_number }}-${{ inputs.deploy-to }}
-
- if test "${{ inputs.package }}" = "dbt-tests-adapter"
- then
- working_dir="./dbt-tests-adapter/"
- run_unit_tests=false
- fi
-
- echo "working-dir=$working_dir" >> $GITHUB_OUTPUT
- echo "run-unit-tests=$run_unit_tests" >> $GITHUB_OUTPUT
- echo "archive-name=$archive_name" >> $GITHUB_OUTPUT
-
- - name: "[DEBUG]"
- run: |
- echo package : ${{ inputs.package }}
- echo working-dir : ${{ steps.release-inputs.outputs.working-dir }}
- echo run-unit-tests : ${{ steps.release-inputs.outputs.run-unit-tests }}
- echo archive-name : ${{ steps.release-inputs.outputs.archive-name }}
-
- bump-version-generate-changelog:
- name: "Bump package version, Generate changelog"
- uses: dbt-labs/dbt-adapters/.github/workflows/release_prep_hatch.yml@main
- needs: [release-inputs]
- with:
- version_number: ${{ inputs.version_number }}
- deploy_to: ${{ inputs.deploy-to }}
- nightly_release: ${{ inputs.nightly_release }}
- target_branch: ${{ inputs.target_branch }}
- working-dir: ${{ needs.release-inputs.outputs.working-dir }}
- run-unit-tests: ${{ fromJSON(needs.release-inputs.outputs.run-unit-tests) }}
- secrets: inherit
-
- log-outputs-bump-version-generate-changelog:
- name: "[Log output] Bump package version, Generate changelog"
- if: ${{ !failure() && !cancelled() }}
- needs: [release-inputs, bump-version-generate-changelog]
- runs-on: ubuntu-latest
- steps:
- - name: Print variables
- run: |
- echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
- echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
-
- build-and-test:
- name: "Build and Test"
- needs: [release-inputs, bump-version-generate-changelog]
- runs-on: ubuntu-latest
- permissions:
- id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
- steps:
- - name: "Check out repository"
- uses: actions/checkout@v4
- with:
- ref: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
-
- - name: "Setup `hatch`"
- uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main
-
- - name: "Build ${{ inputs.package }}"
- uses: dbt-labs/dbt-adapters/.github/actions/build-hatch@main
- with:
- working-dir: ${{ needs.release-inputs.outputs.working-dir }}
- archive-name: ${{ needs.release-inputs.outputs.archive-name }}
-
- github-release:
- name: "GitHub Release"
- # ToDo: update GH release to handle adding dbt-tests-adapter and dbt-adapters assets to the same release
- if: ${{ !failure() && !cancelled() && inputs.package == 'dbt-adapters' }}
- needs: [release-inputs, build-and-test, bump-version-generate-changelog]
- uses: dbt-labs/dbt-adapters/.github/workflows/github-release.yml@main
- with:
- sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
- version_number: ${{ inputs.version_number }}
- changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
- test_run: ${{ inputs.deploy-to == 'test' && true || false }}
- archive_name: ${{ needs.release-inputs.outputs.archive-name }}
-
- pypi-release:
- name: "Publish to PyPI"
- runs-on: ubuntu-latest
- needs: [release-inputs, build-and-test]
- environment:
- name: ${{ inputs.deploy-to }}
- url: ${{ vars.PYPI_PROJECT_URL }}
- steps:
- - name: "Check out repository"
- uses: actions/checkout@v4
-
- - name: "Publish to PyPI"
- uses: dbt-labs/dbt-adapters/.github/actions/publish-pypi@main
- with:
- repository-url: ${{ vars.PYPI_REPOSITORY_URL }}
- archive-name: ${{ needs.release-inputs.outputs.archive-name }}
diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml
deleted file mode 100644
index a6105786..00000000
--- a/.github/workflows/release_prep_hatch.yml
+++ /dev/null
@@ -1,542 +0,0 @@
-# **what?**
-# Perform the version bump, generate the changelog and run tests.
-#
-# Inputs:
-# version_number: The release version number (i.e. 1.0.0b1, 1.2.3rc2, 1.0.0)
-# target_branch: The branch that we will release from
-# env_setup_script_path: Path to the environment setup script
-# deploy_to: If we are deploying to prod or test, if test then release from branch
-# nightly_release: Identifier that this is nightly release
-#
-# Outputs:
-# final_sha: The sha that will actually be released. This can differ from the
-# input sha if adding a version bump and/or changelog
-# changelog_path: Path to the changelog file (ex .changes/1.2.3-rc1.md)
-#
-# Branching strategy:
-# - During workflow execution a temp branch will be generated.
-# - For normal runs the temp branch will be removed once changes are merged to the target branch;
-# - For test runs we will keep the temp branch and use it for the release;
-# Naming strategy:
-# - For normal runs: prep-release/${{ inputs.deploy_to }}/${{ inputs.version_number }}_$GITHUB_RUN_ID
-# - For nightly releases: prep-release/nightly-release/${{ inputs.version_number }}_$GITHUB_RUN_ID
-#
-# **why?**
-# Reusable and consistent GitHub release process.
-#
-# **when?**
-# Call when ready to kick off a build and release
-#
-# Validation Checks
-#
-# 1. Bump the version if it has not been bumped
-# 2. Generate the changelog (via changie) if there is no markdown file for this version
-#
-
-name: Version Bump and Changelog Generation
-run-name: Bump to ${{ inputs.version_number }} for release to ${{ inputs.deploy_to }} and generate changelog
-on:
- workflow_call:
- inputs:
- version_number:
- required: true
- type: string
- deploy_to:
- type: string
- default: prod
- required: false
- nightly_release:
- type: boolean
- default: false
- required: false
- env_setup_script_path:
- type: string
- required: false
- default: ''
- run-unit-tests:
- type: boolean
- default: false
- run-integration-tests:
- type: boolean
- default: false
- target_branch:
- description: "The branch to release from"
- type: string
- required: false
- default: main
- working-dir:
- description: "The working directory to use for run statements"
- type: string
- default: "./"
- outputs:
- changelog_path:
- description: The path to the changelog for this version
- value: ${{ jobs.audit-changelog.outputs.changelog_path }}
- final_sha:
- description: The sha that will actually be released
- value: ${{ jobs.determine-release-branch.outputs.final_sha }}
- secrets:
- FISHTOWN_BOT_PAT:
- description: "Token to commit/merge changes into branches"
- required: true
- IT_TEAM_MEMBERSHIP:
- description: "Token that can view org level teams"
- required: true
-
-permissions:
- contents: write
-
-defaults:
- run:
- shell: bash
-
-env:
- PYTHON_TARGET_VERSION: 3.11
- NOTIFICATION_PREFIX: "[Release Preparation]"
-
-jobs:
- log-inputs:
- runs-on: ubuntu-latest
-
- steps:
- - name: "[DEBUG] Print Variables"
- run: |
- # WORKFLOW INPUTS
- echo The release version number: ${{ inputs.version_number }}
- echo Deploy to: ${{ inputs.deploy_to }}
- echo Target branch: ${{ inputs.target_branch }}
- echo Nightly release: ${{ inputs.nightly_release }}
- echo Optional env setup script: ${{ inputs.env_setup_script_path }}
- echo run-unit-tests: ${{ inputs.run-unit-tests }}
- echo run-integration-tests: ${{ inputs.run-integration-tests }}
- echo working-dir: ${{ inputs.working-dir }}
- # ENVIRONMENT VARIABLES
- echo Python target version: ${{ env.PYTHON_TARGET_VERSION }}
- echo Notification prefix: ${{ env.NOTIFICATION_PREFIX }}
- audit-changelog:
- runs-on: ubuntu-latest
-
- outputs:
- changelog_path: ${{ steps.set_path.outputs.changelog_path }}
- exists: ${{ steps.set_existence.outputs.exists }}
- base_version: ${{ steps.semver.outputs.base-version }}
- prerelease: ${{ steps.semver.outputs.pre-release }}
- is_prerelease: ${{ steps.semver.outputs.is-pre-release }}
-
- steps:
- - name: "Checkout ${{ github.repository }}"
- uses: actions/checkout@v4
- with:
- ref: ${{ inputs.target_branch }}
-
- - name: "Audit Version And Parse Into Parts"
- id: semver
- uses: dbt-labs/actions/parse-semver@v1.1.1
- with:
- version: ${{ inputs.version_number }}
-
- - name: "Set Changelog Path"
- id: set_path
- run: |
- path=".changes/"
- if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]]
- then
- path+="${{ steps.semver.outputs.base-version }}-${{ steps.semver.outputs.pre-release }}.md"
- else
- path+="${{ steps.semver.outputs.base-version }}.md"
- fi
- # Send notification
- echo "changelog_path=$path" >> $GITHUB_OUTPUT
- title="Changelog path"
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$changelog_path"
- - name: "Set Changelog Existence For Subsequent Jobs"
- id: set_existence
- run: |
- does_exist=false
- if test -f ${{ steps.set_path.outputs.changelog_path }}
- then
- does_exist=true
- fi
- echo "exists=$does_exist">> $GITHUB_OUTPUT
- - name: "[Notification] Set Changelog Existence For Subsequent Jobs"
- run: |
- title="Changelog exists"
- if [[ ${{ steps.set_existence.outputs.exists }} == true ]]
- then
- message="Changelog file ${{ steps.set_path.outputs.changelog_path }} already exists"
- else
- message="Changelog file ${{ steps.set_path.outputs.changelog_path }} doesn't exist"
- fi
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- - name: "[DEBUG] Print Outputs"
- run: |
- echo changelog_path: ${{ steps.set_path.outputs.changelog_path }}
- echo exists: ${{ steps.set_existence.outputs.exists }}
- echo base_version: ${{ steps.semver.outputs.base-version }}
- echo prerelease: ${{ steps.semver.outputs.pre-release }}
- echo is_prerelease: ${{ steps.semver.outputs.is-pre-release }}
-
- audit-version-in-code:
- runs-on: ubuntu-latest
-
- outputs:
- up_to_date: ${{ steps.version-check.outputs.up_to_date }}
-
- steps:
- - name: "Checkout ${{ github.repository }} Branch ${{ inputs.target_branch }}"
- uses: actions/checkout@v4
- with:
- ref: ${{ inputs.target_branch }}
-
- - name: Setup `hatch`
- uses: ./.github/actions/setup-hatch
-
- - name: "Check Current Version In Code"
- id: version-check
- run: |
- is_updated=false
- current_version=$(hatch version)
- if test "$current_version" = "${{ inputs.version_number }}"
- then
- is_updated=true
- fi
- echo "up_to_date=$is_updated" >> $GITHUB_OUTPUT
- working-directory: ${{ inputs.working-dir }}
-
- - name: "[Notification] Check Current Version In Code"
- run: |
- title="Version check"
- if [[ ${{ steps.version-check.outputs.up_to_date }} == true ]]
- then
- message="The version in the codebase is equal to the provided version"
- else
- message="The version in the codebase differs from the provided version"
- fi
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- - name: "[DEBUG] Print Outputs"
- run: |
- echo up_to_date: ${{ steps.version-check.outputs.up_to_date }}
-
- skip-generate-changelog:
- runs-on: ubuntu-latest
- needs: [audit-changelog]
- if: needs.audit-changelog.outputs.exists == 'true'
-
- steps:
- - name: "Changelog Exists, Skip Generating New Changelog"
- run: |
- # Send notification
- title="Skip changelog generation"
- message="A changelog file already exists at ${{ needs.audit-changelog.outputs.changelog_path }}, skipping generating changelog"
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
-
- skip-version-bump:
- runs-on: ubuntu-latest
- needs: [audit-version-in-code]
- if: needs.audit-version-in-code.outputs.up_to_date == 'true'
-
- steps:
- - name: "Version Already Bumped"
- run: |
- # Send notification
- title="Skip version bump"
- message="The version has already been bumped to ${{ inputs.version_number }}, skipping version bump"
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
-
- create-temp-branch:
- runs-on: ubuntu-latest
- needs: [audit-changelog, audit-version-in-code]
- if: needs.audit-changelog.outputs.exists == 'false' || needs.audit-version-in-code.outputs.up_to_date == 'false'
-
- outputs:
- branch_name: ${{ steps.variables.outputs.branch_name }}
-
- steps:
- - name: "Checkout ${{ github.repository }}"
- uses: actions/checkout@v4
- with:
- ref: ${{ inputs.target_branch }}
-
- - name: "Generate Branch Name"
- id: variables
- run: |
- name="prep-release/"
- if [[ ${{ inputs.nightly_release }} == true ]]
- then
- name+="nightly-release/"
- else
- name+="${{ inputs.deploy_to }}/"
- fi
- name+="${{ inputs.version_number }}_$GITHUB_RUN_ID"
- echo "branch_name=$name" >> $GITHUB_OUTPUT
- - name: "Create Branch - ${{ steps.variables.outputs.branch_name }}"
- run: |
- git checkout -b ${{ steps.variables.outputs.branch_name }}
- git push -u origin ${{ steps.variables.outputs.branch_name }}
- - name: "[Notification] Temp branch created"
- run: |
- # Send notification
- title="Temp branch generated"
- message="The ${{ steps.variables.outputs.branch_name }} branch created"
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- - name: "[DEBUG] Print Outputs"
- run: |
- echo branch_name ${{ steps.variables.outputs.branch_name }}
- generate-changelog-bump-version:
- runs-on: ubuntu-latest
- needs: [audit-changelog, audit-version-in-code, create-temp-branch]
-
- steps:
- - name: "Checkout ${{ github.repository }} Branch ${{ needs.create-temp-branch.outputs.branch_name }}"
- uses: actions/checkout@v4
- with:
- ref: ${{ needs.create-temp-branch.outputs.branch_name }}
- - name: Setup `hatch`
- uses: ./.github/actions/setup-hatch
- - name: "Add Homebrew To PATH"
- run: |
- echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
- - name: "Install Homebrew Packages"
- run: |
- brew install pre-commit
- brew tap miniscruff/changie https://github.com/miniscruff/changie
- brew install changie
- - name: "Set json File Name"
- id: json_file
- run: |
- echo "name=output_$GITHUB_RUN_ID.json" >> $GITHUB_OUTPUT
- - name: "Get Core Team Membership"
- run: |
- gh api -H "Accept: application/vnd.github+json" orgs/dbt-labs/teams/core-group/members > ${{ steps.json_file.outputs.name }}
- env:
- GH_TOKEN: ${{ secrets.IT_TEAM_MEMBERSHIP }}
- - name: "Set Core Team Membership for Changie Contributors exclusion"
- id: set_team_membership
- run: |
- team_list=$(jq -r '.[].login' ${{ steps.json_file.outputs.name }})
- echo $team_list
- team_list_single=$(echo $team_list | tr '\n' ' ')
- echo "CHANGIE_CORE_TEAM=$team_list_single" >> $GITHUB_ENV
- - name: "Delete the json File"
- run: |
- rm ${{ steps.json_file.outputs.name }}
- - name: "Generate Release Changelog"
- if: needs.audit-changelog.outputs.exists == 'false'
- run: |
- if [[ ${{ needs.audit-changelog.outputs.is_prerelease }} -eq 1 ]]
- then
- changie batch ${{ needs.audit-changelog.outputs.base_version }} --move-dir '${{ needs.audit-changelog.outputs.base_version }}' --prerelease ${{ needs.audit-changelog.outputs.prerelease }}
- elif [[ -d ".changes/${{ needs.audit-changelog.outputs.base_version }}" ]]
- then
- changie batch ${{ needs.audit-changelog.outputs.base_version }} --include '${{ needs.audit-changelog.outputs.base_version }}' --remove-prereleases
- else # releasing a final patch with no prereleases
- changie batch ${{ needs.audit-changelog.outputs.base_version }}
- fi
- changie merge
- git status
- - name: "Check Changelog Created Successfully"
- if: needs.audit-changelog.outputs.exists == 'false'
- run: |
- title="Changelog"
- if [[ -f ${{ needs.audit-changelog.outputs.changelog_path }} ]]
- then
- message="Changelog file created successfully"
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- else
- message="Changelog failed to generate"
- echo "::error title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- exit 1
- fi
- - name: "Bump Version To ${{ inputs.version_number }}"
- if: needs.audit-version-in-code.outputs.up_to_date == 'false'
- run: |
- hatch version ${{ inputs.version_number }}
- working-directory: ${{ inputs.working-dir }}
- - name: "[Notification] Bump Version To ${{ inputs.version_number }}"
- if: needs.audit-version-in-code.outputs.up_to_date == 'false'
- run: |
- title="Version bump"
- message="Version successfully bumped in codebase to ${{ inputs.version_number }}"
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- # TODO: can these 2 steps be done via hatch? probably.
- # this step will fail on whitespace errors but also correct them
- - name: "Remove Trailing Whitespace Via Pre-commit"
- continue-on-error: true
- run: |
- pre-commit run trailing-whitespace --files dbt/adapters/__about__.py CHANGELOG.md .changes/*
- git status
- # this step will fail on newline errors but also correct them
- - name: "Removing Extra Newlines Via Pre-commit"
- continue-on-error: true
- run: |
- pre-commit run end-of-file-fixer --files dbt/adapters/__about__.py CHANGELOG.md .changes/*
- git status
- - name: "Commit & Push Changes"
- run: |
- # Data for commit
- user="Github Build Bot"
- email="buildbot@fishtownanalytics.com"
- commit_message="Bumping version to ${{ inputs.version_number }} and generating changelog"
- # Commit changes to branch
- git config user.name "$user"
- git config user.email "$email"
- git pull
- git add .
- git commit -m "$commit_message"
- git push
-
- run-unit-tests:
- if: inputs.run-unit-tests == true
- runs-on: ubuntu-latest
- needs: [create-temp-branch, generate-changelog-bump-version]
-
- steps:
- - name: "Checkout ${{ github.repository }} Branch ${{ needs.create-temp-branch.outputs.branch_name }}"
- uses: actions/checkout@v4
- with:
- ref: ${{ needs.create-temp-branch.outputs.branch_name }}
- - name: "Setup `hatch`"
- uses: ./.github/actions/setup-hatch
- - name: "Run Unit Tests"
- run: hatch run unit-tests
-
- run-integration-tests:
- runs-on: ubuntu-20.04
- needs: [create-temp-branch, generate-changelog-bump-version]
- if: inputs.run-integration-tests == true
-
- steps:
- - name: "Checkout ${{ github.repository }} Branch ${{ needs.create-temp-branch.outputs.branch_name }}"
- uses: actions/checkout@v4
- with:
- ref: ${{ needs.create-temp-branch.outputs.branch_name }}
-
- - name: "Setup Environment Variables - ./${{ inputs.env_setup_script_path }}"
- if: inputs.env_setup_script_path != ''
- run: source ./${{ inputs.env_setup_script_path }}
-
- - name: "Setup Environment Variables - Secrets Context"
- if: inputs.env_setup_script_path != ''
- uses: actions/github-script@v6
- id: check-env
- with:
- result-encoding: string
- script: |
- try {
- const { SECRETS_CONTEXT, INTEGRATION_TESTS_SECRETS_PREFIX } = process.env
- const secrets = JSON.parse(SECRETS_CONTEXT)
- if (INTEGRATION_TESTS_SECRETS_PREFIX) {
- for (const [key, value] of Object.entries(secrets)) {
- if (key.startsWith(INTEGRATION_TESTS_SECRETS_PREFIX)) {
- core.exportVariable(key, value)
- }
- }
- } else {
- core.info("The INTEGRATION_TESTS_SECRETS_PREFIX env variable is empty or not defined, skipping the secrets setup.")
- }
- } catch (err) {
- core.error("Error while reading or parsing the JSON")
- core.setFailed(err)
- }
- env:
- SECRETS_CONTEXT: ${{ toJson(secrets) }}
-
- - name: "Set up Python & Hatch - ${{ env.PYTHON_TARGET_VERSION }}"
- uses: ./.github/actions/setup-python-env
- with:
- python-version: ${{ env.PYTHON_TARGET_VERSION }}
-
- - name: Run tests
- run: hatch run integration-tests
-
- merge-changes-into-target-branch:
- runs-on: ubuntu-latest
- needs: [run-unit-tests, run-integration-tests, create-temp-branch, audit-version-in-code, audit-changelog]
- if: |
- !failure() && !cancelled() &&
- inputs.deploy_to == 'prod' &&
- (
- needs.audit-changelog.outputs.exists == 'false' ||
- needs.audit-version-in-code.outputs.up_to_date == 'false'
- )
- steps:
- - name: "[Debug] Print Variables"
- run: |
- echo branch_name: ${{ needs.create-temp-branch.outputs.branch_name }}
- echo inputs.deploy_to: ${{ inputs.deploy_to }}
- echo needs.audit-changelog.outputs.exists: ${{ needs.audit-changelog.outputs.exists }}
- echo needs.audit-version-in-code.outputs.up_to_date: ${{ needs.audit-version-in-code.outputs.up_to_date }}
- - name: "Checkout Repo ${{ github.repository }}"
- uses: actions/checkout@v4
-
- - name: "Merge Changes Into ${{ inputs.target_branch }}"
- uses: everlytic/branch-merge@1.1.5
- with:
- source_ref: ${{ needs.create-temp-branch.outputs.branch_name }}
- target_branch: ${{ inputs.target_branch }}
- github_token: ${{ secrets.FISHTOWN_BOT_PAT }}
- commit_message_template: "[Automated] Merged {source_ref} into target {target_branch} during release process"
-
- - name: "[Notification] Changes Merged into main"
- run: |
- title="Changelog and Version Bump Branch Merge"
- message="The ${{ needs.create-temp-branch.outputs.branch_name }} branch was merged into mains"
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
-
- determine-release-branch:
- runs-on: ubuntu-latest
- needs:
- [
- create-temp-branch,
- merge-changes-into-target-branch,
- audit-changelog,
- audit-version-in-code,
- ]
- # always run this job, regardless of whether the dependent jobs were skipped
- if: ${{ !failure() && !cancelled() }}
-
- # Get the sha that will be released. If the changelog already exists on the input sha and the version has already been bumped,
-# then it is what we will release. Otherwise, we generated a changelog and did the version bump in this workflow and there is a
- # new sha to use from the merge we just did. Grab that here instead.
- outputs:
- final_sha: ${{ steps.resolve_commit_sha.outputs.release_sha }}
-
- steps:
- - name: "[Debug] Print Variables"
- run: |
- echo new_branch: ${{ needs.create-temp-branch.outputs.branch_name }}
- echo changelog_exists: ${{ needs.audit-changelog.outputs.exists }}
- echo up_to_date: ${{ needs.audit-version-in-code.outputs.up_to_date }}
- - name: "Resolve Branch To Checkout"
- id: resolve_branch
- run: |
- branch=""
- if [[ "${{ inputs.deploy_to }}" == "test" ]] || [[ "${{ inputs.nightly_release }}" == "true" ]]
- then
- branch=${{ needs.create-temp-branch.outputs.branch_name }}
- else
- branch="${{ inputs.target_branch }}"
- fi
- echo "target_branch=$branch" >> $GITHUB_OUTPUT
- - name: "[Notification] Resolve Branch To Checkout"
- run: |
- title="Branch pick"
- message="The ${{ steps.resolve_branch.outputs.target_branch }} branch will be used for release"
- echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- - name: "Checkout Resolved Branch - ${{ steps.resolve_branch.outputs.target_branch }}"
- uses: actions/checkout@v4
- with:
- ref: ${{ steps.resolve_branch.outputs.target_branch }}
-
- - name: "[Debug] Log Branch"
- run: git status
-
- - name: "Resolve Commit SHA For Release"
- id: resolve_commit_sha
- run: |
- echo "release_sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
-
- - name: "Remove Temp Branch - ${{ needs.create-temp-branch.outputs.branch_name }}"
- if: ${{ inputs.deploy_to == 'prod' && inputs.nightly_release == 'false' && needs.create-temp-branch.outputs.branch_name != '' }}
- run: |
- git push origin -d ${{ needs.create-temp-branch.outputs.branch_name }}
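For reference, the changie invocations the deleted workflow wrapped behave like this sketch (version numbers hypothetical):

    # prerelease: batch unreleased entries into .changes/1.2.3-rc1.md, staging them under .changes/1.2.3/
    changie batch 1.2.3 --move-dir '1.2.3' --prerelease rc1

    # final release that had prereleases: fold the staged prerelease notes back in
    changie batch 1.2.3 --include '1.2.3' --remove-prereleases

    # final patch with no prereleases
    changie batch 1.2.3

    # regenerate CHANGELOG.md from all batched versions
    changie merge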
diff --git a/.github/workflows/resubmit-for-triage.yml b/.github/workflows/resubmit-for-triage.yml
deleted file mode 100644
index 385ef820..00000000
--- a/.github/workflows/resubmit-for-triage.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-# **what?**
-# When triaging submissions, we sometimes need more information from the issue creator.
-# In those cases we remove the `triage` label and add the `awaiting_response` label.
-# Once we receive a response in the form of a comment, we want the `awaiting_response` label removed
-# and the `triage` label added so that we are aware that the issue needs action.
-
-# **why?**
-# This automates a part of issue triaging while also removing noise from triage lists.
-
-# **when?**
-# This will run when a comment is added to an issue and that issue has an `awaiting_response` label.
-
-name: Resubmit for Triage
-
-on: issue_comment
-
-defaults:
- run:
- shell: bash
-
-permissions:
- issues: write
-
-jobs:
- triage_label:
- if: contains(github.event.issue.labels.*.name, 'awaiting_response')
- uses: dbt-labs/actions/.github/workflows/swap-labels.yml@main
- with:
- add_label: "triage"
- remove_label: "awaiting_response"
- secrets: inherit # this is only acceptable because we own the action we're calling
diff --git a/.github/workflows/scheduled-maintenance.yml b/.github/workflows/scheduled-maintenance.yml
new file mode 100644
index 00000000..d0b6fe29
--- /dev/null
+++ b/.github/workflows/scheduled-maintenance.yml
@@ -0,0 +1,41 @@
+name: "Scheduled maintenance"
+
+on:
+ schedule:
+ - cron: "30 1 * * SAT"
+
+permissions:
+ contents: write
+
+# don't run this in parallel
+concurrency:
+ group: ${{ github.workflow }}
+ cancel-in-progress: true
+
+jobs:
+ pre-commit-autoupdate:
+ uses: dbt-labs/actions/.github/workflows/pre-commit-autoupdate.yml
+ secrets:
+ TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
+ SLACK_WEBHOOK_PR_URL: ${{ secrets.SLACK_DEV_ADAPTER_PULL_REQUESTS }}
+ SLACK_WEBHOOK_ALERTS_URL: ${{ secrets.SLACK_DEV_ADAPTER_ALERTS }}
+
+ stale:
+ runs-on: ${{ vars.DEFAULT_RUNNER }}
+ strategy:
+ matrix:
+ include:
+ - threshold: 90
+ labels: 'triage:awaiting-response,triage:more-information-needed'
+ - threshold: 360
+ labels: 'misc:good-first-issue,misc:help-wanted,type:tech-debt'
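+ # Each matrix entry runs the stale step once with its own cutoff: issues
+ # carrying triage labels go stale after 90 days, longer-lived community
+ # labels after 360; `any-of-labels` scopes each pass to those labels only.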
+ steps:
+ - uses: actions/stale@v9
+ with:
+ stale-issue-message: "This issue has been marked as Stale because it has been open for ${{ matrix.threshold }} days with no activity. If you would like the issue to remain open, please comment on the issue or else it will be closed in 7 days."
+ stale-pr-message: "This PR has been marked as Stale because it has been open for ${{ matrix.threshold }} days with no activity. If you would like the PR to remain open, please comment on the PR or else it will be closed in 7 days."
+ close-issue-message: "Although we are closing this issue as stale, it's not gone forever. Issues can be reopened if there is renewed community interest. Just add a comment to notify the maintainers."
+ close-pr-message: "Although we are closing this PR as stale, it can still be reopened to continue development. Just add a comment to notify the maintainers."
+ close-issue-reason: "not_planned"
+ days-before-stale: ${{ matrix.threshold }}
+ any-of-labels: ${{ matrix.labels }}
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
deleted file mode 100644
index 75a14dd4..00000000
--- a/.github/workflows/stale.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-# **what?**
-# For issues that have been open for a while without activity, label
-# them as stale with a warning that they will be closed out. If
-# anyone comments to keep the issue open, it will automatically
-# remove the stale label and keep it open.
-
-# Stale label rules:
-# awaiting_response, more_information_needed -> 90 days
-# good_first_issue, help_wanted -> 360 days (a year)
-# tech_debt -> 720 days (2 years)
-# all else defaults -> 180 days (6 months)
-
-# **why?**
-# To keep the repo clean of issues that are no longer relevant
-
-# **when?**
-# Once a day
-
-name: "Close stale issues and PRs"
-on:
- schedule:
- - cron: "30 1 * * *"
-
-permissions:
- issues: write
- pull-requests: write
-
-jobs:
- stale:
- uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
deleted file mode 100644
index b4ac615d..00000000
--- a/.github/workflows/unit-tests.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-name: Unit Tests
-
-on:
- push:
- branches:
- - "main"
- - "*.latest"
- pull_request:
- workflow_dispatch:
-
-permissions: read-all
-
-# cancels previous runs triggered by the same event: keyed on the head ref for PRs, the commit SHA otherwise
-concurrency:
- group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}
- cancel-in-progress: true
-
-jobs:
- unit:
- name: Unit Tests
- runs-on: ubuntu-latest
-
- strategy:
- fail-fast: false
- matrix:
- python-version: ["3.9", "3.10", "3.11", "3.12"]
-
- steps:
- - name: Check out repository
- uses: actions/checkout@v4
- with:
- persist-credentials: false
-
- - name: Setup `hatch`
- uses: ./.github/actions/setup-hatch
- with:
- python-version: ${{ matrix.python-version }}
-
- - name: Run unit tests
- run: hatch run unit-tests
- shell: bash
-
- - name: Publish results
- uses: ./.github/actions/publish-results
- if: always()
- with:
- source-file: "results.csv"
- file-name: "unit_results"
- python-version: ${{ matrix.python-version }}
diff --git a/.github/workflows/user-docs.yml b/.github/workflows/user-docs.yml
new file mode 100644
index 00000000..065d1d2e
--- /dev/null
+++ b/.github/workflows/user-docs.yml
@@ -0,0 +1,31 @@
+name: "Open user docs issue"
+run-name: "Open user docs issue - #${{ github.event.issue.number }} - ${{ github.actor }}"
+
+on:
+ issues:
+ types: [labeled, closed]
+
+defaults:
+ run:
+ shell: bash
+
+permissions:
+ issues: write # comments on issues
+
+# only run this once per issue
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.issue.number }}
+ cancel-in-progress: true
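+# Keying the group on the issue number means overlapping runs for the same
+# issue cancel each other, so a `labeled` and a `closed` event firing close
+# together shouldn't open duplicate docs issues.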
+
+jobs:
+ open_issues:
+ if: |
+ github.event.issue.state == 'closed' &&
+ github.event.issue.state_reason == 'completed' &&
+ contains(github.event.issue.labels.*.name, 'user docs')
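+ # i.e. fire only when an issue labeled `user docs` is closed as completed;
+ # a bare `labeled` event on a still-open issue fails the state check and no-ops.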
+ uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
+ with:
+ issue_repository: "dbt-labs/docs.getdbt.com"
+ issue_title: "Docs Changes Needed from dbt-adapters - Issue #${{ github.event.issue.number }}"
+ issue_body: "At a minimum, update the body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
+ secrets: inherit
diff --git a/pyproject.toml b/pyproject.toml
index 47cd3ece..0c03b341 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -64,6 +64,12 @@ dependencies = [
setup = "pre-commit install"
code-quality = "pre-commit run --all-files"
unit-tests = "python -m pytest {args:tests/unit}"
+workflow-code-quality = "gh workflow run _code-quality.yml --ref $(git rev-parse --abbrev-ref HEAD) -f branch=$(git rev-parse --abbrev-ref HEAD)"
+workflow-generate-changelog = "gh workflow run _generate-changelog.yml --ref $(git rev-parse --abbrev-ref HEAD) -f package=dbt-adapters -f merge=false -f branch=$(git rev-parse --abbrev-ref HEAD)"
+workflow-publish-pypi = "gh workflow run _publish-pypi.yml --ref $(git rev-parse --abbrev-ref HEAD) -f package=dbt-adapters -f deploy-to=test -f branch=$(git rev-parse --abbrev-ref HEAD)"
+workflow-unit-tests = "gh workflow run _unit-tests.yml --ref $(git rev-parse --abbrev-ref HEAD) -f package=dbt-adapters -f branch=$(git rev-parse --abbrev-ref HEAD)"
+workflow-verify-build = "gh workflow run _verify-build.yml --ref $(git rev-parse --abbrev-ref HEAD) -f package=dbt-adapters -f branch=$(git rev-parse --abbrev-ref HEAD)"
+workflow-publish = "gh workflow run publish.yml --ref $(git rev-parse --abbrev-ref HEAD) -f package=dbt-adapters -f branch=$(git rev-parse --abbrev-ref HEAD) -f deploy-to=test -f pypi-internal=false -f pypi-public=true"
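+# Convenience wrappers around `gh workflow run` for exercising the reusable
+# workflows from a feature branch, e.g. `hatch run workflow-unit-tests`
+# (assuming these sit in the default hatch env's scripts table). Each passes
+# the current branch as both the workflow ref and the `branch` input.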

[tool.hatch.envs.build]
detached = true