Use redshift_connector's get_columns call to get column metadata #899

Merged · 16 commits · Sep 10, 2024 · Changes from 7 commits
6 changes: 6 additions & 0 deletions .changes/unreleased/Features-20240826-123954.yaml
@@ -0,0 +1,6 @@
kind: Features
body: Remove `pg_catalog` from metadata queries
time: 2024-08-26T12:39:54.481505-04:00
custom:
Author: mikealfare
Issue: "555"
54 changes: 54 additions & 0 deletions dbt/adapters/redshift/connections.py
@@ -3,6 +3,7 @@
from contextlib import contextmanager
from typing import Any, Callable, Dict, Tuple, Union, Optional, List, TYPE_CHECKING
from dataclasses import dataclass, field
import time

import sqlparse
import redshift_connector
@@ -12,10 +13,14 @@
from dbt.adapters.sql import SQLConnectionManager
from dbt.adapters.contracts.connection import AdapterResponse, Connection, Credentials
from dbt.adapters.events.logging import AdapterLogger
from dbt.adapters.events.types import SQLQuery, SQLQueryStatus
from dbt_common.contracts.util import Replaceable
from dbt_common.dataclass_schema import dbtClassMixin, StrEnum, ValidationError
from dbt_common.events.contextvars import get_node_info
from dbt_common.events.functions import fire_event
from dbt_common.helper_types import Port
from dbt_common.exceptions import DbtRuntimeError, CompilationError, DbtDatabaseError
from dbt_common.utils import cast_to_str

if TYPE_CHECKING:
# Indirectly imported via agate_helper, which is lazy loaded further downfile.
@@ -460,3 +465,52 @@ def _initialize_sqlparse_lexer():

if hasattr(Lexer, "get_default_instance"):
Lexer.get_default_instance()

def columns_in_relation(self, relation) -> List[Dict[str, Any]]:
connection = self.get_thread_connection()

fire_event(
SQLQuery(
conn_name=cast_to_str(connection.name),
sql=f"get_columns_in_relation: {relation.render()}",
node_info=get_node_info(),
)
)

pre = time.perf_counter()

cursor = connection.handle.cursor()
columns = cursor.get_columns(
catalog=relation.database,
schema_pattern=relation.schema,
tablename_pattern=relation.identifier,
)

fire_event(
SQLQueryStatus(
status=str(self.get_response(cursor)),
elapsed=time.perf_counter() - pre,
node_info=get_node_info(),
)
)

return [self._parse_column_results(column) for column in columns]

@staticmethod
def _parse_column_results(record: Tuple[Any, ...]) -> Dict[str, Any]:
# column positions in the tuple
column_name = 3
dtype_code = 4
dtype_name = 5
column_size = 6
decimals = 8

char_dtypes = [1, 12]
num_dtypes = [2, 3, 4, 5, 6, 7, 8]
return {
"column": record[column_name],
"dtype": record[dtype_name],
"char_size": record[column_size] if record[dtype_code] in char_dtypes else None,
"numeric_precision": record[column_size] if record[dtype_code] in num_dtypes else None,
"numeric_scale": record[decimals] if record[dtype_code] in num_dtypes else None,
}
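
For context on the positional indices above: `cursor.get_columns()` in `redshift_connector` returns one tuple per column, laid out like a JDBC `getColumns` result set (catalog, schema, table, column name, type code, type name, size, buffer length, decimal digits, ...), which is what the hard-coded positions 3–8 are reading; the type-code buckets appear to correspond to `java.sql.Types` values (1/12 for CHAR/VARCHAR, 2–8 for the numeric types). A minimal sketch of `_parse_column_results` applied to an illustrative record; the tuple values below are made up for the example, not taken from a real cluster:

```python
from dbt.adapters.redshift.connections import RedshiftConnectionManager

# Illustrative tuple shaped like a cursor.get_columns() row; only the
# positions used by _parse_column_results matter here:
#   3 = column name, 4 = type code, 5 = type name, 6 = size, 8 = decimals
record = (
    "my_db",      # 0: catalog
    "my_schema",  # 1: schema
    "my_model",   # 2: table
    "my_num",     # 3: column name
    2,            # 4: type code (falls in the numeric bucket above)
    "numeric",    # 5: type name
    3,            # 6: column size, i.e. precision for numerics
    None,         # 7: not used by the parser
    2,            # 8: decimal digits, i.e. scale for numerics
)

parsed = RedshiftConnectionManager._parse_column_results(record)
# {'column': 'my_num', 'dtype': 'numeric', 'char_size': None,
#  'numeric_precision': 3, 'numeric_scale': 2}
```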
15 changes: 14 additions & 1 deletion dbt/adapters/redshift/impl.py
@@ -1,9 +1,12 @@
import os
from dataclasses import dataclass

from dbt_common.behavior_flags import BehaviorFlag
from dbt_common.contracts.constraints import ConstraintType
from typing import Optional, Set, Any, Dict, Type, TYPE_CHECKING
from typing import Optional, Set, Any, Dict, Type, TYPE_CHECKING, List
from collections import namedtuple
from dbt.adapters.base import PythonJobHelper
from dbt.adapters.base.column import Column
from dbt.adapters.base.impl import AdapterConfig, ConstraintSupport
from dbt.adapters.base.meta import available
from dbt.adapters.capability import Capability, CapabilityDict, CapabilitySupport, Support
@@ -65,6 +68,10 @@ class RedshiftAdapter(SQLAdapter):
}
)

@property
def _behavior_extra(self) -> List[BehaviorFlag]:
return [{"name": "retire_pg_catalog", "default": False}]
@classmethod
def date_function(cls):
return "getdate()"
@@ -87,6 +94,12 @@ def drop_relation(self, relation):
with self.connections.fresh_transaction():
return super().drop_relation(relation)

def get_columns_in_relation(self, relation) -> List[Column]:
if self.behavior.retire_pg_catalog:
column_configs = self.connections.columns_in_relation(relation)
return [Column(**column) for column in column_configs]
return super().get_columns_in_relation(relation)

@classmethod
def convert_text_type(cls, agate_table: "agate.Table", col_idx):
column = agate_table.columns[col_idx]
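
The new path is opt-in: `get_columns_in_relation` only routes through `connections.columns_in_relation` when the `retire_pg_catalog` behavior flag is enabled (it defaults to `False` per `_behavior_extra`); otherwise it falls back to the existing pg_catalog-based implementation. A minimal sketch of what the flag-on branch produces, using illustrative parsed metadata rather than real query output:

```python
from dbt.adapters.base.column import Column

# Dicts shaped like the output of connections.columns_in_relation();
# the values are illustrative.
column_configs = [
    {"column": "my_num", "dtype": "numeric", "char_size": None,
     "numeric_precision": 3, "numeric_scale": 2},
    {"column": "my_char", "dtype": "varchar", "char_size": 1,
     "numeric_precision": None, "numeric_scale": None},
]

# Mirrors the flag-on branch: each parsed dict becomes a dbt Column, the
# same object type the pg_catalog-based path returns.
columns = [Column(**config) for config in column_configs]
```

Callers of `get_columns_in_relation` therefore see the same `Column` objects either way; only the source of the metadata changes.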
8 changes: 4 additions & 4 deletions dev-requirements.txt
@@ -1,8 +1,8 @@
# install latest changes in dbt-core + dbt-postgres
git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core
git+https://github.com/dbt-labs/dbt-adapters.git
git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter
git+https://github.com/dbt-labs/dbt-common.git
git+https://github.com/dbt-labs/dbt-core.git@behavior-flags#subdirectory=core
git+https://github.com/dbt-labs/dbt-adapters.git@behavior-flags
git+https://github.com/dbt-labs/dbt-adapters.git@behavior-flags#subdirectory=dbt-tests-adapter
git+https://github.com/dbt-labs/dbt-common.git@behavior-flags
git+https://github.com/dbt-labs/dbt-postgres.git

# dev
59 changes: 59 additions & 0 deletions tests/functional/test_columns_in_relation.py
@@ -0,0 +1,59 @@
from dbt.adapters.base import Column
from dbt.tests.util import run_dbt
import pytest

from dbt.adapters.redshift import RedshiftRelation


class ColumnsInRelation:

@pytest.fixture(scope="class")
def models(self):
return {"my_model.sql": "select 1.23 as my_num, 'a' as my_char"}

@pytest.fixture(scope="class", autouse=True)
def setup(self, project):
run_dbt(["run"])

@pytest.fixture(scope="class")
def expected_columns(self):
return []

def test_columns_in_relation(self, project, expected_columns):
my_relation = RedshiftRelation.create(
database=project.database,
schema=project.test_schema,
identifier="my_model",
type=RedshiftRelation.View,
)
with project.adapter.connection_named("_test"):
actual_columns = project.adapter.get_columns_in_relation(my_relation)
assert actual_columns == expected_columns


class TestColumnsInRelationBehaviorFlagOff(ColumnsInRelation):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"flags": {}}

@pytest.fixture(scope="class")
def expected_columns(self):
# the SDK query returns "varchar" whereas our custom query returns "character varying"
return [
Column(column="my_num", dtype="numeric", numeric_precision=3, numeric_scale=2),
Column(column="my_char", dtype="character varying", char_size=1),
]


class TestColumnsInRelationBehaviorFlagOn(ColumnsInRelation):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"flags": {"retire_pg_catalog": True}}

@pytest.fixture(scope="class")
def expected_columns(self):
# the SDK query returns "varchar" whereas our custom query returns "character varying"
return [
Column(column="my_num", dtype="numeric", numeric_precision=3, numeric_scale=2),
Column(column="my_char", dtype="varchar", char_size=1),
]
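
One consequence the fixture comments call out: the SDK reports the string type as `varchar` while the pg_catalog-based query reports `character varying`, which is why the two behavior-flag test classes carry different `expected_columns`. A small sketch of that difference, assuming only that `Column` is a dataclass with field-wise equality:

```python
from dbt.adapters.base import Column

legacy = Column(column="my_char", dtype="character varying", char_size=1)
sdk = Column(column="my_char", dtype="varchar", char_size=1)

# Same column and size, but the dtype strings differ, so the two Column
# objects compare unequal -- hence the separate expected_columns fixtures.
assert legacy != sdk
```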