Commit

Merge branch 'dc/truncate' of https://github.com/dancardin/fakesnow into dc/truncate
tekumara committed Nov 9, 2024
2 parents 93f8347 + aadd8a8 commit d3fb1cd
Showing 16 changed files with 148 additions and 80 deletions.
2 changes: 1 addition & 1 deletion .cruft.json
@@ -1,6 +1,6 @@
{
"template": "[email protected]:tekumara/python-typed-template.git",
"commit": "0f4759d4049e0ec77f9b351e39197cc01e0cb770",
"commit": "49cb14fd299526b393f6257238c2baf1f3b59725",
"checkout": null,
"context": {
"cookiecutter": {
20 changes: 20 additions & 0 deletions .github/workflows/auto-merge.yml
@@ -0,0 +1,20 @@
name: Dependabot Auto-merge
on:
pull_request:
types: [opened]

permissions:
contents: write
pull-requests: write

jobs:
dependabot:
runs-on: ubuntu-latest
if: github.event.pull_request.user.login == 'dependabot[bot]' && github.repository == 'tekumara/fakesnow'
steps:
- name: Enable auto-merge for Dependabot PRs
# don't include verbose dependabot PR description as body when merging
run: gh pr merge --auto --squash "$PR_URL" --body ""
env:
PR_URL: ${{github.event.pull_request.html_url}}
GH_TOKEN: ${{secrets.GITHUB_TOKEN}}
2 changes: 2 additions & 0 deletions .github/workflows/ci.yml
@@ -12,6 +12,8 @@ jobs:
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
- name: Install uv
uses: astral-sh/setup-uv@v3
- name: Cache virtualenv
id: cache-venv
uses: actions/cache@v4
4 changes: 2 additions & 2 deletions .github/workflows/release-please.yml
@@ -21,9 +21,9 @@ jobs:
# https://github.com/apps/potatobot-prime
app-id: ${{ secrets.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- uses: google-github-actions/release-please-action@v4
- uses: googleapis/release-please-action@v4
with:
# use app token so that PRs and releases created by release-please trigger
# additional workflows. They'll also be authored by the app.
# see https://github.com/google-github-actions/release-please-action#github-credentials
# see https://github.com/googleapis/release-please-action#github-credentials
token: ${{ steps.app-token.outputs.token }}
4 changes: 2 additions & 2 deletions .github/workflows/release.yml
@@ -18,8 +18,8 @@ jobs:
- name: Set up Python 3.9
id: setup-python
uses: actions/setup-python@v5
with:
python-version: 3.9
- name: Install uv
uses: astral-sh/setup-uv@v3
- name: Cache virtualenv
id: cache-venv
uses: actions/cache@v4
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -1,8 +1,8 @@
# unless otherwise specified, hooks run on push only
default_stages: [push]
default_stages: [pre-push]
repos:
- repo: https://github.com/crate-ci/typos
rev: v1.23.2
rev: v1.27.0
hooks:
- id: typos
# formatters and linters are available in the virtualenv so they can be run from the makefile & vscode
12 changes: 7 additions & 5 deletions Makefile-common.mk
@@ -1,4 +1,4 @@
MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --warn-undefined-variables --check-symlink-times
SHELL = /bin/bash -o pipefail
.DEFAULT_GOAL := help
.PHONY: help clean install format check pyright test dist hooks install-hooks
@@ -11,7 +11,8 @@ venv ?= .venv
# this is a symlink so we set the --check-symlink-times makeflag above
python := $(venv)/bin/python
# use uv if present, else fall back to pip
pip = $(shell command -v uv >/dev/null && echo "uv pip" || echo "$(venv)/bin/pip")
# set VIRTUAL_ENV to avoid uv installing into a different activated venv
pip = $(shell command -v uv >/dev/null && echo "VIRTUAL_ENV=$(venv) uv pip" || echo "$(venv)/bin/pip")

$(python): $(if $(value CI),|,) .python-version
# create venv using system python even when another venv is active
@@ -20,6 +21,7 @@ $(python): $(if $(value CI),|,) .python-version
$(pip) install --upgrade pip~=24.0

$(venv): $(if $(value CI),|,) pyproject.toml $(python)
# TODO use uv sync
$(pip) install -e '.[dev,server$(if $(value CI),,,notebook)]'
touch $(venv)

@@ -52,9 +54,9 @@ test: $(venv)

## build python distribution
dist: $(venv)
# start with a clean slate (see setuptools/#2347)
rm -rf dist *.egg-info
$(venv)/bin/python -m build --sdist --wheel
# start with a clean slate (see setuptools/#2347)
rm -rf build dist *.egg-info
$(venv)/bin/python -m build --wheel

## publish to pypi
publish: $(venv)
1 change: 1 addition & 0 deletions fakesnow/cursor.py
@@ -162,6 +162,7 @@ def _transform(self, expression: exp.Expression) -> exp.Expression:
.transform(transforms.extract_comment_on_columns)
.transform(transforms.information_schema_fs_columns_snowflake)
.transform(transforms.information_schema_fs_tables_ext)
.transform(transforms.information_schema_fs_views)
.transform(transforms.drop_schema_cascade)
.transform(transforms.tag)
.transform(transforms.semi_structured_types)
2 changes: 1 addition & 1 deletion fakesnow/info_schema.py
@@ -102,7 +102,7 @@
# replicates https://docs.snowflake.com/sql-reference/info-schema/views
SQL_CREATE_INFORMATION_SCHEMA_VIEWS_VIEW = Template(
"""
create view if not exists ${catalog}.information_schema.views AS
create view if not exists ${catalog}.information_schema._fs_views AS
select
database_name as table_catalog,
schema_name as table_schema,
49 changes: 38 additions & 11 deletions fakesnow/transforms.py
@@ -471,7 +471,6 @@ def flatten(expression: exp.Expression) -> exp.Expression:
See https://docs.snowflake.com/en/sql-reference/functions/flatten
TODO: return index.
TODO: support objects.
"""
if (
@@ -483,20 +482,34 @@ def flatten(expression: exp.Expression) -> exp.Expression:
):
explode_expression = expression.this.this.expression

return exp.Lateral(
this=exp.Unnest(
value = exp.Cast(
this=explode_expression,
to=exp.DataType(
this=exp.DataType.Type.ARRAY,
expressions=[exp.DataType(this=exp.DataType.Type.JSON, nested=False, prefix=False)],
nested=True,
),
)

return exp.Subquery(
this=exp.Select(
expressions=[
exp.Cast(
this=explode_expression,
to=exp.DataType(
this=exp.DataType.Type.ARRAY,
expressions=[exp.DataType(this=exp.DataType.Type.JSON, nested=False, prefix=False)],
nested=True,
exp.Unnest(
expressions=[value],
alias=exp.Identifier(this="VALUE", quoted=False),
),
exp.Alias(
this=exp.Sub(
this=exp.Anonymous(
this="generate_subscripts", expressions=[value, exp.Literal(this="1", is_string=False)]
),
expression=exp.Literal(this="1", is_string=False),
),
)
alias=exp.Identifier(this="INDEX", quoted=False),
),
],
),
alias=exp.TableAlias(this=alias.this, columns=[exp.Identifier(this="VALUE", quoted=False)]),
alias=exp.TableAlias(this=alias.this),
)

return expression
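
For context on the rewritten flatten transform above: it now builds a subquery that pairs unnest with generate_subscripts, so each flattened element also carries a zero-based INDEX the way Snowflake's FLATTEN does. A minimal sketch of that query shape, assuming DuckDB 1.1 semantics for unnest and generate_subscripts (the literal list and aliases are illustrative, not taken from the commit):

import duckdb

# unnest() yields one row per element; generate_subscripts(arr, 1) yields the matching
# 1-based position, so subtracting 1 gives Snowflake-style zero-based indexes.
rows = duckdb.sql(
    """
    select * from (
        select
            unnest(['s1', 's3', 's2']) as value,
            generate_subscripts(['s1', 's3', 's2'], 1) - 1 as "index"
    ) as f
    """
).fetchall()
print(rows)  # expected: [('s1', 0), ('s3', 1), ('s2', 2)]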
@@ -622,6 +635,20 @@ def information_schema_fs_tables_ext(expression: exp.Expression) -> exp.Expressi
return expression


def information_schema_fs_views(expression: exp.Expression) -> exp.Expression:
"""Use information_schema._fs_views to return Snowflake's version instead of duckdb's."""

if (
isinstance(expression, exp.Select)
and (tbl_exp := expression.find(exp.Table))
and tbl_exp.name.upper() == "VIEWS"
and tbl_exp.db.upper() == "INFORMATION_SCHEMA"
):
tbl_exp.set("this", exp.Identifier(this="_FS_VIEWS", quoted=False))

return expression
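
A quick illustration of what this transform does, as a hedged sketch rather than part of the commit (the input query is illustrative):

import sqlglot

from fakesnow import transforms

# parse a Snowflake query, apply the transform, and render it for DuckDB
expr = sqlglot.parse_one("select * from information_schema.views", read="snowflake")
print(expr.transform(transforms.information_schema_fs_views).sql(dialect="duckdb"))
# expected output, roughly: SELECT * FROM information_schema._FS_VIEWS

The _FS_VIEWS target is the view created in fakesnow/info_schema.py above, so the query returns Snowflake-shaped columns rather than DuckDB's built-in information_schema.views.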


def integer_precision(expression: exp.Expression) -> exp.Expression:
"""Convert integers to bigint.
2 changes: 1 addition & 1 deletion package.json
@@ -1,5 +1,5 @@
{
"devDependencies": {
"pyright": "1.1.382"
"pyright": "1.1.387"
}
}
10 changes: 6 additions & 4 deletions pyproject.toml
@@ -8,7 +8,7 @@ classifiers = ["License :: OSI Approved :: MIT License"]
keywords = ["snowflake", "snowflakedb", "fake", "local", "mock", "testing"]
requires-python = ">=3.9"
dependencies = [
"duckdb~=1.0.0",
"duckdb~=1.1.3",
"pyarrow",
"snowflake-connector-python",
"sqlglot~=25.24.1",
@@ -28,13 +28,13 @@ dev = [
"pandas-stubs",
# include compatible version of pandas, and secure-local-storage for token caching
"snowflake-connector-python[pandas, secure-local-storage]",
"pre-commit~=3.4",
"pre-commit~=4.0",
"pyarrow-stubs==10.0.1.9",
"pytest~=8.0",
"pytest-asyncio",
"ruff~=0.6.3",
"ruff~=0.7.2",
"twine~=5.0",
"snowflake-sqlalchemy~=1.5.0",
"snowflake-sqlalchemy~=1.6.1",
]
# for debugging, see https://duckdb.org/docs/guides/python/jupyter.html
notebook = ["duckdb-engine", "ipykernel", "jupysql"]
@@ -92,7 +92,9 @@ select = [
"SIM", # flake8-simplify
"S113", # request-without-timeout
"A", # flake8-builtins
"ASYNC", # flake8-async
]

ignore = [
# allow untyped self and cls args
"ANN101",
2 changes: 1 addition & 1 deletion tests/conftest.py
@@ -44,7 +44,7 @@ def dcur(conn: snowflake.connector.SnowflakeConnection) -> Iterator[snowflake.co

@pytest.fixture
def snowflake_engine(_fakesnow: None) -> Engine:
return create_engine("snowflake://user:password@account/db1/schema1") # type: ignore
return create_engine("snowflake://user:password@account/db1/schema1")


@pytest.fixture(scope="session")
63 changes: 32 additions & 31 deletions tests/test_fakes.py
@@ -7,6 +7,7 @@
import re
import tempfile
from decimal import Decimal
from typing import cast

import pandas as pd
import pytest
@@ -713,6 +714,17 @@ def test_flatten(cur: snowflake.connector.cursor.SnowflakeCursor):
assert cur.fetchall() == [(1, '"banana"'), (2, '"coconut"'), (2, '"durian"')]


def test_flatten_index(cur: snowflake.connector.cursor.SnowflakeCursor):
cur.execute(
"""
select id, f.value::varchar as v, f.index as i
from (select column1 as id, column2 as col from (values (1, 's1,s3,s2'), (2, 's2,s1'))) as t
, lateral flatten(input => split(t.col, ',')) as f order by id;
"""
)
assert cur.fetchall() == [(1, "s1", 0), (1, "s3", 1), (1, "s2", 2), (2, "s2", 0), (2, "s1", 1)]


def test_flatten_value_cast_as_varchar(cur: snowflake.connector.cursor.SnowflakeCursor):
cur.execute(
"""
@@ -1084,44 +1096,33 @@ def test_show_keys(dcur: snowflake.connector.cursor.SnowflakeCursor):

def test_show_objects(dcur: snowflake.connector.cursor.SnowflakeCursor):
dcur.execute("create table example(x int)")
dcur.execute("create view view1 as select * from example")
dcur.execute("show terse objects in db1.schema1")
dcur.execute("create schema schema2")
dcur.execute("create view schema2.view1 as select * from schema1.example")

objects = [
{
"created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
"name": "EXAMPLE",
"kind": "TABLE",
"database_name": "DB1",
"kind": "TABLE",
"name": "EXAMPLE",
"schema_name": "SCHEMA1",
},
{
"created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
"name": "VIEW1",
"kind": "VIEW",
"database_name": "DB1",
"schema_name": "SCHEMA1",
"kind": "VIEW",
"name": "VIEW1",
"schema_name": "SCHEMA2",
},
]
assert dcur.fetchall() == objects

dcur.execute("show terse objects in db1.schema1")
assert dcur.fetchall() == [objects[0]]

dcur.execute("show terse objects in database")
assert dcur.fetchall() == [
*objects,
{
"created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
"name": "databases",
"kind": "VIEW",
"database_name": "DB1",
"schema_name": "information_schema",
},
{
"created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
"name": "views",
"kind": "VIEW",
"database_name": "DB1",
"schema_name": "information_schema",
},
]
rows: list[dict] = cast(list[dict], dcur.fetchall())
assert [r for r in rows if r["schema_name"] != "information_schema"] == objects

assert [r.name for r in dcur.description] == ["created_on", "name", "kind", "database_name", "schema_name"]

dcur.execute("show objects").fetchall()
@@ -1148,16 +1149,16 @@ def test_show_schemas(dcur: snowflake.connector.cursor.SnowflakeCursor):
assert dcur.fetchall() == [
{
"created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
"name": "SCHEMA1",
"kind": None,
"database_name": "DB1",
"kind": None,
"name": "SCHEMA1",
"schema_name": None,
},
{
"created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
"name": "information_schema",
"kind": None,
"database_name": "DB1",
"kind": None,
"name": "information_schema",
"schema_name": None,
},
]
Expand All @@ -1171,9 +1172,9 @@ def test_show_tables(dcur: snowflake.connector.cursor.SnowflakeCursor):
objects = [
{
"created_on": datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc),
"name": "EXAMPLE",
"kind": "TABLE",
"database_name": "DB1",
"kind": "TABLE",
"name": "EXAMPLE",
"schema_name": "SCHEMA1",
},
]
2 changes: 1 addition & 1 deletion tests/test_sqlalchemy.py
@@ -10,7 +10,7 @@ def test_engine(snowflake_engine: Engine):
with snowflake_engine.connect() as conn:
conn.execute(TextClause("CREATE VIEW foo AS SELECT * FROM information_schema.databases"))

result = conn.execute("SELECT database_name FROM foo")
result = conn.execute(TextClause("SELECT database_name FROM foo"))
assert result
assert result.fetchall() == [("DB1",)]
