CT-2112: bump snowflake connector python (#476)

* Raise the upper bound to be the next major version of `snowflake-connector-python`
* Changelog entry
* Update .changes/unreleased/Dependencies-20230216-093128.yaml
* created ubuntu-py38 image for local integration testing, added makefile instructions for easy deployment
* added debian image
* added docker dev-environment image
* added additional python versions in ubuntu and debian containers, added warning readme file
* resolved five of seven failing tests
* resolved TestSimpleBigSeedBatched.test_big_batched_seed() test failure
* updated changelog to show specific version
* added changelog for updated build dependencies

---------

Co-authored-by: Doug Beatty <doug.beatty@dbtlabs.com>
Co-authored-by: Doug Beatty <44704949+dbeatty10@users.noreply.github.com>
This commit is contained in:
Mike Alfare
2023-03-08 10:11:21 -05:00
committed by GitHub
parent d1b6cfea05
commit 967a8e93b7
12 changed files with 191 additions and 30 deletions

View File

@@ -0,0 +1,6 @@
kind: Dependencies
body: Update snowflake-connector-python to 3.0
time: 2023-02-16T09:31:28.844127-07:00
custom:
Author: dbeatty10
Issue: "469"

View File

@@ -0,0 +1,6 @@
kind: Dependencies
body: Removed explicit dependencies for cryptography and requests
time: 2023-03-04T13:54:58.498449-05:00
custom:
Author: mikealfare
PR: "476"

2
.dockerignore Normal file
View File

@@ -0,0 +1,2 @@
*
!docker_dev

View File

@@ -64,3 +64,33 @@ help: ## Show this help message.
@echo
@echo 'targets:'
@grep -E '^[7+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
# Local-development container targets.
# Each target builds the matching image from docker_dev/ and drops you into an
# interactive shell with the repo mounted at /opt/code.
# The trailing `## …` comments are picked up by the `help` target above.
.PHONY: ubuntu-py38
ubuntu-py38:  ## Build and run an Ubuntu dev container with Python 3.8
	docker build -f docker_dev/ubuntu.Dockerfile -t dbt-snowflake-ubuntu-py38 . --build-arg version=3.8
	docker run --rm -it --name dbt-snowflake-ubuntu-py38 -v $(CURDIR):/opt/code dbt-snowflake-ubuntu-py38

.PHONY: ubuntu-py39
ubuntu-py39:  ## Build and run an Ubuntu dev container with Python 3.9
	docker build -f docker_dev/ubuntu.Dockerfile -t dbt-snowflake-ubuntu-py39 . --build-arg version=3.9
	docker run --rm -it --name dbt-snowflake-ubuntu-py39 -v $(CURDIR):/opt/code dbt-snowflake-ubuntu-py39

.PHONY: ubuntu-py310
ubuntu-py310:  ## Build and run an Ubuntu dev container with Python 3.10
	docker build -f docker_dev/ubuntu.Dockerfile -t dbt-snowflake-ubuntu-py310 . --build-arg version=3.10
	docker run --rm -it --name dbt-snowflake-ubuntu-py310 -v $(CURDIR):/opt/code dbt-snowflake-ubuntu-py310

.PHONY: ubuntu-py311
ubuntu-py311:  ## Build and run an Ubuntu dev container with Python 3.11
	docker build -f docker_dev/ubuntu.Dockerfile -t dbt-snowflake-ubuntu-py311 . --build-arg version=3.11
	docker run --rm -it --name dbt-snowflake-ubuntu-py311 -v $(CURDIR):/opt/code dbt-snowflake-ubuntu-py311

# NOTE(review): these two originally pointed at `docker/…`, but every dev image in
# this change set lives under `docker_dev/` (and .dockerignore excludes everything
# else from the build context) — confirm the exact Dockerfile names on disk.
.PHONY: debian-py38
debian-py38:  ## Build and run a Debian dev container with Python 3.8.15 (built from source)
	docker build -f docker_dev/debian-py38.Dockerfile -t dbt-snowflake-debian-py38 . --build-arg version=3.8.15
	docker run --rm -it --name dbt-snowflake-debian-py38 -v $(CURDIR):/opt/code dbt-snowflake-debian-py38

.PHONY: dev-env-default
dev-env-default:  ## Build and run the default Docker dev-environments image
	docker build -f docker_dev/dev-env-default.Dockerfile -t dbt-snowflake-dev-env-default .
	docker run --rm -it --name dbt-snowflake-dev-env-default -v $(CURDIR):/opt/code dbt-snowflake-dev-env-default

5
docker_dev/README.md Normal file
View File

@@ -0,0 +1,5 @@
# Docker Dev Images
These images are solely for development purposes. They are
saved here for convenience. There should be no expectation
of stability or maintenance.

View File

@@ -0,0 +1,49 @@
# Dev image: Debian with CPython built from source at an arbitrary patch version.
FROM debian:latest

# Default to py3.11.1; override at build time, e.g. `docker build ... --build-arg version=3.10.8`
ARG version=3.11.1

# Build dependencies required to compile CPython (plus git for pip installs from VCS)
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        build-essential \
        zlib1g-dev \
        libncurses5-dev \
        libgdbm-dev \
        libnss3-dev \
        libssl-dev \
        libreadline-dev \
        libffi-dev \
        libsqlite3-dev \
        wget \
        libbz2-dev \
        git-all

# Download, extract, and install python.
# FIX: the original used `make -j $(shell nproc)` — `$(shell …)` is GNU Make
# syntax, not shell; inside a RUN it executes a nonexistent `shell` command.
# `$(nproc)` is the correct shell command substitution.
RUN wget https://www.python.org/ftp/python/$version/Python-$version.tgz && \
    tar -xvf Python-$version.tgz && \
    cd Python-$version && \
    ./configure --enable-optimizations && \
    make -j "$(nproc)" && \
    make altinstall

# Clean up apt caches, temp files, and the downloaded tarball
# (the extracted /Python-$version tree is kept — PATH and the
# update-alternatives registration below point into it)
RUN apt-get clean && \
    rm -rf \
        /var/lib/apt/lists/* \
        /tmp/* \
        /var/tmp/* \
        /Python-$version.tgz

# Put the built interpreter on PATH and register it as python3.
# FIX: the original used `RUN export PATH=…`, but `export` only affects that
# single RUN shell and does not persist into later layers or the container;
# ENV is the instruction that persists (ARG values are expanded in ENV).
ENV PATH="/Python-$version:$PATH"
RUN update-alternatives --install /usr/bin/python3 python3 /Python-$version/python 1

# Update python build tools
RUN python3 -m pip install --upgrade pip setuptools wheel --no-cache-dir

# Set up the mount point for the repository
WORKDIR /opt/code
VOLUME /opt/code

# Send stdout/stderr straight to the terminal (no interpreter buffering)
ENV PYTHONUNBUFFERED=1

View File

@@ -0,0 +1,25 @@
# Dev image layered on Docker's dev-environments base image.
FROM docker/dev-environments-default:latest
# install python tooling (pip, wheel, compiler toolchain for sdists);
# NOTE(review): unlike the sibling images no git package is installed here —
# presumably the dev-environments base image already ships git; confirm before
# relying on VCS pip installs of dbt-core
RUN apt-get update && \
apt-get install -y --no-install-recommends \
python3-pip \
python3-wheel \
build-essential
# clean up apt caches and temp files to keep the layer small
RUN apt-get clean && \
rm -rf \
/var/lib/apt/lists/* \
/tmp/* \
/var/tmp/*
# update python build tools
RUN python3 -m pip install --upgrade pip setuptools wheel --no-cache-dir
# setup mount for our code (the Makefile bind-mounts the repo here)
WORKDIR /opt/code
VOLUME /opt/code
# send stdout/stderr to terminal
ENV PYTHONUNBUFFERED=1

View File

@@ -0,0 +1,50 @@
# Dev image: Ubuntu with a deadsnakes-provided Python at a selectable minor version.
FROM ubuntu:latest

# Default to py3.11; override at build time, e.g. `docker build ... --build-arg version=3.10`
ARG version=3.11

# Prevent the python installation from prompting for a time zone region
ARG DEBIAN_FRONTEND=noninteractive

# Get add-apt-repository
RUN apt-get update && \
    apt-get install -y software-properties-common

# Add the deadsnakes PPA (provides non-default python minor versions)
RUN apt-get update && \
    add-apt-repository -y ppa:deadsnakes/ppa

# Install python and git (for installing dbt-core); the apt-get update here
# also refreshes the package index so the new PPA's packages are visible
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        python$version \
        python$version-dev \
        python$version-distutils \
        python$version-venv \
        python3-pip \
        python3-wheel \
        build-essential \
        git-all

# Clean up apt caches and temp files
RUN apt-get clean && \
    rm -rf \
        /var/lib/apt/lists/* \
        /tmp/* \
        /var/tmp/*

# Update the default system interpreter to the newly installed version
RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python$version 1

# Set up the mount point for the repository
WORKDIR /opt/code
VOLUME /opt/code

# Install tox in the system interpreter (it creates its own virtual environments).
# --no-cache-dir added for consistency with the other pip invocations in these images.
RUN pip install tox --no-cache-dir

# Explicitly create a virtual environment as well for interactive testing
RUN python3 -m venv /opt/venv

# Send stdout/stderr straight to the terminal (no interpreter buffering)
ENV PYTHONUNBUFFERED=1

View File

@@ -68,9 +68,7 @@ setup(
include_package_data=True,
install_requires=[
"dbt-core~={}".format(dbt_core_version),
"snowflake-connector-python[secure-local-storage]>=2.4.1,<2.8.2",
"requests<3.0.0",
"cryptography>=3.2,<40.0.0",
"snowflake-connector-python[secure-local-storage]~=3.0",
],
zip_safe=False,
classifiers=[

View File

@@ -1,35 +1,16 @@
import csv
import pytest
from dbt.tests.adapter.simple_seed.test_seed import SeedConfigBase
from pathlib import Path
from dbt.tests.util import (
mkdir,
rm_dir,
run_dbt,
read_file
)
from dbt.tests.adapter.simple_seed.test_seed import SeedConfigBase
from dbt.tests.util import run_dbt
class TestSimpleBigSeedBatched(SeedConfigBase):
@staticmethod
def _make_big_seed(test_data_dir):
mkdir(test_data_dir)
big_seed_path = test_data_dir / Path("tmp.csv")
with open(big_seed_path, "w") as f:
writer = csv.writer(f)
writer.writerow(["seed_id"])
for i in range(0, 20000):
writer.writerow([i])
return big_seed_path
@pytest.fixture(scope="class")
def seeds(self, test_data_dir):
big_seed_path = self._make_big_seed(test_data_dir)
big_seed = read_file(big_seed_path)
yield {
"big_batched_seed.csv": big_seed
}
rm_dir(test_data_dir)
def seeds(self):
seed_data = ["seed_id"]
seed_data.extend([str(i) for i in range(20_000)])
return {"big_batched_seed.csv": "\n".join(seed_data)}
def test_big_batched_seed(self, project):
seed_results = run_dbt(["seed"])

View File

@@ -1,10 +1,12 @@
import pytest
from dbt.tests.util import relation_from_name
from dbt.tests.adapter.constraints.test_constraints import (
BaseConstraintsColumnsEqual,
BaseConstraintsRuntimeEnforcement
)
_expected_sql_snowflake = """
create or replace transient table {0} (
id integer not null primary key ,
@@ -20,6 +22,7 @@ create or replace transient table {0} (
class TestSnowflakeConstraintsColumnsEqual(BaseConstraintsColumnsEqual):
@pytest.fixture
def int_type(self):
return "FIXED"
@@ -45,6 +48,7 @@ class TestSnowflakeConstraintsColumnsEqual(BaseConstraintsColumnsEqual):
class TestSnowflakeConstraintsRuntimeEnforcement(BaseConstraintsRuntimeEnforcement):
@pytest.fixture(scope="class")
def expected_sql(self, project):
relation = relation_from_name(project.adapter, "my_model")

View File

@@ -26,7 +26,7 @@ python scripts/werkzeug-refresh-token.py ${acount_name} '${json_blob}'
Open http://localhost:8080
Log in as the test user, get a resonse page with some environment variables.
Log in as the test user, get a response page with some environment variables.
Update CI providers and test.env with the new values (If you kept the security
integration the same, just the refresh token changed)
"""
@@ -38,20 +38,24 @@ from dbt.tests.util import (
check_relations_equal
)
_MODELS__MODEL_1_SQL = """
select 1 as id
"""
_MODELS__MODEL_2_SQL = """
select 2 as id
"""
_MODELS__MODEL_3_SQL = """
select * from {{ ref('model_1') }}
union all
select * from {{ ref('model_2') }}
"""
_MODELS__MODEL_4_SQL = """
select 1 as id
union all
@@ -60,6 +64,7 @@ select 2 as id
class TestSnowflakeOauth:
@pytest.fixture(scope="class", autouse=True)
def dbt_profile_target(self):
return {