Updating Python Functional Tests (#526)
Showing 13 changed files with 230 additions and 267 deletions.
tests/functional/adapter/python_model/fixtures.py (new file, 112 additions)
simple_python_model = """
import pandas
def model(dbt, spark):
    dbt.config(
        materialized='table',
    )
    data = [[1,2]] * 10
    return spark.createDataFrame(data, schema=['test', 'test2'])
"""

simple_python_model_v2 = """
import pandas
def model(dbt, spark):
    dbt.config(
        materialized='table',
    )
    data = [[1,2]] * 10
    return spark.createDataFrame(data, schema=['test1', 'test3'])
"""

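These fixtures are plain strings that dbt writes out as Python model files. For quick inspection outside dbt, the model() function they define can be exercised against a stand-in for the dbt object. A purely illustrative harness, not part of this commit (the SimpleNamespace stub and local SparkSession are assumptions; requires pyspark and pandas installed locally):

# Hypothetical local harness: exec the fixture string, then call its
# model() with a stub dbt object and a local Spark session.
from types import SimpleNamespace

from pyspark.sql import SparkSession

namespace: dict = {}
exec(simple_python_model, namespace)  # defines model(dbt, spark)

dbt_stub = SimpleNamespace(config=lambda **kwargs: None)  # swallows dbt.config(...)
spark = SparkSession.builder.master("local[1]").getOrCreate()

df = namespace["model"](dbt_stub, spark)
assert df.columns == ["test", "test2"]
assert df.count() == 10
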
incremental_model = """
import pandas as pd
def model(dbt, spark):
    dbt.config(materialized="incremental")
    dbt.config(unique_key="name")
    dbt.config(on_schema_change="append_new_columns")
    if dbt.is_incremental:
        data = [[2, "Teo", "Mr"], [2, "Fang", "Ms"], [3, "Elbert", "Dr"]]
        pdf = pd.DataFrame(data, columns=["date", "name", "title"])
    else:
        data = [[2, "Teo"], [2, "Fang"], [1, "Elia"]]
        pdf = pd.DataFrame(data, columns=["date", "name"])
    df = spark.createDataFrame(pdf)
    return df
"""

expected_incremental = """date,name,title
1,"Elia",null
2,"Teo","Mr"
2,"Fang","Ms"
3,"Elbert","Dr"
"""

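expected_incremental encodes the result of the two runs above: the first (non-incremental) run inserts the two-column frame, then the second run merges on unique_key="name" while on_schema_change="append_new_columns" adds the title column, which stays null for unmatched old rows. A minimal pandas sketch of that merge, illustrative only and not dbt's actual merge implementation:

import pandas as pd

first_run = pd.DataFrame(
    [[2, "Teo"], [2, "Fang"], [1, "Elia"]], columns=["date", "name"]
)
second_run = pd.DataFrame(
    [[2, "Teo", "Mr"], [2, "Fang", "Ms"], [3, "Elbert", "Dr"]],
    columns=["date", "name", "title"],
)

# Keep old rows whose key is absent from the new batch, replace the rest;
# the appended "title" column is NaN (null) for the surviving old row.
merged = pd.concat(
    [first_run[~first_run["name"].isin(second_run["name"])], second_run],
    ignore_index=True,
).sort_values("date")
# -> matches expected_incremental: Elia/null, Teo/Mr, Fang/Ms, Elbert/Dr
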
http_path_schema = """version: 2
models:
  - name: my_versioned_sql_model
    versions:
      - v: 1
  - name: my_python_model
    config:
      http_path: "{{ env_var('DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH') }}"
sources:
  - name: test_source
    loader: custom
    schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}"
    quoting:
      identifier: True
    tags:
      - my_test_source_tag
    tables:
      - name: test_table
        identifier: source
"""

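The per-model http_path override above resolves through Jinja's env_var() at parse time; in plain Python terms the lookup is simply (illustrative only):

import os

# What "{{ env_var('DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH') }}" reads:
http_path = os.environ["DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH"]
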
complex_schema = """version: 2
models:
  - name: complex_config
    config:
      materialized: table
      tags: ["python"]
      location_root: '{{ env_var("DBT_DATABRICKS_LOCATION_ROOT") }}'
    columns:
      - name: date
        tests:
          - not_null
      - name: name
        tests:
          - unique
"""

complex_py = """
import pandas as pd  # type: ignore
def model(dbt, spark):
    dbt.config(materialized="incremental")
    dbt.config(partition_by="date")
    dbt.config(unique_key="name")
    if dbt.is_incremental:
        data = [[2, "Teo"], [2, "Fang"], [3, "Elbert"]]
    else:
        data = [[2, "Teo"], [2, "Fang"], [1, "Elia"]]
    pdf = pd.DataFrame(data, columns=["date", "name"])
    df = spark.createDataFrame(pdf)
    return df
"""

expected_complex = """date,name
1,"Elia"
2,"Teo"
2,"Fang"
3,"Elbert"
"""
tests/functional/adapter/python_model/test_python_model.py (new file, 95 additions)
import os

import pytest

from dbt.tests import util
from dbt.tests.adapter.python_model.test_python_model import (
    BasePythonIncrementalTests,
    BasePythonModelTests,
)
from dbt.tests.adapter.python_model import test_python_model as fixtures
from tests.functional.adapter.python_model import fixtures as override_fixtures


@pytest.mark.skip_profile("databricks_uc_sql_endpoint")
class TestPythonModel(BasePythonModelTests):
    pass


@pytest.mark.skip_profile("databricks_uc_sql_endpoint")
class TestPythonIncrementalModel(BasePythonIncrementalTests):
    pass


@pytest.mark.skip_profile("databricks_uc_sql_endpoint")
class TestChangingSchema:
    @pytest.fixture(scope="class")
    def models(self):
        return {"simple_python_model.py": override_fixtures.simple_python_model}

    def test_changing_schema_with_log_validation(self, project, logs_dir):
        util.run_dbt(["run"])
        util.write_file(
            override_fixtures.simple_python_model_v2,
            project.project_root + "/models",
            "simple_python_model.py",
        )
        util.run_dbt(["run"])
        log_file = os.path.join(logs_dir, "dbt.log")
        with open(log_file, "r") as f:
            log = f.read()
            # validate #5510 log_code_execution works
            assert "On model.test.simple_python_model:" in log
            assert "spark.createDataFrame(data, schema=['test1', 'test3'])" in log
            assert "Execution status: OK in" in log


@pytest.mark.skip_profile("databricks_uc_sql_endpoint")
class TestChangingSchemaIncremental:
    @pytest.fixture(scope="class")
    def models(self):
        return {"incremental_model.py": override_fixtures.incremental_model}

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"expected_incremental.csv": override_fixtures.expected_incremental}

    def test_changing_schema_via_incremental(self, project):
        util.run_dbt(["seed"])
        util.run_dbt(["run"])
        util.run_dbt(["run"])

        util.check_relations_equal(project.adapter, ["incremental_model", "expected_incremental"])


@pytest.mark.skip_profile("databricks_cluster", "databricks_uc_cluster")
class TestSpecifyingHttpPath(BasePythonModelTests):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "schema.yml": override_fixtures.http_path_schema,
            "my_sql_model.sql": fixtures.basic_sql,
            "my_versioned_sql_model_v1.sql": fixtures.basic_sql,
            "my_python_model.py": fixtures.basic_python,
            "second_sql_model.sql": fixtures.second_sql,
        }


@pytest.mark.skip_profile("databricks_cluster", "databricks_uc_sql_endpoint")
class TestComplexConfig:
    @pytest.fixture(scope="class")
    def seeds(self):
        return {"expected_complex.csv": override_fixtures.expected_complex}

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "schema.yml": override_fixtures.complex_schema,
            "complex_config.py": override_fixtures.complex_py,
        }

    def test_expected_handling_of_complex_config(self, project):
        util.run_dbt(["seed"])
        util.run_dbt(["build", "-s", "complex_config"])
        util.run_dbt(["build", "-s", "complex_config"])
        util.check_relations_equal(project.adapter, ["complex_config", "expected_complex"])
tests/functional/adapter/python_model/test_spark.py (new file, 23 additions)
import pytest

from dbt.tests.adapter.python_model.test_spark import (
    BasePySparkTests,
)
from dbt.tests.adapter.python_model import test_spark as fixtures
from dbt.tests import util


@pytest.mark.skip_profile("databricks_uc_sql_endpoint")
class TestPySpark(BasePySparkTests):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "pandas_df.py": fixtures.PANDAS_MODEL,
            "pyspark_df.py": fixtures.PYSPARK_MODEL,
            "pandas_on_spark_df.py": fixtures.PANDAS_ON_SPARK_MODEL,
        }

    def test_different_dataframes(self, project):
        # all three DataFrame flavors should build successfully
        results = util.run_dbt(["run"])
        assert len(results) == 3
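BasePySparkTests builds one model per DataFrame flavor: plain pandas, PySpark, and pandas-on-Spark. For reference, the conversions involved look roughly like this (an illustrative sketch, not the fixture code itself; assumes pyspark>=3.2 for the pandas-on-Spark API):

import pandas as pd
import pyspark.pandas as ps
from pyspark.sql import SparkSession

spark = SparkSession.builder.master("local[1]").getOrCreate()

pandas_df = pd.DataFrame({"id": [1, 2, 3]})         # plain pandas
pyspark_df = spark.createDataFrame(pandas_df)       # PySpark DataFrame
pandas_on_spark_df = ps.from_pandas(pandas_df)      # pandas-on-Spark

# dbt converts whatever model() returns into a Spark DataFrame before
# writing, which is why all three flavors can materialize as tables.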
This commit also deleted five files; their contents are not shown.