"""This file configures pytest, initializes Databricks Connect, and provides fixtures for Spark and loading test data."""

import os
import sys
import pathlib
import json
import csv
from contextlib import contextmanager

try:
    from databricks.connect import DatabricksSession
    from databricks.sdk import WorkspaceClient
    from pyspark.sql import SparkSession
    import pytest
except ImportError:
    raise ImportError(
        "Test dependencies not found.\n\nRun tests using 'uv run pytest'. See http://docs.astral.sh/uv to learn more about uv."
    )


@pytest.fixture()
def spark() -> SparkSession:
    """Provide a SparkSession fixture for tests.

    Minimal example:

        def test_uses_spark(spark):
            df = spark.createDataFrame([(1,)], ["x"])
            assert df.count() == 1
    """
    return DatabricksSession.builder.getOrCreate()


@pytest.fixture()
def load_fixture(spark: SparkSession):
    """Provide a callable that loads JSON or CSV test data from the fixtures/ directory.

    Example usage:

        def test_using_fixture(load_fixture):
            data = load_fixture("my_data.json")
            assert data.count() >= 1
    """

    def _loader(filename: str):
        path = pathlib.Path(__file__).parent.parent / "fixtures" / filename
        suffix = path.suffix.lower()
        if suffix == ".json":
            rows = json.loads(path.read_text())
            return spark.createDataFrame(rows)
        if suffix == ".csv":
            # csv.DictReader yields every value as a string; cast types in tests if needed.
            with path.open(newline="") as f:
                rows = list(csv.DictReader(f))
            return spark.createDataFrame(rows)
        raise ValueError(f"Unsupported fixture type for: {filename}")

    return _loader
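
# CSV fixtures arrive with all-string columns, so tests usually cast before
# asserting on numeric values. A minimal sketch, assuming a hypothetical
# "sales.csv" fixture with an "amount" column (neither ships with this project):
#
#     from pyspark.sql.functions import col
#
#     def test_csv_amounts(load_fixture):
#         df = load_fixture("sales.csv")
#         df = df.withColumn("amount", col("amount").cast("double"))
#         assert df.filter(col("amount") > 0).count() >= 1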


def _enable_fallback_compute():
    """Enable serverless compute if no compute is specified."""
    conf = WorkspaceClient().config
    if conf.serverless_compute_id or conf.cluster_id or os.environ.get("SPARK_REMOTE"):
        return

    url = "https://docs.databricks.com/dev-tools/databricks-connect/cluster-config"
    print("☁️ no compute specified, falling back to serverless compute", file=sys.stderr)
    print(f"   see {url} for manual configuration", file=sys.stderr)

    os.environ["DATABRICKS_SERVERLESS_COMPUTE_ID"] = "auto"
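
# A minimal sketch of pinning a specific cluster instead of the serverless
# fallback, assuming the standard Databricks SDK environment variable
# (the cluster ID below is a placeholder):
#
#     export DATABRICKS_CLUSTER_ID=0123-456789-abcdefgh
#     uv run pytest
#
# With cluster_id populated, the check above returns early and serverless
# compute is never enabled.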


@contextmanager
def _allow_stderr_output(config: pytest.Config):
    """Temporarily disable pytest output capture."""
    capman = config.pluginmanager.get_plugin("capturemanager")
    if capman:
        with capman.global_and_fixture_disabled():
            yield
    else:
        yield


def pytest_configure(config: pytest.Config):
    """Configure pytest session."""
    with _allow_stderr_output(config):
        _enable_fallback_compute()

        # Initialize the Spark session eagerly, so it is available even when
        # SparkSession.builder.getOrCreate() is used. For DB Connect 15+,
        # we validate version compatibility with the remote cluster.
        if hasattr(DatabricksSession.builder, "validateSession"):
            DatabricksSession.builder.validateSession().getOrCreate()
        else:
            DatabricksSession.builder.getOrCreate()
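

# Because pytest_configure initializes the session eagerly, code under test that
# calls SparkSession.builder.getOrCreate() picks up the same Databricks Connect
# session. A minimal sketch, assuming a hypothetical my_project.etl module:
#
#     def test_etl(spark):
#         from my_project.etl import run_pipeline  # hypothetical helper
#         assert run_pipeline(spark) is not None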