Skip to content

Commit

Permalink
Fix: Adds small additional test to check dbutils functions
Browse files Browse the repository at this point in the history
  - Ensures that all synonyms of dbutils.fs methods are
   ignored and only dbutils.fs.xxx functions are checked

Signed-off-by: Jim.Idle <[email protected]>
  • Loading branch information
jimidle committed Apr 24, 2024
1 parent a684858 commit 59cbb45
Showing 1 changed file with 14 additions and 3 deletions.
17 changes: 14 additions & 3 deletions tests/unit/source_code/test_pyspark.py
Original file line number Diff line number Diff line change
Expand Up @@ -731,12 +731,23 @@ def test_spark_cloud_direct_access(empty_index, code, expected):
assert advisories == expected


# TODO: Expand the tests to cover all dbutils.fs functions
def test_direct_cloud_access_reports_nothing(empty_index):
# Names of dbutils.fs operations. Each is exercised below as a bare
# spark.<fn>(...) call to confirm the linter only flags the fully
# qualified dbutils.fs.<fn> form and ignores same-named calls on
# other objects.
FS_FUNCTIONS = [
    "ls",
    "cp",
    "rm",
    "mv",
    "head",
    "put",
    "mkdirs",
]


@pytest.mark.parametrize("fs_function", FS_FUNCTIONS)
def test_direct_cloud_access_reports_nothing(empty_index, fs_function):
    """A bare spark.<fn>(...) call must yield no advisories.

    Only fully qualified dbutils.fs.<fn> calls are subject to linting;
    a same-named method on any other object is ignored.
    """
    from_table = FromTable(empty_index, CurrentSessionState())
    linter = SparkSql(from_table, empty_index)
    # Not a dbutils.fs call, so the linter must stay silent.
    sample = f'spark.{fs_function}("/bucket/path")'
    assert not list(linter.lint(sample))

Expand Down

0 comments on commit 59cbb45

Please sign in to comment.