40 changes: 39 additions & 1 deletion python/run-tests.py
@@ -20,6 +20,7 @@
import logging
from argparse import ArgumentParser
import os
import importlib
import platform
import re
import shutil
@@ -224,6 +225,42 @@ def get_default_python_executables():
return python_execs


def split_and_validate_testnames(testnames):
Review comment (Member):

This is nice. Can we show the full example of this, and fix the docs in https://spark.apache.org/docs/latest/api/python/development/testing.html#testing-pyspark as well?

    testnames_to_test = []

    def module_exists(module):
        try:
            return importlib.util.find_spec(module) is not None
        except ModuleNotFoundError:
            return False

    for testname in testnames.split(','):
        if " " in testname:
            # "{module} {class.testcase_name}"
            module, testcase = testname.split(" ")
            if not module_exists(module):
                print(f"Error: Can't find module '{module}'.")
                sys.exit(-1)
            testnames_to_test.append(f"{module} {testcase}")
        else:
            if module_exists(testname):
                # "{module}"
                testnames_to_test.append(testname)
            else:
                # "{module.class.testcase_name}"
                index = len(testname)
                while (index := testname.rfind(".", 0, index)) != -1:
                    module, testcase = testname[:index], testname[index + 1:]
                    if module_exists(module):
                        testnames_to_test.append(f"{module} {testcase}")
                        break
                else:
                    print(f"Error: Invalid testname '{testname}'.")
                    sys.exit(-1)

    return testnames_to_test
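
For reference, here is a minimal standalone sketch (not part of the PR) of the dotted-name resolution used above: the loop walks the dots from right to left and accepts the longest prefix that importlib can resolve as a module, treating the remainder as "Class.test_method". The helper name resolve and the stdlib test name below are illustrative only, and error handling is simplified to an exception instead of sys.exit.

import importlib.util

def resolve(testname):
    # Walk the dots right-to-left: the first (i.e. longest) prefix that resolves
    # to an importable module is the module; the rest is "Class.test_method".
    index = len(testname)
    while (index := testname.rfind(".", 0, index)) != -1:
        module, testcase = testname[:index], testname[index + 1:]
        try:
            found = importlib.util.find_spec(module) is not None
        except ModuleNotFoundError:
            # e.g. find_spec("unittest.case.TestCase") raises because
            # "unittest.case" is a module, not a package.
            found = False
        if found:
            return f"{module} {testcase}"
    raise ValueError(f"Invalid testname '{testname}'")

# "unittest.case" is the longest importable prefix, so the class and method
# are split off as the test case:
print(resolve("unittest.case.TestCase.debug"))  # -> "unittest.case TestCase.debug"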


def parse_opts():
parser = ArgumentParser(
prog="run-tests"
@@ -256,6 +293,7 @@ def parse_opts():
"For example, 'pyspark.sql.foo' to run the module as unittests or doctests, "
"'pyspark.sql.tests FooTests' to run the specific class of unittests, "
"'pyspark.sql.tests FooTests.test_foo' to run the specific unittest in the class. "
"'pyspark.sql.tests.FooTests.test_foo' will work too. "
"'--modules' option is ignored if they are given.")
)
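
The help text above lists the accepted --testnames formats; the full invocations the review comment asks for would look roughly like the following (a sketch only; the module, class, and test names are illustrative placeholders, not taken from this PR):

python/run-tests --testnames 'pyspark.sql.tests.test_functions'
python/run-tests --testnames 'pyspark.sql.tests.test_functions FunctionsTests.test_rand'
python/run-tests --testnames 'pyspark.sql.tests.test_functions.FunctionsTests.test_rand'

The last form is the one added by this change: split_and_validate_testnames splits the fully dotted name into a module and a "Class.test_method" pair before the tests are scheduled.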
group.add_argument(
@@ -312,7 +350,7 @@ def main():
sys.exit(-1)
LOGGER.info("Will test the following Python modules: %s", [x.name for x in modules_to_test])
else:
-testnames_to_test = opts.testnames.split(',')
+testnames_to_test = split_and_validate_testnames(opts.testnames)
LOGGER.info("Will test the following Python tests: %s", testnames_to_test)

task_queue = Queue.PriorityQueue()