From d781f4204e606bd446106e7e116cc8bb88de468c Mon Sep 17 00:00:00 2001
From: Bryce Adelstein Lelbach aka wash
Date: Tue, 15 Dec 2020 15:44:01 -0800
Subject: [PATCH] Replace the term "sanity", which is non-inclusive.

The term "sanity", whose denotative (i.e. primary) meaning here is
"basic tests", also has negative connotations (i.e. secondary meanings)
relating to mental health. The use of the term "sanity check" may
suggest that individuals with mental illnesses are inferior, wrong, or
incorrect.

In accordance with the Thrust code of conduct, we should strive to
avoid non-inclusive terms like "sanity" and use neutral and inclusive
language instead:

https://github.com/NVIDIA/thrust/blob/main/CODE_OF_CONDUCT.md

The term "sanity" is listed as a non-inclusive term to avoid by a
number of technical and standards organizations:

W3C: https://w3c.github.io/manual-of-style/#inclusive
IETF: https://github.com/ietf/terminology
Google Engineering: https://developers.google.com/style/inclusive-documentation
Twitter Engineering
INCITS (US national standards)
ISO (international standards)
---
 cmake/filecheck_confidence_test           |   1 +
 cmake/sanity                              |   1 -
 examples/CMakeLists.txt                   |   4 +-
 examples/sparse_vector.cu                 |   8 +-
 internal/test/thrust.confidence.filecheck |   1 +
 internal/test/thrust.sanity.filecheck     |   1 -
 internal/test/thrust_nightly.pl           |  23 +++--
 testing/unittest/cuda/testframework.cu    |  40 ++++----
 testing/unittest/cuda/testframework.h     |   2 +-
 testing/unittest/testframework.cu         | 116 +++++++++++-----------
 testing/unittest/testframework.h          |   2 +-
 11 files changed, 99 insertions(+), 100 deletions(-)
 create mode 100644 cmake/filecheck_confidence_test
 delete mode 100644 cmake/sanity
 create mode 100644 internal/test/thrust.confidence.filecheck
 delete mode 100644 internal/test/thrust.sanity.filecheck

diff --git a/cmake/filecheck_confidence_test b/cmake/filecheck_confidence_test
new file mode 100644
index 000000000..db959d55f
--- /dev/null
+++ b/cmake/filecheck_confidence_test
@@ -0,0 +1 @@
+CONFIDENCE
diff --git a/cmake/sanity b/cmake/sanity
deleted file mode 100644
index f9db80b7f..000000000
--- a/cmake/sanity
+++ /dev/null
@@ -1 +0,0 @@
-SANITY
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index b86d8a18b..416cddcb8 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -28,8 +28,8 @@ if (THRUST_ENABLE_EXAMPLE_FILECHECK)
   endif()

   execute_process(
-    COMMAND "${THRUST_FILECHECK_EXECUTABLE}" "${filecheck_data_path}/thrust.sanity.filecheck"
-    INPUT_FILE "${Thrust_SOURCE_DIR}/cmake/sanity"
+    COMMAND "${THRUST_FILECHECK_EXECUTABLE}" "${filecheck_data_path}/thrust.confidence.filecheck"
+    INPUT_FILE "${Thrust_SOURCE_DIR}/cmake/filecheck_confidence_test"
     RESULT_VARIABLE exit_code
   )
diff --git a/examples/sparse_vector.cu b/examples/sparse_vector.cu
index c7528cff2..463bfa008 100644
--- a/examples/sparse_vector.cu
+++ b/examples/sparse_vector.cu
@@ -11,7 +11,6 @@ template
 B_index(6);
   thrust::device_vector B_value(6);
@@ -97,7 +95,7 @@ int main(void)
   // compute sparse vector C = A + B
   thrust::device_vector C_index;
   thrust::device_vector C_value;
-  
+
   sum_sparse_vectors(A_index, A_value, B_index, B_value, C_index, C_value);

   std::cout << "Computing C = A + B for sparse vectors A and B" << std::endl;
diff --git a/internal/test/thrust.confidence.filecheck b/internal/test/thrust.confidence.filecheck
new file mode 100644
index 000000000..897227c80
--- /dev/null
+++ b/internal/test/thrust.confidence.filecheck
@@ -0,0 +1 @@
+ CHECK: CONFIDENCE
diff --git a/internal/test/thrust.sanity.filecheck b/internal/test/thrust.sanity.filecheck
deleted file mode 100644
index 1770bc9f3..000000000
--- a/internal/test/thrust.sanity.filecheck
+++ /dev/null
@@ -1 +0,0 @@
- CHECK: SANITY
diff --git a/internal/test/thrust_nightly.pl b/internal/test/thrust_nightly.pl
index 61e03bda4..79d0c4850 100755
--- a/internal/test/thrust_nightly.pl
+++ b/internal/test/thrust_nightly.pl
@@ -182,12 +182,12 @@ sub process_return_code {

 my $have_filecheck = 1;

-sub filecheck_sanity {
-    my $filecheck_cmd = "$filecheck_path/FileCheck $filecheck_data_path/thrust.sanity.filecheck";
+sub filecheck_test {
+    my $filecheck_cmd = "$filecheck_path/FileCheck $filecheck_data_path/thrust.confidence.filecheck";

     my $filecheck_pid = open(my $filecheck_stdin, "|-", "$filecheck_cmd 2>&1");

-    print $filecheck_stdin "SANITY";
+    print $filecheck_stdin "CONFIDENCE";

     my $filecheck_ret = 0;
     if (close($filecheck_stdin) == 0)
@@ -196,21 +196,21 @@ sub filecheck_sanity {
     }

     if ($filecheck_ret == 0) {
-        printf("#### SANE FileCheck\n");
+        printf("&&&& PASSED FileCheck\n");
     } else {
        # Use a temporary file to send the output to
        # FileCheck so we can get the output this time,
        # because Perl and bidirectional pipes suck.
        my $tmp = File::Temp->new();
        my $tmp_filename = $tmp->filename;
-       print $tmp "SANITY";
+       print $tmp "CONFIDENCE";
        printf("********************************************************************************\n");
        print `$filecheck_cmd -input-file $tmp_filename`;
        printf("********************************************************************************\n");
-       process_return_code("FileCheck Sanity", $filecheck_ret, "");
-       printf("#### INSANE FileCheck\n");
+       process_return_code("FileCheck Test", $filecheck_ret, "");
+       printf("&&&& FAILED FileCheck\n");
        $have_filecheck = 0;
     }
@@ -243,7 +243,7 @@ sub run_cmd {
         {
             $ret = $?;
         }
-        
+
         alarm 0;
     };
     my $elapsed = timestamp() - $start;
@@ -286,7 +286,7 @@ sub run_examples {
     {
         my $test_exe = $test;

-        # Ignore FileCheck files. 
+        # Ignore FileCheck files.
        if ($test =~ /[.]filecheck$/) {
            next;
        }
@@ -403,7 +403,7 @@ sub run_unit_tests {
     {
         my $test_exe = $test;

-        # Ignore FileCheck files. 
+        # Ignore FileCheck files.
        if ($test =~ /[.]filecheck$/) {
            next;
        }
@@ -558,6 +558,7 @@ sub dvs_summary {
    printf("\n");

+   # We can't remove "sanity" here yet because DVS looks for this exact string.
    printf("CUDA DVS BASIC SANITY SCORE : %.1f\n", $dvs_score);

    if ($failures + $errors > 0) {
@@ -582,7 +583,7 @@ sub dvs_summary {

 printf("\n");

-filecheck_sanity();
+filecheck_test();

 printf("\n");

diff --git a/testing/unittest/cuda/testframework.cu b/testing/unittest/cuda/testframework.cu
index a8bc52ea4..4c34b0b8f 100644
--- a/testing/unittest/cuda/testframework.cu
+++ b/testing/unittest/cuda/testframework.cu
@@ -29,15 +29,15 @@ void list_devices(void)
   {
     std::cout << "There is no device supporting CUDA" << std::endl;
   }
-  
+
   int selected_device;
   cudaGetDevice(&selected_device);
-  
+
   for (int dev = 0; dev < deviceCount; ++dev)
   {
     cudaDeviceProp deviceProp;
     cudaGetDeviceProperties(&deviceProp, dev);
-    
+
     if(dev == 0)
     {
       if(deviceProp.major == 9999 && deviceProp.minor == 9999)
@@ -47,12 +47,12 @@ void list_devices(void)
       else
         std::cout << "There are " << deviceCount << " devices supporting CUDA" << std:: endl;
     }
-    
+
     std::cout << "\nDevice " << dev << ": \"" << deviceProp.name << "\"";
     if(dev == selected_device)
       std::cout << " [SELECTED]";
     std::cout << std::endl;
-    
+
     std::cout << " Major revision number: " << deviceProp.major << std::endl;
     std::cout << " Minor revision number: " << deviceProp.minor << std::endl;
     std::cout << " Total amount of global memory: " << deviceProp.totalGlobalMem << " bytes" << std::endl;
@@ -70,16 +70,16 @@ template Iterator my_next(Iterator iter)
 std::vector<int> CUDATestDriver::target_devices(const ArgumentMap &kwargs)
 {
   std::vector<int> result;
-  
+
   // by default, test all devices in the system (device id -1)
   int device_id = kwargs.count("device") ? atoi(kwargs.find("device")->second.c_str()) : -1;
-  
+
   if(device_id < 0)
   {
     // target all devices in the system
     int count = 0;
     cudaGetDeviceCount(&count);
-    
+
     result.resize(count);
     std::iota(result.begin(), result.end(), 0);
   }
@@ -88,7 +88,7 @@ std::vector CUDATestDriver::target_devices(const ArgumentMap &kwargs)
     // target the specified device
     result = std::vector<int>(1,device_id);
   }
-  
+
   return result;
 }

@@ -105,12 +105,12 @@ bool CUDATestDriver::check_cuda_error(bool concise)
                 << std::string(cudaGetErrorString(error)) << "]"
                 << std::endl;
     }
-  } 
+
  }

   return cudaSuccess != error;
 }

-bool CUDATestDriver::post_test_sanity_check(const UnitTest &test, bool concise)
+bool CUDATestDriver::post_test_confidence_check(const UnitTest &test, bool concise)
 {
   cudaError_t const error = cudaDeviceSynchronize();
   if(cudaSuccess != error)
   {
@@ -127,7 +127,7 @@ bool CUDATestDriver::post_test_sanity_check(const UnitTest &test, bool concise)

   return cudaSuccess == error;
 }
-  
+
 bool CUDATestDriver::run_tests(const ArgumentSet &args, const ArgumentMap &kwargs)
 {
   bool verbose = kwargs.count("verbose");
@@ -142,17 +142,17 @@ bool CUDATestDriver::run_tests(const ArgumentSet &args, const ArgumentMap &kwarg
   // check error status before doing anything
   if(check_cuda_error(concise))
     return false;
-  
+
   bool result = true;
   if(kwargs.count("verbose"))
   {
     list_devices();
   }
-  
+
   // figure out which devices to target
   std::vector<int> devices = target_devices(kwargs);
-  
+
   // target each device
   for(std::vector<int>::iterator device = devices.begin();
       device != devices.end();
@@ -170,7 +170,7 @@ bool CUDATestDriver::run_tests(const ArgumentSet &args, const ArgumentMap &kwarg
       // note which device we're skipping
       cudaDeviceProp deviceProp;
       cudaGetDeviceProperties(&deviceProp, *device);
-      
+
       std::cout << "Skipping Device " << *device << ": \"" << deviceProp.name << "\"" << std::endl;
       continue;
     }
@@ -181,23 +181,23 @@
       // note which device we're testing
       cudaDeviceProp deviceProp;
       cudaGetDeviceProperties(&deviceProp, *device);
-      
+
       std::cout << "Testing Device " << *device << ": \"" << deviceProp.name << "\"" << std::endl;
     }

     // check error status before running any tests
     if(check_cuda_error(concise))
       return false;
-    
+
     // run tests
     result &= UnitTestDriver::run_tests(args, kwargs);
-    
+
     if(!concise && my_next(device) != devices.end())
     {
       // provide some separation between the output of separate tests
       std::cout << std::endl;
     }
   }
-  
+
   return result;
 }
diff --git a/testing/unittest/cuda/testframework.h b/testing/unittest/cuda/testframework.h
index 953f88c1c..40c7c3faa 100644
--- a/testing/unittest/cuda/testframework.h
+++ b/testing/unittest/cuda/testframework.h
@@ -16,7 +16,7 @@ class CUDATestDriver

   bool check_cuda_error(bool concise);

-  virtual bool post_test_sanity_check(const UnitTest &test, bool concise);
+  virtual bool post_test_confidence_check(const UnitTest &test, bool concise);

   virtual bool run_tests(const ArgumentSet &args, const ArgumentMap &kwargs);
 };
diff --git a/testing/unittest/testframework.cu b/testing/unittest/testframework.cu
index 26db08a3e..288cac42d 100644
--- a/testing/unittest/testframework.cu
+++ b/testing/unittest/testframework.cu
@@ -30,7 +30,7 @@ const size_t standard_test_sizes[] =
   (1 << 26) + 1, (1 << 27) - 1, (1 << 27)
 };
-  
+
 const size_t tiny_threshold   = 1 << 5;  // 32
 const size_t small_threshold  = 1 << 8;  // 256
 const size_t medium_threshold = 1 << 12; // 4K
@@ -110,9 +110,9 @@ void process_args(int argc, char ** argv,
   {
     std::string arg(argv[i]);

-    // look for --key or --key=value arguments 
+    // look for --key or --key=value arguments
     if(arg.substr(0,2) == "--")
-    { 
+    {
       std::string::size_type n = arg.find('=',2);

       if(n == std::string::npos)
@@ -135,7 +135,7 @@ void usage(int /*argc*/, char** argv)
 {
   std::string indent = " ";
-  
+
   std::cout << "Example Usage:\n";
   std::cout << indent << argv[0] << "\n";
   std::cout << indent << argv[0] << " TestName1 [TestName2 ...] \n";
@@ -164,14 +164,14 @@ struct TestResult
   TestStatus  status;
   std::string name;
   std::string message;
-  
+
   // XXX use a c++11 timer result when available
   std::clock_t elapsed;
-  
+
   TestResult(const TestStatus status, std::clock_t elapsed, const UnitTest& u,
              const std::string& message = "")
     : status(status), name(u.name), message(message), elapsed(elapsed) {}
-  
+
   bool operator<(const TestResult& tr) const
   {
     if(status < tr.status)
@@ -199,20 +199,20 @@ void record_result(const TestResult& test_result, std::vector< TestResult >& tes
 void report_results(std::vector< TestResult >& test_results, double elapsed_minutes)
 {
   std::cout << std::endl;
-  
+
   std::string hline = "================================================================";
-  
+
   std::sort(test_results.begin(), test_results.end());
-  
+
   size_t num_passes = 0;
   size_t num_failures = 0;
   size_t num_known_failures = 0;
   size_t num_errors = 0;
-  
+
   for(size_t i = 0; i < test_results.size(); i++)
   {
     const TestResult& tr = test_results[i];
-    
+
     if(tr.status == Pass)
     {
       num_passes++;
@@ -220,7 +220,7 @@ void report_results(std::vector< TestResult >& test_results, double elapsed_minu
     else
     {
       std::cout << hline << std::endl;
-      
+
       switch(tr.status)
       {
         case Failure:
@@ -232,13 +232,13 @@ void report_results(std::vector< TestResult >& test_results, double elapsed_minu
         default:
           break;
       }
-      
+
       std::cout << ": " << tr.name << std::endl << tr.message << std::endl;
     }
   }
-  
+
   std::cout << hline << std::endl;
-  
+
   std::cout << "Totals: ";
   std::cout << num_failures << " failures, ";
   std::cout << num_known_failures << " known failures, ";
@@ -257,7 +257,7 @@ void UnitTestDriver::list_tests(void)
 }

-bool UnitTestDriver::post_test_sanity_check(const UnitTest &/*test*/, bool /*concise*/)
+bool UnitTestDriver::post_test_confidence_check(const UnitTest &/*test*/, bool /*concise*/)
 {
   return true;
 }
@@ -266,45 +266,45 @@ bool UnitTestDriver::post_test_sanity_check(const UnitTest &/*test*/, bool /*con
 bool UnitTestDriver::run_tests(std::vector<UnitTest*>& tests_to_run, const ArgumentMap& kwargs)
 {
   std::time_t start_time = std::time(0);
-  
+
   THRUST_DISABLE_MSVC_FORCING_VALUE_TO_BOOL_WARNING_BEGIN
   bool verbose = kwargs.count("verbose");
   bool concise = kwargs.count("concise");
   THRUST_DISABLE_MSVC_FORCING_VALUE_TO_BOOL_WARNING_END
-  
+
   std::vector< TestResult > test_results;
-  
+
   if(verbose && concise)
   {
     std::cout << "--verbose and --concise cannot be used together" << std::endl;
     exit(EXIT_FAILURE);
   }
-  
+
   if(!concise)
   {
     std::cout << "Running " << tests_to_run.size() << " unit tests." << std::endl;
   }
-  
+
   for(size_t i = 0; i < tests_to_run.size(); i++)
   {
     UnitTest& test = *tests_to_run[i];
-    
+
     if(verbose)
     {
       std::cout << "Running " << test.name << "..." << std::flush;
     }
-    
+
     try
     {
       // time the test
       std::clock_t start = std::clock();
-      
+
       // run the test
       test.run();
-      
+
       // test passed
       record_result(TestResult(Pass, std::clock() - start, test), test_results);
-    } 
+    }
     catch(unittest::UnitTestFailure& f)
     {
       record_result(TestResult(Failure, (std::numeric_limits::max)(), test, f.message), test_results);
     }
@@ -321,7 +321,7 @@ bool UnitTestDriver::run_tests(std::vector& tests_to_run, const Argu
     {
       record_result(TestResult(Error, (std::numeric_limits::max)(), test, e.message), test_results);
     }
-    
+
     // immediate report
     if(!concise)
     {
@@ -342,7 +342,7 @@ bool UnitTestDriver::run_tests(std::vector& tests_to_run, const Argu
         default:
          break;
       }
-      
+
       std::cout << " " << test.name << std::endl;
     }
     else
@@ -362,24 +362,24 @@ bool UnitTestDriver::run_tests(std::vector& tests_to_run, const Argu
        }
      }
    }
-    
-    if(!post_test_sanity_check(test, concise))
+
+    if(!post_test_confidence_check(test, concise))
    {
      return false;
    }
-    
+
    std::cout.flush();
  }
-  
+
  double elapsed_minutes = double(std::time(0) - start_time) / 60;
-  
+
  // summary report
  if(!concise)
  {
    report_results(test_results, elapsed_minutes);
  }
-  
-  
+
+
  // if any failures or errors return false
  for(size_t i = 0; i < test_results.size(); i++)
  {
@@ -388,7 +388,7 @@ bool UnitTestDriver::run_tests(std::vector& tests_to_run, const Argu
      return false;
    }
  }
-  
+
  // all tests pass or are known failures
  return true;
 }
@@ -400,35 +400,35 @@ bool UnitTestDriver::run_tests(const ArgumentSet& args, const ArgumentMap& kwarg
  {
    // run all tests
    std::vector<UnitTest*> tests_to_run;
-    
+
    for(TestMap::iterator iter = test_map.begin(); iter != test_map.end(); iter++)
    {
      tests_to_run.push_back(iter->second);
    }
-    
+
    return run_tests(tests_to_run, kwargs);
  }
  else
  {
    // all non-keyword arguments are assumed to be test names or partial test names
-    
+
    typedef TestMap::iterator TestMapIterator;
-    
+
    // vector to accumulate tests
    std::vector<UnitTest*> tests_to_run;
-    
+
    for(ArgumentSet::const_iterator iter = args.begin(); iter != args.end(); iter++)
    {
      const std::string& arg = *iter;
-      
+
      size_t len = arg.size();
      size_t matches = 0;
-      
+
      if(arg[len-1] == '*')
      {
        // wildcard search
        std::string search = arg.substr(0,len-1);
-        
+
        TestMapIterator lb = test_map.lower_bound(search);
        while(lb != test_map.end())
        {
@@ -436,8 +436,8 @@ bool UnitTestDriver::run_tests(const ArgumentSet& args, const ArgumentMap& kwarg
          {
            break;
          }
-          
-          tests_to_run.push_back(lb->second); 
+
+          tests_to_run.push_back(lb->second);
          lb++;
          matches++;
        }
@@ -446,21 +446,21 @@ bool UnitTestDriver::run_tests(const ArgumentSet& args, const ArgumentMap& kwarg
      {
        // non-wildcard search
        TestMapIterator lb = test_map.find(arg);
-        
+
        if(lb != test_map.end())
        {
-          tests_to_run.push_back(lb->second); 
+          tests_to_run.push_back(lb->second);
          matches++;
        }
      }
-      
+
      if(matches == 0)
      {
        std::cout << "[ERROR] found no test names matching the pattern: " << arg << std::endl;
        return false;
      }
    }
-    
+
    return run_tests(tests_to_run, kwargs);
  }
 }
@@ -487,21 +487,21 @@ int main(int argc, char **argv)
 {
  ArgumentSet args;
  ArgumentMap kwargs;
-  
+
  process_args(argc, argv, args, kwargs);
-  
+
  if(kwargs.count("help"))
  {
    usage(argc, argv);
    return 0;
  }
-  
+
  if(kwargs.count("list"))
  {
    UnitTestDriver::s_driver().list_tests();
    return 0;
  }
-  
+
  if(kwargs.count("sizes"))
  {
    set_test_sizes(kwargs["sizes"]);
@@ -510,14 +510,14 @@ int main(int argc, char **argv)
  {
    set_test_sizes("default");
  }
-  
+
  bool passed = UnitTestDriver::s_driver().run_tests(args, kwargs);
-  
+
  if(kwargs.count("concise"))
  {
    std::cout << ((passed) ? "PASSED" : "FAILED") << std::endl;
  }
-  
+
  return (passed) ? EXIT_SUCCESS : EXIT_FAILURE;
 }
diff --git a/testing/unittest/testframework.h b/testing/unittest/testframework.h
index 1c6dde949..117908dd9 100644
--- a/testing/unittest/testframework.h
+++ b/testing/unittest/testframework.h
@@ -332,7 +332,7 @@ class UnitTestDriver
   // \param test The UnitTest of interest
   // \param concise Whether or not to suppress output
   // \return true if all is well; false if the tests must be immediately aborted
-  virtual bool post_test_sanity_check(const UnitTest &test, bool concise);
+  virtual bool post_test_confidence_check(const UnitTest &test, bool concise);

 public:
   inline virtual ~UnitTestDriver() {};
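
Note for reviewers unfamiliar with the hook being renamed above: post_test_confidence_check
is the virtual callback that UnitTestDriver::run_tests invokes after every unit test, and
returning false aborts the remaining tests. The sketch below is illustrative only and is not
part of this patch; MyTestDriver and the include path are assumptions. It mirrors the pattern
CUDATestDriver uses in this change (synchronize the device, report any pending CUDA error,
abort the run on failure).

    // Hypothetical example, not part of this patch.
    #include <unittest/testframework.h> // assumed path to the framework header patched above
    #include <cuda_runtime.h>
    #include <iostream>
    #include <string>

    class MyTestDriver : public UnitTestDriver
    {
      // Called by UnitTestDriver::run_tests after each test; returning false
      // stops the run immediately (see the run_tests hunk above).
      virtual bool post_test_confidence_check(const UnitTest &test, bool concise)
      {
        cudaError_t const error = cudaDeviceSynchronize();
        if(cudaSuccess != error && !concise)
        {
          std::cout << "[ERROR] CUDA error detected after running " << test.name << ": ["
                    << std::string(cudaGetErrorString(error)) << "]" << std::endl;
        }
        return cudaSuccess == error;
      }
    };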