diff --git a/hepcrawl/testlib/fixtures.py b/hepcrawl/testlib/fixtures.py
index 73f28f96..c227d109 100644
--- a/hepcrawl/testlib/fixtures.py
+++ b/hepcrawl/testlib/fixtures.py
@@ -133,13 +133,13 @@ def expected_json_results_from_file(*path_chunks, **kwargs):
 
     return expected_data
 
-
-def clean_dir(path):
+def clean_dir(path=os.path.join(os.getcwd(), '.scrapy')):
     """ Deletes all contained files of given target directory path.
 
     Args:
-        path: Absolute path of target directory to be cleaned.
+        path(str): Path of the directory to be cleaned. Defaults to the per-spider
+            '.scrapy' directory that stores the requested URLs.
 
     Example:
diff --git a/tests/functional/arxiv/test_arxiv.py b/tests/functional/arxiv/test_arxiv.py
index ce01eecb..22025020 100644
--- a/tests/functional/arxiv/test_arxiv.py
+++ b/tests/functional/arxiv/test_arxiv.py
@@ -11,7 +11,6 @@
 
 from __future__ import absolute_import, division, print_function
 
-import os
 from time import sleep
 
 import pytest
@@ -53,7 +52,7 @@ def set_up_local_environment():
         }
     }
 
-    clean_dir(path=os.path.join(os.getcwd(), '.scrapy'))
+    clean_dir()
 
 
 @pytest.mark.parametrize(
diff --git a/tests/functional/wsp/test_wsp.py b/tests/functional/wsp/test_wsp.py
index d034e7aa..27f36955 100644
--- a/tests/functional/wsp/test_wsp.py
+++ b/tests/functional/wsp/test_wsp.py
@@ -44,7 +44,7 @@ def set_up_ftp_environment():
     )
 
     # The test must wait until the docker environment is up (takes about 10 seconds).
-    sleep(7)
+    sleep(10)
 
     yield {
         'CRAWLER_HOST_URL': 'http://scrapyd:6800',
@@ -55,7 +55,7 @@ def set_up_ftp_environment():
         }
     }
 
-    clean_dir(path='/tmp/WSP/')
+    clean_dir()
     clean_dir(path=os.path.join(os.getcwd(), '.scrapy'))
 
 
@@ -81,7 +81,7 @@ def set_up_local_environment():
 
 
 def remove_generated_files(package_location):
-    clean_dir(path='/tmp/WSP/')
+    clean_dir()
     clean_dir(path=os.path.join(os.getcwd(), '.scrapy'))
 
     _, dirs, files = next(os.walk(package_location))