From adf4d7b822a4d3d5a6ad9a6a32bfe3a7c19f334e Mon Sep 17 00:00:00 2001
From: Spiros Delviniotis
Date: Fri, 18 Aug 2017 14:38:29 +0200
Subject: [PATCH] testlib: refactored `clean_dir` default arguments

* Adds: new default argument to the `clean_dir` method: the DB folder
  generated by the `scrapy-crawl-once` plugin.

Addresses #161

Signed-off-by: Spiros Delviniotis
---
 hepcrawl/testlib/fixtures.py         |  7 ++++++-
 tests/functional/arxiv/test_arxiv.py |  3 +--
 tests/functional/wsp/test_wsp.py     | 17 ++---------------
 3 files changed, 9 insertions(+), 18 deletions(-)

diff --git a/hepcrawl/testlib/fixtures.py b/hepcrawl/testlib/fixtures.py
index b8e9992b..a5da3d56 100644
--- a/hepcrawl/testlib/fixtures.py
+++ b/hepcrawl/testlib/fixtures.py
@@ -134,5 +134,10 @@ def expected_json_results_from_file(*path_chunks, **kwargs):
     return expected_data


-def clean_dir(path='/tmp/WSP/'):
+def clean_dir(path=os.path.join(os.getcwd(), '.scrapy')):
+    """
+    Args:
+        path(str): path of the directory to be deleted. The default path is the
+            per-spider DB folder, produced by `scrapy-crawl-once`, that stores the requested URLs.
+    """
     shutil.rmtree(path, ignore_errors=True)
diff --git a/tests/functional/arxiv/test_arxiv.py b/tests/functional/arxiv/test_arxiv.py
index 76e8d705..ba1d7bf3 100644
--- a/tests/functional/arxiv/test_arxiv.py
+++ b/tests/functional/arxiv/test_arxiv.py
@@ -11,7 +11,6 @@

 from __future__ import absolute_import, division, print_function

-import os
 from time import sleep

 import pytest
@@ -53,7 +52,7 @@ def set_up_local_environment():
         }
     }

-    clean_dir(path=os.path.join(os.getcwd(), '.scrapy'))
+    clean_dir()


 @pytest.mark.parametrize(
diff --git a/tests/functional/wsp/test_wsp.py b/tests/functional/wsp/test_wsp.py
index ba88b9f8..a5e6655c 100644
--- a/tests/functional/wsp/test_wsp.py
+++ b/tests/functional/wsp/test_wsp.py
@@ -12,7 +12,6 @@
 from __future__ import absolute_import, division, print_function

 import pytest
-import os

 from time import sleep

@@ -44,7 +43,7 @@ def set_up_ftp_environment():
     )

     # The test must wait until the docker environment is up (takes about 10 seconds).
-    sleep(7)
+    sleep(10)

     yield {
         'CRAWLER_HOST_URL': 'http://scrapyd:6800',
@@ -55,7 +54,7 @@ def set_up_ftp_environment():
         }
     }

-    clean_dir(path=os.path.join(os.getcwd(), '.scrapy'))
+    clean_dir()


 @pytest.fixture(scope="function")
@@ -76,19 +75,7 @@ def set_up_local_environment():
         }
     }

-    remove_generated_files(package_location)
-
-
-def remove_generated_files(package_location):
     clean_dir()
-    clean_dir(path=os.path.join(os.getcwd(), '.scrapy'))
-
-    _, dirs, files = next(os.walk(package_location))
-    for dir_name in dirs:
-        clean_dir(os.path.join(package_location, dir_name))
-    for file_name in files:
-        if not file_name.endswith('.zip'):
-            os.unlink(os.path.join(package_location, file_name))


 @pytest.mark.parametrize(
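
Note: a minimal usage sketch of the refactored helper, not part of the commit
itself. After this patch `clean_dir()` defaults to the `.scrapy` folder under
the current working directory, which is where the `scrapy-crawl-once` plugin
keeps its per-spider request DB; the test module shown is illustrative:

    from hepcrawl.testlib.fixtures import clean_dir

    # Default: recursively removes <cwd>/.scrapy, the crawl-once DB folder.
    # Since clean_dir calls shutil.rmtree with ignore_errors=True, a missing
    # folder (e.g. on a first test run) is not an error.
    clean_dir()

    # The previous hard-coded default is still reachable by passing the path
    # explicitly, as the old signature did:
    clean_dir(path='/tmp/WSP/')

Because the helper ignores errors, the functional-test fixtures above can call
`clean_dir()` unconditionally in their teardown step.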