Skip to content

Commit

Permalink
tests: add clean_dir to tests that use the DB
Browse files Browse the repository at this point in the history
Addresses inspirehep#161

Signed-off-by: Spiros Delviniotis <[email protected]>
  • Loading branch information
spirosdelviniotis committed Aug 21, 2017
1 parent 0c0a66b commit 3bdf93c
Show file tree
Hide file tree
Showing 7 changed files with 39 additions and 13 deletions.
6 changes: 4 additions & 2 deletions tests/unit/test_alpha.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,10 @@

from hepcrawl.spiders import alpha_spider

from hepcrawl.testlib.fixtures import fake_response_from_file

from hepcrawl.testlib.fixtures import (
fake_response_from_file,
clean_dir,
)

@pytest.fixture
def results():
Expand Down
5 changes: 4 additions & 1 deletion tests/unit/test_aps.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,10 @@
import pytest

from hepcrawl.spiders import aps_spider
from hepcrawl.testlib.fixtures import fake_response_from_file
from hepcrawl.testlib.fixtures import (
fake_response_from_file,
clean_dir,
)


@pytest.fixture
Expand Down
4 changes: 3 additions & 1 deletion tests/unit/test_arxiv_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,9 @@ def many_results(spider):
assert records
pipeline = InspireCeleryPushPipeline()
pipeline.open_spider(spider)
return [pipeline.process_item(record, spider) for record in records]
yield [pipeline.process_item(record, spider) for record in records]

clean_dir()


def test_page_nr(many_results):
Expand Down
8 changes: 6 additions & 2 deletions tests/unit/test_arxiv_single.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,10 @@

from hepcrawl.pipelines import InspireCeleryPushPipeline
from hepcrawl.spiders import arxiv_spider
from hepcrawl.testlib.fixtures import fake_response_from_file
from hepcrawl.testlib.fixtures import (
fake_response_from_file,
clean_dir,
)


@pytest.fixture
Expand Down Expand Up @@ -45,8 +48,9 @@ def results():
validate(processed_record, 'hep')
processed_records.append(processed_record)

return processed_records
yield processed_records

clean_dir()


def test_abstracts(results):
Expand Down
9 changes: 7 additions & 2 deletions tests/unit/test_pipelines.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,10 @@
from hepcrawl.spiders import arxiv_spider
from hepcrawl.pipelines import InspireAPIPushPipeline

from hepcrawl.testlib.fixtures import fake_response_from_file
from hepcrawl.testlib.fixtures import (
fake_response_from_file,
clean_dir,
)


@pytest.fixture
Expand All @@ -44,7 +47,9 @@ def json_spider_record(tmpdir):
)
parsed_record = items.next()
assert parsed_record
return spider, parsed_record
yield spider, parsed_record

clean_dir()


@pytest.fixture
Expand Down
9 changes: 7 additions & 2 deletions tests/unit/test_pos.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,10 @@
from hepcrawl.pipelines import InspireCeleryPushPipeline
from hepcrawl.spiders import pos_spider

from hepcrawl.testlib.fixtures import fake_response_from_file
from hepcrawl.testlib.fixtures import (
fake_response_from_file,
clean_dir,
)


@pytest.fixture
Expand Down Expand Up @@ -52,7 +55,9 @@ def record(scrape_pos_page_body):
pipeline = InspireCeleryPushPipeline()
pipeline.open_spider(spider)
record = request.callback(response)
return pipeline.process_item(record, spider)
yield pipeline.process_item(record, spider)

clean_dir()


def test_titles(record):
Expand Down
11 changes: 8 additions & 3 deletions tests/unit/test_world_scientific.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,10 @@
from hepcrawl.pipelines import InspireCeleryPushPipeline
from hepcrawl.spiders import wsp_spider

from hepcrawl.testlib.fixtures import fake_response_from_file
from hepcrawl.testlib.fixtures import (
fake_response_from_file,
clean_dir,
)


def create_spider():
Expand All @@ -44,11 +47,13 @@ def get_records(response_file_name):
pipeline = InspireCeleryPushPipeline()
pipeline.open_spider(spider)

return (pipeline.process_item(record, spider) for record in records)
yield (pipeline.process_item(record, spider) for record in records)

clean_dir()


def get_one_record(response_file_name):
results = get_records(response_file_name)
results = get_records(response_file_name).next()
return results.next()


Expand Down

0 comments on commit 3bdf93c

Please sign in to comment.