Skip to content

Commit

Permalink
global: add testlib module for reusability
Browse files Browse the repository at this point in the history
* Adds celery_monitor module to testlib.
* Adds tasks module to testlib.
* Adds custom scrapyd runner for coverage.

Closes inspirehep#100

Signed-off-by: Spiros Delviniotis <[email protected]>
  • Loading branch information
spirosdelviniotis committed May 3, 2017
1 parent 6d06078 commit f4518f8
Show file tree
Hide file tree
Showing 10 changed files with 105 additions and 97 deletions.
4 changes: 2 additions & 2 deletions docker-compose.test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ services:
- APP_BROKER_URL=amqp://guest:guest@rabbitmq:5672//
- APP_CELERY_RESULT_BACKEND=amqp://guest:guest@rabbitmq:5672//
- APP_CRAWLER_HOST_URL=http://scrapyd:6800
- APP_API_PIPELINE_TASK_ENDPOINT_DEFAULT=tests.functional.tasks.submit_results
- APP_API_PIPELINE_TASK_ENDPOINT_DEFAULT=hepcrawl.testlib.tasks.submit_results
- COVERAGE_PROCESS_START=/code/.coveragerc
command: py.test -vv tests/functional/WSP/test_wsp.py
volumes: &static_volume
Expand Down Expand Up @@ -44,7 +44,7 @@ services:
celery:
image: hepcrawl_base
environment: *env_variables
command: celery worker --events --app tests.functional.tasks --loglevel=debug
command: celery worker --events --app hepcrawl.testlib.tasks --loglevel=debug
volumes: *static_volume
links:
- rabbitmq
Expand Down
4 changes: 3 additions & 1 deletion tests/__init__.py → hepcrawl/testlib/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
# -*- coding: utf-8 -*-
#
# This file is part of hepcrawl.
# Copyright (C) 2015, 2016, 2017 CERN.
# Copyright (C) 2017 CERN.
#
# hepcrawl is a free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.

from __future__ import absolute_import, print_function, unicode_literals
84 changes: 84 additions & 0 deletions hepcrawl/testlib/celery_monitor.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
# -*- coding: utf-8 -*-
#
# This file is part of hepcrawl.
# Copyright (C) 2017 CERN.
#
# hepcrawl is a free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.

"""Celery monitor dealing with celery tasks for functional tests."""

from __future__ import absolute_import, print_function, unicode_literals

from itertools import islice


class CeleryMonitor(object):
    """Collect results of celery tasks fired during a scrapyd crawl.

    Used as a context manager: on ``__enter__`` it registers event handlers
    for ``task-succeeded`` / ``task-failed`` on the given celery app; on
    ``__exit__`` it drains the event stream until a terminal event arrives
    (or the iteration limit is hit) and stores the outcome in ``results``.
    """

    def __init__(self, app, monitor_timeout=3, monitor_iter_limit=100):
        """Create a monitor bound to a celery app.

        Args:
            app: the celery application whose events are observed.
            monitor_timeout: per-capture timeout (seconds) while draining
                events in ``__exit__``.
            monitor_iter_limit: maximum number of events consumed before
                giving up waiting for a terminal task event.
        """
        self.results = []
        self.recv = None
        self.app = app
        self.connection = None
        self.monitor_timeout = monitor_timeout
        self.monitor_iter_limit = monitor_iter_limit

    def __enter__(self):
        state = self.app.events.State()

        def announce_succeeded_tasks(event):
            # Update the in-memory state so we can resolve the task object.
            state.event(event)
            task = state.tasks.get(event['uuid'])
            print('TASK SUCCEEDED: %s[%s] %s' % (task.name, task.uuid, task.info(),))
            tasks = self.app.AsyncResult(task.id)
            for task in tasks.result:
                self.results.append(task)
            # Stop the receiver loop: we have the terminal event we wanted.
            self.recv.should_stop = True

        def announce_failed_tasks(event):
            state.event(event)
            task = state.tasks.get(event['uuid'])
            print('TASK FAILED: %s[%s] %s' % (task.name, task.uuid, task.info(),))
            # On failure, record the task metadata instead of task results.
            self.results.append(task.info())
            self.recv.should_stop = True

        self.app.control.enable_events()
        self.connection = self.app.connection()
        self.recv = self.app.events.Receiver(self.connection, handlers={
            'task-succeeded': announce_succeeded_tasks,
            'task-failed': announce_failed_tasks,
        })

        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        events_iter = self.recv.itercapture(
            limit=None,
            timeout=self.monitor_timeout,
            wakeup=True,
        )
        self._wait_for_results(events_iter)
        # kombu Connection supports direct __exit__ to release the broker
        # connection; NOTE(review): self.connection.close() would be clearer.
        self.connection.__exit__()

    def _wait_for_results(self, events_iter):
        # Consume at most monitor_iter_limit events; any() short-circuits on
        # the first truthy capture result, so we stop as soon as possible.
        any(islice(events_iter, self.monitor_iter_limit))

    @classmethod
    def do_crawl(cls,
                 app,
                 monitor_timeout,
                 monitor_iter_limit,
                 crawler_instance,
                 project='hepcrawl',
                 spider='WSP',
                 settings=None,
                 **crawler_arguments):
        """Schedule a crawl and block until its celery results are collected.

        Args:
            app: celery application used to monitor task events.
            monitor_timeout: see ``__init__``.
            monitor_iter_limit: see ``__init__``.
            crawler_instance: scrapyd API client used to schedule the spider.
            project: scrapyd project name.
            spider: spider name to schedule.
            settings: optional scrapy settings dict (defaults to empty).
            **crawler_arguments: extra keyword arguments for the spider.

        Returns:
            list: the results gathered by the monitor.
        """
        if settings is None:
            settings = {}

        with cls(app, monitor_timeout=monitor_timeout, monitor_iter_limit=monitor_iter_limit) as my_monitor:
            crawler_instance.schedule(
                project=project,
                spider=spider,
                # Fixed: was `settings or {}` — redundant after the None
                # guard above, and it silently replaced falsy settings.
                settings=settings,
                **crawler_arguments
            )

        return my_monitor.results
File renamed without changes.
7 changes: 3 additions & 4 deletions tests/functional/tasks.py → hepcrawl/testlib/tasks.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
#
# This file is part of hepcrawl.
# Copyright (C) 2015, 2016, 2017 CERN.
# Copyright (C) 2017 CERN.
#
# hepcrawl is a free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
Expand All @@ -11,12 +11,11 @@

from __future__ import absolute_import, print_function, unicode_literals

import json

from six.moves.urllib.parse import urlparse

from celery import Celery

import json


class Config(object):
CELERY_RESULT_BACKEND = "amqp://guest:guest@rabbitmq:5672//"
Expand Down
8 changes: 0 additions & 8 deletions tests/functional/WSP/__init__.py

This file was deleted.

74 changes: 2 additions & 72 deletions tests/functional/WSP/test_wsp.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,81 +15,11 @@
import json
import os

from itertools import islice
from scrapyd_api import ScrapydAPI
from time import sleep

from tests.functional.tasks import app


class CeleryMonitor(object):
    """Collect results of celery tasks fired during a scrapyd crawl.

    Context manager: ``__enter__`` hooks ``task-succeeded`` /
    ``task-failed`` events on the bound celery app; ``__exit__`` drains the
    event stream until a terminal event (or the iteration limit) and stores
    the outcome in ``results``.
    """

    def __init__(self, app, monitor_timeout=3, monitor_iter_limit=100):
        """Bind the monitor to a celery app with drain limits.

        Args:
            app: the celery application whose events are observed.
            monitor_timeout: per-capture timeout (seconds) in ``__exit__``.
            monitor_iter_limit: maximum events consumed before giving up.
        """
        self.results = []
        self.recv = None
        self.app = app
        self.connection = None
        self.monitor_timeout = monitor_timeout
        self.monitor_iter_limit = monitor_iter_limit

    def __enter__(self):
        state = self.app.events.State()

        def announce_succeeded_tasks(event):
            state.event(event)
            task = state.tasks.get(event['uuid'])
            print('TASK SUCCEEDED: %s[%s] %s' % (task.name, task.uuid, task.info(),))
            # Fixed: previously referenced the module-global `app`, which
            # broke the class when constructed with a different app.
            tasks = self.app.AsyncResult(task.id)
            for task in tasks.result:
                self.results.append(task)
            self.recv.should_stop = True

        def announce_failed_tasks(event):
            state.event(event)
            task = state.tasks.get(event['uuid'])
            print('TASK FAILED: %s[%s] %s' % (task.name, task.uuid, task.info(),))
            # On failure, record the task metadata instead of task results.
            self.results.append(task.info())
            self.recv.should_stop = True

        self.app.control.enable_events()
        self.connection = self.app.connection()
        self.recv = self.app.events.Receiver(self.connection, handlers={
            'task-succeeded': announce_succeeded_tasks,
            'task-failed': announce_failed_tasks,
        })

        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        events_iter = self.recv.itercapture(
            limit=None,
            timeout=self.monitor_timeout,
            wakeup=True,
        )
        self._wait_for_results(events_iter)
        # kombu Connection supports direct __exit__ to release the broker
        # connection; NOTE(review): self.connection.close() would be clearer.
        self.connection.__exit__()

    def _wait_for_results(self, events_iter):
        # Consume at most monitor_iter_limit events; any() short-circuits
        # on the first truthy capture result.
        any(islice(events_iter, self.monitor_iter_limit))

    @classmethod
    def do_crawl(cls,
                 app,
                 monitor_timeout,
                 monitor_iter_limit,
                 crawler_instance,
                 project='hepcrawl',
                 spider='WSP',
                 settings=None,
                 **crawler_arguments):
        """Schedule a crawl and block until its celery results are collected.

        Returns:
            list: the results gathered by the monitor.
        """
        if settings is None:
            settings = {}

        with cls(app, monitor_timeout=monitor_timeout, monitor_iter_limit=monitor_iter_limit) as my_monitor:
            crawler_instance.schedule(
                project=project,
                spider=spider,
                # Fixed: was `settings or {}` — redundant after the None
                # guard above, and it silently replaced falsy settings.
                settings=settings,
                **crawler_arguments
            )

        return my_monitor.results
from hepcrawl.testlib.tasks import app
from hepcrawl.testlib.celery_monitor import CeleryMonitor


def get_crawler_instance(crawler_host, *args, **kwargs):
Expand Down
8 changes: 0 additions & 8 deletions tests/functional/__init__.py

This file was deleted.

2 changes: 1 addition & 1 deletion tests/functional/scrapyd_coverage_runner.conf
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,4 @@


[scrapyd]
runner = tests.functional.scrapyd_coverage_runner
runner = hepcrawl.testlib.scrapyd_coverage_runner
11 changes: 10 additions & 1 deletion tests/unit/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1,10 @@

# -*- coding: utf-8 -*-
#
# This file is part of hepcrawl.
# Copyright (C) 2017 CERN.
#
# hepcrawl is a free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.

from __future__ import absolute_import, print_function, unicode_literals

0 comments on commit f4518f8

Please sign in to comment.