"
+ logger.info("Citations created")
# This is the unicode replacement character. It shows up sometimes in
# our imports; we should just remove it.
- citations = re.sub(u'\ufffd', ' ', citations)
+ citations = re.sub("\ufffd", " ", citations)
return citations
@classmethod
@@ -97,35 +89,40 @@ def create_original_text(cls, record_list):
that leaves zero records. Will also verify that all records have the
same author and raise a ValidationError if not."""
- logger.info('Creating original text of email for {list}'.format(
- list=record_list))
+ logger.info("Creating original text of email for {list}".format(list=record_list))
if not record_list:
- logger.warning('Could not create email text - no record_list')
- raise ValidationError('No records provided.')
+ logger.warning("Could not create email text - no record_list")
+ raise ValidationError("No records provided.")
available_records = record_list.filter(email__isnull=True)
if not available_records:
- logger.warning('Could not create email text - all records already '
- 'have emails')
- raise ValidationError('All records already have emails.')
+ logger.warning(
+ "Could not create email text - all records already " "have emails"
+ )
+ raise ValidationError("All records already have emails.")
try:
- authors = record_list.values_list('author')
+ authors = record_list.values_list("author")
# list(set()) removes duplicates.
assert len(list(set(authors))) == 1
except AssertionError:
- logger.exception('Could not create email text - multiple authors '
- 'in record set')
- raise ValidationError('All records must have the same author.')
+ logger.exception(
+ "Could not create email text - multiple authors " "in record set"
+ )
+ raise ValidationError("All records must have the same author.")
author = record_list.first().author
citations = cls._create_citations(available_records)
- logger.info('Returning original text of email')
- return render_to_string('emails/author_email_template.html',
- context={'author': author,
- 'liaison': author.dlc.liaison,
- 'citations': citations})
+ logger.info("Returning original text of email")
+ return render_to_string(
+ "emails/author_email_template.html",
+ context={
+ "author": author,
+ "liaison": author.dlc.liaison,
+ "citations": citations,
+ },
+ )
@staticmethod
def _filter_records(records):
@@ -133,7 +130,7 @@ def _filter_records(records):
records = records.filter(email__date_sent__isnull=True)
if records.count() == 0:
- logger.info('No records - not creating email')
+ logger.info("No records - not creating email")
return None
return records
@@ -144,14 +141,13 @@ def _finalize_email(cls, email, records, author):
email = email[0]
email.rebuild_citations()
else:
- email = cls(original_text=cls.create_original_text(records),
- author=author)
+ email = cls(original_text=cls.create_original_text(records), author=author)
email.save()
# Make sure to create the ForeignKey relation from those records to
# the email! Otherwise this method will only ever create new emails
# rather than finding existing ones.
- logger.info('Creating foreign key relationship to email for records')
+ logger.info("Creating foreign key relationship to email for records")
records.update(email=email)
return email
@@ -162,22 +158,20 @@ def _get_author_from_records(records):
# requires Author. Don't call this with an empty record set!
assert records
if count > 1:
- logger.warning('Records have different authors - not creating '
- 'email')
- raise ValidationError('Records do not all have the same author.')
+ logger.warning("Records have different authors - not creating " "email")
+ raise ValidationError("Records do not all have the same author.")
else:
return Author.objects.filter(record__in=records)[0]
@classmethod
def _get_email_for_author(cls, author):
- emails = cls.objects.filter(
- author=author, date_sent__isnull=True).distinct()
+ emails = cls.objects.filter(author=author, date_sent__isnull=True).distinct()
try:
assert len(emails) in [0, 1]
except AssertionError:
- logger.exception('Multiple unsent emails found for %s' % author)
- raise ValidationError('Multiple unsent emails found.')
+ logger.exception("Multiple unsent emails found for %s" % author)
+ raise ValidationError("Multiple unsent emails found.")
return emails if emails else None
@@ -187,8 +181,7 @@ def get_or_create_for_records(cls, records):
their author (there should not be more than one of these at a time).
Records must all be by the same author."""
- logger.info('Creating unsent email for {records}'.format(
- records=records))
+ logger.info("Creating unsent email for {records}".format(records=records))
records = cls._filter_records(records)
if not records:
@@ -201,32 +194,33 @@ def get_or_create_for_records(cls, records):
email = cls._get_email_for_author(author)
email = cls._finalize_email(email, records, author)
- logger.info('Retuning email')
+ logger.info("Retuning email")
return email
def _is_valid_for_sending(self):
- logger.info('Checking if email {pk} is valid for sending'.format(
- pk=self.pk))
+ logger.info("Checking if email {pk} is valid for sending".format(pk=self.pk))
try:
assert not self.date_sent
except AssertionError:
- logger.exception('Attempt to send invalid email')
+ logger.exception("Attempt to send invalid email")
return False
try:
# Can't send the email if there isn't a liaison.
assert self.liaison
except AssertionError:
- logger.exception('Attempt to send email {pk}, which is missing a '
- 'liaison'.format(pk=self.pk))
+ logger.exception(
+ "Attempt to send email {pk}, which is missing a "
+ "liaison".format(pk=self.pk)
+ )
return False
- logger.info('Email {pk} is valid for sending'.format(pk=self.pk))
+ logger.info("Email {pk} is valid for sending".format(pk=self.pk))
return True
def _update_after_sending(self):
"""Set the metadata that should be set after sending an email."""
- logger.info('Updating date_sent for email {pk}'.format(pk=self.pk))
+ logger.info("Updating date_sent for email {pk}".format(pk=self.pk))
self.date_sent = date.today()
self._liaison = self.liaison
self.save()
@@ -235,7 +229,7 @@ def _inner_send(self):
"""Actually perform the sending of an EmailMessage. Return True on
success, False otherwise."""
- logger.info('Sending email {pk}'.format(pk=self.pk))
+ logger.info("Sending email {pk}".format(pk=self.pk))
try:
if settings.EMAIL_TESTING_MODE:
@@ -255,13 +249,13 @@ def _inner_send(self):
)
except SMTPException:
- logger.exception('Could not send email; SMTP exception')
+ logger.exception("Could not send email; SMTP exception")
return False
except Exception:
- logger.exception('Could not send email; unanticipated exception')
+ logger.exception("Could not send email; unanticipated exception")
return False
- logger.info('Done sending email')
+ logger.info("Done sending email")
return True
def send(self, username):
@@ -271,7 +265,7 @@ def send(self, username):
otherwise.
"""
- logger.info('Entering email.send() for {pk}'.format(pk=self.pk))
+ logger.info("Entering email.send() for {pk}".format(pk=self.pk))
# First, validate.
if not self._is_valid_for_sending():
return False
@@ -281,13 +275,11 @@ def send(self, username):
return False
self._update_after_sending()
- logger.info('Sending email_sent signal')
+ logger.info("Sending email_sent signal")
- email_sent.send(sender=self.__class__,
- instance=self,
- username=username)
+ email_sent.send(sender=self.__class__, instance=self, username=username)
- logger.info('Email {pk} sent'.format(pk=self.pk))
+ logger.info("Email {pk} sent".format(pk=self.pk))
return True
def rebuild_citations(self):
@@ -299,19 +291,18 @@ def rebuild_citations(self):
# will include the current email record_set and also unsent emails.
# Use this rather than filtering Record directly to avoid circular
# imports.
- records = self.author.record_set.exclude(
- email__date_sent__isnull=False)
+ records = self.author.record_set.exclude(email__date_sent__isnull=False)
if records.count() == self.record_set.count():
# Nothing to change here.
return False
- soup = BeautifulSoup(self.latest_text, 'html.parser')
+ soup = BeautifulSoup(self.latest_text, "html.parser")
citations = EmailMessage._create_citations(records)
- new_soup = BeautifulSoup(citations, 'html.parser')
+ new_soup = BeautifulSoup(citations, "html.parser")
- cite_block = soup.find('div', class_='control-citations')
+ cite_block = soup.find("div", class_="control-citations")
cite_block.clear()
cite_block.insert(1, new_soup)
self.latest_text = soup.prettify()
@@ -326,7 +317,7 @@ def revert(self):
Right now we implement this by setting the latest text to the original,
but we explicitly don't guarantee any particular implementation."""
- logger.info('Reverting changes for {pk}'.format(pk=self.pk))
+ logger.info("Reverting changes for {pk}".format(pk=self.pk))
self.latest_text = self.original_text
self.save()
@@ -370,9 +361,9 @@ def subject(self):
# and we *should* notice if this problem happens - but we should log
# it.
try:
- return 'OA outreach message to forward: {author}'.format(
- author=self.author.last_name)
+ return "OA outreach message to forward: {author}".format(
+ author=self.author.last_name
+ )
except AttributeError:
- logger.exception('Could not find author for email #{pk}'.format(
- pk=self.pk))
+ logger.exception("Could not find author for email #{pk}".format(pk=self.pk))
raise
diff --git a/solenoid/emails/tests.py b/solenoid/emails/tests.py
index 774f01a1..7c783057 100644
--- a/solenoid/emails/tests.py
+++ b/solenoid/emails/tests.py
@@ -14,31 +14,33 @@
from .views import _get_or_create_emails, EmailSend
-@override_settings(LOGIN_REQUIRED=False,
- USE_ELEMENTS=False,
- # We need to set this or there won't be email recipients in
- # EMAIL_TESTING_MODE, so emails won't send.
- ADMINS=[('Admin', 'admin@fake.com')])
+@override_settings(
+ LOGIN_REQUIRED=False,
+ USE_ELEMENTS=False,
+ # We need to set this or there won't be email recipients in
+ # EMAIL_TESTING_MODE, so emails won't send.
+ ADMINS=[("Admin", "admin@fake.com")],
+)
class EmailCreatorTestCase(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
- @patch('solenoid.emails.views._get_or_create_emails')
+ @patch("solenoid.emails.views._get_or_create_emails")
def test_posting_to_create_view_calls_creator(self, mock_create):
EmailMessage.objects.get(pk=2).delete()
mock_create.return_value = [1]
c = Client()
- c.post(reverse('emails:create'), {'records': ['1']})
- mock_create.assert_called_once_with(['1'])
+ c.post(reverse("emails:create"), {"records": ["1"]})
+ mock_create.assert_called_once_with(["1"])
mock_create.reset_mock()
- c.post(reverse('emails:create'), {'records': ['1', '2']})
- mock_create.assert_called_once_with(['1', '2'])
+ c.post(reverse("emails:create"), {"records": ["1", "2"]})
+ mock_create.assert_called_once_with(["1", "2"])
def test_posting_to_create_view_returns_email_eval(self):
EmailMessage.objects.get(pk=2).delete()
c = Client()
- response = c.post(reverse('emails:create'), {'records': ['1']})
- self.assertRedirects(response, reverse('emails:evaluate', args=(1,)))
+ response = c.post(reverse("emails:create"), {"records": ["1"]})
+ self.assertRedirects(response, reverse("emails:evaluate", args=(1,)))
def test_email_recipient(self):
"""The email created by _get_or_create_emails must be to: the relevant
@@ -69,16 +71,18 @@ def test_get_or_create_emails_returns_correctly(self):
self.assertEqual(len(email_pks), 3)
-@override_settings(LOGIN_REQUIRED=False,
- USE_ELEMENTS=False,
- # We need to set this or there won't be email recipients in
- # EMAIL_TESTING_MODE, so emails won't send.
- ADMINS=[('Admin', 'admin@fake.com')])
+@override_settings(
+ LOGIN_REQUIRED=False,
+ USE_ELEMENTS=False,
+ # We need to set this or there won't be email recipients in
+ # EMAIL_TESTING_MODE, so emails won't send.
+ ADMINS=[("Admin", "admin@fake.com")],
+)
class EmailEvaluateTestCase(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
def setUp(self):
- self.url = reverse('emails:evaluate', args=(1,))
+ self.url = reverse("emails:evaluate", args=(1,))
self.client = Client()
def tearDown(self):
@@ -90,21 +94,20 @@ def test_latest_version_displays_on_unsent_page_if_not_blank(self):
def test_liaison_email_address_displays(self):
response = self.client.get(self.url)
- self.assertContains(response, 'krug@example.com')
+ self.assertContains(response, "krug@example.com")
def test_users_can_save_changes_to_emails(self):
session = self.client.session
- session['email_pks'] = [2, 3]
- session['total_email'] = 3
- session['current_email'] = 1
+ session["email_pks"] = [2, 3]
+ session["total_email"] = 3
+ session["current_email"] = 1
session.save()
- new_text = 'This is what we change the email to'
+ new_text = "This is what we change the email to"
- self.client.post(self.url, {
- 'submit_save': 'save & next',
- 'latest_text': new_text
- })
+ self.client.post(
+ self.url, {"submit_save": "save & next", "latest_text": new_text}
+ )
self.assertEqual(new_text, EmailMessage.objects.get(pk=1).latest_text)
@@ -119,7 +122,7 @@ def test_only_unsent_emails_are_editable_1(self):
self.assertContains(response, "Most recent text of email 1")
self.assertNotContains(response, "")
- @patch('solenoid.emails.models.EmailMessage.send')
+ @patch("solenoid.emails.models.EmailMessage.send")
def test_only_unsent_emails_are_editable_2(self, mock_send):
"""On post, the email evaluate page does not re-send emails that have
already been sent."""
@@ -127,7 +130,7 @@ def test_only_unsent_emails_are_editable_2(self, mock_send):
sent_email.date_sent = date.today()
sent_email.save()
- self.client.post(self.url, {'submit_send': 'send & next'})
+ self.client.post(self.url, {"submit_send": "send & next"})
assert not mock_send.called
def test_template_renders_form_media(self):
@@ -138,7 +141,7 @@ def test_template_renders_form_media(self):
the expected output of form.media, which would be unlikely to get there
any other way."""
response = self.client.get(self.url)
- self.assertContains(response, 'ckeditor/ckeditor.js')
+ self.assertContains(response, "ckeditor/ckeditor.js")
def test_email_evaluate_workflow_1(self):
"""
@@ -155,27 +158,24 @@ def test_email_evaluate_workflow_1(self):
# persistent-state
# for info on how to use sessions in testing.
session = self.client.session
- session['email_pks'] = [2, 3]
- session['total_email'] = 3
- session['current_email'] = 1
+ session["email_pks"] = [2, 3]
+ session["total_email"] = 3
+ session["current_email"] = 1
session.save()
- current_url = reverse('emails:evaluate', args=(1,))
+ current_url = reverse("emails:evaluate", args=(1,))
self.client.get(current_url)
- response = self.client.post(current_url,
- data={'submit_cancel': 'submit_cancel'})
- expected_url = reverse('emails:evaluate', args=(2,))
+ response = self.client.post(current_url, data={"submit_cancel": "submit_cancel"})
+ expected_url = reverse("emails:evaluate", args=(2,))
self.assertRedirects(response, expected_url)
- response = self.client.post(expected_url,
- data={'submit_cancel': 'submit_cancel'})
- expected_url = reverse('emails:evaluate', args=(3,))
+ response = self.client.post(expected_url, data={"submit_cancel": "submit_cancel"})
+ expected_url = reverse("emails:evaluate", args=(3,))
self.assertRedirects(response, expected_url)
- response = self.client.post(expected_url,
- data={'submit_cancel': 'submit_cancel'})
- expected_url = reverse('home')
+ response = self.client.post(expected_url, data={"submit_cancel": "submit_cancel"})
+ expected_url = reverse("home")
self.assertRedirects(response, expected_url)
def test_email_evaluate_workflow_2(self):
@@ -190,37 +190,34 @@ def test_email_evaluate_workflow_2(self):
# persistent-state
# for info on how to use sessions in testing.
session = self.client.session
- session['email_pks'] = [2, 3]
- session['total_email'] = 3
- session['current_email'] = 1
+ session["email_pks"] = [2, 3]
+ session["total_email"] = 3
+ session["current_email"] = 1
session.save()
- current_url = reverse('emails:evaluate', args=(1,))
+ current_url = reverse("emails:evaluate", args=(1,))
self.client.get(current_url)
- response = self.client.post(current_url,
- data={'submit_save': 'submit_save'})
- expected_url = reverse('emails:evaluate', args=(2,))
+ response = self.client.post(current_url, data={"submit_save": "submit_save"})
+ expected_url = reverse("emails:evaluate", args=(2,))
self.assertRedirects(response, expected_url)
- response = self.client.post(expected_url,
- data={'submit_save': 'submit_save'})
- expected_url = reverse('emails:evaluate', args=(3,))
+ response = self.client.post(expected_url, data={"submit_save": "submit_save"})
+ expected_url = reverse("emails:evaluate", args=(3,))
self.assertRedirects(response, expected_url)
- response = self.client.post(expected_url,
- data={'submit_save': 'submit_save'})
- expected_url = reverse('home')
+ response = self.client.post(expected_url, data={"submit_save": "submit_save"})
+ expected_url = reverse("home")
self.assertRedirects(response, expected_url)
- @patch('django.contrib.auth.models.AnonymousUser')
+ @patch("django.contrib.auth.models.AnonymousUser")
def test_email_evaluate_workflow_3(self, mock_user):
"""
Make sure that EmailEvaluate walks through the expected set of emails
when users are hitting 'send & next'.
"""
# request.user.email needs to exist or this test will fail.
- mock_user.email = 'fake@example.com'
+ mock_user.email = "fake@example.com"
# Set up a path that should take us through the evaluate view 3 times.
# Implicitly, we entered the email evaluation workflow with the pks =
@@ -229,9 +226,9 @@ def test_email_evaluate_workflow_3(self, mock_user):
# persistent-state
# for info on how to use sessions in testing.
session = self.client.session
- session['email_pks'] = [2, 3]
- session['total_email'] = 3
- session['current_email'] = 1
+ session["email_pks"] = [2, 3]
+ session["total_email"] = 3
+ session["current_email"] = 1
session.save()
# Make sure email 2 is sendable - in the test data it's missing a
@@ -241,22 +238,19 @@ def test_email_evaluate_workflow_3(self, mock_user):
record.email = email
record.save()
- current_url = reverse('emails:evaluate', args=(1,))
+ current_url = reverse("emails:evaluate", args=(1,))
self.client.get(current_url)
- response = self.client.post(current_url,
- data={'submit_send': 'submit_send'})
- expected_url = reverse('emails:evaluate', args=(2,))
+ response = self.client.post(current_url, data={"submit_send": "submit_send"})
+ expected_url = reverse("emails:evaluate", args=(2,))
self.assertRedirects(response, expected_url)
- response = self.client.post(expected_url,
- data={'submit_send': 'submit_send'})
- expected_url = reverse('emails:evaluate', args=(3,))
+ response = self.client.post(expected_url, data={"submit_send": "submit_send"})
+ expected_url = reverse("emails:evaluate", args=(3,))
self.assertRedirects(response, expected_url)
- response = self.client.post(expected_url,
- data={'submit_send': 'submit_send'})
- expected_url = reverse('home')
+ response = self.client.post(expected_url, data={"submit_send": "submit_send"})
+ expected_url = reverse("home")
self.assertRedirects(response, expected_url)
def test_saving_unsets_new_citations_flag(self):
@@ -265,39 +259,37 @@ def test_saving_unsets_new_citations_flag(self):
email.save()
session = self.client.session
- session['email_pks'] = [2, 3]
- session['total_email'] = 3
- session['current_email'] = 1
+ session["email_pks"] = [2, 3]
+ session["total_email"] = 3
+ session["current_email"] = 1
session.save()
- self.client.post(self.url, {
- 'submit_save': 'save & next',
- 'latest_text': email.latest_text
- })
+ self.client.post(
+ self.url, {"submit_save": "save & next", "latest_text": email.latest_text}
+ )
email.refresh_from_db()
self.assertEqual(email.new_citations, False)
- @patch('django.contrib.auth.models.AnonymousUser')
+ @patch("django.contrib.auth.models.AnonymousUser")
def test_sending_unsets_new_citations_flag(self, mock_user):
# request.user.email needs to exist or this test will fail.
- mock_user.email = 'fake@example.com'
+ mock_user.email = "fake@example.com"
email = EmailMessage.objects.get(pk=1)
email.new_citations = True
email.save()
session = self.client.session
- session['email_pks'] = [2, 3]
- session['total_email'] = 3
- session['current_email'] = 1
+ session["email_pks"] = [2, 3]
+ session["total_email"] = 3
+ session["current_email"] = 1
session.save()
- self.client.post(self.url, {
- 'submit_send': 'send & next',
- 'latest_text': email.latest_text
- })
+ self.client.post(
+ self.url, {"submit_send": "send & next", "latest_text": email.latest_text}
+ )
email.refresh_from_db()
@@ -309,46 +301,54 @@ def test_warning_message_shows_when_flag_set(self):
email.save()
response = self.client.get(self.url)
- expected = "New citations for this author have been imported since " \
- "last time the email was edited. They've been added to this " \
+ expected = (
+ "New citations for this author have been imported since "
+ "last time the email was edited. They've been added to this "
"email automatically, but please proofread."
- assert any([msg.message == expected and msg.level_tag == 'error'
- for msg in response.context['messages']])
+ )
+ assert any(
+ [
+ msg.message == expected and msg.level_tag == "error"
+ for msg in response.context["messages"]
+ ]
+ )
-@override_settings(LOGIN_REQUIRED=False,
- USE_ELEMENTS=False,
- # We need to set this or there won't be email recipients in
- # EMAIL_TESTING_MODE, so emails won't send.
- ADMINS=[('Admin', 'admin@fake.com')])
+@override_settings(
+ LOGIN_REQUIRED=False,
+ USE_ELEMENTS=False,
+ # We need to set this or there won't be email recipients in
+ # EMAIL_TESTING_MODE, so emails won't send.
+ ADMINS=[("Admin", "admin@fake.com")],
+)
class EmailMessageModelTestCase(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
def tearDown(self):
EmailMessage.objects.all().delete()
def test_revert(self):
- original_text = 'This is the original text'
- latest_text = 'This is the subsequent text'
+ original_text = "This is the original text"
+ latest_text = "This is the subsequent text"
email = EmailMessage.objects.create(
original_text=original_text,
latest_text=latest_text,
- author=Author.objects.latest('pk'),
- _liaison=Liaison.objects.latest('pk'),
+ author=Author.objects.latest("pk"),
+ _liaison=Liaison.objects.latest("pk"),
)
email.revert()
self.assertEqual(email.latest_text, original_text)
def test_latest_text_is_set_on_creation(self):
- original_text = 'This is the original text'
+ original_text = "This is the original text"
email = EmailMessage.objects.create(
original_text=original_text,
# Note that latest_text is not set here, hence defaults blank.
- author=Author.objects.latest('pk'),
- _liaison=Liaison.objects.latest('pk'),
+ author=Author.objects.latest("pk"),
+ _liaison=Liaison.objects.latest("pk"),
)
self.assertEqual(email.latest_text, original_text)
@@ -370,14 +370,14 @@ def test_already_sent_records_do_not_get_emailed(self):
def test_publisher_special_message_included(self):
"""The email text includes special messages for each publisher in its
record set with a special message."""
- message = 'A very special message'
+ message = "A very special message"
r3 = Record.objects.get(pk=3)
r3.message = message
r3.save()
records = Record.objects.filter(pk__in=[3, 4, 5])
text = EmailMessage.create_original_text(records)
- self.assertEqual(text.count('A very special message'), 1)
+ self.assertEqual(text.count("A very special message"), 1)
def test_html_rendered_as_html(self):
"""Make sure that we see
, not <p>, and so forth, in our
@@ -386,7 +386,7 @@ def test_html_rendered_as_html(self):
which is no good for our purposes.)"""
records = Record.objects.filter(pk__in=[3, 4, 5])
email = EmailMessage.create_original_text(records)
- self.assertNotIn('<p>', email)
+ self.assertNotIn("<p>", email)
def test_fpv_accepted_message_included(self):
"""The Recruit from Author - FPV Accepted message is included if that is
@@ -394,17 +394,18 @@ def test_fpv_accepted_message_included(self):
# Record 4 should have an fpv message; 3 does not
records = Record.objects.filter(pk__in=[3, 4])
email = EmailMessage.create_original_text(records)
- self.assertIn(Record.objects.get(pk=4).fpv_message,
- email)
+ self.assertIn(Record.objects.get(pk=4).fpv_message, email)
def test_fpv_accepted_message_excluded(self):
"""The Recruit from Author - FPV Accepted message is NOT included if
that ISN'T the recruitment strategy."""
record = Record.objects.filter(pk=3)
email = EmailMessage.create_original_text(record)
- msg = 'allows authors to download and deposit the final published ' \
- 'article, but does not allow the Libraries to perform the ' \
- 'downloading'
+ msg = (
+ "allows authors to download and deposit the final published "
+ "article, but does not allow the Libraries to perform the "
+ "downloading"
+ )
self.assertNotIn(msg, email)
def test_liaison_property_1(self):
@@ -452,11 +453,12 @@ def test_plaintext_property(self):
"""We have only the html message but we need to generate a text
format and update the email sending function."""
email = EmailMessage.objects.get(pk=3)
- self.assertEqual(email.latest_text,
- "Most recent text of email 3
citations
")
- self.assertEqual(email.plaintext,
- "Most recent text of email 3 citations")
+ self.assertEqual(
+ email.latest_text,
+ "Most recent text of email 3
citations
",
+ )
+ self.assertEqual(email.plaintext, "Most recent text of email 3 citations")
def test_get_or_create_for_records_1(self):
"""EmailMessage.get_or_create_for_records raises an error if the given
@@ -536,11 +538,11 @@ def test_create_citations(self):
# First, ensure that our if conditions in the for loop below will be
# True at least once each.
r1 = Record.objects.get(pk=1)
- r1.message = 'This is a message'
+ r1.message = "This is a message"
r1.save()
r2 = Record.objects.get(pk=2)
- r2.acq_method = 'RECRUIT_FROM_AUTHOR_FPV'
+ r2.acq_method = "RECRUIT_FROM_AUTHOR_FPV"
r2.save()
records = Record.objects.filter(pk__in=[1, 2, 3])
@@ -611,18 +613,23 @@ def test_finalize_email_2(self):
# Add new record so that update is meaningful. Note that we haven't set
# its email yet.
- r = Record(author=author, publisher_name='yo',
- acq_method='RECRUIT_FROM_AUTHOR_MANUSCRIPT',
- citation='boo', paper_id='3567')
+ r = Record(
+ author=author,
+ publisher_name="yo",
+ acq_method="RECRUIT_FROM_AUTHOR_MANUSCRIPT",
+ citation="boo",
+ paper_id="3567",
+ )
r.save()
- records = Record.objects.filter(
- author=author).exclude(email__date_sent__isnull=False)
+ records = Record.objects.filter(author=author).exclude(
+ email__date_sent__isnull=False
+ )
assert records.count() > orig_records.count()
email = EmailMessage._finalize_email(emails, records, author)
- assert 'boo' in email.latest_text
+ assert "boo" in email.latest_text
def test_finalize_email_3(self):
"""Finalizing email updates associated records with the email."""
@@ -632,13 +639,18 @@ def test_finalize_email_3(self):
# Add new record so that update is meaningful. Note that we haven't set
# its email yet.
- r = Record(author=author, publisher_name='yo',
- acq_method='RECRUIT_FROM_AUTHOR_MANUSCRIPT',
- citation='boo', paper_id='3567')
+ r = Record(
+ author=author,
+ publisher_name="yo",
+ acq_method="RECRUIT_FROM_AUTHOR_MANUSCRIPT",
+ citation="boo",
+ paper_id="3567",
+ )
r.save()
- records = Record.objects.filter(
- author=author).exclude(email__date_sent__isnull=False)
+ records = Record.objects.filter(author=author).exclude(
+ email__date_sent__isnull=False
+ )
assert records.count() > orig_records.count()
@@ -655,13 +667,19 @@ def test_rebuild_citations_returns_false_if_no_new_citations(self):
def test_rebuild_citations_sets_text(self):
email = EmailMessage.objects.get(pk=1)
- orig_citation = ('Fermi, Enrico. Paper name. Some journal or other. '
- '145:5 (2016)')
- new_citation = 'yo I am for sure a citation'
-
- r = Record(email=email, author=email.author, publisher_name='yo',
- acq_method='RECRUIT_FROM_AUTHOR_MANUSCRIPT',
- citation=new_citation, paper_id='3567')
+ orig_citation = (
+ "Fermi, Enrico. Paper name. Some journal or other. " "145:5 (2016)"
+ )
+ new_citation = "yo I am for sure a citation"
+
+ r = Record(
+ email=email,
+ author=email.author,
+ publisher_name="yo",
+ acq_method="RECRUIT_FROM_AUTHOR_MANUSCRIPT",
+ citation=new_citation,
+ paper_id="3567",
+ )
r.save()
email.rebuild_citations()
email.refresh_from_db()
@@ -671,30 +689,40 @@ def test_rebuild_citations_sets_text(self):
def test_rebuild_citations_sets_flag(self):
email = EmailMessage.objects.get(pk=1)
- r = Record(email=email, author=email.author, publisher_name='yo',
- acq_method='RECRUIT_FROM_AUTHOR_MANUSCRIPT', citation='yo',
- paper_id='3567')
+ r = Record(
+ email=email,
+ author=email.author,
+ publisher_name="yo",
+ acq_method="RECRUIT_FROM_AUTHOR_MANUSCRIPT",
+ citation="yo",
+ paper_id="3567",
+ )
r.save()
email.rebuild_citations()
email.refresh_from_db()
assert email.new_citations is True
- @patch('solenoid.emails.models.EmailMessage.rebuild_citations')
+ @patch("solenoid.emails.models.EmailMessage.rebuild_citations")
def test_rebuild_citations_fired_when_needed_1(self, mock_rebuild):
"""get_or_create_for_records fires rebuild_citations when there are
new citations."""
email = EmailMessage.objects.get(pk=5)
- r = Record(email=email, author=email.author, publisher_name='yo',
- acq_method='RECRUIT_FROM_AUTHOR_MANUSCRIPT', citation='yo',
- paper_id='3567')
+ r = Record(
+ email=email,
+ author=email.author,
+ publisher_name="yo",
+ acq_method="RECRUIT_FROM_AUTHOR_MANUSCRIPT",
+ citation="yo",
+ paper_id="3567",
+ )
r.save()
records = Record.objects.filter(email=email)
EmailMessage.get_or_create_for_records(records)
mock_rebuild.assert_called_once()
- @patch('solenoid.emails.models.EmailMessage.rebuild_citations')
+ @patch("solenoid.emails.models.EmailMessage.rebuild_citations")
def test_rebuild_citations_fired_when_needed_2(self, mock_rebuild):
"""get_or_create_for_records does not fire rebuild_citations when there
are no new citations."""
@@ -705,37 +733,39 @@ def test_rebuild_citations_fired_when_needed_2(self, mock_rebuild):
mock_rebuild.assert_not_called()
-@override_settings(LOGIN_REQUIRED=False,
- USE_ELEMENTS=False,
- # We need to set this or there won't be email recipients in
- # EMAIL_TESTING_MODE, so emails won't send.
- ADMINS=[('Admin', 'admin@fake.com')])
+@override_settings(
+ LOGIN_REQUIRED=False,
+ USE_ELEMENTS=False,
+ # We need to set this or there won't be email recipients in
+ # EMAIL_TESTING_MODE, so emails won't send.
+ ADMINS=[("Admin", "admin@fake.com")],
+)
class EmailSendTestCase(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
def setUp(self):
- self.url = reverse('emails:send')
+ self.url = reverse("emails:send")
self.client = Client()
@override_settings()
def test_email_send_function_sends(self):
email = EmailMessage.objects.get(pk=1)
self.assertFalse(email.date_sent)
- email.send('username')
+ email.send("username")
self.assertEqual(len(mail.outbox), 1)
- @override_settings(ADMINS=(('fake admin', 'fake@example.com'),))
+ @override_settings(ADMINS=(("fake admin", "fake@example.com"),))
def test_email_send_function_does_not_resend(self):
email = EmailMessage.objects.get(pk=1)
email.date_sent = date.today()
email.save()
- email.send('username')
+ email.send("username")
self.assertEqual(len(mail.outbox), 0)
def test_email_send_function_sets_datestamp(self):
email = EmailMessage.objects.get(pk=1)
self.assertFalse(email.date_sent)
- email.send('username')
+ email.send("username")
email.refresh_from_db()
self.assertTrue(email.date_sent)
# Conceivably this test will fail if run near midnight UTC.
@@ -746,72 +776,75 @@ def test_email_send_function_sets_liaison(self):
liaison = email.liaison
assert not email.date_sent
- email.send('username')
+ email.send("username")
email.refresh_from_db()
assert email.date_sent
self.assertEqual(email._liaison, liaison)
- @override_settings(SCHOLCOMM_MOIRA_LIST='scholcomm@example.com',
- EMAIL_TESTING_MODE=False)
+ @override_settings(
+ SCHOLCOMM_MOIRA_LIST="scholcomm@example.com", EMAIL_TESTING_MODE=False
+ )
def test_email_is_sent_to_liaison(self):
email = EmailMessage.objects.get(pk=1)
- email.send('username')
+ email.send("username")
self.assertEqual(len(mail.outbox), 1) # check assumption
- self.assertIn(EmailMessage.objects.get(pk=1).liaison.email_address,
- mail.outbox[0].to)
+ self.assertIn(
+ EmailMessage.objects.get(pk=1).liaison.email_address, mail.outbox[0].to
+ )
- @override_settings(SCHOLCOMM_MOIRA_LIST='scholcomm@example.com',
- EMAIL_TESTING_MODE=False)
+ @override_settings(
+ SCHOLCOMM_MOIRA_LIST="scholcomm@example.com", EMAIL_TESTING_MODE=False
+ )
def test_email_is_sent_to_scholcomm_moira_list(self):
email = EmailMessage.objects.get(pk=1)
- email.send('username')
+ email.send("username")
self.assertEqual(len(mail.outbox), 1) # check assumption
- self.assertIn('scholcomm@example.com', mail.outbox[0].to)
+ self.assertIn("scholcomm@example.com", mail.outbox[0].to)
@override_settings(SCHOLCOMM_MOIRA_LIST=None)
def test_email_handles_empty_moira_list(self):
"""If no scholcomm list has been set, the email function should not
break."""
email = EmailMessage.objects.get(pk=1)
- email.send('username')
+ email.send("username")
self.assertEqual(len(mail.outbox), 1)
# autospec=True ensures that 'self' is passed into the mock, allowing us to
# examine the call args as desired:
# https://docs.python.org/3.3/library/unittest.mock-examples.html#mocking-unbound-methods
- @patch('solenoid.emails.models.EmailMessage.send', autospec=True)
+ @patch("solenoid.emails.models.EmailMessage.send", autospec=True)
# Middleware isn't available for RequestFactory, so we need to mock out the
# messages call so it doesn't trigger an error.
- @patch('solenoid.emails.views.messages')
+ @patch("solenoid.emails.views.messages")
def test_email_send_view_calls_function(self, mock_messages, mock_send):
# We need to use the RequestFactory and not Client because request.user
# needs to exist, so that the username is available for calling
# EmailMessage.send().
factory = RequestFactory()
- request = factory.post(self.url, data={'emails': [1, 2]})
+ request = factory.post(self.url, data={"emails": [1, 2]})
request.user = User.objects.create_user(
- username='wbrogers',
- email='wbrogers@mit.edu',
- password='top_secret')
+ username="wbrogers", email="wbrogers@mit.edu", password="top_secret"
+ )
EmailSend.as_view()(request)
email1 = EmailMessage.objects.get(pk=1)
email2 = EmailMessage.objects.get(pk=2)
- expected = [call(email1, 'wbrogers'), call(email2, 'wbrogers')]
+ expected = [call(email1, "wbrogers"), call(email2, "wbrogers")]
mock_send.assert_has_calls(expected, any_order=True)
def test_subject_is_something_logical(self):
email = EmailMessage.objects.get(pk=1)
- email.send('username')
+ email.send("username")
self.assertEqual(len(mail.outbox), 1)
- expected = 'OA outreach message to forward: {author}'.format(
- author=email.author.last_name)
+ expected = "OA outreach message to forward: {author}".format(
+ author=email.author.last_name
+ )
self.assertEqual(mail.outbox[0].subject, expected)
def test_email_not_sent_when_missing_liaison(self):
email = EmailMessage.objects.get(pk=5)
assert not email.liaison # check assumption: no liaison
assert not email.date_sent # check assumption: unsent
- self.assertFalse(email.send('username'))
+ self.assertFalse(email.send("username"))
self.assertEqual(len(mail.outbox), 0)
diff --git a/solenoid/emails/urls.py b/solenoid/emails/urls.py
index 7b804d93..463d6322 100644
--- a/solenoid/emails/urls.py
+++ b/solenoid/emails/urls.py
@@ -2,15 +2,13 @@
from . import views
-app_name = 'emails'
+app_name = "emails"
urlpatterns = [
- re_path(r'^create/$', views.EmailCreate.as_view(), name='create'),
- re_path(r'^(?P<pk>\d+)/$', views.EmailEvaluate.as_view(), name='evaluate'),
- re_path(r'^send/$', views.EmailSend.as_view(), name='send'),
- re_path(r'^$', views.EmailListPending.as_view(), name='list_pending'),
- re_path(r'^liaison/(?P<pk>\d+)/$', views.EmailLiaison.as_view(),
- name='get_liaison'),
- re_path(r'^rebuild/(?P<pk>\d+)/$', views.EmailRebuild.as_view(),
- name='rebuild'),
- ]
+ re_path(r"^create/$", views.EmailCreate.as_view(), name="create"),
+ re_path(r"^(?P<pk>\d+)/$", views.EmailEvaluate.as_view(), name="evaluate"),
+ re_path(r"^send/$", views.EmailSend.as_view(), name="send"),
+ re_path(r"^$", views.EmailListPending.as_view(), name="list_pending"),
+ re_path(r"^liaison/(?P<pk>\d+)/$", views.EmailLiaison.as_view(), name="get_liaison"),
+ re_path(r"^rebuild/(?P<pk>\d+)/$", views.EmailRebuild.as_view(), name="rebuild"),
+]
diff --git a/solenoid/emails/views.py b/solenoid/emails/views.py
index ca94c926..56b5c29b 100644
--- a/solenoid/emails/views.py
+++ b/solenoid/emails/views.py
@@ -3,9 +3,7 @@
from django.contrib import messages
from django.urls import reverse
from django.db import close_old_connections, connection
-from django.http import (HttpResponseRedirect,
- HttpResponse,
- HttpResponseForbidden)
+from django.http import HttpResponseRedirect, HttpResponse, HttpResponseForbidden
from django.shortcuts import get_object_or_404
from django.views.generic import View, DetailView
from django.views.generic.edit import UpdateView
@@ -42,23 +40,22 @@ def _get_or_create_emails(pk_list):
class EmailCreate(ConditionalLoginRequiredMixin, View):
- http_method_names = ['post']
+ http_method_names = ["post"]
def post(self, request, *args, **kwargs):
- pk_list = request.POST.getlist('records')
+ pk_list = request.POST.getlist("records")
email_pks = _get_or_create_emails(pk_list)
try:
- logger.info('Creating emails for {pks}.'.format(pks=email_pks))
+ logger.info("Creating emails for {pks}.".format(pks=email_pks))
first_pk = email_pks.pop(0)
request.session["email_pks"] = email_pks
request.session["total_email"] = len(email_pks) + 1
request.session["current_email"] = 1
- return HttpResponseRedirect(reverse('emails:evaluate',
- args=(first_pk,)))
+ return HttpResponseRedirect(reverse("emails:evaluate", args=(first_pk,)))
except IndexError:
- logger.exception('No email pks found; email cannot be created.')
- messages.warning(request, 'No email messages found.')
- return HttpResponseRedirect(reverse('home'))
+ logger.exception("No email pks found; email cannot be created.")
+ messages.warning(request, "No email messages found.")
+ return HttpResponseRedirect(reverse("home"))
class EmailEvaluate(ConditionalLoginRequiredMixin, UpdateView):
@@ -72,96 +69,110 @@ class EmailEvaluate(ConditionalLoginRequiredMixin, UpdateView):
2) If there isn't, or if it's empty, users will be redirected to the
dashboard.
"""
+
form_class = EmailMessageForm
model = EmailMessage
def _check_citation_validity(self):
if self.object.new_citations:
- messages.error(self.request, "New citations for this author "
- "have been imported since last time the email "
- "was edited. They've been added to this email "
- "automatically, but please proofread.")
+ messages.error(
+ self.request,
+ "New citations for this author "
+ "have been imported since last time the email "
+ "was edited. They've been added to this email "
+ "automatically, but please proofread.",
+ )
def _check_record_count(self):
- available_records = Record.objects.filter(
- author=self.object.author).exclude(
- email__date_sent__isnull=False)
+ available_records = Record.objects.filter(author=self.object.author).exclude(
+ email__date_sent__isnull=False
+ )
if available_records.count() > self.object.record_set.count():
- messages.error(self.request, "New citations for this author "
- "have been imported since last time the email was "
- "edited, but not added to this email. Do you want "
- "to <a href='{url}'>add them</a>?"
- .format(url=reverse('emails:rebuild',
- args=(self.object.pk,))))
+ messages.error(
+ self.request,
+ "New citations for this author "
+ "have been imported since last time the email was "
+ "edited, but not added to this email. Do you want "
+ "to <a href='{url}'>add them</a>?".format(
+ url=reverse("emails:rebuild", args=(self.object.pk,))
+ ),
+ )
def _finish_handle(self):
next_pk = self._update_session()
if next_pk:
- self.kwargs['next_pk'] = next_pk
+ self.kwargs["next_pk"] = next_pk
else:
try:
- del self.kwargs['next_pk']
+ del self.kwargs["next_pk"]
except Exception:
pass
return HttpResponseRedirect(self.get_success_url())
def _handle_cancel(self):
- logger.info('Canceling changes to email {}'.format(self.kwargs['pk']))
- messages.info(self.request, "Any changes to the email have "
- "been discarded. You may return to the email and "
- "update it later.")
+ logger.info("Canceling changes to email {}".format(self.kwargs["pk"]))
+ messages.info(
+ self.request,
+ "Any changes to the email have "
+ "been discarded. You may return to the email and "
+ "update it later.",
+ )
return self._finish_handle()
def _handle_save(self):
- logger.info('Saving changes to email {}'.format(self.kwargs['pk']))
+ logger.info("Saving changes to email {}".format(self.kwargs["pk"]))
self.form_valid(self.get_form())
messages.success(self.request, "Email message updated.")
return self._finish_handle()
def _handle_send(self):
- logger.info('Sending email {pk}'.format(pk=self.kwargs['pk']))
+ logger.info("Sending email {pk}".format(pk=self.kwargs["pk"]))
self.form_valid(self.get_form())
# This should exist, because if it doesn't dispatch() will have already
# thrown an error and we won't reach this line.
# If it doesn't exist, users will see a 500, which is also reasonable.
- email = EmailMessage.objects.get(pk=self.kwargs['pk'])
+ email = EmailMessage.objects.get(pk=self.kwargs["pk"])
# Don't use username as it's usually a cryptic string.
try:
email.send(self.request.user.email)
except AttributeError as e:
logger.info(e)
- email.send('admin')
- messages.success(self.request, "Email message updated and sent. "
- "Articles in Elements will be updated within an "
- "hour to reflect their new library status.")
+ email.send("admin")
+ messages.success(
+ self.request,
+ "Email message updated and sent. "
+ "Articles in Elements will be updated within an "
+ "hour to reflect their new library status.",
+ )
return self._finish_handle()
def _get_title(self):
if self.object.date_sent:
- return 'View sent email'
+ return "View sent email"
else:
- return 'Send email'
+ return "Send email"
def _update_session(self):
- if ('email_pks' not in self.request.session or
- not len(self.request.session['email_pks'])):
+ if "email_pks" not in self.request.session or not len(
+ self.request.session["email_pks"]
+ ):
return None
try:
- logger.info('Updating email pks in session')
- next_pk = self.request.session['email_pks'].pop(0)
- self.request.session['current_email'] += 1
+ logger.info("Updating email pks in session")
+ next_pk = self.request.session["email_pks"].pop(0)
+ self.request.session["current_email"] += 1
return next_pk
except (KeyError, IndexError):
- logger.exception('Could not update email pks in session')
+ logger.exception("Could not update email pks in session")
# If we don't have email_pks in session, or we have run off the end
# of the list and have no more to pop, then we should clean up any
# other email-pk-related stuff that still exists.
try:
- del self.request.session['current_email']
- del self.request.session['total_email']
+ del self.request.session["current_email"]
+ del self.request.session["total_email"]
except KeyError:
pass
@@ -169,20 +180,20 @@ def _update_session(self):
def get_context_data(self, **kwargs):
context = super(EmailEvaluate, self).get_context_data(**kwargs)
- context['title'] = self._get_title()
+ context["title"] = self._get_title()
try:
- context['progress'] = '#{k} of {n}'.format(
- k=self.request.session['current_email'],
- n=self.request.session['total_email'])
+ context["progress"] = "#{k} of {n}".format(
+ k=self.request.session["current_email"],
+ n=self.request.session["total_email"],
+ )
except KeyError:
pass
- context['breadcrumbs'] = [
- {'url': reverse('home'), 'text': 'dashboard'},
- {'url': reverse('emails:list_pending'),
- 'text': 'view pending emails'},
- {'url': '#', 'text': 'send email'}
+ context["breadcrumbs"] = [
+ {"url": reverse("home"), "text": "dashboard"},
+ {"url": reverse("emails:list_pending"), "text": "view pending emails"},
+ {"url": "#", "text": "send email"},
]
self._check_citation_validity()
@@ -191,14 +202,14 @@ def get_context_data(self, **kwargs):
return context
def get_success_url(self):
- if 'next_pk' in self.kwargs:
- return reverse('emails:evaluate', args=(self.kwargs['next_pk'],))
+ if "next_pk" in self.kwargs:
+ return reverse("emails:evaluate", args=(self.kwargs["next_pk"],))
else:
- return reverse('home')
+ return reverse("home")
def get_template_names(self):
if self.object.date_sent:
- return ['emails/evaluate_sent.html']
+ return ["emails/evaluate_sent.html"]
else:
return super(EmailEvaluate, self).get_template_names()
@@ -208,62 +219,66 @@ def post(self, request, *args, **kwargs):
self.object = self.get_object()
if self.object.date_sent:
- messages.warning(request, 'This email has been sent; no '
- 'further changes can be made.')
- return HttpResponseRedirect(reverse('emails:evaluate',
- args=(self.kwargs['pk'])))
-
- if 'submit_cancel' in request.POST:
+ messages.warning(
+ request, "This email has been sent; no " "further changes can be made."
+ )
+ return HttpResponseRedirect(
+ reverse("emails:evaluate", args=(self.kwargs["pk"]))
+ )
+
+ if "submit_cancel" in request.POST:
return self._handle_cancel()
- elif 'submit_save' in request.POST:
+ elif "submit_save" in request.POST:
return self._handle_save()
- elif 'submit_send' in request.POST:
+ elif "submit_send" in request.POST:
return self._handle_send()
else:
- messages.warning(request, "I'm sorry; I can't tell what you meant "
- "to do.")
+ messages.warning(request, "I'm sorry; I can't tell what you meant " "to do.")
return self.form_invalid(self.get_form())
class EmailSend(ConditionalLoginRequiredMixin, View):
- http_method_names = ['post']
+ http_method_names = ["post"]
def post(self, request, *args, **kwargs):
- pk_list = request.POST.getlist('emails')
+ pk_list = request.POST.getlist("emails")
statuses = []
for pk in pk_list:
- logger.info('Sending email {pk}'.format(pk=pk))
- sent = EmailMessage.objects.get(pk=pk).send(
- self.request.user.username)
+ logger.info("Sending email {pk}".format(pk=pk))
+ sent = EmailMessage.objects.get(pk=pk).send(self.request.user.username)
statuses.append(sent)
if False in statuses:
- logger.warning('Could not send all emails. {bad} of {all} were '
- 'not sent.'.format(bad=statuses.count(False),
- all=len(pk_list)))
- messages.warning(request, 'Some emails were not successfully sent.'
- ' Check to be sure that a liaison has been '
- 'assigned for each DLC and try again.')
+ logger.warning(
+ "Could not send all emails. {bad} of {all} were "
+ "not sent.".format(bad=statuses.count(False), all=len(pk_list))
+ )
+ messages.warning(
+ request,
+ "Some emails were not successfully sent."
+ " Check to be sure that a liaison has been "
+ "assigned for each DLC and try again.",
+ )
else:
- logger.info('All emails successfully sent')
- messages.success(request, 'All emails sent. Hooray!')
+ logger.info("All emails successfully sent")
+ messages.success(request, "All emails sent. Hooray!")
- return HttpResponseRedirect(reverse('home'))
+ return HttpResponseRedirect(reverse("home"))
class EmailListPending(ConditionalLoginRequiredMixin, ListView):
-
def get_queryset(self):
- qs = EmailMessage.objects.filter(
- date_sent__isnull=True).prefetch_related('author')
+ qs = EmailMessage.objects.filter(date_sent__isnull=True).prefetch_related(
+ "author"
+ )
return qs
def get_context_data(self, **kwargs):
context = super(EmailListPending, self).get_context_data(**kwargs)
- context['title'] = 'Unsent emails'
- context['breadcrumbs'] = [
- {'url': reverse('home'), 'text': 'dashboard'},
- {'url': '#', 'text': 'view pending emails'},
+ context["title"] = "Unsent emails"
+ context["breadcrumbs"] = [
+ {"url": reverse("home"), "text": "dashboard"},
+ {"url": "#", "text": "view pending emails"},
]
return context
@@ -276,6 +291,7 @@ class EmailLiaison(ConditionalLoginRequiredMixin, DetailView):
We need this on the email evaluation page, so that we can figure out the
outcome of the assign-liaison process that happens in the modal.
"""
+
model = EmailMessage
def get(self, request, *args, **kwargs):
@@ -288,17 +304,17 @@ def get(self, request, *args, **kwargs):
class EmailRebuild(ConditionalLoginRequiredMixin, View):
- http_method_names = ['get']
+ http_method_names = ["get"]
def get(self, request, *args, **kwargs):
- email = get_object_or_404(EmailMessage, pk=kwargs['pk'])
+ email = get_object_or_404(EmailMessage, pk=kwargs["pk"])
if email.date_sent:
return HttpResponseForbidden()
email.rebuild_citations()
- records = Record.objects.filter(
- author=email.author).exclude(
- email__date_sent__isnull=False)
+ records = Record.objects.filter(author=email.author).exclude(
+ email__date_sent__isnull=False
+ )
records.update(email=email)
# Override usual new-citation warning, since presumably users triggered
@@ -307,5 +323,4 @@ def get(self, request, *args, **kwargs):
email.save()
messages.info(request, "New citations added. Please proofread.")
- return HttpResponseRedirect(reverse('emails:evaluate',
- args=(email.pk,)))
+ return HttpResponseRedirect(reverse("emails:evaluate", args=(email.pk,)))
diff --git a/solenoid/people/admin.py b/solenoid/people/admin.py
index a75e2026..d58e4eef 100644
--- a/solenoid/people/admin.py
+++ b/solenoid/people/admin.py
@@ -5,12 +5,12 @@
class DLCInline(admin.TabularInline):
model = DLC
- fields = ('name',)
+ fields = ("name",)
class LiaisonAdmin(admin.ModelAdmin):
- list_filter = ('active',)
- list_display = ('first_name', 'last_name', 'email_address', 'active')
+ list_filter = ("active",)
+ list_display = ("first_name", "last_name", "email_address", "active")
inlines = (DLCInline,)
@@ -18,16 +18,21 @@ class LiaisonAdmin(admin.ModelAdmin):
class DLCAdmin(admin.ModelAdmin):
- list_filter = ('liaison',)
- list_display = ('name', 'liaison')
+ list_filter = ("liaison",)
+ list_display = ("name", "liaison")
admin.site.register(DLC, DLCAdmin)
class AuthorAdmin(admin.ModelAdmin):
- list_filter = ('dlc', 'dlc__liaison')
- list_display = ('first_name', 'last_name', 'email', 'dlc',)
+ list_filter = ("dlc", "dlc__liaison")
+ list_display = (
+ "first_name",
+ "last_name",
+ "email",
+ "dlc",
+ )
admin.site.register(Author, AuthorAdmin)
diff --git a/solenoid/people/forms.py b/solenoid/people/forms.py
index f95e552e..d399c117 100644
--- a/solenoid/people/forms.py
+++ b/solenoid/people/forms.py
@@ -6,15 +6,14 @@
class LiaisonCreateForm(forms.ModelForm):
class Meta:
model = Liaison
- fields = ('first_name', 'last_name', 'email_address')
+ fields = ("first_name", "last_name", "email_address")
def __init__(self, *args, **kwargs):
super(LiaisonCreateForm, self).__init__(*args, **kwargs)
- self.fields['dlc'] = forms.ModelMultipleChoiceField(
- queryset=DLC.objects.all(),
- required=False) # allow 0 DLCs to be selected
- self.fields['dlc'].widget.attrs['class'] = 'field field-select'
- self.fields['first_name'].widget.attrs['class'] = 'field field-text'
- self.fields['last_name'].widget.attrs['class'] = 'field field-text'
- self.fields['email_address'].widget.attrs['class'] = ('field '
- 'field-email')
+ self.fields["dlc"] = forms.ModelMultipleChoiceField(
+ queryset=DLC.objects.all(), required=False
+ ) # allow 0 DLCs to be selected
+ self.fields["dlc"].widget.attrs["class"] = "field field-select"
+ self.fields["first_name"].widget.attrs["class"] = "field field-text"
+ self.fields["last_name"].widget.attrs["class"] = "field field-text"
+ self.fields["email_address"].widget.attrs["class"] = "field " "field-email"
diff --git a/solenoid/people/migrations/0001_initial.py b/solenoid/people/migrations/0001_initial.py
index a4c72540..e6305b7c 100644
--- a/solenoid/people/migrations/0001_initial.py
+++ b/solenoid/people/migrations/0001_initial.py
@@ -5,62 +5,82 @@
class Migration(migrations.Migration):
-
- dependencies = [
- ]
+ dependencies = []
operations = [
migrations.CreateModel(
- name='Author',
+ name="Author",
fields=[
- ('id', models.AutoField(serialize=False, verbose_name='ID',
- auto_created=True, primary_key=True)),
- ('email', models.EmailField(max_length=254,
- help_text='Author email address')),
- ('first_name', models.CharField(max_length=20)),
- ('last_name', models.CharField(max_length=40)),
- ('mit_id', models.CharField(max_length=10)),
+ (
+ "id",
+ models.AutoField(
+ serialize=False,
+ verbose_name="ID",
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ (
+ "email",
+ models.EmailField(max_length=254, help_text="Author email address"),
+ ),
+ ("first_name", models.CharField(max_length=20)),
+ ("last_name", models.CharField(max_length=40)),
+ ("mit_id", models.CharField(max_length=10)),
],
options={
- 'verbose_name_plural': 'Authors',
- 'verbose_name': 'Author',
+ "verbose_name_plural": "Authors",
+ "verbose_name": "Author",
},
),
migrations.CreateModel(
- name='DLC',
+ name="DLC",
fields=[
- ('id', models.AutoField(serialize=False, verbose_name='ID',
- auto_created=True, primary_key=True)),
- ('name', models.CharField(max_length=100)),
+ (
+ "id",
+ models.AutoField(
+ serialize=False,
+ verbose_name="ID",
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ ("name", models.CharField(max_length=100)),
],
options={
- 'verbose_name_plural': 'DLCs',
- 'verbose_name': 'DLC',
+ "verbose_name_plural": "DLCs",
+ "verbose_name": "DLC",
},
),
migrations.CreateModel(
- name='Liaison',
+ name="Liaison",
fields=[
- ('id', models.AutoField(serialize=False, verbose_name='ID',
- auto_created=True, primary_key=True)),
- ('first_name', models.CharField(max_length=15)),
- ('last_name', models.CharField(max_length=30)),
- ('email_address', models.EmailField(max_length=254)),
+ (
+ "id",
+ models.AutoField(
+ serialize=False,
+ verbose_name="ID",
+ auto_created=True,
+ primary_key=True,
+ ),
+ ),
+ ("first_name", models.CharField(max_length=15)),
+ ("last_name", models.CharField(max_length=30)),
+ ("email_address", models.EmailField(max_length=254)),
],
options={
- 'verbose_name_plural': 'Liaisons',
- 'verbose_name': 'Liaison',
+ "verbose_name_plural": "Liaisons",
+ "verbose_name": "Liaison",
},
),
migrations.AddField(
- model_name='dlc',
- name='liaison',
- field=models.ForeignKey(to='people.Liaison',
- on_delete=models.CASCADE),
+ model_name="dlc",
+ name="liaison",
+ field=models.ForeignKey(to="people.Liaison", on_delete=models.CASCADE),
),
migrations.AddField(
- model_name='author',
- name='dlc',
- field=models.ForeignKey(to='people.DLC', on_delete=models.CASCADE),
+ model_name="author",
+ name="dlc",
+ field=models.ForeignKey(to="people.DLC", on_delete=models.CASCADE),
),
]
diff --git a/solenoid/people/migrations/0002_auto_20170424_1452.py b/solenoid/people/migrations/0002_auto_20170424_1452.py
index 4c0cd8fb..dfc150f4 100644
--- a/solenoid/people/migrations/0002_auto_20170424_1452.py
+++ b/solenoid/people/migrations/0002_auto_20170424_1452.py
@@ -5,17 +5,16 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0001_initial'),
+ ("people", "0001_initial"),
]
operations = [
migrations.AlterField(
- model_name='dlc',
- name='liaison',
- field=models.ForeignKey(blank=True, null=True,
- to='people.Liaison',
- on_delete=models.CASCADE),
+ model_name="dlc",
+ name="liaison",
+ field=models.ForeignKey(
+ blank=True, null=True, to="people.Liaison", on_delete=models.CASCADE
+ ),
),
]
diff --git a/solenoid/people/migrations/0003_auto_20170426_2005.py b/solenoid/people/migrations/0003_auto_20170426_2005.py
index af31fa55..49d4ccec 100644
--- a/solenoid/people/migrations/0003_auto_20170426_2005.py
+++ b/solenoid/people/migrations/0003_auto_20170426_2005.py
@@ -5,15 +5,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0002_auto_20170424_1452'),
+ ("people", "0002_auto_20170424_1452"),
]
operations = [
migrations.AlterField(
- model_name='dlc',
- name='name',
+ model_name="dlc",
+ name="name",
field=models.CharField(max_length=100, unique=True),
),
]
diff --git a/solenoid/people/migrations/0004_auto_20170501_1559.py b/solenoid/people/migrations/0004_auto_20170501_1559.py
index ab42f3ee..6c5a934f 100644
--- a/solenoid/people/migrations/0004_auto_20170501_1559.py
+++ b/solenoid/people/migrations/0004_auto_20170501_1559.py
@@ -5,22 +5,25 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0003_auto_20170426_2005'),
+ ("people", "0003_auto_20170426_2005"),
]
operations = [
migrations.AddField(
- model_name='author',
- name='_mit_id_hash',
- field=models.CharField(max_length=32, default=0, help_text="This "
- "stores the *hash* of the MIT ID, not the "
- "MIT ID itself. We want to have a unique "
- "identifier for the author but we don't "
- "want to be storing sensitive data "
- "offsite. Hashing the ID achieves "
- "our goals."),
+ model_name="author",
+ name="_mit_id_hash",
+ field=models.CharField(
+ max_length=32,
+ default=0,
+ help_text="This "
+ "stores the *hash* of the MIT ID, not the "
+ "MIT ID itself. We want to have a unique "
+ "identifier for the author but we don't "
+ "want to be storing sensitive data "
+ "offsite. Hashing the ID achieves "
+ "our goals.",
+ ),
preserve_default=False,
),
]
diff --git a/solenoid/people/migrations/0005_initialize_hashes.py b/solenoid/people/migrations/0005_initialize_hashes.py
index c70133cc..2c59b97d 100644
--- a/solenoid/people/migrations/0005_initialize_hashes.py
+++ b/solenoid/people/migrations/0005_initialize_hashes.py
@@ -12,17 +12,13 @@ def initialize_hashes(apps, schema_editor):
for author in Author.objects.all():
# We can't use Author.get_hash because it doesn't deconstruct properly
# to be available for migrations.
- author._mit_id_hash = hashlib.md5(
- author.mit_id.encode('utf-8')).hexdigest()
+ author._mit_id_hash = hashlib.md5(author.mit_id.encode("utf-8")).hexdigest()
author.save()
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0004_auto_20170501_1559'),
+ ("people", "0004_auto_20170501_1559"),
]
- operations = [
- migrations.RunPython(initialize_hashes)
- ]
+ operations = [migrations.RunPython(initialize_hashes)]
diff --git a/solenoid/people/migrations/0006_remove_author_mit_id.py b/solenoid/people/migrations/0006_remove_author_mit_id.py
index 9a8ac821..272bd251 100644
--- a/solenoid/people/migrations/0006_remove_author_mit_id.py
+++ b/solenoid/people/migrations/0006_remove_author_mit_id.py
@@ -5,14 +5,13 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0005_initialize_hashes'),
+ ("people", "0005_initialize_hashes"),
]
operations = [
migrations.RemoveField(
- model_name='author',
- name='mit_id',
+ model_name="author",
+ name="mit_id",
),
]
diff --git a/solenoid/people/migrations/0007_liaison_active.py b/solenoid/people/migrations/0007_liaison_active.py
index c12e3387..ef0c51ab 100644
--- a/solenoid/people/migrations/0007_liaison_active.py
+++ b/solenoid/people/migrations/0007_liaison_active.py
@@ -5,15 +5,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0006_remove_author_mit_id'),
+ ("people", "0006_remove_author_mit_id"),
]
operations = [
migrations.AddField(
- model_name='liaison',
- name='active',
+ model_name="liaison",
+ name="active",
field=models.BooleanField(default=True),
),
]
diff --git a/solenoid/people/migrations/0008_auto_20170522_1620.py b/solenoid/people/migrations/0008_auto_20170522_1620.py
index 606d67de..815ce5b9 100644
--- a/solenoid/people/migrations/0008_auto_20170522_1620.py
+++ b/solenoid/people/migrations/0008_auto_20170522_1620.py
@@ -5,16 +5,17 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0007_liaison_active'),
+ ("people", "0007_liaison_active"),
]
operations = [
migrations.AlterModelOptions(
- name='liaison',
- options={'verbose_name': 'Liaison',
- 'verbose_name_plural': 'Liaisons',
- 'ordering': ['last_name', 'first_name']},
+ name="liaison",
+ options={
+ "verbose_name": "Liaison",
+ "verbose_name_plural": "Liaisons",
+ "ordering": ["last_name", "first_name"],
+ },
),
]
diff --git a/solenoid/people/migrations/0009_auto_20170522_1621.py b/solenoid/people/migrations/0009_auto_20170522_1621.py
index 401ba851..278538a6 100644
--- a/solenoid/people/migrations/0009_auto_20170522_1621.py
+++ b/solenoid/people/migrations/0009_auto_20170522_1621.py
@@ -5,22 +5,25 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0008_auto_20170522_1620'),
+ ("people", "0008_auto_20170522_1620"),
]
operations = [
migrations.AlterModelOptions(
- name='author',
- options={'verbose_name': 'Author',
- 'verbose_name_plural': 'Authors',
- 'ordering': ['last_name', 'first_name']},
+ name="author",
+ options={
+ "verbose_name": "Author",
+ "verbose_name_plural": "Authors",
+ "ordering": ["last_name", "first_name"],
+ },
),
migrations.AlterModelOptions(
- name='dlc',
- options={'verbose_name': 'DLC',
- 'verbose_name_plural': 'DLCs',
- 'ordering': ['name']},
+ name="dlc",
+ options={
+ "verbose_name": "DLC",
+ "verbose_name_plural": "DLCs",
+ "ordering": ["name"],
+ },
),
]
diff --git a/solenoid/people/migrations/0010_auto_20171218_1819.py b/solenoid/people/migrations/0010_auto_20171218_1819.py
index 1a23b327..cc305ba4 100644
--- a/solenoid/people/migrations/0010_auto_20171218_1819.py
+++ b/solenoid/people/migrations/0010_auto_20171218_1819.py
@@ -5,15 +5,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0009_auto_20170522_1621'),
+ ("people", "0009_auto_20170522_1621"),
]
operations = [
migrations.AlterField(
- model_name='author',
- name='first_name',
+ model_name="author",
+ name="first_name",
field=models.CharField(max_length=30),
),
]
diff --git a/solenoid/people/migrations/0011_auto_20180308_1812.py b/solenoid/people/migrations/0011_auto_20180308_1812.py
index ae88f59f..55a0d924 100644
--- a/solenoid/people/migrations/0011_auto_20180308_1812.py
+++ b/solenoid/people/migrations/0011_auto_20180308_1812.py
@@ -7,16 +7,15 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0010_auto_20171218_1819'),
+ ("people", "0010_auto_20171218_1819"),
]
operations = [
migrations.AlterModelManagers(
- name='liaison',
+ name="liaison",
managers=[
- ('objects_all', django.db.models.manager.Manager()),
+ ("objects_all", django.db.models.manager.Manager()),
],
),
]
diff --git a/solenoid/people/migrations/0012_author__dspace_id.py b/solenoid/people/migrations/0012_author__dspace_id.py
index a27eca25..76f73ef9 100644
--- a/solenoid/people/migrations/0012_author__dspace_id.py
+++ b/solenoid/people/migrations/0012_author__dspace_id.py
@@ -4,16 +4,15 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0011_auto_20180308_1812'),
+ ("people", "0011_auto_20180308_1812"),
]
operations = [
migrations.AddField(
- model_name='author',
- name='_dspace_id',
- field=models.CharField(default='', max_length=32),
+ model_name="author",
+ name="_dspace_id",
+ field=models.CharField(default="", max_length=32),
preserve_default=False,
),
]
diff --git a/solenoid/people/models.py b/solenoid/people/models.py
index 8c5e7936..0332acf0 100644
--- a/solenoid/people/models.py
+++ b/solenoid/people/models.py
@@ -31,16 +31,14 @@ def get_queryset(self):
class ActiveLiaisonManager(models.Manager):
def get_queryset(self):
- return ProtectiveQueryset(self.model, using=self._db).filter(
- active=True)
+ return ProtectiveQueryset(self.model, using=self._db).filter(active=True)
class Liaison(models.Model):
-
class Meta:
verbose_name = "Liaison"
verbose_name_plural = "Liaisons"
- ordering = ['last_name', 'first_name']
+ ordering = ["last_name", "first_name"]
def __str__(self):
return "{self.first_name} {self.last_name}".format(self=self)
@@ -76,11 +74,10 @@ def delete(self):
class DLC(models.Model):
-
class Meta:
verbose_name = "DLC"
verbose_name_plural = "DLCs"
- ordering = ['name']
+ ordering = ["name"]
def __str__(self):
return self.name
@@ -88,23 +85,17 @@ def __str__(self):
name = models.CharField(max_length=100, unique=True)
# DLCs are created as needed during the metadata import process, and we
# don't have liaison information available at that time.
- liaison = models.ForeignKey(
- Liaison,
- blank=True,
- null=True,
- on_delete=models.CASCADE)
+ liaison = models.ForeignKey(Liaison, blank=True, null=True, on_delete=models.CASCADE)
class Author(models.Model):
-
class Meta:
verbose_name = "Author"
verbose_name_plural = "Authors"
- ordering = ['last_name', 'first_name']
+ ordering = ["last_name", "first_name"]
def __str__(self):
- return ("{self.first_name} {self.last_name}/{self.dlc}"
- .format(self=self))
+ return "{self.first_name} {self.last_name}/{self.dlc}".format(self=self)
# Authors may have blank DLCs in a given paper's metadata, but if that
# happens we're going to push it back to the Sympletic layer and request
@@ -113,13 +104,16 @@ def __str__(self):
email = models.EmailField(help_text="Author email address")
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=40)
- _mit_id_hash = models.CharField(max_length=32, help_text="This stores the "
- "*hash* of the MIT ID, not the MIT ID "
- "itself. We want to have a unique "
- "identifier for the author but we don't "
- "want to be storing sensitive data "
- "offsite. Hashing the ID achieves "
- "our goals.")
+ _mit_id_hash = models.CharField(
+ max_length=32,
+ help_text="This stores the "
+ "*hash* of the MIT ID, not the MIT ID "
+ "itself. We want to have a unique "
+ "identifier for the author but we don't "
+ "want to be storing sensitive data "
+ "offsite. Hashing the ID achieves "
+ "our goals.",
+ )
_dspace_id = models.CharField(max_length=32)
@classmethod
@@ -133,9 +127,9 @@ def get_hash(cls, mit_id, salt=None):
# This doesn't have to be cryptographically secure - we just need a
# reasonable non-collision guarantee.
if salt:
- return hashlib.md5((salt + mit_id).encode('utf-8')).hexdigest()
+ return hashlib.md5((salt + mit_id).encode("utf-8")).hexdigest()
else:
- return hashlib.md5(mit_id.encode('utf-8')).hexdigest()
+ return hashlib.md5(mit_id.encode("utf-8")).hexdigest()
@classmethod
def get_by_mit_id(cls, mit_id):
diff --git a/solenoid/people/signals.py b/solenoid/people/signals.py
index 96529cd3..d9f4b18e 100644
--- a/solenoid/people/signals.py
+++ b/solenoid/people/signals.py
@@ -20,11 +20,11 @@ def update_emails_with_dlcs(dlcs, liaison=None):
"""
for dlc in dlcs:
EmailMessage.objects.filter(
- record__author__dlc=dlc,
- date_sent__isnull=True).update(_liaison=liaison)
+ record__author__dlc=dlc, date_sent__isnull=True
+ ).update(_liaison=liaison)
@receiver(post_save, sender=DLC)
def update_emails_with_dlcs_on_save(sender, **kwargs):
- dlc = kwargs['instance']
+ dlc = kwargs["instance"]
update_emails_with_dlcs([dlc], dlc.liaison)
diff --git a/solenoid/people/tests.py b/solenoid/people/tests.py
index e1cc8e61..ebd978ec 100644
--- a/solenoid/people/tests.py
+++ b/solenoid/people/tests.py
@@ -9,30 +9,35 @@
@override_settings(LOGIN_REQUIRED=False)
class LiaisonCreateViewTests(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
def setUp(self):
- self.url = reverse('people:liaison_create')
+ self.url = reverse("people:liaison_create")
self.client = Client()
def test_create_liaison_url_exists(self):
resolve(self.url)
def test_create_liaison_view_renders(self):
- with self.assertTemplateUsed('people/liaison_form.html'):
+ with self.assertTemplateUsed("people/liaison_form.html"):
self.client.get(self.url)
def test_submit_liaison_form_creates_liaison_with_dlcs(self):
- first_name = 'Anastasius'
- last_name = 'Bibliotecarius'
- email_address = 'ab@example.com'
-
- self.client.post(self.url, {'first_name': first_name,
- 'last_name': last_name,
- 'email_address': email_address,
- 'dlc': [1, 2]})
-
- liaison = Liaison.objects.latest('pk')
+ first_name = "Anastasius"
+ last_name = "Bibliotecarius"
+ email_address = "ab@example.com"
+
+ self.client.post(
+ self.url,
+ {
+ "first_name": first_name,
+ "last_name": last_name,
+ "email_address": email_address,
+ "dlc": [1, 2],
+ },
+ )
+
+ liaison = Liaison.objects.latest("pk")
self.assertEqual(liaison.first_name, first_name)
self.assertEqual(liaison.last_name, last_name)
self.assertEqual(liaison.email_address, email_address)
@@ -43,27 +48,25 @@ def test_submit_liaison_form_creates_liaison_with_dlcs(self):
@override_settings(LOGIN_REQUIRED=False)
class LiaisonDeletionTests(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
def setUp(self):
self.client = Client()
def test_create_liaison_url_exists(self):
- resolve(reverse('people:liaison_delete', args=(1,)))
+ resolve(reverse("people:liaison_delete", args=(1,)))
def test_delete_view_1(self):
"""Delete view works for a liaison without attached emails."""
- response = self.client.post(reverse('people:liaison_delete',
- args=(2,)))
- self.assertRedirects(response, reverse('people:liaison_list'))
+ response = self.client.post(reverse("people:liaison_delete", args=(2,)))
+ self.assertRedirects(response, reverse("people:liaison_list"))
self.assertFalse(Liaison.objects.filter(pk=2))
self.assertFalse(Liaison.objects_all.filter(pk=2))
def test_delete_view_2(self):
"""Delete view works for a liaison with attached emails."""
- response = self.client.post(reverse('people:liaison_delete',
- args=(1,)))
- self.assertRedirects(response, reverse('people:liaison_list'))
+ response = self.client.post(reverse("people:liaison_delete", args=(1,)))
+ self.assertRedirects(response, reverse("people:liaison_list"))
self.assertFalse(Liaison.objects.filter(pk=1))
self.assertTrue(Liaison.objects_all.filter(pk=1))
@@ -76,11 +79,11 @@ def test_queryset_delete(self):
@override_settings(LOGIN_REQUIRED=False)
class LiaisonListTests(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
def test_queryset(self):
"""Make sure inactive liaisons don't show."""
- liaison = Liaison.objects.latest('pk')
+ liaison = Liaison.objects.latest("pk")
liaison.active = False
liaison.save()
@@ -97,22 +100,21 @@ def test_queryset(self):
@override_settings(LOGIN_REQUIRED=False)
class LiaisonUpdateTests(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
def test_queryset(self):
"""Make sure inactive liaisons can't be updated."""
- liaison = Liaison.objects.latest('pk')
+ liaison = Liaison.objects.latest("pk")
liaison.active = False
liaison.save()
c = Client()
- response = c.get(reverse('people:liaison_update', args=(liaison.pk,)))
+ response = c.get(reverse("people:liaison_update", args=(liaison.pk,)))
assert response.status_code == 404
@override_settings(LOGIN_REQUIRED=False)
class DLCTests(TestCase):
-
# We used to have an option for letting people manually edit DLCs. We've
# backed this out in favor of only creating DLCs via data import - there
# is name authority work going on upstream we would prefer to rely on.
@@ -120,10 +122,10 @@ class DLCTests(TestCase):
# here, and it last existed at commit
# 489ec2a7f2c921db0cdcaa6d05cf733865c4315d.
def test_can_create_DLC_without_liaison(self):
- DLC.objects.create(name='Test DLC')
+ DLC.objects.create(name="Test DLC")
-@override_settings(DSPACE_SALT='salty')
+@override_settings(DSPACE_SALT="salty")
@override_settings(LOGIN_REQUIRED=False)
class AuthorTests(TestCase):
def tearDown(self):
@@ -136,27 +138,30 @@ def test_mit_id_is_hashed(self):
unique key, so hashing it and storing the hash is fine;
http://infoprotect.mit.edu/what-needs-protecting . Note that MIT IDs in
test data files are already fake and thus not sensitive."""
- dlc = DLC.objects.create(name='Test DLC')
- mit_id = '000000000'
- author = Author.objects.create(dlc=dlc,
- email='foo@example.com',
- first_name='Test',
- last_name='Author',
- mit_id=mit_id)
-
- self.assertEqual(author.mit_id,
- hashlib.md5(mit_id.encode('utf-8')).hexdigest())
+ dlc = DLC.objects.create(name="Test DLC")
+ mit_id = "000000000"
+ author = Author.objects.create(
+ dlc=dlc,
+ email="foo@example.com",
+ first_name="Test",
+ last_name="Author",
+ mit_id=mit_id,
+ )
+
+ self.assertEqual(author.mit_id, hashlib.md5(mit_id.encode("utf-8")).hexdigest())
def test_mit_id_is_not_stored(self):
"""No matter what properties we end up putting on Author, none of them
are the MIT ID."""
- dlc = DLC.objects.create(name='Test DLC')
- mit_id = '000000000'
- author = Author.objects.create(dlc=dlc,
- email='foo@example.com',
- first_name='Test',
- last_name='Author',
- mit_id=mit_id)
+ dlc = DLC.objects.create(name="Test DLC")
+ mit_id = "000000000"
+ author = Author.objects.create(
+ dlc=dlc,
+ email="foo@example.com",
+ first_name="Test",
+ last_name="Author",
+ mit_id=mit_id,
+ )
author_fields = Author._meta.get_fields()
@@ -165,43 +170,49 @@ def test_mit_id_is_not_stored(self):
self.assertNotEqual(getattr(author, field.name), mit_id)
def test_author_set_hash(self):
- dlc = DLC.objects.create(name='Test DLC')
- author = Author.objects.create(dlc=dlc,
- email='foo@example.com',
- first_name='Test',
- last_name='Author',
- mit_id='000000000')
+ dlc = DLC.objects.create(name="Test DLC")
+ author = Author.objects.create(
+ dlc=dlc,
+ email="foo@example.com",
+ first_name="Test",
+ last_name="Author",
+ mit_id="000000000",
+ )
- new_mit_id = '214915295'
+ new_mit_id = "214915295"
author.mit_id = new_mit_id
author.save()
- self.assertEqual(author.mit_id,
- hashlib.md5(new_mit_id.encode('utf-8')).hexdigest())
+ self.assertEqual(
+ author.mit_id, hashlib.md5(new_mit_id.encode("utf-8")).hexdigest()
+ )
def test_author_get_hash(self):
- mit_id = '214915295'
+ mit_id = "214915295"
- self.assertEqual(Author.get_hash(mit_id),
- hashlib.md5(mit_id.encode('utf-8')).hexdigest())
+ self.assertEqual(
+ Author.get_hash(mit_id), hashlib.md5(mit_id.encode("utf-8")).hexdigest()
+ )
def test_author_dspace_id_hash(self):
- dlc = DLC.objects.create(name='Test DLC')
- mit_id = '000000000'
- author = Author.objects.create(dlc=dlc,
- email='foo@example.com',
- first_name='Test',
- last_name='Author',
- mit_id=mit_id,
- dspace_id=mit_id)
-
- self.assertEqual(author.dspace_id,
- hashlib.md5(('salty' + mit_id).encode('utf-8'))
- .hexdigest())
+ dlc = DLC.objects.create(name="Test DLC")
+ mit_id = "000000000"
+ author = Author.objects.create(
+ dlc=dlc,
+ email="foo@example.com",
+ first_name="Test",
+ last_name="Author",
+ mit_id=mit_id,
+ dspace_id=mit_id,
+ )
+
+ self.assertEqual(
+ author.dspace_id, hashlib.md5(("salty" + mit_id).encode("utf-8")).hexdigest()
+ )
class LiaisonModelTests(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
def test_filter_only_shows_active(self):
self.assertEqual(Liaison.objects.count(), 3)
diff --git a/solenoid/people/urls.py b/solenoid/people/urls.py
index f3e78b33..b3fdbb9b 100644
--- a/solenoid/people/urls.py
+++ b/solenoid/people/urls.py
@@ -2,13 +2,13 @@
from . import views
-app_name = 'people'
+app_name = "people"
urlpatterns = [
- re_path(r'^new$', views.LiaisonCreate.as_view(), name='liaison_create'),
- re_path(r'^$', views.LiaisonList.as_view(), name='liaison_list'),
- re_path(r'^(?P<pk>\d+)/$', views.LiaisonUpdate.as_view(),
- name='liaison_update'),
- re_path(r'^delete/(?P<pk>\d+)/$', views.LiaisonDelete.as_view(),
- name='liaison_delete'),
- ]
+ re_path(r"^new$", views.LiaisonCreate.as_view(), name="liaison_create"),
+ re_path(r"^$", views.LiaisonList.as_view(), name="liaison_list"),
+ re_path(r"^(?P<pk>\d+)/$", views.LiaisonUpdate.as_view(), name="liaison_update"),
+ re_path(
+ r"^delete/(?P<pk>\d+)/$", views.LiaisonDelete.as_view(), name="liaison_delete"
+ ),
+]
diff --git a/solenoid/people/views.py b/solenoid/people/views.py
index cc4acff6..b844211f 100644
--- a/solenoid/people/views.py
+++ b/solenoid/people/views.py
@@ -19,23 +19,22 @@
class LiaisonCreate(ConditionalLoginRequiredMixin, CreateView):
model = Liaison
form_class = LiaisonCreateForm
- success_url = reverse_lazy('people:liaison_list')
+ success_url = reverse_lazy("people:liaison_list")
def get_context_data(self, **kwargs):
context = super(LiaisonCreate, self).get_context_data(**kwargs)
- context['title'] = 'Add liaison'
- context['breadcrumbs'] = [
- {'url': reverse_lazy('home'), 'text': 'dashboard'},
- {'url': reverse_lazy('people:liaison_list'),
- 'text': 'manage liaisons'},
- {'url': '#', 'text': 'create liaison'}
+ context["title"] = "Add liaison"
+ context["breadcrumbs"] = [
+ {"url": reverse_lazy("home"), "text": "dashboard"},
+ {"url": reverse_lazy("people:liaison_list"), "text": "manage liaisons"},
+ {"url": "#", "text": "create liaison"},
]
- context['form_id'] = 'liaison-create'
+ context["form_id"] = "liaison-create"
return context
def form_valid(self, form):
liaison = form.save()
- dlcs = form.cleaned_data['dlc']
+ dlcs = form.cleaned_data["dlc"]
liaison.dlc_set.add(*dlcs)
update_emails_with_dlcs(dlcs, liaison)
return HttpResponseRedirect(self.success_url)
@@ -47,13 +46,13 @@ class LiaisonList(ConditionalLoginRequiredMixin, ListView):
def get_context_data(self, **kwargs):
context = super(LiaisonList, self).get_context_data(**kwargs)
- context['dlc_set'] = DLC.objects.all()
- context['title'] = 'Manage liaisons'
- context['breadcrumbs'] = [
- {'url': reverse_lazy('home'), 'text': 'dashboard'},
- {'url': '#', 'text': 'manage liaisons'}
+ context["dlc_set"] = DLC.objects.all()
+ context["title"] = "Manage liaisons"
+ context["breadcrumbs"] = [
+ {"url": reverse_lazy("home"), "text": "dashboard"},
+ {"url": "#", "text": "manage liaisons"},
]
- context['unassigned_dlcs'] = DLC.objects.filter(liaison__isnull=True)
+ context["unassigned_dlcs"] = DLC.objects.filter(liaison__isnull=True)
return context
@@ -61,23 +60,22 @@ class LiaisonUpdate(ConditionalLoginRequiredMixin, UpdateView):
model = Liaison
queryset = Liaison.objects.all()
form_class = LiaisonCreateForm
- success_url = reverse_lazy('people:liaison_list')
+ success_url = reverse_lazy("people:liaison_list")
def get_context_data(self, **kwargs):
context = super(LiaisonUpdate, self).get_context_data(**kwargs)
- context['title'] = 'Edit liaison'
- context['breadcrumbs'] = [
- {'url': reverse_lazy('home'), 'text': 'dashboard'},
- {'url': reverse_lazy('people:liaison_list'),
- 'text': 'manage liaisons'},
- {'url': '#', 'text': 'edit liaison'}
+ context["title"] = "Edit liaison"
+ context["breadcrumbs"] = [
+ {"url": reverse_lazy("home"), "text": "dashboard"},
+ {"url": reverse_lazy("people:liaison_list"), "text": "manage liaisons"},
+ {"url": "#", "text": "edit liaison"},
]
- context['form_id'] = 'liaison-update'
+ context["form_id"] = "liaison-update"
return context
def get_initial(self):
initial = super(LiaisonUpdate, self).get_initial()
- initial['dlc'] = DLC.objects.filter(liaison=self.get_object())
+ initial["dlc"] = DLC.objects.filter(liaison=self.get_object())
return initial
def post(self, request, *args, **kwargs):
@@ -89,7 +87,7 @@ def post(self, request, *args, **kwargs):
if form.is_valid():
try:
- dlcs = DLC.objects.filter(pk__in=request.POST.getlist('dlc'))
+ dlcs = DLC.objects.filter(pk__in=request.POST.getlist("dlc"))
except KeyError:
logger.exception()
raise
@@ -98,19 +96,19 @@ def post(self, request, *args, **kwargs):
# Clear existing DLCs (and related EmailMessage liaisons)
EmailMessage.objects.filter(
- record__author__dlc__in=liaison.dlc_set.all(),
- date_sent__isnull=True).update(_liaison=None)
+ record__author__dlc__in=liaison.dlc_set.all(), date_sent__isnull=True
+ ).update(_liaison=None)
liaison.dlc_set.clear()
# Update liaison and EmailMessages with new DLCs
liaison.dlc_set.add(*dlcs)
update_emails_with_dlcs(dlcs, liaison)
- messages.success(request, 'Liaison updated.')
+ messages.success(request, "Liaison updated.")
return self.form_valid(form)
else:
- messages.warning(request, 'Please correct the errors below.')
+ messages.warning(request, "Please correct the errors below.")
return self.form_invalid(form)
@@ -120,16 +118,14 @@ class LiaisonDelete(ConditionalLoginRequiredMixin, DeleteView):
def get_context_data(self, **kwargs):
context = super(LiaisonDelete, self).get_context_data(**kwargs)
- context['title'] = 'Delete liaison ({name})'.format(
- name=self.get_object())
- context['breadcrumbs'] = [
- {'url': reverse_lazy('home'), 'text': 'dashboard'},
- {'url': reverse_lazy('people:liaison_list'),
- 'text': 'manage liaisons'},
- {'url': '#', 'text': 'delete liaison'}
+ context["title"] = "Delete liaison ({name})".format(name=self.get_object())
+ context["breadcrumbs"] = [
+ {"url": reverse_lazy("home"), "text": "dashboard"},
+ {"url": reverse_lazy("people:liaison_list"), "text": "manage liaisons"},
+ {"url": "#", "text": "delete liaison"},
]
return context
def get_success_url(self):
- messages.success(self.request, 'Liaison deleted.')
- return reverse_lazy('people:liaison_list')
+ messages.success(self.request, "Liaison deleted.")
+ return reverse_lazy("people:liaison_list")
diff --git a/solenoid/records/admin.py b/solenoid/records/admin.py
index 86a6c17c..a228e8c1 100644
--- a/solenoid/records/admin.py
+++ b/solenoid/records/admin.py
@@ -5,8 +5,8 @@
class RecordAdmin(admin.ModelAdmin):
- list_filter = ('acq_method',)
- list_display = ('author', 'publisher_name', 'doi', 'is_sent')
+ list_filter = ("acq_method",)
+ list_display = ("author", "publisher_name", "doi", "is_sent")
admin.site.register(Record, RecordAdmin)
diff --git a/solenoid/records/helpers.py b/solenoid/records/helpers.py
index e01bbfd3..65579198 100644
--- a/solenoid/records/helpers.py
+++ b/solenoid/records/helpers.py
@@ -1,24 +1,38 @@
class Fields(object):
- EMAIL = 'Email'
- DOI = 'Doi'
- FIRST_NAME = 'First Name'
- LAST_NAME = 'Last Name'
- MIT_ID = 'MIT ID'
- CITATION = 'Citation'
- PUBLISHER_NAME = 'Publisher-name'
- ACQ_METHOD = 'C-Method-Of-Acquisition'
- DLC = 'DLC'
- PAPER_ID = 'PaperID'
- MESSAGE = 'C-Publisher-Related-Email-Message'
- PUBDATE = 'Year Published'
- TITLE = 'Title1'
- JOURNAL = 'Journal-name'
- VOLUME = 'Volume'
- ISSUE = 'Issue'
+ EMAIL = "Email"
+ DOI = "Doi"
+ FIRST_NAME = "First Name"
+ LAST_NAME = "Last Name"
+ MIT_ID = "MIT ID"
+ CITATION = "Citation"
+ PUBLISHER_NAME = "Publisher-name"
+ ACQ_METHOD = "C-Method-Of-Acquisition"
+ DLC = "DLC"
+ PAPER_ID = "PaperID"
+ MESSAGE = "C-Publisher-Related-Email-Message"
+ PUBDATE = "Year Published"
+ TITLE = "Title1"
+ JOURNAL = "Journal-name"
+ VOLUME = "Volume"
+ ISSUE = "Issue"
- EXPECTED_FIELDS = [EMAIL, DOI, FIRST_NAME, LAST_NAME, MIT_ID, CITATION,
- PUBLISHER_NAME, ACQ_METHOD, DLC, PAPER_ID, MESSAGE,
- TITLE, JOURNAL, VOLUME, ISSUE]
+ EXPECTED_FIELDS = [
+ EMAIL,
+ DOI,
+ FIRST_NAME,
+ LAST_NAME,
+ MIT_ID,
+ CITATION,
+ PUBLISHER_NAME,
+ ACQ_METHOD,
+ DLC,
+ PAPER_ID,
+ MESSAGE,
+ TITLE,
+ JOURNAL,
+ VOLUME,
+ ISSUE,
+ ]
# This is information we can get from the database if it happens to be
# missing from the Elements metadata, if we already know about
@@ -39,12 +53,22 @@ class Fields(object):
# * CITATION and citation-related data: We can construct a minimal citation
# from other data; alternately, we don't need the other data if we have a
# citation. The Record model will check for this.
- REQUIRED_DATA = list(set(EXPECTED_FIELDS) -
- set(AUTHOR_DATA) - {DOI} - {MESSAGE} - {CITATION} -
- {TITLE} - {JOURNAL} - {VOLUME} - {ISSUE} -
- # Acq method is allowed to be blank.
- {ACQ_METHOD} -
- # We don't need publisher name unless the method of
- # acquisition is FPV (in which case the publisher name
- # is interpolated into the email text).
- {PUBLISHER_NAME})
+ REQUIRED_DATA = list(
+ set(EXPECTED_FIELDS)
+ - set(AUTHOR_DATA)
+ - {DOI}
+ - {MESSAGE}
+ - {CITATION}
+ - {TITLE}
+ - {JOURNAL}
+ - {VOLUME}
+ - {ISSUE}
+ -
+ # Acq method is allowed to be blank.
+ {ACQ_METHOD}
+ -
+ # We don't need publisher name unless the method of
+ # acquisition is FPV (in which case the publisher name
+ # is interpolated into the email text).
+ {PUBLISHER_NAME}
+ )
diff --git a/solenoid/records/migrations/0001_initial.py b/solenoid/records/migrations/0001_initial.py
index 4f5ecd44..504a7c52 100644
--- a/solenoid/records/migrations/0001_initial.py
+++ b/solenoid/records/migrations/0001_initial.py
@@ -6,42 +6,68 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('people', '0001_initial'),
+ ("people", "0001_initial"),
]
operations = [
migrations.CreateModel(
- name='Record',
+ name="Record",
fields=[
- ('id', models.AutoField(auto_created=True,
- primary_key=True, serialize=False,
- verbose_name='ID')),
- ('publisher_name', models.CharField(max_length=50)),
- ('acq_method',
- models.CharField(choices=[('RECRUIT_FROM_AUTHOR_MANUSCRIPT',
- 'RECRUIT_FROM_AUTHOR_MANUSCRIPT'),
- ('RECRUIT_FROM_AUTHOR_FPV',
- 'RECRUIT_FROM_AUTHOR_FPV')],
- max_length=32)),
- ('citation', models.TextField()),
- ('status', models.CharField(choices=[('Unsent', 'Unsent'),
- ('Sent', 'Sent'),
- ('Invalid', 'Invalid')],
- default='Unsent', max_length=7)),
- ('status_timestamp',
- models.DateField(default=datetime.date.today)),
- ('paper_id',
- models.CharField(help_text='This is the Publication ID field '
- 'from Elements; it is supposed to be unique '
- 'but we will not be relying on it as a '
- 'primary key here.', max_length=10)),
- ('author', models.ForeignKey(to='people.Author',
- on_delete=models.CASCADE)),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("publisher_name", models.CharField(max_length=50)),
+ (
+ "acq_method",
+ models.CharField(
+ choices=[
+ (
+ "RECRUIT_FROM_AUTHOR_MANUSCRIPT",
+ "RECRUIT_FROM_AUTHOR_MANUSCRIPT",
+ ),
+ ("RECRUIT_FROM_AUTHOR_FPV", "RECRUIT_FROM_AUTHOR_FPV"),
+ ],
+ max_length=32,
+ ),
+ ),
+ ("citation", models.TextField()),
+ (
+ "status",
+ models.CharField(
+ choices=[
+ ("Unsent", "Unsent"),
+ ("Sent", "Sent"),
+ ("Invalid", "Invalid"),
+ ],
+ default="Unsent",
+ max_length=7,
+ ),
+ ),
+ ("status_timestamp", models.DateField(default=datetime.date.today)),
+ (
+ "paper_id",
+ models.CharField(
+ help_text="This is the Publication ID field "
+ "from Elements; it is supposed to be unique "
+ "but we will not be relying on it as a "
+ "primary key here.",
+ max_length=10,
+ ),
+ ),
+ (
+ "author",
+ models.ForeignKey(to="people.Author", on_delete=models.CASCADE),
+ ),
],
options={
- 'ordering': ['author__dlc', 'author__last_name'],
+ "ordering": ["author__dlc", "author__last_name"],
},
),
]
diff --git a/solenoid/records/migrations/0002_auto_20170420_2027.py b/solenoid/records/migrations/0002_auto_20170420_2027.py
index c9d14caa..5fd481f9 100644
--- a/solenoid/records/migrations/0002_auto_20170420_2027.py
+++ b/solenoid/records/migrations/0002_auto_20170420_2027.py
@@ -5,19 +5,18 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0001_initial'),
+ ("records", "0001_initial"),
]
operations = [
migrations.RemoveField(
- model_name='record',
- name='paper_id',
+ model_name="record",
+ name="paper_id",
),
migrations.AddField(
- model_name='record',
- name='doi',
+ model_name="record",
+ name="doi",
field=models.CharField(default=0, max_length=30),
preserve_default=False,
),
diff --git a/solenoid/records/migrations/0003_auto_20170501_1357.py b/solenoid/records/migrations/0003_auto_20170501_1357.py
index 4eba1253..cc1b6a27 100644
--- a/solenoid/records/migrations/0003_auto_20170501_1357.py
+++ b/solenoid/records/migrations/0003_auto_20170501_1357.py
@@ -5,15 +5,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0002_auto_20170420_2027'),
+ ("records", "0002_auto_20170420_2027"),
]
operations = [
migrations.AlterField(
- model_name='record',
- name='doi',
+ model_name="record",
+ name="doi",
field=models.CharField(max_length=30, blank=True),
),
]
diff --git a/solenoid/records/migrations/0004_record_paper_id.py b/solenoid/records/migrations/0004_record_paper_id.py
index ed1cfa9d..700c8061 100644
--- a/solenoid/records/migrations/0004_record_paper_id.py
+++ b/solenoid/records/migrations/0004_record_paper_id.py
@@ -5,15 +5,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0003_auto_20170501_1357'),
+ ("records", "0003_auto_20170501_1357"),
]
operations = [
migrations.AddField(
- model_name='record',
- name='paper_id',
+ model_name="record",
+ name="paper_id",
field=models.CharField(max_length=10, default=0),
preserve_default=False,
),
diff --git a/solenoid/records/migrations/0005_record_email.py b/solenoid/records/migrations/0005_record_email.py
index bfb92ce4..cb92e0a0 100644
--- a/solenoid/records/migrations/0005_record_email.py
+++ b/solenoid/records/migrations/0005_record_email.py
@@ -5,18 +5,17 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('emails', '0003_auto_20170508_1757'),
- ('records', '0004_record_paper_id'),
+ ("emails", "0003_auto_20170508_1757"),
+ ("records", "0004_record_paper_id"),
]
operations = [
migrations.AddField(
- model_name='record',
- name='email',
- field=models.ForeignKey(blank=True, null=True,
- to='emails.EmailMessage',
- on_delete=models.CASCADE),
+ model_name="record",
+ name="email",
+ field=models.ForeignKey(
+ blank=True, null=True, to="emails.EmailMessage", on_delete=models.CASCADE
+ ),
),
]
diff --git a/solenoid/records/migrations/0006_auto_20170524_1851.py b/solenoid/records/migrations/0006_auto_20170524_1851.py
index c09ab25e..f04a1e52 100644
--- a/solenoid/records/migrations/0006_auto_20170524_1851.py
+++ b/solenoid/records/migrations/0006_auto_20170524_1851.py
@@ -5,18 +5,17 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0005_record_email'),
+ ("records", "0005_record_email"),
]
operations = [
migrations.RemoveField(
- model_name='record',
- name='status',
+ model_name="record",
+ name="status",
),
migrations.RemoveField(
- model_name='record',
- name='status_timestamp',
+ model_name="record",
+ name="status_timestamp",
),
]
diff --git a/solenoid/records/migrations/0007_auto_20170526_1825.py b/solenoid/records/migrations/0007_auto_20170526_1825.py
index 04cf1e58..fab227d3 100644
--- a/solenoid/records/migrations/0007_auto_20170526_1825.py
+++ b/solenoid/records/migrations/0007_auto_20170526_1825.py
@@ -5,25 +5,31 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0006_auto_20170524_1851'),
+ ("records", "0006_auto_20170524_1851"),
]
operations = [
migrations.CreateModel(
- name='Message',
+ name="Message",
fields=[
- ('id', models.AutoField(verbose_name='ID', primary_key=True,
- serialize=False, auto_created=True)),
- ('text', models.TextField()),
+ (
+ "id",
+ models.AutoField(
+ verbose_name="ID",
+ primary_key=True,
+ serialize=False,
+ auto_created=True,
+ ),
+ ),
+ ("text", models.TextField()),
],
),
migrations.AddField(
- model_name='record',
- name='message',
- field=models.ForeignKey(blank=True, null=True,
- to='records.Message',
- on_delete=models.CASCADE),
+ model_name="record",
+ name="message",
+ field=models.ForeignKey(
+ blank=True, null=True, to="records.Message", on_delete=models.CASCADE
+ ),
),
]
diff --git a/solenoid/records/migrations/0008_auto_20170602_1425.py b/solenoid/records/migrations/0008_auto_20170602_1425.py
index 8227ad94..cf413913 100644
--- a/solenoid/records/migrations/0008_auto_20170602_1425.py
+++ b/solenoid/records/migrations/0008_auto_20170602_1425.py
@@ -5,14 +5,13 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0007_auto_20170526_1825'),
+ ("records", "0007_auto_20170526_1825"),
]
operations = [
migrations.AlterUniqueTogether(
- name='record',
- unique_together=set([('author', 'paper_id')]),
+ name="record",
+ unique_together=set([("author", "paper_id")]),
),
]
diff --git a/solenoid/records/migrations/0009_auto_20170602_1731.py b/solenoid/records/migrations/0009_auto_20170602_1731.py
index 82ca8f4f..d0a2ca1e 100644
--- a/solenoid/records/migrations/0009_auto_20170602_1731.py
+++ b/solenoid/records/migrations/0009_auto_20170602_1731.py
@@ -5,22 +5,21 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0008_auto_20170602_1425'),
+ ("records", "0008_auto_20170602_1425"),
]
operations = [
migrations.AddField(
- model_name='record',
- name='elements_id',
+ model_name="record",
+ name="elements_id",
field=models.CharField(max_length=50, default=1),
preserve_default=False,
),
migrations.AddField(
- model_name='record',
- name='source',
- field=models.CharField(max_length=15, default='Manual'),
+ model_name="record",
+ name="source",
+ field=models.CharField(max_length=15, default="Manual"),
preserve_default=False,
),
]
diff --git a/solenoid/records/migrations/0010_auto_20170802_1425.py b/solenoid/records/migrations/0010_auto_20170802_1425.py
index 64993683..8bd48a9d 100644
--- a/solenoid/records/migrations/0010_auto_20170802_1425.py
+++ b/solenoid/records/migrations/0010_auto_20170802_1425.py
@@ -5,15 +5,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0009_auto_20170602_1731'),
+ ("records", "0009_auto_20170602_1731"),
]
operations = [
migrations.AlterField(
- model_name='record',
- name='source',
+ model_name="record",
+ name="source",
field=models.CharField(max_length=25),
),
]
diff --git a/solenoid/records/migrations/0011_auto_20170811_1433.py b/solenoid/records/migrations/0011_auto_20170811_1433.py
index b17fa053..1576c730 100644
--- a/solenoid/records/migrations/0011_auto_20170811_1433.py
+++ b/solenoid/records/migrations/0011_auto_20170811_1433.py
@@ -5,15 +5,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0010_auto_20170802_1425'),
+ ("records", "0010_auto_20170802_1425"),
]
operations = [
migrations.AlterField(
- model_name='record',
- name='publisher_name',
+ model_name="record",
+ name="publisher_name",
field=models.CharField(max_length=75),
),
]
diff --git a/solenoid/records/migrations/0012_auto_20170823_1938.py b/solenoid/records/migrations/0012_auto_20170823_1938.py
index 26cea040..e443a522 100644
--- a/solenoid/records/migrations/0012_auto_20170823_1938.py
+++ b/solenoid/records/migrations/0012_auto_20170823_1938.py
@@ -5,15 +5,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0011_auto_20170811_1433'),
+ ("records", "0011_auto_20170811_1433"),
]
operations = [
migrations.AlterField(
- model_name='record',
- name='doi',
+ model_name="record",
+ name="doi",
field=models.CharField(max_length=45, blank=True),
),
]
diff --git a/solenoid/records/migrations/0013_auto_20171218_1741.py b/solenoid/records/migrations/0013_auto_20171218_1741.py
index 1a8ff253..04bc4b7d 100644
--- a/solenoid/records/migrations/0013_auto_20171218_1741.py
+++ b/solenoid/records/migrations/0013_auto_20171218_1741.py
@@ -5,30 +5,31 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0012_auto_20170823_1938'),
+ ("records", "0012_auto_20170823_1938"),
]
operations = [
migrations.RemoveField(
- model_name='record',
- name='elements_id',
+ model_name="record",
+ name="elements_id",
),
migrations.RemoveField(
- model_name='record',
- name='source',
+ model_name="record",
+ name="source",
),
migrations.AlterField(
- model_name='record',
- name='acq_method',
- field=models.CharField(max_length=32, blank=True,
- choices=[('RECRUIT_FROM_AUTHOR_MANUSCRIPT',
- 'RECRUIT_FROM_AUTHOR_MANUSCRIPT'),
- ('RECRUIT_FROM_AUTHOR_FPV',
- 'RECRUIT_FROM_AUTHOR_FPV'),
- ('', ''),
- ('INDIVIDUAL_DOWNLOAD',
- 'INDIVIDUAL_DOWNLOAD')]),
+ model_name="record",
+ name="acq_method",
+ field=models.CharField(
+ max_length=32,
+ blank=True,
+ choices=[
+ ("RECRUIT_FROM_AUTHOR_MANUSCRIPT", "RECRUIT_FROM_AUTHOR_MANUSCRIPT"),
+ ("RECRUIT_FROM_AUTHOR_FPV", "RECRUIT_FROM_AUTHOR_FPV"),
+ ("", ""),
+ ("INDIVIDUAL_DOWNLOAD", "INDIVIDUAL_DOWNLOAD"),
+ ],
+ ),
),
]
diff --git a/solenoid/records/migrations/0014_auto_20200403_1820.py b/solenoid/records/migrations/0014_auto_20200403_1820.py
index d50cf973..c429f998 100644
--- a/solenoid/records/migrations/0014_auto_20200403_1820.py
+++ b/solenoid/records/migrations/0014_auto_20200403_1820.py
@@ -4,15 +4,14 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0013_auto_20171218_1741'),
+ ("records", "0013_auto_20171218_1741"),
]
operations = [
migrations.AlterField(
- model_name='record',
- name='acq_method',
+ model_name="record",
+ name="acq_method",
field=models.CharField(blank=True, max_length=32),
),
]
diff --git a/solenoid/records/migrations/0015_auto_20200403_1856.py b/solenoid/records/migrations/0015_auto_20200403_1856.py
index 3c5951b2..a99f5e40 100644
--- a/solenoid/records/migrations/0015_auto_20200403_1856.py
+++ b/solenoid/records/migrations/0015_auto_20200403_1856.py
@@ -4,22 +4,22 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0014_auto_20200403_1820'),
+ ("records", "0014_auto_20200403_1820"),
]
operations = [
- migrations.RenameField(model_name='record',
- old_name='message',
- new_name='message_old'),
- migrations.AddField(model_name='record',
- name='message',
- field=models.TextField(blank=True, default=''),
- preserve_default=False),
- migrations.RemoveField(model_name='record',
- name='message_old'),
+ migrations.RenameField(
+ model_name="record", old_name="message", new_name="message_old"
+ ),
+ migrations.AddField(
+ model_name="record",
+ name="message",
+ field=models.TextField(blank=True, default=""),
+ preserve_default=False,
+ ),
+ migrations.RemoveField(model_name="record", name="message_old"),
migrations.DeleteModel(
- name='Message',
+ name="Message",
),
]
diff --git a/solenoid/records/migrations/0016_auto_20210716_1724.py b/solenoid/records/migrations/0016_auto_20210716_1724.py
index 79465b50..78b03301 100644
--- a/solenoid/records/migrations/0016_auto_20210716_1724.py
+++ b/solenoid/records/migrations/0016_auto_20210716_1724.py
@@ -4,30 +4,29 @@
class Migration(migrations.Migration):
-
dependencies = [
- ('records', '0015_auto_20200403_1856'),
+ ("records", "0015_auto_20200403_1856"),
]
operations = [
migrations.AlterField(
- model_name='record',
- name='acq_method',
+ model_name="record",
+ name="acq_method",
field=models.CharField(blank=True, max_length=255),
),
migrations.AlterField(
- model_name='record',
- name='doi',
+ model_name="record",
+ name="doi",
field=models.CharField(blank=True, max_length=255),
),
migrations.AlterField(
- model_name='record',
- name='paper_id',
+ model_name="record",
+ name="paper_id",
field=models.CharField(max_length=255),
),
migrations.AlterField(
- model_name='record',
- name='publisher_name',
+ model_name="record",
+ name="publisher_name",
field=models.CharField(max_length=255),
),
]
diff --git a/solenoid/records/models.py b/solenoid/records/models.py
index 74502b58..1f4f76ee 100644
--- a/solenoid/records/models.py
+++ b/solenoid/records/models.py
@@ -13,30 +13,24 @@
class Record(models.Model):
"""The Record contains:
- * citation information for an MIT author publication
- * plus all of the other data from Elements we will need to construct an
- email
+ * citation information for an MIT author publication
+ * plus all of the other data from Elements we will need to construct an
+ email
"""
class Meta:
- ordering = ['author__dlc', 'author__last_name']
+ ordering = ["author__dlc", "author__last_name"]
# PaperID is not unique, because papers with multiple MIT authors may
# show up in data imports multiple times. However, we should only see
# them once per author.
- unique_together = (('author', 'paper_id'))
+ unique_together = ("author", "paper_id")
- author = models.ForeignKey(
- Author,
- on_delete=models.CASCADE)
+ author = models.ForeignKey(Author, on_delete=models.CASCADE)
email = models.ForeignKey(
- EmailMessage,
- blank=True,
- null=True,
- on_delete=models.CASCADE)
+ EmailMessage, blank=True, null=True, on_delete=models.CASCADE
+ )
publisher_name = models.CharField(max_length=255)
- acq_method = models.CharField(
- max_length=255,
- blank=True)
+ acq_method = models.CharField(max_length=255, blank=True)
citation = models.TextField()
doi = models.CharField(max_length=255, blank=True)
# This is the unique ID within Elements, which is NOT the same as the
@@ -49,15 +43,17 @@ class Meta:
message = models.TextField(blank=True)
def __str__(self):
- return ("{self.author.last_name}, {self.author.first_name} "
- "({self.paper_id})".format(self=self))
+ return (
+ "{self.author.last_name}, {self.author.first_name} "
+ "({self.paper_id})".format(self=self)
+ )
def save(self, *args, **kwargs):
# blank=False by default in TextFields, but this applies only to *form*
# validation, not to *instance* validation - django will happily save
# blank strings to the database, and we don't want it to.
if not self.citation:
- raise ValidationError('Citation cannot be blank')
+ raise ValidationError("Citation cannot be blank")
return super(Record, self).save(*args, **kwargs)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~ STATIC METHODS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -80,27 +76,28 @@ def create_citation(paper_data):
Volume(Issue), pp.-pp. doi:XX.XXXXX.
We don't appear to get page number information, so we'll skip that.
"""
- citation = '{last}, {first_init}. '.format(
- last=paper_data[Fields.LAST_NAME],
- first_init=paper_data[Fields.FIRST_NAME][0])
+ citation = "{last}, {first_init}. ".format(
+ last=paper_data[Fields.LAST_NAME], first_init=paper_data[Fields.FIRST_NAME][0]
+ )
if paper_data[Fields.PUBDATE]:
- citation += f'({paper_data[Fields.PUBDATE][0:4]}). '
+ citation += f"({paper_data[Fields.PUBDATE][0:4]}). "
- citation += '{title}. {journal}'.format(
- title=paper_data[Fields.TITLE],
- journal=paper_data[Fields.JOURNAL])
+ citation += "{title}. {journal}".format(
+ title=paper_data[Fields.TITLE], journal=paper_data[Fields.JOURNAL]
+ )
if paper_data[Fields.VOLUME] and paper_data[Fields.ISSUE]:
- citation += ', {volume}({issue})'.format(
- volume=paper_data[Fields.VOLUME],
- issue=paper_data[Fields.ISSUE])
+ citation += ", {volume}({issue})".format(
+ volume=paper_data[Fields.VOLUME], issue=paper_data[Fields.ISSUE]
+ )
- citation += '.'
+ citation += "."
if paper_data[Fields.DOI]:
- citation += (' doi:{doi}'
- ''.format(doi=paper_data[Fields.DOI]))
+ citation += ' doi:{doi}' "".format(
+ doi=paper_data[Fields.DOI]
+ )
return citation
@staticmethod
@@ -122,9 +119,10 @@ def get_or_create_from_data(author, paper_data):
the record.
"""
try:
- record = Record.objects.get(paper_id=paper_data[Fields.PAPER_ID],
- author=author)
- logger.info('Got an existing record')
+ record = Record.objects.get(
+ paper_id=paper_data[Fields.PAPER_ID], author=author
+ )
+ logger.info("Got an existing record")
return record, False
except Record.DoesNotExist:
citation = Record._get_citation(paper_data)
@@ -136,8 +134,9 @@ def get_or_create_from_data(author, paper_data):
citation=citation,
doi=paper_data[Fields.DOI],
paper_id=paper_data[Fields.PAPER_ID],
- message=paper_data[Fields.MESSAGE])
- logger.info('record created')
+ message=paper_data[Fields.MESSAGE],
+ )
+ logger.info("record created")
return record, True
@@ -152,10 +151,9 @@ def get_duplicates(author, paper_data):
(Same citation doesn't suffice, as we may legitimately receive a paper
with the same citation multiple times, once per author.)"""
- dupes = Record.objects.filter(author=author,
- citation=paper_data[Fields.CITATION]
- ).exclude(
- paper_id=paper_data[Fields.PAPER_ID])
+ dupes = Record.objects.filter(
+ author=author, citation=paper_data[Fields.CITATION]
+ ).exclude(paper_id=paper_data[Fields.PAPER_ID])
if dupes:
return dupes
else:
@@ -171,7 +169,7 @@ def is_record_creatable(paper_data):
bool: True if record can be created, False otherwise.
"""
try:
- if paper_data[Fields.ACQ_METHOD] == 'RECRUIT_FROM_AUTHOR_FPV':
+ if paper_data[Fields.ACQ_METHOD] == "RECRUIT_FROM_AUTHOR_FPV":
assert bool(paper_data[Fields.DOI])
assert bool(paper_data[Fields.PUBLISHER_NAME])
@@ -196,15 +194,11 @@ def paper_requested(paper_data):
"""
# Find all records of the same paper, if any.
- records = Record.objects.filter(
- paper_id=paper_data[Fields.PAPER_ID]
- )
+ records = Record.objects.filter(paper_id=paper_data[Fields.PAPER_ID])
# Return True if we've already sent an email for any of those records;
# False otherwise.
- return any([record.email.date_sent
- for record in records
- if record.email])
+ return any([record.email.date_sent for record in records if record.email])
@staticmethod
def is_data_valid(paper_data):
@@ -213,10 +207,10 @@ def is_data_valid(paper_data):
For citation data, we'll accept *either* a preconstructed citation,
*or* enough data to construct a minimal citation ourselves."""
- citable = bool(paper_data[Fields.CITATION]) or \
- all([bool(paper_data[x]) for x in Fields.CITATION_DATA])
- return (all([bool(paper_data[x]) for x in Fields.REQUIRED_DATA]) and
- citable)
+ citable = bool(paper_data[Fields.CITATION]) or all(
+ [bool(paper_data[x]) for x in Fields.CITATION_DATA]
+ )
+ return all([bool(paper_data[x]) for x in Fields.REQUIRED_DATA]) and citable
# ~~~~~~~~~~~~~~~~~~~~~~~~~~ INSTANCE METHODS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -225,10 +219,14 @@ def update_if_needed(self, author, paper_data):
discrepancies with the existing record. If so, updates it and returns
True. If not, returns False."""
changed = False
- if not all([self.author == author,
- self.publisher_name == paper_data[Fields.PUBLISHER_NAME],
- self.acq_method == paper_data[Fields.ACQ_METHOD],
- self.doi == paper_data[Fields.DOI]]):
+ if not all(
+ [
+ self.author == author,
+ self.publisher_name == paper_data[Fields.PUBLISHER_NAME],
+ self.acq_method == paper_data[Fields.ACQ_METHOD],
+ self.doi == paper_data[Fields.DOI],
+ ]
+ ):
self.author = author
self.publisher_name = paper_data[Fields.PUBLISHER_NAME]
self.acq_method = paper_data[Fields.ACQ_METHOD]
@@ -258,18 +256,19 @@ def update_if_needed(self, author, paper_data):
@property
def fpv_message(self):
- msg = Template('[Note: $publisher_name allows authors to download '
- 'and deposit the final published article, but does not '
- 'allow the Libraries to perform the downloading. If '
- 'you follow this link, download the article, and '
- 'attach it to an email reply, we can deposit it on '
- 'your behalf: http://'
- 'libproxy.mit.edu/login?url=https://dx.doi.org/'
- '$doi]')
- if self.acq_method == 'RECRUIT_FROM_AUTHOR_FPV':
- return msg.substitute(publisher_name=self.publisher_name,
- doi=self.doi)
+ msg = Template(
+ "[Note: $publisher_name allows authors to download "
+ "and deposit the final published article, but does not "
+ "allow the Libraries to perform the downloading. If "
+ "you follow this link, download the article, and "
+ "attach it to an email reply, we can deposit it on "
+ 'your behalf: http://'
+ "libproxy.mit.edu/login?url=https://dx.doi.org/"
+ "$doi]"
+ )
+ if self.acq_method == "RECRUIT_FROM_AUTHOR_FPV":
+ return msg.substitute(publisher_name=self.publisher_name, doi=self.doi)
else:
return None
@@ -283,4 +282,4 @@ def is_sent(self):
@property
def is_valid(self):
# If acq_method is FPV, we must have the DOI.
- return (self.acq_method != 'RECRUIT_FROM_AUTHOR_FPV' or bool(self.doi))
+ return self.acq_method != "RECRUIT_FROM_AUTHOR_FPV" or bool(self.doi)
diff --git a/solenoid/records/tasks.py b/solenoid/records/tasks.py
index 68e1c75f..efe59cf5 100644
--- a/solenoid/records/tasks.py
+++ b/solenoid/records/tasks.py
@@ -9,8 +9,8 @@
from solenoid.elements.xml_handlers import (
parse_author_pubs_xml,
parse_journal_policies,
- parse_paper_xml
- )
+ parse_paper_xml,
+)
from solenoid.people.models import Author
from .helpers import Fields
@@ -20,9 +20,7 @@
logger = get_task_logger(__name__)
-@shared_task(bind=True,
- autoretry_for=(RetryError,),
- retry_backoff=True)
+@shared_task(bind=True, autoretry_for=(RetryError,), retry_backoff=True)
def task_import_papers_for_author(self, author_url, author_data, author):
RESULTS = {}
logger.info("Import task started")
@@ -32,18 +30,19 @@ def task_import_papers_for_author(self, author_url, author_data, author):
logger.info("Parsing author publications list")
pub_ids = parse_author_pubs_xml(
- get_paged(f'{author_url}/publications?&detail=full'),
- author_data
- )
+ get_paged(f"{author_url}/publications?&detail=full"), author_data
+ )
total = len(pub_ids)
logger.info(f"Finished retrieving publication IDs to import for author.")
for i, paper in enumerate(pub_ids):
- paper_id = paper['id']
+ paper_id = paper["id"]
if not self.request.called_directly:
progress_recorder.set_progress(
- i, total, description=f'Importing paper #{paper_id} by {author_data[Fields.LAST_NAME]}, {i} of {total}'
- )
+ i,
+ total,
+ description=f"Importing paper #{paper_id} by {author_data[Fields.LAST_NAME]}, {i} of {total}",
+ )
paper_data = _get_paper_data_from_elements(paper_id, author_data)
author_record = Author.objects.get(pk=author)
checks = _run_checks_on_paper(paper_data, author_record)
@@ -51,14 +50,13 @@ def task_import_papers_for_author(self, author_url, author_data, author):
RESULTS[paper_id] = checks
continue
- result = _create_or_update_record_from_paper_data(
- paper_data, author_record
- )
+ result = _create_or_update_record_from_paper_data(paper_data, author_record)
RESULTS[paper_id] = result
- logger.info(f'Finished importing paper #{paper_id}')
+ logger.info(f"Finished importing paper #{paper_id}")
- logger.info(f"Import of all papers by author "
- f"{author_data['ELEMENTS ID']} completed")
+ logger.info(
+ f"Import of all papers by author " f"{author_data['ELEMENTS ID']} completed"
+ )
return RESULTS
@@ -69,34 +67,35 @@ def _create_or_update_record_from_paper_data(paper_data, author):
if Record.is_record_creatable(paper_data):
record, created = Record.get_or_create_from_data(author, paper_data)
if created:
- logger.info(f'Record {record} was created from paper {paper_id}')
- return 'Paper was successfully imported.'
+ logger.info(f"Record {record} was created from paper {paper_id}")
+ return "Paper was successfully imported."
else:
updated = record.update_if_needed(author, paper_data)
if updated:
- return 'Record updated with new data from Elements.'
+ return "Record updated with new data from Elements."
else:
- return 'Paper already in database, no updates made.'
+ return "Paper already in database, no updates made."
- logger.warning(f'Cannot create record for paper {paper_id} '
- f'with author {author_name}')
- return ('Paper could not be added to the database. Please make '
- 'sure data is correct in Elements and try again.')
+ logger.warning(
+ f"Cannot create record for paper {paper_id} " f"with author {author_name}"
+ )
+ return (
+ "Paper could not be added to the database. Please make "
+ "sure data is correct in Elements and try again."
+ )
def _get_paper_data_from_elements(paper_id, author_data):
- logger.info(f'Importing data for paper {paper_id}')
+ logger.info(f"Importing data for paper {paper_id}")
- paper_url = f'{settings.ELEMENTS_ENDPOINT}publications/{paper_id}'
+ paper_url = f"{settings.ELEMENTS_ENDPOINT}publications/{paper_id}"
paper_xml = get_from_elements(paper_url)
paper_data = parse_paper_xml(paper_xml)
paper_data.update(author_data)
journal_url = paper_data["Journal-elements-url"]
if bool(journal_url):
- policy_xml = get_from_elements(
- f'{journal_url}/policies?detail=full'
- )
+ policy_xml = get_from_elements(f"{journal_url}/policies?detail=full")
policy_data = parse_journal_policies(policy_xml)
paper_data.update(policy_data)
@@ -109,29 +108,34 @@ def _run_checks_on_paper(paper_data, author):
# Check that data provided from Elements is complete
if not Record.is_data_valid(paper_data):
- logger.info(f'Invalid data for paper {paper_id}')
- return (f'Publication #{paper_id} by '
- f'{author_name} is missing required data '
- f'(one or more of {", ".join(Fields.REQUIRED_DATA)}), so '
- f'this citation will not be imported.')
+ logger.info(f"Invalid data for paper {paper_id}")
+ return (
+ f"Publication #{paper_id} by "
+ f"{author_name} is missing required data "
+ f'(one or more of {", ".join(Fields.REQUIRED_DATA)}), so '
+ f"this citation will not be imported."
+ )
# Check that paper hasn't already been requested
if Record.paper_requested(paper_data):
- logger.info(f'Paper {paper_id} already requested, '
- f'record not imported')
- return (f'Publication #{paper_id} by '
- f'{author_name} has already been requested '
- f'(possibly from another author), so this record will not '
- f'be imported. Please add this citation manually to an '
- f'email, and manually mark it as requested in Symplectic, '
- 'if you would like to request it from this author also.')
+ logger.info(f"Paper {paper_id} already requested, " f"record not imported")
+ return (
+ f"Publication #{paper_id} by "
+ f"{author_name} has already been requested "
+ f"(possibly from another author), so this record will not "
+ f"be imported. Please add this citation manually to an "
+ f"email, and manually mark it as requested in Symplectic, "
+ "if you would like to request it from this author also."
+ )
# Check that paper doesn't already exist in database
dupes = Record.get_duplicates(author, paper_data)
if dupes:
- dupe_list = [id for id in dupes.values_list('paper_id', flat=True)]
- logger.info(f'Duplicates of paper {paper_id}: {dupes}')
- return (f'Publication #{paper_id} by {author_name} duplicates the '
- f'following record(s) already in the database: '
- f'{", ".join(dupe_list)}. Please merge #{paper_id} into an '
- f'existing record in Elements. It will not be imported.')
+ dupe_list = [id for id in dupes.values_list("paper_id", flat=True)]
+ logger.info(f"Duplicates of paper {paper_id}: {dupes}")
+ return (
+ f"Publication #{paper_id} by {author_name} duplicates the "
+ f"following record(s) already in the database: "
+ f'{", ".join(dupe_list)}. Please merge #{paper_id} into an '
+ f"existing record in Elements. It will not be imported."
+ )
diff --git a/solenoid/records/tests/test_models.py b/solenoid/records/tests/test_models.py
index 22521569..06bcd626 100644
--- a/solenoid/records/tests/test_models.py
+++ b/solenoid/records/tests/test_models.py
@@ -11,40 +11,40 @@
class RecordModelTest(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
def setUp(self):
# A dict containing all the EXPECTED_FIELDS.
self.paper_data = {
- Fields.EMAIL: 'test@example.com',
- Fields.DOI: '10.5137/527va',
- Fields.FIRST_NAME: 'William Barton',
- Fields.LAST_NAME: 'Rogers',
- Fields.MIT_ID: '1',
- Fields.PUBLISHER_NAME: 'Haus of Books',
- Fields.ACQ_METHOD: 'RECRUIT_FROM_AUTHOR_FPV',
+ Fields.EMAIL: "test@example.com",
+ Fields.DOI: "10.5137/527va",
+ Fields.FIRST_NAME: "William Barton",
+ Fields.LAST_NAME: "Rogers",
+ Fields.MIT_ID: "1",
+ Fields.PUBLISHER_NAME: "Haus of Books",
+ Fields.ACQ_METHOD: "RECRUIT_FROM_AUTHOR_FPV",
Fields.DLC: "President's Office",
- Fields.PAPER_ID: '895327',
- Fields.MESSAGE: '',
+ Fields.PAPER_ID: "895327",
+ Fields.MESSAGE: "",
}
# MIT physics professor Frank Wilczek coauthored this paper, for which
# he won the Nobel prize in 2004.
self.citation_data = {
- Fields.FIRST_NAME: 'Frank',
- Fields.LAST_NAME: 'Wilczek',
- Fields.PUBDATE: '19730625',
- Fields.VOLUME: '30',
- Fields.ISSUE: '26',
- Fields.DOI: '10.1103/PhysRevLett.30.1343',
- Fields.JOURNAL: 'Physical Review Letters',
- Fields.TITLE: 'Ultraviolet behavior of non-abelian gauge theories'
+ Fields.FIRST_NAME: "Frank",
+ Fields.LAST_NAME: "Wilczek",
+ Fields.PUBDATE: "19730625",
+ Fields.VOLUME: "30",
+ Fields.ISSUE: "26",
+ Fields.DOI: "10.1103/PhysRevLett.30.1343",
+ Fields.JOURNAL: "Physical Review Letters",
+ Fields.TITLE: "Ultraviolet behavior of non-abelian gauge theories",
}
# need to actually test create_citation
def test_is_metadata_valid_yes_citation_no_citation_data(self):
metadata = copy.copy(self.paper_data)
- metadata[Fields.CITATION] = 'This is a citation'
+ metadata[Fields.CITATION] = "This is a citation"
metadata[Fields.TITLE] = None
metadata[Fields.JOURNAL] = None
assert Record.is_data_valid(metadata)
@@ -52,8 +52,8 @@ def test_is_metadata_valid_yes_citation_no_citation_data(self):
def test_is_metadata_valid_no_citation_yes_citation_data(self):
metadata = copy.copy(self.paper_data)
metadata[Fields.CITATION] = None
- metadata[Fields.TITLE] = 'This is a paper title'
- metadata[Fields.JOURNAL] = 'Journal of Awesomeness'
+ metadata[Fields.TITLE] = "This is a paper title"
+ metadata[Fields.JOURNAL] = "Journal of Awesomeness"
assert Record.is_data_valid(metadata)
def test_is_metadata_valid_no_citation_no_citation_data(self):
@@ -66,77 +66,77 @@ def test_is_metadata_valid_no_citation_no_citation_data(self):
def test_is_record_creatable(self):
# Data includes the basics? Good!
data = {
- Fields.PUBLISHER_NAME: 'foo',
- Fields.ACQ_METHOD: 'RECRUIT_FROM_AUTHOR_MANUSCRIPT',
- Fields.CITATION: 'nonempty'
+ Fields.PUBLISHER_NAME: "foo",
+ Fields.ACQ_METHOD: "RECRUIT_FROM_AUTHOR_MANUSCRIPT",
+ Fields.CITATION: "nonempty",
}
assert Record.is_record_creatable(data)
data = {
- Fields.PUBLISHER_NAME: 'foo',
- Fields.ACQ_METHOD: '',
- Fields.CITATION: 'nonempty'
+ Fields.PUBLISHER_NAME: "foo",
+ Fields.ACQ_METHOD: "",
+ Fields.CITATION: "nonempty",
}
assert Record.is_record_creatable(data)
# Missing data for required basics? Bad!
data = copy.copy(self.paper_data)
data.update(self.citation_data)
- data[Fields.CITATION] = ''
- data[Fields.FIRST_NAME] = ''
+ data[Fields.CITATION] = ""
+ data[Fields.FIRST_NAME] = ""
assert not Record.is_record_creatable(data)
data = {
- Fields.PUBLISHER_NAME: 'foo',
+ Fields.PUBLISHER_NAME: "foo",
# No acq method column at all
- Fields.CITATION: 'nonempty'
+ Fields.CITATION: "nonempty",
}
assert not Record.is_record_creatable(data)
# RECRUIT_FROM_AUTHOR_FPV requires a DOI.
data = {
- Fields.PUBLISHER_NAME: 'foo',
- Fields.ACQ_METHOD: 'RECRUIT_FROM_AUTHOR_FPV',
- Fields.CITATION: 'nonempty',
- Fields.DOI: ''
+ Fields.PUBLISHER_NAME: "foo",
+ Fields.ACQ_METHOD: "RECRUIT_FROM_AUTHOR_FPV",
+ Fields.CITATION: "nonempty",
+ Fields.DOI: "",
}
assert not Record.is_record_creatable(data)
data = {
- Fields.PUBLISHER_NAME: 'foo',
- Fields.ACQ_METHOD: 'RECRUIT_FROM_AUTHOR_FPV',
- Fields.CITATION: 'nonempty',
- Fields.DOI: '4217896'
+ Fields.PUBLISHER_NAME: "foo",
+ Fields.ACQ_METHOD: "RECRUIT_FROM_AUTHOR_FPV",
+ Fields.CITATION: "nonempty",
+ Fields.DOI: "4217896",
}
assert Record.is_record_creatable(data)
def test_is_valid_fpv_but_no_doi(self):
record = Record.objects.get(pk=1)
# RECRUIT_FROM_AUTHOR_FPV and no DOI: invalid
- record.acq_method = 'RECRUIT_FROM_AUTHOR_FPV'
- record.doi = ''
+ record.acq_method = "RECRUIT_FROM_AUTHOR_FPV"
+ record.doi = ""
record.save()
assert not record.is_valid
def test_is_valid_fpv_but_has_doi(self):
record = Record.objects.get(pk=1)
# RECRUIT_FROM_AUTHOR_FPV and yes DOI: valid
- record.doi = '53297853'
+ record.doi = "53297853"
record.save()
assert record.is_valid
def test_is_valid_not_fpv_and_no_doi(self):
record = Record.objects.get(pk=1)
# RECRUIT_FROM_AUTHOR_MANUSCRIPT and no DOI: valid
- record.acq_method = 'RECRUIT_FROM_AUTHOR_MANUSCRIPT'
- record.doi = ''
+ record.acq_method = "RECRUIT_FROM_AUTHOR_MANUSCRIPT"
+ record.doi = ""
record.save()
assert record.is_valid
def test_is_valid_not_fpv_and_not_doi(self):
record = Record.objects.get(pk=1)
# RECRUIT_FROM_AUTHOR_MANUSCRIPT and yes DOI: valid
- record.doi = '53297853'
+ record.doi = "53297853"
record.save()
assert record.is_valid
@@ -148,7 +148,7 @@ def test_is_valid_no_citation(self):
def test_is_valid_blank_citation(self):
record = Record.objects.get(pk=1)
- record.citation = ''
+ record.citation = ""
with self.assertRaises(ValidationError):
record.save()
@@ -171,65 +171,67 @@ def test_is_sent(self):
def test_fpv_message(self):
record = Record.objects.get(pk=1)
- record.acq_method = 'not fpv'
+ record.acq_method = "not fpv"
record.save()
assert record.fpv_message is None
- fake_doi = 'fake_doi'
- publisher_name = 'fake_publisher'
- record.acq_method = 'RECRUIT_FROM_AUTHOR_FPV'
+ fake_doi = "fake_doi"
+ publisher_name = "fake_publisher"
+ record.acq_method = "RECRUIT_FROM_AUTHOR_FPV"
record.doi = fake_doi
record.publisher_name = publisher_name
record.save()
- msg = Template('[Note: $publisher_name allows authors to download '
- 'and deposit the final published article, but does not '
- 'allow the Libraries to perform the downloading. If '
- 'you follow this link, download the article, and '
- 'attach it to an email reply, we can deposit it on '
- 'your behalf: http://'
- 'libproxy.mit.edu/login?url=https://dx.doi.org/'
- '$doi]')
+ msg = Template(
+ "[Note: $publisher_name allows authors to download "
+ "and deposit the final published article, but does not "
+ "allow the Libraries to perform the downloading. If "
+ "you follow this link, download the article, and "
+ "attach it to an email reply, we can deposit it on "
+ 'your behalf: http://'
+ "libproxy.mit.edu/login?url=https://dx.doi.org/"
+ "$doi]"
+ )
assert record.fpv_message == msg.substitute(
- publisher_name=publisher_name, doi=fake_doi)
+ publisher_name=publisher_name, doi=fake_doi
+ )
def test_get_or_create_from_data(self):
author = Author.objects.get(pk=1)
- record, created = Record.get_or_create_from_data(
- author, {Fields.PAPER_ID: 1})
+ record, created = Record.get_or_create_from_data(author, {Fields.PAPER_ID: 1})
assert record.pk == 1
assert not created
row = {
- Fields.PUBLISHER_NAME: 'publisher_name',
- Fields.ACQ_METHOD: 'RECRUIT_FROM_AUTHOR_FPV',
- Fields.CITATION: 'citation',
- Fields.DOI: 'doi',
- Fields.PAPER_ID: 'paper_id',
- Fields.MESSAGE: ''
+ Fields.PUBLISHER_NAME: "publisher_name",
+ Fields.ACQ_METHOD: "RECRUIT_FROM_AUTHOR_FPV",
+ Fields.CITATION: "citation",
+ Fields.DOI: "doi",
+ Fields.PAPER_ID: "paper_id",
+ Fields.MESSAGE: "",
}
record, created = Record.get_or_create_from_data(author, row)
assert created
- assert record.publisher_name == 'publisher_name'
- assert record.acq_method == 'RECRUIT_FROM_AUTHOR_FPV'
- assert record.citation == 'citation'
- assert record.doi == 'doi'
- assert record.paper_id == 'paper_id'
- assert record.message == ''
+ assert record.publisher_name == "publisher_name"
+ assert record.acq_method == "RECRUIT_FROM_AUTHOR_FPV"
+ assert record.citation == "citation"
+ assert record.doi == "doi"
+ assert record.paper_id == "paper_id"
+ assert record.message == ""
def test_get_duplicates_1(self):
"""There are no duplicates: this should return None."""
metadata = {
- Fields.PUBLISHER_NAME: 'publisher_name',
- Fields.ACQ_METHOD: 'RECRUIT_FROM_AUTHOR_FPV',
- Fields.CITATION: 'citation',
- Fields.DOI: 'doi',
- Fields.PAPER_ID: 'paper_id',
+ Fields.PUBLISHER_NAME: "publisher_name",
+ Fields.ACQ_METHOD: "RECRUIT_FROM_AUTHOR_FPV",
+ Fields.CITATION: "citation",
+ Fields.DOI: "doi",
+ Fields.PAPER_ID: "paper_id",
}
author = Author.objects.get(pk=1)
@@ -240,28 +242,29 @@ def test_get_duplicates_2(self):
this should return None."""
metadata = {
- Fields.PUBLISHER_NAME: 'Wiley',
- Fields.ACQ_METHOD: 'RECRUIT_FROM_AUTHOR_FPV',
- Fields.CITATION: ('Fermi, Enrico. Paper name. Some journal or '
- 'other. 145:5 (2016)'),
- Fields.DOI: '10.1412/4678156',
- Fields.PAPER_ID: 'paper_id',
- Fields.FIRST_NAME: 'Different',
- Fields.LAST_NAME: 'Author',
+ Fields.PUBLISHER_NAME: "Wiley",
+ Fields.ACQ_METHOD: "RECRUIT_FROM_AUTHOR_FPV",
+ Fields.CITATION: (
+ "Fermi, Enrico. Paper name. Some journal or " "other. 145:5 (2016)"
+ ),
+ Fields.DOI: "10.1412/4678156",
+ Fields.PAPER_ID: "paper_id",
+ Fields.FIRST_NAME: "Different",
+ Fields.LAST_NAME: "Author",
Fields.MIT_ID: 214614,
}
# Check assumption - we don't have this author in the db at all, so
# we can't have a record associated with this author yet
- id_hash = Author.get_hash('214614')
+ id_hash = Author.get_hash("214614")
assert not Author.objects.filter(_mit_id_hash=id_hash)
author = Author.objects.create(
- first_name='Different',
- last_name='Author',
+ first_name="Different",
+ last_name="Author",
_mit_id_hash=id_hash,
dlc=DLC.objects.first(),
- email='da@example.com'
+ email="da@example.com",
)
assert Record.get_duplicates(author, metadata) is None
@@ -274,17 +277,18 @@ def test_get_duplicates_3(self):
# This is a duplicate of record #2, except for the paper ID.
metadata = {
- Fields.PUBLISHER_NAME: 'Nature',
- Fields.ACQ_METHOD: 'RECRUIT_FROM_AUTHOR_FPV',
- Fields.CITATION: ('Tonegawa, Susumu. Paper name. Some journal or '
- 'other. 31:4 (2012)'),
- Fields.DOI: '10.1240.2/4914241',
- Fields.PAPER_ID: '24618',
- Fields.FIRST_NAME: 'Susumu',
- Fields.LAST_NAME: 'Tonegawa',
- Fields.MIT_ID: '2',
+ Fields.PUBLISHER_NAME: "Nature",
+ Fields.ACQ_METHOD: "RECRUIT_FROM_AUTHOR_FPV",
+ Fields.CITATION: (
+ "Tonegawa, Susumu. Paper name. Some journal or " "other. 31:4 (2012)"
+ ),
+ Fields.DOI: "10.1240.2/4914241",
+ Fields.PAPER_ID: "24618",
+ Fields.FIRST_NAME: "Susumu",
+ Fields.LAST_NAME: "Tonegawa",
+ Fields.MIT_ID: "2",
}
- author = Author.objects.get(last_name='Tonegawa')
+ author = Author.objects.get(last_name="Tonegawa")
dupes = Record.get_duplicates(author, metadata)
assert dupes.count() == 1
@@ -294,143 +298,153 @@ def test_create_citation_case_1(self):
"""Minimal citation plus:
publication date: YES
volume & issue: NO
- doi: NO """
+ doi: NO"""
data = copy.copy(self.citation_data)
- data.update(dict.fromkeys([Fields.VOLUME,
- Fields.ISSUE,
- Fields.DOI],
- None))
+ data.update(dict.fromkeys([Fields.VOLUME, Fields.ISSUE, Fields.DOI], None))
citation = Record.create_citation(data)
- self.assertEqual(citation, 'Wilczek, F. (1973). Ultraviolet behavior '
- 'of non-abelian gauge theories. Physical Review '
- 'Letters.')
+ self.assertEqual(
+ citation,
+ "Wilczek, F. (1973). Ultraviolet behavior "
+ "of non-abelian gauge theories. Physical Review "
+ "Letters.",
+ )
def test_create_citation_case_2(self):
"""Minimal citation plus:
publication date: YES
volume & issue: YES
- doi: NO """
+ doi: NO"""
data = copy.copy(self.citation_data)
- data.update(dict.fromkeys([Fields.DOI],
- None))
+ data.update(dict.fromkeys([Fields.DOI], None))
citation = Record.create_citation(data)
- self.assertEqual(citation, 'Wilczek, F. (1973). Ultraviolet behavior '
- 'of non-abelian gauge theories. Physical Review '
- 'Letters, 30(26).')
+ self.assertEqual(
+ citation,
+ "Wilczek, F. (1973). Ultraviolet behavior "
+ "of non-abelian gauge theories. Physical Review "
+ "Letters, 30(26).",
+ )
def test_create_citation_case_3(self):
"""Minimal citation plus:
publication date: YES
volume & issue: NO
- doi: YES """
+ doi: YES"""
data = copy.copy(self.citation_data)
- data.update(dict.fromkeys([Fields.VOLUME,
- Fields.ISSUE],
- None))
+ data.update(dict.fromkeys([Fields.VOLUME, Fields.ISSUE], None))
citation = Record.create_citation(data)
- self.assertEqual(citation, 'Wilczek, F. (1973). Ultraviolet behavior '
- 'of non-abelian gauge theories. Physical Review '
- 'Letters. doi:10.1103/PhysRevLett.30.1343'
- '')
+ self.assertEqual(
+ citation,
+ "Wilczek, F. (1973). Ultraviolet behavior "
+ "of non-abelian gauge theories. Physical Review "
+ 'Letters. doi:10.1103/PhysRevLett.30.1343'
+ "",
+ )
def test_create_citation_case_4(self):
"""Minimal citation plus:
publication date: YES
volume & issue: YES
- doi: YES """
+ doi: YES"""
citation = Record.create_citation(self.citation_data)
- self.assertEqual(citation, 'Wilczek, F. (1973). Ultraviolet behavior '
- 'of non-abelian gauge theories. Physical Review '
- 'Letters, 30(26). doi:10.1103/PhysRevLett.30.1343'
- '')
+ self.assertEqual(
+ citation,
+ "Wilczek, F. (1973). Ultraviolet behavior "
+ "of non-abelian gauge theories. Physical Review "
+ 'Letters, 30(26). doi:10.1103/PhysRevLett.30.1343'
+ "",
+ )
def test_create_citation_case_5(self):
"""Minimal citation plus:
publication date: NO
volume & issue: NO
- doi: NO """
+ doi: NO"""
data = copy.copy(self.citation_data)
- data.update(dict.fromkeys([Fields.PUBDATE,
- Fields.VOLUME,
- Fields.ISSUE,
- Fields.DOI],
- None))
+ data.update(
+ dict.fromkeys([Fields.PUBDATE, Fields.VOLUME, Fields.ISSUE, Fields.DOI], None)
+ )
citation = Record.create_citation(data)
- self.assertEqual(citation, 'Wilczek, F. Ultraviolet behavior of '
- 'non-abelian gauge theories. Physical Review '
- 'Letters.')
+ self.assertEqual(
+ citation,
+ "Wilczek, F. Ultraviolet behavior of "
+ "non-abelian gauge theories. Physical Review "
+ "Letters.",
+ )
def test_create_citation_case_6(self):
"""Minimal citation plus:
publication date: NO
volume & issue: YES
- doi: NO """
+ doi: NO"""
data = copy.copy(self.citation_data)
- data.update(dict.fromkeys([Fields.PUBDATE,
- Fields.DOI],
- None))
+ data.update(dict.fromkeys([Fields.PUBDATE, Fields.DOI], None))
citation = Record.create_citation(data)
- self.assertEqual(citation, 'Wilczek, F. Ultraviolet behavior of '
- 'non-abelian gauge theories. Physical Review '
- 'Letters, 30(26).')
+ self.assertEqual(
+ citation,
+ "Wilczek, F. Ultraviolet behavior of "
+ "non-abelian gauge theories. Physical Review "
+ "Letters, 30(26).",
+ )
def test_create_citation_case_7(self):
"""Minimal citation plus:
publication date: NO
volume & issue: NO
- doi: YES """
+ doi: YES"""
data = copy.copy(self.citation_data)
- data.update(dict.fromkeys([Fields.PUBDATE,
- Fields.VOLUME,
- Fields.ISSUE],
- None))
+ data.update(dict.fromkeys([Fields.PUBDATE, Fields.VOLUME, Fields.ISSUE], None))
citation = Record.create_citation(data)
- self.assertEqual(citation, 'Wilczek, F. Ultraviolet behavior of '
- 'non-abelian gauge theories. Physical Review '
- 'Letters. doi:10.1103/PhysRevLett.30.1343'
- '')
+ self.assertEqual(
+ citation,
+ "Wilczek, F. Ultraviolet behavior of "
+ "non-abelian gauge theories. Physical Review "
+ 'Letters. doi:10.1103/PhysRevLett.30.1343'
+ "",
+ )
def test_create_citation_case_8(self):
"""Minimal citation plus:
publication date: NO
volume & issue: YES
- doi: YES """
+ doi: YES"""
data = copy.copy(self.citation_data)
- data.update(dict.fromkeys([Fields.PUBDATE],
- None))
+ data.update(dict.fromkeys([Fields.PUBDATE], None))
citation = Record.create_citation(data)
- self.assertEqual(citation, 'Wilczek, F. Ultraviolet behavior of '
- 'non-abelian gauge theories. Physical Review '
- 'Letters, 30(26). doi:10.1103/PhysRevLett.30.1343'
- '')
+ self.assertEqual(
+ citation,
+ "Wilczek, F. Ultraviolet behavior of "
+ "non-abelian gauge theories. Physical Review "
+ 'Letters, 30(26). doi:10.1103/PhysRevLett.30.1343'
+ "",
+ )
def test_create_citation_error_case_1(self):
"""Minimal citation; has volume, lacks issue."""
data = copy.copy(self.citation_data)
- data.update(dict.fromkeys([Fields.PUBDATE,
- Fields.ISSUE,
- Fields.DOI],
- None))
+ data.update(dict.fromkeys([Fields.PUBDATE, Fields.ISSUE, Fields.DOI], None))
citation = Record.create_citation(data)
- self.assertEqual(citation, 'Wilczek, F. Ultraviolet behavior of '
- 'non-abelian gauge theories. Physical Review '
- 'Letters.')
+ self.assertEqual(
+ citation,
+ "Wilczek, F. Ultraviolet behavior of "
+ "non-abelian gauge theories. Physical Review "
+ "Letters.",
+ )
def test_create_citation_error_case_2(self):
"""Minimal citation; has issue, lacks volume."""
data = copy.copy(self.citation_data)
- data.update(dict.fromkeys([Fields.PUBDATE,
- Fields.VOLUME,
- Fields.DOI],
- None))
+ data.update(dict.fromkeys([Fields.PUBDATE, Fields.VOLUME, Fields.DOI], None))
citation = Record.create_citation(data)
- self.assertEqual(citation, 'Wilczek, F. Ultraviolet behavior of '
- 'non-abelian gauge theories. Physical Review '
- 'Letters.')
+ self.assertEqual(
+ citation,
+ "Wilczek, F. Ultraviolet behavior of "
+ "non-abelian gauge theories. Physical Review "
+ "Letters.",
+ )
def test_update_if_needed_case_1(self):
"""update_if_needed alters the record when it sees a new author."""
@@ -449,7 +463,7 @@ def test_update_if_needed_case_2(self):
"""update_if_needed alters the record when it sees a new publisher."""
r1 = Record.objects.get(pk=1)
metadata = {}
- new_publisher = r1.publisher_name + 'new'
+ new_publisher = r1.publisher_name + "new"
metadata[Fields.PUBLISHER_NAME] = new_publisher
metadata[Fields.ACQ_METHOD] = r1.acq_method
metadata[Fields.DOI] = r1.doi
@@ -465,19 +479,19 @@ def test_update_if_needed_case_3(self):
r1 = Record.objects.get(pk=1)
metadata = {}
metadata[Fields.PUBLISHER_NAME] = r1.publisher_name
- metadata[Fields.ACQ_METHOD] = 'RECRUIT_FROM_AUTHOR_MANUSCRIPT'
+ metadata[Fields.ACQ_METHOD] = "RECRUIT_FROM_AUTHOR_MANUSCRIPT"
metadata[Fields.DOI] = r1.doi
metadata[Fields.CITATION] = r1.citation
author = r1.author
assert r1.update_if_needed(author, metadata)
r1.refresh_from_db()
- assert r1.acq_method == 'RECRUIT_FROM_AUTHOR_MANUSCRIPT'
+ assert r1.acq_method == "RECRUIT_FROM_AUTHOR_MANUSCRIPT"
def test_update_if_needed_case_4(self):
"""update_if_needed alters the record when it sees a new DOI."""
r1 = Record.objects.get(pk=1)
metadata = {}
- new_doi = r1.doi + 'new'
+ new_doi = r1.doi + "new"
metadata[Fields.PUBLISHER_NAME] = r1.publisher_name
metadata[Fields.ACQ_METHOD] = r1.acq_method
metadata[Fields.DOI] = new_doi
@@ -492,7 +506,7 @@ def test_update_if_needed_case_5(self):
that is not blank."""
r1 = Record.objects.get(pk=1)
metadata = {}
- new_citation = r1.citation + 'new'
+ new_citation = r1.citation + "new"
metadata[Fields.PUBLISHER_NAME] = r1.publisher_name
metadata[Fields.ACQ_METHOD] = r1.acq_method
metadata[Fields.DOI] = r1.doi
@@ -510,13 +524,13 @@ def test_update_if_needed_case_6(self):
metadata[Fields.PUBLISHER_NAME] = r1.publisher_name
metadata[Fields.ACQ_METHOD] = r1.acq_method
metadata[Fields.DOI] = r1.doi
- metadata[Fields.LAST_NAME] = 'Fermi'
- metadata[Fields.FIRST_NAME] = 'Enrico'
- metadata[Fields.PUBDATE] = '20160815'
- metadata[Fields.TITLE] = 'Paper name'
- metadata[Fields.JOURNAL] = 'Some journal or other'
- metadata[Fields.VOLUME] = '145'
- metadata[Fields.ISSUE] = '5'
+ metadata[Fields.LAST_NAME] = "Fermi"
+ metadata[Fields.FIRST_NAME] = "Enrico"
+ metadata[Fields.PUBDATE] = "20160815"
+ metadata[Fields.TITLE] = "Paper name"
+ metadata[Fields.JOURNAL] = "Some journal or other"
+ metadata[Fields.VOLUME] = "145"
+ metadata[Fields.ISSUE] = "5"
author = r1.author
# Ensure that the citation will not have changed
@@ -535,14 +549,14 @@ def test_update_if_needed_case_7(self):
metadata[Fields.PUBLISHER_NAME] = r1.publisher_name
metadata[Fields.ACQ_METHOD] = r1.acq_method
metadata[Fields.DOI] = r1.doi
- metadata[Fields.LAST_NAME] = 'Fermi'
- metadata[Fields.FIRST_NAME] = 'Enrico'
- metadata[Fields.PUBDATE] = '20160815'
- metadata[Fields.TITLE] = 'Paper name'
- metadata[Fields.JOURNAL] = 'Some journal or other'
- metadata[Fields.VOLUME] = '145'
- metadata[Fields.ISSUE] = '5'
- metadata[Fields.CITATION] = ''
+ metadata[Fields.LAST_NAME] = "Fermi"
+ metadata[Fields.FIRST_NAME] = "Enrico"
+ metadata[Fields.PUBDATE] = "20160815"
+ metadata[Fields.TITLE] = "Paper name"
+ metadata[Fields.JOURNAL] = "Some journal or other"
+ metadata[Fields.VOLUME] = "145"
+ metadata[Fields.ISSUE] = "5"
+ metadata[Fields.CITATION] = ""
author = r1.author
assert r1.citation != Record.create_citation(metadata)
diff --git a/solenoid/records/tests/test_tasks.py b/solenoid/records/tests/test_tasks.py
index 86939bef..f62c18b0 100644
--- a/solenoid/records/tests/test_tasks.py
+++ b/solenoid/records/tests/test_tasks.py
@@ -12,18 +12,18 @@
from ..models import Record
from ..tasks import task_import_papers_for_author
-IMPORT_URL = reverse('records:import')
-AUTHOR_URL = 'mock://api.com/users/98765'
+IMPORT_URL = reverse("records:import")
+AUTHOR_URL = "mock://api.com/users/98765"
AUTHOR_DATA = {
- 'Email': 'PERSONA@ORG.EDU',
- 'First Name': 'Person',
- 'Last Name': 'Author',
- 'MIT ID': 'MITID',
- 'DLC': 'Department Faculty',
- 'Start Date': '2011-01-01',
- 'End Date': '2020-06-30',
- 'ELEMENTS ID': '98765'
- }
+ "Email": "PERSONA@ORG.EDU",
+ "First Name": "Person",
+ "Last Name": "Author",
+ "MIT ID": "MITID",
+ "DLC": "Department Faculty",
+ "Start Date": "2011-01-01",
+ "End Date": "2020-06-30",
+ "ELEMENTS ID": "98765",
+}
@pytest.mark.django_db(transaction=True)
@@ -37,36 +37,36 @@ def test_import_papers_for_author_success(mock_elements, test_settings):
dlc=dlc,
email=AUTHOR_DATA[Fields.EMAIL],
mit_id=AUTHOR_DATA[Fields.MIT_ID],
- dspace_id=AUTHOR_DATA[Fields.MIT_ID]
- )
+ dspace_id=AUTHOR_DATA[Fields.MIT_ID],
+ )
task_import_papers_for_author(AUTHOR_URL, AUTHOR_DATA, author.pk)
- record = Record.objects.latest('pk')
- hashed_id = hashlib.md5('saltyMITID'.encode('utf-8')).hexdigest()
- assert 'Person' == record.author.first_name
- assert 'Author' == record.author.last_name
- assert 'PERSONA@ORG.EDU' == record.author.email
+ record = Record.objects.latest("pk")
+ hashed_id = hashlib.md5("saltyMITID".encode("utf-8")).hexdigest()
+ assert "Person" == record.author.first_name
+ assert "Author" == record.author.last_name
+ assert "PERSONA@ORG.EDU" == record.author.email
assert hashed_id == record.author.dspace_id
- assert 'Big Publisher' == record.publisher_name
- assert 'doi:123.45' == record.doi
+ assert "Big Publisher" == record.publisher_name
+ assert "doi:123.45" == record.doi
@pytest.mark.django_db(transaction=True)
def test_fun_characters_handled_correctly(mock_elements, test_settings):
orig_count = Record.objects.count()
- fun_author_url = 'mock://api.com/users/fun'
+ fun_author_url = "mock://api.com/users/fun"
fun_author_data = {
- 'Email': 'PERSONA@ORG.EDU',
- 'First Name': 'Person',
- 'Last Name': 'Author',
- 'MIT ID': 'MITID',
- 'DLC': 'Department Faculty',
- 'Start Date': '2011-01-01',
- 'End Date': '2020-06-30',
- 'ELEMENTS ID': 'fun'
- }
+ "Email": "PERSONA@ORG.EDU",
+ "First Name": "Person",
+ "Last Name": "Author",
+ "MIT ID": "MITID",
+ "DLC": "Department Faculty",
+ "Start Date": "2011-01-01",
+ "End Date": "2020-06-30",
+ "ELEMENTS ID": "fun",
+ }
dlc, _ = DLC.objects.get_or_create(name=fun_author_data[Fields.DLC])
author = Author.objects.create(
first_name=fun_author_data[Fields.FIRST_NAME],
@@ -74,28 +74,28 @@ def test_fun_characters_handled_correctly(mock_elements, test_settings):
dlc=dlc,
email=fun_author_data[Fields.EMAIL],
mit_id=fun_author_data[Fields.MIT_ID],
- dspace_id=fun_author_data[Fields.MIT_ID]
- )
+ dspace_id=fun_author_data[Fields.MIT_ID],
+ )
task_import_papers_for_author(fun_author_url, fun_author_data, author.pk)
assert orig_count + 4 == Record.objects.count()
- diacritics = Record.objects.get(paper_id='diacritics')
- assert 'Newmån, Díânne Ç' in diacritics.citation
+ diacritics = Record.objects.get(paper_id="diacritics")
+ assert "Newmån, Díânne Ç" in diacritics.citation
- emoji = Record.objects.get(paper_id='emoji')
- assert '❤️' in emoji.citation
+ emoji = Record.objects.get(paper_id="emoji")
+ assert "❤️" in emoji.citation
- math = Record.objects.get(paper_id='math')
- assert '∫' in math.citation
- assert 'π' in math.citation
- assert 'ℵ' in math.citation
+ math = Record.objects.get(paper_id="math")
+ assert "∫" in math.citation
+ assert "π" in math.citation
+ assert "ℵ" in math.citation
# This test is keepin' it real.
- assert 'ℝ' in math.citation
+ assert "ℝ" in math.citation
- nonroman = Record.objects.get(paper_id='nonroman')
- assert '医父逃心需者決応術紙女特周保形四困' in nonroman.citation
+ nonroman = Record.objects.get(paper_id="nonroman")
+ assert "医父逃心需者決応術紙女特周保形四困" in nonroman.citation
@pytest.mark.django_db(transaction=True)
@@ -104,7 +104,7 @@ def test_paper_id_respected_case_1(mock_elements, test_settings):
If we re-import an UNSENT record with a known ID, we should leave the
existing record alone and not create a new one."""
with pytest.raises(Record.DoesNotExist):
- Record.objects.get(paper_id='12345')
+ Record.objects.get(paper_id="12345")
dlc, _ = DLC.objects.get_or_create(name=AUTHOR_DATA[Fields.DLC])
author = Author.objects.create(
@@ -113,21 +113,21 @@ def test_paper_id_respected_case_1(mock_elements, test_settings):
dlc=dlc,
email=AUTHOR_DATA[Fields.EMAIL],
mit_id=AUTHOR_DATA[Fields.MIT_ID],
- dspace_id=AUTHOR_DATA[Fields.MIT_ID]
- )
+ dspace_id=AUTHOR_DATA[Fields.MIT_ID],
+ )
task_import_papers_for_author(AUTHOR_URL, AUTHOR_DATA, author.pk)
orig_count = Record.objects.count()
- record = Record.objects.latest('pk')
- assert '12345' == record.paper_id # check assumptions
+ record = Record.objects.latest("pk")
+ assert "12345" == record.paper_id # check assumptions
orig_record = model_to_dict(record)
t = task_import_papers_for_author(AUTHOR_URL, AUTHOR_DATA, author.pk)
assert orig_count == Record.objects.count()
- assert orig_record == model_to_dict(Record.objects.get(paper_id='12345'))
- assert 'Paper already in database, no updates made.' == t['2']
+ assert orig_record == model_to_dict(Record.objects.get(paper_id="12345"))
+ assert "Paper already in database, no updates made." == t["2"]
@pytest.mark.django_db(transaction=True)
@@ -136,7 +136,7 @@ def test_paper_id_respected_case_2(mock_elements, test_settings):
If we re-import an UNSENT record with a known ID and altered data, we
should update the existing record and not create a new one."""
with pytest.raises(Record.DoesNotExist):
- Record.objects.get(paper_id='12345')
+ Record.objects.get(paper_id="12345")
dlc, _ = DLC.objects.get_or_create(name=AUTHOR_DATA[Fields.DLC])
author = Author.objects.create(
@@ -145,30 +145,31 @@ def test_paper_id_respected_case_2(mock_elements, test_settings):
dlc=dlc,
email=AUTHOR_DATA[Fields.EMAIL],
mit_id=AUTHOR_DATA[Fields.MIT_ID],
- dspace_id=AUTHOR_DATA[Fields.MIT_ID]
- )
+ dspace_id=AUTHOR_DATA[Fields.MIT_ID],
+ )
- task_import_papers_for_author('mock://api.com/users/98765-updated',
- AUTHOR_DATA, author.pk)
+ task_import_papers_for_author(
+ "mock://api.com/users/98765-updated", AUTHOR_DATA, author.pk
+ )
orig_count = Record.objects.count()
- record = Record.objects.latest('pk')
+ record = Record.objects.latest("pk")
- assert '12345' == record.paper_id # check assumptions
+ assert "12345" == record.paper_id # check assumptions
orig_record = model_to_dict(record)
- orig_doi = orig_record.pop('doi')
- orig_record.pop('citation')
+ orig_doi = orig_record.pop("doi")
+ orig_record.pop("citation")
t = task_import_papers_for_author(AUTHOR_URL, AUTHOR_DATA, author.pk)
assert orig_count == Record.objects.count()
- new_record = model_to_dict(Record.objects.get(paper_id='12345'))
- new_doi = new_record.pop('doi')
- new_record.pop('citation')
+ new_record = model_to_dict(Record.objects.get(paper_id="12345"))
+ new_doi = new_record.pop("doi")
+ new_record.pop("citation")
assert orig_record == new_record
assert orig_doi != new_doi
- assert 'Record updated with new data from Elements.' == t['2']
+ assert "Record updated with new data from Elements." == t["2"]
@pytest.mark.django_db(transaction=True)
@@ -180,7 +181,7 @@ def test_paper_id_respected_case_3(mock_elements, test_settings):
orig_count = Record.objects.count()
with pytest.raises(Record.DoesNotExist):
- Record.objects.get(paper_id='12345')
+ Record.objects.get(paper_id="12345")
dlc, _ = DLC.objects.get_or_create(name=AUTHOR_DATA[Fields.DLC])
author = Author.objects.create(
@@ -189,25 +190,27 @@ def test_paper_id_respected_case_3(mock_elements, test_settings):
dlc=dlc,
email=AUTHOR_DATA[Fields.EMAIL],
mit_id=AUTHOR_DATA[Fields.MIT_ID],
- dspace_id=AUTHOR_DATA[Fields.MIT_ID]
- )
+ dspace_id=AUTHOR_DATA[Fields.MIT_ID],
+ )
task_import_papers_for_author(AUTHOR_URL, AUTHOR_DATA, author.pk)
new_count = Record.objects.count()
assert orig_count + 1 == new_count
- record = Record.objects.latest('pk')
- assert '12345' == record.paper_id
+ record = Record.objects.latest("pk")
+ assert "12345" == record.paper_id
- liaison = Liaison.objects.create(first_name='foo',
- last_name='bar',
- email_address='fake@example.com')
+ liaison = Liaison.objects.create(
+ first_name="foo", last_name="bar", email_address="fake@example.com"
+ )
- email = EmailMessage.objects.create(original_text='gjhdka',
- date_sent=date.today(),
- author=record.author,
- _liaison=liaison)
+ email = EmailMessage.objects.create(
+ original_text="gjhdka",
+ date_sent=date.today(),
+ author=record.author,
+ _liaison=liaison,
+ )
record.email = email
record.save()
@@ -216,8 +219,8 @@ def test_paper_id_respected_case_3(mock_elements, test_settings):
t = task_import_papers_for_author(AUTHOR_URL, AUTHOR_DATA, author.pk)
assert new_count == Record.objects.count()
- assert orig_record == model_to_dict(Record.objects.get(paper_id='12345'))
- assert 'Publication #12345 by Author has already been requested' in t['2']
+ assert orig_record == model_to_dict(Record.objects.get(paper_id="12345"))
+ assert "Publication #12345 by Author has already been requested" in t["2"]
@pytest.mark.django_db(transaction=True)
@@ -227,7 +230,7 @@ def test_paper_id_respected_case_4(mock_elements, test_settings):
author, we should raise a warning and not create a new record."""
orig_count = Record.objects.count()
with pytest.raises(Record.DoesNotExist):
- Record.objects.get(paper_id='12345')
+ Record.objects.get(paper_id="12345")
dlc, _ = DLC.objects.get_or_create(name=AUTHOR_DATA[Fields.DLC])
author = Author.objects.create(
@@ -236,57 +239,58 @@ def test_paper_id_respected_case_4(mock_elements, test_settings):
dlc=dlc,
email=AUTHOR_DATA[Fields.EMAIL],
mit_id=AUTHOR_DATA[Fields.MIT_ID],
- dspace_id=AUTHOR_DATA[Fields.MIT_ID]
- )
+ dspace_id=AUTHOR_DATA[Fields.MIT_ID],
+ )
task_import_papers_for_author(AUTHOR_URL, AUTHOR_DATA, author.pk)
new_count = Record.objects.count()
assert orig_count + 1 == new_count
- record = Record.objects.latest('pk')
- assert '12345' == record.paper_id
+ record = Record.objects.latest("pk")
+ assert "12345" == record.paper_id
- liaison = Liaison.objects.create(first_name='foo',
- last_name='bar',
- email_address='fake@example.com')
+ liaison = Liaison.objects.create(
+ first_name="foo", last_name="bar", email_address="fake@example.com"
+ )
- email = EmailMessage.objects.create(original_text='gjhdka',
- date_sent=date.today(),
- author=record.author,
- _liaison=liaison)
+ email = EmailMessage.objects.create(
+ original_text="gjhdka",
+ date_sent=date.today(),
+ author=record.author,
+ _liaison=liaison,
+ )
record.email = email
record.save()
orig_record = model_to_dict(record)
# Next, import new data about the same paper as recorded under a different
# author. This should *not* create any new records.
- new_author_url = 'mock://api.com/users/54321'
+ new_author_url = "mock://api.com/users/54321"
new_author_data = {
- 'Email': 'PTWONA@ORG.EDU',
- 'First Name': 'Person Two',
- 'Last Name': 'New Author',
- 'MIT ID': 'MITID02',
- 'DLC': 'Department Faculty',
- 'Start Date': '2011-01-01',
- 'End Date': '3000-01-01',
- 'ELEMENTS ID': '54321'
- }
+ "Email": "PTWONA@ORG.EDU",
+ "First Name": "Person Two",
+ "Last Name": "New Author",
+ "MIT ID": "MITID02",
+ "DLC": "Department Faculty",
+ "Start Date": "2011-01-01",
+ "End Date": "3000-01-01",
+ "ELEMENTS ID": "54321",
+ }
new_author = Author.objects.create(
first_name=new_author_data[Fields.FIRST_NAME],
last_name=new_author_data[Fields.LAST_NAME],
dlc=dlc,
email=new_author_data[Fields.EMAIL],
mit_id=new_author_data[Fields.MIT_ID],
- dspace_id=new_author_data[Fields.MIT_ID]
- )
+ dspace_id=new_author_data[Fields.MIT_ID],
+ )
- t = task_import_papers_for_author(new_author_url, new_author_data,
- new_author.pk)
+ t = task_import_papers_for_author(new_author_url, new_author_data, new_author.pk)
assert new_count == Record.objects.count()
- assert orig_record == model_to_dict(Record.objects.get(paper_id='12345'))
- assert 'Publication #12345 by New Author has already been requested' in t['2']
+ assert orig_record == model_to_dict(Record.objects.get(paper_id="12345"))
+ assert "Publication #12345 by New Author has already been requested" in t["2"]
@pytest.mark.django_db(transaction=True)
@@ -298,10 +302,10 @@ def test_acq_method_set_when_present(mock_elements, test_settings):
dlc=dlc,
email=AUTHOR_DATA[Fields.EMAIL],
mit_id=AUTHOR_DATA[Fields.MIT_ID],
- dspace_id=AUTHOR_DATA[Fields.MIT_ID]
- )
+ dspace_id=AUTHOR_DATA[Fields.MIT_ID],
+ )
task_import_papers_for_author(AUTHOR_URL, AUTHOR_DATA, author.pk)
- record = Record.objects.latest('pk')
- assert record.acq_method == 'RECRUIT_FROM_AUTHOR_FPV'
+ record = Record.objects.latest("pk")
+ assert record.acq_method == "RECRUIT_FROM_AUTHOR_FPV"
diff --git a/solenoid/records/tests/test_views.py b/solenoid/records/tests/test_views.py
index 31c174c3..7830cc5b 100644
--- a/solenoid/records/tests/test_views.py
+++ b/solenoid/records/tests/test_views.py
@@ -11,33 +11,33 @@
from ..models import Record
from ..views import UnsentList
-IMPORT_URL = reverse('records:import')
+IMPORT_URL = reverse("records:import")
@override_settings(LOGIN_REQUIRED=False)
class UnsentRecordsViewsTest(TestCase):
- fixtures = ['testdata.yaml']
+ fixtures = ["testdata.yaml"]
def setUp(self):
- self.url = reverse('records:unsent_list')
+ self.url = reverse("records:unsent_list")
def test_unsent_records_url_exists(self):
resolve(self.url)
def test_unsent_records_view_renders(self):
c = Client()
- with self.assertTemplateUsed('records/record_list.html'):
+ with self.assertTemplateUsed("records/record_list.html"):
c.get(self.url)
def test_unsent_records_page_has_all_unsent_in_context(self):
# assertQuerysetEqual never works, so we're just comparing the pks.
self.assertEqual(
- set(UnsentList().get_queryset().values_list('pk')),
+ set(UnsentList().get_queryset().values_list("pk")),
set(
- Record.objects.exclude(
- email__date_sent__isnull=False).distinct().values_list(
- 'pk')
- )
+ Record.objects.exclude(email__date_sent__isnull=False)
+ .distinct()
+ .values_list("pk")
+ ),
)
def test_unsent_records_page_displays_all_unsent(self):
@@ -51,37 +51,34 @@ def test_unsent_records_page_displays_all_unsent(self):
# Import View Tests
def test_import_records_view_renders(client):
- with assertTemplateUsed('records/import.html'):
+ with assertTemplateUsed("records/import.html"):
client.get(IMPORT_URL)
@pytest.mark.django_db()
-@patch('solenoid.records.tasks.task_import_papers_for_author.delay')
+@patch("solenoid.records.tasks.task_import_papers_for_author.delay")
def test_import_form_valid_calls_task_with_correct_args_and_redirects(
mock_patch, client, mock_elements, test_settings
- ):
- result = AsyncResult('555-444-333')
+):
+ result = AsyncResult("555-444-333")
mock_patch.return_value = result
- author_url = 'mock://api.com/users/98765'
+ author_url = "mock://api.com/users/98765"
author_data = {
- 'Email': 'PERSONA@ORG.EDU',
- 'First Name': 'Person',
- 'Last Name': 'Author',
- 'MIT ID': 'MITID',
- 'DLC': 'Department Faculty',
- 'Start Date': '2011-10-01',
- 'End Date': '2020-06-30',
- 'ELEMENTS ID': '98765'
- }
- r = client.post(IMPORT_URL, {'author_id': '98765'}, follow=True)
+ "Email": "PERSONA@ORG.EDU",
+ "First Name": "Person",
+ "Last Name": "Author",
+ "MIT ID": "MITID",
+ "DLC": "Department Faculty",
+ "Start Date": "2011-10-01",
+ "End Date": "2020-06-30",
+ "ELEMENTS ID": "98765",
+ }
+ r = client.post(IMPORT_URL, {"author_id": "98765"}, follow=True)
mock_patch.assert_called_once_with(author_url, author_data, 1)
- assertRedirects(r, reverse('records:status',
- kwargs={'task_id': '555-444-333'}
- )
- )
+ assertRedirects(r, reverse("records:status", kwargs={"task_id": "555-444-333"}))
# Status View Tests
def test_status_view_renders(client):
- with assertTemplateUsed('records/status.html'):
- client.get(reverse('records:status', kwargs={'task_id': '12345'}))
+ with assertTemplateUsed("records/status.html"):
+ client.get(reverse("records:status", kwargs={"task_id": "12345"}))
diff --git a/solenoid/records/urls.py b/solenoid/records/urls.py
index a0556303..459e14bb 100644
--- a/solenoid/records/urls.py
+++ b/solenoid/records/urls.py
@@ -3,14 +3,15 @@
from . import views
-app_name = 'records'
+app_name = "records"
urlpatterns = [
- re_path(r'^$', views.UnsentList.as_view(), name='unsent_list'),
- re_path(r'^import/$', views.Import.as_view(), name='import'),
- re_path(r'^import/status/(?P[^/]+)/$', views.status,
- name="status"),
- re_path(r'^instructions/$',
- TemplateView.as_view(template_name="records/instructions.html"),
- name='instructions'),
- ]
+ re_path(r"^$", views.UnsentList.as_view(), name="unsent_list"),
+ re_path(r"^import/$", views.Import.as_view(), name="import"),
+ re_path(r"^import/status/(?P[^/]+)/$", views.status, name="status"),
+ re_path(
+ r"^instructions/$",
+ TemplateView.as_view(template_name="records/instructions.html"),
+ name="instructions",
+ ),
+]
diff --git a/solenoid/records/views.py b/solenoid/records/views.py
index 97281e69..ceadb8b3 100644
--- a/solenoid/records/views.py
+++ b/solenoid/records/views.py
@@ -27,49 +27,52 @@
class UnsentList(ConditionalLoginRequiredMixin, ListView):
def get_context_data(self, **kwargs):
context = super(UnsentList, self).get_context_data(**kwargs)
- context['title'] = 'Unsent citations'
- context['extension_template'] = 'records/_unsent_list.html'
- context['breadcrumbs'] = [
- {'url': reverse_lazy('home'), 'text': 'dashboard'},
- {'url': '#', 'text': 'view unsent citations'}
+ context["title"] = "Unsent citations"
+ context["extension_template"] = "records/_unsent_list.html"
+ context["breadcrumbs"] = [
+ {"url": reverse_lazy("home"), "text": "dashboard"},
+ {"url": "#", "text": "view unsent citations"},
]
- authors = Author.objects.filter(
- record__in=self.get_queryset()).distinct()
- context['authors'] = authors
- context['dlcs'] = DLC.objects.filter(author__in=authors).distinct()
+ authors = Author.objects.filter(record__in=self.get_queryset()).distinct()
+ context["authors"] = authors
+ context["dlcs"] = DLC.objects.filter(author__in=authors).distinct()
return context
def get_queryset(self):
- return Record.objects.exclude(
- email__date_sent__isnull=False).prefetch_related(
- 'author', 'author__dlc')
+ return Record.objects.exclude(email__date_sent__isnull=False).prefetch_related(
+ "author", "author__dlc"
+ )
class Import(ConditionalLoginRequiredMixin, FormView):
- template_name = 'records/import.html'
+ template_name = "records/import.html"
form_class = ImportForm
- success_url = reverse_lazy('records:status')
+ success_url = reverse_lazy("records:status")
def _get_author_data(self, form, author_id):
- author_url = f'{settings.ELEMENTS_ENDPOINT}users/{author_id}'
+ author_url = f"{settings.ELEMENTS_ENDPOINT}users/{author_id}"
try:
author_xml = get_from_elements(author_url)
except HTTPError as e:
logger.info(e)
- if '404 Client Error' in str(e):
- msg = (f'Author with ID {author_id} not found in Elements. '
- 'Please confirm the Elements ID and try again.')
+ if "404 Client Error" in str(e):
+ msg = (
+ f"Author with ID {author_id} not found in Elements. "
+ "Please confirm the Elements ID and try again."
+ )
messages.warning(self.request, msg)
return super(Import, self).form_invalid(form)
except Timeout as e:
logger.info(e)
- msg = ('Unable to connect to Symplectic '
- 'Elements. Please wait a few '
- 'minutes and try again.')
+ msg = (
+ "Unable to connect to Symplectic "
+ "Elements. Please wait a few "
+ "minutes and try again."
+ )
messages.warning(self.request, msg)
return super(Import, self).form_invalid(form)
author_data = parse_author_xml(author_xml)
- author_data['ELEMENTS ID'] = author_id
+ author_data["ELEMENTS ID"] = author_id
return author_data
def _get_author_record_id(self, form, author_data):
@@ -80,61 +83,62 @@ def _get_author_record_id(self, form, author_data):
author.save()
except Author.DoesNotExist:
if Author.is_author_creatable(author_data):
- dlc, _ = DLC.objects.get_or_create(
- name=author_data[Fields.DLC]
- )
+ dlc, _ = DLC.objects.get_or_create(name=author_data[Fields.DLC])
author = Author.objects.create(
first_name=author_data[Fields.FIRST_NAME],
last_name=author_data[Fields.LAST_NAME],
dlc=dlc,
email=author_data[Fields.EMAIL],
mit_id=author_data[Fields.MIT_ID],
- dspace_id=author_data[Fields.MIT_ID]
- )
+ dspace_id=author_data[Fields.MIT_ID],
+ )
else:
logger.info(
f"Author #{author_data['ELEMENTS ID']} was missing data "
"from Elements"
- )
- msg = (f"Author with ID {author_data['ELEMENTS ID']} is "
- f"missing required information. Please check the "
- f"author record in Elements and confirm that all of "
- f"the following information is present: "
- f"{', '.join(Fields.AUTHOR_DATA)}")
+ )
+ msg = (
+ f"Author with ID {author_data['ELEMENTS ID']} is "
+ f"missing required information. Please check the "
+ f"author record in Elements and confirm that all of "
+ f"the following information is present: "
+ f"{', '.join(Fields.AUTHOR_DATA)}"
+ )
messages.warning(self.request, msg)
return super(Import, self).form_invalid(form)
return author.id
def form_valid(self, form, **kwargs):
- author_id = form.cleaned_data['author_id']
- author_url = f'{settings.ELEMENTS_ENDPOINT}users/{author_id}'
+ author_id = form.cleaned_data["author_id"]
+ author_url = f"{settings.ELEMENTS_ENDPOINT}users/{author_id}"
author_data = self._get_author_data(form, author_id)
author = self._get_author_record_id(form, author_data)
- result = task_import_papers_for_author.delay(
- author_url,
- author_data,
- author)
+ result = task_import_papers_for_author.delay(author_url, author_data, author)
task_id = result.task_id
- return redirect('records:status', task_id=task_id)
+ return redirect("records:status", task_id=task_id)
def form_invalid(self, form):
- msg = format_html('Something went wrong. Please try again, and if it '
- 'still doesn\'t work, contact '
- 'a Solenoid admin.',
- mark_safe(settings.ADMINS[0][1])),
+ msg = (
+ format_html(
+ "Something went wrong. Please try again, and if it "
+ 'still doesn\'t work, contact '
+ "a Solenoid admin.",
+ mark_safe(settings.ADMINS[0][1]),
+ ),
+ )
messages.warning(self.request, msg)
return super(Import, self).form_invalid(form)
def get_context_data(self, **kwargs):
context = super(Import, self).get_context_data(**kwargs)
- context['breadcrumbs'] = [
- {'url': reverse_lazy('home'), 'text': 'dashboard'},
- {'url': '#', 'text': 'import data'}
+ context["breadcrumbs"] = [
+ {"url": reverse_lazy("home"), "text": "dashboard"},
+ {"url": "#", "text": "import data"},
]
return context
def status(request, task_id):
- return render(request, 'records/status.html', context={'task_id': task_id})
+ return render(request, "records/status.html", context={"task_id": task_id})
diff --git a/solenoid/settings/base.py b/solenoid/settings/base.py
index b543952a..e0164929 100644
--- a/solenoid/settings/base.py
+++ b/solenoid/settings/base.py
@@ -14,8 +14,7 @@
import dj_database_url
from django.urls import reverse_lazy
-BASE_DIR = os.path.dirname(
- os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def boolean(value):
@@ -28,7 +27,7 @@ def boolean(value):
"""
if isinstance(value, bool) or value is None:
return bool(value)
- return value.lower() in ('true', 't', 'yes', 'y', '1')
+ return value.lower() in ("true", "t", "yes", "y", "1")
def make_list(value):
@@ -40,7 +39,7 @@ def make_list(value):
"""
if value is None:
return []
- return list(filter(None, [s.strip() for s in value.split(',')]))
+ return list(filter(None, [s.strip() for s in value.split(",")]))
# -----------------------------------------------------------------------------
@@ -51,20 +50,20 @@ def make_list(value):
# -----------------------------------------------------------------------------
DJANGO_APPS = [
- 'django.contrib.admin',
- 'django.contrib.auth',
- 'django.contrib.contenttypes',
- 'django.contrib.sessions',
- 'django.contrib.messages',
- 'django.contrib.staticfiles',
+ "django.contrib.admin",
+ "django.contrib.auth",
+ "django.contrib.contenttypes",
+ "django.contrib.sessions",
+ "django.contrib.messages",
+ "django.contrib.staticfiles",
]
SOLENOID_APPS = [
- 'solenoid.elements',
- 'solenoid.emails',
- 'solenoid.people',
- 'solenoid.records',
- 'solenoid.userauth',
+ "solenoid.elements",
+ "solenoid.emails",
+ "solenoid.people",
+ "solenoid.records",
+ "solenoid.userauth",
]
INSTALLED_APPS = DJANGO_APPS + SOLENOID_APPS
@@ -74,21 +73,21 @@ def make_list(value):
# -----------------------------------------------------------------------------
MIDDLEWARE = [
- 'django.contrib.sessions.middleware.SessionMiddleware',
- 'django.middleware.common.CommonMiddleware',
- 'django.middleware.csrf.CsrfViewMiddleware',
- 'django.contrib.auth.middleware.AuthenticationMiddleware',
- 'django.contrib.messages.middleware.MessageMiddleware',
- 'django.middleware.clickjacking.XFrameOptionsMiddleware',
- 'django.middleware.security.SecurityMiddleware',
- 'whitenoise.middleware.WhiteNoiseMiddleware',
+ "django.contrib.sessions.middleware.SessionMiddleware",
+ "django.middleware.common.CommonMiddleware",
+ "django.middleware.csrf.CsrfViewMiddleware",
+ "django.contrib.auth.middleware.AuthenticationMiddleware",
+ "django.contrib.messages.middleware.MessageMiddleware",
+ "django.middleware.clickjacking.XFrameOptionsMiddleware",
+ "django.middleware.security.SecurityMiddleware",
+ "whitenoise.middleware.WhiteNoiseMiddleware",
]
# DEBUG
# -----------------------------------------------------------------------------
-DEBUG = boolean(os.getenv('DJANGO_DEBUG', False))
+DEBUG = boolean(os.getenv("DJANGO_DEBUG", False))
# DATABASE CONFIGURATION
# -----------------------------------------------------------------------------
@@ -96,9 +95,9 @@ def make_list(value):
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
- 'default': dj_database_url.config(
- default=os.getenv('DATABASE_URL', 'sqlite:///db.sqlite3'),
- conn_max_age=600)
+ "default": dj_database_url.config(
+ default=os.getenv("DATABASE_URL", "sqlite:///db.sqlite3"), conn_max_age=600
+ )
}
@@ -106,31 +105,30 @@ def make_list(value):
# -----------------------------------------------------------------------------
# SECURITY WARNING: keep the secret key used in production secret!
-SECRET_KEY = os.environ['DJANGO_SECRET_KEY']
+SECRET_KEY = os.environ["DJANGO_SECRET_KEY"]
# This will accept a comma-separated list of allowed hosts
-ALLOWED_HOSTS = make_list(os.getenv('ALLOWED_HOSTS'))
+ALLOWED_HOSTS = make_list(os.getenv("ALLOWED_HOSTS"))
-if 'HEROKU_APP_NAME' in os.environ:
- ALLOWED_HOSTS.append(
- '{}.herokuapp.com'.format(os.environ['HEROKU_APP_NAME']))
+if "HEROKU_APP_NAME" in os.environ:
+ ALLOWED_HOSTS.append("{}.herokuapp.com".format(os.environ["HEROKU_APP_NAME"]))
-ROOT_URLCONF = 'solenoid.urls'
+ROOT_URLCONF = "solenoid.urls"
-WSGI_APPLICATION = 'solenoid.wsgi.application'
+WSGI_APPLICATION = "solenoid.wsgi.application"
SITE_ID = 1
-SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
+SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
# INTERNATIONALIZATION CONFIGURATION
# -----------------------------------------------------------------------------
# https://docs.djangoproject.com/en/1.8/topics/i18n/
-LANGUAGE_CODE = 'en-us'
+LANGUAGE_CODE = "en-us"
-TIME_ZONE = 'UTC'
+TIME_ZONE = "UTC"
USE_TZ = True
# Turned off to save on overhead, since we won't need this for an MIT internal
@@ -143,15 +141,15 @@ def make_list(value):
TEMPLATES = [
{
- 'BACKEND': 'django.template.backends.django.DjangoTemplates',
- 'DIRS': [os.path.join(BASE_DIR, 'solenoid', 'templates')],
- 'APP_DIRS': True,
- 'OPTIONS': {
- 'context_processors': [
- 'django.template.context_processors.debug',
- 'django.template.context_processors.request',
- 'django.contrib.auth.context_processors.auth',
- 'django.contrib.messages.context_processors.messages',
+ "BACKEND": "django.template.backends.django.DjangoTemplates",
+ "DIRS": [os.path.join(BASE_DIR, "solenoid", "templates")],
+ "APP_DIRS": True,
+ "OPTIONS": {
+ "context_processors": [
+ "django.template.context_processors.debug",
+ "django.template.context_processors.request",
+ "django.contrib.auth.context_processors.auth",
+ "django.contrib.messages.context_processors.messages",
],
},
},
@@ -163,80 +161,76 @@ def make_list(value):
# https://docs.djangoproject.com/en/1.8/howto/static-files/
-STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
-STATIC_URL = '/static/'
+STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
+STATIC_URL = "/static/"
# Extra places for collectstatic to find static files.
-STATICFILES_DIRS = [os.path.join(BASE_DIR, 'solenoid', 'static')]
+STATICFILES_DIRS = [os.path.join(BASE_DIR, "solenoid", "static")]
-FIXTURE_DIRS = [os.path.join(BASE_DIR, 'solenoid', 'fixtures')]
+FIXTURE_DIRS = [os.path.join(BASE_DIR, "solenoid", "fixtures")]
# LOGGING CONFIGURATION
# -----------------------------------------------------------------------------
LOGGING = {
- 'version': 1,
- 'disable_existing_loggers': False,
- 'filters': {
- 'require_debug_false': {
- '()': 'django.utils.log.RequireDebugFalse'
- }
- },
- 'formatters': {
- 'brief': {
- 'format': ('%(asctime)s %(levelname)s %(name)s[%(funcName)s]: '
- '%(message)s'),
+ "version": 1,
+ "disable_existing_loggers": False,
+ "filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}},
+ "formatters": {
+ "brief": {
+ "format": (
+ "%(asctime)s %(levelname)s %(name)s[%(funcName)s]: " "%(message)s"
+ ),
},
},
- 'handlers': {
- 'file': {
- 'level': 'INFO',
- 'class': 'logging.handlers.RotatingFileHandler',
- 'filename': os.path.join(BASE_DIR, 'logs', 'solenoid.log'),
- 'maxBytes': 1024 * 1024 * 5, # 5 MB
- 'backupCount': 5,
- 'formatter': 'brief',
+ "handlers": {
+ "file": {
+ "level": "INFO",
+ "class": "logging.handlers.RotatingFileHandler",
+ "filename": os.path.join(BASE_DIR, "logs", "solenoid.log"),
+ "maxBytes": 1024 * 1024 * 5, # 5 MB
+ "backupCount": 5,
+ "formatter": "brief",
},
},
- 'loggers': {
- '': {
- 'handlers': ['file'],
- 'level': 'INFO',
+ "loggers": {
+ "": {
+ "handlers": ["file"],
+ "level": "INFO",
}
- }
+ },
}
# EMAIL CONFIGURATION
# -----------------------------------------------------------------------------
-ADMINS = [('Solenoid Admin', os.getenv('SOLENOID_ADMIN', None))]
+ADMINS = [("Solenoid Admin", os.getenv("SOLENOID_ADMIN", None))]
EMAIL_USE_TLS = True
-EMAIL_HOST = 'outgoing.mit.edu'
+EMAIL_HOST = "outgoing.mit.edu"
EMAIL_PORT = 587
-EMAIL_HOST_USER = 'libsys'
-DEFAULT_FROM_EMAIL = EMAIL_HOST_USER + '@mit.edu'
+EMAIL_HOST_USER = "libsys"
+DEFAULT_FROM_EMAIL = EMAIL_HOST_USER + "@mit.edu"
# THIS CONTROLS WHETHER THE SYSTEM WILL SEND EMAIL. If you don't want to send
# real actual email, don't set this environment variable.
-EMAIL_HOST_PASSWORD = os.environ.get('DJANGO_SMTP_PASSWORD', None)
+EMAIL_HOST_PASSWORD = os.environ.get("DJANGO_SMTP_PASSWORD", None)
# The default backend is SMTP, but if we haven't configured the environment
# with the password, we can't use SMTP, so use the console backend instead.
# This will allow for local development/testing and avoid spamming anyone.
if not EMAIL_HOST_PASSWORD:
- EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
+ EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
# Expects a string, which is an email address, or None. Any emails sent by the
# system will be cc:ed to this email address.
-SCHOLCOMM_MOIRA_LIST = 'sccs-fta@mit.edu'
+SCHOLCOMM_MOIRA_LIST = "sccs-fta@mit.edu"
# If True, will only send email to admins. If False, will send email to
# liaisons and the moira list.
-EMAIL_TESTING_MODE = boolean(os.environ.get('DJANGO_EMAIL_TESTING_MODE',
- False))
+EMAIL_TESTING_MODE = boolean(os.environ.get("DJANGO_EMAIL_TESTING_MODE", False))
# -----------------------------------------------------------------------------
@@ -246,25 +240,24 @@ def make_list(value):
# OAUTH CONFIGURATION
# -----------------------------------------------------------------------------
-INSTALLED_APPS += ['social_django']
+INSTALLED_APPS += ["social_django"]
# These are the people who should be allowed to log in. This should be a list
# of strings representing MIT usernames; they will be correctly formatted in
# the SOCIAL_AUTH_MITOAUTH2_WHITELISTED_EMAILS list comprehension.
WHITELIST = ["lhanscom", "khdunn", "kevgrant", "sroosa", "jprevost", "ghukill"]
-SOCIAL_AUTH_MITOAUTH2_WHITELISTED_EMAILS = ['%s@mit.edu' % kerb
- for kerb in WHITELIST]
+SOCIAL_AUTH_MITOAUTH2_WHITELISTED_EMAILS = ["%s@mit.edu" % kerb for kerb in WHITELIST]
SOCIAL_AUTH_PIPELINE = [
- 'social_core.pipeline.social_auth.social_details',
- 'social_core.pipeline.social_auth.social_uid',
- 'social_core.pipeline.social_auth.auth_allowed',
- 'social_core.pipeline.social_auth.social_user',
- 'social_core.pipeline.social_auth.associate_by_email',
- 'social_core.pipeline.user.get_username',
- 'social_core.pipeline.user.create_user',
- 'social_core.pipeline.social_auth.associate_user',
+ "social_core.pipeline.social_auth.social_details",
+ "social_core.pipeline.social_auth.social_uid",
+ "social_core.pipeline.social_auth.auth_allowed",
+ "social_core.pipeline.social_auth.social_user",
+ "social_core.pipeline.social_auth.associate_by_email",
+ "social_core.pipeline.user.get_username",
+ "social_core.pipeline.user.create_user",
+ "social_core.pipeline.social_auth.associate_user",
]
# This is actually a token revocation pipeline, not a disconnection pipeline.
@@ -279,46 +272,48 @@ def make_list(value):
# their MIT OAuth IDs - we'll have to rework this.
SOCIAL_AUTH_DISCONNECT_PIPELINE = [
# Collects the social associations to disconnect.
- 'social_core.pipeline.disconnect.get_entries',
-
+ "social_core.pipeline.disconnect.get_entries",
# Revoke any access_token when possible.
- 'social_core.pipeline.disconnect.revoke_tokens',
+ "social_core.pipeline.disconnect.revoke_tokens",
]
# Default to not requiring login for ease of local development, but allow it
# to be set with an environment variable to facilitate testing. You will need
# to fill in key and secret values for your environment as well if you set this
# to True.
-LOGIN_REQUIRED = boolean(os.environ.get('DJANGO_LOGIN_REQUIRED', False))
+LOGIN_REQUIRED = boolean(os.environ.get("DJANGO_LOGIN_REQUIRED", False))
if LOGIN_REQUIRED:
# args is *case-sensitive*, even though other parts of python-social-auth
# are casual about casing.
- LOGIN_URL = reverse_lazy('social:begin', args=('mitoauth2',))
+ LOGIN_URL = reverse_lazy("social:begin", args=("mitoauth2",))
AUTHENTICATION_BACKENDS = [
- 'solenoid.userauth.backends.MITOAuth2',
+ "solenoid.userauth.backends.MITOAuth2",
# Required for user/pass authentication - this is useful for the admin
# site.
- 'django.contrib.auth.backends.ModelBackend',
+ "django.contrib.auth.backends.ModelBackend",
]
- SOCIAL_AUTH_MITOAUTH2_KEY = os.environ.get('DJANGO_MITOAUTH2_KEY')
- SOCIAL_AUTH_MITOAUTH2_SECRET = os.environ.get('DJANGO_MITOAUTH2_SECRET')
+ SOCIAL_AUTH_MITOAUTH2_KEY = os.environ.get("DJANGO_MITOAUTH2_KEY")
+ SOCIAL_AUTH_MITOAUTH2_SECRET = os.environ.get("DJANGO_MITOAUTH2_SECRET")
MIDDLEWARE += [
- 'social_django.middleware.SocialAuthExceptionMiddleware',
+ "social_django.middleware.SocialAuthExceptionMiddleware",
]
- TEMPLATES[0]['OPTIONS']['context_processors'].extend(
- ['social_django.context_processors.backends',
- 'social_django.context_processors.login_redirect'])
+ TEMPLATES[0]["OPTIONS"]["context_processors"].extend(
+ [
+ "social_django.context_processors.backends",
+ "social_django.context_processors.login_redirect",
+ ]
+ )
# CKEDITOR CONFIGURATION
# -----------------------------------------------------------------------------
-INSTALLED_APPS += ['ckeditor']
+INSTALLED_APPS += ["ckeditor"]
# This is the same version of jquery that is commented out in the base
@@ -326,21 +321,22 @@ def make_list(value):
# -If you uncomment that line and load jquery in base.html, delete this
# setting.- Loading jquery multiple times will lead to sorrow.
-CKEDITOR_JQUERY_URL = ('https://ajax.googleapis.com/ajax/libs/jquery/'
- '1.12.4/jquery.min.js')
+CKEDITOR_JQUERY_URL = (
+    "https://ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"
+)
# We're intentionally not configuring CKeditor file uploads, because file
# uploads are not part of the use case documentation, and they add security
# headaches.
CKEDITOR_CONFIGS = {
- 'default': {
- 'removePlugins': 'stylesheetparser',
- 'allowedContent': {
- '$1': {
- 'elements': 'div p a b i em strong',
- 'attributes': 'href',
- 'classes': True
+ "default": {
+ "removePlugins": "stylesheetparser",
+ "allowedContent": {
+ "$1": {
+ "elements": "div p a b i em strong",
+ "attributes": "href",
+ "classes": True,
}
},
}
@@ -352,39 +348,39 @@ def make_list(value):
# Defaults to the dev instance - only use the production Elements app if you
# are very sure you should!
-ELEMENTS_ENDPOINT = os.environ.get('DJANGO_ELEMENTS_ENDPOINT',
- 'https://pubdata-dev.mit.edu:8091/'
- 'secure-api/v5.5/')
+ELEMENTS_ENDPOINT = os.environ.get(
+ "DJANGO_ELEMENTS_ENDPOINT", "https://pubdata-dev.mit.edu:8091/" "secure-api/v5.5/"
+)
# You'll need to have an API user configured in the Elements app that matches
# these parameters. See docs/README.md.
-ELEMENTS_USER = os.environ.get('DJANGO_ELEMENTS_USER', 'solenoid')
-ELEMENTS_PASSWORD = os.environ.get('DJANGO_ELEMENTS_PASSWORD')
+ELEMENTS_USER = os.environ.get("DJANGO_ELEMENTS_USER", "solenoid")
+ELEMENTS_PASSWORD = os.environ.get("DJANGO_ELEMENTS_PASSWORD")
# Set this to False if you don't want to issue API calls (e.g. during testing,
# on localhost, on environments that don't know the password or don't have IPs
# known to the Elements firewall).
-USE_ELEMENTS = boolean(os.environ.get('DJANGO_USE_ELEMENTS', False))
+USE_ELEMENTS = boolean(os.environ.get("DJANGO_USE_ELEMENTS", False))
QUOTAGUARD_URL = None
# DJANGO-COMPRESSOR CONFIGURATION
# -----------------------------------------------------------------------------
-INSTALLED_APPS += ['compressor']
+INSTALLED_APPS += ["compressor"]
STATICFILES_FINDERS = [
- 'django.contrib.staticfiles.finders.FileSystemFinder',
- 'django.contrib.staticfiles.finders.AppDirectoriesFinder',
- 'compressor.finders.CompressorFinder',
+ "django.contrib.staticfiles.finders.FileSystemFinder",
+ "django.contrib.staticfiles.finders.AppDirectoriesFinder",
+ "compressor.finders.CompressorFinder",
]
COMPRESS_ENABLED = True
COMPRESS_OFFLINE = False # The default, but we're being explicit.
COMPRESS_PRECOMPILERS = [
- ('text/x-sass', 'django_libsass.SassCompiler'),
- ('text/x-scss', 'django_libsass.SassCompiler'),
+ ("text/x-sass", "django_libsass.SassCompiler"),
+ ("text/x-scss", "django_libsass.SassCompiler"),
]
COMPRESS_ROOT = STATIC_ROOT
@@ -392,39 +388,37 @@ def make_list(value):
# CRISPY-FORMS CONFIGURATION
# -----------------------------------------------------------------------------
-INSTALLED_APPS += ['crispy_forms']
+INSTALLED_APPS += ["crispy_forms"]
# See http://django-crispy-forms.readthedocs.io/en/latest/template_packs.html .
-CRISPY_TEMPLATE_PACK = 'mitlib_crispy'
+CRISPY_TEMPLATE_PACK = "mitlib_crispy"
-CRISPY_ALLOWED_TEMPLATE_PACKS = ['mitlib_crispy']
+CRISPY_ALLOWED_TEMPLATE_PACKS = ["mitlib_crispy"]
# DJANGO-DEBUG-TOOLBAR CONFIGURATION
# -----------------------------------------------------------------------------
-INSTALLED_APPS += ['debug_toolbar']
+INSTALLED_APPS += ["debug_toolbar"]
-MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware']
+MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"]
-INTERNAL_IPS = ['127.0.0.1']
+INTERNAL_IPS = ["127.0.0.1"]
# DSPACE CUSTOMIZATION CONFIGURATION
# -----------------------------------------------------------------------------
-DSPACE_SALT = os.getenv('DSPACE_AUTHOR_ID_SALT', default='salty')
+DSPACE_SALT = os.getenv("DSPACE_AUTHOR_ID_SALT", default="salty")
# CELERY CONFIGURATION
# -----------------------------------------------------------------------------
-INSTALLED_APPS += ['celery_progress']
+INSTALLED_APPS += ["celery_progress"]
-CELERY_BROKER_URL = os.getenv('REDIS_URL',
- default='redis://localhost:6379/0')
-CELERY_RESULT_BACKEND = os.getenv('REDIS_URL',
- default='redis://localhost:6379')
+CELERY_BROKER_URL = os.getenv("REDIS_URL", default="redis://localhost:6379/0")
+CELERY_RESULT_BACKEND = os.getenv("REDIS_URL", default="redis://localhost:6379")
CELERY_BROKER_TRANSPORT_OPTIONS = {
"max_retries": 5,
"interval_start": 0,
diff --git a/solenoid/settings/heroku.py b/solenoid/settings/heroku.py
index af54768b..19f2eab7 100644
--- a/solenoid/settings/heroku.py
+++ b/solenoid/settings/heroku.py
@@ -12,34 +12,36 @@
# -----------------------------------------------------------------------------
db_from_env = dj_database_url.config(conn_max_age=500)
-DATABASES['default'].update(db_from_env)
+DATABASES["default"].update(db_from_env)
# GENERAL CONFIGURATION
# -----------------------------------------------------------------------------
-SECRET_KEY = bool(os.environ.get('DJANGO_SECRET_KEY'))
+SECRET_KEY = os.environ["DJANGO_SECRET_KEY"]
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
-SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
+SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
-ALLOWED_HOSTS = ['mitlibraries-solenoid.herokuapp.com',
- 'mitlibraries-solenoid-staging.herokuapp.com']
+ALLOWED_HOSTS = [
+ "mitlibraries-solenoid.herokuapp.com",
+ "mitlibraries-solenoid-staging.herokuapp.com",
+]
# This allows us to include review apps in ALLOWED_HOSTS even though we don't
# know their name until runtime.
-APP_NAME = os.environ.get('HEROKU_APP_NAME')
+APP_NAME = os.environ.get("HEROKU_APP_NAME")
if APP_NAME:
- url = '{}.herokuapp.com'.format(APP_NAME)
+ url = "{}.herokuapp.com".format(APP_NAME)
ALLOWED_HOSTS.append(url)
# STATIC FILE CONFIGURATION
# -----------------------------------------------------------------------------
-STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
+STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
-MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
+MIDDLEWARE += ["whitenoise.middleware.WhiteNoiseMiddleware"]
COMPRESS_ENABLED = True
COMPRESS_OFFLINE = True
@@ -49,37 +51,34 @@
# -----------------------------------------------------------------------------
LOGGING = {
- 'version': 1,
- 'disable_existing_loggers': False,
- 'filters': {
- 'require_debug_false': {
- '()': 'django.utils.log.RequireDebugFalse'
- }
- },
- 'formatters': {
- 'brief': {
- 'format': ('%(asctime)s %(levelname)s %(name)s[%(funcName)s]: '
- '%(message)s'),
+ "version": 1,
+ "disable_existing_loggers": False,
+ "filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}},
+ "formatters": {
+ "brief": {
+ "format": (
+ "%(asctime)s %(levelname)s %(name)s[%(funcName)s]: " "%(message)s"
+ ),
},
},
- 'handlers': {
- 'console_info': {
- 'level': 'INFO',
- 'class': 'logging.StreamHandler',
- 'stream': sys.stdout
+ "handlers": {
+ "console_info": {
+ "level": "INFO",
+ "class": "logging.StreamHandler",
+ "stream": sys.stdout,
},
},
- 'loggers': {
- '': {
- 'handlers': ['console_info'],
- 'level': 'INFO',
+ "loggers": {
+ "": {
+ "handlers": ["console_info"],
+ "level": "INFO",
}
- }
+ },
}
# Will be emailed by the management command about API usage. This is currently
# set to a Webmoira list.
-ADMINS = [('Solenoid Admins', 'solenoid-admins@mit.edu')]
+ADMINS = [("Solenoid Admins", "solenoid-admins@mit.edu")]
# OAUTH CONFIGURATION
@@ -88,7 +87,7 @@
# Default to requiring login on Heroku servers, but allow this to be turned off
# via environment variable in case it's useful to have a test server be more
# freely accessible.
-LOGIN_REQUIRED = boolean(os.environ.get('DJANGO_LOGIN_REQUIRED', True))
+LOGIN_REQUIRED = boolean(os.environ.get("DJANGO_LOGIN_REQUIRED", True))
# QUOTAGUARD CONFIGURATION
@@ -97,16 +96,16 @@
# The quotaguard docs say there will be a QUOTAGUARD_URL env variable
# provisioned, but in the wild the observed name of this variable is
# QUOTAGUARDSTATIC_URL.
-QUOTAGUARD_URL = os.environ.get('QUOTAGUARDSTATIC_URL', None)
+QUOTAGUARD_URL = os.environ.get("QUOTAGUARDSTATIC_URL", None)
# EXCEPTION CONFIGURATION
# -----------------------------------------------------------------------------
sentry_sdk.init(
- dsn=os.environ.get('SENTRY_DSN', None),
- environment=os.environ.get('SENTRY_ENVIRONMENT', 'development'),
- integrations=[CeleryIntegration(), DjangoIntegration()]
+ dsn=os.environ.get("SENTRY_DSN", None),
+ environment=os.environ.get("SENTRY_ENVIRONMENT", "development"),
+ integrations=[CeleryIntegration(), DjangoIntegration()],
)
@@ -114,4 +113,4 @@
# -----------------------------------------------------------------------------
# Do not substitute a default value on Heroku, this is required in production
-DSPACE_SALT = os.environ['DSPACE_AUTHOR_ID_SALT']
+DSPACE_SALT = os.environ["DSPACE_AUTHOR_ID_SALT"]
diff --git a/solenoid/urls.py b/solenoid/urls.py
index a821ffe5..fd3f230c 100644
--- a/solenoid/urls.py
+++ b/solenoid/urls.py
@@ -22,24 +22,27 @@
from .views import HomeView
urlpatterns = [
- re_path(r'^admin/', admin.site.urls),
- re_path(r'^$', HomeView.as_view(), name='home'),
- re_path(r'^celery-progress/', include('celery_progress.urls',
- namespace='celery_progress')),
- re_path(r'^records/', include('solenoid.records.urls',
- namespace='records')),
- re_path(r'^emails/', include('solenoid.emails.urls', namespace='emails')),
- re_path(r'^people/', include('solenoid.people.urls', namespace='people')),
- re_path(r'^oauth2/', include('social_django.urls', namespace='social')),
- re_path(r'^logout/$',
- SolenoidLogoutView.as_view(template_name='userauth/logout.html'),
- name='logout'),
- re_path(r'^500/$', server_error),
- ]
+ re_path(r"^admin/", admin.site.urls),
+ re_path(r"^$", HomeView.as_view(), name="home"),
+ re_path(
+ r"^celery-progress/", include("celery_progress.urls", namespace="celery_progress")
+ ),
+ re_path(r"^records/", include("solenoid.records.urls", namespace="records")),
+ re_path(r"^emails/", include("solenoid.emails.urls", namespace="emails")),
+ re_path(r"^people/", include("solenoid.people.urls", namespace="people")),
+ re_path(r"^oauth2/", include("social_django.urls", namespace="social")),
+ re_path(
+ r"^logout/$",
+ SolenoidLogoutView.as_view(template_name="userauth/logout.html"),
+ name="logout",
+ ),
+ re_path(r"^500/$", server_error),
+]
if settings.DEBUG:
import debug_toolbar
+
urlpatterns = [
- re_path(r'^__debug__/', include(debug_toolbar.urls)),
- ] + urlpatterns
+ re_path(r"^__debug__/", include(debug_toolbar.urls)),
+ ] + urlpatterns
diff --git a/solenoid/userauth/backends.py b/solenoid/userauth/backends.py
index ed18d1f6..a2e0525d 100644
--- a/solenoid/userauth/backends.py
+++ b/solenoid/userauth/backends.py
@@ -6,10 +6,11 @@
class MITOAuth2(BaseOAuth2):
"""MIT OAuth authentication backend"""
- name = 'mitoauth2'
- AUTHORIZATION_URL = 'https://oidc.mit.edu/authorize'
- ACCESS_TOKEN_URL = 'https://oidc.mit.edu/token'
- USER_INFO_URL = 'https://oidc.mit.edu/userinfo'
+
+ name = "mitoauth2"
+ AUTHORIZATION_URL = "https://oidc.mit.edu/authorize"
+ ACCESS_TOKEN_URL = "https://oidc.mit.edu/token"
+ USER_INFO_URL = "https://oidc.mit.edu/userinfo"
def get_user_details(self, response):
"""Fetch user details from user information endpoint after successful
@@ -18,19 +19,21 @@ def get_user_details(self, response):
This is important because it's not enough to verify that the user has
an MIT account (which is covered by the authorization steps); we will
need to verify that the account is on our list of authorized users."""
- token = response.get('access_token')
+ token = response.get("access_token")
headers = {"Authorization": "Bearer %s" % token}
endpoint = self.USER_INFO_URL
response = requests.get(endpoint, headers=headers)
- return {'email': response.json()['email'] or '',
- # We'll need sub, the unique ID, for get_user_id.
- 'sub': response.json()['sub']}
+ return {
+ "email": response.json()["email"] or "",
+ # We'll need sub, the unique ID, for get_user_id.
+ "sub": response.json()["sub"],
+ }
def get_user_id(self, details, response):
# This is important for allowing us to associate logins with already-
# created accounts; otherwise we'll see an error if someone tries to
# log in more than once.
- return details['sub']
+ return details["sub"]
def auth_complete_credentials(self):
return self.get_key_and_secret()
@@ -38,9 +41,9 @@ def auth_complete_credentials(self):
def access_token_url(self):
base_url = super(MITOAuth2, self).access_token_url()
params = {
- 'grant_type': 'authorization_code',
- 'code': self.data['code'],
- 'redirect_uri': self.get_redirect_uri()
+ "grant_type": "authorization_code",
+ "code": self.data["code"],
+ "redirect_uri": self.get_redirect_uri(),
}
return url_add_parameters(base_url, params)
diff --git a/solenoid/userauth/mixins.py b/solenoid/userauth/mixins.py
index 1134e88e..e933fec2 100644
--- a/solenoid/userauth/mixins.py
+++ b/solenoid/userauth/mixins.py
@@ -10,9 +10,10 @@ class ConditionalLoginRequiredMixin(AccessMixin):
It is written by analogy with django.contrib.auth.mixins.LoginRequiredMixin
but includes the conditional settings check.
"""
+
def dispatch(self, request, *args, **kwargs):
- if (settings.LOGIN_REQUIRED and
- not getattr(request.user, 'is_authenticated')):
+ if settings.LOGIN_REQUIRED and not getattr(request.user, "is_authenticated"):
return self.handle_no_permission()
- return super(ConditionalLoginRequiredMixin, self
- ).dispatch(request, *args, **kwargs)
+ return super(ConditionalLoginRequiredMixin, self).dispatch(
+ request, *args, **kwargs
+ )
diff --git a/solenoid/userauth/views.py b/solenoid/userauth/views.py
index 68f7f085..d06b3853 100644
--- a/solenoid/userauth/views.py
+++ b/solenoid/userauth/views.py
@@ -6,8 +6,7 @@ class SolenoidLogoutView(LogoutView):
def dispatch(self, request, *args, **kwargs):
backend = self._get_backend_instance(request)
backend.revoke(request.user)
- return super(SolenoidLogoutView, self).dispatch(
- request, *args, **kwargs)
+ return super(SolenoidLogoutView, self).dispatch(request, *args, **kwargs)
def _get_backend_instance(self, request):
strategy = load_strategy()
diff --git a/solenoid/views.py b/solenoid/views.py
index caa1c557..26b7f3d9 100644
--- a/solenoid/views.py
+++ b/solenoid/views.py
@@ -6,15 +6,15 @@
class HomeView(ConditionalLoginRequiredMixin, TemplateView):
- template_name = 'index.html'
+ template_name = "index.html"
def get_context_data(self, **kwargs):
context = super(HomeView, self).get_context_data(**kwargs)
if Liaison.objects.count():
- context['liaison_url'] = reverse('people:liaison_list')
- context['liaison_text'] = 'manage'
+ context["liaison_url"] = reverse("people:liaison_list")
+ context["liaison_text"] = "manage"
else:
- context['liaison_url'] = reverse('people:liaison_create')
- context['liaison_text'] = 'create'
+ context["liaison_url"] = reverse("people:liaison_create")
+ context["liaison_text"] = "create"
return context