Make Goodreads import more forgiving WRT already existing books
LaurenceWarne committed Oct 28, 2024
1 parent 8107ace commit 2c22e99
Showing 8 changed files with 43 additions and 9 deletions.
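
In short, the import now tolerates books that already exist: CSV rows whose ISBNs are already in the repository are skipped before creation, and marking a book as started no longer aborts the import when the book is already being read. A rough sketch of the ISBN-filtering step follows; it is not the project's code, and Row and Book are placeholder types standing in for the Goodreads CSV row and UserBook.

// Rough sketch of the dedup step; Row and Book are placeholder types
// standing in for the Goodreads CSV row and UserBook.
object DedupSketch {
  final case class Row(sanitizedIsbn: String, title: String)
  final case class Book(isbn: String, title: String)

  def rowsToImport(rows: List[Row], existing: List[Book]): List[Row] = {
    // Collect the ISBNs we already have, then drop any CSV row that matches one.
    val existingIsbns = existing.map(_.isbn).toSet
    rows.filterNot(r => existingIsbns.contains(r.sanitizedIsbn))
  }
}
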
@@ -3,6 +3,7 @@ package fin.service.book
 import fin.Types._

 trait BookManagementService[F[_]] {
+  def books: F[List[UserBook]]
   def createBook(args: MutationCreateBookArgs): F[UserBook]
   def createBooks(books: List[UserBook]): F[List[UserBook]]
   def rateBook(args: MutationRateBookArgs): F[UserBook]

@@ -17,6 +17,8 @@ class BookManagementServiceImpl[F[_]: MonadThrow, G[_]: MonadThrow] private (
     transact: G ~> F
 ) extends BookManagementService[F] {

+  override def books: F[List[UserBook]] = transact(bookRepo.books)
+
   override def createBook(args: MutationCreateBookArgs): F[UserBook] = {
     val transaction: LocalDate => G[UserBook] = date =>
       for {

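The new books accessor on the service is a one-liner because the repository already runs in G (ConnectionIO in the SQLite implementation further down) and transact: G ~> F lifts it into the service's effect. A minimal sketch of that pattern, with simplified stand-in types rather than the project's definitions:

// Minimal sketch of the `transact: G ~> F` pattern; the types are simplified
// stand-ins, not the project's definitions.
import cats.~>
import cats.arrow.FunctionK
import cats.effect.IO

object TransactSketch {
  trait BookRepo[G[_]] { def books: G[List[String]] }

  final class BookService[F[_], G[_]](repo: BookRepo[G], transact: G ~> F) {
    // Every repository call is wrapped by the natural transformation.
    def books: F[List[String]] = transact(repo.books)
  }

  // With G = F = IO the natural transformation is just the identity.
  val repo: BookRepo[IO] = new BookRepo[IO] {
    def books: IO[List[String]] = IO.pure(List("9780441013593"))
  }
  val service = new BookService[IO, IO](repo, FunctionK.id[IO])
}
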
finito/core/src/fin/service/book/SpecialBookService.scala (2 changes: 2 additions & 0 deletions)
@@ -20,6 +20,8 @@ class SpecialBookService[F[_]: Sync: Logger] private (

   private val collectionHooks = specialCollections.flatMap(_.collectionHooks)

+  override def books: F[List[UserBook]] = wrappedBookService.books
+
   override def createBook(args: MutationCreateBookArgs): F[UserBook] =
     wrappedBookService.createBook(args)


finito/core/src/fin/service/port/GoodreadsImportService.scala (25 changes: 16 additions & 9 deletions)
@@ -14,12 +14,12 @@ import fs2.data.csv._
 import fs2.data.csv.generic.semiauto._
 import org.typelevel.log4cats.Logger

-import fin.BookAlreadyInCollectionError
 import fin.BookConversions._
 import fin.Types._
 import fin.service.book._
 import fin.service.collection._
 import fin.service.search.BookInfoService
+import fin.{BookAlreadyBeingReadError, BookAlreadyInCollectionError}

 /** https://www.goodreads.com/review/import
   */
@@ -50,7 +50,8 @@ class GoodreadsImportService[F[_]: Async: Logger](
       _ <- Logger[F].debug(
         show"Received ${content.length} chars worth of content"
       )
-      rows <- Async[F].fromEither(result)
+      rows <- Async[F].fromEither(result)
+
       userBooks <- createBooks(rows, langRestrict)
       _ <- markBooks(userBooks)

@@ -111,7 +112,10 @@
       langRestrict: Option[String]
   ): F[List[UserBook]] = {
     for {
+      existing <- specialBookManagementService.books
+      existingIsbs = existing.map(_.isbn).toSet
       userBooks <- rows
+        .filterNot(r => existingIsbs.contains(r.sanitizedIsbn))
         .map { b =>
           b.title match {
             case s"$title ($_ #$_)" => b.copy(title = title)
@@ -152,17 +156,20 @@
     for {
       _ <- books.map(b => (b, b.lastRead)).traverseCollect {
         case (b, Some(date)) =>
-          specialBookManagementService.finishReading(
-            MutationFinishReadingArgs(b.toBookInput, Some(date))
-          ) *> Logger[F]
+          specialBookManagementService
+            .finishReading(
+              MutationFinishReadingArgs(b.toBookInput, Some(date))
+            ) *> Logger[F]
            .info(show"Marked ${b.title} as finished on ${date.toString}")
       }
       _ <- books.map(b => (b, b.startedReading)).traverseCollect {
         case (b, Some(date)) =>
-          specialBookManagementService.startReading(
-            MutationStartReadingArgs(b.toBookInput, Some(date))
-          ) *> Logger[F]
-            .info(show"Marked ${b.title} as started on ${date.toString}")
+          specialBookManagementService
+            .startReading(MutationStartReadingArgs(b.toBookInput, Some(date)))
+            .void
+            .recover { case BookAlreadyBeingReadError(_) => () } *>
+            Logger[F]
+              .info(show"Marked ${b.title} as started on ${date.toString}")
       }
       _ <- books.map(b => (b, b.rating)).traverseCollect {
         case (b, Some(rating)) =>

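The other half of the change is the recover above: if a book from the CSV is already marked as being read, startReading's BookAlreadyBeingReadError is swallowed instead of failing the whole import. A minimal sketch of that idiom, assuming cats-effect 3's IO; the error type and the startReading stub here are stand-ins, not the project's definitions.

// Minimal sketch of the recover idiom, assuming cats-effect 3; the error type
// and the startReading stub are stand-ins for the project's definitions.
import cats.effect.IO

object RecoverSketch {
  final case class BookAlreadyBeingReadError(title: String) extends Throwable

  def startReading(title: String, alreadyReading: Set[String]): IO[Unit] =
    if (alreadyReading.contains(title))
      IO.raiseError(BookAlreadyBeingReadError(title))
    else IO.unit

  // A repeat attempt no longer aborts the surrounding for-comprehension.
  val tolerant: IO[Unit] =
    startReading("Dune", Set("Dune"))
      .recover { case BookAlreadyBeingReadError(_) => () }
}
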
@@ -16,6 +16,8 @@ import fin.persistence.BookRepository
 class InMemoryBookRepository[F[_]: Monad](booksRef: Ref[F, List[UserBook]])
     extends BookRepository[F] {

+  override def books: F[List[UserBook]] = booksRef.get
+
   override def createBook(book: BookInput, date: LocalDate): F[Unit] =
     booksRef.update(book.toUserBook(dateAdded = date.some) :: _)


@@ -95,6 +95,22 @@ object GoodreadsImportServiceTest extends IOSuite {
     expect(importResult.unsuccessful.length == 0)
   }

+  test(
+    "importResource doesn't fail when called when CSV contains already existing books"
+  ) { case (importService, bookRepo, _, rnd) =>
+    for {
+      (isbn1, isbn2, csv) <- csv(rnd)
+      _ <- importService.importResource(csv, None)
+      importResult <- importService.importResource(csv, None)
+      book1 <- bookRepo.retrieveBook(isbn1)
+      book2 <- bookRepo.retrieveBook(isbn2)
+    } yield expect(book1.nonEmpty) &&
+      expect(book2.nonEmpty) &&
+      expect(importResult.successful.length == 0) &&
+      expect(importResult.partiallySuccessful.length == 0) &&
+      expect(importResult.unsuccessful.length == 0)
+  }
+
   test("importResource adds books to correct collections") {
     case (importService, _, collectionService, rnd) =>
       for {

@@ -5,6 +5,7 @@ import java.time.LocalDate
 import fin.Types._

 trait BookRepository[F[_]] {
+  def books: F[List[UserBook]]
   def retrieveBook(isbn: String): F[Option[UserBook]]
   def retrieveMultipleBooks(isbns: List[String]): F[List[UserBook]]
   def createBook(book: BookInput, date: LocalDate): F[Unit]

@@ -17,6 +17,9 @@ object SqliteBookRepository extends BookRepository[ConnectionIO] {

   import BookFragments._

+  override def books: ConnectionIO[List[UserBook]] =
+    allBooks.query[BookRow].to[List].nested.map(_.toBook).value
+
   override def retrieveBook(isbn: String): ConnectionIO[Option[UserBook]] =
     BookFragments
       .retrieveBook(isbn)

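The .nested.map(_.toBook).value chain above maps the row-to-domain conversion through both ConnectionIO and List at once via cats' Nested. A small sketch of the same idiom on an ordinary nested type; BookRow, Book and toBook here are hypothetical stand-ins for the repository's row and domain types.

// Small sketch of the .nested.map(...).value idiom; BookRow, Book and toBook
// are hypothetical stand-ins for the repository's row and domain types.
import cats.implicits._

object NestedSketch {
  final case class BookRow(isbn: String, title: String)
  final case class Book(isbn: String, title: String)
  def toBook(row: BookRow): Book = Book(row.isbn, row.title)

  val rows: Option[List[BookRow]] = Some(List(BookRow("9780441013593", "Dune")))

  // Maps through both layers at once; equivalent to rows.map(_.map(toBook)).
  val books: Option[List[Book]] = rows.nested.map(toBook).value
}
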
