Extraction to separate Collection from Model for more complex scenarios
darkfrog26 committed May 1, 2024
1 parent b4cf710 commit 62324ce
Showing 22 changed files with 136 additions and 88 deletions.
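
At a high level, this commit splits the old Collection into two pieces: AbstractCollection, which owns storage, id-level locking, and the CRUD operations, and DocumentModel, which owns the RW serializer, the lifecycle hooks (preSet, preSetJson, postSet, preDelete, postDelete), and the IndexedLinks registry. Collection survives as the simple case that mixes the two together, so existing callers keep working. Below is a minimal sketch of the kind of "more complex scenario" the extraction enables, with one model defined separately and attached to a storage-only collection; the Person document and the RW.gen derivation are illustrative assumptions, not part of this commit:

```scala
import fabric.rw._
import lightdb.{Document, Id, LightDB}
import lightdb.model.{AbstractCollection, DocumentModel}

// Hypothetical document type, for illustration only.
case class Person(name: String, _id: Id[Person] = Id()) extends Document[Person]

// The model carries serialization and lifecycle hooks, independent of storage.
object PersonModel extends DocumentModel[Person] {
  override implicit val rw: RW[Person] = RW.gen
}

// The collection describes only storage concerns and points at its model.
case class PersonCollection(override val db: LightDB) extends AbstractCollection[Person] {
  override def model: DocumentModel[Person] = PersonModel
  override def collectionName: String = "people"
  override def autoCommit: Boolean = false
  override def atomic: Boolean = true
}
```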
1 change: 1 addition & 0 deletions all/src/test/scala/spec/SimpleHaloAndLuceneSpec.scala
@@ -7,6 +7,7 @@ import lightdb._
 import lightdb.halo.HaloDBSupport
 import lightdb.lucene.LuceneSupport
 import lightdb.lucene.index.{IntField, StringField}
+import lightdb.model.Collection
 import lightdb.sqlite.{SQLIndexedField, SQLiteSupport}
 import lightdb.upgrade.DatabaseUpgrade
 import org.scalatest.matchers.should.Matchers
1 change: 1 addition & 0 deletions all/src/test/scala/spec/SimpleHaloAndSQLiteSpec.scala
@@ -5,6 +5,7 @@ import cats.effect.testing.scalatest.AsyncIOSpec
 import fabric.rw._
 import lightdb._
 import lightdb.halo.HaloDBSupport
+import lightdb.model.Collection
 import lightdb.sqlite.{SQLIndexedField, SQLiteSupport}
 import lightdb.upgrade.DatabaseUpgrade
 import org.scalatest.matchers.should.Matchers
@@ -3,9 +3,10 @@ package benchmark
 import cats.effect.IO
 import fabric.rw.RW
 import lightdb.halo.HaloDBSupport
+import lightdb.model.Collection
 import lightdb.sqlite.{SQLData, SQLIndexedField, SQLiteSupport}
 import lightdb.upgrade.DatabaseUpgrade
-import lightdb.{Collection, Document, Id, IndexedLinks, LightDB, MaxLinks}
+import lightdb.{Document, Id, IndexedLinks, LightDB, MaxLinks}

 import java.nio.file.Paths
 import java.sql.ResultSet
2 changes: 1 addition & 1 deletion build.sbt
@@ -15,7 +15,7 @@ val developerURL: String = "https://matthicks.com"

 name := projectName
 ThisBuild / organization := org
-ThisBuild / version := "0.4.2"
+ThisBuild / version := "0.5.0-SNAPSHOT"
 ThisBuild / scalaVersion := scala213
 ThisBuild / crossScalaVersions := allScalaVersions
 ThisBuild / scalacOptions ++= Seq("-unchecked", "-deprecation")
3 changes: 2 additions & 1 deletion core/src/main/scala/lightdb/IndexedLinks.scala
@@ -1,12 +1,13 @@
 package lightdb

 import cats.effect.IO
+import lightdb.model.AbstractCollection

 case class IndexedLinks[V, D <: Document[D]](name: String,
                                              createKey: V => String,
                                              createV: D => V,
                                              loadStore: () => Store,
-                                             collection: Collection[D],
+                                             collection: AbstractCollection[D],
                                              maxLinks: MaxLinks) {
   lazy val store: Store = loadStore()

1 change: 1 addition & 0 deletions core/src/main/scala/lightdb/LightDB.scala
@@ -3,6 +3,7 @@ package lightdb
 import cats.effect.IO
 import cats.implicits.{catsSyntaxApplicativeByName, catsSyntaxParallelSequence1, toTraverseOps}
 import fabric.rw._
+import lightdb.model.Collection
 import lightdb.upgrade.DatabaseUpgrade

 import java.nio.file.Path
3 changes: 2 additions & 1 deletion core/src/main/scala/lightdb/RecordDocumentCollection.scala
@@ -2,9 +2,10 @@ package lightdb

 import cats.effect.IO
 import fabric._
+import lightdb.model.{AbstractCollection, Collection}

 abstract class RecordDocumentCollection[D <: RecordDocument[D]](collectionName: String, db: LightDB) extends Collection[D](collectionName, db) {
-  override protected def preSetJson(json: Json): IO[Json] = IO {
+  override def preSetJson(json: Json, collection: AbstractCollection[D]): IO[Json] = IO {
     json.modify("modified") { _ =>
       System.currentTimeMillis()
     }
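
The reshaped hook does the same work as before, stamping the current time into the document's "modified" field on every write; it now receives the owning collection explicitly instead of assuming it is the collection itself. A small standalone sketch of what json.modify does here, using fabric directly (the document shape is made up for illustration):

```scala
import fabric._

// json.modify replaces the value at the given path,
// exactly as preSetJson does above.
val before: Json = obj("name" -> "Jane", "modified" -> 0L)
val after: Json = before.modify("modified") { _ =>
  System.currentTimeMillis()
}
```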
1 change: 1 addition & 0 deletions core/src/main/scala/lightdb/StoredValue.scala
@@ -2,6 +2,7 @@ package lightdb

 import cats.effect.IO
 import fabric.rw._
+import lightdb.model.Collection

 case class StoredValue[T](key: String,
                           collection: Collection[KeyValue],
11 changes: 6 additions & 5 deletions core/src/main/scala/lightdb/index/IndexSupport.scala
@@ -1,8 +1,9 @@
 package lightdb.index

 import cats.effect.IO
+import lightdb.model.{AbstractCollection, Collection}
 import lightdb.query.{PagedResults, Query, SearchContext}
-import lightdb.{Collection, Document}
+import lightdb.Document

 trait IndexSupport[D <: Document[D]] extends Collection[D] {
   lazy val query: Query[D] = Query(this)
@@ -18,13 +19,13 @@ trait IndexSupport[D <: Document[D]] extends Collection[D] {
                    offset: Int,
                    after: Option[PagedResults[D]]): IO[PagedResults[D]]

-  override protected def postSet(doc: D): IO[Unit] = for {
+  override def postSet(doc: D, collection: AbstractCollection[D]): IO[Unit] = for {
     _ <- indexDoc(doc, index.fields)
-    _ <- super.postSet(doc)
+    _ <- super.postSet(doc, collection)
   } yield ()

-  override protected def postDelete(doc: D): IO[Unit] = index.delete(doc._id).flatMap { _ =>
-    super.postDelete(doc)
+  override def postDelete(doc: D, collection: AbstractCollection[D]): IO[Unit] = index.delete(doc._id).flatMap { _ =>
+    super.postDelete(doc, collection)
   }

   protected def indexDoc(doc: D, fields: List[IndexedField[_, D]]): IO[Unit]
3 changes: 2 additions & 1 deletion core/src/main/scala/lightdb/index/IndexedField.scala
@@ -1,6 +1,7 @@
 package lightdb.index

-import lightdb.{Collection, Document}
+import lightdb.Document
+import lightdb.model.Collection

 trait IndexedField[F, D <: Document[D]] {
   def fieldName: String
3 changes: 2 additions & 1 deletion core/src/main/scala/lightdb/index/Indexer.scala
@@ -1,8 +1,9 @@
 package lightdb.index

 import cats.effect.IO
+import lightdb.model.Collection
 import lightdb.query.SearchContext
-import lightdb.{Collection, Document, Id, query}
+import lightdb.{Document, Id, query}

 trait Indexer[D <: Document[D]] {
   protected var _fields = List.empty[IndexedField[_, D]]
@@ -1,67 +1,30 @@
-package lightdb
+package lightdb.model

 import cats.effect.IO
 import cats.effect.unsafe.implicits.global
-import cats.implicits.{catsSyntaxApplicativeByName, toTraverseOps}
-import fabric.Json
-import fabric.rw.RW
+import cats.implicits.toTraverseOps
 import io.chrisdavenport.keysemaphore.KeySemaphore
-import lightdb.index._
-
-abstract class Collection[D <: Document[D]](val collectionName: String,
-                                            protected[lightdb] val db: LightDB,
-                                            val autoCommit: Boolean = false,
-                                            val atomic: Boolean = true) {
-  type Field[F] = IndexedField[F, D]
-
-  implicit val rw: RW[D]
-
-  protected lazy val store: Store = db.createStoreInternal(collectionName)
+import lightdb.{DocLock, Document, Id, IndexedLinks, LightDB, MaxLinks, Store}

+trait AbstractCollection[D <: Document[D]] {
   // Id-level locking
   private lazy val sem = KeySemaphore.of[IO, Id[D]](_ => 1L).unsafeRunSync()

-  private var _indexedLinks = List.empty[IndexedLinks[_, D]]
+  def model: DocumentModel[D]

-  def idStream: fs2.Stream[IO, Id[D]] = store.keyStream
+  def collectionName: String

-  def stream: fs2.Stream[IO, D] = store.streamJsonDocs[D]
+  def autoCommit: Boolean

-  /**
-   * Called before preSetJson and before the data is set to the database
-   */
-  protected def preSet(doc: D): IO[D] = IO.pure(doc)
+  def atomic: Boolean

-  /**
-   * Called after preSet and before the data is set to the database
-   */
-  protected def preSetJson(json: Json): IO[Json] = IO.pure(json)
-
-  /**
-   * Called after set
-   */
-  protected def postSet(doc: D): IO[Unit] = for {
-    // Update IndexedLinks
-    _ <- _indexedLinks.map(_.add(doc)).sequence
-    _ <- commit().whenA(autoCommit)
-  } yield ()
+  protected[lightdb] def db: LightDB

-  protected def preDelete(id: Id[D]): IO[Id[D]] = IO.pure(id)
+  protected lazy val store: Store = db.createStoreInternal(collectionName)

-  protected def postDelete(doc: D): IO[Unit] = for {
-    // Update IndexedLinks
-    _ <- _indexedLinks.map(_.remove(doc)).sequence
-    _ <- commit().whenA(autoCommit)
-  } yield ()
+  def idStream: fs2.Stream[IO, Id[D]] = store.keyStream

-  def set(doc: D)(implicit existingLock: DocLock[D] = new DocLock.Empty[D]): IO[D] = withLock(doc._id) { _ =>
-    for {
-      modified <- preSet(doc)
-      json <- preSetJson(rw.read(doc))
-      _ <- store.putJson(doc._id, json)
-      _ <- postSet(doc)
-    } yield modified
-  }
+  def stream: fs2.Stream[IO, D] = store.streamJsonDocs[D](model.rw)

   def withLock[Return](id: Id[D])(f: DocLock[D] => IO[Return])
                       (implicit existingLock: DocLock[D] = new DocLock.Empty[D]): IO[Return] = {
@@ -82,6 +45,15 @@ abstract class Collection[D <: Document[D]](val collectionName: String,
     }
   }

+  def set(doc: D)(implicit existingLock: DocLock[D] = new DocLock.Empty[D]): IO[D] = withLock(doc._id) { _ =>
+    for {
+      modified <- model.preSet(doc, this)
+      json <- model.preSetJson(model.rw.read(doc), this)
+      _ <- store.putJson(doc._id, json)
+      _ <- model.postSet(doc, this)
+    } yield modified
+  }
+
   def modify(id: Id[D])
             (f: Option[D] => IO[Option[D]])
             (implicit existingLock: DocLock[D] = new DocLock.Empty[D]): IO[Option[D]] = withLock(id) { implicit lock =>
@@ -92,30 +64,38 @@ abstract class Collection[D <: Document[D]](val collectionName: String,
       }
     }
   }

   def delete(id: Id[D])
             (implicit existingLock: DocLock[D] = new DocLock.Empty[D]): IO[Option[D]] = withLock(id) { implicit lock =>
     for {
-      modifiedId <- preDelete(id)
+      modifiedId <- model.preDelete(id, this)
       deleted <- get(modifiedId).flatMap {
         case Some(d) => store.delete(id).map(_ => Some(d))
         case None => IO.pure(None)
       }
       _ <- deleted match {
-        case Some(doc) => postDelete(doc)
+        case Some(doc) => model.postDelete(doc, this)
         case None => IO.unit
       }
     } yield deleted
   }

   def truncate(): IO[Unit] = for {
     _ <- store.truncate()
-    _ <- _indexedLinks.map(_.store.truncate()).sequence
+    _ <- model.indexedLinks.map(_.store.truncate()).sequence
   } yield ()

-  def get(id: Id[D]): IO[Option[D]] = store.getJsonDoc(id)
+  def get(id: Id[D]): IO[Option[D]] = store.getJsonDoc(id)(model.rw)

   def apply(id: Id[D]): IO[D] = get(id)
     .map(_.getOrElse(throw new RuntimeException(s"$id not found in $collectionName")))

+  def size: IO[Int] = store.size
+
+  def commit(): IO[Unit] = store.commit()
+
+  def dispose(): IO[Unit] = IO.unit
+
   /**
    * Creates a key/value stored object with a list of links. This can be incredibly efficient for small lists, but much
    * slower for larger sets of data and a standard index would be preferable.
@@ -133,23 +113,8 @@ abstract class Collection[D <: Document[D]](val collectionName: String,
       maxLinks = maxLinks
     )
     synchronized {
-      _indexedLinks = il :: _indexedLinks
+      model._indexedLinks = il :: model._indexedLinks
     }
     il
   }
-
-  def size: IO[Int] = store.size
-
-  def commit(): IO[Unit] = store.commit()
-
-  def dispose(): IO[Unit] = IO.unit
 }
-
-object Collection {
-  def apply[D <: Document[D]](collectionName: String,
-                              db: LightDB,
-                              autoCommit: Boolean = false)(implicit docRW: RW[D]): Collection[D] =
-    new Collection[D](collectionName, db, autoCommit = autoCommit) {
-      override implicit val rw: RW[D] = docRW
-    }
-}
20 changes: 20 additions & 0 deletions core/src/main/scala/lightdb/model/Collection.scala
@@ -0,0 +1,20 @@
+package lightdb.model
+
+import fabric.rw.RW
+import lightdb.{Document, LightDB}
+
+abstract class Collection[D <: Document[D]](val collectionName: String,
+                                            protected[lightdb] val db: LightDB,
+                                            val autoCommit: Boolean = false,
+                                            val atomic: Boolean = true) extends AbstractCollection[D] with DocumentModel[D] {
+  override def model: DocumentModel[D] = this
+}
+
+object Collection {
+  def apply[D <: Document[D]](collectionName: String,
+                              db: LightDB,
+                              autoCommit: Boolean = false)(implicit docRW: RW[D]): Collection[D] =
+    new Collection[D](collectionName, db, autoCommit = autoCommit) {
+      override implicit val rw: RW[D] = docRW
+    }
+}
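
For the common case, call sites are unchanged: Collection.apply still builds an anonymous Collection that supplies the implicit RW. A usage sketch, reusing the hypothetical Person document from the first sketch and leaving database construction elided:

```scala
import fabric.rw._
import lightdb.LightDB
import lightdb.model.Collection

// Given some LightDB instance (construction elided) and an RW for Person,
// this builds the combined collection+model in one call, as before the split.
def makePeople(db: LightDB)(implicit rw: RW[Person]): Collection[Person] =
  Collection[Person]("people", db)
```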
45 changes: 45 additions & 0 deletions core/src/main/scala/lightdb/model/DocumentModel.scala
@@ -0,0 +1,45 @@
+package lightdb.model
+
+import cats.effect.IO
+import cats.implicits.{catsSyntaxApplicativeByName, toTraverseOps}
+import fabric.Json
+import fabric.rw.RW
+import lightdb.index.IndexedField
+import lightdb.{Document, Id, IndexedLinks}
+
+trait DocumentModel[D <: Document[D]] {
+  type Field[F] = IndexedField[F, D]
+
+  implicit val rw: RW[D]
+
+  private[lightdb] var _indexedLinks = List.empty[IndexedLinks[_, D]]
+
+  def indexedLinks: List[IndexedLinks[_, D]] = _indexedLinks
+
+  /**
+   * Called before preSetJson and before the data is set to the database
+   */
+  def preSet(doc: D, collection: AbstractCollection[D]): IO[D] = IO.pure(doc)
+
+  /**
+   * Called after preSet and before the data is set to the database
+   */
+  def preSetJson(json: Json, collection: AbstractCollection[D]): IO[Json] = IO.pure(json)
+
+  /**
+   * Called after set
+   */
+  def postSet(doc: D, collection: AbstractCollection[D]): IO[Unit] = for {
+    // Update IndexedLinks
+    _ <- _indexedLinks.map(_.add(doc)).sequence
+    _ <- collection.commit().whenA(collection.autoCommit)
+  } yield ()
+
+  def preDelete(id: Id[D], collection: AbstractCollection[D]): IO[Id[D]] = IO.pure(id)
+
+  def postDelete(doc: D, collection: AbstractCollection[D]): IO[Unit] = for {
+    // Update IndexedLinks
+    _ <- _indexedLinks.map(_.remove(doc)).sequence
+    _ <- collection.commit().whenA(collection.autoCommit)
+  } yield ()
+}
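
Because the hooks now live on DocumentModel and are public, behavior can be customized on the model and shared across collections without subclassing Collection. A sketch of a model that logs writes while preserving the defaults (IndexedLinks upkeep and optional auto-commit) by deferring to super.postSet, again assuming the hypothetical Person document from the first sketch:

```scala
import cats.effect.IO
import fabric.rw._
import lightdb.model.{AbstractCollection, DocumentModel}

object AuditedPersonModel extends DocumentModel[Person] {
  override implicit val rw: RW[Person] = RW.gen

  // Log after each write, then defer to the default postSet so IndexedLinks
  // stay in sync and auto-commit (when enabled) still fires.
  override def postSet(doc: Person, collection: AbstractCollection[Person]): IO[Unit] = for {
    _ <- IO(println(s"set ${doc._id} in ${collection.collectionName}"))
    _ <- super.postSet(doc, collection)
  } yield ()
}
```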
@@ -1,7 +1,8 @@
 package lightdb.lucene.index

 import lightdb.lucene.LuceneIndexedField
-import lightdb.{Collection, Document}
+import lightdb.Document
+import lightdb.model.Collection
 import org.apache.lucene.document.Field
 import org.apache.lucene.search.SortField
 import org.apache.lucene.{document => ld}
3 changes: 2 additions & 1 deletion lucene/src/main/scala/lightdb/lucene/index/DoubleField.scala
@@ -2,7 +2,8 @@ package lightdb.lucene.index

 import lightdb.index.IndexedField
 import lightdb.lucene.LuceneIndexedField
-import lightdb.{Collection, Document}
+import lightdb.Document
+import lightdb.model.Collection
 import org.apache.lucene.document.Field
 import org.apache.lucene.search.SortField
 import org.apache.lucene.{document => ld}
3 changes: 2 additions & 1 deletion lucene/src/main/scala/lightdb/lucene/index/FloatField.scala
@@ -2,7 +2,8 @@ package lightdb.lucene.index

 import lightdb.index.IndexedField
 import lightdb.lucene.LuceneIndexedField
-import lightdb.{Collection, Document}
+import lightdb.Document
+import lightdb.model.Collection
 import org.apache.lucene.document.Field
 import org.apache.lucene.search.SortField
 import org.apache.lucene.{document => ld}