SQLite spec works
darkfrog26 committed Apr 12, 2024
1 parent 67691e0 commit 305cd01
Showing 12 changed files with 425 additions and 55 deletions.
2 changes: 2 additions & 0 deletions build.sbt
@@ -41,6 +41,8 @@ ThisBuild / resolvers += "jitpack" at "https://jitpack.io"

ThisBuild / outputStrategy := Some(StdoutOutput)

ThisBuild / Test / testOptions += Tests.Argument(TestFrameworks.ScalaTest, "-oDF")

val collectionCompatVersion: String = "2.11.0"
val haloDBVersion: String = "v0.5.6"
val catsEffectVersion: String = "3.5.4"
13 changes: 13 additions & 0 deletions core/src/main/scala/lightdb/index/IndexSupport.scala
@@ -11,8 +11,21 @@ trait IndexSupport[D <: Document[D]] extends Collection[D] {

def index: Indexer[D]

def withSearchContext[Return](f: SearchContext[D] => IO[Return]): IO[Return] = index.withSearchContext(f)

def doSearch(query: Query[D],
context: SearchContext[D],
offset: Int,
after: Option[PagedResults[D]]): IO[PagedResults[D]]

override protected def postSet(doc: D): IO[Unit] = for {
_ <- indexDoc(doc, index.fields)
_ <- super.postSet(doc)
} yield ()

override protected def postDelete(doc: D): IO[Unit] = index.delete(doc._id).flatMap { _ =>
super.postDelete(doc)
}

protected def indexDoc(doc: D, fields: List[IndexedField[_, D]]): IO[Unit]
}
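
The hooks above keep the search index in step with the collection: every postSet re-indexes the document and every postDelete removes it, while concrete backends only have to supply indexDoc and the matching delete. A minimal standalone sketch of that hook pattern, using plain cats-effect IO with made-up Doc, SimpleIndex and SimpleStore types rather than lightdb's actual classes:

import cats.effect.{IO, IOApp}

// Standalone illustration of the postSet/postDelete hook pattern above.
// Doc, SimpleIndex and SimpleStore are illustrative stand-ins, not lightdb types.
final case class Doc(id: String, name: String)

final class SimpleIndex {
  private var entries = Map.empty[String, String]
  def index(doc: Doc): IO[Unit] = IO { entries += doc.id -> doc.name }
  def delete(id: String): IO[Unit] = IO { entries -= id }
  def size: IO[Int] = IO(entries.size)
}

final class SimpleStore(index: SimpleIndex) {
  private var docs = Map.empty[String, Doc]

  // Mirrors postSet: persist the document, then index the same document.
  def set(doc: Doc): IO[Unit] =
    IO { docs += doc.id -> doc } *> index.index(doc)

  // Mirrors postDelete: remove the document from the index alongside the store.
  def delete(id: String): IO[Unit] =
    IO { docs -= id } *> index.delete(id)
}

object HookPatternExample extends IOApp.Simple {
  def run: IO[Unit] = {
    val idx = new SimpleIndex
    val store = new SimpleStore(idx)
    for {
      _ <- store.set(Doc("1", "one"))
      _ <- store.set(Doc("2", "two"))
      _ <- store.delete("1")
      n <- idx.size
      _ <- IO.println(s"indexed documents: $n") // expect 1
    } yield ()
  }
}
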
2 changes: 2 additions & 0 deletions core/src/main/scala/lightdb/index/IndexedField.scala
@@ -6,4 +6,6 @@ trait IndexedField[F, D <: Document[D]] {
def fieldName: String
def collection: Collection[D]
def get: D => Option[F]

collection.asInstanceOf[IndexSupport[D]].index.register(this)
}
8 changes: 0 additions & 8 deletions core/src/main/scala/lightdb/query/IndexContext.scala

This file was deleted.

19 changes: 19 additions & 0 deletions core/src/main/scala/lightdb/query/PageContext.scala
@@ -0,0 +1,19 @@
package lightdb.query

import cats.effect.IO
import lightdb.Document

trait PageContext[D <: Document[D]] {
def context: SearchContext[D]

def nextPage(currentPage: PagedResults[D]): IO[Option[PagedResults[D]]] = if (currentPage.hasNext) {
currentPage.query.indexSupport.doSearch(
query = currentPage.query,
context = context,
offset = currentPage.offset + currentPage.query.pageSize,
after = Some(currentPage)
).map(Some.apply)
} else {
IO.pure(None)
}
}
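
nextPage re-runs doSearch with the offset advanced by one page size and the current page passed as after, so callers can walk a result set page by page. A hedged usage sketch; collectAllIds and firstPage are illustrative names, not part of this commit, and firstPage is assumed to come from an initial doSearch call:

import cats.effect.IO
import lightdb.{Document, Id}
import lightdb.query.PagedResults

// Fold over every page reachable from the first page of results, accumulating ids.
def collectAllIds[D <: Document[D]](firstPage: PagedResults[D]): IO[List[Id[D]]] = {
  def loop(page: PagedResults[D], acc: List[Id[D]]): IO[List[Id[D]]] =
    page.context.nextPage(page).flatMap {
      case Some(next) => loop(next, acc ++ next.ids)
      case None       => IO.pure(acc)
    }
  loop(firstPage, firstPage.ids)
}
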
2 changes: 1 addition & 1 deletion core/src/main/scala/lightdb/query/PagedResults.scala
@@ -5,7 +5,7 @@ import cats.implicits.toTraverseOps
import lightdb.{Document, Id}

case class PagedResults[D <: Document[D]](query: Query[D],
- context: IndexContext[D],
context: PageContext[D],
offset: Int,
total: Int,
ids: List[Id[D]]) {
20 changes: 0 additions & 20 deletions lucene/src/main/scala/lightdb/lucene/LuceneIndexContext.scala

This file was deleted.

2 changes: 0 additions & 2 deletions lucene/src/main/scala/lightdb/lucene/LuceneIndexedField.scala
@@ -9,6 +9,4 @@ trait LuceneIndexedField[F, D <: Document[D]] extends IndexedField[F, D] {
protected[lightdb] def createFields(doc: D): List[ld.Field]

protected[lightdb] def sortType: SortField.Type

- collection.asInstanceOf[LuceneSupport[D]].index.register(this)
}
9 changes: 9 additions & 0 deletions lucene/src/main/scala/lightdb/lucene/LucenePageContext.scala
@@ -0,0 +1,9 @@
package lightdb.lucene

import cats.effect.IO
import lightdb.Document
import lightdb.query.{PageContext, PagedResults, SearchContext}
import org.apache.lucene.search.ScoreDoc

case class LucenePageContext[D <: Document[D]](context: SearchContext[D],
lastScoreDoc: Option[ScoreDoc]) extends PageContext[D]
17 changes: 5 additions & 12 deletions lucene/src/main/scala/lightdb/lucene/LuceneSupport.scala
@@ -4,7 +4,7 @@ import cats.effect.IO
import lightdb._
import lightdb.index.{IndexSupport, IndexedField, Indexer}
import lightdb.lucene.index._
- import lightdb.query.{Filter, IndexContext, PagedResults, Query, SearchContext, Sort}
import lightdb.query.{Filter, PageContext, PagedResults, Query, SearchContext, Sort}
import org.apache.lucene.search.{IndexSearcher, MatchAllDocsQuery, ScoreDoc, SearcherFactory, SearcherManager, SortField, TopFieldDocs, Query => LuceneQuery, Sort => LuceneSort}
import org.apache.lucene.{document => ld}
import org.apache.lucene.analysis.Analyzer
@@ -24,8 +24,6 @@ trait LuceneSupport[D <: Document[D]] extends IndexSupport[D] {

protected[lucene] def indexSearcher(context: SearchContext[D]): IndexSearcher = index.contextMapping.get(context)

- def withSearchContext[Return](f: SearchContext[D] => IO[Return]): IO[Return] = index.withSearchContext(f)

private def sort2SortField(sort: Sort): SortField = sort match {
case Sort.BestMatch => SortField.FIELD_SCORE
case Sort.IndexOrder => SortField.FIELD_DOC
@@ -45,15 +43,15 @@ trait LuceneSupport[D <: Document[D]] extends IndexSupport[D] {
val indexSearcher = query.indexSupport.asInstanceOf[LuceneSupport[D]].indexSearcher(context)
val topFieldDocs: TopFieldDocs = after match {
case Some(afterPage) =>
- val afterDoc = afterPage.context.asInstanceOf[LuceneIndexContext[D]].lastScoreDoc.get
val afterDoc = afterPage.context.asInstanceOf[LucenePageContext[D]].lastScoreDoc.get
indexSearcher.searchAfter(afterDoc, q, query.pageSize, s, query.scoreDocs)
case None => indexSearcher.search(q, query.pageSize, s, query.scoreDocs)
}
val scoreDocs: List[ScoreDoc] = topFieldDocs.scoreDocs.toList
val total: Int = topFieldDocs.totalHits.value.toInt
val storedFields: StoredFields = indexSearcher.storedFields()
val ids: List[Id[D]] = scoreDocs.map(doc => Id[D](storedFields.document(doc.doc).get("_id")))
- val indexContext = LuceneIndexContext(
val indexContext = LucenePageContext(
context = context,
lastScoreDoc = scoreDocs.lastOption
)
@@ -66,15 +64,10 @@ trait LuceneSupport[D <: Document[D]] extends IndexSupport[D] {
)
}

- override protected def postSet(doc: D): IO[Unit] = for {
- fields <- IO(index.fields.flatMap { field =>
override protected def indexDoc(doc: D, fields: List[IndexedField[_, D]]): IO[Unit] = for {
fields <- IO(fields.flatMap { field =>
field.asInstanceOf[LuceneIndexedField[_, D]].createFields(doc)
})
_ = index.addDoc(doc._id, fields)
- _ <- super.postSet(doc)
} yield ()

- override protected def postDelete(doc: D): IO[Unit] = index.delete(doc._id).flatMap { _ =>
- super.postDelete(doc)
- }
}
143 changes: 131 additions & 12 deletions sqlite/src/main/scala/lightdb/sqlite/SQLiteSupport.scala
@@ -1,42 +1,161 @@
package lightdb.sqlite

import cats.effect.IO
- import lightdb.{Document, Id}
- import lightdb.index.{IndexSupport, Indexer}
- import lightdb.query.{PagedResults, Query, SearchContext}
import lightdb.{Collection, Document, Id}
import lightdb.index.{IndexSupport, IndexedField, Indexer}
import lightdb.query.{Filter, PageContext, PagedResults, Query, SearchContext}

import java.nio.file.{Files, Path}
- import java.sql.{Connection, DriverManager}
import java.sql.{Connection, DriverManager, PreparedStatement, Types}

trait SQLiteSupport[D <: Document[D]] extends IndexSupport[D] {
- private lazy val path: Path = db.directory.resolve("sqlite.db")
private lazy val path: Path = db.directory.resolve(collectionName).resolve("sqlite.db")
// TODO: Should each collection have a connection?
- private lazy val connection: Connection = {
private[sqlite] lazy val connection: Connection = {
val c = DriverManager.getConnection(s"jdbc:sqlite:${path.toFile.getCanonicalPath}")
c.setAutoCommit(false)
val s = c.createStatement()
try {
s.executeUpdate(s"CREATE TABLE IF NOT EXISTS $collectionName(${index.fields.map(_.fieldName).mkString(", ")}, PRIMARY KEY (_id))")
} finally {
s.close()
}
c
}

override lazy val index: SQLiteIndexer[D] = SQLiteIndexer(this)

val _id: SQLIndexedField[Id[D], D] = index("_id", doc => Some(doc._id))

override def doSearch(query: Query[D],
context: SearchContext[D],
offset: Int,
- after: Option[PagedResults[D]]): IO[PagedResults[D]] = ???
after: Option[PagedResults[D]]): IO[PagedResults[D]] = IO {
var params = List.empty[Option[Any]]
val filters = query.filter match {
case Some(f) =>
val filter = f.asInstanceOf[SQLFilter[_, D]]
params = Some(filter.value) :: params
s"WHERE ${filter.fieldName} ${filter.condition} ?"
case None => ""
}
val sqlCount = s"""SELECT
| COUNT(*)
|FROM
| $collectionName
|$filters
|""".stripMargin
val countPs = prepare(sqlCount, params.reverse)
val total = try {
val rs = countPs.executeQuery()
rs.getInt(1)
} finally {
countPs.close()
}
// TODO: Add sort
val sql = s"""SELECT
| _id
|FROM
| $collectionName
|$filters
|LIMIT ${query.pageSize}
|OFFSET $offset
|""".stripMargin
val ps = prepare(sql, params.reverse)
val rs = ps.executeQuery()
try {
val iterator = new Iterator[Id[D]] {
override def hasNext: Boolean = rs.next()

override def next(): Id[D] = Id[D](rs.getString(1))
}
val ids = iterator.toList
PagedResults(
query = query,
context = SQLPageContext(context),
offset = offset,
total = total,
ids = ids
)
} finally {
ps.close()
}
}

override protected def indexDoc(doc: D, fields: List[IndexedField[_, D]]): IO[Unit] = IO {
val sql = s"INSERT OR REPLACE INTO $collectionName(${fields.map(_.fieldName).mkString(", ")}) VALUES (${fields.map(_ => "?").mkString(", ")})"
val values = fields.map(_.get(doc))
val ps = prepare(sql, values)
ps.executeUpdate()
ps.close()
}

private def prepare(sql: String, params: List[Option[Any]]): PreparedStatement = {
val ps = connection.prepareStatement(sql)
params.zipWithIndex.foreach {
case (value, index) => setValue(ps, index + 1, value)
}
ps
}

private def setValue(ps: PreparedStatement, index: Int, value: Option[Any]): Unit = value match {
case Some(v) => v match {
case s: String => ps.setString(index, s)
case i: Int => ps.setInt(index, i)
case id: Id[_] => ps.setString(index, id.value)
case _ => throw new RuntimeException(s"Unsupported value for $collectionName (index: $index): $value")
}
case None => ps.setNull(index, Types.NULL)
}

override def dispose(): IO[Unit] = super.dispose().map { _ =>
connection.close()
}
}

case class SQLiteIndexer[D <: Document[D]](indexSupport: SQLiteSupport[D]) extends Indexer[D] {
- override def withSearchContext[Return](f: SearchContext[D] => IO[Return]): IO[Return] = ???
override def withSearchContext[Return](f: SearchContext[D] => IO[Return]): IO[Return] = {
val context = SearchContext(indexSupport)
f(context)
}

def apply[F](name: String, get: D => Option[F]): SQLIndexedField[F, D] = SQLIndexedField(
fieldName = name,
collection = indexSupport,
get = get
)

override def count(): IO[Int] = IO {
val ps = indexSupport.connection.prepareStatement(s"SELECT COUNT(_id) FROM ${indexSupport.collectionName}")
try {
val rs = ps.executeQuery()
rs.next()
rs.getInt(1)
} finally {
ps.close()
}
}

- override def count(): IO[Int] = ???
override private[lightdb] def delete(id: Id[D]): IO[Unit] = IO {
val ps = indexSupport.connection.prepareStatement(s"DELETE FROM ${indexSupport.collectionName} WHERE _id = ?")
try {
ps.setString(1, id.value)
ps.executeUpdate()
} finally {
ps.close()
}
}

- override private[lightdb] def delete(id: Id[D]): IO[Unit] = ???
override def commit(): IO[Unit] = IO.unit
}

- override def commit(): IO[Unit] = ???
case class SQLIndexedField[F, D <: Document[D]](fieldName: String,
collection: Collection[D],
get: D => Option[F]) extends IndexedField[F, D] {
def ===(value: F): Filter[D] = is(value)
def is(value: F): Filter[D] = SQLFilter[F, D](fieldName, "=", value)
}

case class SQLFilter[F, D <: Document[D]](fieldName: String, condition: String, value: F) extends Filter[D]

- }
case class SQLPageContext[D <: Document[D]](context: SearchContext[D]) extends PageContext[D]
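
The SQLite indexer builds every statement the same way: SQL with ? placeholders assembled in prepare, and per-position binding of optional values in setValue. A standalone JDBC sketch of that approach against an in-memory SQLite database; it requires the org.xerial sqlite-jdbc driver on the classpath, and the people table and its columns are illustrative only:

import java.sql.{DriverManager, PreparedStatement, Types}

object PreparedStatementSketch {
  def main(args: Array[String]): Unit = {
    val connection = DriverManager.getConnection("jdbc:sqlite::memory:")
    try {
      val create = connection.createStatement()
      try create.executeUpdate("CREATE TABLE people(_id, name, age, PRIMARY KEY (_id))")
      finally create.close()

      // Mirrors setValue: Some(v) binds by type, None binds NULL.
      def setValue(ps: PreparedStatement, index: Int, value: Option[Any]): Unit = value match {
        case Some(s: String) => ps.setString(index, s)
        case Some(i: Int)    => ps.setInt(index, i)
        case Some(other)     => throw new RuntimeException(s"Unsupported value: $other")
        case None            => ps.setNull(index, Types.NULL)
      }

      // Mirrors prepare: one placeholder per field, bound in order.
      val insert = connection.prepareStatement(
        "INSERT OR REPLACE INTO people(_id, name, age) VALUES (?, ?, ?)")
      try {
        List(Some("id-1"), Some("Jane"), None).zipWithIndex.foreach {
          case (value, i) => setValue(insert, i + 1, value)
        }
        insert.executeUpdate()
      } finally insert.close()

      // A parameterized count, analogous to the filtered COUNT(*) in doSearch.
      val count = connection.prepareStatement("SELECT COUNT(*) FROM people WHERE name = ?")
      try {
        count.setString(1, "Jane")
        val rs = count.executeQuery()
        rs.next()
        println(s"matching rows: ${rs.getInt(1)}") // expect 1
      } finally count.close()
    } finally connection.close()
  }
}
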