diff --git a/benchmark/src/main/scala/benchmark/bench/Bench.scala b/benchmark/src/main/scala/benchmark/bench/Bench.scala new file mode 100644 index 00000000..21ee17b2 --- /dev/null +++ b/benchmark/src/main/scala/benchmark/bench/Bench.scala @@ -0,0 +1,26 @@ +package benchmark.bench + +trait Bench { + val RecordCount: Int = 1_000_000 + val StreamIterations: Int = 1 + val SearchIterations: Int = 1 + + val tasks: List[Task] = List( + Task("Insert Records", RecordCount, insertRecords), + Task("Stream Records", StreamIterations, streamRecords), + Task("Search Each Record", StreamIterations * RecordCount, searchEachRecord), + Task("Search All Records", StreamIterations, searchAllRecords) + ) + + def init(): Unit + + protected def insertRecords(status: StatusCallback): Unit + + protected def streamRecords(status: StatusCallback): Unit + + protected def searchEachRecord(status: StatusCallback): Unit + + protected def searchAllRecords(status: StatusCallback): Unit + + def dispose(): Unit +} diff --git a/benchmark/src/main/scala/benchmark/bench/BenchmarkReport.scala b/benchmark/src/main/scala/benchmark/bench/BenchmarkReport.scala new file mode 100644 index 00000000..12a06180 --- /dev/null +++ b/benchmark/src/main/scala/benchmark/bench/BenchmarkReport.scala @@ -0,0 +1,9 @@ +package benchmark.bench + +import fabric.rw.RW + +case class BenchmarkReport(name: String, maxProgress: Double, logs: List[StatusLog]) + +object BenchmarkReport { + implicit val rw: RW[BenchmarkReport] = RW.gen +} \ No newline at end of file diff --git a/benchmark/src/main/scala/benchmark/bench/ReportGenerator.scala b/benchmark/src/main/scala/benchmark/bench/ReportGenerator.scala new file mode 100644 index 00000000..a0a24c55 --- /dev/null +++ b/benchmark/src/main/scala/benchmark/bench/ReportGenerator.scala @@ -0,0 +1,7 @@ +package benchmark.bench + +object ReportGenerator { + def main(args: Array[String]): Unit = { + + } +} diff --git a/benchmark/src/main/scala/benchmark/bench/Runner.scala 
b/benchmark/src/main/scala/benchmark/bench/Runner.scala new file mode 100644 index 00000000..3ec58939 --- /dev/null +++ b/benchmark/src/main/scala/benchmark/bench/Runner.scala @@ -0,0 +1,58 @@ +package benchmark.bench + +import benchmark.bench.impl.LightDBBench +import fabric.io.JsonFormatter +import fabric.rw.Convertible +import lightdb.duckdb.DuckDBIndexer +import lightdb.h2.H2Indexer +import lightdb.halo.HaloDBStore +import lightdb.lucene.LuceneIndexer +import lightdb.mapdb.MapDBStore +import lightdb.rocks.RocksDBStore +import lightdb.sqlite.SQLiteIndexer +import lightdb.store.{AtomicMapStore, MapStore} + +import java.nio.file.{Files, Path} + +object Runner { + val implementations: Map[String, Bench] = Map( + "ldbHaloLucene" -> LightDBBench(HaloDBStore, LuceneIndexer), + "ldbMapLucene" -> LightDBBench(MapDBStore, LuceneIndexer), + "ldbRocksLucene" -> LightDBBench(RocksDBStore, LuceneIndexer), + "ldbAtomicLucene" -> LightDBBench(AtomicMapStore, LuceneIndexer), + "ldbMapStoreLucene" -> LightDBBench(MapStore, LuceneIndexer), + "ldbHaloSQLite" -> LightDBBench(HaloDBStore, SQLiteIndexer), + "ldbHaloH2" -> LightDBBench(HaloDBStore, H2Indexer), + "ldbHaloDuck" -> LightDBBench(HaloDBStore, DuckDBIndexer), + ) + + def main(args: Array[String]): Unit = { + args.headOption match { + case Some(implName) if implementations.contains(implName) => + val bench = implementations(implName) + scribe.info(s"Initializing $implName benchmark...") + bench.init() + scribe.info(s"Initialized successfully!") + val reports = bench.tasks.map { task => + val status = StatusCallback() + status.start() + scribe.info(s"Executing ${task.name} task...") + task.f(status) + status.finish() + val logs = status.logs + scribe.info(s"Completed in ${logs.last.elapsed} seconds") + BenchmarkReport(task.name, task.maxProgress, logs) + } + scribe.info(s"Disposing $implName benchmark...") + bench.dispose() + scribe.info(s"Disposed!") + + val json = reports.json + 
Files.writeString(Path.of(s"report-$implName.json"), JsonFormatter.Default(json)) + + sys.exit(0) + case Some(implName) => scribe.error(s"Invalid implementation name: $implName. Valid implementations: ${implementations.keys.mkString(", ")}") + case None => scribe.error(s"Exactly one command-line argument must be present to specify the implementation. Valid implementations: ${implementations.keys.mkString(", ")}") + } + } +} diff --git a/benchmark/src/main/scala/benchmark/bench/StatusCallback.scala b/benchmark/src/main/scala/benchmark/bench/StatusCallback.scala new file mode 100644 index 00000000..02b82622 --- /dev/null +++ b/benchmark/src/main/scala/benchmark/bench/StatusCallback.scala @@ -0,0 +1,58 @@ +package benchmark.bench + +import com.google.common.util.concurrent.AtomicDouble +import com.sun.management.OperatingSystemMXBean + +import java.lang.management.ManagementFactory + +case class StatusCallback(every: Long = 1_000L) { + val progress = new AtomicDouble(0.0) + + def logs: List[StatusLog] = _logs.reverse + + private var keepAlive = true + private val startTime = System.currentTimeMillis() + private var _logs = List.empty[StatusLog] + + def start(): Unit = { + val t = new Thread { + setDaemon(true) + + override def run(): Unit = { + while (keepAlive) { + report() + Thread.sleep(every) + } + } + } + t.start() + } + + def finish(): Unit = { + keepAlive = false + report() + } + + private def report(): Unit = { + val now = System.currentTimeMillis() + val elapsed = (now - startTime) / 1000.0 + val memory = ManagementFactory.getMemoryMXBean + val heap = memory.getHeapMemoryUsage + val nonHeap = memory.getNonHeapMemoryUsage + val heapUsed = heap.getUsed + val nonHeapUsed = nonHeap.getUsed + val os = ManagementFactory.getPlatformMXBean(classOf[OperatingSystemMXBean]) + val cpuLoad = os.getProcessCpuLoad + val cpuTime = os.getProcessCpuTime + val log = StatusLog( + progress = progress.get(), + timeStamp = now, + elapsed = elapsed, + heap = heapUsed, + nonHeap = 
nonHeapUsed, + cpuLoad = cpuLoad, + cpuTime = cpuTime + ) + _logs = log :: _logs + } +} diff --git a/benchmark/src/main/scala/benchmark/bench/StatusLog.scala b/benchmark/src/main/scala/benchmark/bench/StatusLog.scala new file mode 100644 index 00000000..234263fd --- /dev/null +++ b/benchmark/src/main/scala/benchmark/bench/StatusLog.scala @@ -0,0 +1,15 @@ +package benchmark.bench + +import fabric.rw.RW + +case class StatusLog(progress: Double, + timeStamp: Long, + elapsed: Double, + heap: Long, + nonHeap: Long, + cpuLoad: Double, + cpuTime: Long) + +object StatusLog { + implicit val rw: RW[StatusLog] = RW.gen +} \ No newline at end of file diff --git a/benchmark/src/main/scala/benchmark/bench/Task.scala b/benchmark/src/main/scala/benchmark/bench/Task.scala new file mode 100644 index 00000000..e4699cc9 --- /dev/null +++ b/benchmark/src/main/scala/benchmark/bench/Task.scala @@ -0,0 +1,3 @@ +package benchmark.bench + +case class Task(name: String, maxProgress: Double = 1.0, f: StatusCallback => Unit) \ No newline at end of file diff --git a/benchmark/src/main/scala/benchmark/bench/impl/LightDBBench.scala b/benchmark/src/main/scala/benchmark/bench/impl/LightDBBench.scala new file mode 100644 index 00000000..c8b77b95 --- /dev/null +++ b/benchmark/src/main/scala/benchmark/bench/impl/LightDBBench.scala @@ -0,0 +1,104 @@ +package benchmark.bench.impl + +import benchmark.bench.{Bench, StatusCallback} +import fabric.rw.RW +import lightdb.document.{Document, DocumentModel} +import lightdb.index.{Indexed, IndexedCollection, IndexerManager} +import lightdb.store.StoreManager +import lightdb.upgrade.DatabaseUpgrade +import lightdb.util.Unique +import lightdb.{Id, LightDB} +import org.apache.commons.io.FileUtils + +import java.io.File +import java.nio.file.Path +import scala.collection.parallel.CollectionConverters._ + +case class LightDBBench(sm: StoreManager, im: IndexerManager) extends Bench { + override def init(): Unit = { + val dbDir = new File("db") + 
FileUtils.deleteDirectory(dbDir) + dbDir.mkdirs() + + scribe.info("DB init...") + DB.init() + scribe.info("Initialized!") + } + + override protected def insertRecords(status: StatusCallback): Unit = DB.people.transaction { implicit transaction => + (0 until RecordCount) + .foreach { index => + DB.people.set(Person( + name = Unique(), + age = index + )) + status.progress.set(index + 1) + } + } + + override protected def streamRecords(status: StatusCallback): Unit = DB.people.transaction { implicit transaction => + (0 until StreamIterations) + .par + .foreach { iteration => + val count = DB.people.iterator.size + if (count != RecordCount) { + scribe.warn(s"RecordCount was not $RecordCount, it was $count") + } + status.progress.set(iteration + 1) + } + } + + override protected def searchEachRecord(status: StatusCallback): Unit = DB.people.transaction { implicit transaction => + (0 until StreamIterations) + .par + .foreach { iteration => + (0 until RecordCount) + .par + .foreach { index => + val list = DB.people.query.filter(_.age === index).search.docs.list + if (list.size != 1) { + scribe.warn(s"Unable to find age = $index") + } + else if (list.head.age != index) { + scribe.warn(s"${list.head.age} was not $index") + } + status.progress.set(iteration * RecordCount + index + 1) + } + } + } + + override protected def searchAllRecords(status: StatusCallback): Unit = DB.people.transaction { implicit transaction => + (0 until StreamIterations) + .par + .foreach { iteration => + val count = DB.people.query.search.docs.iterator.foldLeft(0)((count, _) => count + 1) + if (count != RecordCount) { + scribe.warn(s"RecordCount was not $RecordCount, it was $count") + } + status.progress.set(iteration + 1) + } + } + + override def dispose(): Unit = DB.dispose() + + object DB extends LightDB { + override lazy val directory: Option[Path] = Some(Path.of(s"db/bench")) + + val people: IndexedCollection[Person, Person.type] = collection("people", Person, im.create[Person, Person.type]()) + + 
override def storeManager: StoreManager = sm + + override def upgrades: List[DatabaseUpgrade] = Nil + } + + case class Person(name: String, + age: Int, + _id: Id[Person] = Person.id()) extends Document[Person] + + object Person extends DocumentModel[Person] with Indexed[Person] { + implicit val rw: RW[Person] = RW.gen + + val name: I[String] = index.one("name", _.name, store = true) + val age: I[Int] = index.one("age", _.age, store = true) + } +} diff --git a/benchmark/src/main/scala/benchmark/BenchmarkImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/BenchmarkImplementation.scala similarity index 98% rename from benchmark/src/main/scala/benchmark/BenchmarkImplementation.scala rename to benchmark/src/main/scala/benchmark/imdb/BenchmarkImplementation.scala index 16e67130..91f252d1 100644 --- a/benchmark/src/main/scala/benchmark/BenchmarkImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/BenchmarkImplementation.scala @@ -1,4 +1,4 @@ -package benchmark +package benchmark.imdb import cats.effect.IO diff --git a/benchmark/src/main/scala/benchmark/IMDBBenchmark.scala b/benchmark/src/main/scala/benchmark/imdb/IMDBBenchmark.scala similarity index 98% rename from benchmark/src/main/scala/benchmark/IMDBBenchmark.scala rename to benchmark/src/main/scala/benchmark/imdb/IMDBBenchmark.scala index 644afa3e..2667aff4 100644 --- a/benchmark/src/main/scala/benchmark/IMDBBenchmark.scala +++ b/benchmark/src/main/scala/benchmark/imdb/IMDBBenchmark.scala @@ -1,20 +1,19 @@ -package benchmark +package benchmark.imdb -import cats.effect.unsafe.IORuntime +import benchmark.IOIterator import cats.effect.IO - -import java.io.{BufferedOutputStream, BufferedReader, File, FileInputStream, FileOutputStream, FileReader, PrintWriter} -import java.util.zip.GZIPInputStream -import scala.io.Source +import cats.effect.unsafe.IORuntime import fs2._ import fs2.io.file._ import perfolation._ -import scribe.{Level, Logger} -import java.net.{URI, URL} -import 
scala.annotation.tailrec -import sys.process._ +import java.io._ +import java.net.URI import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger} +import java.util.zip.GZIPInputStream +import scala.annotation.tailrec +import scala.io.Source +import scala.sys.process._ object IMDBBenchmark { // extends IOApp { val limit: Limit = Limit.OneMillion diff --git a/benchmark/src/main/scala/benchmark/LightDBImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/LightDBImplementation.scala similarity index 98% rename from benchmark/src/main/scala/benchmark/LightDBImplementation.scala rename to benchmark/src/main/scala/benchmark/imdb/LightDBImplementation.scala index 930c9698..966e18dc 100644 --- a/benchmark/src/main/scala/benchmark/LightDBImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/LightDBImplementation.scala @@ -1,4 +1,4 @@ -package benchmark +package benchmark.imdb import cats.effect.IO import fabric.rw.{Asable, RW} @@ -6,9 +6,8 @@ import lightdb.collection.Collection import lightdb.document.{Document, DocumentModel} import lightdb.halo.HaloDBStore import lightdb.index.{Indexed, IndexedCollection} -import lightdb.lucene.LuceneIndexer import lightdb.sqlite.SQLiteIndexer -import lightdb.store.{AtomicMapStore, StoreManager} +import lightdb.store.StoreManager import lightdb.transaction.Transaction import lightdb.upgrade.DatabaseUpgrade import lightdb.{Id, LightDB} diff --git a/benchmark/src/main/scala/benchmark/MariaDBImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/MariaDBImplementation.scala similarity index 99% rename from benchmark/src/main/scala/benchmark/MariaDBImplementation.scala rename to benchmark/src/main/scala/benchmark/imdb/MariaDBImplementation.scala index c87465db..cf5861a2 100644 --- a/benchmark/src/main/scala/benchmark/MariaDBImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/MariaDBImplementation.scala @@ -1,5 +1,6 @@ -package benchmark +package benchmark.imdb +import benchmark.FlushingBacklog 
import cats.effect.IO import cats.effect.unsafe.IORuntime diff --git a/benchmark/src/main/scala/benchmark/MongoDBImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/MongoDBImplementation.scala similarity index 98% rename from benchmark/src/main/scala/benchmark/MongoDBImplementation.scala rename to benchmark/src/main/scala/benchmark/imdb/MongoDBImplementation.scala index 34827fa3..ed2dc590 100644 --- a/benchmark/src/main/scala/benchmark/MongoDBImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/MongoDBImplementation.scala @@ -1,5 +1,6 @@ -package benchmark +package benchmark.imdb +import benchmark.FlushingBacklog import cats.effect.IO import cats.effect.unsafe.IORuntime import com.mongodb.client.MongoClients diff --git a/benchmark/src/main/scala/benchmark/PostgresImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/PostgresImplementation.scala similarity index 98% rename from benchmark/src/main/scala/benchmark/PostgresImplementation.scala rename to benchmark/src/main/scala/benchmark/imdb/PostgresImplementation.scala index 084add5f..961043b4 100644 --- a/benchmark/src/main/scala/benchmark/PostgresImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/PostgresImplementation.scala @@ -1,10 +1,10 @@ -package benchmark -import cats.effect.{IO, Unique} -import cats.effect.unsafe.IORuntime +package benchmark.imdb + import benchmark.FlushingBacklog +import cats.effect.IO +import cats.effect.unsafe.IORuntime import java.sql.{Connection, DriverManager, ResultSet} -import scala.concurrent.{ExecutionContext, Future} object PostgresImplementation extends BenchmarkImplementation { implicit val runtime: IORuntime = IORuntime.global diff --git a/benchmark/src/main/scala/benchmark/SQLiteImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/SQLiteImplementation.scala similarity index 99% rename from benchmark/src/main/scala/benchmark/SQLiteImplementation.scala rename to 
benchmark/src/main/scala/benchmark/imdb/SQLiteImplementation.scala index b0557e78..25dc6880 100644 --- a/benchmark/src/main/scala/benchmark/SQLiteImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/SQLiteImplementation.scala @@ -1,5 +1,6 @@ -package benchmark +package benchmark.imdb +import benchmark.FlushingBacklog import cats.effect.IO import cats.effect.unsafe.IORuntime import lightdb.util.Unique diff --git a/benchmark/src/main/scala/benchmark/ScarangoImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/ScarangoImplementation.scala similarity index 99% rename from benchmark/src/main/scala/benchmark/ScarangoImplementation.scala rename to benchmark/src/main/scala/benchmark/imdb/ScarangoImplementation.scala index a00b5c81..4775676b 100644 --- a/benchmark/src/main/scala/benchmark/ScarangoImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/ScarangoImplementation.scala @@ -1,12 +1,12 @@ -package benchmark +package benchmark.imdb +import benchmark.FlushingBacklog import cats.effect.IO import com.outr.arango.collection.DocumentCollection import com.outr.arango.query._ import com.outr.arango.query.dsl.ref2Wrapped import com.outr.arango.{Document, DocumentModel, Field, Graph, Id, Index} import fabric.rw.RW -import benchmark.FlushingBacklog object ScarangoImplementation extends BenchmarkImplementation { override type TitleAka = TitleAkaADB diff --git a/core/src/main/scala/lightdb/index/IndexerManager.scala b/core/src/main/scala/lightdb/index/IndexerManager.scala new file mode 100644 index 00000000..af783b86 --- /dev/null +++ b/core/src/main/scala/lightdb/index/IndexerManager.scala @@ -0,0 +1,7 @@ +package lightdb.index + +import lightdb.document.{Document, DocumentModel} + +trait IndexerManager { + def create[D <: Document[D], M <: DocumentModel[D]](): Indexer[D, M] +} diff --git a/index/duckdb/src/main/scala/lightdb/duckdb/DuckDBIndexer.scala b/index/duckdb/src/main/scala/lightdb/duckdb/DuckDBIndexer.scala index c016d9e8..0c3c39e8 
100644 --- a/index/duckdb/src/main/scala/lightdb/duckdb/DuckDBIndexer.scala +++ b/index/duckdb/src/main/scala/lightdb/duckdb/DuckDBIndexer.scala @@ -1,6 +1,7 @@ package lightdb.duckdb import lightdb.document.{Document, DocumentModel} +import lightdb.index.{Indexer, IndexerManager} import lightdb.sql.{ConnectionManager, SQLConfig, SQLIndexer, SingleConnectionManager} import lightdb.transaction.{Transaction, TransactionKey} import org.duckdb.{DuckDBAppender, DuckDBConnection} @@ -51,4 +52,8 @@ case class DuckDBIndexer[D <: Document[D], M <: DocumentModel[D]]() extends SQLI } } }*/ +} + +object DuckDBIndexer extends IndexerManager { + override def create[D <: Document[D], M <: DocumentModel[D]](): Indexer[D, M] = DuckDBIndexer() } \ No newline at end of file diff --git a/index/h2/src/main/scala/lightdb/h2/H2Indexer.scala b/index/h2/src/main/scala/lightdb/h2/H2Indexer.scala index 981d8222..ee1b9762 100644 --- a/index/h2/src/main/scala/lightdb/h2/H2Indexer.scala +++ b/index/h2/src/main/scala/lightdb/h2/H2Indexer.scala @@ -1,6 +1,7 @@ package lightdb.h2 import lightdb.document.{Document, DocumentModel} +import lightdb.index.{Indexer, IndexerManager} import lightdb.sql.{ConnectionManager, HikariConnectionManager, SQLConfig, SQLIndexer, SingleConnectionManager} import java.nio.file.{Files, Path} @@ -20,3 +21,7 @@ case class H2Indexer[D <: Document[D], M <: DocumentModel[D]]() extends SQLIndex override protected def concatPrefix: String = "LISTAGG" } + +object H2Indexer extends IndexerManager { + override def create[D <: Document[D], M <: DocumentModel[D]](): Indexer[D, M] = H2Indexer() +} \ No newline at end of file diff --git a/index/lucene/src/main/scala/lightdb/lucene/LuceneIndexer.scala b/index/lucene/src/main/scala/lightdb/lucene/LuceneIndexer.scala index 8d41c716..02e00247 100644 --- a/index/lucene/src/main/scala/lightdb/lucene/LuceneIndexer.scala +++ b/index/lucene/src/main/scala/lightdb/lucene/LuceneIndexer.scala @@ -2,7 +2,7 @@ package lightdb.lucene import 
fabric._ import fabric.define.DefType -import lightdb.index.{Index, Indexer, MaterializedAggregate, MaterializedIndex} +import lightdb.index.{Index, Indexer, IndexerManager, MaterializedAggregate, MaterializedIndex} import lightdb.Id import org.apache.lucene.analysis.Analyzer import org.apache.lucene.analysis.standard.StandardAnalyzer @@ -285,4 +285,8 @@ case class LuceneIndexer[D <: Document[D], M <: DocumentModel[D]](persistent: Bo val fieldSortName = if (separate) s"${index.name}Sort" else index.name LatLonDocValuesField.newDistanceSort(fieldSortName, from.latitude, from.longitude) } +} + +object LuceneIndexer extends IndexerManager { + override def create[D <: Document[D], M <: DocumentModel[D]](): Indexer[D, M] = LuceneIndexer() } \ No newline at end of file diff --git a/index/sqlite/src/main/scala/lightdb/sqlite/SQLiteIndexer.scala b/index/sqlite/src/main/scala/lightdb/sqlite/SQLiteIndexer.scala index 2554531c..a2116b10 100644 --- a/index/sqlite/src/main/scala/lightdb/sqlite/SQLiteIndexer.scala +++ b/index/sqlite/src/main/scala/lightdb/sqlite/SQLiteIndexer.scala @@ -1,6 +1,7 @@ package lightdb.sqlite import lightdb.document.{Document, DocumentModel} +import lightdb.index.{Indexer, IndexerManager} import lightdb.sql.{ConnectionManager, HikariConnectionManager, SQLConfig, SQLIndexer, SingleConnectionManager} import java.nio.file.{Files, Path} @@ -16,3 +17,7 @@ case class SQLiteIndexer[D <: Document[D], M <: DocumentModel[D]]() extends SQLI ) override protected lazy val connectionManager: ConnectionManager[D] = SingleConnectionManager(config) } + +object SQLiteIndexer extends IndexerManager { + override def create[D <: Document[D], M <: DocumentModel[D]](): Indexer[D, M] = SQLiteIndexer() +} \ No newline at end of file diff --git a/run_benchmarks.sh b/run_benchmarks.sh new file mode 100755 index 00000000..c055bae3 --- /dev/null +++ b/run_benchmarks.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +declare -a arr=("ldbHaloLucene" "ldbMapLucene" "ldbRocksLucene" 
"ldbAtomicLucene" "ldbMapLucene" "ldbHaloSQLite" "ldbHaloH2" "ldbHaloDuck") + +for i in "${arr[@]}" +do + sbt "benchmark / runMain benchmark.bench.Runner $i" +done \ No newline at end of file