diff --git a/benchmark/src/main/scala/benchmark/bench/Bench.scala b/benchmark/src/main/scala/benchmark/bench/Bench.scala index f5f2b605..d090f096 100644 --- a/benchmark/src/main/scala/benchmark/bench/Bench.scala +++ b/benchmark/src/main/scala/benchmark/bench/Bench.scala @@ -1,7 +1,7 @@ package benchmark.bench import benchmark.ActionIterator -import lightdb.Unique +import rapid.Unique import java.sql.ResultSet import scala.collection.parallel.CollectionConverters._ @@ -33,7 +33,7 @@ trait Bench { private def insertRecordsTask(status: StatusCallback): Int = { val iterator = ActionIterator( - (0 until RecordCount).iterator.map(index => P(Unique(), index, s"id$index")), + (0 until RecordCount).iterator.map(index => P(Unique().sync(), index, s"id$index")), b => if (b) status.progress() ) insertRecords(iterator) diff --git a/benchmark/src/main/scala/benchmark/bench/Runner.scala b/benchmark/src/main/scala/benchmark/bench/Runner.scala index 57b81b7b..b6d71b51 100644 --- a/benchmark/src/main/scala/benchmark/bench/Runner.scala +++ b/benchmark/src/main/scala/benchmark/bench/Runner.scala @@ -31,7 +31,6 @@ object Runner { "LightDB-RocksDB-Lucene" -> LightDBBench(SplitStoreManager(RocksDBStore, LuceneStore)), "LightDB-H2" -> LightDBBench(H2Store), "LightDB-HaloDB-H2" -> LightDBBench(SplitStoreManager(HaloDBStore, H2Store)), - "LightDB-Async-HaloDB-Lucene" -> LightDBAsyncBench(SplitStoreManager(HaloDBStore, LuceneStore)), // "LightDB-PostgreSQL" -> LightDBBench(PostgreSQLStoreManager(HikariConnectionManager(SQLConfig( // jdbcUrl = s"jdbc:postgresql://localhost:5432/basic", // username = Some("postgres"), diff --git a/benchmark/src/main/scala/benchmark/bench/impl/DerbyBench.scala b/benchmark/src/main/scala/benchmark/bench/impl/DerbyBench.scala index ccc0c52f..772ce192 100644 --- a/benchmark/src/main/scala/benchmark/bench/impl/DerbyBench.scala +++ b/benchmark/src/main/scala/benchmark/bench/impl/DerbyBench.scala @@ -1,7 +1,7 @@ package benchmark.bench.impl import benchmark.bench.Bench -import lightdb.Unique +import rapid.Unique import java.io.File import java.sql.{Connection, DriverManager} @@ -106,5 +106,5 @@ object DerbyBench extends Bench { } } - case class Person(name: String, age: Int, id: String = Unique()) + case class Person(name: String, age: Int, id: String = Unique().sync()) } \ No newline at end of file diff --git a/benchmark/src/main/scala/benchmark/bench/impl/H2Bench.scala b/benchmark/src/main/scala/benchmark/bench/impl/H2Bench.scala index 0fcb326c..6dab96d2 100644 --- a/benchmark/src/main/scala/benchmark/bench/impl/H2Bench.scala +++ b/benchmark/src/main/scala/benchmark/bench/impl/H2Bench.scala @@ -1,7 +1,7 @@ package benchmark.bench.impl import benchmark.bench.Bench -import lightdb.Unique +import rapid.Unique import java.io.File import java.sql.{Connection, DriverManager} @@ -95,5 +95,5 @@ object H2Bench extends Bench { } } - case class Person(name: String, age: Int, id: String = Unique()) + case class Person(name: String, age: Int, id: String = Unique().sync()) } \ No newline at end of file diff --git a/benchmark/src/main/scala/benchmark/bench/impl/LightDBAsyncBench.scala b/benchmark/src/main/scala/benchmark/bench/impl/LightDBAsyncBench.scala deleted file mode 100644 index cda16818..00000000 --- a/benchmark/src/main/scala/benchmark/bench/impl/LightDBAsyncBench.scala +++ /dev/null @@ -1,97 +0,0 @@ -package benchmark.bench.impl - -import benchmark.bench.Bench -import fabric.rw.RW -import lightdb.Id -import lightdb.collection.Collection -import lightdb.doc.{Document, DocumentModel, 
JsonConversion} -import lightdb.sql.SQLConversion -import lightdb.store.StoreManager -import rapid.Task - -import java.nio.file.Path -import java.sql.ResultSet - -case class LightDBAsyncBench(storeManager: StoreManager) extends Bench { bench => - override def name: String = s"LightDB Async ${storeManager.name}" - - override def init(): Unit = db.init.sync() - - implicit def p2Person(p: P): Person = Person(p.name, p.age, Id(p.id)) - - def toP(person: Person): P = P(person.name, person.age, person._id.value) - - override protected def insertRecords(iterator: Iterator[P]): Unit = DB.people.transaction { implicit transaction => - rapid.Stream.fromIterator(Task(iterator)).evalMap { p => - DB.people.insert(p) - }.drain - }.sync() - - override protected def streamRecords(f: Iterator[P] => Unit): Unit = DB.people.transaction { implicit transaction => - Task(f(DB.people.underlying.iterator.map(toP))) - }.sync() - - override protected def getEachRecord(idIterator: Iterator[String]): Unit = DB.people.transaction { implicit transaction => - rapid.Stream.fromIterator(Task(idIterator)) - .evalMap { idString => - val id = Person.id(idString) - DB.people.get(id).map { - case Some(person) => - if (person._id.value != idString) { - scribe.warn(s"${person._id.value} was not $id") - } - case None => scribe.warn(s"$id was not found") - } - } - .drain - }.sync() - - override protected def searchEachRecord(ageIterator: Iterator[Int]): Unit = DB.people.transaction { implicit transaction => - rapid.Stream.fromIterator(Task(ageIterator)) - .evalMap { age => - DB.people.query.filter(_.age === age).one.map { person => - if (person.age != age) { - scribe.warn(s"${person.age} was not $age") - } - } - } - .drain - }.sync() - - override protected def searchAllRecords(f: Iterator[P] => Unit): Unit = DB.people.transaction { implicit transaction => - Task { - val iterator = DB.people.underlying.query.search.docs.iterator.map(toP) - f(iterator) - } - }.sync() - - override def size(): Long = -1L - - override def dispose(): Unit = DB.people.dispose().sync() - - object DB extends AsyncLightDB { - Collection.CacheQueries = true - - override def directory: Option[Path] = Some(Path.of(s"db/Async${storeManager.getClass.getSimpleName.replace("$", "")}")) - - val people: AsyncCollection[Person, Person.type] = collection(Person) - - override def storeManager: StoreManager = bench.storeManager - override def upgrades: List[AsyncDatabaseUpgrade] = Nil - } - - case class Person(name: String, age: Int, _id: Id[Person] = Person.id()) extends Document[Person] - - object Person extends DocumentModel[Person] with SQLConversion[Person] with JsonConversion[Person] { - override implicit val rw: RW[Person] = RW.gen - - override def convertFromSQL(rs: ResultSet): Person = Person( - name = rs.getString("name"), - age = rs.getInt("age"), - _id = id(rs.getString("_id")) - ) - - val name: F[String] = field("name", _.name) - val age: I[Int] = field.index("age", _.age) - } -} diff --git a/benchmark/src/main/scala/benchmark/bench/impl/LightDBBench.scala b/benchmark/src/main/scala/benchmark/bench/impl/LightDBBench.scala index 6e4971b4..987361b1 100644 --- a/benchmark/src/main/scala/benchmark/bench/impl/LightDBBench.scala +++ b/benchmark/src/main/scala/benchmark/bench/impl/LightDBBench.scala @@ -8,6 +8,7 @@ import lightdb.sql.SQLConversion import lightdb.store.StoreManager import lightdb.upgrade.DatabaseUpgrade import lightdb.{Id, LightDB} +import rapid.Task import java.nio.file.Path import java.sql.ResultSet @@ -16,25 +17,25 @@ import 
scala.language.implicitConversions case class LightDBBench(storeManager: StoreManager) extends Bench { bench => override def name: String = s"LightDB ${storeManager.name}" - override def init(): Unit = db.init + override def init(): Unit = DB.init.sync() implicit def p2Person(p: P): Person = Person(p.name, p.age, Id(p.id)) def toP(person: Person): P = P(person.name, person.age, person._id.value) override protected def insertRecords(iterator: Iterator[P]): Unit = DB.people.transaction { implicit transaction => - iterator.foreach { p => - val person: Person = p - DB.people.insert(person) - } - } + rapid.Stream.fromIterator(Task(iterator)) + .evalMap(p => DB.people.insert(p)) + .drain + }.sync() override protected def streamRecords(f: Iterator[P] => Unit): Unit = DB.people.transaction { implicit transaction => - f(DB.people.iterator.map(toP)) +// f(DB.people.iterator.map(toP)) + ??? } override protected def getEachRecord(idIterator: Iterator[String]): Unit = DB.people.transaction { implicit transaction => - idIterator.foreach { idString => + /*idIterator.foreach { idString => val id = Person.id(idString) DB.people.get(id) match { case Some(person) => @@ -43,11 +44,12 @@ case class LightDBBench(storeManager: StoreManager) extends Bench { bench => } case None => scribe.warn(s"$id was not found!") } - } + }*/ + ??? } override protected def searchEachRecord(ageIterator: Iterator[Int]): Unit = DB.people.transaction { implicit transaction => - ageIterator.foreach { age => + /*ageIterator.foreach { age => try { val list = DB.people.query.filter(_.age === age).search.docs.list val person = list.head @@ -60,12 +62,14 @@ case class LightDBBench(storeManager: StoreManager) extends Bench { bench => } catch { case t: Throwable => throw new RuntimeException(s"Error with $age", t) } - } + }*/ + ??? } override protected def searchAllRecords(f: Iterator[P] => Unit): Unit = DB.people.transaction { implicit transaction => - val iterator = DB.people.query.search.docs.iterator.map(toP) - f(iterator) +// val iterator = DB.people.query.search.docs.iterator.map(toP) +// f(iterator) + ??? 
} override def size(): Long = -1L diff --git a/benchmark/src/main/scala/benchmark/bench/impl/PostgreSQLBench.scala b/benchmark/src/main/scala/benchmark/bench/impl/PostgreSQLBench.scala index d916d6a1..41eeec88 100644 --- a/benchmark/src/main/scala/benchmark/bench/impl/PostgreSQLBench.scala +++ b/benchmark/src/main/scala/benchmark/bench/impl/PostgreSQLBench.scala @@ -1,7 +1,7 @@ package benchmark.bench.impl import benchmark.bench.Bench -import lightdb.Unique +import rapid.Unique import java.sql.{Connection, DriverManager} @@ -93,5 +93,5 @@ object PostgreSQLBench extends Bench { } } - case class Person(name: String, age: Int, id: String = Unique()) + case class Person(name: String, age: Int, id: String = Unique().sync()) } \ No newline at end of file diff --git a/benchmark/src/main/scala/benchmark/bench/impl/SQLiteBench.scala b/benchmark/src/main/scala/benchmark/bench/impl/SQLiteBench.scala index 3bffb24e..127161d3 100644 --- a/benchmark/src/main/scala/benchmark/bench/impl/SQLiteBench.scala +++ b/benchmark/src/main/scala/benchmark/bench/impl/SQLiteBench.scala @@ -1,7 +1,7 @@ package benchmark.bench.impl import benchmark.bench.Bench -import lightdb.Unique +import rapid.Unique import java.io.File import java.sql.{Connection, DriverManager} @@ -94,5 +94,5 @@ object SQLiteBench extends Bench { } } - case class Person(name: String, age: Int, id: String = Unique()) + case class Person(name: String, age: Int, id: String = Unique().sync()) } \ No newline at end of file diff --git a/benchmark/src/main/scala/benchmark/imdb/MariaDBImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/MariaDBImplementation.scala index 878dea25..d66cfc7d 100644 --- a/benchmark/src/main/scala/benchmark/imdb/MariaDBImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/MariaDBImplementation.scala @@ -76,7 +76,7 @@ object MariaDBImplementation extends BenchmarkImplementation { } override def map2TitleAka(map: Map[String, String]): TitleAka = TitleAkaPG( - id = map.option("id").getOrElse(lightdb.Unique()), + id = map.option("id").getOrElse(rapid.Unique().sync()), titleId = map.value("titleId"), ordering = map.int("ordering"), title = map.value("title"), @@ -88,7 +88,7 @@ object MariaDBImplementation extends BenchmarkImplementation { ) override def map2TitleBasics(map: Map[String, String]): TitleBasicsPG = TitleBasicsPG( - id = map.option("id").getOrElse(lightdb.Unique()), + id = map.option("id").getOrElse(rapid.Unique().sync()), tconst = map.value("tconst"), titleType = map.value("titleType"), primaryTitle = map.value("primaryTitle"), diff --git a/benchmark/src/main/scala/benchmark/imdb/MongoDBImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/MongoDBImplementation.scala index 2f4228ae..88afca74 100644 --- a/benchmark/src/main/scala/benchmark/imdb/MongoDBImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/MongoDBImplementation.scala @@ -4,9 +4,8 @@ import benchmark.FlushingBacklog import cats.effect.unsafe.IORuntime import com.mongodb.client.MongoClients import com.mongodb.client.model.Indexes -import lightdb.Unique import org.bson.Document -import rapid.Task +import rapid.{Task, Unique} import java.{lang, util} import scala.jdk.CollectionConverters._ @@ -26,7 +25,7 @@ object MongoDBImplementation extends BenchmarkImplementation { override def map2TitleAka(map: Map[String, String]): Document = { new Document(Map[String, AnyRef]( - "_id" -> Unique(), + "_id" -> Unique().sync(), "titleId" -> map.value("titleId"), "ordering" -> Integer.valueOf(map.int("ordering")), "title" -> 
map.value("title"), @@ -40,7 +39,7 @@ object MongoDBImplementation extends BenchmarkImplementation { override def map2TitleBasics(map: Map[String, String]): Document = { new Document(Map[String, AnyRef]( - "_id" -> Unique(), + "_id" -> Unique().sync(), "tconst" -> map.value("tconst"), "titleType" -> map.value("titleType"), "primaryTitle" -> map.value("primaryTitle"), diff --git a/benchmark/src/main/scala/benchmark/imdb/PostgresImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/PostgresImplementation.scala index 57a3d69c..7eb9bc94 100644 --- a/benchmark/src/main/scala/benchmark/imdb/PostgresImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/PostgresImplementation.scala @@ -76,7 +76,7 @@ object PostgresImplementation extends BenchmarkImplementation { } override def map2TitleAka(map: Map[String, String]): TitleAka = TitleAkaPG( - id = map.option("id").getOrElse(lightdb.Unique()), + id = map.option("id").getOrElse(rapid.Unique().sync()), titleId = map.value("titleId"), ordering = map.int("ordering"), title = map.value("title"), @@ -88,7 +88,7 @@ object PostgresImplementation extends BenchmarkImplementation { ) override def map2TitleBasics(map: Map[String, String]): TitleBasicsPG = TitleBasicsPG( - id = map.option("id").getOrElse(lightdb.Unique()), + id = map.option("id").getOrElse(rapid.Unique().sync()), tconst = map.value("tconst"), titleType = map.value("titleType"), primaryTitle = map.value("primaryTitle"), diff --git a/benchmark/src/main/scala/benchmark/imdb/SQLiteImplementation.scala b/benchmark/src/main/scala/benchmark/imdb/SQLiteImplementation.scala index 659d23fe..3e4da34e 100644 --- a/benchmark/src/main/scala/benchmark/imdb/SQLiteImplementation.scala +++ b/benchmark/src/main/scala/benchmark/imdb/SQLiteImplementation.scala @@ -2,8 +2,7 @@ package benchmark.imdb import benchmark.FlushingBacklog import cats.effect.unsafe.IORuntime -import lightdb.Unique -import rapid.Task +import rapid.{Task, Unique} import java.sql.{Connection, DriverManager, ResultSet} @@ -77,7 +76,7 @@ object SQLiteImplementation extends BenchmarkImplementation { } override def map2TitleAka(map: Map[String, String]): TitleAka = TitleAkaPG( - id = map.option("id").getOrElse(Unique()), + id = map.option("id").getOrElse(Unique().sync()), titleId = map.value("titleId"), ordering = map.int("ordering"), title = map.value("title"), @@ -89,7 +88,7 @@ object SQLiteImplementation extends BenchmarkImplementation { ) override def map2TitleBasics(map: Map[String, String]): TitleBasicsPG = TitleBasicsPG( - id = map.option("id").getOrElse(Unique()), + id = map.option("id").getOrElse(Unique().sync()), tconst = map.value("tconst"), titleType = map.value("titleType"), primaryTitle = map.value("primaryTitle"), diff --git a/core/src/main/scala/lightdb/LightDB.scala b/core/src/main/scala/lightdb/LightDB.scala index 1b468e0d..305b4632 100644 --- a/core/src/main/scala/lightdb/LightDB.scala +++ b/core/src/main/scala/lightdb/LightDB.scala @@ -12,6 +12,7 @@ import scribe.{rapid => logger} import java.nio.file.Path import java.util.concurrent.atomic.AtomicBoolean +import scala.util.{Failure, Success} /** * The database to be implemented. Collections *may* be used without a LightDB instance, but with drastically diminished @@ -91,6 +92,7 @@ trait LightDB extends Initializable with Disposable with FeatureSupport[DBFeatur // Get applied database upgrades applied <- appliedUpgrades.get() // Determine upgrades that need to be applied + // TODO: Test upgrades that run asynchronously! 
upgrades = this.upgrades.filter(u => u.alwaysRun || !applied.contains(u.label)) _ <- logger.info(s"Applying ${upgrades.length} upgrades (${upgrades.map(_.label).mkString(", ")})...") .when(upgrades.nonEmpty) @@ -166,17 +168,24 @@ trait LightDB extends Initializable with Disposable with FeatureSupport[DBFeatur val runUpgrade = dbInitialized || upgrade.applyToNew val continueBlocking = upgrades.exists(u => u.blockStartup && (dbInitialized || u.applyToNew)) - upgrade.upgrade(this).when(runUpgrade).flatMap { _ => - appliedUpgrades.modify { set => - set + upgrade.label - }.flatMap { _ => - val next = doUpgrades(upgrades.tail, dbInitialized, continueBlocking) - if (stillBlocking && !continueBlocking) { - next.start() - } else { - next - } + val task = upgrade + .upgrade(this) + .flatMap { _ => + appliedUpgrades.modify(_ + upgrade.label) } + .when(runUpgrade) + .attempt + .flatMap[Unit] { + case Success(_) => doUpgrades(upgrades.tail, dbInitialized, continueBlocking) + case Failure(throwable) => logger + .error(s"Database Upgrade: ${upgrade.label} failed", throwable) + .map(_ => throw throwable) + } + if (stillBlocking && !continueBlocking) { + task.start() + Task.unit + } else { + task } case None => logger.info("Upgrades completed successfully") } diff --git a/core/src/main/scala/lightdb/Query.scala b/core/src/main/scala/lightdb/Query.scala index b44ec3c4..b1b36127 100644 --- a/core/src/main/scala/lightdb/Query.scala +++ b/core/src/main/scala/lightdb/Query.scala @@ -154,7 +154,7 @@ case class Query[Doc <: Document[Doc], Model <: DocumentModel[Doc], V](model: Mo case None => Task.pure(None) } } else { - store.lock(doc._id, Some(doc), establishLock) { current => + store.lock(doc._id, Task.pure(Some(doc)), establishLock) { current => f(current.getOrElse(doc)).flatMap { case Some(modified) => store.upsert(modified).when(!current.contains(modified)) case None => store.delete(store.idField, doc._id).when(deleteOnNone) diff --git a/core/src/main/scala/lightdb/lock/LockManager.scala b/core/src/main/scala/lightdb/lock/LockManager.scala index 87533200..bac425f7 100644 --- a/core/src/main/scala/lightdb/lock/LockManager.scala +++ b/core/src/main/scala/lightdb/lock/LockManager.scala @@ -7,19 +7,19 @@ import java.util.concurrent.ConcurrentHashMap class LockManager[K, V] { private val locks = new ConcurrentHashMap[K, Lock[V]]() - def apply(key: K, value: => Option[V], establishLock: Boolean = true) + def apply(key: K, value: => Task[Option[V]], establishLock: Boolean = true) (f: Forge[Option[V], Option[V]]): Task[Option[V]] = if (establishLock) { acquire(key, value).flatMap { v => f(v).guarantee(release(key, v).unit) } } else { - f(value) + value.flatMap(f(_)) } // Attempts to acquire a lock for a given K and V. - def acquire(key: K, value: => Option[V]): Task[Option[V]] = Task { + def acquire(key: K, value: => Task[Option[V]]): Task[Option[V]] = Task { // Get or create the Lock object with the ReentrantLock. - val lock = locks.computeIfAbsent(key, _ => new Lock(value)) + val lock = locks.computeIfAbsent(key, _ => new Lock(value.sync())) // Acquire the underlying ReentrantLock. 
lock.lock.acquire() diff --git a/core/src/main/scala/lightdb/store/Store.scala b/core/src/main/scala/lightdb/store/Store.scala index 3220c409..6ea278fa 100644 --- a/core/src/main/scala/lightdb/store/Store.scala +++ b/core/src/main/scala/lightdb/store/Store.scala @@ -89,7 +89,7 @@ abstract class Store[Doc <: Document[Doc], Model <: DocumentModel[Doc]](val name deleteOnNone: Boolean = false) (f: Forge[Option[Doc], Option[Doc]]) (implicit transaction: Transaction[Doc]): Task[Option[Doc]] = { - lock(id, get(idField, id).sync(), establishLock) { existing => + lock(id, get(idField, id), establishLock) { existing => f(existing).flatMap { case Some(doc) => upsert(doc).map(_ => Some(doc)) case None if deleteOnNone => delete(idField, id).map(_ => None) diff --git a/core/src/test/scala/spec/AbstractBasicSpec.scala b/core/src/test/scala/spec/AbstractBasicSpec.scala index adeab0a6..cf6451b3 100644 --- a/core/src/test/scala/spec/AbstractBasicSpec.scala +++ b/core/src/test/scala/spec/AbstractBasicSpec.scala @@ -8,7 +8,7 @@ import lightdb.feature.DBFeatureKey import lightdb.filter._ import lightdb.store.StoreManager import lightdb.upgrade.DatabaseUpgrade -import lightdb.{Id, LightDB, Sort, StoredValue} +import lightdb.{Id, LightDB, Sort, StoredValue, Timestamp} import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AsyncWordSpec import perfolation.double2Implicits @@ -20,6 +20,8 @@ import java.nio.file.Path abstract class AbstractBasicSpec extends AsyncWordSpec with AsyncTaskSpec with Matchers { spec => val CreateRecords = 10_000 + private val start = Timestamp() + protected def aggregationSupported: Boolean = true protected def filterBuilderSupported: Boolean = false protected def memoryOnly: Boolean = false @@ -392,6 +394,27 @@ abstract class AbstractBasicSpec extends AsyncWordSpec with AsyncTaskSpec with M } } } + "sort by age and verify top results" in { + db.people.transaction { implicit transaction => + db.people.query.sort(Sort.ByField(Person.age).desc).limit(5).toList.map { people => + people.map(_.name) should be(List("Not Ruth", "Zoey", "Quintin", "Ian", "Sam")) + } + } + } + "sort by age double and verify top results" in { + db.people.transaction { implicit transaction => + db.people.query.sort(Sort.ByField(Person.ageDouble).desc).limit(5).toList.map { people => + people.map(_.name) should be(List("Not Ruth", "Zoey", "Quintin", "Ian", "Sam")) + } + } + } + "filter by created after" in { + db.people.transaction { implicit transaction => + db.people.query.filter(_.created >= start).toList.map { people => + people.map(_.name).toSet should be(Set("Brenda", "Charlie", "Diana", "Evan", "Fiona", "Greg", "Hanna", "Ian", "Jenna", "Kevin", "Mike", "Nancy", "Oscar", "Penny", "Quintin", "Sam", "Tori", "Uba", "Veronica", "Wyatt", "Xena", "Allan", "Zoey", "Not Ruth")) + } + } + } "filter by list of friend ids" in { db.people.transaction { implicit transaction => val q = db.people.query.filter(_ @@ -555,9 +578,11 @@ abstract class AbstractBasicSpec extends AsyncWordSpec with AsyncTaskSpec with M city: Option[City] = None, nicknames: Set[String] = Set.empty, friends: List[Id[Person]] = Nil, - _id: Id[Person] = Person.id()) extends Document[Person] + created: Timestamp = Timestamp(), + modified: Timestamp = Timestamp(), + _id: Id[Person] = Person.id()) extends RecordDocument[Person] - object Person extends DocumentModel[Person] with JsonConversion[Person] { + object Person extends RecordDocumentModel[Person] with JsonConversion[Person] { override implicit val rw: RW[Person] = RW.gen val 
name: I[String] = field.index("name", (p: Person) => p.name) @@ -568,6 +593,7 @@ abstract class AbstractBasicSpec extends AsyncWordSpec with AsyncTaskSpec with M val allNames: I[List[String]] = field.index("allNames", p => (p.name :: p.nicknames.toList).map(_.toLowerCase)) val search: T = field.tokenized("search", (doc: Person) => s"${doc.name} ${doc.age}") val doc: I[Person] = field.index("doc", (p: Person) => p) + val ageDouble: I[Double] = field.index("ageDouble", _.age.toDouble) } case class City(name: String) diff --git a/core/src/test/scala/spec/AbstractFacetSpec.scala b/core/src/test/scala/spec/AbstractFacetSpec.scala index 23dafd6e..4d16aac7 100644 --- a/core/src/test/scala/spec/AbstractFacetSpec.scala +++ b/core/src/test/scala/spec/AbstractFacetSpec.scala @@ -46,6 +46,7 @@ abstract class AbstractFacetSpec extends AsyncWordSpec with AsyncTaskSpec with M db.entries.query .facet(_.authorsFacet) .docs + .limit(1) .search .map { results => val authorsResult = results.facet(_.authorsFacet) diff --git a/lucene/src/main/scala/lightdb/lucene/LuceneStore.scala b/lucene/src/main/scala/lightdb/lucene/LuceneStore.scala index eaee764e..a5a1a60f 100644 --- a/lucene/src/main/scala/lightdb/lucene/LuceneStore.scala +++ b/lucene/src/main/scala/lightdb/lucene/LuceneStore.scala @@ -17,7 +17,7 @@ import lightdb.spatial.{DistanceAndDoc, Geo, Spatial} import lightdb.store.{Conversion, Store, StoreManager, StoreMode} import lightdb.transaction.Transaction import lightdb.util.Aggregator -import org.apache.lucene.document.{DoubleField, DoublePoint, IntField, IntPoint, LatLonDocValuesField, LatLonPoint, LatLonShape, LongField, LongPoint, NumericDocValuesField, SortedDocValuesField, StoredField, StringField, TextField, Document => LuceneDocument, Field => LuceneField} +import org.apache.lucene.document.{DoubleDocValuesField, DoubleField, DoublePoint, IntField, IntPoint, LatLonDocValuesField, LatLonPoint, LatLonShape, LongField, LongPoint, NumericDocValuesField, SortedDocValuesField, StoredField, StringField, TextField, Document => LuceneDocument, Field => LuceneField} import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts import org.apache.lucene.facet.{DrillDownQuery, FacetsCollector, FacetsCollectorManager, FacetsConfig, FacetField => LuceneFacetField} import org.apache.lucene.geo.{Line, Polygon} @@ -185,6 +185,7 @@ class LuceneStore[Doc <: Document[Doc], Model <: DocumentModel[Doc]](name: Strin val sorted = new SortedDocValuesField(fieldSortName, bytes) add(sorted) case NumInt(l, _) => add(new NumericDocValuesField(fieldSortName, l)) + case NumDec(d, _) => add(new DoubleDocValuesField(fieldSortName, d.toDouble)) case j if field.isSpatial && j != Null => val list = j match { case Arr(values, _) => values.toList.map(_.as[Geo])
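
The hunks above replace `lightdb.Unique`, which returned a `String` directly, with `rapid.Unique`, whose result is evidently a `Task[String]`: hence the `.sync()` added at every call site. Inside a pipeline that is already Task-based, the per-record blocking call can be composed away instead. A minimal sketch, not a definitive implementation, assuming only the combinators already used in the hunks above (`Task`, `Stream.fromIterator`, `evalMap`, `map`) and the `P` and `RecordCount` names from `Bench.scala`:

    import rapid.{Stream, Task, Unique}

    // Build the record stream without forcing a blocking .sync() per element;
    // the insert stage remains the evalMap(DB.people.insert) already used in
    // LightDBBench.insertRecords above.
    val records = Stream
      .fromIterator(Task((0 until RecordCount).iterator))
      .evalMap(index => Unique().map(u => P(u, index, s"id$index")))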
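
`LightDBBench` above stubs `streamRecords`, `getEachRecord`, `searchEachRecord`, and `searchAllRecords` with `???` pending the same migration. The deleted `LightDBAsyncBench` suggests the shape the replacements are likely to take; a sketch of one of them, assuming `get` returns a `Task` in the unified API the way `insert` and `transaction` already do in the hunks above:

    import rapid.Task

    override protected def getEachRecord(idIterator: Iterator[String]): Unit =
      DB.people.transaction { implicit transaction =>
        rapid.Stream.fromIterator(Task(idIterator))
          .evalMap { idString =>
            val id = Person.id(idString)
            DB.people.get(id).map {
              case Some(person) if person._id.value != idString =>
                scribe.warn(s"${person._id.value} was not $id")
              case Some(_) => () // id matched; nothing to report
              case None => scribe.warn(s"$id was not found")
            }
          }
          .drain
      }.sync()

`searchEachRecord` should follow the same `Stream.fromIterator(...).evalMap(...).drain` outline around `query.filter(_.age === age).one`, while `streamRecords` and `searchAllRecords` can hand their iterators to `f` inside a plain `Task(...)`, as the deleted async implementations did.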
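
The reworked upgrade loop in `LightDB.scala` also isolates a reusable pattern: run a `Task`, log on failure, and rethrow so the error still propagates. Distilled into a helper, as a sketch only, assuming `Task#attempt` yields a `Task[Try[...]]`, as the `Success`/`Failure` match in the hunk implies:

    import rapid.Task
    import scala.util.{Failure, Success}
    import scribe.{rapid => logger}

    // Run `task`, logging and rethrowing on failure (pattern from doUpgrades).
    def logged[A](label: String, task: Task[A]): Task[A] = task.attempt.flatMap {
      case Success(a) => Task.pure(a)
      case Failure(throwable) => logger
        .error(s"$label failed", throwable)
        .map(_ => throw throwable)
    }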