Skip to content

Commit

Permalink
(dsl): Support highlights (#141)
Browse files Browse the repository at this point in the history
Closes #114
  • Loading branch information
markaya authored Apr 4, 2023
1 parent ace5b2a commit 3d2998c
Show file tree
Hide file tree
Showing 10 changed files with 632 additions and 53 deletions.
158 changes: 156 additions & 2 deletions modules/library/src/it/scala/zio/elasticsearch/HttpExecutorSpec.scala
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,9 @@

package zio.elasticsearch

import zio.Chunk
import zio.{Chunk, ZIO}
import zio.elasticsearch.ElasticAggregation.{multipleAggregations, termsAggregation}
import zio.elasticsearch.ElasticHighlight.highlight
import zio.elasticsearch.ElasticQuery._
import zio.elasticsearch.ElasticSort.sortBy
import zio.elasticsearch.domain.{TestDocument, TestSubDocument}
Expand All @@ -27,6 +28,7 @@ import zio.elasticsearch.query.sort.SortOrder._
import zio.elasticsearch.query.sort.SourceType.NumberType
import zio.elasticsearch.result.Item
import zio.elasticsearch.script.Script
import zio.json.ast.Json.{Arr, Str}
import zio.stream.{Sink, ZSink}
import zio.test.Assertion._
import zio.test.TestAspect._
Expand Down Expand Up @@ -254,7 +256,7 @@ object HttpExecutorSpec extends IntegrationSpec {
Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
)
),
) @@ shrinks(0),
suite("counting documents")(
test("successfully count documents with given query") {
checkOnce(genTestDocument) { document =>
Expand Down Expand Up @@ -549,6 +551,158 @@ object HttpExecutorSpec extends IntegrationSpec {
Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
)
) @@ shrinks(0),
          // Integration tests for the highlight DSL: each test seeds two generated
          // documents into firstSearchIndex, searches for the first one, and asserts
          // on the highlight fragments Elasticsearch returns. The index is created
          // before and dropped after every test via the surrounding `around` aspect.
          suite("searching for documents with highlights")(
            // Highlight requested by raw field name; Elasticsearch wraps the matched
            // term in its default <em>...</em> tags.
            test("successfully find document with highlight") {
              checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
                (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
                  for {
                    // Clear any documents left over from previous runs.
                    _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
                    _ <- Executor.execute(
                           ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
                         )
                    // refreshTrue makes both documents immediately visible to search.
                    _ <- Executor.execute(
                           ElasticRequest
                             .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
                             .refreshTrue
                         )
                    query = should(matches("stringField", firstDocument.stringField))
                    res <-
                      Executor.execute(
                        ElasticRequest.search(firstSearchIndex, query).highlights(highlight("stringField"))
                      )
                    items <- res.items
                  } yield assert(items.map(_.highlight("stringField")))(
                    hasSameElements(List(Some(Chunk(s"<em>${firstDocument.stringField}</em>"))))
                  )
              }
            } @@ around(
              Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
              Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
            ),
            // Same scenario as above, but the highlighted field is addressed through
            // the type-safe accessor (TestDocument.stringField) instead of a string.
            test("successfully find document with highlight using field accessor") {
              checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
                (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
                  for {
                    _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
                    _ <- Executor.execute(
                           ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
                         )
                    _ <- Executor.execute(
                           ElasticRequest
                             .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
                             .refreshTrue
                         )
                    query = should(matches("stringField", firstDocument.stringField))
                    res <-
                      Executor.execute(
                        ElasticRequest.search(firstSearchIndex, query).highlights(highlight(TestDocument.stringField))
                      )
                    items <- res.items
                  } yield assert(items.map(_.highlight(TestDocument.stringField)))(
                    hasSameElements(List(Some(Chunk(s"<em>${firstDocument.stringField}</em>"))))
                  )
              }
            } @@ around(
              Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
              Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
            ),
            // Verifies the full `highlights` map on the result item (field name ->
            // chunk of fragments) rather than a single field lookup.
            test("successfully find document with highlights and return highlights map successfully") {
              checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
                (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
                  for {
                    _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
                    _ <- Executor.execute(
                           ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
                         )
                    _ <- Executor.execute(
                           ElasticRequest
                             .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
                             .refreshTrue
                         )
                    query = should(matches("stringField", firstDocument.stringField))
                    res <-
                      Executor.execute(
                        ElasticRequest.search(firstSearchIndex, query).highlights(highlight("stringField"))
                      )
                    items <- res.items
                  } yield assert(items.map(_.highlights))(
                    hasSameElements(List(Some(Map("stringField" -> Chunk(s"<em>${firstDocument.stringField}</em>")))))
                  )
              }
            } @@ around(
              Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
              Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
            ),
            // Global highlight config (withGlobalConfig) replaces the default tags:
            // pre_tags/post_tags set to <ul>/</ul> must show up in the fragments.
            test("successfully find document with highlight while using global config") {
              checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
                (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
                  for {
                    _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
                    _ <- Executor.execute(
                           ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
                         )
                    _ <- Executor.execute(
                           ElasticRequest
                             .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
                             .refreshTrue
                         )
                    query = should(matches("stringField", firstDocument.stringField))
                    res <-
                      Executor.execute(
                        ElasticRequest
                          .search(firstSearchIndex, query)
                          .highlights(
                            highlight(TestDocument.stringField)
                              .withGlobalConfig("pre_tags", Arr(Str("<ul>")))
                              .withGlobalConfig("post_tags", Arr(Str("</ul>")))
                          )
                      )
                    items <- res.items
                  } yield assert(items.map(_.highlight(TestDocument.stringField)))(
                    hasSameElements(List(Some(Chunk(s"<ul>${firstDocument.stringField}</ul>"))))
                  )
              }
            } @@ around(
              Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
              Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
            ),
            // Field-level config (the `config` map on `highlight`) must take
            // precedence over the global config: expects <ol>, not <ul>.
            test("successfully find document with highlight while using local config to overwrite global config") {
              checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
                (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
                  for {
                    _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
                    _ <- Executor.execute(
                           ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
                         )
                    _ <- Executor.execute(
                           ElasticRequest
                             .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
                             .refreshTrue
                         )
                    query = should(matches("stringField", firstDocument.stringField))
                    res <-
                      Executor.execute(
                        ElasticRequest
                          .search(firstSearchIndex, query)
                          .highlights(
                            highlight(
                              TestDocument.stringField,
                              config = Map("pre_tags" -> Arr(Str("<ol>")), "post_tags" -> Arr(Str("</ol>")))
                            )
                              .withGlobalConfig("pre_tags", Arr(Str("<ul>")))
                              .withGlobalConfig("post_tags", Arr(Str("</ul>")))
                          )
                      )
                    items <- res.items
                  } yield assert(items.map(_.highlight(TestDocument.stringField)))(
                    hasSameElements(List(Some(Chunk(s"<ol>${firstDocument.stringField}</ol>"))))
                  )
              }
            } @@ around(
              Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
              Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
            )
          ),
suite("searching for sorted documents")(
test("search for document sorted by descending age and by ascending birthDate using range query") {
checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
/*
* Copyright 2022 LambdaWorks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package zio.elasticsearch

import zio.Chunk
import zio.elasticsearch.highlights.{HighlightField, Highlights}
import zio.json.ast.Json

object ElasticHighlight {

  /**
   * Creates a [[Highlights]] for a single type-safe field with no field-level options.
   *
   * @param field the type-safe field accessor whose string form is highlighted
   * @return a [[Highlights]] containing one [[HighlightField]] with an empty config
   */
  def highlight(field: Field[_, _]): Highlights =
    highlight(field.toString)

  /**
   * Creates a [[Highlights]] for a single field name with no field-level options.
   *
   * @param field the raw field name to highlight
   * @return a [[Highlights]] containing one [[HighlightField]] with an empty config
   */
  def highlight(field: String): Highlights =
    highlight(field, Map.empty[String, Json])

  /**
   * Creates a [[Highlights]] for a type-safe field with per-field configuration.
   *
   * @param field  the type-safe field accessor whose string form is highlighted
   * @param config per-field highlight parameters (e.g. "pre_tags", "post_tags")
   * @return a [[Highlights]] containing one configured [[HighlightField]]
   */
  def highlight(field: Field[_, _], config: Map[String, Json]): Highlights =
    highlight(field.toString, config)

  /**
   * Creates a [[Highlights]] for a field name with per-field configuration.
   * All other overloads delegate here.
   *
   * @param field  the raw field name to highlight
   * @param config per-field highlight parameters (e.g. "pre_tags", "post_tags")
   * @return a [[Highlights]] containing one configured [[HighlightField]]
   */
  def highlight(field: String, config: Map[String, Json]): Highlights =
    Highlights(Chunk(HighlightField(field, config)))
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ package zio.elasticsearch

import zio.elasticsearch.ElasticPrimitive.ElasticPrimitiveOps
import zio.elasticsearch.aggregation.ElasticAggregation
import zio.elasticsearch.highlights.Highlights
import zio.elasticsearch.query.ElasticQuery
import zio.elasticsearch.query.sort.Sort
import zio.elasticsearch.request._
Expand Down Expand Up @@ -72,7 +73,15 @@ object ElasticRequest {
GetById(index = index, id = id, refresh = None, routing = None)

def search(index: IndexName, query: ElasticQuery[_]): SearchRequest =
Search(index = index, query = query, sortBy = Set.empty, from = None, routing = None, size = None)
Search(
index = index,
query = query,
sortBy = Set.empty,
from = None,
highlights = None,
routing = None,
size = None
)

def search(index: IndexName, query: ElasticQuery[_], aggregation: ElasticAggregation): SearchAndAggregateRequest =
SearchAndAggregate(
Expand All @@ -81,6 +90,7 @@ object ElasticRequest {
aggregation = aggregation,
sortBy = Set.empty,
from = None,
highlights = None,
routing = None,
size = None
)
Expand Down Expand Up @@ -324,30 +334,38 @@ object ElasticRequest {
with WithSort[SearchRequest]
with HasSize[SearchRequest] {
def aggregate(aggregation: ElasticAggregation): SearchAndAggregateRequest

def highlights(value: Highlights): Search
}

private[elasticsearch] final case class Search(
index: IndexName,
query: ElasticQuery[_],
sortBy: Set[Sort],
from: Option[Int],
highlights: Option[Highlights],
routing: Option[Routing],
size: Option[Int]
) extends SearchRequest { self =>

def aggregate(aggregation: ElasticAggregation): SearchAndAggregateRequest =
SearchAndAggregate(
index = index,
query = query,
aggregation = aggregation,
sortBy = sortBy,
from = from,
highlights = highlights,
routing = routing,
size = size
)

def from(value: Int): SearchRequest =
self.copy(from = Some(value))

def highlights(value: Highlights): Search =
self.copy(highlights = Some(value))

def routing(value: Routing): SearchRequest =
self.copy(routing = Some(value))

Expand All @@ -358,23 +376,16 @@ object ElasticRequest {
self.copy(sortBy = sortBy ++ sorts.toSet)

def toJson: Json = {
val baseJson = (self.from, self.size) match {
case (Some(from), Some(size)) =>
Obj("from" -> from.toJson) merge Obj("size" -> size.toJson) merge self.query.toJson
case (Some(from), None) =>
Obj("from" -> from.toJson) merge self.query.toJson
case (None, Some(size)) =>
Obj("size" -> size.toJson) merge self.query.toJson
case _ =>
self.query.toJson
}
val fromJson: Json = self.from.map(f => Obj("from" -> f.toJson)).getOrElse(Obj())

sortBy match {
case sorts if sorts.nonEmpty =>
baseJson merge Obj("sort" -> Arr(self.sortBy.toList.map(_.paramsToJson): _*))
case _ =>
baseJson
}
val sizeJson: Json = self.size.map(s => Obj("size" -> s.toJson)).getOrElse(Obj())

val highlightsJson: Json = highlights.map(_.toJson).getOrElse(Obj())

val sortJson: Json =
if (self.sortBy.nonEmpty) Obj("sort" -> Arr(self.sortBy.toList.map(_.paramsToJson): _*)) else Obj()

fromJson merge sizeJson merge highlightsJson merge sortJson merge self.query.toJson
}
}

Expand All @@ -391,12 +402,16 @@ object ElasticRequest {
aggregation: ElasticAggregation,
sortBy: Set[Sort],
from: Option[Int],
highlights: Option[Highlights],
routing: Option[Routing],
size: Option[Int]
) extends SearchAndAggregateRequest { self =>
def from(value: Int): SearchAndAggregateRequest =
self.copy(from = Some(value))

def highlights(value: Highlights): SearchAndAggregateRequest =
self.copy(highlights = Some(value))

def routing(value: Routing): SearchAndAggregateRequest =
self.copy(routing = Some(value))

Expand All @@ -407,23 +422,16 @@ object ElasticRequest {
self.copy(sortBy = sortBy ++ sorts.toSet)

def toJson: Json = {
val baseJson = (self.from, self.size) match {
case (Some(from), Some(size)) =>
Obj("from" -> from.toJson) merge Obj("size" -> size.toJson) merge self.query.toJson
case (Some(from), None) =>
Obj("from" -> from.toJson) merge self.query.toJson
case (None, Some(size)) =>
Obj("size" -> size.toJson) merge self.query.toJson
case _ =>
self.query.toJson
}
val fromJson: Json = self.from.map(f => Obj("from" -> f.toJson)).getOrElse(Obj())

sortBy match {
case sorts if sorts.nonEmpty =>
baseJson merge Obj("sort" -> Arr(self.sortBy.toList.map(_.paramsToJson): _*)) merge aggregation.toJson
case _ =>
baseJson merge aggregation.toJson
}
val sizeJson: Json = self.size.map(s => Obj("size" -> s.toJson)).getOrElse(Obj())

val highlightsJson: Json = self.highlights.map(_.toJson).getOrElse(Obj())

val sortJson: Json =
if (self.sortBy.nonEmpty) Obj("sort" -> Arr(self.sortBy.toList.map(_.paramsToJson): _*)) else Obj()

fromJson merge sizeJson merge highlightsJson merge sortJson merge self.query.toJson merge aggregation.toJson
}
}

Expand Down
Loading

0 comments on commit 3d2998c

Please sign in to comment.