diff --git a/chutney/packaging/local-dev/src/main/resources/application.yml b/chutney/packaging/local-dev/src/main/resources/application.yml
index 9471bc90c..9b7920aa6 100644
--- a/chutney/packaging/local-dev/src/main/resources/application.yml
+++ b/chutney/packaging/local-dev/src/main/resources/application.yml
@@ -62,7 +62,7 @@ chutney:
configuration-folder: ${chutney.configuration-folder}/environment
jira:
configuration-folder: ${chutney.configuration-folder}/jira
-
+ index-folder: .chutney/index
server:
editions:
ttl:
diff --git a/chutney/pom.xml b/chutney/pom.xml
index 3687d65b7..0724baea8 100644
--- a/chutney/pom.xml
+++ b/chutney/pom.xml
@@ -74,6 +74,7 @@
1.9.1
3.9.1
+        <lucene.version>9.12.0</lucene.version>
@@ -261,6 +262,21 @@
                 <artifactId>liquibase-core</artifactId>
                 <version>${liquibase.version}</version>
             </dependency>
+            <dependency>
+                <groupId>org.apache.lucene</groupId>
+                <artifactId>lucene-core</artifactId>
+                <version>${lucene.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.lucene</groupId>
+                <artifactId>lucene-analysis-common</artifactId>
+                <version>${lucene.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.lucene</groupId>
+                <artifactId>lucene-queryparser</artifactId>
+                <version>${lucene.version}</version>
+            </dependency>
             <dependency>
                 <groupId>org.springframework.boot</groupId>
diff --git a/chutney/server-core/src/main/java/com/chutneytesting/server/core/domain/execution/history/ExecutionHistoryRepository.java b/chutney/server-core/src/main/java/com/chutneytesting/server/core/domain/execution/history/ExecutionHistoryRepository.java
index 3e068ec95..9e416905d 100644
--- a/chutney/server-core/src/main/java/com/chutneytesting/server/core/domain/execution/history/ExecutionHistoryRepository.java
+++ b/chutney/server-core/src/main/java/com/chutneytesting/server/core/domain/execution/history/ExecutionHistoryRepository.java
@@ -47,7 +47,7 @@ public interface ExecutionHistoryRepository {
*/
ExecutionHistory.Execution getExecution(String scenarioId, Long reportId) throws ReportNotFoundException;
-    List<ExecutionHistory.ExecutionSummary> getExecutionReportMatchQuery(String query);
+    List<ExecutionHistory.ExecutionSummary> getExecutionReportMatchKeyword(String query);
/**
* Override a previously stored {@link ExecutionHistory.Execution}.
diff --git a/chutney/server/pom.xml b/chutney/server/pom.xml
index 3e59e5dd1..73ab9b498 100644
--- a/chutney/server/pom.xml
+++ b/chutney/server/pom.xml
@@ -50,6 +50,10 @@
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-data-jpa</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-aop</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-webflux</artifactId>
@@ -181,6 +185,19 @@
             <scope>runtime</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-analysis-common</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-queryparser</artifactId>
+        </dependency>
+
         <dependency>
             <groupId>org.springframework.boot</groupId>
diff --git a/chutney/server/src/main/java/com/chutneytesting/ServerConfiguration.java b/chutney/server/src/main/java/com/chutneytesting/ServerConfiguration.java
index db6baef68..568ac0b90 100644
--- a/chutney/server/src/main/java/com/chutneytesting/ServerConfiguration.java
+++ b/chutney/server/src/main/java/com/chutneytesting/ServerConfiguration.java
@@ -61,11 +61,13 @@
import org.springframework.boot.autoconfigure.liquibase.LiquibaseAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.core.task.TaskExecutor;
import org.springframework.core.task.support.ExecutorServiceAdapter;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
@SpringBootApplication(exclude = {LiquibaseAutoConfiguration.class, ActiveMQAutoConfiguration.class, MongoAutoConfiguration.class})
+@EnableAspectJAutoProxy
public class ServerConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(ServerConfiguration.class);
diff --git a/chutney/server/src/main/java/com/chutneytesting/admin/api/DatabaseManagementController.java b/chutney/server/src/main/java/com/chutneytesting/admin/api/DatabaseManagementController.java
index 442f2a92a..db2d341ed 100644
--- a/chutney/server/src/main/java/com/chutneytesting/admin/api/DatabaseManagementController.java
+++ b/chutney/server/src/main/java/com/chutneytesting/admin/api/DatabaseManagementController.java
@@ -40,7 +40,7 @@ public class DatabaseManagementController {
@PreAuthorize("hasAuthority('ADMIN_ACCESS')")
@GetMapping(path = "/execution", produces = MediaType.APPLICATION_JSON_VALUE)
     public List<ExecutionSummaryDto> getExecutionReportMatchQuery(@QueryParam("query") String query) {
- return executionHistoryRepository.getExecutionReportMatchQuery(query).stream().map(ExecutionSummaryDto::toDto).toList();
+ return executionHistoryRepository.getExecutionReportMatchKeyword(query).stream().map(ExecutionSummaryDto::toDto).toList();
}
@PreAuthorize("hasAuthority('ADMIN_ACCESS')")
diff --git a/chutney/server/src/main/java/com/chutneytesting/execution/infra/aop/ScenarioExecutionReportIndexingAspect.java b/chutney/server/src/main/java/com/chutneytesting/execution/infra/aop/ScenarioExecutionReportIndexingAspect.java
new file mode 100644
index 000000000..a96cee6ea
--- /dev/null
+++ b/chutney/server/src/main/java/com/chutneytesting/execution/infra/aop/ScenarioExecutionReportIndexingAspect.java
@@ -0,0 +1,53 @@
+/*
+ * SPDX-FileCopyrightText: 2017-2024 Enedis
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ */
+
+package com.chutneytesting.execution.infra.aop;
+
+import com.chutneytesting.execution.infra.storage.DatabaseExecutionJpaRepository;
+import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionEntity;
+import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionReportEntity;
+import com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository;
+import com.chutneytesting.scenario.infra.jpa.ScenarioEntity;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.aspectj.lang.annotation.After;
+import org.aspectj.lang.annotation.Aspect;
+import org.springframework.stereotype.Component;
+
+@Aspect
+@Component
+public class ScenarioExecutionReportIndexingAspect {
+ private final ScenarioExecutionReportIndexRepository reportIndexRepository;
+ private final DatabaseExecutionJpaRepository scenarioExecutionRepository;
+
+ public ScenarioExecutionReportIndexingAspect(ScenarioExecutionReportIndexRepository reportIndexRepository, DatabaseExecutionJpaRepository scenarioExecutionRepository) {
+ this.reportIndexRepository = reportIndexRepository;
+ this.scenarioExecutionRepository = scenarioExecutionRepository;
+ }
+
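+    // Index the execution report right after the JPA repository saves it, but only once the execution has reached a final status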
+ @After("execution(* com.chutneytesting.execution.infra.storage.ScenarioExecutionReportJpaRepository.save(..)) && args(reportEntity)")
+ public void index(ScenarioExecutionReportEntity reportEntity) {
+        if (reportEntity.status().isFinal()) {
+ reportIndexRepository.save(reportEntity);
+ }
+ }
+
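+    // When a scenario is saved as deactivated (soft deletion), remove all of its execution reports from the index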
+ @After("execution(* com.chutneytesting.scenario.infra.raw.ScenarioJpaRepository.save(..)) && args(scenario)")
+ public void deleteDeactivatedScenarioExecutions(ScenarioEntity scenario) {
+        if (!scenario.isActivated()) {
+            List<ScenarioExecutionEntity> executions = scenarioExecutionRepository.findAllByScenarioId(String.valueOf(scenario.getId()));
+ reportIndexRepository.deleteAllById(executions.stream().map(ScenarioExecutionEntity::getId).collect(Collectors.toSet()));
+ }
+
+ }
+
+ @After("execution(* com.chutneytesting.execution.infra.storage.ScenarioExecutionReportJpaRepository.deleteAllById(..)) && args(scenarioExecutionIds)")
+    public void deleteById(Set<Long> scenarioExecutionIds) {
+ reportIndexRepository.deleteAllById(scenarioExecutionIds);
+ }
+}
diff --git a/chutney/server/src/main/java/com/chutneytesting/execution/infra/migration/ZipReportMigration.java b/chutney/server/src/main/java/com/chutneytesting/execution/infra/migration/ZipReportMigration.java
new file mode 100644
index 000000000..f7960b3ec
--- /dev/null
+++ b/chutney/server/src/main/java/com/chutneytesting/execution/infra/migration/ZipReportMigration.java
@@ -0,0 +1,97 @@
+/*
+ * SPDX-FileCopyrightText: 2017-2024 Enedis
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ */
+
+package com.chutneytesting.execution.infra.migration;
+
+import static com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository.SCENARIO_EXECUTION_REPORT;
+import static com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository.WHAT;
+
+import com.chutneytesting.execution.infra.storage.ScenarioExecutionReportJpaRepository;
+import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionReportEntity;
+import com.chutneytesting.index.infra.IndexRepository;
+import com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository;
+import jakarta.persistence.EntityManager;
+import java.util.List;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.CommandLineRunner;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
+import org.springframework.stereotype.Component;
+import org.springframework.transaction.annotation.Transactional;
+
+@Component
+public class ZipReportMigration implements CommandLineRunner {
+
+
+ private final ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository;
+ private final ScenarioExecutionReportJpaRepository scenarioExecutionReportJpaRepository;
+ private final IndexRepository indexRepository;
+ private final EntityManager entityManager;
+ private static final Logger LOGGER = LoggerFactory.getLogger(ZipReportMigration.class);
+
+
+ public ZipReportMigration(ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository, ScenarioExecutionReportJpaRepository scenarioExecutionReportJpaRepository, IndexRepository indexRepository, EntityManager entityManager) {
+ this.scenarioExecutionReportIndexRepository = scenarioExecutionReportIndexRepository;
+ this.scenarioExecutionReportJpaRepository = scenarioExecutionReportJpaRepository;
+ this.indexRepository = indexRepository;
+ this.entityManager = entityManager;
+ }
+
+ @Override
+ @Transactional
+ public void run(String... args) {
+ if (isMigrationDone()) {
+ LOGGER.info("Report compression & indexing already done, skipping...");
+ return;
+ }
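+        // Process reports page by page (10 at a time) so the whole table is never loaded into memory at once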
+ PageRequest firstPage = PageRequest.of(0, 10);
+ int count = 0;
+ compressAndIndex(firstPage, count);
+ }
+
+ private void compressAndIndex(Pageable pageable, int previousCount) {
+        Slice<ScenarioExecutionReportEntity> slice = scenarioExecutionReportJpaRepository.findAll(pageable);
+        List<ScenarioExecutionReportEntity> reports = slice.getContent();
+
+ compressAndSaveInDb(reports);
+ index(reports);
+
+ int count = previousCount + slice.getNumberOfElements();
+ if (slice.hasNext()) {
+ compressAndIndex(slice.nextPageable(), count);
+ } else {
+ LOGGER.info("{} report(s) successfully compressed and indexed", count);
+ }
+ }
+
+    private void compressAndSaveInDb(List<ScenarioExecutionReportEntity> reportsInDb) {
+        // Reading reports with scenarioExecutionReportJpaRepository and saving them back would not run ReportConverter again.
+        // Updating the report column through the EntityManager does, so compression happens on write.
+        reportsInDb.forEach(report -> {
+ entityManager.createQuery(
+ "UPDATE SCENARIO_EXECUTIONS_REPORTS SET report = :report WHERE id = :id")
+ .setParameter("report", report.getReport())
+ .setParameter("id", report.scenarioExecutionId())
+ .executeUpdate();
+ });
+ }
+
+    private void index(List<ScenarioExecutionReportEntity> reportsInDb) {
+ scenarioExecutionReportIndexRepository.saveAll(reportsInDb);
+ }
+
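+    // The migration already ran if at least one scenario execution report document is present in the index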
+ private boolean isMigrationDone() {
+ Query whatQuery = new TermQuery(new Term(WHAT, SCENARIO_EXECUTION_REPORT));
+ int indexedReports = indexRepository.count(whatQuery);
+ return indexedReports > 0;
+ }
+}
diff --git a/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionHistoryRepository.java b/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionHistoryRepository.java
index dd1dc650e..493d15db0 100644
--- a/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionHistoryRepository.java
+++ b/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionHistoryRepository.java
@@ -16,6 +16,7 @@
import com.chutneytesting.campaign.infra.jpa.CampaignExecutionEntity;
import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionEntity;
import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionReportEntity;
+import com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository;
import com.chutneytesting.server.core.domain.dataset.DataSet;
import com.chutneytesting.server.core.domain.execution.history.ExecutionHistory.DetachedExecution;
import com.chutneytesting.server.core.domain.execution.history.ExecutionHistory.Execution;
@@ -51,6 +52,7 @@ class DatabaseExecutionHistoryRepository implements ExecutionHistoryRepository {
private final CampaignJpaRepository campaignJpaRepository;
private final CampaignExecutionJpaRepository campaignExecutionJpaRepository;
private final TestCaseRepository testCaseRepository;
+ private final ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository;
private final ObjectMapper objectMapper;
private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseExecutionHistoryRepository.class);
@@ -60,12 +62,14 @@ class DatabaseExecutionHistoryRepository implements ExecutionHistoryRepository {
ScenarioExecutionReportJpaRepository scenarioExecutionReportJpaRepository,
CampaignJpaRepository campaignJpaRepository, TestCaseRepository testCaseRepository,
CampaignExecutionJpaRepository campaignExecutionJpaRepository,
+ ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository,
@Qualifier("reportObjectMapper") ObjectMapper objectMapper) {
this.scenarioExecutionsJpaRepository = scenarioExecutionsJpaRepository;
this.scenarioExecutionReportJpaRepository = scenarioExecutionReportJpaRepository;
this.campaignJpaRepository = campaignJpaRepository;
this.testCaseRepository = testCaseRepository;
this.campaignExecutionJpaRepository = campaignExecutionJpaRepository;
+ this.scenarioExecutionReportIndexRepository = scenarioExecutionReportIndexRepository;
this.objectMapper = objectMapper;
}
@@ -128,7 +132,8 @@ public Execution store(String scenarioId, DetachedExecution detachedExecution) t
scenarioExecution.forCampaignExecution(campaignExecution.get());
}
scenarioExecution = scenarioExecutionsJpaRepository.save(scenarioExecution);
- scenarioExecutionReportJpaRepository.save(new ScenarioExecutionReportEntity(scenarioExecution, detachedExecution.report()));
+ ScenarioExecutionReportEntity reportEntity = new ScenarioExecutionReportEntity(scenarioExecution, detachedExecution.report());
+ scenarioExecutionReportJpaRepository.save(reportEntity);
Execution execution = detachedExecution.attach(scenarioExecution.id(), scenarioId);
return ImmutableExecutionHistory.Execution.builder().from(execution).build();
}
@@ -146,9 +151,10 @@ public Execution getExecution(String scenarioId, Long reportId) throws ReportNot
}
@Override
-    public List<ExecutionSummary> getExecutionReportMatchQuery(String query) {
+    public List<ExecutionSummary> getExecutionReportMatchKeyword(String keyword) {
+        List<Long> matchedReportsIds = scenarioExecutionReportIndexRepository.idsByKeywordInReport(keyword);
return scenarioExecutionsJpaRepository
- .getExecutionReportMatchQuery(query)
+ .getExecutionReportByIds(matchedReportsIds)
.stream()
.map(this::scenarioExecutionToExecutionSummary)
.toList();
diff --git a/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionJpaRepository.java b/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionJpaRepository.java
index 8d1e7ef11..037195e4e 100644
--- a/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionJpaRepository.java
+++ b/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionJpaRepository.java
@@ -53,9 +53,8 @@ ELSE MAX(CASE WHEN se.status != 'NOT_EXECUTED' THEN se.id END)
inner join ser.scenarioExecution se
where s.activated = true
and cast(s.id as string) = se.scenarioId
- and ser.report like '%' || :query || '%'
+ and ser.scenarioExecutionId in (:executionsIds)
order by se.id desc
- limit 100
""")
-    List<ScenarioExecutionEntity> getExecutionReportMatchQuery(@Param("query") String query);
+    List<ScenarioExecutionEntity> getExecutionReportByIds(@Param("executionsIds") List<Long> executionsIds);
}
diff --git a/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/jpa/ReportConverter.java b/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/jpa/ReportConverter.java
new file mode 100644
index 000000000..acf6cf91f
--- /dev/null
+++ b/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/jpa/ReportConverter.java
@@ -0,0 +1,63 @@
+/*
+ * SPDX-FileCopyrightText: 2017-2024 Enedis
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ */
+
+package com.chutneytesting.execution.infra.storage.jpa;
+
+import jakarta.persistence.AttributeConverter;
+import jakarta.persistence.Converter;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
+
+@Converter
+public class ReportConverter implements AttributeConverter<String, byte[]> {
+ @Override
+ public byte[] convertToDatabaseColumn(String report) {
+ return compress(report);
+ }
+
+ @Override
+ public String convertToEntityAttribute(byte[] zippedReport) {
+ if (!isCompressed(zippedReport)) {
+ return new String(zippedReport, StandardCharsets.UTF_8);
+ }
+ return decompress(zippedReport);
+ }
+
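+    // A GZIP stream always starts with the magic bytes 0x1f 0x8b; reports written before this change are plain UTF-8 text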
+ private boolean isCompressed(byte[] data) {
+ return (data != null && data.length >= 2 &&
+ (data[0] == (byte) 0x1f && data[1] == (byte) 0x8b));
+ }
+
+ private byte[] compress(String report) {
+ try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+ GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream)) {
+
+ gzipOutputStream.write(report.getBytes(StandardCharsets.UTF_8));
+ gzipOutputStream.finish();
+ return byteArrayOutputStream.toByteArray();
+
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to compress report content", e);
+ }
+ }
+
+ private String decompress(byte[] compressedData) {
+ try (ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(compressedData);
+ GZIPInputStream gzipInputStream = new GZIPInputStream(byteArrayInputStream);
+ ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
+ byteArrayOutputStream.write(gzipInputStream.readAllBytes());
+ return byteArrayOutputStream.toString(StandardCharsets.UTF_8);
+
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to decompress report content", e);
+ }
+ }
+}
diff --git a/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/jpa/ScenarioExecutionReportEntity.java b/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/jpa/ScenarioExecutionReportEntity.java
index df4ecb7b7..dd9481a57 100644
--- a/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/jpa/ScenarioExecutionReportEntity.java
+++ b/chutney/server/src/main/java/com/chutneytesting/execution/infra/storage/jpa/ScenarioExecutionReportEntity.java
@@ -15,8 +15,10 @@
import com.chutneytesting.server.core.domain.execution.history.ExecutionHistory;
import com.chutneytesting.server.core.domain.execution.history.ImmutableExecutionHistory;
import com.chutneytesting.server.core.domain.execution.report.ScenarioExecutionReport;
+import com.chutneytesting.server.core.domain.execution.report.ServerReportStatus;
import jakarta.persistence.Basic;
import jakarta.persistence.Column;
+import jakarta.persistence.Convert;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.Id;
@@ -42,6 +44,7 @@ public class ScenarioExecutionReportEntity {
@Column(name = "REPORT")
@Basic(fetch = FetchType.LAZY)
+ @Convert(converter = ReportConverter.class)
private String report;
@Column(name = "VERSION")
@@ -65,6 +68,15 @@ public String getReport() {
return report;
}
+ public Long scenarioExecutionId() {
+ return scenarioExecutionId;
+ }
+
+ public ServerReportStatus status(){
+ return scenarioExecution.status();
+ }
+
+
public ExecutionHistory.Execution toDomain() {
return ImmutableExecutionHistory.Execution.builder()
.executionId(scenarioExecutionId)
diff --git a/chutney/server/src/main/java/com/chutneytesting/index/infra/IndexConfig.java b/chutney/server/src/main/java/com/chutneytesting/index/infra/IndexConfig.java
new file mode 100644
index 000000000..cf42e76c4
--- /dev/null
+++ b/chutney/server/src/main/java/com/chutneytesting/index/infra/IndexConfig.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-FileCopyrightText: 2017-2024 Enedis
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ */
+
+package com.chutneytesting.index.infra;
+
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.Directory;
+
+public interface IndexConfig {
+ Directory directory();
+ IndexWriter indexWriter();
+}
diff --git a/chutney/server/src/main/java/com/chutneytesting/index/infra/IndexRepository.java b/chutney/server/src/main/java/com/chutneytesting/index/infra/IndexRepository.java
new file mode 100644
index 000000000..df984427b
--- /dev/null
+++ b/chutney/server/src/main/java/com/chutneytesting/index/infra/IndexRepository.java
@@ -0,0 +1,88 @@
+/*
+ * SPDX-FileCopyrightText: 2017-2024 Enedis
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ */
+
+package com.chutneytesting.index.infra;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.StoredFields;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.store.Directory;
+import org.springframework.stereotype.Repository;
+
+@Repository
+public class IndexRepository {
+
+ private final IndexWriter indexWriter;
+ private final Directory indexDirectory;
+
+ public IndexRepository(IndexConfig config) {
+ this.indexDirectory = config.directory();
+ this.indexWriter = config.indexWriter();
+ }
+
+ public void index(Document document) {
+ try {
+ this.indexWriter.addDocument(document);
+ this.indexWriter.commit();
+ } catch (IOException e) {
+ throw new RuntimeException("Couldn't index data", e);
+ }
+ }
+
+    public List<Document> search(Query query, int limit, Sort sort) {
+        List<Document> result = new ArrayList<>();
+ try (DirectoryReader reader = DirectoryReader.open(indexDirectory)) {
+ IndexSearcher searcher = new IndexSearcher(reader);
+ ScoreDoc[] hits = searcher.search(query, limit, sort).scoreDocs;
+ StoredFields storedFields = searcher.storedFields();
+ for (ScoreDoc hit : hits){
+ result.add(storedFields.document(hit.doc));
+ }
+ } catch (IOException ignored) {
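+            // the index may not exist yet or cannot be read: fall back to an empty result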
+ }
+ return result;
+ }
+
+ public int count(Query query) {
+ int count = 0;
+ try (DirectoryReader reader = DirectoryReader.open(indexDirectory)) {
+ IndexSearcher searcher = new IndexSearcher(reader);
+ count = searcher.count(query);
+
+ } catch (IOException e) {
+ throw new RuntimeException("Couldn't count elements in index", e);
+ }
+ return count;
+ }
+
+ public void delete(Query query) {
+ try {
+ indexWriter.deleteDocuments(query);
+ indexWriter.commit();
+ } catch (IOException e) {
+ throw new RuntimeException("Couldn't delete index using query " + query, e);
+ }
+ }
+
+ public void deleteAll() {
+ try {
+ indexWriter.deleteAll();
+ indexWriter.commit();
+ } catch (IOException e) {
+ throw new RuntimeException("Couldn't delete all indexes", e);
+ }
+ }
+}
+
diff --git a/chutney/server/src/main/java/com/chutneytesting/index/infra/OnDiskIndexConfig.java b/chutney/server/src/main/java/com/chutneytesting/index/infra/OnDiskIndexConfig.java
new file mode 100644
index 000000000..7dfe437f7
--- /dev/null
+++ b/chutney/server/src/main/java/com/chutneytesting/index/infra/OnDiskIndexConfig.java
@@ -0,0 +1,50 @@
+/*
+ * SPDX-FileCopyrightText: 2017-2024 Enedis
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ */
+
+package com.chutneytesting.index.infra;
+
+import static com.chutneytesting.tools.file.FileUtils.initFolder;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+@Component
+public class OnDiskIndexConfig implements IndexConfig {
+ private final IndexWriter indexWriter;
+ private final Directory indexDirectory;
+
+ public OnDiskIndexConfig(@Value("${chutney.index-folder:~/.chutney/index}") String indexDir) {
+ try {
+ Path path = Paths.get(indexDir);
+ initFolder(path);
+ this.indexDirectory = FSDirectory.open(path);
+ IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
+ this.indexWriter = new IndexWriter(indexDirectory, config);
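+            // commit right away so an empty index exists on disk and can be opened by readers before the first document is written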
+ this.indexWriter.commit();
+ } catch (IOException e) {
+ throw new RuntimeException("Couldn't open index directory", e);
+ }
+ }
+
+ @Override
+ public Directory directory() {
+ return indexDirectory;
+ }
+
+ @Override
+ public IndexWriter indexWriter() {
+ return indexWriter;
+ }
+}
diff --git a/chutney/server/src/main/java/com/chutneytesting/index/infra/ScenarioExecutionReportIndexRepository.java b/chutney/server/src/main/java/com/chutneytesting/index/infra/ScenarioExecutionReportIndexRepository.java
new file mode 100644
index 000000000..32ac58439
--- /dev/null
+++ b/chutney/server/src/main/java/com/chutneytesting/index/infra/ScenarioExecutionReportIndexRepository.java
@@ -0,0 +1,92 @@
+/*
+ * SPDX-FileCopyrightText: 2017-2024 Enedis
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ */
+
+package com.chutneytesting.index.infra;
+
+import static org.apache.lucene.document.Field.Store;
+
+import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionReportEntity;
+import java.util.List;
+import java.util.Set;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.WildcardQuery;
+import org.apache.lucene.util.BytesRef;
+import org.springframework.stereotype.Repository;
+
+@Repository
+public class ScenarioExecutionReportIndexRepository {
+
+ public static final String SCENARIO_EXECUTION_REPORT = "scenario_execution_report";
+ public static final String WHAT = "what";
+ private static final String SCENARIO_EXECUTION_ID = "scenarioExecutionId";
+ private static final String REPORT = "report";
+ private final IndexRepository indexRepository;
+
+ public ScenarioExecutionReportIndexRepository(IndexRepository indexRepository) {
+ this.indexRepository = indexRepository;
+ }
+
+ public void save(ScenarioExecutionReportEntity report) {
+ Document document = new Document();
+ document.add(new StringField(WHAT, SCENARIO_EXECUTION_REPORT, Store.NO));
+        document.add(new StringField(SCENARIO_EXECUTION_ID, report.scenarioExecutionId().toString(), Store.YES));
+        document.add(new TextField(REPORT, report.getReport().toLowerCase(), Store.NO));
+        // for sorting
+        document.add(new SortedDocValuesField(SCENARIO_EXECUTION_ID, new BytesRef(report.scenarioExecutionId().toString().getBytes())));
+
+
+ indexRepository.index(document);
+ }
+
+    public void saveAll(List<ScenarioExecutionReportEntity> reports) {
+ reports.forEach(this::save);
+ }
+
+ public void delete(Long scenarioExecutionId) {
+ Query whatQuery = new TermQuery(new Term(WHAT, SCENARIO_EXECUTION_REPORT));
+ Query idQuery = new TermQuery(new Term(SCENARIO_EXECUTION_ID, scenarioExecutionId.toString()));
+ BooleanQuery query = new BooleanQuery.Builder()
+ .add(idQuery, BooleanClause.Occur.MUST)
+ .add(whatQuery, BooleanClause.Occur.MUST)
+ .build();
+ indexRepository.delete(query);
+ }
+
+    public void deleteAllById(Set<Long> scenarioExecutionIds) {
+ scenarioExecutionIds.forEach(this::delete);
+ }
+
+
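+    // case-insensitive wildcard search on the indexed report content; keeps the 100 best hits, sorted by score then by execution id descending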
+    public List<Long> idsByKeywordInReport(String keyword) {
+ Query whatQuery = new TermQuery(new Term(WHAT, SCENARIO_EXECUTION_REPORT));
+ Query reportQuery = new WildcardQuery(new Term(REPORT, "*" + keyword.toLowerCase() + "*"));
+
+ BooleanQuery query = new BooleanQuery.Builder()
+ .add(reportQuery, BooleanClause.Occur.MUST)
+ .add(whatQuery, BooleanClause.Occur.MUST)
+ .build();
+
+ Sort sort = new Sort(SortField.FIELD_SCORE, new SortField(SCENARIO_EXECUTION_ID, SortField.Type.STRING, true));
+
+ return indexRepository.search(query, 100, sort)
+ .stream()
+ .map(doc -> doc.get(SCENARIO_EXECUTION_ID))
+ .map(Long::parseLong)
+ .toList();
+
+ }
+}
diff --git a/chutney/server/src/main/java/com/chutneytesting/scenario/infra/raw/DatabaseTestCaseRepository.java b/chutney/server/src/main/java/com/chutneytesting/scenario/infra/raw/DatabaseTestCaseRepository.java
index b6c8fddf3..4e6383f32 100644
--- a/chutney/server/src/main/java/com/chutneytesting/scenario/infra/raw/DatabaseTestCaseRepository.java
+++ b/chutney/server/src/main/java/com/chutneytesting/scenario/infra/raw/DatabaseTestCaseRepository.java
@@ -54,8 +54,7 @@ public class DatabaseTestCaseRepository implements AggregatedRepository
+
+
+
+
+
+
+
+
diff --git a/chutney/server/src/test/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionHistoryRepositoryTest.java b/chutney/server/src/test/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionHistoryRepositoryTest.java
index b71ab4743..6908e74c1 100644
--- a/chutney/server/src/test/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionHistoryRepositoryTest.java
+++ b/chutney/server/src/test/java/com/chutneytesting/execution/infra/storage/DatabaseExecutionHistoryRepositoryTest.java
@@ -90,6 +90,7 @@ class PostreSQL extends AllTests {
abstract class AllTests extends AbstractLocalDatabaseTest {
@Autowired
private DatabaseExecutionHistoryRepository sut;
+
@Autowired
private CampaignExecutionDBRepository campaignExecutionDBRepository;
@@ -550,7 +551,7 @@ void simple_case() {
var exec1 = sut.store(scenarioId1, buildDetachedExecution("toto"));
sut.store(scenarioId2, buildDetachedExecution("tutu"));
- var executionSummaryList = sut.getExecutionReportMatchQuery("to");
+ var executionSummaryList = sut.getExecutionReportMatchKeyword("to");
assertThat(executionSummaryList).hasSize(1);
assertThat(executionSummaryList.get(0).executionId()).isEqualTo(exec1.executionId());
@@ -565,7 +566,7 @@ void filter_unactivated_scenario_execution() {
sut.store(scenarioId2, buildDetachedExecution("tutu"));
databaseTestCaseRepository.removeById(scenarioId2);
- var executionSummaryList = sut.getExecutionReportMatchQuery("t");
+ var executionSummaryList = sut.getExecutionReportMatchKeyword("t");
assertThat(executionSummaryList).hasSize(1);
assertThat(executionSummaryList.get(0).executionId()).isEqualTo(exec1.executionId());
@@ -579,7 +580,7 @@ void limit_results_to_100() {
sut.store(scenarioId, buildDetachedExecution("report"));
});
- var executionSummaryList = sut.getExecutionReportMatchQuery("ort");
+ var executionSummaryList = sut.getExecutionReportMatchKeyword("ort");
assertThat(executionSummaryList).hasSize(100);
}
@@ -594,7 +595,7 @@ void order_by_id_descending() {
});
var expectedOrder = executionsIds.stream().sorted(Comparator.naturalOrder().reversed()).toList();
- var executionSummaryList = sut.getExecutionReportMatchQuery("ort");
+ var executionSummaryList = sut.getExecutionReportMatchKeyword("ort");
assertThat(executionSummaryList)
.map(ExecutionSummary::executionId)
diff --git a/chutney/server/src/test/java/com/chutneytesting/index/infra/IndexRepositoryTest.java b/chutney/server/src/test/java/com/chutneytesting/index/infra/IndexRepositoryTest.java
new file mode 100644
index 000000000..bcab272a8
--- /dev/null
+++ b/chutney/server/src/test/java/com/chutneytesting/index/infra/IndexRepositoryTest.java
@@ -0,0 +1,140 @@
+/*
+ * SPDX-FileCopyrightText: 2017-2024 Enedis
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ */
+
+package com.chutneytesting.index.infra;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import com.chutneytesting.tools.file.FileUtils;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.WildcardQuery;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+class IndexRepositoryTest {
+
+ private IndexRepository indexRepository;
+ private IndexConfig indexConfig;
+ private Path tmpDir;
+
+ @BeforeEach
+ public void setUp() throws IOException {
+ this.tmpDir = Files.createTempDirectory("index");
+ indexConfig = new OnDiskIndexConfig(tmpDir.toString());
+ indexRepository = new IndexRepository(indexConfig);
+ }
+
+ @AfterEach
+ void tearDown() {
+ FileUtils.deleteFolder(tmpDir);
+ }
+
+ @Test
+ void should_index_document() throws IOException {
+ // Given
+ Document doc = new Document();
+ doc.add(new StringField("id", "1", Field.Store.YES));
+ doc.add(new StringField("title", "Indexed Document", Field.Store.YES));
+
+ // When
+ indexRepository.index(doc);
+
+ // Then
+ assertTrue(DirectoryReader.indexExists(indexConfig.directory()), "Index should exist after document is indexed.");
+
+ }
+
+ @Test
+ void should_search_by_query() {
+ //Given
+ Document doc = new Document();
+ doc.add(new StringField("id", "1", Field.Store.YES));
+ doc.add(new StringField("title", "Searchable Document", Field.Store.YES));
+ indexRepository.index(doc);
+
+ doc = new Document();
+ doc.add(new StringField("id", "2", Field.Store.YES));
+ doc.add(new StringField("title", "other title", Field.Store.YES));
+ indexRepository.index(doc);
+
+ Query query = new TermQuery(new Term("title", "Searchable Document"));
+
+ // When
+        List<Document> results = indexRepository.search(query, 10, Sort.RELEVANCE);
+
+ // Then
+ assertThat(results).hasSize(1);
+ assertThat(results.get(0).getField("id").stringValue()).isEqualTo("1");
+ }
+
+ @Test
+ void should_delete_document_from_index() {
+ // Given
+ Document doc = new Document();
+ doc.add(new StringField("id", "1", Field.Store.YES));
+ doc.add(new StringField("title", "Document to be deleted", Field.Store.YES));
+ indexRepository.index(doc);
+
+ doc = new Document();
+ doc.add(new StringField("id", "2", Field.Store.YES));
+ doc.add(new StringField("title", "other title", Field.Store.YES));
+ indexRepository.index(doc);
+
+ Query query = new TermQuery(new Term("title", "Document to be deleted"));
+
+ // When
+ indexRepository.delete(query);
+
+ // Then
+        List<Document> resultsAfterDelete = indexRepository.search(query, 10, Sort.RELEVANCE);
+ assertEquals(0, resultsAfterDelete.size());
+
+ query = new TermQuery(new Term("title", "other title"));
+        List<Document> results = indexRepository.search(query, 10, Sort.RELEVANCE);
+ assertThat(results).hasSize(1);
+ assertThat(results.get(0).getField("id").stringValue()).isEqualTo("2");
+ }
+
+ @Test
+ void should_clean_index() {
+ // Given
+ Document doc = new Document();
+ doc.add(new StringField("id", "1", Field.Store.YES));
+ doc.add(new StringField("title", "Document to be deleted", Field.Store.YES));
+ indexRepository.index(doc);
+
+ doc = new Document();
+ doc.add(new StringField("id", "2", Field.Store.YES));
+ doc.add(new StringField("title", "other title", Field.Store.YES));
+ indexRepository.index(doc);
+
+ // When
+ indexRepository.deleteAll();
+
+ // Then
+ Query query = new WildcardQuery(new Term("title", "*"));
+        List<Document> resultsAfterDeleteAll = indexRepository.search(query, 10, Sort.RELEVANCE);
+ assertEquals(0, resultsAfterDeleteAll.size());
+
+ }
+
+}
+
diff --git a/chutney/server/src/test/java/util/infra/AbstractLocalDatabaseTest.java b/chutney/server/src/test/java/util/infra/AbstractLocalDatabaseTest.java
index 25a82b0a2..a8c0ddb6a 100644
--- a/chutney/server/src/test/java/util/infra/AbstractLocalDatabaseTest.java
+++ b/chutney/server/src/test/java/util/infra/AbstractLocalDatabaseTest.java
@@ -12,6 +12,7 @@
import com.chutneytesting.campaign.infra.jpa.CampaignEntity;
import com.chutneytesting.campaign.infra.jpa.CampaignScenarioEntity;
import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionEntity;
+import com.chutneytesting.index.infra.IndexRepository;
import com.chutneytesting.scenario.infra.jpa.ScenarioEntity;
import com.chutneytesting.scenario.infra.raw.TagListMapper;
import com.chutneytesting.server.core.domain.execution.report.ServerReportStatus;
@@ -58,6 +59,9 @@ public abstract class AbstractLocalDatabaseTest {
@Autowired
private Liquibase liquibase;
+ @Autowired
+ private IndexRepository indexRepository;
+
@BeforeEach
void setTransactionTemplate() {
transactionTemplate.setTransactionManager(transactionManager);
@@ -71,6 +75,11 @@ protected void clearTables() {
jdbcTemplate.execute("DELETE FROM CAMPAIGN_SCENARIOS");
jdbcTemplate.execute("DELETE FROM CAMPAIGN");
jdbcTemplate.execute("DELETE FROM SCENARIO");
+ clearIndexes();
+ }
+
+ private void clearIndexes() {
+ indexRepository.deleteAll();
}
protected void liquibaseUpdate() throws LiquibaseException, SQLException {
diff --git a/chutney/server/src/test/java/util/infra/TestInfraConfiguration.java b/chutney/server/src/test/java/util/infra/TestInfraConfiguration.java
index 113dca889..22dc3cc7b 100644
--- a/chutney/server/src/test/java/util/infra/TestInfraConfiguration.java
+++ b/chutney/server/src/test/java/util/infra/TestInfraConfiguration.java
@@ -10,6 +10,12 @@
import static util.infra.AbstractLocalDatabaseTest.DB_CHANGELOG_DB_CHANGELOG_MASTER_XML;
import com.chutneytesting.ServerConfiguration;
+import com.chutneytesting.execution.infra.aop.ScenarioExecutionReportIndexingAspect;
+import com.chutneytesting.execution.infra.storage.DatabaseExecutionJpaRepository;
+import com.chutneytesting.index.infra.IndexConfig;
+import com.chutneytesting.index.infra.IndexRepository;
+import com.chutneytesting.index.infra.OnDiskIndexConfig;
+import com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
@@ -40,6 +46,7 @@
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.Profile;
import org.springframework.jdbc.core.JdbcTemplate;
@@ -57,6 +64,7 @@
@Configuration
@EnableTransactionManagement(proxyTargetClass = true)
@EnableJpa
+@EnableAspectJAutoProxy
@Profile("test-infra")
class TestInfraConfiguration {
@@ -187,6 +195,24 @@ public ObjectMapper reportObjectMapper() {
return new ServerConfiguration().reportObjectMapper();
}
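+    // the test index is written to a throw-away temporary directory, one per Spring test context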
+ @Bean
+ public IndexRepository indexRepository() throws IOException {
+ Path tempDirectory = Files.createTempDirectory("test-infra-index");
+ IndexConfig config = new OnDiskIndexConfig(tempDirectory.toString());
+ return new IndexRepository(config);
+ }
+
+ @Bean
+ public ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository(IndexRepository indexRepository) {
+ return new ScenarioExecutionReportIndexRepository(indexRepository);
+ }
+
+ @Bean
+ public ScenarioExecutionReportIndexingAspect indexingAspect(ScenarioExecutionReportIndexRepository indexRepository, DatabaseExecutionJpaRepository scenarioExecutionsJpaRepository) {
+ return new ScenarioExecutionReportIndexingAspect(indexRepository, scenarioExecutionsJpaRepository);
+ }
+
+
@Primary
@Bean
@Profile("!test-infra-sqlite")