Feat/zip #208

Merged · 7 commits · Nov 18, 2024
@@ -62,7 +62,7 @@ chutney:
configuration-folder: ${chutney.configuration-folder}/environment
jira:
configuration-folder: ${chutney.configuration-folder}/jira

index-folder: .chutney/index
server:
editions:
ttl:
16 changes: 16 additions & 0 deletions chutney/pom.xml
@@ -74,6 +74,7 @@
<!-- test dependencies versions -->
<jqwik.version>1.9.1</jqwik.version>
<wiremock.version>3.9.1</wiremock.version>
<lucene.version>9.12.0</lucene.version>
</properties>

<dependencies>
@@ -261,6 +262,21 @@
<artifactId>liquibase-core</artifactId>
<version>${liquibase.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
<version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analysis-common</artifactId>
<version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
<version>${lucene.version}</version>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
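The three Lucene artifacts pinned here have distinct roles: lucene-core provides the index and search machinery, lucene-analysis-common the text analyzers, and lucene-queryparser the keyword-query syntax. A minimal, self-contained sketch of the indexing/search round trip they enable — field names are illustrative, not Chutney's actual schema; the folder matches the index-folder setting above:

import java.nio.file.Paths;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class LuceneSketch {
    public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Paths.get(".chutney/index"))) {
            // Index one document: a stored id for retrieval, an analyzed body for matching.
            try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
                Document doc = new Document();
                doc.add(new StringField("id", "42", Field.Store.YES));
                doc.add(new TextField("report", "scenario step failed on assertion", Field.Store.NO));
                writer.addDocument(doc);
            }
            // Search it back by keyword and print the matching ids.
            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                IndexSearcher searcher = new IndexSearcher(reader);
                var query = new QueryParser("report", new StandardAnalyzer()).parse("failed");
                for (ScoreDoc hit : searcher.search(query, 10).scoreDocs) {
                    System.out.println(searcher.storedFields().document(hit.doc).get("id"));
                }
            }
        }
    }
}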
@@ -47,7 +47,7 @@ public interface ExecutionHistoryRepository {
*/
ExecutionHistory.Execution getExecution(String scenarioId, Long reportId) throws ReportNotFoundException;

List<ExecutionHistory.ExecutionSummary> getExecutionReportMatchQuery(String query);
List<ExecutionHistory.ExecutionSummary> getExecutionReportMatchKeyword(String query);

/**
* Override a previously stored {@link ExecutionHistory.Execution}.
17 changes: 17 additions & 0 deletions chutney/server/pom.xml
@@ -50,6 +50,10 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
@@ -181,6 +185,19 @@
<scope>runtime</scope>
</dependency>

<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analysis-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
</dependency>

<!-- Test dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -61,11 +61,13 @@
import org.springframework.boot.autoconfigure.liquibase.LiquibaseAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.core.task.TaskExecutor;
import org.springframework.core.task.support.ExecutorServiceAdapter;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

@SpringBootApplication(exclude = {LiquibaseAutoConfiguration.class, ActiveMQAutoConfiguration.class, MongoAutoConfiguration.class})
@EnableAspectJAutoProxy
public class ServerConfiguration {

private static final Logger LOGGER = LoggerFactory.getLogger(ServerConfiguration.class);
@@ -40,7 +40,7 @@ public class DatabaseManagementController {
@PreAuthorize("hasAuthority('ADMIN_ACCESS')")
@GetMapping(path = "/execution", produces = MediaType.APPLICATION_JSON_VALUE)
public List<ExecutionSummaryDto> getExecutionReportMatchQuery(@QueryParam("query") String query) {
return executionHistoryRepository.getExecutionReportMatchQuery(query).stream().map(ExecutionSummaryDto::toDto).toList();
return executionHistoryRepository.getExecutionReportMatchKeyword(query).stream().map(ExecutionSummaryDto::toDto).toList();
}

@PreAuthorize("hasAuthority('ADMIN_ACCESS')")
@@ -0,0 +1,53 @@
/*
 * SPDX-FileCopyrightText: 2017-2024 Enedis
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 */

package com.chutneytesting.execution.infra.aop;

import com.chutneytesting.execution.infra.storage.DatabaseExecutionJpaRepository;
import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionEntity;
import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionReportEntity;
import com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository;
import com.chutneytesting.scenario.infra.jpa.ScenarioEntity;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.springframework.stereotype.Component;

@Aspect
@Component
public class ScenarioExecutionReportIndexingAspect {

    private final ScenarioExecutionReportIndexRepository reportIndexRepository;
    private final DatabaseExecutionJpaRepository scenarioExecutionRepository;

    public ScenarioExecutionReportIndexingAspect(ScenarioExecutionReportIndexRepository reportIndexRepository, DatabaseExecutionJpaRepository scenarioExecutionRepository) {
        this.reportIndexRepository = reportIndexRepository;
        this.scenarioExecutionRepository = scenarioExecutionRepository;
    }

    // Index a report as soon as it is persisted, but only once its execution has reached a final status.
    @After("execution(* com.chutneytesting.execution.infra.storage.ScenarioExecutionReportJpaRepository.save(..)) && args(reportEntity)")
    public void index(ScenarioExecutionReportEntity reportEntity) {
        if (reportEntity.status().isFinal()) {
            reportIndexRepository.save(reportEntity);
        }
    }

    // When a scenario is deactivated, drop its execution reports from the index.
    @After("execution(* com.chutneytesting.scenario.infra.raw.ScenarioJpaRepository.save(..)) && args(scenario)")
    public void deleteDeactivatedScenarioExecutions(ScenarioEntity scenario) {
        if (!scenario.isActivated()) {
            List<ScenarioExecutionEntity> executions = scenarioExecutionRepository.findAllByScenarioId(String.valueOf(scenario.getId()));
            reportIndexRepository.deleteAllById(executions.stream().map(ScenarioExecutionEntity::getId).collect(Collectors.toSet()));
        }
    }

    // Keep the index in sync with bulk report deletions.
    @After("execution(* com.chutneytesting.execution.infra.storage.ScenarioExecutionReportJpaRepository.deleteAllById(..)) && args(scenarioExecutionIds)")
    public void deleteById(Set<Long> scenarioExecutionIds) {
        reportIndexRepository.deleteAllById(scenarioExecutionIds);
    }
}
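For readers new to AspectJ's pointcut language: an expression like execution(* ...Repository.save(..)) && args(x) matches any save call on the named type and binds the call's single argument to the advice parameter, and @After advice runs once the intercepted method returns (or throws). A tiny self-contained sketch of the same pattern, with hypothetical names throughout:

import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.springframework.stereotype.Component;

@Aspect
@Component
class AuditAspect { // hypothetical example, not part of this PR
    // Fires after any save(..) on the (hypothetical) com.example.OrderRepository completes;
    // args(order) exposes the saved entity to the advice body.
    @After("execution(* com.example.OrderRepository.save(..)) && args(order)")
    void afterSave(Object order) {
        System.out.println("saved: " + order);
    }
}

Note that proxy-based advice like this only fires on calls that cross a Spring bean boundary, which is why ServerConfiguration turns it on with @EnableAspectJAutoProxy.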
@@ -0,0 +1,97 @@
/*
 * SPDX-FileCopyrightText: 2017-2024 Enedis
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 */

package com.chutneytesting.execution.infra.migration;

import static com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository.SCENARIO_EXECUTION_REPORT;
import static com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository.WHAT;

import com.chutneytesting.execution.infra.storage.ScenarioExecutionReportJpaRepository;
import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionReportEntity;
import com.chutneytesting.index.infra.IndexRepository;
import com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository;
import jakarta.persistence.EntityManager;
import java.util.List;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

@Component
public class ZipReportMigration implements CommandLineRunner {

    private static final Logger LOGGER = LoggerFactory.getLogger(ZipReportMigration.class);

    private final ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository;
    private final ScenarioExecutionReportJpaRepository scenarioExecutionReportJpaRepository;
    private final IndexRepository indexRepository;
    private final EntityManager entityManager;

    public ZipReportMigration(ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository, ScenarioExecutionReportJpaRepository scenarioExecutionReportJpaRepository, IndexRepository indexRepository, EntityManager entityManager) {
        this.scenarioExecutionReportIndexRepository = scenarioExecutionReportIndexRepository;
        this.scenarioExecutionReportJpaRepository = scenarioExecutionReportJpaRepository;
        this.indexRepository = indexRepository;
        this.entityManager = entityManager;
    }

    @Override
    @Transactional
    public void run(String... args) {
        if (isMigrationDone()) {
            LOGGER.info("Report compression & indexing already done, skipping...");
            return;
        }
        PageRequest firstPage = PageRequest.of(0, 10);
        compressAndIndex(firstPage, 0);
    }

    private void compressAndIndex(Pageable pageable, int previousCount) {
        Slice<ScenarioExecutionReportEntity> slice = scenarioExecutionReportJpaRepository.findAll(pageable);
        List<ScenarioExecutionReportEntity> reports = slice.getContent();

        compressAndSaveInDb(reports);
        index(reports);

        int count = previousCount + slice.getNumberOfElements();
        if (slice.hasNext()) {
            compressAndIndex(slice.nextPageable(), count);
        } else {
            LOGGER.info("{} report(s) successfully compressed and indexed", count);
        }
    }

    private void compressAndSaveInDb(List<ScenarioExecutionReportEntity> reportsInDb) {
        // Calling find() then save() on scenarioExecutionReportJpaRepository would not run ReportConverter.
        // An explicit entityManager update does run it, so the report gets compressed on write.
        reportsInDb.forEach(report -> {
            entityManager.createQuery(
                    "UPDATE SCENARIO_EXECUTIONS_REPORTS SET report = :report WHERE id = :id")
                .setParameter("report", report.getReport())
                .setParameter("id", report.scenarioExecutionId())
                .executeUpdate();
        });
    }

    private void index(List<ScenarioExecutionReportEntity> reportsInDb) {
        scenarioExecutionReportIndexRepository.saveAll(reportsInDb);
    }

    private boolean isMigrationDone() {
        Query whatQuery = new TermQuery(new Term(WHAT, SCENARIO_EXECUTION_REPORT));
        int indexedReports = indexRepository.count(whatQuery);
        return indexedReports > 0;
    }
}
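One design note: compressAndIndex walks the pages recursively, so call depth grows with the number of pages. An equivalent iterative form using the same Spring Data Slice API — sketched here as a drop-in body for run(), reusing the fields of the class above — avoids that:

Pageable page = PageRequest.of(0, 10);
int count = 0;
while (true) {
    Slice<ScenarioExecutionReportEntity> slice = scenarioExecutionReportJpaRepository.findAll(page);
    compressAndSaveInDb(slice.getContent());
    index(slice.getContent());
    count += slice.getNumberOfElements();
    if (!slice.hasNext()) break; // nextPageable() is only meaningful while a next slice exists
    page = slice.nextPageable();
}
LOGGER.info("{} report(s) successfully compressed and indexed", count);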
Expand Up @@ -16,6 +16,7 @@
import com.chutneytesting.campaign.infra.jpa.CampaignExecutionEntity;
import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionEntity;
import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionReportEntity;
import com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository;
import com.chutneytesting.server.core.domain.dataset.DataSet;
import com.chutneytesting.server.core.domain.execution.history.ExecutionHistory.DetachedExecution;
import com.chutneytesting.server.core.domain.execution.history.ExecutionHistory.Execution;
@@ -51,6 +52,7 @@ class DatabaseExecutionHistoryRepository implements ExecutionHistoryRepository {
private final CampaignJpaRepository campaignJpaRepository;
private final CampaignExecutionJpaRepository campaignExecutionJpaRepository;
private final TestCaseRepository testCaseRepository;
private final ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository;
private final ObjectMapper objectMapper;
private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseExecutionHistoryRepository.class);

@@ -60,12 +62,14 @@ class DatabaseExecutionHistoryRepository implements ExecutionHistoryRepository {
ScenarioExecutionReportJpaRepository scenarioExecutionReportJpaRepository,
CampaignJpaRepository campaignJpaRepository, TestCaseRepository testCaseRepository,
CampaignExecutionJpaRepository campaignExecutionJpaRepository,
ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository,
@Qualifier("reportObjectMapper") ObjectMapper objectMapper) {
this.scenarioExecutionsJpaRepository = scenarioExecutionsJpaRepository;
this.scenarioExecutionReportJpaRepository = scenarioExecutionReportJpaRepository;
this.campaignJpaRepository = campaignJpaRepository;
this.testCaseRepository = testCaseRepository;
this.campaignExecutionJpaRepository = campaignExecutionJpaRepository;
this.scenarioExecutionReportIndexRepository = scenarioExecutionReportIndexRepository;
this.objectMapper = objectMapper;
}

@@ -128,7 +132,8 @@ public Execution store(String scenarioId, DetachedExecution detachedExecution) t
scenarioExecution.forCampaignExecution(campaignExecution.get());
}
scenarioExecution = scenarioExecutionsJpaRepository.save(scenarioExecution);
scenarioExecutionReportJpaRepository.save(new ScenarioExecutionReportEntity(scenarioExecution, detachedExecution.report()));
ScenarioExecutionReportEntity reportEntity = new ScenarioExecutionReportEntity(scenarioExecution, detachedExecution.report());
scenarioExecutionReportJpaRepository.save(reportEntity);
Execution execution = detachedExecution.attach(scenarioExecution.id(), scenarioId);
return ImmutableExecutionHistory.Execution.builder().from(execution).build();
}
@@ -146,9 +151,10 @@ public Execution getExecution(String scenarioId, Long reportId) throws ReportNot
}

@Override
public List<ExecutionSummary> getExecutionReportMatchQuery(String query) {
public List<ExecutionSummary> getExecutionReportMatchKeyword(String keyword) {
List<Long> matchedReportsIds = scenarioExecutionReportIndexRepository.idsByKeywordInReport(keyword);
return scenarioExecutionsJpaRepository
.getExecutionReportMatchQuery(query)
.getExecutionReportByIds(matchedReportsIds)
.stream()
.map(this::scenarioExecutionToExecutionSummary)
.toList();
@@ -53,9 +53,8 @@ ELSE MAX(CASE WHEN se.status != 'NOT_EXECUTED' THEN se.id END)
inner join ser.scenarioExecution se
where s.activated = true
and cast(s.id as string) = se.scenarioId
and ser.report like '%' || :query || '%'
and ser.scenarioExecutionId in (:executionsIds)
order by se.id desc
limit 100
""")
List<ScenarioExecutionEntity> getExecutionReportMatchQuery(@Param("query") String query);
List<ScenarioExecutionEntity> getExecutionReportByIds(@Param("executionsIds") List<Long> executionsIds);
}
@@ -0,0 +1,63 @@
/*
 * SPDX-FileCopyrightText: 2017-2024 Enedis
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 */

package com.chutneytesting.execution.infra.storage.jpa;

import jakarta.persistence.AttributeConverter;
import jakarta.persistence.Converter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

@Converter
public class ReportConverter implements AttributeConverter<String, byte[]> {

    @Override
    public byte[] convertToDatabaseColumn(String report) {
        return compress(report);
    }

    @Override
    public String convertToEntityAttribute(byte[] zippedReport) {
        // Reports stored before this migration are plain UTF-8; pass them through unchanged.
        if (!isCompressed(zippedReport)) {
            return new String(zippedReport, StandardCharsets.UTF_8);
        }
        return decompress(zippedReport);
    }

    // A gzip stream starts with the magic bytes 0x1f 0x8b (RFC 1952).
    private boolean isCompressed(byte[] data) {
        return (data != null && data.length >= 2 &&
            (data[0] == (byte) 0x1f && data[1] == (byte) 0x8b));
    }

    private byte[] compress(String report) {
        try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
             GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream)) {

            gzipOutputStream.write(report.getBytes(StandardCharsets.UTF_8));
            gzipOutputStream.finish();
            return byteArrayOutputStream.toByteArray();

        } catch (IOException e) {
            throw new RuntimeException("Failed to compress report content", e);
        }
    }

    private String decompress(byte[] compressedData) {
        try (ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(compressedData);
             GZIPInputStream gzipInputStream = new GZIPInputStream(byteArrayInputStream);
             ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
            byteArrayOutputStream.write(gzipInputStream.readAllBytes());
            return byteArrayOutputStream.toString(StandardCharsets.UTF_8);

        } catch (IOException e) {
            throw new RuntimeException("Failed to decompress report content", e);
        }
    }
}
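A quick way to sanity-check the converter's round-trip behavior — a standalone sketch, not a test shipped in this PR:

ReportConverter converter = new ReportConverter();
String report = "{\"status\":\"SUCCESS\"}";

byte[] stored = converter.convertToDatabaseColumn(report);
assert stored[0] == (byte) 0x1f && stored[1] == (byte) 0x8b; // gzip magic header

// The round trip restores the original text...
assert report.equals(converter.convertToEntityAttribute(stored));
// ...and pre-migration rows stored as plain UTF-8 pass through untouched.
byte[] legacy = report.getBytes(java.nio.charset.StandardCharsets.UTF_8);
assert report.equals(converter.convertToEntityAttribute(legacy));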