chore(): one shot batch
KarimGl committed Oct 25, 2024
1 parent 7f07b7f commit c4167e2
Showing 7 changed files with 137 additions and 22 deletions.
@@ -0,0 +1,83 @@
/*
* SPDX-FileCopyrightText: 2017-2024 Enedis
*
* SPDX-License-Identifier: Apache-2.0
*
*/

package com.chutneytesting.execution.infra.migration;

import static com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository.SCENARIO_EXECUTION_REPORT;
import static com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository.WHAT;

import com.chutneytesting.execution.infra.storage.ScenarioExecutionReportJpaRepository;
import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionReportEntity;
import com.chutneytesting.index.infra.IndexRepository;
import com.chutneytesting.index.infra.ScenarioExecutionReportIndexRepository;
import jakarta.persistence.EntityManager;
import java.util.List;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

@Component
public class ZipReportMigration implements CommandLineRunner {

    private final ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository;
    private final ScenarioExecutionReportJpaRepository scenarioExecutionReportJpaRepository;
    private final IndexRepository indexRepository;
    private final EntityManager entityManager;
    private static final Logger LOGGER = LoggerFactory.getLogger(ZipReportMigration.class);

    public ZipReportMigration(ScenarioExecutionReportIndexRepository scenarioExecutionReportIndexRepository,
                              ScenarioExecutionReportJpaRepository scenarioExecutionReportJpaRepository,
                              IndexRepository indexRepository,
                              EntityManager entityManager) {
        this.scenarioExecutionReportIndexRepository = scenarioExecutionReportIndexRepository;
        this.scenarioExecutionReportJpaRepository = scenarioExecutionReportJpaRepository;
        this.indexRepository = indexRepository;
        this.entityManager = entityManager;
    }

    @Override
    @Transactional
    public void run(String... args) {
        if (isMigrationDone()) {
            LOGGER.info("Report compression already done, skipping...");
            return;
        }
        List<ScenarioExecutionReportEntity> reportsInDb = scenarioExecutionReportJpaRepository.findAll();
        compressAndSaveInDb(reportsInDb);
        index(reportsInDb);
        LOGGER.info("{} report(s) successfully compressed and indexed", reportsInDb.size());
    }

    private void index(List<ScenarioExecutionReportEntity> reportsInDb) {
        LOGGER.info("{} report(s) will be indexed", reportsInDb.size());
        scenarioExecutionReportIndexRepository.saveAll(reportsInDb);
    }

    private void compressAndSaveInDb(List<ScenarioExecutionReportEntity> reportsInDb) {
        LOGGER.info("{} report(s) will be compressed", reportsInDb.size());

        // Calling scenarioExecutionReportJpaRepository save/saveAll does not invoke ReportConverter;
        // the converter is applied by the explicit entityManager update, which performs the compression.
        reportsInDb.forEach(report -> entityManager.createQuery(
                "UPDATE SCENARIO_EXECUTIONS_REPORTS SET report = :report WHERE id = :id")
            .setParameter("report", report.getReport())
            .setParameter("id", report.scenarioExecutionId())
            .executeUpdate());
    }

    private boolean isMigrationDone() {
        Query whatQuery = new TermQuery(new Term(WHAT, SCENARIO_EXECUTION_REPORT));
        int indexedReports = indexRepository.count(whatQuery);
        return indexedReports > 0;
    }
}
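
Aside (not part of the commit): a minimal, self-contained sketch of the idempotence guard above, using an in-memory Lucene index (Lucene 8+). The class and variable names are invented for illustration; it assumes, as the code suggests, that the index holds one document per report tagged with a "what" field, and that a positive count means the one-shot migration already ran.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

public class MigrationGuardSketch {
    public static void main(String[] args) throws Exception {
        Directory dir = new ByteBuffersDirectory(); // in-memory index for the demo
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
            writer.commit(); // create an empty, readable index
            System.out.println(isMigrationDone(dir)); // false -> migration would run

            Document doc = new Document(); // simulate one indexed report
            doc.add(new StringField("what", "scenario_execution_report", Field.Store.NO));
            writer.addDocument(doc);
            writer.commit();
        }
        System.out.println(isMigrationDone(dir)); // true -> next startup skips
    }

    // Same count-based check as isMigrationDone() above: any indexed report means done.
    static boolean isMigrationDone(Directory dir) throws Exception {
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            return searcher.count(new TermQuery(new Term("what", "scenario_execution_report"))) > 0;
        }
    }
}

Because the guard only checks for the presence of indexed reports, re-running the application after a successful migration is a no-op.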
@@ -186,6 +186,7 @@ private void updateReport(Execution execution) throws ReportNotFoundException {
        );
        scenarioExecutionReport.updateReport(execution);
        scenarioExecutionReportJpaRepository.save(scenarioExecutionReport);
+       scenarioExecutionReportIndexRepository.save(scenarioExecutionReport);
    }

@Override
@@ -212,6 +213,7 @@ public void deleteExecutions(Set<Long> executionsIds) {

        campaignExecutionJpaRepository.deleteAllByIdInBatch(campaignExecutionsIds);
        scenarioExecutionReportJpaRepository.deleteAllById(executionsIds);
+       scenarioExecutionReportIndexRepository.deleteAllById(executionsIds);
        scenarioExecutionsJpaRepository.deleteAllByIdInBatch(executionsIds);
    }

@@ -8,39 +8,49 @@
package com.chutneytesting.execution.infra.storage.jpa;

import jakarta.persistence.AttributeConverter;
import jakarta.persistence.Converter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
-import org.apache.commons.lang3.StringUtils;

@Converter
public class ReportConverter implements AttributeConverter<String, byte[]> {
    @Override
    public byte[] convertToDatabaseColumn(String report) {
-       if (StringUtils.isNoneEmpty()) {
-           try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
-               GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream)) {
-               gzipOutputStream.write(report.getBytes(StandardCharsets.UTF_8));
-               gzipOutputStream.finish();
-               return byteArrayOutputStream.toByteArray();
-           } catch (IOException e) {
-               throw new RuntimeException("Failed to compress report content", e);
-           }
-       }
-       return null;
+       return compress(report);
    }

    @Override
    public String convertToEntityAttribute(byte[] zippedReport) {
-       if (zippedReport == null || zippedReport.length == 0) {
-           return null;
-       }
+       if (!isCompressed(zippedReport)) {
+           return new String(zippedReport, StandardCharsets.UTF_8);
+       }
+       return decompress(zippedReport);
    }

+   private boolean isCompressed(byte[] data) {
+       return data != null && data.length >= 2
+           && data[0] == (byte) 0x1f && data[1] == (byte) 0x8b;
+   }
+
+   private byte[] compress(String report) {
+       try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+            GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream)) {
+           gzipOutputStream.write(report.getBytes(StandardCharsets.UTF_8));
+           gzipOutputStream.finish();
+           return byteArrayOutputStream.toByteArray();
+       } catch (IOException e) {
+           throw new RuntimeException("Failed to compress report content", e);
+       }
+   }

-       try (ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(zippedReport);
+   private String decompress(byte[] compressedData) {
+       try (ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(compressedData);
            GZIPInputStream gzipInputStream = new GZIPInputStream(byteArrayInputStream);
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {

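
Aside (not part of the commit; the rest of decompress is collapsed in the diff above): a self-contained sketch of the GZIP magic-byte convention the converter now relies on. Every GZIP stream begins with the bytes 0x1f 0x8b, which is what lets convertToEntityAttribute tell legacy plain-text rows from compressed ones. Class and method names below are invented for illustration.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

public class GzipMagicSketch {
    public static void main(String[] args) throws Exception {
        byte[] legacy = "{\"report\":\"plain\"}".getBytes(StandardCharsets.UTF_8);
        byte[] compressed = gzip("{\"report\":\"zipped\"}");

        System.out.println(isGzip(legacy));     // false: returned as-is
        System.out.println(isGzip(compressed)); // true: decompressed first
        System.out.println(gunzip(compressed)); // {"report":"zipped"}
    }

    static boolean isGzip(byte[] data) {
        // Every GZIP stream starts with the two magic bytes 0x1f 0x8b.
        return data != null && data.length >= 2
            && data[0] == (byte) 0x1f && data[1] == (byte) 0x8b;
    }

    static byte[] gzip(String text) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (GZIPOutputStream gzip = new GZIPOutputStream(out)) {
            gzip.write(text.getBytes(StandardCharsets.UTF_8));
        }
        return out.toByteArray();
    }

    static String gunzip(byte[] data) throws Exception {
        try (GZIPInputStream in = new GZIPInputStream(new ByteArrayInputStream(data))) {
            return new String(in.readAllBytes(), StandardCharsets.UTF_8);
        }
    }
}

This makes the column migration backward-compatible: rows written before the one-shot migration remain readable without any extra flag column.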
@@ -55,6 +55,17 @@ public List<Document> search(Query query, int limit, Sort sort) {
        return result;
    }

+   public int count(Query query) {
+       int count = 0;
+       try (DirectoryReader reader = DirectoryReader.open(indexDirectory)) {
+           IndexSearcher searcher = new IndexSearcher(reader);
+           count = searcher.count(query);
+       } catch (IOException ignored) {
+           // an unreadable or missing index counts as zero, so callers treat it as "not indexed yet"
+       }
+       return count;
+   }

    public void delete(Query query) {
        try {
            indexWriter.deleteDocuments(query);
@@ -11,6 +11,7 @@

import com.chutneytesting.execution.infra.storage.jpa.ScenarioExecutionReportEntity;
import java.util.List;
+import java.util.Set;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.StringField;
@@ -31,8 +32,8 @@ public class ScenarioExecutionReportIndexRepository {

    public static final String SCENARIO_EXECUTION_REPORT = "scenario_execution_report";
    public static final String WHAT = "what";
-   public static final String SCENARIO_EXECUTION_ID = "scenarioExecutionId";
-   public static final String REPORT = "report";
+   private static final String SCENARIO_EXECUTION_ID = "scenarioExecutionId";
+   private static final String REPORT = "report";
    private final IndexRepository indexRepository;
    public ScenarioExecutionReportIndexRepository(IndexRepository indexRepository) {
@@ -51,7 +52,11 @@ public void save(ScenarioExecutionReportEntity report) {
        indexRepository.index(document);
    }

-   public void remove(Long scenarioExecutionId) {
+   public void saveAll(List<ScenarioExecutionReportEntity> reports) {
+       reports.forEach(this::save);
+   }
+
+   public void delete(Long scenarioExecutionId) {
        Query whatQuery = new TermQuery(new Term(WHAT, SCENARIO_EXECUTION_REPORT));
        Query idQuery = new TermQuery(new Term(SCENARIO_EXECUTION_ID, scenarioExecutionId.toString()));
        BooleanQuery query = new BooleanQuery.Builder()
@@ -61,6 +66,10 @@ public void remove(Long scenarioExecutionId) {
        indexRepository.delete(query);
    }

+   public void deleteAllById(Set<Long> scenarioExecutionIds) {
+       scenarioExecutionIds.forEach(this::delete);
+   }


    public List<Long> idsByKeywordInReport(String keyword) {
        Query whatQuery = new TermQuery(new Term(WHAT, SCENARIO_EXECUTION_REPORT));
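
Aside (not part of the commit): a sketch of the query shape that delete(Long) above builds. Both terms are MUST clauses, so only the report document carrying the matching execution id is targeted, leaving other document kinds in the shared index untouched. The field values and id below are invented for illustration.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class DeleteQuerySketch {
    public static void main(String[] args) {
        // Narrow deletion to one document kind AND one execution id.
        Query query = new BooleanQuery.Builder()
            .add(new TermQuery(new Term("what", "scenario_execution_report")), BooleanClause.Occur.MUST)
            .add(new TermQuery(new Term("scenarioExecutionId", "42")), BooleanClause.Occur.MUST)
            .build();
        // The repository passes such a query to IndexWriter.deleteDocuments(query).
        System.out.println(query); // +what:scenario_execution_report +scenarioExecutionId:42
    }
}

deleteAllById then simply applies this query once per id.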
@@ -121,7 +121,7 @@ public void removeById(String scenarioId) {
        allExecutions.forEach(e -> {
            e.forCampaignExecution(null);
            scenarioExecutionsJpaRepository.save(e);
-           scenarioExecutionReportIndexRepository.remove(e.id());
+           scenarioExecutionReportIndexRepository.delete(e.id());
        });

        List<CampaignScenarioEntity> allCampaignScenarioEntities = campaignScenarioJpaRepository.findAllByScenarioId(scenarioId);
@@ -1072,7 +1072,7 @@
        </addColumn>
    </changeSet>

<changeSet id="scenario-execution-report-as-blob" author="ICG">
<changeSet id="scenario-execution-report-as-blob-h2-postgres" author="ICG" dbms="h2,postgresql">
        <modifyDataType
            tableName="SCENARIO_EXECUTIONS_REPORTS"
            columnName="REPORT"
