feat: Improved logging
Julien Ruaux committed Aug 5, 2022
1 parent f3ffae7 commit cbee5a7
Showing 84 changed files with 1,204 additions and 678 deletions.
1 change: 1 addition & 0 deletions connectors/riot-db/riot-db.gradle
@@ -17,6 +17,7 @@ dependencies {
implementation 'org.postgresql:postgresql'
implementation group: 'org.xerial', name: 'sqlite-jdbc', version: sqliteVersion
testImplementation project(':riot-test')
testImplementation 'org.slf4j:slf4j-simple'
testImplementation group: 'org.testcontainers', name: 'postgresql', version: testcontainersVersion
testImplementation group: 'org.testcontainers', name: 'oracle-xe', version: testcontainersVersion
}
@@ -57,4 +57,10 @@ public DataSource dataSource() {
return properties.initializeDataSourceBuilder().build();
}

@Override
public String toString() {
return "DataSourceOptions [driver=" + driver + ", url=" + url + ", username=" + username + ", password="
+ password + "]";
}

}
@@ -3,11 +3,11 @@
import java.sql.Connection;
import java.util.Map;
import java.util.Optional;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.sql.DataSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.item.ItemProcessor;
@@ -18,18 +18,18 @@
import com.redis.riot.processor.DataStructureItemProcessor;
import com.redis.spring.batch.DataStructure;

import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Mixin;
import picocli.CommandLine.Parameters;

@Command(name = "export", description = "Export to a database")
public class DatabaseExportCommand extends AbstractExportCommand<Map<String, Object>> {

private static final Logger log = LoggerFactory.getLogger(DatabaseExportCommand.class);
private static Logger log = Logger.getLogger(DatabaseExportCommand.class.getName());

private static final String NAME = "db-export";

@CommandLine.Parameters(arity = "1", description = "SQL INSERT statement.", paramLabel = "SQL")
@Parameters(arity = "1", description = "SQL INSERT statement.", paramLabel = "SQL")
private String sql;
@Mixin
private DataSourceOptions dataSourceOptions = new DataSourceOptions();
@@ -46,11 +46,11 @@ public DataSourceOptions getDataSourceOptions() {

@Override
protected Job job(JobBuilder jobBuilder) throws Exception {
log.debug("Creating data source with {}", dataSourceOptions);
log.log(Level.FINE, "Creating data source with {0}", dataSourceOptions);
DataSource dataSource = dataSourceOptions.dataSource();
try (Connection connection = dataSource.getConnection()) {
String dbName = connection.getMetaData().getDatabaseProductName();
log.debug("Creating writer for database {} with {}", dbName, exportOptions);
log.log(Level.FINE, "Creating writer for database {0} with {1}", new Object[] { dbName, exportOptions });
JdbcBatchItemWriterBuilder<Map<String, Object>> builder = new JdbcBatchItemWriterBuilder<>();
builder.itemSqlParameterSourceProvider(NullableMapSqlParameterSource::new);
builder.dataSource(dataSource);
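For readers skimming the hunks above: across these command classes the commit swaps SLF4J's parameterized calls for java.util.logging, whose Logger.log methods take MessageFormat-style indexed placeholders instead of SLF4J's {} markers. A minimal sketch of the two styles follows — it is not part of the commit, and the class name and messages are illustrative:

import java.util.logging.Level;
import java.util.logging.Logger;

public class LoggingStyleSketch {

	private static final Logger log = Logger.getLogger(LoggingStyleSketch.class.getName());

	public static void main(String[] args) {
		Object options = "example options";
		// SLF4J style removed by this commit: log.debug("Creating data source with {}", options);
		// java.util.logging equivalent at FINE level, with an indexed {0} placeholder:
		log.log(Level.FINE, "Creating data source with {0}", options);
		// Multiple parameters are passed as an Object[]:
		log.log(Level.FINE, "Creating writer for database {0} with {1}", new Object[] { "PostgreSQL", options });
	}
}

Note that FINE messages sit below the default console handler level (INFO), so they only appear once the JUL configuration lowers the level.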
@@ -1,12 +1,12 @@
package com.redis.riot.db;

import picocli.CommandLine;
import picocli.CommandLine.Option;

public class DatabaseExportOptions {

@CommandLine.Option(names = "--key-regex", description = "Regex for key-field extraction (default: ${DEFAULT-VALUE}).", paramLabel = "<str>")
@Option(names = "--key-regex", description = "Regex for key-field extraction (default: ${DEFAULT-VALUE}).", paramLabel = "<str>")
private String keyRegex = "\\w+:(?<id>.+)";
@CommandLine.Option(names = "--no-assert-updates", description = "Confirm every insert results in update of at least one row. True by default.", negatable = true)
@Option(names = "--no-assert-updates", description = "Confirm every insert results in update of at least one row. True by default.", negatable = true)
private boolean assertUpdates = true;

public boolean isAssertUpdates() {
@@ -24,4 +24,10 @@ public String getKeyRegex() {
public void setKeyRegex(String keyRegex) {
this.keyRegex = keyRegex;
}

@Override
public String toString() {
return "DatabaseExportOptions [keyRegex=" + keyRegex + ", assertUpdates=" + assertUpdates + "]";
}

}
@@ -2,11 +2,11 @@

import java.sql.Connection;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.sql.DataSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.item.database.JdbcCursorItemReader;
@@ -15,18 +15,18 @@

import com.redis.riot.AbstractImportCommand;

import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Mixin;
import picocli.CommandLine.Parameters;

@Command(name = "import", description = "Import from a database")
public class DatabaseImportCommand extends AbstractImportCommand {

private static final Logger log = LoggerFactory.getLogger(DatabaseImportCommand.class);
private static final Logger log = Logger.getLogger(DatabaseImportCommand.class.getName());

private static final String NAME = "db-import";

@CommandLine.Parameters(arity = "1", description = "SQL SELECT statement", paramLabel = "SQL")
@Parameters(arity = "1", description = "SQL SELECT statement", paramLabel = "SQL")
private String sql;
@Mixin
private DataSourceOptions dataSourceOptions = new DataSourceOptions();
@@ -39,11 +39,11 @@ public DataSourceOptions getDataSourceOptions() {

@Override
protected Job job(JobBuilder jobBuilder) throws Exception {
log.debug("Creating data source: {}", dataSourceOptions);
log.log(Level.FINE, "Creating data source: {0}", dataSourceOptions);
DataSource dataSource = dataSourceOptions.dataSource();
try (Connection connection = dataSource.getConnection()) {
String name = connection.getMetaData().getDatabaseProductName();
log.debug("Creating {} database reader: {}", name, importOptions);
log.log(Level.FINE, "Creating {0} database reader: {1}", new Object[] { name, importOptions });
JdbcCursorItemReaderBuilder<Map<String, Object>> builder = new JdbcCursorItemReaderBuilder<>();
builder.saveState(false);
builder.dataSource(dataSource);
@@ -5,19 +5,19 @@

import org.springframework.batch.item.database.builder.JdbcCursorItemReaderBuilder;

import picocli.CommandLine;
import picocli.CommandLine.Option;

public class DatabaseImportOptions {

@CommandLine.Option(names = "--fetch", description = "Number of rows to return with each fetch.", paramLabel = "<size>")
@Option(names = "--fetch", description = "Number of rows to return with each fetch.", paramLabel = "<size>")
private Optional<Integer> fetchSize = Optional.empty();
@CommandLine.Option(names = "--rows", description = "Max number of rows the ResultSet can contain.", paramLabel = "<count>")
@Option(names = "--rows", description = "Max number of rows the ResultSet can contain.", paramLabel = "<count>")
private Optional<Integer> maxRows = Optional.empty();
@CommandLine.Option(names = "--query-timeout", description = "The time in milliseconds for the query to timeout.", paramLabel = "<ms>")
@Option(names = "--query-timeout", description = "The time in milliseconds for the query to timeout.", paramLabel = "<ms>")
private Optional<Integer> queryTimeout = Optional.empty();
@CommandLine.Option(names = "--shared-connection", description = "Use same connection for cursor and other processing.", hidden = true)
@Option(names = "--shared-connection", description = "Use same connection for cursor and other processing.", hidden = true)
private boolean useSharedExtendedConnection;
@CommandLine.Option(names = "--verify", description = "Verify position of result set after row mapper.", hidden = true)
@Option(names = "--verify", description = "Verify position of result set after row mapper.", hidden = true)
private boolean verifyCursorPosition;

public void setFetchSize(int fetchSize) {
@@ -48,4 +48,11 @@ public void configure(JdbcCursorItemReaderBuilder<Map<String, Object>> builder)
builder.verifyCursorPosition(verifyCursorPosition);
}

@Override
public String toString() {
return "DatabaseImportOptions [fetchSize=" + fetchSize + ", maxRows=" + maxRows + ", queryTimeout="
+ queryTimeout + ", useSharedExtendedConnection=" + useSharedExtendedConnection
+ ", verifyCursorPosition=" + verifyCursorPosition + "]";
}

}
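The toString() overrides added to the option classes in this commit back those log parameters: when java.util.logging formats a {0} placeholder it renders the argument via toString(). A small sketch of the effect — the ExampleOptions class below is made up for illustration, not taken from the codebase:

import java.util.logging.Level;
import java.util.logging.Logger;

public class OptionsToStringSketch {

	// Stand-in for the option classes above; only the toString() override matters here.
	static class ExampleOptions {
		private final int fetchSize = 500;

		@Override
		public String toString() {
			return "ExampleOptions [fetchSize=" + fetchSize + "]";
		}
	}

	public static void main(String[] args) {
		Logger log = Logger.getLogger(OptionsToStringSketch.class.getName());
		// Prints "Creating reader: ExampleOptions [fetchSize=500]"; without the override,
		// the default Object representation (class name and hash code) would appear instead.
		log.log(Level.INFO, "Creating reader: {0}", new ExampleOptions());
	}
}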
1 change: 1 addition & 0 deletions connectors/riot-file/riot-file.gradle
@@ -15,6 +15,7 @@ dependencies {
implementation (group: 'org.springframework.cloud', name: 'spring-cloud-gcp-starter-storage', version: gcpVersion) {
exclude group: 'javax.annotation', module: 'javax.annotation-api'
}
testImplementation 'org.slf4j:slf4j-simple'
testImplementation project(':riot-test')
}

@@ -31,9 +31,10 @@

import io.lettuce.core.ScoredValue;
import io.lettuce.core.StreamMessage;
import picocli.CommandLine;
import picocli.CommandLine.ArgGroup;
import picocli.CommandLine.Command;
import picocli.CommandLine.Mixin;
import picocli.CommandLine.Parameters;

@Command(name = "import-dump", description = "Import Redis data files into Redis")
public class DumpFileImportCommand extends AbstractTransferCommand {
@@ -42,10 +43,10 @@ public class DumpFileImportCommand extends AbstractTransferCommand {

private static final String NAME = "dump-file-import";

@CommandLine.Parameters(arity = "0..*", description = "One ore more files or URLs", paramLabel = "FILE")
@Parameters(arity = "0..*", description = "One ore more files or URLs", paramLabel = "FILE")
private List<String> files;
@CommandLine.Mixin
private DumpFileImportOptions options = new DumpFileImportOptions();
@Mixin
private DumpFileOptions options = new DumpFileOptions();
@ArgGroup(exclusive = false, heading = "Writer options%n")
private RedisWriterOptions writerOptions = new RedisWriterOptions();

@@ -57,7 +58,7 @@ public void setFiles(List<String> files) {
this.files = files;
}

public DumpFileImportOptions getOptions() {
public DumpFileOptions getOptions() {
return options;
}

@@ -88,9 +89,8 @@ private List<TaskletStep> fileImportSteps() throws Exception {
}

private TaskletStep fileImportStep(String file) throws Exception {
DumpFileType fileType = DumpFileType.of(file, options.getType());
Resource resource = options.inputResource(file);
AbstractItemStreamItemReader<DataStructure<String>> reader = reader(fileType, resource);
AbstractItemStreamItemReader<DataStructure<String>> reader = reader(options.type(resource), resource);
reader.setName(file + "-" + NAME + "-reader");
return step(RiotStep.reader(reader).writer(writer()).name(file + "-" + NAME).taskName("Importing " + file)
.processor(this::processDataStructure).build()).build();

This file was deleted.

@@ -0,0 +1,47 @@
package com.redis.riot.file;

import java.util.Optional;

import org.springframework.core.io.Resource;

import picocli.CommandLine.Option;

public class DumpFileOptions extends FileOptions {

@Option(names = { "-t", "--filetype" }, description = "File type: ${COMPLETION-CANDIDATES}", paramLabel = "<type>")
protected Optional<DumpFileType> type = Optional.empty();

public Optional<DumpFileType> getType() {
return type;
}

public void setType(DumpFileType type) {
this.type = Optional.of(type);
}

@Override
public String toString() {
return "DumpFileOptions [type=" + type + ", encoding=" + encoding + ", gzip=" + gzip + ", s3=" + s3 + ", gcs="
+ gcs + "]";
}

public DumpFileType type(Resource resource) {
if (type.isPresent()) {
return type.get();
}
Optional<FileExtension> extension = FileUtils.extension(resource);
if (extension.isEmpty()) {
throw new UnknownFileTypeException("Unknown file extension");
}
switch (extension.get()) {
case XML:
return DumpFileType.XML;
case JSON:
return DumpFileType.JSON;
default:
throw new UnsupportedOperationException("Unsupported file extension: " + extension.get());
}

}

}
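A hypothetical usage sketch of the new type(Resource) resolution, assuming the riot-file classes above are on the classpath; the file name and the expected results are illustrative, not taken from the commit's tests:

import org.springframework.core.io.FileSystemResource;

import com.redis.riot.file.DumpFileOptions;
import com.redis.riot.file.DumpFileType;

public class DumpFileTypeResolutionSketch {

	public static void main(String[] args) {
		DumpFileOptions options = new DumpFileOptions();
		// With no explicit type, the dump type is derived from the resource's extension:
		DumpFileType derived = options.type(new FileSystemResource("dump.json")); // expected: JSON
		// An explicit type (the --filetype option, set programmatically here) takes precedence:
		options.setType(DumpFileType.XML);
		DumpFileType explicit = options.type(new FileSystemResource("dump.json")); // expected: XML
		System.out.println(derived + " then " + explicit);
	}
}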
@@ -1,19 +1,7 @@
package com.redis.riot.file;

import java.util.Optional;

public enum DumpFileType {

JSON, XML;
JSON, XML

public static DumpFileType of(String file, Optional<DumpFileType> type) {
if (type.isPresent()) {
return type.get();
}
Optional<String> extension = FileUtils.extension(file);
if (extension.isPresent() && extension.get().equalsIgnoreCase(FileUtils.EXTENSION_XML)) {
return XML;
}
return JSON;
}
}
@@ -17,8 +17,9 @@
import com.redis.riot.file.resource.XmlResourceItemWriterBuilder;
import com.redis.spring.batch.DataStructure;

import picocli.CommandLine;
import picocli.CommandLine.ArgGroup;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;

@Command(name = "export", description = "Export Redis data to JSON or XML files")
public class FileExportCommand extends AbstractExportCommand<DataStructure<String>> {
@@ -27,9 +28,9 @@ public class FileExportCommand extends AbstractExportCommand<DataStructure<String>> {

private static final String NAME = "file-export";

@CommandLine.Parameters(arity = "1", description = "File path or URL", paramLabel = "FILE")
@Parameters(arity = "1", description = "File path or URL", paramLabel = "FILE")
private String file;
@CommandLine.ArgGroup(exclusive = false, heading = "File export options%n")
@ArgGroup(exclusive = false, heading = "File export options%n")
private FileExportOptions options = new FileExportOptions();

public String getFile() {
@@ -52,8 +53,9 @@ protected Job job(JobBuilder jobBuilder) throws Exception {
}

private ItemWriter<DataStructure<String>> writer(WritableResource resource) {
DumpFileType fileType = DumpFileType.of(file, options.getType());
if (fileType == DumpFileType.XML) {
DumpFileType type = options.type(resource);
switch (type) {
case XML:
XmlResourceItemWriterBuilder<DataStructure<String>> xmlWriterBuilder = new XmlResourceItemWriterBuilder<>();
xmlWriterBuilder.name("xml-resource-item-writer");
xmlWriterBuilder.append(options.isAppend());
@@ -65,17 +67,20 @@ private ItemWriter<DataStructure<String>> writer(WritableResource resource) {
xmlWriterBuilder.saveState(false);
log.debug("Creating XML writer with {} for file {}", options, file);
return xmlWriterBuilder.build();
case JSON:
JsonResourceItemWriterBuilder<DataStructure<String>> jsonWriterBuilder = new JsonResourceItemWriterBuilder<>();
jsonWriterBuilder.name("json-resource-item-writer");
jsonWriterBuilder.append(options.isAppend());
jsonWriterBuilder.encoding(options.getEncoding().name());
jsonWriterBuilder.jsonObjectMarshaller(new JacksonJsonObjectMarshaller<>());
jsonWriterBuilder.lineSeparator(options.getLineSeparator());
jsonWriterBuilder.resource(resource);
jsonWriterBuilder.saveState(false);
log.debug("Creating JSON writer with {} for file {}", options, file);
return jsonWriterBuilder.build();
default:
throw new UnsupportedOperationException("Unsupported file type: " + type);
}
JsonResourceItemWriterBuilder<DataStructure<String>> jsonWriterBuilder = new JsonResourceItemWriterBuilder<>();
jsonWriterBuilder.name("json-resource-item-writer");
jsonWriterBuilder.append(options.isAppend());
jsonWriterBuilder.encoding(options.getEncoding().name());
jsonWriterBuilder.jsonObjectMarshaller(new JacksonJsonObjectMarshaller<>());
jsonWriterBuilder.lineSeparator(options.getLineSeparator());
jsonWriterBuilder.resource(resource);
jsonWriterBuilder.saveState(false);
log.debug("Creating JSON writer with {} for file {}", options, file);
return jsonWriterBuilder.build();
}

private JsonObjectMarshaller<DataStructure<String>> xmlMarshaller() {
(Diffs for the remaining changed files are not shown here.)
