Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[0.11.x] Implement Basic Authentication for Elasticsearch datastore: Fixes #1295 #1302

Merged
merged 1 commit into from
Feb 8, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion docs/site/content/en/openapi/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2557,7 +2557,6 @@ components:
description: Type of backend datastore
required:
- builtIn
- apiKey
- url
type: object
properties:
Expand All @@ -2570,6 +2569,12 @@ components:
url:
description: Elasticsearch url
type: string
username:
description: Elasticsearch username
type: string
password:
description: Elasticsearch password
type: string
ErrorDetails:
required:
- type
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,6 @@ public BaseDatastoreConfig() {
/** Creates a datastore config, marking it as built-in (internal) or external via {@code builtIn}. */
public BaseDatastoreConfig(Boolean builtIn) {
this.builtIn = builtIn;
}

/**
 * Validates this datastore configuration.
 *
 * @return a human-readable error message describing the problem, or {@code null} when the configuration is valid
 */
public abstract String validateConfig();
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package io.hyperfoil.tools.horreum.api.data.datastore;


import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.Schema;

Expand All @@ -12,7 +13,7 @@ public ElasticsearchDatastoreConfig() {
super(false);
}

@Schema(type = SchemaType.STRING, required = true,
@Schema(type = SchemaType.STRING,
description = "Elasticsearch API KEY")
public String apiKey;

Expand All @@ -21,4 +22,41 @@ public ElasticsearchDatastoreConfig() {
public String url;


@Schema(type = SchemaType.STRING,
description = "Elasticsearch username")
public String username;

@Schema(type = SchemaType.STRING,
description = "Elasticsearch password")
@JsonIgnore
public String password;


// Accepts the raw password during JSON deserialization under the "password" key.
// The backing field is @JsonIgnore'd, so the real value is written in but never
// serialized back out (see the masked getter mapped to the same JSON property).
@JsonProperty("password")
public void setSecrets(String password) {
this.password = password;
}

/**
 * Serialized in place of the real password: a fixed mask when a password
 * is stored, {@code null} when none is set. Ensures the secret itself is
 * never written out on the "password" JSON property.
 */
@JsonProperty("password")
public String getMaskedSecrets() {
// Never expose the stored secret; emit a constant placeholder instead.
return this.password == null ? null : "********";
}
/**
 * Checks that exactly one authentication scheme is configured: either an
 * API key, or a username/password pair — not both, and not neither.
 *
 * @return an error message when the configuration is invalid, or {@code null} when it is valid
 */
@Override
public String validateConfig() {
    // Treat null the same as empty: unset JSON fields arrive as null, and the
    // previous ""-only comparisons let an all-null config pass validation while
    // wrongly rejecting a username/password config whose apiKey was null.
    boolean hasApiKey = apiKey != null && !apiKey.isEmpty();
    boolean hasCredentials = username != null && !username.isEmpty()
            && password != null && !password.isEmpty();

    if (!hasApiKey && !hasCredentials) {
        return "Either apiKey or username and password must be set";
    }
    if (hasApiKey && hasCredentials) {
        return "Only apiKey or username and password can be set";
    }
    return null;
}


}
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,8 @@
description = "Built in backend datastore")
public class PostgresDatastoreConfig extends BaseDatastoreConfig {

/**
 * The built-in Postgres datastore takes no user-supplied settings,
 * so there is nothing to validate.
 *
 * @return always {@code null} (valid)
 */
@Override
public String validateConfig() {
return null;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ public interface ConfigService {
@APIResponseSchema(value = Integer.class,
responseDescription = "The ID for the new Datastore",
responseCode = "200")
Integer newDatastore(Datastore datastore);
Integer newDatastore(@NotNull Datastore datastore);

@PUT
@Path("datastore")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType;
import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO;
import jakarta.ws.rs.BadRequestException;
Expand All @@ -22,6 +23,8 @@ public interface Datastore{

UploadType uploadType();

String validateConfig(Object config);

enum UploadType {
SINGLE, MUILTI
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,15 @@
import io.hyperfoil.tools.horreum.api.data.datastore.ElasticsearchDatastoreConfig;
import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.ws.rs.BadRequestException;
import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.http.message.BasicHeader;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
Expand All @@ -30,6 +36,9 @@ public class ElasticsearchDatastore implements Datastore {

protected static final Logger log = Logger.getLogger(ElasticsearchDatastore.class);

@Inject
ObjectMapper mapper;

Map<String, RestClient> hostCache = new ConcurrentHashMap<>();

@Override
Expand All @@ -40,6 +49,8 @@ public DatastoreResponse handleRun(JsonNode payload,
ObjectMapper mapper)
throws BadRequestException{

RestClient restClient = null;

try {

if ( metaData != null ){
Expand All @@ -52,12 +63,21 @@ public DatastoreResponse handleRun(JsonNode payload,

if ( elasticsearchDatastoreConfig != null ){

RestClientBuilder builder = RestClient.builder(HttpHost.create(elasticsearchDatastoreConfig.url))
.setDefaultHeaders(new Header[]{
new BasicHeader("Authorization", "ApiKey " + elasticsearchDatastoreConfig.apiKey)
});
RestClientBuilder builder = RestClient.builder(HttpHost.create(elasticsearchDatastoreConfig.url));
if( elasticsearchDatastoreConfig.apiKey != null) {
builder.setDefaultHeaders(new Header[]{
new BasicHeader("Authorization", "ApiKey " + elasticsearchDatastoreConfig.apiKey)
});
} else {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY,
new UsernamePasswordCredentials(elasticsearchDatastoreConfig.username, elasticsearchDatastoreConfig.password));

builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
.setDefaultCredentialsProvider(credentialsProvider));
}

RestClient restClient = builder.build();
restClient = builder.build();

if ( restClient == null ) {
log.warn("Could not find elasticsearch datastore: " + configuration.name);
Expand Down Expand Up @@ -134,6 +154,8 @@ public DatastoreResponse handleRun(JsonNode payload,
extractedResults = mapper.createArrayNode();

//2nd retrieve the docs from 2nd Index and combine into a single result with metadata and doc contents
final RestClient finalRestClient = restClient; //copy of restClient for use in lambda

elasticResults.forEach(jsonNode -> {

ObjectNode result = ((ObjectNode) jsonNode.get("_source")).put("$schema", schemaUri);
Expand All @@ -149,7 +171,7 @@ public DatastoreResponse handleRun(JsonNode payload,
"/" + multiIndexQuery.targetIndex + "/_doc/" + jsonNode.get("_source").get(multiIndexQuery.docField).textValue());

try {
docString = extracted(restClient, subRequest);
docString = extracted(finalRestClient, subRequest);

} catch (IOException e) {

Expand Down Expand Up @@ -184,6 +206,15 @@ public DatastoreResponse handleRun(JsonNode payload,
} catch (IOException e) {
throw new RuntimeException(e);
}
finally {
if ( restClient != null ) {
try {
restClient.close();
} catch (IOException e) {
log.errorf("Error closing rest client: %s", e.getMessage());
}
}
}
}

private static String extracted(RestClient restClient, Request request) throws IOException {
Expand All @@ -205,6 +236,15 @@ public UploadType uploadType() {
return UploadType.MUILTI;
}

/**
 * Deserializes the raw JSON configuration into an
 * {@link ElasticsearchDatastoreConfig} and delegates to its own validation.
 *
 * @param config the datastore configuration, expected to be a Jackson tree node
 * @return an error message when the configuration is invalid or unreadable, {@code null} when valid
 */
@Override
public String validateConfig(Object config) {
    // Guard the cast: a non-tree payload previously escaped this method as an
    // uncaught ClassCastException (only JsonProcessingException was handled).
    if (!(config instanceof JsonNode)) {
        return "Unable to read configuration. If the problem persists, please contact a system administrator";
    }
    try {
        return mapper.treeToValue((JsonNode) config, ElasticsearchDatastoreConfig.class).validateConfig();
    } catch (JsonProcessingException e) {
        return "Unable to read configuration. If the problem persists, please contact a system administrator";
    }
}

private static class ElasticRequest {
public ElasticRequest() {
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType;
import io.hyperfoil.tools.horreum.api.data.datastore.PostgresDatastoreConfig;
import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.ws.rs.BadRequestException;
Expand Down Expand Up @@ -32,5 +34,11 @@ public UploadType uploadType() {
return UploadType.SINGLE;
}

/**
 * No-op validation: the Postgres datastore is the internal built-in
 * backend and is not user-configurable.
 *
 * @param config ignored
 * @return always {@code null} (valid)
 */
@Override
public String validateConfig(Object config) {
//do not validate internal datastore
return null;
}


}
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
import io.hyperfoil.tools.horreum.api.data.Test;
import io.hyperfoil.tools.horreum.api.data.datastore.Datastore;
import io.hyperfoil.tools.horreum.api.services.ConfigService;
import io.hyperfoil.tools.horreum.datastore.BackendResolver;
import io.hyperfoil.tools.horreum.datastore.DatastoreResponse;
import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO;
import io.hyperfoil.tools.horreum.entity.data.TestDAO;
import io.hyperfoil.tools.horreum.mapper.DatasourceMapper;
Expand All @@ -16,14 +18,18 @@
import jakarta.inject.Inject;
import jakarta.persistence.EntityManager;
import jakarta.transaction.Transactional;
import jakarta.validation.constraints.NotNull;
import org.eclipse.microprofile.config.ConfigProvider;
import org.jboss.logging.Logger;

import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import static org.keycloak.util.JsonSerialization.mapper;

@ApplicationScoped
public class ConfigServiceImpl implements ConfigService {

Expand All @@ -35,6 +41,9 @@ public class ConfigServiceImpl implements ConfigService {
@Inject
EntityManager em;

@Inject
BackendResolver backendResolver;


@Override
public KeycloakConfig keycloak() {
Expand All @@ -60,7 +69,7 @@ public List<Datastore> datastores(String team) {
String queryWhere = "where access = 0";
Set<String> roles = identity.getRoles();
long rolesCount = roles.stream().filter(role -> role.endsWith("-team")).count();
if (rolesCount != 0) { //user has access to team, retrieve the team datastores as well
if (rolesCount != 0) { //user has access to team, retrieve the team datastore as well
queryWhere = queryWhere.concat(" or owner in ('" + team + "')");
}
List<DatastoreConfigDAO> backends = DatastoreConfigDAO.list(queryWhere);
Expand All @@ -82,19 +91,37 @@ public Integer newDatastore(Datastore datastore) {
if (dao.owner == null) {
List<String> uploaders = identity.getRoles().stream().filter(role -> role.endsWith("-uploader")).collect(Collectors.toList());
if (uploaders.size() != 1) {
log.debugf("Failed to create new backend %s: no owner, available uploaders: %s", dao.name, uploaders);
log.debugf("Failed to create datastore %s: no owner, available uploaders: %s", dao.name, uploaders);
throw ServiceException.badRequest("Missing owner and cannot select single default owners; this user has these uploader roles: " + uploaders);
}
String uploader = uploaders.get(0);
dao.owner = uploader.substring(0, uploader.length() - 9) + "-team";
} else if (!identity.getRoles().contains(dao.owner)) {
log.debugf("Failed to create backend configuration %s: requested owner %s, available roles: %s", dao.name, dao.owner, identity.getRoles());
throw ServiceException.badRequest("This user does not have permissions to upload backend configuration for owner=" + dao.owner);
log.debugf("Failed to create datastore %s: requested owner %s, available roles: %s", dao.name, dao.owner, identity.getRoles());
throw ServiceException.badRequest("This user does not have permissions to upload datastore for owner=" + dao.owner);
}
if (dao.access == null) {
dao.access = Access.PRIVATE;
}
log.debugf("Uploading with owner=%s and access=%s", dao.owner, dao.access);

io.hyperfoil.tools.horreum.datastore.Datastore datastoreImpl;
try {
datastoreImpl = backendResolver.getBackend(datastore.type);
} catch (IllegalStateException e) {
throw ServiceException.badRequest("Unknown datastore type: " + datastore.type + ". Please try again, if the problem persists please contact the system administrator.");
}

if ( datastoreImpl == null ){
throw ServiceException.badRequest("Unknown datastore type: " + datastore.type);
}

String error = datastoreImpl.validateConfig(datastore.config);

if ( error != null ) {
throw ServiceException.badRequest(error);
}

log.debugf("Creating new Datastore with owner=%s and access=%s", dao.owner, dao.access);

try {
em.persist(dao);
Expand Down
Loading
Loading