diff --git a/docs/site/content/en/openapi/openapi.yaml b/docs/site/content/en/openapi/openapi.yaml index 3ef780893..6e35bfc98 100644 --- a/docs/site/content/en/openapi/openapi.yaml +++ b/docs/site/content/en/openapi/openapi.yaml @@ -66,6 +66,21 @@ paths: schema: format: int32 type: integer + /api/config/datastore/types: + get: + tags: + - Config + description: Obtain list of available datastore types + operationId: datastoreTypes + responses: + "200": + description: OK + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/TypeConfig" /api/config/datastore/{id}: delete: tags: @@ -77,7 +92,8 @@ paths: in: path required: true schema: - type: string + format: int32 + type: integer responses: "204": description: No Content @@ -92,7 +108,8 @@ paths: in: path required: true schema: - type: string + format: int32 + type: integer responses: "200": description: OK @@ -2569,6 +2586,15 @@ paths: description: No Content components: schemas: + APIKeyAuth: + type: object + properties: + type: + description: type + type: string + apiKey: + description: Api key + type: string Access: description: "Resources have different visibility within the UI. 'PUBLIC', 'PROTECTED'\ \ and 'PRIVATE'. Restricted resources are not visible to users who do not\ @@ -2710,17 +2736,26 @@ components: CollectorApiDatastoreConfig: description: Type of backend datastore required: + - authentication - builtIn - - apiKey - url type: object properties: + authentication: + type: object + oneOf: + - $ref: "#/components/schemas/NoAuth" + - $ref: "#/components/schemas/APIKeyAuth" + - $ref: "#/components/schemas/UsernamePassAuth" + discriminator: + propertyName: type + mapping: + none: "#/components/schemas/NoAuth" + api-key: "#/components/schemas/APIKeyAuth" + username: "#/components/schemas/UsernamePassAuth" builtIn: description: Built In type: boolean - apiKey: - description: Collector API KEY - type: string url: description: "Collector url, e.g. https://collector.foci.life/api/v1/image-stats" type: string @@ -2992,13 +3027,12 @@ components: items: $ref: "#/components/schemas/ValidationError" Datastore: - description: Type of backend datastore + description: Instance of backend datastore required: - access - owner - id - name - - builtIn - config - type type: object @@ -3029,14 +3063,10 @@ components: \ Test definition" type: string example: Perf Elasticsearch - builtIn: - description: Is this a built-in datastore? 
Built-in datastores cannot be - deleted or modified - type: boolean - example: false config: type: object oneOf: + - $ref: "#/components/schemas/CollectorApiDatastoreConfig" - $ref: "#/components/schemas/ElasticsearchDatastoreConfig" - $ref: "#/components/schemas/PostgresDatastoreConfig" type: @@ -3078,25 +3108,29 @@ components: ElasticsearchDatastoreConfig: description: Type of backend datastore required: + - authentication - builtIn - url type: object properties: + authentication: + type: object + oneOf: + - $ref: "#/components/schemas/NoAuth" + - $ref: "#/components/schemas/APIKeyAuth" + - $ref: "#/components/schemas/UsernamePassAuth" + discriminator: + propertyName: type + mapping: + none: "#/components/schemas/NoAuth" + api-key: "#/components/schemas/APIKeyAuth" + username: "#/components/schemas/UsernamePassAuth" builtIn: description: Built In type: boolean - apiKey: - description: Elasticsearch API KEY - type: string url: description: Elasticsearch url type: string - username: - description: Elasticsearch username - type: string - password: - description: Elasticsearch password - type: string ErrorDetails: required: - type @@ -3675,6 +3709,12 @@ components: testId: format: int32 type: integer + NoAuth: + type: object + properties: + type: + description: type + type: string PersistentLog: description: Persistent Log required: @@ -3699,9 +3739,22 @@ components: PostgresDatastoreConfig: description: Built in backend datastore required: + - authentication - builtIn type: object properties: + authentication: + type: object + oneOf: + - $ref: "#/components/schemas/NoAuth" + - $ref: "#/components/schemas/APIKeyAuth" + - $ref: "#/components/schemas/UsernamePassAuth" + discriminator: + propertyName: type + mapping: + none: "#/components/schemas/NoAuth" + api-key: "#/components/schemas/APIKeyAuth" + username: "#/components/schemas/UsernamePassAuth" builtIn: description: Built In type: boolean @@ -4883,6 +4936,21 @@ components: description: Transformer name type: string example: my-dataset-transformer + TypeConfig: + type: object + properties: + enumName: + type: string + name: + type: string + label: + type: string + supportedAuths: + type: array + items: + type: string + builtIn: + type: boolean UserData: required: - id @@ -4899,6 +4967,18 @@ components: type: string email: type: string + UsernamePassAuth: + type: object + properties: + type: + description: type + type: string + username: + description: Username + type: string + password: + description: Password + type: string ValidationError: required: - schemaId diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/BaseDatastoreConfig.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/BaseDatastoreConfig.java index 929e92ebf..685b0891c 100644 --- a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/BaseDatastoreConfig.java +++ b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/BaseDatastoreConfig.java @@ -1,19 +1,51 @@ package io.hyperfoil.tools.horreum.api.data.datastore; +import jakarta.validation.constraints.NotNull; + import org.eclipse.microprofile.openapi.annotations.enums.SchemaType; +import org.eclipse.microprofile.openapi.annotations.media.DiscriminatorMapping; import org.eclipse.microprofile.openapi.annotations.media.Schema; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +import 
io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.UsernamePassAuth; + public abstract class BaseDatastoreConfig { - @Schema(type = SchemaType.BOOLEAN, required = true, description = "Built In") - public Boolean builtIn = true; + @NotNull + @JsonProperty(required = true) + @Schema(type = SchemaType.OBJECT, discriminatorProperty = "type", discriminatorMapping = { + @DiscriminatorMapping(schema = NoAuth.class, value = NoAuth._TYPE), + @DiscriminatorMapping(schema = APIKeyAuth.class, value = APIKeyAuth._TYPE), + @DiscriminatorMapping(schema = UsernamePassAuth.class, value = UsernamePassAuth._TYPE) + }, oneOf = { //subtype mapping for openapi + NoAuth.class, + APIKeyAuth.class, + UsernamePassAuth.class + }) + @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") + @JsonSubTypes({ //subtype mapping for jackson + @JsonSubTypes.Type(value = NoAuth.class, name = NoAuth._TYPE), + @JsonSubTypes.Type(value = APIKeyAuth.class, name = APIKeyAuth._TYPE), + @JsonSubTypes.Type(value = UsernamePassAuth.class, name = UsernamePassAuth._TYPE) + }) + public Object authentication; //the python generator is failing if this is a concrete type - public BaseDatastoreConfig() { - } + @Schema(type = SchemaType.BOOLEAN, required = true, description = "Built In") + public Boolean builtIn; public BaseDatastoreConfig(Boolean builtIn) { this.builtIn = builtIn; } + public BaseDatastoreConfig() { + this.builtIn = false; + } + public abstract String validateConfig(); + } diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/CollectorApiDatastoreConfig.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/CollectorApiDatastoreConfig.java index 21b2d5927..f87906988 100644 --- a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/CollectorApiDatastoreConfig.java +++ b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/CollectorApiDatastoreConfig.java @@ -3,26 +3,30 @@ import org.eclipse.microprofile.openapi.annotations.enums.SchemaType; import org.eclipse.microprofile.openapi.annotations.media.Schema; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth; + @Schema(type = SchemaType.OBJECT, required = true, description = "Type of backend datastore") public class CollectorApiDatastoreConfig extends BaseDatastoreConfig { + public static final String[] auths = { NoAuth._TYPE, APIKeyAuth._TYPE }; + public static final String name = "Collectorapi"; + public static final String label = "Collector API"; + public static final Boolean builtIn = false; + public CollectorApiDatastoreConfig() { - super(false); } - @Schema(type = SchemaType.STRING, required = true, description = "Collector API KEY") - public String apiKey; - @Schema(type = SchemaType.STRING, required = true, description = "Collector url, e.g. 
https://collector.foci.life/api/v1/image-stats") public String url; @Override public String validateConfig() { - if ("".equals(apiKey)) { - return "apiKey must be set"; - } - if ("".equals(url)) { - return "url must be set"; + if (authentication instanceof APIKeyAuth) { + APIKeyAuth apiKeyAuth = (APIKeyAuth) authentication; + if (apiKeyAuth.apiKey.isBlank() || apiKeyAuth.apiKey == null) { + return "apiKey must be set"; + } } return null; diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/Datastore.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/Datastore.java index 4f63bb9c6..aa443d437 100644 --- a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/Datastore.java +++ b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/Datastore.java @@ -10,7 +10,7 @@ import io.hyperfoil.tools.horreum.api.data.ProtectedType; -@Schema(type = SchemaType.OBJECT, required = true, description = "Type of backend datastore") +@Schema(type = SchemaType.OBJECT, required = true, description = "Instance of backend datastore") public class Datastore extends ProtectedType { @JsonProperty(required = true) @Schema(description = "Unique Datastore id", example = "101") @@ -21,14 +21,10 @@ public class Datastore extends ProtectedType { @Schema(description = "Name of the datastore, used to identify the datastore in the Test definition", example = "Perf Elasticsearch") public String name; - @NotNull - @JsonProperty(required = true) - @Schema(description = "Is this a built-in datastore? Built-in datastores cannot be deleted or modified", example = "false") - public Boolean builtIn; - @NotNull @JsonProperty(required = true) @Schema(type = SchemaType.OBJECT, oneOf = { + CollectorApiDatastoreConfig.class, ElasticsearchDatastoreConfig.class, PostgresDatastoreConfig.class }) diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/DatastoreType.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/DatastoreType.java index 8770743f5..f7c6a76f5 100644 --- a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/DatastoreType.java +++ b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/DatastoreType.java @@ -1,32 +1,48 @@ package io.hyperfoil.tools.horreum.api.data.datastore; +import java.lang.reflect.Field; + import org.eclipse.microprofile.openapi.annotations.enums.SchemaType; import org.eclipse.microprofile.openapi.annotations.media.Schema; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.core.type.TypeReference; @Schema(type = SchemaType.STRING, required = true, description = "Type of backend datastore") public enum DatastoreType { - POSTGRES("POSTGRES", new TypeReference() { - }), - ELASTICSEARCH("ELASTICSEARCH", new TypeReference() { - }), - COLLECTORAPI("COLLECTORAPI", new TypeReference() { - }); + POSTGRES(PostgresDatastoreConfig.class), + ELASTICSEARCH(ElasticsearchDatastoreConfig.class), + COLLECTORAPI(CollectorApiDatastoreConfig.class); private static final DatastoreType[] VALUES = values(); - + // private final String name; + private final String label; private final String name; - private final TypeReference typeReference; + private final String[] supportedAuths; + private final Boolean buildIn; + private final Class klass; - private DatastoreType(String name, TypeReference typeReference) { - this.typeReference = typeReference; - this.name = name; + DatastoreType(Class klass) { + this.klass = 
klass; + // this.name = extractField(klass, "name"); + this.label = extractField(klass, "label"); + this.name = extractField(klass, "name"); + this.supportedAuths = extractField(klass, "auths"); + this.buildIn = extractField(klass, "builtIn"); } - public TypeReference getTypeReference() { - return (TypeReference) typeReference; + private static T extractField(Class klass, String name) { + try { + Field supportedAuthField = klass.getField(name); + return (T) supportedAuthField.get(null); + } catch (NoSuchFieldException | IllegalAccessException e) { + return null; + } catch (NullPointerException e) { + return null; + } + } + + public Class getTypeReference() { + return (Class) klass; } @JsonCreator @@ -37,4 +53,25 @@ public static DatastoreType fromString(String str) { return DatastoreType.valueOf(str); } } + + public TypeConfig getConfig() { + return new TypeConfig(this, name, label, buildIn, supportedAuths); + } + + public static class TypeConfig { + public String enumName; + public String name; + public String label; + + public String[] supportedAuths; + public Boolean builtIn; + + public TypeConfig(DatastoreType type, String name, String label, Boolean builtIn, String[] supportedAuths) { + this.enumName = type.name(); + this.name = name; + this.label = label; + this.builtIn = builtIn; + this.supportedAuths = supportedAuths; + } + } } diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/ElasticsearchDatastoreConfig.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/ElasticsearchDatastoreConfig.java index eaed6afce..7fc4714b9 100644 --- a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/ElasticsearchDatastoreConfig.java +++ b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/ElasticsearchDatastoreConfig.java @@ -1,62 +1,44 @@ package io.hyperfoil.tools.horreum.api.data.datastore; -import static java.util.Objects.requireNonNullElse; - import org.eclipse.microprofile.openapi.annotations.enums.SchemaType; import org.eclipse.microprofile.openapi.annotations.media.Schema; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.UsernamePassAuth; @Schema(type = SchemaType.OBJECT, required = true, description = "Type of backend datastore") public class ElasticsearchDatastoreConfig extends BaseDatastoreConfig { + public static final String[] auths = { NoAuth._TYPE, APIKeyAuth._TYPE, UsernamePassAuth._TYPE }; + public static final String name = "Elasticsearch"; + public static final String label = "Elasticsearch"; + public static final Boolean builtIn = false; + public ElasticsearchDatastoreConfig() { - super(false); - } - @Schema(type = SchemaType.STRING, description = "Elasticsearch API KEY") - public String apiKey; + } @Schema(type = SchemaType.STRING, required = true, description = "Elasticsearch url") public String url; - @Schema(type = SchemaType.STRING, description = "Elasticsearch username") - public String username; - - @Schema(type = SchemaType.STRING, description = "Elasticsearch password") - @JsonIgnore - public String password; - - @JsonProperty("password") - public void setSecrets(String password) { - this.password = password; - } - - @JsonProperty("password") - public String getMaskedSecrets() { - if (this.password != null) { - return 
"********"; - } else { - return null; - } - } - @Override public String validateConfig() { - String _apiKey = requireNonNullElse(apiKey, ""); - String _username = requireNonNullElse(username, ""); - String _password = requireNonNullElse(password, ""); - - if ("".equals(_apiKey) && ("".equals(_username) || "".equals(_password))) { - return "Either apiKey or username and password must be set"; + //TODO:: replace with pattern matching after upgrading to java 17 + if (authentication instanceof APIKeyAuth) { + APIKeyAuth apiKeyAuth = (APIKeyAuth) authentication; + if (apiKeyAuth.apiKey == null || apiKeyAuth.apiKey.isBlank()) { + return "apiKey must be set"; + } + } else if (authentication instanceof UsernamePassAuth) { + UsernamePassAuth usernamePassAuth = (UsernamePassAuth) authentication; + + if (usernamePassAuth.username == null || usernamePassAuth.username.isBlank() + || usernamePassAuth.password == null || usernamePassAuth.password.isBlank()) { + return "username and password must be set"; + } } - - if (!"".equals(_apiKey) && !("".equals(_username) || "".equals(_password))) { - return "Only apiKey or username and password can be set"; - } - return null; } diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/PostgresDatastoreConfig.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/PostgresDatastoreConfig.java index 2082f764d..9bd393159 100644 --- a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/PostgresDatastoreConfig.java +++ b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/PostgresDatastoreConfig.java @@ -3,9 +3,20 @@ import org.eclipse.microprofile.openapi.annotations.enums.SchemaType; import org.eclipse.microprofile.openapi.annotations.media.Schema; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth; + @Schema(type = SchemaType.OBJECT, required = true, description = "Built in backend datastore") public class PostgresDatastoreConfig extends BaseDatastoreConfig { + public static final String[] auths = { NoAuth._TYPE }; + public static final String name = "Postgres"; + public static final String label = "Postgres"; + public static final Boolean builtIn = true; + + public PostgresDatastoreConfig() { + super(true); + } + @Override public String validateConfig() { return null; diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/APIKeyAuth.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/APIKeyAuth.java new file mode 100644 index 000000000..3ee8639f8 --- /dev/null +++ b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/APIKeyAuth.java @@ -0,0 +1,18 @@ +package io.hyperfoil.tools.horreum.api.data.datastore.auth; + +import org.eclipse.microprofile.openapi.annotations.enums.SchemaType; +import org.eclipse.microprofile.openapi.annotations.media.Schema; + +public class APIKeyAuth { + public static final String _TYPE = "api-key"; + + @Schema(type = SchemaType.STRING, description = "type") + public String type; + + @Schema(type = SchemaType.STRING, description = "Api key") + public String apiKey; + + public APIKeyAuth() { + this.type = _TYPE; + } +} diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/NoAuth.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/NoAuth.java new file mode 100644 index 000000000..705b2fd78 --- /dev/null +++ 
b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/NoAuth.java @@ -0,0 +1,15 @@ +package io.hyperfoil.tools.horreum.api.data.datastore.auth; + +import org.eclipse.microprofile.openapi.annotations.enums.SchemaType; +import org.eclipse.microprofile.openapi.annotations.media.Schema; + +public class NoAuth { + public static final String _TYPE = "none"; + + @Schema(type = SchemaType.STRING, description = "type") + public String type; + + public NoAuth() { + this.type = _TYPE; + } +} diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/UsernamePassAuth.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/UsernamePassAuth.java new file mode 100644 index 000000000..676188ca3 --- /dev/null +++ b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/UsernamePassAuth.java @@ -0,0 +1,21 @@ +package io.hyperfoil.tools.horreum.api.data.datastore.auth; + +import org.eclipse.microprofile.openapi.annotations.enums.SchemaType; +import org.eclipse.microprofile.openapi.annotations.media.Schema; + +public class UsernamePassAuth { + public static final String _TYPE = "username"; + + @Schema(type = SchemaType.STRING, description = "type") + public String type; + + @Schema(type = SchemaType.STRING, description = "Username") + public String username; + + @Schema(type = SchemaType.STRING, description = "Password") + public String password; + + public UsernamePassAuth() { + this.type = _TYPE; + } +} diff --git a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/services/ConfigService.java b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/services/ConfigService.java index 6949ccf60..bcd02746b 100644 --- a/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/services/ConfigService.java +++ b/horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/services/ConfigService.java @@ -19,6 +19,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import io.hyperfoil.tools.horreum.api.data.datastore.Datastore; +import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType; import io.quarkus.runtime.Startup; @Startup @@ -49,6 +50,11 @@ public interface ConfigService { }) List datastores(@PathParam("team") String team); + @GET + @Path("datastore/types") + @Operation(description = "Obtain list of available datastore types") + List datastoreTypes(); + @POST @Path("datastore") @Operation(description = "Create a new Datastore") @@ -66,12 +72,12 @@ public interface ConfigService { @GET @Path("datastore/{id}/test") @Operation(description = "Test a Datastore connection") - DatastoreTestResponse testDatastore(@PathParam("id") String datastoreId); + DatastoreTestResponse testDatastore(@PathParam("id") Integer datastoreId); @DELETE @Path("datastore/{id}") @Operation(description = "Test a Datastore") - void deleteDatastore(@PathParam("id") String datastoreId); + void deleteDatastore(@PathParam("id") Integer datastoreId); class VersionInfo { @Schema(description = "Version of Horreum", example = "0.9.4") diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/BackendResolver.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/BackendResolver.java deleted file mode 100644 index fc9e4fe8c..000000000 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/BackendResolver.java +++ /dev/null @@ -1,24 +0,0 @@ -package io.hyperfoil.tools.horreum.datastore; - -import java.util.List; - -import jakarta.enterprise.context.ApplicationScoped; 
-import jakarta.inject.Inject; - -import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType; -import io.quarkus.arc.All; - -@ApplicationScoped -public class BackendResolver { - @Inject - @All - List backendStores; - - public Datastore getBackend(DatastoreType type) { - return backendStores.stream() - .filter(store -> store.type().equals(type)) - .findFirst() - .orElseThrow(() -> new IllegalStateException("Unknown datastore type: " + type)); - } - -} diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/CollectorApiDatastore.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/CollectorApiDatastore.java index 2e802859a..9a5b8f35a 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/CollectorApiDatastore.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/CollectorApiDatastore.java @@ -25,6 +25,7 @@ import io.hyperfoil.tools.horreum.api.data.datastore.CollectorApiDatastoreConfig; import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth; import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO; import io.hyperfoil.tools.horreum.svc.ServiceException; @@ -66,8 +67,10 @@ public DatastoreResponse handleRun(JsonNode payload, + "&newerThan=" + newerThan + "&olderThan=" + olderThan); HttpRequest.Builder builder = HttpRequest.newBuilder().uri(uri); - builder.header("Content-Type", "application/json") - .header("token", jsonDatastoreConfig.apiKey); + builder.header("Content-Type", "application/json"); + if (jsonDatastoreConfig.authentication instanceof APIKeyAuth) { + builder.header("token", ((APIKeyAuth) jsonDatastoreConfig.authentication).apiKey); + } HttpRequest request = builder.build(); HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); if (response.statusCode() != Response.Status.OK.getStatusCode()) { @@ -93,10 +96,17 @@ private static void verifyPayload(ObjectMapper mapper, CollectorApiDatastoreConf // Verify that the tag is in the distinct list of tags URI tagsUri = URI.create(jsonDatastoreConfig.url + "/tags/distinct"); HttpRequest.Builder tagsBuilder = HttpRequest.newBuilder().uri(tagsUri); - HttpRequest tagsRequest = tagsBuilder - .header("Content-Type", "application/json") - .header("token", jsonDatastoreConfig.apiKey).build(); - HttpResponse response = client.send(tagsRequest, HttpResponse.BodyHandlers.ofString()); + + if (jsonDatastoreConfig.authentication instanceof APIKeyAuth) { + tagsBuilder + .header("Content-Type", "application/json") + .header("token", ((APIKeyAuth) jsonDatastoreConfig.authentication).apiKey); + } else { + tagsBuilder + .header("Content-Type", "application/json"); + + } + HttpResponse response = client.send(tagsBuilder.build(), HttpResponse.BodyHandlers.ofString()); String[] distinctTags; try { distinctTags = mapper.readValue(response.body(), String[].class); @@ -141,7 +151,9 @@ private static CollectorApiDatastoreConfig getCollectorApiDatastoreConfig(Datast log.error("Could not find collector API datastore: " + configuration.name); throw ServiceException.serverError("Could not find CollectorAPI datastore: " + configuration.name); } - assert jsonDatastoreConfig.apiKey != null : "API key must be set"; + if (jsonDatastoreConfig.authentication instanceof APIKeyAuth) { + assert ((APIKeyAuth) jsonDatastoreConfig.authentication).apiKey != null : "API key must be set"; + } assert jsonDatastoreConfig.url != null : "URL must be set"; return 
jsonDatastoreConfig; } diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/DatastoreResolver.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/DatastoreResolver.java new file mode 100644 index 000000000..ae51373e4 --- /dev/null +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/DatastoreResolver.java @@ -0,0 +1,46 @@ +package io.hyperfoil.tools.horreum.datastore; + +import java.util.List; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType; +import io.hyperfoil.tools.horreum.svc.ServiceException; +import io.quarkus.arc.All; + +@ApplicationScoped +public class DatastoreResolver { + @Inject + @All + List datastores; + + public Datastore getDatastore(DatastoreType type) { + return datastores.stream() + .filter(store -> store.type().equals(type)) + .findFirst() + .orElseThrow(() -> new IllegalStateException("Unknown datastore type: " + type)); + } + + public void validatedDatastoreConfig(DatastoreType type, Object config) { + io.hyperfoil.tools.horreum.datastore.Datastore datastoreImpl; + try { + datastoreImpl = this.getDatastore(type); + } catch (IllegalStateException e) { + throw ServiceException.badRequest("Unknown datastore type: " + type + + ". Please try again, if the problem persists please contact the system administrator."); + } + + if (datastoreImpl == null) { + throw ServiceException.badRequest("Unknown datastore type: " + type); + } + + String error = datastoreImpl.validateConfig(config); + + if (error != null) { + throw ServiceException.badRequest(error); + } + + } + +} diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/ElasticsearchDatastore.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/ElasticsearchDatastore.java index c5f6f1304..7d0b468fe 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/ElasticsearchDatastore.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/ElasticsearchDatastore.java @@ -2,9 +2,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.util.Map; import java.util.Optional; -import java.util.concurrent.ConcurrentHashMap; import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; @@ -32,6 +30,8 @@ import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType; import io.hyperfoil.tools.horreum.api.data.datastore.ElasticsearchDatastoreConfig; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.UsernamePassAuth; import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO; @ApplicationScoped @@ -42,8 +42,6 @@ public class ElasticsearchDatastore implements Datastore { @Inject ObjectMapper mapper; - Map hostCache = new ConcurrentHashMap<>(); - @Override public DatastoreResponse handleRun(JsonNode payload, JsonNode metaData, @@ -67,18 +65,27 @@ public DatastoreResponse handleRun(JsonNode payload, if (elasticsearchDatastoreConfig != null) { RestClientBuilder builder = RestClient.builder(HttpHost.create(elasticsearchDatastoreConfig.url)); - if (elasticsearchDatastoreConfig.apiKey != null) { + + if (elasticsearchDatastoreConfig.authentication instanceof APIKeyAuth) { + + APIKeyAuth apiKeyAuth = (((APIKeyAuth) elasticsearchDatastoreConfig.authentication)); + builder.setDefaultHeaders(new Header[] { - new BasicHeader("Authorization", "ApiKey " + 
elasticsearchDatastoreConfig.apiKey) + new BasicHeader("Authorization", "ApiKey " + apiKeyAuth.apiKey) }); - } else { + + } else if (elasticsearchDatastoreConfig.authentication instanceof UsernamePassAuth) { final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + + UsernamePassAuth usernamePassAuth = (((UsernamePassAuth) elasticsearchDatastoreConfig.authentication)); + credentialsProvider.setCredentials(AuthScope.ANY, - new UsernamePasswordCredentials(elasticsearchDatastoreConfig.username, - elasticsearchDatastoreConfig.password)); + new UsernamePasswordCredentials(usernamePassAuth.username, + usernamePassAuth.password)); builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder .setDefaultCredentialsProvider(credentialsProvider)); + } restClient = builder.build(); @@ -146,62 +153,64 @@ public DatastoreResponse handleRun(JsonNode payload, throw new BadRequestException("Schema is required for search requests"); } - //TODO: error handling - final MultiIndexQuery multiIndexQuery = mapper.treeToValue(apiRequest.query, MultiIndexQuery.class); - - //1st retrieve the list of docs from 1st Index - request = new Request( - "GET", - "/" + apiRequest.index + "/_search"); + try { + final MultiIndexQuery multiIndexQuery = mapper.treeToValue(apiRequest.query, MultiIndexQuery.class); + //1st retrieve the list of docs from 1st Index + request = new Request( + "GET", + "/" + apiRequest.index + "/_search"); - request.setJsonEntity(mapper.writeValueAsString(multiIndexQuery.metaQuery)); - finalString = extracted(restClient, request); + request.setJsonEntity(mapper.writeValueAsString(multiIndexQuery.metaQuery)); + finalString = extracted(restClient, request); - elasticResults = (ArrayNode) mapper.readTree(finalString).get("hits").get("hits"); - extractedResults = mapper.createArrayNode(); + elasticResults = (ArrayNode) mapper.readTree(finalString).get("hits").get("hits"); + extractedResults = mapper.createArrayNode(); - //2nd retrieve the docs from 2nd Index and combine into a single result with metadata and doc contents - final RestClient finalRestClient = restClient; //copy of restClient for use in lambda + //2nd retrieve the docs from 2nd Index and combine into a single result with metadata and doc contents + final RestClient finalRestClient = restClient; //copy of restClient for use in lambda - elasticResults.forEach(jsonNode -> { + elasticResults.forEach(jsonNode -> { - ObjectNode result = ((ObjectNode) jsonNode.get("_source")).put("$schema", schemaUri); - String docString = """ - { - "error": "Could not retrieve doc from secondary index" - "msg": "ERR_MSG" - } - """; + ObjectNode result = ((ObjectNode) jsonNode.get("_source")).put("$schema", schemaUri); + String docString = """ + { + "error": "Could not retrieve doc from secondary index" + "msg": "ERR_MSG" + } + """; - var subRequest = new Request( - "GET", - "/" + multiIndexQuery.targetIndex + "/_doc/" - + jsonNode.get("_source").get(multiIndexQuery.docField).textValue()); + var subRequest = new Request( + "GET", + "/" + multiIndexQuery.targetIndex + "/_doc/" + + jsonNode.get("_source").get(multiIndexQuery.docField).textValue()); - try { - docString = extracted(finalRestClient, subRequest); + try { + docString = extracted(finalRestClient, subRequest); - } catch (IOException e) { + } catch (IOException e) { - docString.replaceAll("ERR_MSG", e.getMessage()); - String msg = String.format("Could not query doc request: index: %s; docID: %s (%s)", - multiIndexQuery.targetIndex, multiIndexQuery.docField, 
e.getMessage()); - log.error(msg); - } + docString.replaceAll("ERR_MSG", e.getMessage()); + String msg = String.format("Could not query doc request: index: %s; docID: %s (%s)", + multiIndexQuery.targetIndex, multiIndexQuery.docField, e.getMessage()); + log.error(msg); + } - try { - result.put("$doc", mapper.readTree(docString)); - } catch (JsonProcessingException e) { - docString.replaceAll("ERR_MSG", e.getMessage()); - String msg = String.format("Could not parse doc result: %s, %s", docString, e.getMessage()); - log.error(msg); - } + try { + result.put("$doc", mapper.readTree(docString)); + } catch (JsonProcessingException e) { + docString.replaceAll("ERR_MSG", e.getMessage()); + String msg = String.format("Could not parse doc result: %s, %s", docString, e.getMessage()); + log.error(msg); + } - extractedResults.add(result); + extractedResults.add(result); - }); + }); - return new DatastoreResponse(extractedResults, payload); + return new DatastoreResponse(extractedResults, payload); + } catch (JsonProcessingException e) { + throw new RuntimeException("Could not process json query: " + e.getMessage()); + } default: throw new BadRequestException("Invalid request type: " + apiRequest.type); diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ConfigServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ConfigServiceImpl.java index edbfbd5d1..2863f2f08 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ConfigServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ConfigServiceImpl.java @@ -1,9 +1,6 @@ package io.hyperfoil.tools.horreum.svc; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Set; +import java.util.*; import java.util.stream.Collectors; import jakarta.annotation.security.PermitAll; @@ -20,8 +17,9 @@ import io.hyperfoil.tools.horreum.api.Version; import io.hyperfoil.tools.horreum.api.data.Access; import io.hyperfoil.tools.horreum.api.data.datastore.Datastore; +import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType; import io.hyperfoil.tools.horreum.api.services.ConfigService; -import io.hyperfoil.tools.horreum.datastore.BackendResolver; +import io.hyperfoil.tools.horreum.datastore.DatastoreResolver; import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO; import io.hyperfoil.tools.horreum.mapper.DatasourceMapper; import io.hyperfoil.tools.horreum.server.WithRoles; @@ -32,6 +30,10 @@ public class ConfigServiceImpl implements ConfigService { private static final Logger log = Logger.getLogger(ConfigServiceImpl.class); + //cache available dataStore configurations + private static final List datastoreTypes = Arrays.stream(DatastoreType.values()) + .map(DatastoreType::getConfig).toList(); + @ConfigProperty(name = "horreum.privacy") Optional privacyStatement; @@ -42,7 +44,7 @@ public class ConfigServiceImpl implements ConfigService { EntityManager em; @Inject - BackendResolver backendResolver; + DatastoreResolver backendResolver; @Override public KeycloakConfig keycloak() { @@ -80,6 +82,11 @@ public List datastores(String team) { } } + @Override + public List datastoreTypes() { + return datastoreTypes; + } + @Override @RolesAllowed(Roles.TESTER) @WithRoles @@ -102,29 +109,13 @@ public Integer newDatastore(Datastore datastore) { } else if (!identity.getRoles().contains(dao.owner)) { log.debugf("Failed to create datastore %s: requested owner %s, available roles: %s", dao.name, dao.owner, identity.getRoles()); - throw 
ServiceException.badRequest("This user does not have permissions to upload datastore for owner=" + dao.owner); + throw ServiceException.badRequest("This user does not have permissions to create datastore for owner=" + dao.owner); } if (dao.access == null) { dao.access = Access.PRIVATE; } - io.hyperfoil.tools.horreum.datastore.Datastore datastoreImpl; - try { - datastoreImpl = backendResolver.getBackend(datastore.type); - } catch (IllegalStateException e) { - throw ServiceException.badRequest("Unknown datastore type: " + datastore.type - + ". Please try again, if the problem persists please contact the system administrator."); - } - - if (datastoreImpl == null) { - throw ServiceException.badRequest("Unknown datastore type: " + datastore.type); - } - - String error = datastoreImpl.validateConfig(datastore.config); - - if (error != null) { - throw ServiceException.badRequest(error); - } + backendResolver.validatedDatastoreConfig(datastore.type, datastore.config); log.debugf("Creating new Datastore with owner=%s and access=%s", dao.owner, dao.access); @@ -144,18 +135,20 @@ public Integer newDatastore(Datastore datastore) { @RolesAllowed(Roles.TESTER) @WithRoles @Transactional - public Integer updateDatastore(Datastore backend) { - DatastoreConfigDAO dao = DatastoreConfigDAO.findById(backend.id); + public Integer updateDatastore(Datastore datastore) { + DatastoreConfigDAO dao = DatastoreConfigDAO.findById(datastore.id); if (dao == null) - throw ServiceException.notFound("Datastore with id " + backend.id + " does not exist"); + throw ServiceException.notFound("Datastore with id " + datastore.id + " does not exist"); - DatastoreConfigDAO newDao = DatasourceMapper.to(backend); + DatastoreConfigDAO newDao = DatasourceMapper.to(datastore); dao.type = newDao.type; dao.name = newDao.name; dao.configuration = newDao.configuration; dao.access = newDao.access; + backendResolver.validatedDatastoreConfig(datastore.type, datastore.config); + dao.persist(); return dao.id; @@ -166,7 +159,7 @@ public Integer updateDatastore(Datastore backend) { @RolesAllowed(Roles.TESTER) @WithRoles @Transactional - public DatastoreTestResponse testDatastore(String datastoreId) { + public DatastoreTestResponse testDatastore(Integer datastoreId) { return null; } @@ -174,8 +167,8 @@ public DatastoreTestResponse testDatastore(String datastoreId) { @RolesAllowed(Roles.TESTER) @WithRoles @Transactional - public void deleteDatastore(String datastoreId) { - DatastoreConfigDAO.deleteById(Integer.parseInt(datastoreId)); + public void deleteDatastore(Integer datastoreId) { + DatastoreConfigDAO.deleteById(datastoreId); } private String getString(String propertyName) { diff --git a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java index 4d65d03e6..b357eb154 100644 --- a/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java +++ b/horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java @@ -67,8 +67,8 @@ import io.hyperfoil.tools.horreum.api.services.SchemaService; import io.hyperfoil.tools.horreum.api.services.TestService; import io.hyperfoil.tools.horreum.bus.AsyncEventChannels; -import io.hyperfoil.tools.horreum.datastore.BackendResolver; import io.hyperfoil.tools.horreum.datastore.Datastore; +import io.hyperfoil.tools.horreum.datastore.DatastoreResolver; import io.hyperfoil.tools.horreum.datastore.DatastoreResponse; import io.hyperfoil.tools.horreum.entity.PersistentLogDAO; 
import io.hyperfoil.tools.horreum.entity.alerting.DataPointDAO; @@ -148,7 +148,7 @@ WHEN jsonb_typeof(data) = 'array' THEN ?1 IN (SELECT jsonb_array_elements(data)- @Inject ServiceMediator mediator; @Inject - BackendResolver backendResolver; + DatastoreResolver backendResolver; @Inject Session session; @@ -440,17 +440,13 @@ public Response addRunFromData(String start, String stop, String test, String ow } /** - * Processes and persists a run or multiple runs based on the provided data and metadata. - * It performs the following steps: - * - Validates and parses the input data string into a JSON structure. - * - Resolves the appropriate datastore to handle the run processing. - * - Handles single or multiple runs based on the datastore's response type. - * - Persists runs and their associated datasets in the database. - * - Queues dataset recalculation tasks for further processing. + * Processes and persists a run or multiple runs based on the provided data and metadata. It performs the following steps: - + * Validates and parses the input data string into a JSON structure. - Resolves the appropriate datastore to handle the run + * processing. - Handles single or multiple runs based on the datastore's response type. - Persists runs and their + * associated datasets in the database. - Queues dataset recalculation tasks for further processing. * - * If the response, in the case of datastore, contains more than 10 runs, - * the processing of the entire run is offloaded to an asynchronous queue. - * For fewer runs, processing occurs synchronously. + * If the response, in the case of datastore, contains more than 10 runs, the processing of the entire run is offloaded to + * an asynchronous queue. For fewer runs, processing occurs synchronously. * * @param start the start time for the run * @param stop the stop time for the run @@ -490,7 +486,8 @@ Response addRunFromData(String start, String stop, String test, TestDAO testEntity = testService.ensureTestExists(testNameOrId); - Datastore datastore = backendResolver.getBackend(testEntity.backendConfig.type); + Datastore datastore = backendResolver.getDatastore(testEntity.backendConfig.type); + DatastoreResponse response = datastore.handleRun(data, metadata, testEntity.backendConfig, Optional.ofNullable(schemaUri)); @@ -619,12 +616,10 @@ private Object findIfNotSet(String value, JsonNode data) { } /** - * Adds a new authenticated run to the database with appropriate ownership and access settings. - * This method performs the following tasks: - * - Ensures the run's ID is reset and metadata is correctly handled. - * - Determines the owner of the run, defaulting to a specific uploader role if no owner is provided. - * - Validates ownership permissions against the user's roles. - * - Persists or updates the run in the database and handles related datasets. + * Adds a new authenticated run to the database with appropriate ownership and access settings. This method performs the + * following tasks: - Ensures the run's ID is reset and metadata is correctly handled. - Determines the owner of the run, + * defaulting to a specific uploader role if no owner is provided. - Validates ownership permissions against the user's + * roles. - Persists or updates the run in the database and handles related datasets. 
* * @param run the RunDAO object containing the run details * @param test the TestDAO object containing the test details @@ -1134,15 +1129,15 @@ public void recalculateAll(String fromStr, String toStr) { } /** - * Transforms the data for a given run by applying applicable schemas and transformers. - * It ensures any existing datasets for the run are removed before creating new ones, - * handles timeouts for ongoing transformations, and creates datasets with the transformed data. - * If the flag {isRecalculation} is set to true the label values recalculation is performed - * right away synchronously otherwise it is completely skipped and let to the caller trigger it + * Transforms the data for a given run by applying applicable schemas and transformers. It ensures any existing datasets for + * the run are removed before creating new ones, handles timeouts for ongoing transformations, and creates datasets with the + * transformed data. If the flag {isRecalculation} is set to true the label values recalculation is performed right away + * synchronously otherwise it is completely skipped and let to the caller trigger it * * @param runId the ID of the run to transform * @param isRecalculation flag indicating if this is a recalculation - * @return the list of datasets ids that have been created, or empty list if the run is invalid or not found or already ongoing + * @return the list of datasets ids that have been created, or empty list if the run is invalid or not found or already + * ongoing */ @WithRoles(extras = Roles.HORREUM_SYSTEM) @Transactional @@ -1374,9 +1369,9 @@ List transform(int runId, boolean isRecalculation) { } /** - * Persists a dataset, optionally triggers recalculation events, and validates the dataset. - * The recalculation is getting triggered sync only if the {isRecalculation} is set to true - * otherwise it is completely skipped + * Persists a dataset, optionally triggers recalculation events, and validates the dataset. The recalculation is getting + * triggered sync only if the {isRecalculation} is set to true otherwise it is completely skipped + * * @param ds the DatasetDAO object to be persisted * @param isRecalculation whether the dataset is a result of recalculation * @return the ID of the persisted dataset @@ -1490,9 +1485,9 @@ static class RunFromUri { } /** - * Represents the result of persisting a run, including the run ID and associated dataset IDs. - * This class is used to encapsulate the ID of the newly persisted run and the IDs of the datasets - * connected to the run, providing a structured way to return this data. + * Represents the result of persisting a run, including the run ID and associated dataset IDs. This class is used to + * encapsulate the ID of the newly persisted run and the IDs of the datasets connected to the run, providing a structured + * way to return this data. 
*/ public static class RunPersistence { private final Integer runId; diff --git a/horreum-backend/src/main/resources/db/changeLog.xml b/horreum-backend/src/main/resources/db/changeLog.xml index ae409e624..ba3d7324e 100644 --- a/horreum-backend/src/main/resources/db/changeLog.xml +++ b/horreum-backend/src/main/resources/db/changeLog.xml @@ -4688,4 +4688,74 @@ $$ LANGUAGE plpgsql; + + + ANY + + + UPDATE backendconfig + SET configuration = '{"builtIn": true, "authentication": {"type": "none"}}' + WHERE id = 1 + + + + UPDATE backendconfig + SET configuration = updated.newConfig + FROM (select id, jsonb_insert(newConfig - 'apiKey' - 'username' - 'password', '{authentication,type}', '"none"') as newConfig + from (select + id, + jsonb_insert(configuration, '{authentication}', '{}') as newConfig + from backendconfig + where jsonb_path_exists(configuration, '$.apiKey') + AND configuration ->> 'apiKey' = '' + AND not jsonb_path_exists(configuration, '$.username')) + ) + as updated + WHERE backendconfig.id = updated.id; + + + + + UPDATE backendconfig + SET configuration = updated.newConfig + FROM (select id, jsonb_insert(newConfig, '{authentication,type}', '"api-key"') as newConfig + from (Select rootConfig.id, + jsonb_insert(rootConfig.newConfig - 'apiKey' - 'username' - 'password', '{authentication,apiKey}', + to_jsonb(rootConfig.apiKey)) as newConfig + FROM (select id, + name, + configuration ->> 'apiKey' as apiKey, + configuration, + jsonb_insert(configuration, '{authentication}', '{}') as newConfig + from backendconfig + where jsonb_path_exists(configuration, '$.apiKey') + AND not configuration ->> 'apiKey' = '' + and not jsonb_path_exists(configuration, '$.authentication')) + as rootConfig) + as updated) + as updated + WHERE backendconfig.id = updated.id + + + + + UPDATE backendconfig + SET configuration = updated.newConfig + FROM + (select id, jsonb_insert(newConfig, '{authentication,type}', '"username"') as newConfig + FROM + (select id, jsonb_insert(updatedConfig.newConfig - 'password', '{authentication,password}', to_jsonb(updatedConfig.password)) as newConfig + from + (Select rootConfig.id, username, password, jsonb_insert(rootConfig.newConfig - 'apiKey' - 'username', '{authentication,username}', to_jsonb(rootConfig.username)) as newConfig + FROM + (select id, name, configuration ->> 'username' as username, configuration ->> 'password' as password, configuration, jsonb_insert(configuration, '{authentication}', '{}') as newConfig + from backendconfig + where jsonb_path_exists(configuration, '$.username') + ) as rootConfig + ) as updatedConfig) + ) as updated + WHERE backendconfig.id = updated.id; + + + diff --git a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/DatasourceTest.java b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/DatasourceTest.java index c733620aa..4ee774411 100644 --- a/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/DatasourceTest.java +++ b/horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/DatasourceTest.java @@ -28,6 +28,7 @@ import io.hyperfoil.tools.horreum.api.data.datastore.Datastore; import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType; import io.hyperfoil.tools.horreum.api.data.datastore.ElasticsearchDatastoreConfig; +import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth; import io.hyperfoil.tools.horreum.bus.AsyncEventChannels; import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO; import io.hyperfoil.tools.horreum.entity.data.DatasetDAO; @@ -233,13 +234,12 @@ private 
TestConfig createNewTestAndDatastores(TestInfo info) { Datastore newDatastore = new Datastore(); newDatastore.name = info.getDisplayName(); newDatastore.type = DatastoreType.ELASTICSEARCH; - newDatastore.builtIn = false; newDatastore.access = Access.PRIVATE; newDatastore.owner = TESTER_ROLES[0]; ElasticsearchDatastoreConfig elasticConfig = new ElasticsearchDatastoreConfig(); elasticConfig.url = hosts.get().get(0); - elasticConfig.apiKey = apiKey.orElse("123"); + elasticConfig.authentication = new NoAuth(); newDatastore.config = mapper.valueToTree(elasticConfig); diff --git a/horreum-web/src/domain/admin/Datastores.tsx b/horreum-web/src/domain/admin/Datastores.tsx index d7c028c73..749b57105 100644 --- a/horreum-web/src/domain/admin/Datastores.tsx +++ b/horreum-web/src/domain/admin/Datastores.tsx @@ -3,8 +3,7 @@ import {useContext, useEffect, useState} from "react" import {Button, Form, FormGroup} from "@patternfly/react-core" import ConfirmDeleteModal from "../../components/ConfirmDeleteModal" -import TeamSelect, {Team, SHOW_ALL} from "../../components/TeamSelect" - +import TeamSelect, {Team, SHOW_ALL, createTeam} from "../../components/TeamSelect" import { Table, @@ -26,22 +25,30 @@ import { configApi, Datastore, DatastoreTypeEnum, - ElasticsearchDatastoreConfig + ElasticsearchDatastoreConfig, TypeConfig } from "../../api"; import {AppContext} from "../../context/appContext"; import {AppContextType} from "../../context/@types/appContextTypes"; -import {noop} from "../../utils"; +import {useSelector} from "react-redux"; +import {defaultTeamSelector} from "../../auth"; interface dataStoreTableProps { datastores: Datastore[] + datastoreTypes: TypeConfig[] team: Team - persistDatastore: (backend: Datastore) => Promise - deleteDatastore: (id: string) => Promise + modifyDatastore: (backend: Datastore) => void + verifyDatastore: (backend: Datastore) => void + deleteDatastore: (backend: Datastore) => void } -const DatastoresTable = ( props: dataStoreTableProps) => { +const newBackendConfig: ElasticsearchDatastoreConfig | CollectorApiDatastoreConfig = { + url: "", + builtIn: false, + authentication: {'type': 'none'} +} +const DatastoresTable = (props: dataStoreTableProps) => { const columnNames = { type: 'Type', @@ -51,32 +58,82 @@ const DatastoresTable = ( props: dataStoreTableProps) => { }; - const defaultActions = (datastore: Datastore): IAction[] => [ + const defaultActions = (selectedDatastore: Datastore): IAction[] => [ { - title: `Edit`, onClick: () => editModalToggle(datastore.id) + title: `Edit`, onClick: () => props.modifyDatastore(selectedDatastore) }, { - title: `Test`, onClick: () => verifyModalToggle(datastore.id) + title: `Test`, onClick: () => props.verifyDatastore(selectedDatastore) }, { isSeparator: true }, { - title: `Delete`, onClick: () => deleteModalToggle(datastore.id) + title: `Delete`, onClick: () => props.deleteDatastore(selectedDatastore) }, ]; - const newBackendConfig: ElasticsearchDatastoreConfig | CollectorApiDatastoreConfig = { - url: "", - apiKey: "", - builtIn: false + + return ( + + + + + + + + + + + {props.datastores.map(teamDatastore => { + const rowActions: IAction[] | null = defaultActions(teamDatastore); + return ( + + + + + ) + })} + +
{columnNames.type}{columnNames.name}{columnNames.action}
{props.datastoreTypes.find((type) => type.enumName === teamDatastore.type)?.label}{teamDatastore.name} + +
+ + ); +} + +const errorFormatter = (error: any) => { + // Check if error has a message property + if (error.message) { + return error.message; + } + // If error is a string, return it as is + if (typeof error === 'string') { + return error; + } + // If error is an object, stringify it + if (typeof error === 'object') { + return JSON.stringify(error); } + // If none of the above, return a generic error message + return 'An error occurred'; +} + + +export default function Datastores() { + const {alerting} = useContext(AppContext) as AppContextType; + const defaultTeam = useSelector(defaultTeamSelector) || SHOW_ALL.key; + const [datastores, setDatastores] = useState([]) + const [datastoreTypes, setDatastoreTypes] = useState([]) + const [curTeam, setCurTeam] = useState(createTeam(defaultTeam)); const newDataStore: Datastore = { id: -1, name: "", - owner: props.team.key, - builtIn: false, + owner: curTeam.key, access: Access.Private, config: newBackendConfig, type: DatastoreTypeEnum.Postgres @@ -85,26 +142,19 @@ const DatastoresTable = ( props: dataStoreTableProps) => { const [deleteModalOpen, setDeleteModalOpen] = useState(false); const [editModalOpen, setEditModalOpen] = useState(false); const [verifyModalOpen, setVerifyModalOpen] = useState(false); - const [datastore, setDatastore] = useState(newDataStore) - - const findDatastore = (id: number) => { - return props.datastores.filter( datastore => datastore.id === id).pop() || newDataStore - } - - const updateDatastore = ( datastore: Datastore) : void => { - setDatastore(datastore) - } + const [datastore, setDatastore] = useState(newDataStore) - const deleteModalToggle = (id: number) => { - setDatastore(findDatastore(id)) - setDeleteModalOpen(!deleteModalOpen); + const deleteModalToggle = () => { + if (datastore.config && !datastore.config.builtIn) { + setDeleteModalOpen(!deleteModalOpen); + } else { + alerting.dispatchError(null, "DELETE", "Can not delete built in datastore") + } }; - const editModalToggle = (id: number) => { - setDatastore(findDatastore(id)) + const editModalToggle = () => { setEditModalOpen(!editModalOpen); }; - const verifyModalToggle = (id: number) => { - setDatastore(findDatastore(id)) + const verifyModalToggle = () => { setVerifyModalOpen(!verifyModalOpen); }; @@ -113,106 +163,18 @@ const DatastoresTable = ( props: dataStoreTableProps) => { setEditModalOpen(!editModalOpen); }; - - return ( - - - - - - - - - - - - - - {props.datastores?.map(repo => { - const rowActions: IAction[] | null = defaultActions(repo); - return ( - - - - - ) - })} - -
{columnNames.type}{columnNames.name}{columnNames.action}
{repo.type}{repo.name} - -
-
- - - - - deleteModalToggle(0)} - onDelete={() => { - props.deleteDatastore(datastore?.id?.toString() || "") - deleteModalToggle(0) - return Promise.resolve() - } - } - /> - - - editModalToggle(0)} - persistDatastore={props.persistDatastore} - dataStore={datastore} - updateDatastore={updateDatastore} - onDelete={() => { - editModalToggle(0) - return Promise.resolve() - } - } - /> - - - verifyModalToggle(0)} - onDelete={() => { - verifyModalToggle(0) - return Promise.resolve() - } - } - /> - -
- ); -} - -export default function Datastores() { - const { alerting } = useContext(AppContext) as AppContextType; - - const [datastores, setDatastores] = useState([]) - const [curTeam, setCurTeam] = useState(SHOW_ALL) - - const fetchDataStores = () : Promise => { + const fetchDataStores = (): Promise => { return apiCall(configApi.datastores(curTeam.key), alerting, "FETCH_DATASTORES", "Cannot fetch datastores") .then(setDatastores) - // userApi.administrators().then( - // list => setAdmins(list.map(userElement)), - // error => dispatchError(dispatch, error, "FETCH ADMINS", "Cannot fetch administrators") - // ) + } + const fetchDataStoreTypes = (): Promise => { + return apiCall(configApi.datastoreTypes(), alerting, "FETCH_DATASTORE_TYPES", "Cannot fetch Datastore Types") + .then(setDatastoreTypes) } - const deleteDatastore = (id: string) : Promise => { - return apiCall(configApi.deleteDatastore(id), alerting, "DELETE_BACKEND", "Cannot delete datastore") + const deleteDatastore = (datastore: Datastore): Promise => { + return apiCall(configApi.deleteDatastore(datastore.id), alerting, "DELETE_BACKEND", "Cannot delete datastore") .then(fetchDataStores) } @@ -220,17 +182,42 @@ export default function Datastores() { fetchDataStores() }, [curTeam]) - const persistNewBackend = (datastore: Datastore) : Promise => { + useEffect(() => { + fetchDataStoreTypes() + }, []) + + + const updateDatastore = (datastore: Datastore): void => { + setDatastore(datastore) + } + + const handleDeleteDatastore = (datastore: Datastore) => { + setDatastore(datastore) + deleteModalToggle() + } + const handleModifyDatastore = (datastore: Datastore): void => { + setDatastore(datastore) + editModalToggle() + } + const handleVerifyDatastore = (datastore: Datastore): void => { + setDatastore(datastore) + verifyModalToggle() + } + const persistDatastore = (): Promise => { let apicall: Promise - if ( datastore.id == -1){ + if (datastore.id == -1) { apicall = apiCall(configApi.newDatastore(datastore), alerting, "NEW_DATASTORE", "Could create new datastore") } else { apicall = apiCall(configApi.updateDatastore(datastore), alerting, "UPDATE_DATASTORE", "Could create new datastore") } - return apicall.then(fetchDataStores) + return apicall + .then(fetchDataStores) + .then(editModalToggle) + .then(() => alerting.dispatchInfo("SAVE", "Saved!", "Datastore was successfully updated!", 3000)) + .catch(reason => alerting.dispatchError(reason, "Saved!", "Failed to save changes to Datastore", errorFormatter)) } return ( @@ -238,19 +225,68 @@ export default function Datastores() { { setCurTeam(selection) }} /> - + + + + + + + + + { + deleteDatastore(datastore) + deleteModalToggle() + return Promise.resolve() + } + } + /> + + + + + + { + verifyModalToggle() + return Promise.resolve() + } + } + /> + + ) diff --git a/horreum-web/src/domain/admin/datastore/ModifyDatastoreModal.tsx b/horreum-web/src/domain/admin/datastore/ModifyDatastoreModal.tsx index c21104af4..40d7c6ef2 100644 --- a/horreum-web/src/domain/admin/datastore/ModifyDatastoreModal.tsx +++ b/horreum-web/src/domain/admin/datastore/ModifyDatastoreModal.tsx @@ -1,4 +1,4 @@ -import React, {useContext, useState} from "react" +import React, {useEffect, useState} from "react" import { Button, Form, @@ -9,84 +9,179 @@ import { Modal, TextInput } from "@patternfly/react-core" import { + APIKeyAuth, Datastore, - DatastoreTypeEnum, ElasticsearchDatastoreConfig, + DatastoreTypeEnum, ElasticsearchDatastoreConfig, TypeConfig, UsernamePassAuth, } from "../../../api"; -import {AppContext} from 
"../../../context/appContext"; -import {AppContextType} from "../../../context/@types/appContextTypes"; type ConfirmDeleteModalProps = { isOpen: boolean dataStore: Datastore + dataStoreTypes: Array onClose(): void - onDelete(): Promise + onDelete(): void updateDatastore(datastore: Datastore): void - persistDatastore: (datastore: Datastore) => Promise + persistDatastore: () => Promise description: string extra?: string } -interface datastoreOption { - value: DatastoreTypeEnum, - label: string, - disabled: boolean, - urlDisabled: boolean, - usernameDisable: boolean, - tokenDisbaled: boolean +type UpdateDatastoreProps = { + dataStore: Datastore + updateDatastore(datastore: Datastore): void } -export default function ModifyDatastoreModal({isOpen, onClose, persistDatastore, dataStore, updateDatastore}: ConfirmDeleteModalProps) { +function NoAuth() { + return (<>) +} - const { alerting } = useContext(AppContext) as AppContextType; - const [enabledURL, setEnableUrl] = useState(false); - const [enabledToken, setEnableToken] = useState(false); +function UsernameAuth({dataStore, updateDatastore}: UpdateDatastoreProps) { + return ( + <> + + { + updateDatastore({ + ...dataStore, + config: { + ...dataStore.config, + authentication: {...dataStore.config.authentication, type: "username", username: value} + } + }) + }} + type="text" + id="horizontal-form-username" + aria-describedby="horizontal-form-token-helper" + name="horizontal-form-token" + isRequired={true} + /> + + + Please provide a Username to authenticate against datastore + + + + + { + updateDatastore({ + ...dataStore, + config: { + ...dataStore.config, + authentication: {...dataStore.config.authentication, type: "username", password: value} + } + }) + }} + type="text" + id="horizontal-form-password" + aria-describedby="horizontal-form-token-helper" + name="horizontal-form-token" + /> + + + Please provide a Password to authenticate against datastore + + + + + ) +} +function ApiKeyAuth({dataStore, updateDatastore}: UpdateDatastoreProps) { + return ( + <> + + { + updateDatastore({ + ...dataStore, + config: { + ...dataStore.config, + authentication: {...dataStore.config.authentication, type: "api-key", apiKey: value} + } + }) + }} + type="text" + id="horizontal-form-api-key" + aria-describedby="horizontal-form-token-helper" + name="horizontal-form-token" + isRequired={true} + /> + + + Please provide an API token to authenticate against datastore + + + + + ) +} - const handleOptionChange = (_event: React.FormEvent, value: string) => { - const option: datastoreOption | undefined = options.filter( optionvalue => optionvalue.value === value).pop() - if ( option ){ - setEnableUrl(option.urlDisabled) - setEnableToken(option.tokenDisbaled) +export default function ModifyDatastoreModal({ + isOpen, + onClose, + persistDatastore, + dataStore, + dataStoreTypes, + updateDatastore + }: ConfirmDeleteModalProps) { + + + const [authOptions, setAuthOptions] = useState>([]); - updateDatastore({...dataStore, type: option.value}) + useEffect(() => { //find initial auth options for the selected datastore + setAuthOptions(dataStoreTypes.find(t => t.enumName === dataStore.type)?.supportedAuths || []) + }, [dataStore]); + + const handleOptionChange = (_event: React.FormEvent, value: string) => { + const selectedOption = dataStoreTypes.find(t => t.enumName === value) + if (selectedOption) { + //some ts wizardry to get the enum value from the option name string + const datastoreTyped = selectedOption.name as keyof typeof DatastoreTypeEnum; + // let enumKey = 
Object.keys(DatastoreTypeEnum)[Object.values(DatastoreTypeEnum).indexOf(option.name)]; + updateDatastore({ + ...dataStore, + type: DatastoreTypeEnum[datastoreTyped], + config: {...dataStore.config, authentication: {type: 'none'}} + }) } }; - const errorFormatter = (error: any) => { - // Check if error has a message property - if (error.message) { - return error.message; + const handleAuthOptionChange = (_event: React.FormEvent, value: string) => { + switch (value) { //pita, but TS compiler complains need to switch on String value to get the correct union type + case "none": + updateDatastore({...dataStore, config: {...dataStore.config, authentication: {type: 'none'}}}) + break; + case "username": + updateDatastore({ + ...dataStore, + config: {...dataStore.config, authentication: {type: 'username', username: "", password: ""}} + }) + break; + case "api-key": + updateDatastore({ + ...dataStore, + config: {...dataStore.config, authentication: {type: 'api-key', apiKey: ""}} + }) + break; } - // If error is a string, return it as is - if (typeof error === 'string') { - return error; - } - // If error is an object, stringify it - if (typeof error === 'object') { - return JSON.stringify(error); - } - // If none of the above, return a generic error message - return 'An error occurred'; - } - - const saveBackend = () => { - persistDatastore(dataStore) - .then( () => { - onClose(); - alerting.dispatchInfo("SAVE", "Saved!", "Datastore was successfully updated!", 3000) - }) - .catch(reason => alerting.dispatchError(reason, "Saved!", "Failed to save changes to Datastore", errorFormatter)) - } - - const options : datastoreOption[] = [ - { value: DatastoreTypeEnum.Postgres, label: 'Please select...', disabled: true, urlDisabled: true, usernameDisable: true, tokenDisbaled: true }, - { value: DatastoreTypeEnum.Elasticsearch, label: 'Elasticsearch', disabled: false, urlDisabled: false, usernameDisable: false, tokenDisbaled: false }, - { value: DatastoreTypeEnum.Collectorapi, label: 'Collector API', disabled: false, urlDisabled: false, usernameDisable: true, tokenDisbaled: false }, - ]; + }; const actionButtons = [ - , - + , + ] return ( @@ -102,8 +197,11 @@ export default function ModifyDatastoreModal({isOpen, onClose, persistDatastore, name="horizontal-form-datastore-type" aria-label="Backend Type" > - {options.map((option, index) => ( - + + {dataStoreTypes.filter(type => !type.builtIn).map((option, index) => ( + ))} @@ -134,11 +232,10 @@ export default function ModifyDatastoreModal({isOpen, onClose, persistDatastore, { - const config :ElasticsearchDatastoreConfig = dataStore.config as ElasticsearchDatastoreConfig; + const config: ElasticsearchDatastoreConfig = dataStore.config as ElasticsearchDatastoreConfig; config.url = value updateDatastore({...dataStore, config: config}) - }} - isDisabled={enabledURL} + }} type="text" id="horizontal-form-url" aria-describedby="horizontal-form-name-helper" @@ -146,77 +243,40 @@ export default function ModifyDatastoreModal({isOpen, onClose, persistDatastore, /> - Please provide the full host URL to for the datastore service + Please provide the full host URL to for the datastore + service - - { - const config :ElasticsearchDatastoreConfig = dataStore.config as ElasticsearchDatastoreConfig; - config.apiKey = value - updateDatastore({...dataStore, config: config}) - }}isDisabled={enabledToken} - type="text" - id="horizontal-form-api-key" - aria-describedby="horizontal-form-token-helper" - name="horizontal-form-token" - /> - - - Please provide an API token to authenticate 
against datastore - - + + + + {authOptions.map((authOption, index) => ( + + ))} + - - { - const config :ElasticsearchDatastoreConfig = dataStore.config as ElasticsearchDatastoreConfig; - config.username = value - updateDatastore({...dataStore, config: config}) - }}isDisabled={enabledToken} - type="text" - id="horizontal-form-username" - aria-describedby="horizontal-form-token-helper" - name="horizontal-form-token" + {dataStore.config.authentication.type === "none" && ( + )} + {dataStore.config.authentication.type === "username" && ( + - - - Please provide a Username to authenticate against datastore - - - - - { - const config :ElasticsearchDatastoreConfig = dataStore.config as ElasticsearchDatastoreConfig; - config.password = value - updateDatastore({...dataStore, config: config}) - }}isDisabled={enabledToken} - type="text" - id="horizontal-form-password" - aria-describedby="horizontal-form-token-helper" - name="horizontal-form-token" + )} + {dataStore.config.authentication.type === "api-key" && ( + - - - Please provide a Password to authenticate against datastore - - - + )} ) diff --git a/horreum-web/src/domain/runs/ValidationErrorTable.tsx b/horreum-web/src/domain/runs/ValidationErrorTable.tsx index 93cdea06e..a46fee4d6 100644 --- a/horreum-web/src/domain/runs/ValidationErrorTable.tsx +++ b/horreum-web/src/domain/runs/ValidationErrorTable.tsx @@ -26,7 +26,7 @@ export default function ValidationErrorTable(props: ValidationErrorTableProps) { {props.schemas.find(s => s.id === error.schemaId)?.name || "unknown schema " + error.schemaId} - : "None"} + : "none"} {error.error.type} {error.error.path}
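
Reviewer note (illustrative, not part of the patch): ModifyDatastoreModal narrows on the type discriminator of the new authentication object before rendering auth-specific fields, and handleAuthOptionChange switches on the string value for the same reason. A minimal TypeScript sketch of that union, assuming the generated client exposes the OpenAPI schemas under these names (NoAuth, APIKeyAuth, UsernamePassAuth); the real generated types may differ in detail:

    // Minimal sketch of the authentication union; only the discriminator values
    // used by the modal ("none", "api-key", "username") are modelled here.
    type NoAuth = { type: "none" }
    type APIKeyAuth = { type: "api-key"; apiKey: string }
    type UsernamePassAuth = { type: "username"; username: string; password: string }

    type DatastoreAuthentication = NoAuth | APIKeyAuth | UsernamePassAuth

    // Switching on the discriminator narrows the union, so auth-specific fields
    // can be read without casts.
    function describeAuth(auth: DatastoreAuthentication): string {
        switch (auth.type) {
            case "api-key":
                return "API key of length " + auth.apiKey.length
            case "username":
                return "username/password for " + auth.username
            default:
                return "no authentication"
        }
    }

This is why the component builds a whole new authentication object per selected method (for example {type: "username", username: "", password: ""}) rather than assigning the type string alone, which would not satisfy the union.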
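
A second illustrative sketch (also not in the patch): the supportedAuths list returned by the new datastore-types endpoint is what populates the auth-method dropdown, while builtIn types are filtered out of the type selector. Assuming configApi.datastoreTypes() from the generated client resolves to TypeConfig[], a standalone helper could look like this; in the component the call goes through apiCall/alerting instead of a bare await, so treat this purely as a shape illustration:

    import { configApi, TypeConfig } from "../../api"

    // Returns the auth methods offered for a given datastore type,
    // e.g. ["none", "api-key", "username"].
    async function authOptionsFor(datastoreType: string): Promise<string[]> {
        const types: TypeConfig[] = await configApi.datastoreTypes()
        // The type dropdown in the modal additionally filters out builtIn entries;
        // here we only look up which auth methods the selected type supports.
        return types.find(t => t.enumName === datastoreType)?.supportedAuths ?? []
    }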