Commit

Merge remote-tracking branch 'upstream/main' into feat-5192-2

hdygxsj committed Dec 17, 2024
2 parents 9a336eb + 4daf9d1 commit d5fdc7e
Showing 93 changed files with 2,855 additions and 370 deletions.
@@ -0,0 +1,116 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.gravitino.credential;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;

/** ADLS SAS token credential. */
public class ADLSTokenCredential implements Credential {

  /** ADLS SAS token credential type. */
  public static final String ADLS_SAS_TOKEN_CREDENTIAL_TYPE = "adls-sas-token";
  /** ADLS base domain. */
  public static final String ADLS_DOMAIN = "dfs.core.windows.net";
  /** ADLS storage account name. */
  public static final String GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME = "azure-storage-account-name";
  /** ADLS SAS token used to access ADLS data. */
  public static final String GRAVITINO_ADLS_SAS_TOKEN = "adls-sas-token";

  private String accountName;
  private String sasToken;
  private long expireTimeInMS;

  /**
   * Constructs an instance of {@link ADLSTokenCredential} with a SAS token.
   *
   * @param accountName The ADLS account name.
   * @param sasToken The ADLS SAS token.
   * @param expireTimeInMS The SAS token expire time in ms.
   */
  public ADLSTokenCredential(String accountName, String sasToken, long expireTimeInMS) {
    validate(accountName, sasToken, expireTimeInMS);
    this.accountName = accountName;
    this.sasToken = sasToken;
    this.expireTimeInMS = expireTimeInMS;
  }

  /**
   * The no-argument constructor used by the credential factory to create a credential instance
   * from the credential information.
   */
  public ADLSTokenCredential() {}

  @Override
  public String credentialType() {
    return ADLS_SAS_TOKEN_CREDENTIAL_TYPE;
  }

  @Override
  public long expireTimeInMs() {
    return expireTimeInMS;
  }

  @Override
  public Map<String, String> credentialInfo() {
    return (new ImmutableMap.Builder<String, String>())
        .put(GRAVITINO_ADLS_SAS_TOKEN, sasToken)
        .build();
  }

  @Override
  public void initialize(Map<String, String> credentialInfo, long expireTimeInMS) {
    String accountName = credentialInfo.get(GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME);
    String sasToken = credentialInfo.get(GRAVITINO_ADLS_SAS_TOKEN);
    validate(accountName, sasToken, expireTimeInMS);
    this.accountName = accountName;
    this.sasToken = sasToken;
    this.expireTimeInMS = expireTimeInMS;
  }

  /**
   * Get the ADLS account name.
   *
   * @return The ADLS account name.
   */
  public String accountName() {
    return accountName;
  }

  /**
   * Get the ADLS SAS token.
   *
   * @return The ADLS SAS token.
   */
  public String sasToken() {
    return sasToken;
  }

  private void validate(String accountName, String sasToken, long expireTimeInMS) {
    Preconditions.checkArgument(
        StringUtils.isNotBlank(accountName), "ADLS account name should not be empty.");
    Preconditions.checkArgument(
        StringUtils.isNotBlank(sasToken), "ADLS SAS token should not be empty.");
    Preconditions.checkArgument(
        expireTimeInMS > 0, "The expire time of ADLSTokenCredential should be greater than 0.");
  }
}
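For context, a minimal usage sketch of the new credential class (illustrative only; the account name, token value, and expiry below are made-up placeholders). It also shows the no-argument constructor plus initialize() path that a credential factory would take; note that the map passed to initialize() must carry both the account-name and SAS-token keys, while credentialInfo() as committed returns only the token entry.

// Illustrative sketch only; all values are hypothetical.
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.gravitino.credential.ADLSTokenCredential;

public class ADLSTokenCredentialExample {
  public static void main(String[] args) {
    long expireTimeInMs = System.currentTimeMillis() + 60 * 60 * 1000; // one hour from now

    // Direct construction with an account name and a SAS token.
    ADLSTokenCredential credential =
        new ADLSTokenCredential("mystorageaccount", "sv=2024-01-01&sig=abc", expireTimeInMs);
    System.out.println(credential.credentialType()); // adls-sas-token
    System.out.println(credential.credentialInfo()); // {adls-sas-token=sv=2024-01-01&sig=abc}

    // Factory-style construction: no-arg constructor followed by initialize().
    Map<String, String> credentialInfo =
        ImmutableMap.of(
            ADLSTokenCredential.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME, "mystorageaccount",
            ADLSTokenCredential.GRAVITINO_ADLS_SAS_TOKEN, "sv=2024-01-01&sig=abc");
    ADLSTokenCredential fromInfo = new ADLSTokenCredential();
    fromInfo.initialize(credentialInfo, expireTimeInMs);
    System.out.println(fromInfo.accountName() + "@" + ADLSTokenCredential.ADLS_DOMAIN);
  }
}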
@@ -51,13 +51,7 @@ public class OSSTokenCredential implements Credential {
    */
   public OSSTokenCredential(
       String accessKeyId, String secretAccessKey, String securityToken, long expireTimeInMS) {
-    Preconditions.checkArgument(
-        StringUtils.isNotBlank(accessKeyId), "OSS access key Id should not be empty");
-    Preconditions.checkArgument(
-        StringUtils.isNotBlank(secretAccessKey), "OSS access key secret should not be empty");
-    Preconditions.checkArgument(
-        StringUtils.isNotBlank(securityToken), "OSS security token should not be empty");
-
+    validate(accessKeyId, secretAccessKey, securityToken, expireTimeInMS);
     this.accessKeyId = accessKeyId;
     this.secretAccessKey = secretAccessKey;
     this.securityToken = securityToken;
@@ -133,12 +127,12 @@ public String securityToken() {
   private void validate(
       String accessKeyId, String secretAccessKey, String sessionToken, long expireTimeInMs) {
     Preconditions.checkArgument(
-        StringUtils.isNotBlank(accessKeyId), "S3 access key Id should not be empty");
+        StringUtils.isNotBlank(accessKeyId), "OSS access key Id should not be empty");
     Preconditions.checkArgument(
-        StringUtils.isNotBlank(secretAccessKey), "S3 secret access key should not be empty");
+        StringUtils.isNotBlank(secretAccessKey), "OSS secret access key should not be empty");
     Preconditions.checkArgument(
-        StringUtils.isNotBlank(sessionToken), "S3 session token should not be empty");
+        StringUtils.isNotBlank(sessionToken), "OSS session token should not be empty");
     Preconditions.checkArgument(
-        expireTimeInMs > 0, "The expire time of S3TokenCredential should great than 0");
+        expireTimeInMs > 0, "The expire time of OSSTokenCredential should great than 0");
   }
 }
@@ -22,3 +22,4 @@ org.apache.gravitino.credential.S3SecretKeyCredential
 org.apache.gravitino.credential.GCSTokenCredential
 org.apache.gravitino.credential.OSSTokenCredential
 org.apache.gravitino.credential.OSSSecretKeyCredential
+org.apache.gravitino.credential.ADLSTokenCredential
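The one-line addition above registers the new credential class in the class-per-line credential registry resource. As a rough illustration of how such a registry file could be consumed, a hypothetical loader is sketched below; it is not the factory Gravitino actually ships, and the resource name is an assumption.

// Hypothetical loader for a class-per-line registry file; the resource name is assumed.
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

public class CredentialRegistryExample {
  public static List<Object> loadRegisteredCredentials(String resourceName) throws Exception {
    List<Object> credentials = new ArrayList<>();
    InputStream in =
        CredentialRegistryExample.class.getClassLoader().getResourceAsStream(resourceName);
    try (BufferedReader reader =
        new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        line = line.trim();
        if (line.isEmpty() || line.startsWith("#")) {
          continue; // skip blank lines and comments
        }
        // Each remaining line is a fully qualified class name with a no-arg constructor,
        // e.g. org.apache.gravitino.credential.ADLSTokenCredential.
        credentials.add(Class.forName(line).getDeclaredConstructor().newInstance());
      }
    }
    return credentials;
  }
}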
@@ -0,0 +1,160 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.gravitino.authorization.ranger;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * The properties for the Chain authorization plugin. <br>
 * <br>
 * Configuration Example: <br>
 * "authorization.chain.plugins" = "hive1,hdfs1" <br>
 * "authorization.chain.hive1.provider" = "ranger"; <br>
 * "authorization.chain.hive1.ranger.service.type" = "HadoopSQL"; <br>
 * "authorization.chain.hive1.ranger.service.name" = "hiveDev"; <br>
 * "authorization.chain.hive1.ranger.auth.type" = "simple"; <br>
 * "authorization.chain.hive1.ranger.admin.url" = "http://localhost:6080"; <br>
 * "authorization.chain.hive1.ranger.username" = "admin"; <br>
 * "authorization.chain.hive1.ranger.password" = "admin"; <br>
 * "authorization.chain.hdfs1.provider" = "ranger"; <br>
 * "authorization.chain.hdfs1.ranger.service.type" = "HDFS"; <br>
 * "authorization.chain.hdfs1.ranger.service.name" = "hdfsDev"; <br>
 * "authorization.chain.hdfs1.ranger.auth.type" = "simple"; <br>
 * "authorization.chain.hdfs1.ranger.admin.url" = "http://localhost:6080"; <br>
 * "authorization.chain.hdfs1.ranger.username" = "admin"; <br>
 * "authorization.chain.hdfs1.ranger.password" = "admin"; <br>
 */
public class ChainAuthorizationProperties {
  public static final String PLUGINS_SPLITTER = ",";
  /** Chain authorization plugin names */
  public static final String CHAIN_PLUGINS_PROPERTIES_KEY = "authorization.chain.plugins";

  /** Chain authorization plugin provider */
  public static final String CHAIN_PROVIDER = "authorization.chain.*.provider";

  static Map<String, String> fetchAuthPluginProperties(
      String pluginName, Map<String, String> properties) {
    Preconditions.checkArgument(
        properties.containsKey(CHAIN_PLUGINS_PROPERTIES_KEY)
            && properties.get(CHAIN_PLUGINS_PROPERTIES_KEY) != null,
        String.format("%s is required", CHAIN_PLUGINS_PROPERTIES_KEY));

    String[] pluginNames = properties.get(CHAIN_PLUGINS_PROPERTIES_KEY).split(PLUGINS_SPLITTER);
    Preconditions.checkArgument(
        Arrays.asList(pluginNames).contains(pluginName),
        String.format("pluginName %s must be one of %s", pluginName, Arrays.toString(pluginNames)));

    String regex = "^authorization\\.chain\\.(" + pluginName + ")\\..*";
    Pattern pattern = Pattern.compile(regex);

    Map<String, String> filteredProperties = new HashMap<>();
    for (Map.Entry<String, String> entry : properties.entrySet()) {
      Matcher matcher = pattern.matcher(entry.getKey());
      if (matcher.matches()) {
        filteredProperties.put(entry.getKey(), entry.getValue());
      }
    }

    String removeRegex = "^authorization\\.chain\\.(" + pluginName + ")\\.";
    Pattern removePattern = Pattern.compile(removeRegex);

    Map<String, String> resultProperties = new HashMap<>();
    for (Map.Entry<String, String> entry : filteredProperties.entrySet()) {
      Matcher removeMatcher = removePattern.matcher(entry.getKey());
      if (removeMatcher.find()) {
        resultProperties.put(removeMatcher.replaceFirst("authorization."), entry.getValue());
      }
    }

    return resultProperties;
  }

  public static void validate(Map<String, String> properties) {
    Preconditions.checkArgument(
        properties.containsKey(CHAIN_PLUGINS_PROPERTIES_KEY),
        String.format("%s is required", CHAIN_PLUGINS_PROPERTIES_KEY));
    List<String> pluginNames =
        Arrays.stream(properties.get(CHAIN_PLUGINS_PROPERTIES_KEY).split(PLUGINS_SPLITTER))
            .map(String::trim)
            .collect(Collectors.toList());
    Preconditions.checkArgument(
        !pluginNames.isEmpty(),
        String.format("%s must have at least one plugin name", CHAIN_PLUGINS_PROPERTIES_KEY));
    Preconditions.checkArgument(
        pluginNames.size() == pluginNames.stream().distinct().count(),
        "Duplicate plugin name in %s: %s",
        CHAIN_PLUGINS_PROPERTIES_KEY,
        pluginNames);
    pluginNames.stream()
        .filter(v -> v.contains("."))
        .forEach(
            v -> {
              throw new IllegalArgumentException(
                  String.format(
                      "Plugin name cannot contain the `.` character in `%s = %s`.",
                      CHAIN_PLUGINS_PROPERTIES_KEY, properties.get(CHAIN_PLUGINS_PROPERTIES_KEY)));
            });

    Pattern pattern = Pattern.compile("^authorization\\.chain\\..*\\..*$");
    Map<String, String> filteredProperties =
        properties.entrySet().stream()
            .filter(entry -> pattern.matcher(entry.getKey()).matches())
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

    String pluginNamesPattern = String.join("|", pluginNames);
    Pattern patternPluginNames =
        Pattern.compile("^authorization\\.chain\\.(" + pluginNamesPattern + ")\\..*$");
    for (String key : filteredProperties.keySet()) {
      Matcher matcher = patternPluginNames.matcher(key);
      Preconditions.checkArgument(
          matcher.matches(),
          "The key %s does not match the pattern %s",
          key,
          patternPluginNames.pattern());
    }

    // Generate regex patterns from wildcardProperties
    List<String> wildcardProperties = ImmutableList.of(CHAIN_PROVIDER);
    for (String pluginName : pluginNames) {
      List<Pattern> patterns =
          wildcardProperties.stream()
              .map(wildcard -> "^" + wildcard.replace("*", pluginName) + "$")
              .map(Pattern::compile)
              .collect(Collectors.toList());
      // Validate properties keys
      for (Pattern pattern1 : patterns) {
        boolean matches =
            filteredProperties.keySet().stream().anyMatch(key -> pattern1.matcher(key).matches());
        Preconditions.checkArgument(
            matches,
            "Missing required property matching %s for plugin %s",
            pattern1.pattern(),
            pluginName);
      }
    }
  }
}
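A short usage sketch of the two methods above (illustrative only; the sample values mirror the configuration example in the class javadoc, and the example class sits in the same package because fetchAuthPluginProperties is package-private):

package org.apache.gravitino.authorization.ranger;

// Sketch only: shows how the "chain.<pluginName>" prefix is stripped per plugin.
import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class ChainAuthorizationPropertiesExample {
  public static void main(String[] args) {
    Map<String, String> properties =
        ImmutableMap.<String, String>builder()
            .put("authorization.chain.plugins", "hive1,hdfs1")
            .put("authorization.chain.hive1.provider", "ranger")
            .put("authorization.chain.hive1.ranger.service.type", "HadoopSQL")
            .put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080")
            .put("authorization.chain.hdfs1.provider", "ranger")
            .put("authorization.chain.hdfs1.ranger.service.type", "HDFS")
            .build();

    // Throws IllegalArgumentException if the chain configuration is malformed.
    ChainAuthorizationProperties.validate(properties);

    // Per-plugin keys lose the "chain.hive1" segment, e.g.
    // "authorization.chain.hive1.ranger.admin.url" -> "authorization.ranger.admin.url".
    Map<String, String> hive1Properties =
        ChainAuthorizationProperties.fetchAuthPluginProperties("hive1", properties);
    hive1Properties.forEach((key, value) -> System.out.println(key + " = " + value));
  }
}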
@@ -18,6 +18,9 @@
  */
 package org.apache.gravitino.authorization.ranger;

+import static org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties.RANGER_SERVICE_TYPE;
+
+import com.google.common.base.Preconditions;
 import java.util.Map;
 import org.apache.gravitino.connector.authorization.AuthorizationPlugin;
 import org.apache.gravitino.connector.authorization.BaseAuthorization;
@@ -31,16 +34,18 @@ public String shortName() {

   @Override
   protected AuthorizationPlugin newPlugin(
-      String metalake, String catalogProvider, Map<String, String> config) {
-    switch (catalogProvider) {
-      case "hive":
-      case "lakehouse-iceberg":
-      case "lakehouse-paimon":
-        return RangerAuthorizationHadoopSQLPlugin.getInstance(metalake, config);
-      case "hadoop":
-        return RangerAuthorizationHDFSPlugin.getInstance(metalake, config);
+      String metalake, String catalogProvider, Map<String, String> properties) {
+    Preconditions.checkArgument(
+        properties.containsKey(RANGER_SERVICE_TYPE),
+        String.format("%s is required", RANGER_SERVICE_TYPE));
+    String serviceType = properties.get(RANGER_SERVICE_TYPE).toUpperCase();
+    switch (serviceType) {
+      case "HADOOPSQL":
+        return RangerAuthorizationHadoopSQLPlugin.getInstance(metalake, properties);
+      case "HDFS":
+        return RangerAuthorizationHDFSPlugin.getInstance(metalake, properties);
       default:
-        throw new IllegalArgumentException("Unknown catalog provider: " + catalogProvider);
+        throw new IllegalArgumentException("Unsupported service type: " + serviceType);
     }
   }
 }
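With this change, plugin selection is driven by the Ranger service type rather than the catalog provider, so callers must now supply a service-type property. A hedged sketch of the properties such a caller would pass follows; the literal key names are assumptions inferred from the chain configuration javadoc earlier in this commit, and the authoritative constants live in RangerAuthorizationProperties, which is not shown here.

// Sketch only: key names are assumptions inferred from the chain javadoc, not verified constants.
import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class RangerServiceTypeConfigExample {
  public static void main(String[] args) {
    Map<String, String> properties =
        ImmutableMap.<String, String>builder()
            .put("authorization.ranger.service.type", "HadoopSQL") // or "HDFS"
            .put("authorization.ranger.service.name", "hiveDev")
            .put("authorization.ranger.admin.url", "http://localhost:6080")
            .put("authorization.ranger.auth.type", "simple")
            .put("authorization.ranger.username", "admin")
            .put("authorization.ranger.password", "admin")
            .build();

    // newPlugin(...) upper-cases the service type and switches on it:
    // "HADOOPSQL" -> RangerAuthorizationHadoopSQLPlugin, "HDFS" -> RangerAuthorizationHDFSPlugin,
    // anything else -> IllegalArgumentException.
    System.out.println(properties.get("authorization.ranger.service.type").toUpperCase());
  }
}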
@@ -52,7 +52,6 @@
 import org.apache.gravitino.authorization.ranger.reference.VXGroupList;
 import org.apache.gravitino.authorization.ranger.reference.VXUser;
 import org.apache.gravitino.authorization.ranger.reference.VXUserList;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
 import org.apache.gravitino.connector.authorization.AuthorizationPlugin;
 import org.apache.gravitino.exceptions.AuthorizationPluginException;
 import org.apache.gravitino.meta.AuditInfo;
@@ -88,17 +87,13 @@ public abstract class RangerAuthorizationPlugin

   protected RangerAuthorizationPlugin(String metalake, Map<String, String> config) {
     this.metalake = metalake;
-    String rangerUrl = config.get(AuthorizationPropertiesMeta.RANGER_ADMIN_URL);
-    String authType = config.get(AuthorizationPropertiesMeta.RANGER_AUTH_TYPE);
-    rangerAdminName = config.get(AuthorizationPropertiesMeta.RANGER_USERNAME);
+    RangerAuthorizationProperties.validate(config);
+    String rangerUrl = config.get(RangerAuthorizationProperties.RANGER_ADMIN_URL);
+    String authType = config.get(RangerAuthorizationProperties.RANGER_AUTH_TYPE);
+    rangerAdminName = config.get(RangerAuthorizationProperties.RANGER_USERNAME);
     // Apache Ranger Password should be minimum 8 characters with min one alphabet and one numeric.
-    String password = config.get(AuthorizationPropertiesMeta.RANGER_PASSWORD);
-    rangerServiceName = config.get(AuthorizationPropertiesMeta.RANGER_SERVICE_NAME);
-    Preconditions.checkArgument(rangerUrl != null, "Ranger admin URL is required");
-    Preconditions.checkArgument(authType != null, "Ranger auth type is required");
-    Preconditions.checkArgument(rangerAdminName != null, "Ranger username is required");
-    Preconditions.checkArgument(password != null, "Ranger password is required");
-    Preconditions.checkArgument(rangerServiceName != null, "Ranger service name is required");
+    String password = config.get(RangerAuthorizationProperties.RANGER_PASSWORD);
+    rangerServiceName = config.get(RangerAuthorizationProperties.RANGER_SERVICE_NAME);
     rangerClient = new RangerClientExtension(rangerUrl, authType, rangerAdminName, password);

     rangerHelper =