Skip to content

Commit

Permalink
Add Transport UDF plugin to create platform SourceSets and generate wrappers
Browse files Browse the repository at this point in the history
  • Loading branch information
shardulm94 committed Apr 2, 2019
1 parent 35a3ffc commit 50a05ab
Show file tree
Hide file tree
Showing 33 changed files with 706 additions and 74 deletions.
Binary file modified gradle/wrapper/gradle-wrapper.jar
Binary file not shown.
3 changes: 1 addition & 2 deletions gradle/wrapper/gradle-wrapper.properties
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
#Wed Aug 08 12:17:32 PDT 2018
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.0-all.zip
2 changes: 1 addition & 1 deletion gradlew
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
DEFAULT_JVM_OPTS='"-Xmx64m"'

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
Expand Down
2 changes: 1 addition & 1 deletion gradlew.bat
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DEFAULT_JVM_OPTS="-Xmx64m"

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
Expand Down
1 change: 1 addition & 0 deletions settings.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ def modules = [
'transportable-udfs-codegen',
'transportable-udfs-compile-utils',
'transportable-udfs-hive',
'transportable-udfs-plugin',
'transportable-udfs-presto',
'transportable-udfs-spark',
'transportable-udfs-test:transportable-udfs-test-api',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@


public class Constants {
public static final String UDF_RESOURCE_FILE_PATH = "META-INF/transport-udfs/udf-properties.json";
public static final String UDF_RESOURCE_FILE_PATH = "META-INF/transport-udfs/metadata.json";

public static final String INTERFACE_NOT_IMPLEMENTED_ERROR =
String.format(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment

private void processImpl(RoundEnvironment roundEnv) {
if (roundEnv.processingOver()) {
generateUDFPropertiesFile();
generateUDFMetadataFile();
} else {
processElements(roundEnv.getRootElements());
}
Expand Down Expand Up @@ -178,18 +178,18 @@ private boolean typeElementOverridesTopLevelStdUDFMethods(TypeElement typeElemen
}

/**
* Generates the UDF properties resource file in a pretty-printed JSON format
* Generates the UDF metadata resource file in a pretty-printed JSON format
*/
private void generateUDFPropertiesFile() {
private void generateUDFMetadataFile() {
Filer filer = processingEnv.getFiler();
try {
FileObject fileObject = filer.createResource(StandardLocation.CLASS_OUTPUT, "", Constants.UDF_RESOURCE_FILE_PATH);
try (Writer writer = fileObject.openWriter()) {
_transportUdfMetadata.toJson(writer);
}
debug("Wrote Transport UDF properties file to: " + fileObject.toUri());
debug("Wrote Transport UDF metadata file to: " + fileObject.toUri());
} catch (IOException e) {
fatalError(String.format("Unable to create UDF properties resource file: %s", e));
fatalError(String.format("Unable to create UDF metadata resource file: %s", e));
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,14 +44,14 @@ void teardown() {
_resourcesOutputDir.delete();
}

void testWrapperGenerator(String udfPropertiesFileResource, String expectedSourcesOutputFolderResource) {
testWrapperGenerator(udfPropertiesFileResource, expectedSourcesOutputFolderResource, null);
void testWrapperGenerator(String udfMetadataFileResource, String expectedSourcesOutputFolderResource) {
testWrapperGenerator(udfMetadataFileResource, expectedSourcesOutputFolderResource, null);
}

void testWrapperGenerator(String udfPropertiesFileResource, String expectedSourcesOutputFolderResource,
void testWrapperGenerator(String udfMetadataFileResource, String expectedSourcesOutputFolderResource,
String expectedResourcesOutputFolderResource) {
WrapperGeneratorContext context =
new WrapperGeneratorContext(TestUtils.getUDFPropertiesFromResource(udfPropertiesFileResource),
new WrapperGeneratorContext(TestUtils.getUDFMetadataFromResource(udfMetadataFileResource),
_sourcesOutputDir, _resourcesOutputDir);

getWrapperGenerator().generateWrappers(context);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,6 @@ WrapperGenerator getWrapperGenerator() {

@Test
public void testHiveWrapperGenerator() {
testWrapperGenerator("inputs/sample-udf-properties.json", "outputs/sample-udf-properties/hive/sources");
testWrapperGenerator("inputs/sample-udf-metadata.json", "outputs/sample-udf-metadata/hive/sources");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ WrapperGenerator getWrapperGenerator() {

@Test
public void testPrestoWrapperGenerator() {
testWrapperGenerator("inputs/sample-udf-properties.json", "outputs/sample-udf-properties/presto/sources",
"outputs/sample-udf-properties/presto/resources");
testWrapperGenerator("inputs/sample-udf-metadata.json", "outputs/sample-udf-metadata/presto/sources",
"outputs/sample-udf-metadata/presto/resources");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,6 @@ WrapperGenerator getWrapperGenerator() {

@Test
public void testSparkWrapperGenerator() {
testWrapperGenerator("inputs/sample-udf-properties.json", "outputs/sample-udf-properties/spark/sources");
testWrapperGenerator("inputs/sample-udf-metadata.json", "outputs/sample-udf-metadata/spark/sources");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,12 @@ class TestUtils {
private TestUtils() {
}

static TransportUDFMetadata getUDFPropertiesFromResource(String resource) {
static TransportUDFMetadata getUDFMetadataFromResource(String resource) {
try (InputStreamReader reader = new InputStreamReader(
Thread.currentThread().getContextClassLoader().getResourceAsStream(resource))) {
return TransportUDFMetadata.fromJson(reader);
} catch (IOException e) {
throw new RuntimeException("Could not read UDF properties from resource: " + resource, e);
throw new RuntimeException("Could not read UDF metadata from resource: " + resource, e);
}
}

Expand Down
2 changes: 1 addition & 1 deletion transportable-udfs-examples/gradlew
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
DEFAULT_JVM_OPTS='"-Xmx64m"'

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
Expand Down
2 changes: 1 addition & 1 deletion transportable-udfs-examples/gradlew.bat
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DEFAULT_JVM_OPTS="-Xmx64m"

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
Expand Down
Original file line number Diff line number Diff line change
@@ -1,67 +1,28 @@
buildscript {
repositories {
mavenCentral()
}
dependencies {
classpath('com.linkedin.transport:transportable-udfs-plugin')
}
}

apply plugin: 'java'
apply plugin: 'com.linkedin.transport.plugin'

dependencies {
compile('com.linkedin.transport:transportable-udfs-api')
// TODO: Reference all external dependencies from a single gradle file
compile('com.google.guava:guava:24.1-jre')
compile('org.apache.commons:commons-io:1.3.2')
testCompile('com.linkedin.transport:transportable-udfs-test-api')
}

// ============================================================================
// transportable-udfs-plugin cannot be applied to this module as the plugin is also
// built in this project. In projects using the Transportable UDF framework, the
// following code will be applied by the plugin
// ============================================================================

dependencies {
annotationProcessor('com.linkedin.transport:transportable-udfs-annotation-processor')
}
licenseHive.enabled = false
licensePresto.enabled = false
licenseSpark.enabled = false

// TODO: Add a debugPlatform flag to allow debugging specific test methods in IntelliJ
// for a particular platform other than default

configurations {
platformTestRuntime { extendsFrom testRuntime }
hiveTestRuntime { extendsFrom platformTestRuntime }
prestoTestRuntime { extendsFrom platformTestRuntime }
sparkTestRuntime { extendsFrom platformTestRuntime }
}

dependencies {
testRuntimeOnly('com.linkedin.transport:transportable-udfs-test-generic')
platformTestRuntime sourceSets.main.output, sourceSets.test.output
hiveTestRuntime('com.linkedin.transport:transportable-udfs-test-hive')
prestoTestRuntime('com.linkedin.transport:transportable-udfs-test-presto')
sparkTestRuntime('com.linkedin.transport:transportable-udfs-test-spark')
}

task hiveTest(type: Test, dependsOn: test) {
group 'Verification'
description 'Runs the Hive tests.'
testClassesDirs = sourceSets.test.output.classesDirs
classpath = configurations.hiveTestRuntime
useTestNG()
}

task prestoTest(type: Test, dependsOn: test) {
group 'Verification'
description 'Runs the Presto tests.'
testClassesDirs = sourceSets.test.output.classesDirs
classpath = configurations.prestoTestRuntime
useTestNG()
}

task sparkTest(type: Test, dependsOn: test) {
group 'Verification'
description 'Runs the Spark tests.'
testClassesDirs = sourceSets.test.output.classesDirs
classpath = configurations.sparkTestRuntime
useTestNG()
}

check.dependsOn(hiveTest, prestoTest, sparkTest)

prestoTest {
exclude '**/TestArrayFillFunctionFailsOnPresto.class'
exclude '**/TestStructCreateByIndexFunctionFailsOnPresto.class'
Expand Down
36 changes: 36 additions & 0 deletions transportable-udfs-plugin/build.gradle
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
// Build script for the Transport UDF Gradle plugin
// (published as plugin id 'com.linkedin.transport.plugin').
plugins {
id 'java'
id 'java-gradle-plugin'
}

// Registers the plugin with the java-gradle-plugin machinery so the plugin
// marker artifact and descriptor are generated at publish time.
gradlePlugin {
plugins {
simplePlugin {
id = 'com.linkedin.transport.plugin'
implementationClass = 'com.linkedin.transport.plugin.TransportPlugin'
}
}
}

dependencies {
// Sibling modules providing the UDF API and the wrapper code generators.
compile project(':transportable-udfs-api')
compile project(':transportable-udfs-codegen')
compile ('com.google.guava:guava:24.1-jre')
compile ('com.google.code.gson:gson:2.8.5')
// Spock for plugin tests; its bundled Groovy is excluded to use Gradle's own.
testCompile('org.spockframework:spock-core:1.1-groovy-2.4') {
exclude group: 'org.codehaus.groovy'
}
}

// Writes the default Transport/platform versions into a properties file so the
// plugin can pin matching dependency versions at runtime (read by Defaults.java).
def writeVersionInfo = { file ->
ant.propertyfile(file: file) {
entry(key: "transport-version", value: version)
entry(key: "hive-version", value: '1.2.2')
entry(key: "presto-version", value: '0.203')
entry(key: "spark-version", value: '2.3.0')
}
}

// Runs after resources are processed so version-info.properties lands in the
// final resources output directory and is packaged into the plugin jar.
processResources.doLast {
writeVersionInfo(new File(sourceSets.main.output.resourcesDir, "version-info.properties"))
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
/**
* Copyright 2019 LinkedIn Corporation. All rights reserved.
* Licensed under the BSD-2 Clause license.
* See LICENSE in the project root for license information.
*/
package com.linkedin.transport.plugin;

import com.google.common.collect.ImmutableList;
import com.linkedin.transport.codegen.HiveWrapperGenerator;
import com.linkedin.transport.codegen.PrestoWrapperGenerator;
import com.linkedin.transport.codegen.SparkWrapperGenerator;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Properties;

import static com.linkedin.transport.plugin.DependencyConfigurationName.*;


/**
 * Stores default configurations for the Transport UDF plugin: the dependency versions to apply and
 * the platforms (Presto, Hive, Spark) for which platform SourceSets and wrappers are configured.
 */
class Defaults {

  private Defaults() {
  }

  // The versions of the Transport and supported platforms to apply corresponding versions of the
  // platform dependencies. Loaded from version-info.properties, which is written into the plugin
  // jar at build time (see the plugin's build.gradle).
  private static final Properties DEFAULT_VERSIONS;

  static {
    DEFAULT_VERSIONS = new Properties();
    // Load from this class's own classloader rather than the thread context classloader: inside a
    // Gradle daemon the context classloader is not guaranteed to see the plugin's resources.
    // try-with-resources closes the stream, and a missing resource produces a clear error instead
    // of a NullPointerException from Properties.load(null).
    try (InputStream versionInfo =
        Defaults.class.getClassLoader().getResourceAsStream("version-info.properties")) {
      if (versionInfo == null) {
        throw new RuntimeException("Could not find version-info.properties on the plugin classpath");
      }
      DEFAULT_VERSIONS.load(versionInfo);
    } catch (IOException e) {
      throw new RuntimeException("Error loading version-info.properties", e);
    }
  }

  /** Dependencies applied to the main SourceSet of every Transport UDF project. */
  static final List<DependencyConfiguration> MAIN_SOURCE_SET_DEPENDENCIES = ImmutableList.of(
      getDependencyConfiguration(IMPLEMENTATION, "com.linkedin.transport:transportable-udfs-api", "transport"),
      getDependencyConfiguration(ANNOTATION_PROCESSOR, "com.linkedin.transport:transportable-udfs-annotation-processor",
          "transport")
  );

  /** Dependencies applied to the test SourceSet of every Transport UDF project. */
  static final List<DependencyConfiguration> TEST_SOURCE_SET_DEPENDENCIES = ImmutableList.of(
      getDependencyConfiguration(IMPLEMENTATION, "com.linkedin.transport:transportable-udfs-test-api", "transport"),
      getDependencyConfiguration(RUNTIME_ONLY, "com.linkedin.transport:transportable-udfs-test-generic", "transport")
  );

  /**
   * The platforms supported out of the box, each with its wrapper generator, implementation
   * language, and the per-platform main/test dependencies to add to the generated SourceSets.
   */
  static final List<PlatformConfiguration> DEFAULT_PLATFORMS = ImmutableList.of(
      new PlatformConfiguration(
          "presto",
          Language.JAVA,
          PrestoWrapperGenerator.class,
          ImmutableList.of(
              getDependencyConfiguration(IMPLEMENTATION, "com.linkedin.transport:transportable-udfs-presto",
                  "transport"),
              getDependencyConfiguration(COMPILE_ONLY, "com.facebook.presto:presto-main", "presto")
          ),
          ImmutableList.of(
              getDependencyConfiguration(RUNTIME_ONLY, "com.linkedin.transport:transportable-udfs-test-presto",
                  "transport")
          )
      ),
      new PlatformConfiguration(
          "hive",
          Language.JAVA,
          HiveWrapperGenerator.class,
          ImmutableList.of(
              getDependencyConfiguration(IMPLEMENTATION, "com.linkedin.transport:transportable-udfs-hive", "transport"),
              getDependencyConfiguration(COMPILE_ONLY, "org.apache.hive:hive-exec", "hive")
          ),
          ImmutableList.of(
              getDependencyConfiguration(RUNTIME_ONLY, "com.linkedin.transport:transportable-udfs-test-hive",
                  "transport")
          )
      ),
      new PlatformConfiguration(
          "spark",
          Language.SCALA,
          SparkWrapperGenerator.class,
          ImmutableList.of(
              getDependencyConfiguration(IMPLEMENTATION, "com.linkedin.transport:transportable-udfs-spark",
                  "transport"),
              getDependencyConfiguration(COMPILE_ONLY, "org.apache.spark:spark-sql_2.11", "spark")
          ),
          ImmutableList.of(
              getDependencyConfiguration(RUNTIME_ONLY, "com.linkedin.transport:transportable-udfs-test-spark",
                  "transport")
          )
      )
  );

  /**
   * Builds a {@link DependencyConfiguration} for the given module coordinate, appending the
   * default version recorded for the given platform key (e.g. "hive" resolves "hive-version").
   */
  private static DependencyConfiguration getDependencyConfiguration(DependencyConfigurationName configurationName,
      String moduleCoordinate, String platform) {
    return new DependencyConfiguration(configurationName,
        moduleCoordinate + ":" + DEFAULT_VERSIONS.getProperty(platform + "-version"));
  }
}
Loading

0 comments on commit 50a05ab

Please sign in to comment.