diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
index eb3a529498fa7..60ae4d58f343e 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
@@ -166,7 +166,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') {
   tasks.register('buildDependencyArtifacts') {
     group = 'ide'
     description = 'Builds artifacts needed as dependency for IDE modules'
-    dependsOn([':plugins:repository-hdfs:hadoop-client-api:shadowJar',
+    dependsOn([':plugins:repository-hdfs:hadoop-client-api:jar',
                ':x-pack:plugin:esql:compute:ann:jar',
                ':x-pack:plugin:esql:compute:gen:jar',
                ':server:generateModulesList',
diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index 4da7c24de80f1..dea1e1bdd273f 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -28,7 +28,7 @@ configurations {
 }

 dependencies {
-  api project(path: 'hadoop-client-api', configuration: 'shadow')
+  api project(path: 'hadoop-client-api', configuration: 'default')
   if (isEclipse) {
     /*
      * Eclipse can't pick up the shadow dependency so we point it at *something*
diff --git a/plugins/repository-hdfs/hadoop-client-api/build.gradle b/plugins/repository-hdfs/hadoop-client-api/build.gradle
index 24e4213780fe2..5e87b81292501 100644
--- a/plugins/repository-hdfs/hadoop-client-api/build.gradle
+++ b/plugins/repository-hdfs/hadoop-client-api/build.gradle
@@ -1,16 +1,46 @@
-apply plugin: 'elasticsearch.build'
-apply plugin: 'com.gradleup.shadow'
+apply plugin: 'elasticsearch.java'
+
+sourceSets {
+  patcher
+}
+
+configurations {
+  thejar {
+    canBeResolved = true
+  }
+}

 dependencies {
-  implementation "org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}"
+  thejar("org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}") {
+    transitive = false
+  }
+
+  patcherImplementation 'org.ow2.asm:asm:9.7.1'
+  patcherImplementation 'org.ow2.asm:asm-tree:9.7.1'
 }

-tasks.named('shadowJar').configure {
-  exclude 'org/apache/hadoop/util/ShutdownHookManager$*.class'
+def outputDir = layout.buildDirectory.dir("patched-classes")
+
+def patchTask = tasks.register("patchClasses", JavaExec) {
+  inputs.files(configurations.thejar).withPathSensitivity(PathSensitivity.RELATIVE)
+  inputs.files(sourceSets.patcher.output).withPathSensitivity(PathSensitivity.RELATIVE)
+  outputs.dir(outputDir)
+  classpath = sourceSets.patcher.runtimeClasspath
+  mainClass = 'org.elasticsearch.hdfs.patch.HdfsClassPatcher'
+  doFirst {
+    args(configurations.thejar.singleFile, outputDir.get().asFile)
+  }
 }

-['jarHell', 'thirdPartyAudit', 'forbiddenApisMain', 'splitPackagesAudit'].each {
-  tasks.named(it).configure {
-    enabled = false
+tasks.named('jar').configure {
+  dependsOn(configurations.thejar)
+
+  from(patchTask)
+  from({ project.zipTree(configurations.thejar.singleFile) }) {
+    eachFile {
+      if (outputDir.get().file(it.relativePath.pathString).asFile.exists()) {
+        it.exclude()
+      }
+    }
   }
 }
diff --git a/plugins/repository-hdfs/hadoop-client-api/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java b/plugins/repository-hdfs/hadoop-client-api/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
deleted file mode 100644
index c3d15dc06e7c1..0000000000000
--- a/plugins/repository-hdfs/hadoop-client-api/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-
-package org.apache.hadoop.util;
-
-import java.util.concurrent.TimeUnit;
-
-/**
- * A replacement for the ShutdownHookManager from hadoop.
- *
- * This class does not actually add a shutdown hook. Hadoop's shutdown hook
- * manager does not fail gracefully when it lacks security manager permissions
- * to add shutdown hooks. This implements the same api as the hadoop class, but
- * with no-ops.
- */
-public class ShutdownHookManager {
-    private static final ShutdownHookManager MGR = new ShutdownHookManager();
-
-    public static ShutdownHookManager get() {
-        return MGR;
-    }
-
-    private ShutdownHookManager() {}
-
-    public void addShutdownHook(Runnable shutdownHook, int priority) {}
-
-    public void addShutdownHook(Runnable shutdownHook, int priority, long timeout, TimeUnit unit) {}
-
-    public boolean removeShutdownHook(Runnable shutdownHook) {
-        return false;
-    }
-
-    public boolean hasShutdownHook(Runnable shutdownHook) {
-        return false;
-    }
-
-    public boolean isShutdownInProgress() {
-        return false;
-    }
-
-    public void clearShutdownHooks() {}
-}
diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java
new file mode 100644
index 0000000000000..6636b39445964
--- /dev/null
+++ b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.hdfs.patch;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.ClassWriter;
+
+import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+
+public class HdfsClassPatcher {
+    static final Map<String, Function<ClassWriter, ClassVisitor>> patchers = Map.of(
+        "org/apache/hadoop/util/ShutdownHookManager.class",
+        ShutdownHookManagerPatcher::new,
+        "org/apache/hadoop/util/Shell.class",
+        ShellPatcher::new
+    );
+
+    public static void main(String[] args) throws Exception {
+        String jarPath = args[0];
+        Path outputDir = Paths.get(args[1]);
+
+        try (JarFile jarFile = new JarFile(new File(jarPath))) {
+            for (var patcher : patchers.entrySet()) {
+                JarEntry jarEntry = jarFile.getJarEntry(patcher.getKey());
+                if (jarEntry == null) {
+                    throw new IllegalArgumentException("path [" + patcher.getKey() + "] not found in [" + jarPath + "]");
+                }
+                byte[] classToPatch = jarFile.getInputStream(jarEntry).readAllBytes();
+
+                ClassReader classReader = new ClassReader(classToPatch);
+                ClassWriter classWriter = new ClassWriter(classReader, 0);
+                classReader.accept(patcher.getValue().apply(classWriter), 0);
+
+                Path outputFile = outputDir.resolve(patcher.getKey());
+                Files.createDirectories(outputFile.getParent());
+                Files.write(outputFile, classWriter.toByteArray());
+            }
+        }
+    }
+}
diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/MethodReplacement.java b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/MethodReplacement.java
new file mode 100644
index 0000000000000..e07a32cc294a5
--- /dev/null
+++ b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/MethodReplacement.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.hdfs.patch;
+
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+
+public class MethodReplacement extends MethodVisitor {
+    private final MethodVisitor delegate;
+    private final Runnable bodyWriter;
+
+    MethodReplacement(MethodVisitor delegate, Runnable bodyWriter) {
+        super(Opcodes.ASM9);
+        this.delegate = delegate;
+        this.bodyWriter = bodyWriter;
+    }
+
+    @Override
+    public void visitCode() {
+        // delegate.visitCode();
+        bodyWriter.run();
+        // delegate.visitEnd();
+    }
+
+    @Override
+    public void visitMaxs(int maxStack, int maxLocals) {
+        delegate.visitMaxs(maxStack, maxLocals);
+    }
+}
diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShellPatcher.java b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShellPatcher.java
new file mode 100644
index 0000000000000..397b63e434ba2
--- /dev/null
+++ b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShellPatcher.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.hdfs.patch;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.ClassWriter;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+
+class ShellPatcher extends ClassVisitor {
+
+    ShellPatcher(ClassWriter classWriter) {
+        super(Opcodes.ASM9, classWriter);
+    }
+
+    @Override
+    public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) {
+        MethodVisitor mv = super.visitMethod(access, name, descriptor, signature, exceptions);
+        if (name.equals("isSetsidSupported")) {
+            return new MethodReplacement(mv, () -> {
+                mv.visitInsn(Opcodes.ICONST_0);
+                mv.visitInsn(Opcodes.IRETURN);
+            });
+        }
+        return mv;
+    }
+}
diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java
new file mode 100644
index 0000000000000..1235b5af9002f
--- /dev/null
+++ b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.hdfs.patch;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.ClassWriter;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+import org.objectweb.asm.Type;
+
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+
+class ShutdownHookManagerPatcher extends ClassVisitor {
+    private static final String CLASSNAME = "org/apache/hadoop/util/ShutdownHookManager";
+    private static final Set<String> VOID_METHODS = Set.of("addShutdownHook", "clearShutdownHooks");
+    private static final Set<String> BOOLEAN_METHODS = Set.of("removeShutdownHook", "hasShutdownHook", "isShutdownInProgress");
+
+    ShutdownHookManagerPatcher(ClassWriter classWriter) {
+        super(Opcodes.ASM9, classWriter);
+    }
+
+    @Override
+    public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) {
+        MethodVisitor mv = super.visitMethod(access, name, descriptor, signature, exceptions);
+        if (VOID_METHODS.contains(name)) {
+            // make void methods no-ops
+            return new MethodReplacement(mv, () -> { mv.visitInsn(Opcodes.RETURN); });
+        } else if (BOOLEAN_METHODS.contains(name)) {
+            // make boolean methods always return false
+            return new MethodReplacement(mv, () -> {
+                mv.visitInsn(Opcodes.ICONST_0);
+                mv.visitInsn(Opcodes.IRETURN);
+            });
+        } else if (name.equals("<clinit>")) {
+            return new MethodReplacement(mv, () -> {
+                // just initialize the statics, don't actually get the runtime to add a shutdown hook
+
+                var classType = Type.getObjectType(CLASSNAME);
+                mv.visitTypeInsn(Opcodes.NEW, CLASSNAME);
+                mv.visitInsn(Opcodes.DUP);
+                mv.visitMethodInsn(Opcodes.INVOKESPECIAL, CLASSNAME, "<init>", "()V", false);
+                mv.visitFieldInsn(Opcodes.PUTSTATIC, CLASSNAME, "MGR", classType.getDescriptor());
+
+                var timeUnitType = Type.getType(TimeUnit.class);
+                mv.visitFieldInsn(Opcodes.GETSTATIC, timeUnitType.getInternalName(), "SECONDS", timeUnitType.getDescriptor());
+                mv.visitFieldInsn(Opcodes.PUTSTATIC, CLASSNAME, "TIME_UNIT_DEFAULT", timeUnitType.getDescriptor());
+
+                var executorServiceType = Type.getType(ExecutorService.class);
+                mv.visitInsn(Opcodes.ACONST_NULL);
+                mv.visitFieldInsn(Opcodes.PUTSTATIC, CLASSNAME, "EXECUTOR", executorServiceType.getDescriptor());
+
+                mv.visitInsn(Opcodes.RETURN);
+            });
+        }
+        return mv;
+    }
+}
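For reference only (not part of the change above): a minimal, self-contained sketch of the same ASM rewrite applied to a toy class, showing how a method body is swapped for a stub at bytecode level and then verified. The PatchDemo and Toy names are illustrative; the only assumption is that org.ow2.asm:asm is on the classpath, the same dependency the patcher source set uses.

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;

import java.lang.reflect.Method;

public class PatchDemo {

    // Toy stand-in for org.apache.hadoop.util.Shell
    public static class Toy {
        public boolean isSetsidSupported() {
            return true;
        }
    }

    public static void main(String[] args) throws Exception {
        // Read the compiled Toy class from the classpath, much like HdfsClassPatcher
        // reads entries out of the hadoop-client-api jar.
        ClassReader reader = new ClassReader(Toy.class.getName());
        ClassWriter writer = new ClassWriter(reader, 0);

        // Rewrite isSetsidSupported() to "return false"; every other method is copied through.
        reader.accept(new ClassVisitor(Opcodes.ASM9, writer) {
            @Override
            public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) {
                MethodVisitor mv = super.visitMethod(access, name, descriptor, signature, exceptions);
                if (name.equals("isSetsidSupported") == false) {
                    return mv;
                }
                // Same idea as MethodReplacement: drop the original instructions and emit a fixed body.
                return new MethodVisitor(Opcodes.ASM9) {
                    @Override
                    public void visitCode() {
                        mv.visitCode();
                        mv.visitInsn(Opcodes.ICONST_0);
                        mv.visitInsn(Opcodes.IRETURN);
                        mv.visitMaxs(1, 1);
                        mv.visitEnd();
                    }
                };
            }
        }, 0);

        byte[] patched = writer.toByteArray();

        // Load the patched bytes in a throwaway class loader and confirm the new behaviour.
        ClassLoader loader = new ClassLoader(PatchDemo.class.getClassLoader()) {
            @Override
            protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
                if (name.equals(Toy.class.getName())) {
                    return defineClass(name, patched, 0, patched.length);
                }
                return super.loadClass(name, resolve);
            }
        };

        Class<?> patchedToy = loader.loadClass(Toy.class.getName());
        Method method = patchedToy.getMethod("isSetsidSupported");
        // prints: patched isSetsidSupported() -> false
        System.out.println("patched isSetsidSupported() -> " + method.invoke(patchedToy.getDeclaredConstructor().newInstance()));
    }
}

In the build above, the same transformation runs once at build time via the patchClasses JavaExec task, and the jar task then overlays the patched .class files on top of the upstream jar contents, excluding the originals.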