diff --git a/distribution/pom.xml b/distribution/pom.xml
index 5aa71130f777..cdd5359ff650 100644
--- a/distribution/pom.xml
+++ b/distribution/pom.xml
@@ -184,6 +184,9 @@
-Ddruid.extensions.loadList=[]
-Ddruid.extensions.directory=${project.build.directory}/extensions
+
+                            <argument>-Ddruid.extensions.commonDependenciesDir=${project.build.directory}/commonDeps</argument>
+
-Ddruid.extensions.hadoopDependenciesDir=${project.build.directory}/hadoop-dependencies
@@ -385,6 +388,9 @@
-Ddruid.extensions.loadList=[]
-Ddruid.extensions.directory=${project.build.directory}/extensions
+
+                            <argument>-Ddruid.extensions.commonDependenciesDir=${project.build.directory}/commonDeps</argument>
+
-Ddruid.extensions.hadoopDependenciesDir=${project.build.directory}/hadoop-dependencies
@@ -493,6 +499,9 @@
-Ddruid.extensions.loadList=[]
-Ddruid.extensions.directory=${project.build.directory}/extensions
+
+                            <argument>-Ddruid.extensions.commonDependenciesDir=${project.build.directory}/commonDeps</argument>
+
-Ddruid.extensions.hadoopDependenciesDir=${project.build.directory}/hadoop-dependencies
diff --git a/distribution/src/assembly/assembly.xml b/distribution/src/assembly/assembly.xml
index ff8e0d2fdd5b..e1cd8513705c 100644
--- a/distribution/src/assembly/assembly.xml
+++ b/distribution/src/assembly/assembly.xml
@@ -34,6 +34,15 @@
extensions
+        <fileSet>
+            <directory>${project.build.directory}/commonDeps</directory>
+            <includes>
+                <include>*</include>
+            </includes>
+            <outputDirectory>lib</outputDirectory>
+        </fileSet>
+
${project.build.directory}/hadoop-dependencies
diff --git a/distribution/src/assembly/integration-test-assembly.xml b/distribution/src/assembly/integration-test-assembly.xml
index 7b2e91a18867..4177022e4ea4 100644
--- a/distribution/src/assembly/integration-test-assembly.xml
+++ b/distribution/src/assembly/integration-test-assembly.xml
@@ -35,6 +35,14 @@
extensions
+        <fileSet>
+            <directory>${project.build.directory}/commonDeps</directory>
+            <includes>
+                <include>*</include>
+            </includes>
+            <outputDirectory>lib</outputDirectory>
+        </fileSet>
+
${project.build.directory}/hadoop-dependencies
diff --git a/pom.xml b/pom.xml
index e70542cdea64..6ea1d1e39245 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1873,7 +1873,7 @@
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-assembly-plugin</artifactId>
-                    <version>3.1.0</version>
+                    <version>3.4.0</version>
org.codehaus.mojo
diff --git a/processing/src/main/java/org/apache/druid/guice/ExtensionsConfig.java b/processing/src/main/java/org/apache/druid/guice/ExtensionsConfig.java
index 3af3db347aa1..4487ac3c404b 100644
--- a/processing/src/main/java/org/apache/druid/guice/ExtensionsConfig.java
+++ b/processing/src/main/java/org/apache/druid/guice/ExtensionsConfig.java
@@ -46,6 +46,9 @@ public class ExtensionsConfig
@JsonProperty
private String hadoopContainerDruidClasspath = null;
+ @JsonProperty
+ private String commonDependenciesDir = null;
+
//Only applicable when hadoopContainerDruidClasspath is explicitly specified.
@JsonProperty
private boolean addExtensionsToHadoopContainer = false;
@@ -73,6 +76,11 @@ public String getHadoopDependenciesDir()
return hadoopDependenciesDir;
}
+ public String getCommonDependenciesDir()
+ {
+ return commonDependenciesDir;
+ }
+
public String getHadoopContainerDruidClasspath()
{
return hadoopContainerDruidClasspath;
diff --git a/services/src/main/java/org/apache/druid/cli/PullDependencies.java b/services/src/main/java/org/apache/druid/cli/PullDependencies.java
index f0588d2b372d..7d1595d163b8 100644
--- a/services/src/main/java/org/apache/druid/cli/PullDependencies.java
+++ b/services/src/main/java/org/apache/druid/cli/PullDependencies.java
@@ -62,6 +62,7 @@
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@@ -78,6 +79,8 @@ public class PullDependencies implements Runnable
"https://repo1.maven.org/maven2/"
);
+  private static final HashMap<Artifact, File> ARTIFACT_FILE_HASH_MAP = new HashMap<>();
+
private static final Dependencies PROVIDED_BY_CORE_DEPENDENCIES =
Dependencies.builder()
.put("com.squareup.okhttp", "okhttp")
@@ -260,14 +263,24 @@ public void run()
final File extensionsDir = new File(extensionsConfig.getDirectory());
final File hadoopDependenciesDir = new File(extensionsConfig.getHadoopDependenciesDir());
+ File commonDependenciesDir = null;
+ if (extensionsConfig.getCommonDependenciesDir() != null) {
+ commonDependenciesDir = new File(extensionsConfig.getCommonDependenciesDir());
+ }
try {
if (clean) {
FileUtils.deleteDirectory(extensionsDir);
FileUtils.deleteDirectory(hadoopDependenciesDir);
+ if (commonDependenciesDir != null) {
+ FileUtils.deleteDirectory(commonDependenciesDir);
+ }
}
FileUtils.mkdirp(extensionsDir);
FileUtils.mkdirp(hadoopDependenciesDir);
+ if (commonDependenciesDir != null) {
+ FileUtils.mkdirp(commonDependenciesDir);
+ }
}
catch (IOException e) {
log.error(e, "Unable to clear or create extension directory at [%s]", extensionsDir);
@@ -289,7 +302,7 @@ public void run()
File currExtensionDir = new File(extensionsDir, versionedArtifact.getArtifactId());
createExtensionDirectory(coordinate, currExtensionDir);
- downloadExtension(versionedArtifact, currExtensionDir);
+ downloadExtension(versionedArtifact, currExtensionDir, commonDependenciesDir);
}
log.info("Finish downloading dependencies for extension coordinates: [%s]", coordinates);
@@ -308,7 +321,7 @@ public void run()
currExtensionDir = new File(currExtensionDir, versionedArtifact.getVersion());
createExtensionDirectory(hadoopCoordinate, currExtensionDir);
- downloadExtension(versionedArtifact, currExtensionDir, hadoopExclusions);
+ downloadExtension(versionedArtifact, currExtensionDir, hadoopExclusions, null);
}
log.info("Finish downloading dependencies for hadoop extension coordinates: [%s]", hadoopCoordinates);
}
@@ -341,12 +354,12 @@ protected Artifact getArtifact(String coordinate)
* @param versionedArtifact The maven artifact of the extension
* @param toLocation The location where this extension will be downloaded to
*/
- private void downloadExtension(Artifact versionedArtifact, File toLocation)
+ private void downloadExtension(Artifact versionedArtifact, File toLocation, File commonDependenciesDir)
{
- downloadExtension(versionedArtifact, toLocation, PROVIDED_BY_CORE_DEPENDENCIES);
+ downloadExtension(versionedArtifact, toLocation, PROVIDED_BY_CORE_DEPENDENCIES, commonDependenciesDir);
}
- private void downloadExtension(Artifact versionedArtifact, File toLocation, Dependencies exclusions)
+ private void downloadExtension(Artifact versionedArtifact, File toLocation, Dependencies exclusions, File commonDependenciesDir)
{
final CollectRequest collectRequest = new CollectRequest();
collectRequest.setRoot(new Dependency(versionedArtifact, JavaScopes.RUNTIME));
@@ -400,8 +413,14 @@ private void downloadExtension(Artifact versionedArtifact, File toLocation, Depe
for (Artifact artifact : artifacts) {
if (exclusions.contain(artifact)) {
log.debug("Skipped Artifact[%s]", artifact);
+ } else if (commonDependenciesDir != null && ARTIFACT_FILE_HASH_MAP.containsKey(artifact)) {
+ log.info("Copying file [%s] to common dependencies directory [%s]", artifact.getFile().getName(), commonDependenciesDir.getAbsolutePath());
+ org.apache.commons.io.FileUtils.copyFileToDirectory(artifact.getFile(), commonDependenciesDir);
+ org.apache.commons.io.FileUtils.deleteQuietly(ARTIFACT_FILE_HASH_MAP.get(artifact));
} else {
log.info("Adding file [%s] at [%s]", artifact.getFile().getName(), toLocation.getAbsolutePath());
+ File file = new File(toLocation, artifact.getFile().getName());
+ ARTIFACT_FILE_HASH_MAP.put(artifact, file);
org.apache.commons.io.FileUtils.copyFileToDirectory(artifact.getFile(), toLocation);
}
}
diff --git a/services/src/test/java/org/apache/druid/cli/PullDependenciesTest.java b/services/src/test/java/org/apache/druid/cli/PullDependenciesTest.java
index 851d0f177202..958863075561 100644
--- a/services/src/test/java/org/apache/druid/cli/PullDependenciesTest.java
+++ b/services/src/test/java/org/apache/druid/cli/PullDependenciesTest.java
@@ -100,9 +100,13 @@ public class PullDependenciesTest
private final Artifact extension_B = new DefaultArtifact(EXTENSION_B_COORDINATE);
private final Artifact hadoop_client_2_3_0 = new DefaultArtifact(HADOOP_CLIENT_2_3_0_COORDINATE);
private final Artifact hadoop_client_2_4_0 = new DefaultArtifact(HADOOP_CLIENT_2_4_0_COORDINATE);
+  private List<String> commonDependencyList;
+  private List<String> extension_A_expected;
+  private List<String> extension_B_expected;
private PullDependencies pullDependencies;
private File rootExtensionsDir;
private File rootHadoopDependenciesDir;
+ private File rootCommonDependenciesDir;
@Before
public void setUp() throws Exception
@@ -110,16 +114,20 @@ public void setUp() throws Exception
localRepo = temporaryFolder.newFolder("local_repo");
extensionToDependency = new HashMap<>();
- extensionToDependency.put(extension_A, ImmutableList.of("a", "b", "c"));
- extensionToDependency.put(extension_B, ImmutableList.of("d", "e"));
+ extensionToDependency.put(extension_A, ImmutableList.of("a", "b", "c", "d"));
+ extensionToDependency.put(extension_B, ImmutableList.of("c", "d", "e"));
extensionToDependency.put(hadoop_client_2_3_0, ImmutableList.of("f", "g"));
extensionToDependency.put(
hadoop_client_2_4_0,
ImmutableList.of("h", "i", HADOOP_CLIENT_VULNERABLE_ARTIFACTID1, HADOOP_CLIENT_VULNERABLE_ARTIFACTID2)
);
+ commonDependencyList = ImmutableList.of("c", "d");
+ extension_A_expected = ImmutableList.of("a", "b");
+ extension_B_expected = ImmutableList.of("e");
rootExtensionsDir = temporaryFolder.newFolder("extensions");
rootHadoopDependenciesDir = temporaryFolder.newFolder("druid_hadoop_dependencies");
+ rootCommonDependenciesDir = temporaryFolder.newFolder("commonDeps");
RepositorySystem realRepositorySystem = RealRepositorySystemUtil.newRepositorySystem();
RepositorySystem spyMockRepositorySystem = spy(realRepositorySystem);
@@ -150,6 +158,12 @@ public String getHadoopDependenciesDir()
{
return rootHadoopDependenciesDir.getAbsolutePath();
}
+
+ @Override
+ public String getCommonDependenciesDir()
+ {
+ return rootCommonDependenciesDir.getAbsolutePath();
+ }
},
HADOOP_EXCLUSIONS
);
@@ -196,6 +210,20 @@ private DependencyResult mockDependencyResult(Artifact artifact)
return result;
}
+  private List<File> getExpectedJarFiles(List<String> expectedJarList, File baseDir, Artifact artifact)
+ {
+ return expectedJarList.stream()
+ .map(name -> new File(
+ StringUtils.format(
+ "%s/%s/%s",
+ baseDir,
+ artifact == null ? "" : artifact.getArtifactId(),
+ name + ".jar"
+ )
+ ))
+ .collect(Collectors.toList());
+ }
+
private List getExpectedJarFiles(Artifact artifact)
{
final String artifactId = artifact.getArtifactId();
@@ -249,6 +277,26 @@ public void testPullDependencies_root_extension_dir_bad_state() throws IOExcepti
pullDependencies.run();
}
+ /**
+ * If --clean is not specified and common dependencies directory already exists, skip creating.
+ */
+  @Test
+ public void testPullDependencies_root_common_dependencies_dir_exists()
+ {
+ pullDependencies.run();
+ }
+
+ /**
+ * A file exists on the root extension directory path, but it's not a directory, throw exception.
+ */
+ @Test(expected = RuntimeException.class)
+ public void testPullDependencies_root_common_dependencies_dir_bad_state() throws IOException
+ {
+ Assert.assertTrue(rootCommonDependenciesDir.delete());
+ Assert.assertTrue(rootCommonDependenciesDir.createNewFile());
+ pullDependencies.run();
+ }
+
/**
* If --clean is not specified and hadoop dependencies directory already exists, skip creating.
*/
@@ -281,11 +329,14 @@ public void testPullDependencies()
final List jarsUnderExtensionA = Arrays.asList(actualExtensions[0].listFiles());
Collections.sort(jarsUnderExtensionA);
- Assert.assertEquals(getExpectedJarFiles(extension_A), jarsUnderExtensionA);
+ Assert.assertEquals(getExpectedJarFiles(extension_A_expected, rootExtensionsDir, extension_A), jarsUnderExtensionA);
final List jarsUnderExtensionB = Arrays.asList(actualExtensions[1].listFiles());
Collections.sort(jarsUnderExtensionB);
- Assert.assertEquals(getExpectedJarFiles(extension_B), jarsUnderExtensionB);
+ Assert.assertEquals(getExpectedJarFiles(extension_B_expected, rootExtensionsDir, extension_B), jarsUnderExtensionB);
+
+ final List jarsUnderCommon = Arrays.asList(rootCommonDependenciesDir.listFiles());
+ Assert.assertEquals(getExpectedJarFiles(commonDependencyList, rootCommonDependenciesDir, null), jarsUnderCommon);
final File[] actualHadoopDependencies = rootHadoopDependenciesDir.listFiles();
Arrays.sort(actualHadoopDependencies);