Skip to content

Remove provided optional configs #1452

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Apr 22, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 0 additions & 7 deletions buildSrc/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,6 @@ repositories {
jcenter()
mavenCentral()

// For resolving propdeps (provided/optional dependencies)
maven { url 'https://repo.spring.io/plugins-release' }

// For Elasticsearch snapshots.
if (localRepo) {
// For some reason the root dirs all point to the buildSrc folder. The local Repo will be one above that.
Expand All @@ -72,10 +69,6 @@ dependencies {
compile gradleApi()
compile localGroovy()

// Provided/Optional Dependencies
compile 'org.springframework.build.gradle:propdeps-plugin:0.0.7'
implementation 'org.apache.logging.log4j:log4j-core:2.11.1'

if (localRepo) {
compile name: "build-tools-${buildToolsVersion}"
// Required for dependency licenses task (explicitly added in case of localRepo missing transitive dependencies)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import org.gradle.api.artifacts.maven.MavenPom
import org.gradle.api.artifacts.maven.MavenResolver
import org.gradle.api.file.CopySpec
import org.gradle.api.java.archives.Manifest
import org.gradle.api.plugins.JavaLibraryPlugin
import org.gradle.api.plugins.JavaPlugin
import org.gradle.api.plugins.MavenPlugin
import org.gradle.api.plugins.MavenPluginConvention
Expand All @@ -33,10 +34,6 @@ import org.gradle.external.javadoc.JavadocOutputLevel
import org.gradle.external.javadoc.MinimalJavadocOptions
import org.gradle.plugins.ide.eclipse.EclipsePlugin
import org.gradle.plugins.ide.idea.IdeaPlugin
import org.springframework.build.gradle.propdep.PropDepsEclipsePlugin
import org.springframework.build.gradle.propdep.PropDepsIdeaPlugin
import org.springframework.build.gradle.propdep.PropDepsMavenPlugin
import org.springframework.build.gradle.propdep.PropDepsPlugin

class BuildPlugin implements Plugin<Project> {

Expand All @@ -62,20 +59,14 @@ class BuildPlugin implements Plugin<Project> {
project.getPluginManager().apply(BaseBuildPlugin.class)

// BuildPlugin will continue to assume Java projects for the time being.
project.getPluginManager().apply(JavaPlugin.class)
project.getPluginManager().apply(JavaLibraryPlugin.class)

// IDE Support
project.getPluginManager().apply(IdeaPlugin.class)
project.getPluginManager().apply(EclipsePlugin.class)

// Maven Support
project.getPluginManager().apply(MavenPlugin.class)

// Support for modeling provided/optional dependencies
project.getPluginManager().apply(PropDepsPlugin.class)
project.getPluginManager().apply(PropDepsIdeaPlugin.class)
project.getPluginManager().apply(PropDepsEclipsePlugin.class)
project.getPluginManager().apply(PropDepsMavenPlugin.class)
}

/** Return the configuration name used for finding transitive deps of the given dependency. */
Expand Down Expand Up @@ -103,11 +94,10 @@ class BuildPlugin implements Plugin<Project> {
}
}

project.configurations.compile.dependencies.all(disableTransitiveDeps)
project.configurations.api.dependencies.all(disableTransitiveDeps)
project.configurations.implementation.dependencies.all(disableTransitiveDeps)
project.configurations.provided.dependencies.all(disableTransitiveDeps)
project.configurations.optional.dependencies.all(disableTransitiveDeps)
project.configurations.compileOnly.dependencies.all(disableTransitiveDeps)
project.configurations.runtimeOnly.dependencies.all(disableTransitiveDeps)
}

/**
Expand Down Expand Up @@ -137,7 +127,6 @@ class BuildPlugin implements Plugin<Project> {

itestImplementation(project.sourceSets.main.output)
itestImplementation(project.configurations.testImplementation)
itestImplementation(project.configurations.provided)
itestImplementation(project.sourceSets.test.output)
itestImplementation(project.configurations.testRuntimeClasspath)
}
Expand Down Expand Up @@ -356,17 +345,6 @@ class BuildPlugin implements Plugin<Project> {
dep.scope == 'test' || dep.artifactId == 'elasticsearch-hadoop-mr'
}

// Mark the optional dependencies to actually be optional
generatedPom.dependencies.findAll { it.scope == 'optional' }.each {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

How are we handling this now? The generated POMs are going to be different now. Dependencies that were previously marked as optional are now going to be brought in by consumers by default. Is this fine or expected behavior?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We used optional primarily in two places:

First, in the spark projects where if you were only using RDDs then you didn't necessarily need to pull in the SQL libraries. That said, both core Spark and Spark SQL were listed as provided, and thus would always both be on the classpath if launching via spark-submit. In terms of a strict build footprint, this may affect those who are strictly avoiding SQL, though a decent amount of Spark development is done in live environments/notebooks/platforms. I don't expect this to be a widespread problem since those who are actively trying to avoid SQL should be able to ignore this dependency with their own build tools.

Second, in the project-wide pom, where we have the code for every integration. The expectation here I assume was that every integration was optional within the project-wide artifact, and thus each integration's dependency was marked as optional. This meant that you could pull in the root project's artifact for Spark and not need to pull in Pig dependencies. This might be the biggest issue to address for the removal of optional scope dependencies, but to work around this, we publish integration specific artifacts which provide isolation based on the supported integration for users who only want support for one integration.

In fact, I'm willing to have a discussion on whether or not the project-wide artifact is even needed. I believe that it is a nice getting started point where you can put in a simple dependency name to get support for any integration, but beyond getting started, most users are much better off using integration specific artifacts for their projects.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't expect this to be a widespread problem since those who are actively trying to avoid SQL should be able to ignore this dependency with their own build tools.

Right. Folks bringing this in will now need to explicitly exclude this dependency rather than explicitly include it. If you don't feel this is going to be terribly disruptive then 👍.

the project-wide pom

What is the "project-wide pom" and what specifically is its purpose? Would a BOM be a more appropriate solution if you want folks to be able to import a simple POM that defines things like common dependency versions?

In fact, I'm willing to have a discussion on whether or not the project-wide artifact is even needed.

FWIW, Gradle has support specifically for publishing BOMs. We can still provide this kind of thing, but in a more appropriate form than a POM that declares nothing but optional dependencies. This is better modeled by the <dependencyManagement> block of a POM.

https://docs.gradle.org/current/userguide/java_platform_plugin.html

it.optional = "true"
}

// By default propdeps models optional dependencies as compile/optional
// for es-hadoop optional is best if these are modeled as provided/optional
generatedPom.dependencies.findAll { it.optional == "true" }.each {
it.scope = "provided"
}

// Storm hosts their jars outside of maven central.
boolean storm = generatedPom.dependencies.any { it.groupId == 'org.apache.storm' }

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,15 +65,18 @@ class IntegrationBuildPlugin implements Plugin<Project> {
*/
private static void configureProjectZip(Project project) {
// We do this after evaluation since the scala projects may change around what the final archive name is.
// TODO: Swap this out with exposing those jars as artifacts to be consumed in a dist project.
project.afterEvaluate {
Zip rootDistZip = project.rootProject.getTasks().getByName('distZip') as Zip
rootDistZip.dependsOn(project.getTasks().pack)

project.getTasks().withType(Jar.class).each { Jar jarTask ->
project.getTasks().withType(Jar.class) { Jar jarTask ->
// Add jar output under the dist directory
rootDistZip.from(jarTask.archivePath) { CopySpec copySpecification ->
copySpecification.into("${project.rootProject.ext.folderName}/dist")
copySpecification.setDuplicatesStrategy(DuplicatesStrategy.WARN)
if (jarTask.name != "itestJar") {
rootDistZip.from(jarTask.archiveFile) { CopySpec copySpecification ->
copySpecification.into("${project.rootProject.ext.folderName}/dist")
copySpecification.setDuplicatesStrategy(DuplicatesStrategy.WARN)
}
}
}
}
Expand All @@ -84,36 +87,15 @@ class IntegrationBuildPlugin implements Plugin<Project> {
* @param project to be configured
*/
private static void configureRootProjectDependencies(Project project) {
// We do this in an after evaluate so that we pick up all dependencies set after the plugin was configured.
// If this becomes a problem, we could see if there's a way to listen for new dependencies and add them
// to root at the same time.
project.afterEvaluate {
project.getConfigurations().getByName('implementation').getAllDependencies()
.withType(ExternalDependency.class)
.each { Dependency dependency ->
// Convert the scope to optional on the root project - it will have every integration in it, and
// users may not need every dependency (except hadoop and jackson)
String scope = (dependency.group in ['org.apache.hadoop', 'org.codehaus.jackson'] ? 'provided' : 'optional')
project.rootProject.getDependencies().add(scope, dependency)
}

project.getConfigurations().getByName('provided').getAllDependencies()
.withType(ExternalDependency.class)
.each { Dependency dependency ->
// Convert the scope to optional on the root project - it will have every integration in it, and
// users may not need every dependency (except hadoop and jackson)
String scope = (dependency.group in ['org.apache.hadoop', 'org.codehaus.jackson'] ? 'provided' : 'optional')
project.rootProject.getDependencies().add(scope, dependency)
project.getConfigurations().getByName('api').getAllDependencies()
.withType(ExternalDependency.class) { Dependency dependency ->
// Set API dependencies as implementation in the uberjar so that not everything is compile scope
project.rootProject.getDependencies().add('implementation', dependency)
}

project.getConfigurations().getByName('optional').getAllDependencies()
.withType(ExternalDependency.class)
.each { Dependency dependency ->
// Convert the scope to optional on the root project - it will have every integration in it, and
// users may not need every dependency (except hadoop and jackson)
String scope = (dependency.group in ['org.apache.hadoop', 'org.codehaus.jackson'] ? 'provided' : 'optional')
project.rootProject.getDependencies().add(scope, dependency)
project.getConfigurations().getByName('implementation').getAllDependencies()
.withType(ExternalDependency.class) { Dependency dependency ->
project.rootProject.getDependencies().add('implementation', dependency)
}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,8 @@ class RootBuildPlugin implements Plugin<Project> {
// Copy master jar, sourceJar, and javadocJar to zip
project.afterEvaluate {
// Do not copy the hadoop testing jar
project.getTasks().withType(Jar.class).findAll { it.name != 'hadoopTestingJar' }.each { Jar jarTask ->
distZip.from(jarTask.archivePath) { CopySpec spec ->
project.getTasks().withType(Jar.class) { Jar jarTask ->
distZip.from(jarTask.archiveFile) { CopySpec spec ->
spec.into("${project.rootProject.ext.folderName}/dist")
}
}
Expand Down
9 changes: 6 additions & 3 deletions hive/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,13 @@ configurations {
dependencies {
embedded(project(":elasticsearch-hadoop-mr"))

provided("org.apache.hive:hive-service:$hiveVersion") {
implementation("org.apache.hive:hive-service:$hiveVersion") {
exclude module: "log4j-slf4j-impl"
}
provided("org.apache.hive:hive-exec:$hiveVersion")
provided("org.apache.hive:hive-metastore:$hiveVersion")
implementation("org.apache.hive:hive-exec:$hiveVersion")
implementation("org.apache.hive:hive-metastore:$hiveVersion")
implementation("commons-logging:commons-logging:1.1.1")
implementation("javax.xml.bind:jaxb-api:2.3.1")

testImplementation(project(":test:shared"))

Expand All @@ -36,6 +38,7 @@ dependencies {
}

jar {
dependsOn(project.configurations.embedded)
from(project.configurations.embedded.collect { it.isDirectory() ? it : zipTree(it)}) {
include "org/elasticsearch/hadoop/**"
include "esh-build.properties"
Expand Down
1 change: 1 addition & 0 deletions hive/licenses/commons-logging-1.1.1.jar.sha1
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
5043bfebc3db072ed80fbd362e7caf00e885d8ae
Loading