Skip to content

Commit

Permalink
[SEDONA-211] Enforce release managers to use JDK 8 (#724)
Browse files Browse the repository at this point in the history
  • Loading branch information
jiayuasu authored Dec 10, 2022
1 parent 460f097 commit a928188
Show file tree
Hide file tree
Showing 10 changed files with 68 additions and 50 deletions.
4 changes: 0 additions & 4 deletions core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -79,10 +79,6 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
Expand Down
21 changes: 19 additions & 2 deletions docs/community/release-manager.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,27 @@ You only need to perform these steps if this is your first time being a release

### 0. Software requirement

* JDK 8 or 11. Other versions might cause problems.
* Maven 3.X. Your Maven must point to the correct JDK version (8 or 11). Check it by `mvn --version`
* JDK 8: `brew install openjdk@8`
* Maven 3.X. Your Maven must point to JDK 8 (1.8). Check it by `mvn --version`
* Git and SVN

If your Maven (`mvn --version`) points to other JDK versions, you must change it to JDK 8. Steps are as follows:

1. Find all Java versions installed on your machine: `/usr/libexec/java_home -V`. You should see multiple JDK versions listed, including JDK 8.
2. Run `whereis mvn` to get the installation location of your Maven. The result is a symlink to the actual location.
3. Open the actual `mvn` script that the symlink points to in a text editor (use `sudo` if needed). Its content will look like this:
```
#!/bin/bash
JAVA_HOME="${JAVA_HOME:-$(/usr/libexec/java_home)}" exec "/usr/local/Cellar/maven/3.6.3/libexec/bin/mvn" "$@"
```
4. Change `${JAVA_HOME:-$(/usr/libexec/java_home)}` to `${JAVA_HOME:-$(/usr/libexec/java_home -v 1.8)}`. The resulting content will be like this:
```
#!/bin/bash
JAVA_HOME="${JAVA_HOME:-$(/usr/libexec/java_home -v 1.8)}" exec "/usr/local/Cellar/maven/3.6.3/libexec/bin/mvn" "$@"
```
5. Run `mvn --version` again. It should now point to JDK 8.


### 1. Obtain Write Access to Sedona GitHub repo

1. Verify you have a Github ID enabled with 2FA https://help.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/
Expand Down
13 changes: 5 additions & 8 deletions examples/rdd-colocation-mining/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -33,20 +33,20 @@ lazy val root = (project in file(".")).
publishMavenStyle := true
)

val SparkVersion = "3.2.0"
val SparkVersion = "3.3.1"

val SparkCompatibleVersion = "3.0"

val HadoopVersion = "2.7.2"

val SedonaVersion = "1.1.1-incubating"
val SedonaVersion = "1.3.1-incubating-SNAPSHOT"

val ScalaCompatibleVersion = "2.12"

// Change the dependency scope to "provided" when you run "sbt assembly"
val dependencyScope = "compile"

val geotoolsVersion = "1.1.0-25.2"
val geotoolsVersion = "1.3.0-27.2"

//val jacksonVersion = "2.10.0"

Expand All @@ -62,11 +62,8 @@ libraryDependencies ++= Seq(
"org.apache.hadoop" % "hadoop-mapreduce-client-core" % HadoopVersion % dependencyScope,
"org.apache.hadoop" % "hadoop-common" % HadoopVersion % dependencyScope,
"org.apache.hadoop" % "hadoop-hdfs" % HadoopVersion % dependencyScope,
"org.apache.sedona" % "sedona-core-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion,
"org.apache.sedona" % "sedona-sql-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion ,
"org.apache.sedona" % "sedona-viz-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion,
"org.locationtech.jts"% "jts-core"% "1.18.0" % "compile",
"org.wololo" % "jts2geojson" % "0.14.3" % "compile", // Only needed if you read GeoJSON files. Under MIT License
"org.apache.sedona" % "sedona-python-adapter-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion changing(),
"org.apache.sedona" % "sedona-viz-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion changing(),
// The following GeoTools packages are only required if you need CRS transformation. Under GNU Lesser General Public License (LGPL) license
"org.datasyslab" % "geotools-wrapper" % geotoolsVersion % "compile"
)
Expand Down
14 changes: 6 additions & 8 deletions examples/sql/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/

import sbt.Keys.{libraryDependencies, version}


Expand All @@ -32,20 +33,20 @@ lazy val root = (project in file(".")).
publishMavenStyle := true
)

val SparkVersion = "3.2.0"
val SparkVersion = "3.3.1"

val SparkCompatibleVersion = "3.0"

val HadoopVersion = "2.7.2"

val SedonaVersion = "1.1.1-incubating"
val SedonaVersion = "1.3.1-incubating-SNAPSHOT"

val ScalaCompatibleVersion = "2.12"

// Change the dependency scope to "provided" when you run "sbt assembly"
val dependencyScope = "compile"

val geotoolsVersion = "1.1.0-25.2"
val geotoolsVersion = "1.3.0-27.2"

//val jacksonVersion = "2.10.0"

Expand All @@ -61,11 +62,8 @@ libraryDependencies ++= Seq(
"org.apache.hadoop" % "hadoop-mapreduce-client-core" % HadoopVersion % dependencyScope,
"org.apache.hadoop" % "hadoop-common" % HadoopVersion % dependencyScope,
"org.apache.hadoop" % "hadoop-hdfs" % HadoopVersion % dependencyScope,
"org.apache.sedona" % "sedona-core-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion,
"org.apache.sedona" % "sedona-sql-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion ,
"org.apache.sedona" % "sedona-viz-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion,
"org.locationtech.jts"% "jts-core"% "1.18.0" % "compile",
"org.wololo" % "jts2geojson" % "0.14.3" % "compile", // Only needed if you read GeoJSON files. Under MIT License
"org.apache.sedona" % "sedona-python-adapter-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion changing(),
"org.apache.sedona" % "sedona-viz-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion changing(),
// The following GeoTools packages are only required if you need CRS transformation. Under GNU Lesser General Public License (LGPL) license
"org.datasyslab" % "geotools-wrapper" % geotoolsVersion % "compile"
)
Expand Down
8 changes: 4 additions & 4 deletions examples/sql/src/main/scala/ScalaExample.scala
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ object ScalaExample extends App{

def testPredicatePushdownAndRangeJonQuery():Unit =
{
val sedonaConf = new SedonaConf(sparkSession.sparkContext.getConf)
val sedonaConf = new SedonaConf(sparkSession.conf)
println(sedonaConf)

var polygonCsvDf = sparkSession.read.format("csv").option("delimiter",",").option("header","false").load(csvPolygonInputLocation)
Expand All @@ -84,7 +84,7 @@ object ScalaExample extends App{

def testDistanceJoinQuery(): Unit =
{
val sedonaConf = new SedonaConf(sparkSession.sparkContext.getConf)
val sedonaConf = new SedonaConf(sparkSession.conf)
println(sedonaConf)

var pointCsvDF1 = sparkSession.read.format("csv").option("delimiter",",").option("header","false").load(csvPointInputLocation)
Expand All @@ -109,7 +109,7 @@ object ScalaExample extends App{

def testAggregateFunction(): Unit =
{
val sedonaConf = new SedonaConf(sparkSession.sparkContext.getConf)
val sedonaConf = new SedonaConf(sparkSession.conf)
println(sedonaConf)

var pointCsvDF = sparkSession.read.format("csv").option("delimiter",",").option("header","false").load(csvPointInputLocation)
Expand Down Expand Up @@ -145,7 +145,7 @@ object ScalaExample extends App{
def testRasterIOAndMapAlgebra(): Unit = {
var df = sparkSession.read.format("geotiff").option("dropInvalid", true).load(rasterdatalocation)
df.printSchema()
df.selectExpr("image.origin as origin","ST_GeomFromWkt(image.wkt) as Geom", "image.height as height", "image.width as width", "image.data as data", "image.nBands as numBands").show()
df.selectExpr("image.origin as origin","ST_GeomFromWkt(image.geometry) as Geom", "image.height as height", "image.width as width", "image.data as data", "image.nBands as numBands").show()
df = df.selectExpr(" image.data as data", "image.nBands as numBands")
df = df.selectExpr("RS_GetBand(data, 1, numBands) as targetBand")
df.selectExpr("RS_MultiplyFactor(targetBand, 3) as multiply").show()
Expand Down
16 changes: 6 additions & 10 deletions examples/viz/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
import sbt.Keys.{libraryDependencies, version}



lazy val root = (project in file(".")).
settings(
name := "SedonaVizTemplate",
Expand All @@ -29,25 +28,25 @@ lazy val root = (project in file(".")).

scalaVersion := "2.12.11",

organization := "org.apache.sedona",
organization := "org.apache.sedona",

publishMavenStyle := true
)

val SparkVersion = "3.1.2"
val SparkVersion = "3.3.1"

val SparkCompatibleVersion = "3.0"

val HadoopVersion = "2.7.2"

val SedonaVersion = "1.1.1-incubating"
val SedonaVersion = "1.3.1-incubating-SNAPSHOT"

val ScalaCompatibleVersion = "2.12"

// Change the dependency scope to "provided" when you run "sbt assembly"
val dependencyScope = "compile"

val geotoolsVersion = "1.1.0-25.2"
val geotoolsVersion = "1.3.0-27.2"

//val jacksonVersion = "2.10.0"

Expand All @@ -63,11 +62,8 @@ libraryDependencies ++= Seq(
"org.apache.hadoop" % "hadoop-mapreduce-client-core" % HadoopVersion % dependencyScope,
"org.apache.hadoop" % "hadoop-common" % HadoopVersion % dependencyScope,
"org.apache.hadoop" % "hadoop-hdfs" % HadoopVersion % dependencyScope,
"org.apache.sedona" % "sedona-core-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion,
"org.apache.sedona" % "sedona-sql-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion ,
"org.apache.sedona" % "sedona-viz-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion,
"org.locationtech.jts"% "jts-core"% "1.18.0" % "compile",
"org.wololo" % "jts2geojson" % "0.14.3" % "compile", // Only needed if you read GeoJSON files. Under MIT License
"org.apache.sedona" % "sedona-python-adapter-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion changing(),
"org.apache.sedona" % "sedona-viz-".concat(SparkCompatibleVersion).concat("_").concat(ScalaCompatibleVersion) % SedonaVersion changing(),
// The following GeoTools packages are only required if you need CRS transformation. Under GNU Lesser General Public License (LGPL) license
"org.datasyslab" % "geotools-wrapper" % geotoolsVersion % "compile"
)
Expand Down
8 changes: 8 additions & 0 deletions flink/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,14 @@
<scope>compile</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
</plugin>
</plugins>
</build>
<!-- <build>-->
<!-- <sourceDirectory>src/main/scala</sourceDirectory>-->
<!-- <plugins>-->
Expand Down
21 changes: 10 additions & 11 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@
<sedona.jackson.version>2.13.3</sedona.jackson.version>
<hadoop.version>3.2.4</hadoop.version>
<maven.deploy.skip>false</maven.deploy.skip>

<maven.compiler.plugin.version>3.10.1</maven.compiler.plugin.version>
<!-- Actual scala version will be set by a profile.
Setting a default value helps IDE:s that can't make sense of profiles. -->
<scala.compat.version>2.12</scala.compat.version>
Expand Down Expand Up @@ -299,11 +299,19 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<version>${maven.compiler.plugin.version}</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</pluginManagement>
Expand Down Expand Up @@ -348,15 +356,6 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
Expand Down
4 changes: 4 additions & 0 deletions viz/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,10 @@
<build>
<sourceDirectory>src/main/java</sourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.scalastyle</groupId>
<artifactId>scalastyle-maven-plugin</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,6 @@
* http://nyomdmegteis.hu/en/
*/

import sun.nio.ch.DirectBuffer;

import javax.imageio.ImageIO;
import javax.imageio.ImageReadParam;
import javax.imageio.ImageReader;
Expand Down Expand Up @@ -517,7 +515,12 @@ private void disposeNow(final MappedByteBuffer[] disposedBuffer)
FileDataBufferDeleterHook.undisposedBuffers.remove(this);
if (disposedBuffer != null) {
for (MappedByteBuffer b : disposedBuffer) {
((DirectBuffer) b).cleaner().clean();
// This method does not actually erase the data in the buffer,
// but it is named as if it did because it will most often be used in situations
// in which that might as well be the case
// The original method uses the ((DirectBuffer) b).cleaner().clean(), which is
// no longer available since Java 9
b.clear();
}
}
if (accessFiles != null) {
Expand Down

0 comments on commit a928188

Please sign in to comment.