Skip to content

Commit

Permalink
[LIVY-510] Remove support for JDK7.
Browse files Browse the repository at this point in the history
With this change Livy will only build and run with JDK8. While this
doesn't necessarily preclude using Java 7 for running Spark itself,
that is not encouraged due to possible issues with serialization.

There are a few small changes elsewhere that I made so tests would pass,
but they aren't related to the Java 8 change:

- a small tweak to the fallback code that initializes the Python
  interpreter, which was failing with an odd reflection error.

- expose the user's home directory in the MiniYarnCluster, to allow
  the tests to see pip packages installed in the user's home.

- tweak the python API tests so that the pytest cache does not pollute
  the source directory.

- changed Travis to use Oracle's JDK, since the Open JDK path was
  mixing Java 8 and 9 and causing errors.

- updated jacoco to latest stable.

Author: Marcelo Vanzin <[email protected]>

Closes apache#111 from vanzin/LIVY-510.
  • Loading branch information
Marcelo Vanzin authored and jerryshao committed Sep 20, 2018
1 parent f99cc32 commit d39ab35
Show file tree
Hide file tree
Showing 8 changed files with 34 additions and 95 deletions.
6 changes: 1 addition & 5 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,17 +33,13 @@ matrix:
# Spark 2.2+ will only be verified using JDK8
# Thriftserver requires JDK8
- env: MVN_FLAG='-Pthriftserver -Pspark-2.2-it -DskipTests'
jdk: oraclejdk8
- env: MVN_FLAG='-Pthriftserver -Pspark-2.2 -DskipITs'
jdk: oraclejdk8
- env: MVN_FLAG='-Pthriftserver -Pspark-2.3-it -DskipTests'
jdk: oraclejdk8
- env: MVN_FLAG='-Pthriftserver -Pspark-2.3 -DskipITs'
jdk: oraclejdk8


jdk:
- openjdk7
- oraclejdk8

addons:
apt:
Expand Down
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,19 +28,19 @@ To build Livy, you will need:

Debian/Ubuntu:
* mvn (from ``maven`` package or maven3 tarball)
* openjdk-7-jdk (or Oracle Java7 jdk)
* openjdk-8-jdk (or Oracle JDK 8)
* Python 2.6+
* R 3.x

Redhat/CentOS:
* mvn (from ``maven`` package or maven3 tarball)
* java-1.7.0-openjdk (or Oracle Java7 jdk)
* java-1.8.0-openjdk (or Oracle JDK 8)
* Python 2.6+
* R 3.x

MacOS:
* Xcode command line tools
* Oracle's JDK 1.7+
* Oracle's JDK 1.8
* Maven (Homebrew)
* Python 2.6+
* R 3.x
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,10 @@ object MiniYarnMain extends MiniClusterBase {
config.localDirCount, config.logDirCount)
yarnCluster.init(baseConfig)

// This allows applications run by YARN during the integration tests to find PIP modules
// installed in the user's home directory (instead of just the global ones).
baseConfig.set(YarnConfiguration.NM_USER_HOME_DIR, sys.env("HOME"))

// Install a shutdown hook for stop the service and kill all running applications.
Runtime.getRuntime().addShutdownHook(new Thread() {
override def run(): Unit = yarnCluster.stop()
Expand Down Expand Up @@ -283,8 +287,7 @@ class MiniCluster(config: Map[String, String]) extends Cluster with MiniClusterU
sys.props("java.home") + "/bin/java",
"-Dtest.appender=console",
"-Djava.io.tmpdir=" + procTmp.getAbsolutePath(),
"-cp", childClasspath + File.pathSeparator + configDir.getAbsolutePath(),
"-XX:MaxPermSize=256m") ++
"-cp", childClasspath + File.pathSeparator + configDir.getAbsolutePath()) ++
extraJavaArgs ++
Seq(
klass.getName().stripSuffix("$"),
Expand Down
3 changes: 1 addition & 2 deletions integration-test/src/test/resources/test_python_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,6 @@ def upload_pyfile_job(context):


if __name__ == '__main__':
test_dir_path = os.getcwd() + "/src"
value = pytest.main(test_dir_path)
value = pytest.main([os.path.dirname(__file__)])
if value != 0:
raise Exception("One or more test cases have failed.")
Original file line number Diff line number Diff line change
Expand Up @@ -249,7 +249,12 @@ class JobApiIT extends BaseIntegrationTestSuite with BeforeAndAfterAll with Logg
val uploadPyFilePath = createTempFilesForTest("upload_pyfile", ".py",
uploadPyFileContent, false)

val builder = new ProcessBuilder(Seq("python", createPyTestsForPythonAPI().toString).asJava)
val tmpDir = new File(sys.props("java.io.tmpdir")).getAbsoluteFile()
val testDir = Files.createTempDirectory(tmpDir.toPath(), "python-tests-").toFile()
val testFile = createPyTestsForPythonAPI(testDir)

val builder = new ProcessBuilder(Seq("python", testFile.getAbsolutePath()).asJava)
builder.directory(testDir)

val env = builder.environment()
env.put("LIVY_END_POINT", livyEndpoint)
Expand All @@ -258,8 +263,8 @@ class JobApiIT extends BaseIntegrationTestSuite with BeforeAndAfterAll with Logg
env.put("UPLOAD_FILE_URL", uploadFilePath)
env.put("UPLOAD_PYFILE_URL", uploadPyFilePath)

builder.redirectOutput(new File(sys.props("java.io.tmpdir") + "/pytest_results.log"))
builder.redirectErrorStream(true)
builder.redirectOutput(new File(tmpDir, "pytest_results.log"))

val process = builder.start()

Expand All @@ -268,11 +273,10 @@ class JobApiIT extends BaseIntegrationTestSuite with BeforeAndAfterAll with Logg
assert(process.exitValue() === 0)
}

private def createPyTestsForPythonAPI(): File = {
var source: InputStream = null
private def createPyTestsForPythonAPI(testDir: File): File = {
val file = Files.createTempFile(testDir.toPath(), "test_python_api-", ".py").toFile()
val source = getClass().getClassLoader().getResourceAsStream("test_python_api.py")
try {
source = getClass.getClassLoader.getResourceAsStream("test_python_api.py")
val file = Files.createTempFile("", "").toFile
Files.copy(source, file.toPath, StandardCopyOption.REPLACE_EXISTING)
file
} finally {
Expand Down
82 changes: 8 additions & 74 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@
<httpcore.version>4.4.4</httpcore.version>
<jackson.version>2.9.5</jackson.version>
<javax.servlet-api.version>3.1.0</javax.servlet-api.version>
<jetty.version>9.2.16.v20160414</jetty.version>
<jetty.version>9.3.8.v20160314</jetty.version>
<json4s.version>3.2.10</json4s.version>
<junit.version>4.11</junit.version>
<libthrift.version>0.9.3</libthrift.version>
Expand All @@ -108,9 +108,7 @@
<scala.version>${scala-2.11.version}</scala.version>
<scalatest.version>2.2.4</scalatest.version>
<scalatra.version>2.3.0</scalatra.version>
<java.version>1.7</java.version>
<minJavaVersion>1.7</minJavaVersion>
<maxJavaVersion>1.8</maxJavaVersion>
<java.version>1.8</java.version>
<test.redirectToFile>true</test.redirectToFile>
<execution.root>${user.dir}</execution.root>
<spark.home>${execution.root}/dev/spark</spark.home>
Expand Down Expand Up @@ -578,26 +576,6 @@
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<version>1.4.1</version>
<executions>
<execution>
<id>enforce-versions</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<requireJavaVersion>
<version>1.7</version>
</requireJavaVersion>
</rules>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
Expand Down Expand Up @@ -675,7 +653,6 @@
<jvmArgs>
<jvmArg>-Xms1024m</jvmArg>
<jvmArg>-Xmx1024m</jvmArg>
<jvmArg>-XX:MaxPermSize=${MaxPermGen}</jvmArg>
<jvmArg>-XX:ReservedCodeCacheSize=${CodeCacheSize}</jvmArg>
</jvmArgs>
</configuration>
Expand Down Expand Up @@ -714,7 +691,7 @@
</systemProperties>
<redirectTestOutputToFile>${test.redirectToFile}</redirectTestOutputToFile>
<useFile>${test.redirectToFile}</useFile>
<argLine>${argLine} -Xmx2g -XX:MaxPermSize=512m</argLine>
<argLine>${argLine} -Xmx2g</argLine>
<failIfNoTests>false</failIfNoTests>
</configuration>
</plugin>
Expand Down Expand Up @@ -743,7 +720,7 @@
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<junitxml>.</junitxml>
<filereports>WDF TestSuite.txt</filereports>
<argLine>${argLine} -Xmx2g -XX:MaxPermSize=512m</argLine>
<argLine>${argLine} -Xmx2g</argLine>
</configuration>
<executions>
<execution>
Expand Down Expand Up @@ -818,39 +795,25 @@
<inherited>false</inherited>
<configuration>
<rules>
<requireJavaVersion>
<version>${java.version}</version>
</requireJavaVersion>
<requireMavenVersion>
<version>[3.0.0,)</version>
</requireMavenVersion>
<requireJavaVersion>
<version>[${minJavaVersion}.0,${maxJavaVersion}.1000}]</version>
</requireJavaVersion>
<requireOS>
<family>unix</family>
</requireOS>
</rules>
</configuration>
<executions>
<execution>
<id>clean</id>
<goals>
<goal>enforce</goal>
</goals>
<phase>pre-clean</phase>
</execution>
<execution>
<id>default</id>
<goals>
<goal>enforce</goal>
</goals>
<phase>validate</phase>
</execution>
<execution>
<id>site</id>
<goals>
<goal>enforce</goal>
</goals>
<phase>pre-site</phase>
</execution>
</executions>
</plugin>

Expand Down Expand Up @@ -996,7 +959,7 @@
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.7.7.201606060606</version>
<version>0.8.2</version>
<executions>
<execution>
<goals>
Expand Down Expand Up @@ -1065,33 +1028,6 @@
<modules>
<module>thriftserver/server</module>
</modules>
<properties>
<jetty.version>9.3.8.v20160314</jetty.version>
</properties>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<executions>
<execution>
<id>enforce-java</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<requireJavaVersion>
<version>1.8</version>
</requireJavaVersion>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>

<!-- Spark version profiles -->
Expand Down Expand Up @@ -1184,7 +1120,6 @@
<spark.scala-2.11.version>2.2.0</spark.scala-2.11.version>
<spark.scala-2.10.version>2.2.0</spark.scala-2.10.version>
<spark.version>${spark.scala-2.11.version}</spark.version>
<java.version>1.8</java.version>
<py4j.version>0.10.4</py4j.version>
<json4s.version>3.2.11</json4s.version>
</properties>
Expand Down Expand Up @@ -1218,7 +1153,6 @@
<spark.version>${spark.scala-2.11.version}</spark.version>
<netty.spark-2.11.version>4.1.17.Final</netty.spark-2.11.version>
<netty.spark-2.10.version>4.0.37.Final</netty.spark-2.10.version>
<java.version>1.8</java.version>
<py4j.version>0.10.7</py4j.version>
<json4s.version>3.2.11</json4s.version>
</properties>
Expand Down
1 change: 1 addition & 0 deletions python-api/setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,4 @@ test=pytest
[tool:pytest]
addopts = --verbose
python_files = src/test/python/*/*.py
cache_dir = target/.pytest_cache
Original file line number Diff line number Diff line change
Expand Up @@ -201,9 +201,11 @@ object PythonInterpreter extends Logging {
.newInstance(parts(0), gateway)
} catch {
case NonFatal(e) =>
classOf[PythonProxyHandler].getConstructor(classOf[String],
Class.forName("py4j.CallbackClient"), classOf[Gateway])
.newInstance(parts(0), gateway.getCallbackClient, gateway)
val cbClient = gateway.getClass().getMethod("getCallbackClient").invoke(gateway)
val cbClass = Class.forName("py4j.CallbackClient")
classOf[PythonProxyHandler]
.getConstructor(classOf[String], cbClass, classOf[Gateway])
.newInstance(parts(0), cbClient, gateway)
}

Proxy.newProxyInstance(Thread.currentThread.getContextClassLoader,
Expand Down

0 comments on commit d39ab35

Please sign in to comment.