15 changes: 5 additions & 10 deletions .github/workflows/integration-tests.yaml
@@ -29,18 +29,15 @@ jobs:
     strategy:
       matrix:
         maven_profile:
-          - "-Pscala-2.11 -Pspark2 -DskipRTests"
           - "-Pscala-2.12 -Pspark3"
         jdk_path:
-          - "/usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java"
-        include:
-          - maven_profile: "-Pscala-2.12 -Pspark3"
-            jdk_path: "/usr/lib/jvm/java-17-openjdk-amd64/bin/java"
+          - "/usr/lib/jvm/java-17-openjdk-amd64/bin/java"
     steps:
       -
         name: Checkout
         uses: actions/checkout@v3
       -
         name: Cache local Maven repository
         uses: actions/cache@v3
         with:
@@ -51,14 +48,12 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-maven-
       -
-        name: Set Python 3 as default for Spark 3 builds
-        if: ${{ contains(matrix.maven_profile, 'spark3') }}
-        # This can be removed once support for Python 2 and Spark 2 is removed and the default python executable is python3
+        name: Set Python 3 as default
         run: pyenv global 3 && echo "PYSPARK_PYTHON=$(which python3)" >> "$GITHUB_ENV"
       -
         name: Set JDK version
         run: update-alternatives --set java ${{ matrix.jdk_path }}
       -
         name: Build with Maven
         run: mvn -Pthriftserver ${{ matrix.maven_profile }} -DskipTests -Dmaven.javadoc.skip=true -B -V -e verify
       -
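With the Spark 2 entry gone, the matrix collapses to a single profile/JDK combination. A local run of roughly the same steps would look like the sketch below; the `sudo`, the Debian JDK path, and the presence of pyenv are assumptions carried over from the CI image, not requirements stated by the workflow.

```bash
# Local approximation of the remaining CI matrix entry (a sketch, not the
# workflow itself; assumes the Debian-style JDK layout and pyenv used in CI).
sudo update-alternatives --set java /usr/lib/jvm/java-17-openjdk-amd64/bin/java
pyenv global 3
export PYSPARK_PYTHON="$(which python3)"
mvn -Pthriftserver -Pscala-2.12 -Pspark3 -DskipTests -Dmaven.javadoc.skip=true -B -V -e verify
```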
10 changes: 3 additions & 7 deletions .github/workflows/unit-tests.yaml
@@ -27,19 +27,15 @@ jobs:
     strategy:
       matrix:
         maven_profile:
-          - "-Pscala-2.11 -Pspark2"
-          - "-Pscala-2.12 -Pspark2"
           - "-Pscala-2.12 -Pspark3"
         jdk_path:
-          - "/usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java"
-        include:
-          - maven_profile: "-Pscala-2.12 -Pspark3"
-            jdk_path: "/usr/lib/jvm/java-17-openjdk-amd64/bin/java"
+          - "/usr/lib/jvm/java-17-openjdk-amd64/bin/java"
     steps:
       -
         name: Checkout
         uses: actions/checkout@v3
       -
         name: Cache local Maven repository
         uses: actions/cache@v3
         with:
19 changes: 3 additions & 16 deletions .travis.yml
@@ -21,25 +21,12 @@ language: scala

 matrix:
   include:
-    - name: "Spark 2.4 Unit Tests (Scala 2.11)"
-      scala: 2.11.12
-      env: MVN_FLAG='-Pscala-2.11 -Pspark2 -Pthriftserver -DskipITs'
-    - name: "Spark 2.4 ITs (Scala 2.11)"
-      scala: 2.11.12
-      env: MVN_FLAG='-Pscala-2.11 -Pspark2 -Pthriftserver -DskipTests'
-    - name: "Spark 2.4 Unit Tests (Scala 2.12)"
-      scala: 2.12.10
-      env: MVN_FLAG='-Pscala-2.12 -Pspark2 -Pthriftserver -DskipITs'
-    - name: "Spark 2.4 ITs (Scala 2.12)"
-      scala: 2.12.10
-      env: MVN_FLAG='-Pscala-2.12 -Pspark2 -Pthriftserver -DskipTests'
-    # No scala 2.11.x build for spark3
-    - name: "Spark 3.0 Unit Tests (Scala 2.12)"
+    - name: "Spark 3.x Unit Tests (Scala 2.12)"
       scala: 2.12.10
       env: MVN_FLAG='-Pscala-2.12 -Pspark3 -Pthriftserver -DskipITs'
-    - name: "Spark 3.0 ITs (Scala 2.12)"
+    - name: "Spark 3.x ITs (Scala 2.12)"
       scala: 2.12.10
-      env: MVN_FLAG='-Pscala-2.12 -Pspark3 -Pthriftserver -DskipITs'
+      env: MVN_FLAG='-Pscala-2.12 -Pspark3 -Pthriftserver -DskipTests'
 
 jdk:
   - openjdk8
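Note that the old "Spark 3.0 ITs" entry passed `-DskipITs`, so the integration-test job never actually ran its ITs; the new entry fixes that with `-DskipTests`. Each Travis entry only selects a flag set; the sketch below shows roughly how `MVN_FLAG` would be consumed, on the assumption that the repo's Travis build script splices it into a Maven invocation.

```bash
# Sketch of what the "Spark 3.x ITs" entry effectively runs; the exact mvn
# goal comes from the Travis build script and is an assumption here.
export MVN_FLAG='-Pscala-2.12 -Pspark3 -Pthriftserver -DskipTests'
mvn $MVN_FLAG verify
```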
12 changes: 5 additions & 7 deletions README.md
@@ -57,7 +57,7 @@ Required python packages for building Livy:
 To run Livy, you will also need a Spark installation. You can get Spark releases at
 https://spark.apache.org/downloads.html.
 
-Livy requires Spark 2.4+. You can switch to a different version of Spark by setting the
+Livy requires Spark 3.0+. You can switch to a different version of Spark by setting the
 ``SPARK_HOME`` environment variable in the Livy server process, without needing to rebuild Livy.


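For example, pointing an existing Livy build at a different Spark installation is only an environment change; a minimal sketch, where the Spark install path is an assumed location:

```bash
# Run Livy against an already-installed Spark 3.x; no rebuild required.
# Assumes the current directory is the Livy installation root.
export SPARK_HOME=/opt/spark-3.3.4-bin-hadoop3   # assumed install location
./bin/livy-server start
```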
@@ -82,7 +82,7 @@ docker run --rm -it -v $(pwd):/workspace -v $HOME/.m2:/root/.m2 livy-ci mvn package

 > **Note**: The `docker run` command maps the maven repository to your host machine's maven cache so subsequent runs will not need to download dependencies.
 
-By default Livy is built against Apache Spark 2.4.5, but the version of Spark used when running
+By default Livy is built against Apache Spark 3.3.4, but the version of Spark used when running
 Livy does not need to match the version used to build Livy. Livy internally handles the differences
 between different Spark versions.

@@ -93,8 +93,6 @@ version of Spark without needing to rebuild.

 | Flag         | Purpose                                                             |
 |--------------|---------------------------------------------------------------------|
-| -Phadoop2    | Choose Hadoop2 based build dependencies (default configuration)     |
-| -Pspark2     | Choose Spark 2.x based build dependencies (default configuration)   |
-| -Pspark3     | Choose Spark 3.x based build dependencies                           |
-| -Pscala-2.11 | Choose Scala 2.11 based build dependencies (default configuration)  |
-| -Pscala-2.12 | Choose scala 2.12 based build dependencies                          |
+| -Phadoop2    | Choose Hadoop2 based build dependencies                             |
+| -Pspark3     | Choose Spark 3.x based build dependencies (default configuration)   |
+| -Pscala-2.12 | Choose Scala 2.12 based build dependencies (default configuration)  |
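Since `-Pspark3` and `-Pscala-2.12` are now the defaults, a plain `mvn package` already targets Spark 3.x on Scala 2.12. A sketch of a typical build invocation, with the flags listed only to make the selection explicit:

```bash
# Explicit-profile build; -Pspark3 and -Pscala-2.12 are the new defaults,
# so listing them here is optional.
mvn clean package -B -V -e -Pspark3 -Pscala-2.12 -DskipTests -Dmaven.javadoc.skip=true
```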
48 changes: 0 additions & 48 deletions core/scala-2.11/pom.xml

This file was deleted.

29 changes: 0 additions & 29 deletions integration-test/pom.xml
@@ -246,35 +246,6 @@
         </executions>
       </plugin>
 
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>parse-spark-version</id>
-            <phase>process-test-sources</phase>
-            <goals>
-              <goal>parse-version</goal>
-            </goals>
-            <configuration>
-              <propertyPrefix>spark</propertyPrefix>
-              <versionString>${spark.version}</versionString>
-            </configuration>
-          </execution>
-          <execution>
-            <id>add-spark-version-specific-test</id>
-            <phase>process-test-sources</phase>
-            <goals>
-              <goal>add-test-source</goal>
-            </goals>
-            <configuration>
-              <sources>
-                <source>${project.basedir}/src/test/spark${spark.majorVersion}/scala</source>
-              </sources>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
 
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
integration-test/src/test/scala/org/apache/livy/test/InteractiveIT.scala
@@ -87,10 +87,7 @@ class InteractiveIT extends BaseIntegrationTestSuite {
s.run("from pyspark.sql.types import Row").verifyResult("")
s.run("x = [Row(age=1, name=u'a'), Row(age=2, name=u'b'), Row(age=3, name=u'c')]")
.verifyResult("")
// Check if we're running with Spark2.
if (s.run("spark").result().isLeft) {
s.run("sqlContext.sparkSession").verifyResult(".*pyspark\\.sql\\.session\\.SparkSession.*")
}
s.run("spark").verifyResult(".*pyspark\\.sql\\.session\\.SparkSession.*")
s.run("%table x").verifyResult(".*headers.*type.*name.*data.*")
s.run("abcde").verifyError(ename = "NameError", evalue = "name 'abcde' is not defined")
s.run("raise KeyError('foo')").verifyError(ename = "KeyError", evalue = "'foo'")
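The updated assertion relies on `spark` always being defined in a PySpark session, which holds once only Spark 3.x is supported. The same behavior can be checked by hand against a running Livy server; a sketch using Livy's REST API, where the host/port are the usual defaults and the session and statement ids are assumed to be 0:

```bash
# Create a PySpark session, evaluate `spark` in it, then fetch the result;
# with Spark 3 the output should mention pyspark.sql.session.SparkSession.
curl -s -X POST -H 'Content-Type: application/json' \
  -d '{"kind": "pyspark"}' http://localhost:8998/sessions
curl -s -X POST -H 'Content-Type: application/json' \
  -d '{"code": "spark"}' http://localhost:8998/sessions/0/statements
curl -s http://localhost:8998/sessions/0/statements/0
```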
131 changes: 0 additions & 131 deletions integration-test/src/test/spark2/scala/Spark2JobApiIT.scala

This file was deleted.
