Skip to content

Commit accc681

Browse files
authored
do not depend on private Spark API, avoids sealing violation (#3586)
#3585
1 parent ee57602 commit accc681

File tree

1 file changed

+41
-2
lines changed

1 file changed

+41
-2
lines changed

spark/src/main/scala/org/apache/spark/rdd/FilteredCartesianRDD.scala renamed to spark/src/main/scala/geotrellis/spark/join/FilteredCartesianRDD.scala

Lines changed: 41 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,51 @@
1919
*
2020
* 1. https://github.com/apache/spark/blob/2f8776ccad532fbed17381ff97d302007918b8d8/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
2121
*/
22-
package org.apache.spark.rdd
22+
package geotrellis.spark.join
2323

24+
import org.apache.spark._
25+
import org.apache.spark.rdd.RDD
26+
import org.log4s.getLogger
2427

28+
import java.io.{IOException, ObjectOutputStream}
2529
import scala.reflect.ClassTag
30+
import scala.util.control.NonFatal
2631

27-
import org.apache.spark._
32+
/** A [[Partition]] of the cartesian product of two parent RDDs, pairing
  * partition `s1Index` of `rdd1` with partition `s2Index` of `rdd2`.
  *
  * Adapted from Spark's own `CartesianPartition` (see the upstream link in
  * the file header) so that this project does not depend on Spark's private
  * `org.apache.spark.rdd` package.
  *
  * @param idx     index of this partition within the cartesian RDD
  * @param rdd1    first parent RDD; `@transient` so the whole RDD is not
  *                dragged into the serialized partition
  * @param rdd2    second parent RDD; likewise `@transient`
  * @param s1Index partition index into `rdd1`
  * @param s2Index partition index into `rdd2`
  */
private class CartesianPartition(
    idx: Int,
    @transient private val rdd1: RDD[_],
    @transient private val rdd2: RDD[_],
    s1Index: Int,
    s2Index: Int
  ) extends Partition {

  // Transient + lazy: the logger is re-created on each JVM after
  // deserialization rather than being serialized with the partition.
  @transient private[this] lazy val logger = getLogger

  // Mutable on purpose: refreshed from the parent RDDs in writeObject just
  // before serialization (see below), mirroring Spark's implementation.
  var s1 = rdd1.partitions(s1Index)
  var s2 = rdd2.partitions(s2Index)
  override val index: Int = idx

  /** Runs `block`, logging and rethrowing any failure as an `IOException`.
    *
    * An `IOException` is rethrown as-is; any other non-fatal throwable is
    * wrapped in an `IOException`, since Java serialization hooks such as
    * `writeObject` are declared to throw `IOException`. Fatal errors
    * (OOM, etc.) propagate untouched via `NonFatal`.
    */
  private def tryOrIOException[T](block: => T): T = {
    try {
      block
    } catch {
      case e: IOException =>
        logger.error(e)("Exception encountered")
        throw e
      case NonFatal(e) =>
        logger.error(e)("Exception encountered")
        throw new IOException(e)
    }
  }

  // Java-serialization hook: the exact name/signature
  // `private def writeObject(ObjectOutputStream)` is discovered reflectively
  // by ObjectOutputStream — do not rename.
  @throws(classOf[IOException])
  private def writeObject(oos: ObjectOutputStream): Unit = tryOrIOException {
    // Update the reference to parent split at the time of task serialization
    // (the transient rdd1/rdd2 are still available on the driver here).
    s1 = rdd1.partitions(s1Index)
    s2 = rdd2.partitions(s2Index)
    oos.defaultWriteObject()
  }
}
2867

2968
/** Performs a cartesian join of two RDDs using filter and refine pattern.
3069
*

0 commit comments

Comments
 (0)