Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- Add ZStd compression support for GTiff [#3580](https://github.com/locationtech/geotrellis/pull/3580)
- Do not depend on private Spark API, avoids sealing violation [#3586](https://github.com/locationtech/geotrellis/pull/3586)
- Add predictor 2 (integer) and predictor 3 (float) support for writing compressed GTiff files [#3588](https://github.com/locationtech/geotrellis/pull/3588)

## [3.8.0] - 2025-04-23

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,34 +18,30 @@ package geotrellis.raster.io.geotiff.compression

import io.circe._
import io.circe.syntax._
import cats.syntax.either._

trait Compression extends Serializable {
  /** Creates a [[Compressor]] sized for the given number of segments. */
  def createCompressor(segmentCount: Int): Compressor

  /**
   * Returns a Compression whose compressors run segment bytes through the
   * given predictor's encoding step before compressing them.
   */
  def withPredictor(predictor: Predictor): Compression =
    new Compression {
      def createCompressor(segmentCount: Int): Compressor =
        Compression.this.createCompressor(segmentCount).withPredictorEncoding(predictor)
    }
}

object Compression {
implicit val compressionDecoder: Decoder[Compression] =
new Decoder[Compression] {
final def apply(c: HCursor): Decoder.Result[Compression] = {
c.downField("compressionType").as[String].map {
case "NoCompression" => NoCompression
case _ =>
c.downField("level").as[Int] match {
case Left(_) => DeflateCompression()
case Right(i) => DeflateCompression(i)
}
}
(c: HCursor) =>
c.downField("compressionType").as[String].map {
case "NoCompression" => NoCompression
case _ =>
c.downField("level").as[Int] match {
case Left(_) => DeflateCompression()
case Right(i) => DeflateCompression(i)
}
}
}

implicit val compressionEncoder: Encoder[Compression] =
new Encoder[Compression] {
final def apply(a: Compression): Json = a match {
case NoCompression =>
Json.obj(("compressionType", "NoCompression".asJson))
case d: DeflateCompression =>
Json.obj(("compressionType", "Deflate".asJson), ("level", d.level.asJson))
}
}
implicit val compressionEncoder: Encoder[Compression] = {
case NoCompression =>
Json.obj(("compressionType", "NoCompression".asJson))
case d: DeflateCompression =>
Json.obj(("compressionType", "Deflate".asJson), ("level", d.level.asJson))
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,23 @@

package geotrellis.raster.io.geotiff.compression

import geotrellis.raster.io.geotiff.tags.TiffTags

trait Compressor extends Serializable {
  /** Compresses the bytes of the segment at the given index. */
  def compress(bytes: Array[Byte], segmentIndex: Int): Array[Byte]

  /** Returns the decompressor that can decompress the segments compressed by this compressor */
  def createDecompressor(): Decompressor

  /**
   * Decorates this compressor so that every segment is run through the given
   * predictor's encoding before compression; the matching decompressor applies
   * the predictor's decoding after decompression.
   */
  def withPredictorEncoding(predictor: Predictor): Compressor = {
    // Capture the decorated compressor so the anonymous class below reads naturally.
    val underlying = this
    new Compressor {
      def compress(bytes: Array[Byte], segmentIndex: Int): Array[Byte] =
        underlying.compress(predictor.encode(bytes, segmentIndex), segmentIndex = segmentIndex)

      /** Returns the decompressor that can decompress the segments compressed by this compressor */
      def createDecompressor(): Decompressor =
        underlying.createDecompressor().withPredictorDecoding(predictor)
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -34,18 +34,18 @@ trait Decompressor extends Serializable {
*/
def flipEndian(bytesPerFlip: Int): Decompressor =
new Decompressor {
def code = Decompressor.this.code
override def predictorCode = Decompressor.this.predictorCode
def code: Int = Decompressor.this.code
override def predictorCode: Int = Decompressor.this.predictorCode

override
def byteOrder = ByteOrder.LITTLE_ENDIAN // Since we have to flip, image data is in Little Endian
def byteOrder: ByteOrder = ByteOrder.LITTLE_ENDIAN // Since we have to flip, image data is in Little Endian

def decompress(bytes: Array[Byte], segmentIndex: Int): Array[Byte] =
flip(Decompressor.this.decompress(bytes, segmentIndex))

def flip(bytes: Array[Byte]): Array[Byte] = {
val arr = bytes.clone
val size = arr.size
val size = arr.length

var i = 0
while (i < size) {
Expand All @@ -62,14 +62,14 @@ trait Decompressor extends Serializable {
}
}

def withPredictor(predictor: Predictor): Decompressor =
def withPredictorDecoding(predictor: Predictor): Decompressor =
new Decompressor {
def code = Decompressor.this.code
override def predictorCode = predictor.code
override def byteOrder = Decompressor.this.byteOrder
def code: Int = Decompressor.this.code
override def predictorCode: Int = predictor.code
override def byteOrder: ByteOrder = Decompressor.this.byteOrder

def decompress(bytes: Array[Byte], segmentIndex: Int): Array[Byte] =
predictor(Decompressor.this.decompress(bytes, segmentIndex), segmentIndex)
predictor.decode(Decompressor.this.decompress(bytes, segmentIndex), segmentIndex)
}
}

Expand All @@ -88,9 +88,9 @@ object Decompressor {
def checkPredictor(d: Decompressor): Decompressor = {
val predictor = Predictor(tiffTags)
if(predictor.checkEndian)
checkEndian(d).withPredictor(predictor)
checkEndian(d).withPredictorDecoding(predictor)
else
d.withPredictor(predictor)
d.withPredictorDecoding(predictor)
}

val segmentCount = tiffTags.segmentCount
Expand All @@ -108,7 +108,7 @@ object Decompressor {

tiffTags.compression match {
case Uncompressed =>
checkEndian(NoCompression)
checkEndian(NoCompressor)
case LZWCoded =>
checkPredictor(LZWDecompressor(segmentSizes))
case ZLibCoded | PkZipCoded =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,20 @@ import spire.syntax.cfor._

/** See TIFF Technical Note 3 */
object FloatingPointPredictor {

def apply(imageData: GeoTiffImageData): Predictor = {
val colsPerRow = Predictor.colsPerRow(imageData)
val rowsInSegment = Predictor.rowsInSegment(imageData)

if (imageData.segmentLayout.hasPixelInterleave)
new FloatingPointPredictor(colsPerRow, rowsInSegment, imageData.bandType, imageData.bandCount)
else
new FloatingPointPredictor(colsPerRow, rowsInSegment, imageData.bandType, 1)
}

def apply(tiffTags: TiffTags): Predictor = {
val colsPerRow = tiffTags.rowSize
val rowsInSegment: (Int => Int) = { i => tiffTags.rowsInSegment(i) }
val rowsInSegment: Int => Int = { i => tiffTags.rowsInSegment(i) }

val bandType = tiffTags.bandType

Expand All @@ -36,10 +47,45 @@ object FloatingPointPredictor {
}

private class FloatingPointPredictor(colsPerRow: Int, rowsInSegment: Int => Int, bandType: BandType, bandCount: Int) extends Predictor {
val code = Predictor.PREDICTOR_FLOATINGPOINT
val code: Int = Predictor.PREDICTOR_FLOATINGPOINT
val checkEndian = false

def apply(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
private def encodeDeltaBytes(bytes: Array[Byte], rows: Int): Array[Byte] = {
  // In-place horizontal byte differencing over byte-shuffled rows
  // (second stage of the TIFF predictor-3 encode). Each byte becomes the
  // delta against the byte `bandCount` positions earlier in the same row,
  // walking right-to-left so untouched originals feed every subtraction.
  val bytesPerSample = bandType.bytesPerSample
  // Byte columns per band plane within a row; times bandCount this spans the row.
  val byteColsPerRow = colsPerRow * bytesPerSample
  val bytesPerRow = colsPerRow * bandCount * bytesPerSample

  cfor(0)(_ < rows, _ + 1) { row =>
    val rowOffset = row * bytesPerRow
    cfor(byteColsPerRow - 1)(_ > 0, _ - 1) { col =>
      cfor(0)(_ < bandCount, _ + 1) { band =>
        // Hoist the target index; the predecessor is exactly bandCount bytes back.
        val i = rowOffset + col * bandCount + band
        bytes(i) = (bytes(i) - bytes(i - bandCount)).toByte
      }
    }
  }
  bytes
}

def encode(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
  // Predictor-3 (floating point) encode, per TIFF Technical Note 3:
  // first shuffle each row so that same-order bytes of all samples are
  // grouped into contiguous byte planes, then delta-encode the shuffled row.
  val rows = rowsInSegment(segmentIndex)
  val sampleSize = bandType.bytesPerSample
  val samplesPerRow = colsPerRow * bandCount
  val rowStride = samplesPerRow * sampleSize

  val shuffled = new Array[Byte](bytes.length)
  var row = 0
  while (row < rows) {
    val base = rowStride * row
    var sample = 0
    while (sample < samplesPerRow) {
      var b = 0
      while (b < sampleSize) {
        // Byte b of sample `sample` lands in byte plane b of this row.
        shuffled(base + b * samplesPerRow + sample) = bytes(base + sampleSize * sample + b)
        b += 1
      }
      sample += 1
    }
    row += 1
  }
  encodeDeltaBytes(shuffled, rows)
}

override def decode(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
val rows = rowsInSegment(segmentIndex)
val stride = bandCount
val bytesPerSample = bandType.bytesPerSample
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,14 +24,31 @@ import java.nio.ByteBuffer
import spire.syntax.cfor._

object HorizontalPredictor {

/**
 * Builds a horizontal (predictor 2) predictor for the given image data.
 * With pixel-interleaved segments the differencing stride is the band count;
 * band-interleaved segments difference each plane independently (stride 1).
 */
def apply(imageData: GeoTiffImageData): Predictor = {
  val colsPerRow = Predictor.colsPerRow(imageData)
  val rowsInSegment = Predictor.rowsInSegment(imageData)

  val bands =
    if (imageData.segmentLayout.hasPixelInterleave) imageData.bandCount
    else 1

  new HorizontalPredictor(colsPerRow, rowsInSegment, bands).forBandType(imageData.bandType)
}


def apply(tiffTags: TiffTags): Predictor = {
val colsPerRow = tiffTags.rowSize
val rowsInSegment: (Int => Int) = { i => tiffTags.rowsInSegment(i) }
val rowsInSegment: Int => Int = { i => tiffTags.rowsInSegment(i) }

val bandType = tiffTags.bandType

val predictor =
if(tiffTags.hasPixelInterleave) {
if (tiffTags.hasPixelInterleave) {
new HorizontalPredictor(colsPerRow, rowsInSegment, tiffTags.bandCount)
} else {
new HorizontalPredictor(colsPerRow, rowsInSegment, 1)
Expand All @@ -42,24 +59,55 @@ object HorizontalPredictor {

private class HorizontalPredictor(cols: Int, rowsInSegment: Int => Int, bandCount: Int) {
def forBandType(bandType: BandType): Predictor = {
val applyFunc: (Array[Byte], Int) => Array[Byte] =
val encodeFunc: (Array[Byte], Int) => Array[Byte] =
bandType.bitsPerSample match {
case 8 => apply8 _
case 16 => apply16 _
case 32 => apply32 _
case 8 => encode8
case 16 => encode16
case 32 => encode32
case _ =>
throw new MalformedGeoTiffException(s"""Horizontal differencing "Predictor" not supported with ${bandType.bitsPerSample} bits per sample""")
}

val decodeFunc: (Array[Byte], Int) => Array[Byte] =
bandType.bitsPerSample match {
case 8 => decode8
case 16 => decode16
case 32 => decode32
case _ =>
throw new MalformedGeoTiffException(s"""Horizontal differencing "Predictor" not supported with ${bandType.bitsPerSample} bits per sample""")
}


new Predictor {
val code = Predictor.PREDICTOR_HORIZONTAL
val code: Int = Predictor.PREDICTOR_HORIZONTAL
val checkEndian = true
def apply(bytes: Array[Byte], segmentIndex: Int): Array[Byte] =
applyFunc(bytes, segmentIndex)

def encode(bytes: Array[Byte], segmentIndex: Int): Array[Byte] =
encodeFunc(bytes, segmentIndex)

def decode(bytes: Array[Byte], segmentIndex: Int): Array[Byte] =
decodeFunc(bytes, segmentIndex)
}
}

/**
 * Horizontal differencing over 8-bit samples: each byte becomes the delta
 * against the sample `bandCount` positions earlier in its row; the first
 * pixel of each row is copied unchanged. Returns a fresh array.
 */
def encode8(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
  val rows = rowsInSegment(segmentIndex)
  val bytesPerRow = bytes.length / rows
  val encoded = new Array[Byte](bytes.length)

  cfor(0)(_ < rows, _ + 1) { row =>
    val rowStart = row * bytesPerRow
    cfor(bytesPerRow - 1)({ k => k >= 0 }, _ - 1) { col =>
      val i = rowStart + col
      encoded(i) =
        if (col < bandCount) bytes(i)
        else (bytes(i) - bytes(i - bandCount)).toByte
    }
  }
  encoded
}

def apply8(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
def decode8(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
val rows = rowsInSegment(segmentIndex)

cfor(0)(_ < rows, _ + 1) { row =>
Expand All @@ -69,11 +117,30 @@ object HorizontalPredictor {
count += 1
}
}

bytes
}

def apply16(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
/**
 * Horizontal differencing over 16-bit samples, viewed through short buffers:
 * each short becomes the delta against the sample `bandCount` positions
 * earlier in its row; the first pixel of each row is copied unchanged.
 * Returns a fresh array.
 */
def encode16(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
  val encoded = new Array[Byte](bytes.length)
  val out = ByteBuffer.wrap(encoded).asShortBuffer
  val src = ByteBuffer.wrap(bytes).asShortBuffer

  val rows = rowsInSegment(segmentIndex)
  val shortsPerRow = bytes.length / (rows * 2)

  cfor(0)(_ < rows, _ + 1) { row =>
    val rowStart = row * shortsPerRow
    cfor(shortsPerRow - 1)({ k => k >= 0 }, _ - 1) { col =>
      val i = rowStart + col
      val value =
        if (col < bandCount) src.get(i)
        else (src.get(i) - src.get(i - bandCount)).toShort
      out.put(i, value)
    }
  }
  encoded
}

def decode16(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
val buffer = ByteBuffer.wrap(bytes).asShortBuffer
val rows = rowsInSegment(segmentIndex)

Expand All @@ -84,11 +151,30 @@ object HorizontalPredictor {
count += 1
}
}

bytes
}

def apply32(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
/**
 * Horizontal differencing over 32-bit samples, viewed through int buffers:
 * each int becomes the delta against the sample `bandCount` positions
 * earlier in its row; the first pixel of each row is copied unchanged.
 * Returns a fresh array.
 */
def encode32(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
  val encoded = new Array[Byte](bytes.length)
  val out = ByteBuffer.wrap(encoded).asIntBuffer
  val src = ByteBuffer.wrap(bytes).asIntBuffer

  val rows = rowsInSegment(segmentIndex)
  val intsPerRow = bytes.length / (rows * 4)

  cfor(0)(_ < rows, _ + 1) { row =>
    val rowStart = row * intsPerRow
    cfor(intsPerRow - 1)({ k => k >= 0 }, _ - 1) { col =>
      val i = rowStart + col
      val value =
        if (col < bandCount) src.get(i)
        else src.get(i) - src.get(i - bandCount)
      out.put(i, value)
    }
  }
  encoded
}

def decode32(bytes: Array[Byte], segmentIndex: Int): Array[Byte] = {
val buffer = ByteBuffer.wrap(bytes).asIntBuffer
val rows = rowsInSegment(segmentIndex)

Expand All @@ -99,7 +185,6 @@ object HorizontalPredictor {
count += 1
}
}

bytes
}
}
Expand Down
Loading