Make it easy to configure compression and linger
Commit cc605df (1 parent: 9133254)
Showing 3 changed files with 98 additions and 1 deletion.
zio-kafka-test/src/test/scala/zio/kafka/producer/ProducerCompressionSpec.scala (23 additions, 0 deletions)
@@ -0,0 +1,23 @@
package zio.kafka.producer

import org.apache.kafka.common.record.CompressionType
import zio._
import zio.test._

object ProducerCompressionSpec extends ZIOSpecDefault {

  override def spec: Spec[TestEnvironment with Scope, Any] =
    suite("ProducerCompression")(
      test("all Kafka supported compression codecs have a corresponding ProducerCompression") {
        val compressions = Seq(
          ProducerCompression.NoCompression,
          ProducerCompression.Gzip(),
          ProducerCompression.Snappy(),
          ProducerCompression.Lz4(),
          ProducerCompression.Zstd()
        )
        val availableCompressionsCount = CompressionType.values().length
        assertTrue(availableCompressionsCount == compressions.size)
      }
    )
}
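For intuition, here is a small sketch (not part of the diff) of what the `properties` method of the new `ProducerCompression` values evaluates to for one codec; the map contents follow directly from the class definitions in ProducerCompression.scala below.

// Illustration only (not part of this commit): the map produced by a
// ProducerCompression value, per the definitions in ProducerCompression.scala below.
import org.apache.kafka.clients.producer.ProducerConfig
import zio.kafka.producer.ProducerCompression

object ProducerCompressionPropertiesExample {
  val gzipProps: Map[String, AnyRef] = ProducerCompression.Gzip(level = 6).properties
  // gzipProps == Map(
  //   ProducerConfig.COMPRESSION_TYPE_CONFIG       -> "gzip",  // i.e. "compression.type"
  //   ProducerConfig.COMPRESSION_GZIP_LEVEL_CONFIG -> Int.box(6)
  // )
}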
zio-kafka/src/main/scala/zio/kafka/producer/ProducerCompression.scala (55 additions, 0 deletions)
@@ -0,0 +1,55 @@
package zio.kafka.producer

import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.record.CompressionType

abstract sealed class ProducerCompression(name: String, extra: Option[(String, AnyRef)] = None) {
  def properties: Map[String, AnyRef] =
    Map(ProducerConfig.COMPRESSION_TYPE_CONFIG -> name) ++ extra
}

/**
 * The compression codecs that Kafka supports while producing records.
 */
object ProducerCompression {

  /** Produce Kafka records without compression. */
  case object NoCompression extends ProducerCompression(CompressionType.NONE.name)

  /**
   * Produce Kafka records with GZIP compression.
   * @param level
   *   a value between 1 and 9, or -1 (defaults to -1)
   */
  case class Gzip(level: Int = CompressionType.GZIP.defaultLevel())
      extends ProducerCompression(
        CompressionType.GZIP.name,
        Some(ProducerConfig.COMPRESSION_GZIP_LEVEL_CONFIG -> Int.box(level))
      )

  /** Produce Kafka records with Snappy compression. */
  case class Snappy() extends ProducerCompression(CompressionType.SNAPPY.name)

  /**
   * Produce Kafka records with LZ4 compression.
   * @param level
   *   a value between 1 and 17 (defaults to 9)
   */
  case class Lz4(level: Int = CompressionType.LZ4.defaultLevel())
      extends ProducerCompression(
        CompressionType.LZ4.name,
        Some(ProducerConfig.COMPRESSION_LZ4_LEVEL_CONFIG -> Int.box(level))
      )

  /**
   * Produce Kafka records with Zstd compression.
   *
   * @param level
   *   a value between -131072 and 22 (defaults to 3)
   */
  case class Zstd(level: Int = CompressionType.ZSTD.defaultLevel())
      extends ProducerCompression(
        CompressionType.ZSTD.name,
        Some(ProducerConfig.COMPRESSION_ZSTD_LEVEL_CONFIG -> Int.box(level))
      )
}
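As a usage note, here is a minimal sketch (not part of this commit) of wiring one of these codecs into a producer. It assumes the existing ProducerSettings(bootstrapServers) constructor, a withProperties overload that accepts a Map[String, AnyRef], and the usual Producer.make wiring; the broker address is a placeholder.

// Sketch only: merge a codec's properties into the producer configuration.
// ProducerSettings.withProperties(Map) and Producer.make are assumed from the
// existing zio-kafka API; "localhost:9092" is a placeholder broker address.
import zio._
import zio.kafka.producer.{ Producer, ProducerCompression, ProducerSettings }

object CompressedProducerExample {
  val settings: ProducerSettings =
    ProducerSettings(List("localhost:9092"))
      .withProperties(ProducerCompression.Zstd(level = 3).properties)

  // A scoped Producer layer built from the settings.
  val producerLayer: ZLayer[Any, Throwable, Producer] =
    ZLayer.scoped(Producer.make(settings))
}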