Fix compilation after #1340 merge (#1353)
* Fix compilation after #1340 merge

* Fix compilation test

* Bump `tlBaseVersion`

* Add MiMa exclusions for newly added methods
aartigao authored Oct 9, 2024
1 parent 2441b86 commit 800ea54
Showing 4 changed files with 16 additions and 6 deletions.
15 changes: 13 additions & 2 deletions build.sbt
@@ -24,7 +24,7 @@ val scala213 = "2.13.15"

val scala3 = "3.3.4"

ThisBuild / tlBaseVersion := "3.5"
ThisBuild / tlBaseVersion := "3.6"

ThisBuild / tlCiReleaseBranches := Seq("series/3.x")

@@ -282,7 +282,18 @@ ThisBuild / mimaBinaryIssueFilters ++= {
),
ProblemFilters
.exclude[InheritedNewAbstractMethodProblem]("fs2.kafka.KafkaConsumer.offsetsForTimes"),
-ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("fs2.kafka.KafkaConsumer.listTopics")
+ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("fs2.kafka.KafkaConsumer.listTopics"),
+ProblemFilters
+  .exclude[ReversedMissingMethodProblem]("fs2.kafka.ProducerSettings.failFastProduce"),
+ProblemFilters
+  .exclude[ReversedMissingMethodProblem]("fs2.kafka.ProducerSettings.withFailFastProduce"),
+ProblemFilters
+  .exclude[DirectMissingMethodProblem]("fs2.kafka.ProducerSettings#ProducerSettingsImpl.copy"),
+ProblemFilters
+  .exclude[DirectMissingMethodProblem]("fs2.kafka.ProducerSettings#ProducerSettingsImpl.this"),
+ProblemFilters
+  .exclude[DirectMissingMethodProblem]("fs2.kafka.ProducerSettings#ProducerSettingsImpl.apply"),
+ProblemFilters.exclude[DirectMissingMethodProblem]("fs2.kafka.KafkaProducer.produceRecord")
)
}

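The exclusions line up with the fail-fast produce support merged in #1340: the `ReversedMissingMethodProblem` filters cover the `failFastProduce` and `withFailFastProduce` methods newly added to `ProducerSettings`, while the `DirectMissingMethodProblem` filters cover the old `ProducerSettingsImpl` constructor, `copy`, and `apply` signatures (plus `KafkaProducer.produceRecord`) that no longer exist. Since those changes are not binary compatible with the 3.5 series, `tlBaseVersion` is bumped to "3.6". As a rough sketch of the filter mechanism, assuming the standard sbt-mima-plugin keys (`mimaReportBinaryIssues` lists whatever problems still need filters):

// build.sbt sketch, not the project's full configuration: silence a known,
// intentional binary break reported by MiMa for a newly added method.
import com.typesafe.tools.mima.core.*

ThisBuild / mimaBinaryIssueFilters ++= Seq(
  ProblemFilters
    .exclude[ReversedMissingMethodProblem]("fs2.kafka.ProducerSettings.failFastProduce")
)
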
2 changes: 1 addition & 1 deletion modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala
@@ -190,7 +190,7 @@ object KafkaProducer {
Async[F]
.race(
Async[F]
-.fromFutureCancelable(Async[F].delay(produceRecordError.future, Async[F].unit)),
+.fromFutureCancelable(Async[F].delay((produceRecordError.future, Async[F].unit))),
produceRecords(produceRecordError.some)
)
.rethrow
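
Both producer changes fix the same compile error: `Async[F].fromFutureCancelable` takes a single effect that yields a pair, `F[(Future[A], F[Unit])]`, i.e. the `Future` together with a cancelation finalizer. The old code passed two arguments to `Async[F].delay`, which only accepts one, so the fix wraps them in an explicit tuple. A minimal, self-contained sketch of the corrected shape (the `awaitPromise` helper and the bare `Promise` are illustrative only, not the actual fs2-kafka code):

import scala.concurrent.Promise

import cats.effect.{Async, IO}
import cats.effect.unsafe.implicits.global

object FromFutureCancelableSketch {

  // fromFutureCancelable expects F[(Future[A], F[Unit])]: the Future paired with a
  // cancelation finalizer, so both values must sit inside one explicit tuple.
  def awaitPromise[F[_], A](promise: Promise[A])(implicit F: Async[F]): F[A] =
    F.fromFutureCancelable(F.delay((promise.future, F.unit)))

  def main(args: Array[String]): Unit = {
    val p = Promise[Int]()
    p.success(42)
    // The effect completes once the promise is fulfilled.
    println(awaitPromise[IO, Int](p).unsafeRunSync()) // prints 42
  }
}

The identical tuple fix is applied in TransactionalKafkaProducer.scala below.
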
2 changes: 1 addition & 1 deletion modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala
Expand Up @@ -163,7 +163,7 @@ object TransactionalKafkaProducer {
Async[F]
.race(
Async[F].fromFutureCancelable(
-Async[F].delay(produceRecordError.future, Async[F].unit)
+Async[F].delay((produceRecordError.future, Async[F].unit))
),
produceRecords(produceRecordError.some)
)
3 changes: 1 addition & 2 deletions modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala
Expand Up @@ -21,7 +21,6 @@ import fs2.kafka.internal.converters.collection.*
import fs2.kafka.producer.MkProducer

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerGroupMetadata, OffsetAndMetadata}
-import org.apache.kafka.clients.producer
import org.apache.kafka.clients.producer.{Callback, ProducerConfig, RecordMetadata}
import org.apache.kafka.common.errors.ProducerFencedException
import org.apache.kafka.common.serialization.ByteArraySerializer
@@ -425,7 +424,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues {
new ByteArraySerializer
) {
override def send(
-record: producer.ProducerRecord[Array[Byte], Array[Byte]],
+record: org.apache.kafka.clients.producer.ProducerRecord[Array[Byte], Array[Byte]],
callback: Callback
): Future[RecordMetadata] = {
val key = new String(record.key(), StandardCharsets.UTF_8)
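
The test change follows from the same merge: the bare package import `org.apache.kafka.clients.producer` is removed, so the overridden `send` signature now spells out the fully qualified `ProducerRecord`. A small sketch of the resulting shape, using kafka-clients' `MockProducer` purely as an illustrative stand-in for the spec's producer:

import java.util.concurrent.Future

import org.apache.kafka.clients.producer.{Callback, MockProducer, RecordMetadata}
import org.apache.kafka.common.serialization.ByteArraySerializer

// Illustrative only: with the bare package import gone, the record type is
// written out in full in the override signature.
class LoggingProducer
    extends MockProducer[Array[Byte], Array[Byte]](
      true,
      new ByteArraySerializer,
      new ByteArraySerializer
    ) {

  override def send(
    record: org.apache.kafka.clients.producer.ProducerRecord[Array[Byte], Array[Byte]],
    callback: Callback
  ): Future[RecordMetadata] = {
    println(s"send to ${record.topic}")
    super.send(record, callback)
  }
}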
