
Commit

feat: upgrade to Confluent.Kafka 2.1.0
Willimann Marco, I231 extern authored and BEagle1984 committed Apr 17, 2023
1 parent 4b5b6b7 commit 147e8de
Showing 6 changed files with 12 additions and 4 deletions.

@@ -31,8 +31,8 @@ This package adds the support for Apache Avro and the schema registry on top of
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Confluent.Kafka" Version="2.0.2" />
<PackageReference Include="Confluent.SchemaRegistry.Serdes.Avro" Version="2.0.2" />
<PackageReference Include="Confluent.Kafka" Version="2.1.0" />
<PackageReference Include="Confluent.SchemaRegistry.Serdes.Avro" Version="2.1.0" />
</ItemGroup>

<ItemGroup>

@@ -25,6 +25,8 @@ public MockedConfluentAdminClient(ClientConfig config, IMockedKafkaOptions optio

public Handle Handle { get; } = new();

+public void SetSaslCredentials(string username, string password) => throw new NotSupportedException();
+
public int AddBrokers(string brokers) => throw new NotSupportedException();

public List<GroupInfo> ListGroups(TimeSpan timeout) => throw new NotSupportedException();

@@ -96,6 +96,8 @@ public MockedConfluentConsumer(

internal Action<IConsumer<byte[]?, byte[]?>, CommittedOffsets>? OffsetsCommittedHandler { get; set; }

+public void SetSaslCredentials(string username, string password) => throw new NotSupportedException();
+
public int AddBrokers(string brokers) => throw new NotSupportedException();

public ConsumeResult<byte[]?, byte[]?> Consume(int millisecondsTimeout) =>

@@ -258,6 +260,8 @@ public List<TopicPartitionOffset> Committed(

public Offset Position(TopicPartition partition) => throw new NotSupportedException();

+public TopicPartitionOffset PositionTopicPartitionOffset(TopicPartition partition) => throw new NotSupportedException();
+
public List<TopicPartitionOffset> OffsetsForTimes(
IEnumerable<TopicPartitionTimestamp> timestampsToSearch,
TimeSpan timeout) =>

@@ -46,6 +46,8 @@ public MockedConfluentProducer(

internal Action<IProducer<byte[]?, byte[]?>, string>? StatisticsHandler { get; set; }

+public void SetSaslCredentials(string username, string password) => throw new NotSupportedException();
+
public int AddBrokers(string brokers) => throw new NotSupportedException();

public Task<DeliveryResult<byte[]?, byte[]?>> ProduceAsync(

@@ -31,7 +31,7 @@ This package contains an implementation of Silverback.Integration for the popula
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Confluent.Kafka" Version="2.0.2" />
<PackageReference Include="Confluent.Kafka" Version="2.1.0" />
</ItemGroup>

<ItemGroup>

@@ -7,7 +7,7 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Confluent.Kafka" Version="2.0.2">
<PackageReference Include="Confluent.Kafka" Version="2.1.0">
<!-- Workaround for xml documentation not being copied to output folder -->
<CopyToOutputDirectory>lib/netcoreapp2.1/*.xml</CopyToOutputDirectory>
</PackageReference>
