Skip to content

FlowWithContext #83

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Nov 5, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 9 additions & 6 deletions examples/SimpleProducer/Program.cs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
using System;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Akka;
using Akka.Actor;
using Akka.Configuration;
using Akka.Streams;
Expand Down Expand Up @@ -31,14 +33,15 @@ public static void Main(string[] args)
Source
.Cycle(() => Enumerable.Range(1, 100).GetEnumerator())
.Select(c => c.ToString())
.Select(elem => new MessageAndMeta<Null, string> { Topic = "akka100", Message = new Message<Null, string> { Value = elem }})
.Via(KafkaProducer.PlainFlow(producerSettings))
.Select(record =>
.Select(elem => ProducerMessage.Single(new ProducerRecord<Null, string>("akka100", elem)))
.Via(KafkaProducer.FlexiFlow<Null, string, NotUsed>(producerSettings))
.Select(result =>
{
Console.WriteLine($"Producer: {record.Topic}/{record.Partition} {record.Offset}: {record.Value}");
return record;
var response = result as Result<Null, string, NotUsed>;
Console.WriteLine($"Producer: {response.Metadata.Topic}/{response.Metadata.Partition} {response.Metadata.Offset}: {response.Metadata.Value}");
return result;
})
.RunWith(Sink.Ignore<DeliveryReport<Null, string>>(), materializer);
.RunWith(Sink.Ignore<IResults<Null, string, NotUsed>>(), materializer);

// TODO: producer as a Committable Sink

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,8 @@ public async Task CommittablePartitionedSource_Should_handle_exceptions_in_strea
return t.Result;
});
})
.MergeSubstreams().As<Source<int, IControl>>()
.MergeSubstreams()
.As<Source<int, IControl>>()
.Scan(0, (c, n) => c + n)
.ToMaterialized(Sink.Last<int>(), Keep.Both)
.MapMaterializedValue(tuple => DrainingControl<int>.Create(tuple.Item1, tuple.Item2))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ public async Task CommitableSource_consumes_messages_from_Producer_without_commi

await Source
.From(Enumerable.Range(1, elementsCount))
.Select(elem => new MessageAndMeta<Null, string> { TopicPartition = topicPartition1, Message = new Message<Null, string> { Value = elem.ToString() } })
.Select(elem => new ProducerRecord<Null, string>(topicPartition1, elem.ToString()))
.RunWith(KafkaProducer.PlainSink(ProducerSettings), Materializer);

var consumerSettings = CreateConsumerSettings<string>(group1);
Expand Down Expand Up @@ -65,7 +65,7 @@ public async Task CommitableSource_resume_from_commited_offset()

await Source
.From(Enumerable.Range(1, 100))
.Select(elem => new MessageAndMeta<Null, string> { TopicPartition = topicPartition1, Message = new Message<Null, string> { Value = elem.ToString() } })
.Select(elem => new ProducerRecord<Null, string>(topicPartition1, elem.ToString()))
.RunWith(KafkaProducer.PlainSink(ProducerSettings), Materializer);

var consumerSettings = CreateConsumerSettings<string>(group1);
Expand Down Expand Up @@ -103,7 +103,7 @@ await Source
// some concurrent publish
await Source
.From(Enumerable.Range(101, 100))
.Select(elem => new MessageAndMeta<Null, string> { TopicPartition = topicPartition1, Message = new Message<Null, string> { Value = elem.ToString() } })
.Select(elem => new ProducerRecord<Null, string>(topicPartition1, elem.ToString()))
.RunWith(KafkaProducer.PlainSink(ProducerSettings), Materializer);

probe2.Request(100);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
using System;
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using Akka.Streams.Dsl;
using Akka.Streams.Implementation.Fusing;
using Akka.Streams.Kafka.Dsl;
using Akka.Streams.Kafka.Extensions;
using Akka.Streams.Kafka.Helpers;
using Akka.Streams.Kafka.Messages;
using Akka.Streams.Kafka.Settings;
using Confluent.Kafka;
using FluentAssertions;
using Xunit;
using Xunit.Abstractions;

namespace Akka.Streams.Kafka.Tests.Integration
{
    /// <summary>
    /// Integration tests for producer flows that propagate a committable-offset context
    /// alongside every element flowing through the stream.
    /// </summary>
    public class FlowWithContextIntegrationTests : KafkaIntegrationTests
    {
        public FlowWithContextIntegrationTests(ITestOutputHelper output, KafkaFixture fixture)
            : base(nameof(FlowWithContextIntegrationTests), output, fixture)
        {
        }

        [Fact]
        public async Task ProducerFlowWithContext_should_work_with_source_with_context()
        {
            // Payload markers: "1" is fanned out to two destination topics, "2" is skipped (pass-through).
            const string duplicateMarker = "1";
            const string ignoreMarker = "2";

            var consumerSettings = CreateConsumerSettings<string, string>(CreateGroup(1));
            var topic1 = CreateTopic(1);
            var topic2 = CreateTopic(2);
            var topic3 = CreateTopic(3);
            var topic4 = CreateTopic(4);
            var producerSettings = BuildProducerSettings<string, string>();
            var committerSettings = CommitterSettings;
            const int totalMessages = 10;
            var totalConsumed = 0;

            // Seed the source topic with the messages the flow under test will consume.
            await ProduceStrings(topic1, Enumerable.Range(1, totalMessages), producerSettings);

            // Consume topic1 with an offset context, wrap each record into a producer envelope,
            // publish via FlowWithContext, and commit the offsets carried as context.
            var control = KafkaConsumer.SourceWithOffsetContext(consumerSettings, Subscriptions.Topics(topic1))
                .Select(record =>
                {
                    IEnvelope<string, string, NotUsed> output;
                    if (record.Value == duplicateMarker)
                    {
                        // One input record becomes two producer records on different topics.
                        output = ProducerMessage.Multi(new[]
                        {
                            new ProducerRecord<string, string>(topic2, record.Key, record.Value),
                            new ProducerRecord<string, string>(topic3, record.Key, record.Value)
                        }.ToImmutableSet());
                    }
                    else if (record.Value == ignoreMarker)
                    {
                        // Nothing is produced, but the offset context still travels downstream to be committed.
                        output = ProducerMessage.PassThrough<string, string>();
                    }
                    else
                    {
                        output = ProducerMessage.Single(new ProducerRecord<string, string>(topic4, record.Key, record.Value));
                    }

                    Log.Debug($"Giving message of type {output.GetType().Name}");
                    return output;
                })
                .Via(KafkaProducer.FlowWithContext<string, string, ICommittableOffset>(producerSettings))
                .AsSource()
                .Log("Produced messages", r => $"Committing {r.Item2.Offset.Topic}:{r.Item2.Offset.Partition}[{r.Item2.Offset.Offset}]")
                .ToMaterialized(Committer.SinkWithOffsetContext<IResults<string, string, ICommittableOffset>>(committerSettings), Keep.Both)
                .MapMaterializedValue(tuple => DrainingControl<NotUsed>.Create(tuple.Item1, tuple.Item2.ContinueWith(t => NotUsed.Instance)))
                .Run(Materializer);

            // Independently count everything that arrives on the three destination topics.
            var (control2, result) = KafkaConsumer.PlainSource(consumerSettings, Subscriptions.Topics(topic2, topic3, topic4))
                .Scan(0, (count, _) => count + 1)
                .Select(count =>
                {
                    totalConsumed = count;
                    return count;
                })
                .ToMaterialized(Sink.Last<int>(), Keep.Both)
                .Run(Materializer);

            // The duplicated record adds one, the ignored record subtracts one: 10 in, 10 out.
            AwaitCondition(() => totalConsumed == totalMessages, TimeSpan.FromSeconds(30));

            AssertTaskCompletesWithin(TimeSpan.FromSeconds(10), control.DrainAndShutdown());
            AssertTaskCompletesWithin(TimeSpan.FromSeconds(10), control2.Shutdown());
            AssertTaskCompletesWithin(TimeSpan.FromSeconds(10), result).Should().Be(totalConsumed);
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ public async Task PlainSink_should_publish_100_elements_to_Kafka_producer()
await Source
.From(Enumerable.Range(1, 100))
.Select(c => c.ToString())
.Select(elem => new MessageAndMeta<Null, string> { TopicPartition = topicPartition1, Message = new Message<Null, string> { Value = elem } })
.Select(elem => new ProducerRecord<Null, string>(topicPartition1, elem.ToString()))
.RunWith(KafkaProducer.PlainSink(ProducerSettings), Materializer);

var dateTimeStart = DateTime.UtcNow;
Expand Down Expand Up @@ -80,9 +80,10 @@ public async Task PlainSink_should_fail_stage_if_broker_unavailable()
var probe = Source
.From(Enumerable.Range(1, 100))
.Select(c => c.ToString())
.Select(elem => new MessageAndMeta<Null, string> { Topic = topic1, Message = new Message<Null, string> { Value = elem } })
.Via(KafkaProducer.PlainFlow(config))
.RunWith(this.SinkProbe<DeliveryReport<Null, string>>(), Materializer);
.Select(elem => new ProducerRecord<Null, string>(topic1, elem.ToString()))
.Select(record => new Message<Null, string, NotUsed>(record, NotUsed.Instance) as IEnvelope<Null, string, NotUsed>)
.Via(KafkaProducer.FlexiFlow<Null, string, NotUsed>(config))
.RunWith(this.SinkProbe<IResults<Null, string, NotUsed>>(), Materializer);

probe.ExpectSubscription();
probe.OnError(new KafkaException(ErrorCode.Local_Transport));
Expand Down
50 changes: 37 additions & 13 deletions src/Akka.Streams.Kafka.Tests/KafkaIntegrationTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -38,16 +38,27 @@ public KafkaIntegrationTests(string actorSystemName, ITestOutputHelper output, K

protected string CreateTopic(int number) => $"topic-{number}-{Uuid}";
protected string CreateGroup(int number) => $"group-{number}-{Uuid}";

protected ProducerSettings<Null, string> ProducerSettings => BuildProducerSettings<Null, string>();

protected ProducerSettings<Null, string> ProducerSettings
protected ProducerSettings<TKey, TValue> BuildProducerSettings<TKey, TValue>()
{
get => ProducerSettings<Null, string>.Create(Sys, null, null).WithBootstrapServers(_fixture.KafkaServer);
return ProducerSettings<TKey, TValue>.Create(Sys, null, null).WithBootstrapServers(_fixture.KafkaServer);
}

protected CommitterSettings CommitterSettings
{
get => CommitterSettings.Create(Sys);
}

protected ConsumerSettings<TKey, TValue> CreateConsumerSettings<TKey, TValue>(string group)
{
return ConsumerSettings<TKey, TValue>.Create(Sys, null, null)
.WithBootstrapServers(_fixture.KafkaServer)
.WithStopTimeout(TimeSpan.FromSeconds(1))
.WithProperty("auto.offset.reset", "earliest")
.WithGroupId(group);
}

protected ConsumerSettings<Null, TValue> CreateConsumerSettings<TValue>(string group)
{
Expand All @@ -58,41 +69,54 @@ protected ConsumerSettings<Null, TValue> CreateConsumerSettings<TValue>(string g
.WithGroupId(group);
}

protected async Task ProduceStrings(string topic, IEnumerable<int> range, ProducerSettings<Null, string> producerSettings)
protected async Task ProduceStrings<TKey>(string topic, IEnumerable<int> range, ProducerSettings<TKey, string> producerSettings)
{
await Source
.From(range)
.Select(elem => new MessageAndMeta<Null, string> { Topic = topic, Message = new Message<Null, string> { Value = elem.ToString() } })
.Select(elem => new ProducerRecord<TKey, string>(topic, elem.ToString()))
.RunWith(KafkaProducer.PlainSink(producerSettings), Materializer);
}

protected async Task ProduceStrings(Func<int, TopicPartition> partitionSelector, IEnumerable<int> range, ProducerSettings<Null, string> producerSettings)
protected async Task ProduceStrings<TKey>(Func<int, TopicPartition> partitionSelector, IEnumerable<int> range, ProducerSettings<TKey, string> producerSettings)
{
await Source
.From(range)
.Select(elem => new MessageAndMeta<Null, string> { TopicPartition = partitionSelector(elem), Message = new Message<Null, string> { Value = elem.ToString() } })
.Select(elem => new ProducerRecord<TKey, string>(partitionSelector(elem), elem.ToString()))
.RunWith(KafkaProducer.PlainSink(producerSettings), Materializer);
}

protected async Task ProduceStrings(TopicPartition topicPartition, IEnumerable<int> range, ProducerSettings<Null, string> producerSettings)
protected async Task ProduceStrings<TKey>(TopicPartition topicPartition, IEnumerable<int> range, ProducerSettings<TKey, string> producerSettings)
{
await Source
.From(range)
.Select(elem => new MessageAndMeta<Null, string> { TopicPartition = topicPartition, Message = new Message<Null, string> { Value = elem.ToString() } })
.Select(elem => new ProducerRecord<TKey, string>(topicPartition, elem.ToString()))
.RunWith(KafkaProducer.PlainSink(producerSettings), Materializer);
}

/// <summary>
/// Asserts that the task completes successfully within the specified timeout.
/// Throws the task's exception if the task fails.
/// </summary>
protected async Task AssertCompletesSuccessfullyWithin(TimeSpan timeout, Task task)
protected void AssertTaskCompletesWithin(TimeSpan timeout, Task task, bool assertIsSuccessful = true)
{
var timeoutTask = Task.Delay(timeout);

await Task.WhenAny(timeoutTask, task);
AwaitCondition(() => task.IsCompleted, timeout, $"task should complete within {timeout} timeout");

if (assertIsSuccessful)
task.IsCompletedSuccessfully.Should().Be(true, "task should compete successfully");
}

/// <summary>
/// Asserts that the task completes successfully within the specified timeout.
/// Throws the task's exception if the task fails.
/// </summary>
protected TResult AssertTaskCompletesWithin<TResult>(TimeSpan timeout, Task<TResult> task, bool assertIsSuccessful = true)
{
AwaitCondition(() => task.IsCompleted, timeout, $"task should complete within {timeout} timeout");

if (assertIsSuccessful)
task.IsCompletedSuccessfully.Should().Be(true, "task should compete successfully");

task.IsCompletedSuccessfully.Should().Be(true, $"Timeout {timeout} while waitilng task finish successfully");
return task.Result;
}

protected async Task GivenInitializedTopic(string topic)
Expand Down
Loading