This commit is contained in:
2024-06-14 16:06:24 -05:00
parent e75c10d6f0
commit 5e3f417c55
8 changed files with 215 additions and 90 deletions

View File

@@ -4,53 +4,17 @@ using Confluent.Kafka;
using Model;
using Repository;
using Serializers;
using Subscriber;
// NOTE(review): this span is a rendered diff, not compilable source — the
// +/- markers were stripped, so pre-change lines (the inline Kafka consume
// loop, removed by this commit) and post-change lines (delegation to
// DataSubscriber, added by this commit) appear interleaved. Hunk header says
// 53 old lines became 17. Comments below mark the apparent old/new split —
// TODO confirm against the raw commit.
class Consumer
{
// Old signature (removed): synchronous entry point.
public static void Main(string[] args)
// New signature (added): async so it can await DataSubscriber.Subscribe().
public static async Task Main(string[] args)
{
// Presumably removed: consumer config now lives inside DataSubscriber
// (same group id / servers appear there) — verify against raw diff.
var conf = new ConsumerConfig{
GroupId = "test-consumer-group",
BootstrapServers = "localhost:29092",
AutoOffsetReset = AutoOffsetReset.Latest
};
// Kept: the repository is still constructed here and injected below.
DataRepository dataRepository = new DataRepository("mongodb://mongo:mongo@localhost:27017", "mongo");
// Presumably removed: consumer construction moved into DataSubscriber.
using (var consumer = new ConsumerBuilder<Ignore, DataModel>(conf)
.SetValueDeserializer(new JsonSerializer<DataModel>())
.Build())
{
consumer.Subscribe("test-topic");
// Added: subscriber now owns topic/servers/consumer wiring.
DataSubscriber dataSubscriber = new DataSubscriber("test-topic", "localhost:29092", dataRepository);
// Presumably removed from here down to the closing brace: the
// Ctrl+C token, consume loop, and Close() now live in
// DataSubscriber.Subscribe() (see the new file in this commit).
CancellationTokenSource cts = new CancellationTokenSource();
Console.CancelKeyPress += (_, e) => {
e.Cancel = true;
cts.Cancel();
};
try
{
while(true)
{
try
{
var cr = consumer.Consume(cts.Token);
dataRepository.Save(cr.Message.Value);
Console.WriteLine($"Consumed message with id '{cr.Message.Value.id}'. Saving value to database");
}
catch (ConsumeException e)
{
Console.WriteLine($"Error occured: {e.Error.Reason}");
}
}
}
catch (OperationCanceledException)
{
// Ensure the consumer leaves the group cleanly and final offsets are committed.
consumer.Close();
}
}
// Added: the new entry point simply awaits the background consume loop.
await dataSubscriber.Subscribe();
}
}

View File

@@ -1,4 +1,5 @@
using Model;
using MongoDB.Bson;
using MongoDB.Driver;
namespace Repository
@@ -29,9 +30,15 @@ namespace Repository
this._dataCollection = db.GetCollection<DataModel>(COLLECTION_NAME);
}
// NOTE(review): diff residue — both the old and the new signature of Save
// are shown with the +/- markers stripped. The commit changes the return
// type from async void (fire-and-forget; exceptions unobservable, callers
// cannot await completion) to async Task (awaitable, exceptions propagate).
public async void Save(DataModel data)
public async Task Save(DataModel data)
{
// Inserts the document into the collection obtained in the constructor.
await this._dataCollection.InsertOneAsync(data);
}
/// <summary>
/// Fetches the single document whose <c>id</c> field equals
/// <paramref name="id"/>; yields the driver's default (null) when no
/// document matches.
/// </summary>
/// <param name="id">Document id to match exactly.</param>
/// <returns>The matching <see cref="DataModel"/>, or null if absent.</returns>
public async Task<DataModel> FindById(string id)
{
    var byId = Builders<DataModel>.Filter.Eq(d => d.id, id);
    var query = this._dataCollection.Find(byId);
    return await query.FirstOrDefaultAsync();
}
}
}

View File

@@ -0,0 +1,68 @@
using Confluent.Kafka;
using Model;
using Repository;
using Serializers;
namespace Subscriber
{
    /// <summary>
    /// Consumes <see cref="DataModel"/> messages from a Kafka topic and
    /// persists each one through the injected <see cref="DataRepository"/>.
    /// </summary>
    public class DataSubscriber
    {
        // Set only in the constructor — marked readonly to make that explicit.
        private readonly string _topic;
        private readonly string _bootstrapServers;
        private readonly DataRepository _dataRepository;
        private readonly IConsumer<Ignore, DataModel> _consumer;

        /// <summary>
        /// Builds (but does not start) the Kafka consumer for the given topic.
        /// </summary>
        /// <param name="topic">Kafka topic to subscribe to.</param>
        /// <param name="bootstrapServers">Kafka bootstrap server list.</param>
        /// <param name="dataRepository">Repository every message is saved to.</param>
        public DataSubscriber(string topic, string bootstrapServers, DataRepository dataRepository)
        {
            this._topic = topic;
            this._bootstrapServers = bootstrapServers;
            this._dataRepository = dataRepository;

            var conf = new ConsumerConfig
            {
                GroupId = "test-consumer-group",
                BootstrapServers = this._bootstrapServers,
                // Earliest: replay unread messages when the group has no offsets.
                AutoOffsetReset = AutoOffsetReset.Earliest
            };
            this._consumer = new ConsumerBuilder<Ignore, DataModel>(conf)
                .SetValueDeserializer(new JsonSerializer<DataModel>())
                .Build();
        }

        /// <summary>
        /// Starts the consume loop on a background task. The loop runs until
        /// Ctrl+C cancels it; each message is saved to the repository.
        /// </summary>
        /// <returns>A task that completes once the loop has shut down.</returns>
        public Task Subscribe()
        {
            this._consumer.Subscribe(_topic);

            CancellationTokenSource cts = new CancellationTokenSource();
            // NOTE(review): this handler is never unsubscribed, so calling
            // Subscribe() more than once would stack handlers — acceptable
            // for a single-shot console app, worth fixing otherwise.
            Console.CancelKeyPress += (_, e) =>
            {
                e.Cancel = true; // keep the process alive; shut down via the token
                cts.Cancel();
            };

            return Task.Run(async () =>
            {
                try
                {
                    while (true)
                    {
                        try
                        {
                            var cr = this._consumer.Consume(cts.Token);
                            await this._dataRepository.Save(cr.Message.Value);
                            Console.WriteLine($"Consumed message with id '{cr.Message.Value.id}'. Saving value to database");
                        }
                        catch (ConsumeException e)
                        {
                            // Undeliverable/malformed message: log and keep consuming.
                            Console.WriteLine($"Error occurred: {e.Error.Reason}");
                        }
                    }
                }
                catch (OperationCanceledException)
                {
                    // Expected on Ctrl+C — shutdown handled in finally below.
                }
                finally
                {
                    // Fix: previously Close() ran only on cancellation, so any
                    // other exception leaked the consumer. Closing here ensures
                    // the consumer always leaves the group cleanly and commits
                    // final offsets.
                    this._consumer.Close();
                }
            });
        }
    }
}