I would like to set flowId to MDLC before consuming an item by any of my Rabbit consumers.
At first, I called SetMetaData at the start of Consume:
public class SomeConsumer: IConsumer<ItemDto>
{
private IServiceLogger _logger;
public SomeConsumer(IServiceLogger logger)
{
_logger = logger;
}
public async Task Consume(ConsumeContext<ItemDto> context)
{
//Set flowId
_logger.SetMetaData(GetFlowId(context.Message));
...
}
}
public class ServiceLogger : IServiceLogger
{
public void SetMetaData(string id)
{
MappedDiagnosticsLogicalContext.Set("flowId", id);
}
}
Since I have multiple consumers, I looked for a more generic solution, so I implemented MassTransit's IConsumeObserver and tried calling SetMetaData in PreConsume:
public class ConsumeObserver : IConsumeObserver
{
private IServiceLogger _logger;
public ConsumeObserver(IServiceLogger logger)
{
_logger = logger;
}
public async Task PreConsume<T>(ConsumeContext<T> context) where T : class
{
//Set flowId
_logger.SetMetaData(GetFlowId(context.Message));
await context.ConsumeCompleted;
}
...
}
But flowId is set only inside PreConsume; once the message reaches a consumer, flowId is empty in the logger.
How can I set MDLC before consuming an item?
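One direction that might help (a sketch, not a verified fix): a MassTransit consume filter runs in the same logical call context as the consumer, so an MDLC value set before next.Send(context) should still be visible inside Consume. GetFlowId is the same helper used above; registration (for example via cfg.UseConsumeFilter(typeof(FlowIdFilter<>), context) in recent MassTransit versions) is assumed and not shown in the question.
// Hypothetical filter; IFilter<ConsumeContext<T>> comes from MassTransit/GreenPipes.
public class FlowIdFilter<T> : IFilter<ConsumeContext<T>> where T : class
{
    public async Task Send(ConsumeContext<T> context, IPipe<ConsumeContext<T>> next)
    {
        // Set the MDLC value in the async flow that will also run the consumer.
        // GetFlowId is the question's own helper, not defined here.
        MappedDiagnosticsLogicalContext.Set("flowId", GetFlowId(context.Message));
        await next.Send(context);
    }

    public void Probe(ProbeContext context) { }
}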
Let's say I have several ASP.NET BackgroundServices and each is logging to its own scope/operation (OP1 and OP2).
public class MyBackgroundService1 : BackgroundService
{
private readonly ILogger<MyBackgroundService1> _logger;
public MyBackgroundService1(ILogger<MyBackgroundService1> logger)
{
_logger = logger;
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
var activity = new Activity("OP1");
activity.Start();
while (!stoppingToken.IsCancellationRequested)
{
_logger.LogInformation("Hello from MyBackgroundService1");
await Task.Delay(5000, stoppingToken);
}
}
}
public class MyBackgroundService2 : BackgroundService
{
private readonly ILogger<MyBackgroundService2> _logger;
public MyBackgroundService2(ILogger<MyBackgroundService2> logger)
{
_logger = logger;
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
var activity = new Activity("OP2");
activity.Start();
while (!stoppingToken.IsCancellationRequested)
{
_logger.LogInformation("Hello from MyBackgroundService2");
await Task.Delay(1000, stoppingToken);
}
}
}
Now I would like to use Blazor and want to display a table per operation with all corresponding logs.
Example output
OP1 Logs:
Hello from MyBackgroundService1
Hello from MyBackgroundService1
OP2 Logs:
Hello from MyBackgroundService2
Hello from MyBackgroundService2
How would I do that?
For this purpose, you need to create a log provider that stores the log entries in the database; the Blazor app can then read them back from the log table.
First, create a class to store logs in the database as follows:
public class DBLog
{
public int DBLogId { get; set; }
public string? LogLevel { get; set; }
public string? EventName { get; set; }
public string? Message { get; set; }
public string? StackTrace { get; set; }
public DateTime CreatedDate { get; set; } = DateTime.Now;
}
Now we need to create a custom DBLogger. The DBLogger class implements the ILogger interface and has three methods; the most important is Log, which is called every time the logger is used in the program. For details on the other two methods (BeginScope and IsEnabled), see the ILogger documentation.
public class DBLogger:ILogger
{
private readonly LogLevel _minLevel;
private readonly DbLoggerProvider _loggerProvider;
private readonly string _categoryName;
public DBLogger(
DbLoggerProvider loggerProvider,
string categoryName
)
{
_loggerProvider = loggerProvider ?? throw new ArgumentNullException(nameof(loggerProvider));
_categoryName = categoryName;
}
public IDisposable BeginScope<TState>(TState state)
{
return new NoopDisposable();
}
public bool IsEnabled(LogLevel logLevel)
{
return logLevel >= _minLevel;
}
public void Log<TState>(
LogLevel logLevel,
EventId eventId,
TState state,
Exception exception,
Func<TState, Exception, string> formatter)
{
if (!IsEnabled(logLevel))
{
return;
}
if (formatter == null)
{
throw new ArgumentNullException(nameof(formatter));
}
var message = formatter(state, exception);
if (exception != null)
{
message = $"{message}{Environment.NewLine}{exception}";
}
if (string.IsNullOrEmpty(message))
{
return;
}
var dblLogItem = new DBLog()
{
EventName = eventId.Name,
LogLevel = logLevel.ToString(),
Message = $"{_categoryName}{Environment.NewLine}{message}",
StackTrace = exception?.StackTrace
};
_loggerProvider.AddLogItem(dblLogItem);
}
private class NoopDisposable : IDisposable
{
public void Dispose()
{
// Nothing to release; BeginScope just needs something disposable to return.
}
}
}
Now we need to create a custom log provider that creates instances of the DBLogger above and writes the queued log items to the database in the background.
public class DbLoggerProvider : ILoggerProvider
{
private readonly CancellationTokenSource _cancellationTokenSource = new();
private readonly IList<DBLog> _currentBatch = new List<DBLog>();
private readonly TimeSpan _interval = TimeSpan.FromSeconds(2);
private readonly BlockingCollection<DBLog> _messageQueue = new(new ConcurrentQueue<DBLog>());
private readonly Task _outputTask;
private readonly IServiceProvider _serviceProvider;
private bool _isDisposed;
public DbLoggerProvider(IServiceProvider serviceProvider)
{
_serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider));
_outputTask = Task.Run(ProcessLogQueue);
}
public ILogger CreateLogger(string categoryName)
{
return new DBLogger(this, categoryName);
}
private async Task ProcessLogQueue()
{
while (!_cancellationTokenSource.IsCancellationRequested)
{
while (_messageQueue.TryTake(out var message))
{
try
{
_currentBatch.Add(message);
}
catch
{
//cancellation token canceled or CompleteAdding called
}
}
await SaveLogItemsAsync(_currentBatch, _cancellationTokenSource.Token);
_currentBatch.Clear();
await Task.Delay(_interval, _cancellationTokenSource.Token);
}
}
internal void AddLogItem(DBLog appLogItem)
{
if (!_messageQueue.IsAddingCompleted)
{
_messageQueue.Add(appLogItem, _cancellationTokenSource.Token);
}
}
private async Task SaveLogItemsAsync(IList<DBLog> items, CancellationToken cancellationToken)
{
try
{
if (!items.Any())
{
return;
}
// We need a separate context for the logger to call its SaveChanges several times,
// without using the current request's context and changing its internal state.
var scopeFactory = _serviceProvider.GetRequiredService<IServiceScopeFactory>();
using (var scope = scopeFactory.CreateScope())
{
var scopedProvider = scope.ServiceProvider;
using (var newDbContext = scopedProvider.GetRequiredService<ApplicationDbContext>())
{
foreach (var item in items)
{
var addedEntry = newDbContext.DbLogs.Add(item);
}
await newDbContext.SaveChangesAsync(cancellationToken);
// ...
}
}
}
catch
{
// don't throw exceptions from logger
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "don't throw exceptions from logger")]
private void Stop()
{
_cancellationTokenSource.Cancel();
_messageQueue.CompleteAdding();
try
{
_outputTask.Wait(_interval);
}
catch
{
// don't throw exceptions from logger
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (!_isDisposed)
{
try
{
if (disposing)
{
Stop();
_messageQueue.Dispose();
_cancellationTokenSource.Dispose();
}
}
finally
{
_isDisposed = true;
}
}
}
}
In the end, it is enough to call this custom log provider (DbLoggerProvider) in the Startup.cs or Program.cs class.
loggerFactory.AddProvider(new DbLoggerProvider(app.ApplicationServices));
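Alternatively (a sketch, assuming the .NET 6 minimal hosting model rather than Startup.cs): registering the provider with the container lets the default logging factory pick it up and supply the IServiceProvider constructor argument itself.
// Program.cs sketch: the logging infrastructure resolves all registered ILoggerProvider instances.
builder.Services.AddSingleton<ILoggerProvider, DbLoggerProvider>();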
From now on, every time we call _logger.LogInformation(...), the log entry will also be stored in the database.
Note: Because the number of calls to record logs in the database may be high, a concurrent queue is used to store logs.
If you like, you can refer to my repository that implements the same method.
In order to log the areas (scopes/operations) separately, you can create several different DBLoggers that store their entries in different tables; a simpler single-table variant is sketched below.
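A single-table variant (my assumption, not part of the answer above): because each BackgroundService starts its own Activity ("OP1", "OP2"), the current operation name is available at the point the log entry is created, so it can be stored in an extra column and grouped on later. The Operation property is hypothetical.
// Inside DBLogger.Log<TState>, when building the entry (Operation is an assumed new column on DBLog):
var dblLogItem = new DBLog
{
    EventName = eventId.Name,
    LogLevel = logLevel.ToString(),
    Message = $"{_categoryName}{Environment.NewLine}{message}",
    StackTrace = exception?.StackTrace,
    Operation = Activity.Current?.OperationName // "OP1" or "OP2" from the BackgroundServices
};

// In the Blazor component, one table per operation can then be built from a grouped query:
var logsByOperation = (await dbContext.DbLogs.AsNoTracking().ToListAsync())
    .GroupBy(l => l.Operation);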
I have the following controller
[Route("some-value")]
[ApiController]
public class SomeValueController : ControllerBase
{
private readonly ClientProvider _clientProvider;
public SomeValueController(ClientProvider clientProvider)
{
_clientProvider = clientProvider;
}
[HttpGet]
public async Task<ActionResult> Index([FromQuery] string from, [FromQuery] string to)
{
var values = await _clientProvider.GetClient().GetValues(from, to);
return Ok(values);
}
}
I want this controller to use one HttpClient for 50% of requests and a different HttpClient for the other 50% of requests. That's why I have introduced the ClientProvider. It is currently implemented as follows:
public class ClientProvider
{
private readonly MainClient _mainClient;
private readonly BackupClient _backupClient;
private readonly ILogger<ClientProvider> _logger;
private bool _flag = false;
private object _lock = new Object();
public ClientProvider(BackupClient backupClient,
MainClient mainClient, ILogger<ClientProvider> logger)
{
_backupClient = backupClient;
_mainClient = mainClient;
_logger = logger;
}
public IExchangeClient GetClient()
{
lock (_lock)
{
var client = _flag ? _backupClient as IExchangeClient : _mainClient;
_flag = !_flag;
return client;
}
}
}
It is obviously very naive because it will block for some time while the appropriate client is being selected. The lock is required for situations in which requests arrive almost simultaneously; if the lock weren't there, the same client would be selected for all of those simultaneous requests, because they would all execute the same line at the same time on separate threads. Can this problem be solved in a more elegant and efficient way?
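One lock-free variant (a sketch, not a drop-in replacement): Interlocked.Increment hands every caller a unique counter value, so alternating on its parity keeps the 50/50 split without blocking, even when requests arrive simultaneously. Type names are taken from the snippets above; logging is omitted for brevity.
public class ClientProvider
{
    private readonly MainClient _mainClient;
    private readonly BackupClient _backupClient;
    private long _counter = -1;

    public ClientProvider(BackupClient backupClient, MainClient mainClient)
    {
        _backupClient = backupClient;
        _mainClient = mainClient;
    }

    public IExchangeClient GetClient()
    {
        // Each caller gets a distinct value; even goes to the main client, odd to the backup.
        var n = Interlocked.Increment(ref _counter);
        return n % 2 == 0 ? (IExchangeClient)_mainClient : _backupClient;
    }
}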
The following is working for me, but I'm not sure this is the right way to use DI in .NET 6 Blazor.
I have the following class
public class Authentication
{
private IConfiguration _configuration;
private AppState _appState;
public Authentication(IConfiguration Configuration, AppState appState)
{
_configuration = Configuration;
_appState = appState;
}
public async Task<AccessToken?> getAccessToken()
{
var tokenServer = _configuration.GetValue<string>("tokenUrl");
var clientID = _configuration.GetValue<string>("ABC:ClientID");
var clientSecret = _configuration.GetValue<string>("ABC:ClientSecret");
var grantType = _configuration.GetValue<string>("ABC:GrantType");
AccessToken? accessToken = null;
.............
............
return accessToken;
}
}
in my code behind of razor page
namespace XXXXXXXXXXX.Pages
{
public partial class Index
{
[Inject]
public ILogger<Index> _Logger { get; set; }
[Inject]
public IConfiguration Configuration { get; set; }
[Inject]
public AppState _appState { get; set; }
Authentication auth;
protected override void OnInitialized()
{
auth = new Authentication(Configuration, _appState);
base.OnInitialized();
}
private async Task HandleValidSubmit()
{
_Logger.LogInformation("HandleValidSubmit called");
await auth.getAccessToken();
// Process the valid form
}
}
}
My question is: I was expecting DI to do its magic and inject the dependencies into my class, but to get this working I had to write
auth = new Authentication(Configuration, _appState);
I was expecting to instantiate it with auth = new Authentication(), but that throws a compiler error.
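For reference, a minimal sketch (an assumption, not code from the question) of the registration that would let [Inject] supply the class directly, since IConfiguration and AppState are already resolvable from the container:
// Program.cs: register the class itself so the container can construct it with its dependencies.
builder.Services.AddScoped<Authentication>();

// Index.razor.cs: the container then injects a ready-made instance.
[Inject]
public Authentication Auth { get; set; } = default!;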
I have an aspnetcore server (serving a Blazor wasm app), which has a Quartz scheduled job running. When the job triggers, it is calling a method on one of my server's controllers.
If I call this method on my controller normally (e.g. via a web API call), it works fine.
When I call the method from the Quartz IJob, the DbContext used in the controller seems to be disposed.
I've tried injecting the controller into the job in the normal way, and also via IServiceProvider, and both have the same result.
Controller:
public class NotificationController : ControllerBase
{
private readonly ApplicationDbContext context;
public NotificationController(ApplicationDbContext context)
{
this.context = context;
}
public async Task MyMethod()
{
await context.SaveChangesAsync(); //This is where it fails when Quartz calls it, seems context is not populated
}
}
My job (IServiceProvider attempt):
public class ReminderJob : IJob
{
private readonly IServiceProvider serviceProvider;
public ReminderJob(IServiceProvider serviceProvider)
{
this.serviceProvider = serviceProvider;
}
public async Task Execute(IJobExecutionContext jobcontext)
{
using (var scope = serviceProvider.CreateScope())
{
await scope.ServiceProvider.GetRequiredService<NotificationController>().MyMethod();
}
}
}
My job (DI attempt):
public class ReminderJob : IJob
{
private readonly NotificationController notificationController;
public ReminderJob(NotificationController notificationController)
{
this.notificationController = notificationController;
}
public async Task Execute(IJobExecutionContext jobcontext)
{
await notificationController.MyMethod();
}
}
My Startup.cs (relevant lines in ConfigureServices):
public void ConfigureServices(IServiceCollection services)
{
services.AddSignalR();
services.AddControllersWithViews();
services.AddMvcCore().AddControllersAsServices();
services.AddRazorPages();
services.AddQuartz(q =>
{
q.UseMicrosoftDependencyInjectionScopedJobFactory(); //I also tried passing options.CreateScope to this method, but no difference
q.AddJobAndTrigger<ReminderJob>(configuration);
});
services.AddQuartzHostedService(q => q.WaitForJobsToComplete = true);
services.AddDbContext<ApplicationDbContext>(options =>
options.UseSqlServer(configuration.GetConnectionString("DefaultConnection")));
}
No exception is thrown in VS when it attempts to run context.SaveChangesAsync(), but a breakpoint directly after it is not hit, and when I inspect context while debugging, it doesn't seem to be populated correctly.
How do I use the Controller from within the IJob, and ensure the Controller's dependencies are not disposed of?
How do I use the Controller from within the IJob
Do not use the controller in the Job.
Move/Extract/Refactor the desired functionality into a service abstraction
//Service abstraction
public interface INotificationService {
Task MyMethod();
}
public class NotificationService : INotificationService {
private readonly ApplicationDbContext context;
public NotificationService(ApplicationDbContext context) {
this.context = context;
}
public async Task MyMethod() {
await context.SaveChangesAsync();
}
}
and have the job and controller depend on the service to invoke the desired functionality.
public class NotificationController : ControllerBase {
private readonly INotificationService service;
public NotificationController (INotificationService service ) {
this.service = service ;
}
public async Task<IActionResult> MyMethod() {
await service.MyMethod();
return Ok();
}
}
public class ReminderJob : IJob {
private readonly INotificationService service;
public ReminderJob(INotificationService service) {
this.service = service;
}
public Task Execute(IJobExecutionContext jobcontext) {
return service.MyMethod();
}
}
And of course register all the necessary services with the DI container.
//...
services.AddScoped<INotificationService, NotificationService>();
//...
I use the following:
public interface IRepository<T>
{
void Add(T entity);
}
public class Repository<T> : IRepository<T>
{
private readonly ISession session;
public Repository(ISession session)
{
this.session = session;
}
public void Add(T entity)
{
session.Save(entity);
}
}
public class SomeHandler : IHandleMessages<SomeMessage>
{
private readonly IRepository<EntityA> aRepository;
private readonly IRepository<EntityB> bRepository;
public SomeHandler(IRepository<EntityA> aRepository, IRepository<EntityB> bRepository)
{
this.aRepository = aRepository;
this.bRepository = bRepository;
}
public void Handle(SomeMessage message)
{
aRepository.Add(new EntityA(message.Property));
bRepository.Add(new EntityB(message.Property));
}
}
public class MessageEndPoint : IConfigureThisEndpoint, AsA_Server, IWantCustomInitialization
{
public void Init()
{
ObjectFactory.Configure(config =>
{
config.For<ISession>()
.CacheBy(InstanceScope.ThreadLocal)
.TheDefault.Is.ConstructedBy(ctx => ctx.GetInstance<ISessionFactory>().OpenSession());
config.ForRequestedType(typeof(IRepository<>))
.TheDefaultIsConcreteType(typeof(Repository<>));
});
}
}
My problem with the thread-local storage is that the same session is used for the whole lifetime of the application thread. I discovered this when I saw that the first-level cache wasn't being cleared. What I want is a new session instance before each call to IHandleMessages<>.Handle.
How can I do this with StructureMap? Do I have to create a message module?
You're right that the same session is used for all requests on the same thread, because NSB doesn't create a new thread for each request. The workaround is to add a custom lifecycle and have it cleared when message handling is complete.
1. Extend the thread-local storage lifecycle and hook it up as a message module:
public class NServiceBusThreadLocalStorageLifestyle : ThreadLocalStorageLifecycle, IMessageModule
{
public void HandleBeginMessage(){}
public void HandleEndMessage()
{
EjectAll();
}
public void HandleError(){}
}
2. Configure StructureMap as follows:
For<ISession>()
.LifecycleIs(new NServiceBusThreadLocalStorageLifestyle())
...
Hope this helps!