Azure Redis Cache - add_ServerMaintenanceEvent from StackExchange.Redis ConnectionMultiplexer pool does not have an implementation - asp.net-core

I am creating a library in .NET 6 for use with other projects. I am trying to add connection pooling using StackExchange.Redis.MultiplexerPool in that library.
public class Library1 : ILibrary1
{
// azureOptions and _redisClient are fields of this class (declarations omitted here)
public Library1(IConfiguration configuration, string azureOptionsKey)
{
// bind the named configuration section (e.g. "KeyvaultReference") to the Azure options object
configuration.Bind(azureOptionsKey, azureOptions);
// create the Redis cache client
_redisClient = new LibraryRedisCacheService();
}
}
Below is my cache service:
public class LibraryRedisCacheService
{
private readonly IConnectionMultiplexerPool _connectionMultiplexerPool;
private readonly int[] _connectionsErrorCount;
public LibraryRedisCacheService()
{
var configureOptions = ConfigurationOptions.Parse("Cache connection string");
configureOptions.AllowAdmin = true;
configureOptions.AsyncTimeout = 10000;
_connectionMultiplexerPool = ConnectionMultiplexerPoolFactory.Create(
poolSize: 10,
configurationOptions: configureOptions,
connectionSelectionStrategy: ConnectionSelectionStrategy.RoundRobin);
_connectionsErrorCount = new int[_connectionMultiplexerPool.PoolSize];
}
public async Task<TResult> QueryRedisAsync<TResult>(Func<IDatabase, Task<TResult>> op)
{
var connection = await _connectionMultiplexerPool.GetAsync();
try
{
return await op(connection.Connection.GetDatabase());
}
catch (RedisConnectionException)
{
_connectionsErrorCount[connection.ConnectionIndex]++;
if (_connectionsErrorCount[connection.ConnectionIndex] < 3)
{
throw;
}
await connection.ReconnectAsync();
return await op(connection.Connection.GetDatabase());
}
}
public async Task SetCacheAsync<T>(string key, T value, TimeSpan? expiry = null, int retryCount = 0)
{
try
{
await QueryRedisAsync(async db => await db.StringSetAsync(key, JsonConvert.SerializeObject(value), expiry ?? TimeSpan.FromHours(12)));
}
catch (Exception)
{
if (retryCount < 3)
{
await Task.Delay(3000);
await SetCacheAsync(key, value, expiry, retryCount + 1);
}
}
}
public async Task<T?> GetCacheAsync<T>(string key, int retryCount = 0)
{
try
{
var value = await QueryRedisAsync(async db => await db.StringGetAsync(key));
return !value.HasValue ? default(T) : JsonConvert.DeserializeObject<T>(value);
}
catch (Exception)
{
if (retryCount < 3)
{
await Task.Delay(3000);
return await GetCacheAsync<T>(key, retryCount + 1);
}
}
return default;
}
public async Task<TResult> RetryCachConnectAsync<TResult>(Func<IConnectionMultiplexer, TResult> op)
{
var connection = await _connectionMultiplexerPool.GetAsync();
try
{
return op(connection.Connection);
}
catch (RedisConnectionException)
{
_connectionsErrorCount[connection.ConnectionIndex]++;
if (_connectionsErrorCount[connection.ConnectionIndex] < 3)
{
throw;
}
await connection.ReconnectAsync();
return op(connection.Connection);
}
}
}
And in my project startup I am registering this library like this:
services.AddSingleton<ILibrary1, Library1>(
_ => new Library1(Configuration, "KeyvaultReference"));
After that I am getting a runtime exception in my application like the one below:
Unable to load one or more of the requested types.
Method 'add_ServerMaintenanceEvent' in type 'StackExchange.Redis.MultiplexerPool.Multiplexers.InternalDisposableConnectionMultiplexer' from assembly 'StackExchange.Redis.MultiplexerPool, Version=1.0.2.0, Culture=neutral, PublicKeyToken=null' does not have an implementation.
How can I add this method implementation, or am I doing something wrong in the library implementation?

Related

Using MQTT ManagedClient with an ASP.NET API, how to?

I'm currently working on a project that has to rely heavily on MQTT - one of the parts that needs to utilize MQTT is an ASP.NET API, but I'm having difficulties receiving messages.
Here is my MQTTHandler:
public MQTTHandler()
{
_mqttUrl = Properties.Resources.mqttURL ?? "";
_mqttPort = Properties.Resources.mqttPort ?? "";
_mqttUsername = Properties.Resources.mqttUsername ?? "";
_mqttPassword = Properties.Resources.mqttPassword ?? "";
_mqttFactory = new MqttFactory();
_tls = false;
}
public async Task<IManagedMqttClient> ConnectClientAsync()
{
var clientID = Guid.NewGuid().ToString();
var messageBuilder = new MqttClientOptionsBuilder()
.WithClientId(clientID)
.WithCredentials(_mqttUsername, _mqttPassword)
.WithTcpServer(_mqttUrl, Convert.ToInt32(_mqttPort));
var options = _tls ? messageBuilder.WithTls().Build() : messageBuilder.Build();
var managedOptions = new ManagedMqttClientOptionsBuilder()
.WithAutoReconnectDelay(TimeSpan.FromSeconds(5))
.WithClientOptions(options)
.Build();
_mqttClient = new MqttFactory().CreateManagedMqttClient();
await _mqttClient.StartAsync(managedOptions);
Console.WriteLine("Klient startet");
return _mqttClient;
}
public async Task PublishAsync(string topic, string payload, bool retainFlag = true, int qos = 1)
{
await _mqttClient.EnqueueAsync(new MqttApplicationMessageBuilder()
.WithTopic(topic)
.WithPayload(payload)
.WithQualityOfServiceLevel((MQTTnet.Protocol.MqttQualityOfServiceLevel)qos)
.WithRetainFlag(retainFlag)
.Build());
Console.WriteLine("Besked published");
}
public async Task SubscribeAsync(string topic, int qos = 1)
{
var topicFilters = new List<MQTTnet.Packets.MqttTopicFilter>
{
new MqttTopicFilterBuilder()
.WithTopic(topic)
.WithQualityOfServiceLevel((MQTTnet.Protocol.MqttQualityOfServiceLevel)(qos))
.Build()
};
await _mqttClient.SubscribeAsync(topicFilters);
}
public Status GetSystemStatus(MqttApplicationMessageReceivedEventArgs e)
{
try
{
var json = Encoding.UTF8.GetString(e.ApplicationMessage.Payload);
var status = JsonSerializer.Deserialize<Status>(json);
if (status != null)
{
return status;
}
else
{
return null;
}
}
catch (Exception)
{
throw;
}
}
The above has been tested with a console app and works as it should.
The reason I need MQTT in the API is that a POST method has to act on the value of a topic;
in particular, I need to check a system's status before allowing the post:
[HttpPost]
public async Task<ActionResult<Order>> PostOrder(Order order)
{
if (_lastStatus != null)
{
if (_lastStatus.OpStatus)
{
return StatusCode(400, "System is busy!");
}
else
{
var response = await _orderManager.AddOrder(order);
return StatusCode(response.StatusCode, response.Message);
}
}
return StatusCode(400, "Something went wrong");
}
So I will need to set up a subscriber for this controller, and set the value of _lastStatus on received messages:
private readonly MQTTHandler _mqttHandler;
private IManagedMqttClient _mqttClient;
private Status _lastStatus;
public OrdersController(OrderManager orderManager)
{
_orderManager = orderManager;
_mqttHandler = new MQTTHandler();
_mqttClient = _mqttHandler.ConnectClientAsync().Result;
_mqttHandler.SubscribeAsync("JSON/Status");
_mqttClient.ApplicationMessageReceivedAsync += e =>
{
_lastStatus = _mqttHandler.GetSystemStatus(e);
return Task.CompletedTask;
};
}
However, it's behaving a little oddly and I'm not experienced enough to know why.
The first time I make a POST request, _lastStatus is null - every following POST request seems to have the last retained message.
I'm guessing that I am struggling because of stuff being asynchronous, but I'm not sure, and every attempt I've made at making it synchronous has failed.
Anyone have a clue about what I'm doing wrong?

error CS0103: The name 'ModelHelper' does not exist in the current context

I am trying to implement object detection on HoloLens 2 using Microsoft Custom Vision.
I have been following the tutorial (https://learn.microsoft.com/en-us/archive/blogs/appconsult/23535)
but I am faced with this error:
error CS0103: The name 'ModelHelper' does not exist in the current context
The code is below.
What is this ModelHelper for?
And is there anything that I have to add to use it?
using System;
using System.Linq;
using System.Threading.Tasks;
#if UNITY_WSA && !UNITY_EDITOR
using Windows.Media.Capture;
using Windows.Media.Capture.Frames;
using Windows.Media.MediaProperties;
public class ScanEngine
{
public TimeSpan PredictionFrequency = TimeSpan.FromMilliseconds(400);
private MediaCapture CameraCapture;
private MediaFrameReader CameraFrameReader;
private Int64 FramesCaptured;
IUnityScanScene UnityApp;
public ScanEngine()
{
}
public async Task Inititalize(IUnityScanScene unityApp)
{
UnityApp = unityApp;
await InitializeCameraCapture();
await InitializeCameraFrameReader();
}
private async Task InitializeCameraCapture()
{
CameraCapture = new MediaCapture();
MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
settings.StreamingCaptureMode = StreamingCaptureMode.Video;
await CameraCapture.InitializeAsync(settings);
}
private async Task InitializeCameraFrameReader()
{
var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
MediaFrameSourceGroup selectedGroup = null;
MediaFrameSourceInfo colorSourceInfo = null;
foreach (var sourceGroup in frameSourceGroups)
{
foreach (var sourceInfo in sourceGroup.SourceInfos)
{
if (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview
&& sourceInfo.SourceKind == MediaFrameSourceKind.Color)
{
colorSourceInfo = sourceInfo;
break;
}
}
if (colorSourceInfo != null)
{
selectedGroup = sourceGroup;
break;
}
}
var colorFrameSource = CameraCapture.FrameSources[colorSourceInfo.Id];
var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
{
return format.Subtype == MediaEncodingSubtypes.Argb32;
}).FirstOrDefault();
CameraFrameReader = await CameraCapture.CreateFrameReaderAsync(colorFrameSource);
await CameraFrameReader.StartAsync();
}
public void StartPullCameraFrames()
{
Task.Run(async () =>
{
for (; ; ) // Forever = While the app runs
{
FramesCaptured++;
await Task.Delay(PredictionFrequency);
using (var frameReference = CameraFrameReader.TryAcquireLatestFrame())
using (var videoFrame = frameReference?.VideoMediaFrame?.GetVideoFrame())
{
if (videoFrame == null)
{
continue; //ignoring frame
}
if (videoFrame.Direct3DSurface == null)
{
videoFrame.Dispose();
continue; //ignoring frame
}
try
{
await ModelHelper.EvaluateVideoFrameAsync(videoFrame).ConfigureAwait(false);
}
catch (Exception ex)
{
System.Diagnostics.Debug.WriteLine(ex.Message);
}
finally
{
}
}
}
});
}
}
#endif

May I subscribe to multiple hosts (and clusters) with one subscriber pattern in Redis?

I am implementing a feature that integrates Redis published messages into MongoDB; I built the project and it works perfectly in the testing environment.
But I'm concerned about the production environment, where I have 3 master servers with 12 slave cluster nodes. If I publish messages from them to a channel pattern, can I subscribe to all of the messages in one place?
Yes, it is possible with StackExchange.Redis settings. I have done my general structure like below (a short endpoint-configuration sketch follows the code).
public class RedisSubscriber: IRedisSubscriber
{
private readonly RedisConfigurationManager _config;
private readonly IMongoDbRepository _mongoDbRepository;
private readonly ILogger<RedisSubscriber> _logger;
private readonly IConnectionMultiplexer _connectionMultiplexer;
public RedisSubscriber(IServiceProvider serviceLocator, ILogger<RedisSubscriber> logger, IConnectionMultiplexer conn)
{
_config = (RedisConfigurationManager)serviceLocator.GetService(typeof(RedisConfigurationManager));
_mongoDbRepository = (IMongoDbRepository)serviceLocator.GetService(typeof(IMongoDbRepository));
_connectionMultiplexer = conn;
_logger = logger;
}
public void SubScribeChannel()
{
_logger.LogInformation("!SubScribeChannel started!!");
string channelName = _config.ActiveChannelName;
var pubSub = _connectionMultiplexer.GetSubscriber();
try
{
pubSub.Subscribe(channelName, async (channel, message) => await MessageActionAsync(message, channel));
}
catch(Exception ex)
{
_logger.LogInformation(String.Format("!error: {0}", ex.Message));
}
Debug.WriteLine("EOF");
}
private async Task MessageActionAsync(RedisValue message, string channel)
{
try
{
Transformer t = new Transformer(_logger);
_logger.LogInformation(String.Format("!SubScribeChannel message received on message!! channel: {0}, message: {1}", channel, message));
string transformedMessage = Transformer.TransformJsonStringData2Message(message);
List<Document> documents = Transformer.Deserialize<List<Document>>(transformedMessage);
await MergeToMongoDb(documents, channel);
_logger.LogInformation("!Merged");
}
catch (Exception ex)
{
_logger.LogInformation(String.Format("!error: {0}", ex.Message));
}
}
private async Task MergeToMongoDb(IList<Document> documents, string channelName)
{
try
{
foreach (Document doc in documents)
{
TurSysPartitionedDocument td = JsonConvert.DeserializeObject<TurSysPartitionedDocument>(JsonConvert.SerializeObject(doc));
td.DepartureDate = td.DepartureDate.ToLocalTime();
td.PartitionKey = channelName;
TurSysPartitionedDocument isExist = await _mongoDbRepository.GetOneAsync<TurSysPartitionedDocument>(k =>
k.ProductCode == td.ProductCode &&
k.ProviderCode == td.ProviderCode &&
k.CabinClassName == td.CabinClassName &&
k.OriginAirport == td.OriginAirport &&
k.DestinationAirport == td.DestinationAirport &&
k.Adult >= td.Adult &&
k.DepartureDate == td.DepartureDate,
td.PartitionKey);
if (isExist != null)
{
//_logger.LogInformation(String.Format("!isExist departure date: {0}", isExist.DepartureDate));
isExist.SearchCount++;
await _mongoDbRepository.UpdateOneAsync(isExist, k => k.Adult, td.Adult);
await _mongoDbRepository.UpdateOneAsync(isExist, k => k.SearchCount, isExist.SearchCount);
}
else
{
//_logger.LogInformation(String.Format("!last ToLocalTime td departure date: {0}", td.DepartureDate));
td.SearchCount = 1;
await _mongoDbRepository.AddOneAsync(td);
//_logger.LogInformation(String.Format("!last ToLocalTime result td departure date: {0}", td.DepartureDate));
}
}
}
catch(Exception ex)
{
_logger.LogInformation(String.Format("!error: {0}", ex.Message));
}
}
}
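For the multiple-hosts part, the relevant piece is the connection configuration rather than the subscriber class above. A minimal sketch, not my production code (host names, port and channel name are placeholders): all known master endpoints go into one ConfigurationOptions, and a single GetSubscriber() call then receives the messages published through any of them, because Redis propagates pub/sub across the replication group / cluster.
using System;
using StackExchange.Redis;

var options = new ConfigurationOptions
{
    // list the known master endpoints; StackExchange.Redis discovers the rest of the topology
    EndPoints = { "redis-master-1:6379", "redis-master-2:6379", "redis-master-3:6379" },
    AbortOnConnectFail = false
};
var multiplexer = ConnectionMultiplexer.Connect(options);
var subscriber = multiplexer.GetSubscriber();
subscriber.Subscribe("my-channel", (channel, message) =>
{
    // same idea as MessageActionAsync above: every message arrives here, wherever it was published
    Console.WriteLine($"{channel}: {message}");
});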

org.apache.fop.fo.flow.ExternalGraphic catches and logs ImageException I want to handle myself

I am transforming an image into a PDF for test purposes.
To ensure that the image is compatible with the printing process later on, I'm running a quick test print during the upload.
I'm creating a simple test PDF with a transformer. When I try to print an image with an incompatible format, the ImageManager of the transformer throws an ImageException, starting in the preloadImage() function:
public ImageInfo preloadImage(String uri, Source src)
throws ImageException, IOException {
Iterator iter = registry.getPreloaderIterator();
while (iter.hasNext()) {
ImagePreloader preloader = (ImagePreloader)iter.next();
ImageInfo info = preloader.preloadImage(uri, src, imageContext);
if (info != null) {
return info;
}
}
throw new ImageException("The file format is not supported. No ImagePreloader found for "
+ uri);
}
throwing it to:
public ImageInfo needImageInfo(String uri, ImageSessionContext session, ImageManager manager)
throws ImageException, IOException {
//Fetch unique version of the URI and use it for synchronization so we have some sort of
//"row-level" locking instead of "table-level" locking (to use a database analogy).
//The fine locking strategy is necessary since preloading an image is a potentially long
//operation.
if (isInvalidURI(uri)) {
throw new FileNotFoundException("Image not found: " + uri);
}
String lockURI = uri.intern();
synchronized (lockURI) {
ImageInfo info = getImageInfo(uri);
if (info == null) {
try {
Source src = session.needSource(uri);
if (src == null) {
registerInvalidURI(uri);
throw new FileNotFoundException("Image not found: " + uri);
}
info = manager.preloadImage(uri, src);
session.returnSource(uri, src);
} catch (IOException ioe) {
registerInvalidURI(uri);
throw ioe;
} catch (ImageException e) {
registerInvalidURI(uri);
throw e;
}
putImageInfo(info);
}
return info;
}
}
throwing it to:
public ImageInfo getImageInfo(String uri, ImageSessionContext session)
throws ImageException, IOException {
if (getCache() != null) {
return getCache().needImageInfo(uri, session, this);
} else {
return preloadImage(uri, session);
}
}
Finally it gets caught and logged in the ExternalGraphic class:
/** {@inheritDoc} */
public void bind(PropertyList pList) throws FOPException {
super.bind(pList);
src = pList.get(PR_SRC).getString();
//Additional processing: obtain the image's intrinsic size and baseline information
url = URISpecification.getURL(src);
FOUserAgent userAgent = getUserAgent();
ImageManager manager = userAgent.getFactory().getImageManager();
ImageInfo info = null;
try {
info = manager.getImageInfo(url, userAgent.getImageSessionContext());
} catch (ImageException e) {
ResourceEventProducer eventProducer = ResourceEventProducer.Provider.get(
getUserAgent().getEventBroadcaster());
eventProducer.imageError(this, url, e, getLocator());
} catch (FileNotFoundException fnfe) {
ResourceEventProducer eventProducer = ResourceEventProducer.Provider.get(
getUserAgent().getEventBroadcaster());
eventProducer.imageNotFound(this, url, fnfe, getLocator());
} catch (IOException ioe) {
ResourceEventProducer eventProducer = ResourceEventProducer.Provider.get(
getUserAgent().getEventBroadcaster());
eventProducer.imageIOError(this, url, ioe, getLocator());
}
if (info != null) {
this.intrinsicWidth = info.getSize().getWidthMpt();
this.intrinsicHeight = info.getSize().getHeightMpt();
int baseline = info.getSize().getBaselinePositionFromBottom();
if (baseline != 0) {
this.intrinsicAlignmentAdjust
= FixedLength.getInstance(-baseline);
}
}
}
That way it isn't accessible for me in my code that uses the transformer.
I tried to use a custom ErrorListener, but the transformer only registers fatalErrors to the ErrorListener.
Is there any way to access the Exception and handle it myself without changing the code of the library?
It was easier than I thought. Before I call the transformation, I register a custom EventListener on the user agent of the Fop I'm using. This listener just stores the information about what kind of event was triggered, so I can throw an exception if it's an image error.
My Listener:
import org.apache.fop.events.Event;
import org.apache.fop.events.EventListener;
public class ImageErrorListener implements EventListener
{
private String eventKey = "";
private boolean imageError = false;
@Override
public void processEvent(Event event)
{
eventKey = event.getEventKey();
if(eventKey.equals("imageError")) {
imageError = true;
}
}
public String getEventKey()
{
return eventKey;
}
public void setEventKey(String eventKey)
{
this.eventKey = eventKey;
}
public boolean isImageError()
{
return imageError;
}
public void setImageError(boolean imageError)
{
this.imageError = imageError;
}
}
Use of the Listener:
// Start XSLT transformation and FOP processing
ImageErrorListener imageListener = new ImageErrorListener();
fop.getUserAgent().getEventBroadcaster().addEventListener(imageListener);
if (res != null)
{
transformer.transform(xmlDomStreamSource, res);
}
if(imageListener.isImageError()) {
throw new ImageException("");
}
fop is of the type Fop, xmlDomStreamSource is the XML source I want to transform, and res is my SAXResult.

Using MiniProfiler's database profiling with NHibernate

What's the simplest way to use MiniProfiler's database profiling with NHibernate? In order for the profiler to work, I need to wrap the DbConnection that NHibernate uses in a ProfiledDbConnection.
I'm not too familiar with the internals of NHibernate, so I don't know where all the extensibility points are. (I noticed that an NHibernate ISession has a Connection property, but it is read-only.)
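To make the wrapping concrete, this is the kind of call I mean (just a sketch; ProfiledDbConnection.Get is the MiniProfiler factory used in the answers below, and the connection here is only a stand-in for whatever DbConnection NHibernate creates internally):
using System.Data.Common;
using System.Data.SqlClient;

// stand-in connection; in reality this would be the connection NHibernate builds itself
DbConnection raw = new SqlConnection("placeholder connection string");
var profiled = ProfiledDbConnection.Get(raw, MiniProfiler.Current);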
[UPDATE] Please see the following links for a version that uses RealProxy to proxy the SqlCommand - batching is now supported
blog http://blog.fearofaflatplanet.me.uk/mvcminiprofiler-and-nhibernate-take-2
gist https://gist.github.com/1110153
I've left the original answer unaltered as it was accepted. [/UPDATE]
I've managed to partially get this to work by implementing a profiled client driver (example for SQL Server 2008 below). This works for simple examples; however, I haven't yet found a solution for NHibernate batching (which attempts to cast the command back to SqlCommand).
public class ProfiledSql2008ClientDriver : Sql2008ClientDriver
{
public override IDbCommand CreateCommand()
{
return new ProfiledDbCommand(
base.CreateCommand() as DbCommand,
null,
MiniProfiler.Current);
}
public override IDbConnection CreateConnection()
{
return ProfiledDbConnection.Get(
base.CreateConnection() as DbConnection,
MiniProfiler.Current);
}
}
I extended Robert's answer above to work with NHibernate batching. There is a lot of code here, so it can possibly be shortened; some of it is based on the NHibernate source for the client driver.
<property name="connection.driver_class">YoureOnTime.Data.ProfiledSqlClientDriver, YoureOnTime.Common</property>
public class ProfiledSqlClientDriver : DriverBase, IEmbeddedBatcherFactoryProvider
{
public override IDbConnection CreateConnection()
{
return new ProfiledSqlDbConnection(
new SqlConnection(),
MiniProfiler.Current);
}
public override IDbCommand CreateCommand()
{
return new ProfiledSqlDbCommand(
new SqlCommand(),
null,
MiniProfiler.Current);
}
public override bool UseNamedPrefixInSql
{
get { return true; }
}
public override bool UseNamedPrefixInParameter
{
get { return true; }
}
public override string NamedPrefix
{
get { return "#"; }
}
public override bool SupportsMultipleOpenReaders
{
get { return false; }
}
public static void SetParameterSizes(IDataParameterCollection parameters, SqlType[] parameterTypes)
{
for (int i = 0; i < parameters.Count; i++)
{
SetVariableLengthParameterSize((IDbDataParameter)parameters[i], parameterTypes[i]);
}
}
private const int MaxAnsiStringSize = 8000;
private const int MaxBinarySize = MaxAnsiStringSize;
private const int MaxStringSize = MaxAnsiStringSize / 2;
private const int MaxBinaryBlobSize = int.MaxValue;
private const int MaxStringClobSize = MaxBinaryBlobSize / 2;
private const byte MaxPrecision = 28;
private const byte MaxScale = 5;
private const byte MaxDateTime2 = 8;
private const byte MaxDateTimeOffset = 10;
private static void SetDefaultParameterSize(IDbDataParameter dbParam, SqlType sqlType)
{
switch (dbParam.DbType)
{
case DbType.AnsiString:
case DbType.AnsiStringFixedLength:
dbParam.Size = MaxAnsiStringSize;
break;
case DbType.Binary:
if (sqlType is BinaryBlobSqlType)
{
dbParam.Size = MaxBinaryBlobSize;
}
else
{
dbParam.Size = MaxBinarySize;
}
break;
case DbType.Decimal:
dbParam.Precision = MaxPrecision;
dbParam.Scale = MaxScale;
break;
case DbType.String:
case DbType.StringFixedLength:
dbParam.Size = IsText(dbParam, sqlType) ? MaxStringClobSize : MaxStringSize;
break;
case DbType.DateTime2:
dbParam.Size = MaxDateTime2;
break;
case DbType.DateTimeOffset:
dbParam.Size = MaxDateTimeOffset;
break;
}
}
private static bool IsText(IDbDataParameter dbParam, SqlType sqlType)
{
return (sqlType is StringClobSqlType) || (sqlType.LengthDefined && sqlType.Length > MsSql2000Dialect.MaxSizeForLengthLimitedStrings &&
(DbType.String == dbParam.DbType || DbType.StringFixedLength == dbParam.DbType));
}
private static void SetVariableLengthParameterSize(IDbDataParameter dbParam, SqlType sqlType)
{
SetDefaultParameterSize(dbParam, sqlType);
// Override the defaults using data from SqlType.
if (sqlType.LengthDefined && !IsText(dbParam, sqlType))
{
dbParam.Size = sqlType.Length;
}
if (sqlType.PrecisionDefined)
{
dbParam.Precision = sqlType.Precision;
dbParam.Scale = sqlType.Scale;
}
}
public override IDbCommand GenerateCommand(CommandType type, SqlString sqlString, SqlType[] parameterTypes)
{
IDbCommand command = base.GenerateCommand(type, sqlString, parameterTypes);
//if (IsPrepareSqlEnabled)
{
SetParameterSizes(command.Parameters, parameterTypes);
}
return command;
}
public override bool SupportsMultipleQueries
{
get { return true; }
}
#region IEmbeddedBatcherFactoryProvider Members
System.Type IEmbeddedBatcherFactoryProvider.BatcherFactoryClass
{
get { return typeof(ProfiledSqlClientBatchingBatcherFactory); }
}
#endregion
}
public class ProfiledSqlClientBatchingBatcher : AbstractBatcher
{
private int batchSize;
private int totalExpectedRowsAffected;
private SqlClientSqlCommandSet currentBatch;
private StringBuilder currentBatchCommandsLog;
private readonly int defaultTimeout;
public ProfiledSqlClientBatchingBatcher(ConnectionManager connectionManager, IInterceptor interceptor)
: base(connectionManager, interceptor)
{
batchSize = Factory.Settings.AdoBatchSize;
defaultTimeout = PropertiesHelper.GetInt32(NHibernate.Cfg.Environment.CommandTimeout, NHibernate.Cfg.Environment.Properties, -1);
currentBatch = CreateConfiguredBatch();
//we always create this, because we need to deal with a scenario in which
//the user change the logging configuration at runtime. Trying to put this
//behind an if(log.IsDebugEnabled) will cause a null reference exception
//at that point.
currentBatchCommandsLog = new StringBuilder().AppendLine("Batch commands:");
}
public override int BatchSize
{
get { return batchSize; }
set { batchSize = value; }
}
protected override int CountOfStatementsInCurrentBatch
{
get { return currentBatch.CountOfCommands; }
}
public override void AddToBatch(IExpectation expectation)
{
totalExpectedRowsAffected += expectation.ExpectedRowCount;
IDbCommand batchUpdate = CurrentCommand;
string lineWithParameters = null;
var sqlStatementLogger = Factory.Settings.SqlStatementLogger;
if (sqlStatementLogger.IsDebugEnabled || log.IsDebugEnabled)
{
lineWithParameters = sqlStatementLogger.GetCommandLineWithParameters(batchUpdate);
var formatStyle = sqlStatementLogger.DetermineActualStyle(FormatStyle.Basic);
lineWithParameters = formatStyle.Formatter.Format(lineWithParameters);
currentBatchCommandsLog.Append("command ")
.Append(currentBatch.CountOfCommands)
.Append(":")
.AppendLine(lineWithParameters);
}
if (log.IsDebugEnabled)
{
log.Debug("Adding to batch:" + lineWithParameters);
}
currentBatch.Append(((ProfiledSqlDbCommand)batchUpdate).Command);
if (currentBatch.CountOfCommands >= batchSize)
{
ExecuteBatchWithTiming(batchUpdate);
}
}
protected void ProfiledPrepare(IDbCommand cmd)
{
try
{
IDbConnection sessionConnection = ConnectionManager.GetConnection();
if (cmd.Connection != null)
{
// make sure the commands connection is the same as the Sessions connection
// these can be different when the session is disconnected and then reconnected
if (cmd.Connection != sessionConnection)
{
cmd.Connection = sessionConnection;
}
}
else
{
cmd.Connection = (sessionConnection as ProfiledSqlDbConnection).Connection;
}
ProfiledSqlDbTransaction trans = (ProfiledSqlDbTransaction)typeof(NHibernate.Transaction.AdoTransaction).InvokeMember("trans", System.Reflection.BindingFlags.GetField | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance, null, ConnectionManager.Transaction, null);
if (trans != null)
cmd.Transaction = trans.Transaction;
Factory.ConnectionProvider.Driver.PrepareCommand(cmd);
}
catch (InvalidOperationException ioe)
{
throw new ADOException("While preparing " + cmd.CommandText + " an error occurred", ioe);
}
}
protected override void DoExecuteBatch(IDbCommand ps)
{
log.DebugFormat("Executing batch");
CheckReaders();
ProfiledPrepare(currentBatch.BatchCommand);
if (Factory.Settings.SqlStatementLogger.IsDebugEnabled)
{
Factory.Settings.SqlStatementLogger.LogBatchCommand(currentBatchCommandsLog.ToString());
currentBatchCommandsLog = new StringBuilder().AppendLine("Batch commands:");
}
int rowsAffected;
try
{
rowsAffected = currentBatch.ExecuteNonQuery();
}
catch (DbException e)
{
throw ADOExceptionHelper.Convert(Factory.SQLExceptionConverter, e, "could not execute batch command.");
}
Expectations.VerifyOutcomeBatched(totalExpectedRowsAffected, rowsAffected);
currentBatch.Dispose();
totalExpectedRowsAffected = 0;
currentBatch = CreateConfiguredBatch();
}
private SqlClientSqlCommandSet CreateConfiguredBatch()
{
var result = new SqlClientSqlCommandSet();
if (defaultTimeout > 0)
{
try
{
result.CommandTimeout = defaultTimeout;
}
catch (Exception e)
{
if (log.IsWarnEnabled)
{
log.Warn(e.ToString());
}
}
}
return result;
}
}
public class ProfiledSqlClientBatchingBatcherFactory : IBatcherFactory
{
public virtual IBatcher CreateBatcher(ConnectionManager connectionManager, IInterceptor interceptor)
{
return new ProfiledSqlClientBatchingBatcher(connectionManager, interceptor);
}
}
public class ProfiledSqlDbCommand : ProfiledDbCommand
{
public ProfiledSqlDbCommand(SqlCommand cmd, SqlConnection conn, MiniProfiler profiler)
: base(cmd, conn, profiler)
{
Command = cmd;
}
public SqlCommand Command { get; set; }
private DbTransaction _trans;
protected override DbTransaction DbTransaction
{
get { return _trans; }
set
{
this._trans = value;
ProfiledSqlDbTransaction awesomeTran = value as ProfiledSqlDbTransaction;
Command.Transaction = awesomeTran == null ? (SqlTransaction)value : awesomeTran.Transaction;
}
}
}
public class ProfiledSqlDbConnection : ProfiledDbConnection
{
public ProfiledSqlDbConnection(SqlConnection connection, MiniProfiler profiler)
: base(connection, profiler)
{
Connection = connection;
}
public SqlConnection Connection { get; set; }
protected override DbTransaction BeginDbTransaction(System.Data.IsolationLevel isolationLevel)
{
return new ProfiledSqlDbTransaction(Connection.BeginTransaction(isolationLevel), this);
}
}
public class ProfiledSqlDbTransaction : ProfiledDbTransaction
{
public ProfiledSqlDbTransaction(SqlTransaction transaction, ProfiledDbConnection connection)
: base(transaction, connection)
{
Transaction = transaction;
}
public SqlTransaction Transaction { get; set; }
}
Try implementing NHibernate.Connection.IConnectionProvider (you could just inherit DriverConnectionProvider); in GetConnection(), wrap the IDbConnection as you need.
Plug your connection provider using the Environment.ConnectionProvider key in your config properties.
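A minimal sketch of that idea (the class name, namespace and assembly name are made up; ProfiledDbConnection.Get is the same MiniProfiler factory used in the driver-based answer above, and the usings for the profiler types depend on the MiniProfiler version in use):
using System.Data;
using System.Data.Common;
using NHibernate.Connection;

public class ProfiledConnectionProvider : DriverConnectionProvider
{
    // Depending on the NHibernate version, GetConnection() may take a connection string argument;
    // the idea is the same: let the base provider open the real connection, then return the wrapper.
    public override IDbConnection GetConnection()
    {
        var connection = base.GetConnection();
        return ProfiledDbConnection.Get(connection as DbConnection, MiniProfiler.Current);
    }
}
It would then be plugged in with the connection.provider property (assembly-qualified name is illustrative):
<property name="connection.provider">MyApp.Data.ProfiledConnectionProvider, MyApp</property>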
If anyone is interested, I have done an integration using a custom log4net appender instead. This way I feel safe that I don't mess with the Connection object.
The rough outline is something along these lines: NHibernate emits the SQL strings as debug statements, and the appender configured in log4net.xml calls Start and Dispose on the MiniProfiler.
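Roughly, such an appender could look like this (a simplified sketch, not the exact code; the class name is illustrative, the MiniProfiler namespace depends on the version in use, and it would be attached to the NHibernate.SQL logger in log4net.xml):
using log4net.Appender;
using log4net.Core;

public class MiniProfilerAppender : AppenderSkeleton
{
    protected override void Append(LoggingEvent loggingEvent)
    {
        var profiler = MiniProfiler.Current;
        if (profiler == null)
        {
            return;
        }
        // NHibernate logs each SQL statement at debug level; starting a step and disposing it
        // right away records the statement against the current profiler, as outlined above.
        using (profiler.Step(loggingEvent.RenderedMessage))
        {
        }
    }
}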