Insert not being reflected in later Select - sql

I’m developing a web app with JSF and Hibernate, and I’m facing a problem with Hibernate. At some point I INSERT a row into a table, and just after that I want to retrieve the last 3 inserted rows of the same table. The INSERT is performed correctly (I checked that with a DB client), but when I try to retrieve those 3 rows, the one I have just inserted is not returned by the SELECT. It looks like some kind of Hibernate session problem, where the change is not visible when I run the SELECT. Here is the relevant part of the code:
hibernate.cfg.xml:
<hibernate-configuration>
<session-factory>
<property name="hibernate.connection.driver_class">com.mysql.jdbc.Driver</property>
<property name="hibernate.connection.url">jdbc:mysql://localhost:3306/habana</property>
<property name="hibernate.connection.username">root</property>
<property name="connection.password"></property>
<property name="hibernate.dialect">org.hibernate.dialect.MySQLDialect</property>
<property name="current_session_context_class">thread</property>
<property name="show_sql">true</property>
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<mapping class="org.blk.lahabana.model.ClaseMovimiento"/>
<mapping class="org.blk.lahabana.model.Devolucion"/>
<mapping class="org.blk.lahabana.model.DevolucionProducto"/>
<mapping class="org.blk.lahabana.model.DevolucionProductoId"/>
<mapping class="org.blk.lahabana.model.Evento"/>
<mapping class="org.blk.lahabana.model.Movimiento"/>
<mapping class="org.blk.lahabana.model.Pedido"/>
<mapping class="org.blk.lahabana.model.PedidoProducto"/>
<mapping class="org.blk.lahabana.model.PedidoProductoId"/>
<mapping class="org.blk.lahabana.model.Permiso"/>
<mapping class="org.blk.lahabana.model.Producto"/>
<mapping class="org.blk.lahabana.model.TipoProducto"/>
<mapping class="org.blk.lahabana.model.Trabajador"/>
<mapping class="org.blk.lahabana.model.Turno"/>
<mapping class="org.blk.lahabana.model.TurnoTrabajador"/>
<mapping class="org.blk.lahabana.model.TurnoTrabajadorId"/>
<mapping class="org.blk.lahabana.model.Usuario"/>
</session-factory>
</hibernate-configuration>
HibernateUtil:
public class HibernateUtil {
/** Logger for this class and subclasses */
protected static final Log logger = LogFactory.getLog(HibernateUtil.class);
private static SessionFactory sessionFactory;
static {
try {
sessionFactory = new AnnotationConfiguration().configure()
.buildSessionFactory();
} catch (Throwable ex) {
logger.error("Error al crear el sessionFactory de Hibernate",ex);
throw new ExceptionInInitializerError(ex);
}
}
public static SessionFactory getSessionFactory() {
// Alternatively, we could look up in JNDI here
return sessionFactory;
}
public static void closeSession() {
// Close caches and connection pools
getSessionFactory().close();
}
}
EventsController:
public String create(){
String view = "";
Integer id = eventosService.createEvent(evento);
if(id !=null){
FacesMessage facesMsg = new FacesMessage(FacesMessage.SEVERITY_INFO, "Evento Guardado", "El evento se ha guardado con éxito");
FacesContext.getCurrentInstance().addMessage(null, facesMsg);
}
else{
FacesMessage facesMsg = new FacesMessage(FacesMessage.SEVERITY_ERROR, "Evento No Guardado", "Se ha producido un error al guardar el evento");
FacesContext.getCurrentInstance().addMessage(null, facesMsg);
}
view = dashboardController.input();
return view;
}
@Override
public Integer createEvent(Evento event) {
Integer result = null;
event.setEstado(Constants.EVENT_STATE_PLANED);
boolean saved = eventoDao.save(event);
if( saved ){
result = event.getId();
}
return result;
}
public boolean save(T entity) {
Session hbsession = HibernateUtil.getSessionFactory().getCurrentSession();
Transaction transaction = null;
boolean result = false;
try {
transaction = hbsession.beginTransaction();
hbsession.save(entity);
hbsession.flush();
transaction.commit();
result = true;
} catch (HibernateException e) {
logger.error("Error al guardar una entidad entidad",e);
if(transaction != null){
transaction.rollback();
}
}
finally{
if(hbsession != null && hbsession.isOpen()){
hbsession.close();
}
}
return result;
}
DashboardController:
public String input(){
String view = "dashboard";
// Get the latest events
List<Evento> events = eventosService.getLastEvents();
// Convert the events into VOs for the view
this.lastEvents = new ArrayList<EventoResumenVO>();
for (Evento evento : events) {
this.lastEvents.add(eventoResumenConverter.convertFromEvent(evento));
}
return view;
}
@Override
public List<Evento> getLastEvents() {
return eventoDao.findLastEvents(3);
}
@SuppressWarnings("unchecked")
@Override
public List<Evento> findLastEvents(Integer number) {
Session hbsession = HibernateUtil.getSessionFactory().getCurrentSession();
Transaction transaction = null;
List<Evento> result = null;
try {
transaction = hbsession.beginTransaction();
Criteria criteria = hbsession.createCriteria(entityClass);
criteria.setFetchMode("movimientos", FetchMode.JOIN);
criteria.addOrder(Order.desc("fechaFin"));
criteria.setMaxResults(number.intValue());
result = criteria.list();
} catch (HibernateException e) {
logger.error("Error al recuperar la lista de entidades",e);
if(transaction != null){
transaction.rollback();
}
}
finally{
if(hbsession != null && hbsession.isOpen()){
hbsession.close();
}
}
return result;
}
Thanks for helping!
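Not part of the original post, but one way to narrow this down is to re-read the data through a brand-new session immediately after save() commits. A rough sketch, reusing the HibernateUtil and Evento classes above: if the new row shows up here but still not in findLastEvents(3), the issue is in the query itself (for instance, ordering by fechaFin combined with the join fetch and max results) rather than in session or cache state.
// Diagnostic sketch: read the row back through a fresh session right after the commit.
Session check = HibernateUtil.getSessionFactory().openSession();
try {
    Evento reloaded = (Evento) check.get(Evento.class, event.getId());
    logger.debug("Reloaded after commit: " + reloaded);

    List<?> lastThree = check.createCriteria(Evento.class)
            .addOrder(Order.desc("fechaFin"))
            .setMaxResults(3)
            .list();
    logger.debug("Last three by fechaFin: " + lastThree);
} finally {
    check.close();
}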

Related

How to write Unit Test for Hybris DAO Implementation

I am new to unit testing and I am trying to test this method, but I did not manage to capture the query inside the method; I only managed to make it throw an exception, not to capture the query and return a result.
Is there a way to have "result.getResult().get(0)" return a value in the unit test?
Thanks
@Override
public HouseModel findByCode(String code) {
var sQuery = "SELECT {h:pk} FROM {House as h} WHERE {h:id} = ?id ";
var query = new FlexibleSearchQuery(sQuery);
query.addQueryParameter("id", Objects.requireNonNullElse(code, ""));
SearchResult<HouseModel> result = flexibleSearchService.search(query);
return result.getResult().get(0);
}
Code Test:
@Test
public void testFindByCode() {
when(flexibleSearchService.search((FlexibleSearchQuery) any())).thenThrow(new RuntimeException("test"));
RuntimeException exception = new RuntimeException();
try {
var result2 = houseDAOImpl.findByCode("testcode");
} catch (RuntimeException e) {
e.printStackTrace();
exception = e;
}
boolean shouldtrue = exception.getMessage().equalsIgnoreCase("test");
System.out.println(exception.getMessage());
System.out.println(shouldtrue);
assertTrue(shouldtrue);
}
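If the goal is a plain unit test that actually reaches result.getResult().get(0), one option is to stub the search call instead of making it throw. A minimal sketch, assuming Mockito is available (as in the existing test), the usual static imports from org.mockito.Mockito and org.junit.Assert, and that flexibleSearchService is a mock injected into houseDAOImpl:
@Test
public void testFindByCodeReturnsFirstResult() {
    // Build a stubbed result list containing the model we expect back.
    HouseModel expected = mock(HouseModel.class);
    when(expected.getCode()).thenReturn("testcode");

    @SuppressWarnings("unchecked")
    SearchResult<HouseModel> searchResult = mock(SearchResult.class);
    when(searchResult.getResult()).thenReturn(Collections.singletonList(expected));

    // Any FlexibleSearchQuery passed to the service returns the stubbed result.
    when(flexibleSearchService.search(any(FlexibleSearchQuery.class))).thenReturn(searchResult);

    HouseModel actual = houseDAOImpl.findByCode("testcode");

    assertSame(expected, actual);
    verify(flexibleSearchService).search(any(FlexibleSearchQuery.class));
}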
Hybris provides transactional test support (HybrisJUnit4TransactionalTest) for tests that interact with the database.
public class HouseDAOImpTest extends HybrisJUnit4TransactionalTest
{
private TypeService typeService;
private ModelService modelService;
private DeeplinkUrlDao dao;
private List<HouseModel> createdRules;
/**
* @throws java.lang.Exception
*/
@Before
public void setUp() throws Exception
{
createdRules = createHouses();
houseDAOImpl = (HouseFinderDao) Registry.getApplicationContext().getBean("houseFinderDao");
}
@Test
public void testFindByCode()
{
final HouseModel hm = houseDAOImpl.findByCode("testcode");
assertThat(hm.getCode(), is(equalTo("testcode")));
}
private ModelService getModelService()
{
if (modelService == null)
{
modelService = (ModelService) Registry.getApplicationContext().getBean("modelService");
}
return modelService;
}
private TypeService getTypeService()
{
if (typeService == null)
{
typeService = (TypeService) Registry.getApplicationContext().getBean("typeService");
}
return typeService;
}
/**
* Creates the Houses.
*/
private List<HouseModel> createHouses()
{
final List<HouseModel> result = new ArrayList<HouseModel>();
final HouseModel houseModel1 = getModelService().create(HouseModel.class);
houseModel1.setCode("testcode");
modelService.save(houseModel1);
// create other houses model and follow previous steps
result.add(houseModel1);
result.add(houseModel2);
result.add(houseModel3);
return result;
}
}

place picker closes immediately

My PlacePicker closes right after I open it: it stays open for about 3 seconds, I can see the location and move the map, but then it closes. I have already tried everything: I activated the API in the Google console and changed the key, and there is no error in Logcat or in the Run tab. Please help!
MY ANDROID MANIFEST
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION"/>
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
<application
android:allowBackup="true"
android:icon="#mipmap/ic_launcher"
android:label="#string/app_name"
android:roundIcon="#mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="#style/AppTheme">
<meta-data
android:name="com.google.android.geo.API_KEY"
android:value="AIzaSyCD70mX9tljEfiDiLaCdHEUNMvq40AJDyI"/>
<meta-data
android:name="com.google.android.gms.version"
android:value="#integer/google_play_services_version" />
<activity android:name=".activity.TelefoneActivity" />
<activity android:name=".activity.CadastrarActivity" />
<activity android:name=".activity.MainActivity" />
<activity android:name=".activity.PassageiroActivity" />
<uses-library android:name="org.apache.http.legacy" android:required="false"/>
<meta-data
android:name="com.facebook.sdk.App.Application"
android:value="#string/facebook_app_id" />
<!-- Facebook API Key -->
<meta-data
tools:replace="android:value"
android:name="com.facebook.sdk.ApplicationId"
android:value="#string/facebook_app_id" />
<activity
android:name="com.facebook.CustomTabActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.VIEW" />
<category android:name="android.intent.category.DEFAULT" />
<category android:name="android.intent.category.BROWSABLE" />
<data android:scheme="@string/fb_login_protocol_scheme" />
</intent-filter>
</activity>
<!--
The API key for Google Maps-based APIs is defined as a string resource.
(See the file "res/values/google_maps_api.xml").
Note that the API key is linked to the encryption key used to sign the APK.
You need a different API key for each encryption key, including the release key that is used to
sign the APK for publishing.
You can define the keys for the debug and release targets in src/debug/ and src/release/.
-->
<activity
android:name=".activity.SplashActivity"
android:theme="#style/AppCompat.TelaCheia">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
ACTIVITY WHERE PLACE PICKER IS IMPLEMENTED
import static android.Manifest.permission.ACCESS_FINE_LOCATION;
public class PassageiroActivity extends FragmentActivity implements OnMapReadyCallback, GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener {
int PLACE_PICKER_REQUEST = 1;
private String placeId;
TextView btnChamarMoto;
/**
* Application map
*/
private GoogleMap mMap;
/**
* Responsible for providing the smartphone's location
*/
private GoogleApiClient mGoogleApiClient;
/**
* Stores the smartphone's last position.
*/
private Location mLastLocation;
private TextInputEditText editMeuLocal;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_passageiro);
SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map);
mapFragment.getMapAsync(this);
iniciaComponentes();
placesApi();
btnChamarMoto = (TextView) findViewById(R.id.btnChamarMoto);
if (mGoogleApiClient == null) {
mGoogleApiClient = new GoogleApiClient.Builder(this)
.addConnectionCallbacks(this) // Interface ConnectionCallbacks
.addOnConnectionFailedListener(this) //Interface OnConnectionFailedListener
.addApi(LocationServices.API) // Use the LocationServices API
.build();
}
}
public void placePiker(View view) {
PlacePicker.IntentBuilder builder = new PlacePicker.IntentBuilder();
try {
try {
startActivityForResult(builder.build(PassageiroActivity.this), PLACE_PICKER_REQUEST);
} catch (GooglePlayServicesNotAvailableException e) {
e.printStackTrace();
}
} catch (GooglePlayServicesRepairableException e) {
e.printStackTrace();
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == PLACE_PICKER_REQUEST) {
if (resultCode == RESULT_OK) {
Place place = (Place) PlacePicker.getPlace(PassageiroActivity.this, data);
btnChamarMoto.setText(place.getAddress());
}
}
}
protected void onStart() {
mGoogleApiClient.connect();
super.onStart();
}
protected void onStop() {
mGoogleApiClient.disconnect();
super.onStop();
}
@Override
public void onMapReady(GoogleMap googleMap) {
mMap = googleMap;
}
@Override
public void onConnected(Bundle bundle) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
return;
}
mLastLocation = LocationServices.FusedLocationApi.getLastLocation(mGoogleApiClient);
if (mLastLocation != null) {
if (mMap != null) {
// Build the LatLng from the Location
final LatLng latLng = new LatLng(mLastLocation.getLatitude(), mLastLocation.getLongitude());
// Add a Marker at that position...
mMap.addMarker(new MarkerOptions().position(latLng).title("Minha Posição"));
// Zoom the map to the current position...
mMap.animateCamera(CameraUpdateFactory.newLatLngZoom(latLng, 18));
}
}
}
@Override
public void onConnectionSuspended(int i) {
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
}
//Place Api
private void placesApi() {
Places.initialize(getApplicationContext(), "AIzaSyDxFTRAaJ-FecUs8SZj6MBYwwzD447Nces");
final PlacesClient placesClient = Places.createClient(this);
AutocompleteSupportFragment autocompleteFragment = (AutocompleteSupportFragment)
getSupportFragmentManager().findFragmentById(R.id.autocomplete_fragment);
autocompleteFragment.setPlaceFields(Arrays.asList(Place.Field.ID, Place.Field.NAME));
autocompleteFragment.setOnPlaceSelectedListener(new PlaceSelectionListener() {
@Override
public void onPlaceSelected(Place place) {
//
// Double latitude = place.getLatLng().latitude;
Log.i("Places", "Place: " + place.getName() + ", " + place.getId());
placeId = place.getId();
}
@Override
public void onError(Status status) {
// TODO: Handle the error.
Log.i("errorOccurred", "An error occurred: " + status);
}
});
}
private void meuLocal(PlacesClient placesClient) {
List<Place.Field> placeFields = Arrays.asList(Place.Field.NAME);
FindCurrentPlaceRequest request =
FindCurrentPlaceRequest.builder(placeFields).build();
if (ContextCompat.checkSelfPermission(this, ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED) {
Task<FindCurrentPlaceResponse> placeResponse = placesClient.findCurrentPlace(request);
placeResponse.addOnCompleteListener(task -> {
if (task.isSuccessful()) {
FindCurrentPlaceResponse response = task.getResult();
for (PlaceLikelihood placeLikelihood: response.getPlaceLikelihoods()) {
Log.i("likelihood", String.format("Place '%s' has likelihood: %f",
placeLikelihood.getPlace().getName(),
placeLikelihood.getLikelihood()));
LatLng nome = placeLikelihood.getPlace().getLatLng();
double latitude = nome.latitude;
double longitude = nome.longitude;
Toast.makeText(this, "latitude: " + latitude + "longitude: " + longitude, Toast.LENGTH_SHORT).show();
}
} else {
Exception exception = task.getException();
if (exception instanceof ApiException) {
ApiException apiException = (ApiException) exception;
Log.e("notFound", "Place not found: " + apiException.getStatusCode());
}
}
});
} else {
}
}
private void verificaIdPlaces(PlacesClient placesClient) {
List<Place.Field> placeFields = Arrays.asList(Place.Field.ID, Place.Field.NAME);
FetchPlaceRequest request = FetchPlaceRequest.builder(placeId, placeFields).build();
String nome = request.getPlaceId();
placesClient.fetchPlace(request).addOnSuccessListener((response) -> {
Place place = response.getPlace();
Log.i("PlaceFOund", "Place found: " + place.getName() + "," + place.getLatLng());
}).addOnFailureListener((exception) -> {
if (exception instanceof ApiException) {
ApiException apiException = (ApiException) exception;
int statusCode = apiException.getStatusCode();
// Handle error with given status code.
Log.e("Place not found", "Place not found: " + exception.getMessage());
}
});
}
private void iniciaComponentes() {
btnChamarMoto = findViewById(R.id.btnChamarMoto);
}
Please check your API key.
Note that there are two API keys, one for the debug build and one for the release build, so make sure you are using the right one for the build you are running.
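The API key must be registered against the SHA-1 fingerprint of the certificate that actually signed the build you are running (debug vs. release). A small diagnostic sketch, not from the original post, that logs the installed build's signing SHA-1 so you can compare it with the fingerprint registered in the Google console (class and log tag names are made up):
public final class SigningInfoLogger {

    // Logs the SHA-1 fingerprint of the certificate used to sign the installed APK.
    // Compare the logged value with the fingerprint registered for the API key.
    public static void logSigningSha1(android.content.Context context) {
        try {
            android.content.pm.PackageInfo info = context.getPackageManager()
                    .getPackageInfo(context.getPackageName(),
                            android.content.pm.PackageManager.GET_SIGNATURES);
            for (android.content.pm.Signature signature : info.signatures) {
                java.security.MessageDigest md = java.security.MessageDigest.getInstance("SHA-1");
                byte[] digest = md.digest(signature.toByteArray());
                StringBuilder hex = new StringBuilder();
                for (byte b : digest) {
                    if (hex.length() > 0) {
                        hex.append(':');
                    }
                    hex.append(String.format("%02X", b));
                }
                android.util.Log.d("SigningSha1", hex.toString());
            }
        } catch (Exception e) {
            android.util.Log.e("SigningSha1", "Could not read signing info", e);
        }
    }
}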

Mixing MVC and Web API Error Handling

We have an MVC 4 web application where we use the web.config file to handle custom errors.
<system.webServer>
<validation validateIntegratedModeConfiguration="false" />
<httpErrors errorMode="Custom" existingResponse="Replace">
<remove statusCode="403" />
<error statusCode="403" responseMode="ExecuteURL" path="/Error/AccessDenied" />
<remove statusCode="404" />
<error statusCode="404" responseMode="ExecuteURL" path="/Error/NotFound" />
<remove statusCode="500" />
<error statusCode="500" responseMode="ExecuteURL" path="/Error/ApplicationError" />
</httpErrors>
</system.webServer>
All of which works as expected.
We are now beginning to implement some new features in this project using AngularJS and Web API. In our Web API controller actions, we are consistently returning a HttpResponseMessage to indicate success/failure of the call. For example:
return Request.CreateResponse(HttpStatusCode.BadRequest, result);
The problem (I think!) is that the original MVC error handling is intercepting the BadRequest result (reasonably enough), so the HttpResponseMessage data never gets returned to the calling AngularJS method.
What is the best way to handle errors in this mixed (MVC/Web API) environment so that the Web API HttpResponseMessages are not lost?
Thanks.
I am not sure I have found the best solution, but in the end I removed the httpErrors section from the Web.config and built my own error handler in the Global.asax assisted by the following posts:
StackOverflow
PrideParrot
Global.asax
public void Application_Error(Object sender, EventArgs e)
{
var httpContext = ((MvcApplication) sender).Context;
var currentController = "";
var currentAction = "";
var currentRouteData =
RouteTable.Routes.GetRouteData(new HttpContextWrapper(httpContext));
if (currentRouteData != null)
{
if (
!String.IsNullOrEmpty(
currentRouteData.Values["controller"]?.ToString()))
{
currentController = currentRouteData.Values["controller"].ToString();
}
if (!String.IsNullOrEmpty(currentRouteData.Values["action"]?.ToString()))
{
currentAction = currentRouteData.Values["action"].ToString();
}
}
var ex = Server.GetLastError();
var httpEx = ex as HttpException;
var controller = new ErrorController();
var routeData = new RouteData();
var statusCode = httpEx?.GetHttpCode() ?? 500;
string action;
switch (statusCode)
{
case 400:
action = "BadRequest";
break;
case 403:
action = "AccessDenied";
break;
case 404:
action = "NotFound";
break;
default:
action = "Index";
break;
}
httpContext.ClearError();
httpContext.Response.Clear();
httpContext.Response.StatusCode = statusCode;
httpContext.Response.TrySkipIisCustomErrors = true;
if (statusCode >= 500)
{
Server.Transfer("/Error/ServerError.html");
return;
}
routeData.Values["controller"] = "Error";
routeData.Values["action"] = action;
routeData.Values["statusCode"] = statusCode;
controller.ViewData.Model = new HandleErrorInfo(ex, currentController,
currentAction);
((IController) controller).Execute(
new RequestContext(new HttpContextWrapper(httpContext), routeData));
}
My error controller then looked like this:
[AllowAnonymous]
public sealed class ErrorController
: AblController
{
public ActionResult Index(int statusCode)
{
ViewBag.StatusCode = statusCode;
return View("Error");
}
// HTTP 400 - Bad Request
public ActionResult BadRequest()
{
// Now handled by Global.asax - Application_Error
// Response.StatusCode = (int) HttpStatusCode.BadRequest;
// Response.TrySkipIisCustomErrors = true;
if (Request.IsAjaxRequest())
{
return Json(
new
{
error = new ErrorSummary("Bad Request")
});
}
return View();
}
// HTTP 403 - Access Denied
public ActionResult AccessDenied()
{
// Now handled by Global.asax - Application_Error
// Response.StatusCode = (int) HttpStatusCode.Forbidden;
// Response.TrySkipIisCustomErrors = true;
if (Request.IsAjaxRequest())
{
return Json(
new
{
error = new ErrorSummary("Access Denied")
});
}
return View();
}
// HTTP 404 - Not Found
public ActionResult NotFound()
{
// Now handled by Global.asax - Application_Error
// Response.StatusCode = (int) HttpStatusCode.NotFound;
// Response.TrySkipIisCustomErrors = true;
if (Request.IsAjaxRequest())
{
return Json(
new
{
error = new ErrorSummary("Not Found")
});
}
return View();
}
}
I also turned the custom error mode off in the Web.config
<customErrors mode="Off" />
This solution needs more testing, but so far it seems to be performing as expected/as required.

Lucene Index: Missing documents

We have a pretty basic Lucene set up. We recently noticed that some documents aren't written to the index.
This is how we create the document:
private void addToDirectory(SpecialDomainObject specialDomainObject) throws IOException {
Document document = new Document();
document.add(new TextField("id", String.valueOf(specialDomainObject.getId()), Field.Store.YES));
document.add(new TextField("name", specialDomainObject.getName(), Field.Store.YES));
document.add(new TextField("tags", joinTags(specialDomainObject.getTags()), Field.Store.YES));
document.add(new TextField("contents", getContents(specialDomainObject), Field.Store.YES));
for (Language language : getAllAssociatedLanguages(specialDomainObject)) {
document.add(new IntField("languageId", language.getId(), Field.Store.YES));
}
specialDomainObjectIndexWriter.updateDocument(new Term("id", document.getField("id").stringValue()), document);
specialDomainObjectIndexWriter.commit();
}
This is how we create the analyzer and the index writer:
<bean id="luceneVersion" class="org.apache.lucene.util.Version" factory-method="valueOf">
<constructor-arg value="LUCENE_46"/>
</bean>
<bean id="analyzer" class="org.apache.lucene.analysis.standard.StandardAnalyzer">
<constructor-arg ref="luceneVersion"/>
</bean>
<bean id="specialDomainObjectIndexWriter" class="org.apache.lucene.index.IndexWriter">
<constructor-arg ref="specialDomainObjectDirectory" />
<constructor-arg>
<bean class="org.apache.lucene.index.IndexWriterConfig">
<constructor-arg ref="luceneVersion"/>
<constructor-arg ref="analyzer" />
<property name="openMode" value="CREATE_OR_APPEND"/>
</bean>
</constructor-arg>
</bean>
Indexing is done with a scheduled task:
@Component
public class ScheduledSpecialDomainObjectIndexCreationTask implements ScheduledIndexCreationTask {
private static final Logger logger = LoggerFactory.getLogger(ScheduledSpecialDomainObjectIndexCreationTask.class);
@Autowired
private IndexOperator specialDomainObjectIndexOperator;
@Scheduled(fixedDelay = 3600 * 1000)
@Override
public void createIndex() {
Date indexCreationStartDate = new Date();
try {
logger.info("Updating complete special domain object index...");
specialDomainObjectIndexOperator.createIndex();
if (logger.isDebugEnabled()) {
Date indexCreationEndDate = new Date();
logger.debug("Index creation duration: {} ms", indexCreationEndDate.getTime() - indexCreationStartDate.getTime());
}
} catch (IOException e) {
logger.error("Could update complete special domain object index.", e);
}
}
}
createIndex() is implemented as follows:
@Override
public void createIndex() throws IOException {
logger.trace("Preparing for index generation...");
IndexWriter indexWriter = getIndexWriter();
Date start = new Date();
logger.trace("Deleting all documents from index...");
indexWriter.deleteAll();
logger.trace("Starting index generation...");
long numberOfProcessedObjects = fillIndex();
logger.debug("Index written in " + (new Date().getTime() - start.getTime()) + " milliseconds.");
logger.debug("Number of processed objects: {}", numberOfProcessedObjects);
logger.debug("Number of documents in index: {}", indexWriter.numDocs());
indexWriter.commit();
indexWriter.forceMerge(1);
}
@Override
protected long fillIndex() throws IOException {
Page<SpecialDomainObject> specialDomainObjectsPage = specialDomainObjectRepository.findAll(new PageRequest(0, MAXIMUM_PAGE_ELEMENTS));
while (true) {
addToDirectory(specialDomainObjectsPage);
if (specialDomainObjectsPage.hasNextPage()) {
specialDomainObjectsPage =
specialDomainObjectRepository.findAll(new PageRequest(specialDomainObjectsPage.getNumber() + 1, specialDomainObjectsPage.getSize()));
} else {
break;
}
}
return specialDomainObjectsPage.getTotalElements();
}
There are about 2000 specialDomainObject instances and about 80 aren't written to the index (we checked this with Luke).
Is there anything that could cause the missing documents?
We found the problem: The default encoding of the operating system was not set to UTF-8.
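For reference, the fix boils down to not relying on the platform default charset when producing the indexed field contents. A small sketch (the actual getContents(...) implementation is not shown above, so the reading code here is only an assumption) that makes the charset explicit:
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public final class ContentReader {

    // Hypothetical replacement for the reading done inside getContents(...):
    // the charset is stated explicitly instead of falling back to the OS default,
    // which was identified as the root cause of the missing documents.
    public static String read(InputStream in) throws IOException {
        StringBuilder contents = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(in, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                contents.append(line).append('\n');
            }
        }
        return contents.toString();
    }
}
Alternatively, starting the JVM with -Dfile.encoding=UTF-8 changes the default for the whole application, which has the same effect without touching the reading code.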

Using MiniProfiler's database profiling with NHibernate

What's the simplest way to use MiniProfiler's database profiling with NHibernate? In order for the profiler to work, I need to wrap the DbConnection that NHibernate uses in a ProfiledDbConnection.
I'm not too familiar with the internals of NHibernate, so I don't know where all the extensibility points are. (I noticed that an NHibernate ISession has a Connection property, but it is read-only.)
[UPDATE] Please see the following links for a version that uses RealProxy to proxy the SqlCommand - batching is now supported
blog http://blog.fearofaflatplanet.me.uk/mvcminiprofiler-and-nhibernate-take-2
gist https://gist.github.com/1110153
I've left the original answer unaltered as it was accepted. [/UPDATE]
I've managed to partially get this to work by implementing a profiled client driver (example for SQL Server 2008 below). This works for simple cases; however, I haven't yet found a solution for NHibernate batching (which attempts to cast the command back to SqlCommand).
public class ProfiledSql2008ClientDriver : Sql2008ClientDriver
{
public override IDbCommand CreateCommand()
{
return new ProfiledDbCommand(
base.CreateCommand() as DbCommand,
null,
MiniProfiler.Current);
}
public override IDbConnection CreateConnection()
{
return ProfiledDbConnection.Get(
base.CreateConnection() as DbConnection,
MiniProfiler.Current);
}
}
I extended Robert's answer above to work with NHibernate batching. There is a lot of code here, so it can probably be shortened; some of it is based on the NHibernate source for the client driver.
<property name="connection.driver_class">YoureOnTime.Data.ProfiledSqlClientDriver, YoureOnTime.Common</property>
public class ProfiledSqlClientDriver : DriverBase, IEmbeddedBatcherFactoryProvider
{
public override IDbConnection CreateConnection()
{
return new ProfiledSqlDbConnection(
new SqlConnection(),
MiniProfiler.Current);
}
public override IDbCommand CreateCommand()
{
return new ProfiledSqlDbCommand(
new SqlCommand(),
null,
MiniProfiler.Current);
}
public override bool UseNamedPrefixInSql
{
get { return true; }
}
public override bool UseNamedPrefixInParameter
{
get { return true; }
}
public override string NamedPrefix
{
get { return "#"; }
}
public override bool SupportsMultipleOpenReaders
{
get { return false; }
}
public static void SetParameterSizes(IDataParameterCollection parameters, SqlType[] parameterTypes)
{
for (int i = 0; i < parameters.Count; i++)
{
SetVariableLengthParameterSize((IDbDataParameter)parameters[i], parameterTypes[i]);
}
}
private const int MaxAnsiStringSize = 8000;
private const int MaxBinarySize = MaxAnsiStringSize;
private const int MaxStringSize = MaxAnsiStringSize / 2;
private const int MaxBinaryBlobSize = int.MaxValue;
private const int MaxStringClobSize = MaxBinaryBlobSize / 2;
private const byte MaxPrecision = 28;
private const byte MaxScale = 5;
private const byte MaxDateTime2 = 8;
private const byte MaxDateTimeOffset = 10;
private static void SetDefaultParameterSize(IDbDataParameter dbParam, SqlType sqlType)
{
switch (dbParam.DbType)
{
case DbType.AnsiString:
case DbType.AnsiStringFixedLength:
dbParam.Size = MaxAnsiStringSize;
break;
case DbType.Binary:
if (sqlType is BinaryBlobSqlType)
{
dbParam.Size = MaxBinaryBlobSize;
}
else
{
dbParam.Size = MaxBinarySize;
}
break;
case DbType.Decimal:
dbParam.Precision = MaxPrecision;
dbParam.Scale = MaxScale;
break;
case DbType.String:
case DbType.StringFixedLength:
dbParam.Size = IsText(dbParam, sqlType) ? MaxStringClobSize : MaxStringSize;
break;
case DbType.DateTime2:
dbParam.Size = MaxDateTime2;
break;
case DbType.DateTimeOffset:
dbParam.Size = MaxDateTimeOffset;
break;
}
}
private static bool IsText(IDbDataParameter dbParam, SqlType sqlType)
{
return (sqlType is StringClobSqlType) || (sqlType.LengthDefined && sqlType.Length > MsSql2000Dialect.MaxSizeForLengthLimitedStrings &&
(DbType.String == dbParam.DbType || DbType.StringFixedLength == dbParam.DbType));
}
private static void SetVariableLengthParameterSize(IDbDataParameter dbParam, SqlType sqlType)
{
SetDefaultParameterSize(dbParam, sqlType);
// Override the defaults using data from SqlType.
if (sqlType.LengthDefined && !IsText(dbParam, sqlType))
{
dbParam.Size = sqlType.Length;
}
if (sqlType.PrecisionDefined)
{
dbParam.Precision = sqlType.Precision;
dbParam.Scale = sqlType.Scale;
}
}
public override IDbCommand GenerateCommand(CommandType type, SqlString sqlString, SqlType[] parameterTypes)
{
IDbCommand command = base.GenerateCommand(type, sqlString, parameterTypes);
//if (IsPrepareSqlEnabled)
{
SetParameterSizes(command.Parameters, parameterTypes);
}
return command;
}
public override bool SupportsMultipleQueries
{
get { return true; }
}
#region IEmbeddedBatcherFactoryProvider Members
System.Type IEmbeddedBatcherFactoryProvider.BatcherFactoryClass
{
get { return typeof(ProfiledSqlClientBatchingBatcherFactory); }
}
#endregion
}
public class ProfiledSqlClientBatchingBatcher : AbstractBatcher
{
private int batchSize;
private int totalExpectedRowsAffected;
private SqlClientSqlCommandSet currentBatch;
private StringBuilder currentBatchCommandsLog;
private readonly int defaultTimeout;
public ProfiledSqlClientBatchingBatcher(ConnectionManager connectionManager, IInterceptor interceptor)
: base(connectionManager, interceptor)
{
batchSize = Factory.Settings.AdoBatchSize;
defaultTimeout = PropertiesHelper.GetInt32(NHibernate.Cfg.Environment.CommandTimeout, NHibernate.Cfg.Environment.Properties, -1);
currentBatch = CreateConfiguredBatch();
//we always create this, because we need to deal with a scenario in which
//the user change the logging configuration at runtime. Trying to put this
//behind an if(log.IsDebugEnabled) will cause a null reference exception
//at that point.
currentBatchCommandsLog = new StringBuilder().AppendLine("Batch commands:");
}
public override int BatchSize
{
get { return batchSize; }
set { batchSize = value; }
}
protected override int CountOfStatementsInCurrentBatch
{
get { return currentBatch.CountOfCommands; }
}
public override void AddToBatch(IExpectation expectation)
{
totalExpectedRowsAffected += expectation.ExpectedRowCount;
IDbCommand batchUpdate = CurrentCommand;
string lineWithParameters = null;
var sqlStatementLogger = Factory.Settings.SqlStatementLogger;
if (sqlStatementLogger.IsDebugEnabled || log.IsDebugEnabled)
{
lineWithParameters = sqlStatementLogger.GetCommandLineWithParameters(batchUpdate);
var formatStyle = sqlStatementLogger.DetermineActualStyle(FormatStyle.Basic);
lineWithParameters = formatStyle.Formatter.Format(lineWithParameters);
currentBatchCommandsLog.Append("command ")
.Append(currentBatch.CountOfCommands)
.Append(":")
.AppendLine(lineWithParameters);
}
if (log.IsDebugEnabled)
{
log.Debug("Adding to batch:" + lineWithParameters);
}
currentBatch.Append(((ProfiledSqlDbCommand)batchUpdate).Command);
if (currentBatch.CountOfCommands >= batchSize)
{
ExecuteBatchWithTiming(batchUpdate);
}
}
protected void ProfiledPrepare(IDbCommand cmd)
{
try
{
IDbConnection sessionConnection = ConnectionManager.GetConnection();
if (cmd.Connection != null)
{
// make sure the commands connection is the same as the Sessions connection
// these can be different when the session is disconnected and then reconnected
if (cmd.Connection != sessionConnection)
{
cmd.Connection = sessionConnection;
}
}
else
{
cmd.Connection = (sessionConnection as ProfiledSqlDbConnection).Connection;
}
ProfiledSqlDbTransaction trans = (ProfiledSqlDbTransaction)typeof(NHibernate.Transaction.AdoTransaction).InvokeMember("trans", System.Reflection.BindingFlags.GetField | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance, null, ConnectionManager.Transaction, null);
if (trans != null)
cmd.Transaction = trans.Transaction;
Factory.ConnectionProvider.Driver.PrepareCommand(cmd);
}
catch (InvalidOperationException ioe)
{
throw new ADOException("While preparing " + cmd.CommandText + " an error occurred", ioe);
}
}
protected override void DoExecuteBatch(IDbCommand ps)
{
log.DebugFormat("Executing batch");
CheckReaders();
ProfiledPrepare(currentBatch.BatchCommand);
if (Factory.Settings.SqlStatementLogger.IsDebugEnabled)
{
Factory.Settings.SqlStatementLogger.LogBatchCommand(currentBatchCommandsLog.ToString());
currentBatchCommandsLog = new StringBuilder().AppendLine("Batch commands:");
}
int rowsAffected;
try
{
rowsAffected = currentBatch.ExecuteNonQuery();
}
catch (DbException e)
{
throw ADOExceptionHelper.Convert(Factory.SQLExceptionConverter, e, "could not execute batch command.");
}
Expectations.VerifyOutcomeBatched(totalExpectedRowsAffected, rowsAffected);
currentBatch.Dispose();
totalExpectedRowsAffected = 0;
currentBatch = CreateConfiguredBatch();
}
private SqlClientSqlCommandSet CreateConfiguredBatch()
{
var result = new SqlClientSqlCommandSet();
if (defaultTimeout > 0)
{
try
{
result.CommandTimeout = defaultTimeout;
}
catch (Exception e)
{
if (log.IsWarnEnabled)
{
log.Warn(e.ToString());
}
}
}
return result;
}
}
public class ProfiledSqlClientBatchingBatcherFactory : IBatcherFactory
{
public virtual IBatcher CreateBatcher(ConnectionManager connectionManager, IInterceptor interceptor)
{
return new ProfiledSqlClientBatchingBatcher(connectionManager, interceptor);
}
}
public class ProfiledSqlDbCommand : ProfiledDbCommand
{
public ProfiledSqlDbCommand(SqlCommand cmd, SqlConnection conn, MiniProfiler profiler)
: base(cmd, conn, profiler)
{
Command = cmd;
}
public SqlCommand Command { get; set; }
private DbTransaction _trans;
protected override DbTransaction DbTransaction
{
get { return _trans; }
set
{
this._trans = value;
ProfiledSqlDbTransaction awesomeTran = value as ProfiledSqlDbTransaction;
Command.Transaction = awesomeTran == null ? (SqlTransaction)value : awesomeTran.Transaction;
}
}
}
public class ProfiledSqlDbConnection : ProfiledDbConnection
{
public ProfiledSqlDbConnection(SqlConnection connection, MiniProfiler profiler)
: base(connection, profiler)
{
Connection = connection;
}
public SqlConnection Connection { get; set; }
protected override DbTransaction BeginDbTransaction(System.Data.IsolationLevel isolationLevel)
{
return new ProfiledSqlDbTransaction(Connection.BeginTransaction(isolationLevel), this);
}
}
public class ProfiledSqlDbTransaction : ProfiledDbTransaction
{
public ProfiledSqlDbTransaction(SqlTransaction transaction, ProfiledDbConnection connection)
: base(transaction, connection)
{
Transaction = transaction;
}
public SqlTransaction Transaction { get; set; }
}
Try implementing NHibernate.Connection.IConnectionProvider (you could just inherit DriverConnectionProvider) and wrap the IDbConnection as you need in GetConnection().
Plug in your connection provider using the Environment.ConnectionProvider key in your config properties.
If anyone is interested, I have done an integration using a custom log4net appender instead. This way I feel safe that I don't mess with the Connection object.
The rough outline is: NHibernate emits the SQL strings as debug statements, and the appender configured in log4net.xml calls Start and Dispose on the MiniProfiler.