Ignite cache expiry for multiple caches - ignite

I have 10 Ignite caches, but I want to set an expiry policy for only 4 of them. I have googled for an example but with no luck. I have prepared a config, but I need an expert's opinion. The XML is as below:
<property name="cacheConfiguration">
<list>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name">
<list>
<value>CACHE_L4_TRIGGER_NOTIFICATION</value>
<value>CACHE_L2_COTH</value>
<value>CACHE_L2_CVOC</value>
<value>CACHE_L3_MSC</value>
</list>
</property>
<property name="expiryPolicyFactory">
<bean class="javax.cache.expiry.CreatedExpiryPolicy" factory-method="factoryOf">
<constructor-arg>
<bean class="javax.cache.expiry.Duration">
<constructor-arg value="DAYS"/>
<constructor-arg value="3"/>
</bean>
</constructor-arg>
</bean>
</property>
</bean>
</list>
</property>

Rather than putting a list of cache names inside a single CacheConfiguration, you define a list of CacheConfigurations, one per cache:
<property name="cacheConfiguration">
<list>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="CACHE_L4_TRIGGER_NOTIFICATION"/>
<property name="expiryPolicyFactory">
<bean class="javax.cache.expiry.CreatedExpiryPolicy" factory-method="factoryOf">
<constructor-arg>
<bean class="javax.cache.expiry.Duration">
<constructor-arg value="DAYS"/>
<constructor-arg value="3"/>
</bean>
</constructor-arg>
</bean>
</property>
</bean>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="CACHE_L2_COTH"/>
<property name="expiryPolicyFactory">
<bean class="javax.cache.expiry.CreatedExpiryPolicy" factory-method="factoryOf">
<constructor-arg>
<bean class="javax.cache.expiry.Duration">
<constructor-arg value="DAYS"/>
<constructor-arg value="1"/>
</bean>
</constructor-arg>
</bean>
</property>
</bean>
...
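If you build the configuration in code rather than XML, the same idea looks roughly like the sketch below. It is only an outline: the cache names and durations are taken from the question, and SOME_OTHER_CACHE stands in for the remaining caches that should keep their entries indefinitely.
import java.util.concurrent.TimeUnit;

import javax.cache.expiry.CreatedExpiryPolicy;
import javax.cache.expiry.Duration;

import org.apache.ignite.Ignition;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;

public class ExpiryPerCacheConfig {
    public static void main(String[] args) {
        // Only the caches that need expiry get an expiryPolicyFactory.
        CacheConfiguration<Object, Object> trigger = new CacheConfiguration<>("CACHE_L4_TRIGGER_NOTIFICATION");
        trigger.setExpiryPolicyFactory(CreatedExpiryPolicy.factoryOf(new Duration(TimeUnit.DAYS, 3)));

        CacheConfiguration<Object, Object> coth = new CacheConfiguration<>("CACHE_L2_COTH");
        coth.setExpiryPolicyFactory(CreatedExpiryPolicy.factoryOf(new Duration(TimeUnit.DAYS, 1)));

        // A cache configured without an expiryPolicyFactory never expires its entries.
        CacheConfiguration<Object, Object> plain = new CacheConfiguration<>("SOME_OTHER_CACHE");

        IgniteConfiguration cfg = new IgniteConfiguration();
        cfg.setCacheConfiguration(trigger, coth, plain);

        Ignition.start(cfg);
    }
}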

Related

Ignite backups=1 works incorrectly

I am using pure in-memory Ignite on k8s. There are 3 instances with backups=1, configured as below. I restarted one instance to see whether it would be re-filled after restarting, but instead the size of all the other instances was reduced and the restarted one was never re-filled.
Size of Ignite cache
My config:
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="groupName" value="bi_user_mask"/>
<property name="name" value="dmp_user_mask"/>
<property name="cacheMode" value="PARTITIONED"/>
<property name="partitionLossPolicy" value="IGNORE"/>
<property name="backups" value="1"/>
<property name="onheapCacheEnabled" value="true"/>
<property name="evictionPolicyFactory">
<bean class="org.apache.ignite.cache.eviction.sorted.SortedEvictionPolicyFactory">
<property name="maxMemorySize" value="#{512L * 1024 * 1024}"/>
<property name="batchSize" value="1024"/>
<property name="maxSize" value="#{100L * 1024 * 1024}"/>
<property name="comp" ref="sorted_value_eviction"/>
</bean>
</property>
<property name="writeSynchronizationMode" value="FULL_SYNC"/>
<property name="eagerTtl" value="false"/>
<property name="expiryPolicyFactory">
<bean class="javax.cache.expiry.TouchedExpiryPolicy" factory-method="factoryOf">
<constructor-arg>
<bean class="javax.cache.expiry.Duration">
<constructor-arg value="DAYS"/>
<constructor-arg value="10"/>
</bean>
</constructor-arg>
</bean>
</property>
<property name="statisticsEnabled" value="true"/>
</bean>
Does anyone have an idea how to fix this?
Thank you.

How to add a new node into an Ignite cluster?

We have an Ignite instance (GridGain) running on the server and would like to add one more node to the same cluster. I used the same config as the current instance, but nothing happens after a long wait. I tested the connection to the server by creating a thick client and it works well, so I am not sure what is going on. Please see the screen below.
Running server config
<bean id="grid.cfg" class="org.apache.ignite.configuration.IgniteConfiguration">
<property name="igniteInstanceName" value="rho-cache"/>
<property name="consistentId" value="rho-cache"/>
<property name="dataStorageConfiguration">
<bean class="org.apache.ignite.configuration.DataStorageConfiguration">
<!-- Set the size of wal segments to 128MB -->
<property name="walSegmentSize" value="#{128 * 1024 * 1024}"/>
<property name="defaultDataRegionConfiguration">
<bean class="org.apache.ignite.configuration.DataRegionConfiguration">
<!-- Enable persistence -->
<property name="persistenceEnabled" value="true"/>
</bean>
</property>
<property name="storagePath" value="work"/>
</bean>
</property>
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder">
<property name="addresses">
<list>
<value>10.1.8.186</value>
</list>
</property>
</bean>
</property>
</bean>
</property>
</bean>
New Node Config
<bean id="grid.cfg" class="org.apache.ignite.configuration.IgniteConfiguration">
<property name="igniteInstanceName" value="rho-1-cache"/>
<property name="consistentId" value="rho-1-cache"/>
<property name="dataStorageConfiguration">
<bean class="org.apache.ignite.configuration.DataStorageConfiguration">
<!-- Set the size of wal segments to 128MB -->
<property name="walSegmentSize" value="#{128 * 1024 * 1024}"/>
<property name="defaultDataRegionConfiguration">
<bean class="org.apache.ignite.configuration.DataRegionConfiguration">
<!-- Enable persistence -->
<property name="persistenceEnabled" value="true"/>
</bean>
</property>
<property name="storagePath" value="work"/>
</bean>
</property>
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder">
<property name="addresses">
<list>
<value>10.1.8.186</value>
<value>195.168.44.88</value>
</list>
</property>
</bean>
</property>
</bean>
</property>
</bean>
Screen when starting the second node

Apache Ignite Join Query Returns Incorrect Results Due to Data not Being Co-located

I'm new to Ignite and following various blogs to understand the configuration required.
As a starter, I have used "movie lens" data (Movies & Ratings) and used Ignite Web Console to generate the required configuration.
Please find the required details about my code below:
1) MySQL Table Structure
CREATE TABLE movies(
    movie_id INTEGER,
    movie_name VARCHAR(250),
    genre VARCHAR(250),
    CONSTRAINT pk_movie PRIMARY KEY (movie_id)
);

CREATE TABLE ratings(
    user_id INTEGER,
    movie_id INTEGER,
    rating FLOAT,
    timestamp TIMESTAMP,
    CONSTRAINT pk_rating PRIMARY KEY (user_id, movie_id)
);
2) Ignite Config File in Spring Boot Project (Auto Generated by Ignite Web Console)
<?xml version="1.0" encoding="UTF-8"?>
<!-- This file was generated by Ignite Web Console (05/15/2019, 18:37) -->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:util="http://www.springframework.org/schema/util"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/util
http://www.springframework.org/schema/util/spring-util.xsd">
<!-- Load external properties file. -->
<bean id="placeholderConfig" class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
<property name="location" value="classpath:secret.properties"/>
</bean>
<!-- Data source beans will be initialized from external properties file. -->
<bean id="dsGeneric_Movielens" class="com.mchange.v2.c3p0.ComboPooledDataSource">
<property name="jdbcUrl" value="${dsGeneric_Movielens.jdbc.url}"/>
<property name="user" value="${dsGeneric_Movielens.jdbc.username}"/>
<property name="password" value="${dsGeneric_Movielens.jdbc.password}"/>
</bean>
<bean class="org.apache.ignite.configuration.IgniteConfiguration">
<property name="igniteInstanceName" value="ImportedCluster"/>
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.multicast.TcpDiscoveryMulticastIpFinder">
<property name="addresses">
<list>
<value>127.0.0.1:47500..47510</value>
</list>
</property>
</bean>
</property>
</bean>
</property>
<property name="cacheConfiguration">
<list>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="MoviesCache"/>
<property name="cacheMode" value="REPLICATED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="cacheStoreFactory">
<bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory">
<property name="dataSourceBean" value="dsGeneric_Movielens"/>
<property name="dialect">
<bean class="org.apache.ignite.cache.store.jdbc.dialect.BasicJdbcDialect">
</bean>
</property>
<property name="types">
<list>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcType">
<property name="cacheName" value="MoviesCache"/>
<property name="keyType" value="java.lang.Integer"/>
<property name="valueType" value="com.learnwithmanoj.ignite.model.Movies"/>
<property name="databaseSchema" value="movielens"/>
<property name="databaseTable" value="movies"/>
<property name="keyFields">
<list>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.INTEGER"/>
</constructor-arg>
<constructor-arg value="movie_id"/>
<constructor-arg value="int"/>
<constructor-arg value="movieId"/>
</bean>
</list>
</property>
<property name="valueFields">
<list>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.VARCHAR"/>
</constructor-arg>
<constructor-arg value="movie_name"/>
<constructor-arg value="java.lang.String"/>
<constructor-arg value="movieName"/>
</bean>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.VARCHAR"/>
</constructor-arg>
<constructor-arg value="genre"/>
<constructor-arg value="java.lang.String"/>
<constructor-arg value="genre"/>
</bean>
</list>
</property>
</bean>
</list>
</property>
</bean>
</property>
<property name="readThrough" value="true"/>
<property name="writeThrough" value="true"/>
<property name="queryEntities">
<list>
<bean class="org.apache.ignite.cache.QueryEntity">
<property name="keyType" value="java.lang.Integer"/>
<property name="valueType" value="com.learnwithmanoj.ignite.model.Movies"/>
<property name="keyFieldName" value="movieId"/>
<property name="keyFields">
<list>
<value>movieId</value>
</list>
</property>
<property name="fields">
<map>
<entry key="movieName" value="java.lang.String"/>
<entry key="genre" value="java.lang.String"/>
<entry key="movieId" value="java.lang.Integer"/>
</map>
</property>
<property name="aliases">
<map>
<entry key="movieId" value="movie_id"/>
<entry key="movieName" value="movie_name"/>
</map>
</property>
</bean>
</list>
</property>
</bean>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="RatingsCache"/>
<property name="cacheMode" value="REPLICATED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="cacheStoreFactory">
<bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory">
<property name="dataSourceBean" value="dsGeneric_Movielens"/>
<property name="dialect">
<bean class="org.apache.ignite.cache.store.jdbc.dialect.BasicJdbcDialect">
</bean>
</property>
<property name="types">
<list>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcType">
<property name="cacheName" value="RatingsCache"/>
<property name="keyType" value="com.learnwithmanoj.ignite.model.RatingsKey"/>
<property name="valueType" value="com.learnwithmanoj.ignite.model.Ratings"/>
<property name="databaseSchema" value="movielens"/>
<property name="databaseTable" value="ratings"/>
<property name="keyFields">
<list>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.INTEGER"/>
</constructor-arg>
<constructor-arg value="user_id"/>
<constructor-arg value="int"/>
<constructor-arg value="userId"/>
</bean>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.INTEGER"/>
</constructor-arg>
<constructor-arg value="movie_id"/>
<constructor-arg value="int"/>
<constructor-arg value="movieId"/>
</bean>
</list>
</property>
<property name="valueFields">
<list>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.REAL"/>
</constructor-arg>
<constructor-arg value="rating"/>
<constructor-arg value="java.lang.Double"/>
<constructor-arg value="rating"/>
</bean>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.TIMESTAMP"/>
</constructor-arg>
<constructor-arg value="timestamp"/>
<constructor-arg value="java.sql.Timestamp"/>
<constructor-arg value="timestamp"/>
</bean>
</list>
</property>
</bean>
</list>
</property>
</bean>
</property>
<property name="readThrough" value="true"/>
<property name="writeThrough" value="true"/>
<property name="queryEntities">
<list>
<bean class="org.apache.ignite.cache.QueryEntity">
<property name="keyType" value="com.learnwithmanoj.ignite.model.RatingsKey"/>
<property name="valueType" value="com.learnwithmanoj.ignite.model.Ratings"/>
<property name="keyFields">
<list>
<value>userId</value>
<value>movieId</value>
</list>
</property>
<property name="fields">
<map>
<entry key="userId" value="java.lang.Integer"/>
<entry key="movieId" value="java.lang.Integer"/>
<entry key="rating" value="java.lang.Double"/>
<entry key="timestamp" value="java.sql.Timestamp"/>
</map>
</property>
<property name="aliases">
<map>
<entry key="userId" value="user_id"/>
<entry key="movieId" value="movie_id"/>
</map>
</property>
<property name="indexes">
<list>
<bean class="org.apache.ignite.cache.QueryIndex">
<property name="name" value="fk_movie_id"/>
<property name="indexType" value="SORTED"/>
<property name="fields">
<map>
<entry key="movieId" value="false"/>
</map>
</property>
</bean>
</list>
</property>
</bean>
</list>
</property>
</bean>
</list>
</property>
</bean>
</beans>
3) Ignite Config File in Ignite Installation Folder
<?xml version="1.0" encoding="UTF-8"?>
<!-- This file was generated by Ignite Web Console (05/15/2019, 08:52) -->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:util="http://www.springframework.org/schema/util"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/util
http://www.springframework.org/schema/util/spring-util.xsd">
<!-- Load external properties file. -->
<bean id="placeholderConfig" class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
<property name="location" value="classpath:secret.properties"/>
</bean>
<!-- Data source beans will be initialized from external properties file. -->
<bean id="datasource" class="com.mchange.v2.c3p0.ComboPooledDataSource">
<property name="jdbcUrl" value="${datasource.jdbc.url}"/>
<property name="user" value="${datasource.jdbc.username}"/>
<property name="password" value="${datasource.jdbc.password}"/>
</bean>
<bean class="org.apache.ignite.configuration.IgniteConfiguration">
<property name="igniteInstanceName" value="MovieRatingsCluster"/>
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.multicast.TcpDiscoveryMulticastIpFinder">
<property name="addresses">
<list>
<value>127.0.0.1:47500..47510</value>
</list>
</property>
</bean>
</property>
</bean>
</property>
</bean>
</beans>
Problem Statement:
When I start my Spring Boot application as a stand-alone Ignite node, all the data from the MySQL database gets synced into the Ignite node as expected. Also, if I join the Ratings and Movies tables, the join works properly and gives the correct result.
Join Query Used:
select r.movie_id, m.movie_name, count(r.rating) as count
from "RatingsCache".ratings r
inner join "MoviesCache".movies m
where r.movie_id = m.movie_id
group by r.movie_id
order by count desc
limit 5;
Now coming to the problem: if I do the same testing with more than one node, the join starts to give incorrect results because the data is not colocated.
Can anyone guide me to the right configuration that needs to be added to fix the join problem in partitioned mode?
I have even tried to add the below config.
<property name="cacheKeyConfiguration">
<list>
<bean class="org.apache.ignite.cache.CacheKeyConfiguration">
<property name="typeName" value="com.learnwithmanoj.ignite.model.RatingsKey" />
<property name="affinityKeyFieldName" value="movie_id" />
</bean>
</list>
</property>
But I got the error below.
Caused by: org.apache.ignite.binary.BinaryObjectException: Binary type
has different affinity key fields
[typeName=com.learnwithmanoj.ignite.model.RatingsKey,
affKeyFieldName1=null, affKeyFieldName2=movie_id]
Your solution should work. First you need to clean the marshaller/ directory under the Ignite home dir on all nodes, then restart those nodes.
You might also need to specify the key field name in caps: MOVIE_ID.
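If you control the key class, another option (not part of the answer above, but a common Ignite approach) is to mark the affinity field directly on the key class with @AffinityKeyMapped instead of configuring it in XML. The class and field names below are assumed from the question's config, and the same marshaller caveat applies, since binary metadata for the type already exists:
import java.io.Serializable;

import org.apache.ignite.cache.affinity.AffinityKeyMapped;

public class RatingsKey implements Serializable {
    private int userId;

    /** Co-locates each rating with the movie it refers to. */
    @AffinityKeyMapped
    private int movieId;

    public RatingsKey(int userId, int movieId) {
        this.userId = userId;
        this.movieId = movieId;
    }

    // equals() and hashCode() over both fields are required for a cache key.
    @Override public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof RatingsKey)) return false;
        RatingsKey k = (RatingsKey)o;
        return userId == k.userId && movieId == k.movieId;
    }

    @Override public int hashCode() {
        return 31 * userId + movieId;
    }
}
If full co-location is not feasible, non-colocated joins can also be enabled per query via SqlFieldsQuery.setDistributedJoins(true), at the cost of extra network round trips.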

Ignite DataStreamer not loading data

I'm using Ignite version 2.0.0.
I am trying to load about 40M records from the DB into my Ignite instances (2 in this case). I read about Ignite DataStreamers at https://apacheignite.readme.io/v1.2/docs/data-streamers
Because of the very low data loading speed of loadCache(), I replaced loadCache() with IgniteDataStreamer.addData().
Upon execution, I notice in the web console that the metric for on-heap entries keeps incrementing (i.e., it shows that data is being loaded). But when I query the Ignite cache, I get an empty result.
I also notice that the server logs show this exception:
[01:46:19,541][ERROR][flusher-0-#66%RemoteIgniteCluster%][GridCacheWriteBehindStore] Unable to update underlying store: CacheJdbcPojoStore []
javax.cache.CacheException: Failed to read property value from non binary object [class=class java.lang.Integer, property=class_no]
at org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStore.extractBinaryParameter(CacheJdbcPojoStore.java:122)
at org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStore.extractParameter(CacheJdbcPojoStore.java:69)
at org.apache.ignite.cache.store.jdbc.CacheAbstractJdbcStore.fillValueParameters(CacheAbstractJdbcStore.java:1414)
at org.apache.ignite.cache.store.jdbc.CacheAbstractJdbcStore.writeAll(CacheAbstractJdbcStore.java:1081)
at org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore.updateStore(GridCacheWriteBehindStore.java:804)
at org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore.applyBatch(GridCacheWriteBehindStore.java:720)
at org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore.access$2400(GridCacheWriteBehindStore.java:75)
at org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore$Flusher.flushCacheCoalescing(GridCacheWriteBehindStore.java:1108)
at org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1006)
at org.apache.ignite.internal.util.worker.GridWorker.run(GridWorker.java:110)
at java.lang.Thread.run(Thread.java:745)
[01:50:00,945][ERROR][flusher-0-#66%RemoteIgniteCluster%][GridCacheWriteBehindStore] Unable to update underlying store: CacheJdbcPojoStore []
javax.cache.integration.CacheWriterException: Failed to write entries in database
at org.apache.ignite.cache.store.jdbc.CacheAbstractJdbcStore.writeAll(CacheAbstractJdbcStore.java:1151)
at org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore.updateStore(GridCacheWriteBehindStore.java:804)
at org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore.applyBatch(GridCacheWriteBehindStore.java:720)
at org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore.access$2400(GridCacheWriteBehindStore.java:75)
at org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore$Flusher.flushCacheCoalescing(GridCacheWriteBehindStore.java:1108)
at org.apache.ignite.internal.processors.cache.store.GridCacheWriteBehindStore$Flusher.body(GridCacheWriteBehindStore.java:1006)
at org.apache.ignite.internal.util.worker.GridWorker.run(GridWorker.java:110)
at java.lang.Thread.run(Thread.java:745)
My LoadCaches.java code is:
package load;

import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.Ignition;

public class LoadCaches {
    public static void main(String[] args) throws Exception {
        try (Ignite ignite = Ignition.start("RemoteIgniteCluster-client.xml");
             IgniteDataStreamer<Integer, Object> stmr = ignite.dataStreamer("PersonsCache")) {
            System.out.println(">>> Loading caches...");

            stmr.allowOverwrite(true);
            stmr.autoFlushFrequency(1000);

            System.out.println(">>> Loading cache: PersonsCache");

            for (int i = 0; i < 5000000; i++)
                stmr.addData(i, new Integer(i));

            /*ignite.cache("PersonsCache").loadCache(null);*/

            System.out.println(">>> All caches loaded!");
        }
    }
}
I tried changing
IgniteDataStreamer<Integer, Object> stmr = ignite.dataStreamer("PersonsCache"))
and
stmr.addData(i, new Integer(i));
with
IgniteDataStreamer<Integer, String> stmr = ignite.dataStreamer("PersonsCache"))
and
stmr.addData(i, Integer.toString(i));
and yet I get the same exception in the server logs, and when I try to query the cache I get empty results.
My Spring XMLs look like this:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:util="http://www.springframework.org/schema/util"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/util
http://www.springframework.org/schema/util/spring-util.xsd">
<!-- Load external properties file. -->
<bean id="placeholderConfig" class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
<property name="location" value="classpath:secret.properties"/>
</bean>
<!-- Data source beans will be initialized from external properties file. -->
<bean id="dsMySQL_DB" class="com.mysql.jdbc.jdbc2.optional.MysqlDataSource">
<property name="URL" value="${dsMySQL_DB.jdbc.url}"/>
<property name="user" value="${dsMySQL_DB.jdbc.username}"/>
<property name="password" value="${dsMySQL_DB.jdbc.password}"/>
</bean>
<bean class="org.apache.ignite.configuration.IgniteConfiguration">
<property name="igniteInstanceName" value="testcluster"/>
<property name="peerClassLoadingEnabled" value="false" />
<property name="includeEventTypes">
<list>
<util:constant static-field="org.apache.ignite.events.EventType.EVT_TASK_STARTED" />
<util:constant
static-field="org.apache.ignite.events.EventType.EVT_TASK_FINISHED" />
<util:constant static-field="org.apache.ignite.events.EventType.EVT_TASK_FAILED" />
</list>
</property>
<!-- Configure internal thread pool. -->
<property name="publicThreadPoolSize" value="64" />
<!-- Configure system thread pool. -->
<property name="systemThreadPoolSize" value="32" />
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.multicast.TcpDiscoveryMulticastIpFinder">
<!-- <bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder"> -->
<property name="addresses">
<list>
<value>127.0.0.1:47500..47510</value>
</list>
</property>
</bean>
</property>
</bean>
</property>
<property name="atomicConfiguration">
<bean class="org.apache.ignite.configuration.AtomicConfiguration">
<property name="backups" value="0"/>
</bean>
</property>
<property name="cacheKeyConfiguration">
<list>
<bean class="org.apache.ignite.cache.CacheKeyConfiguration">
<constructor-arg value="com.gmail.testcluster.model.Persons"/>
<constructor-arg value="age"/>
</bean>
</list>
</property>
<property name="cacheConfiguration">
<list>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<!-- Set rebalance batch size to 1 MB. -->
<property name="rebalanceBatchSize" value="#{1024 * 1024}" />
<!-- Explicitly disable rebalance throttling. -->
<property name="rebalanceThrottle" value="0" />
<!-- Set 4 threads for rebalancing. -->
<property name="rebalanceThreadPoolSize" value="4" />
<property name="name" value="PersonsCache"/>
<property name="cacheMode" value="PARTITIONED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="backups" value="0"/>
<property name="affinity">
<bean class="org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction">
<property name="partitions" value="2048"/>
</bean>
</property>
<!-- <property name="queryDetailMetricsSize" value="50"/> -->
<property name="cacheStoreFactory">
<bean class="org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreFactory">
<property name="dataSourceBean" value="dsMySQL_DB"/>
<property name="dialect">
<bean class="org.apache.ignite.cache.store.jdbc.dialect.MySQLDialect">
</bean>
</property>
<property name="types">
<list>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcType">
<property name="cacheName" value="PersonsCache"/>
<property name="keyType" value="java.lang.Integer"/>
<property name="valueType" value="com.gmail.testcluster.model.Persons"/>
<property name="databaseSchema" value="MY_DB"/>
<property name="databaseTable" value="PERSONS"/>
<property name="keyFields">
<list>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.INTEGER"/>
</constructor-arg>
<constructor-arg value="ID"/>
<constructor-arg value="int"/>
<constructor-arg value="id"/>
</bean>
</list>
</property>
<property name="valueFields">
<list>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.INTEGER"/>
</constructor-arg>
<constructor-arg value="CLASS_NO"/>
<constructor-arg value="java.lang.Integer"/>
<constructor-arg value="class_no"/>
</bean>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.VARCHAR"/>
</constructor-arg>
<constructor-arg value="NAME"/>
<constructor-arg value="java.lang.String"/>
<constructor-arg value="name"/>
</bean>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.INTEGER"/>
</constructor-arg>
<constructor-arg value="AGE"/>
<constructor-arg value="java.lang.Integer"/>
<constructor-arg value="age"/>
</bean>
<bean class="org.apache.ignite.cache.store.jdbc.JdbcTypeField">
<constructor-arg>
<util:constant static-field="java.sql.Types.DOUBLE"/>
</constructor-arg>
<constructor-arg value="R_AMT"/>
<constructor-arg value="java.lang.Double"/>
<constructor-arg value="rAmt"/>
</bean>
</list>
</property>
</bean>
</list>
</property>
</bean>
</property>
<property name="readThrough" value="true"/>
<property name="writeThrough" value="true"/>
<property name="writeBehindEnabled" value="true"/>
<property name="writeBehindBatchSize" value="500"/>
<property name="writeBehindFlushSize" value="1000"/>
<property name="writeBehindFlushFrequency" value="60000"/>
<property name="queryEntities">
<list>
<bean class="org.apache.ignite.cache.QueryEntity">
<property name="keyType" value="java.lang.Integer"/>
<property name="valueType" value="com.gmail.testcluster.model.Persons"/>
<property name="fields">
<map>
<entry key="class_no" value="java.lang.Integer"/>
<entry key="name" value="java.lang.String"/>
<entry key="age" value="java.lang.Integer"/>
<entry key="rAmt" value="java.lang.Double"/>
</map>
</property>
<property name="aliases">
<map>
<entry key="rAmt" value="R_AMT"/>
</map>
</property>
<!-- <property name="indexes">
<list>
<bean class="org.apache.ignite.cache.QueryIndex">
<property name="name" value="queryindexing"/>
<property name="indexType" value="SORTED"/>
<property name="fields">
<map>
<entry key="class_no" value="true"/>
</map>
</property>
</bean>
</list>
</property> -->
</bean>
</list>
</property>
</bean>
</list>
</property>
</bean>
I am not sure where the problem is; can someone take a look? Thanks!
When the DataStreamer's allowOverwrite property is false (the default), the persistent store is skipped.
The cache store is invoked only when allowOverwrite is true. Link to doc
If you want to use readThrough, then also use writeThrough. You can read about these modes in the doc.
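For illustration, here is a minimal sketch of the interplay the answer describes, reusing the names from the question (RemoteIgniteCluster-client.xml, PersonsCache). Note that the CacheJdbcPojoStore in the question maps values to com.gmail.testcluster.model.Persons, so the streamed values would need to be Persons objects rather than Integers or Strings:
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.Ignition;

public class StreamerStoreSketch {
    public static void main(String[] args) {
        try (Ignite ignite = Ignition.start("RemoteIgniteCluster-client.xml");
             IgniteDataStreamer<Integer, Object> stmr = ignite.dataStreamer("PersonsCache")) {

            // With the default allowOverwrite(false) the streamer bypasses the cache store,
            // so the write-behind JDBC store is never called.
            stmr.allowOverwrite(true);

            // To deliberately bypass the store during a bulk preload instead:
            // stmr.skipStore(true);

            // Stream values of the type the store is configured for, e.g.:
            // stmr.addData(person.getId(), person);  // where person is a Persons instance
        }
    }
}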

Spring-WS WS-Security LDAP Authentication

I'm having trouble getting LDAP authentication working with Spring-WS WS-Security. I've found several examples of how to do this; they use different beans, and some even write their own authenticators, but none seem to have all the necessary detail to get this working. Since there appear to be several different approaches, I'm also unsure which is best. I've tried both the wss4j (Apache) and xwss (Sun) security providers and get similar results. I know the WS-Security piece is working (it works fine without the LDAP piece using the SimplePasswordValidationCallbackHandler), and the LDAP context is even able to retrieve the user from the LDAP repository, but in the end the password authentication fails. Also, I don't see why it would matter, but I'm using Active Directory as my LDAP provider...
<sws:interceptors>
<bean class="com.xxxxx.xxxxxx.xxxx.controller.interceptors.EcrsPayloadLoggingInterceptor"/>
<bean class="org.springframework.ws.soap.security.xwss.XwsSecurityInterceptor">
<property name="policyConfiguration" value="classpath:securityPolicy.xml" />
<property name="callbackHandlers">
<list>
<ref bean="authenticationHandler"/>
</list>
</property>
</bean>
</sws:interceptors>
<bean id="securityContextSource" class="org.springframework.security.ldap.DefaultSpringSecurityContextSource">
<constructor-arg value="ldap://localhost:389/DC=xxxx,DC=xxxx,DC=local"/>
<property name="userDn" value="CN=user1,CN=Users,DC=xxxx,DC=xxxxx,DC=local"/>
<property name="password" value="password1"/>
</bean>
<bean id="ldapUserSearch" class="org.springframework.security.ldap.search.FilterBasedLdapUserSearch">
<constructor-arg name="searchBase" value=""/>
<constructor-arg name="searchFilter" value="(sAMAccountName={0})"/>
<constructor-arg name="contextSource" ref="apacheContextSource"/>
<property name="searchSubtree" value="true"/>
</bean>
<bean id="ldapAuthoritiesPopulator" class="org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator">
<constructor-arg name="contextSource" ref="apacheContextSource"/>
<constructor-arg name="groupSearchBase" value="CN=Users"/>
<property name="groupRoleAttribute" value="CN"/>
</bean>
<bean id="ldapUserDetailsService" class="org.springframework.security.ldap.userdetails.LdapUserDetailsService">
<constructor-arg name="userSearch" ref="ldapUserSearch"/>
<constructor-arg name="authoritiesPopulator" ref="ldapAuthoritiesPopulator"/>
</bean>
<bean id="authenticationHandler" class="org.springframework.ws.soap.security.xwss.callback.SpringDigestPasswordValidationCallbackHandler">
<property name="userDetailsService" ref="ldapUserDetailsService"/>
</bean>
securityPolicy.xml:
<xwss:SecurityConfiguration xmlns:xwss="http://java.sun.com/xml/ns/xwss/config" dumpMessages="true" >
<xwss:RequireTimestamp maxClockSkew="60" timestampFreshnessLimit="300"/>
<xwss:RequireUsernameToken passwordDigestRequired="false" nonceRequired="true"/>
</xwss:SecurityConfiguration>
Here's the solution I found. The key change is replacing the digest callback handler with SpringPlainTextPasswordValidationCallbackHandler backed by an LDAP authentication manager, since bind authentication against Active Directory needs the clear-text password:
<sws:interceptors>
<bean class="com.xxxx.xxxxx.xxxx.controller.interceptors.EcrsPayloadLoggingInterceptor"/>
<bean class="org.springframework.ws.soap.security.xwss.XwsSecurityInterceptor">
<property name="policyConfiguration" value="classpath:securityPolicy.xml" />
<property name="callbackHandlers">
<list>
<ref bean="authenticationHandler"/>
</list>
</property>
</bean>
</sws:interceptors>
<bean id="authenticationHandler" class="org.springframework.ws.soap.security.xwss.callback.SpringPlainTextPasswordValidationCallbackHandler">
<property name="authenticationManager" ref="authManager" />
</bean>
<s:authentication-manager id="authManager">
<s:authentication-provider ref='ldapAuthProvider'/>
</s:authentication-manager>
<bean id="ldapAuthProvider" class="org.springframework.security.ldap.authentication.LdapAuthenticationProvider">
<constructor-arg>
<bean class="org.springframework.security.ldap.authentication.BindAuthenticator">
<constructor-arg ref="securityContextSource" />
<property name="userSearch" ref="ldapUserSearch"/>
</bean>
</constructor-arg>
<constructor-arg name="authoritiesPopulator" ref="ldapAuthoritiesPopulator"/>
</bean>
<bean id="securityContextSource" class="org.springframework.security.ldap.DefaultSpringSecurityContextSource">
<constructor-arg value="ldap://localhost:389/DC=xxx,DC=xxxxx,DC=local"/>
<property name="userDn" value="CN=xxxxxx,CN=xxxx,DC=xxx,DC=xxxx,DC=local"/>
<property name="password" value="xxxxxxx"/>
</bean>
<bean id="ldapUserSearch" class="org.springframework.security.ldap.search.FilterBasedLdapUserSearch">
<constructor-arg name="searchBase" value=""/>
<constructor-arg name="searchFilter" value="(sAMAccountName={0})"/>
<constructor-arg name="contextSource" ref="securityContextSource"/>
<property name="searchSubtree" value="true"/>
</bean>
<bean id="ldapAuthoritiesPopulator" class="org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator">
<constructor-arg name="contextSource" ref="securityContextSource"/>
<constructor-arg name="groupSearchBase" value="CN=Users"/>
<property name="groupRoleAttribute" value="CN"/>
</bean>
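For reference, roughly the same wiring in Spring Java config. This is only a sketch: the URL, DNs and password are placeholders, and constructor signatures may differ slightly between Spring Security LDAP versions:
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.ldap.DefaultSpringSecurityContextSource;
import org.springframework.security.ldap.authentication.BindAuthenticator;
import org.springframework.security.ldap.authentication.LdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;

@Configuration
public class LdapWsSecurityConfig {

    @Bean
    public DefaultSpringSecurityContextSource securityContextSource() {
        DefaultSpringSecurityContextSource ctx =
            new DefaultSpringSecurityContextSource("ldap://localhost:389/DC=example,DC=local");
        ctx.setUserDn("CN=serviceAccount,CN=Users,DC=example,DC=local");
        ctx.setPassword("secret");
        return ctx;
    }

    @Bean
    public LdapAuthenticationProvider ldapAuthProvider(DefaultSpringSecurityContextSource ctx) {
        // Bind authentication: the user's clear-text password from the UsernameToken is used
        // to bind against Active Directory.
        BindAuthenticator authenticator = new BindAuthenticator(ctx);
        authenticator.setUserSearch(new FilterBasedLdapUserSearch("", "(sAMAccountName={0})", ctx));

        DefaultLdapAuthoritiesPopulator populator = new DefaultLdapAuthoritiesPopulator(ctx, "CN=Users");
        populator.setGroupRoleAttribute("CN");

        return new LdapAuthenticationProvider(authenticator, populator);
    }
}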