Stopping "Closing JPA EntityManagerFactory shutdown" - sql

I'm trying to connect my Spring Boot application to an Azure SQL cloud database, but the application now shuts down without reporting any errors. I want it to stay up and listen to my Azure SQL database. For now, I just want to verify that Spring Boot is actually connected to the Azure database. Please tell me if there is a good way to do that.
"C:\Program Files\Java\jdk1.8.0_231\bin\java.exe" -XX:TieredStopAtLevel=1 -noverify -Dspring.output.ansi.enabled=always -Dcom.sun.management.jmxremote -Dspring.jmx.enabled=true -Dspring.liveBeansView.mbeanDomain -Dspring.application.admin.enabled=true "-javaagent:C:\Program Files\JetBrains\IntelliJ IDEA 2019.2.4\lib\idea_rt.jar=64378:C:\Program Files\JetBrains\IntelliJ IDEA 2019.2.4\bin" -Dfile.encoding=UTF-8 -classpath "C:\Program Files\Java\jdk1.8.0_231\jre\lib\...;D:\Private\IIT\L5\Sem - 2\SDGP\Backend_2\Mealize with Azure\Mealize\target\classes;... (JDK 8 runtime jars plus Maven dependencies, including azure-active-directory-spring-boot-starter 2.2.0, azure-spring-boot-starter 2.2.0, spring-boot 2.2.5.RELEASE, spring-boot-starter-data-jpa 2.2.5.RELEASE, hibernate-core 5.4.12.Final, HikariCP 3.4.2 and mssql-jdbc 7.4.1.jre8; full classpath trimmed)" project.mealize.Mealize.MealizeApplication
  .   ____          _            __ _ _
 /\\ / ___'_ __ _ _(_)_ __  __ _ \ \ \ \
( ( )\___ | '_ | '_| | '_ \/ _` | \ \ \ \
 \\/  ___)| |_)| | | | | || (_| |  ) ) ) )
  '  |____| .__|_| |_|_| |_\__, | / / / /
 =========|_|==============|___/=/_/_/_/
:: Spring Boot :: (v2.2.5.RELEASE)
2020-03-16 13:20:57.131 INFO 14064 --- [ main] p.mealize.Mealize.MealizeApplication : Starting MealizeApplication on DESKTOP-6C6BSF1 with PID 14064 (started by User in D:\Private\IIT\L5\Sem - 2\SDGP\Backend_2\Mealize with Azure\Mealize)
2020-03-16 13:20:57.138 INFO 14064 --- [ main] p.mealize.Mealize.MealizeApplication : No active profile set, falling back to default profiles: default
2020-03-16 13:20:58.543 INFO 14064 --- [ main] .s.d.r.c.RepositoryConfigurationDelegate : Bootstrapping Spring Data JPA repositories in DEFAULT mode.
2020-03-16 13:20:58.708 INFO 14064 --- [ main] .s.d.r.c.RepositoryConfigurationDelegate : Finished Spring Data repository scanning in 149ms. Found 1 JPA repository interfaces.
2020-03-16 13:20:59.940 INFO 14064 --- [ main] com.zaxxer.hikari.HikariDataSource : HikariPool-1 - Starting...
2020-03-16 13:21:01.491 INFO 14064 --- [ main] com.zaxxer.hikari.HikariDataSource : HikariPool-1 - Start completed.
2020-03-16 13:21:01.565 INFO 14064 --- [ main] o.hibernate.jpa.internal.util.LogHelper : HHH000204: Processing PersistenceUnitInfo [name: default]
2020-03-16 13:21:01.670 INFO 14064 --- [ main] org.hibernate.Version : HHH000412: Hibernate ORM core version 5.4.12.Final
2020-03-16 13:21:01.889 INFO 14064 --- [ main] o.hibernate.annotations.common.Version : HCANN000001: Hibernate Commons Annotations {5.1.0.Final}
2020-03-16 13:21:02.170 INFO 14064 --- [ main] org.hibernate.dialect.Dialect : HHH000400: Using dialect: org.hibernate.dialect.MySQL5InnoDBDialect
2020-03-16 13:21:03.730 INFO 14064 --- [ main] o.h.e.t.j.p.i.JtaPlatformInitiator : HHH000490: Using JtaPlatform implementation: [org.hibernate.engine.transaction.jta.platform.internal.NoJtaPlatform]
2020-03-16 13:21:03.745 INFO 14064 --- [ main] j.LocalContainerEntityManagerFactoryBean : Initialized JPA EntityManagerFactory for persistence unit 'default'
2020-03-16 13:21:04.626 INFO 14064 --- [ main] p.mealize.Mealize.MealizeApplication : Started MealizeApplication in 8.354 seconds (JVM running for 10.429)
2020-03-16 13:21:04.635 INFO 14064 --- [extShutdownHook] j.LocalContainerEntityManagerFactoryBean : Closing JPA EntityManagerFactory for persistence unit 'default'
2020-03-16 13:21:04.641 INFO 14064 --- [extShutdownHook] com.zaxxer.hikari.HikariDataSource : HikariPool-1 - Shutdown initiated...
2020-03-16 13:21:05.041 INFO 14064 --- [extShutdownHook] com.zaxxer.hikari.HikariDataSource : HikariPool-1 - Shutdown completed.
Process finished with exit code 0
application.properties
spring.jpa.database-platform=org.hibernate.dialect.MySQL5InnoDBDialect
spring.datasource.url=jdbc:sqlserver://mealizeserver.database.windows.net:1433;database=Mealize;user=dbman#mealizeserver;password=pword;encrypt=true;trustServerCertificate=false;hostNameInCertificate=*.database.windows.net;loginTimeout=30;
UserDetails.java
package project.mealize.Mealize.model;

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;

@Entity
@Table(name = "UserDetails")
public class UserDetails {

    @Id
    private String email;
    private String username;
    private String password;
    private int age;

    public UserDetails() {}

    public UserDetails(String email, String username, String password, int age) {
        this.email = email;
        this.username = username;
        this.password = password;
        this.age = age;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }
}
UserRepository.java
package project.mealize.Mealize.repository;

import org.springframework.data.repository.CrudRepository;
import project.mealize.Mealize.model.UserDetails;

public interface UserRepository extends CrudRepository<UserDetails, String> {
}

I had the same problem, and I fixed it by removing this line from the application.properties file:
spring.main.web-application-type=none
With that property set, Spring Boot does not start an embedded web server, so once the application context has been initialized the main thread simply finishes and the JVM exits cleanly (exit code 0), which is exactly the shutdown sequence shown in your log.
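Also note that your log already shows "HikariPool-1 - Start completed", and HikariCP opens a connection when the pool starts, so the database was in fact reachable. If you want an explicit check that Spring Boot can talk to the Azure SQL database, a small startup probe works well. Below is a minimal sketch (the class name and the 5-second validity timeout are just illustrative) that borrows the auto-configured DataSource and pings the database once at startup:

import java.sql.Connection;
import javax.sql.DataSource;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class ConnectivityCheck {

    // Runs once at startup; throws (and aborts startup) if Azure SQL is unreachable.
    @Bean
    public CommandLineRunner verifyDatabaseConnection(DataSource dataSource) {
        return args -> {
            try (Connection connection = dataSource.getConnection()) {
                // isValid() sends a lightweight ping over the JDBC connection.
                System.out.println("Azure SQL connection valid: " + connection.isValid(5));
            }
        };
    }
}

One more thing worth checking: the log line "Using dialect: org.hibernate.dialect.MySQL5InnoDBDialect" comes from your spring.jpa.database-platform setting, but the URL points at SQL Server, so a SQL Server dialect such as org.hibernate.dialect.SQLServer2012Dialect is probably what you want there.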

Related

Reactive Spring Webflux exception while finding by id

I am writing a simple CRUD reactive app with WebFlux. I have Author and Book repositories and services, and I am getting the exception below.
Book Repository
@Repository
public interface BookRepository extends R2dbcRepository<Book, Long> {
}
Author Repository
@Repository
public interface AuthorRepository extends R2dbcRepository<Author, Long> {
}
BookService methods
@Override
public Mono<BookDto> findById(Long id) {
    return bookRepository
            .findById(id)
            .log()
            .map(BookDto::toDto)
            .flatMap(dto -> authorRepository.findAllByBookId(dto.getId())
                    .map(AuthorDto::toDto)
                    .collectList()
                    .map(authors -> {
                        dto.setAuthors(authors);
                        return dto;
                    })
            );
}
findById method in BookController
@RequestMapping(value = "/{id}", method = RequestMethod.GET)
public Mono<BookDto> findById(@PathVariable("id") Long id) {
    return bookService.findById(id);
}
I send this request:
###
GET http://localhost:8085/book/4
and this exception occurs:
2023-01-29T14:30:52.193+05:00 ERROR 8328 --- [actor-tcp-nio-1] a.w.r.e.AbstractErrorWebExceptionHandler : [56530bed-1] 500 Server Error for HTTP GET "/book"
java.lang.IndexOutOfBoundsException: Binding index 0 when only 0 parameters are expected
at io.r2dbc.postgresql.client.Binding.add(Binding.java:75) ~[r2dbc-postgresql-1.0.0.RELEASE.jar:1.0.0.RELEASE]
Suppressed: reactor.core.publisher.FluxOnAssembly$OnAssemblyException:
Error has been observed at the following site(s):
*__checkpoint ⇢ Handler uz.md.bookservicewithwebflux.controller.BookController#findAll() [DispatcherHandler]
*__checkpoint ⇢ HTTP GET "/book" [ExceptionHandlingWebHandler]
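Two details in the trace are worth a close look. The checkpoint names BookController#findAll() handling HTTP GET "/book", so the failing request is actually the findAll endpoint rather than /book/4, and "Binding index 0 when only 0 parameters are expected" means a parameter is being bound to SQL that contains no bind marker. findAllByBookId is not shown above; if it is meant to walk a relation (for example a book_author join table), note that Spring Data R2DBC cannot derive join queries from method names, and a declared query with an explicit placeholder is needed. A sketch under that assumption (the table and column names are guesses):

import org.springframework.data.r2dbc.repository.Query;
import org.springframework.data.r2dbc.repository.R2dbcRepository;
import org.springframework.stereotype.Repository;
import reactor.core.publisher.Flux;

@Repository
public interface AuthorRepository extends R2dbcRepository<Author, Long> {

    // Declared query with an explicit :bookId bind marker; Spring Data R2DBC
    // cannot derive a join query from the method name alone.
    @Query("SELECT a.* FROM author a " +
           "JOIN book_author ba ON ba.author_id = a.id " +
           "WHERE ba.book_id = :bookId")
    Flux<Author> findAllByBookId(Long bookId);
}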

Redis - Read old data from a key while it is being written

Reading keys in Redis while they are being written produces a read failure, as if the key were not present in the cache. Is there any connection configuration in Spring/Jedis that allows clients to avoid the read failure and return the old data of a key while the new data is being written?
I have seen that some people use a local cache to avoid this issue, but I am looking for a simpler solution. In my case, reading the old value would not be a concern.
Artifacts:
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
</dependency>
Spring configuration class:
@Configuration
@EntityScan(basePackages = { "com.entities.redis" })
@EnableRedisRepositories("com.repository.redis")
public class RedisConfig {

    @Value("${redis.server}")
    private String server;

    @Value("${redis.port}")
    private Integer port;

    @Value("${redis.pool-size}")
    private Integer poolSize;

    @Value("${redis.database-index}")
    private Integer databaseIndex;

    @Bean
    public JedisPoolConfig jedisPoolConfig() {
        JedisPoolConfig jedisPoolConfig = new JedisPoolConfig();
        jedisPoolConfig.setMaxTotal(poolSize);
        return jedisPoolConfig;
    }

    @Bean
    public JedisClientConfiguration jedisClientConfiguration(JedisPoolConfig jedisPoolConfig) {
        return JedisClientConfiguration
                .builder()
                .usePooling()
                .poolConfig(jedisPoolConfig)
                .build();
    }

    @Bean
    public JedisConnectionFactory jedisConnectionFactory(JedisClientConfiguration jedisClientConfiguration) {
        RedisStandaloneConfiguration config = new RedisStandaloneConfiguration(server, port);
        config.setDatabase(databaseIndex);
        return new JedisConnectionFactory(config, jedisClientConfiguration);
    }

    @Bean
    public RedisTemplate<?, ?> redisTemplate(JedisConnectionFactory jedisConnectionFactory) {
        RedisTemplate<byte[], byte[]> template = new RedisTemplate<byte[], byte[]>();
        template.setConnectionFactory(jedisConnectionFactory);
        return template;
    }
}
Repository class
@Repository
public interface DataBeanRepository extends CrudRepository<DataBean, String> {
}
Entity class
#RedisHash("DataBean")
public class DataBean {
#Id
private String data;
public DataBean(String data) {
this.data = data;
}
public String getData() {
return data;
}
}
Test class
@SpringBootTest(webEnvironment = WebEnvironment.MOCK)
public class RedisTest {

    private static Logger LOGGER = LoggerFactory.getLogger(RedisTest.class);

    @Autowired
    private DataBeanRepository repository;

    @Test
    public void testPutAndGet() throws InterruptedException {
        final Duration TIME_TO_RUN = Duration.of(1, ChronoUnit.MINUTES);
        final int threadInternalTimeMilli = 200;
        final List<String> dataStrings = List.of("TEST0001", "TEST0002");
        // Start the consumer thread
        getConsumerThread(dataStrings, threadInternalTimeMilli).start();
        // Producer thread
        getProducerThread(dataStrings, threadInternalTimeMilli).start();
        // Wait until finishing...
        Instant endTime = Instant.now().plusSeconds(TIME_TO_RUN.getSeconds());
        while (Instant.now().isBefore(endTime)) {
            Thread.sleep(100);
        }
    }

    private Thread getConsumerThread(List<String> serials, int threadInternalTimeMilli) {
        return new Thread(() -> {
            LOGGER.info("Starting consumer thread...");
            while (true) {
                List<String> foundStrings = StreamSupport.stream(repository.findAllById(serials).spliterator(), false).map(DataBean::getData).collect(Collectors.toList());
                List<String> notFoundStrings = serials.stream().filter(sn -> !foundStrings.contains(sn)).collect(Collectors.toList());
                if (CollectionUtils.nonEmpty(notFoundStrings)) {
                    LOGGER.error("Reading ERROR > Not found strings ({})", notFoundStrings.stream().collect(Collectors.joining(",")));
                } else {
                    LOGGER.info("Reading OK (All were found)");
                }
                // Thread sleep time
                try {
                    Thread.sleep(threadInternalTimeMilli);
                } catch (InterruptedException e) {
                    throw new IllegalStateException();
                }
            }
        });
    }

    private Thread getProducerThread(List<String> serials, int threadInternalTimeMilli) {
        return new Thread(() -> {
            LOGGER.info("Starting producer thread...");
            while (true) {
                serials.stream().forEach(data -> {
                    LOGGER.info("Saving data: '{}'", data);
                    repository.save(new DataBean(data));
                });
                // Thread sleep time
                try {
                    Thread.sleep(threadInternalTimeMilli);
                } catch (InterruptedException e) {
                    throw new IllegalStateException();
                }
            }
        });
    }
}
When I run the program I get:
2021-09-10 11:31:33.595 INFO 61439 --- [ Thread-2] : Starting consumer thread...
2021-09-10 11:31:33.596 INFO 61439 --- [ Thread-3] : Starting producer thread...
2021-09-10 11:31:33.596 INFO 61439 --- [ Thread-3] : Saving data: 'TEST0001'
2021-09-10 11:31:34.505 INFO 61439 --- [ Thread-3] : Saving data: 'TEST0002'
2021-09-10 11:31:34.506 INFO 61439 --- [ Thread-2] : Reading OK (All were found)
2021-09-10 11:31:35.295 INFO 61439 --- [ Thread-3] : Saving data: 'TEST0001'
2021-09-10 11:31:35.297 INFO 61439 --- [ Thread-2] : Reading OK (All were found)
2021-09-10 11:31:35.884 INFO 61439 --- [ Thread-3] : Saving data: 'TEST0002'
2021-09-10 11:31:36.088 ERROR 61439 --- [ Thread-2] : Reading ERROR > Not found strings (TEST0001)
2021-09-10 11:31:36.681 INFO 61439 --- [ Thread-3] : Saving data: 'TEST0001'
2021-09-10 11:31:36.877 INFO 61439 --- [ Thread-2] : Reading OK (All were found)
2021-09-10 11:31:37.274 INFO 61439 --- [ Thread-3] : Saving data: 'TEST0002'
2021-09-10 11:31:37.660 ERROR 61439 --- [ Thread-2] : Reading ERROR > Not found strings (TEST0002)
2021-09-10 11:31:38.061 INFO 61439 --- [ Thread-3] : Saving data: 'TEST0001'
2021-09-10 11:31:38.447 INFO 61439 --- [ Thread-2] : Reading OK (All were found)
2021-09-10 11:31:38.649 INFO 61439 --- [ Thread-3] : Saving data: 'TEST0002'
2021-09-10 11:31:39.233 INFO 61439 --- [ Thread-2] : Reading OK (All were found)
2021-09-10 11:31:39.433 INFO 61439 --- [ Thread-3] : Saving data: 'TEST0001'
2021-09-10 11:31:40.035 INFO 61439 --- [ Thread-3] : Saving data: 'TEST0002'
2021-09-10 11:31:40.037 ERROR 61439 --- [ Thread-2] : Reading ERROR > Not found strings (TEST0001)
Thank you,
Regards
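A likely explanation (an assumption based on how Spring Data Redis repositories persist @RedisHash entities, not something the logs above prove): repository.save() rewrites the whole hash, issuing a DEL for the key followed by the writes, and those commands are not wrapped in MULTI/EXEC, so a read that lands between them sees the key as missing. A single Redis command, by contrast, is atomic, so one simple workaround is to bypass the repository for this hot path and write the value with one command through a template. A minimal sketch (the component name and the "DataBean:" key prefix, which mirrors the keyspace used above, are illustrative):

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Component;

@Component
public class DataBeanWriter {

    @Autowired
    private StringRedisTemplate redisTemplate;

    // SET is a single atomic Redis command, so concurrent readers see either
    // the previous value or the new one - never a missing key.
    public void save(String id, String value) {
        redisTemplate.opsForValue().set("DataBean:" + id, value);
    }
}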

Around annotation executed twice using WebFlux

I'm facing a weird behaviour while using AOP with AspectJ.
Basically, the @Around method is called either once or twice, and while debugging I can't find the reason why it is executed twice (I mean, what triggers the second execution of the method).
Here is some code:
@Aspect
@Slf4j
public class ReactiveRedisCacheAspect {

    @Pointcut("@annotation(com.xxx.xxx.cache.aop.annotations.ReactiveRedisCacheable)")
    public void cacheablePointCut() {}

    @Around("cacheablePointCut()")
    public Object cacheableAround(final ProceedingJoinPoint proceedingJoinPoint) {
        log.debug("ReactiveRedisCacheAspect cacheableAround.... - {}", proceedingJoinPoint);
        MethodSignature methodSignature = (MethodSignature) proceedingJoinPoint.getSignature();
        Method method = methodSignature.getMethod();
        Class<?> returnTypeName = method.getReturnType();
        Duration duration = Duration.ofHours(getDuration(method));
        String redisKey = getKey(method, proceedingJoinPoint);
        if (returnTypeName.isAssignableFrom(Flux.class)) {
            log.debug("returning Flux");
            return cacheRepository.hasKey(redisKey)
                    .filter(found -> found)
                    .flatMapMany(found -> cacheRepository.findByKey(redisKey))
                    .flatMap(found -> saveFlux(proceedingJoinPoint, redisKey, duration));
        } else if (returnTypeName.isAssignableFrom(Mono.class)) {
            log.debug("Returning Mono");
            return cacheRepository.hasKey(redisKey)
                    .flatMap(found -> {
                        if (found) {
                            return cacheRepository.findByKey(redisKey);
                        } else {
                            return saveMono(proceedingJoinPoint, redisKey, duration);
                        }
                    });
        } else {
            throw new RuntimeException("non reactive object supported (Mono,Flux)");
        }
    }

    private String getKey(final Method method, final ProceedingJoinPoint proceedingJoinPoint) {
        ReactiveRedisCacheable annotation = method.getAnnotation(ReactiveRedisCacheable.class);
        String cacheName = annotation.cacheName();
        String key = annotation.key();
        cacheName = (String) AspectSupportUtils.getKeyValue(proceedingJoinPoint, cacheName);
        key = (String) AspectSupportUtils.getKeyValue(proceedingJoinPoint, key);
        return cacheName + "_" + key;
    }
}
public class AspectSupportUtils {

    private static final ExpressionEvaluator evaluator = new ExpressionEvaluator();

    public static Object getKeyValue(JoinPoint joinPoint, String keyExpression) {
        if (keyExpression.contains("#") || keyExpression.contains("'")) {
            return getKeyValue(joinPoint.getTarget(), joinPoint.getArgs(), joinPoint.getTarget().getClass(),
                    ((MethodSignature) joinPoint.getSignature()).getMethod(), keyExpression);
        }
        return keyExpression;
    }

    private static Object getKeyValue(Object object, Object[] args, Class<?> clazz, Method method, String keyExpression) {
        if (StringUtils.hasText(keyExpression)) {
            EvaluationContext evaluationContext = evaluator.createEvaluationContext(object, clazz, method, args);
            AnnotatedElementKey methodKey = new AnnotatedElementKey(method, clazz);
            return evaluator.key(keyExpression, methodKey, evaluationContext);
        }
        return SimpleKeyGenerator.generateKey(args);
    }
}
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface ReactiveRedisCacheable {
    String key();
    String cacheName();
    long duration() default 1L;
}
@RestController
@RequestMapping("api/pub/v1")
public class TestRestController {

    @ReactiveRedisCacheable(cacheName = "test-cache", key = "#name", duration = 1L)
    @GetMapping(value = "test")
    public Mono<String> getName(@RequestParam(value = "name") String name) {
        return Mono.just(name);
    }
}
@Configuration
public class Config {

    @Bean
    public ReactiveRedisCacheAspect reactiveRedisCache(ReactiveRedisCacheAspect reactiveRedisCacheAspect) {
        return reactiveRedisCacheAspect;
    }
}
logs:
ReactiveRedisCacheAspect cacheableAround.... - {}execution(Mono com.abc.def.xxx.rest.TestRestcontroller.getName(String))
2021-06-04 15:36:23.096 INFO [fo-bff,f688025287be7e7c,f688025287be7e7c] 20060 --- [ctor-http-nio-3] c.m.s.c.a.i.ReactiveRedisCacheAspect : Returning Mono
2021-06-04 15:36:23.097 INFO [fo-bff,f688025287be7e7c,f688025287be7e7c] 20060 --- [ctor-http-nio-3] c.m.s.c.repository.CacheRepositoryImpl : searching key: (bff_pippo)
ReactiveRedisCacheAspect cacheableAround.... - {}execution(Mono com.abc.def.xxx.rest.TestRestcontroller.getName(String))
2021-06-04 15:36:23.236 INFO [fo-bff,f688025287be7e7c,f688025287be7e7c] 20060 --- [ioEventLoop-7-2] c.m.s.c.a.i.ReactiveRedisCacheAspect : Returning Mono
2021-06-04 15:36:23.236 INFO [fo-bff,f688025287be7e7c,f688025287be7e7c] 20060 --- [ioEventLoop-7-2] c.m.s.c.repository.CacheRepositoryImpl : searching key: (bff_pippo)
2021-06-04 15:36:23.250 INFO [fo-bff,f688025287be7e7c,f688025287be7e7c] 20060 --- [ioEventLoop-7-2] c.m.s.c.repository.CacheRepositoryImpl : saving obj: (key:bff_pippo) (expiresIn:3600s)
2021-06-04 15:36:23.275 INFO [fo-bff,f688025287be7e7c,f688025287be7e7c] 20060 --- [ioEventLoop-7-2] c.m.s.c.repository.CacheRepositoryImpl : saving obj: (key:bff_pippo) (expiresIn:3600s)
So far I would have expected cacheableAround to be executed only once, but what happens is a bit weird: if the object is present in Redis, the method is executed only once, but if it is not present, the method is executed twice, which doesn't make sense. Moreover, it should be the business logic that decides what to do inside the method.
Thanks in advance!
You did not mention whether you use native AspectJ via load- or compile-time weaving, or simply Spring AOP. Because I see no @Component annotation on your aspect, it might well be native AspectJ, unless you configure your beans via @Bean factory methods in a configuration class or XML.
Assuming that you are using full AspectJ, a common problem for newbies coming from Spring AOP is that they are not used to the fact that AspectJ intercepts not only execution joinpoints but also call ones. This leads to the superficial perception that the same joinpoint is intercepted twice, but in reality it is once for the method call (in the class from which the call is made) and once for the method execution (in the class where the target method resides). This is easy to determine: at the beginning of your advice method, simply log the joinpoint. In your case:
System.out.println(proceedingJoinPoint);
If then on the console you see something like
call(public void org.acme.MyClass.myMethod())
execution(public void org.acme.MyClass.myMethod())
then you know what is happening.
In case you use Spring AOP, it is probably an issue with the aspect itself, or Redis caching behaviour that differs from your expectation.
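If it does turn out to be native AspectJ weaving both call and execution joinpoints, the usual remedy is to restrict the pointcut to executions. A sketch of how the pointcut in ReactiveRedisCacheAspect above could be narrowed (same annotation, just an added execution() designator):

// Limiting the pointcut to execution joinpoints stops the advice from also
// firing at the call site under native AspectJ weaving.
@Pointcut("execution(* *(..)) && @annotation(com.xxx.xxx.cache.aop.annotations.ReactiveRedisCacheable)")
public void cacheablePointCut() {}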

Is there a way to execute a WebFilter only for specific URLs in Spring WebFlux

I have a WebFilter which I would like to exclude for a couple of URLs.
I have used PathPattern, which can exclude one URL but not more than that.
private final PathPattern pathPattern;

public MyFilter() {
    pathPattern = new PathPatternParser().parse("/url");
}

@Override
public Mono<Void> filter(ServerWebExchange exchange, WebFilterChain chain) {
    if (pathPattern.matches(exchange.getRequest().getPath().pathWithinApplication())) {
        return chain.filter(exchange); // excluded path: skip the filter logic
    }
    // ... filter logic for all other paths ...
    return chain.filter(exchange);
}
There are multiple ways to do it; below is one of them:
@Slf4j
@Component
public class LogFilter implements WebFilter {

    List<PathPattern> pathPatternList;

    public LogFilter() {
        PathPattern pathPattern1 = new PathPatternParser().parse("/admin");
        PathPattern pathPattern2 = new PathPatternParser().parse("/emp");
        pathPatternList = new ArrayList<>();
        pathPatternList.add(pathPattern1);
        pathPatternList.add(pathPattern2);
    }

    @Override
    public Mono<Void> filter(ServerWebExchange exchange, WebFilterChain chain) {
        RequestPath path = exchange.getRequest().getPath();
        if (pathPatternList.stream().anyMatch(pathPattern -> pathPattern.matches(path.pathWithinApplication()))) {
            log.info(path.toString() + " path excluded");
            return chain.filter(exchange);
        }
        log.info("executing logic for " + path.toString() + " path");
        return chain.filter(exchange);
    }
}
For the URLs /admin and /emp it will skip the filter logic; for other URLs it will execute the logic. Check the logs below:
2019-05-10 00:20:55.660 INFO 15837 --- [ctor-http-nio-3] o.l.reactiveapp.filter.LogFilter : /admin path excluded
2019-05-10 00:20:55.661 INFO 15837 --- [ctor-http-nio-3] o.l.r.controller.AdminController : get admin
2019-05-10 00:20:58.361 INFO 15837 --- [ctor-http-nio-3] o.l.reactiveapp.filter.LogFilter : /emp path excluded
2019-05-10 00:20:58.362 INFO 15837 --- [ctor-http-nio-3] o.l.r.controller.EmployeeController : get employee
2019-05-10 00:21:03.649 INFO 15837 --- [ctor-http-nio-3] o.l.reactiveapp.filter.LogFilter : executing logic for /messages/10 path
2019-05-10 00:21:03.651 INFO 15837 --- [ctor-http-nio-3] o.l.r.controller.StoresController : getting message details for id 10
I hope this answers your question. Thanks!
You can wrap your WebFilter with decorator that restricts inner WebFilter invocation by provided ServerWebExchangeMatcher.
@RequiredArgsConstructor
public class RestrictedWebFilterDecorator implements WebFilter {

    private final WebFilter inner;
    private final ServerWebExchangeMatcher restrictionMatcher;

    @Override
    public Mono<Void> filter(ServerWebExchange exchange, WebFilterChain chain) {
        return restrictionMatcher.matches(exchange)
                .flatMap(result -> result.isMatch()
                        ? inner.filter(exchange, chain)
                        : chain.filter(exchange)
                );
    }
}
Usage example:
@Order(Ordered.HIGHEST_PRECEDENCE)
@Bean
public WebFilter yourWebFilter() {
    return new RestrictedWebFilterDecorator(
            new YourWebFilter(),
            new NegatedServerWebExchangeMatcher(YOUR_EXCHANGE_MATCHER)
    );
}

Hadoop project---stuck on the step: File Output Committer Algorithm version is 1

I am a Hadoop newbie. I ran into a problem when running the code from this tutorial:
https://github.com/hortonworks/hadoop-tutorials/blob/master/Community/T09_Write_And_Run_Your_Own_MapReduce_Java_Program_Poll_Result_Analysis.md
The map-reduce process stops at the step below:
[main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[main] INFO org.apache.hadoop.conf.Configuration.deprecation - session.id is deprecated. Instead, use dfs.metrics.session-id
[main] INFO org.apache.hadoop.metrics.jvm.JvmMetrics - Initializing JVM Metrics with processName=JobTracker, sessionId=
[main] WARN org.apache.hadoop.mapreduce.JobResourceUploader - Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
[main] WARN org.apache.hadoop.mapreduce.JobResourceUploader - No job jar file set. User classes may not be found. See Job or Job#setJar(String).
[main] INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat - Total input paths to process : 4
[main] INFO org.apache.hadoop.mapreduce.JobSubmitter - number of splits:4
[main] INFO org.apache.hadoop.mapreduce.JobSubmitter - Submitting tokens for job: job_local61587531_0001
[main] INFO org.apache.hadoop.mapreduce.Job - The url to track the job: http://localhost:8080/
[Thread-19] INFO org.apache.hadoop.mapred.LocalJobRunner - OutputCommitter set in config null
[Thread-19] INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter - File Output Committer Algorithm version is 1
The code of map-reduce application is
public class VoteCountApplication extends Configured implements Tool {

    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(), new VoteCountApplication(), args);
        System.exit(res);
    }

    @Override
    public int run(String[] args) throws Exception {
        if (args.length != 2) {
            System.out.println("usage: [input] [output]");
            System.exit(-1);
        }
        Job job = Job.getInstance(new Configuration());
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setMapperClass(VoteCountMapper.class);
        job.setReducerClass(VoteCountReducer.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.setJarByClass(VoteCountApplication.class);
        job.submit();
        return 0;
    }
}
But if I use the main method from the WordCount example to run this project:
public class VoteCountApplication extends Configured implements Tool {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "vote count");
        job.setJarByClass(VoteCountApplication.class);
        job.setMapperClass(VoteCountMapper.class);
        job.setCombinerClass(VoteCountReducer.class);
        job.setReducerClass(VoteCountReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
That works perfectly! I don't know what the problem is in the tutorial code. Can anyone explain the difference between the two versions? Thanks.
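A likely explanation (based on how the two submission APIs behave, not verified against this tutorial): job.submit() is non-blocking - it hands the job to the (local) runner and returns immediately, so run() returns 0 and System.exit(0) tears the JVM down while the local job threads are still working, which is why the log stops right after the committer line. job.waitForCompletion(true) instead blocks until the job finishes and streams progress to the console. A minimal change to the tutorial's run() method:

// Instead of submitting asynchronously and returning immediately:
//     job.submit();
//     return 0;
// block until the job completes (passing true also prints progress):
return job.waitForCompletion(true) ? 0 : 1;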
Here is the Map and Reduce code:
public class VoteCountMapper extends Mapper<Object, Text, Text, IntWritable> {

    private final static IntWritable one = new IntWritable(1);

    @Override
    public void map(Object key, Text value, Context output) throws IOException,
            InterruptedException {
        // If more than one word is present, split using white space.
        String[] words = value.toString().split(" ");
        // Only the first word is the candidate name
        output.write(new Text(words[0]), one);
    }
}
public class VoteCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    @Override
    public void reduce(Text key, Iterable<IntWritable> values, Context output)
            throws IOException, InterruptedException {
        int voteCount = 0;
        for (IntWritable value : values) {
            voteCount += value.get();
        }
        output.write(key, new IntWritable(voteCount));
    }
}