I am using spring-boot-starter-parent:2.7.7.
I have the following WebClient configuration for calling SOAP web services:
@Bean
fun webClientXml(): WebClient {
    val httpClient = HttpClient.create()
        .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, 100000)
        .doOnConnected {
            it.addHandlerLast(ReadTimeoutHandler(100))
            it.addHandlerLast(WriteTimeoutHandler(100))
        }
        .resolver(DefaultAddressResolverGroup.INSTANCE)
    val exchangeStrategies = ExchangeStrategies.builder().codecs {
        it.defaultCodecs().maxInMemorySize(16 * 1024 * 1024)
        it.customCodecs().register(Jaxb2SoapEncoder())
        it.customCodecs().register(Jaxb2SoapDecoder())
    }.build()
    return WebClient.builder()
        .codecs { it.defaultCodecs().maxInMemorySize(16 * 1024 * 1024) }
        .clientConnector(ReactorClientHttpConnector(httpClient))
        .exchangeStrategies(exchangeStrategies)
        .build()
}
application.yml
spring:
  elasticsearch:
    webclient:
      max-in-memory-size: 512MB
  codec:
    max-in-memory-size: 512MB
config of Jaxb2SoapDecoder
class Jaxb2SoapDecoder : Jaxb2XmlDecoder() {

    override fun setMaxInMemorySize(byteCount: Int) {
        super.setMaxInMemorySize(16 * 1024 * 1024)
    }

    @Throws(DecodingException::class)
    override fun decode(dataBuffer: DataBuffer, targetType: ResolvableType, @Nullable mimeType: MimeType?, @Nullable hints: MutableMap<String, Any>?): Any {
        .....
    }
}
config of Jaxb2SoapEncoder
class Jaxb2SoapEncoder : Encoder<Any> {

    private fun encode(value: Any, bufferFactory: DataBufferFactory): Flux<DataBuffer> {
        return Mono.fromCallable {
            var release = true
            val buffer = bufferFactory.allocateBuffer(1000000000)
            .....
        }.flux()
    }
}
When I invoke an API, I get the error below:
org.springframework.web.reactive.function.client.WebClientResponseException:
200 OK from POST
xxxxxxxxx;
nested exception is
org.springframework.core.io.buffer.DataBufferLimitException: Exceeded
limit on max bytes to buffer : 262144
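For what it's worth, 262144 bytes is the framework's default 256 KB buffer limit, and defaultCodecs().maxInMemorySize(...) only configures the default codecs; a codec registered through customCodecs().register(...) keeps its own limit unless it is set explicitly (and the overridden setMaxInMemorySize above only takes effect if something actually calls it). Below is a minimal sketch of setting the limit on the decoder instance before registering it inside the existing webClientXml() bean; this is an assumption about the cause, not a verified fix:

// Sketch: raise the limit on the custom decoder explicitly before registration.
// maxInMemorySize maps to Jaxb2XmlDecoder's setMaxInMemorySize.
val soapDecoder = Jaxb2SoapDecoder().apply { maxInMemorySize = 16 * 1024 * 1024 }

val exchangeStrategies = ExchangeStrategies.builder().codecs {
    it.defaultCodecs().maxInMemorySize(16 * 1024 * 1024)
    it.customCodecs().register(Jaxb2SoapEncoder())
    it.customCodecs().register(soapDecoder)
}.build()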
Related
I am uploading a file using Kotlin WorkManager. In a CoroutineWorker, I upload the file with a suspend upload function. I want to observe the Retrofit upload progress and show it in the UI. I can see the progress state inside the request body, but I cannot observe it from the WorkManager side.
My request body class, where I can see the Retrofit progress state:
class ProgressRequestBody : RequestBody {

    val mFile: File
    val ignoreFirstNumberOfWriteToCalls: Int

    constructor(mFile: File) : super() {
        this.mFile = mFile
        ignoreFirstNumberOfWriteToCalls = 0
    }

    constructor(mFile: File, ignoreFirstNumberOfWriteToCalls: Int) : super() {
        this.mFile = mFile
        this.ignoreFirstNumberOfWriteToCalls = ignoreFirstNumberOfWriteToCalls
    }

    var numWriteToCalls = 0

    private val _shared = MutableStateFlow<Float>(0F)
    val shared: StateFlow<Float> = _shared

    override fun contentType(): MediaType? {
        return "image/*".toMediaTypeOrNull()
    }

    @Throws(IOException::class)
    override fun contentLength(): Long {
        return mFile.length()
    }

    @Throws(IOException::class)
    override fun writeTo(sink: BufferedSink) {
        numWriteToCalls++
        val fileLength = mFile.length()
        val buffer = ByteArray(DEFAULT_BUFFER_SIZE)
        val `in` = FileInputStream(mFile)
        var uploaded: Long = 0
        try {
            var read: Int
            var lastProgressPercentUpdate = 0.0f
            read = `in`.read(buffer)
            while (read != -1) {
                uploaded += read.toLong()
                sink.write(buffer, 0, read)
                read = `in`.read(buffer)
                if (numWriteToCalls > ignoreFirstNumberOfWriteToCalls) {
                    val progress = (uploaded.toFloat() / fileLength.toFloat()) * 100f
                    if (progress - lastProgressPercentUpdate > 1 || progress == 100f) {
                        _shared.value = progress
                        Log.d("progress", "${shared.value}")
                        lastProgressPercentUpdate = progress
                    }
                }
            }
        } finally {
            `in`.close()
        }
    }

    companion object {
        private val DEFAULT_BUFFER_SIZE = 2048
    }
}
The worker in which I upload the file:
class UploadWorker @WorkerInject constructor(
    private val repository: Repository,
    @Assisted context: Context,
    @Assisted params: WorkerParameters
) : CoroutineWorker(context, params) {

    private lateinit var result: UploadResult

    @ObsoleteCoroutinesApi
    @OptIn(ExperimentalCoroutinesApi::class)
    @SuppressLint("RestrictedApi")
    override suspend fun doWork(): Result {
        return try {
            val requestBody = ProgressRequestBody(File(fileUri!!.toUri().path))
            val multipartBody = prepareBody(fileUri!!.toUri(), photoPart)
            progressState(requestBody)
            upload(multipartBody)
            Result.Success()
        } catch (e: Exception) {
            Result.failure()
        }
    }

    private fun prepareBody(): MultipartBody.Part {
        return MultipartBody.Part.createFormData("photo", "photo", "image/*")
    }

    suspend fun upload(
        multipartBody: MultipartBody.Part,
    ) {
        repository.uploadPhotos(
            multipartBody
        ).collect { result ->
            if (result is Result.Success) {
                this.result = result.data
            }
        }
    }

    private suspend fun progressState(photoPart: ProgressRequestBody) {
        coroutineScope {
            launch {
                photoPart.shared.collect {
                    setProgress(workDataOf(PROGRESS to it))
                }
            }
        }
    }
}
With this approach the worker does not run. I get the following error from the worker:
java.util.concurrent.CancellationException: Task was cancelled.
at androidx.work.impl.utils.futures.AbstractFuture.cancellationExceptionWithCause(AbstractFuture.java:1184)
at androidx.work.impl.utils.futures.AbstractFuture.getDoneValue(AbstractFuture.java:514)
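One likely culprit, inferred from the snippet rather than confirmed in the question: progressState() wraps the collection in coroutineScope { launch { ... } }, and collecting a StateFlow never completes, so progressState() suspends forever and upload() is never reached; in addition, the ProgressRequestBody whose flow is collected has to be the very instance Retrofit writes. A minimal sketch of doWork() that launches the progress collection as a sibling job and cancels it once the upload finishes (it reuses the question's fileUri, PROGRESS and upload(), so those names are assumed to exist as shown):

override suspend fun doWork(): Result = coroutineScope {
    try {
        val file = File(fileUri!!.toUri().path!!)
        // Use the SAME ProgressRequestBody instance both as the multipart body
        // and as the source of progress, otherwise nothing is ever emitted.
        val requestBody = ProgressRequestBody(file)
        val multipartBody = MultipartBody.Part.createFormData("photo", file.name, requestBody)

        // Collect progress concurrently instead of before the upload;
        // a StateFlow never completes, so this job must be cancelled explicitly.
        val progressJob = launch {
            requestBody.shared.collect { setProgress(workDataOf(PROGRESS to it)) }
        }

        upload(multipartBody)
        progressJob.cancel()
        Result.success()
    } catch (e: Exception) {
        Result.failure()
    }
}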
I have the following code, where I validate a JWT token (with Volley):
private fun validateToken(token: String) {
    var queue = Volley.newRequestQueue(this)
    val yourUrl = "https://mysite/wp-json/jwt-auth/v1/token/validate"
    val parameters = JSONObject()
    try {
        parameters.put("username", "abc@test.com")
        parameters.put("password", "12345678")
    } catch (e: java.lang.Exception) {
    }
    val request: JsonObjectRequest =
        object : JsonObjectRequest(
            Method.POST, yourUrl, parameters,
            Response.Listener { response -> Log.i("onResponse", response.toString()) },
            Response.ErrorListener { error -> Log.e("onErrorResponse", error.toString()) }) {
            @Throws(AuthFailureError::class)
            override fun getHeaders(): Map<String, String> {
                val headers: MutableMap<String, String> = HashMap()
                // Basic Authentication
                //String auth = "Basic " + Base64.encodeToString(CONSUMER_KEY_AND_SECRET.getBytes(), Base64.NO_WRAP);
                headers["Authorization"] = "Bearer $token"
                return headers
            }
        }
    queue.add(request)
}
It works for me and I get the correct response from the server (in Log.i):
{"code":"jwt_auth_valid_token","data":{"status":200}}
My question is: how do I save the status: 200 in a variable in my code, so that I can then apply an `if (status == 200)` check and, if it is 200, send the user to another activity?
1. Add implementation 'com.google.code.gson:gson:2.8.6' to build.gradle (app).
2. Create Model.kt with:
data class dataServer(
    @SerializedName("code") val code: String,
    @SerializedName("data") val data: Data
)

data class Data(
    @SerializedName("status") val status: Int
)
3. Update the code:
private fun validateToken(token: String) {
    var queue = Volley.newRequestQueue(this)
    val yourUrl = "https://myweb/wp-json/jwt-auth/v1/token/validate"
    val parameters = JSONObject()
    try {
        parameters.put("username", "abc@test.com")
        parameters.put("password", "12345678")
    } catch (e: java.lang.Exception) {
    }
    val request: JsonObjectRequest =
        object : JsonObjectRequest(
            Method.POST, yourUrl, parameters,
            Response.Listener { response ->
                Log.i("onResponse", response.toString())
                val gson = Gson()
                val dataToken = gson.fromJson(response.toString(), dataServer::class.java)
                val status = dataToken.data.status
                println(status)
                // use the if check here
            },
            Response.ErrorListener { error -> Log.e("onErrorResponse", error.toString()) }) {
            @Throws(AuthFailureError::class)
            override fun getHeaders(): Map<String, String> {
                val headers: MutableMap<String, String> = HashMap()
                // Basic Authentication
                //String auth = "Basic " + Base64.encodeToString(CONSUMER_KEY_AND_SECRET.getBytes(), Base64.NO_WRAP);
                headers["Authorization"] = "Bearer $token"
                return headers
            }
        }
    queue.add(request)
}
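To turn the logged status into navigation (the last part of the question), one option is a small helper in the Activity, called from the Response.Listener right after println(status). This is only a sketch; NextActivity is a placeholder, not something from the original post, and it assumes validateToken() lives in an Activity:

// Requires android.content.Intent; NextActivity is a hypothetical destination.
private fun handleStatus(status: Int) {
    if (status == 200) {
        startActivity(Intent(this, NextActivity::class.java))
    }
}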
I am trying to create a multipart request with Ktor; the code is as follows:
import com.firstapp.modal.response.SuccessResponse
import io.ktor.application.call
import io.ktor.http.HttpStatusCode
import io.ktor.http.content.PartData
import io.ktor.http.content.forEachPart
import io.ktor.http.content.streamProvider
import io.ktor.locations.Location
import io.ktor.locations.post
import io.ktor.request.isMultipart
import io.ktor.request.receive
import io.ktor.request.receiveMultipart
import io.ktor.response.respond
import io.ktor.routing.Route
import io.ktor.util.getOrFail
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import kotlinx.coroutines.yield
import java.io.File
import java.io.InputStream
import java.io.OutputStream
import java.lang.IllegalArgumentException
@Location("/uploadVideo/{title}")
class UploadVideo(val title: String)
fun Route.upload(uploadDir: File) {
    post<UploadVideo> {
        val multipart = call.receiveMultipart()
        var videoFile: File? = null
        // Processes each part of the multipart input content of the user
        multipart.forEachPart { part ->
            when (part) {
                is PartData.FormItem -> {
                    if (part.name != "title")
                        throw IllegalArgumentException("Title parameter not found")
                    //title = part.value
                }
                is PartData.FileItem -> {
                    if (part.name != "file")
                        throw IllegalArgumentException("file parameter not found")
                    val ext = File(part.originalFileName).extension
                    val file = File(uploadDir, "upload-${System.currentTimeMillis()}-${call.parameters.getOrFail("title").hashCode()}.$ext")
                    part.streamProvider().use { input -> file.outputStream().buffered().use { output -> input.copyToSuspend(output) } }
                    videoFile = file
                }
            }
            part.dispose()
        }
        call.respond(
            HttpStatusCode.OK,
            SuccessResponse(
                videoFile!!,
                HttpStatusCode.OK.value,
                "video file stored"
            )
        )
    }
}
suspend fun InputStream.copyToSuspend(
    out: OutputStream,
    bufferSize: Int = DEFAULT_BUFFER_SIZE,
    yieldSize: Int = 4 * 1024 * 1024,
    dispatcher: CoroutineDispatcher = Dispatchers.IO
): Long {
    return withContext(dispatcher) {
        val buffer = ByteArray(bufferSize)
        var bytesCopied = 0L
        var bytesAfterYield = 0L
        while (true) {
            val bytes = read(buffer).takeIf { it >= 0 } ?: break
            out.write(buffer, 0, bytes)
            if (bytesAfterYield >= yieldSize) {
                yield()
                bytesAfterYield %= yieldSize
            }
            bytesCopied += bytes
            bytesAfterYield += bytes
        }
        return@withContext bytesCopied
    }
}
The above code / REST API works fine, but the issue is that I want to check whether all parameters are available, i.e. I want to send additional parameters along with the file, in a format such as:
class VideoDetail(val type: String, val userId: String, val userName: String)
Here is an example of what I want:
post("/") { request ->
    val requestParamenter = call.receive<UserInsert>()
}
Here, whatever parameters we pass get automatically converted into POJO classes, and if we haven't passed one, it throws an exception.
I want to achieve the same thing with multipart.
Finally, I was able to sort out the issue; below is the code:
@Location("/uploadVideo/{id}")
class UploadVideo(val id: Int)
fun Route.upload(uploadDir: File) {
    post<UploadVideo> {
        val multipart = call.receiveMultipart().readAllParts()
        val multiMap = multipart.associateBy { it.name }.toMap()
        val data = PersonForm(multiMap)
        println(data)
        val ext = File(data.file.originalFileName).extension
        val file = File(uploadDir, "upload-${System.currentTimeMillis()}-${data.file.originalFileName}")
        data.file.streamProvider()
            .use { input -> file.outputStream().buffered().use { output -> input.copyToSuspend(output) } }
        call.respond(
            HttpStatusCode.OK,
            SuccessResponse(
                file,
                HttpStatusCode.OK.value,
                "video file stored"
            )
        )
    }
}
suspend fun InputStream.copyToSuspend(
    out: OutputStream,
    bufferSize: Int = DEFAULT_BUFFER_SIZE,
    yieldSize: Int = 4 * 1024 * 1024,
    dispatcher: CoroutineDispatcher = Dispatchers.IO
): Long {
    return withContext(dispatcher) {
        val buffer = ByteArray(bufferSize)
        var bytesCopied = 0L
        var bytesAfterYield = 0L
        while (true) {
            val bytes = read(buffer).takeIf { it >= 0 } ?: break
            out.write(buffer, 0, bytes)
            if (bytesAfterYield >= yieldSize) {
                yield()
                bytesAfterYield %= yieldSize
            }
            bytesCopied += bytes
            bytesAfterYield += bytes
        }
        return@withContext bytesCopied
    }
}
class PersonForm(map: Map<String?, PartData>) {
    val file: PartData.FileItem by map
    val type: PartData.FormItem by map
    val title: PartData.FormItem by map
    override fun toString() = "${file.originalFileName}, ${type.value}, ${title.value}"
}
The only issue with this approach is that, with map delegation, you have to access a property to find out whether all the parameters are present in the map, i.e.:
val data = PersonForm(multiMap)
println(data)
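If fail-fast behaviour is wanted, one option (a sketch layered on top of the answer above, not part of the original solution) is to touch every delegated property in an init block, so a missing or mistyped part fails at construction time:

import io.ktor.http.content.PartData

class PersonForm(map: Map<String?, PartData>) {
    val file: PartData.FileItem by map
    val type: PartData.FormItem by map
    val title: PartData.FormItem by map

    init {
        // Resolving each delegate here makes a missing key throw NoSuchElementException
        // (and a wrong part type throw ClassCastException) as soon as the form is built.
        listOf(file, type, title)
    }

    override fun toString() = "${file.originalFileName}, ${type.value}, ${title.value}"
}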
I would like to validate the request body in a GlobalFilter.
I need to read two HTTP headers that contain a checksum of the body and compare it with the body itself:
internal class MyFilter : GlobalFilter {
    override fun filter(exchange: ServerWebExchange, chain: GatewayFilterChain) =
        ByteArrayDecoder()
            .decodeToMono(
                exchange.request.body,
                ResolvableType.forClass(ByteBuffer::class.java),
                exchange.request.headers.contentType,
                null
            )
            .flatMap { /* my logic checking body against request headers */ chain.filter(exchange) }
}
The problem is that decodeToMono gets stuck and does not forward requests.
How can I decode the body properly?
I've managed to write a filter that does not get stuck after reading the body:
interface BodyFilter {
    fun filter(
        body: Mono<ByteArrayResource>,
        exchange: ServerWebExchange,
        passRequestFunction: () -> Mono<Void>
    ): Mono<Void>
}
class HeaderAndBodyGlobalFilter(private val bodyFilter: BodyFilter) : GlobalFilter {

    private val messageReaders: List<HttpMessageReader<*>> = HandlerStrategies.withDefaults().messageReaders()

    override fun filter(exchange: ServerWebExchange, chain: GatewayFilterChain): Mono<Void> {
        val serverRequest: ServerRequest = ServerRequest.create(exchange, messageReaders)
        val body: Mono<ByteArrayResource> = serverRequest.bodyToMono<ByteArrayResource>(ByteArrayResource::class.java)
        return bodyFilter.filter(body, exchange) { reconstructRequest(body, exchange, chain) }
    }

    private fun reconstructRequest(
        body: Mono<ByteArrayResource>,
        exchange: ServerWebExchange,
        chain: GatewayFilterChain
    ): Mono<Void> {
        val headers: HttpHeaders = writableHttpHeaders(exchange.request.headers)
        val outputMessage = CachedBodyOutputMessage(exchange, headers)
        return BodyInserters.fromPublisher(
            body,
            ByteArrayResource::class.java
        ).insert(outputMessage, BodyInserterContext())
            .then(Mono.defer {
                val decorator: ServerHttpRequestDecorator = decorate(
                    exchange, headers, outputMessage
                )
                chain
                    .filter(exchange.mutate().request(decorator).build())
            })
    }

    private fun decorate(
        exchange: ServerWebExchange,
        headers: HttpHeaders,
        outputMessage: CachedBodyOutputMessage
    ): ServerHttpRequestDecorator {
        return object : ServerHttpRequestDecorator(exchange.request) {
            override fun getHeaders(): HttpHeaders {
                val contentLength = headers.contentLength
                val httpHeaders = HttpHeaders()
                httpHeaders.putAll(super.getHeaders())
                if (contentLength > 0) {
                    httpHeaders.contentLength = contentLength
                } else {
                    // TODO: this causes a 'HTTP/1.1 411 Length Required' on httpbin.org
                    httpHeaders.set(HttpHeaders.TRANSFER_ENCODING, "chunked")
                }
                return httpHeaders
            }

            override fun getBody(): Flux<DataBuffer> {
                return outputMessage.body
            }
        }
    }
}
The implementation of BodyFilter then either returns Mono.empty() on failure or calls passRequestFunction on success.
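For completeness, here is a minimal sketch of what such a BodyFilter implementation could look like, assuming a hypothetical X-Body-Checksum header carrying a hex-encoded SHA-256 of the body; the header name and algorithm are illustrative, not from the original post:

import org.springframework.core.io.ByteArrayResource
import org.springframework.http.HttpStatus
import org.springframework.web.server.ServerWebExchange
import reactor.core.publisher.Mono
import java.security.MessageDigest

// Sketch: compare a hypothetical checksum header against the cached body.
class ChecksumBodyFilter : BodyFilter {

    override fun filter(
        body: Mono<ByteArrayResource>,
        exchange: ServerWebExchange,
        passRequestFunction: () -> Mono<Void>
    ): Mono<Void> {
        val expected = exchange.request.headers.getFirst("X-Body-Checksum")
        return body
            .flatMap { resource ->
                val actual = MessageDigest.getInstance("SHA-256")
                    .digest(resource.byteArray)
                    .joinToString("") { "%02x".format(it) }
                if (actual.equals(expected, ignoreCase = true)) {
                    // Checksum matches: rebuild the request and pass it downstream.
                    passRequestFunction()
                } else {
                    // Checksum mismatch: reject without forwarding.
                    exchange.response.setStatusCode(HttpStatus.BAD_REQUEST)
                    exchange.response.setComplete()
                }
            }
            // No body at all: forward the request untouched.
            .switchIfEmpty(Mono.defer { passRequestFunction() })
    }
}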
I am testing out exactly-once delivery with spring-kafka. My understanding is that the offsets and any messages published through kafkaTemplate will be part of the same transaction.
However, when I throw an exception after publishing, the messages are still published, even though the consumer offset is not committed.
Is there something wrong with my setup? Also, do I need to set @Transactional on each @KafkaListener class? When I removed it, I saw transactions being created in the spring-kafka debug logs.
@Transactional("kafkaTransactionManager")
@KafkaListener(
    id = "\${messaging.command.consumer-group-id}",
    clientIdPrefix = "\${messaging.command.consumer-group-id}",
    topics = ["\${messaging.command.topic}"],
    concurrency = "\${messaging.command.listener-count}"
)
fun processCommand1(@Payload command: EntityCommand<JsonNode>, record: ConsumerRecord<String, Array<Byte>>) {
    testEventPublisher.publish(record.key(), "test")
    testEventPublisher.publish(randomUUID().toString(), "test")
    throw RuntimeException("test")
}
Publisher (I added executeInTransaction() while trying to get it to work properly):
class TransactionalTopicPublisher<TYPE>(val kafkaTemplate: KafkaTemplate<String, Any>, val topic: String) {
    fun publish(key: String, message: TYPE) {
        kafkaTemplate.executeInTransaction {
            kafkaTemplate.send(
                topic,
                key,
                message
            )
        }
    }
}
Producer config:
@Bean
fun kafkaTemplate(producerFactory: ProducerFactory<String, Any>): KafkaTemplate<String, Any> {
    return KafkaTemplate(producerFactory)
}

@Bean(KAFKA_TRANSACTION_MANAGER)
fun kafkaTransactionManager(kafkaProducerFactory: ProducerFactory<String, Any>): KafkaTransactionManager<String, Any> {
    val kafkaTransactionManager = KafkaTransactionManager<String, Any>(kafkaProducerFactory)
    return kafkaTransactionManager
}

@Bean
fun kafkaProducerFactory(kafkaJsonSerializer: JsonSerializer<Any>): ProducerFactory<String, Any> {
    val factory = DefaultKafkaProducerFactory<String, Any>(producerConfig())
    factory.setTransactionIdPrefix(transactionIdPrefix)
    factory.setValueSerializer(kafkaJsonSerializer)
    return factory
}

@Bean
fun producerConfig(): Map<String, Any> {
    return mapOf(
        ProducerConfig.BOOTSTRAP_SERVERS_CONFIG to bootstrapServers,
        ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG to StringSerializer::class.java.name,
        ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG to true,
        ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION to 1,
        ProducerConfig.ACKS_CONFIG to "all",
        ProducerConfig.BATCH_SIZE_CONFIG to 16384,
        ProducerConfig.LINGER_MS_CONFIG to 1,
        ProducerConfig.BUFFER_MEMORY_CONFIG to 33554432,
        ProducerConfig.INTERCEPTOR_CLASSES_CONFIG to ProducerInterceptor::class.java.name
    )
}

@Bean
fun kafkaJsonSerializer(kafkaObjectMapper: ObjectMapper): JsonSerializer<Any> {
    val jsonSerializer = JsonSerializer<Any>(kafkaObjectMapper)
    jsonSerializer.isAddTypeInfo = false
    return jsonSerializer
}
Consumer Config:
@Bean
fun kafkaListenerContainerFactory(
    kafkaTransactionManager: KafkaTransactionManager<String, Any>,
    stringJsonMessageConverter: StringJsonMessageConverter
): ConcurrentKafkaListenerContainerFactory<String, String> {
    val factory = ConcurrentKafkaListenerContainerFactory<String, String>()
    factory.consumerFactory = consumerFactory()
    factory.setMessageConverter(stringJsonMessageConverter)
    factory.setErrorHandler(messagingErrorHandler())
    factory.containerProperties.transactionManager = kafkaTransactionManager
    return factory
}

@Bean
fun stringJsonMessageConverter(kafkaObjectMapper: ObjectMapper) =
    StringJsonMessageConverter(kafkaObjectMapper)

@Bean
fun messagingErrorHandler() =
    MessagingErrorHandler()

@Bean
fun consumerFactory(): ConsumerFactory<String, Any> {
    val consumerFactory = DefaultKafkaConsumerFactory<String, Any>(consumerConfig())
    return consumerFactory
}

@Bean
fun consumerConfig(): Map<String, Any> {
    return mapOf(
        ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to bootstrapServers,
        ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java.name,
        ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java.name,
        ConsumerConfig.ISOLATION_LEVEL_CONFIG to IsolationLevel.READ_COMMITTED.toString().toLowerCase(Locale.ROOT),
        ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG to false,
        ConsumerConfig.AUTO_OFFSET_RESET_CONFIG to "earliest",
        ConsumerConfig.MAX_POLL_RECORDS_CONFIG to MAX_POLL_RECORD,
        ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG to ConsumerInterceptor::class.java.name,
        JsonDeserializer.USE_TYPE_INFO_HEADERS to false
    )
}
OffsetCommittingAndDeadLetterPublishingRecoverer:
@Transactional(KAFKA_TRANSACTION_MANAGER)
class OffsetCommittingAndDeadLetterPublishingRecoverer(val template: KafkaTemplate<Any, Any>) :
    DeadLetterPublishingRecoverer(template) {

    override fun accept(record: ConsumerRecord<*, *>, exception: Exception) {
        super.accept(record, exception)
        val topicPartition = TopicPartition(record.topic(), record.partition())
        val offsetAndMetadata = OffsetAndMetadata(record.offset() + 1)
        template.executeInTransaction {
            template.sendOffsetsToTransaction(
                mapOf(topicPartition to offsetAndMetadata)
            )
        }
    }
}
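One thing that stands out in the snippets above, noted as an observation rather than a verified diagnosis: executeInTransaction() always runs in its own producer transaction, separate from the one the listener container started, so records sent that way are committed independently of whether the listener later throws. A minimal sketch of a publisher that simply sends, and therefore joins the container-managed transaction:

import org.springframework.kafka.core.KafkaTemplate

// Sketch: let send() participate in the transaction started by the listener container
// (containerProperties.transactionManager) instead of opening a separate one.
class TransactionalTopicPublisher<TYPE>(
    val kafkaTemplate: KafkaTemplate<String, Any>,
    val topic: String
) {
    fun publish(key: String, message: TYPE) {
        // Inside a @KafkaListener running with a KafkaTransactionManager, this send
        // is aborted together with the offsets if the listener throws.
        kafkaTemplate.send(topic, key, message)
    }
}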