Vert.x Web API Service codegen and Kotlin: issue with nested properties of a referenced object

I'm trying to generate a Web API Service from this YAML:
openapi: 3.0.1
info:
  title: ""
  description: ''
  license:
    name: Apache 2.0
    url: http://www.apache.org/licenses/LICENSE-2.0.html
  version: 1.0.0
servers:
  - url: http://localhost:3001
tags:
  - name: user
    description: Operations with users
  - name: stream
    description: Operation with streams
paths:
  /user/register:
    post:
      x-vertx-event-bus: user_manager.myapp
      tags:
        - user
      summary: Create user
      operationId: createUser
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/UserEnter'
        required: true
      responses:
        '200':
          description: successful operation
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/User'
        '400':
          description: Invalid phone number
          content: {}
      x-codegen-request-body-name: body
components:
  schemas:
    UserEnter:
      type: object
      properties:
        phone:
          type: integer
          format: int32
        password:
          type: string
      additionalProperties: false
      required:
        - phone
        - password
UserService.kt:
@WebApiServiceGen
interface UserService {
    @GenIgnore
    companion object {
        @JvmStatic
        fun create(repository: Repository): UserServiceImpl {
            return UserServiceImpl(repository)
        }
    }

    fun createUser(userEnter: UserEnter, request: ServiceRequest, resultHandler: Handler<AsyncResult<ServiceResponse>>)
}
UserEnter.kt:
@DataObject(generateConverter = true, publicConverter = false)
class UserEnter {
    var phone: Int = 0
    var password: String

    constructor(phone: Int, password: String) {
        this.phone = phone
        this.password = password
    }

    constructor(json: JsonObject) : this(
        json.getInteger("phone", 0),
        json.getString("password", ""),
    )

    fun toJson(): JsonObject {
        return JsonObject.mapFrom(this)
    }
}
I'm trying to post the data:
{
  "phone": 23423423423,
  "password": "enim ut"
}
But the server expects this type of data:
"userEnter": {
{
"phone": 23423423423,
"password": "enim ut"
}
}
The relevant part of the generated UserServiceVertxProxyHandler.java:
case "createUser": {
JsonObject contextSerialized = json.getJsonObject("context");
if (contextSerialized == null)
throw new IllegalStateException("Received action " + action + " without ServiceRequest \"context\"");
ServiceRequest context = new ServiceRequest(contextSerialized);
JsonObject params = context.getParams();
try {
service.createUser(
searchOptionalInJson(params, "userEnter").map(j -> (io.vertx.core.json.JsonObject)j).map(j -> new com.md.model.user.UserEnter(j)).orElse(null),
context,
res -> {
if (res.failed()) {
if (res.cause() instanceof ServiceException) {
msg.reply(res.cause());
} else {
msg.reply(new ServiceException(-1, res.cause().getMessage()));
}
} else {
msg.reply(res.result() == null ? null : res.result().toJson());
}
}
);
} catch (Exception e) {
HelperUtils.manageFailure(msg, e, includeDebugInfo);
}
break;
}
Gradle:
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile

plugins {
    kotlin("jvm") version "1.6.21"
    kotlin("kapt") version "1.7.0"
    application
}

group = "com.md"
version = "1.0-SNAPSHOT"

repositories {
    mavenCentral()
    google()
}

dependencies {
    kapt("io.vertx:vertx-codegen:4.3.1:processor")
    kapt("io.vertx:vertx-web-api-service:4.3.1")
    implementation("io.vertx:vertx-core:4.3.1")
    implementation("io.vertx:vertx-web:4.3.1")
    implementation("io.vertx:vertx-web-validation:4.3.1")
    implementation("io.vertx:vertx-web-openapi:4.3.1")
    implementation("io.vertx:vertx-service-proxy:4.3.1")
    implementation("io.vertx:vertx-web-api-service:4.3.1")
    compileOnly("io.vertx:vertx-codegen:4.3.1")
    implementation("io.vertx:vertx-mongo-client:4.3.1")
    implementation("org.slf4j:jcl-over-slf4j:1.7.36")
    implementation("ch.qos.logback:logback-classic:1.2.11")
    testImplementation(kotlin("test"))
}

tasks.test {
    useJUnitPlatform()
}

tasks.withType<KotlinCompile> {
    kotlinOptions.jvmTarget = "1.8"
}

application {
    mainClass.set("MainKt")
}
What am I doing wrong? I've done everything like in this example: https://github.com/vert-x3/vertx-examples/tree/4.x/web-api-service-example/src/main/java/io/vertx/examples/webapiservice
My YAML file seems correct: I generated a Kotlin client on https://editor.swagger.io and it sends:
{
  "phone": 23423423423,
  "password": "enim ut"
}

Related

Why can't I access 'User story' as a type?

I have a (correctly working) workflow script starting with this guard function:
var entities = require('@jetbrains/youtrack-scripting-api/entities');

exports.rule = entities.Issue.action({
  title: 'Create default subtasks',
  command: 'tt-create-subtasks',
  guard: function(ctx) {
    return ctx.issue.fields.Type.name == 'User Story';
  },
I thought I would replace that with something like
return ctx.issue.fields.Type == UserStory;
and therefore change the requirements from:
requirements: {
  Type: {
    type: entities.EnumField.fieldType,
    Task: {},
  }
}
to:
requirements: {
  Type: {
    type: entities.EnumField.fieldType,
    Task: {},
    UserStory: {
      name: 'User Story'
    }
  }
}
Task is used elsewhere in a similar fashion and that works:
newIssue.fields.Type = ctx.Type.Task;
But the editor gives red errors on UserStory in the guard function. Am I doing something wrong in the requirements?
If you declare the requirements like you described
requirements: {
  Type: {
    type: entities.EnumField.fieldType,
    Task: {},
    UserStory: {
      name: 'User Story'
    }
  }
}
you'll be able to check the value the following way: issue.fields.is(ctx.Type, ctx.Type.UserStory).
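For example, a minimal sketch of the guard rewritten to use that check (assuming the requirements block above; ctx.issue is the issue the action runs on):
guard: function(ctx) {
  // true only when the Type field is set to the 'User Story' enum value
  return ctx.issue.fields.is(ctx.Type, ctx.Type.UserStory);
},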

Enable diagnostic settings for a Storage account using an ARM template

A storage account deployed from an ARM template has its diagnostic settings created as disabled.
How can I enable the diagnostics using an ARM template or a PowerShell script?
I want to automate the process of deploying diagnostic settings.
Here is a solution using ARM templates in the newer Bicep format. In the example, it configures diagnostic settings for:
StorageAccount
Blob
File
Queue
Table
To keep the template length down, it configures only the StorageRead log category on the storage account services.
param name string
param location string = resourceGroup().location
param sku string

@description('Resource ID for the destination log analytics workspace.')
param logAnalyticsWorkspaceId string

resource storageAccount 'Microsoft.Storage/storageAccounts@2019-06-01' = {
  name: name
  location: location
  kind: 'StorageV2'
  sku: {
    name: sku
  }
  properties: {
    allowBlobPublicAccess: false
    allowSharedKeyAccess: true
    minimumTlsVersion: 'TLS1_2'
    accessTier: 'Hot'
    supportsHttpsTrafficOnly: true
    networkAcls: {
      defaultAction: 'Deny'
      bypass: 'AzureServices'
    }
  }
}

resource diagnosticsStorage 'Microsoft.Insights/diagnosticSettings@2021-05-01-preview' = {
  scope: storageAccount
  name: 'diagnostics00'
  properties: {
    workspaceId: logAnalyticsWorkspaceId
    metrics: [
      {
        category: 'Transaction'
        enabled: true
      }
    ]
  }
}

resource blobService 'Microsoft.Storage/storageAccounts/blobServices@2021-06-01' = {
  parent: storageAccount
  name: 'default'
  properties: {}
}

resource diagnosticsBlob 'Microsoft.Insights/diagnosticSettings@2021-05-01-preview' = {
  scope: blobService
  name: 'diagnostics00'
  properties: {
    workspaceId: logAnalyticsWorkspaceId
    logs: [
      {
        category: 'StorageRead'
        enabled: true
      }
    ]
  }
}

resource fileService 'Microsoft.Storage/storageAccounts/fileServices@2021-06-01' = {
  parent: storageAccount
  name: 'default'
  properties: {}
}

resource diagnosticsFile 'Microsoft.Insights/diagnosticSettings@2021-05-01-preview' = {
  scope: fileService
  name: 'diagnostics00'
  properties: {
    workspaceId: logAnalyticsWorkspaceId
    logs: [
      {
        category: 'StorageRead'
        enabled: true
      }
    ]
  }
}

resource queueService 'Microsoft.Storage/storageAccounts/queueServices@2021-06-01' = {
  parent: storageAccount
  name: 'default'
  properties: {}
}

resource diagnosticsQueue 'Microsoft.Insights/diagnosticSettings@2021-05-01-preview' = {
  scope: queueService
  name: 'diagnostics00'
  properties: {
    workspaceId: logAnalyticsWorkspaceId
    logs: [
      {
        category: 'StorageRead'
        enabled: true
      }
    ]
  }
}

resource tableService 'Microsoft.Storage/storageAccounts/tableServices@2021-06-01' = {
  parent: storageAccount
  name: 'default'
  properties: {}
}

resource diagnosticsTable 'Microsoft.Insights/diagnosticSettings@2021-05-01-preview' = {
  scope: tableService
  name: 'diagnostics00'
  properties: {
    workspaceId: logAnalyticsWorkspaceId
    logs: [
      {
        category: 'StorageRead'
        enabled: true
      }
    ]
  }
}
Please follow the URL below to enable diagnostic settings for an Azure Storage account using an ARM template:
https://learn.microsoft.com/en-us/azure/azure-monitor/essentials/resource-manager-diagnostic-settings#diagnostic-setting-for-azure-storage

How do I annotate an endpoint in NestJS for OpenAPI that takes Multipart Form Data

My NestJS server has an endpoint that accepts files and also additional form data.
For example, I pass a file and a user_id of the file creator in the form.
NestJS Swagger needs to be told explicitly that the body contains the file and that the endpoint consumes multipart/form-data; this is not documented in the NestJS docs: https://docs.nestjs.com/openapi/types-and-parameters#types-and-parameters.
Luckily, some bug reports led to discussion about how to handle this use case.
Looking at these two discussions
https://github.com/nestjs/swagger/issues/167
https://github.com/nestjs/swagger/issues/417
I was able to put together the following. I added the annotation using a DTO; the two critical parts are:
In the DTO, add
@ApiProperty({
  type: 'file',
  properties: {
    file: {
      type: 'string',
      format: 'binary',
    },
  },
})
public readonly file: any;

@IsString()
public readonly user_id: string;
In the controller, add
@ApiConsumes('multipart/form-data')
This gets me a working endpoint
and this OpenAPI JSON:
{
  "/users/files": {
    "post": {
      "operationId": "UsersController_addPrivateFile",
      "summary": "...",
      "parameters": [],
      "requestBody": {
        "required": true,
        "content": {
          "multipart/form-data": {
            "schema": {
              "$ref": "#/components/schemas/UploadFileDto"
            }
          }
        }
      }
    }
  }
}
...
{
  "UploadFileDto": {
    "type": "object",
    "properties": {
      "file": {
        "type": "file",
        "properties": {
          "file": {
            "type": "string",
            "format": "binary"
          }
        },
        "description": "...",
        "example": "'file': <any-kind-of-binary-file>"
      },
      "user_id": {
        "type": "string",
        "description": "...",
        "example": "cus_IPqRS333voIGbS"
      }
    },
    "required": [
      "file",
      "user_id"
    ]
  }
}
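For completeness, a minimal, hypothetical controller sketch that wires the DTO and decorators above together might look like this (UploadFileDto is the DTO defined above, addFile is an illustrative name, and FileInterceptor from @nestjs/platform-express handles the actual multer upload):
import { Body, Controller, Post, UploadedFile, UseInterceptors } from '@nestjs/common';
import { FileInterceptor } from '@nestjs/platform-express';
import { ApiBody, ApiConsumes } from '@nestjs/swagger';
// UploadFileDto is the DTO shown above (file + user_id)

@Controller('users')
export class UsersController {
  @Post('files')
  @ApiConsumes('multipart/form-data')        // tell Swagger the endpoint takes form data
  @ApiBody({ type: UploadFileDto })          // the body schema comes from the DTO
  @UseInterceptors(FileInterceptor('file'))  // multer extracts the uploaded file
  async addFile(
    @UploadedFile() file: Express.Multer.File, // the binary part
    @Body() body: UploadFileDto,               // the remaining form fields (user_id)
  ): Promise<void> {
    console.log(file.originalname, body.user_id);
  }
}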
Here is what I find a cleaner approach:
@Injectable()
class FileToBodyInterceptor implements NestInterceptor {
  intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
    const ctx = context.switchToHttp();
    const req = ctx.getRequest();
    if (req.body && req.file?.fieldname) {
      const { fieldname } = req.file;
      if (!req.body[fieldname]) {
        req.body[fieldname] = req.file;
      }
    }
    return next.handle();
  }
}

const ApiFile = (options?: ApiPropertyOptions): PropertyDecorator => (
  target: Object, propertyKey: string | symbol
) => {
  ApiProperty({
    type: 'file',
    properties: {
      [propertyKey]: {
        type: 'string',
        format: 'binary',
      },
    },
  })(target, propertyKey);
};

class UserImageDTO {
  @ApiFile()
  file: Express.Multer.File; // you can name it something else like image or photo

  @ApiProperty()
  user_id: string;
}

@Controller('users')
export class UsersController {
  @ApiBody({ type: UserImageDTO })
  // @ApiResponse( { type: ... } ) // some dto to annotate the response
  @Post('files')
  @ApiConsumes('multipart/form-data')
  @UseInterceptors(
    FileInterceptor('file'), // this should match the file property name
    FileToBodyInterceptor,   // this is to inject the file into the body object
  )
  async addFile(@Body() userImage: UserImageDTO): Promise<void> { // if you return something to the client, put it here
    console.log({ userImage });  // all the fields and the file
    console.log(userImage.file); // the file is here
    // ... your logic
  }
}
FileToBodyInterceptor and ApiFile are general; I wish they were part of NestJS.
You probably need to install @types/multer to have the Express.Multer.File type.

GraphQL queries with tables join using Node.js

I am learning GraphQL so I built a little project. Let's say I have 2 models, User and Comment.
const Comment = Model.define('Comment', {
  content: {
    type: DataType.TEXT,
    allowNull: false,
    validate: {
      notEmpty: true,
    },
  },
});

const User = Model.define('User', {
  name: {
    type: DataType.STRING,
    allowNull: false,
    validate: {
      notEmpty: true,
    },
  },
  phone: DataType.STRING,
  picture: DataType.STRING,
});
The relation is one-to-many: a user can have many comments.
I have built the schema like this:
const UserType = new GraphQLObjectType({
  name: 'User',
  fields: () => ({
    id: {
      type: GraphQLString
    },
    name: {
      type: GraphQLString
    },
    phone: {
      type: GraphQLString
    },
    comments: {
      type: new GraphQLList(CommentType),
      resolve: user => user.getComments()
    }
  })
});
And the query:
const user = {
  type: UserType,
  args: {
    id: {
      type: new GraphQLNonNull(GraphQLString)
    }
  },
  resolve: (_, { id }) => User.findById(id)
};
Executing the query for a user and his comments is done with 1 request, like so:
{
  User(id: "1") {
    Comments {
      content
    }
  }
}
As I understand it, the client gets the results with one query; that is the benefit of using GraphQL. But the server will execute two queries: one for the user and another for his comments.
My question is: what are the best practices for building the GraphQL schema and types and combining joins between tables, so that the server can also resolve the query with one request?
The concept you are referring to is called batching. There are several libraries out there that offer this. For example:
DataLoader: a generic utility maintained by Facebook that provides "a consistent API over various backends and reduce requests to those backends via batching and caching"
join-monster: "A GraphQL-to-SQL query execution layer for batch data fetching."
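As an illustration only, a minimal DataLoader sketch for the models above might look like this (it assumes the dataloader package and that Comment carries a userId foreign key; adjust the key name to your schema):
const DataLoader = require('dataloader');

// One batched SQL query for all requested users instead of one query per user.
const commentsByUserLoader = new DataLoader(async (userIds) => {
  const comments = await Comment.findAll({ where: { userId: userIds } });
  // DataLoader expects the results in the same order as the input keys.
  return userIds.map((id) => comments.filter((c) => c.userId === id));
});

// Then, in the UserType field definition:
// comments: {
//   type: new GraphQLList(CommentType),
//   resolve: (user) => commentsByUserLoader.load(user.id),
// }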
For anyone using .NET and the GraphQL for .NET package: I have made an extension method that converts the GraphQL query into Entity Framework includes.
public static class ResolveFieldContextExtensions
{
    public static string GetIncludeString(this ResolveFieldContext<object> source)
    {
        return string.Join(',', GetIncludePaths(source.FieldAst));
    }

    private static IEnumerable<Field> GetChildren(IHaveSelectionSet root)
    {
        return root.SelectionSet.Selections.Cast<Field>()
            .Where(x => x.SelectionSet.Selections.Any());
    }

    private static IEnumerable<string> GetIncludePaths(IHaveSelectionSet root)
    {
        var q = new Queue<Tuple<string, Field>>();
        foreach (var child in GetChildren(root))
            q.Enqueue(new Tuple<string, Field>(child.Name.ToPascalCase(), child));
        while (q.Any())
        {
            var node = q.Dequeue();
            var children = GetChildren(node.Item2).ToList();
            if (children.Any())
            {
                foreach (var child in children)
                    q.Enqueue(new Tuple<string, Field>(node.Item1 + "." + child.Name.ToPascalCase(), child));
            }
            else
            {
                yield return node.Item1;
            }
        }
    }
}
Let's say we have the following query:
query {
  getHistory {
    id
    product {
      id
      category {
        id
        subCategory {
          id
        }
        subAnything {
          id
        }
      }
    }
  }
}
We can create a variable in the "resolve" method of the field:
var include = context.GetIncludeString();
which generates the following string:
"Product.Category.SubCategory,Product.Category.SubAnything"
and pass it to Entity Framework:
public Task<TEntity> Get(TKey id, string include)
{
    IQueryable<TEntity> query = Context.Set<TEntity>();
    if (!string.IsNullOrEmpty(include))
    {
        query = include.Split(',', StringSplitOptions.RemoveEmptyEntries)
            .Aggregate(query, (q, p) => q.Include(p));
    }
    return query.SingleOrDefaultAsync(c => c.Id.Equals(id));
}

How to use jsonschema for Loopback remoteMethod?

In my app I want to define JSON schemas for a custom API.
For example, from http://docs.strongloop.com/display/public/LB/Remote+methods#Remotemethods-Example:
module.exports = function(Person) {
  Person.greet = function(msg, cb) {
    cb(null, 'Greetings... ' + msg);
  }

  Person.remoteMethod(
    'greet',
    {
      accepts: <generate definitions from jsonschema>,
      returns: <generate definitions from jsonschema>
    }
  );
};
How can I do that? Is this the right way?
MY SOLUTION: a validation decorator + remote method params with object type
var validate = require('jsonschema').validate;

bySchema = function (schema) {
  return function (func) {
    return function () {
      var data = arguments[0],
          callback = arguments[1];
      var result = validate(data, schema);
      if (result.errors.length > 0) {
        // some errors in request body
        callback(null, {
          success: false,
          error: 'schema validation error',
        });
        return;
      }
      return func.apply(this, arguments);
    };
  };
};

defaultRemoteArguments = {
  accepts: {
    arg: 'data',
    type: 'object',
    http: function(ctx) {
      return ctx.req.body;
    }
  },
  returns: {
    arg: 'data',
    type: 'object',
    root: true
  }
};
Example:
Auth.login = bySchema(require('<path to schema json for this request>'))
  (function(data, cb) {
    // process request
  });

Auth.remoteMethod('login', defaultRemoteArguments);
In this solution the loopback-explorer contrib module will not be very useful, because the request/response are plain objects, not fields...
The correct way to do it is to set the type in the returns attribute to the model name.
In your case you would write:
Person.remoteMethod(
  'greet',
  {
    ...
    returns: {type: 'Person', ...}
  }
);
You need to modify your output to match the format accepted by the returns property.
...
returns: [{arg: "key1", type: "string"}, {arg: "key2", type: "object"}, ...];
...