I'm new to Vue.js. In my code I try to recurse through a JSON Schema and collect all the nested properties into a single object; in some cases the properties have to be fetched from API calls. Kindly guide me.
JSON Schema
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "UNDP",
"description": "UNDP PRISM Data Capturing Form",
"type": "object",
"x-display": "form",
"properties": {
"concepts": {
"title": "Concepts",
"type": "object",
"properties": {
"indicators": {
"$ref": "/Form/GetIndicators"
}
}
}
}
}
My function
parseJsonToObject(schema: any) {
  if (!schema) { return; }
  if (schema.$ref) {
    let params = {
      id: schema.id
    };
    let response = $axios.get(schema.$ref, params);
    schema.properties = response.data.properties;
  }
  if (schema.type === 'string' || schema.type === 'number') {
    return schema.title;
  }
  const parsedData = {};
  Object.keys(schema.properties || {}).forEach((item) => {
    parsedData[item] = this.parseJsonToObject(schema.properties[item]);
  });
  return parsedData;
}
I got this result:
{
"concepts": {
"indicators": {}
}
}
I have also tried async/await
async parseToObject(schema:any) {
let response = await this.getRefPropertiesFormServer(schema.$ref, params) ;
// code
}
async getRefPropertiesFormServer(url, params){
return $axios.get(url, params);
}
Then I got the result below:
{
"concepts": {
"indicators": new Promise()
}
}
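For reference, here is a minimal sketch of how the recursion can stay fully asynchronous. It assumes a plain axios instance and the response shape implied by the code above ({ data: { properties: ... } }); the key points are awaiting the HTTP call and awaiting every recursive call, e.g. via Promise.all.
import axios from 'axios';

// Sketch only: assumes GET <$ref>?id=<id> answers with { data: { properties: {...} } }.
async function parseJsonToObject(schema: any): Promise<any> {
  if (!schema) { return; }
  if (schema.$ref) {
    // Await the HTTP call so we work with the resolved response, not a pending Promise.
    const response = await axios.get(schema.$ref, { params: { id: schema.id } });
    schema.properties = response.data.properties;
  }
  if (schema.type === 'string' || schema.type === 'number') {
    return schema.title;
  }
  const keys = Object.keys(schema.properties || {});
  // Await every recursive call; Promise.all lets nested $ref fetches run in parallel.
  const values = await Promise.all(
    keys.map((key) => parseJsonToObject(schema.properties[key]))
  );
  const parsedData: Record<string, any> = {};
  keys.forEach((key, i) => { parsedData[key] = values[i]; });
  return parsedData;
}
The caller has to await the result as well, e.g. const parsed = await parseJsonToObject(schema);.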
I have a query like below
query {
heroes {
node {
name
}
endCursor
}
}
I am trying to understand how GraphQL handles errors and returns a partial response. I looked at https://github.com/graphql/dataloader/issues/169 and tried to create a resolver like the one below:
{
  Query: {
    heroes: async (_) => {
      const heroesData = await loadHeroesFromDataWarehouse();
      return {
        endCursor: heroesData.endCursor,
        node: heroesData.map(h => h.name === 'hulk' ? new ApolloError('Hulk is too powerful') : h)
      };
    }
  }
}
I was hoping it would resolve to something like this:
{
"errors": [
{
"message": "Hulk is too powerful",
"path": [
"heroes", "1"
],
}
],
"data": {
"heroes": [
{
"name": "spiderman"
},
null,
{
"name": "ironman"
}
]
}
}
but it is failing completely, making heroes itself null, like below:
{
"errors": [
{
"message": "Hulk is too powerful",
"path": [
"heroes"
],
}
],
"data": {
"heroes": null
}
}
How can I make the resolver return the desired partial response?
Found the solution: basically, we need a resolver that resolves the edge model itself:
{
  Query: {
    heroes: (_) => loadHeroesFromDataWarehouse()
  },
  HeroesEdge: {
    node: async (hero) => hero.name === 'hulk' ? new ApolloError('Hulk is too powerful') : hero
  }
}
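For context, a schema shape that would make this resolver map work might look like the sketch below; only HeroesEdge appears in the answer, so the other type names are guesses based on the query at the top.
import { gql } from 'apollo-server';

// Hypothetical type definitions. node must be nullable so that an error from the
// node resolver nulls out only that edge's node instead of the whole heroes list.
const typeDefs = gql`
  type Hero {
    name: String
  }

  type HeroesEdge {
    node: Hero
    endCursor: String
  }

  type Query {
    heroes: [HeroesEdge]
  }
`;
Because node is nullable, returning (or throwing) an ApolloError from the node resolver records the error under that field's path and nulls only that field, while the sibling edges still resolve.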
I tried following the official Shopify documentation for retrieving ProductMedia.
My Query looks like this:
query getProductMediaById($id: ID!) {
product(id: $id) {
id
media(first: 10) {
edges {
node {
mediaContentType
alt
...mediaFieldsByType
}
}
}
}
}
fragment mediaFieldsByType on Media {
... on ExternalVideo {
id
embeddedUrl
}
... on MediaImage {
image {
...imageAttributes
}
}
... on Model3d {
sources {
url
mimeType
format
filesize
}
}
... on Video {
sources {
url
mimeType
format
height
width
}
}
}
fragment imageAttributes on Image {
altText
url
}
The only place where I diverged from the official documentation is that I moved the image attributes into a separate fragment for code reuse.
But when I try to execute the query I get the following response:
{
"data": {
"product": {
"__typename": "Product",
"id": "Z2lkOi8vc2hvcGlmeS9Qcm9kdWN0LzY3NjcyOTczMzEzMDU=",
"media": {
"__typename": "MediaConnection",
"edges": [
{
"__typename": "MediaEdge",
"node": {
"__typename": "MediaImage",
"mediaContentType": "IMAGE",
"alt": ""
}
}
]
}
}
},
"loading": false,
"networkStatus": 7
}
Or to put it in words: my response doesn't contain any of the fields from the mediaFieldsByType fragment.
Any idea what I'm doing wrong?
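Since the response also carries loading and networkStatus, the query appears to go through Apollo Client, and one common reason for inline-fragment fields on an interface such as Media to be silently dropped is the cache's heuristic fragment matching. A sketch of that fix, assuming Apollo Client 3 (the endpoint and token are placeholders; Apollo Client 2 would use IntrospectionFragmentMatcher instead):
import { ApolloClient, InMemoryCache } from '@apollo/client';

// Tell the cache which concrete types implement the Media interface so that
// the ...mediaFieldsByType inline fragments can be matched client-side.
const client = new ApolloClient({
  uri: 'https://your-shop.myshopify.com/admin/api/2023-01/graphql.json', // placeholder endpoint
  headers: { 'X-Shopify-Access-Token': '<token>' },                      // placeholder token
  cache: new InMemoryCache({
    possibleTypes: {
      Media: ['ExternalVideo', 'MediaImage', 'Model3d', 'Video'],
    },
  }),
});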
When I try to download a file from the Graph API, accessing a Drive or a Site with JavaScript in SPFx, the call returns undefined.
My web part code:
import { Version } from '@microsoft/sp-core-library';
import {
  BaseClientSideWebPart,
  IPropertyPaneConfiguration,
  PropertyPaneTextField
} from '@microsoft/sp-webpart-base';
import * as strings from 'Docx2PdfWebPartStrings';
import { MSGraphClient } from '@microsoft/sp-http';
export interface IDocx2PdfWebPartProps {
description: string;
}
export default class Docx2PdfWebPart extends BaseClientSideWebPart<IDocx2PdfWebPartProps> {
public async render(): Promise<void> {
const client: MSGraphClient = await this.context.msGraphClientFactory.getClient();
var tenant = 'test';
var siteID = `${tenant}.sharepoint.com,12adb250-26f4-4dbb-9545-71d029bad763,8fdc3f56-2d6d-42d9-9a4d-d684e73c341e`;
var fileID = '01MBNFB7EIQLARTATNE5G3XDJNYBD2A3IL';
var fileName = 'Test.docx';
// This works
var site = await client.api(`/sites/${tenant}.sharepoint.com:/sites/dev:/drive?$select=id,weburl`).get();
console.log(site);
try {
// This does not work
var fileFromDrive = await client.api(`/drive/root:/${fileName}:/content?format=pdf`).get();
console.log(fileFromDrive);
var fileFromSite = await client.api(`/sites/${siteID}/drive/items/${fileID}/content?format=pdf`).get();
console.log(fileFromSite);
} catch (error) {
console.log(error);
}
this.domElement.innerHTML = `<h1>Hola Mundo</h1>`;
}
protected get dataVersion(): Version {
return Version.parse('1.0');
}
protected getPropertyPaneConfiguration(): IPropertyPaneConfiguration {
return {
pages: [
{
header: {
description: strings.PropertyPaneDescription
},
groups: [
{
groupName: strings.BasicGroupName,
groupFields: [
PropertyPaneTextField('description', {
label: strings.DescriptionFieldLabel
})
]
}
]
}
]
};
}
}
(Screenshot: the Chrome console log.)
But when I use Graph Explorer, it works correctly.
This is my package-solution.json:
{
"$schema": "https://developer.microsoft.com/json-schemas/spfx-build/package-solution.schema.json",
"solution": {
"name": "docx-2-pdf-client-side-solution",
"id": "f4b5db4f-d9ff-463e-b62e-0cc9c9e94089",
"version": "1.0.0.0",
"includeClientSideAssets": true,
"skipFeatureDeployment": true,
"isDomainIsolated": false,
"webApiPermissionRequests": [
{
"resource": "Microsoft Graph",
"scope": "Sites.Read.All"
},
{
"resource": "Microsoft Graph",
"scope": "Files.Read.All"
},
{
"resource": "Microsoft Graph",
"scope": "Files.ReadWrite.All"
},
{
"resource": "Microsoft Graph",
"scope": "Sites.ReadWrite.All"
}
]
},
"paths": {
"zippedPackage": "solution/docx-2-pdf.sppkg"
}
}
I used the following articles:
https://learn.microsoft.com/en-us/graph/api/driveitem-get-content?view=graph-rest-1.0&tabs=javascript
https://learn.microsoft.com/en-us/graph/api/driveitem-get-content-format?view=graph-rest-1.0&tabs=javascript#code-try-1
Try passing a callback to .get() instead of awaiting the promise:
client.api(`/drive/root:/${fileName}:/content?format=pdf`).get((err, response) => console.log("your response:", err, response));
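If the callback does come back with data, keep in mind that /content returns the binary file body; here is a sketch of one way to hand the converted PDF to the user in the browser (fileName as in the web part above):
client
  .api(`/drive/root:/${fileName}:/content?format=pdf`)
  .get((err, response) => {
    if (err) {
      console.log(err);
      return;
    }
    // Wrap the binary body in a Blob and trigger a client-side download.
    const blob = new Blob([response], { type: 'application/pdf' });
    const url = URL.createObjectURL(blob);
    const link = document.createElement('a');
    link.href = url;
    link.download = fileName.replace(/\.docx$/i, '.pdf');
    link.click();
    URL.revokeObjectURL(url);
  });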
With BigQueryIO.write().withCreateDisposition(CreateDisposition.CREATE_IF_NEEDED) and DynamicDestinations we can write to dynamic tables, and if a table does not exist it is created from the TableSchema provided by DynamicDestinations.
I am not able to add clustering fields as part of the TableSchema model, because it does not have such a feature.
How can I use DynamicDestinations with a TableSchema that includes clustering fields?
The BigQuery API is one way to add clustering fields to a table.
Using this link you can test the API before writing your code:
function execute() {
return gapi.client.bigquery.jobs.insert({
"resource": {
"configuration": {
"query": {
"clustering": {
"fields": [
"Field1",
"Field2"
]
},
"query": "select 5",
"destinationTable": {
"datasetId": "Id1",
"projectId": "Project1",
"tableId": "T1"
}
}
}
}
})
.then(function(response) {
// Handle the results here (response.result has the parsed body).
console.log("Response", response);
},
function(err) { console.error("Execute error", err); });
}
And this is a JS example of how to manipulate the parameters:
static setConfiguration(params, configuration) {
//To have a destination table we MUST have a tableId
if (params.destinationTable && params.destinationTable.tableId) {
configuration.query.destinationTable = params.destinationTable
}
if (params.clusteringFields) {
configuration.query.clustering = {fields: params.clusteringFields}
}
if (params.timePartitioning) {
configuration.query.timePartitioning = {
type: 'DAY',
field: params.timePartitioning
}
}
if (params.writeDisposition) {
configuration.query.writeDisposition = params.writeDisposition
}
if (params.queryPriority && params.queryPriority.toUpperCase() === "BATCH") {
configuration.query.priority = "BATCH"
}
if (params.useCache === false) {
configuration.query.useQueryCache = params.useCache
}
if (params.maxBillBytes) {
configuration.query.maximumBytesBilled = params.maxBillBytes
}
if (params.maxBillTier) {
configuration.query.maximumBillingTier = params.maxBillTier
}
}
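A hypothetical usage of the helper above (BigQueryHelper is a made-up class holding the static method, and the parameter values are purely illustrative):
// Base job configuration; the helper attaches destination, clustering and partitioning.
const configuration: any = {
  query: {
    query: 'SELECT Field1, Field2 FROM `Project1.Id1.source_table`', // made-up query
    useLegacySql: false,
  },
};

BigQueryHelper.setConfiguration(
  {
    destinationTable: { projectId: 'Project1', datasetId: 'Id1', tableId: 'T1' },
    clusteringFields: ['Field1', 'Field2'],
    timePartitioning: 'event_date', // partitioned by DAY on this field
    writeDisposition: 'WRITE_TRUNCATE',
  },
  configuration
);

// configuration.query now carries destinationTable, clustering and timePartitioning,
// ready to be passed to jobs.insert as in the first snippet.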
As of version 2.16.0, BigQueryIO does provide an option to add clustering fields in DynamicDestinations:
@Override
public TableDestination getTable(String eventName) {
  return new TableDestination(tableSpec,
      tableDescription, timePartitioning, clustering);
}
Notice that the 4th parameter is clustering, which you can use.
I've been having trouble with Elasticsearch (ES) dynamic mappings. It seems like I'm in a catch-22. https://www.elastic.co/guide/en/elasticsearch/guide/current/custom-dynamic-mapping.html
The main goal is to store everything that comes into ES as a string.
What I've tried:
In ES you can't create a dynamic mapping until the index has been created. Okay, makes sense.
I can't create an empty index, so if the first item sent into the index is not a string, I can't reassign it... I won't know what type of object the first item in the index will be; it could be any type, because the app accepts a variety of objects/events.
So if I can't create the mapping ahead of time, I can't insert an empty index to create the mapping, and I can't change the mapping after the fact, how do I deal with the first item if it's NOT a string?
Here's what I'm currently doing (using the JavaScript client):
createESIndex = function (esClient){
esClient.index({
index: 'timeline-2015-11-21',
type: 'event',
body: event
},function (error, response) {
if (error) {
logger.log(logger.SEVERITY.ERROR, 'acceptEvent elasticsearch create failed with: '+ error + " req:" + JSON.stringify(event));
console.log(logger.SEVERITY.ERROR, 'acceptEvent elasticsearch create failed with: '+ error + " req:" + JSON.stringify(event));
res.status(500).send('Error saving document');
} else {
res.status(200).send('Accepted');
}
});
}
esClientLookup.getClient( function(esClient) {
esClient.indices.putTemplate({
name: "timeline-mapping-template",
body:{
"template": "timeline-*",
"mappings": {
"event": {
"dynamic_templates": [
{ "timestamp-only": {
"match": "#timestamp",
"match_mapping_type": "date",
"mapping": {
"type": "date",
}
}},
{ "all-others": {
"match": "*",
"match_mapping_type": "string",
"mapping": {
"type": "string",
}
}
}
]
}
}
}
}).then(function(res){
console.log("put template response: " + JSON.stringify(res));
createESIndex(esClient);
}, function(error){
console.log(error);
res.status(500).send('Error saving document');
});
});
Index templates to the rescue! That's exactly what you need: the idea is to create a template for your index, and as soon as you want to store a document in a matching index, ES will create the index for you with the mappings you gave (even dynamic ones).
curl -XPUT localhost:9200/_template/my_template -d '{
"template": "index_name_*",
"settings": {
"number_of_shards": 1
},
"mappings": {
"type_name": {
"dynamic_templates": [
{
"strings": {
"match": "*",
"match_mapping_type": "*",
"mapping": {
"type": "string"
}
}
}
],
"properties": {}
}
}
}'
Then when you index anything in an index whose name matches index_name_*, the index will be created with the dynamic mapping above.
For instance:
curl -XPUT localhost:9200/index_name_1/type_name/1 -d '{
"one": 1,
"two": "two",
"three": true
}'
That will create a new index called index_name_1 with a mapping type for type_name where all properties are mapped as string. You can verify that with:
curl -XGET localhost:9200/index_name_1/_mapping/type_name
Response:
{
"index_name_1" : {
"mappings" : {
"type_name" : {
"dynamic_templates" : [ {
"strings" : {
"mapping" : {
"type" : "string"
},
"match" : "*",
"match_mapping_type" : "*"
}
} ],
"properties" : {
"one" : {
"type" : "string"
},
"three" : {
"type" : "string"
},
"two" : {
"type" : "string"
}
}
}
}
}
}
Note that if you're willing to do this via the JavaScript API, you can use the indices.putTemplate call.
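For example, the same template as the curl command above could be registered like this (a sketch using the esClient from the question; the template name is made up):
esClient.indices.putTemplate({
  name: 'string-everything-template',
  body: {
    template: 'index_name_*',
    settings: {
      number_of_shards: 1
    },
    mappings: {
      type_name: {
        dynamic_templates: [
          {
            strings: {
              match: '*',
              match_mapping_type: '*',
              mapping: { type: 'string' }
            }
          }
        ],
        properties: {}
      }
    }
  }
}).then(
  (res) => console.log('put template response: ' + JSON.stringify(res)),
  (err) => console.log(err)
);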
user.ts
export const user = {
email: {
type: 'text',
},
};
activity.ts
export const activity = {
date: {
type: 'text',
},
};
common.ts
export const common = {
name: {
type: 'text',
},
};
import { Client } from '@elastic/elasticsearch';
import { user } from './user';
import { activity } from './activity';
import { common } from './common';
// ElasticIndex (used below) is assumed to be an enum of index names defined elsewhere in the project.
export class UserDataFactory {
private schema = {
...user,
...activity,
...common,
relation_type: {
type: 'join',
eager_global_ordinals: true,
relations: {
parent: ['activity'],
},
},
};
constructor(private client: Client) {
Object.setPrototypeOf(this, UserDataFactory.prototype);
}
async create() {
const settings = {
settings: {
analysis: {
normalizer: {
useLowercase: {
filter: ['lowercase'],
},
},
},
},
mappings: {
properties: this.schema,
},
};
const { body } = await this.client.indices.exists({
index: ElasticIndex.UserDataFactory,
});
    // Create the index only if it does not already exist.
    if (!body) {
      await this.client.indices.create({
        index: ElasticIndex.UserDataFactory,
      });
    }
await this.client.indices.close({ index: ElasticIndex.UserDataFactory });
await this.client.indices.putSettings({
index: ElasticIndex.UserDataFactory,
body: settings,
});
await this.client.indices.open({
index: ElasticIndex.UserDataFactory,
});
await this.client.indices.putMapping({
index: ElasticIndex.UserDataFactory,
body: {
dynamic: 'strict',
properties: {
...this.schema,
},
},
});
}
}
wrapper.ts
class ElasticWrapper {
private _client: Client = new Client({
node: process.env.elasticsearch_node,
auth: {
username: 'elastic',
password: process.env.elasticsearch_password || 'changeme',
},
ssl: {
ca: process.env.elasticsearch_certificate,
rejectUnauthorized: false,
},
});
get client() {
return this._client;
}
}
export const elasticWrapper = new ElasticWrapper();
index.ts
new UserDataFactory(elasticWrapper.client).create();
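As a quick illustration of what dynamic: 'strict' gives you here, a usage sketch (the import path for UserDataFactory and ElasticIndex is hypothetical; adjust it to your project):
import { elasticWrapper } from './wrapper';
import { UserDataFactory, ElasticIndex } from './user-data-factory'; // hypothetical path

async function demo() {
  await new UserDataFactory(elasticWrapper.client).create();

  // Fields declared in the schema are indexed as usual.
  await elasticWrapper.client.index({
    index: ElasticIndex.UserDataFactory,
    body: { email: 'user@example.com', name: 'Alice', date: '2021-01-01' },
  });

  // Because the mapping is dynamic: 'strict', a field that is not in the schema
  // (here "nickname") is rejected with a strict_dynamic_mapping_exception instead
  // of being mapped on the fly.
  await elasticWrapper.client.index({
    index: ElasticIndex.UserDataFactory,
    body: { nickname: 'Al' },
  });
}

demo().catch(console.error);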