I am trying to create an API using Flask, SQLAlchemy, Marshmallow, and PostGIS that returns a GeoJSON FeatureCollection. I want to be able to work with any geometry type (Point, Polygon, ...).
I have tried many things but never successfully recreated the GeoJSON FeatureCollection format. Is it possible to force such a shape onto a marshmallow schema?
This is my SQLAlchemy model:
class Locations(db.Model):
    __tablename__ = 'locations'

    id: int = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name: str = db.Column(db.String, nullable=False)
    linename: str = db.Column(db.String, nullable=False)
    point = db.Column(Geometry('POINT'))
This is my marshmallow schema:
class LocationsSchema(SQLAlchemyAutoSchema):
    point = fields.Method('wkt_to_geojson')

    def wkt_to_geojson(self, obj):
        return {'type': 'Feature', 'properties': {'linename': obj.linename},
                'geometry': shapely.geometry.mapping(to_shape(obj.point))}

    class Meta:
        model = Locations

locations_schema = LocationsSchema(many=True, only=["point"])
This is my blueprint route:
@map_data_blueprint.route('/locations', methods=['GET'])
def all_items():
    locations = Locations.query.all()
    serialized = locations_schema.dump(locations)
    return jsonify(serialized)
This is the JSON I receive from the API:
[
  {
    "id": 2,
    "point": {
      "geometry": {
        "coordinates": [6.130649, 49.609332],
        "type": "Point"
      },
      "properties": {
        "linename": "1"
      },
      "type": "Feature"
    }
  },
  {
    "id": 3,
    "point": {
      "geometry": {
        "coordinates": [6.126288, 49.598557],
        "type": "Point"
      },
      "properties": {
        "linename": "1"
      },
      "type": "Feature"
    }
  }
]
But I am trying to get the GeoJSON FeatureCollection format (as shown in the FeatureCollection example of the GeoJSON specification).
I found a solution by combining the geoalchemy2.shape, shapely, and geojson packages. I removed the marshmallow layer for this specific API, as I have not found a way to do it with marshmallow.
import geojson
import shapely.geometry
from geoalchemy2.shape import to_shape

def parse_geojson(obj):
    geo = shapely.geometry.mapping(to_shape(obj.point))
    if geo:
        return geojson.Feature(
            id=obj.id,
            geometry=geo,
            properties={
                "linename": obj.linename
            })

@map_data_blueprint.route('/locations', methods=['GET'])
def all_items():
    locations = Locations.query.all()
    features = [parse_geojson(location) for location in locations]
    serialized = geojson.dumps(geojson.FeatureCollection(features))
    return serialized
I don't know whether it is best practice to serialize the query result in the route or not, but this works.
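For anyone who still wants to keep the marshmallow layer: it should be possible to let the schema itself emit a FeatureCollection by wrapping the dumped rows in a post_dump hook. A minimal sketch, not tested against the exact models above, assuming the same Locations model and the geoalchemy2/shapely conversion used earlier:
import shapely.geometry
from geoalchemy2.shape import to_shape
from marshmallow import fields, post_dump
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema

class LocationsFeatureSchema(SQLAlchemyAutoSchema):
    point = fields.Method('wkt_to_geojson')

    class Meta:
        model = Locations

    def wkt_to_geojson(self, obj):
        # Same conversion as above: geometry column -> shapely -> GeoJSON mapping.
        return {
            'type': 'Feature',
            'id': obj.id,
            'properties': {'linename': obj.linename},
            'geometry': shapely.geometry.mapping(to_shape(obj.point)),
        }

    @post_dump(pass_many=True)
    def wrap_feature_collection(self, data, many, **kwargs):
        # Runs once after dumping; wraps the dumped rows into a FeatureCollection.
        features = [row['point'] for row in data] if many else [data['point']]
        return {'type': 'FeatureCollection', 'features': features}

# Usage sketch: the schema now returns a FeatureCollection dict directly.
# locations_schema = LocationsFeatureSchema(many=True, only=["point"])
# return jsonify(locations_schema.dump(Locations.query.all()))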
I want to send all the data in a response as strings. For example, id is stored in the database as an integer, but I want to send it as a string in the response.
E.g., I currently have this response:
{
  "categories": [
    {
      "id": 1,
      "category": "xya",
      "quantity": 25
    }
  ]
}
I want it to be:
{
  "categories": [
    {
      "id": "1",
      "category": "xya",
      "quantity": "25"
    }
  ]
}
I am using ModelSerializer to send all the fields.
Another option is to convert int to str using the to_representation method of your model serializer.
class YourSerializer(serializers.ModelSerializer):
    # other fields

    def to_representation(self, instance):
        """ Override `to_representation` method """
        repr = super().to_representation(instance)
        repr['id'] = str(repr['id'])
        repr['quantity'] = str(repr['quantity'])
        return repr
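If every field should come back as a string, not just id and quantity, the same hook can be generalized. A small sketch of my own (not from the original answer), assuming flat representations without nested serializers:
from rest_framework import serializers

class StringifiedModelSerializer(serializers.ModelSerializer):
    """Hypothetical base class: render every non-null value as a string."""

    def to_representation(self, instance):
        repr = super().to_representation(instance)
        return {
            key: str(value) if value is not None else None
            for key, value in repr.items()
        }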
You can explicitly define the id field in your serializer to be a CharField(), like this:
class YourSerializer(serializers.ModelSerializer):
    # other fields
    id = serializers.CharField()

    class Meta:
        model = SomeModel
        fields = ('id', ..... other fields)
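If quantity should also come back as a string, the same trick applies to it. A sketch assuming the field names from the example response above; the model and field list are hypothetical, and read_only keeps write behaviour unchanged:
from rest_framework import serializers

class CategorySerializer(serializers.ModelSerializer):
    # CharField stringifies the integer values on output.
    id = serializers.CharField(read_only=True)
    quantity = serializers.CharField(read_only=True)

    class Meta:
        model = Category  # hypothetical model behind the "categories" response
        fields = ('id', 'category', 'quantity')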
I searched for a similar question without success. I am working on a website in which I am using django-mptt to organize categories. My model looks like:
class Category(MPTTModel):
    parent = TreeForeignKey('self', on_delete=models.CASCADE, null=True, related_name='children')
    name = models.CharField(max_length=90)
    slug = models.SlugField(unique=True)

    _full_slug_separator = '/'

    @property
    def url(self):
        names = [category.name for category in self.get_ancestors(include_self=True)]
        return self._full_slug_separator.join(names)
I defined the CategorySerializer as below:
class CategorySerializer(serializers.ModelSerializer):
    children = serializers.SerializerMethodField()

    class Meta:
        model = Category
        fields = ('name', 'url', 'children')

    def get_children(self, obj):
        return CategorySerializer(obj.get_children(), many=True).data

# views.py
class CategoryList(generics.ListAPIView):
    queryset = Category.objects.root_nodes()
    serializer_class = CategorySerializer
The question is: how can I
1. have the 'url' data included in leaf nodes only, and
2. have the 'children' data included in non-leaf nodes only?
Here is an example of the output I am looking for:
[
  {
    "title": "root node",
    "children": [
      {
        "title": "leaf node",
        "url": "link"
      },
      {
        "title": "non leaf node",
        "children": [
          {
            "title": "leaf node",
            "url": "link"
          }
        ]
      },
      {
        "title": "non leaf node",
        "children": [
          {
            "title": "non leaf node",
            "children": [
              {
                "title": "leaf node",
                "url": "link"
              }
            ]
          }
        ]
      }
    ]
  },
  {
    "title": "root node",
    "url": "link"
  }
]
I also want to know whether there is a good way of generating the 'url' that reduces the number of queries.
Thanks for any help in advance.
I ran into the same problem and found this question, but there were no answers. So I am posting how I managed to do it, for anyone who may need it in the future. This is most likely not the best solution, but it works.
My models.py:
from django.db import models
from django.utils.text import slugify
from django.utils.translation import gettext_lazy as _
from mptt.models import MPTTModel, TreeForeignKey

class Category(MPTTModel):
    name = models.CharField(max_length=100, null=False, blank=False, verbose_name=_("category name"), help_text=_("format: required, max_length=100"))
    slug = models.SlugField(max_length=150, null=False, blank=False, editable=False, verbose_name=_("category url"), help_text=_("format: required, letters, numbers, underscore or hyphen"))
    parent = TreeForeignKey("self", on_delete=models.SET_NULL, related_name="children", null=True, blank=True, verbose_name=_("parent category"), help_text=_("format: not required"))

    class MPTTMeta:
        order_insertion_by = ['name']

    class Meta:
        verbose_name = _('product category')
        verbose_name_plural = _('product categories')

    def save(self, *args, **kwargs):
        self.slug = slugify(self.name)
        super(Category, self).save(*args, **kwargs)

    def __str__(self):
        return self.name
My serializers.py:
from rest_framework import serializers

from .models import Category

class SubCategorySerializer(serializers.ModelSerializer):
    """Serializer for the lowest-level category that has no children."""

    parent = serializers.SerializerMethodField(source='get_parent')

    class Meta:
        model = Category
        fields = ['id', 'name', 'slug', 'parent', ]

    def get_parent(self, obj):
        if obj.parent:
            return obj.parent.name

class CategorySerializer(serializers.ModelSerializer):
    """Serializer for category."""

    parent = serializers.SerializerMethodField(source='get_parent')
    children = serializers.SerializerMethodField(source='get_children')

    class Meta:
        model = Category
        fields = ['id', 'name', 'slug', 'parent', 'children', ]

    def get_parent(self, obj):
        if obj.parent:
            return obj.parent.name

    def get_children(self, obj):
        if obj.children.exists():
            children = [child for child in obj.children.all()]
            children_with_children = [child for child in children if child.children.exists()]
            children_without_children = [child for child in children if not child.children.exists()]
            if children_with_children:
                return CategorySerializer(children_with_children, many=True).data
            if children_without_children:
                return SubCategorySerializer(children_without_children, many=True).data
This way children is included in non-leaf nodes only.
Here is an example of how the data looks:
{
  "count": 2,
  "next": null,
  "previous": null,
  "results": [
    {
      "id": 4,
      "name": "Clothes",
      "slug": "clothes",
      "parent": null,
      "children": [
        {
          "id": 5,
          "name": "Men's Clothes",
          "slug": "mens-clothes",
          "parent": "Clothes",
          "children": [
            {
              "id": 7,
              "name": "Men's Jeans",
              "slug": "mens-jeans",
              "parent": "Men's Clothes"
            }
          ]
        },
        {
          "id": 6,
          "name": "Women's Clothes",
          "slug": "womens-clothes",
          "parent": "Clothes",
          "children": [
            {
              "id": 8,
              "name": "Women's Jeans",
              "slug": "womens-jeans",
              "parent": "Women's Clothes"
            }
          ]
        }
      ]
    },
    {
      "id": 1,
      "name": "Technology",
      "slug": "technology",
      "parent": null,
      "children": [
        {
          "id": 3,
          "name": "Laptop",
          "slug": "laptop",
          "parent": "Technology"
        },
        {
          "id": 2,
          "name": "Mobile Phone",
          "slug": "mobile-phone",
          "parent": "Technology"
        }
      ]
    }
  ]
}
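To come back to the two original requirements (url only on leaf nodes, children only on non-leaf nodes) and the follow-up question about reducing queries, another possible variation is to prune keys in to_representation and to serialize a pre-cached tree. A sketch, assuming the question's original Category model (the one with the url property) and django-mptt's cache_tree_children helper:
from mptt.utils import cache_tree_children
from rest_framework import serializers

from .models import Category  # assumed app layout

class CategoryTreeSerializer(serializers.ModelSerializer):
    children = serializers.SerializerMethodField()

    class Meta:
        model = Category
        fields = ('name', 'url', 'children')

    def get_children(self, obj):
        # get_children() reads from the cache built by cache_tree_children,
        # so no extra query is issued per node.
        return CategoryTreeSerializer(obj.get_children(), many=True).data

    def to_representation(self, instance):
        data = super().to_representation(instance)
        if instance.is_leaf_node():
            data.pop('children', None)  # leaf nodes keep 'url' only
        else:
            data.pop('url', None)       # non-leaf nodes keep 'children' only
        return data

# Usage sketch: one query fetches the whole tree, then the cached roots are serialized.
# Note: the url property still calls get_ancestors(), which queries per leaf;
# building the path while descending the tree would avoid even that.
# roots = cache_tree_children(Category.objects.all())
# data = CategoryTreeSerializer(roots, many=True).data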
I have seen a number of discussions on how to implement a permission system in graphene, but have not seen any definitive practical outcome beyond these discussions. Some examples of discussions on this topic are:
Permission System
How do you recommend enforcing authorization?
Splitting a GraphQL API between public & private
Unfortunately, none of these recommend a preferred approach to implementing permissions in graphene. Does anyone know what is currently the best practice for doing this?
First of all, splitting API endpoints into public/private makes sense only when the public interface differs significantly from the private one. If not, you will run into code-redundancy problems.
In our project we came up with a simple solution, which many seem to point to as the desired approach.
We use the following decorator on resolve methods:
# decorators.py
from functools import wraps

def permission_required(permission):
    """Check permissions on a per-method basis."""
    def wrapped_decorator(func):
        @wraps(func)
        def inner(cls, info, *args, **kwargs):
            if check_permission(permission, info.context):
                return func(cls, info, *args, **kwargs)
            raise Exception("Permission Denied.")
        return inner
    return wrapped_decorator

def check_permission(permission, context):
    """
    Helper function to resolve permissions.

    Permission can be a string "app_name.perm_codename"
    or a callable (lambda) function with the user passed as an argument,
    for example: lambda user: user.username.startswith('a')
    """
    if callable(permission):
        if not permission(context.user):
            return False
    else:
        if not context.user.has_perm(permission):
            return False
    return True
You can use this decorator as follows:
# schema.py
import graphene
from django.contrib.auth.models import User
from graphene import relay
from graphene_django import DjangoObjectType

from .decorators import permission_required

class UserNode(DjangoObjectType):
    class Meta:
        model = User
        interfaces = (relay.Node,)
        only_fields = (
            'id', 'first_name', 'last_name',
            'email', 'username'
        )
        filter_fields = {
            'username': ['exact'],
            'id': ['exact'],
        }

    role = graphene.String(description="User's role in the system.")

    @permission_required('our_app.some_perm')
    def resolve_role(self, info, **kwargs):
        if info.context.user.username in ['dev1', 'dev2']:
            return "developer"
        if info.context.user.is_superuser:
            return "admin"
        if info.context.user.is_staff:
            return "staff"
        return "guest"
If you don't have this particular permission our_app.some_perm you'll get the following response:
{
  "errors": [
    {
      "message": "Permission Denied.",
      "locations": [{"line": 7, "column": 9}],
      "path": ["userSet", "edges", 0, "node", "role"]
    },
    {
      "message": "Permission Denied.",
      "locations": [{"line": 7, "column": 9}],
      "path": ["userSet", "edges", 1, "node", "role"]
    }
  ],
  "data": {
    "userSet": {
      "edges": [
        {
          "node": {
            "id": "VXNlck5vZGU6MQ==",
            "username": "user1",
            "role": null
          }
        },
        {
          "node": {
            "id": "VXNlck5vZGU6Mg==",
            "username": "user2",
            "role": null
          }
        }
      ]
    }
  }
}
When you need a more expressive way to check permissions, for example when checking multiple permissions with an or statement, use lambdas in the @permission_required decorator:
@permission_required(lambda u: u.has_perm('app.perm1') or u.has_perm('app.perm2'))
def resolve_something1(self, info, **kwargs):
    # ... do your stuff here
    return data

@permission_required(lambda user: user.username.startswith('a'))
def resolve_something2(self, info, **kwargs):
    # ... do your stuff here
    return data
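The same decorator can also guard mutations, since graphene passes info as the second positional argument there too. A minimal sketch, assuming graphene-django and a hypothetical our_app.change_role permission (the names are illustrative, not from the original post):
import graphene

from .decorators import permission_required

class UpdateRole(graphene.Mutation):
    """Hypothetical mutation protected by the same permission decorator."""

    class Arguments:
        username = graphene.String(required=True)
        role = graphene.String(required=True)

    ok = graphene.Boolean()

    @permission_required('our_app.change_role')
    def mutate(root, info, username, role):
        # ... look up the user and update their role here ...
        return UpdateRole(ok=True)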
The required field in JSON Schema
JSON Schema features the properties, required, and additionalProperties keywords. For example, the following schema
{
  "type": "object",
  "properties": {
    "elephant": {"type": "string"},
    "giraffe": {"type": "string"},
    "polarBear": {"type": "string"}
  },
  "required": ["elephant", "giraffe", "polarBear"],
  "additionalProperties": false
}
will validate JSON objects like:
{
  "elephant": "Johnny",
  "giraffe": "Jimmy",
  "polarBear": "George"
}
But validation will fail if the set of properties is not exactly elephant, giraffe, polarBear.
The problem
I often copy-paste the list of properties to the list of required, and suffer from annoying bugs when the lists don't match due to typos and other silly errors.
Is there a shorter way to denote that all properties are required, without explicitly naming them?
You can just use the minProperties keyword instead of explicitly naming all the fields: since additionalProperties is false, only the declared properties are allowed, so requiring at least three properties forces all of them to be present.
{
  "type": "object",
  "properties": {
    "elephant": {"type": "string"},
    "giraffe": {"type": "string"},
    "polarBear": {"type": "string"}
  },
  "additionalProperties": false,
  "minProperties": 3
}
I doubt there is a way to specify required properties other than naming them explicitly in the required array.
But if you encounter this issue very often, I would suggest writing a small script that post-processes your JSON schema and automatically adds the required array to all defined objects.
The script just needs to traverse the JSON schema tree and, at each level, if a "properties" keyword is found, add a "required" keyword listing all keys defined in properties at that level.
Let the machines do the boring stuff.
I do this in code with a one-liner, for instance when I want required to be enforced for an insert into a DB, but only want to validate against the plain schema when performing an update.
prepareSchema(action) {
  const actionSchema = R.clone(schema)
  switch (action) {
    case 'insert':
      actionSchema.$id = `/${schema.$id}-Insert`
      actionSchema.required = Object.keys(schema.properties)
      return actionSchema
    default:
      return schema
  }
}
If you are using the jsonschema library in Python, you can use custom validators.
First, create the custom validator:
from jsonschema.exceptions import ValidationError

# Custom validator requiring all properties listed in the instance to appear in the instance's 'required' list
def allRequired(validator, allRequired, instance, schema):
    if not validator.is_type(instance, "object"):
        return
    if allRequired and "required" in instance:
        # require every property to be listed in 'required'
        instanceRequired = instance["required"]
        instanceProperties = list(instance["properties"].keys())
        for property in instanceProperties:
            if property not in instanceRequired:
                yield ValidationError("%r should be required but only the following are required: %r" % (property, instanceRequired))
        for property in instanceRequired:
            if property not in instanceProperties:
                yield ValidationError("%r should be in properties but only the following are properties: %r" % (property, instanceProperties))
Then extend an existing validator:
import jsonschema
from jsonschema import Draft4Validator

all_validators = dict(Draft4Validator.VALIDATORS)
all_validators['allRequired'] = allRequired

customValidator = jsonschema.validators.extend(
    validator=Draft4Validator,
    validators=all_validators
)
Now test it:
schema = {"allRequired": True}
instance = {"properties": {"name": {"type": "string"}}, "required": []}
v = customValidator(schema)
errors = validateInstance(v, instance)
You will get the error:
'name' should be required but only the following are required: []
As suggested by others, here's such post-processing python code:
def schema_to_strict(schema):
    if schema['type'] not in ['object', 'array']:
        return schema
    if schema['type'] == 'array':
        schema['items'] = schema_to_strict(schema['items'])
        return schema
    for k, v in schema['properties'].items():
        schema['properties'][k] = schema_to_strict(v)
    schema['required'] = list(schema['properties'].keys())
    schema['additionalProperties'] = False
    return schema
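A quick usage sketch with the object schema from the question (the resulting key order assumes Python 3.7+ dict ordering):
schema = {
    "type": "object",
    "properties": {
        "elephant": {"type": "string"},
        "giraffe": {"type": "string"},
        "polarBear": {"type": "string"},
    },
}

strict = schema_to_strict(schema)
# strict["required"] is now ["elephant", "giraffe", "polarBear"]
# and strict["additionalProperties"] is False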
You can use the function below:
export function addRequiredAttributeRecursive(schema) {
  if (schema.type === 'object') {
    schema.required = [];
    Object.keys(schema.properties).forEach((key) => {
      schema.required.push(key);
      if (schema.properties[key].type === 'object') {
        schema.properties[key] = addRequiredAttributeRecursive(
          schema.properties[key],
        );
      } else if (schema.properties[key].type === 'array') {
        schema.properties[key].items = addRequiredAttributeRecursive(
          schema.properties[key].items,
        );
      }
    });
  } else if (schema.type === 'array') {
    if (schema.items.type === 'object') {
      schema.items = addRequiredAttributeRecursive(schema.items);
    }
  }
  return schema;
}
It recursively writes the required attribute for every property of every object in the schema.
If you are using JavaScript (i.e. the schema is built as a JavaScript object rather than stored as plain JSON), you can use a property getter:
{
  "type": "object",
  "properties": {
    "elephant": {"type": "string"},
    "giraffe": {"type": "string"},
    "polarBear": {"type": "string"}
  },
  get required() { return Object.keys(this.properties) },
  "additionalProperties": false
}
I am currently trying to set up Ember to interact with Django REST Framework using the ember-django-adapter.
This works flawlessly. But since I started using djangorestframework-gis, Ember is not able to process the responses anymore.
I have not found anyone building GeoJSON with Ember except for https://gist.github.com/cspanring/5114078, but that does not seem to be the right approach because I do not want to change the data model.
This is the API response:
{
  "type": "FeatureCollection",
  "features": [
    {
      "id": 1,
      "type": "Feature",
      "geometry": {
        "coordinates": [9.84375, 53.665466308594],
        "type": "Point"
      },
      "properties": {
        "date_created": "2014-10-05T20:08:43.565Z",
        "body": "Hi",
        "author": 1,
        "expired": false,
        "anonymous": false,
        "input_device": 1,
        "image": "",
        "lat": 0.0,
        "lng": 0.0
      }
    }
  ]
}
While Ember expects something like:
[{"id":1,
"date_created":"2014-10-05T20:08:43.565Z",
"body":"Hi",
"author":1,
"expired":false,
"anonymous":false,
"input_device":1,
"image":"",
"lat":0,
"lng":0
}
]
My take on this was to write my own Serializer:
import Ember from "ember";
import DS from "ember-data";

export default DS.DjangoRESTSerializer.extend({

  extractArray: function(store, type, payload) {
    console.log(payload);
    //console.log(JSON.stringify(payload));
    var features = payload["features"];
    var nPayload = [];

    for (var i = features.length - 1; i >= 0; i--) {
      var message = features[i];
      var nmessage = {"id": message.id};
      for (var entry in message.properties) {
        var props = message.properties;
        if (message.properties.hasOwnProperty(entry)) {
          var obj = {}
          nmessage[entry] = props[entry];
        }
      }
      nPayload.push(nmessage);
    }

    console.log(nPayload); // prints in the format above
    this._super(store, type, nPayload);
  },

})
But I receive the following error:
The response from a findAll must be an Array, not undefined
What am I missing here? Or is this the wrong approach? Has anyone ever tried to get this to work?
An alternative would be to handle this on the server side: simply output a regular REST framework response and set lat and lng in the backend.
This is not a valid answer to the question above, but I wanted to share my solution anyway, just in case anyone ever gets into the same situation:
I now do not return valid GeoJSON, but custom lat and lng values. The following is backend code for django-rest-framework:
Model:
# models/message.py
class Message(models.Model):
    # 'location' is assumed to be a GeoDjango geometry field (e.g. a PointField)

    def lat(self):
        return self.location.coords[1]

    def lng(self):
        return self.location.coords[0]
And in the serializer:
# message/serializer.py
class MessageSerializer(serializers.ModelSerializer):
    lat = serializers.Field(source="lat")
    lng = serializers.Field(source="lng")
Ember can easily handle the format.
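Note that serializers.Field(source=...) is DRF 2.x syntax; on current DRF 3.x the equivalent is ReadOnlyField, and source can be omitted when it matches the field name. A sketch under that assumption (the field list is illustrative, not from the original post):
# message/serializer.py (DRF 3.x)
from rest_framework import serializers

from .models import Message  # assumed module layout

class MessageSerializer(serializers.ModelSerializer):
    # ReadOnlyField calls the model's lat()/lng() methods when serializing.
    lat = serializers.ReadOnlyField()
    lng = serializers.ReadOnlyField()

    class Meta:
        model = Message
        fields = ('id', 'body', 'lat', 'lng')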