Is there a way to get FactoryBoy to pass a parameter to the save() method of my Django model? - factory-boy

I have a Django model like this:
class NicePerson(models.Model):
    """A person whose last name can optionally be Irish-ified on save."""

    last_name = models.CharField(max_length=100)

    def save(self, make_irish=False, *args, **kwargs):
        """If make_irish is True, prepend last_name with O' before saving."""
        if make_irish:
            self.last_name = "O'" + self.last_name
        # Bug in the original snippet: it called super(MyModel, self) —
        # the enclosing class is NicePerson, not MyModel.
        super(NicePerson, self).save(*args, **kwargs)
And I have a FactoryBoy class to build NicePerson instances
class NicePersonFactory(factory.django.DjangoModelFactory):
    """Factory producing NicePerson instances."""

    class Meta:
        model = NicePerson
I know I can use these together like so:
nice_person = NicePersonFactory(last_name='Shea')
But how can I pass the "make_irish" parameter to my save() method?

factory_boy uses the default manager for creation, i.e it calls NicePerson.objects.create() or NicePerson.objects.get_or_create().
For your example, you could override those managers (in your Model definition):
class NicePersonManager(models.Manager):
    """Manager whose create() understands the extra make_irish flag."""

    def create(self, *args, **kwargs):
        # Pop the custom flag so it never reaches the model-field kwargs.
        if kwargs.pop('make_irish', False):
            kwargs.update(...)  # placeholder in the original answer — e.g. prepend "O'" to last_name
        return super().create(*args, **kwargs)
Another option would be to override your factory's _create and _get_or_create methods. For instance:
class NicePersonFactory(factory.django.DjangoModelFactory):
    """Factory that routes the make_irish flag into Model.save()."""

    # NOTE: the original text shows '#classmethod' — an extraction artifact
    # for the '@classmethod' decorator, restored here.
    @classmethod
    def _create(cls, model_class, *args, **kwargs):
        # Extract the custom flag before instantiating the model,
        # then forward it to save() explicitly.
        make_irish = kwargs.pop('make_irish', False)
        instance = model_class(**kwargs)
        instance.save(make_irish=make_irish)
        return instance

Related

writing a class decorator

I have a class that has a method called my_func(x, s, n). I need to vectorize this function; that is to say, I want to be able to pass x = [3, 4, 5, 6, 7] (or any range of values) and have it give me a result. I am using numpy and, looking through here, I managed to find a solution that works. However, I want to make it object-oriented. I tried this:
class Vectorize:
    """Vectorization wrapper intended to work with instance methods.

    NOTE(review): as posted, instances are not callable (there is no
    ``__call__``), which is exactly the "Vectorize object is not callable"
    error the question reports — the decorator logic lives in the
    ``decorator`` method instead.
    """

    def __init__(self, otypes=None, signature=None):
        self.otypes = otypes
        self.sig = signature

    # Decorator as an instance method
    def decorator(self, fn):
        vectorized = np.vectorize(fn, otypes=self.otypes, signature=self.sig)

        @wraps(fn)  # original showed '#wraps(fn)' — mangled '@' restored
        def wrapper(*args, **kwargs):
            return vectorized(*args, **kwargs)

        return wrapper
and then i tried this:
@Vectorize(signature=("(),(),(),()->()"))
def my_func(self, k: int, s: float, n: int):
I keep getting an error, Vectorize object is not callable. Is there any other way to do this? Thanks
I managed to fix this issue. But, now that you have said signature degrades performance, I'm considering alternate solution. For those who are curious:
class Vectorize:
    """Vectorization decorator factory that works with instance methods."""

    def vectorize(self, otypes=None, signature=None):
        # Decorator as an instance method
        def decorator(fn):
            vectorized = np.vectorize(fn, otypes=otypes, signature=signature)

            @wraps(fn)  # original showed '#wraps(fn)' — mangled '@' restored
            def wrapper(*args, **kwargs):
                return vectorized(*args, **kwargs)

            return wrapper

        return decorator
class CustomClass:
    # A single shared Vectorize helper used to build the decorator.
    v = Vectorize()

    @v.vectorize(signature=("(),(),(),()->()"))  # original showed '#v.vectorize' — mangled '@' restored
    def my_func(self, k: int, s: float, n: int):
        ...  # body omitted in the original post

How can I use a SQL nextval function in a Django insert - Custom Model Field?

I'm trying to write a model field that when called on a model insert will issue a nextval("my_seq") such that the resulting query that django issues is something like:
INSERT INTO app_table (a, b) VALUES ("something", nextval("my_seq"));
So far I've come up with a field that I override get_db_prep_value and make it return the nextval string when the object is being inserted, but it gets quoted.
How do I get around the value being quoted?
This is what I have for the field:
class MyField(models.Field):
    """Custom field that emits a nextval(...) SQL expression on first insert."""

    def __init__(self, *args, **kwargs):
        self.obj = None  # the model instance, captured via the descriptor protocol
        # Bug in the original snippet: the closing ')' was missing on this call.
        super(MyField, self).__init__(*args, **kwargs)

    def contribute_to_class(self, cls, name):
        # Bug in the original snippet: it said super(myField, ...) — wrong capitalization.
        super(MyField, self).contribute_to_class(cls, name)
        # Install this field instance as a descriptor on the model class.
        setattr(cls, self.name, self)

    def __get__(self, obj, tp=None):
        if obj is None:
            raise AttributeError('Can only be accessed via instance')
        self.obj = obj
        return obj.__dict__[self.name]

    def __set__(self, obj, value):
        # Bug in the original snippet: it indexed by self.value;
        # the attribute key is self.name.
        obj.__dict__[self.name] = self.to_python(value)

    def db_type(self, connection):
        return 'varchar(25)'

    def get_db_prep_value(self, value, connection, prepared=False):
        value = super(MyField, self).get_db_prep_value(value, connection, prepared)
        # Only substitute the sequence expression on the initial insert
        # (no primary key yet) and when the instance's flag is set.
        if self.obj is not None and self.obj.id is None:
            if self.obj.flag:
                value = '( "XXX" || nextval("my_seq") )'
        return value

    def to_python(self, value):
        if isinstance(value, six.string_types) or value is None:
            return value
        return smart_text(value)

    def get_prep_value(self, value):
        return self.to_python(value)
I am not sure if what you are doing is the best Django practice but here is how you can use nextval.
Add to your model
def get_placeholder(self, value, connection):
    """Return the SQL placeholder so the bound value is wrapped in nextval().

    NOTE(review): in PostgreSQL a sequence name is a string literal, so
    single quotes — nextval('%s') — are presumably what is wanted here;
    confirm against the target database before shipping.
    """
    return 'nextval("%s")'
Also consider using SubfieldBase
in Django 1.5+
class MyField(models.Field, metaclass=models.SubfieldBase):
see The SubfieldBase metaclass
in Django < 1.5
__metaclass__ = models.SubfieldBase
see Writing a custom field in Django

Celery: abort task on connection error

I have to implement a Task subclass that gracefully fails if the broker is not running - currently I'm using RabbitMQ.
I could probably just use a try statement to catch the exception:
# Best-effort dispatch: notify an admin instead of crashing when the
# broker is unreachable (socket.error is what the AMQP transport raises).
try:
    Mytask.delay(arg1, arg2)
except socket.error:
    # Send a notice to an admin
    pass
but I'd like to create a subclass of Task that can handle that.
I've tried something like that:
class MyTask(Task):
    """Task that swallows broker socket errors instead of raising them."""

    ignore_result = True

    def __call__(self, *args, **kwargs):
        try:
            return self.run(*args, **kwargs)
        except socket.error:
            # Send a notice to an admin
            return None
but the workflow is clearly wrong. I think I need to inject maybe a backend subclass or a failure policy somehow.
Do you have any suggestion?
A possible solution I came up with:
import socket
from celery.decorators import task
from celery.task import Task
from celery.backends.base import BaseBackend
UNDELIVERED = 'UNDELIVERED'
class DummyBackend(BaseBackend):
    """
    Dummy queue backend for undelivered messages (due to the broker being down).
    """

    def store_result(self, *args, **kwargs):
        pass

    def get_status(self, *args, **kwargs):
        return UNDELIVERED

    def _dummy(self, *args, **kwargs):
        return None

    # All result-retrieval operations are no-ops for undelivered tasks.
    wait_for = get_result = get_traceback = _dummy
class SafeTask(Task):
    """
    A task not raising socket errors if the broker is down.
    """

    abstract = True
    on_broker_error = None   # optional callable(err, cls, *args, **kwargs)
    errbackend = DummyBackend

    # NOTE: the original text showed '#classmethod' — extraction artifact
    # for the '@classmethod' decorator, restored here.
    @classmethod
    def apply_async(cls, *args, **kwargs):
        try:
            return super(SafeTask, cls).apply_async(*args, **kwargs)
        except socket.error as err:  # modernized from py2 'except X, err' syntax
            if cls.on_broker_error is not None:
                cls.on_broker_error(err, cls, *args, **kwargs)
            # Hand back a result object wired to the dummy backend so
            # callers can still inspect status without blowing up.
            return cls.app.AsyncResult(None, backend=cls.errbackend(),
                                       task_name=cls.name)
def safetask(*args, **kwargs):
    """
    Task factory returning safe tasks handling socket errors.

    When a socket error occurs, the given callable *on_broker_error*
    is called passing the exception object, the class of the task
    and the original args and kwargs.
    """
    # Only synthesize a SafeTask subclass when the caller has not
    # supplied an explicit base class of their own.
    if 'base' not in kwargs:
        on_broker_error = kwargs.pop('on_broker_error', SafeTask.on_broker_error)
        errbackend = kwargs.pop('errbackend', SafeTask.errbackend)
        kwargs['base'] = type('SafeTask', (SafeTask,), {
            'on_broker_error': staticmethod(on_broker_error),
            'errbackend': errbackend,
            'abstract': True,
        })
    return task(*args, **kwargs)
You can either subclass SafeTask or use the decorator @safetask.
If you can think of an improvement, don't hesitate to contribute.

Python: Anything wrong with dynamically assigning instance methods as instance attributes

I came up with the following code to decorate instance methods using a decorator that requires the instance itself as an argument:
from functools import wraps
def logging_decorator(tricky_instance):
    """Decorator factory: logs each call when tricky_instance.log is truthy."""
    def wrapper(fn):
        @wraps(fn)  # original showed '#wraps(fn)' — mangled '@' restored
        def wrapped(*a, **kw):
            if tricky_instance.log:
                print("Calling %s.." % fn.__name__)
            return fn(*a, **kw)
        return wrapped
    return wrapper
class Tricky(object):
    """Rebinds say_hi per instance so the decorator can see the instance."""

    def __init__(self, log):
        self.log = log
        # Shadow the class attribute with a decorated bound method.
        self.say_hi = logging_decorator(self)(self.say_hi)

    def say_hi(self):
        print("Hello, world!")


i1 = Tricky(log=True)
i2 = Tricky(log=False)
i1.say_hi()
i2.say_hi()
This seems to work great, but I fear that I may have overlooked some unintended side effects of this trick. Am I about to shoot myself in the foot, or is this safe?
Note that I don't actually want to use this for logging, it's just the shortest meaningful example I could come up with.
It's not really clear to me why you would ever want to do this. If you want to assign a new method type dynamically use types:
import types
class Tricky(object):
    """Example: attach a bound method dynamically via types.MethodType."""

    def __init__(self):
        def method(self):
            print('Hello')
        # Bind the local function to this instance as a real method.
        self.method = types.MethodType(method, self)
If you want to do something with the instance, do it in the __init__ method. If you just want access to the method's instance inside the decorator, you can use the im_self attribute:
def decorator(tricky_instance):
    """Print whether the decorated method is bound to tricky_instance.

    NOTE(review): im_self is the Python 2 attribute for a bound method's
    instance; on Python 3 the equivalent is meth.__self__.
    """
    def wrapper(meth):
        print(meth.im_self == tricky_instance)
        return meth  # the method itself is returned unchanged
    return wrapper
Personally, I think this is veering into Maybe-I-Shouldn't-Use-Decorators land.
I think I was trying to be needlessly smart. There seems to be an embarrassingly simpler solution:
from functools import wraps


def logging_decorator(fn):
    """Log the call when the receiving instance has a truthy .log flag."""
    @wraps(fn)  # original showed '#wraps(fn)' — mangled '@' restored
    def wrapped(self, *a, **kw):
        if self.log:
            print("Calling %s.." % fn.__name__)
        return fn(self, *a, **kw)
    return wrapped


class Tricky(object):
    def __init__(self, log):
        self.log = log

    @logging_decorator  # original showed '#logging_decorator' — mangled '@' restored
    def say_hi(self):
        print("Hello, world!")


i1 = Tricky(log=True)
i2 = Tricky(log=False)
i1.say_hi()
i2.say_hi()

Specify action to be performed at the end of many functions

I have a python object in which a bunch of functions need to perform the same action at the end of execution, just before the return statement. For example:
class MyClass(object):
    """Illustrative class from the question.

    Bug in the original snippet: it said ``def MyClass(object):`` which
    defines a *function*, not a class — ``class`` is what was meant.
    ``result`` is an undefined placeholder in the original post.
    """

    def __init__(self):
        pass

    def update_everything(self):
        '''update everything'''
        pass

    def f1(self):
        # do stuff
        self.update_everything()
        return result

    def f2(self):
        # do stuff
        self.update_everything()
        return result

    def f3(self):
        # do stuff
        self.update_everything()
        return result
What is the best (pythonic?) way to do this, except for the explicit calls at the end of each function?
I think that any solution to your problem would be unpythonic, because (as Tim Peters says in the Zen of Python (import this)):
Explicit is better than implicit.
Yes, using a decorator is actually more code, but it does have the advantage that you can see that a method updates everything at a glance. It's a different kind of explicitness ;-)
def update_after(m):
    """ calls self.update_everything() after method m """
    def decorated(self, *args, **kwargs):
        # Run the wrapped method first, keep its result, then trigger
        # the common post-update hook before returning.
        r = m(self, *args, **kwargs)
        self.update_everything()
        return r
    return decorated
class MyClass(object):
    """Same class as the question, with each method wrapped in update_after.

    Artifacts fixed from the original snippet: ``def MyClass`` should be
    ``class MyClass``, and ``#update_after`` is the mangled ``@update_after``
    decorator. ``result`` is an undefined placeholder in the original post.
    """

    def __init__(self):
        pass

    def update_everything(self):
        '''update everything'''
        pass

    @update_after
    def f1(self):
        # do stuff
        return result

    @update_after
    def f2(self):
        # do stuff
        return result

    @update_after
    def f3(self):
        # do stuff
        return result
Maybe the other way round?
class MyClass(object):
    """Inversion of the decorator approach: callers run methods through update()."""

    def update(self, func):
        # Run the supplied method, then perform the common action.
        value = func()
        # do something common
        return value

    def f1(self):
        # do stuff; 'result' is an undefined placeholder in the original post
        return result

    def f2(self):
        # do stuff
        return result
my_object = MyClass()
my_object.update(my_object.f1)
Edit:
You could also write it in such way that update accepts a string being a name of the object's method. This would prevent running other objects' methods.
my_object.update('f1')