I'll give the code in Python, but the language doesn't really matter.
I have a module argument_parser with a dictionary and a class:
FORMATS = {
    'JSON': 'json',
    'XML': 'xml',
}

class ArgumentParser:
    # some methods

    def parse(self):
        """returns 'XML' or 'JSON' string"""
        return format
and a module presenter with the presenter classes:
class JSONPresenter:
    # some magic

class XMLPresenter:
    # some magic
The problem is with dependency injection:
argument_parser = ArgumentParser()
format = argument_parser.parse()

if format == argument_parser.FORMATS['JSON']:
    presenter = JSONPresenter()
elif format == argument_parser.FORMATS['XML']:
    presenter = XMLPresenter()
The if-elif construction is ugly: if I want to add other formats, I'll have to add more elif branches.
I could define the type of presenter in the ArgumentParser class, but I think that is semantically wrong -- choosing a presenter is not this class's responsibility. What is the right way to do this?
Use a dictionary that maps the format string to a presenter:

PRESENTERS = {
    'json': JSONPresenter(),
    'xml': XMLPresenter(),
}

argument_parser = ArgumentParser()
fmt = argument_parser.parse()
presenter = PRESENTERS[fmt]
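One possible refinement (just a sketch, not something the question requires): if parse() can return a format with no registered presenter, the bare dictionary lookup raises a plain KeyError, which you may want to turn into a clearer error:

try:
    presenter = PRESENTERS[fmt]
except KeyError:
    # Unknown format string coming back from ArgumentParser.parse()
    raise ValueError('Unsupported format: {!r}'.format(fmt))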
Or, if you need a new presenter every time:
PRESENTERS = {
    'json': lambda: JSONPresenter(),
    'xml': lambda: XMLPresenter(),
}

argument_parser = ArgumentParser()
fmt = argument_parser.parse()
presenter = PRESENTERS[fmt]()
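Since classes are themselves callables in Python, a slightly leaner variant of the same idea (a sketch, assuming the presenter constructors take no arguments) is to store the classes and call them:

PRESENTERS = {
    'json': JSONPresenter,   # store the class, not an instance
    'xml': XMLPresenter,
}

argument_parser = ArgumentParser()
fmt = argument_parser.parse()
presenter = PRESENTERS[fmt]()  # calling the class builds a fresh presenter

Registering a new format is then a one-line addition to the dictionary, with no if-elif to touch.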
I'm struggling to get Simple XML and Kotlin to read an XML file properly.
I've got the following Root class:
class ServerConfiguration {
    @field:Root(strict = false, name = "serverConfiguration")
    @field:ElementList(required = true, name = "channels", entry = "channel", inline = true, type = Channel::class)
    lateinit var channels: List<Channel>

    @field:Element(name = "serverSettings", required = true, type = ServerSettings::class)
    lateinit var serverSettings: ServerSettings
}
(The Channel class itself also has Lists, but even if I leave it with only simple attributes (i.e. Strings), it won't work.)
The XML contains:
<serverConfiguration version="3.5.2">
    <date>2022-07-12 10:57:47</date>
    <channelGroups>
        [... lots of groups]
    </channelGroups>
    <channels>
        <channel version="3.5.2">
            <id>b7cb6bf9-d3a5-4a74-8399-b6689d915a15</id>
            <nextMetaDataId>6</nextMetaDataId>
            <name>cANACR_1_Fhir2Hl7Omg</name>
            <connector class="[...].TcpReceiverProperties" version="3.5.2">
            [... more ]
        </channel>
        [... a lot of channels]
    </channels>
    [... even more data]
</serverConfiguration>
Since there are multiple tags in the XML that contain a "class" attribute, I understand that I need to use inline = true in my @field:ElementList.
I worked through a lot of errors up to this point, which I could resolve by myself, but this one eludes me.
I run the Serializer via:
val serializer: Serializer = Persister()
val dataFetch = serializer.read(ServerConfiguration::class.java, myFile!!, false)
The Error (I cut out classpaths):
org.simpleframework.xml.core.ValueRequiredException: Unable to satisfy #org.simpleframework.xml.ElementList(entry="channel", data=false, inline=true, name="channels", type=Channel.class, required=true, empty=true) on field 'channels' private java.util.List ServerConfiguration.channels for class ServerConfiguration at line 1
If anyone could nudge me in the right direction, I'd be very grateful.
Addendum:
If I set required=false the program runs, but not a single channel is read.
I've tried ArrayList, List, and Set as datatype
I've tried to circumvent lateinit with var channels: List<Channel> = mutableListOf()
I've got it working by adding a wrapper class for the nested lists:
ServerConfiguration.kt:
[...]
@field:Element(name = "channels", required = true, type = ChannelList::class)
lateinit var channelList: ChannelList
ChannelList.kt:
class ChannelList {
    @field:ElementList(required = true, inline = true, name = "channels", entry = "channel", type = Channel::class, data = true, empty = false)
    var channels: List<Channel> = mutableListOf()
}
And finally Channel.kt:
class Channel {
    @field:Element(name = "destinationConnectors", required = true, type = DestinationConnectorList::class)
    lateinit var destinationConnectorList: DestinationConnectorList

    @field:Element(name = "exportData", required = true, type = ExportData::class)
    lateinit var exportData: ExportData
    [...]
While this is working, I would have expected Simple XML to be able to map the elements of the list directly, without needing a wrapper class (ChannelList).
If anyone knows how to achieve this, please feel free to comment or add your solution.
I don't understand how to use custom fields on a ConnectionField in graphene. I have something like:
class ShipConnection(Connection):
    extra = String()

    class Meta:
        node = Ship

SHIPS = ['Tug boat', 'Row boat', 'Canoe']

class Query(AbstractType):
    ships = relay.ConnectionField(ShipConnection)

    def resolve_ships(self, args, context, info):
        return ShipConnection(
            extra='Some extra text',
            edges=???
        )
Normally, you'd say:
def resolve_ships(self, args, context, info):
    return SHIPS
but how do you return something in extra and return a list?
The answer turns out to be to use an undocumented class method of graphene's ConnectionField class, called resolve_connection. The following works:
def resolve_ships(self, args, context, info):
    field = relay.ConnectionField.resolve_connection(
        ShipConnection,
        args,
        SHIPS
    )
    field.extra = 'Whatever'
    return field
The proper way to do this is exactly explained here.
class Ship(graphene.ObjectType):
    ship_type = String()

    def resolve_ship_type(self, info):
        return self.ship_type

    class Meta:
        interfaces = (Node,)

class ShipConnection(Connection):
    total_count = Int()  # i've found count on connections very useful!

    def resolve_total_count(self, info):
        return get_count_of_all_ships()

    class Meta:
        node = Ship

    class Edge:
        other = String()

        def resolve_other(self, info):
            return "This is other: " + self.node.other

class Query(graphene.ObjectType):
    ships = relay.ConnectionField(ShipConnection)

    def resolve_ships(self, info):
        return get_list_of_ships_from_database_or_something_idk_its_your_implmentation()

schema = graphene.Schema(query=Query)
I don't know if this is recommended, but the resolve_total_count method can also be implemented as:
def resolve_total_count(self, info):
    return len(self.iterable)
I don't know if the iterable property is documented anywhere, but I was able to find it while investigating the Connection class.
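For completeness, here is a rough sketch of exercising those fields (it assumes the schema from the answer above; graphene's default auto_camelcase exposes total_count, ship_type and other as totalCount, shipType and other):

# Ad-hoc query against the schema defined above.
result = schema.execute('''
{
  ships {
    totalCount
    edges {
      other
      node { shipType }
    }
  }
}
''')
print(result.errors)  # None if all resolvers ran cleanly
print(result.data)    # {'ships': {'totalCount': ..., 'edges': [...]}}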
Is it possible to have two classes
class SimulationDigitizer(HasTraits):
    width = Int(1920)
    height = Int(1080)
    name = 'Simulation'

class FileDigitizer(HasTraits):
    Filename = File
    name = 'File'
and another class 'Digitizer' with an attribute (or trait) UserDigitizer, whose edit dialog offers a drop-down list with 'Simulation' and 'File' and, depending on the choice, opens the instance editor of either FileDigitizer or SimulationDigitizer, storing the result in UserDigitizer?
Thanks
Thanks Jonathan, you are right: the problem is to select the appropriate subcomponent among many possible ones.
I ended up with the following code; I guess it can be improved in many ways, as I am new to both Python and Traits.
# -*- coding: utf-8 -*-
from traits.api import HasTraits, Int, File, Enum, Instance
from traitsui.api import View, Item, Handler, HGroup, InstanceEditor


class UserComponent(HasTraits):
    """ An empty class from which all user component classes are
        derived.
    """
    pass


class SimulationDigitizer(UserComponent):
    width = Int(1920)
    height = Int(1080)
    nature = 'Simulation'


class FileDigitizer(UserComponent):
    filename = File
    nature = 'Reading a file'


UserDigitizers = [FileDigitizer, SimulationDigitizer]


class UserComponentHandler(Handler):
    def __init__(self, _user_components_dict):
        Handler.__init__(self)
        self._user_components_dict = _user_components_dict

    def object_user_component_nature_changed(self, info):
        # Find new UserComponent class from string in user_component_nature
        new_user_component = self._user_components_dict[info.object.user_component_nature]
        # If different, change user_component value
        if info.object.user_component is not new_user_component:
            info.object.user_component = new_user_component


class Digitizer(HasTraits):
    user_component_nature = Enum([x().nature for x in UserDigitizers])
    _user_file_digitizer = FileDigitizer()
    _user_simulation_digitizer = SimulationDigitizer()
    # Dictionary with keys = nature and values = user digitizers
    _user_digitizers_dict = {x.nature: x for x in [_user_file_digitizer, _user_simulation_digitizer]}
    user_component = Enum(_user_file_digitizer, _user_simulation_digitizer)

    view = View(HGroup(Item('user_component_nature',
                            label='Nature'),
                       Item('user_component',
                            show_label=False,
                            editor=InstanceEditor(label='Edit',
                                                  kind='modal'))),
                handler=UserComponentHandler(_user_digitizers_dict))


d = Digitizer()

if __name__ == '__main__':
    d.configure_traits()
I am trying to create a Pig UDF that extracts the locations mentioned in a tweet using the Stanford CoreNLP package interfaced through the sista Scala API. It works fine when run locally with 'sbt run', but throws a "java.lang.NoSuchMethodError" exception when called from Pig:
Loading default properties from tagger edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger
Reading POS tagger model from edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger
Loading classifier from edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz
2013-06-14 10:47:54,952 [communication thread] INFO org.apache.hadoop.mapred.LocalJobRunner - reduce > reduce
done [7.5 sec]. Loading classifier from edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz ...
2013-06-14 10:48:02,108 [Low Memory Detector] INFO org.apache.pig.impl.util.SpillableMemoryManager - first memory handler call - Collection threshold init = 18546688(18112K) used = 358671232(350264K) committed = 366542848(357952K) max = 699072512(682688K)
done [5.0 sec]. Loading classifier from edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz ...
2013-06-14 10:48:10,522 [Low Memory Detector] INFO org.apache.pig.impl.util.SpillableMemoryManager - first memory handler call - Usage threshold init = 18546688(18112K) used = 590012928(576184K) committed = 597786624(583776K) max = 699072512(682688K)
done [5.6 sec].
2013-06-14 10:48:11,469 [Thread-11] WARN org.apache.hadoop.mapred.LocalJobRunner - job_local_0001
java.lang.NoSuchMethodError: org.joda.time.Duration.compareTo(Lorg/joda/time/ReadableDuration;)I
    at edu.stanford.nlp.time.SUTime$Duration.compareTo(SUTime.java:3406)
    at edu.stanford.nlp.time.SUTime$Duration.max(SUTime.java:3488)
    at edu.stanford.nlp.time.SUTime$Time.difference(SUTime.java:1308)
    at edu.stanford.nlp.time.SUTime$Range.(SUTime.java:3793)
    at edu.stanford.nlp.time.SUTime.(SUTime.java:570)
Here is the relevant code:
object CountryTokenizer {
  def tokenize(text: String): String = {
    val locations = TweetEntityExtractor.NERLocationFilter(text)
    println(locations)
    locations.map(x => Cities.country(x)).flatten.mkString(" ")
  }
}

class PigCountryTokenizer extends EvalFunc[String] {
  override def exec(tuple: Tuple): java.lang.String = {
    val text: java.lang.String = Util.cast[java.lang.String](tuple.get(0))
    CountryTokenizer.tokenize(text)
  }
}

object TweetEntityExtractor {
  val processor: Processor = new CoreNLPProcessor()

  def NERLocationFilter(text: String): List[String] = {
    val doc = processor.mkDocument(text)
    processor.tagPartsOfSpeech(doc)
    processor.lemmatize(doc)
    processor.recognizeNamedEntities(doc)
    val locations = doc.sentences.map(sentence => {
      val entities = sentence.entities.map(List.fromArray(_)) match {
        case Some(l) => l
        case _ => List()
      }
      val words = List.fromArray(sentence.words)
      (words zip entities).filter(x => {
        x._1 != "" && x._2 == "LOCATION"
      }).map(_._1)
    })
    List.fromArray(locations).flatten
  }
}
I am using sbt-assembly to construct a fat-jar, and so the joda-time jar file should be accessible. What is going on?
Pig ships with its own copy of joda-time (1.6), which is incompatible with the 2.x version that the Stanford CoreNLP SUTime code expects. Pig's older classes are found first on the task classpath, ahead of the version bundled in your fat jar, so the 2.x Duration.compareTo(ReadableDuration) method is simply missing at runtime, which is exactly what the NoSuchMethodError reports.
Grails 1.1.
My custom tag:
class MyTagLib {
    static namespace = 'ct'

    def textField = { attrs ->
        def bean = attrs.remove('bean')
        def field = attrs.remove('field')
        attrs.name = field
        out << render(template: "/templates/textField", model: [
            required: !bean.constraints[field].nullable,
            display : bean["${bean.trainingExperience.type}"][field],
            theTag  : g.textField(name: field, value: bean[field]),
            value   : bean[field]
        ])
    }
}
Just about all of the taglib unit tests I see just do:

assertEquals "Some String", tagLib.out.toString()

Is it possible to test that the correct template is being rendered, with the correct values in the model?
MyTagLibTests
public class CareertracTagLibTests extends TagLibUnitTestCase {
    protected void setUp() {
        super.setUp()
        mockTagLib(FormTagLib)
        mockTagLib(RenderTagLib)
        def g = new FormTagLib() // interpret "g" namespace as instances of FormTagLib
        tagLib.metaClass.g = g
        String.metaClass.encodeAsHTML = { org.codehaus.groovy.grails.plugins.codecs.HTMLCodec.encode(it) }
    }

    void testTextField() {
        tagLib.textField([bean: mockBean, field: 'viewField'])
        def x = new RenderTagLib().render(template: "/templates/textField",
            model: [required: false,
                    display: "view",
                    // Snip
            ])
        assertEquals tagLib.out, x.out // Or something like this
    }
}
With TagLibUnitTestCase you can use renderArgs to test calls to the render method the same way that you can in ControllerUnitTestCase. The renderArgs property is simply a map that stores the arguments of the last call to the render dynamic method. So, in your example you would want something like this:
assertEquals "/templates/textField", renderArgs.template
assertFalse renderArgs.model.required
and so on.