summaryrefslogtreecommitdiffstats
path: root/Monitoring/MonitoringService/Semantics
diff options
context:
space:
mode:
authorpikusa <pikusa@man.poznan.pl>2013-04-03 13:18:17 (GMT)
committer pikusa <pikusa@man.poznan.pl>2013-04-03 13:18:17 (GMT)
commit2f2a3a129c91de540e66c3bfbe30b0df1942cd4b (patch)
tree2d313cdf0068af368d4de6067d676be16f6a6464 /Monitoring/MonitoringService/Semantics
parentff8aa232b071a9b54dff833714a870fd0aec0b30 (diff)
downloadnovi-public-2f2a3a129c91de540e66c3bfbe30b0df1942cd4b.zip
novi-public-2f2a3a129c91de540e66c3bfbe30b0df1942cd4b.tar.gz
novi-public-2f2a3a129c91de540e66c3bfbe30b0df1942cd4b.tar.bz2
project commit and dir tree change
Diffstat (limited to 'Monitoring/MonitoringService/Semantics')
-rw-r--r--Monitoring/MonitoringService/Semantics/FeatureModel.py202
-rw-r--r--Monitoring/MonitoringService/Semantics/InformationModel.py109
-rw-r--r--Monitoring/MonitoringService/Semantics/Query.py150
-rw-r--r--Monitoring/MonitoringService/Semantics/QueryInterpreter.py308
-rw-r--r--Monitoring/MonitoringService/Semantics/TaskModel.py304
-rw-r--r--Monitoring/MonitoringService/Semantics/UnitModel.py398
-rw-r--r--Monitoring/MonitoringService/Semantics/__init__.py0
-rw-r--r--Monitoring/MonitoringService/Semantics/test.py336
8 files changed, 1807 insertions, 0 deletions
diff --git a/Monitoring/MonitoringService/Semantics/FeatureModel.py b/Monitoring/MonitoringService/Semantics/FeatureModel.py
new file mode 100644
index 0000000..e266ccb
--- /dev/null
+++ b/Monitoring/MonitoringService/Semantics/FeatureModel.py
@@ -0,0 +1,202 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from DataProcessing.Parameter import ParameterList, Parameter
+
class FeatureModel(object):
    '''
    @summary: reasons about the information model: enumerates monitoring
    domains, monitored features and resolves their parameter declarations.
    '''
    # maps the ontology's parameter type names to python constructors,
    # used to (de)serialize parameter values
    typelookup = {
        'Integer': int,
        'Float': float,
        'String': str
    }

    def __init__(self, dimensionmanager, unitmanager, ontology):
        '''
        @summary: constructor
        @param dimensionmanager: the container to form a cell's dimension
        @type dimensionmanager: DimensionManager
        @param unitmanager: the container to form a cell's unit
        @type unitmanager: UnitManager
        @param ontology: the basic knowledge
        @type ontology: Ontology
        '''
        self.ontology = ontology
        self.dm = dimensionmanager
        self.um = unitmanager

    def inferDomains(self):
        '''
        @summary: extract the monitoring domains from the information model
        @return: generator of the list of domains
        @rtype: URIRef
        '''
        for uri_domain, _, _ in self.ontology.triples((None, self.ontology.ns('rdf')['type'], self.ontology.ns('task')['MonitoringDomain'])):
            yield uri_domain

    def inferFeatures(self):
        '''
        @summary: extract the monitored features from the information model
        @return: a generator of the list of (feature reference, name, resource type) tuples
        @rtype: (URIRef, str, URIRef)
        '''
        q = """
SELECT ?feature ?name ?resource
WHERE {
  ?feature a owl:NamedIndividual ;
           a ?parent ;
           feature:featureName ?name .
  ?parent rdfs:subClassOf feature:MonitoredFeature .
  ?resource feature:hasFeature ?feature
}
"""
        for uri_feature, name, uri_resource in self.ontology.query(q):
            yield uri_feature, str(name), uri_resource

    def inferObligatoryParametersOf(self, feature):
        '''
        @summary: extract the obligatory parameter list for a given feature
        @param feature: reference to the monitored feature
        @type feature: URIRef
        @return: an initialized list of the parameters for this feature
        @rtype: ParameterList
        '''
        return self._inferParametersOf(feature, 'obligatoryParameter')

    def inferOptionalParametersOf(self, feature):
        '''
        @summary: extract the optional parameter list for a given feature
        @param feature: reference to the monitored feature
        @type feature: URIRef
        @return: an initialized list of the parameters for this feature
        @rtype: ParameterList
        '''
        return self._inferParametersOf(feature, 'optionalParameter')

    def _inferParametersOf(self, feature, relation):
        '''
        @summary: helper shared by the obligatory/optional lookups; the two
        original methods were identical apart from the predicate name.
        @param feature: reference to the monitored feature
        @type feature: URIRef
        @param relation: the feature->parameter predicate to follow
        @type relation: str
        @rtype: ParameterList
        '''
        q = """
SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
WHERE {
  feature:%s feature:%s ?par .
  ?par param:paramName ?name ;
       param:hasType ?ptype ;
       param:hasDimension ?dim .
  OPTIONAL {
    ?par param:paramValue ?defval .
    OPTIONAL {
      ?par param:hasUnit ?unit .
      OPTIONAL {
        ?par param:hasPrefix ?prefix .
      }
    }
  }
}
""" % (self.ontology._tail(feature), relation)
        paramlist = ParameterList()
        for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
            p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
            paramlist.append(p)
        return paramlist

    def inferFeatureMonitoringParameters(self):
        '''
        @summary: extract parameters declared for feature monitoring
        @return: an iterator over parameters
        @rtype: (parameter name, dimension, value, unit)
        '''
        q = """
SELECT ?name ?dim ?defval ?unit ?prefix
WHERE {
  ?par a feature:FeatureMonitoringParameter ;
       param:paramName ?name ;
       param:hasDimension ?dim .
  OPTIONAL {
    ?par param:paramValue ?defval .
    OPTIONAL {
      ?par param:hasUnit ?unit .
      OPTIONAL {
        ?par param:hasPrefix ?prefix .
      }
    }
  }
}
"""
        for name, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
            d = self.dm[ self.ontology._tail(uri_dim) ]
            if default is None:
                # no value declared: report an empty value in the default unit
                yield str(name), d, "", d.unit
            else:
                # unit resolution now shared with translateParameter
                # (resolves the FIXME about duplicated logic)
                u = self._resolveUnit(d, uri_unit, uri_prefix)
                yield str(name), d, str(default), u

    def _resolveUnit(self, dimension, uri_unit, uri_prefix):
        '''
        @summary: helper to resolve the unit of a value; falls back to the
        default unit of the dimension when no explicit unit is given.
        @param dimension: the dimension providing the fallback unit
        @param uri_unit: the unit reference or None
        @type uri_unit: URIRef
        @param uri_prefix: the prefix reference or None
        @type uri_prefix: URIRef
        @return: the resolved unit
        '''
        if uri_unit is None:
            if uri_prefix is None:
                return dimension.unit
            ref = "%s_%s" % (self.ontology._tail(uri_prefix), dimension.unit.reference)
        else:
            if uri_prefix is None:
                return self.um[ self.ontology._tail(uri_unit) ]
            ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
        return self.um[ref]

    def translateParameter(self, name, uri_dim, uri_unit, uri_prefix, uri_ptype, default = None):
        '''
        @summary: helper method to instantiate a Parameter
        @param name: the reference name of the parameter
        @type name: str
        @param uri_dim: the dimension of the parameter
        @type uri_dim: URIRef
        @param uri_unit: the unit of the parameter, if None we fall back to the unit of the dimension
        @type uri_unit: URIRef
        @param uri_prefix: accounts only if uri_unit is not None
        @type uri_prefix: URIRef
        @param uri_ptype: the type of the parameter to use for serialization
        @type uri_ptype: URIRef
        @param default: the parameter value to initialize with, if None, parameter won't hold a value
        @type default: Literal
        @return: a parameter
        @rtype: Parameter
        '''
        vt = self.typelookup[ self.ontology._tail(uri_ptype) ]
        d = self.dm[ self.ontology._tail(uri_dim) ]
        if default is None:
            return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d)
        u = self._resolveUnit(d, uri_unit, uri_prefix)
        return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d, default = (vt(default), u))
diff --git a/Monitoring/MonitoringService/Semantics/InformationModel.py b/Monitoring/MonitoringService/Semantics/InformationModel.py
new file mode 100644
index 0000000..f43a425
--- /dev/null
+++ b/Monitoring/MonitoringService/Semantics/InformationModel.py
@@ -0,0 +1,109 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from rdflib import Graph, Namespace, URIRef, plugin
+from rdflib.query import Processor, Result
+
class IMError(Exception):
    '''
    @summary: raised on information model errors, e.g. when an ontology
    document cannot be read or a URI reference is malformed
    '''
    pass
+
class Ontology(object):
    '''
    @summary: wraps an rdflib Graph: loads OWL documents, runs SPARQL queries
    with the graph's namespace bindings and offers small URI helpers.
    '''

    def __init__(self):
        # register the rdfextras SPARQL processor backends with rdflib
        plugin.register(
            'sparql', Processor,
            'rdfextras.sparql.processor', 'Processor')
        plugin.register(
            'sparql', Result,
            'rdfextras.sparql.query', 'SPARQLQueryResult')
        self._graph = Graph()
        self._graph.bind('owl', Namespace("http://www.w3.org/2002/07/owl#"))

    def load(self, prefix, owl_url, namespace_url = None):
        '''
        @summary: load owl file and bind name space
        @param prefix: an abbreviation of the name space to be used in sparql queries
        @type prefix: str
        @param owl_url: the location of the owl document to load
        @type owl_url: str
        @param namespace_url: the name space; if None a # is appended to the owl_url
        @type namespace_url: str or None
        '''
        if namespace_url is None:
            # FIX: derive the namespace from the document location; the
            # original formatted namespace_url here, which is None on this
            # branch and yielded the bogus namespace "None#"
            ns = Namespace("%s#" % owl_url)
        else:
            ns = Namespace(namespace_url)
        try:
            self._graph += Graph().parse(source = owl_url)
        except Exception:
            # parse errors and URL errors are mapped to the model error type
            raise IMError("URLError: Cannot read model %s" % owl_url)
        try:
            self._graph.bind(prefix, ns)
        except Exception:
            # a failing rebind is deliberately non-fatal
            pass

    @staticmethod
    def _float(f):
        '''
        @summary: parse a numeric literal; returns an int for integral
        spellings and a float when a decimal point or exponent is present
        '''
        if '.' in f or 'e' in f or 'E' in f:
            return float(f)
        else:
            return int(f)

    @staticmethod
    def ipret(x):
        '''
        @summary: interpret a query result item: map empty values to None,
        anything else to its string form
        '''
        if not x:
            return None
        x = str(x)
        if len(x):
            return x
        else:
            return None

    @staticmethod
    def _tail(uriref):
        '''
        @summary: return the fragment after the last '#' of a URI reference
        @raise IMError: if the argument is not a URIRef
        '''
        if not isinstance(uriref, URIRef):
            raise IMError("Wrong uriref %s" % uriref)
        return str(uriref).split("#")[-1]

    def query(self, query):
        '''@summary: run a SPARQL query with the graph's namespace bindings'''
        return self._graph.query(query, initNs = dict(self._graph.namespaces()))

    def triples(self, spo_tuple):
        '''@summary: iterate matching (subject, predicate, object) triples'''
        return self._graph.triples(spo_tuple)

    def ns(self, prefix):
        '''
        @summary: resolve a bound prefix to its Namespace
        @raise IMError: if the prefix is not bound
        '''
        for p, ns in self._graph.namespaces():
            if p == prefix:
                return Namespace(ns)
        raise IMError("Unknown prefix: %s" % prefix)

    @property
    def ns_dict(self):
        # prefix -> namespace lookup of all current bindings
        return dict(self._graph.namespaces())

    def emptygraph(self):
        '''@summary: a new empty Graph carrying this graph's namespace bindings'''
        g = Graph()
        # bind name spaces (items() is Python 2/3 compatible, iteritems() is not)
        for prefix, ns in self.ns_dict.items():
            g.bind(prefix, ns)
        return g

    @property
    def g(self):
        '''@summary: a copy of the graph with the namespace bindings re-applied'''
        g = Graph()
        g += self._graph
        for prefix, ns in self.ns_dict.items():
            g.bind(prefix, ns)
        return g

    def dump(self):
        '''@summary: debug helper, print every triple of the graph'''
        for t in self._graph.triples((None, None, None)):
            print(t)
diff --git a/Monitoring/MonitoringService/Semantics/Query.py b/Monitoring/MonitoringService/Semantics/Query.py
new file mode 100644
index 0000000..0ffb758
--- /dev/null
+++ b/Monitoring/MonitoringService/Semantics/Query.py
@@ -0,0 +1,150 @@
+'''
+Created on Feb 21, 2012
+
+@author: steger
+'''
+from DataProcessing.Parameter import ParameterList
+from Resource.resource import resource as coreresource
+from DataProcessing.DataFormatter import JsonFormatter, DumbFormatter, Formatter
+
class Query(object):
    '''
    @summary: the common skeleton of all queries; serves as the marker base
    class accepted by QueryBundle.add()
    '''
    pass
+
class InformationSource(object):
    '''
    @summary: represents a (feature, resource) pair, standing for what and where to measure
    @ivar resource: (resourceid, resource) pair, or None when unset
    @ivar feature: pointer to the feature, or None when unset
    '''
    def __init__(self):
        self._feature = None
        self._resource = None

    @property
    def resource(self):
        return self._resource
    @resource.setter
    def resource(self, resourcepair):
        # FIX: unpack inside the body; the original used Python 2-only tuple
        # parameter syntax, a SyntaxError on Python 3. Callers still assign a
        # (resourceid, resource) pair.
        resourceid, resource = resourcepair
        if not isinstance(resource, coreresource):
            raise Exception("%s is not a resource type" % resource)
        self._resource = (resourceid, resource)

    @property
    def feature(self):
        return self._feature
    @feature.setter
    def feature(self, feature):
        self._feature = feature
+
class Identifier(object):
    '''
    @summary: represents a task, an aggregate or condition identifier
    @ivar sourceid: pointer to the task, aggregate or condition
    @type sourceid: str
    '''

    def __init__(self):
        # nothing is bound until the setter is used
        self._sourceid = None

    @property
    def sourceid(self):
        '''@summary: the bound identifier, None while unbound'''
        return self._sourceid

    @sourceid.setter
    def sourceid(self, value):
        self._sourceid = value
+
class SingleQuery(Query, InformationSource):
    '''
    @summary: an extension of L{InformationSource} to store:
     - the requested output format and
     - some optional parameters L{ParameterList}
    @ivar samplechain: data manipulation chain
    @ivar formatter: a pointer to the requested formatter
    @type formatter: L{Formatter}
    @ivar paramlist: L{ParameterList}
    '''
    def __init__(self):
        InformationSource.__init__(self)
        self._samplechain = None
        self._formatter = None
        self._parameters = ParameterList()

    @property
    def samplechain(self):
        return self._samplechain
    @samplechain.setter
    def samplechain(self, samplechain):
        self._samplechain = samplechain

    @property
    def formatter(self):
        return self._formatter
    @formatter.setter
    def formatter(self, uri_formatter):
        # accepts either a URI reference naming a known formatter or a
        # Formatter subclass itself
        if str(uri_formatter).endswith("Formatter_JSON"):
            self._formatter = JsonFormatter
        elif str(uri_formatter).endswith("Formatter_CSV"):
            self._formatter = DumbFormatter
        # FIX: issubclass() raises TypeError for non-class values (e.g. an
        # unknown URIRef); guard with isinstance so such values fall through
        # to the intended error message below
        elif isinstance(uri_formatter, type) and issubclass(uri_formatter, Formatter):
            self._formatter = uri_formatter
        else:
            raise Exception("%s is not a formatter type" % uri_formatter)

    @property
    def paramlist(self):
        return self._parameters

    def addParameter(self, parameter):
        '''
        @summary: append a new parameter to the list
        @param parameter: the parameter to append to the list
        @type parameter: L{Parameter}
        '''
        self._parameters.append(parameter)
+
class SingleSampleQuery(SingleQuery, Identifier):
    '''
    @summary: represents a (feature, resource, source identifier) triplet to fully identify a data generator
    and binds a chain of operations to it.
    The requested output format is also stored here
    '''
    def __init__(self):
        # initialize both bases explicitly; no cooperative super() chain here
        SingleQuery.__init__(self)
        Identifier.__init__(self)
+
class SingleConditionQuery(SingleQuery, Identifier):
    '''
    @summary: a single query carrying a condition; the query interpreter
    additionally sets operation / conditiontype attributes on instances
    '''
    def __init__(self):
        # initialize both bases explicitly; no cooperative super() chain here
        SingleQuery.__init__(self)
        Identifier.__init__(self)
+
class QueryBundle(object):
    '''
    @summary: represents a collection of SingleQueries keyed by their reference
    '''
    def __init__(self):
        # atoms: query reference -> Query instance
        self.atoms = {}

    def __len__(self):
        return len(self.atoms)

    def has_key(self, key):
        '''
        @summary: check whether a query is registered under the given reference
        @rtype: bool
        '''
        # FIX: dict.has_key() was removed in Python 3; 'in' is equivalent on
        # both major versions (the public has_key() method is kept for callers)
        return key in self.atoms

    def __iter__(self):
        # FIX: itervalues() was removed in Python 3; values() works everywhere
        for q in self.atoms.values():
            yield q

    def getResource(self, resourceid):
        '''
        @summary: look up a stored resource by its identifier
        @return: the resource, or None if no query references it
        '''
        for q in self:
            if q.resource[0] == resourceid:
                return q.resource[1]
        return None

    def add(self, reference, q):
        '''
        @summary: register a query under the given reference
        @raise Exception: on a duplicate reference or a non-Query argument
        '''
        if reference in self.atoms:
            raise Exception("Duplicate MonitoringQuery entry")
        if not isinstance(q, Query):
            raise Exception("Wrong type")
        self.atoms[reference] = q
diff --git a/Monitoring/MonitoringService/Semantics/QueryInterpreter.py b/Monitoring/MonitoringService/Semantics/QueryInterpreter.py
new file mode 100644
index 0000000..28a2775
--- /dev/null
+++ b/Monitoring/MonitoringService/Semantics/QueryInterpreter.py
@@ -0,0 +1,308 @@
+'''
+Created on Feb 21, 2012
+
+@author: steger
+'''
+
+from Semantics.Query import QueryBundle, SingleQuery, SingleSampleQuery,\
+ SingleConditionQuery
+from Resource.node import node
+from Resource.interface import interface
+from DataProcessing.Aggregator import Max, Min, Percentile, Mean, Deviation
+from DataProcessing.Sampler import Tail, Head
+from DataProcessing.Parameter import ParameterList
+from DataProcessing.Bool import IsPositive, IsNegative
+
class QueryInterpreter(object):
    '''
    @summary: walks query graphs with SPARQL and translates them into
    QueryBundle collections of Single*Query objects
    '''
    # name of the sample individual standing for the raw measurement data;
    # a sample chain terminates when this individual is reached
    samplesource = 'UnmodifiedExtractOfFeatureSamples'
    # ontology sample operation name -> aggregator/sampler skeleton
    lut_skeleton = {
        'Maximum': Max,
        'Minimum': Min,
        'Percentile': Percentile,
        'Average': Mean,
        'Variance': Deviation,
        'Tail': Tail,
        'Head': Head
    }
    # ontology condition name -> condition skeleton
    lut_condition = {
        'IsPositive': IsPositive,
        'IsNegative': IsNegative,
        #FIXME: IsNotNegative, IsNotPositive
        'AndExpression': '',
        'OrExpression': '',
    }

    def __init__(self, model):
        '''
        @summary: constructor
        @param model: the task model to resolve the tools
        @type model: TaskModel
        '''
        self.model = model

    def getUnitOfDimension(self, ref_dim):
        '''
        @summary: return the default unit of the referenced dimension
        @param ref_dim: reference name of the dimension
        @type ref_dim: str
        '''
        return self.model.dm[ref_dim].unit

    def getUnit(self, uri_prefix, uri_unit):
        '''
        @summary: resolve a (prefix, unit) pair of URI references to a unit
        '''
        # FIX: _tail() is defined on the ontology and the unit manager is held
        # by the task model; the original used self.model._tail() and a
        # non-existent self.um attribute, both AttributeErrors at runtime
        _tail = self.model.ontology._tail
        if uri_prefix is None:
            uref = _tail(uri_unit)
        else:
            uref = "%s_%s" % (_tail(uri_prefix), _tail(uri_unit))
        return self.model.um[uref]

    @property
    def myns(self):
        # namespace bindings handed to every SPARQL query as initNs
        return self.model.ontology.ns_dict

    def inferInterfacesOf(self, qgraph, uri_node):
        '''
        @summary: yield the bidirectional interfaces of the given node resource
        @param qgraph: the query graph to search
        @param uri_node: reference to the node resource
        @type uri_node: URIRef
        '''
        q = """
SELECT ?ifin ?address ?unit ?prefix
WHERE {
  <%s> core:hasInboundInterface ?ifin ;
       core:hasOutboundInterface ?ifout .
  ?ifin a core:Interface ;
        core:hasIPv4Address ?addressobj .
  ?ifout a core:Interface ;
         core:hasIPv4Address ?addressobj .
  ?addressobj a owl:NamedIndividual ;
              a unit:IPAddress ;
              unit:hasValue ?address .
  OPTIONAL {
    ?addressobj unit:hasUnit ?unit .
  }
  OPTIONAL {
    ?addressobj unit:hasPrefix ?prefix .
  }
}
""" % uri_node
        for uri_ifin, address, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
            name = self.model.ontology._tail(uri_ifin)
            iface = interface(name, resourceid = uri_ifin)
            if uri_unit is not None:
                iface.address = str(address), self.getUnit(uri_prefix, uri_unit)
            else:
                iface.address = str(address), self.getUnitOfDimension('IPAddress')
            iface.direction = iface.EGRESS | iface.INGRESS
            #FIXME: this info should come from the model
            iface.interface = "eth0"
            iface.ispublic = True
            yield iface
        #TODO: similarly look up uni directional interfaces of the node and yield them as well

    def inferBundleQueries(self, qgraph):
        '''
        @summary: extract all BundleQuery individuals of the graph into a QueryBundle
        @rtype: QueryBundle
        '''
        q = """
SELECT ?query ?resource ?feature ?sample ?formatter
WHERE {
  ?query a owl:NamedIndividual ;
         a query:BundleQuery ;
         feature:hasFeature ?feature ;
         stat:hasSample ?sample ;
         query:hasResource ?resource ;
         query:hasFormatter ?formatter .
}
"""
        Q = QueryBundle()
        for uri_query, uri_resource, uri_feature, uri_sample, uri_formatter in qgraph.query(q, initNs = self.myns):
            r = Q.getResource(uri_resource)
            if r is None:
                r = self.translateResource(qgraph, uri_resource)
            sq = SingleQuery()
            sq.feature = uri_feature
            sq.resource = (uri_resource, r)
            sq.formatter = uri_formatter
            sq.samplechain = self.inferSampleChain(qgraph, uri_sample)
            for p in self.inferParameters(qgraph, uri_query):
                sq.addParameter(parameter = p)
            Q.add(uri_query, sq)
        return Q

    def inferSampleManipulationQueries(self, qgraph):
        '''
        @summary: extract all SampleManipulationQuery individuals into a QueryBundle
        @rtype: QueryBundle
        '''
        q = """
SELECT ?query ?resource ?feature ?sourceid ?sample ?formatter
WHERE {
  ?query a owl:NamedIndividual ;
         a query:SampleManipulationQuery ;
         query:hasResource ?resource ;
         feature:hasFeature ?feature ;
         query:hasProcessid ?sourceid ;
         stat:hasSample ?sample ;
         query:hasFormatter ?formatter .
}
"""
        Q = QueryBundle()
        for uri_query, uri_resource, uri_feature, uri_sourceid, uri_sample, uri_formatter in qgraph.query(q, initNs = self.myns):
            r = Q.getResource(uri_resource)
            if r is None:
                r = self.translateResource(qgraph, uri_resource)
            aq = SingleSampleQuery()
            aq.feature = uri_feature
            aq.resource = (uri_resource, r)
            aq.formatter = uri_formatter
            aq.samplechain = self.inferSampleChain(qgraph, uri_sample)
            aq.sourceid = str(uri_sourceid)
            Q.add(uri_query, aq)
        return Q

    def inferConditionQueries(self, qgraph):
        '''
        @summary: extract all ConditionQuery individuals into a QueryBundle
        @rtype: QueryBundle
        '''
        # FIX: removed the stray ';' before the closing brace (invalid SPARQL)
        q = """
SELECT ?query ?resource ?sourceid ?feature ?cond
WHERE {
  ?query a owl:NamedIndividual ;
         a query:ConditionQuery ;
         query:hasResource ?resource ;
         query:hasProcessid ?sourceid ;
         feature:hasFeature ?feature ;
         stat:hasCondition ?cond
}
"""
        Q = QueryBundle()
        for uri_query, uri_resource, uri_sourceid, uri_feature, uri_cond in qgraph.query(q, initNs = self.myns):
            r = Q.getResource(uri_resource)
            if r is None:
                r = self.translateResource(qgraph, uri_resource)
            C, op = self.inferCondition(qgraph, uri_cond)
            # FIX: dropped the leftover Python 2 debug print statement
            cq = SingleConditionQuery()
            cq.feature = uri_feature
            cq.resource = (uri_resource, r)
            cq.operation = op
            cq.conditiontype = C
            cq.sourceid = str(uri_sourceid)
            Q.add(uri_query, cq)
        return Q

    def inferCondition(self, qgraph, uri_cond):
        '''
        @summary: resolve a condition individual to its skeleton and the
        sample expression it applies to
        @return: (condition class, sample terms)
        '''
        # FIX: removed the stray ';' before the closing brace (invalid SPARQL)
        q = """
SELECT ?what ?sample
WHERE {
  <%s> a owl:NamedIndividual ;
       a ?what ;
       stat:hasSample ?sample
}
""" % uri_cond
        for what, uri_sample in qgraph.query(q, initNs = self.myns):
            tail = self.model.ontology._tail(what)
            if tail in [ 'NamedIndividual' ]:
                continue
            C = self.lut_condition[tail]
            if C in [IsPositive, IsNegative]: #FIXME: IsNotNegative, IsNotPositive
                return C, self.inferLineraCombinedSample(qgraph, uri_sample)
            else:
                # And/Or expressions are declared in the lookup table but their
                # interpretation is not implemented yet
                raise Exception("QI NOT IMPLEMENTED")

    def inferLineraCombinedSample(self, qgraph, uri_sample):
        '''
        @summary: resolve a linear combined sample into a list of
        (scale factor, operation chain) terms; a missing scale defaults to 1
        '''
        q = """
SELECT ?sample ?factor
WHERE {
  <%s> a owl:NamedIndividual ;
       a stat:LinearCombinedSample ;
       stat:hasTerm ?term .
  ?term stat:hasSample ?sample .
  OPTIONAL {
    ?term stat:hasScale ?factor .
  }
}
""" % uri_sample
        terms = []
        for uri_sample, factor in qgraph.query(q, initNs = self.myns):
            try:
                factor = float(factor)
            except (TypeError, ValueError):
                factor = 1
            op = self.inferSampleChain(qgraph, uri_sample)
            terms.append( ( factor, op) )
        return terms

    # correctly spelt alias for the misspelt public name above
    # (kept backward compatible)
    inferLinearCombinedSample = inferLineraCombinedSample

    def inferSampleChain(self, qgraph, uri_sample):
        '''
        @summary: recursively walk the sample derivation chain back to the
        raw sample source and collect (skeleton, parameter list) steps
        @return: list of (skeleton, ParameterList) pairs, innermost first
        '''
        tail = self.model.ontology._tail(uri_sample)
        if tail == self.samplesource:
            # reached the raw data: the chain starts empty here
            return []
        q = """
SELECT ?nextsample ?sampleop
WHERE {
  <%s> a owl:NamedIndividual ;
       stat:hasSample ?nextsample ;
       a ?sampleop
}
""" % uri_sample
        # NOTE(review): if the query yields no usable row, 'op' (and a fresh
        # 'tail') are undefined below and a NameError is raised -- the model
        # presumably guarantees at least one match; confirm against the data
        for uri_sample_next, uri_sampleop in qgraph.query(q, initNs = self.myns):
            tail = self.model.ontology._tail(uri_sampleop)
            if tail in [ 'NamedIndividual' ]:
                continue
            op = self.inferSampleChain(qgraph, uri_sample_next)
            break
        skeleton = self.lut_skeleton[tail]
        parlist = ParameterList([ p for p in self.inferParameters(qgraph, uri_sample) ])
        op.append( (skeleton, parlist) )
        return op

    def inferParameters(self, qgraph, uri_query):
        '''
        @summary: yield the parameters attached to a query or sample operation
        '''
        q = """
SELECT ?name ?type ?dim ?defval ?unit ?prefix
WHERE {
  <%s> param:hasParameter ?par .
  ?par a owl:NamedIndividual ;
       param:paramName ?name ;
       param:hasType ?type ;
       a ?dim .
  OPTIONAL {
    ?par unit:hasValue ?defval .
    OPTIONAL {
      ?par unit:hasUnit ?unit .
    }
    OPTIONAL {
      ?par unit:hasPrefix ?prefix .
    }
  }
}
""" % uri_query
        for uri_name, uri_type, uri_dim, uri_default, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
            tail = self.model.ontology._tail(uri_dim)
#FIXME: query should include the filter, but rdflib has a bug and only the spelt out form would work
#            FILTER ( ?dim != owl:NamedIndividual )
#            FILTER ( ?dim != query:QueryParameter )
#            FILTER ( ?dim != stat:SampleOperatorParameter )
#
# like:
#            FILTER ( ?dim != <http://www.w3.org/2002/07/owl#NamedIndividual> )
            if tail in [ 'QueryParameter', 'SOP_tail', 'SOP_head', 'SOP_order', 'NamedIndividual' ]:
                continue
            yield self.model.translateParameter(str(uri_name), uri_dim, uri_unit, uri_prefix, uri_type, uri_default)

    def translateResource(self, qgraph, uri_resource):
        '''
        @summary: instantiate the resource individual referenced by the query graph
        @return: the constructed resource, or None if its type is unhandled
        '''
        resource_name = self.model.ontology._tail(uri_resource)
        # FIX: removed the stray ';' before the closing brace (invalid SPARQL)
        q = """
SELECT ?resourcetype
WHERE {
  <%s> a owl:NamedIndividual ;
       a core:Resource ;
       a ?resourcetype
}
""" % uri_resource
        for uri_rtype, in qgraph.query(q, initNs = self.myns):
            tail = self.model.ontology._tail(uri_rtype)
            if tail in [ 'Resource', 'NamedIndividual' ]:
                continue
            if tail == "Node":
                r = node(name = resource_name, resourceid = uri_resource)
                for iface in self.inferInterfacesOf(qgraph, uri_resource):
                    r.addinterface(iface)
                return r
            else:
                # FIX: Python 3 compatible print call for the warning
                print("WW: unhandled rtype", uri_rtype)
                continue
+ \ No newline at end of file
diff --git a/Monitoring/MonitoringService/Semantics/TaskModel.py b/Monitoring/MonitoringService/Semantics/TaskModel.py
new file mode 100644
index 0000000..7e0c8bf
--- /dev/null
+++ b/Monitoring/MonitoringService/Semantics/TaskModel.py
@@ -0,0 +1,304 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from Credential.credentialtypes import UsernamePassword, UsernameRSAKey
+from DataProcessing.Data import DataHeader, DataHeaderCell
+from DataProcessing.Parameter import ParameterList, Parameter
+from Driver.SOAPClient import SOAPClient
+from Driver.SshExec import SshExec
+from Driver.LocalExec import LocalExec
+from Driver.REST import RESTDriver
+
class TaskModelError(Exception):
    '''
    @summary: raised when the task model is inconsistent, e.g. an unknown
    authentication or driver type, or a missing hasDriver relation
    '''
    pass
+
class TaskModel(object):
    '''
    @summary: reasons about monitoring tasks of the information model:
    resolves their credentials, driver, hooks, data header and parameters.
    '''
    # ontology hook relation name -> keyword expected by the Task object
    hooklookup = {
        'hasPreHook' : 'prehook',
        'hasStartHook' : 'starthook',
        'hasRetrieveHook' : 'retrievehook',
        'hasStopHook' : 'stophook',
        'hasPostHook' : 'posthook',
    }
    # maps the ontology's parameter type names to python constructors
    typelookup = {
        'Integer': int,
        'Float': float,
        'String': str
    }

    def __init__(self, dimensionmanager, unitmanager, ontology):
        '''
        @summary: constructor
        @param dimensionmanager: the container to form a cell's dimension
        @type dimensionmanager: DimensionManager
        @param unitmanager: the container to form a cell's unit
        @type unitmanager: UnitManager
        @param ontology: the basic knowledge
        @type ontology: Ontology
        '''
        self.ontology = ontology
        self.dm = dimensionmanager
        self.um = unitmanager

    def inferDomains(self):
        '''
        @summary: extract the monitoring domains from the information model
        @return: generator of the list of domains
        @rtype: URIRef
        '''
        for uri_domain, _, _ in self.ontology.triples((None, self.ontology.ns('rdf')['type'], self.ontology.ns('task')['MonitoringDomain'])):
            yield uri_domain

    def inferTasks(self, domain, feature):
        '''
        @summary: provides a generator to crawl over the tasks that can measure a given feature in the given domain of interest
        @param domain: domain of interest
        @type domain: URIRef
        @param feature: the feature to measure
        @type feature: URIRef
        @return: a generator of the list of (task reference, task name) pairs
        @rtype: (URIRef, str)
        '''
        q = """
SELECT ?task ?name
WHERE {
  ?task a owl:NamedIndividual ;
        a task:MonitoringTask ;
        task:name ?name ;
        task:hasMonitoringDomain task:%s ;
        task:hasOutputTableFormat ?data .
  ?data task:hasColumn ?col .
  ?col task:hasMonitoredFeature feature:%s
}
""" % (self.ontology._tail(domain), self.ontology._tail(feature))
        for uri_task, tname in self.ontology.query(q):
            yield uri_task, str(tname)

    def inferCredentialOf(self, task):
        '''
        @summary: extracts the set of acceptable credential templates the given task accepts
        @param task: reference to the monitoring task
        @type task: URIRef
        @return: a set of an uninitialized Credential classes
        @rtype: set(Credential)
        @raise TaskModelError: Unknown authentication type
        '''
        creds = set()
        for (_, _, auth) in self.ontology.triples((task, self.ontology.ns('task')['hasAuthenticationType'], None)):
            if auth == self.ontology.ns('task')["UsernamePassword"]:
                creds.add(UsernamePassword)
            elif auth == self.ontology.ns('task')["UsernameRSAKey"]:
                creds.add(UsernameRSAKey)
            else:
                raise TaskModelError("Unknown authentication type %s" % auth)
        return creds

    def inferDriverOf(self, task):
        '''
        @summary: extracts the driver of the task
        @param task: reference to the monitoring task
        @type task: URIRef
        @return: the appropriate driver class uninstantiated
        @rtype: Driver
        @raise TaskModelError: Unknown driver type / hasDriver missing
        '''
        try:
            # FIX: use the builtin next() instead of the Python 2-only
            # generator method .next(), which is an AttributeError on Python 3
            _, _, driver = next(self.ontology.triples((task, self.ontology.ns('task')['hasDriver'], None)))
            if driver == self.ontology.ns('task')["SOAPClient"]:
                return SOAPClient
            elif driver == self.ontology.ns('task')["SSH"]:
                return SshExec
            elif driver == self.ontology.ns('task')["LocalExec"]:
                return LocalExec
            elif driver == self.ontology.ns('task')["REST"]:
                return RESTDriver
            else:
                raise TaskModelError("Unknown driver type %s" % driver)
        except StopIteration:
            raise TaskModelError("hasDriver is missing for task %s" % task)

    def inferHookparametersOf(self, task):
        '''
        @summary: extract the necessary control parameters for task initialization
        @param task: reference to the monitoring task
        @type task: URIRef
        @return: a lookup table of arguments, which are passed to the Task object's prehook method as keyword arguments
        @rtype: dict
        '''
        q = """
SELECT ?name ?value ?type
WHERE {
  config:%s task:hasHookParameter ?p .
  ?p param:paramName ?name ;
     a owl:NamedIndividual ;
     rdf:type task:HookParameter ;
     unit:hasValue ?value ;
     param:hasType ?type .
}
""" % (self.ontology._tail(task))
        d = {}
        for pname, pvalue, ptype in self.ontology.query(q):
            pname = str(pname)
            if ptype == self.ontology.ns('param')["Integer"]:
                d[pname] = int(pvalue)
            elif ptype == self.ontology.ns('param')["Float"]:
                d[pname] = float(pvalue)
            else:
                # any other declared type is kept as a string
                d[pname] = str(pvalue)
        return d

    def inferHookdefinitionsOf(self, task):
        '''
        @summary: extract the hook implementation details for task initialization
        @param task: reference to the monitoring task
        @type task: URIRef
        @return: a lookup table of hook definitions
        @rtype: dict
        '''
        q = """
SELECT ?rel ?value
WHERE {
  config:%s ?rel ?h .
  ?h task:hookCode ?value .
}
""" % (self.ontology._tail(task))
        d = {}
        for hrel, hvalue in self.ontology.query(q):
            hook = self.ontology._tail(uriref = hrel)
            # unescape the code blob stored in the model
            d[self.hooklookup[hook]] = str(hvalue).replace('\\n', '\n').replace('\\t', '\t').replace('\\\\', '\\').strip()
        return d

    def inferDataheaderOf(self, task):
        '''
        @summary: extract the data header declaration the for task
        @param task: reference to the monitoring task
        @type task: URIRef
        @return: an initialized DataHeader instance
        @rtype: DataHeader
        '''
        q = """
SELECT ?tablename ?colname ?dim ?feature ?unit ?prefix
WHERE {
  config:%s task:hasOutputTableFormat ?hdr .
  ?hdr task:name ?tablename .
  ?hdr task:hasColumn ?col .
  ?col task:name ?colname ;
       a owl:NamedIndividual ;
       a ?dim ;
       task:sequenceNumber ?seqno .
  {
    ?dim rdfs:subClassOf unit:BaseDimension .
  } UNION {
    ?dim rdfs:subClassOf ?p .
    ?p rdfs:subClassOf unit:DerivedDimension .
  }
  OPTIONAL {
    ?col task:hasMonitoredFeature ?feature .
  }
  OPTIONAL {
    ?col unit:hasUnit ?unit .
    OPTIONAL {
      ?col unit:hasPrefix ?prefix .
    }
  }
}
ORDER BY ?seqno
""" % (self.ontology._tail(task))
        datahdr = None
        for tablename, colname, uri_dim, uri_feature, uri_unit, uri_prefix in self.ontology.query(q):
            if datahdr is None:
                # the table name is the same in every row; create on first hit
                datahdr = DataHeader(str(tablename))
            if uri_unit is None:
                u = None
            elif uri_prefix is None:
                u = self.um[ self.ontology._tail(uri_unit) ]
            else:
                ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
                u = self.um[ref]
            d = self.dm[ self.ontology._tail(uri_dim) ]
            if uri_feature is None:
                cell = DataHeaderCell(name = str(colname), dimension = d, unit = u)
            else:
                cell = DataHeaderCell(name = str(colname), dimension = d, feature = uri_feature, unit = u)
            datahdr.addColumn(cell)
        return datahdr

    def inferParametersOf(self, task):
        '''
        @summary: extract the parameter list for the given task
        @param task: reference to the monitoring task
        @type task: URIRef
        @return: an initialized list of the parameters of the task
        @rtype: ParameterList
        '''
        q = """
SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
WHERE {
  config:%s task:hasExecutionParameter ?par .
  ?par param:paramName ?name ;
       param:hasType ?ptype ;
       a ?dim .
  {
    ?dim rdfs:subClassOf unit:BaseDimension .
  } UNION {
    ?dim rdfs:subClassOf ?p .
    ?p rdfs:subClassOf unit:DerivedDimension .
  }
  OPTIONAL {
    ?par unit:hasValue ?defval .
    OPTIONAL {
      ?par unit:hasUnit ?unit .
      OPTIONAL {
        ?par unit:hasPrefix ?prefix .
      }
    }
  }
}
""" % (self.ontology._tail(task))
        paramlist = ParameterList()
        for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
            p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
            paramlist.append(p)
        return paramlist

    def translateParameter(self, name, uri_dim, uri_unit, uri_prefix, uri_ptype, default = None):
        '''
        @summary: helper method to instantiate a Parameter
        @param name: the reference name of the parameter
        @type name: str
        @param uri_dim: the dimension of the parameter
        @type uri_dim: URIRef
        @param uri_unit: the unit of the parameter, if None we fall back to the unit of the dimension
        @type uri_unit: URIRef
        @param uri_prefix: accounts only if uri_unit is not None
        @type uri_prefix: URIRef
        @param uri_ptype: the type of the parameter to use for serialization
        @type uri_ptype: URIRef
        @param default: the parameter value to initialize with, if None, parameter won't hold a value
        @type default: Literal
        @return: a parameter
        @rtype: Parameter
        '''
        vt = self.typelookup[ self.ontology._tail(uri_ptype) ]
        d = self.dm[ self.ontology._tail(uri_dim) ]
        if default is None:
            return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d)
        else:
            if uri_unit is None:
                if uri_prefix is None:
                    u = d.unit
                else:
                    ref = "%s_%s" % (self.ontology._tail(uri_prefix), d.unit.reference)
                    u = self.um[ref]
            else:
                if uri_prefix is None:
                    u = self.um[ self.ontology._tail(uri_unit) ]
                else:
                    ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
                    u = self.um[ref]
            return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d, default = (vt(default), u))
diff --git a/Monitoring/MonitoringService/Semantics/UnitModel.py b/Monitoring/MonitoringService/Semantics/UnitModel.py
new file mode 100644
index 0000000..e09a22a
--- /dev/null
+++ b/Monitoring/MonitoringService/Semantics/UnitModel.py
@@ -0,0 +1,398 @@
+'''
+Created on Feb 12, 2012
+
+@author: steger
+'''
+from DataProcessing.Prefix import PrefixManager
+from DataProcessing.Unit import UnitManager
+from DataProcessing.Dimension import DimensionManager
+from DataProcessing.MeasurementLevel import lut_level
+
+class UnitModel(object):
+ '''
+ @summary: an interface to infer prefix, unit and dimension related information from the model
+ '''
+
    def __init__(self, ontology):
        '''
        @summary: constructor; infers all prefixes, units and dimensions from the
        ontology and populates the prefix, unit and dimension managers.
        The order matters: prefixes first (units reference them), then units,
        then dimensions (which reference units and other dimensions).
        @param ontology: the basic knowledge
        @type ontology: Ontology
        '''
        self.ontology = ontology
        self.pm = PrefixManager()
        self.um = UnitManager()
        self.dm = DimensionManager(self.um)

        # infer and store prefixes
        for (p_reference, p_symbol, base, exponent) in self.inferPrefixes():
            self.pm.newPrefix( self.ontology._tail(p_reference), p_symbol, base, exponent )

        # infer basic units
        # NOTE(review): power and product units are registered as plain basic
        # units here -- their derivation details are discarded; confirm intentional
        for u_reference, u_symbol in self.inferBaseUnits():
            self.storeBasicUnit(u_reference, u_symbol)
        for u_reference, u_symbol, _, _ in self.inferPowerUnits():
            self.storeBasicUnit(u_reference, u_symbol)
        for u_reference, u_symbol, _ in self.inferProductUnits():
            self.storeBasicUnit(u_reference, u_symbol)
        for u_reference, u_symbol, derivedfrom, scale, offset in self.inferLinearTransformedUnits():
            self.storeLinearTransformedUnit(u_reference, u_symbol, derivedfrom, scale, offset)
        for u_reference, u_symbol, derivedfrom, expr_fwd, expr_inv in self.inferRegexpTransformedUnits():
            uref = self.ontology._tail(u_reference)
            ancestor = self.um[ self.ontology._tail(derivedfrom) ]
            self.um.addRegexpTransformedUnit(uref, u_symbol, ancestor, expr_fwd, expr_inv)

        # infer dimensions
        #FIXME: if there is a reference loop an error is raised...
        for d_reference, u_reference, level in self.inferBaseDimensions():
            dref = self.ontology._tail(d_reference)
            uref = self.ontology._tail(u_reference)
            lref = self.ontology._tail(level)
            # measurement level (nominal/ordinal/interval/ratio) looked up by name
            level = lut_level[lref]
            unit = self.um[uref]
            self.dm.newBaseDimension(dref, dref, unit, level)
        for d_reference, u_reference, d_derivedfrom in self.inferDifferenceDimensions():
            dref = self.ontology._tail(d_reference)
            uref = self.ontology._tail(u_reference)
            daref = self.ontology._tail(d_derivedfrom)
            unit = self.um[uref]
            derivedfrom = self.dm[daref]
            self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.DifferenceDimension)
        for d_reference, u_reference, d_derivedfrom, exponent in self.inferPowerDimensions():
            dref = self.ontology._tail(d_reference)
            uref = self.ontology._tail(u_reference)
            daref = self.ontology._tail(d_derivedfrom)
            unit = self.um[uref]
            derivedfrom = self.dm[daref]
            self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.PowerDimension, exponent = exponent)
        for d_reference, u_reference, d_derivedfrom in self.inferProductDimensions():
            dref = self.ontology._tail(d_reference)
            uref = self.ontology._tail(u_reference)
            unit = self.um[uref]
            # a product dimension is derived from a tuple of ancestor dimensions
            derivedfrom = tuple( self.dm[self.ontology._tail(x)] for x in d_derivedfrom )
            self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.ProductDimension)
        for d_reference, u_reference, d_derivedfrom in self.inferRatioDimensions():
            dref = self.ontology._tail(d_reference)
            uref = self.ontology._tail(u_reference)
            daref = self.ontology._tail(d_derivedfrom)
            unit = self.um[uref]
            derivedfrom = self.dm[daref]
            self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.RatioDimension)
+
+ def storeBasicUnit(self, u_reference, u_symbol):
+ uref = self.ontology._tail(u_reference)
+ bu = self.um.newBasicUnit(uref, u_symbol)
+ for p_reference in self.inferPossiblePrefixesOf(u_reference):
+ p = self.pm[ self.ontology._tail(p_reference) ]
+ puref = "%s_%s" % (p.reference, uref)
+ symbol = "%s%s" % (p.symbol, bu.symbol)
+ self.um.addLinearTransformedUnit(puref, symbol, bu, p.scale)
+
+ def storeLinearTransformedUnit(self, u_reference, u_symbol, derivedfrom, scale, offset):
+ uref = self.ontology._tail(u_reference)
+ ancestor = self.um[ self.ontology._tail(derivedfrom) ]
+ bu = self.um.addLinearTransformedUnit(uref, u_symbol, ancestor, scale, offset)
+ for p_reference in self.inferPossiblePrefixesOf(u_reference):
+ p = self.pm[ self.ontology._tail(p_reference) ]
+ puref = "%s_%s" % (p.reference, uref)
+ symbol = "%s%s" % (p.symbol, bu.symbol)
+ self.um.addLinearTransformedUnit(puref, symbol, bu, p.scale)
+
+ def inferPrefixes(self):
+ '''
+ @summary: iterate over all prefixes defined in the model.
+ @return: a generator of the prefix details: (reference, symbol, base, exponent)
+ @rtype: (URIRef, str, int, int)
+ @todo: in case the unit:base is not present in a Prefix individual,
+ we should fall back to the restriction on the base defined for the given sibling of the Prefix.
+ This sibling is referenced ?basegroup in the query.
+ '''
+ q = """
+SELECT ?prefix ?symbol ?base ?exponent
+WHERE {
+ ?prefix a owl:NamedIndividual ;
+ a ?basegroup ;
+ unit:exponent ?exponent ;
+ unit:base ?base .
+ ?basegroup rdfs:subClassOf unit:Prefix .
+ OPTIONAL {
+ ?prefix unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_prefix, symbol, base, exponent in self.ontology.query(q):
+ if symbol is None:
+ yield uri_prefix, self.ontology._tail(uri_prefix), int(base), int(exponent)
+ else:
+ yield uri_prefix, str(symbol), int(base), int(exponent)
+
+ def inferPrefixSymbolOf(self, prefixuri):
+ '''
+ @summary: generates an short written form of a unit prefix if unit:symbol is present in the model,
+ otherwise an abbreviation is derived from the tail of the uri (the reference name to the individual).
+ @param prefixuri: the uri reference to the unit prefix
+ @type prefixuri: URIRef
+ @return: the short form
+ @rtype: str
+ '''
+ try:
+ _, _, symbol = self.ontology.triples((prefixuri, self.ontology.ns('unit')['symbol'], None)).next()
+ return str(symbol)
+ except StopIteration:
+ return self.ontology._tail(prefixuri)
+
+
+ def inferBaseUnits(self):
+ '''
+ @summary: iterate over all BaseUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol)
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?unit ?symbol
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:BaseUnit .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit)
+ else:
+ yield uri_unit, str(symbol)
+
+ def inferPowerUnits(self):
+ '''
+ @summary: iterate over all PowerUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, powerof, exponent)
+ @rtype: (URIRef, str, URIRef, int)
+ '''
+ q = """
+SELECT ?unit ?symbol ?powerof ?exponent
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:PowerUnit ;
+ unit:exponent ?exponent ;
+ unit:derivedFrom ?powerof .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, uri_powerof, exponent in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_powerof, int(exponent)
+ else:
+ yield uri_unit, str(symbol), uri_powerof, int(exponent)
+
+ def inferProductUnits(self):
+ '''
+ @summary: iterate over all ProductUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, productof)
+ @rtype: (URIRef, str, set(URIRef))
+ '''
+ q = """
+SELECT ?unit ?symbol ?productof
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:ProductUnit ;
+ unit:derivedFrom ?productof
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ container = {}
+ for uri_unit, symbol, uri_productof in self.ontology.query(q):
+ if symbol is None:
+ key = uri_unit, self.ontology_tail(uri_unit)
+ else:
+ key = uri_unit, str(symbol)
+ if not container.has_key(key):
+ container[key] = set()
+ container[key].add(uri_productof)
+ for (uri_unit, symbol), productof in container.iteritems():
+ yield uri_unit, symbol, productof
+
+ def inferLinearTransformedUnits(self):
+ '''
+ @summary: iterate over all LinearTransformedUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, derivedfrom, scale, offset)
+ @rtype: (URIRef, str, URIRef, float, float)
+ '''
+ q = """
+SELECT ?unit ?symbol ?scale ?offset ?derivedfrom
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:LinearTransformedUnit ;
+ unit:derivedFrom ?derivedfrom ;
+ unit:scale ?scale .
+ OPTIONAL {
+ ?unit unit:offset ?offset .
+ }
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, scale, offset, uri_derivedfrom in self.ontology.query(q):
+ if offset is None:
+ offset = 0
+ else:
+ offset = self.ontology._float(offset)
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_derivedfrom, self.ontology._float(scale), offset
+ else:
+ yield uri_unit, str(symbol), uri_derivedfrom, self.ontology._float(scale), offset
+
+ def inferRegexpTransformedUnits(self):
+ '''
+ @summary: iterate over all RegexpTransformedUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, derivedfrom, expr_fwd, expr_inv)
+ @rtype: (URIRef, str, URIRef, str, str)
+ '''
+ q = """
+SELECT ?unit ?symbol ?derivedfrom ?fwd ?inv
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:RegexpTransformedUnit ;
+ unit:derivedFrom ?derivedfrom ;
+ unit:forwardExpression ?fwd ;
+ unit:inverseExpression ?inv .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, uri_derivedfrom, expr_fwd, expr_inv in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_derivedfrom, str(expr_fwd), str(expr_inv)
+ else:
+ yield uri_unit, str(symbol), uri_derivedfrom, str(expr_fwd), str(expr_inv)
+
+ def inferPossiblePrefixesOf(self, uri_unit):
+ '''
+ @summary: extract possible prefixes for the given unit
+ @param unit: reference to the unit
+ @type unit: URIRef
+ @return: a generator over the references of the possible unit prefixes
+ @rtype: URIRef
+ '''
+ for _, _, uri_prefix in self.ontology.triples((uri_unit, self.ontology.ns('unit')['possiblePrefix'], None)):
+ yield uri_prefix
+
+ def inferBaseDimensions(self):
+ '''
+ @summary: extract BaseDimensions and their corresponding units from the model
+ @return: a generator of the BaseDimension details: (reference, unit, level)
+ @rtype: (URIRef, URIRef, str)
+ '''
+ q = """
+SELECT ?dimension ?unit ?level
+WHERE {
+ ?dimension rdfs:subClassOf unit:BaseDimension ;
+ rdfs:subClassOf ?constraint ;
+ rdfs:subClassOf ?level .
+ ?constraint owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ FILTER regex(?level, "Level") .
+}
+ """
+ for uri_dimension, uri_unit, level in self.ontology.query(q):
+ yield uri_dimension, uri_unit, level
+
+ def inferDifferenceDimensions(self):
+ '''
+ @summary: extract DifferenceDimensions and their corresponding units from the model
+ @return: a generator of the DifferenceDimension details: (reference, unit, derivedfrom)
+ @rtype: (URIRef, URIRef, URIRef)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:DifferenceDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom
+
+ def inferPowerDimensions(self):
+ '''
+ @summary: extract PowerDimensions and their corresponding units from the model
+ @return: a generator of the PowerDimension details: (reference, unit, derivedfrom, exponent)
+ @rtype: (URIRef, URIRef, URIRef, int)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom ?exponent
+WHERE {
+ ?dimension rdfs:subClassOf unit:PowerDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 ;
+ rdfs:subClassOf ?constraint3 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+ ?constraint3 owl:onProperty unit:exponent ;
+ owl:hasValue ?exponent .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom, exponent in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom, int(exponent)
+
+ def inferProductDimensions(self):
+ '''
+ @summary: extract ProductDimensions and their corresponding units from the model
+ @return: a generator of the ProductDimension details: (reference, unit, set of derivedfrom references)
+ @rtype: (URIRef, URIRef, tuple(URIRef))
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:ProductDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ container = {}
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ if not container.has_key(uri_dimension):
+ container[uri_dimension] = (uri_unit, set())
+ container[uri_dimension][1].add(uri_derivedfrom)
+ for uri_dimension, (uri_unit, set_derivedfrom) in container.iteritems():
+ yield uri_dimension, uri_unit, tuple(set_derivedfrom)
+
+ def inferRatioDimensions(self):
+ '''
+ @summary: extract RatioDimensions and their corresponding units from the model
+ @return: a generator of the RatioDimension details: (reference, unit, derivedfrom)
+ @rtype: (URIRef, URIRef, URIRef)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:RatioDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom
+
diff --git a/Monitoring/MonitoringService/Semantics/__init__.py b/Monitoring/MonitoringService/Semantics/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/MonitoringService/Semantics/__init__.py
diff --git a/Monitoring/MonitoringService/Semantics/test.py b/Monitoring/MonitoringService/Semantics/test.py
new file mode 100644
index 0000000..8784e44
--- /dev/null
+++ b/Monitoring/MonitoringService/Semantics/test.py
@@ -0,0 +1,336 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+import unittest
+from rdflib import Graph
+from Example.Metrics import RoundTripDelay
+from Example.Tools import sonomashortping
+from DataProcessing.Parameter import ParameterList, Parameter
+from Example.credentials import ple_credentials, fed_credentials
+from DataProcessing.DataHeaderCell import CellRequestByName
+from DataProcessing.DataError import SamplerError
+from DataProcessing.DataReader import DataReader
+from Service.mock_framework import Framework
+from Example.Platforms import federation
+from Semantics.FeatureModel import FeatureModel
+
# Bootstrap a mock framework with one monitoring service interface per platform.
# NOTE(review): plif keeps only the last interface added and is otherwise unused
fw = Framework()
for platform, (_, config_owl) in federation.iteritems():
    plif = fw.add(platform, config_owl)
+
class Test(unittest.TestCase):
    # Integration tests for the Semantics layer: they exercise unit/dimension
    # inference, feature-to-task lookup and live task execution through the
    # mock framework instantiated at module level (fw).

    def setUp(self):
        # one monitoring service per platform, served by the mock framework
        self.MS_planetlab = fw.getInterface(platform = 'PlanetLab').service
        self.MS_federica = fw.getInterface(platform = 'FEDERICA').service

        # PlanetLab domains and source/destination node parameters
        self.substrate_pl = self.MS_planetlab.ontology.ns('task')['Substrate']
        self.slice_pl = self.MS_planetlab.ontology.ns('task')['Slice']
        dim_ipaddress = self.MS_planetlab.dm['IPAddress']
        node = ("157.181.175.243", self.MS_planetlab.um.ipv4dotted)
        self.p_src_eltenode = Parameter(name = "SourceAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
        node = ("147.102.22.66", self.MS_planetlab.um.ipv4dotted)
        self.p_dst_ntuanode = Parameter(name = "DestinationAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)

        # FEDERICA domains and source/destination node parameters
        self.substrate_fed = self.MS_federica.ontology.ns('task')['Substrate']
        self.slice_fed = self.MS_federica.ontology.ns('task')['Slice']
        dim_ipaddress = self.MS_federica.dm['IPAddress']
        node = ("194.132.52.2", self.MS_federica.um.ipv4dotted) #sw01.erl.de
        self.p_src_fednode = Parameter(name = "SourceAddress", valuetype = str, unitmanager = self.MS_federica.um, dimension = dim_ipaddress, default = node)
        node = ("194.132.52.4", self.MS_federica.um.ipv4dotted) #sw01.poz.pl
        self.p_dst_fednode = Parameter(name = "DestinationAddress", valuetype = str, unitmanager = self.MS_federica.um, dimension = dim_ipaddress, default = node)

        # NOTE(review): duplicate of the assignments above -- harmless but redundant
        self.substrate_pl = self.MS_planetlab.ontology.ns('task')['Substrate']
        self.slice_pl = self.MS_planetlab.ontology.ns('task')['Slice']

        # expected feature -> ([substrate tools], [slice tools]) mapping (PlanetLab)
        self.feat_task_pl = {
            'OnewayDelay': (['SONoMAChirp'], []),
            'RoundtripDelay': (['SONoMAPing'], ['sshpingSlice']),
            'AvailableBandwidth': ([], ['sshabSlice']),
            'AvailableMemory': (['sshMeminfo'], ['sshMeminfoSlice']),
            'FreeMemory': (['sshMeminfo'], ['sshMeminfoSlice']),
            'MemoryUtilization': (['sshMeminfo'], ['sshMeminfoSlice']),
            'Uptime': (['sshuptime'], ['sshuptimeSlice']),
            'CPULoad': (['sshcpuload'], ['sshcpuloadSlice']),
            #'CPUCores': (['sshcpuload'], ['sshcpuloadSlice']),
            'CPUCores': (['sshcpuinfo2'], ['sshcpuinfo2Slice']),
            'CPUSockets': (['sshcpuinfo2'], ['sshcpuinfo2Slice']),
            'CPUUtilization': (['sshcpuload'], ['sshcpuloadSlice']),
            'FreeDiskSpace': (['sshdiskinfo'], ['sshdiskinfoSlice']),
            'UsedDiskSpace': (['sshdiskinfo'], ['sshdiskinfoSlice']),
            'DiskUtilization': (['sshdiskinfo'], ['sshdiskinfoSlice']),
            'LinkUtilization': ([], []),
        }

        # expected feature -> tool mapping (FEDERICA); most entries disabled
        self.feat_task_fed = {
#FIXME: 3 items?
#            'OnewayDelay': (['hadesaggregates', 'hadesaggregates', 'hadesaggregates'], []),
#            'RoundtripDelay': ([], []),
#            'AvailableBandwidth': ([], []),
#            'AvailableMemory': (['sshMeminfo'], []),
#            'FreeMemory': (['sshMeminfo'], []),
            'MemoryUtilization': (['G3 memory utilization look up'], []),
#            'Uptime': (['sshuptime'], []),
#            'CPULoad': (['sshcpuload'], []),
            #'CPUCores': ([], []),
#            'CPUCores': (['sshcpuload'], []),
#            'CPUSockets': ([], []),
            'CPUUtilization': (['G3 CPU utilization look up'], []),
#            'FreeDiskSpace': (['sshdiskinfo'], []),
#            'UsedDiskSpace': (['sshdiskinfo'], []),
#            'DiskUtilization': ([], []),
            'LinkUtilization': (['G3 link utilization look up'], []),
        }

        # common runtime parameters used by several tasks
        dim_nameofsomething = self.MS_planetlab.dm['NameOfSomething']
        self.slicename = Parameter(name = "SliceName", valuetype = str,
            unitmanager = self.MS_planetlab.um, dimension = dim_nameofsomething,
            default = ('novi_novi', self.MS_planetlab.um.unitless))
        dim_countable = self.MS_planetlab.dm['Countable']
        self.count = Parameter(name = 'Count', valuetype = int,
            unitmanager = self.MS_planetlab.um, dimension = dim_countable,
            default = (5, self.MS_planetlab.um.piece))
        self.fm = FeatureModel(self.MS_planetlab.dm, self.MS_planetlab.um, self.MS_planetlab.ontology)

    def test_managers(self):
        # counts below are pinned to the content of the example ontologies
        expect = 14
        infer = len(self.MS_planetlab.pm)
        self.assertEqual(infer, expect, "Prefix: got %d expect %d" % (infer, expect))

        expect = 10
        infer = [ s for _, s in self.MS_planetlab.unitmodel.inferBaseUnits() ]
        self.assertEqual(expect, len(infer), "BaseUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for _, d, _ in self.MS_planetlab.unitmodel.inferProductUnits() ]
        self.assertEqual(expect, len(infer), "ProductUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for _, d, _, _ in self.MS_planetlab.unitmodel.inferPowerUnits() ]
        self.assertEqual(expect, len(infer), "PowerUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 12
        infer = [ d for _, d, _, _, _ in self.MS_planetlab.unitmodel.inferLinearTransformedUnits() ]
        self.assertEqual(expect, len(infer), "LinearTransformedUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 2
        infer = [ d for _, d, _, _, _ in self.MS_planetlab.unitmodel.inferRegexpTransformedUnits() ]
        self.assertEqual(expect, len(infer), "RegexpTransformedUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 8
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferBaseDimensions() ]
        self.assertEqual(expect, len(infer), "BaseDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferDifferenceDimensions() ]
        self.assertEqual(expect, len(infer), "DifferenceDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for d, _, _, _ in self.MS_planetlab.unitmodel.inferPowerDimensions() ]
        self.assertEqual(expect, len(infer), "PowerDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferProductDimensions() ]
        self.assertEqual(expect, len(infer), "ProductDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 4
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferRatioDimensions() ]
        self.assertEqual(expect, len(infer), "RatioDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))


        # expected number of possible prefixes for a few well known units
        NS = self.MS_planetlab.ontology.ns('unit')
        for expect, uri in [(4, NS['second']), (7, NS['Byte']), (3, NS['bit']), (1, NS['unixtimestamp'])]:
            infer = [s for s in self.MS_planetlab.unitmodel.inferPossiblePrefixesOf(uri)]
            self.assertEqual(expect, len(infer), "inferPossiblePrefixesOf: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))



    def test_IM_domainsfeatures(self):
        # the model must expose exactly the Slice and Substrate domains and
        # as many features as the expected mapping declares
        fm = self.fm

        expect = set(['Slice', 'Substrate'])
        infer = set([ self.MS_planetlab.ontology._tail(x) for x in fm.inferDomains() ])
        self.assertEqual(expect, infer, "inferDomains: expect %d, got %d\n%s" % (len(expect), len(infer), str(infer)))

        expect = len(self.feat_task_pl)
        infer = [ x for x in fm.inferFeatures()]
        self.assertEqual(expect, len(infer), "inferFeatures (PL): expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

    def test_IM_featparameters(self):
        # obligatory parameters inferred from the model must match the
        # hand-written declaration in Example.Metrics
        feature = self.MS_planetlab.ontology.ns('feature')['RoundtripDelay']
        expect = RoundTripDelay.p_obligatory
        infer = self.fm.inferObligatoryParametersOf(feature)
        self.assertEqual(len(expect), len(infer), "obligatory parameters for %s differ expect: %s got: %s" % (feature, expect.parameter_names(), infer.parameter_names()))
        for k, p in expect.parameter.iteritems():
            inf_v = infer.parameter[k].value
            exp_v = p.value
            # compare the unit via its string form
            inf_v = (inf_v[0], str(inf_v[1]))
            exp_v = (exp_v[0], str(exp_v[1]))
            self.assertEqual(inf_v, exp_v, "Parameter value differ %s expect:\n%s\ngot:\n%s" % (k, exp_v, inf_v))


    def test_IM_task(self):
        # feature -> tool resolution for both platforms and both domains
        for feat, (t_subst, t_slice) in self.feat_task_pl.iteritems():
            feature = self.MS_planetlab.ontology.ns('feature')[feat]
            infer_t_subst = [ name for _, name in self.MS_planetlab.taskmodel.inferTasks(self.substrate_pl, feature)]
            infer_t_slice = [ name for _, name in self.MS_planetlab.taskmodel.inferTasks(self.slice_pl, feature)]
            self.assertEqual(infer_t_subst, t_subst, "(PL) feature: %s searchtask (substrate): expect %s, got %s" % (feat, t_subst, infer_t_subst))
            self.assertEqual(infer_t_slice, t_slice, "(PL) feature: %s searchtask (slice): expect %s, got %s" % (feat, t_slice, infer_t_slice))

        for feat, (t_subst, t_slice) in self.feat_task_fed.iteritems():
            feature = self.MS_federica.ontology.ns('feature')[feat]
            infer_t_subst = [ name for _, name in self.MS_federica.taskmodel.inferTasks(self.substrate_fed, feature)]
            infer_t_slice = [ name for _, name in self.MS_federica.taskmodel.inferTasks(self.slice_fed, feature)]
            self.assertEqual(infer_t_subst, t_subst, "(FED) feature: %s searchtask (substrate): expect %s, got %s" % (feat, t_subst, infer_t_subst))
            self.assertEqual(infer_t_slice, t_slice, "(FED) feature: %s searchtask (slice): expect %s, got %s" % (feat, t_slice, infer_t_slice))


        # task details inferred from the model must match the declaration
        # of the sonomashortping example tool
        task = self.MS_planetlab.ontology.ns('config')['T_SONoMAPing']
        infer = self.MS_planetlab.taskmodel.inferCredentialOf(task)
        expect = set(sonomashortping.authtype)
        self.assertEqual(infer, expect, "credentials differ expect: %s got: %s" % (expect, infer))

        infer = self.MS_planetlab.taskmodel.inferDriverOf(task)
        expect = sonomashortping.driver
        self.assertEqual(infer, expect, "drivers differ expect: %s got: %s" % (expect, infer))

        infer = self.MS_planetlab.taskmodel.inferHookparametersOf(task)
        expect = sonomashortping.kwargs
        self.assertEqual(infer, expect, "hook parameters differ expect: %s got: %s" % (expect, infer))

        H = self.MS_planetlab.taskmodel.inferHookdefinitionsOf(task)
        for k, h in H.iteritems():
            # whitespace-insensitive comparison of the hook source code
            exp = sonomashortping.hooks[k].strip()
            h = h.strip()
            self.assertEqual(h, exp, "%s hook differs\nexpect:\n%s\ngot:\n%s" % (k, exp, h))

        #TODO: check feature equality
        infer = [ (c.name, str(c._unit), str(c._dimension)) for c in self.MS_planetlab.taskmodel.inferDataheaderOf(task) ]
        expect = [ (c.name, str(c._unit), str(c._dimension)) for c in sonomashortping.dataheaderdeclaration ]
        self.assertEqual(infer, expect, "output header declarations differ expect:\n%s\ngot:\n%s" % (expect, infer))

        infer = self.MS_planetlab.taskmodel.inferParametersOf(task)
        expect = sonomashortping.parameters
        n_inf, n_exp = set(infer.parameter_names()), set(expect.parameter_names())
        self.assertEqual(n_inf, n_exp, "runtime parameters differ expect: %s got: %s" %(n_exp, n_inf))
        for k, p in expect.parameter.iteritems():
            inf_v = infer.parameter[k].value
            exp_v = p.value
            if exp_v is None:
                self.assertFalse(inf_v, "Expected uninitialized value, got %s" % inf_v)
            else:
                inf_v = (inf_v[0], str(inf_v[1]))
                exp_v = (exp_v[0], str(exp_v[1]))
                self.assertEqual(inf_v, exp_v, "Parameter value differ %s expect:\n%s\ngot:\n%s" % (k, exp_v, inf_v))


    def test_taskBYuri(self):
        # PlanetLab
        # run real measurement tasks and expect a non-empty result set
        cases = {
            'T_SSHPingSlice': [self.p_src_eltenode, self.slicename, self.count, self.p_dst_ntuanode],
            'T_SSHMemInfo': [self.p_src_eltenode],
            'T_SSHMemInfoSlice': [self.p_src_eltenode, self.slicename],
            'T_SSHCPULoad': [self.p_src_eltenode],
            'T_SSHCPULoadSlice': [self.p_src_eltenode, self.slicename],
            'T_SSHUptime': [self.p_src_eltenode],
            'T_SSHUptimeSlice': [self.p_src_eltenode, self.slicename],
            'T_SSHDiskInfo': [self.p_src_eltenode],
            'T_SSHDiskInfoSlice': [self.p_src_eltenode, self.slicename],
            'T_SSHCPUInfo': [self.p_src_eltenode],
            'T_SSHCPUInfoSlice': [self.p_src_eltenode, self.slicename],
#            'T_SONoMAPing': [self.p_src_eltenode, self.p_dst_ntuanode],
        }
        for l,p in cases.iteritems():
            task_uri = self.MS_planetlab.ontology.ns('config')[l]
            print 1, task_uri
            _, task = self.MS_planetlab.newTask(task = task_uri,
                cred = ple_credentials,
                resource = None,
                parameters = ParameterList(p))
#            print task_uri, ParameterList(p)
            task.enable()
            # wait at most 15 seconds for the first data to arrive
            task.dataAdded.wait( 15 )
            self.assertGreater(len(task.data), 0, "measurement %s yielded empty result" % l)
#            print task.data._rawrecords

    def test_taskBYuri2(self):
        # FEDERICA
        # only the G3 look-up based tasks are enabled; the rest is kept for reference
        cases = {
#            'T_SSHPingSlice': [self.p_src_eltenode, self.slicename, self.count, self.p_dst_ntuanode],
#            'T_SSHMemInfo': [self.p_src_eltenode],
#            'T_SSHMemInfoSlice': [self.p_src_eltenode, self.slicename],
#            'T_SSHCPULoad': [self.p_src_eltenode],
#            'T_SSHCPULoadSlice': [self.p_src_eltenode, self.slicename],
#            'T_SSHUptime': [self.p_src_eltenode],
#            'T_SSHUptimeSlice': [self.p_src_eltenode, self.slicename],
#            'T_SSHDiskInfo': [self.p_src_eltenode],
#            'T_SSHDiskInfoSlice': [self.p_src_eltenode, self.slicename],
#            'T_SSHCPUInfo': [self.p_src_eltenode],
#            'T_SSHCPUInfoSlice': [self.p_src_eltenode, self.slicename],
#            'T_SONoMAPing': [self.p_src_eltenode, self.p_dst_ntuanode],
#            'T_hadesaggregate': [self.p_src_fednode, self.p_dst_fednode],
            'T_G3linkutil': [self.p_src_fednode, self.p_dst_fednode],
            'T_G3CPUutil': [self.p_src_fednode],
            'T_G3MEMutil': [self.p_src_fednode],
        }
        for l,p in cases.iteritems():
            task_uri = self.MS_federica.ontology.ns('config')[l]
            _, task = self.MS_federica.newTask(task = task_uri,
                cred = fed_credentials,
                resource = None,
                parameters = ParameterList(p))
#            print task_uri, ParameterList(p)
            task.enable()
            # wait at most 15 seconds for the first data to arrive
            task.dataAdded.wait( 15 )
            self.assertGreater(len(task.data), 0, "measurement %s yielded empty result" % l)
#            print task.data._data._rawrecords


    def test_owlexamples(self):
        # parse the example bundle query, run the first matching tool for each
        # query and cross check the sample chain aggregate against a DataReader
        doc = "../../information-model/monitoring-model/monitoringQuery_example.owl"
        g = Graph()
        g.parse(source = doc)
        qdict = self.MS_planetlab.QI.inferBundleQueries(qgraph = g)
        self.assertTrue(len(qdict), "Got empty query")
        for q in qdict:
            domain = self.MS_planetlab.ontology.ns('task')['Substrate']
            taskgen = self.MS_planetlab.taskmodel.inferTasks(domain, q.feature)
            #we are ugly here: use the first tool
            for task_uri, _ in taskgen: break
            _, task = self.MS_planetlab.newTask(task = task_uri,
                cred = ple_credentials,
                resource = q.resource[1],
                parameters = q.paramlist)
            del task.strategy # make sure STRAT_ONDEMAND
            task.enable()
            task.dataAdded.wait( 15 )
            task.dataAdded.clear()
            if q.samplechain:
                flow = []
                for skeleton, parlist in q.samplechain:
                    flow.append((skeleton, parlist.formkeyvaldict()))
                _, A = self.MS_planetlab.am.newAggregator(task.datasource, CellRequestByName(name = 'Free Memory'), flow)
                while True:
                    try:
                        s, a = A.data._rawrecords[0]
                        self.assertEqual(s, len(task.data), "inconsistency in length len(data)=%d, max of %d samples?" % (len(task.data), s))
                        R = DataReader(datasource = task.datasource)
                        R.extract(cellrequest = [CellRequestByName(name = 'Free Memory')])
                        expect = max( [ float(x) for x, in R ] )
                        self.assertEqual(expect, a, "inconsistency in aggregare %f <> %f" % (expect, a))
                        break
                    except SamplerError:
                        # not enough samples yet: trigger another measurement round
                        print "MEASURE SOME MORE ..."
                        task.disable()
                        task.enable()
                        task.dataAdded.wait( 15 )
                        task.dataAdded.clear()
            self.assertGreater(len(task.data), 0, "measurement yielded empty result")
+
+
# Run the whole suite when executed as a script.
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.test_IM_domainsfeatures']
    unittest.main()