summaryrefslogtreecommitdiffstats
path: root/Monitoring/src/main/python/Semantics
diff options
context:
space:
mode:
author    pikusa <pikusa@man.poznan.pl> 2013-04-03 13:18:17 (GMT)
committer pikusa <pikusa@man.poznan.pl> 2013-04-03 13:18:17 (GMT)
commit    2f2a3a129c91de540e66c3bfbe30b0df1942cd4b (patch)
tree      2d313cdf0068af368d4de6067d676be16f6a6464 /Monitoring/src/main/python/Semantics
parent    ff8aa232b071a9b54dff833714a870fd0aec0b30 (diff)
downloadnovi-public-2f2a3a129c91de540e66c3bfbe30b0df1942cd4b.zip
novi-public-2f2a3a129c91de540e66c3bfbe30b0df1942cd4b.tar.gz
novi-public-2f2a3a129c91de540e66c3bfbe30b0df1942cd4b.tar.bz2
project commit and dir tree change
Diffstat (limited to 'Monitoring/src/main/python/Semantics')
-rw-r--r--Monitoring/src/main/python/Semantics/InformationModel$py.classbin0 -> 12356 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/InformationModel.py88
-rw-r--r--Monitoring/src/main/python/Semantics/InformationModel.py.old79
-rw-r--r--Monitoring/src/main/python/Semantics/Query$py.classbin0 -> 12155 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/Query.py140
-rw-r--r--Monitoring/src/main/python/Semantics/Query.py.old139
-rw-r--r--Monitoring/src/main/python/Semantics/QueryInterpreter$py.classbin0 -> 18767 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/QueryInterpreter.py272
-rw-r--r--Monitoring/src/main/python/Semantics/QueryInterpreter.py.old223
-rw-r--r--Monitoring/src/main/python/Semantics/TaskModel$py.classbin0 -> 29382 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/TaskModel.py436
-rw-r--r--Monitoring/src/main/python/Semantics/TaskModel.py.old424
-rw-r--r--Monitoring/src/main/python/Semantics/UnitModel$py.classbin0 -> 26117 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/UnitModel.py364
-rw-r--r--Monitoring/src/main/python/Semantics/UnitModel.py.old364
-rw-r--r--Monitoring/src/main/python/Semantics/__init__$py.classbin0 -> 2067 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/__init__.py0
-rw-r--r--Monitoring/src/main/python/Semantics/__init__.py.old0
-rw-r--r--Monitoring/src/main/python/Semantics/a0
-rw-r--r--Monitoring/src/main/python/Semantics/b55
-rw-r--r--Monitoring/src/main/python/Semantics/test.py263
-rw-r--r--Monitoring/src/main/python/Semantics/test.py.old250
22 files changed, 3097 insertions, 0 deletions
diff --git a/Monitoring/src/main/python/Semantics/InformationModel$py.class b/Monitoring/src/main/python/Semantics/InformationModel$py.class
new file mode 100644
index 0000000..ab59485
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/InformationModel$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/InformationModel.py b/Monitoring/src/main/python/Semantics/InformationModel.py
new file mode 100644
index 0000000..40aa44d
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/InformationModel.py
@@ -0,0 +1,88 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from urllib2 import URLError
+from rdflib import Graph, Namespace, URIRef, plugin
+from rdflib.query import Processor, Result
+import pkgutil
+import StringIO
+import monitoringmodel.im
+import os.path
+
class IMError(Exception):
    """Raised for information-model failures (model loading / prefix lookup)."""
+
class Ontology(object):
    """
    @summary: loads the NOVI monitoring OWL models into a single RDF graph
    and provides helpers for namespace lookup, SPARQL querying and literal
    parsing.
    """

    # prefix -> (owl file name or None, namespace URI); entries carrying a
    # file name are parsed into the graph, every entry gets its prefix bound
    ontology = {
        'owl': (None, "http://www.w3.org/2002/07/owl#"),
        'unit': ('unit.owl', "http://fp7-novi.eu/unit.owl#"),
        'param': ('monitoring_parameters.owl', "http://fp7-novi.eu/monitoring_parameter.owl#"),
        'feature': ('monitoring_features.owl', "http://fp7-novi.eu/monitoring_features.owl#"),
        'task': ('monitoring_task.owl', "http://fp7-novi.eu/monitoring_task.owl#"),
        'query': (None, "http://fp7-novi.eu/monitoring_query.owl#"), #('monitoring_query.owl', ...)
        'conf': (None, "http://fp7-novi.eu/config.owl#"),
        'stat': (None, 'http://fp7-novi.eu/monitoring_stat.owl#'),
        'core': ('novi-im.owl', "http://fp7-novi.eu/im.owl#"),
    }

    def __init__(self, baseurl, config_owl):
        """
        @summary: register the SPARQL plugins, then parse the configuration
        model and every owl file listed in the ontology table into one graph
        @param baseurl: base location of the owl files (currently overridden
            by the monitoringmodel.im package path -- Jython deployment hack)
        @param config_owl: file name of the configuration model to load
        @raise IMError: when a model cannot be read
        """
        plugin.register(
            'sparql', Processor,
            'rdfextras.sparql.processor', 'Processor')
        plugin.register(
            'sparql', Result,
            'rdfextras.sparql.query', 'SPARQLQueryResult')

        # JYTHON hack for accessing owl files: read them through the package
        # loader instead of the file system / network
        im = monitoringmodel.im.im()
        path = im.path
        loader = pkgutil.get_loader("monitoringmodel.im")

        self.baseurl = path #baseurl
        self.graph = Graph()
        # load owl files and bind name spaces
        try:
            url = os.path.join(path, config_owl)
            self.graph += Graph().parse(source = StringIO.StringIO(loader.get_data(url)) )
        except URLError:
            raise IMError("URLError: Cannot read model %s" % config_owl)
        # items() behaves like iteritems() for iteration and also parses
        # under Python 3
        for prefix, (owl, ns) in self.ontology.items():
            if owl:
                url = os.path.join(path, owl) #"%s/%s" % (self.baseurl, owl)
                try:
                    self.graph += Graph().parse(source = StringIO.StringIO(loader.get_data(url)) )
                except URLError:
                    raise IMError("URLError: Cannot read model %s" % url)
            try:
                self.graph.bind(prefix, Namespace(ns))
            except Exception:
                # binding is best effort: the prefix may already be bound
                pass

    @staticmethod
    def _float(f):
        """
        @summary: parse a numeric literal; despite the name it returns an int
        unless the string carries a decimal point or an exponent marker
        @param f: the literal to parse
        @type f: str
        @rtype: int or float
        """
        if '.' in f or 'e' in f or 'E' in f:
            return float(f)
        else:
            return int(f)

    @staticmethod
    def _tail(uriref):
        """
        @summary: return the fragment after the '#' of a URIRef
        @raise IMError: when the argument is not a URIRef
        """
        if not isinstance(uriref, URIRef):
            raise IMError("Wrong uriref %s" % uriref)
        return str(uriref).split("#")[-1]

    def query(self, query):
        """
        @summary: run a SPARQL query against the merged graph with the
        graph's namespace prefixes preloaded
        """
        return self.graph.query(query, initNs = dict(self.graph.namespaces()))

    def ns(self, prefix):
        """
        @summary: resolve a bound prefix to its Namespace
        @raise IMError: for an unknown prefix
        """
        for p, ns in self.graph.namespaces():
            if p == prefix:
                return Namespace(ns)
        raise IMError("Unknown prefix: %s" % prefix)

    def dump(self):
        """@summary: print every triple of the graph (debug aid)"""
        for t in self.graph.triples((None, None, None)):
            # print(x) with a single argument behaves identically in
            # Python 2 and 3
            print(t)
diff --git a/Monitoring/src/main/python/Semantics/InformationModel.py.old b/Monitoring/src/main/python/Semantics/InformationModel.py.old
new file mode 100644
index 0000000..9de648e
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/InformationModel.py.old
@@ -0,0 +1,79 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from urllib2 import URLError
+from rdflib import Graph, Namespace, URIRef, plugin
+from rdflib.query import Processor, Result
+
+class IMError(Exception):
+    """Raised for information-model failures (model loading / prefix lookup)."""
+    pass
+
+class Ontology(object):
+    """
+    @summary: loads the NOVI monitoring OWL models into a single RDF graph
+    and provides helpers for namespace lookup, SPARQL querying and literal
+    parsing.
+    """
+    # prefix -> (owl file name or None, namespace URI); entries carrying a
+    # file name are parsed into the graph, every entry gets its prefix bound
+    ontology = {
+        'owl': (None, "http://www.w3.org/2002/07/owl#"),
+        'unit': ('unit.owl', "http://fp7-novi.eu/unit.owl#"),
+        'param': ('monitoring_parameters.owl', "http://fp7-novi.eu/monitoring_parameter.owl#"),
+        'feature': ('monitoring_features.owl', "http://fp7-novi.eu/monitoring_features.owl#"),
+        'task': ('monitoring_task.owl', "http://fp7-novi.eu/monitoring_task.owl#"),
+        'query': (None, "http://fp7-novi.eu/monitoring_query.owl#"), #('monitoring_query.owl', ...)
+        'conf': (None, "http://fp7-novi.eu/config.owl#"),
+        'stat': (None, 'http://fp7-novi.eu/monitoring_stat.owl#'),
+        'core': ('novi-im.owl', "http://fp7-novi.eu/im.owl#"),
+    }
+
+    def __init__(self, baseurl, config_owl):
+        """
+        @summary: register the SPARQL plugins, then parse the configuration
+        model and every owl file listed in the ontology table into one graph
+        @param baseurl: base URL the owl file names are resolved against
+        @param config_owl: source of the configuration model to load
+        @raise IMError: when a model cannot be read
+        """
+
+        plugin.register(
+            'sparql', Processor,
+            'rdfextras.sparql.processor', 'Processor')
+        plugin.register(
+            'sparql', Result,
+            'rdfextras.sparql.query', 'SPARQLQueryResult')
+
+
+        self.baseurl = baseurl
+        self.graph = Graph()
+        # load owl files and bind name spaces
+        try:
+            self.graph += Graph().parse(source = config_owl)
+        except URLError:
+            raise IMError("URLError: Cannot read model %s" % config_owl)
+        for prefix, (owl, ns) in self.ontology.iteritems():
+            if owl:
+                url = "%s/%s" % (self.baseurl, owl)
+                try:
+                    self.graph += Graph().parse(source = url)
+                except URLError:
+                    raise IMError("URLError: Cannot read model %s" % url)
+            try:
+                self.graph.bind(prefix, Namespace(ns))
+            # NOTE(review): bare except silently swallows every bind failure --
+            # presumably guarding against an already-bound prefix; confirm and
+            # narrow to a specific exception
+            except:
+                pass
+
+    @staticmethod
+    def _float(f):
+        """
+        @summary: parse a numeric literal; despite the name it returns an int
+        unless the string carries a decimal point or an exponent marker
+        @rtype: int or float
+        """
+        if '.' in f or 'e' in f or 'E' in f:
+            return float(f)
+        else:
+            return int(f)
+
+    @staticmethod
+    def _tail(uriref):
+        """
+        @summary: return the fragment after the '#' of a URIRef
+        @raise IMError: when the argument is not a URIRef
+        """
+        if not isinstance(uriref, URIRef):
+            raise IMError("Wrong uriref %s" % uriref)
+        return str(uriref).split("#")[-1]
+
+    def query(self, query):
+        """@summary: run a SPARQL query with the graph's prefixes preloaded"""
+        return self.graph.query(query, initNs = dict(self.graph.namespaces()))
+
+    def ns(self, prefix):
+        """
+        @summary: resolve a bound prefix to its Namespace
+        @raise IMError: for an unknown prefix
+        """
+        for p, ns in self.graph.namespaces():
+            if p == prefix:
+                return Namespace(ns)
+        raise IMError("Unknown prefix: %s" % prefix)
+
+    def dump(self):
+        """@summary: print every triple of the graph (debug aid)"""
+        for t in self.graph.triples((None, None, None)):
+            print t
diff --git a/Monitoring/src/main/python/Semantics/Query$py.class b/Monitoring/src/main/python/Semantics/Query$py.class
new file mode 100644
index 0000000..7749289
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/Query$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/Query.py b/Monitoring/src/main/python/Semantics/Query.py
new file mode 100644
index 0000000..93c3a2c
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/Query.py
@@ -0,0 +1,140 @@
+'''
+Created on Feb 21, 2012
+
+@author: steger
+'''
+from DataProcessing.Parameter import ParameterList
+from Resource.resource import resource as coreresource
+from DataProcessing.DataFormatter import JsonFormatter, DumbFormatter
+
class SingleQuery(object):
    '''
    @summary: represents a (feature, resource) pair, representing what and where to measure
    The requested output format is also stored here
    Optionally some measurement specific parameters can be added and post processing can be applied
    '''
    def __init__(self):
        self._feature = None        # URIRef of the monitored feature
        self._resource = None       # (resourceid, resource) pair
        self._samplechain = None    # post-processing chain applied to the samples
        self._formatter = None      # DataFormatter class used to render results
        self._parameters = ParameterList()

    def _get_resource(self):
        return self._resource
    def _set_resource(self, resource_pair):
        """
        @param resource_pair: a (resourceid, resource) pair; the resource
        must be a core resource instance
        @raise Exception: when the resource is of the wrong type
        """
        # explicit unpacking instead of a tuple parameter: tuple parameter
        # unpacking was removed by PEP 3113; callers are unaffected
        resourceid, resource = resource_pair
        if not isinstance(resource, coreresource):
            raise Exception("%s is not a resource type" % resource)
        self._resource = (resourceid, resource)

    def _get_feature(self):
        return self._feature
    def _set_feature(self, feature):
        self._feature = feature

    def _get_samplechain(self):
        return self._samplechain
    def _set_samplechain(self, samplechain):
        self._samplechain = samplechain

    def _get_formatter(self):
        return self._formatter
    def _set_formatter(self, uri_formatter):
        """
        @summary: map a formatter URI to the implementing class
        @raise Exception: when the URI names no known formatter
        """
        if str(uri_formatter).endswith("Formatter_JSON"):
            self._formatter = JsonFormatter
        elif str(uri_formatter).endswith("Formatter_CSV"):
            self._formatter = DumbFormatter
        else:
            raise Exception("%s is not a formatter type" % uri_formatter)

    def _get_paramlist(self):
        return self._parameters

    def addParameter(self, parameter):
        """@summary: append a measurement specific parameter"""
        self._parameters.append(parameter)


    samplechain = property(_get_samplechain,_set_samplechain,None)

    formatter = property(_get_formatter,_set_formatter,None)

    resource = property(_get_resource,_set_resource,None)

    feature = property(_get_feature,_set_feature,None)

    paramlist = property(_get_paramlist,None,None)
class QueryBundle(object):
    '''
    @summary: represents a collection of SingleQueries, keyed by the
    reference (URI) of the originating query
    '''
    def __init__(self):
        # mapping: query reference -> SingleQuery
        self.atoms = {}

    def __len__(self):
        return len(self.atoms)

    def has_key(self, key):
        """@summary: kept for backward compatibility; delegates to the in operator"""
        # `key in dict` replaces the Python-2-only dict.has_key with
        # identical behavior
        return key in self.atoms

    def __iter__(self):
        # values() iterates the same items as the Python-2-only itervalues()
        for q in self.atoms.values():
            yield q

    def getResource(self, resourceid):
        """
        @summary: look up an already translated resource by its identifier
        @return: the resource, or None when no query references it
        """
        for q in self:
            if q.resource[0] == resourceid:
                return q.resource[1]
        return None

    def add(self, reference, q):
        """
        @summary: register a SingleQuery under a unique reference
        @raise Exception: on a duplicate reference or a wrong query type
        """
        if reference in self.atoms:
            raise Exception("Duplicate MonitoringQuery entry")
        if not isinstance(q, SingleQuery):
            raise Exception("Wrong type")
        self.atoms[reference] = q
+
+
+
+
+
+#class BundleQueryBundle(QueryBundle):
+# def newQuery(self, key, feature, samplechain, resource, formatter):
+# if self.atoms.has_key(key):
+# raise Exception("Atomic query %s exists" % key)
+# Q = MonitorQuery()
+# Q.resource = resource
+# Q.feature = feature
+# Q.samplechain = samplechain
+# Q.formatter = formatter
+# self.atoms[key] = Q
+
+# def addParameter(self, key, parameter):
+# if not self.atoms.has_key(key):
+# raise Exception("Atomic query %s does not exist" % key)
+# self.atoms[key].addParameter(parameter)
+
+
+#class AggregatorQuery(Query):
+# def __init__(self):
+# Query.__init__(self)
+# self._processid = None
+
+# def _get_processid(self):
+# return self._processid
+# def _set_processid(self, processid):
+# self._processid = processid
+
+
+
+#class SampleQuery(QueryBundle):
+# def newQuery(self, key, processid, feature, samplechain, formatter):
+# if self.atoms.has_key(key):
+# raise Exception("Atomic query %s exists" % key)
+# Q = AggregatorQuery()
+# Q.processid = processid
+# Q.feature = feature
+# Q.samplechain = samplechain
+# Q.formatter = formatter
+# self.atoms[key] = Q
+
+#processid = property(_get_processid,_set_processid,None)
diff --git a/Monitoring/src/main/python/Semantics/Query.py.old b/Monitoring/src/main/python/Semantics/Query.py.old
new file mode 100644
index 0000000..80e7e41
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/Query.py.old
@@ -0,0 +1,139 @@
+'''
+Created on Feb 21, 2012
+
+@author: steger
+'''
+from DataProcessing.Parameter import ParameterList
+from Resource.resource import resource as coreresource
+from DataProcessing.DataFormatter import JsonFormatter, DumbFormatter
+
+class SingleQuery(object):
+    '''
+    @summary: represents a (feature, resource) pair, representing what and where to measure
+    The requested output format is also stored here
+    Optionally some measurement specific parameters can be added and post processing can be applied
+    '''
+    def __init__(self):
+        self._feature = None        # URIRef of the monitored feature
+        self._resource = None       # (resourceid, resource) pair
+        self._samplechain = None    # post-processing chain applied to the samples
+        self._formatter = None      # DataFormatter class used to render results
+        self._parameters = ParameterList()
+
+    @property
+    def resource(self):
+        return self._resource
+    @resource.setter
+    # NOTE: tuple parameter unpacking is Python 2 only (removed by PEP 3113)
+    def resource(self, (resourceid, resource)):
+        if not isinstance(resource, coreresource):
+            raise Exception("%s is not a resource type" % resource)
+        self._resource = (resourceid, resource)
+
+    @property
+    def feature(self):
+        return self._feature
+    @feature.setter
+    def feature(self, feature):
+        self._feature = feature
+
+    @property
+    def samplechain(self):
+        return self._samplechain
+    @samplechain.setter
+    def samplechain(self, samplechain):
+        self._samplechain = samplechain
+
+    @property
+    def formatter(self):
+        return self._formatter
+    @formatter.setter
+    def formatter(self, uri_formatter):
+        """@summary: map a formatter URI to the implementing class (JSON or CSV)"""
+        if str(uri_formatter).endswith("Formatter_JSON"):
+            self._formatter = JsonFormatter
+        elif str(uri_formatter).endswith("Formatter_CSV"):
+            self._formatter = DumbFormatter
+        else:
+            raise Exception("%s is not a formatter type" % uri_formatter)
+
+    @property
+    def paramlist(self):
+        return self._parameters
+
+    def addParameter(self, parameter):
+        """@summary: append a measurement specific parameter"""
+        self._parameters.append(parameter)
+
+class QueryBundle(object):
+    '''
+    @summary: represents a collection of SingleQueries
+    '''
+    def __init__(self):
+        # mapping: query reference -> SingleQuery
+        self.atoms = {}
+
+    def __len__(self):
+        return len(self.atoms)
+
+    # NOTE: dict.has_key / itervalues below are Python 2 only
+    def has_key(self, key):
+        return self.atoms.has_key(key)
+
+    def __iter__(self):
+        for q in self.atoms.itervalues():
+            yield q
+
+    def getResource(self, resourceid):
+        """
+        @summary: look up an already translated resource by its identifier
+        @return: the resource, or None when no query references it
+        """
+        for q in self:
+            if q.resource[0] == resourceid:
+                return q.resource[1]
+        return None
+
+    def add(self, reference, q):
+        """
+        @summary: register a SingleQuery under a unique reference
+        @raise Exception: on a duplicate reference or a wrong query type
+        """
+        if self.atoms.has_key(reference):
+            raise Exception("Duplicate MonitoringQuery entry")
+        if not isinstance(q, SingleQuery):
+            raise Exception("Wrong type")
+        self.atoms[reference] = q
+
+
+
+
+
+#class BundleQueryBundle(QueryBundle):
+# def newQuery(self, key, feature, samplechain, resource, formatter):
+# if self.atoms.has_key(key):
+# raise Exception("Atomic query %s exists" % key)
+# Q = MonitorQuery()
+# Q.resource = resource
+# Q.feature = feature
+# Q.samplechain = samplechain
+# Q.formatter = formatter
+# self.atoms[key] = Q
+
+# def addParameter(self, key, parameter):
+# if not self.atoms.has_key(key):
+# raise Exception("Atomic query %s does not exist" % key)
+# self.atoms[key].addParameter(parameter)
+
+
+#class AggregatorQuery(Query):
+# def __init__(self):
+# Query.__init__(self)
+# self._processid = None
+
+# @property
+# def processid(self):
+# return self._processid
+# @processid.setter
+# def processid(self, processid):
+# self._processid = processid
+
+
+
+#class SampleQuery(QueryBundle):
+# def newQuery(self, key, processid, feature, samplechain, formatter):
+# if self.atoms.has_key(key):
+# raise Exception("Atomic query %s exists" % key)
+# Q = AggregatorQuery()
+# Q.processid = processid
+# Q.feature = feature
+# Q.samplechain = samplechain
+# Q.formatter = formatter
+# self.atoms[key] = Q
diff --git a/Monitoring/src/main/python/Semantics/QueryInterpreter$py.class b/Monitoring/src/main/python/Semantics/QueryInterpreter$py.class
new file mode 100644
index 0000000..d001aa9
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/QueryInterpreter$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/QueryInterpreter.py b/Monitoring/src/main/python/Semantics/QueryInterpreter.py
new file mode 100644
index 0000000..1422554
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/QueryInterpreter.py
@@ -0,0 +1,272 @@
+'''
+Created on Feb 21, 2012
+
+@author: steger
+'''
+
+from Semantics.Query import QueryBundle, SingleQuery
+from Resource.node import node
+from Resource.interface import interface
+from Resource.link import link
+from DataProcessing.Aggregator import Max, Min, Percentile, Mean, Deviation
+from DataProcessing.Sampler import Tail, Head
+from DataProcessing.Parameter import ParameterList
+
+
+class QueryInterpreter(object):
+    '''
+    @summary: walks an RDF query graph and translates it into QueryBundle /
+    SingleQuery objects: resolves the measured resources, the sample
+    manipulation chain and the query parameters with the help of the task model
+    '''
+    # name of the sample individual that terminates a sample-operation chain
+    samplesource = 'UnmodifiedExtractOfFeatureSamples'
+    # ontology name of a sample operation -> implementing skeleton class
+    lut_skeleton = {
+        'Maximum': Max,
+        'Minimum': Min,
+        'Percentile': Percentile,
+        'Average': Mean,
+        'Variance': Deviation,
+        'Tail': Tail,
+        'Head': Head
+    }
+
+    def __init__(self, model):
+        '''
+        @summary: constructor
+        @param model: the task model to resolve the tools
+        @type model: TaskModel
+        '''
+        self.model = model
+
+    def getUnitOfDimension(self, ref_dim):
+        # default unit of a dimension, looked up in the model's dimension manager
+        return self.model.dm[ref_dim].unit
+
+    def getUnit(self, uri_prefix, uri_unit):
+        # NOTE(review): self.model._tail and self.um look wrong -- elsewhere
+        # this file uses self.model.ontology._tail, and no um attribute is set
+        # on QueryInterpreter (the unit manager presumably lives on the model);
+        # confirm before relying on this method
+        if uri_prefix is None:
+            uref = self.model._tail(uri_unit)
+        else:
+            uref = "%s_%s" % (self.model._tail(uri_prefix), self.model._tail(uri_unit))
+        return self.um[uref]
+
+    def _get_myns(self):
+        # namespace prefix map of the ontology graph, passed to every SPARQL query
+        return dict(self.model.ontology.graph.namespaces())
+
+
+    myns = property(_get_myns,None,None)
+
+
+    def inferInterfacesOf(self, qgraph, uri_node):
+        '''
+        @summary: yield the bidirectional interfaces of a node individual,
+        resolving each interface's IPv4 address and its unit
+        '''
+        q = """
+SELECT ?ifin ?address ?unit ?prefix
+WHERE {
+  <%s> core:hasInboundInterface ?ifin ;
+    core:hasOutboundInterface ?ifout .
+  ?ifin a core:Interface ;
+    core:hasIPv4Address ?addressobj .
+  ?ifout a core:Interface ;
+    core:hasIPv4Address ?addressobj .
+  ?addressobj a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+    a unit:IPAddress ;
+    unit:hasValue ?address .
+  OPTIONAL {
+    ?addressobj unit:hasUnit ?unit .
+  }
+  OPTIONAL {
+    ?addressobj unit:hasPrefix ?prefix .
+  }
+}
+    """ % uri_node
+        for uri_ifin, address, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
+            name = self.model.ontology._tail(uri_ifin)
+            iface = interface(name, resourceid = uri_ifin)
+            if uri_unit is not None:
+                iface.address = str(address), self.getUnit(uri_prefix, uri_unit)
+            else:
+                iface.address = str(address), self.getUnitOfDimension('IPAddress')
+            iface.direction = iface.EGRESS | iface.INGRESS
+            #FIXME: this info should come from the model
+            iface.interface = "eth0"
+            iface.ispublic = True
+            yield iface
+        #TODO: similarly look up uni directional interfaces of the node and yield them as well
+
+    def inferInterfacesOfLink(self, qgraph, uri_node, iftype):
+        '''
+        @summary: yield the interfaces a link individual is attached to via the
+        given property; direction is derived from iftype
+        @param iftype: "hasSource" or "hasSink"
+        '''
+        q = """
+SELECT ?ifin ?address ?unit ?prefix
+WHERE {
+  <%s> core:%s ?ifin .
+  ?ifin a core:Interface ;
+    core:hasIPv4Address ?addressobj .
+  ?addressobj a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+    a unit:IPAddress ;
+    unit:hasValue ?address .
+  OPTIONAL {
+    ?addressobj unit:hasUnit ?unit .
+  }
+  OPTIONAL {
+    ?addressobj unit:hasPrefix ?prefix .
+  }
+}
+    """ % (uri_node, iftype)
+        for uri_ifin, address, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
+            name = self.model.ontology._tail(uri_ifin)
+            iface = interface(name, resourceid = uri_ifin)
+            if uri_unit is not None:
+                iface.address = str(address), self.getUnit(uri_prefix, uri_unit)
+            else:
+                iface.address = str(address), self.getUnitOfDimension('IPAddress')
+            # the link's source side sends (egress), the sink side receives (ingress)
+            if iftype=="hasSource": iface.direction = iface.EGRESS
+            else: iface.direction = iface.INGRESS
+            #FIXME: this info should come from the model
+            iface.interface = "eth0"
+            iface.ispublic = True
+            yield iface
+        #TODO: similarly look up uni directional interfaces of the node and yield them as well
+
+
+
+
+    def inferBundleQueries(self, qgraph):
+        '''
+        @summary: extract every BundleQuery individual of the query graph and
+        translate each into a SingleQuery collected in a QueryBundle
+        @return: the translated queries
+        @rtype: QueryBundle
+        '''
+        q = """
+SELECT ?query ?feature ?sample ?resource ?formatter
+WHERE {
+  ?query a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+    a query:BundleQuery ;
+    feature:hasFeature ?feature ;
+    stat:hasSample ?sample ;
+    query:hasResource ?resource ;
+    query:hasFormatter ?formatter .
+}
+    """
+        Q = QueryBundle()
+        for uri_query, uri_feature, uri_sample, uri_resource, uri_formatter in qgraph.query(q, initNs = self.myns):
+            # translate each resource only once, then share it between queries
+            r = Q.getResource(uri_resource)
+            if r is None:
+                r = self.translateResource(qgraph, uri_resource)
+            sq = SingleQuery()
+            sq.feature = uri_feature
+            sq.resource = (uri_resource, r)
+            sq.formatter = uri_formatter
+            sq.samplechain = self.inferSampleChain(qgraph, uri_sample)
+            for p in self.inferParameters(qgraph, uri_query):
+                sq.addParameter(parameter = p)
+            Q.add(uri_query, sq)
+        return Q
+
+    def getSampleManipulationQuery(self, qgraph):
+        '''
+        @summary: extract SampleManipulationQuery individuals of the query graph
+        '''
+        # NOTE(review): SampleQuery is not defined anywhere (it is commented out
+        # in Semantics.Query), so this method would raise NameError; also the
+        # SELECT returns 4 variables while the loop below unpacks 5
+        # (uri_resource is not selected) -- verify before use
+        q = """
+SELECT ?query ?feature ?sample ?formatter
+WHERE {
+  ?query a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+    a query:SampleManipulationQuery ;
+    feature:hasFeature ?feature ;
+    stat:hasSample ?sample ;
+    query:hasFormatter ?formatter .
+}
+    """
+        Q = SampleQuery()
+        resources = {}
+        for uri_query, uri_feature, uri_sample, uri_resource, uri_formatter in qgraph.query(q, initNs = self.myns):
+            resource_name = self.model.ontology._tail(uri_resource)
+            if not resources.has_key(resource_name):
+                resources[resource_name] = self.translateResource(qgraph, uri_resource)
+            if not Q.has_key(uri_query):
+                samplechain = self.inferSampleChain(qgraph, uri_sample)
+                Q.newQuery(key = uri_query, feature = uri_feature, samplechain = samplechain, resource = resources[resource_name], formatter = uri_formatter)
+            for p in self.inferParameters(qgraph, uri_query):
+                Q.addParameter(key = uri_query, parameter = p)
+        return Q
+
+
+    def inferSampleChain(self, qgraph, uri_sample):
+        '''
+        @summary: recursively resolve the chain of sample operations ending at
+        the samplesource individual
+        @return: list of (skeleton, ParameterList) pairs, innermost first
+        '''
+        tail = self.model.ontology._tail(uri_sample)
+        if tail == self.samplesource:
+            # reached the raw sample source: recursion stops here
+            return []
+        q = """
+SELECT ?nextsample ?sampleop
+WHERE {
+  <%s> a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+    stat:hasSample ?nextsample ;
+    a ?sampleop
+}
+    """ % uri_sample
+        # NOTE(review): if the query yields no usable row, op (and tail) stay
+        # stale/unbound -- presumably the model guarantees a well-formed chain;
+        # confirm
+        for uri_sample_next, uri_sampleop in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_sampleop)
+            if tail in [ 'NamedIndividual' ]:
+                continue
+            op = self.inferSampleChain(qgraph, uri_sample_next)
+            break
+        skeleton = self.lut_skeleton[tail]
+        parlist = ParameterList([ p for p in self.inferParameters(qgraph, uri_sample) ])
+        op.append( (skeleton, parlist) )
+        return op
+
+    def inferParameters(self, qgraph, uri_query):
+        '''
+        @summary: yield the translated parameters attached to an individual
+        (optionally carrying a default value, unit and prefix)
+        '''
+        q = """
+SELECT ?name ?type ?dim ?defval ?unit ?prefix
+WHERE {
+  <%s> param:hasParameter ?par .
+  ?par a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+    param:paramName ?name ;
+    param:hasType ?type ;
+    a ?dim .
+  OPTIONAL {
+    ?par unit:hasValue ?defval .
+    OPTIONAL {
+      ?par unit:hasUnit ?unit .
+    }
+    OPTIONAL {
+      ?par unit:hasPrefix ?prefix .
+    }
+  }
+}
+    """ % uri_query
+        for uri_name, uri_type, uri_dim, uri_default, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_dim)
+#FIXME: query should include the filter, but rdflib has a bug and only the spelt out form would work
+#    FILTER ( ?dim != <http://www.w3.org/2002/07/owl#NamedIndividual> )
+#    FILTER ( ?dim != query:QueryParameter )
+#    FILTER ( ?dim != stat:SampleOperatorParameter )
+#
+# like:
+#    FILTER ( ?dim != <http://www.w3.org/2002/07/owl#NamedIndividual> )
+            if tail in [ 'QueryParameter', 'SOP_tail', 'SOP_head', 'SOP_order', 'NamedIndividual' ]:
+                continue
+            yield self.model.translateParameter(str(uri_name), uri_dim, uri_unit, uri_prefix, uri_type, uri_default)
+
+    def translateResource(self, qgraph, uri_resource):
+        '''
+        @summary: instantiate the resource (node or link) named by the URI,
+        including its interfaces
+        @return: the resource object, or None for an unhandled resource type
+        '''
+        resource_name = self.model.ontology._tail(uri_resource)
+        q = """
+SELECT ?resourcetype
+WHERE {
+  <%s> a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+    a core:Resource ;
+    a ?resourcetype ;
+}
+    """ % uri_resource
+        for uri_rtype, in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_rtype)
+            if tail in [ 'Resource', 'NamedIndividual', 'NetworkElement' ]:
+                continue
+            if tail == "Node":
+                r = node(name = resource_name, resourceid = uri_resource)
+                for iface in self.inferInterfacesOf(qgraph, uri_resource):
+                    r.addinterface(iface)
+                return r
+            elif tail == "Link":
+                r = link(name = resource_name, resourceid = uri_resource)
+                # a link has exactly one source and one sink interface
+                for iface in self.inferInterfacesOfLink(qgraph, uri_resource, "hasSource"):
+                    r.source = iface
+                    break
+                for iface in self.inferInterfacesOfLink(qgraph, uri_resource, "hasSink"):
+                    r.destination = iface
+                    break
+                return r
+            else:
+                print "WW: unhandled rtype", uri_rtype
+                continue
+
diff --git a/Monitoring/src/main/python/Semantics/QueryInterpreter.py.old b/Monitoring/src/main/python/Semantics/QueryInterpreter.py.old
new file mode 100644
index 0000000..c6f5574
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/QueryInterpreter.py.old
@@ -0,0 +1,223 @@
+'''
+Created on Feb 21, 2012
+
+@author: steger
+'''
+
+from Semantics.Query import QueryBundle, SingleQuery
+from Resource.node import node
+from Resource.interface import interface
+from DataProcessing.Aggregator import Max, Min, Percentile, Mean, Deviation
+from DataProcessing.Sampler import Tail, Head
+from DataProcessing.Parameter import ParameterList
+
+
+class QueryInterpreter(object):
+    '''
+    @summary: walks an RDF query graph and translates it into QueryBundle /
+    SingleQuery objects: resolves the measured resources, the sample
+    manipulation chain and the query parameters with the help of the task model
+    '''
+    # name of the sample individual that terminates a sample-operation chain
+    samplesource = 'UnmodifiedExtractOfFeatureSamples'
+    # ontology name of a sample operation -> implementing skeleton class
+    lut_skeleton = {
+        'Maximum': Max,
+        'Minimum': Min,
+        'Percentile': Percentile,
+        'Average': Mean,
+        'Variance': Deviation,
+        'Tail': Tail,
+        'Head': Head
+    }
+
+    def __init__(self, model):
+        '''
+        @summary: constructor
+        @param model: the task model to resolve the tools
+        @type model: TaskModel
+        '''
+        self.model = model
+
+    def getUnitOfDimension(self, ref_dim):
+        # default unit of a dimension, looked up in the model's dimension manager
+        return self.model.dm[ref_dim].unit
+
+    def getUnit(self, uri_prefix, uri_unit):
+        # NOTE(review): self.model._tail and self.um look wrong -- elsewhere
+        # this file uses self.model.ontology._tail, and no um attribute is set
+        # on QueryInterpreter; confirm before relying on this method
+        if uri_prefix is None:
+            uref = self.model._tail(uri_unit)
+        else:
+            uref = "%s_%s" % (self.model._tail(uri_prefix), self.model._tail(uri_unit))
+        return self.um[uref]
+
+    @property
+    def myns(self):
+        # namespace prefix map of the ontology graph, passed to every SPARQL query
+        return dict(self.model.ontology.graph.namespaces())
+
+    def inferInterfacesOf(self, qgraph, uri_node):
+        '''
+        @summary: yield the bidirectional interfaces of a node individual,
+        resolving each interface's IPv4 address and its unit
+        '''
+        q = """
+SELECT ?ifin ?address ?unit ?prefix
+WHERE {
+  <%s> core:hasInboundInterface ?ifin ;
+    core:hasOutboundInterface ?ifout .
+  ?ifin a core:Interface ;
+    core:hasIPv4Address ?addressobj .
+  ?ifout a core:Interface ;
+    core:hasIPv4Address ?addressobj .
+  ?addressobj a owl:NamedIndividual ;
+    a unit:IPAddress ;
+    unit:hasValue ?address .
+  OPTIONAL {
+    ?addressobj unit:hasUnit ?unit .
+  }
+  OPTIONAL {
+    ?addressobj unit:hasPrefix ?prefix .
+  }
+}
+    """ % uri_node
+        for uri_ifin, address, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
+            name = self.model.ontology._tail(uri_ifin)
+            iface = interface(name, resourceid = uri_ifin)
+            if uri_unit is not None:
+                iface.address = str(address), self.getUnit(uri_prefix, uri_unit)
+            else:
+                iface.address = str(address), self.getUnitOfDimension('IPAddress')
+            iface.direction = iface.EGRESS | iface.INGRESS
+            #FIXME: this info should come from the model
+            iface.interface = "eth0"
+            iface.ispublic = True
+            yield iface
+        #TODO: similarly look up uni directional interfaces of the node and yield them as well
+
+    def inferBundleQueries(self, qgraph):
+        '''
+        @summary: extract every BundleQuery individual of the query graph and
+        translate each into a SingleQuery collected in a QueryBundle
+        @return: the translated queries
+        @rtype: QueryBundle
+        '''
+        q = """
+SELECT ?query ?feature ?sample ?resource ?formatter
+WHERE {
+  ?query a owl:NamedIndividual ;
+    a query:BundleQuery ;
+    feature:hasFeature ?feature ;
+    stat:hasSample ?sample ;
+    query:hasResource ?resource ;
+    query:hasFormatter ?formatter .
+}
+    """
+        Q = QueryBundle()
+        for uri_query, uri_feature, uri_sample, uri_resource, uri_formatter in qgraph.query(q, initNs = self.myns):
+            # translate each resource only once, then share it between queries
+            r = Q.getResource(uri_resource)
+            if r is None:
+                r = self.translateResource(qgraph, uri_resource)
+            sq = SingleQuery()
+            sq.feature = uri_feature
+            sq.resource = (uri_resource, r)
+            sq.formatter = uri_formatter
+            sq.samplechain = self.inferSampleChain(qgraph, uri_sample)
+            for p in self.inferParameters(qgraph, uri_query):
+                sq.addParameter(parameter = p)
+            Q.add(uri_query, sq)
+        return Q
+
+    def getSampleManipulationQuery(self, qgraph):
+        '''
+        @summary: extract SampleManipulationQuery individuals of the query graph
+        '''
+        # NOTE(review): SampleQuery is not defined anywhere (it is commented out
+        # in Semantics.Query), so this method would raise NameError; also the
+        # SELECT returns 4 variables while the loop below unpacks 5 -- verify
+        q = """
+SELECT ?query ?feature ?sample ?formatter
+WHERE {
+  ?query a owl:NamedIndividual ;
+    a query:SampleManipulationQuery ;
+    feature:hasFeature ?feature ;
+    stat:hasSample ?sample ;
+    query:hasFormatter ?formatter .
+}
+    """
+        Q = SampleQuery()
+        resources = {}
+        for uri_query, uri_feature, uri_sample, uri_resource, uri_formatter in qgraph.query(q, initNs = self.myns):
+            resource_name = self.model.ontology._tail(uri_resource)
+            if not resources.has_key(resource_name):
+                resources[resource_name] = self.translateResource(qgraph, uri_resource)
+            if not Q.has_key(uri_query):
+                samplechain = self.inferSampleChain(qgraph, uri_sample)
+                Q.newQuery(key = uri_query, feature = uri_feature, samplechain = samplechain, resource = resources[resource_name], formatter = uri_formatter)
+            for p in self.inferParameters(qgraph, uri_query):
+                Q.addParameter(key = uri_query, parameter = p)
+        return Q
+
+
+    def inferSampleChain(self, qgraph, uri_sample):
+        '''
+        @summary: recursively resolve the chain of sample operations ending at
+        the samplesource individual
+        @return: list of (skeleton, ParameterList) pairs, innermost first
+        '''
+        tail = self.model.ontology._tail(uri_sample)
+        if tail == self.samplesource:
+            # reached the raw sample source: recursion stops here
+            return []
+        q = """
+SELECT ?nextsample ?sampleop
+WHERE {
+  <%s> a owl:NamedIndividual ;
+    stat:hasSample ?nextsample ;
+    a ?sampleop
+}
+    """ % uri_sample
+        # NOTE(review): if the query yields no usable row, op stays unbound --
+        # presumably the model guarantees a well-formed chain; confirm
+        for uri_sample_next, uri_sampleop in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_sampleop)
+            if tail in [ 'NamedIndividual' ]:
+                continue
+            op = self.inferSampleChain(qgraph, uri_sample_next)
+            break
+        skeleton = self.lut_skeleton[tail]
+        parlist = ParameterList([ p for p in self.inferParameters(qgraph, uri_sample) ])
+        op.append( (skeleton, parlist) )
+        return op
+
+    def inferParameters(self, qgraph, uri_query):
+        '''
+        @summary: yield the translated parameters attached to an individual
+        (optionally carrying a default value, unit and prefix)
+        '''
+        q = """
+SELECT ?name ?type ?dim ?defval ?unit ?prefix
+WHERE {
+  <%s> param:hasParameter ?par .
+  ?par a owl:NamedIndividual ;
+    param:paramName ?name ;
+    param:hasType ?type ;
+    a ?dim .
+  OPTIONAL {
+    ?par unit:hasValue ?defval .
+    OPTIONAL {
+      ?par unit:hasUnit ?unit .
+    }
+    OPTIONAL {
+      ?par unit:hasPrefix ?prefix .
+    }
+  }
+}
+    """ % uri_query
+        for uri_name, uri_type, uri_dim, uri_default, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_dim)
+#FIXME: query should include the filter, but rdflib has a bug and only the spelt out form would work
+#    FILTER ( ?dim != owl:NamedIndividual )
+#    FILTER ( ?dim != query:QueryParameter )
+#    FILTER ( ?dim != stat:SampleOperatorParameter )
+#
+# like:
+#    FILTER ( ?dim != <http://www.w3.org/2002/07/owl#NamedIndividual> )
+            if tail in [ 'QueryParameter', 'SOP_tail', 'SOP_head', 'SOP_order', 'NamedIndividual' ]:
+                continue
+            yield self.model.translateParameter(str(uri_name), uri_dim, uri_unit, uri_prefix, uri_type, uri_default)
+
+    def translateResource(self, qgraph, uri_resource):
+        '''
+        @summary: instantiate the node resource named by the URI, including its
+        interfaces (this older version handles Node only, not Link)
+        @return: the resource object, or None for an unhandled resource type
+        '''
+        resource_name = self.model.ontology._tail(uri_resource)
+        q = """
+SELECT ?resourcetype
+WHERE {
+  <%s> a owl:NamedIndividual ;
+    a core:Resource ;
+    a ?resourcetype ;
+}
+    """ % uri_resource
+        for uri_rtype, in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_rtype)
+            if tail in [ 'Resource', 'NamedIndividual' ]:
+                continue
+            if tail == "Node":
+                r = node(name = resource_name, resourceid = uri_resource)
+                for iface in self.inferInterfacesOf(qgraph, uri_resource):
+                    r.addinterface(iface)
+                return r
+            else:
+                print "WW: unhandled rtype", uri_rtype
+                continue
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Semantics/TaskModel$py.class b/Monitoring/src/main/python/Semantics/TaskModel$py.class
new file mode 100644
index 0000000..e154c71
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/TaskModel$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/TaskModel.py b/Monitoring/src/main/python/Semantics/TaskModel.py
new file mode 100644
index 0000000..5f77aec
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/TaskModel.py
@@ -0,0 +1,436 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from Credential.credentialtypes import UsernamePassword, UsernameRSAKey
+from DataProcessing.Data import DataHeader, DataHeaderCell
+from DataProcessing.Parameter import ParameterList, Parameter
+from Driver.SOAPClient import SOAPClient
+from Driver.SshExec import SshExec
+from Driver.REST import RESTDriver
+import Driver.REST
+
+class TaskModelError(Exception):
+    '''
+    @summary: raised when the task-related part of the information model is
+    inconsistent or incomplete (unknown authentication/driver type, missing hasDriver)
+    '''
+    pass
+
+class TaskModel(object):
+    '''
+    @summary: reads task, feature, parameter and data header declarations from
+    the information model (ontology) and translates them into runtime objects
+    (credential classes, driver classes, DataHeader, ParameterList, Parameter)
+    '''
+    # maps the ontology's hook relation local names to the keyword names the
+    # Task object expects (see inferHookdefinitionsOf)
+    hooklookup = {
+        'hasPreHook' : 'prehook',
+        'hasStartHook' : 'starthook',
+        'hasRetrieveHook' : 'retrievehook',
+        'hasStopHook' : 'stophook',
+        'hasPostHook' : 'posthook',
+    }
+    # maps the ontology's parameter type local names to Python casters
+    # (see translateParameter)
+    typelookup = {
+        'Integer': int,
+        'Float': float,
+        'String': str
+    }
+
+    # class-level flag: REST SSL trust setup is performed at most once per process
+    usingREST = False
+
+ def __init__(self, dimensionmanager, unitmanager, ontology):
+ '''
+ @summary: constructor
+ @param dimensionmanager: the container to form a cell's dimension
+ @type dimensionmanager: DimensionManager
+ @param unitmanager: the container to form a cell's unit
+ @type unitmanager: UnitManager
+ @param ontology: the basic knowledge
+ @type ontology: Ontology
+ '''
+ self.ontology = ontology
+ self.dm = dimensionmanager
+ self.um = unitmanager
+
+ def inferDomains(self):
+ '''
+ @summary: extract the monitoring domains from the information model
+ @return: generator of the list of domains
+ @rtype: URIRef
+ '''
+ for uri_domain, _, _ in self.ontology.graph.triples((None, self.ontology.ns('rdf')['type'], self.ontology.ns('task')['MonitoringDomain'])):
+ yield uri_domain
+
+    def inferFeatures(self):
+        '''
+        @summary: extract the monitored features from the information model
+        @return: a generator of (feature reference, feature name, resource reference) triples
+        @rtype: (URIRef, str, URIRef)
+        '''
+        # features are named individuals of some subclass of MonitoredFeature;
+        # the resource owning the feature is joined in via feature:hasFeature
+        q = """
+SELECT ?feature ?name ?resource
+WHERE {
+  ?feature a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+           a ?parent ;
+           feature:featureName ?name .
+  ?parent rdfs:subClassOf feature:MonitoredFeature .
+  ?resource feature:hasFeature ?feature
+}
+        """
+        for uri_feature, name, uri_resource in self.ontology.query(q):
+            yield uri_feature, str(name), uri_resource
+
+    def inferTasks(self, domain, feature):
+        '''
+        @summary: provides a generator to crawl over the tasks that can measure a given feature in the given domain of interest
+        @param domain: domain of interest
+        @type domain: URIRef
+        @param feature: the feature to measure
+        @type feature: URIRef
+        @return: a generator of the list of (task reference, task name) pairs
+        @rtype: (URIRef, str)
+        '''
+        # the query is parameterized with the local names (URI tails) of the
+        # domain and feature; a task qualifies if any column of its output
+        # table carries the requested feature
+        q = """
+SELECT ?task ?name
+WHERE {
+  ?task a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+        a task:MonitoringTask ;
+        task:name ?name ;
+        task:hasMonitoringDomain task:%s ;
+        task:hasOutputTableFormat ?data .
+?data task:hasColumn ?col .
+?col task:hasMonitoredFeature feature:%s
+}
+    """ % (self.ontology._tail(domain), self.ontology._tail(feature))
+        for uri_task, tname in self.ontology.query(q):
+            yield uri_task, str(tname)
+
+ def inferCredentialOf(self, task):
+ '''
+ @summary: extracts the set of acceptable credential templates the given task accepts
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: a set of an uninitialized Credential classes
+ @rtype: set(Credential)
+ @raise IMError: Unknown authentication type
+ '''
+ creds = set()
+ for (_, _, auth) in self.ontology.graph.triples((task, self.ontology.ns('task')['hasAuthenticationType'], None)):
+ if auth == self.ontology.ns('task')["UsernamePassword"]:
+ creds.add(UsernamePassword)
+ elif auth == self.ontology.ns('task')["UsernameRSAKey"]:
+ creds.add(UsernameRSAKey)
+ else:
+ raise TaskModelError("Unknown authentication type %s" % auth)
+ return creds
+
+ def inferDriverOf(self, task):
+ '''
+ @summary: extarcts the driver of the task
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: the appropriate driver class uninstantiated
+ @rtype: Driver
+ @raise IMError: Unknown driver type / hasDriver missing
+ '''
+ try:
+ _, _, driver = self.ontology.graph.triples((task, self.ontology.ns('task')['hasDriver'], None)).next()
+ if driver == self.ontology.ns('task')["SOAPClient"]:
+ return SOAPClient
+ elif driver == self.ontology.ns('task')["SSH"]:
+ return SshExec
+ elif driver == self.ontology.ns('task')["REST"]:
+ if not self.usingREST:
+ self.usingREST = True
+ try:
+ Driver.REST.setSSLTrusted()
+ except:
+ raise TaskModelError("REST SSL Error")
+ return RESTDriver
+ else:
+ raise TaskModelError("Unknown driver type %s" % driver)
+ except StopIteration:
+ raise TaskModelError("hasDriver is missing for task %s" % task)
+
+    def inferHookparametersOf(self, task):
+        '''
+        @summary: extract the necessary control parameters for task initialization
+        @param task: reference to the monitoring task
+        @type task: URIRef
+        @return: a lookup table of arguments, which are passed to the Task object's prehook method as keyword arguments
+        @rtype: dict
+        '''
+        q = """
+SELECT ?name ?value ?type
+WHERE {
+  conf:%s task:hasHookParameter ?p .
+  ?p param:paramName ?name ;
+     a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+     rdf:type task:HookParameter ;
+     unit:hasValue ?value ;
+     param:hasType ?type .
+}
+        """ % (self.ontology._tail(task))
+        d = {}
+        for pname, pvalue, ptype in self.ontology.query(q):
+            pname = str(pname)
+            # cast the literal according to the declared parameter type;
+            # anything other than Integer/Float falls back to str
+            if ptype == self.ontology.ns('param')["Integer"]:
+                d[pname] = int(pvalue)
+            elif ptype == self.ontology.ns('param')["Float"]:
+                d[pname] = float(pvalue)
+            else:
+                d[pname] = str(pvalue)
+        return d
+
+    def inferHookdefinitionsOf(self, task):
+        '''
+        @summary: extract the hook implementation details for task initialization
+        @param task: reference to the monitoring task
+        @type task: URIRef
+        @return: a lookup table of hook definitions keyed by hooklookup values
+        (prehook, starthook, ...)
+        @rtype: dict
+        '''
+        # match any relation from the task that leads to a node carrying
+        # task:hookCode; the relation name itself selects the hook kind
+        q = """
+SELECT ?rel ?value
+WHERE {
+  conf:%s ?rel ?h .
+  ?h task:hookCode ?value .
+}
+        """ % (self.ontology._tail(task))
+        d = {}
+        for hrel, hvalue in self.ontology.query(q):
+            hook = self.ontology._tail(uriref = hrel)
+            # the code is stored with escaped whitespace in the model;
+            # unescape \n, \t and \\ before handing it to the task
+            # NOTE(review): a relation not present in hooklookup raises KeyError here
+            d[self.hooklookup[hook]] = str(hvalue).replace('\\n', '\n').replace('\\t', '\t').replace('\\\\', '\\').strip()
+        return d
+
+    def inferDataheaderOf(self, task):
+        '''
+        @summary: extract the data header declaration the for task
+        @param task: reference to the monitoring task
+        @type task: URIRef
+        @return: an initialized DataHeader instance, or None when the task
+        declares no columns (loop body never runs)
+        @rtype: DataHeader
+        '''
+        # ORDER BY ?seqno keeps the columns in their declared order;
+        # unit and prefix are optional refinements of the column's dimension
+        q = """
+SELECT ?tablename ?colname ?dim ?feature ?unit ?prefix
+WHERE {
+  conf:%s task:hasOutputTableFormat ?hdr .
+  ?hdr task:name ?tablename .
+  ?hdr task:hasColumn ?col .
+  ?col task:name ?colname ;
+     a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+     a ?dim ;
+     task:sequenceNumber ?seqno .
+  {
+   ?dim rdfs:subClassOf unit:BaseDimension .
+  } UNION {
+   ?dim rdfs:subClassOf ?p .
+   ?p rdfs:subClassOf unit:DerivedDimension .
+  }
+  OPTIONAL {
+   ?col task:hasMonitoredFeature ?feature .
+  }
+  OPTIONAL {
+   ?col unit:hasUnit ?unit .
+   OPTIONAL {
+    ?col unit:hasPrefix ?prefix .
+   }
+  }
+}
+ORDER BY ?seqno
+        """ % (self.ontology._tail(task))
+        datahdr = None
+        for tablename, colname, uri_dim, uri_feature, uri_unit, uri_prefix in self.ontology.query(q):
+            # the header is created lazily from the first row's table name
+            if datahdr is None:
+                datahdr = DataHeader(str(tablename))
+            # resolve the unit: absent -> None (dimension's default applies),
+            # plain unit -> lookup by tail, prefixed unit -> "<prefix>_<unit>" key
+            if uri_unit is None:
+                u = None
+            elif uri_prefix is None:
+                u = self.um[ self.ontology._tail(uri_unit) ]
+            else:
+                ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+                u = self.um[ref]
+            d = self.dm[ self.ontology._tail(uri_dim) ]
+            if uri_feature is None:
+                cell = DataHeaderCell(name = str(colname), dimension = d, unit = u)
+            else:
+                cell = DataHeaderCell(name = str(colname), dimension = d, feature = uri_feature, unit = u)
+            datahdr.addColumn(cell)
+        return datahdr
+
+    def inferParametersOf(self, task):
+        '''
+        @summary: extract the parameter list for the given task
+        @param task: reference to the monitoring task
+        @type task: URIRef
+        @return: an initialized list of the parameters of the task
+        @rtype: ParameterList
+        '''
+        # execution parameters are typed by their dimension class; default
+        # value, unit and prefix are progressively optional
+        q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+  conf:%s task:hasExecutionParameter ?par .
+  ?par param:paramName ?name ;
+       param:hasType ?ptype ;
+       a ?dim .
+  {
+   ?dim rdfs:subClassOf unit:BaseDimension .
+  } UNION {
+   ?dim rdfs:subClassOf ?p .
+   ?p rdfs:subClassOf unit:DerivedDimension .
+  }
+  OPTIONAL {
+   ?par unit:hasValue ?defval .
+   OPTIONAL {
+    ?par unit:hasUnit ?unit .
+    OPTIONAL {
+     ?par unit:hasPrefix ?prefix .
+    }
+   }
+  }
+}
+        """ % (self.ontology._tail(task))
+        paramlist = ParameterList()
+        # translateParameter does the dimension/unit/type resolution
+        for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+            p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+            paramlist.append(p)
+        return paramlist
+
+    def inferObligatoryParametersOf(self, feature):
+        '''
+        @summary: extract the obligatory parameter list for a given feature
+        @param feature: reference to the monitored feature
+        @type feature: URIRef
+        @return: an initialized list of the parameters for this feature
+        @rtype: ParameterList
+        '''
+        # identical to inferOptionalParametersOf apart from the
+        # feature:obligatoryParameter predicate
+        q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+  feature:%s feature:obligatoryParameter ?par .
+  ?par param:paramName ?name ;
+       param:hasType ?ptype ;
+       param:hasDimension ?dim .
+  OPTIONAL {
+   ?par param:paramValue ?defval .
+   OPTIONAL {
+    ?par param:hasUnit ?unit .
+    OPTIONAL {
+     ?par param:hasPrefix ?prefix .
+    }
+   }
+  }
+}
+        """ % (self.ontology._tail(feature))
+        paramlist = ParameterList()
+        for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+            p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+            paramlist.append(p)
+        return paramlist
+
+    def inferOptionalParametersOf(self, feature):
+        '''
+        @summary: extract the optional parameter list for a given feature
+        @param feature: reference to the monitored feature
+        @type feature: URIRef
+        @return: an initialized list of the parameters for this feature
+        @rtype: ParameterList
+        '''
+        # identical to inferObligatoryParametersOf apart from the
+        # feature:optionalParameter predicate
+        q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+  feature:%s feature:optionalParameter ?par .
+  ?par param:paramName ?name ;
+       param:hasType ?ptype ;
+       param:hasDimension ?dim .
+  OPTIONAL {
+   ?par param:paramValue ?defval .
+   OPTIONAL {
+    ?par param:hasUnit ?unit .
+    OPTIONAL {
+     ?par param:hasPrefix ?prefix .
+    }
+   }
+  }
+}
+        """ % (self.ontology._tail(feature))
+        paramlist = ParameterList()
+        for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+            p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+            paramlist.append(p)
+        return paramlist
+
+    def inferFeatureMonitoringParameters(self):
+        '''
+        @summary: extract parameters declared for feature monitoring
+        @return: an iterator over parameters
+        @rtype: (parameter name, dimension, value as str, unit)
+        '''
+        q = """
+SELECT ?name ?dim ?defval ?unit ?prefix
+WHERE {
+  ?par a feature:FeatureMonitoringParameter ;
+       param:paramName ?name ;
+       param:hasDimension ?dim .
+  OPTIONAL {
+   ?par param:paramValue ?defval .
+   OPTIONAL {
+    ?par param:hasUnit ?unit .
+    OPTIONAL {
+     ?par param:hasPrefix ?prefix .
+    }
+   }
+  }
+}
+        """
+        for name, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+#FIXME: duplicate (similar thing in translateParameter!!!
+            d = self.dm[ self.ontology._tail(uri_dim) ]
+            # no default value: report an empty value with the dimension's own unit
+            if default is None:
+                yield str(name), d, "", d.unit
+            else:
+                # unit resolution fallback chain:
+                #   no unit, no prefix  -> dimension's default unit
+                #   no unit, prefix     -> prefixed default unit ("<prefix>_<ref>")
+                #   unit, no prefix     -> unit looked up by its tail
+                #   unit and prefix     -> "<prefix>_<unit>" key
+                if uri_unit is None:
+                    if uri_prefix is None:
+                        u = d.unit
+                    else:
+                        ref = "%s_%s" % (self.ontology._tail(uri_prefix), d.unit.reference)
+                        u = self.um[ref]
+                else:
+                    if uri_prefix is None:
+                        u = self.um[ self.ontology._tail(uri_unit) ]
+                    else:
+                        ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+                        u = self.um[ref]
+                yield str(name), d, str(default), u
+
+    def translateParameter(self, name, uri_dim, uri_unit, uri_prefix, uri_ptype, default = None):
+        '''
+        @summary: helper method to instantiate a Parameter
+        @param name: the reference name of the parameter
+        @type name: str
+        @param uri_dim: the dimension of the parameter
+        @type uri_dim: URIRef
+        @param uri_unit: the unit of the parameter, if None we fall back to the unit of the dimension
+        @type uri_unit: URIRef
+        @param uri_prefix: accounts only if uri_unit is not None
+        @type uri_prefix: URIRef
+        @param uri_ptype: the type of the parameter to use for serialization
+        @type uri_ptype: URIRef
+        @param default: the parameter value to initialize with, if None, parameter won't hold a value
+        @type default: Literal
+        @return: a parameter
+        @rtype: Parameter
+        '''
+        # KeyError here means the model declares a type outside typelookup
+        vt = self.typelookup[ self.ontology._tail(uri_ptype) ]
+        d = self.dm[ self.ontology._tail(uri_dim) ]
+        if default is None:
+            return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d)
+        else:
+            # same unit fallback chain as in inferFeatureMonitoringParameters
+            # (see FIXME there about the duplication)
+            if uri_unit is None:
+                if uri_prefix is None:
+                    u = d.unit
+                else:
+                    ref = "%s_%s" % (self.ontology._tail(uri_prefix), d.unit.reference)
+                    u = self.um[ref]
+            else:
+                if uri_prefix is None:
+                    u = self.um[ self.ontology._tail(uri_unit) ]
+                else:
+                    ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+                    u = self.um[ref]
+            # the default is cast with vt and paired with its resolved unit
+            return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d, default = (vt(default), u))
diff --git a/Monitoring/src/main/python/Semantics/TaskModel.py.old b/Monitoring/src/main/python/Semantics/TaskModel.py.old
new file mode 100644
index 0000000..2cfc06e
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/TaskModel.py.old
@@ -0,0 +1,424 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from Credential.credentialtypes import UsernamePassword, UsernameRSAKey
+from DataProcessing.Data import DataHeader, DataHeaderCell
+from DataProcessing.Parameter import ParameterList, Parameter
+from Driver.SOAPClient import SOAPClient
+from Driver.SshExec import SshExec
+
+class TaskModelError(Exception):
+ pass
+
+class TaskModel(object):
+ '''
+ classdocs
+ '''
+ hooklookup = {
+ 'hasPreHook' : 'prehook',
+ 'hasStartHook' : 'starthook',
+ 'hasRetrieveHook' : 'retrievehook',
+ 'hasStopHook' : 'stophook',
+ 'hasPostHook' : 'posthook',
+ }
+ typelookup = {
+ 'Integer': int,
+ 'Float': float,
+ 'String': str
+ }
+
+ def __init__(self, dimensionmanager, unitmanager, ontology):
+ '''
+ @summary: constructor
+ @param dimensionmanager: the container to form a cell's dimension
+ @type dimensionmanager: DimensionManager
+ @param unitmanager: the container to form a cell's unit
+ @type unitmanager: UnitManager
+ @param ontology: the basic knowledge
+ @type ontology: Ontology
+ '''
+ self.ontology = ontology
+ self.dm = dimensionmanager
+ self.um = unitmanager
+
+ def inferDomains(self):
+ '''
+ @summary: extract the monitoring domains from the information model
+ @return: generator of the list of domains
+ @rtype: URIRef
+ '''
+ for uri_domain, _, _ in self.ontology.graph.triples((None, self.ontology.ns('rdf')['type'], self.ontology.ns('task')['MonitoringDomain'])):
+ yield uri_domain
+
+ def inferFeatures(self):
+ '''
+ @summary: extract the monitored features from the information model
+ @return: a generator of the list of (feature reference, name) pairs
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?feature ?name ?resource
+WHERE {
+ ?feature a owl:NamedIndividual ;
+ a ?parent ;
+ feature:featureName ?name .
+ ?parent rdfs:subClassOf feature:MonitoredFeature .
+ ?resource feature:hasFeature ?feature
+}
+ """
+ for uri_feature, name, uri_resource in self.ontology.query(q):
+ yield uri_feature, str(name), uri_resource
+
+ def inferTasks(self, domain, feature):
+ '''
+ @summary: provides a generator to crawl over the tasks that can measure a given feature in the given domain of interest
+ @param domain: domain of interest
+ @type domain: URIRef
+ @param feature: the feature to measure
+ @type feature: URIRef
+ @return: a generator of the list of (task reference, task name) pairs
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?task ?name
+WHERE {
+ ?task a owl:NamedIndividual ;
+ a task:MonitoringTask ;
+ task:name ?name ;
+ task:hasMonitoringDomain task:%s ;
+ task:hasOutputTableFormat ?data .
+?data task:hasColumn ?col .
+?col task:hasMonitoredFeature feature:%s
+}
+ """ % (self.ontology._tail(domain), self.ontology._tail(feature))
+ for uri_task, tname in self.ontology.query(q):
+ yield uri_task, str(tname)
+
+ def inferCredentialOf(self, task):
+ '''
+ @summary: extracts the set of acceptable credential templates the given task accepts
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: a set of an uninitialized Credential classes
+ @rtype: set(Credential)
+ @raise IMError: Unknown authentication type
+ '''
+ creds = set()
+ for (_, _, auth) in self.ontology.graph.triples((task, self.ontology.ns('task')['hasAuthenticationType'], None)):
+ if auth == self.ontology.ns('task')["UsernamePassword"]:
+ creds.add(UsernamePassword)
+ elif auth == self.ontology.ns('task')["UsernameRSAKey"]:
+ creds.add(UsernameRSAKey)
+ else:
+ raise TaskModelError("Unknown authentication type %s" % auth)
+ return creds
+
+ def inferDriverOf(self, task):
+ '''
+ @summary: extarcts the driver of the task
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: the appropriate driver class uninstantiated
+ @rtype: Driver
+ @raise IMError: Unknown driver type / hasDriver missing
+ '''
+ try:
+ _, _, driver = self.ontology.graph.triples((task, self.ontology.ns('task')['hasDriver'], None)).next()
+ if driver == self.ontology.ns('task')["SOAPClient"]:
+ return SOAPClient
+ elif driver == self.ontology.ns('task')["SSH"]:
+ return SshExec
+ else:
+ raise TaskModelError("Unknown driver type %s" % driver)
+ except StopIteration:
+ raise TaskModelError("hasDriver is missing for task %s" % task)
+
+ def inferHookparametersOf(self, task):
+ '''
+ @summary: extract the necessary control parameters for task initialization
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: a lookup table of arguments, which are passed to the Task object's prehook method as keyword arguments
+ @rtype: dict
+ '''
+ q = """
+SELECT ?name ?value ?type
+WHERE {
+ conf:%s task:hasHookParameter ?p .
+ ?p param:paramName ?name ;
+ a owl:NamedIndividual ;
+ rdf:type task:HookParameter ;
+ unit:hasValue ?value ;
+ param:hasType ?type .
+}
+ """ % (self.ontology._tail(task))
+ d = {}
+ for pname, pvalue, ptype in self.ontology.query(q):
+ pname = str(pname)
+ if ptype == self.ontology.ns('param')["Integer"]:
+ d[pname] = int(pvalue)
+ elif ptype == self.ontology.ns('param')["Float"]:
+ d[pname] = float(pvalue)
+ else:
+ d[pname] = str(pvalue)
+ return d
+
+ def inferHookdefinitionsOf(self, task):
+ '''
+ @summary: extract the hook implementation details for task initialization
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: a lookup table of hook definitions
+ @rtype: dict
+ '''
+ q = """
+SELECT ?rel ?value
+WHERE {
+ conf:%s ?rel ?h .
+ ?h task:hookCode ?value .
+}
+ """ % (self.ontology._tail(task))
+ d = {}
+ for hrel, hvalue in self.ontology.query(q):
+ hook = self.ontology._tail(uriref = hrel)
+ d[self.hooklookup[hook]] = str(hvalue).replace('\\n', '\n').replace('\\t', '\t').replace('\\\\', '\\').strip()
+ return d
+
+ def inferDataheaderOf(self, task):
+ '''
+ @summary: extract the data header declaration the for task
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: an initialized DataHeader instance
+ @rtype: DataHeader
+ '''
+ q = """
+SELECT ?tablename ?colname ?dim ?feature ?unit ?prefix
+WHERE {
+ conf:%s task:hasOutputTableFormat ?hdr .
+ ?hdr task:name ?tablename .
+ ?hdr task:hasColumn ?col .
+ ?col task:name ?colname ;
+ a owl:NamedIndividual ;
+ a ?dim ;
+ task:sequenceNumber ?seqno .
+ {
+ ?dim rdfs:subClassOf unit:BaseDimension .
+ } UNION {
+ ?dim rdfs:subClassOf ?p .
+ ?p rdfs:subClassOf unit:DerivedDimension .
+ }
+ OPTIONAL {
+ ?col task:hasMonitoredFeature ?feature .
+ }
+ OPTIONAL {
+ ?col unit:hasUnit ?unit .
+ OPTIONAL {
+ ?col unit:hasPrefix ?prefix .
+ }
+ }
+}
+ORDER BY ?seqno
+ """ % (self.ontology._tail(task))
+ datahdr = None
+ for tablename, colname, uri_dim, uri_feature, uri_unit, uri_prefix in self.ontology.query(q):
+ if datahdr is None:
+ datahdr = DataHeader(str(tablename))
+ if uri_unit is None:
+ u = None
+ elif uri_prefix is None:
+ u = self.um[ self.ontology._tail(uri_unit) ]
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+ u = self.um[ref]
+ d = self.dm[ self.ontology._tail(uri_dim) ]
+ if uri_feature is None:
+ cell = DataHeaderCell(name = str(colname), dimension = d, unit = u)
+ else:
+ cell = DataHeaderCell(name = str(colname), dimension = d, feature = uri_feature, unit = u)
+ datahdr.addColumn(cell)
+ return datahdr
+
+ def inferParametersOf(self, task):
+ '''
+ @summary: extract the parameter list for the given task
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: an initialized list of the parameters of the task
+ @rtype: ParameterList
+ '''
+ q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+ conf:%s task:hasExecutionParameter ?par .
+ ?par param:paramName ?name ;
+ param:hasType ?ptype ;
+ a ?dim .
+ {
+ ?dim rdfs:subClassOf unit:BaseDimension .
+ } UNION {
+ ?dim rdfs:subClassOf ?p .
+ ?p rdfs:subClassOf unit:DerivedDimension .
+ }
+ OPTIONAL {
+ ?par unit:hasValue ?defval .
+ OPTIONAL {
+ ?par unit:hasUnit ?unit .
+ OPTIONAL {
+ ?par unit:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """ % (self.ontology._tail(task))
+ paramlist = ParameterList()
+ for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+ p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+ paramlist.append(p)
+ return paramlist
+
+ def inferObligatoryParametersOf(self, feature):
+ '''
+ @summary: extract the parameter list for a given feature
+ @param feature: reference to the monitored feature
+ @type feature: URIRef
+ @return: an initialized list of the parameters for this feature
+ @rtype: ParameterList
+ '''
+ q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+ feature:%s feature:obligatoryParameter ?par .
+ ?par param:paramName ?name ;
+ param:hasType ?ptype ;
+ param:hasDimension ?dim .
+ OPTIONAL {
+ ?par param:paramValue ?defval .
+ OPTIONAL {
+ ?par param:hasUnit ?unit .
+ OPTIONAL {
+ ?par param:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """ % (self.ontology._tail(feature))
+ paramlist = ParameterList()
+ for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+ p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+ paramlist.append(p)
+ return paramlist
+
+ def inferOptionalParametersOf(self, feature):
+ '''
+ @summary: extract the parameter list for a given feature
+ @param feature: reference to the monitored feature
+ @type feature: URIRef
+ @return: an initialized list of the parameters for this feature
+ @rtype: ParameterList
+ '''
+ q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+ feature:%s feature:optionalParameter ?par .
+ ?par param:paramName ?name ;
+ param:hasType ?ptype ;
+ param:hasDimension ?dim .
+ OPTIONAL {
+ ?par param:paramValue ?defval .
+ OPTIONAL {
+ ?par param:hasUnit ?unit .
+ OPTIONAL {
+ ?par param:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """ % (self.ontology._tail(feature))
+ paramlist = ParameterList()
+ for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+ p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+ paramlist.append(p)
+ return paramlist
+
+ def inferFeatureMonitoringParameters(self):
+ '''
+ @summary: extract parameters declared for feature monitoring
+ @return: an iterator over parameters
+ @rtype: (parameter name, dimension, value, unit)
+ '''
+ q = """
+SELECT ?name ?dim ?defval ?unit ?prefix
+WHERE {
+ ?par a feature:FeatureMonitoringParameter ;
+ param:paramName ?name ;
+ param:hasDimension ?dim .
+ OPTIONAL {
+ ?par param:paramValue ?defval .
+ OPTIONAL {
+ ?par param:hasUnit ?unit .
+ OPTIONAL {
+ ?par param:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """
+ for name, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+#FIXME: duplicate (similar thing in translateParameter!!!
+ d = self.dm[ self.ontology._tail(uri_dim) ]
+ if default is None:
+ yield str(name), d, "", d.unit
+ else:
+ if uri_unit is None:
+ if uri_prefix is None:
+ u = d.unit
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), d.unit.reference)
+ u = self.um[ref]
+ else:
+ if uri_prefix is None:
+ u = self.um[ self.ontology._tail(uri_unit) ]
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+ u = self.um[ref]
+ yield str(name), d, str(default), u
+
+ def translateParameter(self, name, uri_dim, uri_unit, uri_prefix, uri_ptype, default = None):
+ '''
+ @summary: helper method to instantiate a Parameter
+ @param name: the reference name of the parameter
+ @type name: str
+ @param uri_dim: the dimension of the parameter
+ @type uri_dim: URIRef
+ @param uri_unit: the unit of the parameter, if None we fall back to the unit of the dimension
+ @type uri_unit: URIRef
+ @param uri_prefix: accounts only if uri_unit is not None
+ @type uri_prefix: URIRef
+ @param uri_ptype: the type of the parameter to use for serialization
+ @type uri_ptype: URIRef
+ @param default: the parameter value to initialize with, if None, parameter won't hol a value
+ @type default: Literal
+ @return: a parameter
+ @rtype: Parameter
+ '''
+ vt = self.typelookup[ self.ontology._tail(uri_ptype) ]
+ d = self.dm[ self.ontology._tail(uri_dim) ]
+ if default is None:
+ return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d)
+ else:
+ if uri_unit is None:
+ if uri_prefix is None:
+ u = d.unit
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), d.unit.reference)
+ u = self.um[ref]
+ else:
+ if uri_prefix is None:
+ u = self.um[ self.ontology._tail(uri_unit) ]
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+ u = self.um[ref]
+ return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d, default = (vt(default), u))
diff --git a/Monitoring/src/main/python/Semantics/UnitModel$py.class b/Monitoring/src/main/python/Semantics/UnitModel$py.class
new file mode 100644
index 0000000..8578183
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/UnitModel$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/UnitModel.py b/Monitoring/src/main/python/Semantics/UnitModel.py
new file mode 100644
index 0000000..af8ec74
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/UnitModel.py
@@ -0,0 +1,364 @@
+'''
+Created on Feb 12, 2012
+
+@author: steger
+'''
+
+class UnitModel(object):
+ '''
+ @summary: an interface to infer prefix, unit and dimension related information from the model
+ '''
+
+ def __init__(self, ontology):
+ '''
+ @summary: constructor
+ @param ontology: the basic knowledge
+ @type ontology: Ontology
+ '''
+ self.ontology = ontology
+
+ def inferPrefixes(self):
+ '''
+ @summary: iterate over all prefixes defined in the model.
+ @return: a generator of the prefix details: (reference, symbol, base, exponent)
+ @rtype: (URIRef, str, int, int)
+ @todo: in case the unit:base is not present in a Prefix individual,
+ we should fall back to the restriction on the base defined for the given sibling of the Prefix.
+ This sibling is referenced ?basegroup in the query.
+ '''
+ q = """
+SELECT ?prefix ?symbol ?base ?exponent
+WHERE {
+ ?prefix a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a ?basegroup ;
+ unit:exponent ?exponent ;
+ unit:base ?base .
+ ?basegroup rdfs:subClassOf unit:Prefix .
+ OPTIONAL {
+ ?prefix unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_prefix, symbol, base, exponent in self.ontology.query(q):
+ if symbol is None:
+ yield uri_prefix, self.ontology._tail(uri_prefix), int(base), int(exponent)
+ else:
+ yield uri_prefix, str(symbol), int(base), int(exponent)
+
+    def inferPrefixSymbolOf(self, prefixuri):
+        '''
+        @summary: generates a short written form of a unit prefix if unit:symbol is present in the model,
+        otherwise an abbreviation is derived from the tail of the uri (the reference name to the individual).
+        @param prefixuri: the uri reference to the unit prefix
+        @type prefixuri: URIRef
+        @return: the short form
+        @rtype: str
+        '''
+        try:
+            _, _, symbol = self.ontology.graph.triples((prefixuri, self.ontology.ns('unit')['symbol'], None)).next()
+            return str(symbol)
+        except StopIteration:
+            return self.ontology._tail(prefixuri)
+
+
+ def inferBaseUnits(self):
+ '''
+ @summary: iterate over all BaseUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol)
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?unit ?symbol
+WHERE {
+ ?unit a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:BaseUnit .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit)
+ else:
+ yield uri_unit, str(symbol)
+
+ def inferPowerUnits(self):
+ '''
+ @summary: iterate over all PowerUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, powerof, exponent)
+ @rtype: (URIRef, str, URIRef, int)
+ '''
+ q = """
+SELECT ?unit ?symbol ?powerof ?exponent
+WHERE {
+ ?unit a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:PowerUnit ;
+ unit:exponent ?exponent ;
+ unit:derivedFrom ?powerof .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, uri_powerof, exponent in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_powerof, int(exponent)
+ else:
+ yield uri_unit, str(symbol), uri_powerof, int(exponent)
+
+    def inferProductUnits(self):
+        '''
+        @summary: iterate over all ProductUnits defined in the model.
+        @return: a generator of the unit details: (reference, symbol, productof)
+        @rtype: (URIRef, str, set(URIRef))
+        '''
+        q = """
+SELECT ?unit ?symbol ?productof
+WHERE {
+    ?unit a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+        a unit:ProductUnit ;
+        unit:derivedFrom ?productof .
+    OPTIONAL {
+        ?unit unit:symbol ?symbol .
+    }
+}
+        """
+        container = {}
+        for uri_unit, symbol, uri_productof in self.ontology.query(q):
+            if symbol is None:
+                key = uri_unit, self.ontology._tail(uri_unit)  # fixed: was self.ontology_tail -> AttributeError
+            else:
+                key = uri_unit, str(symbol)
+            if not container.has_key(key):
+                container[key] = set()
+            container[key].add(uri_productof)
+        for (uri_unit, symbol), productof in container.iteritems():
+            yield uri_unit, symbol, productof
+
+ def inferLinearTransformedUnits(self):
+ '''
+ @summary: iterate over all LinearTransformedUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, derivedfrom, scale, offset)
+ @rtype: (URIRef, str, URIRef, float, float)
+ '''
+ q = """
+SELECT ?unit ?symbol ?scale ?offset ?derivedfrom
+WHERE {
+ ?unit a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:LinearTransformedUnit ;
+ unit:derivedFrom ?derivedfrom ;
+ unit:scale ?scale .
+ OPTIONAL {
+ ?unit unit:offset ?offset .
+ }
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, scale, offset, uri_derivedfrom in self.ontology.query(q):
+ if offset is None:
+ offset = 0
+ else:
+ offset = self.ontology._float(offset)
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_derivedfrom, self.ontology._float(scale), offset
+ else:
+ yield uri_unit, str(symbol), uri_derivedfrom, self.ontology._float(scale), offset
+
+ def inferRegexpTransformedUnits(self):
+ '''
+ @summary: iterate over all RegexpTransformedUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, derivedfrom, expr_fwd, expr_inv)
+ @rtype: (URIRef, str, URIRef, str, str)
+ '''
+ q = """
+SELECT ?unit ?symbol ?derivedfrom ?fwd ?inv
+WHERE {
+ ?unit a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:RegexpTransformedUnit ;
+ unit:derivedFrom ?derivedfrom ;
+ unit:forwardExpression ?fwd ;
+ unit:inverseExpression ?inv .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, uri_derivedfrom, expr_fwd, expr_inv in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_derivedfrom, str(expr_fwd), str(expr_inv)
+ else:
+ yield uri_unit, str(symbol), uri_derivedfrom, str(expr_fwd), str(expr_inv)
+
+    def inferPossiblePrefixesOf(self, uri_unit):
+        '''
+        @summary: extract possible prefixes for the given unit
+        @param uri_unit: reference to the unit
+        @type uri_unit: URIRef
+        @return: a generator over the references of the possible unit prefixes
+        @rtype: URIRef
+        '''
+        for _, _, uri_prefix in self.ontology.graph.triples((uri_unit, self.ontology.ns('unit')['possiblePrefix'], None)):
+            yield uri_prefix
+
+ def inferBaseDimensions(self):
+ '''
+ @summary: extract BaseDimensions and their corresponding units from the model
+ @return: a generator of the BaseDimension details: (reference, unit, level)
+ @rtype: (URIRef, URIRef, str)
+ '''
+ q = """
+SELECT ?dimension ?unit ?level
+WHERE {
+ ?dimension rdfs:subClassOf unit:BaseDimension ;
+ rdfs:subClassOf ?constraint ;
+ rdfs:subClassOf ?level .
+ ?constraint owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ FILTER regex(?level, "Level") .
+}
+ """
+ for uri_dimension, uri_unit, level in self.ontology.query(q):
+ yield uri_dimension, uri_unit, level
+
+ def inferDifferenceDimensions(self):
+ '''
+ @summary: extract DifferenceDimensions and their corresponding units from the model
+ @return: a generator of the DifferenceDimension details: (reference, unit, derivedfrom)
+ @rtype: (URIRef, URIRef, URIRef)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:DifferenceDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom
+
+ def inferPowerDimensions(self):
+ '''
+ @summary: extract PowerDimensions and their corresponding units from the model
+ @return: a generator of the PowerDimension details: (reference, unit, derivedfrom, exponent)
+ @rtype: (URIRef, URIRef, URIRef, int)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom ?exponent
+WHERE {
+ ?dimension rdfs:subClassOf unit:PowerDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 ;
+ rdfs:subClassOf ?constraint3 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+ ?constraint3 owl:onProperty unit:exponent ;
+ owl:hasValue ?exponent .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom, exponent in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom, int(exponent)
+
+ def inferProductDimensions(self):
+ '''
+ @summary: extract ProductDimensions and their corresponding units from the model
+ @return: a generator of the ProductDimension details: (reference, unit, set of derivedfrom references)
+ @rtype: (URIRef, URIRef, tuple(URIRef))
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:ProductDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ container = {}
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ if not container.has_key(uri_dimension):
+ container[uri_dimension] = (uri_unit, set())
+ container[uri_dimension][1].add(uri_derivedfrom)
+ for uri_dimension, (uri_unit, set_derivedfrom) in container.iteritems():
+ yield uri_dimension, uri_unit, tuple(set_derivedfrom)
+
+ def inferRatioDimensions(self):
+ '''
+ @summary: extract RatioDimensions and their corresponding units from the model
+ @return: a generator of the RatioDimension details: (reference, unit, derivedfrom)
+ @rtype: (URIRef, URIRef, URIRef)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:RatioDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#OBSOLETED:
+ def inferUnitSymbolOf(self, unituri, prefixuri = None):
+ raise Exception("OBSOLETE")
+# '''
+# @summary: generates an short written form of a unit if symbols are present for the prefix (optional) and the unit itself.
+# If either of them is missing abbreviation is derived from the tail of the uri (name of the individual).
+# @param unituri: the unit
+# @type unituri: URIRef
+# @param prefixuri: the prefix (optional)
+# @type prefixuri: URIRef or None
+# @return: the short form
+# @rtype: str
+# '''
+# if prefixuri is None:
+# prefix = ""
+# else:
+# try:
+# _, _, symbol = self.graph.triples((prefixuri, self.ns('unit')['symbol'], None)).next()
+# prefix = str(symbol)
+# except StopIteration:
+# prefix = self._tail(prefixuri)
+# try:
+# _, _, symbol = self.graph.triples((unituri, self.ns('unit')['symbol'], None)).next()
+# return "%s%s" % (prefix, str(symbol))
+# except StopIteration:
+# return "%s%s" % (prefix, self._tail(unituri))
+
diff --git a/Monitoring/src/main/python/Semantics/UnitModel.py.old b/Monitoring/src/main/python/Semantics/UnitModel.py.old
new file mode 100644
index 0000000..2906615
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/UnitModel.py.old
@@ -0,0 +1,364 @@
+'''
+Created on Feb 12, 2012
+
+@author: steger
+'''
+
+class UnitModel(object):
+ '''
+ @summary: an interface to infer prefix, unit and dimension related information from the model
+ '''
+
+ def __init__(self, ontology):
+ '''
+ @summary: constructor
+ @param ontology: the basic knowledge
+ @type ontology: Ontology
+ '''
+ self.ontology = ontology
+
+ def inferPrefixes(self):
+ '''
+ @summary: iterate over all prefixes defined in the model.
+ @return: a generator of the prefix details: (reference, symbol, base, exponent)
+ @rtype: (URIRef, str, int, int)
+ @todo: in case the unit:base is not present in a Prefix individual,
+ we should fall back to the restriction on the base defined for the given sibling of the Prefix.
+ This sibling is referenced ?basegroup in the query.
+ '''
+ q = """
+SELECT ?prefix ?symbol ?base ?exponent
+WHERE {
+ ?prefix a owl:NamedIndividual ;
+ a ?basegroup ;
+ unit:exponent ?exponent ;
+ unit:base ?base .
+ ?basegroup rdfs:subClassOf unit:Prefix .
+ OPTIONAL {
+ ?prefix unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_prefix, symbol, base, exponent in self.ontology.query(q):
+ if symbol is None:
+ yield uri_prefix, self.ontology._tail(uri_prefix), int(base), int(exponent)
+ else:
+ yield uri_prefix, str(symbol), int(base), int(exponent)
+
+    def inferPrefixSymbolOf(self, prefixuri):
+        '''
+        @summary: generates a short written form of a unit prefix if unit:symbol is present in the model,
+        otherwise an abbreviation is derived from the tail of the uri (the reference name to the individual).
+        @param prefixuri: the uri reference to the unit prefix
+        @type prefixuri: URIRef
+        @return: the short form
+        @rtype: str
+        '''
+        try:
+            _, _, symbol = self.ontology.graph.triples((prefixuri, self.ontology.ns('unit')['symbol'], None)).next()
+            return str(symbol)
+        except StopIteration:
+            return self.ontology._tail(prefixuri)
+
+
+ def inferBaseUnits(self):
+ '''
+ @summary: iterate over all BaseUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol)
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?unit ?symbol
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:BaseUnit .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit)
+ else:
+ yield uri_unit, str(symbol)
+
+ def inferPowerUnits(self):
+ '''
+ @summary: iterate over all PowerUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, powerof, exponent)
+ @rtype: (URIRef, str, URIRef, int)
+ '''
+ q = """
+SELECT ?unit ?symbol ?powerof ?exponent
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:PowerUnit ;
+ unit:exponent ?exponent ;
+ unit:derivedFrom ?powerof .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, uri_powerof, exponent in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_powerof, int(exponent)
+ else:
+ yield uri_unit, str(symbol), uri_powerof, int(exponent)
+
+    def inferProductUnits(self):
+        '''
+        @summary: iterate over all ProductUnits defined in the model.
+        @return: a generator of the unit details: (reference, symbol, productof)
+        @rtype: (URIRef, str, set(URIRef))
+        '''
+        q = """
+SELECT ?unit ?symbol ?productof
+WHERE {
+    ?unit a owl:NamedIndividual ;
+        a unit:ProductUnit ;
+        unit:derivedFrom ?productof .
+    OPTIONAL {
+        ?unit unit:symbol ?symbol .
+    }
+}
+        """
+        container = {}
+        for uri_unit, symbol, uri_productof in self.ontology.query(q):
+            if symbol is None:
+                key = uri_unit, self.ontology._tail(uri_unit)  # fixed: was self.ontology_tail -> AttributeError
+            else:
+                key = uri_unit, str(symbol)
+            if not container.has_key(key):
+                container[key] = set()
+            container[key].add(uri_productof)
+        for (uri_unit, symbol), productof in container.iteritems():
+            yield uri_unit, symbol, productof
+
+ def inferLinearTransformedUnits(self):
+ '''
+ @summary: iterate over all LinearTransformedUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, derivedfrom, scale, offset)
+ @rtype: (URIRef, str, URIRef, float, float)
+ '''
+ q = """
+SELECT ?unit ?symbol ?scale ?offset ?derivedfrom
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:LinearTransformedUnit ;
+ unit:derivedFrom ?derivedfrom ;
+ unit:scale ?scale .
+ OPTIONAL {
+ ?unit unit:offset ?offset .
+ }
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, scale, offset, uri_derivedfrom in self.ontology.query(q):
+ if offset is None:
+ offset = 0
+ else:
+ offset = self.ontology._float(offset)
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_derivedfrom, self.ontology._float(scale), offset
+ else:
+ yield uri_unit, str(symbol), uri_derivedfrom, self.ontology._float(scale), offset
+
+ def inferRegexpTransformedUnits(self):
+ '''
+ @summary: iterate over all RegexpTransformedUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, derivedfrom, expr_fwd, expr_inv)
+ @rtype: (URIRef, str, URIRef, str, str)
+ '''
+ q = """
+SELECT ?unit ?symbol ?derivedfrom ?fwd ?inv
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:RegexpTransformedUnit ;
+ unit:derivedFrom ?derivedfrom ;
+ unit:forwardExpression ?fwd ;
+ unit:inverseExpression ?inv .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, uri_derivedfrom, expr_fwd, expr_inv in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_derivedfrom, str(expr_fwd), str(expr_inv)
+ else:
+ yield uri_unit, str(symbol), uri_derivedfrom, str(expr_fwd), str(expr_inv)
+
+    def inferPossiblePrefixesOf(self, uri_unit):
+        '''
+        @summary: extract possible prefixes for the given unit
+        @param uri_unit: reference to the unit
+        @type uri_unit: URIRef
+        @return: a generator over the references of the possible unit prefixes
+        @rtype: URIRef
+        '''
+        for _, _, uri_prefix in self.ontology.graph.triples((uri_unit, self.ontology.ns('unit')['possiblePrefix'], None)):
+            yield uri_prefix
+
+ def inferBaseDimensions(self):
+ '''
+ @summary: extract BaseDimensions and their corresponding units from the model
+ @return: a generator of the BaseDimension details: (reference, unit, level)
+ @rtype: (URIRef, URIRef, str)
+ '''
+ q = """
+SELECT ?dimension ?unit ?level
+WHERE {
+ ?dimension rdfs:subClassOf unit:BaseDimension ;
+ rdfs:subClassOf ?constraint ;
+ rdfs:subClassOf ?level .
+ ?constraint owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ FILTER regex(?level, "Level") .
+}
+ """
+ for uri_dimension, uri_unit, level in self.ontology.query(q):
+ yield uri_dimension, uri_unit, level
+
+ def inferDifferenceDimensions(self):
+ '''
+ @summary: extract DifferenceDimensions and their corresponding units from the model
+ @return: a generator of the DifferenceDimension details: (reference, unit, derivedfrom)
+ @rtype: (URIRef, URIRef, URIRef)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:DifferenceDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom
+
+ def inferPowerDimensions(self):
+ '''
+ @summary: extract PowerDimensions and their corresponding units from the model
+ @return: a generator of the PowerDimension details: (reference, unit, derivedfrom, exponent)
+ @rtype: (URIRef, URIRef, URIRef, int)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom ?exponent
+WHERE {
+ ?dimension rdfs:subClassOf unit:PowerDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 ;
+ rdfs:subClassOf ?constraint3 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+ ?constraint3 owl:onProperty unit:exponent ;
+ owl:hasValue ?exponent .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom, exponent in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom, int(exponent)
+
+ def inferProductDimensions(self):
+ '''
+ @summary: extract ProductDimensions and their corresponding units from the model
+ @return: a generator of the ProductDimension details: (reference, unit, set of derivedfrom references)
+ @rtype: (URIRef, URIRef, tuple(URIRef))
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:ProductDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ container = {}
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ if not container.has_key(uri_dimension):
+ container[uri_dimension] = (uri_unit, set())
+ container[uri_dimension][1].add(uri_derivedfrom)
+ for uri_dimension, (uri_unit, set_derivedfrom) in container.iteritems():
+ yield uri_dimension, uri_unit, tuple(set_derivedfrom)
+
+ def inferRatioDimensions(self):
+ '''
+ @summary: extract RatioDimensions and their corresponding units from the model
+ @return: a generator of the RatioDimension details: (reference, unit, derivedfrom)
+ @rtype: (URIRef, URIRef, URIRef)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:RatioDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#OBSOLETED:
+ def inferUnitSymbolOf(self, unituri, prefixuri = None):
+ raise Exception("OBSOLETE")
+# '''
+# @summary: generates an short written form of a unit if symbols are present for the prefix (optional) and the unit itself.
+# If either of them is missing abbreviation is derived from the tail of the uri (name of the individual).
+# @param unituri: the unit
+# @type unituri: URIRef
+# @param prefixuri: the prefix (optional)
+# @type prefixuri: URIRef or None
+# @return: the short form
+# @rtype: str
+# '''
+# if prefixuri is None:
+# prefix = ""
+# else:
+# try:
+# _, _, symbol = self.graph.triples((prefixuri, self.ns('unit')['symbol'], None)).next()
+# prefix = str(symbol)
+# except StopIteration:
+# prefix = self._tail(prefixuri)
+# try:
+# _, _, symbol = self.graph.triples((unituri, self.ns('unit')['symbol'], None)).next()
+# return "%s%s" % (prefix, str(symbol))
+# except StopIteration:
+# return "%s%s" % (prefix, self._tail(unituri))
+
diff --git a/Monitoring/src/main/python/Semantics/__init__$py.class b/Monitoring/src/main/python/Semantics/__init__$py.class
new file mode 100644
index 0000000..f47471b
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/__init__$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/__init__.py b/Monitoring/src/main/python/Semantics/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/__init__.py
diff --git a/Monitoring/src/main/python/Semantics/__init__.py.old b/Monitoring/src/main/python/Semantics/__init__.py.old
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/__init__.py.old
diff --git a/Monitoring/src/main/python/Semantics/a b/Monitoring/src/main/python/Semantics/a
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/a
diff --git a/Monitoring/src/main/python/Semantics/b b/Monitoring/src/main/python/Semantics/b
new file mode 100644
index 0000000..7b8569a
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/b
@@ -0,0 +1,55 @@
+EEEEE
+======================================================================
+ERROR: test_IM_domainsfeatures (__main__.Test)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "test.py", line 28, in setUp
+ dim_ipaddress = self.MS_planetlab.dm['IPAddress']
+ File "/home/laki/work/NOVI/MonitoringJYTHON_130118/Mon2/DataProcessing/Dimension.py", line 302, in __getitem__
+ raise DimensionError("Dimension with reference %s not found" % reference)
+DimensionError: Dimension with reference IPAddress not found
+
+======================================================================
+ERROR: test_IM_task (__main__.Test)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "test.py", line 28, in setUp
+ dim_ipaddress = self.MS_planetlab.dm['IPAddress']
+ File "/home/laki/work/NOVI/MonitoringJYTHON_130118/Mon2/DataProcessing/Dimension.py", line 302, in __getitem__
+ raise DimensionError("Dimension with reference %s not found" % reference)
+DimensionError: Dimension with reference IPAddress not found
+
+======================================================================
+ERROR: test_managers (__main__.Test)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "test.py", line 28, in setUp
+ dim_ipaddress = self.MS_planetlab.dm['IPAddress']
+ File "/home/laki/work/NOVI/MonitoringJYTHON_130118/Mon2/DataProcessing/Dimension.py", line 302, in __getitem__
+ raise DimensionError("Dimension with reference %s not found" % reference)
+DimensionError: Dimension with reference IPAddress not found
+
+======================================================================
+ERROR: test_owlexamples (__main__.Test)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "test.py", line 28, in setUp
+ dim_ipaddress = self.MS_planetlab.dm['IPAddress']
+ File "/home/laki/work/NOVI/MonitoringJYTHON_130118/Mon2/DataProcessing/Dimension.py", line 302, in __getitem__
+ raise DimensionError("Dimension with reference %s not found" % reference)
+DimensionError: Dimension with reference IPAddress not found
+
+======================================================================
+ERROR: test_taskBYuri (__main__.Test)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "test.py", line 28, in setUp
+ dim_ipaddress = self.MS_planetlab.dm['IPAddress']
+ File "/home/laki/work/NOVI/MonitoringJYTHON_130118/Mon2/DataProcessing/Dimension.py", line 302, in __getitem__
+ raise DimensionError("Dimension with reference %s not found" % reference)
+DimensionError: Dimension with reference IPAddress not found
+
+----------------------------------------------------------------------
+Ran 5 tests in 0.014s
+
+FAILED (errors=5)
diff --git a/Monitoring/src/main/python/Semantics/test.py b/Monitoring/src/main/python/Semantics/test.py
new file mode 100644
index 0000000..70d335d
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/test.py
@@ -0,0 +1,263 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+import site
+site.addsitedir('../site-packages')
+
+
+import unittest2
+from rdflib import Graph
+from Example.Metrics import RoundTripDelay
+from Example.Tools import sonomashortping
+from DataProcessing.Parameter import ParameterList, Parameter
+from Example.credentials import ple_credentials
+from Example.Platforms import FRAMEWORK
+from DataProcessing.DataHeaderCell import CellRequestByName
+from DataProcessing.DataError import SamplerError
+from DataProcessing.DataReader import DataReader
+import pkgutil
+import StringIO
+import monitoringmodel.im
+import os.path
+
+
+class Test(unittest2.TestCase):  # integration tests for the Semantics layer: unit/dimension managers, task model, query interpreter
+
+    def setUp(self):  # NOTE(review): depends on live FRAMEWORK services; a failure here cascades into every test case
+        self.MS_planetlab = FRAMEWORK.getService('PlanetLab')
+        self.MS_federica = FRAMEWORK.getService('FEDERICA')
+
+        dim_ipaddress = self.MS_planetlab.dm['IPAddress']  # raises DimensionError when the model lacks IPAddress (see recorded test log above)
+        node = ("157.181.175.243", self.MS_planetlab.um.ipv4dotted)
+        self.p_src_eltenode = Parameter(name = "SourceAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
+        node = ("147.102.22.66", self.MS_planetlab.um.ipv4dotted)
+        self.p_dst_ntuanode = Parameter(name = "DestinationAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
+        node = ("192.168.31.1", self.MS_planetlab.um.ipv4dotted)
+        self.p_src_fednode = Parameter(name = "SourceAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
+        node = ("192.168.31.9", self.MS_planetlab.um.ipv4dotted)
+        self.p_dst_fednode = Parameter(name = "DestinationAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
+
+        self.substrate = self.MS_planetlab.ontology.ns('task')['Substrate']
+        self.slice = self.MS_planetlab.ontology.ns('task')['Slice']
+
+        self.feat_task = {  # feature name -> ([expected tasks for substrate domain], [expected tasks for slice domain])
+            'OnewayDelay': (['SONoMAChirp'], []),
+            'RoundtripDelay': (['SONoMAPing'], ['sshpingSlice']),
+            'AvailableBandwidth': ([], ['sshabSlice']),
+            'AvailableMemory': (['sshMeminfo'], ['sshMeminfoSlice']),
+            'FreeMemory': (['sshMeminfo'], ['sshMeminfoSlice']),
+            'MemoryUtilization': (['sshMeminfo'], ['sshMeminfoSlice']),
+            'Uptime': (['sshuptime'], ['sshuptimeSlice']),
+            'CPULoad': (['sshcpuload'], ['sshcpuloadSlice']),
+            'CPUCores': (['sshcpuload'], ['sshcpuloadSlice']),
+            'CPUUtilization': (['sshcpuload'], ['sshcpuloadSlice']),
+            'FreeDiskSpace': (['sshdiskinfo'], ['sshdiskinfoSlice']),
+            'UsedDiskSpace': (['sshdiskinfo'], ['sshdiskinfoSlice'])
+                          }
+
+        dim_nameofsomething = self.MS_planetlab.dm['NameOfSomething']
+        self.slicename = Parameter(name = "SliceName", valuetype = str,
+                                   unitmanager = self.MS_planetlab.um, dimension = dim_nameofsomething,
+                                   default = ('novi_novi', self.MS_planetlab.um.unitless))
+        dim_countable = self.MS_planetlab.dm['Countable']
+        self.count = Parameter(name = 'Count', valuetype = int,
+                               unitmanager = self.MS_planetlab.um, dimension = dim_countable,
+                               default = (5, self.MS_planetlab.um.piece))
+
+    def test_managers(self):  # checks the counts of inferred prefixes, units and dimensions in the unit model
+        expect = 14
+        infer = len(self.MS_planetlab.pm)
+        self.assertEqual(infer, expect, "Prefix: got %d expect %d" % (infer, expect))
+
+        expect = 10
+        infer = [ s for _, s in self.MS_planetlab.unitmodel.inferBaseUnits() ]
+        self.assertEqual(expect, len(infer), "BaseUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 1
+        infer = [ d for _, d, _ in self.MS_planetlab.unitmodel.inferProductUnits() ]
+        self.assertEqual(expect, len(infer), "ProductUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 1
+        infer = [ d for _, d, _, _ in self.MS_planetlab.unitmodel.inferPowerUnits() ]
+        self.assertEqual(expect, len(infer), "PowerUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 12
+        infer = [ d for _, d, _, _, _ in self.MS_planetlab.unitmodel.inferLinearTransformedUnits() ]
+        self.assertEqual(expect, len(infer), "LinearTransformedUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 2
+        infer = [ d for _, d, _, _, _ in self.MS_planetlab.unitmodel.inferRegexpTransformedUnits() ]
+        self.assertEqual(expect, len(infer), "RegexpTransformedUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 8
+        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferBaseDimensions() ]
+        self.assertEqual(expect, len(infer), "BaseDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 1
+        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferDifferenceDimensions() ]
+        self.assertEqual(expect, len(infer), "DifferenceDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 1
+        infer = [ d for d, _, _, _ in self.MS_planetlab.unitmodel.inferPowerDimensions() ]
+        self.assertEqual(expect, len(infer), "PowerDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 1
+        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferProductDimensions() ]
+        self.assertEqual(expect, len(infer), "ProductDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 4
+        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferRatioDimensions() ]
+        self.assertEqual(expect, len(infer), "RatioDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+
+        NS = self.MS_planetlab.ontology.ns('unit')  # expected prefix counts per base unit URI
+        for expect, uri in [(4, NS['second']), (7, NS['Byte']), (3, NS['bit']), (1, NS['unixtimestamp'])]:
+            infer = [s for s in self.MS_planetlab.unitmodel.inferPossiblePrefixesOf(uri)]
+            self.assertEqual(expect, len(infer), "inferPossiblePrefixesOf: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+
+
+    def test_IM_domainsfeatures(self):  # the task model must expose exactly the Slice and Substrate domains
+        expect = set(['Slice', 'Substrate'])
+        infer = set([ self.MS_planetlab.ontology._tail(x) for x in self.MS_planetlab.taskmodel.inferDomains() ])
+        self.assertEqual(expect, infer, "inferDomains: expect %d, got %d\n%s" % (len(expect), len(infer), str(infer)))
+
+        expect = len(self.feat_task) # 19 features exist in the model, but some of them have no name — TODO confirm count
+        infer = [ x for x in self.MS_planetlab.taskmodel.inferFeatures()]
+        self.assertEqual(expect, len(infer), "inferFeatures: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+    def test_IM_task(self):  # cross-checks inferred task metadata against the hand-written sonomashortping example
+        for feat, (t_subst, t_slice) in self.feat_task.iteritems():
+            feature = self.MS_planetlab.ontology.ns('feature')[feat]
+            infer_t_subst = [ name for _, name in self.MS_planetlab.taskmodel.inferTasks(self.substrate, feature)]
+            infer_t_slice = [ name for _, name in self.MS_planetlab.taskmodel.inferTasks(self.slice, feature)]
+            self.assertEqual(infer_t_subst, t_subst, "feature: %s searchtask (substrate): expect %s, got %s" % (feat, t_subst, infer_t_subst))
+            self.assertEqual(infer_t_slice, t_slice, "feature: %s searchtask (slice): expect %s, got %s" % (feat, t_slice, infer_t_slice))
+
+        task = self.MS_planetlab.ontology.ns('conf')['T_SONoMAPing']
+        infer = self.MS_planetlab.taskmodel.inferCredentialOf(task)
+        expect = set(sonomashortping.authtype)
+        self.assertEqual(infer, expect, "credentials differ expect: %s got: %s" % (expect, infer))
+
+        infer = self.MS_planetlab.taskmodel.inferDriverOf(task)
+        expect = sonomashortping.driver
+        self.assertEqual(infer, expect, "drivers differ expect: %s got: %s" % (expect, infer))
+
+        infer = self.MS_planetlab.taskmodel.inferHookparametersOf(task)
+        expect = sonomashortping.kwargs
+        self.assertEqual(infer, expect, "hook parameters differ expect: %s got: %s" % (expect, infer))
+
+        H = self.MS_planetlab.taskmodel.inferHookdefinitionsOf(task)
+        for k, h in H.iteritems():  # compare hook bodies modulo surrounding whitespace
+            exp = sonomashortping.hooks[k].strip()
+            h = h.strip()
+            self.assertEqual(h, exp, "%s hook differs\nexpect:\n%s\ngot:\n%s" % (k, exp, h))
+
+        #TODO: compare the features themselves as well, not only the (name, unit, dimension) triples
+        infer = [ (c.name, str(c._unit), str(c._dimension)) for c in self.MS_planetlab.taskmodel.inferDataheaderOf(task) ]
+        expect = [ (c.name, str(c._unit), str(c._dimension)) for c in sonomashortping.dataheaderdeclaration ]
+        self.assertEqual(infer, expect, "output header declarations differ expect:\n%s\ngot:\n%s" % (expect, infer))
+
+        infer = self.MS_planetlab.taskmodel.inferParametersOf(task)
+        expect = sonomashortping.parameters
+        n_inf, n_exp = set(infer.parameter_names()), set(expect.parameter_names())
+        self.assertEqual(n_inf, n_exp, "runtime parameters differ expect: %s got: %s" %(n_exp, n_inf))
+        for k, p in expect.parameter.iteritems():
+            inf_v = infer.parameter[k].value
+            exp_v = p.value
+            if exp_v is None:
+                self.assertFalse(inf_v, "Expected uninitialized value, got %s" % inf_v)
+            else:  # compare (value, unit) with the unit stringified
+                inf_v = (inf_v[0], str(inf_v[1]))
+                exp_v = (exp_v[0], str(exp_v[1]))
+                self.assertEqual(inf_v, exp_v, "Parameter value differ %s expect:\n%s\ngot:\n%s" % (k, exp_v, inf_v))
+
+        feature = self.MS_planetlab.ontology.ns('feature')['RoundtripDelay']
+        expect = RoundTripDelay.p_obligatory
+        infer = self.MS_planetlab.taskmodel.inferObligatoryParametersOf(feature)
+        self.assertEqual(len(expect), len(infer), "obligatory parameters for %s differ expect: %s got: %s" % (feature, expect.parameter_names(), infer.parameter_names()))
+        for k, p in expect.parameter.iteritems():
+            inf_v = infer.parameter[k].value
+            exp_v = p.value
+            inf_v = (inf_v[0], str(inf_v[1]))
+            exp_v = (exp_v[0], str(exp_v[1]))
+            self.assertEqual(inf_v, exp_v, "Parameter value differ %s expect:\n%s\ngot:\n%s" % (k, exp_v, inf_v))
+
+    def test_taskBYuri(self):  # runs real measurements over ssh; needs network access and valid credentials
+        cases = {
+            'T_SSHPingSlice': [self.p_src_eltenode, self.slicename, self.count, self.p_dst_ntuanode],
+            'T_SSHMemInfo': [self.p_src_eltenode],
+            'T_SSHMemInfoSlice': [self.p_src_eltenode, self.slicename],
+            'T_SSHCPULoad': [self.p_src_eltenode],
+            'T_SSHCPULoadSlice': [self.p_src_eltenode, self.slicename],
+            'T_SSHUptime': [self.p_src_eltenode],
+            'T_SSHUptimeSlice': [self.p_src_eltenode, self.slicename],
+#            'T_SONoMAPing': [self.p_src_eltenode, self.p_dst_ntuanode],
+#            'T_hadesaggregate': [self.p_src_fednode, self.p_dst_fednode],
+                 }
+        for l,p in cases.iteritems():
+            task_uri = self.MS_planetlab.ontology.ns('conf')[l]
+            _, task = self.MS_planetlab.newTask(task = task_uri,
+                                                cred = ple_credentials,
+                                                resource = None,
+                                                parameters = ParameterList(p))
+            task.enable()
+            task.dataAdded.wait( 15 )  # give the measurement up to 15 s to produce data
+            self.assertGreater(len(task.data), 0, "measurement %s yielded empty result" % l)
+#            print task.data._rawrecords # debug aid: dump raw measurement records
+
+
+    def test_owlexamples(self):  # parses the example query owl and drives a full measurement + aggregation round
+        doc = "../monitoringmodel/monitoringQuery_example.owl" #% self.MS_planetlab.ontology.baseurl — NOTE(review): 'doc' is unused since the pkgutil loading below; candidate for removal
+        # Jython workaround: load the owl file through pkgutil instead of a direct file URL
+        im = monitoringmodel.im.im()
+        path = im.path
+        loader = pkgutil.get_loader("monitoringmodel.im")
+        g = Graph()
+        g.parse(source = StringIO.StringIO(loader.get_data(os.path.join(path, "monitoringQuery_example.owl"))))
+        print str(g)
+        qdict = self.MS_planetlab.QI.inferBundleQueries(qgraph = g)
+        self.assertTrue(len(qdict), "Got empty query")
+        for q in qdict:
+            domain = self.MS_planetlab.ontology.ns('task')['Substrate']
+            taskgen = self.MS_planetlab.taskmodel.inferTasks(domain, q.feature)
+            # shortcut: just take the first task the generator yields
+            for task_uri, _ in taskgen: break
+            _, task = self.MS_planetlab.newTask(task = task_uri,
+                                                cred = ple_credentials,
+                                                resource = q.resource,
+                                                parameters = q.paramlist)
+            del task.strategy # make sure STRAT_ONDEMAND
+            task.enable()
+            task.dataAdded.wait( 15 )
+            task.dataAdded.clear()
+            if q.samplechain:
+                flow = []
+                for skeleton, parlist in q.samplechain:
+                    flow.append((skeleton, parlist.formkeyvaldict()))
+                aid = self.MS_planetlab.am.newAggregator(task.data, CellRequestByName(name = 'Free Memory'), flow)
+                A = self.MS_planetlab.am[ aid ]
+                while True:  # keep measuring until the sampler has enough data to aggregate
+                    try:
+                        s, a = A.data._rawrecords[0]
+                        self.assertEqual(s, len(task.data), "inconsistency in length len(data)=%d, max of %d samples?" % (len(task.data), s))
+                        R = DataReader(datasource = task.data)
+                        R.extract(cellrequest = [CellRequestByName(name = 'Free Memory')])
+                        expect = max( [ float(x) for x, in R ] )
+                        self.assertEqual(expect, a, "inconsistency in aggregare %f <> %f" % (expect, a)) # NOTE(review): typo "aggregare" -> "aggregate" in message
+                        break
+                    except SamplerError:
+                        print "MEASURE SOME MORE ..."
+                        task.disable()
+                        task.enable()
+                        task.dataAdded.wait( 15 )
+                        task.dataAdded.clear()
+            self.assertGreater(len(task.data), 0, "measurement yielded empty result")
+
+
+
+if __name__ == "__main__":  # run the whole suite with the unittest2 runner
+    #import sys;sys.argv = ['', 'Test.test_IM_domainsfeatures']  # uncomment to run a single test case
+    unittest2.main()
diff --git a/Monitoring/src/main/python/Semantics/test.py.old b/Monitoring/src/main/python/Semantics/test.py.old
new file mode 100644
index 0000000..2e31cb9
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/test.py.old
@@ -0,0 +1,250 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+import unittest
+from rdflib import Graph
+from Example.Metrics import RoundTripDelay
+from Example.Tools import sonomashortping
+from DataProcessing.Parameter import ParameterList, Parameter
+from Example.credentials import ple_credentials
+from Example.Platforms import FRAMEWORK
+from DataProcessing.DataHeaderCell import CellRequestByName
+from DataProcessing.DataError import SamplerError
+from DataProcessing.DataReader import DataReader
+
+
+class Test(unittest.TestCase):  # archival pre-Jython copy (.old) of the Semantics test suite; logic mirrors test.py
+
+    def setUp(self):  # NOTE(review): depends on live FRAMEWORK services; a failure here cascades into every test case
+        self.MS_planetlab = FRAMEWORK.getService('PlanetLab')
+        self.MS_federica = FRAMEWORK.getService('FEDERICA')
+
+        dim_ipaddress = self.MS_planetlab.dm['IPAddress']  # raises DimensionError when the model lacks IPAddress
+        node = ("157.181.175.243", self.MS_planetlab.um.ipv4dotted)
+        self.p_src_eltenode = Parameter(name = "SourceAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
+        node = ("147.102.22.66", self.MS_planetlab.um.ipv4dotted)
+        self.p_dst_ntuanode = Parameter(name = "DestinationAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
+        node = ("192.168.31.1", self.MS_planetlab.um.ipv4dotted)
+        self.p_src_fednode = Parameter(name = "SourceAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
+        node = ("192.168.31.9", self.MS_planetlab.um.ipv4dotted)
+        self.p_dst_fednode = Parameter(name = "DestinationAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
+
+        self.substrate = self.MS_planetlab.ontology.ns('task')['Substrate']
+        self.slice = self.MS_planetlab.ontology.ns('task')['Slice']
+
+        self.feat_task = {  # feature name -> ([expected tasks for substrate domain], [expected tasks for slice domain])
+            'OnewayDelay': (['SONoMAChirp'], []),
+            'RoundtripDelay': (['SONoMAPing'], ['sshpingSlice']),
+            'AvailableBandwidth': ([], ['sshabSlice']),
+            'AvailableMemory': (['sshMeminfo'], ['sshMeminfoSlice']),
+            'FreeMemory': (['sshMeminfo'], ['sshMeminfoSlice']),
+            'MemoryUtilization': (['sshMeminfo'], ['sshMeminfoSlice']),
+            'Uptime': (['sshuptime'], ['sshuptimeSlice']),
+            'CPULoad': (['sshcpuload'], ['sshcpuloadSlice']),
+            'CPUCores': (['sshcpuload'], ['sshcpuloadSlice']),
+            'CPUUtilization': (['sshcpuload'], ['sshcpuloadSlice']),
+            'FreeDiskSpace': (['sshdiskinfo'], ['sshdiskinfoSlice']),
+            'UsedDiskSpace': (['sshdiskinfo'], ['sshdiskinfoSlice'])
+                          }
+
+        dim_nameofsomething = self.MS_planetlab.dm['NameOfSomething']
+        self.slicename = Parameter(name = "SliceName", valuetype = str,
+                                   unitmanager = self.MS_planetlab.um, dimension = dim_nameofsomething,
+                                   default = ('novi_novi', self.MS_planetlab.um.unitless))
+        dim_countable = self.MS_planetlab.dm['Countable']
+        self.count = Parameter(name = 'Count', valuetype = int,
+                               unitmanager = self.MS_planetlab.um, dimension = dim_countable,
+                               default = (5, self.MS_planetlab.um.piece))
+
+    def test_managers(self):  # checks the counts of inferred prefixes, units and dimensions in the unit model
+        expect = 14
+        infer = len(self.MS_planetlab.pm)
+        self.assertEqual(infer, expect, "Prefix: got %d expect %d" % (infer, expect))
+
+        expect = 10
+        infer = [ s for _, s in self.MS_planetlab.unitmodel.inferBaseUnits() ]
+        self.assertEqual(expect, len(infer), "BaseUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 1
+        infer = [ d for _, d, _ in self.MS_planetlab.unitmodel.inferProductUnits() ]
+        self.assertEqual(expect, len(infer), "ProductUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 1
+        infer = [ d for _, d, _, _ in self.MS_planetlab.unitmodel.inferPowerUnits() ]
+        self.assertEqual(expect, len(infer), "PowerUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 12
+        infer = [ d for _, d, _, _, _ in self.MS_planetlab.unitmodel.inferLinearTransformedUnits() ]
+        self.assertEqual(expect, len(infer), "LinearTransformedUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 2
+        infer = [ d for _, d, _, _, _ in self.MS_planetlab.unitmodel.inferRegexpTransformedUnits() ]
+        self.assertEqual(expect, len(infer), "RegexpTransformedUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 8
+        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferBaseDimensions() ]
+        self.assertEqual(expect, len(infer), "BaseDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 1
+        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferDifferenceDimensions() ]
+        self.assertEqual(expect, len(infer), "DifferenceDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 1
+        infer = [ d for d, _, _, _ in self.MS_planetlab.unitmodel.inferPowerDimensions() ]
+        self.assertEqual(expect, len(infer), "PowerDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 1
+        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferProductDimensions() ]
+        self.assertEqual(expect, len(infer), "ProductDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+        expect = 4
+        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferRatioDimensions() ]
+        self.assertEqual(expect, len(infer), "RatioDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+
+        NS = self.MS_planetlab.ontology.ns('unit')  # expected prefix counts per base unit URI
+        for expect, uri in [(4, NS['second']), (7, NS['Byte']), (3, NS['bit']), (1, NS['unixtimestamp'])]:
+            infer = [s for s in self.MS_planetlab.unitmodel.inferPossiblePrefixesOf(uri)]
+            self.assertEqual(expect, len(infer), "inferPossiblePrefixesOf: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+
+
+    def test_IM_domainsfeatures(self):  # the task model must expose exactly the Slice and Substrate domains
+        expect = set(['Slice', 'Substrate'])
+        infer = set([ self.MS_planetlab.ontology._tail(x) for x in self.MS_planetlab.taskmodel.inferDomains() ])
+        self.assertEqual(expect, infer, "inferDomains: expect %d, got %d\n%s" % (len(expect), len(infer), str(infer)))
+
+        expect = len(self.feat_task) # 19 features exist in the model, but some of them have no name — TODO confirm count
+        infer = [ x for x in self.MS_planetlab.taskmodel.inferFeatures()]
+        self.assertEqual(expect, len(infer), "inferFeatures: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))
+
+    def test_IM_task(self):  # cross-checks inferred task metadata against the hand-written sonomashortping example
+        for feat, (t_subst, t_slice) in self.feat_task.iteritems():
+            feature = self.MS_planetlab.ontology.ns('feature')[feat]
+            infer_t_subst = [ name for _, name in self.MS_planetlab.taskmodel.inferTasks(self.substrate, feature)]
+            infer_t_slice = [ name for _, name in self.MS_planetlab.taskmodel.inferTasks(self.slice, feature)]
+            self.assertEqual(infer_t_subst, t_subst, "feature: %s searchtask (substrate): expect %s, got %s" % (feat, t_subst, infer_t_subst))
+            self.assertEqual(infer_t_slice, t_slice, "feature: %s searchtask (slice): expect %s, got %s" % (feat, t_slice, infer_t_slice))
+
+        task = self.MS_planetlab.ontology.ns('conf')['T_SONoMAPing']
+        infer = self.MS_planetlab.taskmodel.inferCredentialOf(task)
+        expect = set(sonomashortping.authtype)
+        self.assertEqual(infer, expect, "credentials differ expect: %s got: %s" % (expect, infer))
+
+        infer = self.MS_planetlab.taskmodel.inferDriverOf(task)
+        expect = sonomashortping.driver
+        self.assertEqual(infer, expect, "drivers differ expect: %s got: %s" % (expect, infer))
+
+        infer = self.MS_planetlab.taskmodel.inferHookparametersOf(task)
+        expect = sonomashortping.kwargs
+        self.assertEqual(infer, expect, "hook parameters differ expect: %s got: %s" % (expect, infer))
+
+        H = self.MS_planetlab.taskmodel.inferHookdefinitionsOf(task)
+        for k, h in H.iteritems():  # compare hook bodies modulo surrounding whitespace
+            exp = sonomashortping.hooks[k].strip()
+            h = h.strip()
+            self.assertEqual(h, exp, "%s hook differs\nexpect:\n%s\ngot:\n%s" % (k, exp, h))
+
+        #TODO: compare the features themselves as well, not only the (name, unit, dimension) triples
+        infer = [ (c.name, str(c._unit), str(c._dimension)) for c in self.MS_planetlab.taskmodel.inferDataheaderOf(task) ]
+        expect = [ (c.name, str(c._unit), str(c._dimension)) for c in sonomashortping.dataheaderdeclaration ]
+        self.assertEqual(infer, expect, "output header declarations differ expect:\n%s\ngot:\n%s" % (expect, infer))
+
+        infer = self.MS_planetlab.taskmodel.inferParametersOf(task)
+        expect = sonomashortping.parameters
+        n_inf, n_exp = set(infer.parameter_names()), set(expect.parameter_names())
+        self.assertEqual(n_inf, n_exp, "runtime parameters differ expect: %s got: %s" %(n_exp, n_inf))
+        for k, p in expect.parameter.iteritems():
+            inf_v = infer.parameter[k].value
+            exp_v = p.value
+            if exp_v is None:
+                self.assertFalse(inf_v, "Expected uninitialized value, got %s" % inf_v)
+            else:  # compare (value, unit) with the unit stringified
+                inf_v = (inf_v[0], str(inf_v[1]))
+                exp_v = (exp_v[0], str(exp_v[1]))
+                self.assertEqual(inf_v, exp_v, "Parameter value differ %s expect:\n%s\ngot:\n%s" % (k, exp_v, inf_v))
+
+        feature = self.MS_planetlab.ontology.ns('feature')['RoundtripDelay']
+        expect = RoundTripDelay.p_obligatory
+        infer = self.MS_planetlab.taskmodel.inferObligatoryParametersOf(feature)
+        self.assertEqual(len(expect), len(infer), "obligatory parameters for %s differ expect: %s got: %s" % (feature, expect.parameter_names(), infer.parameter_names()))
+        for k, p in expect.parameter.iteritems():
+            inf_v = infer.parameter[k].value
+            exp_v = p.value
+            inf_v = (inf_v[0], str(inf_v[1]))
+            exp_v = (exp_v[0], str(exp_v[1]))
+            self.assertEqual(inf_v, exp_v, "Parameter value differ %s expect:\n%s\ngot:\n%s" % (k, exp_v, inf_v))
+
+    def test_taskBYuri(self):  # runs real measurements over ssh; needs network access and valid credentials
+        cases = {
+            'T_SSHPingSlice': [self.p_src_eltenode, self.slicename, self.count, self.p_dst_ntuanode],
+            'T_SSHMemInfo': [self.p_src_eltenode],
+            'T_SSHMemInfoSlice': [self.p_src_eltenode, self.slicename],
+            'T_SSHCPULoad': [self.p_src_eltenode],
+            'T_SSHCPULoadSlice': [self.p_src_eltenode, self.slicename],
+            'T_SSHUptime': [self.p_src_eltenode],
+            'T_SSHUptimeSlice': [self.p_src_eltenode, self.slicename],
+#            'T_SONoMAPing': [self.p_src_eltenode, self.p_dst_ntuanode],
+#            'T_hadesaggregate': [self.p_src_fednode, self.p_dst_fednode],
+                 }
+        for l,p in cases.iteritems():
+            task_uri = self.MS_planetlab.ontology.ns('conf')[l]
+            _, task = self.MS_planetlab.newTask(task = task_uri,
+                                                cred = ple_credentials,
+                                                resource = None,
+                                                parameters = ParameterList(p))
+            task.enable()
+            task.dataAdded.wait( 15 )  # give the measurement up to 15 s to produce data
+            self.assertGreater(len(task.data), 0, "measurement %s yielded empty result" % l)
+#            print task.data._rawrecords # debug aid: dump raw measurement records
+
+
+    def test_owlexamples(self):  # parses the example query owl from the ontology base URL and drives a measurement round
+        doc = "%s/monitoringQuery_example.owl" % self.MS_planetlab.ontology.baseurl
+        g = Graph()
+        g.parse(source = doc)
+        qdict = self.MS_planetlab.QI.getBundleQuery(qgraph = g)
+        self.assertTrue(len(qdict), "Got empty query")
+        for q in qdict:
+            domain = self.MS_planetlab.ontology.ns('task')['Substrate']
+            taskgen = self.MS_planetlab.taskmodel.inferTasks(domain, q.feature)
+            # shortcut: just take the first task the generator yields
+            for task_uri, _ in taskgen: break
+            _, task = self.MS_planetlab.newTask(task = task_uri,
+                                                cred = ple_credentials,
+                                                resource = q.resource,
+                                                parameters = q.paramlist)
+            del task.strategy # make sure STRAT_ONDEMAND
+            task.enable()
+            task.dataAdded.wait( 15 )
+            task.dataAdded.clear()
+            if q.samplechain:
+                flow = []
+                for skeleton, parlist in q.samplechain:
+                    flow.append((skeleton, parlist.formkeyvaldict()))
+                aid = self.MS_planetlab.am.newAggregator(task.data, CellRequestByName(name = 'Free Memory'), flow)
+                A = self.MS_planetlab.am[ aid ]
+                while True:  # keep measuring until the sampler has enough data to aggregate
+                    try:
+                        s, a = A.data._rawrecords[0]
+                        self.assertEqual(s, len(task.data), "inconsistency in length len(data)=%d, max of %d samples?" % (len(task.data), s))
+                        R = DataReader(datasource = task.data)
+                        R.extract(cellrequest = [CellRequestByName(name = 'Free Memory')])
+                        expect = max( [ float(x) for x, in R ] )
+                        self.assertEqual(expect, a, "inconsistency in aggregare %f <> %f" % (expect, a)) # NOTE(review): typo "aggregare" -> "aggregate" in message
+                        break
+                    except SamplerError:
+                        print "MEASURE SOME MORE ..."
+                        task.disable()
+                        task.enable()
+                        task.dataAdded.wait( 15 )
+                        task.dataAdded.clear()
+            self.assertGreater(len(task.data), 0, "measurement yielded empty result")
+
+
+
+if __name__ == "__main__":  # run the whole suite with the stdlib unittest runner
+    #import sys;sys.argv = ['', 'Test.test_IM_domainsfeatures']  # uncomment to run a single test case
+    unittest.main()